diff --git a/idaes/core/scaling/custom_scaler_base.py b/idaes/core/scaling/custom_scaler_base.py index 8e94cc7c5a..a6b311086d 100644 --- a/idaes/core/scaling/custom_scaler_base.py +++ b/idaes/core/scaling/custom_scaler_base.py @@ -16,7 +16,6 @@ Author: Andrew Lee """ from copy import copy -from enum import Enum from pyomo.environ import ComponentMap, units, value from pyomo.core.base.units_container import UnitsError @@ -26,6 +25,7 @@ from idaes.core.scaling.scaling_base import CONFIG, ScalerBase from idaes.core.scaling.util import get_scaling_factor, NominalValueExtractionVisitor import idaes.logger as idaeslog +from idaes.core.util.misc import StrEnum # Set up logger _log = idaeslog.getLogger(__name__) @@ -41,7 +41,7 @@ } -class ConstraintScalingScheme(str, Enum): +class ConstraintScalingScheme(StrEnum): """ Schemes available for calculating constraint scaling factors. diff --git a/idaes/core/util/misc.py b/idaes/core/util/misc.py index 5daa73676d..78185256cd 100644 --- a/idaes/core/util/misc.py +++ b/idaes/core/util/misc.py @@ -15,9 +15,12 @@ This module contains miscellaneous utility functions for use in IDAES models. """ from enum import Enum +import sys import pyomo.environ as pyo from pyomo.common.config import ConfigBlock +from pyomo.core.expr.visitor import _ToStringVisitor +from pyomo.core.base.expression import ExpressionData import idaes.logger as idaeslog @@ -177,3 +180,66 @@ class StrEnum(str, Enum): def __str__(self): return str(self.value) + + +class _ToExprStringVisitor(_ToStringVisitor): + """ + Derived version of the Pyomo _ToStringVisitor class + which checks for named Expressions in the expression tree + and prints their name instead of expanding the expression tree. + """ + + def visiting_potential_leaf(self, node): + # Check if node is a named Expression + if isinstance(node, ExpressionData): + # If it is, return the name of the Expression + return True, node.name + + # Otherwise, continue descending as normal + return super().visiting_potential_leaf(node) + + +def compact_expression_to_string(expr): + """Return a compact string representation of an expression. + + Unlike the normal Pyomo string representations, this function will + identify any Expressions that appear within the expression tree and + print the name of these rather than expanding the expression tree. + + Args: + expr: ExpressionBase. The root node of an expression tree. + + Returns: + A string representation for the expression. + """ + # Create and execute the visitor pattern + # + visitor = _ToExprStringVisitor(verbose=False, smap=None) + return visitor.dfs_postorder_stack(expr) + + +def print_compact_form(expr, stream=None): + """ + Writes a compact string representation of the given component or + expression to the specified stream. + + Unlike the normal Pyomo string representations, this function will + identify any Expressions that appear within the expression tree and + print the name of these rather than expanding the expression tree. + + Args: + expr: component or expression to print + stream: StringIO object to write to. Default is stdout. 
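A minimal usage sketch of the compact-form helpers introduced in idaes/core/util/misc.py above, mirroring the unit tests added later in this diff (model and component names are illustrative only):

    from io import StringIO
    from pyomo.environ import ConcreteModel, Constraint, Expression, Var
    from idaes.core.util.misc import compact_expression_to_string, print_compact_form

    m = ConcreteModel()
    m.v1 = Var()
    m.v2 = Var()
    m.v3 = Var()
    m.e1 = Expression(expr=m.v1 + m.v2)
    m.c1 = Constraint(expr=0 == m.v3 * m.e1)

    # The named Expression e1 is shown by name rather than being expanded
    assert compact_expression_to_string(m.c1.expr) == "v3*e1 == 0"

    # print_compact_form accepts a component or an expression plus an optional stream
    stream = StringIO()
    print_compact_form(m.c1, stream=stream)
    assert stream.getvalue() == "v3*e1 == 0"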
+ + Returns: + None + """ + if stream is None: + stream = sys.stdout + + if hasattr(expr, "expr"): + # We have a Constraint or Expression + # We want to print the expression, not the object name + expr = expr.expr + + stream.write(compact_expression_to_string(expr)) diff --git a/idaes/core/util/model_diagnostics.py b/idaes/core/util/model_diagnostics.py index c7f5335c26..f701677d47 100644 --- a/idaes/core/util/model_diagnostics.py +++ b/idaes/core/util/model_diagnostics.py @@ -20,10 +20,11 @@ from operator import itemgetter import sys from inspect import signature -from math import log, isclose, inf, isfinite +from math import log, log10, isclose, inf, isfinite import json from typing import List import logging +from itertools import combinations, chain import numpy as np from scipy.linalg import svd @@ -35,6 +36,7 @@ Integers, Block, check_optimal_termination, + ComponentMap, ConcreteModel, Constraint, Expression, @@ -58,6 +60,7 @@ from pyomo.core.base.block import BlockData from pyomo.core.base.var import VarData from pyomo.core.base.constraint import ConstraintData +from pyomo.core.base.expression import ExpressionData from pyomo.repn.standard_repn import ( # pylint: disable=no-name-in-module generate_standard_repn, ) @@ -67,6 +70,8 @@ ConfigValue, document_kwargs_from_configdict, PositiveInt, + NonNegativeFloat, + NonNegativeInt, ) from pyomo.util.check_units import identify_inconsistent_units from pyomo.contrib.incidence_analysis import IncidenceGraphInterface @@ -78,8 +83,12 @@ from pyomo.common.deprecation import deprecation_warning from pyomo.common.errors import PyomoException from pyomo.common.tempfiles import TempfileManager +from pyomo.core import expr as EXPR +from pyomo.common.numeric_types import native_types +from pyomo.core.base.units_container import _PyomoUnit from idaes.core.solvers.get_solver import get_solver +from idaes.core.util.misc import compact_expression_to_string from idaes.core.util.model_statistics import ( activated_blocks_set, deactivated_blocks_set, @@ -180,7 +189,6 @@ def svd_sparse(jacobian, number_singular_values): """ u, s, vT = svds(jacobian, k=number_singular_values, which="SM") - print(u, s, vT, number_singular_values) return u, s, vT.transpose() @@ -189,7 +197,7 @@ def svd_sparse(jacobian, number_singular_values): "variable_bounds_absolute_tolerance", ConfigValue( default=1e-4, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance for considering a variable to be close " "to its bounds.", ), @@ -198,7 +206,7 @@ def svd_sparse(jacobian, number_singular_values): "variable_bounds_relative_tolerance", ConfigValue( default=1e-4, - domain=float, + domain=NonNegativeFloat, description="Relative tolerance for considering a variable to be close " "to its bounds.", ), @@ -207,7 +215,7 @@ def svd_sparse(jacobian, number_singular_values): "variable_bounds_violation_tolerance", ConfigValue( default=0, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance for considering a variable to violate its bounds.", doc="Absolute tolerance for considering a variable to violate its bounds. 
" "Some solvers relax bounds on variables thus allowing a small violation to be " @@ -218,15 +226,47 @@ def svd_sparse(jacobian, number_singular_values): "constraint_residual_tolerance", ConfigValue( default=1e-5, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance to use when checking constraint residuals.", ), ) +CONFIG.declare( + "constraint_term_mismatch_tolerance", + ConfigValue( + default=1e6, + domain=NonNegativeFloat, + description="Magnitude difference to use when checking for mismatched additive terms in constraints.", + ), +) +CONFIG.declare( + "constraint_term_cancellation_tolerance", + ConfigValue( + default=1e-4, + domain=NonNegativeFloat, + description="Absolute tolerance to use when checking for canceling additive terms in constraints.", + ), +) +CONFIG.declare( + "max_canceling_terms", + ConfigValue( + default=5, + domain=NonNegativeInt, + description="Maximum number of terms to consider when looking for canceling combinations in expressions.", + ), +) +CONFIG.declare( + "constraint_term_zero_tolerance", + ConfigValue( + default=1e-10, + domain=NonNegativeFloat, + description="Absolute tolerance to use when determining if a constraint term is equal to zero.", + ), +) CONFIG.declare( "variable_large_value_tolerance", ConfigValue( default=1e4, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance for considering a value to be large.", ), ) @@ -234,7 +274,7 @@ def svd_sparse(jacobian, number_singular_values): "variable_small_value_tolerance", ConfigValue( default=1e-4, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance for considering a value to be small.", ), ) @@ -242,7 +282,7 @@ def svd_sparse(jacobian, number_singular_values): "variable_zero_value_tolerance", ConfigValue( default=1e-8, - domain=float, + domain=NonNegativeFloat, description="Absolute tolerance for considering a value to be near to zero.", ), ) @@ -250,7 +290,7 @@ def svd_sparse(jacobian, number_singular_values): "jacobian_large_value_caution", ConfigValue( default=1e4, - domain=float, + domain=NonNegativeFloat, description="Tolerance for raising a caution for large Jacobian values.", ), ) @@ -258,7 +298,7 @@ def svd_sparse(jacobian, number_singular_values): "jacobian_large_value_warning", ConfigValue( default=1e8, - domain=float, + domain=NonNegativeFloat, description="Tolerance for raising a warning for large Jacobian values.", ), ) @@ -266,7 +306,7 @@ def svd_sparse(jacobian, number_singular_values): "jacobian_small_value_caution", ConfigValue( default=1e-4, - domain=float, + domain=NonNegativeFloat, description="Tolerance for raising a caution for small Jacobian values.", ), ) @@ -274,7 +314,7 @@ def svd_sparse(jacobian, number_singular_values): "jacobian_small_value_warning", ConfigValue( default=1e-8, - domain=float, + domain=NonNegativeFloat, description="Tolerance for raising a warning for small Jacobian values.", ), ) @@ -290,7 +330,7 @@ def svd_sparse(jacobian, number_singular_values): "parallel_component_tolerance", ConfigValue( default=1e-8, - domain=float, + domain=NonNegativeFloat, description="Tolerance for identifying near-parallel Jacobian rows/columns", ), ) @@ -298,7 +338,7 @@ def svd_sparse(jacobian, number_singular_values): "absolute_feasibility_tolerance", ConfigValue( default=1e-6, - domain=float, + domain=NonNegativeFloat, description="Feasibility tolerance for identifying infeasible constraints and bounds", ), ) @@ -336,7 +376,7 @@ def svd_sparse(jacobian, number_singular_values): "singular_value_tolerance", 
ConfigValue( default=1e-6, - domain=float, + domain=NonNegativeFloat, description="Tolerance for defining a small singular value", ), ) @@ -344,7 +384,7 @@ def svd_sparse(jacobian, number_singular_values): "size_cutoff_in_singular_vector", ConfigValue( default=0.1, - domain=float, + domain=NonNegativeFloat, description="Size below which to ignore constraints and variables in " "the singular vector", ), @@ -371,7 +411,7 @@ def svd_sparse(jacobian, number_singular_values): "M", # TODO: Need better name ConfigValue( default=1e5, - domain=float, + domain=NonNegativeFloat, description="Maximum value for nu in MILP models.", ), ) @@ -379,7 +419,7 @@ def svd_sparse(jacobian, number_singular_values): "m_small", # TODO: Need better name ConfigValue( default=1e-5, - domain=float, + domain=NonNegativeFloat, description="Smallest value for nu to be considered non-zero in MILP models.", ), ) @@ -387,7 +427,7 @@ def svd_sparse(jacobian, number_singular_values): "trivial_constraint_tolerance", ConfigValue( default=1e-6, - domain=float, + domain=NonNegativeFloat, description="Tolerance for identifying non-zero rows in Jacobian.", ), ) @@ -1052,6 +1092,239 @@ def display_near_parallel_variables(self, stream=None): # TODO: Block triangularization analysis # Number and size of blocks, polynomial degree of 1x1 blocks, simple pivot check of moderate sized sub-blocks? + def _collect_constraint_mismatches(self, descend_into=True): + """ + Call ConstraintTermAnalysisVisitor on all Constraints in model to walk expression + tree and collect any instances of sum expressions with mismatched terms or potential + cancellations. + + Args: + descend_into: whether to descend_into child_blocks + + Returns: + List of strings summarising constraints with mismatched terms + List of strings summarising constraints with cancellations + List of strings with constraint names where constraint contains no free variables + """ + walker = ConstraintTermAnalysisVisitor( + term_mismatch_tolerance=self.config.constraint_term_mismatch_tolerance, + term_cancellation_tolerance=self.config.constraint_term_cancellation_tolerance, + term_zero_tolerance=self.config.constraint_term_zero_tolerance, + # for the high level summary, we only need to know if there are any cancellations, + # but don't need to find all of them + max_cancellations_per_node=1, + max_canceling_terms=self.config.max_canceling_terms, + ) + + mismatch = [] + cancellation = [] + constant = [] + + for c in self._model.component_data_objects( + Constraint, descend_into=descend_into + ): + _, expr_mismatch, expr_cancellation, expr_constant, _ = ( + walker.walk_expression(c.expr) + ) + + if len(expr_mismatch) > 0: + mismatch.append(f"{c.name}: {len(expr_mismatch)} mismatched term(s)") + + if len(expr_cancellation) > 0: + cancellation.append( + f"{c.name}: {len(expr_cancellation)} potential canceling term(s)" + ) + + if expr_constant: + constant.append(c.name) + + return mismatch, cancellation, constant + + def display_constraints_with_mismatched_terms(self, stream=None): + """ + Display constraints in model which contain additive terms of significantly different magnitude. 
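A hedged usage sketch of this method together with the new term-analysis tolerances declared above; it assumes the tolerance keywords are passed through the DiagnosticsToolbox constructor in the usual CONFIG fashion, and the values shown are illustrative (they match the defaults):

    from pyomo.environ import ConcreteModel, Constraint, Var
    from idaes.core.util.model_diagnostics import DiagnosticsToolbox

    m = ConcreteModel()
    m.v3 = Var(initialize=10)
    m.v4 = Var(initialize=10)
    m.v5 = Var(initialize=1e-6)
    # v5 is several orders of magnitude smaller than the other additive terms
    m.c2 = Constraint(expr=m.v3 == m.v4 + m.v5)

    dt = DiagnosticsToolbox(
        model=m,
        constraint_term_mismatch_tolerance=1e6,
        constraint_term_cancellation_tolerance=1e-4,
    )
    dt.display_constraints_with_mismatched_terms()  # reports c2 to stdout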
+ + Args: + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + mismatch, _, _ = self._collect_constraint_mismatches() + + # Write the output + _write_report_section( + stream=stream, + lines_list=mismatch, + title="The following constraints have mismatched terms:", + end_line="Call display_problematic_constraint_terms(constraint) for more information.", + header="=", + footer="=", + ) + + def display_constraints_with_canceling_terms(self, stream=None): + """ + Display constraints in model which contain additive terms which potentially cancel each other. + + Note that this method looks a the current state of the constraint, and will flag terms as + cancelling if you have a form A == B + C where C is significantly smaller than A and B. In some + cases this behavior is intended, as C is a correction term which happens to be very + small at the current state. However, you should review these constraints to determine whether + the correction term is important for the situation you are modeling and consider removing the + term if it will never be significant. + + Args: + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + _, canceling, _ = self._collect_constraint_mismatches() + + # Write the output + _write_report_section( + stream=stream, + lines_list=canceling, + title="The following constraints have canceling terms:", + end_line="Call display_problematic_constraint_terms(constraint) for more information.", + header="=", + footer="=", + ) + + def display_problematic_constraint_terms( + self, constraint, max_cancellations: int = 5, stream=None + ): + """ + Display a summary of potentially problematic terms in a given constraint. + + Note that this method looks a the current state of the constraint, and will flag terms as + cancelling if you have a form A == B + C where C is significantly smaller than A and B. In some + cases this behavior is intended, as C is a correction term which happens to be very + small at the current state. However, you should review these constraints to determine whether + the correction term is important for the situation you are modeling and consider removing the + term if it will never be significant. + + Args: + constraint: ConstraintData object to be examined + max_cancellations: maximum number of cancellations per node before termination. + None = find all cancellations. + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + # Check that constraint is of correct type to give useful error message + if not isinstance(constraint, ConstraintData): + # Wrong type, check if it is an indexed constraint + if isinstance(constraint, Constraint): + raise TypeError( + f"{constraint.name} is an IndexedConstraint. Please provide " + f"an individual element of {constraint.name} (ConstraintData) " + "to be examined for problematic terms." + ) + else: + # Not a constraint + raise TypeError( + f"{constraint.name} is not an instance of a Pyomo Constraint." 
+ ) + + walker = ConstraintTermAnalysisVisitor( + term_mismatch_tolerance=self.config.constraint_term_mismatch_tolerance, + term_cancellation_tolerance=self.config.constraint_term_cancellation_tolerance, + term_zero_tolerance=self.config.constraint_term_zero_tolerance, + max_cancellations_per_node=max_cancellations, + max_canceling_terms=self.config.max_canceling_terms, + ) + + _, expr_mismatch, expr_cancellation, _, tripped = walker.walk_expression( + constraint.expr + ) + + # Combine mismatches and cancellations into a summary list + issues = [] + for k, v in expr_mismatch.items(): + tag = " " + if isinstance(k, ExpressionData): + # For clarity, if the problem node is a named Expression, note this in output + tag = " Expression " + # Want to show full expression node plus largest and smallest magnitudes + issues.append( + f"Mismatch in{tag}{compact_expression_to_string(k)} (Max {v[0]}, Min {v[1]})" + ) + # Collect summary of cancelling terms for user + # Walker gives us back a list of nodes with cancelling terms + for k, v in expr_cancellation.items(): + # Each node may have multiple cancellations, these are given as separate tuples + tag = " " + if isinstance(k, ExpressionData): + # For clarity, if the problem node is a named Expression, note this in output + tag = " Expression " + for i in v: + # For each cancellation, iterate over contributing terms and write a summary + terms = "" + for j in i: + if len(terms) > 0: + terms += ", " + # +1 to switch from 0-index to 1-index + terms += f"{j[0]+1} ({j[1]})" + issues.append( + f"Cancellation in{tag}{compact_expression_to_string(k)}. Terms {terms}" + ) + + if tripped: + end_line = ( + f"Number of canceling terms per node limited to {max_cancellations}." + ) + else: + end_line = None + + # Write the output + _write_report_section( + stream=stream, + lines_list=issues, + title=f"The following terms in {constraint.name} are potentially problematic:", + end_line=end_line, + header="=", + footer="=", + ) + + def display_constraints_with_no_free_variables(self, stream=None): + """ + Display constraints in model which contain no free variables. 
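A short sketch of the case this method flags, mirroring the corresponding unit test added below (a constraint whose variables are all fixed):

    from pyomo.environ import ConcreteModel, Constraint, Var
    from idaes.core.util.model_diagnostics import DiagnosticsToolbox

    m = ConcreteModel()
    m.v1 = Var(initialize=2)
    m.v2 = Var(initialize=3)
    m.c1 = Constraint(expr=m.v1 == m.v2)
    m.v1.fix()
    m.v2.fix()  # c1 now contains no free variables

    dt = DiagnosticsToolbox(model=m)
    dt.display_constraints_with_no_free_variables()  # lists c1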
+ + Args: + stream: I/O object to write report to (default = stdout) + + Returns: + None + + """ + if stream is None: + stream = sys.stdout + + _, _, constant = self._collect_constraint_mismatches() + + # Write the output + _write_report_section( + stream=stream, + lines_list=constant, + title="The following constraints have no free variables:", + header="=", + footer="=", + ) + def _collect_structural_warnings( self, ignore_evaluation_errors=False, ignore_unit_consistency=False ): @@ -1335,6 +1608,28 @@ def _collect_numerical_cautions(self, jac=None, nlp=None): cstring = "Variable" cautions.append(f"Caution: {len(none_value)} {cstring} with None value") + # Constraints with possible ill-posed terms + mismatch, cancellation, constant = self._collect_constraint_mismatches() + if len(mismatch) > 0: + cstring = "Constraints" + if len(mismatch) == 1: + cstring = "Constraint" + cautions.append(f"Caution: {len(mismatch)} {cstring} with mismatched terms") + if len(cancellation) > 0: + cstring = "Constraints" + if len(cancellation) == 1: + cstring = "Constraint" + cautions.append( + f"Caution: {len(cancellation)} {cstring} with potential cancellation of terms" + ) + if len(constant) > 0: + cstring = "Constraints" + if len(constant) == 1: + cstring = "Constraint" + cautions.append( + f"Caution: {len(constant)} {cstring} with no free variables" + ) + # Extreme Jacobian rows and columns jac_col = extreme_jacobian_columns( jac=jac, @@ -2777,8 +3072,6 @@ def eq_degenerate(m_dh, v): # This variable does not appear in any constraint return Constraint.Skip - m_dh.pprint() - m_dh.degenerate = Constraint(m_dh.V, rule=eq_degenerate) # When y_pos = 1, nu >= m_small @@ -4122,3 +4415,457 @@ def _collect_model_statistics(model): ) return stats + + +class ConstraintTermAnalysisVisitor(EXPR.StreamBasedExpressionVisitor): + """ + Expression walker for checking Constraints for problematic terms. + + This walker will walk the expression and look for summation terms + with mismatched magnitudes or potential cancellations. + + Args: + term_mismatch_tolerance: tolerance to use when determining mismatched + terms + term_cancellation_tolerance: tolerance to use when identifying + possible cancellation of terms + term_zero_tolerance: tolerance for considering terms equal to zero + max_canceling_terms: maximum number of terms to consider when looking + for canceling combinations (None = consider all possible combinations) + max_cancellations_per_node: maximum number of cancellations to collect + for a single node. 
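Since the docstring above describes the analysis but not the call pattern, here is a minimal usage sketch based on the unit tests added in this diff; walk_expression returns term values, mismatched terms, canceling terms, a constant flag, and a flag indicating the cancellation limit was hit:

    from pyomo.environ import ConcreteModel, Var
    from idaes.core.util.model_diagnostics import ConstraintTermAnalysisVisitor

    m = ConcreteModel()
    m.v1 = Var(initialize=1e-7)
    m.v2 = Var(initialize=1e7)
    expr = m.v1 == m.v2

    visitor = ConstraintTermAnalysisVisitor(term_mismatch_tolerance=1e6)
    vals, mismatched, canceling, constant, tripped = visitor.walk_expression(expr)
    # vals == [-1e-7, 1e7]; the equality is flagged as having mismatched terms
    assert expr in mismatched and not constant and not tripped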
Collection will terminate when this many cancellations + have been identified (None = collect all cancellations) + + Returns: + list of values for top-level summation terms + list of terms with mismatched magnitudes + list of terms with potential cancellations + bool indicating whether expression is a constant + """ + + def __init__( + self, + term_mismatch_tolerance: float = 1e6, + term_cancellation_tolerance: float = 1e-4, + term_zero_tolerance: float = 1e-10, + max_canceling_terms: int = 4, + max_cancellations_per_node: int = 5, + ): + super().__init__() + + # Tolerance attributes + self._log_mm_tol = log10(term_mismatch_tolerance) + self._sum_tol = term_cancellation_tolerance + self._zero_tolerance = term_zero_tolerance + self._max_canceling_terms = max_canceling_terms + self._max_cancellations_per_node = max_cancellations_per_node + + # Placeholders for collecting results + self.canceling_terms = ComponentMap() + self.mismatched_terms = ComponentMap() + + # Flag for if cancellation collection hit limit + self._cancellation_tripped = False + + def _get_value_for_sum_subexpression(self, child_data): + # child_data is a tuple, with the 0-th element being the node values + if isinstance(child_data[0][0], str): + # Values may be a list containing a string in some cases (e.g. external functions) + # Return the string in this case + return child_data[0][0] + return sum(i for i in child_data[0]) + + def _generate_combinations(self, inputs, equality=False): + # We want to test all combinations of terms for cancellation + + # The number of combinations we check depends on whether this is an (in)equality + # expression or a sum node deeper in the expression tree. + # We expect (in)equalities to generally sum to 0 (0 == expr) thus we want to + # check if any subset of the sum terms sum to zero (i.e. are any terms unnecessary). + # For other sum nodes, we need to check for any combination of terms. 
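        # Worked example (hypothetical inputs): for [10, -10, 1] with no limit on
        # canceling terms and equality=False, this generator yields, in order,
        #   ((0, 10), (1, -10)), ((0, 10), (2, 1)), ((1, -10), (2, 1)),
        #   ((0, 10), (1, -10), (2, 1))
        # i.e. every (index, value) combination of two or more input terms.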
+ + # Maximum number of terms to include in combinations + max_comb = len(inputs) + if equality: + # Subtract 1 if (in)equality node + max_comb += -1 + # We also have a limit on the maximum number of terms to consider + if self._max_canceling_terms is not None: + max_comb = min(max_comb, self._max_canceling_terms) + + # Single terms cannot cancel, thus we want all combinations of length 2 to max terms + # Note the need for +1 due to way range works + combo_range = range(2, max_comb + 1) + + # Collect combinations of terms in an iterator + # In order to identify the terms in each cancellation, we will pair each value + # with its index in the input set as a tuple using enumerate + for i in chain.from_iterable( + combinations(enumerate(inputs), r) for r in combo_range + ): + # Yield each combination in the set + yield i + + def _check_sum_cancellations(self, values_list, equality=False): + # First, strip any terms with value 0 as they do not contribute to cancellation + # We do this to keep the number of possible combinations as small as possible + stripped = [i for i in values_list if abs(i) >= self._zero_tolerance] + + cancellations = [] + + if len(stripped) == 0: + # If the stripped list is empty, there are no non-zero terms + # We can stop here and return False as there are no possible cancellations + return cancellations + + # For scaling of tolerance, we want to compare to the largest absolute value of + # the input values + max_value = abs(max(stripped, key=abs)) + + for i in self._generate_combinations(stripped, equality): + # Generate combinations will return a list of combinations of input terms + # each element of the list will be a 2-tuple representing a term in the + # input list with the first value being the position in the input set and + # the second being the value + + # Check if the sum of values in the combination are below tolerance + if abs(sum(j[1] for j in i)) <= self._sum_tol * max_value: + # If so, record combination as canceling + cancellations.append(i) + + # Terminate loop if we have reached the max cancellations to collect + if len(cancellations) >= self._max_cancellations_per_node: + self._cancellation_tripped = True + break + + return cancellations + + def _perform_checks(self, node, child_data): + # Perform checks for problematic expressions + # First, need to check to see if any child data is a list + # This indicates a sum expression + const = True + + for d in child_data: + # We will check for canceling terms here, rather than the sum itself, to handle special cases + # We want to look for cases where a sum term results in a value much smaller + # than the terms of the sum + # Each element of child_data is a tuple where the 0-th element is the node values + if isinstance(d[0][0], str): + # Values may be a list containing a string in some cases (e.g. 
external functions) + # Skip if this is the case + pass + else: + for c in self._check_sum_cancellations(d[0]): + if node in self.canceling_terms.keys(): + self.canceling_terms[node].append(c) + else: + self.canceling_terms[node] = [c] + + # Expression is not constant if any child is not constant + # Element 1 is a bool indicating if the child node is constant + if not d[1]: + const = False + + # Return any problematic terms found + return const + + def _check_base_type(self, node): + if isinstance(node, VarData): + const = node.fixed + else: + const = True + return [value(node)], const, False + + def _check_equality_expression(self, node, child_data): + # (In)equality expressions are a special case of sum expressions + # child_data has two elements; 0 is the LHS of the (in)equality and 1 is the RHS + # Each of these then contains three elements; 0 is a list of values for the sum components, + # 1 is a bool indicating if the child node term is constant, and 2 is a bool indicating if + # the child ode is a sum expression. + + # First, to check for cancellation we need to negate one side of the expression + # mdata will contain the new set of child_data with negated values + mdata = [] + # child_data[0][0] has the values of the LHS of the (in)equality, and we will negate these + vals = [] + for j in child_data[0][0]: + vals.append(-j) + # Append the negated values along with whether the node is constant to mdata + mdata.append((vals, child_data[0][1])) + # child_data[1] is the RHS, so we take this as it appears and append to mdata + mdata.append(child_data[1]) + + # Next, call the method to check the sum expression + vals, const, _ = self._check_sum_expression(node, mdata) + + # Next, we need to check for canceling terms. + # child_data[x][2] indicates if a node is a sum expression or not + if not child_data[0][2] and not child_data[1][2]: + # If both sides are not sum expressions, we have the form a == b + # Simple lLinking constraints do not need to be checked + pass + # Next, we can ignore any term that has already been flagged as mismatched + elif node in self.mismatched_terms.keys(): + pass + # We can also ignore any case where one side of the (in)equality is constant + # I.e. if either child_node[x][1] is True + elif any(d[1] for d in mdata): + pass + else: + # Check for cancellation + # First, collect terms from both sides + # Note: outer loop comes first in list comprehension + t = [i for d in mdata for i in d[0]] + + # Then check for cancellations + for c in self._check_sum_cancellations(t, equality=True): + if node in self.canceling_terms.keys(): + self.canceling_terms[node].append(c) + else: + self.canceling_terms[node] = [c] + + return vals, const, False + + def _check_product_expr(self, node, child_data): + # We expect child_data to be a tuple of len 2 (a*b) + # If both or neither a and b are sums (xor), handle like other expressions + if not child_data[0][2] ^ child_data[1][2]: + return self._check_general_expr(node, child_data) + else: + # Here we have the case of a sum and a multiplier + # For this case, we will make the result look like a sum with the + # multiplier applied + # This is important for scaling of the sum terms, as cancellation should be + # Checked on the scaled value + + # First, check if both terms are constants - if so we can just get the value of + # this node and move on. 
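            # Worked example (hypothetical values): for 8 * (a + b) the child data
            # is ([8], ...) and ([a, b], ...), so the values returned below are
            # [8*a, 8*b]; later cancellation checks then see the scaled sum terms
            # rather than a single collapsed product value.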
+ if child_data[0][1] and child_data[1][1]: + return self._check_general_expr(node, child_data) + + # The length of the values (child_data[i][0]) of the multiplier will be 1 + # We can just iterate over all terms in both value lists and multiply + # each pair + vals = [i * j for i in child_data[0][0] for j in child_data[1][0]] + + # Return the list of values, not constant, is a sum expression (apparent) + return vals, False, True + + def _check_div_expr(self, node, child_data): + # We expect child_data to be a tuple of len 2 (a/b) + # If the numerator is not a sum, handle like general expression + # child_data[0] is numerator, child_data[1] is denominator + if not child_data[0][2]: + return self._check_general_expr(node, child_data) + else: + # If the numerator is a sum, we will treat this as if the division + # were applied to each term in the numerator separately + # This is important for scaling of the sum terms, as cancellation should be + # Checked on the scaled value + + # First, check if the numerator is constant - if so we can just get the value of + # this node and move on. + if child_data[0][1]: + return self._check_general_expr(node, child_data) + + # Next, we need to get the value of the denominator + denom = self._get_value_for_sum_subexpression(child_data[1]) + + try: + vals = [i / denom for i in child_data[0][0]] + except ZeroDivisionError: + raise ZeroDivisionError( + f"Error in ConstraintTermAnalysisVisitor: found division with denominator of 0 " + f"({str(node)})." + ) + + # Return the list of values, not constant, is a sum expression (apparent) + return vals, False, True + + def _check_negation_expr(self, node, child_data): + # Here we need to defer checking of cancellations too due to different ways + # these can appear in an expression. + # We will simply negate all values on the child node values (child_data[0][0]) + # and pass on the rest. + vals = [-i for i in child_data[0][0]] + + # Return the list of values, not constant, is a sum expression (apparent) + return vals, child_data[0][1], child_data[0][2] + + def _check_general_expr(self, node, child_data): + const = self._perform_checks(node, child_data) + + try: + # pylint: disable-next=protected-access + val = node._apply_operation( + list(map(self._get_value_for_sum_subexpression, child_data)) + ) + except ValueError: + raise ValueError( + f"Error in ConstraintTermAnalysisVisitor: error evaluating {str(node)}." + ) + except ZeroDivisionError: + raise ZeroDivisionError( + f"Error in ConstraintTermAnalysisVisitor: found division with denominator of 0 " + f"({str(node)})." 
+ ) + except Exception as err: + # Catch and re-raise any other error that occurs + _log.exception(f"Unexpected {err=}, {type(err)=}") + raise + + return [val], const, False + + def _check_other_expression(self, node, child_data): + const = self._perform_checks(node, child_data) + + # First, need to get value of input terms, which may be sub-expressions + input_mag = [] + for i in child_data: + input_mag.append(self._get_value_for_sum_subexpression(i)) + + # Next, create a copy of the function with expected magnitudes as inputs + newfunc = node.create_node_with_local_data(input_mag) + + # Evaluate new function and return the value along with check results + return [value(newfunc)], const, False + + def _check_ranged_expression(self, node, child_data): + # child_data should have 3 elements, LHS, middle term, and RHS + lhs_vals, lhs_const, _ = self._check_equality_expression(node, child_data[:2]) + rhs_vals, rhs_const, _ = self._check_equality_expression(node, child_data[1:]) + + # Constant is a bit vague in terms ranged expressions. + # We will call the term constant only if all parts are constant + const = lhs_const and rhs_const + + # For values, we need to avoid double counting the middle term + # Also for sign convention, we will negate the outer terms + vals = lhs_vals + [-rhs_vals[1]] + + return vals, const, False + + def _check_sum_expression(self, node, child_data): + # Sum expressions need special handling + # For sums, collect all child values into a list + vals = [] + # We will check for cancellation in this node at the next level + # Pyomo is generally good at simplifying compound sums + const = True + # Collect data from child nodes + for d in child_data: + vals.append(self._get_value_for_sum_subexpression(d)) + + # Expression is not constant if any child is not constant + # Element 1 is a bool indicating if node is constant + if not d[1]: + const = False + + # Check for mismatched terms + if len(vals) > 1: + absvals = [abs(v) for v in vals if abs(v) >= self._zero_tolerance] + if len(absvals) > 0: + vl = max(absvals) + vs = min(absvals) + if vl != vs: + if vs == 0: + diff = log10(vl) + else: + diff = log10(vl / vs) + + if diff >= self._log_mm_tol: + self.mismatched_terms[node] = (vl, vs) + + return vals, const, True + + node_type_method_map = { + EXPR.EqualityExpression: _check_equality_expression, + EXPR.InequalityExpression: _check_equality_expression, + EXPR.RangedExpression: _check_ranged_expression, + EXPR.SumExpression: _check_sum_expression, + EXPR.NPV_SumExpression: _check_sum_expression, + EXPR.ProductExpression: _check_product_expr, + EXPR.MonomialTermExpression: _check_product_expr, + EXPR.NPV_ProductExpression: _check_product_expr, + EXPR.DivisionExpression: _check_div_expr, + EXPR.NPV_DivisionExpression: _check_div_expr, + EXPR.PowExpression: _check_general_expr, + EXPR.NPV_PowExpression: _check_general_expr, + EXPR.NegationExpression: _check_negation_expr, + EXPR.NPV_NegationExpression: _check_negation_expr, + EXPR.AbsExpression: _check_general_expr, + EXPR.NPV_AbsExpression: _check_general_expr, + EXPR.UnaryFunctionExpression: _check_general_expr, + EXPR.NPV_UnaryFunctionExpression: _check_general_expr, + EXPR.Expr_ifExpression: _check_other_expression, + EXPR.ExternalFunctionExpression: _check_other_expression, + EXPR.NPV_ExternalFunctionExpression: _check_other_expression, + EXPR.LinearExpression: _check_sum_expression, + } + + def exitNode(self, node, data): + """ + Method to call when exiting node to check for potential issues. 
+ """ + # Return [node values], constant (bool), sum expression (bool) + # first check if the node is a leaf + nodetype = type(node) + + if nodetype in native_types: + return [node], True, False + + node_func = self.node_type_method_map.get(nodetype, None) + if node_func is not None: + return node_func(self, node, data) + + if not node.is_expression_type(): + # this is a leaf, but not a native type + if nodetype is _PyomoUnit: + # Unit have no value, so return 1 as a placeholder + return [1], True, False + + # Var or Param + return self._check_base_type(node) + # might want to add other common types here + + # not a leaf - check if it is a named expression + if ( + hasattr(node, "is_named_expression_type") + and node.is_named_expression_type() + ): + return self._check_other_expression(node, data) + + raise TypeError( + f"An unhandled expression node type: {str(nodetype)} was encountered while " + f"analyzing constraint terms {str(node)}" + ) + + def walk_expression(self, expr): + """ + Main method to call to walk an expression and return analysis. + + Args: + expr - expression to be analyzed + + Returns: + list of values of top-level additive terms + ComponentSet containing any mismatched terms + ComponentSet containing any canceling terms + Bool indicating whether expression is a constant + """ + # Create new holders for collected terms + self.canceling_terms = ComponentMap() + self.mismatched_terms = ComponentMap() + + # Call parent walk_expression method + vals, const, _ = super().walk_expression(expr) + + # Return results + return ( + vals, + self.mismatched_terms, + self.canceling_terms, + const, + self._cancellation_tripped, + ) diff --git a/idaes/core/util/tests/test_misc.py b/idaes/core/util/tests/test_misc.py index 594b790e12..363fcd8df0 100644 --- a/idaes/core/util/tests/test_misc.py +++ b/idaes/core/util/tests/test_misc.py @@ -14,16 +14,19 @@ This module contains miscellaneous utility functions for use in IDAES models. 
""" +from io import StringIO import pytest import re -from pyomo.environ import ConcreteModel, Set, Block, Var, units +from pyomo.environ import ConcreteModel, Constraint, Expression, Set, Block, Var, units from pyomo.common.config import ConfigBlock from pyomo.core.base.units_container import UnitsError from idaes.core.util.misc import ( add_object_reference, set_param_from_config, + compact_expression_to_string, + print_compact_form, ) import idaes.logger as idaeslog @@ -361,3 +364,54 @@ def test_unitted_default(self, caplog): "b no units provided for parameter test_param - assuming " "default units" in caplog.text ) + + +class TestToExprStringVisitor: + @pytest.mark.unit + def test_compact_expression_to_string(self): + m = ConcreteModel() + m.v1 = Var() + m.v2 = Var() + m.v3 = Var() + + m.e1 = Expression(expr=m.v1 + m.v2) + m.c1 = Constraint(expr=0 == m.v3 * m.e1) + + str = compact_expression_to_string(m.c1.expr) + expected = "v3*e1 == 0" + + assert str == expected + + @pytest.mark.unit + def test_print_compact_form_expr(self): + m = ConcreteModel() + m.v1 = Var() + m.v2 = Var() + m.v3 = Var() + + m.e1 = Expression(expr=m.v1 + m.v2) + m.c1 = Constraint(expr=0 == m.v3 * m.e1) + + expected = "v3*e1 == 0" + + stream = StringIO() + print_compact_form(m.c1.expr, stream=stream) + + assert stream.getvalue() == expected + + @pytest.mark.unit + def test_print_compact_form_component(self): + m = ConcreteModel() + m.v1 = Var() + m.v2 = Var() + m.v3 = Var() + + m.e1 = Expression(expr=m.v1 + m.v2) + m.c1 = Constraint(expr=0 == m.v3 * m.e1) + + expected = "v3*e1 == 0" + + stream = StringIO() + print_compact_form(m.c1, stream=stream) + + assert stream.getvalue() == expected diff --git a/idaes/core/util/tests/test_model_diagnostics.py b/idaes/core/util/tests/test_model_diagnostics.py index ec5b671e34..e10f8a7c57 100644 --- a/idaes/core/util/tests/test_model_diagnostics.py +++ b/idaes/core/util/tests/test_model_diagnostics.py @@ -13,52 +13,67 @@ """ This module contains model diagnostic utility functions for use in IDAES (Pyomo) models. 
""" +from copy import deepcopy from io import StringIO import math -import numpy as np -import pytest -import re import os -from copy import deepcopy +import re +from unittest import TestCase +import numpy as np from pandas import DataFrame +import pytest from pyomo.environ import ( Block, ConcreteModel, Constraint, Expression, - log, - tan, - asin, - acos, - sqrt, + Expr_if, Objective, PositiveIntegers, Set, SolverFactory, - Suffix, - TransformationFactory, units, value, Var, assert_optimal_termination, Param, Integers, + exp, + log, + log10, + sin, + cos, + tan, + asin, + acos, + atan, + sqrt, + sinh, + cosh, + tanh, + asinh, + acosh, + atanh, ) from pyomo.common.collections import ComponentSet from pyomo.contrib.pynumero.asl import AmplInterface from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP from pyomo.common.fileutils import this_file_dir from pyomo.common.tempfiles import TempfileManager +from pyomo.core import expr as EXPR import idaes.core.util.scaling as iscale import idaes.logger as idaeslog from idaes.core.solvers import get_solver from idaes.core import FlowsheetBlock from idaes.core.util.testing import PhysicalParameterTestBlock -from unittest import TestCase - +from idaes.models.properties.modular_properties.eos.ceos_common import ( + cubic_roots_available, + CubicThermoExpressions, + CubicType as CubicEoS, +) from idaes.core.util.model_diagnostics import ( DiagnosticsToolbox, SVDToolbox, @@ -81,6 +96,7 @@ _collect_model_statistics, check_parallel_jacobian, compute_ill_conditioning_certificate, + ConstraintTermAnalysisVisitor, ) from idaes.core.util.parameter_sweep import ( SequentialSweepRunner, @@ -90,6 +106,7 @@ UniformSampling, ) from idaes.core.util.testing import _enable_scip_solver_for_testing +from idaes.models.properties import iapws95 __author__ = "Alex Dowling, Douglas Allan, Andrew Lee" @@ -999,6 +1016,232 @@ def test_display_near_parallel_variables(self): v1, v4 v2, v4 +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_collect_constraint_mismatches(self): + m = ConcreteModel() + m.v1 = Var(initialize=2) + m.v2 = Var(initialize=3) + + # Constraint with no free variables + m.c1 = Constraint(expr=m.v1 == m.v2) + m.v1.fix() + m.v2.fix() + + # Constraint with mismatched terms + m.v3 = Var(initialize=10) + m.v4 = Var(initialize=10) + m.v5 = Var(initialize=1e-6) + m.c2 = Constraint(expr=m.v3 == m.v4 + m.v5) + + # Constraint with cancellation + m.v6 = Var(initialize=10) + m.c3 = Constraint(expr=m.v6 == 10 + m.v3 - m.v4) + + dt = DiagnosticsToolbox(model=m) + + mismatch, cancellation, constant = dt._collect_constraint_mismatches() + + assert mismatch == ["c2: 1 mismatched term(s)"] + assert cancellation == [ + "c2: 1 potential canceling term(s)", + "c3: 1 potential canceling term(s)", + ] + assert constant == ["c1"] + + @pytest.mark.component + def test_display_problematic_constraint_terms(self): + m = ConcreteModel() + + # Constraint with mismatched terms + m.v3 = Var(initialize=10) + m.v4 = Var(initialize=10) + m.v5 = Var(initialize=1e-6) + m.c2 = Constraint(expr=m.v3 == m.v4 + m.v5) + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_problematic_constraint_terms(m.c2, stream=stream) + + expected = """==================================================================================== +The following terms in c2 are potentially problematic: + + Mismatch in v4 + v5 (Max 10, Min 1e-06) + Cancellation in v3 == v4 + v5. 
Terms 1 (-10), 2 (10) + +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_display_problematic_constraint_terms_limited(self): + m = ConcreteModel() + m.v1 = Var(initialize=10) + m.v2 = Var(initialize=-10) + m.v4 = Var(initialize=10) + + # Constraint with cancellation + m.v6 = Var(initialize=10) + m.c1 = Constraint(expr=m.v6 == 10 - m.v4 + m.v1 + m.v2) + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_problematic_constraint_terms(m.c1, stream=stream) + + expected = """==================================================================================== +The following terms in c1 are potentially problematic: + + Cancellation in v6 == 10 - v4 + v1 + v2. Terms 1 (-10), 2 (10) + Cancellation in v6 == 10 - v4 + v1 + v2. Terms 1 (-10), 4 (10) + Cancellation in v6 == 10 - v4 + v1 + v2. Terms 2 (10), 3 (-10) + Cancellation in v6 == 10 - v4 + v1 + v2. Terms 2 (10), 5 (-10) + Cancellation in v6 == 10 - v4 + v1 + v2. Terms 3 (-10), 4 (10) + +Number of canceling terms per node limited to 5. +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_display_problematic_constraint_terms_indexed_error(self): + m = ConcreteModel() + m.s = Set(initialize=[1, 2]) + m.v1 = Var(m.s, initialize=2) + m.v2 = Var(m.s, initialize=3) + + def _c_rule(b, i): + return b.v1[i] == b.v2[i] + + m.c1 = Constraint(m.s, rule=_c_rule) + + dt = DiagnosticsToolbox(model=m) + + # Check for indexed constraints + with pytest.raises( + TypeError, + match=re.escape( + "c1 is an IndexedConstraint. Please provide " + "an individual element of c1 (ConstraintData) " + "to be examined for problematic terms." + ), + ): + dt.display_problematic_constraint_terms(m.c1) + + # Check for not a constraints + with pytest.raises( + TypeError, match="v1 is not an instance of a Pyomo Constraint." + ): + dt.display_problematic_constraint_terms(m.v1) + + @pytest.mark.component + def test_display_problematic_constraint_terms_named_expression(self): + m = ConcreteModel() + + # Constraint with mismatched terms + m.v1 = Var(initialize=10) + m.v2 = Var(initialize=10) + m.v3 = Var(initialize=1e-6) + + m.e1 = Expression(expr=(m.v1 - m.v2)) + + m.c2 = Constraint(expr=m.v3 == m.e1**2) + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_problematic_constraint_terms(m.c2, stream=stream) + + expected = """==================================================================================== +The following terms in c2 are potentially problematic: + + Cancellation in Expression e1. Terms 1 (10), 2 (-10) + +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_display_constraints_with_mismatched_terms(self): + m = ConcreteModel() + # Constraint with mismatched terms + m.v3 = Var(initialize=10) + m.v4 = Var(initialize=10) + m.v5 = Var(initialize=1e-6) + m.c2 = Constraint(expr=m.v3 == m.v4 + m.v5) + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_constraints_with_mismatched_terms(stream=stream) + + expected = """==================================================================================== +The following constraints have mismatched terms: + + c2: 1 mismatched term(s) + +Call display_problematic_constraint_terms(constraint) for more information. 
+==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_display_constraints_with_canceling_terms(self): + m = ConcreteModel() + # Constraint with mismatched terms + m.v3 = Var(initialize=10) + m.v4 = Var(initialize=10) + + # Constraint with cancellation + m.v6 = Var(initialize=10) + m.c3 = Constraint(expr=m.v6 == 10 + m.v3 - m.v4) + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_constraints_with_canceling_terms(stream=stream) + + expected = """==================================================================================== +The following constraints have canceling terms: + + c3: 1 potential canceling term(s) + +Call display_problematic_constraint_terms(constraint) for more information. +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_display_constraints_with_no_free_variables(self): + m = ConcreteModel() + m.v1 = Var(initialize=2) + m.v2 = Var(initialize=3) + + # Constraint with no free variables + m.c1 = Constraint(expr=m.v1 == m.v2) + m.v1.fix() + m.v2.fix() + + dt = DiagnosticsToolbox(model=m) + + stream = StringIO() + dt.display_constraints_with_no_free_variables(stream=stream) + + expected = """==================================================================================== +The following constraints have no free variables: + + c1 + ==================================================================================== """ @@ -1151,6 +1394,7 @@ def test_collect_numerical_cautions(self, model): dt = DiagnosticsToolbox(model=model.b) cautions = dt._collect_numerical_cautions() + assert len(cautions) == 5 assert ( "Caution: 2 Variables with value close to their bounds (abs=1.0E-04, rel=1.0E-04)" @@ -1440,6 +1684,58 @@ def test_report_numerical_issues(self, model): compute_infeasibility_explanation() display_variables_at_or_outside_bounds() +==================================================================================== +""" + + assert stream.getvalue() == expected + + @pytest.mark.component + def test_report_numerical_issues_cancellation(self): + model = ConcreteModel() + + model.v1 = Var(initialize=1) + model.v2 = Var(initialize=2) + model.v3 = Var(initialize=1e-8) + + # Non-problematic constraints + model.c1 = Constraint(expr=2 * model.v1 == model.v2) + + # Problematic constraints + model.c10 = Constraint(expr=0 == exp(model.v1 - 0.5 * model.v2)) + model.c11 = Constraint(expr=0 == model.v1 - 0.5 * model.v2 + model.v3) + + dt = DiagnosticsToolbox(model=model) + + stream = StringIO() + dt.report_numerical_issues(stream) + + expected = """==================================================================================== +Model Statistics + + Jacobian Condition Number: Undefined (Exactly Singular) + +------------------------------------------------------------------------------------ +3 WARNINGS + + WARNING: 1 Constraint with large residuals (>1.0E-05) + WARNING: 1 pair of constraints are parallel (to tolerance 1.0E-08) + WARNING: 1 pair of variables are parallel (to tolerance 1.0E-08) + +------------------------------------------------------------------------------------ +3 Cautions + + Caution: 1 Variable with value close to zero (tol=1.0E-08) + Caution: 1 Constraint with mismatched terms + Caution: 1 Constraint with potential cancellation of terms + 
+------------------------------------------------------------------------------------ +Suggested next steps: + + display_constraints_with_large_residuals() + compute_infeasibility_explanation() + display_near_parallel_constraints() + display_near_parallel_variables() + ==================================================================================== """ @@ -2267,7 +2563,6 @@ def test_build_model(self, model): ca = IpoptConvergenceAnalysis(model) clone = ca._build_model() - clone.pprint() assert clone is not model assert isinstance(clone.v1, Var) @@ -4053,3 +4348,1057 @@ def test_output(self, model): Constraints / bounds in guards for stability: """ assert expected in stream.getvalue() + + +class TestConstraintTermAnalysisVisitor: + @pytest.mark.unit + def test_sum_combinations(self): + # Check method to generate sums of all combinations of terms + # excludes single term sums + terms = [1, 2, 3, 4, 5] + visitor = ConstraintTermAnalysisVisitor(max_canceling_terms=None) + sums = [i for i in visitor._generate_combinations(terms)] + + print(sums) + + expected = [ + ((0, 1), (1, 2)), + ((0, 1), (2, 3)), + ((0, 1), (3, 4)), + ((0, 1), (4, 5)), + ((1, 2), (2, 3)), + ((1, 2), (3, 4)), + ((1, 2), (4, 5)), + ((2, 3), (3, 4)), + ((2, 3), (4, 5)), + ((3, 4), (4, 5)), + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (3, 4)), + ((0, 1), (1, 2), (4, 5)), + ((0, 1), (2, 3), (3, 4)), + ((0, 1), (2, 3), (4, 5)), + ((0, 1), (3, 4), (4, 5)), + ((1, 2), (2, 3), (3, 4)), + ((1, 2), (2, 3), (4, 5)), + ((1, 2), (3, 4), (4, 5)), + ((2, 3), (3, 4), (4, 5)), + ((0, 1), (1, 2), (2, 3), (3, 4)), + ((0, 1), (1, 2), (2, 3), (4, 5)), + ((0, 1), (1, 2), (3, 4), (4, 5)), + ((0, 1), (2, 3), (3, 4), (4, 5)), + ((1, 2), (2, 3), (3, 4), (4, 5)), + ((0, 1), (1, 2), (2, 3), (3, 4), (4, 5)), + ] + + assert sums == expected + + @pytest.mark.unit + def test_sum_combinations_limited_default(self): + # Check method to generate sums of all combinations of terms + # excludes single term sums + terms = [1, 2, 3, 4, 5] + visitor = ConstraintTermAnalysisVisitor() + sums = [i for i in visitor._generate_combinations(terms)] + + expected = expected = [ + ((0, 1), (1, 2)), + ((0, 1), (2, 3)), + ((0, 1), (3, 4)), + ((0, 1), (4, 5)), + ((1, 2), (2, 3)), + ((1, 2), (3, 4)), + ((1, 2), (4, 5)), + ((2, 3), (3, 4)), + ((2, 3), (4, 5)), + ((3, 4), (4, 5)), + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (3, 4)), + ((0, 1), (1, 2), (4, 5)), + ((0, 1), (2, 3), (3, 4)), + ((0, 1), (2, 3), (4, 5)), + ((0, 1), (3, 4), (4, 5)), + ((1, 2), (2, 3), (3, 4)), + ((1, 2), (2, 3), (4, 5)), + ((1, 2), (3, 4), (4, 5)), + ((2, 3), (3, 4), (4, 5)), + ((0, 1), (1, 2), (2, 3), (3, 4)), + ((0, 1), (1, 2), (2, 3), (4, 5)), + ((0, 1), (1, 2), (3, 4), (4, 5)), + ((0, 1), (2, 3), (3, 4), (4, 5)), + ((1, 2), (2, 3), (3, 4), (4, 5)), + # 5 term combination should be excluded + ] + + assert sums == expected + + @pytest.mark.unit + def test_check_sum_cancellations(self): + terms = [1, -2, 3, -4, 5] + visitor = ConstraintTermAnalysisVisitor() + cancellations = visitor._check_sum_cancellations(terms) + + # We expect to canceling combinations + # Results should be a list with each entry being a tuple containing a + # canceling combination + # In turn, each element of the tuple should be a 2-tuple with the + # position of the term in the input list and its value + expected = [((0, 1), (2, 3), (3, -4)), ((0, 1), (1, -2), (3, -4), (4, 5))] + + assert cancellations == expected + + @pytest.mark.unit + def test_int(self): + vv, mm, cc, k, tr = 
ConstraintTermAnalysisVisitor().walk_expression(expr=7) + + assert vv == [7] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_float(self): + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=7.7) + + assert vv == [7.7] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_scalar_param(self): + m = ConcreteModel() + m.scalar_param = Param(initialize=1, mutable=True) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.scalar_param + ) + + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_indexed_param(self): + m = ConcreteModel() + m.indexed_param = Param(["a", "b"], initialize=1, mutable=True) + + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.indexed_param["a"] + ) + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_scalar_var(self): + m = ConcreteModel() + m.scalar_var = Var(initialize=1) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.scalar_var + ) + + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_indexed_var(self): + m = ConcreteModel() + m.indexed_var = Var(["a", "b"], initialize=1) + + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.indexed_var["a"] + ) + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_scalar_var_fixed(self): + m = ConcreteModel() + m.scalar_var = Var(initialize=1) + m.scalar_var.fix() + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.scalar_var + ) + + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_indexed_var_fixed(self): + m = ConcreteModel() + m.indexed_var = Var(["a", "b"], initialize=1) + m.indexed_var.fix() + + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=m.indexed_var["a"] + ) + assert vv == [1] + assert len(mm) == 0 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_equality_expr(self): + m = ConcreteModel() + m.v1 = Var(initialize=1e-7) + m.v2 = Var(initialize=1e7) + + expr = m.v1 == m.v2 + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [-1e-7, 1e7] + assert expr in mm + assert len(mm) == 1 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_equality_expr_constant(self): + m = ConcreteModel() + m.v1 = Var(initialize=1e-7) + m.v2 = Var(initialize=1e7) + + # Fix v1, not constant yet as v2 still free + m.v1.fix() + + expr = m.v1 == m.v2 + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [-1e-7, 1e7] + assert expr in mm + assert len(mm) == 1 + assert len(cc) == 0 + assert not k + assert not tr + + # Fix v2, now constant + m.v2.fix() + + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [-1e-7, 1e7] + assert expr in mm + assert len(mm) == 1 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.mark.unit + def test_sum_expr(self): + m = ConcreteModel() + m.v1 = Var(["a", "b", "c"], initialize=1e7) + m.v1["a"].set_value(1e-7) + + expr = sum(m.v1[i] for i in m.v1) + vv, mm, cc, k, tr = 
ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [1e-7, 1e7, 1e7] + assert expr in mm + assert len(mm) == 1 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_sum_expr_constant(self): + m = ConcreteModel() + m.v1 = Var(["a", "b", "c"], initialize=1e7) + m.v1["a"].set_value(1e-7) + m.v1.fix() + + expr = sum(m.v1[i] for i in m.v1) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [1e-7, 1e7, 1e7] + assert expr in mm + assert len(mm) == 1 + assert len(cc) == 0 + assert k + assert not tr + + @pytest.fixture(scope="class") + def model(self): + m = ConcreteModel() + m.v1 = Var(initialize=2) + m.v2 = Var(initialize=3) + m.v3 = Var(initialize=5.0000001) + m.v4 = Var(initialize=5) + + return m + + @pytest.mark.unit + def test_product_expr(self, model): + m = ConcreteModel() + expr = model.v1 * model.v2 * model.v3 + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [pytest.approx(30.0000006, rel=1e-8)] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_product_sum_expr(self, model): + expr = (model.v1 + model.v2) * (model.v3 + model.v4) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [pytest.approx((2 + 3) * (5.0000001 + 5), rel=1e-8)] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_product_sum_expr_w_negation(self, model): + expr = (model.v1 + model.v2) * (model.v3 - model.v4) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [pytest.approx(0.0000005, rel=1e-8)] + assert len(mm) == 0 + assert expr in cc + assert len(cc) == 1 + assert not k + assert not tr + + @pytest.mark.unit + def test_division_expr(self, model): + expr = model.v1 / model.v2 / model.v3 + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [pytest.approx(2 / 3 / 5.0000001, rel=1e-8)] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_division_sum_expr(self, model): + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression( + expr=(model.v1 + model.v2) / (model.v3 + model.v4) + ) + + assert vv == [ + pytest.approx(2 / (5.0000001 + 5), rel=1e-8), + pytest.approx(3 / (5.0000001 + 5), rel=1e-8), + ] + assert len(mm) == 0 + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_division_sum_expr_w_negation(self, model): + expr = (model.v1 - model.v2) / (model.v3 - model.v4) + vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr) + + assert vv == [ + pytest.approx(2 / 0.0000001, rel=1e-8), + pytest.approx(-3 / 0.0000001, rel=1e-8), + ] + assert len(mm) == 0 + # Check for cancellation should be deferred + assert len(cc) == 0 + assert not k + assert not tr + + @pytest.mark.unit + def test_division_sum_expr_w_zero_denominator(self): + m = ConcreteModel() + m.v1 = Var(initialize=2) + m.v2 = Var(initialize=3) + m.v3 = Var(initialize=5) + m.v4 = Var(initialize=5) + + expr = (m.v1 - m.v2) / (m.v3 - m.v4) + with pytest.raises( + ZeroDivisionError, + match=re.escape( + "Error in ConstraintTermAnalysisVisitor: found division with denominator of 0 " + "((v1 - v2)/(v3 - v4))." 
+            ),
+        ):
+            ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+    @pytest.mark.unit
+    def test_division_sum_expr_w_negation_deferred(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=2)
+        m.v2 = Var(initialize=2)
+        m.v3 = Var(initialize=5.0000001)
+        m.v4 = Var(initialize=5)
+
+        expr = ((m.v1 - m.v2) / (m.v3 - m.v4)) ** 2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [pytest.approx(0, abs=1e-8)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_division_expr_error(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=2)
+        m.v2 = Var(initialize=0)
+
+        with pytest.raises(
+            ZeroDivisionError,
+            match=re.escape(
+                "Error in ConstraintTermAnalysisVisitor: found division with "
+                "denominator of 0 (v1/v2)."
+            ),
+        ):
+            ConstraintTermAnalysisVisitor().walk_expression(expr=m.v1 / m.v2)
+
+    @pytest.mark.unit
+    def test_pow_expr(self, model):
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=model.v1**model.v2
+        )
+
+        assert vv == [8]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_pow_sum_expr(self, model):
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=(model.v1 + model.v2) ** (model.v3 + model.v4)
+        )
+
+        assert vv == [pytest.approx(5**10.0000001, rel=1e-8)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_pow_sum_expr_w_negation(self, model):
+        expr = (model.v1 + model.v2) ** (model.v3 - model.v4)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [pytest.approx((2 + 3) ** (0.0000001), rel=1e-8)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.fixture(scope="class")
+    def func_model(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=0.99999)
+        m.v3 = Var(initialize=-100)
+
+        m.v2.fix()
+
+        return m
+
+    @pytest.mark.unit
+    def test_negation_expr(self, func_model):
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=-func_model.v1
+        )
+
+        assert vv == [-1]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_negation_sum_expr(self, func_model):
+        expr = -(func_model.v1 - func_model.v2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        # Checking the cancellation should be deferred
+        # Expect to get two values back
+        assert vv == [pytest.approx(-1, rel=1e-8), pytest.approx(0.99999, rel=1e-8)]
+        assert len(mm) == 0
+        # Check is deferred, so no cancellations identified
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    # acosh has a domain that does not fit with the values used for the other
+    # functions; we assume the remaining cases give sufficient coverage
+    func_list = [
+        exp,
+        log,
+        log10,
+        sqrt,
+        sin,
+        cos,
+        tan,
+        asin,
+        acos,
+        atan,
+        sinh,
+        cosh,
+        tanh,
+        asinh,
+        atanh,
+    ]
+    func_error_list = [log, log10, sqrt, asin, acos, acosh, atanh]
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize("func", func_list)
+    def test_func_expr(self, func_model, func):
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=func(func_model.v2)
+        )
+
+        assert vv == [pytest.approx(value(func(0.99999)), rel=1e-8)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert k
+        assert not tr
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize("func", func_list)
+    def test_func_sum_expr(self, func_model, func):
+        expr = func(func_model.v1 - func_model.v2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [pytest.approx(value(func(0.00001)), rel=1e-8)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize("func", func_list)
+    def test_func_sum_expr_w_negation(self, func_model, func):
+        expr = func(func_model.v1 - func_model.v2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [pytest.approx(value(func(0.00001)), rel=1e-8)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize("func", func_error_list)
+    def test_func_expr_error(self, func_model, func):
+        with pytest.raises(
+            ValueError,
+            match=re.escape(
+                "Error in ConstraintTermAnalysisVisitor: error evaluating "
+            ),
+        ):
+            ConstraintTermAnalysisVisitor().walk_expression(expr=func(func_model.v3))
+
+    @pytest.mark.unit
+    def test_expr_if(self, model):
+        model.exprif = Expr_if(
+            IF=model.v1,
+            THEN=model.v1 + model.v2,
+            ELSE=model.v3 - model.v4,
+        )
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=model.exprif
+        )
+
+        assert vv == [pytest.approx(5, rel=1e-8)]
+        assert len(mm) == 0
+        assert model.exprif in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    @pytest.mark.skipif(
+        not AmplInterface.available(), reason="pynumero_ASL is not available"
+    )
+    @pytest.mark.skipif(not cubic_roots_available, reason="Cubic roots not available")
+    def test_ext_func(self):
+        # Use the cubic root external function to test
+        m = ConcreteModel()
+        m.a = Var(initialize=1)
+        m.b = Var(initialize=1)
+
+        m.expr_write = CubicThermoExpressions(m)
+        Z = m.expr_write.z_liq(eos=CubicEoS.PR, A=m.a, B=m.b)
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=Z)
+
+        assert vv == [pytest.approx(-2.1149075414767577, rel=1e-8)]
+        assert len(mm) == 0
+        assert Z in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+        # Check that model state did not change
+        assert value(m.a) == 1
+        assert value(m.b) == 1
+        assert value(Z) == pytest.approx(-2.1149075414767577, rel=1e-8)
+
+    @pytest.mark.unit
+    def test_equality_sum_expr(self):
+        m = ConcreteModel()
+        m.v1 = Var(["a", "b", "c"], initialize=1e7)
+        m.v2 = Var(initialize=1e-7)
+
+        expr = m.v2 == sum(m.v1[i] for i in m.v1)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1e-7, 3e7]
+        assert expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_inequality_sum_expr(self):
+        m = ConcreteModel()
+        m.v1 = Var(["a", "b", "c"], initialize=1e7)
+        m.v2 = Var(initialize=1e-7)
+
+        expr = m.v2 <= sum(m.v1[i] for i in m.v1)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1e-7, 3e7]
+        assert expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_compound_equality_expr_1(self):
+        m = ConcreteModel()
+        m.v1 = Var(["a", "b", "c"], initialize=1e7)
+        m.v2 = Var(initialize=1e-7)
+
+        expr = 6 * m.v2 == 8 * sum(m.v1[i] for i in m.v1)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-6e-7, 2.4e8]
+        assert expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_ranged_expr(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1e7)
+        m.v2 = Var(initialize=1e-7)
+        m.v3 = Var(initialize=1e7)
+
+        m.expr = EXPR.RangedExpression(args=(m.v1, m.v2, m.v3), strict=True)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=m.expr)
+
+        assert vv == [-1e7, 1e-7, -1e7]
+        assert m.expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Fix v1 and v2 to make first two terms constant
+        m.v1.fix()
+        m.v2.fix()
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=m.expr)
+
+        # Should not be flagged as constant due to v3
+        assert vv == [-1e7, 1e-7, -1e7]
+        assert m.expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Fix v3 to make all terms constant
+        m.v3.fix()
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=m.expr)
+
+        # Should now be constant
+        assert vv == [-1e7, 1e-7, -1e7]
+        assert m.expr in mm
+        assert len(mm) == 1
+        assert len(cc) == 0
+        assert k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_compound_equality_expr_2(self):
+        m = ConcreteModel()
+        m.v1 = Var(["a", "b", "c"], initialize=1e7)
+        m.v2 = Var(initialize=1e-7)
+        m.v3 = Var(initialize=1e3)
+
+        # Set this small so we get two mismatched warnings
+        m.v1["a"].set_value(1e-7)
+
+        expr1 = sum(m.v1[i] for i in m.v1)
+        expr = 6 * m.v2 == 8 * expr1 + m.v3
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-6e-7, pytest.approx(8 * (2e7 + 1e-7) + 1000, rel=1e-8)]
+        assert expr in mm
+        assert expr1 in mm
+        assert len(mm) == 2
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_canceling_sum_expr(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=2)
+        m.v2 = Var(initialize=2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=m.v1 - m.v2
+        )
+
+        assert vv == [2, -2]
+        assert len(mm) == 0
+        # We do not check cancellation at the sum, so this should be empty
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_expr_w_canceling_sum(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=2)
+        m.v2 = Var(initialize=2)
+        m.v3 = Var(initialize=3)
+
+        expr = m.v3 * (m.v1 - m.v2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [6, -6]
+        assert len(mm) == 0
+        # Check for cancellation should be deferred
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_expr_w_deferred_canceling_sum(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=2)
+        m.v2 = Var(initialize=2)
+        m.v3 = Var(initialize=3)
+
+        expr = (m.v3 * (m.v1 - m.v2)) ** 2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [0]
+        assert len(mm) == 0
+        # We should get a warning about canceling sums here
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+        # Check for tolerance of sum cancellation
+        m.v2.set_value(2.00022)
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor(
+            term_cancellation_tolerance=1e-4
+        ).walk_expression(expr=expr)
+
+        assert vv == [pytest.approx((3 * -0.00022) ** 2, rel=1e-8)]
+        assert len(mm) == 0
+        # This should pass as the difference is greater than tol
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Change tolerance so it should identify cancellation
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor(
+            term_cancellation_tolerance=1e-3
+        ).walk_expression(expr=expr)
+
+        assert vv == [pytest.approx((3 * -0.00022) ** 2, rel=1e-8)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_canceling_equality_expr_safe(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1e7)
+        m.v2 = Var(initialize=1e7)
+
+        # This is a standard constraint form, so should have no issues despite cancellation
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=0 == m.v1 - m.v2
+        )
+
+        assert vv == [0, pytest.approx(0, abs=1e-12)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_canceling_equality_expr_zero_term(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1e7)
+        m.v2 = Var(initialize=1e7)
+        m.v3 = Var(initialize=0)
+
+        expr = m.v3 == m.v1 - m.v2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [0, pytest.approx(0, abs=1e-12)]
+        assert len(mm) == 0
+        # No canceling terms, as v3 = 0 is ignored and we are left with an a == b form
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Set v3 above zero tolerance
+        m.v3.set_value(1e-4)
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+        assert vv == [-1e-4, pytest.approx(0, abs=1e-12)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_canceling_equality_expr_compound(self):
+        m = ConcreteModel()
+        m.v1 = Var(["a", "b"], initialize=5e6)
+        m.v2 = Var(initialize=1e7)
+        m.v3 = Var(initialize=0)
+
+        expr = m.v3 == sum(v for v in m.v1.values()) - m.v2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [0, pytest.approx(0, abs=1e-12)]
+        assert len(mm) == 0
+        # No canceling terms, as v3 = 0 is ignored and we are left with an a == b form
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Set v3 above zero tolerance
+        m.v3.set_value(1e-4)
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+        assert vv == [-1e-4, pytest.approx(0, abs=1e-12)]
+        assert len(mm) == 0
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    # Double check that an a + eps == c form gets flagged in some way
+    @pytest.mark.unit
+    def test_canceling_equality_expr_canceling_sides(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=1)
+        m.v3 = Var(initialize=1e-8)
+
+        expr1 = m.v1 + m.v3
+        expr = m.v2 == expr1
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1, pytest.approx(1 + 1e-8, abs=1e-8)]
+        assert expr1 in mm
+        assert len(mm) == 1
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    # Check to make sure simple linking constraints are not flagged as canceling
+    @pytest.mark.unit
+    def test_linking_equality_expr(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=1)
+
+        expr = m.v1 == m.v2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1, 1]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_linking_equality_expr_compound(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=1)
+        m.v3 = Var(initialize=1)
+
+        expr = m.v1 == m.v2 * m.v3
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1, 1]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.component
+    def test_external_function_w_string_argument(self):
+        m = ConcreteModel()
+        m.properties = iapws95.Iapws95ParameterBlock()
+        m.state = m.properties.build_state_block([0])
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=m.state[0].enth_mol
+        )
+
+        assert vv == [pytest.approx(1.1021387e-2, rel=1e-6)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Test nested external functions
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(
+            expr=m.state[0].temperature
+        )
+
+        assert vv == [pytest.approx(270.4877, rel=1e-6)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_zero_tolerance(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1e-12)
+        m.v2 = Var(initialize=1)
+        m.v3 = Var(initialize=1e-12)
+
+        expr1 = m.v1 - m.v3
+        expr2 = expr1 + 1
+        expr = m.v2 == expr2
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1, pytest.approx(1, abs=1e-8)]
+        # We expect no mismatches, as smallest terms are below zero tolerance
+        assert len(mm) == 0
+        # We expect no canceling terms, as v1 and v3 are ignored (zero tolerance), leaving
+        # 1 == 1
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+        # Set v1 and v3 above zero tolerance
+        m.v1.set_value(1e-8)
+        m.v3.set_value(1e-8)
+
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [-1, pytest.approx(1, abs=1e-8)]
+        assert expr2 in mm
+        assert len(mm) == 1
+        assert expr in cc
+        assert len(cc) == 1
+        assert not k
+        assert not tr
+
+    # Test to make sure scaled constraints are not flagged as issues
+    @pytest.mark.unit
+    def test_scaled_equality_expr_product(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=1)
+        m.v3 = Var(initialize=2)
+
+        expr = 0 == m.v3 * (m.v1 - m.v2)
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [0, 0]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_scaled_equality_expr_division(self):
+        m = ConcreteModel()
+        m.v1 = Var(initialize=1)
+        m.v2 = Var(initialize=1)
+        m.v3 = Var(initialize=2)
+
+        expr = 0 == (m.v1 - m.v2) / m.v3
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [0, 0]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_mole_fraction_constraint(self):
+        m = ConcreteModel()
+        m.mole_frac_a = Var(initialize=0.5)
+        m.flow_a = Var(initialize=100)
+        m.flow_b = Var(initialize=100)
+
+        expr = m.mole_frac_a * (m.flow_a + m.flow_b) == m.flow_a
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [pytest.approx(-100, rel=1e-5), pytest.approx(100, rel=1e-5)]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
+
+    @pytest.mark.unit
+    def test_mole_fraction_constraint_trace(self):
+        m = ConcreteModel()
+        m.mole_frac_a = Var(initialize=0.999810)
+        m.flow_a = Var(initialize=122.746)
+        m.flow_b = Var(initialize=0.0233239)
+
+        expr = m.mole_frac_a * (m.flow_a + m.flow_b) == m.flow_a
+        vv, mm, cc, k, tr = ConstraintTermAnalysisVisitor().walk_expression(expr=expr)
+
+        assert vv == [
+            pytest.approx(-122.746, rel=1e-5),
+            pytest.approx(122.746, rel=1e-5),
+        ]
+        assert len(mm) == 0
+        assert len(cc) == 0
+        assert not k
+        assert not tr
diff --git a/setup.py b/setup.py
index d5303b756a..19868f5d1d 100644
--- a/setup.py
+++ b/setup.py
@@ -82,7 +82,7 @@ def __getitem__(self, key):
     # Put abstract (non-versioned) deps here.
     # Concrete dependencies go in requirements[-dev].txt
     install_requires=[
-        "pyomo >= 6.8.0",
+        "pyomo @ https://github.com/IDAES/pyomo/archive/6.8.0.idaes.2024.10.25.zip",
        "pint >= 0.24.1",  # required to use Pyomo units. Pint 0.24.1 needed for Python 3.9 support
        "networkx",  # required to use Pyomo network
        "numpy>=1,<3",