summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/CMakeLists.txt4
-rw-r--r--src/theory/arith/arith_lemma.h2
-rw-r--r--src/theory/arith/inference_id.cpp61
-rw-r--r--src/theory/arith/inference_id.h116
-rw-r--r--src/theory/arith/inference_manager.h2
-rw-r--r--src/theory/arith/nl/cad_solver.cpp6
-rw-r--r--src/theory/arith/nl/ext/factoring_check.cpp4
-rw-r--r--src/theory/arith/nl/ext/monomial_bounds_check.cpp4
-rw-r--r--src/theory/arith/nl/ext/monomial_check.cpp6
-rw-r--r--src/theory/arith/nl/ext/split_zero_check.cpp2
-rw-r--r--src/theory/arith/nl/ext/tangent_plane_check.cpp2
-rw-r--r--src/theory/arith/nl/iand_solver.cpp8
-rw-r--r--src/theory/arith/nl/icp/icp_solver.cpp4
-rw-r--r--src/theory/arith/nl/nonlinear_extension.cpp2
-rw-r--r--src/theory/arith/nl/stats.h2
-rw-r--r--src/theory/arith/nl/transcendental/exponential_solver.cpp14
-rw-r--r--src/theory/arith/nl/transcendental/sine_solver.cpp16
-rw-r--r--src/theory/arith/nl/transcendental/transcendental_state.cpp6
-rw-r--r--src/theory/bags/infer_info.cpp29
-rw-r--r--src/theory/bags/infer_info.h45
-rw-r--r--src/theory/bags/inference_generator.cpp22
-rw-r--r--src/theory/datatypes/infer_proof_cons.cpp22
-rw-r--r--src/theory/datatypes/infer_proof_cons.h2
-rw-r--r--src/theory/datatypes/inference.cpp29
-rw-r--r--src/theory/datatypes/inference.h52
-rw-r--r--src/theory/datatypes/inference_manager.cpp12
-rw-r--r--src/theory/datatypes/inference_manager.h18
-rw-r--r--src/theory/datatypes/theory_datatypes.cpp24
-rw-r--r--src/theory/inference_id.cpp150
-rw-r--r--src/theory/inference_id.h429
-rw-r--r--src/theory/strings/base_solver.cpp16
-rw-r--r--src/theory/strings/core_solver.cpp82
-rw-r--r--src/theory/strings/extf_solver.cpp18
-rw-r--r--src/theory/strings/infer_info.cpp81
-rw-r--r--src/theory/strings/infer_info.h313
-rw-r--r--src/theory/strings/infer_proof_cons.cpp164
-rw-r--r--src/theory/strings/infer_proof_cons.h2
-rw-r--r--src/theory/strings/inference_manager.cpp10
-rw-r--r--src/theory/strings/inference_manager.h8
-rw-r--r--src/theory/strings/regexp_solver.cpp18
-rw-r--r--src/theory/strings/sequences_stats.h4
-rw-r--r--src/theory/strings/solver_state.cpp2
-rw-r--r--src/theory/strings/theory_strings.cpp4
43 files changed, 848 insertions, 969 deletions
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 7a7a90981..42370b8fc 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -333,8 +333,6 @@ libcvc4_add_sources(
theory/arith/fc_simplex.h
theory/arith/infer_bounds.cpp
theory/arith/infer_bounds.h
- theory/arith/inference_id.cpp
- theory/arith/inference_id.h
theory/arith/inference_manager.cpp
theory/arith/inference_manager.h
theory/arith/linear_equality.cpp
@@ -612,6 +610,8 @@ libcvc4_add_sources(
theory/fp/theory_fp_type_rules.h
theory/fp/type_enumerator.h
theory/interrupted.h
+ theory/inference_id.cpp
+ theory/inference_id.h
theory/inference_manager_buffered.cpp
theory/inference_manager_buffered.h
theory/logic_info.cpp
diff --git a/src/theory/arith/arith_lemma.h b/src/theory/arith/arith_lemma.h
index 1c90066fb..85e55b1e3 100644
--- a/src/theory/arith/arith_lemma.h
+++ b/src/theory/arith/arith_lemma.h
@@ -19,7 +19,7 @@
#include <vector>
#include "expr/node.h"
-#include "theory/arith/inference_id.h"
+#include "theory/inference_id.h"
#include "theory/inference_manager_buffered.h"
#include "theory/output_channel.h"
#include "theory/theory_inference.h"
diff --git a/src/theory/arith/inference_id.cpp b/src/theory/arith/inference_id.cpp
deleted file mode 100644
index 4bdbacbc7..000000000
--- a/src/theory/arith/inference_id.cpp
+++ /dev/null
@@ -1,61 +0,0 @@
-/********************* */
-/*! \file inference_id.cpp
- ** \verbatim
- ** Top contributors (to current version):
- ** Gereon Kremer, Yoni Zohar
- ** This file is part of the CVC4 project.
- ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
- ** in the top-level source directory and their institutional affiliations.
- ** All rights reserved. See the file COPYING in the top-level source
- ** directory for licensing information.\endverbatim
- **
- ** \brief Implementation of inference enumeration.
- **/
-
-#include "theory/arith/inference_id.h"
-
-namespace CVC4 {
-namespace theory {
-namespace arith {
-
-const char* toString(InferenceId i)
-{
- switch (i)
- {
- case InferenceId::NL_CONGRUENCE: return "CONGRUENCE";
- case InferenceId::NL_SHARED_TERM_VALUE_SPLIT: return "SHARED_TERM_VALUE_SPLIT";
- case InferenceId::NL_SPLIT_ZERO: return "SPLIT_ZERO";
- case InferenceId::NL_SIGN: return "SIGN";
- case InferenceId::NL_COMPARISON: return "COMPARISON";
- case InferenceId::NL_INFER_BOUNDS: return "INFER_BOUNDS";
- case InferenceId::NL_INFER_BOUNDS_NT: return "INFER_BOUNDS_NT";
- case InferenceId::NL_FACTOR: return "FACTOR";
- case InferenceId::NL_RES_INFER_BOUNDS: return "RES_INFER_BOUNDS";
- case InferenceId::NL_TANGENT_PLANE: return "TANGENT_PLANE";
- case InferenceId::NL_T_PURIFY_ARG: return "T_PURIFY_ARG";
- case InferenceId::NL_T_INIT_REFINE: return "T_INIT_REFINE";
- case InferenceId::NL_T_PI_BOUND: return "T_PI_BOUND";
- case InferenceId::NL_T_MONOTONICITY: return "T_MONOTONICITY";
- case InferenceId::NL_T_SECANT: return "T_SECANT";
- case InferenceId::NL_T_TANGENT: return "T_TANGENT";
- case InferenceId::NL_IAND_INIT_REFINE: return "IAND_INIT_REFINE";
- case InferenceId::NL_IAND_VALUE_REFINE: return "IAND_VALUE_REFINE";
- case InferenceId::NL_IAND_SUM_REFINE: return "IAND_SUM_REFINE";
- case InferenceId::NL_IAND_BITWISE_REFINE: return "IAND_BITWISE_REFINE";
- case InferenceId::NL_CAD_CONFLICT: return "CAD_CONFLICT";
- case InferenceId::NL_CAD_EXCLUDED_INTERVAL: return "CAD_EXCLUDED_INTERVAL";
- case InferenceId::NL_ICP_CONFLICT: return "ICP_CONFLICT";
- case InferenceId::NL_ICP_PROPAGATION: return "ICP_PROPAGATION";
- default: return "?";
- }
-}
-
-std::ostream& operator<<(std::ostream& out, InferenceId i)
-{
- out << toString(i);
- return out;
-}
-
-} // namespace arith
-} // namespace theory
-} // namespace CVC4
diff --git a/src/theory/arith/inference_id.h b/src/theory/arith/inference_id.h
deleted file mode 100644
index 853965dc9..000000000
--- a/src/theory/arith/inference_id.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/********************* */
-/*! \file inference_id.h
- ** \verbatim
- ** Top contributors (to current version):
- ** Andrew Reynolds, Gereon Kremer, Yoni Zohar
- ** This file is part of the CVC4 project.
- ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
- ** in the top-level source directory and their institutional affiliations.
- ** All rights reserved. See the file COPYING in the top-level source
- ** directory for licensing information.\endverbatim
- **
- ** \brief Inference enumeration for arithmetic.
- **/
-
-#include "cvc4_private.h"
-
-#ifndef CVC4__THEORY__ARITH__INFERENCE_ID_H
-#define CVC4__THEORY__ARITH__INFERENCE_ID_H
-
-#include <map>
-#include <vector>
-
-#include "util/safe_print.h"
-
-namespace CVC4 {
-namespace theory {
-namespace arith {
-
-/**
- * Types of inferences used in the procedure
- */
-enum class InferenceId : uint32_t
-{
- //-------------------- core
- // simple congruence x=y => f(x)=f(y)
- NL_CONGRUENCE,
- // shared term value split (for naive theory combination)
- NL_SHARED_TERM_VALUE_SPLIT,
- //-------------------- incremental linearization solver
- // splitting on zero (NlSolver::checkSplitZero)
- NL_SPLIT_ZERO,
- // based on sign (NlSolver::checkMonomialSign)
- NL_SIGN,
- // based on comparing (abs) model values (NlSolver::checkMonomialMagnitude)
- NL_COMPARISON,
- // based on inferring bounds (NlSolver::checkMonomialInferBounds)
- NL_INFER_BOUNDS,
- // same as above, for inferences that introduce new terms
- NL_INFER_BOUNDS_NT,
- // factoring (NlSolver::checkFactoring)
- NL_FACTOR,
- // resolution bound inferences (NlSolver::checkMonomialInferResBounds)
- NL_RES_INFER_BOUNDS,
- // tangent planes (NlSolver::checkTangentPlanes)
- NL_TANGENT_PLANE,
- //-------------------- transcendental solver
- // purification of arguments to transcendental functions
- NL_T_PURIFY_ARG,
- // initial refinement (TranscendentalSolver::checkTranscendentalInitialRefine)
- NL_T_INIT_REFINE,
- // pi bounds
- NL_T_PI_BOUND,
- // monotonicity (TranscendentalSolver::checkTranscendentalMonotonic)
- NL_T_MONOTONICITY,
- // tangent refinement (TranscendentalSolver::checkTranscendentalTangentPlanes)
- NL_T_TANGENT,
- // secant refinement, the dual of the above inference
- NL_T_SECANT,
- //-------------------- iand solver
- // initial refinements (IAndSolver::checkInitialRefine)
- NL_IAND_INIT_REFINE,
- // value refinements (IAndSolver::checkFullRefine)
- NL_IAND_VALUE_REFINE,
- // sum refinements (IAndSolver::checkFullRefine)
- NL_IAND_SUM_REFINE,
- // bitwise refinements (IAndSolver::checkFullRefine)
- NL_IAND_BITWISE_REFINE,
- //-------------------- cad solver
- // conflict / infeasible subset obtained from cad
- NL_CAD_CONFLICT,
- // excludes an interval for a single variable
- NL_CAD_EXCLUDED_INTERVAL,
- //-------------------- icp solver
- // conflict obtained from icp
- NL_ICP_CONFLICT,
- // propagation / contraction of variable bounds from icp
- NL_ICP_PROPAGATION,
- //-------------------- unknown
- UNKNOWN,
-};
-
-/**
- * Converts an inference to a string. Note: This function is also used in
- * `safe_print()`. Changing this functions name or signature will result in
- * `safe_print()` printing "<unsupported>" instead of the proper strings for
- * the enum values.
- *
- * @param i The inference
- * @return The name of the inference
- */
-const char* toString(InferenceId i);
-
-/**
- * Writes an inference name to a stream.
- *
- * @param out The stream to write to
- * @param i The inference to write to the stream
- * @return The stream
- */
-std::ostream& operator<<(std::ostream& out, InferenceId i);
-
-} // namespace arith
-} // namespace theory
-} // namespace CVC4
-
-#endif /* CVC4__THEORY__ARITH__INFERENCE_H */
diff --git a/src/theory/arith/inference_manager.h b/src/theory/arith/inference_manager.h
index 6de65d677..ea3e1850a 100644
--- a/src/theory/arith/inference_manager.h
+++ b/src/theory/arith/inference_manager.h
@@ -22,7 +22,7 @@
#include "theory/arith/arith_lemma.h"
#include "theory/arith/arith_state.h"
-#include "theory/arith/inference_id.h"
+#include "theory/inference_id.h"
#include "theory/arith/nl/nl_lemma_utils.h"
#include "theory/inference_manager_buffered.h"
diff --git a/src/theory/arith/nl/cad_solver.cpp b/src/theory/arith/nl/cad_solver.cpp
index ea0739235..5418ea5bb 100644
--- a/src/theory/arith/nl/cad_solver.cpp
+++ b/src/theory/arith/nl/cad_solver.cpp
@@ -19,7 +19,7 @@
#endif
#include "options/arith_options.h"
-#include "theory/arith/inference_id.h"
+#include "theory/inference_id.h"
#include "theory/arith/nl/cad/cdcac.h"
#include "theory/arith/nl/poly_conversion.h"
#include "util/poly_util.h"
@@ -93,7 +93,7 @@ void CadSolver::checkFull()
n = n.negate();
}
d_im.addPendingArithLemma(NodeManager::currentNM()->mkOr(mis),
- InferenceId::NL_CAD_CONFLICT);
+ InferenceId::ARITH_NL_CAD_CONFLICT);
}
#else
Warning() << "Tried to use CadSolver but libpoly is not available. Compile "
@@ -140,7 +140,7 @@ void CadSolver::checkPartial()
Trace("nl-cad") << "Excluding " << first_var << " -> "
<< interval.d_interval << " using " << lemma
<< std::endl;
- d_im.addPendingArithLemma(lemma, InferenceId::NL_CAD_EXCLUDED_INTERVAL);
+ d_im.addPendingArithLemma(lemma, InferenceId::ARITH_NL_CAD_EXCLUDED_INTERVAL);
}
}
}
diff --git a/src/theory/arith/nl/ext/factoring_check.cpp b/src/theory/arith/nl/ext/factoring_check.cpp
index 4a567a0f5..20f89aa82 100644
--- a/src/theory/arith/nl/ext/factoring_check.cpp
+++ b/src/theory/arith/nl/ext/factoring_check.cpp
@@ -168,7 +168,7 @@ void FactoringCheck::check(const std::vector<Node>& asserts,
flem, PfRule::MACRO_SR_PRED_TRANSFORM, {split, k_eq}, {flem});
}
d_data->d_im.addPendingArithLemma(
- flem, InferenceId::NL_FACTOR, proof);
+ flem, InferenceId::ARITH_NL_FACTOR, proof);
}
}
}
@@ -186,7 +186,7 @@ Node FactoringCheck::getFactorSkolem(Node n, CDProof* proof)
Node k_eq = k.eqNode(n);
Trace("nl-ext-factor") << "...adding factor skolem " << k << " == " << n
<< std::endl;
- d_data->d_im.addPendingArithLemma(k_eq, InferenceId::NL_FACTOR, proof);
+ d_data->d_im.addPendingArithLemma(k_eq, InferenceId::ARITH_NL_FACTOR, proof);
d_factor_skolem[n] = k;
}
else
diff --git a/src/theory/arith/nl/ext/monomial_bounds_check.cpp b/src/theory/arith/nl/ext/monomial_bounds_check.cpp
index 9eb496de8..9ffaf53c1 100644
--- a/src/theory/arith/nl/ext/monomial_bounds_check.cpp
+++ b/src/theory/arith/nl/ext/monomial_bounds_check.cpp
@@ -320,7 +320,7 @@ void MonomialBoundsCheck::checkBounds(const std::vector<Node>& asserts,
// Trace("nl-ext-bound-lemma") << " intro new
// monomials = " << introNewTerms << std::endl;
d_data->d_im.addPendingArithLemma(
- iblem, InferenceId::NL_INFER_BOUNDS_NT, nullptr, introNewTerms);
+ iblem, InferenceId::ARITH_NL_INFER_BOUNDS_NT, nullptr, introNewTerms);
}
}
}
@@ -479,7 +479,7 @@ void MonomialBoundsCheck::checkResBounds()
Trace("nl-ext-rbound-lemma")
<< "Resolution bound lemma : " << rblem << std::endl;
d_data->d_im.addPendingArithLemma(
- rblem, InferenceId::NL_RES_INFER_BOUNDS);
+ rblem, InferenceId::ARITH_NL_RES_INFER_BOUNDS);
}
}
exp.pop_back();
diff --git a/src/theory/arith/nl/ext/monomial_check.cpp b/src/theory/arith/nl/ext/monomial_check.cpp
index 199ba1746..a007dd4a6 100644
--- a/src/theory/arith/nl/ext/monomial_check.cpp
+++ b/src/theory/arith/nl/ext/monomial_check.cpp
@@ -295,7 +295,7 @@ int MonomialCheck::compareSign(
{
Node lemma =
nm->mkAnd(exp).impNode(mkLit(oa, d_data->d_zero, status * 2));
- d_data->d_im.addPendingArithLemma(lemma, InferenceId::NL_SIGN);
+ d_data->d_im.addPendingArithLemma(lemma, InferenceId::ARITH_NL_SIGN);
}
return status;
}
@@ -321,7 +321,7 @@ int MonomialCheck::compareSign(
proof->addStep(conc, PfRule::MACRO_SR_PRED_INTRO, {prem}, {conc});
proof->addStep(lemma, PfRule::SCOPE, {conc}, {prem});
}
- d_data->d_im.addPendingArithLemma(lemma, InferenceId::NL_SIGN, proof);
+ d_data->d_im.addPendingArithLemma(lemma, InferenceId::ARITH_NL_SIGN, proof);
}
return 0;
}
@@ -410,7 +410,7 @@ bool MonomialCheck::compareMonomial(
Kind::IMPLIES, nm->mkAnd(exp), mkLit(oa, ob, status, true));
Trace("nl-ext-comp-lemma") << "comparison lemma : " << clem << std::endl;
lem.emplace_back(
- clem, LemmaProperty::NONE, nullptr, InferenceId::NL_COMPARISON);
+ clem, LemmaProperty::NONE, nullptr, InferenceId::ARITH_NL_COMPARISON);
cmp_infers[status][oa][ob] = clem;
}
return true;
diff --git a/src/theory/arith/nl/ext/split_zero_check.cpp b/src/theory/arith/nl/ext/split_zero_check.cpp
index d158281cc..2e3cb3cd1 100644
--- a/src/theory/arith/nl/ext/split_zero_check.cpp
+++ b/src/theory/arith/nl/ext/split_zero_check.cpp
@@ -45,7 +45,7 @@ void SplitZeroCheck::check()
proof->addStep(lem, PfRule::SPLIT, {}, {eq});
}
d_data->d_im.addPendingPhaseRequirement(eq, true);
- d_data->d_im.addPendingArithLemma(lem, InferenceId::NL_SPLIT_ZERO, proof);
+ d_data->d_im.addPendingArithLemma(lem, InferenceId::ARITH_NL_SPLIT_ZERO, proof);
}
}
}
diff --git a/src/theory/arith/nl/ext/tangent_plane_check.cpp b/src/theory/arith/nl/ext/tangent_plane_check.cpp
index 18c31368f..a4642b73a 100644
--- a/src/theory/arith/nl/ext/tangent_plane_check.cpp
+++ b/src/theory/arith/nl/ext/tangent_plane_check.cpp
@@ -147,7 +147,7 @@ void TangentPlaneCheck::check(bool asWaitingLemmas)
nm->mkConst(Rational(d == 0 ? -1 : 1))});
}
d_data->d_im.addPendingArithLemma(
- tlem, InferenceId::NL_TANGENT_PLANE, proof, asWaitingLemmas);
+ tlem, InferenceId::ARITH_NL_TANGENT_PLANE, proof, asWaitingLemmas);
}
}
}
diff --git a/src/theory/arith/nl/iand_solver.cpp b/src/theory/arith/nl/iand_solver.cpp
index 5415e6a86..ff51f7bb5 100644
--- a/src/theory/arith/nl/iand_solver.cpp
+++ b/src/theory/arith/nl/iand_solver.cpp
@@ -99,7 +99,7 @@ void IAndSolver::checkInitialRefine()
Node lem = conj.size() == 1 ? conj[0] : nm->mkNode(AND, conj);
Trace("iand-lemma") << "IAndSolver::Lemma: " << lem << " ; INIT_REFINE"
<< std::endl;
- d_im.addPendingArithLemma(lem, InferenceId::NL_IAND_INIT_REFINE);
+ d_im.addPendingArithLemma(lem, InferenceId::ARITH_NL_IAND_INIT_REFINE);
}
}
}
@@ -153,7 +153,7 @@ void IAndSolver::checkFullRefine()
// note that lemma can contain div/mod, and will be preprocessed in the
// prop engine
d_im.addPendingArithLemma(
- lem, InferenceId::NL_IAND_SUM_REFINE, nullptr, true);
+ lem, InferenceId::ARITH_NL_IAND_SUM_REFINE, nullptr, true);
}
else if (options::iandMode() == options::IandMode::BITWISE)
{
@@ -163,7 +163,7 @@ void IAndSolver::checkFullRefine()
// note that lemma can contain div/mod, and will be preprocessed in the
// prop engine
d_im.addPendingArithLemma(
- lem, InferenceId::NL_IAND_BITWISE_REFINE, nullptr, true);
+ lem, InferenceId::ARITH_NL_IAND_BITWISE_REFINE, nullptr, true);
}
else
{
@@ -173,7 +173,7 @@ void IAndSolver::checkFullRefine()
<< "IAndSolver::Lemma: " << lem << " ; VALUE_REFINE" << std::endl;
// value lemmas should not contain div/mod so we don't need to tag it with PREPROCESS
d_im.addPendingArithLemma(lem,
- InferenceId::NL_IAND_VALUE_REFINE,
+ InferenceId::ARITH_NL_IAND_VALUE_REFINE,
nullptr,
true,
LemmaProperty::NONE);
diff --git a/src/theory/arith/nl/icp/icp_solver.cpp b/src/theory/arith/nl/icp/icp_solver.cpp
index ec32656e0..af6093be1 100644
--- a/src/theory/arith/nl/icp/icp_solver.cpp
+++ b/src/theory/arith/nl/icp/icp_solver.cpp
@@ -349,7 +349,7 @@ void ICPSolver::check()
mis.emplace_back(n.negate());
}
d_im.addPendingArithLemma(NodeManager::currentNM()->mkOr(mis),
- InferenceId::NL_ICP_CONFLICT);
+ InferenceId::ARITH_NL_ICP_CONFLICT);
did_progress = true;
progress = false;
break;
@@ -360,7 +360,7 @@ void ICPSolver::check()
std::vector<Node> lemmas = generateLemmas();
for (const auto& l : lemmas)
{
- d_im.addPendingArithLemma(l, InferenceId::NL_ICP_PROPAGATION);
+ d_im.addPendingArithLemma(l, InferenceId::ARITH_NL_ICP_PROPAGATION);
}
}
}
diff --git a/src/theory/arith/nl/nonlinear_extension.cpp b/src/theory/arith/nl/nonlinear_extension.cpp
index 57b2803d9..d6a1e94a1 100644
--- a/src/theory/arith/nl/nonlinear_extension.cpp
+++ b/src/theory/arith/nl/nonlinear_extension.cpp
@@ -511,7 +511,7 @@ bool NonlinearExtension::modelBasedRefinement()
d_containing.getOutputChannel().requirePhase(literal, true);
Trace("nl-ext-debug") << "Split on : " << literal << std::endl;
Node split = literal.orNode(literal.negate());
- NlLemma nsplit(split, InferenceId::NL_SHARED_TERM_VALUE_SPLIT);
+ NlLemma nsplit(split, InferenceId::ARITH_NL_SHARED_TERM_VALUE_SPLIT);
d_im.addPendingArithLemma(nsplit, true);
}
if (d_im.hasWaitingLemma())
diff --git a/src/theory/arith/nl/stats.h b/src/theory/arith/nl/stats.h
index 6e78b7dd2..5d3dd845c 100644
--- a/src/theory/arith/nl/stats.h
+++ b/src/theory/arith/nl/stats.h
@@ -18,7 +18,7 @@
#define CVC4__THEORY__ARITH__NL__STATS_H
#include "expr/kind.h"
-#include "theory/arith/inference_id.h"
+#include "theory/inference_id.h"
#include "util/statistics_registry.h"
namespace CVC4 {
diff --git a/src/theory/arith/nl/transcendental/exponential_solver.cpp b/src/theory/arith/nl/transcendental/exponential_solver.cpp
index 2ad7d39a2..cc60d20fd 100644
--- a/src/theory/arith/nl/transcendental/exponential_solver.cpp
+++ b/src/theory/arith/nl/transcendental/exponential_solver.cpp
@@ -45,7 +45,7 @@ void ExponentialSolver::doPurification(TNode a, TNode new_a, TNode y)
// note we must do preprocess on this lemma
Trace("nl-ext-lemma") << "NonlinearExtension::Lemma : purify : " << lem
<< std::endl;
- NlLemma nlem(lem, LemmaProperty::NONE, nullptr, InferenceId::NL_T_PURIFY_ARG);
+ NlLemma nlem(lem, LemmaProperty::NONE, nullptr, InferenceId::ARITH_NL_T_PURIFY_ARG);
d_data->d_im.addPendingArithLemma(nlem);
}
@@ -72,7 +72,7 @@ void ExponentialSolver::checkInitialRefine()
// exp is always positive: exp(t) > 0
Node lem = nm->mkNode(Kind::GT, t, d_data->d_zero);
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE, nullptr);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE, nullptr);
}
{
// exp at zero: (t = 0) <=> (exp(t) = 1)
@@ -80,7 +80,7 @@ void ExponentialSolver::checkInitialRefine()
t[0].eqNode(d_data->d_zero),
t.eqNode(d_data->d_one));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE, nullptr);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE, nullptr);
}
{
// exp on negative values: (t < 0) <=> (exp(t) < 1)
@@ -88,7 +88,7 @@ void ExponentialSolver::checkInitialRefine()
nm->mkNode(Kind::LT, t[0], d_data->d_zero),
nm->mkNode(Kind::LT, t, d_data->d_one));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE, nullptr);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE, nullptr);
}
{
// exp on positive values: (t <= 0) or (exp(t) > t+1)
@@ -98,7 +98,7 @@ void ExponentialSolver::checkInitialRefine()
nm->mkNode(
Kind::GT, t, nm->mkNode(Kind::PLUS, t[0], d_data->d_one)));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE, nullptr);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE, nullptr);
}
}
}
@@ -163,7 +163,7 @@ void ExponentialSolver::checkMonotonic()
Trace("nl-ext-exp") << "Monotonicity lemma : " << mono_lem << std::endl;
d_data->d_im.addPendingArithLemma(mono_lem,
- InferenceId::NL_T_MONOTONICITY);
+ InferenceId::ARITH_NL_T_MONOTONICITY);
}
// store the previous values
targ = sarg;
@@ -190,7 +190,7 @@ void ExponentialSolver::doTangentLemma(TNode e, TNode c, TNode poly_approx)
Trace("nl-ext-exp") << "*** Tangent plane lemma : " << lem << std::endl;
Assert(d_data->d_model.computeAbstractModelValue(lem) == d_data->d_false);
// Figure 3 : line 9
- d_data->d_im.addPendingArithLemma(lem, InferenceId::NL_T_TANGENT, nullptr, true);
+ d_data->d_im.addPendingArithLemma(lem, InferenceId::ARITH_NL_T_TANGENT, nullptr, true);
}
void ExponentialSolver::doSecantLemmas(TNode e,
diff --git a/src/theory/arith/nl/transcendental/sine_solver.cpp b/src/theory/arith/nl/transcendental/sine_solver.cpp
index 1e3e1753d..99bb093bb 100644
--- a/src/theory/arith/nl/transcendental/sine_solver.cpp
+++ b/src/theory/arith/nl/transcendental/sine_solver.cpp
@@ -79,7 +79,7 @@ void SineSolver::doPhaseShift(TNode a, TNode new_a, TNode y)
// note we must do preprocess on this lemma
Trace("nl-ext-lemma") << "NonlinearExtension::Lemma : purify : " << lem
<< std::endl;
- NlLemma nlem(lem, LemmaProperty::NONE, proof, InferenceId::NL_T_PURIFY_ARG);
+ NlLemma nlem(lem, LemmaProperty::NONE, proof, InferenceId::ARITH_NL_T_PURIFY_ARG);
d_data->d_im.addPendingArithLemma(nlem);
}
@@ -116,13 +116,13 @@ void SineSolver::checkInitialRefine()
nm->mkNode(Kind::LEQ, t, d_data->d_one),
nm->mkNode(Kind::GEQ, t, d_data->d_neg_one));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE);
}
{
// sine symmetry: sin(t) - sin(-t) = 0
Node lem = nm->mkNode(Kind::PLUS, t, symn).eqNode(d_data->d_zero);
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE);
}
{
// sine zero tangent:
@@ -137,7 +137,7 @@ void SineSolver::checkInitialRefine()
nm->mkNode(Kind::LT, t[0], d_data->d_zero),
nm->mkNode(Kind::GT, t, t[0])));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE);
}
{
// sine pi tangent:
@@ -158,7 +158,7 @@ void SineSolver::checkInitialRefine()
t,
nm->mkNode(Kind::MINUS, d_data->d_pi, t[0]))));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE);
}
{
Node lem =
@@ -172,7 +172,7 @@ void SineSolver::checkInitialRefine()
nm->mkNode(Kind::GT, t[0], d_data->d_zero),
nm->mkNode(Kind::GT, t, d_data->d_zero)));
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_INIT_REFINE);
+ lem, InferenceId::ARITH_NL_T_INIT_REFINE);
}
}
}
@@ -312,7 +312,7 @@ void SineSolver::checkMonotonic()
<< "Monotonicity lemma : " << mono_lem << std::endl;
d_data->d_im.addPendingArithLemma(mono_lem,
- InferenceId::NL_T_MONOTONICITY);
+ InferenceId::ARITH_NL_T_MONOTONICITY);
}
}
// store the previous values
@@ -353,7 +353,7 @@ void SineSolver::doTangentLemma(TNode e, TNode c, TNode poly_approx, int region)
Assert(d_data->d_model.computeAbstractModelValue(lem) == d_data->d_false);
// Figure 3 : line 9
d_data->d_im.addPendingArithLemma(
- lem, InferenceId::NL_T_TANGENT, nullptr, true);
+ lem, InferenceId::ARITH_NL_T_TANGENT, nullptr, true);
}
void SineSolver::doSecantLemmas(TNode e,
diff --git a/src/theory/arith/nl/transcendental/transcendental_state.cpp b/src/theory/arith/nl/transcendental/transcendental_state.cpp
index b4f2e4cf6..032cf1411 100644
--- a/src/theory/arith/nl/transcendental/transcendental_state.cpp
+++ b/src/theory/arith/nl/transcendental/transcendental_state.cpp
@@ -179,7 +179,7 @@ void TranscendentalState::ensureCongruence(TNode a,
}
Node expn = exp.size() == 1 ? exp[0] : nm->mkNode(Kind::AND, exp);
Node cong_lemma = expn.impNode(a.eqNode(aa));
- d_im.addPendingArithLemma(cong_lemma, InferenceId::NL_CONGRUENCE);
+ d_im.addPendingArithLemma(cong_lemma, InferenceId::ARITH_NL_CONGRUENCE);
}
}
else
@@ -222,7 +222,7 @@ void TranscendentalState::getCurrentPiBounds()
proof->addStep(
pi_lem, PfRule::ARITH_TRANS_PI, {}, {d_pi_bound[0], d_pi_bound[1]});
}
- d_im.addPendingArithLemma(pi_lem, InferenceId::NL_T_PI_BOUND, proof);
+ d_im.addPendingArithLemma(pi_lem, InferenceId::ARITH_NL_T_PI_BOUND, proof);
}
std::pair<Node, Node> TranscendentalState::getClosestSecantPoints(TNode e,
@@ -319,7 +319,7 @@ NlLemma TranscendentalState::mkSecantLemma(TNode lower,
lem = Rewriter::rewrite(lem);
Trace("nl-trans-lemma") << "*** Secant plane lemma : " << lem << std::endl;
Assert(d_model.computeAbstractModelValue(lem) == d_false);
- return NlLemma(lem, LemmaProperty::NONE, nullptr, InferenceId::NL_T_SECANT);
+ return NlLemma(lem, LemmaProperty::NONE, nullptr, InferenceId::ARITH_NL_T_SECANT);
}
void TranscendentalState::doSecantLemmas(const std::pair<Node, Node>& bounds,
diff --git a/src/theory/bags/infer_info.cpp b/src/theory/bags/infer_info.cpp
index 9bf546af1..9b5187689 100644
--- a/src/theory/bags/infer_info.cpp
+++ b/src/theory/bags/infer_info.cpp
@@ -20,34 +20,7 @@ namespace CVC4 {
namespace theory {
namespace bags {
-const char* toString(Inference i)
-{
- switch (i)
- {
- case Inference::NONE: return "NONE";
- case Inference::BAG_NON_NEGATIVE_COUNT: return "BAG_NON_NEGATIVE_COUNT";
- case Inference::BAG_MK_BAG_SAME_ELEMENT: return "BAG_MK_BAG_SAME_ELEMENT";
- case Inference::BAG_MK_BAG: return "BAG_MK_BAG";
- case Inference::BAG_EQUALITY: return "BAG_EQUALITY";
- case Inference::BAG_DISEQUALITY: return "BAG_DISEQUALITY";
- case Inference::BAG_EMPTY: return "BAG_EMPTY";
- case Inference::BAG_UNION_DISJOINT: return "BAG_UNION_DISJOINT";
- case Inference::BAG_UNION_MAX: return "BAG_UNION_MAX";
- case Inference::BAG_INTERSECTION_MIN: return "BAG_INTERSECTION_MIN";
- case Inference::BAG_DIFFERENCE_SUBTRACT: return "BAG_DIFFERENCE_SUBTRACT";
- case Inference::BAG_DIFFERENCE_REMOVE: return "BAG_DIFFERENCE_REMOVE";
- case Inference::BAG_DUPLICATE_REMOVAL: return "BAG_DUPLICATE_REMOVAL";
- default: return "?";
- }
-}
-
-std::ostream& operator<<(std::ostream& out, Inference i)
-{
- out << toString(i);
- return out;
-}
-
-InferInfo::InferInfo() : d_id(Inference::NONE) {}
+InferInfo::InferInfo() : d_id(InferenceId::UNKNOWN) {}
bool InferInfo::process(TheoryInferenceManager* im, bool asLemma)
{
diff --git a/src/theory/bags/infer_info.h b/src/theory/bags/infer_info.h
index ecfc354d1..8628d19f6 100644
--- a/src/theory/bags/infer_info.h
+++ b/src/theory/bags/infer_info.h
@@ -21,52 +21,13 @@
#include <vector>
#include "expr/node.h"
+#include "theory/inference_id.h"
#include "theory/theory_inference.h"
namespace CVC4 {
namespace theory {
namespace bags {
-/**
- * Types of inferences used in the procedure
- */
-enum class Inference : uint32_t
-{
- NONE,
- BAG_NON_NEGATIVE_COUNT,
- BAG_MK_BAG_SAME_ELEMENT,
- BAG_MK_BAG,
- BAG_EQUALITY,
- BAG_DISEQUALITY,
- BAG_EMPTY,
- BAG_UNION_DISJOINT,
- BAG_UNION_MAX,
- BAG_INTERSECTION_MIN,
- BAG_DIFFERENCE_SUBTRACT,
- BAG_DIFFERENCE_REMOVE,
- BAG_DUPLICATE_REMOVAL
-};
-
-/**
- * Converts an inference to a string. Note: This function is also used in
- * `safe_print()`. Changing this functions name or signature will result in
- * `safe_print()` printing "<unsupported>" instead of the proper strings for
- * the enum values.
- *
- * @param i The inference
- * @return The name of the inference
- */
-const char* toString(Inference i);
-
-/**
- * Writes an inference name to a stream.
- *
- * @param out The stream to write to
- * @param i The inference to write to the stream
- * @return The stream
- */
-std::ostream& operator<<(std::ostream& out, Inference i);
-
class InferenceManager;
/**
@@ -82,8 +43,8 @@ class InferInfo : public TheoryInference
/** Process this inference */
bool process(TheoryInferenceManager* im, bool asLemma) override;
/** The inference identifier */
- Inference d_id;
- /** The conclusions */
+ InferenceId d_id;
+ /** The conclusion */
Node d_conclusion;
/**
* The premise(s) of the inference, interpreted conjunctively. These are
diff --git a/src/theory/bags/inference_generator.cpp b/src/theory/bags/inference_generator.cpp
index 708c25f34..6d8b6a33b 100644
--- a/src/theory/bags/inference_generator.cpp
+++ b/src/theory/bags/inference_generator.cpp
@@ -38,7 +38,7 @@ InferInfo InferenceGenerator::nonNegativeCount(Node n, Node e)
Assert(e.getType() == n.getType().getBagElementType());
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_NON_NEGATIVE_COUNT;
+ inferInfo.d_id = InferenceId::BAG_NON_NEGATIVE_COUNT;
Node count = d_nm->mkNode(kind::BAG_COUNT, e, n);
Node gte = d_nm->mkNode(kind::GEQ, count, d_zero);
@@ -58,7 +58,7 @@ InferInfo InferenceGenerator::mkBag(Node n, Node e)
{
// TODO issue #78: refactor this with BagRewriter
// (=> true (= (bag.count e (bag e c)) c))
- inferInfo.d_id = Inference::BAG_MK_BAG_SAME_ELEMENT;
+ inferInfo.d_id = InferenceId::BAG_MK_BAG_SAME_ELEMENT;
inferInfo.d_conclusion = count.eqNode(n[1]);
}
else
@@ -66,7 +66,7 @@ InferInfo InferenceGenerator::mkBag(Node n, Node e)
// (=>
// true
// (= (bag.count e (bag x c)) (ite (= e x) c 0)))
- inferInfo.d_id = Inference::BAG_MK_BAG;
+ inferInfo.d_id = InferenceId::BAG_MK_BAG;
Node same = d_nm->mkNode(kind::EQUAL, n[0], e);
Node ite = d_nm->mkNode(kind::ITE, same, n[1], d_zero);
Node equal = count.eqNode(ite);
@@ -88,7 +88,7 @@ InferInfo InferenceGenerator::bagDisequality(Node n)
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_DISEQUALITY;
+ inferInfo.d_id = InferenceId::BAG_DISEQUALITY;
TypeNode elementType = A.getType().getBagElementType();
BoundVarManager* bvm = d_nm->getBoundVarManager();
@@ -123,7 +123,7 @@ InferInfo InferenceGenerator::empty(Node n, Node e)
InferInfo inferInfo;
Node skolem = getSkolem(n, inferInfo);
- inferInfo.d_id = Inference::BAG_EMPTY;
+ inferInfo.d_id = InferenceId::BAG_EMPTY;
Node count = getMultiplicityTerm(e, skolem);
Node equal = count.eqNode(d_zero);
@@ -139,7 +139,7 @@ InferInfo InferenceGenerator::unionDisjoint(Node n, Node e)
Node A = n[0];
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_UNION_DISJOINT;
+ inferInfo.d_id = InferenceId::BAG_UNION_DISJOINT;
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -162,7 +162,7 @@ InferInfo InferenceGenerator::unionMax(Node n, Node e)
Node A = n[0];
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_UNION_MAX;
+ inferInfo.d_id = InferenceId::BAG_UNION_MAX;
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -186,7 +186,7 @@ InferInfo InferenceGenerator::intersection(Node n, Node e)
Node A = n[0];
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_INTERSECTION_MIN;
+ inferInfo.d_id = InferenceId::BAG_INTERSECTION_MIN;
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -208,7 +208,7 @@ InferInfo InferenceGenerator::differenceSubtract(Node n, Node e)
Node A = n[0];
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_DIFFERENCE_SUBTRACT;
+ inferInfo.d_id = InferenceId::BAG_DIFFERENCE_SUBTRACT;
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -231,7 +231,7 @@ InferInfo InferenceGenerator::differenceRemove(Node n, Node e)
Node A = n[0];
Node B = n[1];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_DIFFERENCE_REMOVE;
+ inferInfo.d_id = InferenceId::BAG_DIFFERENCE_REMOVE;
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -253,7 +253,7 @@ InferInfo InferenceGenerator::duplicateRemoval(Node n, Node e)
Node A = n[0];
InferInfo inferInfo;
- inferInfo.d_id = Inference::BAG_DUPLICATE_REMOVAL;
+ inferInfo.d_id = InferenceId::BAG_DUPLICATE_REMOVAL;
Node countA = getMultiplicityTerm(e, A);
Node skolem = getSkolem(n, inferInfo);
diff --git a/src/theory/datatypes/infer_proof_cons.cpp b/src/theory/datatypes/infer_proof_cons.cpp
index f483291ca..fcf16b127 100644
--- a/src/theory/datatypes/infer_proof_cons.cpp
+++ b/src/theory/datatypes/infer_proof_cons.cpp
@@ -44,7 +44,7 @@ void InferProofCons::notifyFact(const std::shared_ptr<DatatypesInference>& di)
d_lazyFactMap.insert(fact, di);
}
-void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
+void InferProofCons::convert(InferenceId infer, TNode conc, TNode exp, CDProof* cdp)
{
Trace("dt-ipc") << "convert: " << infer << ": " << conc << " by " << exp
<< std::endl;
@@ -68,7 +68,7 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
bool success = false;
switch (infer)
{
- case InferId::UNIF:
+ case InferenceId::DATATYPES_UNIF:
{
Assert(expv.size() == 1);
Assert(exp.getKind() == EQUAL && exp[0].getKind() == APPLY_CONSTRUCTOR
@@ -126,7 +126,7 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
}
}
break;
- case InferId::INST:
+ case InferenceId::DATATYPES_INST:
{
if (expv.size() == 1)
{
@@ -144,7 +144,7 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
}
}
break;
- case InferId::SPLIT:
+ case InferenceId::DATATYPES_SPLIT:
{
Assert(expv.empty());
Node t = conc.getKind() == OR ? conc[0][0] : conc[0];
@@ -152,7 +152,7 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
success = true;
}
break;
- case InferId::COLLAPSE_SEL:
+ case InferenceId::DATATYPES_COLLAPSE_SEL:
{
Assert(exp.getKind() == EQUAL);
Node concEq = conc;
@@ -189,13 +189,13 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
success = true;
}
break;
- case InferId::CLASH_CONFLICT:
+ case InferenceId::DATATYPES_CLASH_CONFLICT:
{
cdp->addStep(conc, PfRule::MACRO_SR_PRED_ELIM, {exp}, {});
success = true;
}
break;
- case InferId::TESTER_CONFLICT:
+ case InferenceId::DATATYPES_TESTER_CONFLICT:
{
// rewrites to false under substitution
Node fn = nm->mkConst(false);
@@ -203,7 +203,7 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
success = true;
}
break;
- case InferId::TESTER_MERGE_CONFLICT:
+ case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT:
{
Assert(expv.size() == 3);
Node tester1 = expv[0];
@@ -219,9 +219,9 @@ void InferProofCons::convert(InferId infer, TNode conc, TNode exp, CDProof* cdp)
}
break;
// inferences currently not supported
- case InferId::LABEL_EXH:
- case InferId::BISIMILAR:
- case InferId::CYCLE:
+ case InferenceId::DATATYPES_LABEL_EXH:
+ case InferenceId::DATATYPES_BISIMILAR:
+ case InferenceId::DATATYPES_CYCLE:
default:
Trace("dt-ipc") << "...no conversion for inference " << infer
<< std::endl;
diff --git a/src/theory/datatypes/infer_proof_cons.h b/src/theory/datatypes/infer_proof_cons.h
index af6efc0a8..b18a5d8ec 100644
--- a/src/theory/datatypes/infer_proof_cons.h
+++ b/src/theory/datatypes/infer_proof_cons.h
@@ -83,7 +83,7 @@ class InferProofCons : public ProofGenerator
* step(s) are for concluding the conclusion of the inference. This
* information is stored in cdp.
*/
- void convert(InferId infer, TNode conc, TNode exp, CDProof* cdp);
+ void convert(InferenceId infer, TNode conc, TNode exp, CDProof* cdp);
/** A dummy context used by this class if none is provided */
context::Context d_context;
/** the proof node manager */
diff --git a/src/theory/datatypes/inference.cpp b/src/theory/datatypes/inference.cpp
index 308222146..0f5a5e869 100644
--- a/src/theory/datatypes/inference.cpp
+++ b/src/theory/datatypes/inference.cpp
@@ -25,35 +25,10 @@ namespace CVC4 {
namespace theory {
namespace datatypes {
-const char* toString(InferId i)
-{
- switch (i)
- {
- case InferId::NONE: return "NONE";
- case InferId::UNIF: return "UNIF";
- case InferId::INST: return "INST";
- case InferId::SPLIT: return "SPLIT";
- case InferId::LABEL_EXH: return "LABEL_EXH";
- case InferId::COLLAPSE_SEL: return "COLLAPSE_SEL";
- case InferId::CLASH_CONFLICT: return "CLASH_CONFLICT";
- case InferId::TESTER_CONFLICT: return "TESTER_CONFLICT";
- case InferId::TESTER_MERGE_CONFLICT: return "TESTER_MERGE_CONFLICT";
- case InferId::BISIMILAR: return "BISIMILAR";
- case InferId::CYCLE: return "CYCLE";
- default: return "?";
- }
-}
-
-std::ostream& operator<<(std::ostream& out, InferId i)
-{
- out << toString(i);
- return out;
-}
-
DatatypesInference::DatatypesInference(InferenceManager* im,
Node conc,
Node exp,
- InferId i)
+ InferenceId i)
: SimpleTheoryInternalFact(conc, exp, nullptr), d_im(im), d_id(i)
{
// false is not a valid explanation
@@ -98,7 +73,7 @@ bool DatatypesInference::process(TheoryInferenceManager* im, bool asLemma)
return d_im->processDtFact(d_conc, d_exp, d_id);
}
-InferId DatatypesInference::getInferId() const { return d_id; }
+InferenceId DatatypesInference::getInferId() const { return d_id; }
} // namespace datatypes
} // namespace theory
diff --git a/src/theory/datatypes/inference.h b/src/theory/datatypes/inference.h
index 1cf135a7b..ca7e08f43 100644
--- a/src/theory/datatypes/inference.h
+++ b/src/theory/datatypes/inference.h
@@ -20,56 +20,12 @@
#include "context/cdhashmap.h"
#include "expr/node.h"
#include "theory/inference_manager_buffered.h"
+#include "theory/inference_id.h"
namespace CVC4 {
namespace theory {
namespace datatypes {
-enum class InferId : uint32_t
-{
- NONE,
- // (= (C t1 ... tn) (C s1 .. sn)) => (= ti si)
- UNIF,
- // ((_ is Ci) t) => (= t (Ci (sel_1 t) ... (sel_n t)))
- INST,
- // (or ((_ is C1) t) V ... V ((_ is Cn) t))
- SPLIT,
- // (not ((_ is C1) t)) ^ ... [j] ... ^ (not ((_ is Cn) t)) => ((_ is Cj) t)
- LABEL_EXH,
- // (= t (Ci t1 ... tn)) => (= (sel_j t) rewrite((sel_j (Ci t1 ... tn))))
- COLLAPSE_SEL,
- // (= (Ci t1...tn) (Cj t1...tn)) => false
- CLASH_CONFLICT,
- // ((_ is Ci) t) ^ (= t (Cj t1 ... tn)) => false
- TESTER_CONFLICT,
- // ((_ is Ci) t) ^ ((_ is Cj) s) ^ (= t s) => false
- TESTER_MERGE_CONFLICT,
- // bisimilarity for codatatypes
- BISIMILAR,
- // cycle conflict for datatypes
- CYCLE,
-};
-
-/**
- * Converts an inference to a string. Note: This function is also used in
- * `safe_print()`. Changing this functions name or signature will result in
- * `safe_print()` printing "<unsupported>" instead of the proper strings for
- * the enum values.
- *
- * @param i The inference
- * @return The name of the inference
- */
-const char* toString(InferId i);
-
-/**
- * Writes an inference name to a stream.
- *
- * @param out The stream to write to
- * @param i The inference to write to the stream
- * @return The stream
- */
-std::ostream& operator<<(std::ostream& out, InferId i);
-
class InferenceManager;
/**
@@ -83,7 +39,7 @@ class DatatypesInference : public SimpleTheoryInternalFact
DatatypesInference(InferenceManager* im,
Node conc,
Node exp,
- InferId i = InferId::NONE);
+ InferenceId i = InferenceId::UNKNOWN);
/**
* Must communicate fact method.
* The datatypes decision procedure makes "internal" inferences :
@@ -107,13 +63,13 @@ class DatatypesInference : public SimpleTheoryInternalFact
*/
bool process(TheoryInferenceManager* im, bool asLemma) override;
/** Get the inference identifier */
- InferId getInferId() const;
+ InferenceId getInferId() const;
private:
/** Pointer to the inference manager */
InferenceManager* d_im;
/** The inference */
- InferId d_id;
+ InferenceId d_id;
};
} // namespace datatypes
diff --git a/src/theory/datatypes/inference_manager.cpp b/src/theory/datatypes/inference_manager.cpp
index e9496ab0a..7ecac6e3f 100644
--- a/src/theory/datatypes/inference_manager.cpp
+++ b/src/theory/datatypes/inference_manager.cpp
@@ -56,7 +56,7 @@ InferenceManager::~InferenceManager()
void InferenceManager::addPendingInference(Node conc,
Node exp,
bool forceLemma,
- InferId i)
+ InferenceId i)
{
if (forceLemma)
{
@@ -77,7 +77,7 @@ void InferenceManager::process()
}
void InferenceManager::sendDtLemma(Node lem,
- InferId id,
+ InferenceId id,
LemmaProperty p,
bool doCache)
{
@@ -93,7 +93,7 @@ void InferenceManager::sendDtLemma(Node lem,
}
}
-void InferenceManager::sendDtConflict(const std::vector<Node>& conf, InferId id)
+void InferenceManager::sendDtConflict(const std::vector<Node>& conf, InferenceId id)
{
if (isProofEnabled())
{
@@ -120,7 +120,7 @@ bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas)
bool InferenceManager::isProofEnabled() const { return d_ipc != nullptr; }
bool InferenceManager::processDtLemma(
- Node conc, Node exp, InferId id, LemmaProperty p, bool doCache)
+ Node conc, Node exp, InferenceId id, LemmaProperty p, bool doCache)
{
// set up a proof constructor
std::shared_ptr<InferProofCons> ipcl;
@@ -163,7 +163,7 @@ bool InferenceManager::processDtLemma(
return true;
}
-bool InferenceManager::processDtFact(Node conc, Node exp, InferId id)
+bool InferenceManager::processDtFact(Node conc, Node exp, InferenceId id)
{
conc = prepareDtInference(conc, exp, id, d_ipc.get());
// assert the internal fact, which has the same issue as above
@@ -189,7 +189,7 @@ bool InferenceManager::processDtFact(Node conc, Node exp, InferId id)
Node InferenceManager::prepareDtInference(Node conc,
Node exp,
- InferId id,
+ InferenceId id,
InferProofCons* ipc)
{
Trace("dt-lemma-debug") << "prepareDtInference : " << conc << " via " << exp
diff --git a/src/theory/datatypes/inference_manager.h b/src/theory/datatypes/inference_manager.h
index 88b2befd7..683b696c9 100644
--- a/src/theory/datatypes/inference_manager.h
+++ b/src/theory/datatypes/inference_manager.h
@@ -54,7 +54,7 @@ class InferenceManager : public InferenceManagerBuffered
void addPendingInference(Node conc,
Node exp,
bool forceLemma = false,
- InferId i = InferId::NONE);
+ InferenceId i = InferenceId::UNKNOWN);
/**
* Process the current lemmas and facts. This is a custom method that can
* be seen as overriding the behavior of calling both doPendingLemmas and
@@ -66,13 +66,13 @@ class InferenceManager : public InferenceManagerBuffered
* Send lemma immediately on the output channel
*/
void sendDtLemma(Node lem,
- InferId i = InferId::NONE,
+ InferenceId i = InferenceId::UNKNOWN,
LemmaProperty p = LemmaProperty::NONE,
bool doCache = true);
/**
* Send conflict immediately on the output channel
*/
- void sendDtConflict(const std::vector<Node>& conf, InferId i = InferId::NONE);
+ void sendDtConflict(const std::vector<Node>& conf, InferenceId i = InferenceId::UNKNOWN);
/**
* Send lemmas with property NONE on the output channel immediately.
* Returns true if any lemma was sent.
@@ -87,13 +87,13 @@ class InferenceManager : public InferenceManagerBuffered
*/
bool processDtLemma(Node conc,
Node exp,
- InferId id,
+ InferenceId id,
LemmaProperty p = LemmaProperty::NONE,
bool doCache = true);
/**
* Process datatype inference as a fact
*/
- bool processDtFact(Node conc, Node exp, InferId id);
+ bool processDtFact(Node conc, Node exp, InferenceId id);
/**
* Helper function for the above methods. Returns the conclusion, which
* may be modified so that it is compatible with proofs. If proofs are
@@ -106,16 +106,16 @@ class InferenceManager : public InferenceManagerBuffered
* status for proof generation. If this is not done, then it is possible
* to have proofs with missing connections and hence free assumptions.
*/
- Node prepareDtInference(Node conc, Node exp, InferId id, InferProofCons* ipc);
+ Node prepareDtInference(Node conc, Node exp, InferenceId id, InferProofCons* ipc);
/** The false node */
Node d_false;
/**
* Counts the number of applications of each type of inference processed by
* the above method as facts, lemmas and conflicts.
*/
- HistogramStat<InferId> d_inferenceLemmas;
- HistogramStat<InferId> d_inferenceFacts;
- HistogramStat<InferId> d_inferenceConflicts;
+ HistogramStat<InferenceId> d_inferenceLemmas;
+ HistogramStat<InferenceId> d_inferenceFacts;
+ HistogramStat<InferenceId> d_inferenceConflicts;
/** Pointer to the proof node manager */
ProofNodeManager* d_pnm;
/** The inference to proof converter */
diff --git a/src/theory/datatypes/theory_datatypes.cpp b/src/theory/datatypes/theory_datatypes.cpp
index cb3198423..064a4b2d1 100644
--- a/src/theory/datatypes/theory_datatypes.cpp
+++ b/src/theory/datatypes/theory_datatypes.cpp
@@ -307,7 +307,7 @@ void TheoryDatatypes::postCheck(Effort level)
//this may not be necessary?
//if only one constructor, then this term must be this constructor
Node t = utils::mkTester(n, 0, dt);
- d_im.addPendingInference(t, d_true, false, InferId::SPLIT);
+ d_im.addPendingInference(t, d_true, false, InferenceId::DATATYPES_SPLIT);
Trace("datatypes-infer") << "DtInfer : 1-cons (full) : " << t << std::endl;
}else{
Assert(consIndex != -1 || dt.isSygus());
@@ -325,7 +325,7 @@ void TheoryDatatypes::postCheck(Effort level)
Node lemma = utils::mkSplit(n, dt);
Trace("dt-split-debug") << "Split lemma is : " << lemma << std::endl;
d_im.sendDtLemma(lemma,
- InferId::SPLIT,
+ InferenceId::DATATYPES_SPLIT,
LemmaProperty::SEND_ATOMS,
false);
}
@@ -679,7 +679,7 @@ void TheoryDatatypes::merge( Node t1, Node t2 ){
conf.push_back(unifEq);
Trace("dt-conflict")
<< "CONFLICT: Clash conflict : " << conf << std::endl;
- d_im.sendDtConflict(conf, InferId::CLASH_CONFLICT);
+ d_im.sendDtConflict(conf, InferenceId::DATATYPES_CLASH_CONFLICT);
return;
}
else
@@ -688,7 +688,7 @@ void TheoryDatatypes::merge( Node t1, Node t2 ){
for( int i=0; i<(int)cons1.getNumChildren(); i++ ) {
if( !areEqual( cons1[i], cons2[i] ) ){
Node eq = cons1[i].eqNode( cons2[i] );
- d_im.addPendingInference(eq, unifEq, false, InferId::UNIF);
+ d_im.addPendingInference(eq, unifEq, false, InferenceId::DATATYPES_UNIF);
Trace("datatypes-infer") << "DtInfer : cons-inj : " << eq << " by " << unifEq << std::endl;
}
}
@@ -883,7 +883,7 @@ void TheoryDatatypes::addTester(
conf.push_back(t_arg.eqNode(eqc->d_constructor.get()));
Trace("dt-conflict")
<< "CONFLICT: Tester eq conflict " << conf << std::endl;
- d_im.sendDtConflict(conf, InferId::TESTER_CONFLICT);
+ d_im.sendDtConflict(conf, InferenceId::DATATYPES_TESTER_CONFLICT);
return;
}else{
makeConflict = true;
@@ -975,7 +975,7 @@ void TheoryDatatypes::addTester(
: utils::mkTester(t_arg, testerIndex, dt);
Node t_concl_exp = ( nb.getNumChildren() == 1 ) ? nb.getChild( 0 ) : nb;
d_im.addPendingInference(
- t_concl, t_concl_exp, false, InferId::LABEL_EXH);
+ t_concl, t_concl_exp, false, InferenceId::DATATYPES_LABEL_EXH);
Trace("datatypes-infer") << "DtInfer : label : " << t_concl << " by " << t_concl_exp << std::endl;
return;
}
@@ -989,7 +989,7 @@ void TheoryDatatypes::addTester(
conf.push_back(t);
conf.push_back(jt[0].eqNode(t_arg));
Trace("dt-conflict") << "CONFLICT: Tester conflict : " << conf << std::endl;
- d_im.sendDtConflict(conf, InferId::TESTER_MERGE_CONFLICT);
+ d_im.sendDtConflict(conf, InferenceId::DATATYPES_TESTER_MERGE_CONFLICT);
}
}
@@ -1047,7 +1047,7 @@ void TheoryDatatypes::addConstructor( Node c, EqcInfo* eqc, Node n ){
conf.push_back(t[0][0].eqNode(c));
Trace("dt-conflict")
<< "CONFLICT: Tester merge eq conflict : " << conf << std::endl;
- d_im.sendDtConflict(conf, InferId::TESTER_CONFLICT);
+ d_im.sendDtConflict(conf, InferenceId::DATATYPES_TESTER_CONFLICT);
return;
}
}
@@ -1111,7 +1111,7 @@ void TheoryDatatypes::collapseSelector( Node s, Node c ) {
bool forceLemma = !s.getType().isDatatype();
Trace("datatypes-infer") << "DtInfer : collapse sel";
Trace("datatypes-infer") << " : " << eq << " by " << eq_exp << std::endl;
- d_im.addPendingInference(eq, eq_exp, forceLemma, InferId::COLLAPSE_SEL);
+ d_im.addPendingInference(eq, eq_exp, forceLemma, InferenceId::DATATYPES_COLLAPSE_SEL);
}
}
}
@@ -1565,7 +1565,7 @@ void TheoryDatatypes::instantiate( EqcInfo* eqc, Node n ){
}
Trace("datatypes-infer-debug") << "DtInstantiate : " << eqc << " " << eq
<< " forceLemma = " << forceLemma << std::endl;
- d_im.addPendingInference(eq, exp, forceLemma, InferId::INST);
+ d_im.addPendingInference(eq, exp, forceLemma, InferenceId::DATATYPES_INST);
Trace("datatypes-infer") << "DtInfer : instantiate : " << eq << " by " << exp
<< std::endl;
}
@@ -1601,7 +1601,7 @@ void TheoryDatatypes::checkCycles() {
Assert(expl.size() > 0);
Trace("dt-conflict")
<< "CONFLICT: Cycle conflict : " << expl << std::endl;
- d_im.sendDtConflict(expl, InferId::CYCLE);
+ d_im.sendDtConflict(expl, InferenceId::DATATYPES_CYCLE);
return;
}
}
@@ -1651,7 +1651,7 @@ void TheoryDatatypes::checkCycles() {
Trace("dt-cdt") << std::endl;
Node eq = part_out[i][0].eqNode( part_out[i][j] );
Node eqExp = NodeManager::currentNM()->mkAnd(exp);
- d_im.addPendingInference(eq, eqExp, false, InferId::BISIMILAR);
+ d_im.addPendingInference(eq, eqExp, false, InferenceId::DATATYPES_BISIMILAR);
Trace("datatypes-infer") << "DtInfer : cdt-bisimilar : " << eq << " by " << eqExp << std::endl;
}
}
diff --git a/src/theory/inference_id.cpp b/src/theory/inference_id.cpp
new file mode 100644
index 000000000..c037a035a
--- /dev/null
+++ b/src/theory/inference_id.cpp
@@ -0,0 +1,150 @@
+/********************* */
+/*! \file inference_id.cpp
+ ** \verbatim
+ ** Top contributors (to current version):
+ ** Gereon Kremer, Yoni Zohar
+ ** This file is part of the CVC4 project.
+ ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
+ ** in the top-level source directory and their institutional affiliations.
+ ** All rights reserved. See the file COPYING in the top-level source
+ ** directory for licensing information.\endverbatim
+ **
+ ** \brief Implementation of inference enumeration.
+ **/
+
+#include "theory/inference_id.h"
+
+namespace CVC4 {
+namespace theory {
+
+const char* toString(InferenceId i)
+{
+ switch (i)
+ {
+ case InferenceId::ARITH_NL_CONGRUENCE: return "CONGRUENCE";
+ case InferenceId::ARITH_NL_SHARED_TERM_VALUE_SPLIT: return "SHARED_TERM_VALUE_SPLIT";
+ case InferenceId::ARITH_NL_SPLIT_ZERO: return "SPLIT_ZERO";
+ case InferenceId::ARITH_NL_SIGN: return "SIGN";
+ case InferenceId::ARITH_NL_COMPARISON: return "COMPARISON";
+ case InferenceId::ARITH_NL_INFER_BOUNDS: return "INFER_BOUNDS";
+ case InferenceId::ARITH_NL_INFER_BOUNDS_NT: return "INFER_BOUNDS_NT";
+ case InferenceId::ARITH_NL_FACTOR: return "FACTOR";
+ case InferenceId::ARITH_NL_RES_INFER_BOUNDS: return "RES_INFER_BOUNDS";
+ case InferenceId::ARITH_NL_TANGENT_PLANE: return "TANGENT_PLANE";
+ case InferenceId::ARITH_NL_T_PURIFY_ARG: return "T_PURIFY_ARG";
+ case InferenceId::ARITH_NL_T_INIT_REFINE: return "T_INIT_REFINE";
+ case InferenceId::ARITH_NL_T_PI_BOUND: return "T_PI_BOUND";
+ case InferenceId::ARITH_NL_T_MONOTONICITY: return "T_MONOTONICITY";
+ case InferenceId::ARITH_NL_T_SECANT: return "T_SECANT";
+ case InferenceId::ARITH_NL_T_TANGENT: return "T_TANGENT";
+ case InferenceId::ARITH_NL_IAND_INIT_REFINE: return "IAND_INIT_REFINE";
+ case InferenceId::ARITH_NL_IAND_VALUE_REFINE: return "IAND_VALUE_REFINE";
+ case InferenceId::ARITH_NL_IAND_SUM_REFINE: return "IAND_SUM_REFINE";
+ case InferenceId::ARITH_NL_IAND_BITWISE_REFINE: return "IAND_BITWISE_REFINE";
+ case InferenceId::ARITH_NL_CAD_CONFLICT: return "CAD_CONFLICT";
+ case InferenceId::ARITH_NL_CAD_EXCLUDED_INTERVAL: return "CAD_EXCLUDED_INTERVAL";
+ case InferenceId::ARITH_NL_ICP_CONFLICT: return "ICP_CONFLICT";
+ case InferenceId::ARITH_NL_ICP_PROPAGATION: return "ICP_PROPAGATION";
+
+ case InferenceId::BAG_NON_NEGATIVE_COUNT: return "BAG_NON_NEGATIVE_COUNT";
+ case InferenceId::BAG_MK_BAG_SAME_ELEMENT: return "BAG_MK_BAG_SAME_ELEMENT";
+ case InferenceId::BAG_MK_BAG: return "BAG_MK_BAG";
+ case InferenceId::BAG_EQUALITY: return "BAG_EQUALITY";
+ case InferenceId::BAG_DISEQUALITY: return "BAG_DISEQUALITY";
+ case InferenceId::BAG_EMPTY: return "BAG_EMPTY";
+ case InferenceId::BAG_UNION_DISJOINT: return "BAG_UNION_DISJOINT";
+ case InferenceId::BAG_UNION_MAX: return "BAG_UNION_MAX";
+ case InferenceId::BAG_INTERSECTION_MIN: return "BAG_INTERSECTION_MIN";
+ case InferenceId::BAG_DIFFERENCE_SUBTRACT: return "BAG_DIFFERENCE_SUBTRACT";
+ case InferenceId::BAG_DIFFERENCE_REMOVE: return "BAG_DIFFERENCE_REMOVE";
+ case InferenceId::BAG_DUPLICATE_REMOVAL: return "BAG_DUPLICATE_REMOVAL";
+
+ case InferenceId::DATATYPES_UNIF: return "UNIF";
+ case InferenceId::DATATYPES_INST: return "INST";
+ case InferenceId::DATATYPES_SPLIT: return "SPLIT";
+ case InferenceId::DATATYPES_LABEL_EXH: return "LABEL_EXH";
+ case InferenceId::DATATYPES_COLLAPSE_SEL: return "COLLAPSE_SEL";
+ case InferenceId::DATATYPES_CLASH_CONFLICT: return "CLASH_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_CONFLICT: return "TESTER_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT: return "TESTER_MERGE_CONFLICT";
+ case InferenceId::DATATYPES_BISIMILAR: return "BISIMILAR";
+ case InferenceId::DATATYPES_CYCLE: return "CYCLE";
+
+ case InferenceId::STRINGS_I_NORM_S: return "I_NORM_S";
+ case InferenceId::STRINGS_I_CONST_MERGE: return "I_CONST_MERGE";
+ case InferenceId::STRINGS_I_CONST_CONFLICT: return "I_CONST_CONFLICT";
+ case InferenceId::STRINGS_I_NORM: return "I_NORM";
+ case InferenceId::STRINGS_UNIT_INJ: return "UNIT_INJ";
+ case InferenceId::STRINGS_UNIT_CONST_CONFLICT: return "UNIT_CONST_CONFLICT";
+ case InferenceId::STRINGS_UNIT_INJ_DEQ: return "UNIT_INJ_DEQ";
+ case InferenceId::STRINGS_CARD_SP: return "CARD_SP";
+ case InferenceId::STRINGS_CARDINALITY: return "CARDINALITY";
+ case InferenceId::STRINGS_I_CYCLE_E: return "I_CYCLE_E";
+ case InferenceId::STRINGS_I_CYCLE: return "I_CYCLE";
+ case InferenceId::STRINGS_F_CONST: return "F_CONST";
+ case InferenceId::STRINGS_F_UNIFY: return "F_UNIFY";
+ case InferenceId::STRINGS_F_ENDPOINT_EMP: return "F_ENDPOINT_EMP";
+ case InferenceId::STRINGS_F_ENDPOINT_EQ: return "F_ENDPOINT_EQ";
+ case InferenceId::STRINGS_F_NCTN: return "F_NCTN";
+ case InferenceId::STRINGS_N_EQ_CONF: return "N_EQ_CONF";
+ case InferenceId::STRINGS_N_ENDPOINT_EMP: return "N_ENDPOINT_EMP";
+ case InferenceId::STRINGS_N_UNIFY: return "N_UNIFY";
+ case InferenceId::STRINGS_N_ENDPOINT_EQ: return "N_ENDPOINT_EQ";
+ case InferenceId::STRINGS_N_CONST: return "N_CONST";
+ case InferenceId::STRINGS_INFER_EMP: return "INFER_EMP";
+ case InferenceId::STRINGS_SSPLIT_CST_PROP: return "SSPLIT_CST_PROP";
+ case InferenceId::STRINGS_SSPLIT_VAR_PROP: return "SSPLIT_VAR_PROP";
+ case InferenceId::STRINGS_LEN_SPLIT: return "LEN_SPLIT";
+ case InferenceId::STRINGS_LEN_SPLIT_EMP: return "LEN_SPLIT_EMP";
+ case InferenceId::STRINGS_SSPLIT_CST: return "SSPLIT_CST";
+ case InferenceId::STRINGS_SSPLIT_VAR: return "SSPLIT_VAR";
+ case InferenceId::STRINGS_FLOOP: return "FLOOP";
+ case InferenceId::STRINGS_FLOOP_CONFLICT: return "FLOOP_CONFLICT";
+ case InferenceId::STRINGS_NORMAL_FORM: return "NORMAL_FORM";
+ case InferenceId::STRINGS_N_NCTN: return "N_NCTN";
+ case InferenceId::STRINGS_LEN_NORM: return "LEN_NORM";
+ case InferenceId::STRINGS_DEQ_DISL_EMP_SPLIT: return "DEQ_DISL_EMP_SPLIT";
+ case InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_EQ_SPLIT:
+ return "DEQ_DISL_FIRST_CHAR_EQ_SPLIT";
+ case InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_STRING_SPLIT:
+ return "DEQ_DISL_FIRST_CHAR_STRING_SPLIT";
+ case InferenceId::STRINGS_DEQ_STRINGS_EQ: return "DEQ_STRINGS_EQ";
+ case InferenceId::STRINGS_DEQ_DISL_STRINGS_SPLIT: return "DEQ_DISL_STRINGS_SPLIT";
+ case InferenceId::STRINGS_DEQ_LENS_EQ: return "DEQ_LENS_EQ";
+ case InferenceId::STRINGS_DEQ_NORM_EMP: return "DEQ_NORM_EMP";
+ case InferenceId::STRINGS_DEQ_LENGTH_SP: return "DEQ_LENGTH_SP";
+ case InferenceId::STRINGS_CODE_PROXY: return "CODE_PROXY";
+ case InferenceId::STRINGS_CODE_INJ: return "CODE_INJ";
+ case InferenceId::STRINGS_RE_NF_CONFLICT: return "RE_NF_CONFLICT";
+ case InferenceId::STRINGS_RE_UNFOLD_POS: return "RE_UNFOLD_POS";
+ case InferenceId::STRINGS_RE_UNFOLD_NEG: return "RE_UNFOLD_NEG";
+ case InferenceId::STRINGS_RE_INTER_INCLUDE: return "RE_INTER_INCLUDE";
+ case InferenceId::STRINGS_RE_INTER_CONF: return "RE_INTER_CONF";
+ case InferenceId::STRINGS_RE_INTER_INFER: return "RE_INTER_INFER";
+ case InferenceId::STRINGS_RE_DELTA: return "RE_DELTA";
+ case InferenceId::STRINGS_RE_DELTA_CONF: return "RE_DELTA_CONF";
+ case InferenceId::STRINGS_RE_DERIVE: return "RE_DERIVE";
+ case InferenceId::STRINGS_EXTF: return "EXTF";
+ case InferenceId::STRINGS_EXTF_N: return "EXTF_N";
+ case InferenceId::STRINGS_EXTF_D: return "EXTF_D";
+ case InferenceId::STRINGS_EXTF_D_N: return "EXTF_D_N";
+ case InferenceId::STRINGS_EXTF_EQ_REW: return "EXTF_EQ_REW";
+ case InferenceId::STRINGS_CTN_TRANS: return "CTN_TRANS";
+ case InferenceId::STRINGS_CTN_DECOMPOSE: return "CTN_DECOMPOSE";
+ case InferenceId::STRINGS_CTN_NEG_EQUAL: return "CTN_NEG_EQUAL";
+ case InferenceId::STRINGS_CTN_POS: return "CTN_POS";
+ case InferenceId::STRINGS_REDUCTION: return "REDUCTION";
+ case InferenceId::STRINGS_PREFIX_CONFLICT: return "PREFIX_CONFLICT";
+
+ default: return "?";
+ }
+}
+
+std::ostream& operator<<(std::ostream& out, InferenceId i)
+{
+ out << toString(i);
+ return out;
+}
+
+} // namespace theory
+} // namespace CVC4
diff --git a/src/theory/inference_id.h b/src/theory/inference_id.h
new file mode 100644
index 000000000..f2192437a
--- /dev/null
+++ b/src/theory/inference_id.h
@@ -0,0 +1,429 @@
+/********************* */
+/*! \file inference_id.h
+ ** \verbatim
+ ** Top contributors (to current version):
+ ** Andrew Reynolds, Gereon Kremer, Yoni Zohar
+ ** This file is part of the CVC4 project.
+ ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
+ ** in the top-level source directory and their institutional affiliations.
+ ** All rights reserved. See the file COPYING in the top-level source
+ ** directory for licensing information.\endverbatim
+ **
+ ** \brief Inference enumeration.
+ **/
+
+#include "cvc4_private.h"
+
+#ifndef CVC4__THEORY__INFERENCE_ID_H
+#define CVC4__THEORY__INFERENCE_ID_H
+
+#include <map>
+#include <vector>
+
+#include "util/safe_print.h"
+
+namespace CVC4 {
+namespace theory {
+
+/** Types of inferences used in the procedure
+ *
+ * Note: The order in this enum matters in certain cases (e.g. inferences
+ * related to normal forms in strings), where inferences that come first are
+ * generally preferred.
+ *
+ * Notice that an inference is intentionally distinct from PfRule. An
+ * inference captures *why* we performed a reasoning step, and a PfRule
+ * rule captures *what* reasoning step was used. For instance, the inference
+ * STRINGS_LEN_SPLIT translates to PfRule::SPLIT. The use of stats on
+ * inferences allows us to know that we performed N splits (PfRule::SPLIT)
+ * because we wanted to split on lengths (InferenceId::STRINGS_LEN_SPLIT).
+ */
+enum class InferenceId
+{
+ //-------------------- core
+ // simple congruence x=y => f(x)=f(y)
+ ARITH_NL_CONGRUENCE,
+ // shared term value split (for naive theory combination)
+ ARITH_NL_SHARED_TERM_VALUE_SPLIT,
+ //-------------------- incremental linearization solver
+ // splitting on zero (NlSolver::checkSplitZero)
+ ARITH_NL_SPLIT_ZERO,
+ // based on sign (NlSolver::checkMonomialSign)
+ ARITH_NL_SIGN,
+ // based on comparing (abs) model values (NlSolver::checkMonomialMagnitude)
+ ARITH_NL_COMPARISON,
+ // based on inferring bounds (NlSolver::checkMonomialInferBounds)
+ ARITH_NL_INFER_BOUNDS,
+ // same as above, for inferences that introduce new terms
+ ARITH_NL_INFER_BOUNDS_NT,
+ // factoring (NlSolver::checkFactoring)
+ ARITH_NL_FACTOR,
+ // resolution bound inferences (NlSolver::checkMonomialInferResBounds)
+ ARITH_NL_RES_INFER_BOUNDS,
+ // tangent planes (NlSolver::checkTangentPlanes)
+ ARITH_NL_TANGENT_PLANE,
+ //-------------------- transcendental solver
+ // purification of arguments to transcendental functions
+ ARITH_NL_T_PURIFY_ARG,
+ // initial refinement (TranscendentalSolver::checkTranscendentalInitialRefine)
+ ARITH_NL_T_INIT_REFINE,
+ // pi bounds
+ ARITH_NL_T_PI_BOUND,
+ // monotonicity (TranscendentalSolver::checkTranscendentalMonotonic)
+ ARITH_NL_T_MONOTONICITY,
+ // tangent refinement (TranscendentalSolver::checkTranscendentalTangentPlanes)
+ ARITH_NL_T_TANGENT,
+ // secant refinement, the dual of the above inference
+ ARITH_NL_T_SECANT,
+ //-------------------- iand solver
+ // initial refinements (IAndSolver::checkInitialRefine)
+ ARITH_NL_IAND_INIT_REFINE,
+ // value refinements (IAndSolver::checkFullRefine)
+ ARITH_NL_IAND_VALUE_REFINE,
+ // sum refinements (IAndSolver::checkFullRefine)
+ ARITH_NL_IAND_SUM_REFINE,
+ // bitwise refinements (IAndSolver::checkFullRefine)
+ ARITH_NL_IAND_BITWISE_REFINE,
+ //-------------------- cad solver
+ // conflict / infeasible subset obtained from cad
+ ARITH_NL_CAD_CONFLICT,
+ // excludes an interval for a single variable
+ ARITH_NL_CAD_EXCLUDED_INTERVAL,
+ //-------------------- icp solver
+ // conflict obtained from icp
+ ARITH_NL_ICP_CONFLICT,
+ // propagation / contraction of variable bounds from icp
+ ARITH_NL_ICP_PROPAGATION,
+ //-------------------- unknown
+
+
+ BAG_NON_NEGATIVE_COUNT,
+ BAG_MK_BAG_SAME_ELEMENT,
+ BAG_MK_BAG,
+ BAG_EQUALITY,
+ BAG_DISEQUALITY,
+ BAG_EMPTY,
+ BAG_UNION_DISJOINT,
+ BAG_UNION_MAX,
+ BAG_INTERSECTION_MIN,
+ BAG_DIFFERENCE_SUBTRACT,
+ BAG_DIFFERENCE_REMOVE,
+ BAG_DUPLICATE_REMOVAL,
+
+ // (= (C t1 ... tn) (C s1 .. sn)) => (= ti si)
+ DATATYPES_UNIF,
+ // ((_ is Ci) t) => (= t (Ci (sel_1 t) ... (sel_n t)))
+ DATATYPES_INST,
+ // (or ((_ is C1) t) V ... V ((_ is Cn) t))
+ DATATYPES_SPLIT,
+ // (not ((_ is C1) t)) ^ ... [j] ... ^ (not ((_ is Cn) t)) => ((_ is Cj) t)
+ DATATYPES_LABEL_EXH,
+ // (= t (Ci t1 ... tn)) => (= (sel_j t) rewrite((sel_j (Ci t1 ... tn))))
+ DATATYPES_COLLAPSE_SEL,
+ // (= (Ci t1...tn) (Cj t1...tn)) => false
+ DATATYPES_CLASH_CONFLICT,
+ // ((_ is Ci) t) ^ (= t (Cj t1 ... tn)) => false
+ DATATYPES_TESTER_CONFLICT,
+ // ((_ is Ci) t) ^ ((_ is Cj) s) ^ (= t s) => false
+ DATATYPES_TESTER_MERGE_CONFLICT,
+ // bisimilarity for codatatypes
+ DATATYPES_BISIMILAR,
+ // cycle conflict for datatypes
+ DATATYPES_CYCLE,
+
+ //-------------------------------------- base solver
+ // initial normalize singular
+ // x1 = "" ^ ... ^ x_{i-1} = "" ^ x_{i+1} = "" ^ ... ^ xn = "" =>
+ // x1 ++ ... ++ xn = xi
+ STRINGS_I_NORM_S,
+ // initial constant merge
+ // explain_constant(x, c) => x = c
+ // Above, explain_constant(x,c) is a basic explanation of why x must be equal
+ // to string constant c, which is computed by taking arguments of
+ // concatenation terms that are entailed to be constants. For example:
+ // ( y = "AB" ^ z = "C" ) => y ++ z = "ABC"
+ STRINGS_I_CONST_MERGE,
+ // initial constant conflict
+ // ( explain_constant(x, c1) ^ explain_constant(x, c2) ^ x = y) => false
+ // where c1 != c2.
+ STRINGS_I_CONST_CONFLICT,
+ // initial normalize
+ // Given two concatenation terms, this is applied when we find that they are
+ // equal after e.g. removing strings that are currently empty. For example:
+ // y = "" ^ z = "" => x ++ y = z ++ x
+ STRINGS_I_NORM,
+ // injectivity of seq.unit
+ // (seq.unit x) = (seq.unit y) => x=y, or
+ // (seq.unit x) = (seq.unit c) => x=c
+ STRINGS_UNIT_INJ,
+ // unit constant conflict
+ // (seq.unit x) = C => false if |C| != 1.
+ STRINGS_UNIT_CONST_CONFLICT,
+ // injectivity of seq.unit for disequality
+ // (seq.unit x) != (seq.unit y) => x != y, or
+ // (seq.unit x) != (seq.unit c) => x != c
+ STRINGS_UNIT_INJ_DEQ,
+ // A split due to cardinality
+ STRINGS_CARD_SP,
+ // The cardinality inference for strings, see Liang et al CAV 2014.
+ STRINGS_CARDINALITY,
+ //-------------------------------------- end base solver
+ //-------------------------------------- core solver
+ // A cycle in the empty string equivalence class, e.g.:
+ // x ++ y = "" => x = ""
+ // This is typically not applied due to length constraints implying emptiness.
+ STRINGS_I_CYCLE_E,
+ // A cycle in the containment ordering.
+ // x = y ++ x => y = "" or
+ // x = y ++ z ^ y = x ++ w => z = "" ^ w = ""
+ // This is typically not applied due to length constraints implying emptiness.
+ STRINGS_I_CYCLE,
+ // Flat form constant
+ // x = y ^ x = z ++ c ... ^ y = z ++ d => false
+ // where c and d are distinct constants.
+ STRINGS_F_CONST,
+ // Flat form unify
+ // x = y ^ x = z ++ x' ... ^ y = z ++ y' ^ len(x') = len(y') => x' = y'
+ // Notice flat form instances are similar to normal form inferences but do
+ // not involve recursive explanations.
+ STRINGS_F_UNIFY,
+ // Flat form endpoint empty
+ // x = y ^ x = z ^ y = z ++ y' => y' = ""
+ STRINGS_F_ENDPOINT_EMP,
+ // Flat form endpoint equal
+ // x = y ^ x = z ++ x' ^ y = z ++ y' => x' = y'
+ STRINGS_F_ENDPOINT_EQ,
+ // Flat form not contained
+ // x = c ^ x = y => false when rewrite( contains( y, c ) ) = false
+ STRINGS_F_NCTN,
+ // Normal form equality conflict
+ // x = N[x] ^ y = N[y] ^ x=y => false
+ // where Rewriter::rewrite(N[x]=N[y]) = false.
+ STRINGS_N_EQ_CONF,
+ // Given two normal forms, infers that the remainder one of them has to be
+ // empty. For example:
+ // If x1 ++ x2 = y1 and x1 = y1, then x2 = ""
+ STRINGS_N_ENDPOINT_EMP,
+ // Given two normal forms, infers that two components have to be the same if
+ // they have the same length. For example:
+ // If x1 ++ x2 = x3 ++ x4 and len(x1) = len(x3) then x1 = x3
+ STRINGS_N_UNIFY,
+ // Given two normal forms, infers that the endpoints have to be the same. For
+ // example:
+ // If x1 ++ x2 = x3 ++ x4 ++ x5 and x1 = x3 then x2 = x4 ++ x5
+ STRINGS_N_ENDPOINT_EQ,
+ // Given two normal forms with constant endpoints, infers a conflict if the
+ // endpoints do not agree. For example:
+ // If "abc" ++ ... = "bc" ++ ... then conflict
+ STRINGS_N_CONST,
+ // infer empty, for example:
+ // (~) x = ""
+ // This is inferred when we encounter an x such that x = "" rewrites to a
+ // constant. This inference is used for instance when we otherwise would have
+ // split on the emptiness of x but the rewriter tells us the emptiness of x
+ // can be inferred.
+ STRINGS_INFER_EMP,
+ // string split constant propagation, for example:
+ // x = y, x = "abc", y = y1 ++ "b" ++ y2
+ // implies y1 = "a" ++ y1'
+ STRINGS_SSPLIT_CST_PROP,
+ // string split variable propagation, for example:
+ // x = y, x = x1 ++ x2, y = y1 ++ y2, len( x1 ) >= len( y1 )
+ // implies x1 = y1 ++ x1'
+ // This is inspired by Zheng et al CAV 2015.
+ STRINGS_SSPLIT_VAR_PROP,
+ // length split, for example:
+ // len( x1 ) = len( y1 ) V len( x1 ) != len( y1 )
+ // This is inferred when e.g. x = y, x = x1 ++ x2, y = y1 ++ y2.
+ STRINGS_LEN_SPLIT,
+ // length split empty, for example:
+ // z = "" V z != ""
+ // This is inferred when, e.g. x = y, x = z ++ x1, y = y1 ++ z
+ STRINGS_LEN_SPLIT_EMP,
+ // string split constant
+ // x = y, x = "c" ++ x2, y = y1 ++ y2, y1 != ""
+ // implies y1 = "c" ++ y1'
+ // This is a special case of F-Split in Figure 5 of Liang et al CAV 2014.
+ STRINGS_SSPLIT_CST,
+ // string split variable, for example:
+ // x = y, x = x1 ++ x2, y = y1 ++ y2
+ // implies x1 = y1 ++ x1' V y1 = x1 ++ y1'
+ // This is rule F-Split in Figure 5 of Liang et al CAV 2014.
+ STRINGS_SSPLIT_VAR,
+ // flat form loop, for example:
+ // x = y, x = x1 ++ z, y = z ++ y2
+ // implies z = u2 ++ u1, u in ( u1 ++ u2 )*, x1 = u2 ++ u, y2 = u ++ u1
+ // for fresh u, u1, u2.
+ // This is the rule F-Loop from Figure 5 of Liang et al CAV 2014.
+ STRINGS_FLOOP,
+ // loop conflict ???
+ STRINGS_FLOOP_CONFLICT,
+ // Normal form inference
+ // x = y ^ z = y => x = z
+ // This is applied when y is the normal form of both x and z.
+ STRINGS_NORMAL_FORM,
+  // Normal form not contained, same as STRINGS_F_NCTN but for normal forms
+ STRINGS_N_NCTN,
+ // Length normalization
+ // x = y => len( x ) = len( y )
+ // Typically applied when y is the normal form of x.
+ STRINGS_LEN_NORM,
+ // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) != len(y), we apply the
+ // inference:
+ // x = "" v x != ""
+ STRINGS_DEQ_DISL_EMP_SPLIT,
+ // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) = 1, we apply the
+ // inference:
+ // x = "a" v x != "a"
+ STRINGS_DEQ_DISL_FIRST_CHAR_EQ_SPLIT,
+  // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ x != "", we apply the
+ // inference:
+ // ni = x ++ x' ++ ... ^ nj = "abc" ++ y' ++ ... ^ x != "" --->
+ // x = k1 ++ k2 ^ len(k1) = 1 ^ (k1 != "a" v x = "a" ++ k2)
+ STRINGS_DEQ_DISL_FIRST_CHAR_STRING_SPLIT,
+ // When x ++ x' ++ ... != y ++ y' ++ ... ^ len(x) != len(y), we apply the
+ // inference:
+ // ni = x ++ x' ++ ... ^ nj = y ++ y' ++ ... ^ ni != nj ^ len(x) != len(y)
+ // --->
+ // len(k1) = len(x) ^ len(k2) = len(y) ^ (y = k1 ++ k3 v x = k1 ++ k2)
+ STRINGS_DEQ_DISL_STRINGS_SPLIT,
+ // When x ++ x' ++ ... != y ++ y' ++ ... ^ len(x) = len(y), we apply the
+ // inference:
+ // x = y v x != y
+ STRINGS_DEQ_STRINGS_EQ,
+ // When x ++ x' ++ ... != y ++ y' ++ ... and we do not know how the lengths
+ // of x and y compare, we apply the inference:
+ // len(x) = len(y) v len(x) != len(y)
+ STRINGS_DEQ_LENS_EQ,
+ // When px ++ x ++ ... != py ^ len(px ++ x ++ ...) = len(py), we apply the
+ // following inference that infers that the remainder of the longer normal
+ // form must be empty:
+ // ni = px ++ x ++ ... ^ nj = py ^ len(ni) = len(nj) --->
+ // x = "" ^ ...
+ STRINGS_DEQ_NORM_EMP,
+ // When two strings are disequal s != t and the comparison of their lengths
+ // is unknown, we apply the inference:
+ // len(s) != len(t) V len(s) = len(t)
+ STRINGS_DEQ_LENGTH_SP,
+ //-------------------------------------- end core solver
+ //-------------------------------------- codes solver
+ // str.to_code( v ) = rewrite( str.to_code(c) )
+ // where v is the proxy variable for c.
+ STRINGS_CODE_PROXY,
+ // str.code(x) = -1 V str.code(x) != str.code(y) V x = y
+ STRINGS_CODE_INJ,
+ //-------------------------------------- end codes solver
+ //-------------------------------------- regexp solver
+ // regular expression normal form conflict
+ // ( x in R ^ x = y ^ rewrite((str.in_re y R)) = false ) => false
+ // where y is the normal form computed for x.
+ STRINGS_RE_NF_CONFLICT,
+ // regular expression unfolding
+ // This is a general class of inferences of the form:
+ // (x in R) => F
+ // where F is formula expressing the next step of checking whether x is in
+ // R. For example:
+ // (x in (R)*) =>
+ // x = "" V x in R V ( x = x1 ++ x2 ++ x3 ^ x1 in R ^ x2 in (R)* ^ x3 in R)
+ STRINGS_RE_UNFOLD_POS,
+ // Same as above, for negative memberships
+ STRINGS_RE_UNFOLD_NEG,
+ // intersection inclusion conflict
+ // (x in R1 ^ ~ x in R2) => false where [[includes(R2,R1)]]
+ // Where includes(R2,R1) is a heuristic check for whether R2 includes R1.
+ STRINGS_RE_INTER_INCLUDE,
+ // intersection conflict, using regexp intersection computation
+ // (x in R1 ^ x in R2) => false where [[intersect(R1, R2) = empty]]
+ STRINGS_RE_INTER_CONF,
+ // intersection inference
+ // (x in R1 ^ y in R2 ^ x = y) => (x in re.inter(R1,R2))
+ STRINGS_RE_INTER_INFER,
+ // regular expression delta
+ // (x = "" ^ x in R) => C
+ // where "" in R holds if and only if C holds.
+ STRINGS_RE_DELTA,
+ // regular expression delta conflict
+ // (x = "" ^ x in R) => false
+ // where R does not accept the empty string.
+ STRINGS_RE_DELTA_CONF,
+ // regular expression derive ???
+ STRINGS_RE_DERIVE,
+ //-------------------------------------- end regexp solver
+ //-------------------------------------- extended function solver
+ // Standard extended function inferences from context-dependent rewriting
+ // produced by constant substitutions. See Reynolds et al CAV 2017. These are
+ // inferences of the form:
+ // X = Y => f(X) = t when rewrite( f(Y) ) = t
+ // where X = Y is a vector of equalities, where some of Y may be constants.
+ STRINGS_EXTF,
+ // Same as above, for normal form substitutions.
+ STRINGS_EXTF_N,
+ // Decompositions based on extended function inferences from context-dependent
+ // rewriting produced by constant substitutions. This is like the above, but
+ // handles cases where the inferred predicate is not necessarily an equality
+ // involving f(X). For example:
+ // x = "A" ^ contains( y ++ x, "B" ) => contains( y, "B" )
+ // This is generally only inferred if contains( y, "B" ) is a known term in
+ // the current context.
+ STRINGS_EXTF_D,
+ // Same as above, for normal form substitutions.
+ STRINGS_EXTF_D_N,
+ // Extended function equality rewrite. This is an inference of the form:
+ // t = s => P
+ // where P is a predicate implied by rewrite( t = s ).
+ // Typically, t is an application of an extended function and s is a constant.
+ // It is generally only inferred if P is a predicate over known terms.
+ STRINGS_EXTF_EQ_REW,
+ // contain transitive
+ // ( str.contains( s, t ) ^ ~contains( s, r ) ) => ~contains( t, r ).
+ STRINGS_CTN_TRANS,
+ // contain decompose
+ // str.contains( x, str.++( y1, ..., yn ) ) => str.contains( x, yi ) or
+ // ~str.contains( str.++( x1, ..., xn ), y ) => ~str.contains( xi, y )
+ STRINGS_CTN_DECOMPOSE,
+ // contain neg equal
+ // ( len( x ) = len( s ) ^ ~contains( x, s ) ) => x != s
+ STRINGS_CTN_NEG_EQUAL,
+ // contain positive
+ // str.contains( x, y ) => x = w1 ++ y ++ w2
+ // where w1 and w2 are skolem variables.
+ STRINGS_CTN_POS,
+ // All reduction inferences of the form:
+ // f(x1, .., xn) = y ^ P(x1, ..., xn, y)
+ // where f is an extended function, y is the purification variable for
+ // f(x1, .., xn) and P is the reduction predicate for f
+ // (see theory_strings_preprocess).
+ STRINGS_REDUCTION,
+ //-------------------------------------- end extended function solver
+ //-------------------------------------- prefix conflict
+ // prefix conflict (coarse-grained)
+ STRINGS_PREFIX_CONFLICT,
+ //-------------------------------------- end prefix conflict
+
+ UNKNOWN,
+};
+
+/**
+ * Converts an inference to a string. Note: This function is also used in
+ * `safe_print()`. Changing this functions name or signature will result in
+ * `safe_print()` printing "<unsupported>" instead of the proper strings for
+ * the enum values.
+ *
+ * @param i The inference
+ * @return The name of the inference
+ */
+const char* toString(InferenceId i);
+
+/**
+ * Writes an inference name to a stream.
+ *
+ * @param out The stream to write to
+ * @param i The inference to write to the stream
+ * @return The stream
+ */
+std::ostream& operator<<(std::ostream& out, InferenceId i);
+
+} // namespace theory
+} // namespace CVC4
+
+#endif /* CVC4__THEORY__INFERENCE_ID_H */
diff --git a/src/theory/strings/base_solver.cpp b/src/theory/strings/base_solver.cpp
index 93803ea02..75ca3c77c 100644
--- a/src/theory/strings/base_solver.cpp
+++ b/src/theory/strings/base_solver.cpp
@@ -108,7 +108,7 @@ void BaseSolver::checkInit()
{
// (seq.unit x) = C => false if |C| != 1.
d_im.sendInference(
- exp, d_false, Inference::UNIT_CONST_CONFLICT);
+ exp, d_false, InferenceId::STRINGS_UNIT_CONST_CONFLICT);
return;
}
}
@@ -117,7 +117,7 @@ void BaseSolver::checkInit()
// (seq.unit x) = (seq.unit y) => x=y, or
// (seq.unit x) = (seq.unit c) => x=c
Assert(s.getType() == t.getType());
- d_im.sendInference(exp, s.eqNode(t), Inference::UNIT_INJ);
+ d_im.sendInference(exp, s.eqNode(t), InferenceId::STRINGS_UNIT_INJ);
}
}
// update best content
@@ -187,7 +187,7 @@ void BaseSolver::checkInit()
}
}
// infer the equality
- d_im.sendInference(exp, n.eqNode(nc), Inference::I_NORM);
+ d_im.sendInference(exp, n.eqNode(nc), InferenceId::STRINGS_I_NORM);
}
else
{
@@ -237,7 +237,7 @@ void BaseSolver::checkInit()
}
AlwaysAssert(foundNEmpty);
// infer the equality
- d_im.sendInference(exp, n.eqNode(ns), Inference::I_NORM_S);
+ d_im.sendInference(exp, n.eqNode(ns), InferenceId::STRINGS_I_NORM_S);
}
d_congruent.insert(n);
congruentCount[k].first++;
@@ -440,7 +440,7 @@ void BaseSolver::checkConstantEquivalenceClasses(TermIndex* ti,
}
else if (d_state.hasTerm(c))
{
- d_im.sendInference(exp, n.eqNode(c), Inference::I_CONST_MERGE);
+ d_im.sendInference(exp, n.eqNode(c), InferenceId::STRINGS_I_CONST_MERGE);
return;
}
else if (!d_im.hasProcessed())
@@ -473,7 +473,7 @@ void BaseSolver::checkConstantEquivalenceClasses(TermIndex* ti,
exp.push_back(bei.d_exp);
d_im.addToExplanation(n, bei.d_base, exp);
}
- d_im.sendInference(exp, d_false, Inference::I_CONST_CONFLICT);
+ d_im.sendInference(exp, d_false, InferenceId::STRINGS_I_CONST_CONFLICT);
return;
}
else
@@ -622,7 +622,7 @@ void BaseSolver::checkCardinalityType(TypeNode tn,
if (!d_state.areDisequal(*itr1, *itr2))
{
// add split lemma
- if (d_im.sendSplit(*itr1, *itr2, Inference::CARD_SP))
+ if (d_im.sendSplit(*itr1, *itr2, InferenceId::STRINGS_CARD_SP))
{
return;
}
@@ -660,7 +660,7 @@ void BaseSolver::checkCardinalityType(TypeNode tn,
if (!cons.isConst() || !cons.getConst<bool>())
{
d_im.sendInference(
- expn, expn, cons, Inference::CARDINALITY, false, true);
+ expn, expn, cons, InferenceId::STRINGS_CARDINALITY, false, true);
return;
}
}
diff --git a/src/theory/strings/core_solver.cpp b/src/theory/strings/core_solver.cpp
index e95e79d68..343332da5 100644
--- a/src/theory/strings/core_solver.cpp
+++ b/src/theory/strings/core_solver.cpp
@@ -186,7 +186,7 @@ void CoreSolver::checkFlatForms()
}
d_bsolver.explainConstantEqc(n, eqc, exp);
Node conc = d_false;
- d_im.sendInference(exp, conc, Inference::F_NCTN);
+ d_im.sendInference(exp, conc, InferenceId::STRINGS_F_NCTN);
return;
}
}
@@ -247,7 +247,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
{
std::vector<Node> exp;
Node conc;
- Inference infType = Inference::NONE;
+ InferenceId infType = InferenceId::UNKNOWN;
size_t eqc_size = eqc.size();
size_t asize = d_flat_form[a].size();
if (count == asize)
@@ -275,7 +275,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
}
Assert(!conc_c.empty());
conc = utils::mkAnd(conc_c);
- infType = Inference::F_ENDPOINT_EMP;
+ infType = InferenceId::STRINGS_F_ENDPOINT_EMP;
Assert(count > 0);
// swap, will enforce is empty past current
a = eqc[i];
@@ -315,7 +315,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
}
Assert(!conc_c.empty());
conc = utils::mkAnd(conc_c);
- infType = Inference::F_ENDPOINT_EMP;
+ infType = InferenceId::STRINGS_F_ENDPOINT_EMP;
Assert(count > 0);
break;
}
@@ -338,7 +338,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
d_bsolver.explainConstantEqc(ac,curr,exp);
d_bsolver.explainConstantEqc(bc,cc,exp);
conc = d_false;
- infType = Inference::F_CONST;
+ infType = InferenceId::STRINGS_F_CONST;
break;
}
}
@@ -346,7 +346,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
&& (d_flat_form[b].size() - 1) == count)
{
conc = ac.eqNode(bc);
- infType = Inference::F_ENDPOINT_EQ;
+ infType = InferenceId::STRINGS_F_ENDPOINT_EQ;
break;
}
else
@@ -380,7 +380,7 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
d_im.addToExplanation(lcurr, lcc, lexpc);
lant = utils::mkAnd(lexpc);
conc = ac.eqNode(bc);
- infType = Inference::F_UNIFY;
+ infType = InferenceId::STRINGS_F_UNIFY;
break;
}
}
@@ -406,8 +406,8 @@ void CoreSolver::checkFlatForm(std::vector<Node>& eqc,
{
Node c = t == 0 ? a : b;
ssize_t jj;
- if (infType == Inference::F_ENDPOINT_EQ
- || (t == 1 && infType == Inference::F_ENDPOINT_EMP))
+ if (infType == InferenceId::STRINGS_F_ENDPOINT_EQ
+ || (t == 1 && infType == InferenceId::STRINGS_F_ENDPOINT_EMP))
{
// explain all the empty components for F_EndpointEq, all for
// the short end for F_EndpointEmp
@@ -480,7 +480,7 @@ Node CoreSolver::checkCycles( Node eqc, std::vector< Node >& curr, std::vector<
std::vector<Node> exps;
exps.push_back(n.eqNode(emp));
d_im.sendInference(
- exps, n[i].eqNode(emp), Inference::I_CYCLE_E);
+ exps, n[i].eqNode(emp), InferenceId::STRINGS_I_CYCLE_E);
return Node::null();
}
}else{
@@ -502,7 +502,7 @@ Node CoreSolver::checkCycles( Node eqc, std::vector< Node >& curr, std::vector<
if (j != i && !d_state.areEqual(n[j], emp))
{
d_im.sendInference(
- exp, n[j].eqNode(emp), Inference::I_CYCLE);
+ exp, n[j].eqNode(emp), InferenceId::STRINGS_I_CYCLE);
return Node::null();
}
}
@@ -567,7 +567,7 @@ void CoreSolver::checkNormalFormsEq()
nf_exp.push_back(eexp);
}
Node eq = nfe.d_base.eqNode(nfe_eq.d_base);
- d_im.sendInference(nf_exp, eq, Inference::NORMAL_FORM);
+ d_im.sendInference(nf_exp, eq, InferenceId::STRINGS_NORMAL_FORM);
if (d_im.hasProcessed())
{
return;
@@ -1105,7 +1105,7 @@ void CoreSolver::processNEqc(Node eqc,
// Notice although not implemented, this can be minimized based on
// firstc/lastc, normal_forms_exp_depend.
d_bsolver.explainConstantEqc(n, eqc, exp);
- d_im.sendInference(exp, d_false, Inference::N_NCTN);
+ d_im.sendInference(exp, d_false, InferenceId::STRINGS_N_NCTN);
// conflict, finished
return;
}
@@ -1196,7 +1196,7 @@ void CoreSolver::processNEqc(Node eqc,
exp.insert(exp.end(), nfi.d_exp.begin(), nfi.d_exp.end());
exp.insert(exp.end(), nfj.d_exp.begin(), nfj.d_exp.end());
exp.push_back(nfi.d_base.eqNode(nfj.d_base));
- d_im.sendInference(exp, d_false, Inference::N_EQ_CONF);
+ d_im.sendInference(exp, d_false, InferenceId::STRINGS_N_EQ_CONF);
return;
}
}
@@ -1215,7 +1215,7 @@ void CoreSolver::processNEqc(Node eqc,
bool set_use_index = false;
Trace("strings-solve") << "Possible inferences (" << pinfer.size()
<< ") : " << std::endl;
- Inference min_id = Inference::NONE;
+ InferenceId min_id = InferenceId::UNKNOWN;
unsigned max_index = 0;
for (unsigned i = 0, psize = pinfer.size(); i < psize; i++)
{
@@ -1277,7 +1277,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
// can infer that this string must be empty
Node eq = nfkv[index_k].eqNode(emp);
Assert(!d_state.areEqual(emp, nfkv[index_k]));
- d_im.sendInference(curr_exp, eq, Inference::N_ENDPOINT_EMP, isRev);
+ d_im.sendInference(curr_exp, eq, InferenceId::STRINGS_N_ENDPOINT_EMP, isRev);
index_k++;
}
break;
@@ -1320,7 +1320,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
if (x.isConst() && y.isConst())
{
// if both are constant, it's just a constant conflict
- d_im.sendInference(ant, d_false, Inference::N_CONST, isRev, true);
+ d_im.sendInference(ant, d_false, InferenceId::STRINGS_N_CONST, isRev, true);
return;
}
// `x` and `y` have the same length. We infer that the two components
@@ -1335,7 +1335,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
// set the explanation for length
Node lant = utils::mkAnd(lenExp);
ant.push_back(lant);
- d_im.sendInference(ant, eq, Inference::N_UNIFY, isRev);
+ d_im.sendInference(ant, eq, InferenceId::STRINGS_N_UNIFY, isRev);
break;
}
else if ((!x.isConst() && index == nfiv.size() - rproc - 1)
@@ -1375,7 +1375,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
NormalForm::getExplanationForPrefixEq(nfi, nfj, -1, -1, antec);
d_im.sendInference(antec,
eqn[0].eqNode(eqn[1]),
- Inference::N_ENDPOINT_EQ,
+ InferenceId::STRINGS_N_ENDPOINT_EQ,
isRev,
true);
}
@@ -1437,7 +1437,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
// E.g. "abc" ++ ... = "bc" ++ ... ---> conflict
std::vector<Node> antec;
NormalForm::getExplanationForPrefixEq(nfi, nfj, index, index, antec);
- d_im.sendInference(antec, d_false, Inference::N_CONST, isRev, true);
+ d_im.sendInference(antec, d_false, InferenceId::STRINGS_N_CONST, isRev, true);
break;
}
}
@@ -1466,7 +1466,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
Node lenEq = nm->mkNode(EQUAL, xLenTerm, yLenTerm);
lenEq = Rewriter::rewrite(lenEq);
iinfo.d_conc = nm->mkNode(OR, lenEq, lenEq.negate());
- iinfo.d_id = Inference::LEN_SPLIT;
+ iinfo.d_id = InferenceId::STRINGS_LEN_SPLIT;
info.d_pendingPhase[lenEq] = true;
pinfer.push_back(info);
break;
@@ -1546,12 +1546,12 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
// inferred
iinfo.d_conc = nm->mkNode(
AND, p.eqNode(nc), !eq.getConst<bool>() ? pEq.negate() : pEq);
- iinfo.d_id = Inference::INFER_EMP;
+ iinfo.d_id = InferenceId::STRINGS_INFER_EMP;
}
else
{
iinfo.d_conc = nm->mkNode(OR, eq, eq.negate());
- iinfo.d_id = Inference::LEN_SPLIT_EMP;
+ iinfo.d_id = InferenceId::STRINGS_LEN_SPLIT_EMP;
}
pinfer.push_back(info);
break;
@@ -1594,7 +1594,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
xcv, stra, PfRule::CONCAT_CPROP, isRev, skc, newSkolems);
Assert(newSkolems.size() == 1);
iinfo.d_skolems[LENGTH_SPLIT].push_back(newSkolems[0]);
- iinfo.d_id = Inference::SSPLIT_CST_PROP;
+ iinfo.d_id = InferenceId::STRINGS_SSPLIT_CST_PROP;
iinfo.d_idRev = isRev;
pinfer.push_back(info);
break;
@@ -1614,7 +1614,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
iinfo.d_premises.push_back(expNonEmpty);
Assert(newSkolems.size() == 1);
iinfo.d_skolems[LENGTH_SPLIT].push_back(newSkolems[0]);
- iinfo.d_id = Inference::SSPLIT_CST;
+ iinfo.d_id = InferenceId::STRINGS_SSPLIT_CST;
iinfo.d_idRev = isRev;
pinfer.push_back(info);
break;
@@ -1703,7 +1703,7 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
// make the conclusion
if (lentTestSuccess == -1)
{
- iinfo.d_id = Inference::SSPLIT_VAR;
+ iinfo.d_id = InferenceId::STRINGS_SSPLIT_VAR;
iinfo.d_conc =
getConclusion(x, y, PfRule::CONCAT_SPLIT, isRev, skc, newSkolems);
if (options::stringUnifiedVSpt() && !options::stringLenConc())
@@ -1714,14 +1714,14 @@ void CoreSolver::processSimpleNEq(NormalForm& nfi,
}
else if (lentTestSuccess == 0)
{
- iinfo.d_id = Inference::SSPLIT_VAR_PROP;
+ iinfo.d_id = InferenceId::STRINGS_SSPLIT_VAR_PROP;
iinfo.d_conc =
getConclusion(x, y, PfRule::CONCAT_LPROP, isRev, skc, newSkolems);
}
else
{
Assert(lentTestSuccess == 1);
- iinfo.d_id = Inference::SSPLIT_VAR_PROP;
+ iinfo.d_id = InferenceId::STRINGS_SSPLIT_VAR_PROP;
iinfo.d_conc =
getConclusion(y, x, PfRule::CONCAT_LPROP, isRev, skc, newSkolems);
}
@@ -1835,7 +1835,7 @@ CoreSolver::ProcessLoopResult CoreSolver::processLoop(NormalForm& nfi,
Trace("strings-loop") << "Strings::Loop: tails are different."
<< std::endl;
d_im.sendInference(
- iinfo.d_premises, conc, Inference::FLOOP_CONFLICT, false, true);
+ iinfo.d_premises, conc, InferenceId::STRINGS_FLOOP_CONFLICT, false, true);
return ProcessLoopResult::CONFLICT;
}
}
@@ -1856,7 +1856,7 @@ CoreSolver::ProcessLoopResult CoreSolver::processLoop(NormalForm& nfi,
iinfo.d_premises.clear();
// try to make t equal to empty to avoid loop
iinfo.d_conc = nm->mkNode(kind::OR, split_eq, split_eq.negate());
- iinfo.d_id = Inference::LEN_SPLIT_EMP;
+ iinfo.d_id = InferenceId::STRINGS_LEN_SPLIT_EMP;
return ProcessLoopResult::INFERENCE;
}
else
@@ -1973,7 +1973,7 @@ CoreSolver::ProcessLoopResult CoreSolver::processLoop(NormalForm& nfi,
// we will be done
iinfo.d_conc = conc;
- iinfo.d_id = Inference::FLOOP;
+ iinfo.d_id = InferenceId::STRINGS_FLOOP;
info.d_nfPair[0] = nfi.d_base;
info.d_nfPair[1] = nfj.d_base;
return ProcessLoopResult::INFERENCE;
@@ -2042,7 +2042,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
std::vector<Node> premises;
premises.push_back(deq);
Node conc = u[0].eqNode(vc).notNode();
- d_im.sendInference(premises, conc, Inference::UNIT_INJ_DEQ, false, true);
+ d_im.sendInference(premises, conc, InferenceId::STRINGS_UNIT_INJ_DEQ, false, true);
return;
}
Trace("strings-solve-debug") << "...trivial" << std::endl;
@@ -2135,7 +2135,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
// E.g. x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) != len(y) --->
// x = "" v x != ""
Node emp = Word::mkEmptyWord(nck.getType());
- d_im.sendSplit(nck, emp, Inference::DEQ_DISL_EMP_SPLIT);
+ d_im.sendSplit(nck, emp, InferenceId::STRINGS_DEQ_DISL_EMP_SPLIT);
return;
}
@@ -2163,7 +2163,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
// x = "a" v x != "a"
if (d_im.sendSplit(firstChar,
nck,
- Inference::DEQ_DISL_FIRST_CHAR_EQ_SPLIT,
+ InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_EQ_SPLIT,
false))
{
return;
@@ -2195,7 +2195,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
d_im.sendInference(antecLen,
antecLen,
conc,
- Inference::DEQ_DISL_FIRST_CHAR_STRING_SPLIT,
+ InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_STRING_SPLIT,
false,
true);
return;
@@ -2240,7 +2240,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
d_im.sendInference(antecLen,
antecLen,
conc,
- Inference::DEQ_DISL_STRINGS_SPLIT,
+ InferenceId::STRINGS_DEQ_DISL_STRINGS_SPLIT,
false,
true);
}
@@ -2256,7 +2256,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
// E.g. x ++ x' ++ ... != y ++ y' ++ ... ^ len(x) = len(y) --->
// x = y v x != y
Assert(!d_state.areDisequal(x, y));
- if (d_im.sendSplit(x, y, Inference::DEQ_STRINGS_EQ, false))
+ if (d_im.sendSplit(x, y, InferenceId::STRINGS_DEQ_STRINGS_EQ, false))
{
return;
}
@@ -2268,7 +2268,7 @@ void CoreSolver::processDeq(Node ni, Node nj)
//
// E.g. x ++ x' ++ ... != y ++ y' ++ ... --->
// len(x) = len(y) v len(x) != len(y)
- if (d_im.sendSplit(xLenTerm, yLenTerm, Inference::DEQ_LENS_EQ))
+ if (d_im.sendSplit(xLenTerm, yLenTerm, InferenceId::STRINGS_DEQ_LENS_EQ))
{
return;
}
@@ -2343,7 +2343,7 @@ bool CoreSolver::processSimpleDeq(std::vector<Node>& nfi,
Node conc = cc.size() == 1
? cc[0]
: NodeManager::currentNM()->mkNode(kind::AND, cc);
- d_im.sendInference(ant, antn, conc, Inference::DEQ_NORM_EMP, isRev, true);
+ d_im.sendInference(ant, antn, conc, InferenceId::STRINGS_DEQ_NORM_EMP, isRev, true);
return true;
}
@@ -2519,7 +2519,7 @@ void CoreSolver::checkNormalFormsDeq()
}
if (!d_state.areEqual(lt[0], lt[1]) && !d_state.areDisequal(lt[0], lt[1]))
{
- d_im.sendSplit(lt[0], lt[1], Inference::DEQ_LENGTH_SP);
+ d_im.sendSplit(lt[0], lt[1], InferenceId::STRINGS_DEQ_LENGTH_SP);
}
}
}
@@ -2627,7 +2627,7 @@ void CoreSolver::checkLengthsEqc() {
{
Node eq = llt.eqNode(lc);
ei->d_normalizedLength.set(eq);
- d_im.sendInference(ant, eq, Inference::LEN_NORM, false, true);
+ d_im.sendInference(ant, eq, InferenceId::STRINGS_LEN_NORM, false, true);
}
}
}
diff --git a/src/theory/strings/extf_solver.cpp b/src/theory/strings/extf_solver.cpp
index 8db53c53e..c4e06e39c 100644
--- a/src/theory/strings/extf_solver.cpp
+++ b/src/theory/strings/extf_solver.cpp
@@ -128,7 +128,7 @@ bool ExtfSolver::doReduction(int effort, Node n)
lexp.push_back(n.negate());
Node xneqs = x.eqNode(s).negate();
d_im.sendInference(
- lexp, xneqs, Inference::CTN_NEG_EQUAL, false, true);
+ lexp, xneqs, InferenceId::STRINGS_CTN_NEG_EQUAL, false, true);
}
// this depends on the current assertions, so this
// inference is context-dependent
@@ -177,7 +177,7 @@ bool ExtfSolver::doReduction(int effort, Node n)
eq = eq[1];
std::vector<Node> expn;
expn.push_back(n);
- d_im.sendInference(expn, expn, eq, Inference::CTN_POS, false, true);
+ d_im.sendInference(expn, expn, eq, InferenceId::STRINGS_CTN_POS, false, true);
Trace("strings-extf-debug")
<< " resolve extf : " << n << " based on positive contain reduction."
<< std::endl;
@@ -206,7 +206,7 @@ bool ExtfSolver::doReduction(int effort, Node n)
Trace("strings-red-lemma") << "...from " << n << std::endl;
Trace("strings-red-lemma")
<< "Reduction_" << effort << " rewritten : " << Rewriter::rewrite(nnlem) << std::endl;
- d_im.sendInference(d_emptyVec, nnlem, Inference::REDUCTION, false, true);
+ d_im.sendInference(d_emptyVec, nnlem, InferenceId::STRINGS_REDUCTION, false, true);
Trace("strings-extf-debug")
<< " resolve extf : " << n << " based on reduction." << std::endl;
// add as reduction lemma
@@ -387,7 +387,7 @@ void ExtfSolver::checkExtfEval(int effort)
{
Trace("strings-extf")
<< " resolve extf : " << sn << " -> " << nrc << std::endl;
- Inference inf = effort == 0 ? Inference::EXTF : Inference::EXTF_N;
+ InferenceId inf = effort == 0 ? InferenceId::STRINGS_EXTF : InferenceId::STRINGS_EXTF_N;
d_im.sendInference(einfo.d_exp, conc, inf, false, true);
d_statistics.d_cdSimplifications << n.getKind();
}
@@ -424,8 +424,8 @@ void ExtfSolver::checkExtfEval(int effort)
// reduced since this argument may be circular: we may infer than n
// can be reduced to something else, but that thing may argue that it
// can be reduced to n, in theory.
- Inference infer =
- effort == 0 ? Inference::EXTF_D : Inference::EXTF_D_N;
+ InferenceId infer =
+ effort == 0 ? InferenceId::STRINGS_EXTF_D : InferenceId::STRINGS_EXTF_D_N;
d_im.sendInternalInference(einfo.d_exp, nrcAssert, infer);
}
to_reduce = nrc;
@@ -545,7 +545,7 @@ void ExtfSolver::checkExtfInference(Node n,
if (d_state.areEqual(conc, d_false))
{
// we are in conflict
- d_im.sendInference(in.d_exp, conc, Inference::CTN_DECOMPOSE);
+ d_im.sendInference(in.d_exp, conc, InferenceId::STRINGS_CTN_DECOMPOSE);
}
else if (d_extt.hasFunctionKind(conc.getKind()))
{
@@ -622,7 +622,7 @@ void ExtfSolver::checkExtfInference(Node n,
exp_c.insert(exp_c.end(),
d_extfInfoTmp[ofrom].d_exp.begin(),
d_extfInfoTmp[ofrom].d_exp.end());
- d_im.sendInference(exp_c, conc, Inference::CTN_TRANS);
+ d_im.sendInference(exp_c, conc, InferenceId::STRINGS_CTN_TRANS);
}
}
}
@@ -656,7 +656,7 @@ void ExtfSolver::checkExtfInference(Node n,
inferEqrr = Rewriter::rewrite(inferEqrr);
Trace("strings-extf-infer") << "checkExtfInference: " << inferEq
<< " ...reduces to " << inferEqrr << std::endl;
- d_im.sendInternalInference(in.d_exp, inferEqrr, Inference::EXTF_EQ_REW);
+ d_im.sendInternalInference(in.d_exp, inferEqrr, InferenceId::STRINGS_EXTF_EQ_REW);
}
}
diff --git a/src/theory/strings/infer_info.cpp b/src/theory/strings/infer_info.cpp
index 4cb072efb..c543de0e0 100644
--- a/src/theory/strings/infer_info.cpp
+++ b/src/theory/strings/infer_info.cpp
@@ -21,86 +21,7 @@ namespace CVC4 {
namespace theory {
namespace strings {
-const char* toString(Inference i)
-{
- switch (i)
- {
- case Inference::I_NORM_S: return "I_NORM_S";
- case Inference::I_CONST_MERGE: return "I_CONST_MERGE";
- case Inference::I_CONST_CONFLICT: return "I_CONST_CONFLICT";
- case Inference::I_NORM: return "I_NORM";
- case Inference::UNIT_INJ: return "UNIT_INJ";
- case Inference::UNIT_CONST_CONFLICT: return "UNIT_CONST_CONFLICT";
- case Inference::UNIT_INJ_DEQ: return "UNIT_INJ_DEQ";
- case Inference::CARD_SP: return "CARD_SP";
- case Inference::CARDINALITY: return "CARDINALITY";
- case Inference::I_CYCLE_E: return "I_CYCLE_E";
- case Inference::I_CYCLE: return "I_CYCLE";
- case Inference::F_CONST: return "F_CONST";
- case Inference::F_UNIFY: return "F_UNIFY";
- case Inference::F_ENDPOINT_EMP: return "F_ENDPOINT_EMP";
- case Inference::F_ENDPOINT_EQ: return "F_ENDPOINT_EQ";
- case Inference::F_NCTN: return "F_NCTN";
- case Inference::N_EQ_CONF: return "N_EQ_CONF";
- case Inference::N_ENDPOINT_EMP: return "N_ENDPOINT_EMP";
- case Inference::N_UNIFY: return "N_UNIFY";
- case Inference::N_ENDPOINT_EQ: return "N_ENDPOINT_EQ";
- case Inference::N_CONST: return "N_CONST";
- case Inference::INFER_EMP: return "INFER_EMP";
- case Inference::SSPLIT_CST_PROP: return "SSPLIT_CST_PROP";
- case Inference::SSPLIT_VAR_PROP: return "SSPLIT_VAR_PROP";
- case Inference::LEN_SPLIT: return "LEN_SPLIT";
- case Inference::LEN_SPLIT_EMP: return "LEN_SPLIT_EMP";
- case Inference::SSPLIT_CST: return "SSPLIT_CST";
- case Inference::SSPLIT_VAR: return "SSPLIT_VAR";
- case Inference::FLOOP: return "FLOOP";
- case Inference::FLOOP_CONFLICT: return "FLOOP_CONFLICT";
- case Inference::NORMAL_FORM: return "NORMAL_FORM";
- case Inference::N_NCTN: return "N_NCTN";
- case Inference::LEN_NORM: return "LEN_NORM";
- case Inference::DEQ_DISL_EMP_SPLIT: return "DEQ_DISL_EMP_SPLIT";
- case Inference::DEQ_DISL_FIRST_CHAR_EQ_SPLIT:
- return "DEQ_DISL_FIRST_CHAR_EQ_SPLIT";
- case Inference::DEQ_DISL_FIRST_CHAR_STRING_SPLIT:
- return "DEQ_DISL_FIRST_CHAR_STRING_SPLIT";
- case Inference::DEQ_STRINGS_EQ: return "DEQ_STRINGS_EQ";
- case Inference::DEQ_DISL_STRINGS_SPLIT: return "DEQ_DISL_STRINGS_SPLIT";
- case Inference::DEQ_LENS_EQ: return "DEQ_LENS_EQ";
- case Inference::DEQ_NORM_EMP: return "DEQ_NORM_EMP";
- case Inference::DEQ_LENGTH_SP: return "DEQ_LENGTH_SP";
- case Inference::CODE_PROXY: return "CODE_PROXY";
- case Inference::CODE_INJ: return "CODE_INJ";
- case Inference::RE_NF_CONFLICT: return "RE_NF_CONFLICT";
- case Inference::RE_UNFOLD_POS: return "RE_UNFOLD_POS";
- case Inference::RE_UNFOLD_NEG: return "RE_UNFOLD_NEG";
- case Inference::RE_INTER_INCLUDE: return "RE_INTER_INCLUDE";
- case Inference::RE_INTER_CONF: return "RE_INTER_CONF";
- case Inference::RE_INTER_INFER: return "RE_INTER_INFER";
- case Inference::RE_DELTA: return "RE_DELTA";
- case Inference::RE_DELTA_CONF: return "RE_DELTA_CONF";
- case Inference::RE_DERIVE: return "RE_DERIVE";
- case Inference::EXTF: return "EXTF";
- case Inference::EXTF_N: return "EXTF_N";
- case Inference::EXTF_D: return "EXTF_D";
- case Inference::EXTF_D_N: return "EXTF_D_N";
- case Inference::EXTF_EQ_REW: return "EXTF_EQ_REW";
- case Inference::CTN_TRANS: return "CTN_TRANS";
- case Inference::CTN_DECOMPOSE: return "CTN_DECOMPOSE";
- case Inference::CTN_NEG_EQUAL: return "CTN_NEG_EQUAL";
- case Inference::CTN_POS: return "CTN_POS";
- case Inference::REDUCTION: return "REDUCTION";
- case Inference::PREFIX_CONFLICT: return "PREFIX_CONFLICT";
- default: return "?";
- }
-}
-
-std::ostream& operator<<(std::ostream& out, Inference i)
-{
- out << toString(i);
- return out;
-}
-
-InferInfo::InferInfo() : d_sim(nullptr), d_id(Inference::NONE), d_idRev(false)
+InferInfo::InferInfo() : d_sim(nullptr), d_id(InferenceId::UNKNOWN), d_idRev(false)
{
}
diff --git a/src/theory/strings/infer_info.h b/src/theory/strings/infer_info.h
index a6c4776eb..c0667e53c 100644
--- a/src/theory/strings/infer_info.h
+++ b/src/theory/strings/infer_info.h
@@ -21,6 +21,7 @@
#include <vector>
#include "expr/node.h"
+#include "theory/inference_id.h"
#include "theory/theory_inference.h"
#include "util/safe_print.h"
@@ -28,316 +29,6 @@ namespace CVC4 {
namespace theory {
namespace strings {
-/** Types of inferences used in the procedure
- *
- * These are variants of the inference rules in Figures 3-5 of Liang et al.
- * "A DPLL(T) Solver for a Theory of Strings and Regular Expressions", CAV 2014.
- *
- * Note: The order in this enum matters in certain cases (e.g. inferences
- * related to normal forms), inferences that come first are generally
- * preferred.
- *
- * Notice that an inference is intentionally distinct from PfRule. An
- * inference captures *why* we performed a reasoning step, and a PfRule
- * rule captures *what* reasoning step was used. For instance, the inference
- * LEN_SPLIT translates to PfRule::SPLIT. The use of stats on inferences allows
- * us to know that we performed N splits (PfRule::SPLIT) because we wanted
- * to split on lengths for string equalities (Inference::LEN_SPLIT).
- */
-enum class Inference : uint32_t
-{
- BEGIN,
- //-------------------------------------- base solver
- // initial normalize singular
- // x1 = "" ^ ... ^ x_{i-1} = "" ^ x_{i+1} = "" ^ ... ^ xn = "" =>
- // x1 ++ ... ++ xn = xi
- I_NORM_S,
- // initial constant merge
- // explain_constant(x, c) => x = c
- // Above, explain_constant(x,c) is a basic explanation of why x must be equal
- // to string constant c, which is computed by taking arguments of
- // concatenation terms that are entailed to be constants. For example:
- // ( y = "AB" ^ z = "C" ) => y ++ z = "ABC"
- I_CONST_MERGE,
- // initial constant conflict
- // ( explain_constant(x, c1) ^ explain_constant(x, c2) ^ x = y) => false
- // where c1 != c2.
- I_CONST_CONFLICT,
- // initial normalize
- // Given two concatenation terms, this is applied when we find that they are
- // equal after e.g. removing strings that are currently empty. For example:
- // y = "" ^ z = "" => x ++ y = z ++ x
- I_NORM,
- // injectivity of seq.unit
- // (seq.unit x) = (seq.unit y) => x=y, or
- // (seq.unit x) = (seq.unit c) => x=c
- UNIT_INJ,
- // unit constant conflict
- // (seq.unit x) = C => false if |C| != 1.
- UNIT_CONST_CONFLICT,
- // injectivity of seq.unit for disequality
- // (seq.unit x) != (seq.unit y) => x != y, or
- // (seq.unit x) != (seq.unit c) => x != c
- UNIT_INJ_DEQ,
- // A split due to cardinality
- CARD_SP,
- // The cardinality inference for strings, see Liang et al CAV 2014.
- CARDINALITY,
- //-------------------------------------- end base solver
- //-------------------------------------- core solver
- // A cycle in the empty string equivalence class, e.g.:
- // x ++ y = "" => x = ""
- // This is typically not applied due to length constraints implying emptiness.
- I_CYCLE_E,
- // A cycle in the containment ordering.
- // x = y ++ x => y = "" or
- // x = y ++ z ^ y = x ++ w => z = "" ^ w = ""
- // This is typically not applied due to length constraints implying emptiness.
- I_CYCLE,
- // Flat form constant
- // x = y ^ x = z ++ c ... ^ y = z ++ d => false
- // where c and d are distinct constants.
- F_CONST,
- // Flat form unify
- // x = y ^ x = z ++ x' ... ^ y = z ++ y' ^ len(x') = len(y') => x' = y'
- // Notice flat form instances are similar to normal form inferences but do
- // not involve recursive explanations.
- F_UNIFY,
- // Flat form endpoint empty
- // x = y ^ x = z ^ y = z ++ y' => y' = ""
- F_ENDPOINT_EMP,
- // Flat form endpoint equal
- // x = y ^ x = z ++ x' ^ y = z ++ y' => x' = y'
- F_ENDPOINT_EQ,
- // Flat form not contained
- // x = c ^ x = y => false when rewrite( contains( y, c ) ) = false
- F_NCTN,
- // Normal form equality conflict
- // x = N[x] ^ y = N[y] ^ x=y => false
- // where Rewriter::rewrite(N[x]=N[y]) = false.
- N_EQ_CONF,
- // Given two normal forms, infers that the remainder one of them has to be
- // empty. For example:
- // If x1 ++ x2 = y1 and x1 = y1, then x2 = ""
- N_ENDPOINT_EMP,
- // Given two normal forms, infers that two components have to be the same if
- // they have the same length. For example:
- // If x1 ++ x2 = x3 ++ x4 and len(x1) = len(x3) then x1 = x3
- N_UNIFY,
- // Given two normal forms, infers that the endpoints have to be the same. For
- // example:
- // If x1 ++ x2 = x3 ++ x4 ++ x5 and x1 = x3 then x2 = x4 ++ x5
- N_ENDPOINT_EQ,
- // Given two normal forms with constant endpoints, infers a conflict if the
- // endpoints do not agree. For example:
- // If "abc" ++ ... = "bc" ++ ... then conflict
- N_CONST,
- // infer empty, for example:
- // (~) x = ""
- // This is inferred when we encounter an x such that x = "" rewrites to a
- // constant. This inference is used for instance when we otherwise would have
- // split on the emptiness of x but the rewriter tells us the emptiness of x
- // can be inferred.
- INFER_EMP,
- // string split constant propagation, for example:
- // x = y, x = "abc", y = y1 ++ "b" ++ y2
- // implies y1 = "a" ++ y1'
- SSPLIT_CST_PROP,
- // string split variable propagation, for example:
- // x = y, x = x1 ++ x2, y = y1 ++ y2, len( x1 ) >= len( y1 )
- // implies x1 = y1 ++ x1'
- // This is inspired by Zheng et al CAV 2015.
- SSPLIT_VAR_PROP,
- // length split, for example:
- // len( x1 ) = len( y1 ) V len( x1 ) != len( y1 )
- // This is inferred when e.g. x = y, x = x1 ++ x2, y = y1 ++ y2.
- LEN_SPLIT,
- // length split empty, for example:
- // z = "" V z != ""
- // This is inferred when, e.g. x = y, x = z ++ x1, y = y1 ++ z
- LEN_SPLIT_EMP,
- // string split constant
- // x = y, x = "c" ++ x2, y = y1 ++ y2, y1 != ""
- // implies y1 = "c" ++ y1'
- // This is a special case of F-Split in Figure 5 of Liang et al CAV 2014.
- SSPLIT_CST,
- // string split variable, for example:
- // x = y, x = x1 ++ x2, y = y1 ++ y2
- // implies x1 = y1 ++ x1' V y1 = x1 ++ y1'
- // This is rule F-Split in Figure 5 of Liang et al CAV 2014.
- SSPLIT_VAR,
- // flat form loop, for example:
- // x = y, x = x1 ++ z, y = z ++ y2
- // implies z = u2 ++ u1, u in ( u1 ++ u2 )*, x1 = u2 ++ u, y2 = u ++ u1
- // for fresh u, u1, u2.
- // This is the rule F-Loop from Figure 5 of Liang et al CAV 2014.
- FLOOP,
- // loop conflict ???
- FLOOP_CONFLICT,
- // Normal form inference
- // x = y ^ z = y => x = z
- // This is applied when y is the normal form of both x and z.
- NORMAL_FORM,
- // Normal form not contained, same as FFROM_NCTN but for normal forms
- N_NCTN,
- // Length normalization
- // x = y => len( x ) = len( y )
- // Typically applied when y is the normal form of x.
- LEN_NORM,
- // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) != len(y), we apply the
- // inference:
- // x = "" v x != ""
- DEQ_DISL_EMP_SPLIT,
- // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) = 1, we apply the
- // inference:
- // x = "a" v x != "a"
- DEQ_DISL_FIRST_CHAR_EQ_SPLIT,
- // When x ++ x' ++ ... != "abc" ++ y' ++ ... ^ len(x) != "", we apply the
- // inference:
- // ni = x ++ x' ++ ... ^ nj = "abc" ++ y' ++ ... ^ x != "" --->
- // x = k1 ++ k2 ^ len(k1) = 1 ^ (k1 != "a" v x = "a" ++ k2)
- DEQ_DISL_FIRST_CHAR_STRING_SPLIT,
- // When x ++ x' ++ ... != y ++ y' ++ ... ^ len(x) != len(y), we apply the
- // inference:
- // ni = x ++ x' ++ ... ^ nj = y ++ y' ++ ... ^ ni != nj ^ len(x) != len(y)
- // --->
- // len(k1) = len(x) ^ len(k2) = len(y) ^ (y = k1 ++ k3 v x = k1 ++ k2)
- DEQ_DISL_STRINGS_SPLIT,
- // When x ++ x' ++ ... != y ++ y' ++ ... ^ len(x) = len(y), we apply the
- // inference:
- // x = y v x != y
- DEQ_STRINGS_EQ,
- // When x ++ x' ++ ... != y ++ y' ++ ... and we do not know how the lengths
- // of x and y compare, we apply the inference:
- // len(x) = len(y) v len(x) != len(y)
- DEQ_LENS_EQ,
- // When px ++ x ++ ... != py ^ len(px ++ x ++ ...) = len(py), we apply the
- // following inference that infers that the remainder of the longer normal
- // form must be empty:
- // ni = px ++ x ++ ... ^ nj = py ^ len(ni) = len(nj) --->
- // x = "" ^ ...
- DEQ_NORM_EMP,
- // When two strings are disequal s != t and the comparison of their lengths
- // is unknown, we apply the inference:
- // len(s) != len(t) V len(s) = len(t)
- DEQ_LENGTH_SP,
- //-------------------------------------- end core solver
- //-------------------------------------- codes solver
- // str.to_code( v ) = rewrite( str.to_code(c) )
- // where v is the proxy variable for c.
- CODE_PROXY,
- // str.code(x) = -1 V str.code(x) != str.code(y) V x = y
- CODE_INJ,
- //-------------------------------------- end codes solver
- //-------------------------------------- regexp solver
- // regular expression normal form conflict
- // ( x in R ^ x = y ^ rewrite((str.in_re y R)) = false ) => false
- // where y is the normal form computed for x.
- RE_NF_CONFLICT,
- // regular expression unfolding
- // This is a general class of inferences of the form:
- // (x in R) => F
- // where F is formula expressing the next step of checking whether x is in
- // R. For example:
- // (x in (R)*) =>
- // x = "" V x in R V ( x = x1 ++ x2 ++ x3 ^ x1 in R ^ x2 in (R)* ^ x3 in R)
- RE_UNFOLD_POS,
- // Same as above, for negative memberships
- RE_UNFOLD_NEG,
- // intersection inclusion conflict
- // (x in R1 ^ ~ x in R2) => false where [[includes(R2,R1)]]
- // Where includes(R2,R1) is a heuristic check for whether R2 includes R1.
- RE_INTER_INCLUDE,
- // intersection conflict, using regexp intersection computation
- // (x in R1 ^ x in R2) => false where [[intersect(R1, R2) = empty]]
- RE_INTER_CONF,
- // intersection inference
- // (x in R1 ^ y in R2 ^ x = y) => (x in re.inter(R1,R2))
- RE_INTER_INFER,
- // regular expression delta
- // (x = "" ^ x in R) => C
- // where "" in R holds if and only if C holds.
- RE_DELTA,
- // regular expression delta conflict
- // (x = "" ^ x in R) => false
- // where R does not accept the empty string.
- RE_DELTA_CONF,
- // regular expression derive ???
- RE_DERIVE,
- //-------------------------------------- end regexp solver
- //-------------------------------------- extended function solver
- // Standard extended function inferences from context-dependent rewriting
- // produced by constant substitutions. See Reynolds et al CAV 2017. These are
- // inferences of the form:
- // X = Y => f(X) = t when rewrite( f(Y) ) = t
- // where X = Y is a vector of equalities, where some of Y may be constants.
- EXTF,
- // Same as above, for normal form substitutions.
- EXTF_N,
- // Decompositions based on extended function inferences from context-dependent
- // rewriting produced by constant substitutions. This is like the above, but
- // handles cases where the inferred predicate is not necessarily an equality
- // involving f(X). For example:
- // x = "A" ^ contains( y ++ x, "B" ) => contains( y, "B" )
- // This is generally only inferred if contains( y, "B" ) is a known term in
- // the current context.
- EXTF_D,
- // Same as above, for normal form substitutions.
- EXTF_D_N,
- // Extended function equality rewrite. This is an inference of the form:
- // t = s => P
- // where P is a predicate implied by rewrite( t = s ).
- // Typically, t is an application of an extended function and s is a constant.
- // It is generally only inferred if P is a predicate over known terms.
- EXTF_EQ_REW,
- // contain transitive
- // ( str.contains( s, t ) ^ ~contains( s, r ) ) => ~contains( t, r ).
- CTN_TRANS,
- // contain decompose
- // str.contains( x, str.++( y1, ..., yn ) ) => str.contains( x, yi ) or
- // ~str.contains( str.++( x1, ..., xn ), y ) => ~str.contains( xi, y )
- CTN_DECOMPOSE,
- // contain neg equal
- // ( len( x ) = len( s ) ^ ~contains( x, s ) ) => x != s
- CTN_NEG_EQUAL,
- // contain positive
- // str.contains( x, y ) => x = w1 ++ y ++ w2
- // where w1 and w2 are skolem variables.
- CTN_POS,
- // All reduction inferences of the form:
- // f(x1, .., xn) = y ^ P(x1, ..., xn, y)
- // where f is an extended function, y is the purification variable for
- // f(x1, .., xn) and P is the reduction predicate for f
- // (see theory_strings_preprocess).
- REDUCTION,
- //-------------------------------------- end extended function solver
- //-------------------------------------- prefix conflict
- // prefix conflict (coarse-grained)
- PREFIX_CONFLICT,
- //-------------------------------------- end prefix conflict
- NONE,
-};
-
-/**
- * Converts an inference to a string. Note: This function is also used in
- * `safe_print()`. Changing this functions name or signature will result in
- * `safe_print()` printing "<unsupported>" instead of the proper strings for
- * the enum values.
- *
- * @param i The inference
- * @return The name of the inference
- */
-const char* toString(Inference i);
-
-/**
- * Writes an inference name to a stream.
- *
- * @param out The stream to write to
- * @param i The inference to write to the stream
- * @return The stream
- */
-std::ostream& operator<<(std::ostream& out, Inference i);
-
/**
* Length status, used for indicating the length constraints for Skolems
* introduced by the theory of strings.
@@ -388,7 +79,7 @@ class InferInfo : public TheoryInference
/** Pointer to the class used for processing this info */
InferenceManager* d_sim;
/** The inference identifier */
- Inference d_id;
+ InferenceId d_id;
/** Whether it is the reverse form of the above id */
bool d_idRev;
/** The conclusion */
diff --git a/src/theory/strings/infer_proof_cons.cpp b/src/theory/strings/infer_proof_cons.cpp
index 4df7ca36a..4817df39d 100644
--- a/src/theory/strings/infer_proof_cons.cpp
+++ b/src/theory/strings/infer_proof_cons.cpp
@@ -56,7 +56,7 @@ void InferProofCons::notifyFact(const InferInfo& ii)
d_lazyFactMap.insert(ii.d_conc, iic);
}
-void InferProofCons::convert(Inference infer,
+void InferProofCons::convert(InferenceId infer,
bool isRev,
Node conc,
const std::vector<Node>& exp,
@@ -93,12 +93,12 @@ void InferProofCons::convert(Inference infer,
switch (infer)
{
// ========================== equal by substitution+rewriting
- case Inference::I_NORM_S:
- case Inference::I_CONST_MERGE:
- case Inference::I_NORM:
- case Inference::LEN_NORM:
- case Inference::NORMAL_FORM:
- case Inference::CODE_PROXY:
+ case InferenceId::STRINGS_I_NORM_S:
+ case InferenceId::STRINGS_I_CONST_MERGE:
+ case InferenceId::STRINGS_I_NORM:
+ case InferenceId::STRINGS_LEN_NORM:
+ case InferenceId::STRINGS_NORMAL_FORM:
+ case InferenceId::STRINGS_CODE_PROXY:
{
ps.d_args.push_back(conc);
// will attempt this rule
@@ -106,13 +106,13 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== substitution + rewriting
- case Inference::RE_NF_CONFLICT:
- case Inference::EXTF:
- case Inference::EXTF_N:
- case Inference::EXTF_D:
- case Inference::EXTF_D_N:
- case Inference::I_CONST_CONFLICT:
- case Inference::UNIT_CONST_CONFLICT:
+ case InferenceId::STRINGS_RE_NF_CONFLICT:
+ case InferenceId::STRINGS_EXTF:
+ case InferenceId::STRINGS_EXTF_N:
+ case InferenceId::STRINGS_EXTF_D:
+ case InferenceId::STRINGS_EXTF_D_N:
+ case InferenceId::STRINGS_I_CONST_CONFLICT:
+ case InferenceId::STRINGS_UNIT_CONST_CONFLICT:
{
if (!ps.d_children.empty())
{
@@ -132,8 +132,8 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== rewrite pred
- case Inference::EXTF_EQ_REW:
- case Inference::INFER_EMP:
+ case InferenceId::STRINGS_EXTF_EQ_REW:
+ case InferenceId::STRINGS_INFER_EMP:
{
// the last child is the predicate we are operating on, move to front
Node src = ps.d_children[ps.d_children.size() - 1];
@@ -159,21 +159,21 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== substitution+rewriting, CONCAT_EQ, ...
- case Inference::F_CONST:
- case Inference::F_UNIFY:
- case Inference::F_ENDPOINT_EMP:
- case Inference::F_ENDPOINT_EQ:
- case Inference::F_NCTN:
- case Inference::N_EQ_CONF:
- case Inference::N_CONST:
- case Inference::N_UNIFY:
- case Inference::N_ENDPOINT_EMP:
- case Inference::N_ENDPOINT_EQ:
- case Inference::N_NCTN:
- case Inference::SSPLIT_CST_PROP:
- case Inference::SSPLIT_VAR_PROP:
- case Inference::SSPLIT_CST:
- case Inference::SSPLIT_VAR:
+ case InferenceId::STRINGS_F_CONST:
+ case InferenceId::STRINGS_F_UNIFY:
+ case InferenceId::STRINGS_F_ENDPOINT_EMP:
+ case InferenceId::STRINGS_F_ENDPOINT_EQ:
+ case InferenceId::STRINGS_F_NCTN:
+ case InferenceId::STRINGS_N_EQ_CONF:
+ case InferenceId::STRINGS_N_CONST:
+ case InferenceId::STRINGS_N_UNIFY:
+ case InferenceId::STRINGS_N_ENDPOINT_EMP:
+ case InferenceId::STRINGS_N_ENDPOINT_EQ:
+ case InferenceId::STRINGS_N_NCTN:
+ case InferenceId::STRINGS_SSPLIT_CST_PROP:
+ case InferenceId::STRINGS_SSPLIT_VAR_PROP:
+ case InferenceId::STRINGS_SSPLIT_CST:
+ case InferenceId::STRINGS_SSPLIT_VAR:
{
Trace("strings-ipc-core") << "Generate core rule for " << infer
<< " (rev=" << isRev << ")" << std::endl;
@@ -189,10 +189,10 @@ void InferProofCons::convert(Inference infer,
// the length constraint
std::vector<Node> lenConstraint;
// these inferences have a length constraint as the last explain
- if (infer == Inference::N_UNIFY || infer == Inference::F_UNIFY
- || infer == Inference::SSPLIT_CST || infer == Inference::SSPLIT_VAR
- || infer == Inference::SSPLIT_VAR_PROP
- || infer == Inference::SSPLIT_CST_PROP)
+ if (infer == InferenceId::STRINGS_N_UNIFY || infer == InferenceId::STRINGS_F_UNIFY
+ || infer == InferenceId::STRINGS_SSPLIT_CST || infer == InferenceId::STRINGS_SSPLIT_VAR
+ || infer == InferenceId::STRINGS_SSPLIT_VAR_PROP
+ || infer == InferenceId::STRINGS_SSPLIT_CST_PROP)
{
if (exp.size() >= 2)
{
@@ -269,10 +269,10 @@ void InferProofCons::convert(Inference infer,
}
// Now, mainEqCeq is an equality t ++ ... == s ++ ... where the
// inference involved t and s.
- if (infer == Inference::N_ENDPOINT_EQ
- || infer == Inference::N_ENDPOINT_EMP
- || infer == Inference::F_ENDPOINT_EQ
- || infer == Inference::F_ENDPOINT_EMP)
+ if (infer == InferenceId::STRINGS_N_ENDPOINT_EQ
+ || infer == InferenceId::STRINGS_N_ENDPOINT_EMP
+ || infer == InferenceId::STRINGS_F_ENDPOINT_EQ
+ || infer == InferenceId::STRINGS_F_ENDPOINT_EMP)
{
// Should be equal to conclusion already, or rewrite to it.
// Notice that this step is necessary to handle the "rproc"
@@ -295,8 +295,8 @@ void InferProofCons::convert(Inference infer,
// t1 ++ ... ++ tn == "". However, these are very rarely applied, let
// alone for 2+ children. This case is intentionally unhandled here.
}
- else if (infer == Inference::N_CONST || infer == Inference::F_CONST
- || infer == Inference::N_EQ_CONF)
+ else if (infer == InferenceId::STRINGS_N_CONST || infer == InferenceId::STRINGS_F_CONST
+ || infer == InferenceId::STRINGS_N_EQ_CONF)
{
// should be a constant conflict
std::vector<Node> childrenC;
@@ -320,15 +320,15 @@ void InferProofCons::convert(Inference infer,
Node s0 = svec[isRev ? svec.size() - 1 : 0];
bool applySym = false;
// may need to apply symmetry
- if ((infer == Inference::SSPLIT_CST
- || infer == Inference::SSPLIT_CST_PROP)
+ if ((infer == InferenceId::STRINGS_SSPLIT_CST
+ || infer == InferenceId::STRINGS_SSPLIT_CST_PROP)
&& t0.isConst())
{
Assert(!s0.isConst());
applySym = true;
std::swap(t0, s0);
}
- if (infer == Inference::N_UNIFY || infer == Inference::F_UNIFY)
+ if (infer == InferenceId::STRINGS_N_UNIFY || infer == InferenceId::STRINGS_F_UNIFY)
{
if (conc.getKind() != EQUAL)
{
@@ -347,7 +347,7 @@ void InferProofCons::convert(Inference infer,
// the form of the required length constraint expected by the proof
Node lenReq;
bool lenSuccess = false;
- if (infer == Inference::N_UNIFY || infer == Inference::F_UNIFY)
+ if (infer == InferenceId::STRINGS_N_UNIFY || infer == InferenceId::STRINGS_F_UNIFY)
{
// the required premise for unify is always len(x) = len(y),
// however the explanation may not be literally this. Thus, we
@@ -359,7 +359,7 @@ void InferProofCons::convert(Inference infer,
lenSuccess = convertLengthPf(lenReq, lenConstraint, psb);
rule = PfRule::CONCAT_UNIFY;
}
- else if (infer == Inference::SSPLIT_VAR)
+ else if (infer == InferenceId::STRINGS_SSPLIT_VAR)
{
// it should be the case that lenConstraint => lenReq
lenReq = nm->mkNode(STRING_LENGTH, t0)
@@ -368,7 +368,7 @@ void InferProofCons::convert(Inference infer,
lenSuccess = convertLengthPf(lenReq, lenConstraint, psb);
rule = PfRule::CONCAT_SPLIT;
}
- else if (infer == Inference::SSPLIT_CST)
+ else if (infer == InferenceId::STRINGS_SSPLIT_CST)
{
// it should be the case that lenConstraint => lenReq
lenReq = nm->mkNode(STRING_LENGTH, t0)
@@ -377,7 +377,7 @@ void InferProofCons::convert(Inference infer,
lenSuccess = convertLengthPf(lenReq, lenConstraint, psb);
rule = PfRule::CONCAT_CSPLIT;
}
- else if (infer == Inference::SSPLIT_VAR_PROP)
+ else if (infer == InferenceId::STRINGS_SSPLIT_VAR_PROP)
{
// it should be the case that lenConstraint => lenReq
for (unsigned r = 0; r < 2; r++)
@@ -399,7 +399,7 @@ void InferProofCons::convert(Inference infer,
}
rule = PfRule::CONCAT_LPROP;
}
- else if (infer == Inference::SSPLIT_CST_PROP)
+ else if (infer == InferenceId::STRINGS_SSPLIT_CST_PROP)
{
// it should be the case that lenConstraint => lenReq
lenReq = nm->mkNode(STRING_LENGTH, t0)
@@ -465,8 +465,8 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== Disequalities
- case Inference::DEQ_DISL_FIRST_CHAR_STRING_SPLIT:
- case Inference::DEQ_DISL_STRINGS_SPLIT:
+ case InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_STRING_SPLIT:
+ case InferenceId::STRINGS_DEQ_DISL_STRINGS_SPLIT:
{
if (conc.getKind() != AND || conc.getNumChildren() != 2
|| conc[0].getKind() != EQUAL || !conc[0][0].getType().isStringLike()
@@ -506,14 +506,14 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== Boolean split
- case Inference::CARD_SP:
- case Inference::LEN_SPLIT:
- case Inference::LEN_SPLIT_EMP:
- case Inference::DEQ_DISL_EMP_SPLIT:
- case Inference::DEQ_DISL_FIRST_CHAR_EQ_SPLIT:
- case Inference::DEQ_STRINGS_EQ:
- case Inference::DEQ_LENS_EQ:
- case Inference::DEQ_LENGTH_SP:
+ case InferenceId::STRINGS_CARD_SP:
+ case InferenceId::STRINGS_LEN_SPLIT:
+ case InferenceId::STRINGS_LEN_SPLIT_EMP:
+ case InferenceId::STRINGS_DEQ_DISL_EMP_SPLIT:
+ case InferenceId::STRINGS_DEQ_DISL_FIRST_CHAR_EQ_SPLIT:
+ case InferenceId::STRINGS_DEQ_STRINGS_EQ:
+ case InferenceId::STRINGS_DEQ_LENS_EQ:
+ case InferenceId::STRINGS_DEQ_LENGTH_SP:
{
if (conc.getKind() != OR)
{
@@ -530,10 +530,10 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== Regular expression unfolding
- case Inference::RE_UNFOLD_POS:
- case Inference::RE_UNFOLD_NEG:
+ case InferenceId::STRINGS_RE_UNFOLD_POS:
+ case InferenceId::STRINGS_RE_UNFOLD_NEG:
{
- if (infer == Inference::RE_UNFOLD_POS)
+ if (infer == InferenceId::STRINGS_RE_UNFOLD_POS)
{
ps.d_rule = PfRule::RE_UNFOLD_POS;
}
@@ -559,8 +559,8 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== Reduction
- case Inference::CTN_POS:
- case Inference::CTN_NEG_EQUAL:
+ case InferenceId::STRINGS_CTN_POS:
+ case InferenceId::STRINGS_CTN_NEG_EQUAL:
{
if (ps.d_children.size() != 1)
{
@@ -595,7 +595,7 @@ void InferProofCons::convert(Inference infer,
}
break;
- case Inference::REDUCTION:
+ case InferenceId::STRINGS_REDUCTION:
{
size_t nchild = conc.getNumChildren();
Node mainEq;
@@ -635,7 +635,7 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== code injectivity
- case Inference::CODE_INJ:
+ case InferenceId::STRINGS_CODE_INJ:
{
ps.d_rule = PfRule::STRING_CODE_INJ;
Assert(conc.getKind() == OR && conc.getNumChildren() == 3
@@ -645,11 +645,11 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== unit injectivity
- case Inference::UNIT_INJ: { ps.d_rule = PfRule::STRING_SEQ_UNIT_INJ;
+ case InferenceId::STRINGS_UNIT_INJ: { ps.d_rule = PfRule::STRING_SEQ_UNIT_INJ;
}
break;
// ========================== prefix conflict
- case Inference::PREFIX_CONFLICT:
+ case InferenceId::STRINGS_PREFIX_CONFLICT:
{
Trace("strings-ipc-prefix") << "Prefix conflict..." << std::endl;
std::vector<Node> eqs;
@@ -740,9 +740,9 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== regular expressions
- case Inference::RE_INTER_INCLUDE:
- case Inference::RE_INTER_CONF:
- case Inference::RE_INTER_INFER:
+ case InferenceId::STRINGS_RE_INTER_INCLUDE:
+ case InferenceId::STRINGS_RE_INTER_CONF:
+ case InferenceId::STRINGS_RE_INTER_INFER:
{
std::vector<Node> reiExp;
std::vector<Node> reis;
@@ -810,17 +810,17 @@ void InferProofCons::convert(Inference infer,
}
break;
// ========================== unknown and currently unsupported
- case Inference::CARDINALITY:
- case Inference::I_CYCLE_E:
- case Inference::I_CYCLE:
- case Inference::RE_DELTA:
- case Inference::RE_DELTA_CONF:
- case Inference::RE_DERIVE:
- case Inference::FLOOP:
- case Inference::FLOOP_CONFLICT:
- case Inference::DEQ_NORM_EMP:
- case Inference::CTN_TRANS:
- case Inference::CTN_DECOMPOSE:
+ case InferenceId::STRINGS_CARDINALITY:
+ case InferenceId::STRINGS_I_CYCLE_E:
+ case InferenceId::STRINGS_I_CYCLE:
+ case InferenceId::STRINGS_RE_DELTA:
+ case InferenceId::STRINGS_RE_DELTA_CONF:
+ case InferenceId::STRINGS_RE_DERIVE:
+ case InferenceId::STRINGS_FLOOP:
+ case InferenceId::STRINGS_FLOOP_CONFLICT:
+ case InferenceId::STRINGS_DEQ_NORM_EMP:
+ case InferenceId::STRINGS_CTN_TRANS:
+ case InferenceId::STRINGS_CTN_DECOMPOSE:
default:
// do nothing, these will be converted to STRING_TRUST below since the
// rule is unknown.
diff --git a/src/theory/strings/infer_proof_cons.h b/src/theory/strings/infer_proof_cons.h
index 1b066b4b3..49fd338b5 100644
--- a/src/theory/strings/infer_proof_cons.h
+++ b/src/theory/strings/infer_proof_cons.h
@@ -100,7 +100,7 @@ class InferProofCons : public ProofGenerator
* In particular, psb will contain a set of steps that form a proof
* whose conclusion is ii.d_conc and whose free assumptions are ii.d_ant.
*/
- void convert(Inference infer,
+ void convert(InferenceId infer,
bool isRev,
Node conc,
const std::vector<Node>& exp,
diff --git a/src/theory/strings/inference_manager.cpp b/src/theory/strings/inference_manager.cpp
index 0c55d26e8..6a4fd55df 100644
--- a/src/theory/strings/inference_manager.cpp
+++ b/src/theory/strings/inference_manager.cpp
@@ -65,7 +65,7 @@ void InferenceManager::doPending()
bool InferenceManager::sendInternalInference(std::vector<Node>& exp,
Node conc,
- Inference infer)
+ InferenceId infer)
{
if (conc.getKind() == AND
|| (conc.getKind() == NOT && conc[0].getKind() == OR))
@@ -125,7 +125,7 @@ bool InferenceManager::sendInternalInference(std::vector<Node>& exp,
bool InferenceManager::sendInference(const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
Node eq,
- Inference infer,
+ InferenceId infer,
bool isRev,
bool asLemma)
{
@@ -151,7 +151,7 @@ bool InferenceManager::sendInference(const std::vector<Node>& exp,
bool InferenceManager::sendInference(const std::vector<Node>& exp,
Node eq,
- Inference infer,
+ InferenceId infer,
bool isRev,
bool asLemma)
{
@@ -225,7 +225,7 @@ void InferenceManager::sendInference(InferInfo& ii, bool asLemma)
addPendingFact(std::unique_ptr<InferInfo>(new InferInfo(ii)));
}
-bool InferenceManager::sendSplit(Node a, Node b, Inference infer, bool preq)
+bool InferenceManager::sendSplit(Node a, Node b, InferenceId infer, bool preq)
{
Node eq = a.eqNode(b);
eq = Rewriter::rewrite(eq);
@@ -412,7 +412,7 @@ bool InferenceManager::processLemma(InferInfo& ii)
}
}
LemmaProperty p = LemmaProperty::NONE;
- if (ii.d_id == Inference::REDUCTION)
+ if (ii.d_id == InferenceId::STRINGS_REDUCTION)
{
p |= LemmaProperty::NEEDS_JUSTIFY;
}
diff --git a/src/theory/strings/inference_manager.h b/src/theory/strings/inference_manager.h
index 3280281bd..f16ce7183 100644
--- a/src/theory/strings/inference_manager.h
+++ b/src/theory/strings/inference_manager.h
@@ -114,7 +114,7 @@ class InferenceManager : public InferenceManagerBuffered
*/
bool sendInternalInference(std::vector<Node>& exp,
Node conc,
- Inference infer);
+ InferenceId infer);
/** send inference
*
@@ -164,13 +164,13 @@ class InferenceManager : public InferenceManagerBuffered
bool sendInference(const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
Node eq,
- Inference infer,
+ InferenceId infer,
bool isRev = false,
bool asLemma = false);
/** same as above, but where noExplain is empty */
bool sendInference(const std::vector<Node>& exp,
Node eq,
- Inference infer,
+ InferenceId infer,
bool isRev = false,
bool asLemma = false);
@@ -200,7 +200,7 @@ class InferenceManager : public InferenceManagerBuffered
* This method returns true if the split was non-trivial, and false
* otherwise. A split is trivial if a=b rewrites to a constant.
*/
- bool sendSplit(Node a, Node b, Inference infer, bool preq = true);
+ bool sendSplit(Node a, Node b, InferenceId infer, bool preq = true);
/**
* Set that we are incomplete for the current set of assertions (in other
* words, we must answer "unknown" instead of "sat"); this calls the output
diff --git a/src/theory/strings/regexp_solver.cpp b/src/theory/strings/regexp_solver.cpp
index 46570df48..2892b2398 100644
--- a/src/theory/strings/regexp_solver.cpp
+++ b/src/theory/strings/regexp_solver.cpp
@@ -227,7 +227,7 @@ void RegExpSolver::check(const std::map<Node, std::vector<Node> >& mems)
noExplain.push_back(assertion);
Node conc = Node::null();
d_im.sendInference(
- iexp, noExplain, conc, Inference::RE_NF_CONFLICT);
+ iexp, noExplain, conc, InferenceId::STRINGS_RE_NF_CONFLICT);
addedLemma = true;
break;
}
@@ -282,8 +282,8 @@ void RegExpSolver::check(const std::map<Node, std::vector<Node> >& mems)
{
d_statistics.d_regexpUnfoldingsNeg << atom[1].getKind();
}
- Inference inf =
- polarity ? Inference::RE_UNFOLD_POS : Inference::RE_UNFOLD_NEG;
+ InferenceId inf =
+ polarity ? InferenceId::STRINGS_RE_UNFOLD_POS : InferenceId::STRINGS_RE_UNFOLD_NEG;
d_im.sendInference(iexp, noExplain, conc, inf);
addedLemma = true;
if (changed)
@@ -404,7 +404,7 @@ bool RegExpSolver::checkEqcInclusion(std::vector<Node>& mems)
Node conc;
d_im.sendInference(
- vec_nodes, conc, Inference::RE_INTER_INCLUDE, false, true);
+ vec_nodes, conc, InferenceId::STRINGS_RE_INTER_INCLUDE, false, true);
return false;
}
}
@@ -486,7 +486,7 @@ bool RegExpSolver::checkEqcIntersect(const std::vector<Node>& mems)
}
Node conc;
d_im.sendInference(
- vec_nodes, conc, Inference::RE_INTER_CONF, false, true);
+ vec_nodes, conc, InferenceId::STRINGS_RE_INTER_CONF, false, true);
// conflict, return
return false;
}
@@ -516,7 +516,7 @@ bool RegExpSolver::checkEqcIntersect(const std::vector<Node>& mems)
vec_nodes.push_back(mi[0].eqNode(m[0]));
}
d_im.sendInference(
- vec_nodes, mres, Inference::RE_INTER_INFER, false, true);
+ vec_nodes, mres, InferenceId::STRINGS_RE_INTER_INFER, false, true);
// both are reduced
d_im.markReduced(m);
d_im.markReduced(mi);
@@ -542,7 +542,7 @@ bool RegExpSolver::checkPDerivative(
noExplain.push_back(x.eqNode(d_emptyString));
std::vector<Node> iexp = nf_exp;
iexp.insert(iexp.end(), noExplain.begin(), noExplain.end());
- d_im.sendInference(iexp, noExplain, exp, Inference::RE_DELTA);
+ d_im.sendInference(iexp, noExplain, exp, InferenceId::STRINGS_RE_DELTA);
addedLemma = true;
d_regexp_ccached.insert(atom);
return false;
@@ -559,7 +559,7 @@ bool RegExpSolver::checkPDerivative(
noExplain.push_back(x.eqNode(d_emptyString));
std::vector<Node> iexp = nf_exp;
iexp.insert(iexp.end(), noExplain.begin(), noExplain.end());
- d_im.sendInference(iexp, noExplain, d_false, Inference::RE_DELTA_CONF);
+ d_im.sendInference(iexp, noExplain, d_false, InferenceId::STRINGS_RE_DELTA_CONF);
addedLemma = true;
d_regexp_ccached.insert(atom);
return false;
@@ -652,7 +652,7 @@ bool RegExpSolver::deriveRegExp(Node x,
std::vector<Node> noExplain;
noExplain.push_back(atom);
iexp.push_back(atom);
- d_im.sendInference(iexp, noExplain, conc, Inference::RE_DERIVE);
+ d_im.sendInference(iexp, noExplain, conc, InferenceId::STRINGS_RE_DERIVE);
return true;
}
return false;
diff --git a/src/theory/strings/sequences_stats.h b/src/theory/strings/sequences_stats.h
index e35d951f7..e7e45b18f 100644
--- a/src/theory/strings/sequences_stats.h
+++ b/src/theory/strings/sequences_stats.h
@@ -61,12 +61,12 @@ class SequencesStatistics
IntStat d_strategyRuns;
//--------------- inferences
/** Counts the number of applications of each type of inference */
- HistogramStat<Inference> d_inferences;
+ HistogramStat<InferenceId> d_inferences;
/**
* Counts the number of applications of each type of inference that were not
* processed as a proof step. This is a subset of d_inferences.
*/
- HistogramStat<Inference> d_inferencesNoPf;
+ HistogramStat<InferenceId> d_inferencesNoPf;
/**
* Counts the number of applications of each type of context-dependent
* simplification. The sum of this map is equal to the number of EXTF or
diff --git a/src/theory/strings/solver_state.cpp b/src/theory/strings/solver_state.cpp
index f341e681d..b6e9c68f4 100644
--- a/src/theory/strings/solver_state.cpp
+++ b/src/theory/strings/solver_state.cpp
@@ -138,7 +138,7 @@ void SolverState::setPendingPrefixConflictWhen(Node conf)
return;
}
InferInfo iiPrefixConf;
- iiPrefixConf.d_id = Inference::PREFIX_CONFLICT;
+ iiPrefixConf.d_id = InferenceId::STRINGS_PREFIX_CONFLICT;
iiPrefixConf.d_conc = d_false;
utils::flattenOp(AND, conf, iiPrefixConf.d_premises);
setPendingConflict(iiPrefixConf);
diff --git a/src/theory/strings/theory_strings.cpp b/src/theory/strings/theory_strings.cpp
index f25f9e29c..48d461f7a 100644
--- a/src/theory/strings/theory_strings.cpp
+++ b/src/theory/strings/theory_strings.cpp
@@ -921,7 +921,7 @@ void TheoryStrings::checkCodes()
if (!d_state.areEqual(cc, vc))
{
std::vector<Node> emptyVec;
- d_im.sendInference(emptyVec, cc.eqNode(vc), Inference::CODE_PROXY);
+ d_im.sendInference(emptyVec, cc.eqNode(vc), InferenceId::STRINGS_CODE_PROXY);
}
const_codes.push_back(vc);
}
@@ -961,7 +961,7 @@ void TheoryStrings::checkCodes()
deq = Rewriter::rewrite(deq);
d_im.addPendingPhaseRequirement(deq, false);
std::vector<Node> emptyVec;
- d_im.sendInference(emptyVec, inj_lem, Inference::CODE_INJ);
+ d_im.sendInference(emptyVec, inj_lem, InferenceId::STRINGS_CODE_INJ);
}
}
}
generated by cgit on debian on lair
contact matthew@masot.net with questions or feedback