-rw-r--r--  src/theory/arith/arith_preprocess.cpp      |  2
-rw-r--r--  src/theory/arith/nl/iand_solver.cpp        |  5
-rw-r--r--  src/theory/arith/nl/nl_lemma_utils.cpp     |  5
-rw-r--r--  src/theory/arith/nl/nl_lemma_utils.h       |  2
-rw-r--r--  src/theory/arrays/inference_manager.cpp    | 13
-rw-r--r--  src/theory/arrays/inference_manager.h      |  6
-rw-r--r--  src/theory/bags/bag_solver.cpp             | 22
-rw-r--r--  src/theory/bags/infer_info.cpp             | 35
-rw-r--r--  src/theory/bags/infer_info.h               | 12
-rw-r--r--  src/theory/bags/inference_generator.cpp    | 25
-rw-r--r--  src/theory/bags/inference_generator.h      |  6
-rw-r--r--  src/theory/bags/inference_manager.h        |  1
-rw-r--r--  src/theory/bags/theory_bags.cpp            |  2
-rw-r--r--  src/theory/datatypes/inference.cpp         | 19
-rw-r--r--  src/theory/datatypes/inference.h           |  9
-rw-r--r--  src/theory/datatypes/inference_manager.cpp | 46
-rw-r--r--  src/theory/datatypes/inference_manager.h   | 11
-rw-r--r--  src/theory/datatypes/theory_datatypes.cpp  |  3
-rw-r--r--  src/theory/inference_manager_buffered.cpp  | 52
-rw-r--r--  src/theory/inference_manager_buffered.h    | 16
-rw-r--r--  src/theory/sets/term_registry.cpp          |  6
-rw-r--r--  src/theory/strings/infer_info.cpp          | 14
-rw-r--r--  src/theory/strings/infer_info.h            |  6
-rw-r--r--  src/theory/strings/inference_manager.cpp   | 61
-rw-r--r--  src/theory/strings/inference_manager.h     |  4
-rw-r--r--  src/theory/theory_inference.cpp            | 27
-rw-r--r--  src/theory/theory_inference.h              | 62
-rw-r--r--  src/theory/theory_inference_manager.cpp    | 29
-rw-r--r--  src/theory/theory_inference_manager.h      | 34
-rw-r--r--  src/theory/uf/cardinality_extension.cpp    |  6
-rw-r--r--  src/theory/uf/theory_uf.cpp                |  2
31 files changed, 251 insertions, 292 deletions
diff --git a/src/theory/arith/arith_preprocess.cpp b/src/theory/arith/arith_preprocess.cpp
index 3e0937e8b..31eb8bb8f 100644
--- a/src/theory/arith/arith_preprocess.cpp
+++ b/src/theory/arith/arith_preprocess.cpp
@@ -48,7 +48,7 @@ bool ArithPreprocess::reduceAssertion(TNode atom)
Assert(tn.getKind() == TrustNodeKind::REWRITE);
// tn is of kind REWRITE, turn this into a LEMMA here
TrustNode tlem = TrustNode::mkTrustLemma(tn.getProven(), tn.getGenerator());
- // must preprocess
+ // send the trusted lemma
d_im.trustedLemma(tlem, InferenceId::ARITH_PP_ELIM_OPERATORS);
// mark the atom as reduced
d_reduced[atom] = true;
diff --git a/src/theory/arith/nl/iand_solver.cpp b/src/theory/arith/nl/iand_solver.cpp
index 78ffb3c09..a5b4e87bd 100644
--- a/src/theory/arith/nl/iand_solver.cpp
+++ b/src/theory/arith/nl/iand_solver.cpp
@@ -171,12 +171,11 @@ void IAndSolver::checkFullRefine()
Node lem = valueBasedLemma(i);
Trace("iand-lemma")
<< "IAndSolver::Lemma: " << lem << " ; VALUE_REFINE" << std::endl;
- // value lemmas should not contain div/mod so we don't need to tag it with PREPROCESS
+ // send the value lemma
d_im.addPendingLemma(lem,
InferenceId::ARITH_NL_IAND_VALUE_REFINE,
nullptr,
- true,
- LemmaProperty::NONE);
+ true);
}
}
}
diff --git a/src/theory/arith/nl/nl_lemma_utils.cpp b/src/theory/arith/nl/nl_lemma_utils.cpp
index 7cb1da728..1589341ed 100644
--- a/src/theory/arith/nl/nl_lemma_utils.cpp
+++ b/src/theory/arith/nl/nl_lemma_utils.cpp
@@ -22,14 +22,13 @@ namespace theory {
namespace arith {
namespace nl {
-bool NlLemma::process(TheoryInferenceManager* im, bool asLemma)
+TrustNode NlLemma::processLemma(LemmaProperty& p)
{
- bool res = SimpleTheoryLemma::process(im, asLemma);
if (d_nlext != nullptr)
{
d_nlext->processSideEffect(*this);
}
- return res;
+ return SimpleTheoryLemma::processLemma(p);
}
std::ostream& operator<<(std::ostream& out, NlLemma& n)
diff --git a/src/theory/arith/nl/nl_lemma_utils.h b/src/theory/arith/nl/nl_lemma_utils.h
index 277751fe8..b782b33d7 100644
--- a/src/theory/arith/nl/nl_lemma_utils.h
+++ b/src/theory/arith/nl/nl_lemma_utils.h
@@ -52,7 +52,7 @@ class NlLemma : public SimpleTheoryLemma
}
~NlLemma() {}
- bool process(TheoryInferenceManager* im, bool asLemma) override;
+ TrustNode processLemma(LemmaProperty& p) override;
/** secant points to add
*
diff --git a/src/theory/arrays/inference_manager.cpp b/src/theory/arrays/inference_manager.cpp
index a4b8ecd44..96f2b02c3 100644
--- a/src/theory/arrays/inference_manager.cpp
+++ b/src/theory/arrays/inference_manager.cpp
@@ -27,9 +27,10 @@ namespace arrays {
InferenceManager::InferenceManager(Theory& t,
TheoryState& state,
ProofNodeManager* pnm)
- : TheoryInferenceManager(t, state, pnm, "theory::arrays"),
- d_lemmaPg(pnm ? new EagerProofGenerator(
- pnm, state.getUserContext(), "ArrayLemmaProofGenerator")
+ : TheoryInferenceManager(t, state, pnm, "theory::arrays", false),
+ d_lemmaPg(pnm ? new EagerProofGenerator(pnm,
+ state.getUserContext(),
+ "ArrayLemmaProofGenerator")
: nullptr)
{
}
@@ -59,7 +60,7 @@ bool InferenceManager::assertInference(TNode atom,
}
bool InferenceManager::arrayLemma(
- Node conc, InferenceId id, Node exp, PfRule pfr, LemmaProperty p, bool doCache)
+ Node conc, InferenceId id, Node exp, PfRule pfr, LemmaProperty p)
{
Trace("arrays-infer") << "TheoryArrays::arrayLemma: " << conc << " by " << exp
<< "; " << id << std::endl;
@@ -72,11 +73,11 @@ bool InferenceManager::arrayLemma(
convert(pfr, conc, exp, children, args);
// make the trusted lemma based on the eager proof generator and send
TrustNode tlem = d_lemmaPg->mkTrustNode(conc, pfr, children, args);
- return trustedLemma(tlem, id, p, doCache);
+ return trustedLemma(tlem, id, p);
}
// send lemma without proofs
Node lem = nm->mkNode(IMPLIES, exp, conc);
- return lemma(lem, id, p, doCache);
+ return lemma(lem, id, p);
}
void InferenceManager::convert(PfRule& id,
diff --git a/src/theory/arrays/inference_manager.h b/src/theory/arrays/inference_manager.h
index 96af0b677..99e586fe4 100644
--- a/src/theory/arrays/inference_manager.h
+++ b/src/theory/arrays/inference_manager.h
@@ -47,15 +47,13 @@ class InferenceManager : public TheoryInferenceManager
*/
bool assertInference(TNode atom, bool polarity, InferenceId id, TNode reason, PfRule pfr);
/**
- * Send lemma (exp => conc) based on proof rule id with properties p. Cache
- * the lemma if doCache is true.
+ * Send lemma (exp => conc) based on proof rule id with properties p.
*/
bool arrayLemma(Node conc,
InferenceId id,
Node exp,
PfRule pfr,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = false);
+ LemmaProperty p = LemmaProperty::NONE);
private:
/**
diff --git a/src/theory/bags/bag_solver.cpp b/src/theory/bags/bag_solver.cpp
index bdd4a9b30..76c001ba2 100644
--- a/src/theory/bags/bag_solver.cpp
+++ b/src/theory/bags/bag_solver.cpp
@@ -27,7 +27,7 @@ namespace theory {
namespace bags {
BagSolver::BagSolver(SolverState& s, InferenceManager& im, TermRegistry& tr)
- : d_state(s), d_ig(&d_state), d_im(im), d_termReg(tr)
+ : d_state(s), d_ig(&s, &im), d_im(im), d_termReg(tr)
{
d_zero = NodeManager::currentNM()->mkConst(Rational(0));
d_one = NodeManager::currentNM()->mkConst(Rational(1));
@@ -102,7 +102,7 @@ void BagSolver::checkEmpty(const Node& n)
for (const Node& e : d_state.getElements(n))
{
InferInfo i = d_ig.empty(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -113,7 +113,7 @@ void BagSolver::checkUnionDisjoint(const Node& n)
for (const Node& e : elements)
{
InferInfo i = d_ig.unionDisjoint(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -124,7 +124,7 @@ void BagSolver::checkUnionMax(const Node& n)
for (const Node& e : elements)
{
InferInfo i = d_ig.unionMax(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -135,7 +135,7 @@ void BagSolver::checkIntersectionMin(const Node& n)
for (const Node& e : elements)
{
InferInfo i = d_ig.intersection(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -146,7 +146,7 @@ void BagSolver::checkDifferenceSubtract(const Node& n)
for (const Node& e : elements)
{
InferInfo i = d_ig.differenceSubtract(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -159,13 +159,13 @@ void BagSolver::checkMkBag(const Node& n)
for (const Node& e : d_state.getElements(n))
{
InferInfo i = d_ig.mkBag(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
void BagSolver::checkNonNegativeCountTerms(const Node& bag, const Node& element)
{
InferInfo i = d_ig.nonNegativeCount(bag, element);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
void BagSolver::checkDifferenceRemove(const Node& n)
@@ -175,7 +175,7 @@ void BagSolver::checkDifferenceRemove(const Node& n)
for (const Node& e : elements)
{
InferInfo i = d_ig.differenceRemove(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -192,7 +192,7 @@ void BagSolver::checkDuplicateRemoval(Node n)
for (const Node& e : elements)
{
InferInfo i = d_ig.duplicateRemoval(n, e);
- i.process(&d_im, true);
+ d_im.lemmaTheoryInference(&i);
}
}
@@ -201,7 +201,7 @@ void BagSolver::checkDisequalBagTerms()
for (const Node& n : d_state.getDisequalBagTerms())
{
InferInfo info = d_ig.bagDisequality(n);
- info.process(&d_im, true);
+ d_im.lemmaTheoryInference(&info);
}
}
diff --git a/src/theory/bags/infer_info.cpp b/src/theory/bags/infer_info.cpp
index 0655b6bbf..969c6b3de 100644
--- a/src/theory/bags/infer_info.cpp
+++ b/src/theory/bags/infer_info.cpp
@@ -20,39 +20,28 @@ namespace CVC4 {
namespace theory {
namespace bags {
-InferInfo::InferInfo(InferenceId id) : TheoryInference(id) {}
+InferInfo::InferInfo(TheoryInferenceManager* im, InferenceId id)
+ : TheoryInference(id), d_im(im)
+{
+}
-bool InferInfo::process(TheoryInferenceManager* im, bool asLemma)
+TrustNode InferInfo::processLemma(LemmaProperty& p)
{
- Node lemma = d_conclusion;
- if (d_premises.size() >= 2)
- {
- Node andNode = NodeManager::currentNM()->mkNode(kind::AND, d_premises);
- lemma = andNode.impNode(lemma);
- }
- else if (d_premises.size() == 1)
- {
- lemma = d_premises[0].impNode(lemma);
- }
- if (asLemma)
- {
- TrustNode trustedLemma = TrustNode::mkTrustLemma(lemma, nullptr);
- im->trustedLemma(trustedLemma, getId());
- }
- else
- {
- Unimplemented();
- }
+ NodeManager* nm = NodeManager::currentNM();
+ Node pnode = nm->mkAnd(d_premises);
+ Node lemma = nm->mkNode(kind::IMPLIES, pnode, d_conclusion);
+
+ // send lemmas corresponding to the skolems introduced
for (const auto& pair : d_skolems)
{
Node n = pair.first.eqNode(pair.second);
TrustNode trustedLemma = TrustNode::mkTrustLemma(n, nullptr);
- im->trustedLemma(trustedLemma, getId());
+ d_im->trustedLemma(trustedLemma, getId(), p);
}
Trace("bags::InferInfo::process") << (*this) << std::endl;
- return true;
+ return TrustNode::mkTrustLemma(lemma, nullptr);
}
bool InferInfo::isTrivial() const
diff --git a/src/theory/bags/infer_info.h b/src/theory/bags/infer_info.h
index 66d75b5dc..a533acf58 100644
--- a/src/theory/bags/infer_info.h
+++ b/src/theory/bags/infer_info.h
@@ -26,9 +26,11 @@
namespace CVC4 {
namespace theory {
+
+class TheoryInferenceManager;
+
namespace bags {
-class InferenceManager;
/**
* An inference. This is a class to track an unprocessed call to either
@@ -38,10 +40,12 @@ class InferenceManager;
class InferInfo : public TheoryInference
{
public:
- InferInfo(InferenceId id);
+ InferInfo(TheoryInferenceManager* im, InferenceId id);
~InferInfo() {}
- /** Process this inference */
- bool process(TheoryInferenceManager* im, bool asLemma) override;
+ /** Process lemma */
+ TrustNode processLemma(LemmaProperty& p) override;
+ /** Pointer to the class used for processing this info */
+ TheoryInferenceManager* d_im;
/** The conclusion */
Node d_conclusion;
/**
diff --git a/src/theory/bags/inference_generator.cpp b/src/theory/bags/inference_generator.cpp
index 2d4a5afed..bc1ed771c 100644
--- a/src/theory/bags/inference_generator.cpp
+++ b/src/theory/bags/inference_generator.cpp
@@ -23,7 +23,8 @@ namespace CVC4 {
namespace theory {
namespace bags {
-InferenceGenerator::InferenceGenerator(SolverState* state) : d_state(state)
+InferenceGenerator::InferenceGenerator(SolverState* state, InferenceManager* im)
+ : d_state(state), d_im(im)
{
d_nm = NodeManager::currentNM();
d_sm = d_nm->getSkolemManager();
@@ -37,7 +38,7 @@ InferInfo InferenceGenerator::nonNegativeCount(Node n, Node e)
Assert(n.getType().isBag());
Assert(e.getType() == n.getType().getBagElementType());
- InferInfo inferInfo(InferenceId::BAG_NON_NEGATIVE_COUNT);
+ InferInfo inferInfo(d_im, InferenceId::BAG_NON_NEGATIVE_COUNT);
Node count = d_nm->mkNode(kind::BAG_COUNT, e, n);
Node gte = d_nm->mkNode(kind::GEQ, count, d_zero);
@@ -54,7 +55,7 @@ InferInfo InferenceGenerator::mkBag(Node n, Node e)
{
// TODO issue #78: refactor this with BagRewriter
// (=> true (= (bag.count e (bag e c)) c))
- InferInfo inferInfo(InferenceId::BAG_MK_BAG_SAME_ELEMENT);
+ InferInfo inferInfo(d_im, InferenceId::BAG_MK_BAG_SAME_ELEMENT);
Node skolem = getSkolem(n, inferInfo);
Node count = getMultiplicityTerm(e, skolem);
inferInfo.d_conclusion = count.eqNode(n[1]);
@@ -65,7 +66,7 @@ InferInfo InferenceGenerator::mkBag(Node n, Node e)
// (=>
// true
// (= (bag.count e (bag x c)) (ite (= e x) c 0)))
- InferInfo inferInfo(InferenceId::BAG_MK_BAG);
+ InferInfo inferInfo(d_im, InferenceId::BAG_MK_BAG);
Node skolem = getSkolem(n, inferInfo);
Node count = getMultiplicityTerm(e, skolem);
Node same = d_nm->mkNode(kind::EQUAL, n[0], e);
@@ -88,7 +89,7 @@ InferInfo InferenceGenerator::bagDisequality(Node n)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_DISEQUALITY);
+ InferInfo inferInfo(d_im, InferenceId::BAG_DISEQUALITY);
TypeNode elementType = A.getType().getBagElementType();
BoundVarManager* bvm = d_nm->getBoundVarManager();
@@ -121,7 +122,7 @@ InferInfo InferenceGenerator::empty(Node n, Node e)
Assert(n.getKind() == kind::EMPTYBAG);
Assert(e.getType() == n.getType().getBagElementType());
- InferInfo inferInfo(InferenceId::BAG_EMPTY);
+ InferInfo inferInfo(d_im, InferenceId::BAG_EMPTY);
Node skolem = getSkolem(n, inferInfo);
Node count = getMultiplicityTerm(e, skolem);
@@ -137,7 +138,7 @@ InferInfo InferenceGenerator::unionDisjoint(Node n, Node e)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_UNION_DISJOINT);
+ InferInfo inferInfo(d_im, InferenceId::BAG_UNION_DISJOINT);
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -159,7 +160,7 @@ InferInfo InferenceGenerator::unionMax(Node n, Node e)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_UNION_MAX);
+ InferInfo inferInfo(d_im, InferenceId::BAG_UNION_MAX);
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -182,7 +183,7 @@ InferInfo InferenceGenerator::intersection(Node n, Node e)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_INTERSECTION_MIN);
+ InferInfo inferInfo(d_im, InferenceId::BAG_INTERSECTION_MIN);
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -203,7 +204,7 @@ InferInfo InferenceGenerator::differenceSubtract(Node n, Node e)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_DIFFERENCE_SUBTRACT);
+ InferInfo inferInfo(d_im, InferenceId::BAG_DIFFERENCE_SUBTRACT);
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -225,7 +226,7 @@ InferInfo InferenceGenerator::differenceRemove(Node n, Node e)
Node A = n[0];
Node B = n[1];
- InferInfo inferInfo(InferenceId::BAG_DIFFERENCE_REMOVE);
+ InferInfo inferInfo(d_im, InferenceId::BAG_DIFFERENCE_REMOVE);
Node countA = getMultiplicityTerm(e, A);
Node countB = getMultiplicityTerm(e, B);
@@ -246,7 +247,7 @@ InferInfo InferenceGenerator::duplicateRemoval(Node n, Node e)
Assert(e.getType() == n[0].getType().getBagElementType());
Node A = n[0];
- InferInfo inferInfo(InferenceId::BAG_DUPLICATE_REMOVAL);
+ InferInfo inferInfo(d_im, InferenceId::BAG_DUPLICATE_REMOVAL);
Node countA = getMultiplicityTerm(e, A);
Node skolem = getSkolem(n, inferInfo);
diff --git a/src/theory/bags/inference_generator.h b/src/theory/bags/inference_generator.h
index 4a852530a..f10a1051f 100644
--- a/src/theory/bags/inference_generator.h
+++ b/src/theory/bags/inference_generator.h
@@ -22,6 +22,7 @@
#include "expr/node.h"
#include "infer_info.h"
+#include "theory/bags/inference_manager.h"
#include "theory/bags/solver_state.h"
namespace CVC4 {
@@ -35,7 +36,7 @@ namespace bags {
class InferenceGenerator
{
public:
- InferenceGenerator(SolverState* state);
+ InferenceGenerator(SolverState* state, InferenceManager* im);
/**
* @param A is a bag of type (Bag E)
@@ -179,6 +180,9 @@ class InferenceGenerator
NodeManager* d_nm;
SkolemManager* d_sm;
SolverState* d_state;
+ /** Pointer to the inference manager */
+ InferenceManager* d_im;
+ /** Commonly used constants */
Node d_true;
Node d_zero;
Node d_one;
diff --git a/src/theory/bags/inference_manager.h b/src/theory/bags/inference_manager.h
index 71a014582..1b132fc37 100644
--- a/src/theory/bags/inference_manager.h
+++ b/src/theory/bags/inference_manager.h
@@ -17,6 +17,7 @@
#ifndef CVC4__THEORY__BAGS__INFERENCE_MANAGER_H
#define CVC4__THEORY__BAGS__INFERENCE_MANAGER_H
+#include "theory/bags/infer_info.h"
#include "theory/bags/solver_state.h"
#include "theory/inference_manager_buffered.h"
diff --git a/src/theory/bags/theory_bags.cpp b/src/theory/bags/theory_bags.cpp
index 6df44295e..48fc38b8f 100644
--- a/src/theory/bags/theory_bags.cpp
+++ b/src/theory/bags/theory_bags.cpp
@@ -31,7 +31,7 @@ TheoryBags::TheoryBags(context::Context* c,
: Theory(THEORY_BAGS, c, u, out, valuation, logicInfo, pnm),
d_state(c, u, valuation),
d_im(*this, d_state, nullptr),
- d_ig(&d_state),
+ d_ig(&d_state, &d_im),
d_notify(*this, d_im),
d_statistics(),
d_rewriter(&d_statistics.d_rewrites),
diff --git a/src/theory/datatypes/inference.cpp b/src/theory/datatypes/inference.cpp
index d03bb0f2f..04b1072c3 100644
--- a/src/theory/datatypes/inference.cpp
+++ b/src/theory/datatypes/inference.cpp
@@ -61,16 +61,21 @@ bool DatatypesInference::mustCommunicateFact(Node n, Node exp)
return false;
}
-bool DatatypesInference::process(TheoryInferenceManager* im, bool asLemma)
+TrustNode DatatypesInference::processLemma(LemmaProperty& p)
{
- // Check to see if we have to communicate it to the rest of the system.
- // The flag asLemma is true when the inference was marked that it must be
- // sent as a lemma.
- if (asLemma)
+ // we don't pass lemma property p currently, as it is always default
+ return d_im->processDtLemma(d_conc, d_exp, getId());
+}
+
+Node DatatypesInference::processFact(std::vector<Node>& exp,
+ ProofGenerator*& pg)
+{
+ // add to the explanation vector if applicable (when non-trivial)
+ if (!d_exp.isNull() && !d_exp.isConst())
{
- return d_im->processDtLemma(d_conc, d_exp, getId());
+ exp.push_back(d_exp);
}
- return d_im->processDtFact(d_conc, d_exp, getId());
+ return d_im->processDtFact(d_conc, d_exp, getId(), pg);
}
} // namespace datatypes
diff --git a/src/theory/datatypes/inference.h b/src/theory/datatypes/inference.h
index d31f7b839..03fe7ad14 100644
--- a/src/theory/datatypes/inference.h
+++ b/src/theory/datatypes/inference.h
@@ -57,11 +57,10 @@ class DatatypesInference : public SimpleTheoryInternalFact
* set to true.
*/
static bool mustCommunicateFact(Node n, Node exp);
- /**
- * Process this fact, possibly as a fact or as a lemma, depending on the
- * above method.
- */
- bool process(TheoryInferenceManager* im, bool asLemma) override;
+ /** Process lemma */
+ TrustNode processLemma(LemmaProperty& p) override;
+ /** Process internal fact */
+ Node processFact(std::vector<Node>& exp, ProofGenerator*& pg) override;
private:
/** Pointer to the inference manager */
diff --git a/src/theory/datatypes/inference_manager.cpp b/src/theory/datatypes/inference_manager.cpp
index b4a536b14..cf93009bb 100644
--- a/src/theory/datatypes/inference_manager.cpp
+++ b/src/theory/datatypes/inference_manager.cpp
@@ -79,10 +79,7 @@ void InferenceManager::process()
doPendingFacts();
}
-void InferenceManager::sendDtLemma(Node lem,
- InferenceId id,
- LemmaProperty p,
- bool doCache)
+void InferenceManager::sendDtLemma(Node lem, InferenceId id, LemmaProperty p)
{
if (isProofEnabled())
{
@@ -90,7 +87,7 @@ void InferenceManager::sendDtLemma(Node lem,
return;
}
// otherwise send as a normal lemma
- if (lemma(lem, id, p, doCache))
+ if (lemma(lem, id, p))
{
d_inferenceLemmas << id;
}
@@ -122,8 +119,7 @@ bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas)
bool InferenceManager::isProofEnabled() const { return d_ipc != nullptr; }
-bool InferenceManager::processDtLemma(
- Node conc, Node exp, InferenceId id, LemmaProperty p, bool doCache)
+TrustNode InferenceManager::processDtLemma(Node conc, Node exp, InferenceId id)
{
// set up a proof constructor
std::shared_ptr<InferProofCons> ipcl;
@@ -156,38 +152,16 @@ bool InferenceManager::processDtLemma(
d_lemPg->setProofFor(lem, pn);
}
// use trusted lemma
- TrustNode tlem = TrustNode::mkTrustLemma(lem, d_lemPg.get());
- if (!trustedLemma(tlem, id))
- {
- Trace("dt-lemma-debug") << "...duplicate lemma" << std::endl;
- return false;
- }
- d_inferenceLemmas << id;
- return true;
+ return TrustNode::mkTrustLemma(lem, d_lemPg.get());
}
-bool InferenceManager::processDtFact(Node conc, Node exp, InferenceId id)
+Node InferenceManager::processDtFact(Node conc,
+ Node exp,
+ InferenceId id,
+ ProofGenerator*& pg)
{
- conc = prepareDtInference(conc, exp, id, d_ipc.get());
- // assert the internal fact, which has the same issue as above
- bool polarity = conc.getKind() != NOT;
- TNode atom = polarity ? conc : conc[0];
- if (isProofEnabled())
- {
- std::vector<Node> expv;
- if (!exp.isNull() && !exp.isConst())
- {
- expv.push_back(exp);
- }
- assertInternalFact(atom, polarity, id, expv, d_ipc.get());
- }
- else
- {
- // use version without proofs
- assertInternalFact(atom, polarity, id, exp);
- }
- d_inferenceFacts << id;
- return true;
+ pg = d_ipc.get();
+ return prepareDtInference(conc, exp, id, d_ipc.get());
}
Node InferenceManager::prepareDtInference(Node conc,
diff --git a/src/theory/datatypes/inference_manager.h b/src/theory/datatypes/inference_manager.h
index 683b696c9..9c9a2bcb5 100644
--- a/src/theory/datatypes/inference_manager.h
+++ b/src/theory/datatypes/inference_manager.h
@@ -67,8 +67,7 @@ class InferenceManager : public InferenceManagerBuffered
*/
void sendDtLemma(Node lem,
InferenceId i = InferenceId::UNKNOWN,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Send conflict immediately on the output channel
*/
@@ -85,15 +84,11 @@ class InferenceManager : public InferenceManagerBuffered
/**
* Process datatype inference as a lemma
*/
- bool processDtLemma(Node conc,
- Node exp,
- InferenceId id,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ TrustNode processDtLemma(Node conc, Node exp, InferenceId id);
/**
* Process datatype inference as a fact
*/
- bool processDtFact(Node conc, Node exp, InferenceId id);
+ Node processDtFact(Node conc, Node exp, InferenceId id, ProofGenerator*& pg);
/**
* Helper function for the above methods. Returns the conclusion, which
* may be modified so that it is compatible with proofs. If proofs are
diff --git a/src/theory/datatypes/theory_datatypes.cpp b/src/theory/datatypes/theory_datatypes.cpp
index dc7bdc369..8945fb735 100644
--- a/src/theory/datatypes/theory_datatypes.cpp
+++ b/src/theory/datatypes/theory_datatypes.cpp
@@ -326,8 +326,7 @@ void TheoryDatatypes::postCheck(Effort level)
Trace("dt-split-debug") << "Split lemma is : " << lemma << std::endl;
d_im.sendDtLemma(lemma,
InferenceId::DATATYPES_SPLIT,
- LemmaProperty::SEND_ATOMS,
- false);
+ LemmaProperty::SEND_ATOMS);
}
if( !options::dtBlastSplits() ){
break;
diff --git a/src/theory/inference_manager_buffered.cpp b/src/theory/inference_manager_buffered.cpp
index f4ede969a..0c143f265 100644
--- a/src/theory/inference_manager_buffered.cpp
+++ b/src/theory/inference_manager_buffered.cpp
@@ -25,8 +25,9 @@ namespace theory {
InferenceManagerBuffered::InferenceManagerBuffered(Theory& t,
TheoryState& state,
ProofNodeManager* pnm,
- const std::string& name)
- : TheoryInferenceManager(t, state, pnm, name),
+ const std::string& name,
+ bool cacheLemmas)
+ : TheoryInferenceManager(t, state, pnm, name, cacheLemmas),
d_processingPendingLemmas(false)
{
}
@@ -99,9 +100,9 @@ void InferenceManagerBuffered::doPendingFacts()
size_t i = 0;
while (!d_theoryState.isInConflict() && i < d_pendingFact.size())
{
- // process this fact, which notice may enqueue more pending facts in this
- // loop.
- d_pendingFact[i]->process(this, false);
+ // assert the internal fact, which notice may enqueue more pending facts in
+ // this loop, or result in a conflict.
+ assertInternalFactTheoryInference(d_pendingFact[i].get());
i++;
}
d_pendingFact.clear();
@@ -120,7 +121,7 @@ void InferenceManagerBuffered::doPendingLemmas()
{
// process this lemma, which notice may enqueue more pending lemmas in this
// loop, or clear the lemmas.
- d_pendingLem[i]->process(this, true);
+ lemmaTheoryInference(d_pendingLem[i].get());
i++;
}
d_pendingLem.clear();
@@ -149,13 +150,40 @@ void InferenceManagerBuffered::clearPendingPhaseRequirements()
d_pendingReqPhase.clear();
}
+std::size_t InferenceManagerBuffered::numPendingLemmas() const
+{
+ return d_pendingLem.size();
+}
+std::size_t InferenceManagerBuffered::numPendingFacts() const
+{
+ return d_pendingFact.size();
+}
- std::size_t InferenceManagerBuffered::numPendingLemmas() const {
- return d_pendingLem.size();
- }
- std::size_t InferenceManagerBuffered::numPendingFacts() const {
- return d_pendingFact.size();
- }
+void InferenceManagerBuffered::lemmaTheoryInference(TheoryInference* lem)
+{
+ // process this lemma
+ LemmaProperty p = LemmaProperty::NONE;
+ TrustNode tlem = lem->processLemma(p);
+ Assert(!tlem.isNull());
+ // send the lemma
+ trustedLemma(tlem, lem->getId(), p);
+}
+
+void InferenceManagerBuffered::assertInternalFactTheoryInference(
+ TheoryInference* fact)
+{
+ // process this fact
+ std::vector<Node> exp;
+ ProofGenerator* pg = nullptr;
+ Node lit = fact->processFact(exp, pg);
+ Assert(!lit.isNull());
+ bool pol = lit.getKind() != NOT;
+ TNode atom = pol ? lit : lit[0];
+ // no double negation or conjunctive conclusions
+ Assert(atom.getKind() != NOT && atom.getKind() != AND);
+ // assert the internal fact
+ assertInternalFact(atom, pol, fact->getId(), exp, pg);
+}
} // namespace theory
} // namespace CVC4
diff --git a/src/theory/inference_manager_buffered.h b/src/theory/inference_manager_buffered.h
index d12bd40b3..985bad3bc 100644
--- a/src/theory/inference_manager_buffered.h
+++ b/src/theory/inference_manager_buffered.h
@@ -35,7 +35,8 @@ class InferenceManagerBuffered : public TheoryInferenceManager
InferenceManagerBuffered(Theory& t,
TheoryState& state,
ProofNodeManager* pnm,
- const std::string& name);
+ const std::string& name,
+ bool cacheLemmas = true);
virtual ~InferenceManagerBuffered() {}
/**
* Do we have a pending fact or lemma?
@@ -145,6 +146,19 @@ class InferenceManagerBuffered : public TheoryInferenceManager
/** Returns the number of pending facts. */
std::size_t numPendingFacts() const;
+ /**
+ * Send the given theory inference as a lemma on the output channel of this
+ * inference manager. This calls TheoryInferenceManager::trustedLemma based
+ * on the provided theory inference.
+ */
+ void lemmaTheoryInference(TheoryInference* lem);
+ /**
+ * Add the given theory inference as an internal fact. This calls
+ * TheoryInferenceManager::assertInternalFact based on the provided theory
+ * inference.
+ */
+ void assertInternalFactTheoryInference(TheoryInference* fact);
+
protected:
/** A set of pending inferences to be processed as lemmas */
std::vector<std::unique_ptr<TheoryInference>> d_pendingLem;
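For reference, the immediate (non-buffered) use of the new helper mirrors the call sites in the bags solver earlier in this patch. A minimal call-site sketch, assuming a theory solver that holds a reference d_im to this buffered manager and an inference generator d_ig (both names follow the bags solver and are not introduced here):

  // Build a TheoryInference and hand it to the manager, which calls
  // processLemma on it and forwards the resulting TrustNode to trustedLemma
  // with the property chosen by the inference itself.
  InferInfo info = d_ig.empty(n, e);
  d_im.lemmaTheoryInference(&info);
  // doPendingFacts() uses assertInternalFactTheoryInference analogously for
  // inferences that were buffered as facts.
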
diff --git a/src/theory/sets/term_registry.cpp b/src/theory/sets/term_registry.cpp
index 464846b1a..777ca2d9e 100644
--- a/src/theory/sets/term_registry.cpp
+++ b/src/theory/sets/term_registry.cpp
@@ -53,13 +53,13 @@ Node TermRegistry::getProxy(Node n)
d_proxy_to_term[k] = n;
Node eq = k.eqNode(n);
Trace("sets-lemma") << "Sets::Lemma : " << eq << " by proxy" << std::endl;
- d_im.lemma(eq, InferenceId::SETS_PROXY, LemmaProperty::NONE, false);
+ d_im.lemma(eq, InferenceId::SETS_PROXY);
if (nk == SINGLETON)
{
Node slem = nm->mkNode(MEMBER, n[0], k);
Trace("sets-lemma") << "Sets::Lemma : " << slem << " by singleton"
<< std::endl;
- d_im.lemma(slem, InferenceId::SETS_PROXY_SINGLETON, LemmaProperty::NONE, false);
+ d_im.lemma(slem, InferenceId::SETS_PROXY_SINGLETON);
}
return k;
}
@@ -104,7 +104,7 @@ Node TermRegistry::getUnivSet(TypeNode tn)
Node ulem = nm->mkNode(SUBSET, n1, n2);
Trace("sets-lemma") << "Sets::Lemma : " << ulem << " by univ-type"
<< std::endl;
- d_im.lemma(ulem, InferenceId::SETS_UNIV_TYPE, LemmaProperty::NONE, false);
+ d_im.lemma(ulem, InferenceId::SETS_UNIV_TYPE);
}
}
d_univset[tn] = n;
diff --git a/src/theory/strings/infer_info.cpp b/src/theory/strings/infer_info.cpp
index 7242c58bc..6d87aa944 100644
--- a/src/theory/strings/infer_info.cpp
+++ b/src/theory/strings/infer_info.cpp
@@ -25,13 +25,19 @@ InferInfo::InferInfo(InferenceId id): TheoryInference(id), d_sim(nullptr), d_idR
{
}
-bool InferInfo::process(TheoryInferenceManager* im, bool asLemma)
+TrustNode InferInfo::processLemma(LemmaProperty& p)
{
- if (asLemma)
+ return d_sim->processLemma(*this, p);
+}
+
+Node InferInfo::processFact(std::vector<Node>& exp, ProofGenerator*& pg)
+{
+ for (const Node& ec : d_premises)
{
- return d_sim->processLemma(*this);
+ utils::flattenOp(kind::AND, ec, exp);
}
- return d_sim->processFact(*this);
+ d_sim->processFact(*this, pg);
+ return d_conc;
}
bool InferInfo::isTrivial() const
diff --git a/src/theory/strings/infer_info.h b/src/theory/strings/infer_info.h
index d697f42ae..176d32e5b 100644
--- a/src/theory/strings/infer_info.h
+++ b/src/theory/strings/infer_info.h
@@ -74,8 +74,10 @@ class InferInfo : public TheoryInference
public:
InferInfo(InferenceId id);
~InferInfo() {}
- /** Process this inference */
- bool process(TheoryInferenceManager* im, bool asLemma) override;
+ /** Process lemma */
+ TrustNode processLemma(LemmaProperty& p) override;
+ /** Process internal fact */
+ Node processFact(std::vector<Node>& exp, ProofGenerator*& pg) override;
/** Pointer to the class used for processing this info */
InferenceManager* d_sim;
/** Whether it is the reverse form of the above id */
diff --git a/src/theory/strings/inference_manager.cpp b/src/theory/strings/inference_manager.cpp
index a161c7854..242bf3ab1 100644
--- a/src/theory/strings/inference_manager.cpp
+++ b/src/theory/strings/inference_manager.cpp
@@ -34,7 +34,7 @@ InferenceManager::InferenceManager(Theory& t,
ExtTheory& e,
SequencesStatistics& statistics,
ProofNodeManager* pnm)
- : InferenceManagerBuffered(t, s, pnm, "theory::strings"),
+ : InferenceManagerBuffered(t, s, pnm, "theory::strings", false),
d_state(s),
d_termReg(tr),
d_extt(e),
@@ -301,64 +301,23 @@ void InferenceManager::processConflict(const InferInfo& ii)
trustedConflict(tconf, ii.getId());
}
-bool InferenceManager::processFact(InferInfo& ii)
+void InferenceManager::processFact(InferInfo& ii, ProofGenerator*& pg)
{
- // Get the fact(s). There are multiple facts if the conclusion is an AND
- std::vector<Node> facts;
- if (ii.d_conc.getKind() == AND)
- {
- for (const Node& cc : ii.d_conc)
- {
- facts.push_back(cc);
- }
- }
- else
- {
- facts.push_back(ii.d_conc);
- }
Trace("strings-assert") << "(assert (=> " << ii.getPremises() << " "
<< ii.d_conc << ")) ; fact " << ii.getId() << std::endl;
Trace("strings-lemma") << "Strings::Fact: " << ii.d_conc << " from "
<< ii.getPremises() << " by " << ii.getId()
<< std::endl;
- std::vector<Node> exp;
- for (const Node& ec : ii.d_premises)
- {
- utils::flattenOp(AND, ec, exp);
- }
- bool ret = false;
- // convert for each fact
- for (const Node& fact : facts)
+ if (d_ipc != nullptr)
{
- ii.d_conc = fact;
- bool polarity = fact.getKind() != NOT;
- TNode atom = polarity ? fact : fact[0];
- bool curRet = false;
- if (d_ipc != nullptr)
- {
- // ensure the proof generator is ready to explain this fact in the
- // current SAT context
- d_ipc->notifyFact(ii);
- // now, assert the internal fact with d_ipc as proof generator
- curRet = assertInternalFact(atom, polarity, ii.getId(), exp, d_ipc.get());
- }
- else
- {
- Node cexp = utils::mkAnd(exp);
- // without proof generator
- curRet = assertInternalFact(atom, polarity, ii.getId(), cexp);
- }
- ret = ret || curRet;
- // may be in conflict
- if (d_state.isInConflict())
- {
- break;
- }
+ // ensure the proof generator is ready to explain this fact in the
+ // current SAT context
+ d_ipc->notifyFact(ii);
+ pg = d_ipc.get();
}
- return ret;
}
-bool InferenceManager::processLemma(InferInfo& ii)
+TrustNode InferenceManager::processLemma(InferInfo& ii, LemmaProperty& p)
{
Assert(!ii.isTrivial());
Assert(!ii.isConflict());
@@ -405,7 +364,6 @@ bool InferenceManager::processLemma(InferInfo& ii)
d_termReg.registerTermAtomic(n, sks.first);
}
}
- LemmaProperty p = LemmaProperty::NONE;
if (ii.getId() == InferenceId::STRINGS_REDUCTION)
{
p |= LemmaProperty::NEEDS_JUSTIFY;
@@ -416,8 +374,7 @@ bool InferenceManager::processLemma(InferInfo& ii)
<< ii.getId() << std::endl;
++(d_statistics.d_lemmasInfer);
- // call the trusted lemma, without caching
- return trustedLemma(tlem, ii.getId(), p, false);
+ return tlem;
}
} // namespace strings
diff --git a/src/theory/strings/inference_manager.h b/src/theory/strings/inference_manager.h
index f16ce7183..6d2d7f419 100644
--- a/src/theory/strings/inference_manager.h
+++ b/src/theory/strings/inference_manager.h
@@ -249,9 +249,9 @@ class InferenceManager : public InferenceManagerBuffered
private:
/** Called when ii is ready to be processed as a fact */
- bool processFact(InferInfo& ii);
+ void processFact(InferInfo& ii, ProofGenerator*& pg);
/** Called when ii is ready to be processed as a lemma */
- bool processLemma(InferInfo& ii);
+ TrustNode processLemma(InferInfo& ii, LemmaProperty& p);
/** Reference to the solver state of the theory of strings. */
SolverState& d_state;
/** Reference to the term registry of theory of strings */
diff --git a/src/theory/theory_inference.cpp b/src/theory/theory_inference.cpp
index 8ccbb7e1f..1089cdff3 100644
--- a/src/theory/theory_inference.cpp
+++ b/src/theory/theory_inference.cpp
@@ -29,12 +29,11 @@ SimpleTheoryLemma::SimpleTheoryLemma(InferenceId id,
{
}
-bool SimpleTheoryLemma::process(TheoryInferenceManager* im, bool asLemma)
+TrustNode SimpleTheoryLemma::processLemma(LemmaProperty& p)
{
Assert(!d_node.isNull());
- Assert(asLemma);
- // send (trusted) lemma on the output channel with property p
- return im->trustedLemma(TrustNode::mkTrustLemma(d_node, d_pg), getId(), d_property);
+ p = d_property;
+ return TrustNode::mkTrustLemma(d_node, d_pg);
}
SimpleTheoryInternalFact::SimpleTheoryInternalFact(InferenceId id,
@@ -45,22 +44,12 @@ SimpleTheoryInternalFact::SimpleTheoryInternalFact(InferenceId id,
{
}
-bool SimpleTheoryInternalFact::process(TheoryInferenceManager* im, bool asLemma)
+Node SimpleTheoryInternalFact::processFact(std::vector<Node>& exp,
+ ProofGenerator*& pg)
{
- Assert(!asLemma);
- bool polarity = d_conc.getKind() != NOT;
- TNode atom = polarity ? d_conc : d_conc[0];
- // no double negation or conjunctive conclusions
- Assert(atom.getKind() != NOT && atom.getKind() != AND);
- if (d_pg != nullptr)
- {
- im->assertInternalFact(atom, polarity, getId(), {d_exp}, d_pg);
- }
- else
- {
- im->assertInternalFact(atom, polarity, getId(), d_exp);
- }
- return true;
+ exp.push_back(d_exp);
+ pg = d_pg;
+ return d_conc;
}
} // namespace theory
diff --git a/src/theory/theory_inference.h b/src/theory/theory_inference.h
index 9cf787886..847b61786 100644
--- a/src/theory/theory_inference.h
+++ b/src/theory/theory_inference.h
@@ -37,31 +37,33 @@ class TheoryInference
public:
TheoryInference(InferenceId id) : d_id(id) {}
virtual ~TheoryInference() {}
+
+ /**
+ * Process lemma, return the trust node to pass to
+ * TheoryInferenceManager::trustedLemma. In addition, the inference should
+ * process any internal side effects of the lemma.
+ *
+ * @param p The property of the lemma which will be passed to trustedLemma
+ * for this inference. If this call does not update p, the default value will
+ * be used.
+ * @return The trust node (of kind TrustNodeKind::LEMMA) corresponding to the
+ * lemma and its proof generator.
+ */
+ virtual TrustNode processLemma(LemmaProperty& p) { return TrustNode::null(); }
/**
- * Called by the provided inference manager to process this inference. This
- * method should make call(s) to inference manager to process this
- * inference, as well as processing any specific side effects. This method
- * typically makes a single call to the provided inference manager e.g.
- * d_im->trustedLemma or d_im->assertInternalFact. Notice it is the sole
- * responsibility of this class to make this call; the inference manager
- * does not call itself otherwise when processing pending inferences.
+ * Process internal fact, return the conclusion to pass to
+ * TheoryInferenceManager::assertInternalFact. In addition, the inference
+ * should process any internal side effects of the fact.
*
- * @param im The inference manager to use
- * @param asLemma Whether this inference is being processed as a lemma
- * during doPendingLemmas (otherwise it is being processed in doPendingFacts).
- * Typically, this method calls lemma or conflict when asLemma is
- * true, and assertInternalFact when this flag is false, although this
- * behavior is (intentionally) not strictly enforced. In particular, the
- * choice to send a conflict, lemma or fact may depend on local state of the
- * theory, which may change while the inference is pending. Hence the
- * implementation of this method may choose to implement any call to the
- * inference manager. This flag simply serves to track whether the inference
- * initially was added a pending lemma or not.
- * @return true if the inference was successfully processed by the inference
- * manager. This method for instance returns false if it corresponds to a
- * lemma that was already cached by im and hence was discarded.
+ * @param exp The explanation for the returned conclusion. Each node added to
+ * exp should be a (conjunction of) literals that hold in the current equality
+ * engine.
+ * @return The (possibly negated) conclusion.
*/
- virtual bool process(TheoryInferenceManager* im, bool asLemma) = 0;
+ virtual Node processFact(std::vector<Node>& exp, ProofGenerator*& pg)
+ {
+ return Node::null();
+ }
/** Get the InferenceId of this theory inference. */
InferenceId getId() const { return d_id; }
@@ -81,12 +83,8 @@ class SimpleTheoryLemma : public TheoryInference
public:
SimpleTheoryLemma(InferenceId id, Node n, LemmaProperty p, ProofGenerator* pg);
virtual ~SimpleTheoryLemma() {}
- /**
- * Send the lemma using inference manager im, return true if the lemma was
- * sent. It should be the case that asLemma = true or an assertion failure
- * is thrown.
- */
- virtual bool process(TheoryInferenceManager* im, bool asLemma) override;
+ /** Process lemma */
+ TrustNode processLemma(LemmaProperty& p) override;
/** The lemma to send */
Node d_node;
/** The lemma property (see OutputChannel::lemma) */
@@ -109,12 +107,8 @@ class SimpleTheoryInternalFact : public TheoryInference
public:
SimpleTheoryInternalFact(InferenceId id, Node conc, Node exp, ProofGenerator* pg);
virtual ~SimpleTheoryInternalFact() {}
- /**
- * Send the lemma using inference manager im, return true if the lemma was
- * sent. It should be the case that asLemma = false or an assertion failure
- * is thrown.
- */
- virtual bool process(TheoryInferenceManager* im, bool asLemma) override;
+ /** Process internal fact */
+ Node processFact(std::vector<Node>& exp, ProofGenerator*& pg) override;
/** The lemma to send */
Node d_conc;
/** The explanation */
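The two virtual methods documented above can be implemented by theory-specific inference classes in the same way as SimpleTheoryLemma and SimpleTheoryInternalFact. A minimal sketch, assuming the surrounding CVC4 theory headers are available; the class name and members are illustrative and not part of this patch:

  // Hypothetical inference that can be flushed either as a lemma or as an
  // internal fact, depending on which pending queue it was added to.
  class ExampleInference : public TheoryInference
  {
   public:
    ExampleInference(InferenceId id, Node conc, Node exp, ProofGenerator* pg)
        : TheoryInference(id), d_conc(conc), d_exp(exp), d_pg(pg)
    {
    }
    TrustNode processLemma(LemmaProperty& p) override
    {
      // leave p at its default; send (=> exp conc) as the lemma
      Node lem = NodeManager::currentNM()->mkNode(kind::IMPLIES, d_exp, d_conc);
      return TrustNode::mkTrustLemma(lem, d_pg);
    }
    Node processFact(std::vector<Node>& exp, ProofGenerator*& pg) override
    {
      // the explanation must hold in the current equality engine
      exp.push_back(d_exp);
      pg = d_pg;
      return d_conc;
    }

   private:
    Node d_conc;
    Node d_exp;
    ProofGenerator* d_pg;
  };
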
diff --git a/src/theory/theory_inference_manager.cpp b/src/theory/theory_inference_manager.cpp
index a1c1545bf..53a812653 100644
--- a/src/theory/theory_inference_manager.cpp
+++ b/src/theory/theory_inference_manager.cpp
@@ -26,12 +26,14 @@ namespace theory {
TheoryInferenceManager::TheoryInferenceManager(Theory& t,
TheoryState& state,
ProofNodeManager* pnm,
- const std::string& name)
+ const std::string& name,
+ bool cacheLemmas)
: d_theory(t),
d_theoryState(state),
d_out(t.getOutputChannel()),
d_ee(nullptr),
d_pnm(pnm),
+ d_cacheLemmas(cacheLemmas),
d_keep(t.getSatContext()),
d_lemmasSent(t.getUserContext()),
d_numConflicts(0),
@@ -226,21 +228,19 @@ TrustNode TheoryInferenceManager::explainConflictEqConstantMerge(TNode a,
<< " mkTrustedConflictEqConstantMerge";
}
-bool TheoryInferenceManager::lemma(TNode lem,
- InferenceId id,
- LemmaProperty p,
- bool doCache)
+bool TheoryInferenceManager::lemma(TNode lem, InferenceId id, LemmaProperty p)
{
TrustNode tlem = TrustNode::mkTrustLemma(lem, nullptr);
- return trustedLemma(tlem, id, p, doCache);
+ return trustedLemma(tlem, id, p);
}
bool TheoryInferenceManager::trustedLemma(const TrustNode& tlem,
InferenceId id,
- LemmaProperty p,
- bool doCache)
+ LemmaProperty p)
{
- if (doCache)
+ // if the policy says to cache lemmas, check the cache and return false if
+ // we are a duplicate
+ if (d_cacheLemmas)
{
if (!cacheLemma(tlem.getNode(), p))
{
@@ -259,13 +259,12 @@ bool TheoryInferenceManager::lemmaExp(Node conc,
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
const std::vector<Node>& args,
- LemmaProperty p,
- bool doCache)
+ LemmaProperty p)
{
// make the trust node
TrustNode trn = mkLemmaExp(conc, pfr, exp, noExplain, args);
// send it on the output channel
- return trustedLemma(trn, id, p, doCache);
+ return trustedLemma(trn, id, p);
}
TrustNode TheoryInferenceManager::mkLemmaExp(Node conc,
@@ -290,13 +289,12 @@ bool TheoryInferenceManager::lemmaExp(Node conc,
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
ProofGenerator* pg,
- LemmaProperty p,
- bool doCache)
+ LemmaProperty p)
{
// make the trust node
TrustNode trn = mkLemmaExp(conc, exp, noExplain, pg);
// send it on the output channel
- return trustedLemma(trn, id, p, doCache);
+ return trustedLemma(trn, id, p);
}
TrustNode TheoryInferenceManager::mkLemmaExp(Node conc,
@@ -358,7 +356,6 @@ bool TheoryInferenceManager::assertInternalFact(TNode atom,
const std::vector<Node>& exp,
ProofGenerator* pg)
{
- Assert(pg != nullptr);
d_factIdStats << id;
return processInternalFact(atom, pol, PfRule::ASSUME, exp, {}, pg);
}
diff --git a/src/theory/theory_inference_manager.h b/src/theory/theory_inference_manager.h
index 0142f814a..d3a317cbc 100644
--- a/src/theory/theory_inference_manager.h
+++ b/src/theory/theory_inference_manager.h
@@ -69,11 +69,23 @@ class TheoryInferenceManager
public:
/**
* Constructor, note that state should be the official state of theory t.
+ *
+ * @param t The theory this inference manager is for
+ * @param state The state of the theory
+ * @param pnm The proof node manager, which if non-null, enables proofs for
+ * this inference manager
+ * @param name The name of the inference manager, which is used for giving
+ * unique names for statistics,
+ * @param cacheLemmas Whether all lemmas sent using this theory inference
+ * manager are added to a user-context dependent cache. This means that
+ * only lemmas that are unique after rewriting are sent to the theory engine
+ * from this inference manager.
*/
TheoryInferenceManager(Theory& t,
TheoryState& state,
ProofNodeManager* pnm,
- const std::string& name);
+ const std::string& name,
+ bool cacheLemmas = true);
virtual ~TheoryInferenceManager();
/**
* Set equality engine, ee is a pointer to the official equality engine
@@ -183,22 +195,16 @@ class TheoryInferenceManager
*
* @param tlem The trust node containing the lemma and its proof generator.
* @param p The property of the lemma
- * @param doCache If true, we send the lemma only if it has not already been
- * cached (see cacheLemma), and add it to the cache during this call.
* @return true if the lemma was sent on the output channel.
*/
bool trustedLemma(const TrustNode& tlem,
InferenceId id,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Send lemma lem with property p on the output channel. Same as above, with
* a node instead of a trust node.
*/
- bool lemma(TNode lem,
- InferenceId id,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ bool lemma(TNode lem, InferenceId id, LemmaProperty p = LemmaProperty::NONE);
/**
* Explained lemma. This should be called when
* ( exp => conc )
@@ -219,7 +225,6 @@ class TheoryInferenceManager
* equality engine
* @param args The arguments to the proof step concluding conc
* @param p The property of the lemma
- * @param doCache Whether to check and add the lemma to the cache
* @return true if the lemma was sent on the output channel.
*/
bool lemmaExp(Node conc,
@@ -228,8 +233,7 @@ class TheoryInferenceManager
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
const std::vector<Node>& args,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Make the trust node for the above method. It is responsibility of the
* caller to subsequently call trustedLemma with the returned trust node.
@@ -251,7 +255,6 @@ class TheoryInferenceManager
* equality engine
* @param pg If non-null, the proof generator who can provide a proof of conc.
* @param p The property of the lemma
- * @param doCache Whether to check and add the lemma to the cache
* @return true if the lemma was sent on the output channel.
*/
bool lemmaExp(Node conc,
@@ -259,8 +262,7 @@ class TheoryInferenceManager
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
ProofGenerator* pg = nullptr,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Make the trust node for the above method. It is responsibility of the
* caller to subsequently call trustedLemma with the returned trust node.
@@ -417,6 +419,8 @@ class TheoryInferenceManager
std::unique_ptr<eq::ProofEqEngine> d_pfee;
/** The proof node manager of the theory */
ProofNodeManager* d_pnm;
+ /** Whether this manager caches lemmas */
+ bool d_cacheLemmas;
/**
* The keep set of this class. This set is maintained to ensure that
* facts and their explanations are ref-counted. Since facts and their
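To illustrate the cacheLemmas policy documented in the constructor above: when caching is enabled (the default), trustedLemma consults the user-context-dependent cache and returns false for a lemma that was already sent after rewriting. A hypothetical sketch (the helper function and the use of InferenceId::UNKNOWN are illustrative only):

  void sendTwice(TheoryInferenceManager& im, TNode lem)
  {
    // first call rewrites, caches and sends the lemma (assuming it is new)
    im.lemma(lem, InferenceId::UNKNOWN);
    // second call finds the rewritten lemma in the cache and is discarded
    bool again = im.lemma(lem, InferenceId::UNKNOWN);
    Assert(!again);
  }
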
diff --git a/src/theory/uf/cardinality_extension.cpp b/src/theory/uf/cardinality_extension.cpp
index 861da3558..aa73e3419 100644
--- a/src/theory/uf/cardinality_extension.cpp
+++ b/src/theory/uf/cardinality_extension.cpp
@@ -1179,7 +1179,7 @@ bool SortModel::checkLastCall()
Node lem = NodeManager::currentNM()->mkNode(
OR, cl, NodeManager::currentNM()->mkAnd(force_cl));
Trace("uf-ss-lemma") << "*** Enforce negative cardinality constraint lemma : " << lem << std::endl;
- d_im.lemma(lem, InferenceId::UF_CARD_ENFORCE_NEGATIVE, LemmaProperty::NONE, false);
+ d_im.lemma(lem, InferenceId::UF_CARD_ENFORCE_NEGATIVE);
return false;
}
}
@@ -1399,7 +1399,7 @@ void CardinalityExtension::assertNode(Node n, bool isDecision)
Node eqv_lit = NodeManager::currentNM()->mkNode( CARDINALITY_CONSTRAINT, ct, lit[1] );
eqv_lit = lit.eqNode( eqv_lit );
Trace("uf-ss-lemma") << "*** Cardinality equiv lemma : " << eqv_lit << std::endl;
- d_im.lemma(eqv_lit, InferenceId::UF_CARD_EQUIV, LemmaProperty::NONE, false);
+ d_im.lemma(eqv_lit, InferenceId::UF_CARD_EQUIV);
d_card_assertions_eqv_lemma[lit] = true;
}
}
@@ -1528,7 +1528,7 @@ void CardinalityExtension::check(Theory::Effort level)
Node eq = Rewriter::rewrite( a.eqNode( b ) );
Node lem = NodeManager::currentNM()->mkNode( kind::OR, eq, eq.negate() );
Trace("uf-ss-lemma") << "*** Split (no-minimal) : " << lem << std::endl;
- d_im.lemma(lem, InferenceId::UF_CARD_SPLIT, LemmaProperty::NONE, false);
+ d_im.lemma(lem, InferenceId::UF_CARD_SPLIT);
d_im.requirePhase(eq, true);
type_proc[tn] = true;
break;
diff --git a/src/theory/uf/theory_uf.cpp b/src/theory/uf/theory_uf.cpp
index 0328080ee..db8b89d89 100644
--- a/src/theory/uf/theory_uf.cpp
+++ b/src/theory/uf/theory_uf.cpp
@@ -51,7 +51,7 @@ TheoryUF::TheoryUF(context::Context* c,
d_functionsTerms(c),
d_symb(u, instanceName),
d_state(c, u, valuation),
- d_im(*this, d_state, pnm, "theory::uf"),
+ d_im(*this, d_state, pnm, "theory::uf", false),
d_notify(d_im, *this)
{
d_true = NodeManager::currentNM()->mkConst( true );