Diffstat (limited to 'src')
 src/CMakeLists.txt                          |   2
 src/theory/datatypes/inference_manager.cpp  |  99
 src/theory/datatypes/inference_manager.h    |  59
 src/theory/inference_manager_buffered.cpp   |  49
 src/theory/inference_manager_buffered.h     |  56
 src/theory/theory_inference.cpp             |  63
 src/theory/theory_inference.h               | 106
 src/theory/theory_inference_manager.cpp     |  83
 src/theory/theory_inference_manager.h       |  72
 9 files changed, 438 insertions, 151 deletions
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 2a7fd11a8..052479624 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -827,6 +827,8 @@ libcvc4_add_sources(
theory/theory_engine_proof_generator.h
theory/theory_id.cpp
theory/theory_id.h
+ theory/theory_inference.cpp
+ theory/theory_inference.h
theory/theory_inference_manager.cpp
theory/theory_inference_manager.h
theory/theory_model.cpp
diff --git a/src/theory/datatypes/inference_manager.cpp b/src/theory/datatypes/inference_manager.cpp
index 42cad0b65..3dc16355b 100644
--- a/src/theory/datatypes/inference_manager.cpp
+++ b/src/theory/datatypes/inference_manager.cpp
@@ -24,20 +24,20 @@ namespace CVC4 {
namespace theory {
namespace datatypes {
-InferenceManager::InferenceManager(Theory& t,
- TheoryState& state,
- ProofNodeManager* pnm)
- : InferenceManagerBuffered(t, state, pnm)
+DatatypesInference::DatatypesInference(Node conc, Node exp, ProofGenerator* pg)
+ : SimpleTheoryInternalFact(conc, exp, pg)
{
- d_true = NodeManager::currentNM()->mkConst(true);
+ // false is not a valid explanation
+ Assert(d_exp.isNull() || !d_exp.isConst() || d_exp.getConst<bool>());
}
-bool InferenceManager::mustCommunicateFact(Node n, Node exp) const
+bool DatatypesInference::mustCommunicateFact(Node n, Node exp)
{
Trace("dt-lemma-debug") << "Compute for " << exp << " => " << n << std::endl;
bool addLemma = false;
- if (options::dtInferAsLemmas() && exp != d_true)
+ if (options::dtInferAsLemmas() && !exp.isConst())
{
+ // all units are lemmas
addLemma = true;
}
else if (n.getKind() == EQUAL)
@@ -66,61 +66,46 @@ bool InferenceManager::mustCommunicateFact(Node n, Node exp) const
return false;
}
-void InferenceManager::process()
+bool DatatypesInference::process(TheoryInferenceManager* im)
{
- // process pending lemmas, used infrequently, only for definitional lemmas
- doPendingLemmas();
- // now process the pending facts
- size_t i = 0;
- NodeManager* nm = NodeManager::currentNM();
- while (!d_theoryState.isInConflict() && i < d_pendingFact.size())
+ // check to see if we have to communicate it to the rest of the system
+ if (mustCommunicateFact(d_conc, d_exp))
{
- std::pair<Node, Node>& pfact = d_pendingFact[i];
- const Node& fact = pfact.first;
- const Node& exp = pfact.second;
- Trace("datatypes-debug")
- << "Assert fact (#" << (i + 1) << "/" << d_pendingFact.size() << ") "
- << fact << " with explanation " << exp << std::endl;
- // check to see if we have to communicate it to the rest of the system
- if (mustCommunicateFact(fact, exp))
- {
- Node lem = fact;
- if (exp.isNull() || exp == d_true)
- {
- Trace("dt-lemma-debug") << "Trivial explanation." << std::endl;
- }
- else
- {
- Trace("dt-lemma-debug") << "Get explanation..." << std::endl;
- std::vector<TNode> assumptions;
- explain(exp, assumptions);
- if (!assumptions.empty())
- {
- std::vector<Node> children;
- for (const TNode& assumption : assumptions)
- {
- children.push_back(assumption.negate());
- }
- children.push_back(fact);
- lem = nm->mkNode(OR, children);
- }
- }
- Trace("dt-lemma") << "Datatypes lemma : " << lem << std::endl;
- lemma(lem);
- }
- else
+ // send it as an (explained) lemma
+ std::vector<Node> exp;
+ if (!d_exp.isNull() && !d_exp.isConst())
{
- // assert the internal fact
- bool polarity = fact.getKind() != NOT;
- TNode atom = polarity ? fact : fact[0];
- assertInternalFact(atom, polarity, exp);
+ exp.push_back(d_exp);
}
- Trace("datatypes-debug") << "Finished fact " << fact
- << ", now = " << d_theoryState.isInConflict()
- << " " << d_pendingFact.size() << std::endl;
- i++;
+ return im->lemmaExp(d_conc, exp, {});
}
- d_pendingFact.clear();
+ // assert the internal fact
+ bool polarity = d_conc.getKind() != NOT;
+ TNode atom = polarity ? d_conc : d_conc[0];
+ im->assertInternalFact(atom, polarity, d_exp);
+ return true;
+}
+
+InferenceManager::InferenceManager(Theory& t,
+ TheoryState& state,
+ ProofNodeManager* pnm)
+ : InferenceManagerBuffered(t, state, pnm)
+{
+}
+
+void InferenceManager::addPendingInference(Node conc,
+ Node exp,
+ ProofGenerator* pg)
+{
+ d_pendingFact.push_back(std::make_shared<DatatypesInference>(conc, exp, pg));
+}
+
+void InferenceManager::process()
+{
+ // process pending lemmas, used infrequently, only for definitional lemmas
+ doPendingLemmas();
+ // now process the pending facts
+ doPendingFacts();
}
} // namespace datatypes
diff --git a/src/theory/datatypes/inference_manager.h b/src/theory/datatypes/inference_manager.h
index 786cd8129..0dda3259e 100644
--- a/src/theory/datatypes/inference_manager.h
+++ b/src/theory/datatypes/inference_manager.h
@@ -26,29 +26,14 @@ namespace theory {
namespace datatypes {
/**
- * The datatypes inference manager. The main unique features of this inference
- * manager are:
- * (1) Explicit caching of lemmas,
- * (2) A custom process() method with relies on a policy determining which
- * facts must be sent as lemmas (mustCommunicateFact).
- * (3) Methods for tracking when lemmas and facts have been processed.
+ * A custom inference class. The main feature of this class is that it
+ * dynamically decides whether to process itself as a fact or as a lemma,
+ * based on the mustCommunicateFact method below.
*/
-class InferenceManager : public InferenceManagerBuffered
+class DatatypesInference : public SimpleTheoryInternalFact
{
- typedef context::CDHashSet<Node, NodeHashFunction> NodeSet;
-
public:
- InferenceManager(Theory& t, TheoryState& state, ProofNodeManager* pnm);
- ~InferenceManager() {}
- /**
- * Process the current lemmas and facts. This is a custom method that can
- * be seen as overriding the behavior of calling both doPendingLemmas and
- * doPendingFacts. It determines whether facts should be sent as lemmas
- * or processed internally.
- */
- void process();
-
- protected:
+ DatatypesInference(Node conc, Node exp, ProofGenerator* pg);
/**
* Must communicate fact method.
* The datatypes decision procedure makes "internal" inferences :
@@ -65,9 +50,37 @@ class InferenceManager : public InferenceManagerBuffered
* communicate outwards if the conclusions involve other theories. Also
* communicate (6) and OR conclusions.
*/
- bool mustCommunicateFact(Node n, Node exp) const;
- /** Common node */
- Node d_true;
+ static bool mustCommunicateFact(Node n, Node exp);
+ /**
+ * Process this inference, either as an internal fact or as a lemma,
+ * depending on the above method.
+ */
+ bool process(TheoryInferenceManager* im) override;
+};
+
+/**
+ * The datatypes inference manager, which uses the above class for
+ * inferences.
+ */
+class InferenceManager : public InferenceManagerBuffered
+{
+ typedef context::CDHashSet<Node, NodeHashFunction> NodeSet;
+
+ public:
+ InferenceManager(Theory& t, TheoryState& state, ProofNodeManager* pnm);
+ ~InferenceManager() {}
+ /**
+ * Add pending inference, which may be processed as either a fact or
+ * a lemma based on mustCommunicateFact in DatatypesInference above.
+ */
+ void addPendingInference(Node conc, Node exp, ProofGenerator* pg = nullptr);
+ /**
+ * Process the current lemmas and facts. This is a custom method that can
+ * be seen as overriding the behavior of calling both doPendingLemmas and
+ * doPendingFacts. It determines whether facts should be sent as lemmas
+ * or processed internally.
+ */
+ void process();
};
} // namespace datatypes
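
For orientation, a minimal caller-side sketch of the new datatypes API, assuming a theory solver component that owns a datatypes::InferenceManager; the class, member, and method names below are illustrative, not part of the patch:

  // Sketch of a hypothetical datatypes-style solver component using the
  // buffered inference API; class and member names are illustrative.
  class DatatypesSolverSketch
  {
   public:
    DatatypesSolverSketch(datatypes::InferenceManager& im) : d_im(im) {}

    void reportInference(Node conc, Node exp)
    {
      // Whether this becomes an internal fact or a lemma is decided later
      // by DatatypesInference::process via mustCommunicateFact.
      d_im.addPendingInference(conc, exp);
    }

    void postCheck()
    {
      // Sends buffered definitional lemmas to the output channel, then
      // processes buffered inferences as lemmas or internal facts.
      d_im.process();
    }

   private:
    datatypes::InferenceManager& d_im;
  };

The design choice here is that the lemma-versus-fact decision is deferred to processing time, so the solver code that derives inferences no longer needs to special-case which conclusions must leave the theory.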
diff --git a/src/theory/inference_manager_buffered.cpp b/src/theory/inference_manager_buffered.cpp
index 14f6c4f4a..a1cb9c4e9 100644
--- a/src/theory/inference_manager_buffered.cpp
+++ b/src/theory/inference_manager_buffered.cpp
@@ -49,18 +49,30 @@ void InferenceManagerBuffered::addPendingLemma(Node lem,
LemmaProperty p,
ProofGenerator* pg)
{
- d_pendingLem.push_back(std::make_shared<Lemma>(lem, p, pg));
+ // make the simple theory lemma
+ d_pendingLem.push_back(std::make_shared<SimpleTheoryLemma>(lem, p, pg));
}
-void InferenceManagerBuffered::addPendingLemma(std::shared_ptr<Lemma> lemma)
+void InferenceManagerBuffered::addPendingLemma(
+ std::shared_ptr<TheoryInference> lemma)
{
d_pendingLem.emplace_back(std::move(lemma));
}
-void InferenceManagerBuffered::addPendingFact(Node fact, Node exp)
+void InferenceManagerBuffered::addPendingFact(Node conc,
+ Node exp,
+ ProofGenerator* pg)
{
- Assert(fact.getKind() != AND && fact.getKind() != OR);
- d_pendingFact.push_back(std::pair<Node, Node>(fact, exp));
+ // make a simple theory internal fact
+ Assert(conc.getKind() != AND && conc.getKind() != OR);
+ d_pendingFact.push_back(
+ std::make_shared<SimpleTheoryInternalFact>(conc, exp, pg));
+}
+
+void InferenceManagerBuffered::addPendingFact(
+ std::shared_ptr<TheoryInference> fact)
+{
+ d_pendingFact.emplace_back(std::move(fact));
}
void InferenceManagerBuffered::addPendingPhaseRequirement(Node lit, bool pol)
@@ -75,14 +87,9 @@ void InferenceManagerBuffered::doPendingFacts()
size_t i = 0;
while (!d_theoryState.isInConflict() && i < d_pendingFact.size())
{
- std::pair<Node, Node>& pfact = d_pendingFact[i];
- Node fact = pfact.first;
- Node exp = pfact.second;
- bool polarity = fact.getKind() != NOT;
- TNode atom = polarity ? fact : fact[0];
- // no double negation or conjunctive conclusions
- Assert(atom.getKind() != NOT && atom.getKind() != AND);
- assertInternalFact(atom, polarity, exp);
+ // process this fact; note that this may enqueue further pending facts
+ // within this loop.
+ d_pendingFact[i]->process(this);
i++;
}
d_pendingFact.clear();
@@ -90,20 +97,10 @@ void InferenceManagerBuffered::doPendingFacts()
void InferenceManagerBuffered::doPendingLemmas()
{
- // process all the pending lemmas
- for (const std::shared_ptr<Lemma>& plem : d_pendingLem)
+ for (const std::shared_ptr<TheoryInference>& plem : d_pendingLem)
{
- if (!plem->notifySend())
- {
- // the lemma indicated that it should not be sent after all
- continue;
- }
- Node lem = plem->d_node;
- LemmaProperty p = plem->d_property;
- ProofGenerator* pg = plem->d_pg;
- Assert(!lem.isNull());
- // send (trusted) lemma on the output channel with property p
- trustedLemma(TrustNode::mkTrustLemma(lem, pg), p);
+ // process this lemma
+ plem->process(this);
}
d_pendingLem.clear();
}
diff --git a/src/theory/inference_manager_buffered.h b/src/theory/inference_manager_buffered.h
index 596a346de..e6e7822c5 100644
--- a/src/theory/inference_manager_buffered.h
+++ b/src/theory/inference_manager_buffered.h
@@ -19,45 +19,13 @@
#include "context/cdhashmap.h"
#include "expr/node.h"
+#include "theory/theory_inference.h"
#include "theory/theory_inference_manager.h"
namespace CVC4 {
namespace theory {
/**
- * A lemma base class. This class is an abstract data structure for storing
- * pending lemmas in the inference manager below.
- */
-class Lemma
-{
- public:
- Lemma(Node n, LemmaProperty p, ProofGenerator* pg)
- : d_node(n), d_property(p), d_pg(pg)
- {
- }
- virtual ~Lemma() {}
- /**
- * Called just before this lemma is sent on the output channel. The purpose
- * of this callback is to do any specific process of the lemma, e.g. take
- * debug statistics, cache, etc.
- *
- * @return true if the lemma should be sent on the output channel.
- */
- virtual bool notifySend() { return true; }
- /** The lemma to send */
- Node d_node;
- /** The lemma property (see OutputChannel::lemma) */
- LemmaProperty d_property;
- /**
- * The proof generator for this lemma, which if non-null, is wrapped in a
- * TrustNode to be set on the output channel via trustedLemma at the time
- * the lemma is sent. This proof generator must be able to provide a proof
- * for d_node in the remainder of the user context.
- */
- ProofGenerator* d_pg;
-};
-
-/**
* The buffered inference manager. This class implements standard methods
* for buffering facts, lemmas and phase requirements.
*/
@@ -92,21 +60,25 @@ class InferenceManagerBuffered : public TheoryInferenceManager
ProofGenerator* pg = nullptr);
/**
* Add pending lemma, where lemma can be a (derived) class of the
- * above one. Pending lemmas are sent to the output channel using
- * doPendingLemmas.
+ * theory inference base class.
*/
- void addPendingLemma(std::shared_ptr<Lemma> lemma);
+ void addPendingLemma(std::shared_ptr<TheoryInference> lemma);
/**
* Add pending fact, which adds a fact on the pending fact queue. It must
* be the case that:
- * (1) exp => fact is valid,
+ * (1) exp => conc is valid,
* (2) exp is a literal (or conjunction of literals) that holds in the
* equality engine of the theory.
*
* Pending facts are sent to the equality engine of this class using
* doPendingFacts.
*/
- void addPendingFact(Node fact, Node exp);
+ void addPendingFact(Node conc, Node exp, ProofGenerator* pg = nullptr);
+ /**
+ * Add pending fact, where fact can be an instance of a (derived) class of
+ * the theory inference base class.
+ */
+ void addPendingFact(std::shared_ptr<TheoryInference> fact);
/** Add pending phase requirement
*
* This method is called to indicate this class should send a phase
@@ -153,10 +125,10 @@ class InferenceManagerBuffered : public TheoryInferenceManager
void clearPendingPhaseRequirements();
protected:
- /** A set of pending lemmas */
- std::vector<std::shared_ptr<Lemma>> d_pendingLem;
- /** A set of pending facts, paired with their explanations */
- std::vector<std::pair<Node, Node>> d_pendingFact;
+ /** A set of pending inferences to be processed as lemmas */
+ std::vector<std::shared_ptr<TheoryInference>> d_pendingLem;
+ /** A set of pending inferences to be processed as facts */
+ std::vector<std::shared_ptr<TheoryInference>> d_pendingFact;
/** A map from literals to their pending phase requirement */
std::map<Node, bool> d_pendingReqPhase;
};
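
A hedged usage sketch of the simple forms of the two queues, assuming a theory component that holds a reference to an InferenceManagerBuffered (or a subclass); the wrapper class and method names are hypothetical:

  // Sketch of a hypothetical client of InferenceManagerBuffered.
  class BufferedClientSketch
  {
   public:
    BufferedClientSketch(InferenceManagerBuffered& im) : d_im(im) {}

    void notifyEntailed(Node conc, Node exp)
    {
      // Contract: exp => conc is valid and exp already holds in the theory's
      // equality engine; the fact is asserted there when doPendingFacts runs.
      d_im.addPendingFact(conc, exp);
    }

    void notifyLemma(Node lem)
    {
      // Wrapped in a SimpleTheoryLemma and sent when doPendingLemmas runs.
      d_im.addPendingLemma(lem, LemmaProperty::NONE, nullptr);
    }

    void flush()
    {
      d_im.doPendingLemmas();
      d_im.doPendingFacts();
    }

   private:
    InferenceManagerBuffered& d_im;
  };

Buffering both kinds of inference behind TheoryInference objects is what lets the datatypes manager above reuse this class unchanged while supplying its own inference type.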
diff --git a/src/theory/theory_inference.cpp b/src/theory/theory_inference.cpp
new file mode 100644
index 000000000..618dc640b
--- /dev/null
+++ b/src/theory/theory_inference.cpp
@@ -0,0 +1,63 @@
+/********************* */
+/*! \file theory_inference.cpp
+ ** \verbatim
+ ** Top contributors (to current version):
+ ** Andrew Reynolds
+ ** This file is part of the CVC4 project.
+ ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
+ ** in the top-level source directory) and their institutional affiliations.
+ ** All rights reserved. See the file COPYING in the top-level source
+ ** directory for licensing information.\endverbatim
+ **
+ ** \brief The theory inference utility
+ **/
+
+#include "theory/theory_inference.h"
+
+#include "theory/theory_inference_manager.h"
+
+using namespace CVC4::kind;
+
+namespace CVC4 {
+namespace theory {
+
+SimpleTheoryLemma::SimpleTheoryLemma(Node n,
+ LemmaProperty p,
+ ProofGenerator* pg)
+ : d_node(n), d_property(p), d_pg(pg)
+{
+}
+
+bool SimpleTheoryLemma::process(TheoryInferenceManager* im)
+{
+ Assert(!d_node.isNull());
+ // send (trusted) lemma on the output channel with property p
+ return im->trustedLemma(TrustNode::mkTrustLemma(d_node, d_pg), d_property);
+}
+
+SimpleTheoryInternalFact::SimpleTheoryInternalFact(Node conc,
+ Node exp,
+ ProofGenerator* pg)
+ : d_conc(conc), d_exp(exp), d_pg(pg)
+{
+}
+
+bool SimpleTheoryInternalFact::process(TheoryInferenceManager* im)
+{
+ bool polarity = d_conc.getKind() != NOT;
+ TNode atom = polarity ? d_conc : d_conc[0];
+ // no double negation or conjunctive conclusions
+ Assert(atom.getKind() != NOT && atom.getKind() != AND);
+ if (d_pg != nullptr)
+ {
+ im->assertInternalFact(atom, polarity, {d_exp}, d_pg);
+ }
+ else
+ {
+ im->assertInternalFact(atom, polarity, d_exp);
+ }
+ return true;
+}
+
+} // namespace theory
+} // namespace CVC4
diff --git a/src/theory/theory_inference.h b/src/theory/theory_inference.h
new file mode 100644
index 000000000..8d98051bf
--- /dev/null
+++ b/src/theory/theory_inference.h
@@ -0,0 +1,106 @@
+/********************* */
+/*! \file theory_inference.h
+ ** \verbatim
+ ** Top contributors (to current version):
+ ** Andrew Reynolds
+ ** This file is part of the CVC4 project.
+ ** Copyright (c) 2009-2020 by the authors listed in the file AUTHORS
+ ** in the top-level source directory) and their institutional affiliations.
+ ** All rights reserved. See the file COPYING in the top-level source
+ ** directory for licensing information.\endverbatim
+ **
+ ** \brief The theory inference utility
+ **/
+
+#include "cvc4_private.h"
+
+#ifndef CVC4__THEORY__THEORY_INFERENCE_H
+#define CVC4__THEORY__THEORY_INFERENCE_H
+
+#include "expr/node.h"
+#include "theory/output_channel.h"
+
+namespace CVC4 {
+namespace theory {
+
+class TheoryInferenceManager;
+
+/**
+ * A theory inference base class. This class is an abstract data structure for
+ * storing pending lemmas or facts in the buffered inference manager. It can
+ * be seen a single use object capturing instructions for making a single
+ * call to TheoryInferenceManager for lemmas or facts.
+ */
+class TheoryInference
+{
+ public:
+ virtual ~TheoryInference() {}
+ /**
+ * Called by the provided inference manager to process this inference. This
+ * method should make call(s) to the inference manager to process this
+ * inference, as well as performing any inference-specific side effects. This
+ * method typically makes a single call to the provided inference manager,
+ * e.g. im->trustedLemma or im->assertInternalFact. Notice it is the sole
+ * responsibility of this class to make this call; the inference manager
+ * makes no such call on its own behalf when processing pending inferences.
+ *
+ * @return true if the inference was successfully processed by the inference
+ * manager. This method for instance returns false if it corresponds to a
+ * lemma that was already cached by im and hence was discarded.
+ */
+ virtual bool process(TheoryInferenceManager* im) = 0;
+};
+
+/**
+ * A simple theory lemma with no side effects. Makes a single call to
+ * trustedLemma in its process method.
+ */
+class SimpleTheoryLemma : public TheoryInference
+{
+ public:
+ SimpleTheoryLemma(Node n, LemmaProperty p, ProofGenerator* pg);
+ virtual ~SimpleTheoryLemma() {}
+ /**
+ * Send the lemma using inference manager im, return true if the lemma was
+ * sent.
+ */
+ virtual bool process(TheoryInferenceManager* im) override;
+ /** The lemma to send */
+ Node d_node;
+ /** The lemma property (see OutputChannel::lemma) */
+ LemmaProperty d_property;
+ /**
+ * The proof generator for this lemma, which if non-null, is wrapped in a
+ * TrustNode to be set on the output channel via trustedLemma at the time
+ * the lemma is sent. This proof generator must be able to provide a proof
+ * for d_node in the remainder of the user context.
+ */
+ ProofGenerator* d_pg;
+};
+
+/**
+ * A simple internal fact with no side effects. Makes a single call to
+ * assertInternalFact in its process method.
+ */
+class SimpleTheoryInternalFact : public TheoryInference
+{
+ public:
+ SimpleTheoryInternalFact(Node conc, Node exp, ProofGenerator* pg);
+ virtual ~SimpleTheoryInternalFact() {}
+ /**
+ * Assert the fact using inference manager im, return true if the fact was
+ * successfully asserted.
+ */
+ virtual bool process(TheoryInferenceManager* im) override;
+ /** The conclusion of the fact to assert */
+ Node d_conc;
+ /** The explanation */
+ Node d_exp;
+ /** The proof generator */
+ ProofGenerator* d_pg;
+};
+
+} // namespace theory
+} // namespace CVC4
+
+#endif
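
The base class is meant to be subclassed when an inference needs side effects beyond what the two simple classes above provide. A hedged sketch of such a subclass follows; the class name, the logging side effect, and the explicit LemmaProperty/doCache arguments are illustrative choices, not part of the patch:

  // Hypothetical subclass: sends a lemma and records it only if it was sent.
  class RecordingLemma : public TheoryInference
  {
   public:
    RecordingLemma(Node lem, std::vector<Node>& sentLog)
        : d_lem(lem), d_sentLog(sentLog)
    {
    }
    bool process(TheoryInferenceManager* im) override
    {
      // The single call into the inference manager: a trusted lemma with no
      // proof generator attached.
      bool sent = im->trustedLemma(TrustNode::mkTrustLemma(d_lem, nullptr),
                                   LemmaProperty::NONE,
                                   true);
      if (sent)
      {
        // Inference-specific side effect, run only when the lemma was sent.
        d_sentLog.push_back(d_lem);
      }
      return sent;
    }

   private:
    /** The lemma to send */
    Node d_lem;
    /** Where to record lemmas that were actually sent */
    std::vector<Node>& d_sentLog;
  };

Such an object would be handed to InferenceManagerBuffered::addPendingLemma(std::shared_ptr<TheoryInference>) and processed when doPendingLemmas runs.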
diff --git a/src/theory/theory_inference_manager.cpp b/src/theory/theory_inference_manager.cpp
index 20c1fbf37..3abe530f1 100644
--- a/src/theory/theory_inference_manager.cpp
+++ b/src/theory/theory_inference_manager.cpp
@@ -84,6 +84,27 @@ void TheoryInferenceManager::trustedConflict(TrustNode tconf)
}
}
+void TheoryInferenceManager::conflictExp(PfRule id,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& args)
+{
+ if (!d_theoryState.isInConflict())
+ {
+ if (d_pfee != nullptr)
+ {
+ // use proof equality engine to construct the trust node
+ TrustNode tconf = d_pfee->assertConflict(id, exp, args);
+ d_out.trustedConflict(tconf);
+ }
+ else
+ {
+ // version without proofs
+ Node conf = mkExplainPartial(exp, {});
+ conflict(conf);
+ }
+ }
+}
+
bool TheoryInferenceManager::propagateLit(TNode lit)
{
// If already in conflict, no more propagation
@@ -123,7 +144,7 @@ TrustNode TheoryInferenceManager::explainConflictEqConstantMerge(TNode a,
Node lit = a.eqNode(b);
if (d_pfee != nullptr)
{
- return d_pfee->explain(lit);
+ return d_pfee->assertConflict(lit);
}
if (d_ee != nullptr)
{
@@ -156,6 +177,45 @@ bool TheoryInferenceManager::trustedLemma(const TrustNode& tlem,
return true;
}
+bool TheoryInferenceManager::lemmaExp(Node conc,
+ PfRule id,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& noExplain,
+ const std::vector<Node>& args,
+ LemmaProperty p,
+ bool doCache)
+{
+ if (d_pfee != nullptr)
+ {
+ // make the trust node from the proof equality engine
+ TrustNode trn = d_pfee->assertLemma(conc, id, exp, noExplain, args);
+ return trustedLemma(trn, p, doCache);
+ }
+ // otherwise, not using proofs, explain and send lemma
+ Node ant = mkExplainPartial(exp, noExplain);
+ Node lem = NodeManager::currentNM()->mkNode(kind::IMPLIES, ant, conc);
+ return lemma(lem, p, doCache);
+}
+
+bool TheoryInferenceManager::lemmaExp(Node conc,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& noExplain,
+ ProofGenerator* pg,
+ LemmaProperty p,
+ bool doCache)
+{
+ if (d_pfee != nullptr)
+ {
+ // make the trust node from the proof equality engine
+ TrustNode trn = d_pfee->assertLemma(conc, exp, noExplain, pg);
+ return trustedLemma(trn, p, doCache);
+ }
+ // otherwise, not using proofs, explain and send lemma
+ Node ant = mkExplainPartial(exp, noExplain);
+ Node lem = NodeManager::currentNM()->mkNode(kind::IMPLIES, ant, conc);
+ return lemma(lem, p, doCache);
+}
+
bool TheoryInferenceManager::hasCachedLemma(TNode lem, LemmaProperty p)
{
return d_lemmasSent.find(lem) != d_lemmasSent.end();
@@ -280,6 +340,27 @@ Node TheoryInferenceManager::mkExplain(TNode n)
return NodeManager::currentNM()->mkAnd(assumptions);
}
+Node TheoryInferenceManager::mkExplainPartial(
+ const std::vector<Node>& exp, const std::vector<Node>& noExplain)
+{
+ std::vector<TNode> assumps;
+ for (const Node& e : exp)
+ {
+ if (std::find(noExplain.begin(), noExplain.end(), e) != noExplain.end())
+ {
+ if (std::find(assumps.begin(), assumps.end(), e) == assumps.end())
+ {
+ // a non-explained literal
+ assumps.push_back(e);
+ }
+ continue;
+ }
+ // otherwise, explain it
+ explain(e, assumps);
+ }
+ return NodeManager::currentNM()->mkAnd(assumps);
+}
+
uint32_t TheoryInferenceManager::numAddedFacts() const
{
return d_numCurrentFacts;
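
To make the proof-free fallback of lemmaExp concrete, here is a small worked example of the shape mkExplainPartial produces; the literal names are illustrative. Suppose exp = { x = y, P } and noExplain = { P }. The equality x = y is replaced by the official equality engine's explanation of it, while P is kept verbatim, so the node returned is

  AND( EXPLAIN(x = y), P )

and the lemma ultimately sent by lemmaExp is

  IMPLIES( AND( EXPLAIN(x = y), P ), conc )

where EXPLAIN(x = y) stands for the conjunction of assertions the equality engine returns for that literal.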
diff --git a/src/theory/theory_inference_manager.h b/src/theory/theory_inference_manager.h
index d5c0fe1b2..2ddcd0985 100644
--- a/src/theory/theory_inference_manager.h
+++ b/src/theory/theory_inference_manager.h
@@ -125,6 +125,16 @@ class TheoryInferenceManager
* been provided in a custom way.
*/
void trustedConflict(TrustNode tconf);
+ /**
+ * Explain and send conflict from contradictory facts. This method is called
+ * when the proof rule id with premises exp and arguments args concludes
+ * false. This method sends a trusted conflict corresponding to the official
+ * equality engine's explanation of literals in exp, with the proof equality
+ * engine as the proof generator (if it exists).
+ */
+ void conflictExp(PfRule id,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& args);
//--------------------------------------- lemmas
/**
* Send (trusted) lemma lem with property p on the output channel.
@@ -146,6 +156,56 @@ class TheoryInferenceManager
LemmaProperty p = LemmaProperty::NONE,
bool doCache = true);
/**
+ * Explained lemma. This should be called when
+ * ( exp => conc )
+ * is a valid theory lemma. This method adds a lemma where part of exp
+ * is replaced by its explanation according to the official equality engine
+ * of the theory.
+ *
+ * In particular, this method adds a lemma on the output channel of the form
+ * ( ^_{e in exp \ noExplain} EXPLAIN(e) ^ noExplain ) => conc
+ * where EXPLAIN(e) returns the explanation of literal e according to the
+ * official equality engine of the theory. Note that noExplain is the *subset*
+ * of exp that should not be explained.
+ *
+ * @param conc The conclusion of the lemma
+ * @param id The proof rule concluding conc
+ * @param exp The set of (all) literals that imply conc
+ * @param noExplain The subset of exp that should not be explained by the
+ * equality engine
+ * @param args The arguments to the proof step concluding conc
+ * @param p The property of the lemma
+ * @param doCache Whether to check and add the lemma to the cache
+ * @return true if the lemma was sent on the output channel.
+ */
+ bool lemmaExp(Node conc,
+ PfRule id,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& noExplain,
+ const std::vector<Node>& args,
+ LemmaProperty p = LemmaProperty::NONE,
+ bool doCache = true);
+ /**
+ * Same as above, but where pg can provide a proof of conc from free
+ * assumptions in exp. It is required to do so in the remainder of the user
+ * context when this method returns true.
+ *
+ * @param conc The conclusion of the lemma
+ * @param exp The set of (all) literals that imply conc
+ * @param noExplain The subset of exp that should not be explained by the
+ * equality engine
+ * @param pg If non-null, the proof generator who can provide a proof of conc.
+ * @param p The property of the lemma
+ * @param doCache Whether to check and add the lemma to the cache
+ * @return true if the lemma was sent on the output channel.
+ */
+ bool lemmaExp(Node conc,
+ const std::vector<Node>& exp,
+ const std::vector<Node>& noExplain,
+ ProofGenerator* pg = nullptr,
+ LemmaProperty p = LemmaProperty::NONE,
+ bool doCache = true);
+ /**
* Has this inference manager cached and sent the given lemma (in this user
* context)? This method can be overridden by the particular manager. If not,
* this returns true if lem is in the cache d_lemmasSent maintained by this
@@ -194,8 +254,8 @@ class TheoryInferenceManager
* @param atom The atom of the fact to assert
* @param pol Its polarity
* @param exp Its explanation, interpreted as a conjunction
- * @param pg The proof generator for this step. It must be the case that pf
- * can provide a proof concluding (~) atom from free asumptions in exp in
+ * @param pg The proof generator for this step. If non-null, pg must be able
+ * to provide a proof concluding (~) atom from free assumptions in exp in
* the remainder of the current SAT context.
*/
void assertInternalFact(TNode atom,
@@ -236,6 +296,14 @@ class TheoryInferenceManager
*/
Node mkExplain(TNode n);
/**
+ * Explain the set of formulas in exp using the official equality engine of
+ * the theory. We ask the equality engine to explain literals in exp
+ * that do not occur in noExplain, and return unchanged those that occur in
+ * noExplain. Note the vector noExplain should be a subset of exp.
+ */
+ Node mkExplainPartial(const std::vector<Node>& exp,
+ const std::vector<Node>& noExplain);
+ /**
* Cache that lemma lem is being sent with property p. Return true if it
* did not already exist in the cache maintained by this class. If this
* method is not overridden, then we use the d_lemmasSent cache maintained
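
Finally, a hedged sketch of how a theory-specific inference manager derived from this class might use the new explained-lemma entry point; the deriving class and method are placeholders, and no proof generator is supplied:

  // Hypothetical derived manager using the proof-generator overload of
  // lemmaExp with its default arguments.
  class MyInferenceManager : public TheoryInferenceManager
  {
   public:
    using TheoryInferenceManager::TheoryInferenceManager;

    bool sendExplainedLemma(Node conc,
                            const std::vector<Node>& exp,
                            const std::vector<Node>& noExplain)
    {
      // Literals in exp but not in noExplain are replaced by their equality
      // engine explanations; with proofs enabled the proof equality engine
      // builds the TrustNode, otherwise the IMPLIES fallback is used.
      return lemmaExp(conc, exp, noExplain);
    }
  };

conflictExp plays the analogous role for conflicts: it is given the premises and the proof rule that conclude false, and either routes through the proof equality engine or falls back to mkExplainPartial plus a plain conflict.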