author      Andrew Reynolds <andrew.j.reynolds@gmail.com>   2021-02-19 09:26:36 -0600
committer   GitHub <noreply@github.com>                     2021-02-19 09:26:36 -0600
commit      c4822869beac8d4a0eac4b234e0662d3db49f995 (patch)
tree        d34b86c54b0ac8de6df4734e1e76afa5f43d5efb /src/theory/theory_inference_manager.h
parent      00479d03cdeac3e864a1930dddb16c71c5bf2ce9 (diff)
Refactoring theory inference process (#5920)
This PR refactors TheoryInference so that it is not responsible for calling back into InferenceManager; instead, it sets data so that InferenceManagerBuffered has enough information to do this itself. It also makes the decision of whether to cache lemmas in the theory inference manager a global choice per theory instead of per lemma.
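As an illustration of the per-theory caching choice (a minimal sketch, not code from this patch: the class name, statistics prefix, and call sites are placeholders), a theory-specific inference manager now fixes the caching policy once in its constructor, and individual lemma calls no longer pass a doCache flag:

    // Before this patch: caching was decided per lemma at each call site, e.g.
    //   d_im.lemma(lem, id, LemmaProperty::NONE, /*doCache=*/true);
    //
    // After this patch: the theory decides once, at construction time, whether
    // all lemmas it sends are kept in a user-context-dependent cache, so that
    // only lemmas that are unique after rewriting reach the theory engine.
    class MyTheoryInferenceManager : public TheoryInferenceManager
    {
     public:
      MyTheoryInferenceManager(Theory& t, TheoryState& s, ProofNodeManager* pnm)
          : TheoryInferenceManager(t, s, pnm, "theory::my::", /*cacheLemmas=*/true)
      {
      }
    };
    // Call sites simplify to:
    //   d_im.lemma(lem, id);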
Diffstat (limited to 'src/theory/theory_inference_manager.h')
-rw-r--r--  src/theory/theory_inference_manager.h   34
1 file changed, 19 insertions, 15 deletions
diff --git a/src/theory/theory_inference_manager.h b/src/theory/theory_inference_manager.h
index 0142f814a..d3a317cbc 100644
--- a/src/theory/theory_inference_manager.h
+++ b/src/theory/theory_inference_manager.h
@@ -69,11 +69,23 @@ class TheoryInferenceManager
public:
/**
* Constructor, note that state should be the official state of theory t.
+ *
+ * @param t The theory this inference manager is for
+ * @param state The state of the theory
+ * @param pnm The proof node manager, which if non-null, enables proofs for
+ * this inference manager
+ * @param name The name of the inference manager, which is used for giving
+ * unique names for statistics.
+ * @param cacheLemmas Whether all lemmas sent using this theory inference
+ * manager are added to a user-context dependent cache. This means that
+ * only lemmas that are unique after rewriting are sent to the theory engine
+ * from this inference manager.
*/
TheoryInferenceManager(Theory& t,
TheoryState& state,
ProofNodeManager* pnm,
- const std::string& name);
+ const std::string& name,
+ bool cacheLemmas = true);
virtual ~TheoryInferenceManager();
/**
* Set equality engine, ee is a pointer to the official equality engine
@@ -183,22 +195,16 @@ class TheoryInferenceManager
*
* @param tlem The trust node containing the lemma and its proof generator.
* @param p The property of the lemma
- * @param doCache If true, we send the lemma only if it has not already been
- * cached (see cacheLemma), and add it to the cache during this call.
* @return true if the lemma was sent on the output channel.
*/
bool trustedLemma(const TrustNode& tlem,
InferenceId id,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Send lemma lem with property p on the output channel. Same as above, with
* a node instead of a trust node.
*/
- bool lemma(TNode lem,
- InferenceId id,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ bool lemma(TNode lem, InferenceId id, LemmaProperty p = LemmaProperty::NONE);
/**
* Explained lemma. This should be called when
* ( exp => conc )
@@ -219,7 +225,6 @@ class TheoryInferenceManager
* equality engine
* @param args The arguments to the proof step concluding conc
* @param p The property of the lemma
- * @param doCache Whether to check and add the lemma to the cache
* @return true if the lemma was sent on the output channel.
*/
bool lemmaExp(Node conc,
@@ -228,8 +233,7 @@ class TheoryInferenceManager
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
const std::vector<Node>& args,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
 * Make the trust node for the above method. It is the responsibility of the
* caller to subsequently call trustedLemma with the returned trust node.
@@ -251,7 +255,6 @@ class TheoryInferenceManager
* equality engine
* @param pg If non-null, the proof generator who can provide a proof of conc.
* @param p The property of the lemma
- * @param doCache Whether to check and add the lemma to the cache
* @return true if the lemma was sent on the output channel.
*/
bool lemmaExp(Node conc,
@@ -259,8 +262,7 @@ class TheoryInferenceManager
const std::vector<Node>& exp,
const std::vector<Node>& noExplain,
ProofGenerator* pg = nullptr,
- LemmaProperty p = LemmaProperty::NONE,
- bool doCache = true);
+ LemmaProperty p = LemmaProperty::NONE);
/**
* Make the trust node for the above method. It is responsibility of the
* caller to subsequently call trustedLemma with the returned trust node.
@@ -417,6 +419,8 @@ class TheoryInferenceManager
std::unique_ptr<eq::ProofEqEngine> d_pfee;
/** The proof node manager of the theory */
ProofNodeManager* d_pnm;
+ /** Whether this manager caches lemmas */
+ bool d_cacheLemmas;
/**
* The keep set of this class. This set is maintained to ensure that
* facts and their explanations are ref-counted. Since facts and their