summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndrew Reynolds <andrew.j.reynolds@gmail.com>2021-02-19 10:44:45 -0600
committerGitHub <noreply@github.com>2021-02-19 10:44:45 -0600
commit7430455bc71b6641f121edb3ec0cf1706bf40235 (patch)
tree64b08ccb56128ed54e874220ee502396c0b6fd84
parentc4822869beac8d4a0eac4b234e0662d3db49f995 (diff)
Fill in missing inference ids in datatypes theory (#5931)
Also updates the datatypes inference manager to not track stats, since the standard ones are now used. This also sets up some dependencies in the sygus extension which will be used to implement InferenceId for the sygus extension, to be done in a separate PR.
-rw-r--r--src/theory/datatypes/inference_manager.cpp29
-rw-r--r--src/theory/datatypes/inference_manager.h22
-rw-r--r--src/theory/datatypes/sygus_extension.cpp53
-rw-r--r--src/theory/datatypes/sygus_extension.h18
-rw-r--r--src/theory/datatypes/sygus_simple_sym.cpp5
-rw-r--r--src/theory/datatypes/sygus_simple_sym.h2
-rw-r--r--src/theory/datatypes/theory_datatypes.cpp39
-rw-r--r--src/theory/inference_id.cpp31
-rw-r--r--src/theory/inference_id.h13
9 files changed, 113 insertions, 99 deletions
diff --git a/src/theory/datatypes/inference_manager.cpp b/src/theory/datatypes/inference_manager.cpp
index cf93009bb..8c12258af 100644
--- a/src/theory/datatypes/inference_manager.cpp
+++ b/src/theory/datatypes/inference_manager.cpp
@@ -29,9 +29,6 @@ InferenceManager::InferenceManager(Theory& t,
TheoryState& state,
ProofNodeManager* pnm)
: InferenceManagerBuffered(t, state, pnm, "theory::datatypes"),
- d_inferenceLemmas("theory::datatypes::inferenceLemmas"),
- d_inferenceFacts("theory::datatypes::inferenceFacts"),
- d_inferenceConflicts("theory::datatypes::inferenceConflicts"),
d_pnm(pnm),
d_ipc(pnm == nullptr ? nullptr
: new InferProofCons(state.getSatContext(), pnm)),
@@ -41,33 +38,27 @@ InferenceManager::InferenceManager(Theory& t,
pnm, state.getUserContext(), "datatypes::lemPg"))
{
d_false = NodeManager::currentNM()->mkConst(false);
- smtStatisticsRegistry()->registerStat(&d_inferenceLemmas);
- smtStatisticsRegistry()->registerStat(&d_inferenceFacts);
- smtStatisticsRegistry()->registerStat(&d_inferenceConflicts);
}
InferenceManager::~InferenceManager()
{
- smtStatisticsRegistry()->unregisterStat(&d_inferenceLemmas);
- smtStatisticsRegistry()->unregisterStat(&d_inferenceFacts);
- smtStatisticsRegistry()->unregisterStat(&d_inferenceConflicts);
}
void InferenceManager::addPendingInference(Node conc,
+ InferenceId id,
Node exp,
- bool forceLemma,
- InferenceId i)
+ bool forceLemma)
{
// if we are forcing the inference to be processed as a lemma, or if the
// inference must be sent as a lemma based on the policy in
// mustCommunicateFact.
if (forceLemma || DatatypesInference::mustCommunicateFact(conc, exp))
{
- d_pendingLem.emplace_back(new DatatypesInference(this, conc, exp, i));
+ d_pendingLem.emplace_back(new DatatypesInference(this, conc, exp, id));
}
else
{
- d_pendingFact.emplace_back(new DatatypesInference(this, conc, exp, i));
+ d_pendingFact.emplace_back(new DatatypesInference(this, conc, exp, id));
}
}
@@ -87,10 +78,7 @@ void InferenceManager::sendDtLemma(Node lem, InferenceId id, LemmaProperty p)
return;
}
// otherwise send as a normal lemma
- if (lemma(lem, id, p))
- {
- d_inferenceLemmas << id;
- }
+ lemma(lem, id, p);
}
void InferenceManager::sendDtConflict(const std::vector<Node>& conf, InferenceId id)
@@ -101,15 +89,15 @@ void InferenceManager::sendDtConflict(const std::vector<Node>& conf, InferenceId
prepareDtInference(d_false, exp, id, d_ipc.get());
}
conflictExp(id, conf, d_ipc.get());
- d_inferenceConflicts << id;
}
-bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas)
+bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas,
+ InferenceId id)
{
bool ret = false;
for (const Node& lem : lemmas)
{
- if (lemma(lem, InferenceId::UNKNOWN))
+ if (lemma(lem, id))
{
ret = true;
}
@@ -151,7 +139,6 @@ TrustNode InferenceManager::processDtLemma(Node conc, Node exp, InferenceId id)
}
d_lemPg->setProofFor(lem, pn);
}
- // use trusted lemma
return TrustNode::mkTrustLemma(lem, d_lemPg.get());
}
diff --git a/src/theory/datatypes/inference_manager.h b/src/theory/datatypes/inference_manager.h
index 9c9a2bcb5..ba46d2a42 100644
--- a/src/theory/datatypes/inference_manager.h
+++ b/src/theory/datatypes/inference_manager.h
@@ -22,7 +22,6 @@
#include "theory/datatypes/infer_proof_cons.h"
#include "theory/datatypes/inference.h"
#include "theory/inference_manager_buffered.h"
-#include "util/statistics_registry.h"
namespace CVC4 {
namespace theory {
@@ -45,16 +44,16 @@ class InferenceManager : public InferenceManagerBuffered
*
* @param conc The conclusion of the inference
* @param exp The explanation of the inference
+ * @param id The inference, used for stats and as a hint for constructing
+ * the proof of (conc => exp)
* @param forceLemma Whether this inference *must* be processed as a lemma.
* Otherwise, it may be processed as a fact or lemma based on
* mustCommunicateFact.
- * @param i The inference, used for stats and as a hint for constructing
- * the proof of (conc => exp)
*/
void addPendingInference(Node conc,
+ InferenceId id,
Node exp,
- bool forceLemma = false,
- InferenceId i = InferenceId::UNKNOWN);
+ bool forceLemma = false);
/**
* Process the current lemmas and facts. This is a custom method that can
* be seen as overriding the behavior of calling both doPendingLemmas and
@@ -66,17 +65,17 @@ class InferenceManager : public InferenceManagerBuffered
* Send lemma immediately on the output channel
*/
void sendDtLemma(Node lem,
- InferenceId i = InferenceId::UNKNOWN,
+ InferenceId id,
LemmaProperty p = LemmaProperty::NONE);
/**
* Send conflict immediately on the output channel
*/
- void sendDtConflict(const std::vector<Node>& conf, InferenceId i = InferenceId::UNKNOWN);
+ void sendDtConflict(const std::vector<Node>& conf, InferenceId id);
/**
* Send lemmas with property NONE on the output channel immediately.
* Returns true if any lemma was sent.
*/
- bool sendLemmas(const std::vector<Node>& lemmas);
+ bool sendLemmas(const std::vector<Node>& lemmas, InferenceId id);
private:
/** Are proofs enabled? */
@@ -104,13 +103,6 @@ class InferenceManager : public InferenceManagerBuffered
Node prepareDtInference(Node conc, Node exp, InferenceId id, InferProofCons* ipc);
/** The false node */
Node d_false;
- /**
- * Counts the number of applications of each type of inference processed by
- * the above method as facts, lemmas and conflicts.
- */
- HistogramStat<InferenceId> d_inferenceLemmas;
- HistogramStat<InferenceId> d_inferenceFacts;
- HistogramStat<InferenceId> d_inferenceConflicts;
/** Pointer to the proof node manager */
ProofNodeManager* d_pnm;
/** The inference to proof converter */
diff --git a/src/theory/datatypes/sygus_extension.cpp b/src/theory/datatypes/sygus_extension.cpp
index 18b75e631..7efd2a9ac 100644
--- a/src/theory/datatypes/sygus_extension.cpp
+++ b/src/theory/datatypes/sygus_extension.cpp
@@ -22,12 +22,10 @@
#include "options/quantifiers_options.h"
#include "printer/printer.h"
#include "theory/datatypes/sygus_datatype_utils.h"
-#include "theory/datatypes/theory_datatypes.h"
#include "theory/datatypes/theory_datatypes_utils.h"
#include "theory/quantifiers/sygus/sygus_explain.h"
#include "theory/quantifiers/sygus/term_database_sygus.h"
#include "theory/quantifiers/term_util.h"
-#include "theory/quantifiers_engine.h"
#include "theory/theory_model.h"
using namespace CVC4;
@@ -36,16 +34,19 @@ using namespace CVC4::context;
using namespace CVC4::theory;
using namespace CVC4::theory::datatypes;
-SygusExtension::SygusExtension(TheoryDatatypes* td,
- QuantifiersEngine* qe,
- context::Context* c)
- : d_td(td),
- d_tds(qe->getTermDatabaseSygus()),
- d_ssb(qe),
- d_testers(c),
- d_testers_exp(c),
- d_active_terms(c),
- d_currTermSize(c)
+SygusExtension::SygusExtension(TheoryState& s,
+ InferenceManager& im,
+ quantifiers::TermDbSygus* tds,
+ DecisionManager* dm)
+ : d_state(s),
+ d_im(im),
+ d_tds(tds),
+ d_dm(dm),
+ d_ssb(tds),
+ d_testers(s.getSatContext()),
+ d_testers_exp(s.getSatContext()),
+ d_active_terms(s.getSatContext()),
+ d_currTermSize(s.getSatContext())
{
d_zero = NodeManager::currentNM()->mkConst(Rational(0));
d_true = NodeManager::currentNM()->mkConst(true);
@@ -1303,12 +1304,11 @@ void SygusExtension::registerSizeTerm(Node e, std::vector<Node>& lemmas)
d_anchor_to_ag_strategy[e].reset(
new DecisionStrategySingleton("sygus_enum_active",
ag,
- d_td->getSatContext(),
- d_td->getValuation()));
+ d_state.getSatContext(),
+ d_state.getValuation()));
}
- d_td->getDecisionManager()->registerStrategy(
- DecisionManager::STRAT_DT_SYGUS_ENUM_ACTIVE,
- d_anchor_to_ag_strategy[e].get());
+ d_dm->registerStrategy(DecisionManager::STRAT_DT_SYGUS_ENUM_ACTIVE,
+ d_anchor_to_ag_strategy[e].get());
}
Node m;
if (!ag.isNull())
@@ -1387,10 +1387,10 @@ void SygusExtension::registerMeasureTerm( Node m ) {
if( it==d_szinfo.end() ){
Trace("sygus-sb") << "Sygus : register measure term : " << m << std::endl;
d_szinfo[m].reset(new SygusSizeDecisionStrategy(
- m, d_td->getSatContext(), d_td->getValuation()));
+ m, d_state.getSatContext(), d_state.getValuation()));
// register this as a decision strategy
- d_td->getDecisionManager()->registerStrategy(
- DecisionManager::STRAT_DT_SYGUS_ENUM_SIZE, d_szinfo[m].get());
+ d_dm->registerStrategy(DecisionManager::STRAT_DT_SYGUS_ENUM_SIZE,
+ d_szinfo[m].get());
}
}
@@ -1561,7 +1561,7 @@ void SygusExtension::check( std::vector< Node >& lemmas ) {
Trace("dt-sygus-debug") << "Checking model value of " << prog << "..."
<< std::endl;
Assert(prog.getType().isDatatype());
- Node progv = d_td->getValuation().getModel()->getValue( prog );
+ Node progv = d_state.getValuation().getModel()->getValue(prog);
if (Trace.isOn("dt-sygus"))
{
Trace("dt-sygus") << "* DT model : " << prog << " -> ";
@@ -1578,7 +1578,7 @@ void SygusExtension::check( std::vector< Node >& lemmas ) {
if (options::sygusFair() == options::SygusFairMode::DT_SIZE)
{
Node prog_sz = NodeManager::currentNM()->mkNode( kind::DT_SIZE, prog );
- Node prog_szv = d_td->getValuation().getModel()->getValue( prog_sz );
+ Node prog_szv = d_state.getValuation().getModel()->getValue(prog_sz);
Node progv_sz = NodeManager::currentNM()->mkNode( kind::DT_SIZE, progv );
Trace("sygus-sb") << " Mv[" << prog << "] = " << progv << ", size = " << prog_szv << std::endl;
@@ -1663,7 +1663,7 @@ bool SygusExtension::checkValue(Node n,
if (Trace.isOn("sygus-sb-check-value"))
{
Node prog_sz = nm->mkNode(DT_SIZE, n);
- Node prog_szv = d_td->getValuation().getModel()->getValue( prog_sz );
+ Node prog_szv = d_state.getValuation().getModel()->getValue(prog_sz);
for( int i=0; i<ind; i++ ){
Trace("sygus-sb-check-value") << " ";
}
@@ -1676,11 +1676,11 @@ bool SygusExtension::checkValue(Node n,
// ensure that the expected size bound is met
int cindex = utils::indexOf(vn.getOperator());
Node tst = utils::mkTester(n, cindex, dt);
- bool hastst = d_td->getEqualityEngine()->hasTerm(tst);
+ bool hastst = d_state.getEqualityEngine()->hasTerm(tst);
Node tstrep;
if (hastst)
{
- tstrep = d_td->getEqualityEngine()->getRepresentative(tst);
+ tstrep = d_state.getEqualityEngine()->getRepresentative(tst);
}
if (!hastst || tstrep != d_true)
{
@@ -1789,7 +1789,8 @@ Node SygusExtension::SygusSizeDecisionStrategy::mkLiteral(unsigned s)
int SygusExtension::getGuardStatus( Node g ) {
bool value;
- if( d_td->getValuation().hasSatValue( g, value ) ) {
+ if (d_state.getValuation().hasSatValue(g, value))
+ {
if( value ){
return 1;
}else{
diff --git a/src/theory/datatypes/sygus_extension.h b/src/theory/datatypes/sygus_extension.h
index 349ee07c9..99a0f6170 100644
--- a/src/theory/datatypes/sygus_extension.h
+++ b/src/theory/datatypes/sygus_extension.h
@@ -28,6 +28,7 @@
#include "expr/dtype.h"
#include "expr/node.h"
#include "theory/datatypes/sygus_simple_sym.h"
+#include "theory/decision_manager.h"
#include "theory/quantifiers/sygus/sygus_explain.h"
#include "theory/quantifiers/sygus/synth_conjecture.h"
#include "theory/quantifiers/sygus_sampler.h"
@@ -37,7 +38,7 @@ namespace CVC4 {
namespace theory {
namespace datatypes {
-class TheoryDatatypes;
+class InferenceManager;
/**
* This is the sygus extension of the decision procedure for quantifier-free
@@ -70,9 +71,10 @@ class SygusExtension
typedef context::CDHashSet<Node, NodeHashFunction> NodeSet;
public:
- SygusExtension(TheoryDatatypes* td,
- QuantifiersEngine* qe,
- context::Context* c);
+ SygusExtension(TheoryState& s,
+ InferenceManager& im,
+ quantifiers::TermDbSygus* tds,
+ DecisionManager* dm);
~SygusExtension();
/**
* Notify this class that tester for constructor tindex has been asserted for
@@ -106,10 +108,14 @@ class SygusExtension
*/
void check(std::vector<Node>& lemmas);
private:
- /** Pointer to the datatype theory that owns this class. */
- TheoryDatatypes* d_td;
+ /** The theory state of the datatype theory */
+ TheoryState& d_state;
+ /** The inference manager of the datatype theory */
+ InferenceManager& d_im;
/** Pointer to the sygus term database */
quantifiers::TermDbSygus* d_tds;
+ /** Pointer to the decision manager */
+ DecisionManager* d_dm;
/** the simple symmetry breaking utility */
SygusSimpleSymBreak d_ssb;
/**
diff --git a/src/theory/datatypes/sygus_simple_sym.cpp b/src/theory/datatypes/sygus_simple_sym.cpp
index 04ac14c11..b7ab9b4f7 100644
--- a/src/theory/datatypes/sygus_simple_sym.cpp
+++ b/src/theory/datatypes/sygus_simple_sym.cpp
@@ -15,7 +15,6 @@
#include "theory/datatypes/sygus_simple_sym.h"
#include "theory/quantifiers/term_util.h"
-#include "theory/quantifiers_engine.h"
using namespace std;
using namespace CVC4::kind;
@@ -24,8 +23,8 @@ namespace CVC4 {
namespace theory {
namespace datatypes {
-SygusSimpleSymBreak::SygusSimpleSymBreak(QuantifiersEngine* qe)
- : d_tds(qe->getTermDatabaseSygus())
+SygusSimpleSymBreak::SygusSimpleSymBreak(quantifiers::TermDbSygus* tds)
+ : d_tds(tds)
{
}
diff --git a/src/theory/datatypes/sygus_simple_sym.h b/src/theory/datatypes/sygus_simple_sym.h
index c2940512a..3ca55309b 100644
--- a/src/theory/datatypes/sygus_simple_sym.h
+++ b/src/theory/datatypes/sygus_simple_sym.h
@@ -40,7 +40,7 @@ namespace datatypes {
class SygusSimpleSymBreak
{
public:
- SygusSimpleSymBreak(QuantifiersEngine* qe);
+ SygusSimpleSymBreak(quantifiers::TermDbSygus* tds);
~SygusSimpleSymBreak() {}
/** consider argument kind
*
diff --git a/src/theory/datatypes/theory_datatypes.cpp b/src/theory/datatypes/theory_datatypes.cpp
index 8945fb735..9d3b5a143 100644
--- a/src/theory/datatypes/theory_datatypes.cpp
+++ b/src/theory/datatypes/theory_datatypes.cpp
@@ -107,8 +107,10 @@ void TheoryDatatypes::finishInit()
// this is not done.
if (getQuantifiersEngine() && options::sygus())
{
+ quantifiers::TermDbSygus* tds =
+ getQuantifiersEngine()->getTermDatabaseSygus();
d_sygusExtension.reset(
- new SygusExtension(this, getQuantifiersEngine(), getSatContext()));
+ new SygusExtension(d_state, d_im, tds, getDecisionManager()));
// do congruence on evaluation functions
d_equalityEngine->addFunctionKind(kind::DT_SYGUS_EVAL);
}
@@ -177,7 +179,7 @@ void TheoryDatatypes::postCheck(Effort level)
Assert(d_sygusExtension != nullptr);
std::vector<Node> lemmas;
d_sygusExtension->check(lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
return;
}
else if (level == EFFORT_FULL && !d_state.isInConflict()
@@ -252,7 +254,7 @@ void TheoryDatatypes::postCheck(Effort level)
assumptions.push_back(assumption);
Node lemma = assumptions.size()==1 ? assumptions[0] : NodeManager::currentNM()->mkNode( OR, assumptions );
Trace("dt-singleton") << "*************Singleton equality lemma " << lemma << std::endl;
- d_im.lemma(lemma, InferenceId::UNKNOWN);
+ d_im.lemma(lemma, InferenceId::DATATYPES_REC_SINGLETON_EQ);
}
}
}else{
@@ -307,7 +309,8 @@ void TheoryDatatypes::postCheck(Effort level)
//this may not be necessary?
//if only one constructor, then this term must be this constructor
Node t = utils::mkTester(n, 0, dt);
- d_im.addPendingInference(t, d_true, false, InferenceId::DATATYPES_SPLIT);
+ d_im.addPendingInference(
+ t, InferenceId::DATATYPES_SPLIT, d_true);
Trace("datatypes-infer") << "DtInfer : 1-cons (full) : " << t << std::endl;
}else{
Assert(consIndex != -1 || dt.isSygus());
@@ -318,7 +321,7 @@ void TheoryDatatypes::postCheck(Effort level)
NodeBuilder<> nb(kind::OR);
nb << test << test.notNode();
Node lemma = nb;
- d_im.lemma(lemma, InferenceId::UNKNOWN);
+ d_im.lemma(lemma, InferenceId::DATATYPES_BINARY_SPLIT);
d_out->requirePhase( test, true );
}else{
Trace("dt-split") << "*************Split for constructors on " << n << endl;
@@ -389,7 +392,7 @@ void TheoryDatatypes::notifyFact(TNode atom,
{
std::vector< Node > lemmas;
d_sygusExtension->assertFact(atom, polarity, lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
//add to tester if applicable
Node t_arg;
@@ -412,7 +415,7 @@ void TheoryDatatypes::notifyFact(TNode atom,
std::vector< Node > lemmas;
d_sygusExtension->assertTester(tindex, t_arg, atom, lemmas);
Trace("dt-tester") << "Done assert tester to sygus." << std::endl;
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
}
}else{
@@ -472,7 +475,7 @@ void TheoryDatatypes::preRegisterTerm(TNode n)
{
std::vector< Node > lemmas;
d_sygusExtension->preRegisterTerm(n, lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
break;
}
@@ -687,7 +690,8 @@ void TheoryDatatypes::merge( Node t1, Node t2 ){
for( int i=0; i<(int)cons1.getNumChildren(); i++ ) {
if( !areEqual( cons1[i], cons2[i] ) ){
Node eq = cons1[i].eqNode( cons2[i] );
- d_im.addPendingInference(eq, unifEq, false, InferenceId::DATATYPES_UNIF);
+ d_im.addPendingInference(
+ eq, InferenceId::DATATYPES_UNIF, unifEq);
Trace("datatypes-infer") << "DtInfer : cons-inj : " << eq << " by " << unifEq << std::endl;
}
}
@@ -849,7 +853,7 @@ Node TheoryDatatypes::getTermSkolemFor( Node n ) {
d_term_sk[n] = k;
Node eq = k.eqNode( n );
Trace("datatypes-infer") << "DtInfer : ref : " << eq << std::endl;
- d_im.addPendingLemma(eq, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(eq, InferenceId::DATATYPES_PURIFY);
return k;
}else{
return (*it).second;
@@ -974,7 +978,7 @@ void TheoryDatatypes::addTester(
: utils::mkTester(t_arg, testerIndex, dt);
Node t_concl_exp = ( nb.getNumChildren() == 1 ) ? nb.getChild( 0 ) : nb;
d_im.addPendingInference(
- t_concl, t_concl_exp, false, InferenceId::DATATYPES_LABEL_EXH);
+ t_concl, InferenceId::DATATYPES_LABEL_EXH, t_concl_exp);
Trace("datatypes-infer") << "DtInfer : label : " << t_concl << " by " << t_concl_exp << std::endl;
return;
}
@@ -1110,7 +1114,8 @@ void TheoryDatatypes::collapseSelector( Node s, Node c ) {
bool forceLemma = !s.getType().isDatatype();
Trace("datatypes-infer") << "DtInfer : collapse sel";
Trace("datatypes-infer") << " : " << eq << " by " << eq_exp << std::endl;
- d_im.addPendingInference(eq, eq_exp, forceLemma, InferenceId::DATATYPES_COLLAPSE_SEL);
+ d_im.addPendingInference(
+ eq, InferenceId::DATATYPES_COLLAPSE_SEL, eq_exp, forceLemma);
}
}
}
@@ -1414,7 +1419,7 @@ Node TheoryDatatypes::getSingletonLemma( TypeNode tn, bool pol ) {
Node v2 = NodeManager::currentNM()->mkSkolem( "k2", tn );
a = v1.eqNode( v2 ).negate();
//send out immediately as lemma
- d_im.lemma(a, InferenceId::UNKNOWN);
+ d_im.lemma(a, InferenceId::DATATYPES_REC_SINGLETON_FORCE_DEQ);
Trace("dt-singleton") << "******** assert " << a << " to avoid singleton cardinality for type " << tn << std::endl;
}
d_singleton_lemma[index][tn] = a;
@@ -1472,7 +1477,7 @@ void TheoryDatatypes::collectTerms( Node n ) {
Node lem = nm->mkNode(LEQ, d_zero, n);
Trace("datatypes-infer")
<< "DtInfer : size geq zero : " << lem << std::endl;
- d_im.addPendingLemma(lem, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(lem, InferenceId::DATATYPES_SIZE_POS);
}
else if (nk == DT_HEIGHT_BOUND && n[1].getConst<Rational>().isZero())
{
@@ -1497,7 +1502,7 @@ void TheoryDatatypes::collectTerms( Node n ) {
: nm->mkNode(OR, children));
}
Trace("datatypes-infer") << "DtInfer : zero height : " << lem << std::endl;
- d_im.addPendingLemma(lem, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(lem, InferenceId::DATATYPES_HEIGHT_ZERO);
}
}
@@ -1564,7 +1569,7 @@ void TheoryDatatypes::instantiate( EqcInfo* eqc, Node n ){
}
Trace("datatypes-infer-debug") << "DtInstantiate : " << eqc << " " << eq
<< " forceLemma = " << forceLemma << std::endl;
- d_im.addPendingInference(eq, exp, forceLemma, InferenceId::DATATYPES_INST);
+ d_im.addPendingInference(eq, InferenceId::DATATYPES_INST, exp, forceLemma);
Trace("datatypes-infer") << "DtInfer : instantiate : " << eq << " by " << exp
<< std::endl;
}
@@ -1650,7 +1655,7 @@ void TheoryDatatypes::checkCycles() {
Trace("dt-cdt") << std::endl;
Node eq = part_out[i][0].eqNode( part_out[i][j] );
Node eqExp = NodeManager::currentNM()->mkAnd(exp);
- d_im.addPendingInference(eq, eqExp, false, InferenceId::DATATYPES_BISIMILAR);
+ d_im.addPendingInference(eq, InferenceId::DATATYPES_BISIMILAR, eqExp);
Trace("datatypes-infer") << "DtInfer : cdt-bisimilar : " << eq << " by " << eqExp << std::endl;
}
}
diff --git a/src/theory/inference_id.cpp b/src/theory/inference_id.cpp
index a561cc1b4..fda694e3d 100644
--- a/src/theory/inference_id.cpp
+++ b/src/theory/inference_id.cpp
@@ -76,16 +76,27 @@ const char* toString(InferenceId i)
case InferenceId::BAG_DIFFERENCE_REMOVE: return "BAG_DIFFERENCE_REMOVE";
case InferenceId::BAG_DUPLICATE_REMOVAL: return "BAG_DUPLICATE_REMOVAL";
- case InferenceId::DATATYPES_UNIF: return "UNIF";
- case InferenceId::DATATYPES_INST: return "INST";
- case InferenceId::DATATYPES_SPLIT: return "SPLIT";
- case InferenceId::DATATYPES_LABEL_EXH: return "LABEL_EXH";
- case InferenceId::DATATYPES_COLLAPSE_SEL: return "COLLAPSE_SEL";
- case InferenceId::DATATYPES_CLASH_CONFLICT: return "CLASH_CONFLICT";
- case InferenceId::DATATYPES_TESTER_CONFLICT: return "TESTER_CONFLICT";
- case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT: return "TESTER_MERGE_CONFLICT";
- case InferenceId::DATATYPES_BISIMILAR: return "BISIMILAR";
- case InferenceId::DATATYPES_CYCLE: return "CYCLE";
+ case InferenceId::DATATYPES_PURIFY: return "DATATYPES_PURIFY";
+ case InferenceId::DATATYPES_UNIF: return "DATATYPES_UNIF";
+ case InferenceId::DATATYPES_INST: return "DATATYPES_INST";
+ case InferenceId::DATATYPES_SPLIT: return "DATATYPES_SPLIT";
+ case InferenceId::DATATYPES_BINARY_SPLIT: return "DATATYPES_BINARY_SPLIT";
+ case InferenceId::DATATYPES_LABEL_EXH: return "DATATYPES_LABEL_EXH";
+ case InferenceId::DATATYPES_COLLAPSE_SEL: return "DATATYPES_COLLAPSE_SEL";
+ case InferenceId::DATATYPES_CLASH_CONFLICT:
+ return "DATATYPES_CLASH_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_CONFLICT:
+ return "DATATYPES_TESTER_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT:
+ return "DATATYPES_TESTER_MERGE_CONFLICT";
+ case InferenceId::DATATYPES_BISIMILAR: return "DATATYPES_BISIMILAR";
+ case InferenceId::DATATYPES_REC_SINGLETON_EQ:
+ return "DATATYPES_REC_SINGLETON_EQ";
+ case InferenceId::DATATYPES_REC_SINGLETON_FORCE_DEQ:
+ return "DATATYPES_REC_SINGLETON_FORCE_DEQ";
+ case InferenceId::DATATYPES_CYCLE: return "DATATYPES_CYCLE";
+ case InferenceId::DATATYPES_SIZE_POS: return "DATATYPES_SIZE_POS";
+ case InferenceId::DATATYPES_HEIGHT_ZERO: return "DATATYPES_HEIGHT_ZERO";
case InferenceId::SEP_PTO_NEG_PROP: return "SEP_PTO_NEG_PROP";
case InferenceId::SEP_PTO_PROP: return "SEP_PTO_PROP";
diff --git a/src/theory/inference_id.h b/src/theory/inference_id.h
index 2891d5132..884a7c428 100644
--- a/src/theory/inference_id.h
+++ b/src/theory/inference_id.h
@@ -134,12 +134,16 @@ enum class InferenceId
// ---------------------------------- end bitvector theory
// ---------------------------------- datatypes theory
+ // (= k t) for fresh k
+ DATATYPES_PURIFY,
// (= (C t1 ... tn) (C s1 .. sn)) => (= ti si)
DATATYPES_UNIF,
// ((_ is Ci) t) => (= t (Ci (sel_1 t) ... (sel_n t)))
DATATYPES_INST,
// (or ((_ is C1) t) V ... V ((_ is Cn) t))
DATATYPES_SPLIT,
+ // (or ((_ is Ci) t) V (not ((_ is Ci) t)))
+ DATATYPES_BINARY_SPLIT,
// (not ((_ is C1) t)) ^ ... [j] ... ^ (not ((_ is Cn) t)) => ((_ is Cj) t)
DATATYPES_LABEL_EXH,
// (= t (Ci t1 ... tn)) => (= (sel_j t) rewrite((sel_j (Ci t1 ... tn))))
@@ -152,8 +156,17 @@ enum class InferenceId
DATATYPES_TESTER_MERGE_CONFLICT,
// bisimilarity for codatatypes
DATATYPES_BISIMILAR,
+ // corecursive singleton equality
+ DATATYPES_REC_SINGLETON_EQ,
+ // corecursive singleton equality (not (= k1 k2)) for fresh k1, k2
+ DATATYPES_REC_SINGLETON_FORCE_DEQ,
// cycle conflict for datatypes
DATATYPES_CYCLE,
+ //-------------------- datatypes size/height
+ // (>= (dt.size t) 0)
+ DATATYPES_SIZE_POS,
+ // (=> (= (dt.height t) 0) => (and (= (dt.height (sel_1 t)) 0) .... ))
+ DATATYPES_HEIGHT_ZERO,
// ---------------------------------- end datatypes theory
// ---------------------------------- sep theory
generated by cgit on debian on lair
contact matthew@masot.net with questions or feedback