Diffstat (limited to 'src/theory/quantifiers/ce_guided_instantiation.cpp')
-rw-r--r--  src/theory/quantifiers/ce_guided_instantiation.cpp | 128
1 file changed, 100 insertions(+), 28 deletions(-)
diff --git a/src/theory/quantifiers/ce_guided_instantiation.cpp b/src/theory/quantifiers/ce_guided_instantiation.cpp
index ad900ee82..cae3e730e 100644
--- a/src/theory/quantifiers/ce_guided_instantiation.cpp
+++ b/src/theory/quantifiers/ce_guided_instantiation.cpp
@@ -131,14 +131,18 @@ Node CegConjecture::getLiteral( QuantifiersEngine * qe, int i ) {
qe->getOutputChannel().lemma( lem );
qe->getOutputChannel().requirePhase( lit, true );
- if( getCegqiFairMode()==CEGQI_FAIR_DT_HEIGHT_PRED ){
+ if( getCegqiFairMode()==CEGQI_FAIR_DT_HEIGHT_PRED || getCegqiFairMode()==CEGQI_FAIR_DT_SIZE_PRED ){
//implies height bounds on each candidate variable
std::vector< Node > lem_c;
for( unsigned j=0; j<d_candidates.size(); j++ ){
- lem_c.push_back( NodeManager::currentNM()->mkNode( DT_HEIGHT_BOUND, d_candidates[j], c ) );
+ if( getCegqiFairMode()==CEGQI_FAIR_DT_HEIGHT_PRED ){
+ lem_c.push_back( NodeManager::currentNM()->mkNode( DT_HEIGHT_BOUND, d_candidates[j], c ) );
+ }else{
+ //lem_c.push_back( NodeManager::currentNM()->mkNode( DT_SIZE_BOUND, d_candidates[j], c ) );
+ }
}
Node hlem = NodeManager::currentNM()->mkNode( OR, lit.negate(), lem_c.size()==1 ? lem_c[0] : NodeManager::currentNM()->mkNode( AND, lem_c ) );
- Trace("cegqi-lemma") << "Cegqi::Lemma : Fairness expansion (dt-height-pred) : " << hlem << std::endl;
+ Trace("cegqi-lemma") << "Cegqi::Lemma : Fairness expansion (pred) : " << hlem << std::endl;
qe->getOutputChannel().lemma( hlem );
}
return lit;
@@ -258,8 +262,28 @@ void CegInstantiation::check( Theory::Effort e, unsigned quant_e ) {
}
}
+void CegInstantiation::preRegisterQuantifier( Node q ) {
+ if( options::sygusDirectEval() ){
+ if( q.getNumChildren()==3 && q[2].getKind()==INST_PATTERN_LIST && q[2][0].getKind()==INST_PATTERN ){
+ //check whether it is an evaluation axiom
+ Node pat = q[2][0][0];
+ if( pat.getKind()==APPLY_UF ){
+ TypeNode tn = pat[0].getType();
+ if( datatypes::DatatypesRewriter::isTypeDatatype(tn) ){
+ const Datatype& dt = ((DatatypeType)(tn).toType()).getDatatype();
+ if( dt.isSygus() ){
+ //take ownership of this quantified formula (will use direct evaluation instead of instantiation)
+ d_quantEngine->setOwner( q, this );
+ d_eval_axioms[q] = true;
+ }
+ }
+ }
+ }
+ }
+}
+
void CegInstantiation::registerQuantifier( Node q ) {
- if( d_quantEngine->getOwner( q )==this ){
+ if( d_quantEngine->getOwner( q )==this && d_eval_axioms.find( q )==d_eval_axioms.end() ){
if( !d_conj->isAssigned() ){
Trace("cegqi") << "Register conjecture : " << q << std::endl;
d_conj->assign( q );
@@ -279,7 +303,7 @@ void CegInstantiation::registerQuantifier( Node q ) {
if( it!=d_uf_measure.end() ){
mc.push_back( NodeManager::currentNM()->mkNode( APPLY_UF, it->second, d_conj->d_candidates[j] ) );
}
- }else if( d_conj->getCegqiFairMode()==CEGQI_FAIR_DT_HEIGHT_PRED ){
+ }else if( d_conj->getCegqiFairMode()==CEGQI_FAIR_DT_HEIGHT_PRED || d_conj->getCegqiFairMode()==CEGQI_FAIR_DT_SIZE_PRED ){
//measure term is a fresh constant
mc.push_back( NodeManager::currentNM()->mkSkolem( "K", NodeManager::currentNM()->integerType() ) );
}
@@ -353,6 +377,11 @@ void CegInstantiation::checkCegConjecture( CegConjecture * conj ) {
Trace("cegqi-engine-debug") << conj->d_candidates[i] << " ";
}
Trace("cegqi-engine-debug") << std::endl;
+ Trace("cegqi-engine-debug") << " * Candidate ce skolems : ";
+ for( unsigned i=0; i<conj->d_ce_sk.size(); i++ ){
+ Trace("cegqi-engine-debug") << conj->d_ce_sk[i] << " ";
+ }
+ Trace("cegqi-engine-debug") << std::endl;
if( conj->getCegqiFairMode()!=CEGQI_FAIR_NONE ){
Trace("cegqi-engine") << " * Current term size : " << conj->d_curr_lit.get() << std::endl;
}
@@ -377,6 +406,19 @@ void CegInstantiation::checkCegConjecture( CegConjecture * conj ) {
}
std::vector< Node > model_values;
if( getModelValues( conj, conj->d_candidates, model_values ) ){
+ if( options::sygusDirectEval() ){
+ std::vector< Node > eager_eval_lem;
+ for( unsigned j=0; j<conj->d_candidates.size(); j++ ){
+ d_quantEngine->getTermDatabaseSygus()->registerModelValue( conj->d_candidates[j], model_values[j], eager_eval_lem );
+ }
+ if( !eager_eval_lem.empty() ){
+ for( unsigned j=0; j<eager_eval_lem.size(); j++ ){
+ Trace("cegqi-lemma") << "Cegqi::Lemma : evaluation : " << eager_eval_lem[j] << std::endl;
+ d_quantEngine->addLemma( eager_eval_lem[j] );
+ }
+ return;
+ }
+ }
//check if we must apply fairness lemmas
if( conj->getCegqiFairMode()==CEGQI_FAIR_UF_DT_SIZE ){
std::vector< Node > lems;
@@ -425,18 +467,28 @@ void CegInstantiation::checkCegConjecture( CegConjecture * conj ) {
Node lem = NodeManager::currentNM()->mkNode( OR, ic );
lem = Rewriter::rewrite( lem );
d_last_inst_si = false;
+ //eagerly unfold applications of evaluation function
+ if( options::sygusDirectEval() ){
+ Trace("cegqi-eager") << "pre-unfold counterexample : " << lem << std::endl;
+ std::map< Node, Node > visited_n;
+ lem = getEagerUnfold( lem, visited_n );
+ }
+
Trace("cegqi-lemma") << "Cegqi::Lemma : counterexample : " << lem << std::endl;
- d_quantEngine->addLemma( lem );
- ++(d_statistics.d_cegqi_lemmas_ce);
- Trace("cegqi-engine") << " ...find counterexample." << std::endl;
- //optimization : eagerly unfold applications of evaluation function
- if( options::sygusEagerUnfold() ){
- std::vector< Node > eager_lems;
- std::map< Node, bool > visited;
- getEagerUnfoldLemmas( eager_lems, lem, visited );
- for( unsigned i=0; i<eager_lems.size(); i++ ){
- Trace("cegqi-lemma") << "Cegqi::Lemma : eager unfold : " << eager_lems[i] << std::endl;
- d_quantEngine->addLemma( eager_lems[i] );
+ if( d_quantEngine->addLemma( lem ) ){
+ ++(d_statistics.d_cegqi_lemmas_ce);
+ Trace("cegqi-engine") << " ...find counterexample." << std::endl;
+ }else{
+ //this may happen if we eagerly unfold, simplify to true
+ if( !options::sygusDirectEval() ){
+ Trace("cegqi-engine") << " ...FAILED to add candidate!" << std::endl;
+ }else{
+ Trace("cegqi-engine-debug") << " ...FAILED to add candidate!" << std::endl;
+ }
+ if( conj->d_refine_count==0 ){
+ //immediately go to refine candidate
+ checkCegConjecture( conj );
+ return;
}
}
}
@@ -602,16 +654,17 @@ void CegInstantiation::getMeasureLemmas( Node n, Node v, std::vector< Node >& le
}
}
-void CegInstantiation::getEagerUnfoldLemmas( std::vector< Node >& lems, Node n, std::map< Node, bool >& visited ) {
- if( visited.find( n )==visited.end() ){
- Trace("cegqi-eager-debug") << "getEagerUnfoldLemmas " << n << std::endl;
- visited[n] = true;
+Node CegInstantiation::getEagerUnfold( Node n, std::map< Node, Node >& visited ) {
+ std::map< Node, Node >::iterator itv = visited.find( n );
+ if( itv==visited.end() ){
+ Trace("cegqi-eager-debug") << "getEagerUnfold " << n << std::endl;
+ Node ret;
if( n.getKind()==APPLY_UF ){
TypeNode tn = n[0].getType();
Trace("cegqi-eager-debug") << "check " << n[0].getType() << std::endl;
if( datatypes::DatatypesRewriter::isTypeDatatype(tn) ){
const Datatype& dt = ((DatatypeType)(tn).toType()).getDatatype();
- if( dt.isSygus() ){
+ if( dt.isSygus() ){
Trace("cegqi-eager") << "Unfold eager : " << n << std::endl;
Node bTerm = d_quantEngine->getTermDatabaseSygus()->sygusToBuiltin( n[0], tn );
Trace("cegqi-eager") << "Built-in term : " << bTerm << std::endl;
@@ -623,12 +676,13 @@ void CegInstantiation::getEagerUnfoldLemmas( std::vector< Node >& lems, Node n,
vars.push_back( var_list[j] );
}
for( unsigned j=1; j<n.getNumChildren(); j++ ){
+ Node nc = getEagerUnfold( n[j], visited );
if( var_list[j-1].getType().isBoolean() ){
//TODO: remove this case when boolean term conversion is eliminated
Node c = NodeManager::currentNM()->mkConst(BitVector(1u, 1u));
- subs.push_back( n[j].eqNode( c ) );
+ subs.push_back( nc.eqNode( c ) );
}else{
- subs.push_back( n[j] );
+ subs.push_back( nc );
}
Assert( subs[j-1].getType()==var_list[j-1].getType() );
}
@@ -636,16 +690,34 @@ void CegInstantiation::getEagerUnfoldLemmas( std::vector< Node >& lems, Node n,
Trace("cegqi-eager") << "Built-in term after subs : " << bTerm << std::endl;
Trace("cegqi-eager-debug") << "Types : " << bTerm.getType() << " " << n.getType() << std::endl;
Assert( n.getType()==bTerm.getType() );
- Node lem = Rewriter::rewrite( NodeManager::currentNM()->mkNode( n.getType().isBoolean() ? IFF : EQUAL, n, bTerm ) );
- lems.push_back( lem );
+ ret = bTerm;
}
}
}
- if( n.getKind()!=FORALL ){
- for( unsigned i=0; i<n.getNumChildren(); i++ ){
- getEagerUnfoldLemmas( lems, n[i], visited );
+ if( ret.isNull() ){
+ if( n.getKind()!=FORALL ){
+ bool childChanged = false;
+ std::vector< Node > children;
+ for( unsigned i=0; i<n.getNumChildren(); i++ ){
+ Node nc = getEagerUnfold( n[i], visited );
+ childChanged = childChanged || n[i]!=nc;
+ children.push_back( nc );
+ }
+ if( childChanged ){
+ if( n.getMetaKind() == kind::metakind::PARAMETERIZED ){
+ children.insert( children.begin(), n.getOperator() );
+ }
+ ret = NodeManager::currentNM()->mkNode( n.getKind(), children );
+ }
+ }
+ if( ret.isNull() ){
+ ret = n;
}
}
+ visited[n] = ret;
+ return ret;
+ }else{
+ return itv->second;
}
}
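
Note on the last hunks: the rewritten getEagerUnfold replaces the old lemma-collecting getEagerUnfoldLemmas with a memoized, bottom-up node rewrite. Results are cached in the visited map, children are unfolded before their parent, and a node is rebuilt only when some child actually changed (re-inserting the operator for parameterized kinds). Below is a minimal, self-contained sketch of that traversal pattern; the Expr struct, the rewriteLeaf hook, and every name in it are hypothetical stand-ins for illustration, not CVC4's Node API.

#include <map>
#include <memory>
#include <string>
#include <vector>

// Hypothetical expression type standing in for CVC4's Node (assumption).
struct Expr {
  std::string op;                         // operator / kind
  std::vector<std::shared_ptr<Expr> > ch; // children
};
typedef std::shared_ptr<Expr> ExprPtr;

// Client hook: return a replacement for n, or null for "no direct rewrite".
// In the patch this role is played by unfolding sygus evaluation functions.
ExprPtr rewriteLeaf(const ExprPtr& /*n*/) { return ExprPtr(); }

// Memoized bottom-up rewrite, mirroring the visited-map / childChanged
// structure of getEagerUnfold in the hunk above.
ExprPtr rewrite(const ExprPtr& n, std::map<ExprPtr, ExprPtr>& visited) {
  std::map<ExprPtr, ExprPtr>::iterator it = visited.find(n);
  if (it != visited.end()) {
    return it->second;                    // already processed: reuse result
  }
  ExprPtr ret = rewriteLeaf(n);           // try to rewrite this node directly
  if (!ret) {
    bool childChanged = false;
    std::vector<ExprPtr> children;
    for (size_t i = 0; i < n->ch.size(); i++) {
      ExprPtr nc = rewrite(n->ch[i], visited);       // unfold children first
      childChanged = childChanged || nc != n->ch[i];
      children.push_back(nc);
    }
    if (childChanged) {
      ExprPtr copy(new Expr());           // rebuild only if something changed
      copy->op = n->op;
      copy->ch = children;
      ret = copy;
    } else {
      ret = n;                            // unchanged: keep the original node
    }
  }
  visited[n] = ret;                       // memoize before returning
  return ret;
}

Returning a possibly rewritten node instead of collecting side lemmas is what lets the patch substitute the unfolded term directly into the counterexample lemma before d_quantEngine->addLemma( lem ) is called, which in turn motivates the new handling of the case where the lemma simplifies to true and addLemma fails.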