diff --git a/src/main/scala/leon/purescala/TypeTreeOps.scala b/src/main/scala/leon/purescala/TypeTreeOps.scala
index d7533097e4da156889b603e4f69b9e47f6969443..46f8be837713ae0ece9bbf37649168671563654c 100644
--- a/src/main/scala/leon/purescala/TypeTreeOps.scala
+++ b/src/main/scala/leon/purescala/TypeTreeOps.scala
@@ -12,6 +12,10 @@ import Extractors._
 import Constructors._
 
 object TypeTreeOps {
+  def typeDepth(t: TypeTree): Int = t match {
+    case NAryType(tps, builder) => 1+tps.foldLeft(0) { case (d, t) => d max typeDepth(t) }
+  }
+
   def typeParamsOf(t: TypeTree): Set[TypeParameter] = t match {
     case tp: TypeParameter => Set(tp)
     case _ =>
diff --git a/src/main/scala/leon/synthesis/rules/Cegis.scala b/src/main/scala/leon/synthesis/rules/Cegis.scala
index 3f2c755485b1e360379043df96aaf7db6f687250..3a744c42c0b04c7a9f899748195c9ee5e84c6843 100644
--- a/src/main/scala/leon/synthesis/rules/Cegis.scala
+++ b/src/main/scala/leon/synthesis/rules/Cegis.scala
@@ -14,8 +14,9 @@ import utils._
 
 case object CEGIS extends CEGISLike[TypeTree]("CEGIS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
+    import ExpressionGrammars._
     CegisParams(
-      grammar = ExpressionGrammars.default(sctx, p),
+      grammar = depthBound(default(sctx, p), 2),
       rootLabel = {(tpe: TypeTree) => tpe }
     )
   }
diff --git a/src/main/scala/leon/synthesis/rules/CegisLike.scala b/src/main/scala/leon/synthesis/rules/CegisLike.scala
index 622ab658d15b0dbb90cedf2de8c3081b89d5cfc5..1e6cc3a0bcfb89405b3eb86639604959bac68d14 100644
--- a/src/main/scala/leon/synthesis/rules/CegisLike.scala
+++ b/src/main/scala/leon/synthesis/rules/CegisLike.scala
@@ -4,7 +4,7 @@ package leon
 package synthesis
 package rules
 
-import leon.utils.StreamUtils
+import leon.utils.SeqUtils
 
 import solvers._
 import solvers.z3._
@@ -48,6 +48,9 @@ abstract class CEGISLike[T <% Typed](name: String) extends Rule(name) {
     val exSolverTo  = 2000L
     val cexSolverTo = 2000L
 
+    // Track up to 100'000 non-deterministic programs, or give up
+    val nProgramsLimit = 100000;
+
     val sctx = hctx.sctx
     val ctx  = sctx.context
 
@@ -124,29 +127,58 @@ abstract class CEGISLike[T <% Typed](name: String) extends Rule(name) {
      */
     def isBActive(b: Identifier) = !closedBs.contains(b)
 
+
+    def allProgramsCount(): Int = {
+      var nAltsCache = Map[Identifier, Int]()
+
+      def nAltsFor(c: Identifier): Int = {
+        if (!(nAltsCache contains c)) {
+          val subs = for ((b, _, subcs) <- cTree(c) if isBActive(b)) yield {
+            if (subcs.isEmpty) {
+              1
+            } else {
+              subcs.toSeq.map(nAltsFor).product
+            }
+          }
+
+          nAltsCache += c -> subs.sum
+        }
+        nAltsCache(c)
+      }
+
+      p.xs.map(nAltsFor).product
+    }
+
     /**
      * Returns all possible assignments to Bs in order to enumerate all possible programs
      */
     def allPrograms(): Traversable[Set[Identifier]] = {
-      import StreamUtils._
+      import SeqUtils._
 
-      def allProgramsFor(cs: Set[Identifier]): Stream[Set[Identifier]] = {
-        val streams = for (c <- cs.toSeq) yield {
-          val subs = for ((b, _, subcs) <- cTree(c) if isBActive(b)) yield {
+      if (allProgramsCount() > nProgramsLimit) {
+        return Seq()
+      }
 
-            if (subcs.isEmpty) {
-              Seq(Set(b))
-            } else {
-              for (p <- allProgramsFor(subcs)) yield {
-                p + b
+      var cache = Map[Identifier, Seq[Set[Identifier]]]()
+
+      def allProgramsFor(cs: Set[Identifier]): Seq[Set[Identifier]] = {
+        val seqs = for (c <- cs.toSeq) yield {
+          if (!(cache contains c)) {
+            val subs = for ((b, _, subcs) <- cTree(c) if isBActive(b)) yield {
+              if (subcs.isEmpty) {
+                Seq(Set(b))
+              } else {
+                for (p <- allProgramsFor(subcs)) yield {
+                  p + b
+                }
               }
             }
+            cache += c -> subs.flatten
           }
-
-          subs.flatten.toStream
+          cache(c)
         }
 
-        StreamUtils.cartesianProduct(streams).map { ls =>
+        SeqUtils.cartesianProduct(seqs).map { ls =>
          ls.foldLeft(Set[Identifier]())(_ ++ _)
        }
      }
diff --git a/src/main/scala/leon/synthesis/utils/ExpressionGrammar.scala b/src/main/scala/leon/synthesis/utils/ExpressionGrammar.scala
index 8b3ff4d55b5e217deed7251c34c5f0131846cc8d..e9d79114e9da47b37d1ce0edd0f6263c7ae072f7 100644
--- a/src/main/scala/leon/synthesis/utils/ExpressionGrammar.scala
+++ b/src/main/scala/leon/synthesis/utils/ExpressionGrammar.scala
@@ -34,6 +34,13 @@ abstract class ExpressionGrammar[T <% Typed] {
 
   def computeProductions(t: T): Seq[Gen]
 
+  def filter(f: Gen => Boolean) = {
+    val that = this;
+    new ExpressionGrammar[T] {
+      def computeProductions(t: T) = that.computeProductions(t).filter(f)
+    }
+  }
+
   final def ||(that: ExpressionGrammar[T]): ExpressionGrammar[T] = {
     ExpressionGrammars.Or(Seq(this, that))
   }
@@ -186,6 +193,7 @@ object ExpressionGrammars {
 
     val normalGrammar = BoundedGrammar(EmbeddedGrammar(
         BaseGrammar ||
+        OneOf(terminals.toSeq) ||
         FunctionCalls(sctx.program, sctx.functionContext, p.as.map(_.getType), excludeFCalls) ||
         SafeRecCalls(sctx.program, p.ws, p.pc),
       { (t: TypeTree) => Label(t, "B", None)},
@@ -430,4 +438,8 @@ object ExpressionGrammars {
   def default(sctx: SynthesisContext, p: Problem): ExpressionGrammar[TypeTree] = {
     default(sctx.program, p.as.map(_.toVariable), sctx.functionContext, sctx.settings.functionsToIgnore, p.ws, p.pc)
   }
+
+  def depthBound[T <% Typed](g: ExpressionGrammar[T], b: Int) = {
+    g.filter(g => g.subTrees.forall(t => typeDepth(t.getType) <= b))
+  }
 }
diff --git a/src/main/scala/leon/utils/SeqUtils.scala b/src/main/scala/leon/utils/SeqUtils.scala
new file mode 100644
index 0000000000000000000000000000000000000000..c3b17b82b15277366acd2ff62a11e0895350af09
--- /dev/null
+++ b/src/main/scala/leon/utils/SeqUtils.scala
@@ -0,0 +1,33 @@
+/* Copyright 2009-2014 EPFL, Lausanne */
+package leon.utils
+
+import scala.collection.mutable.ArrayBuffer
+
+object SeqUtils {
+  type Tuple[T] = Seq[T]
+
+
+  def cartesianProduct[T](seqs: Tuple[Seq[T]]): Seq[Tuple[T]] = {
+    val sizes = seqs.map(_.size)
+    val max = sizes.product
+
+    val result = new ArrayBuffer[Tuple[T]](max)
+    var i = 0;
+
+    while (i < max) {
+      var c = i;
+      var sel = -1;
+      val elem = for (s <- sizes) yield {
+        val index = c % s;
+        c = c / s
+        sel += 1
+        seqs(sel)(index)
+      }
+
+      i+=1
+      result += elem
+    }
+
+    result
+  }
+}
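
A minimal standalone sketch of the index-decoding idea behind SeqUtils.cartesianProduct above: every index below the product of the input sizes is read as a mixed-radix number, one digit per input sequence, and each digit selects one element. The object name CartesianDemo and the toy inputs are illustrative only, not part of the patch.

// Illustrative sketch of the mixed-radix decoding used by SeqUtils.cartesianProduct;
// same idea, without the ArrayBuffer plumbing. CartesianDemo is a hypothetical name.
object CartesianDemo extends App {
  def cartesianProduct[T](seqs: Seq[Seq[T]]): Seq[Seq[T]] = {
    val sizes = seqs.map(_.size)
    // Every index below the product of the sizes encodes exactly one combination.
    (0 until sizes.product).map { i =>
      var c = i
      seqs.map { s =>
        val digit = c % s.size  // which element of s this combination picks
        c = c / s.size          // move on to the next "digit"
        s(digit)
      }
    }
  }

  // 2 * 2 = 4 combinations: List(1, a), List(2, a), List(1, b), List(2, b)
  println(cartesianProduct(Seq(Seq(1, 2), Seq("a", "b"))))
}

Because each combination corresponds to exactly one index, the total number of tuples is known before anything is materialised, which is the property the allProgramsCount() check against nProgramsLimit in CegisLike exploits before enumerating programs.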