diff --git a/src/main/scala/inox/Program.scala b/src/main/scala/inox/Program.scala index 15fc7e6d7e5b072cc989ee418fa79e3920ec0386..549c4b22baaa3ecc19064aaf3755c13a1dad0682 100644 --- a/src/main/scala/inox/Program.scala +++ b/src/main/scala/inox/Program.scala @@ -17,4 +17,11 @@ trait Program { val symbols = Program.this.symbols.transform(t) val ctx = Program.this.ctx } + + def extend(functions: Seq[FunDef] = Seq.empty, classes: Seq[ClassDef] = Seq.empty): + Program { val trees: Program.this.trees.type } = new Program { + val trees: Program.this.trees.type = Program.this.trees + val symbols = Program.this.symbols.extend(functions, classes) + val ctx = Program.this.ctx + } } diff --git a/src/main/scala/inox/ast/Definitions.scala b/src/main/scala/inox/ast/Definitions.scala index 5de66e6fb40fe2a597b2426996897f60fa537b36..62513626618569de8e53d47e9c47372daa91c389 100644 --- a/src/main/scala/inox/ast/Definitions.scala +++ b/src/main/scala/inox/ast/Definitions.scala @@ -128,6 +128,7 @@ trait Definitions { self: Trees => } def transform(t: TreeTransformer): Symbols + def extend(functions: Seq[FunDef] = Seq.empty, classes: Seq[ClassDef] = Seq.empty): Symbols } case class TypeParameterDef(tp: TypeParameter) extends Definition { diff --git a/src/main/scala/inox/datagen/DataGenerator.scala b/src/main/scala/inox/datagen/DataGenerator.scala index ea6aac313f357fd6c04577d8a00261e4dfa0fe5c..1b5583fdfe2d65b5e6b3dd9dbb16bc4c6947fbc2 100644 --- a/src/main/scala/inox/datagen/DataGenerator.scala +++ b/src/main/scala/inox/datagen/DataGenerator.scala @@ -1,18 +1,21 @@ /* Copyright 2009-2016 EPFL, Lausanne */ -package leon +package inox package datagen -import purescala.Expressions._ -import purescala.Common._ import utils._ import java.util.concurrent.atomic.AtomicBoolean +object DebugSectionDataGen extends DebugSection("datagen") + trait DataGenerator extends Interruptible { + val program: Program + import program.trees._ + implicit val debugSection = DebugSectionDataGen - def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] + def generateFor(ins: Seq[Variable], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] protected val interrupted: AtomicBoolean = new AtomicBoolean(false) diff --git a/src/main/scala/inox/datagen/GrammarDataGen.scala b/src/main/scala/inox/datagen/GrammarDataGen.scala index ffe7b8b4741d7323a99bdfc2741722fb356b5394..49341f8578bfb75829e5fb154eeb9c19104f137c 100644 --- a/src/main/scala/inox/datagen/GrammarDataGen.scala +++ b/src/main/scala/inox/datagen/GrammarDataGen.scala @@ -1,26 +1,21 @@ /* Copyright 2009-2016 EPFL, Lausanne */ -package leon +package inox package datagen -import purescala.Expressions._ -import purescala.Types._ -import purescala.Common._ -import purescala.Constructors._ -import purescala.Extractors._ -import purescala.ExprOps._ import evaluators._ import bonsai.enumerators._ -import grammars._ import utils.UniqueCounter import utils.SeqUtils.cartesianProduct +import grammars._ /** Utility functions to generate values of a given type. * In fact, it could be used to generate *terms* of a given type, * e.g. by passing trees representing variables for the "bounds". 
*/ -class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar = ValueGrammar) extends DataGenerator { - implicit val ctx = evaluator.context +trait GrammarDataGen extends DataGenerator { + val evaluator: DeterministicEvaluator { val program: GrammarDataGen.program.type } + val grammar: ExpressionGrammar = ValueGrammar // Assume e contains generic values with index 0. // Return a series of expressions with all normalized combinations of generic values. @@ -50,15 +45,14 @@ class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar = ValueGra } substitutions map (replace(_, withUniqueCounters)) - } - def generate(tpe: TypeTree): Iterator[Expr] = { + def generate(tpe: Type): Iterator[Expr] = { val enum = new MemoizedEnumerator[Label, Expr, ProductionRule[Label, Expr]](grammar.getProductions) enum.iterator(Label(tpe)).flatMap(expandGenerics).takeWhile(_ => !interrupted.get) } - def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] = { + def generateFor(ins: Seq[Variable], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] = { def filterCond(vs: Seq[Expr]): Boolean = satisfying match { case BooleanLiteral(true) => @@ -87,7 +81,7 @@ class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar = ValueGra } } - def generateMapping(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int) = { + def generateMapping(ins: Seq[Variable], satisfying: Expr, maxValid: Int, maxEnumerated: Int) = { generateFor(ins, satisfying, maxValid, maxEnumerated) map (ins zip _) } diff --git a/src/main/scala/inox/utils/ModelEnumerator.scala b/src/main/scala/inox/datagen/ModelEnumerator.scala similarity index 90% rename from src/main/scala/inox/utils/ModelEnumerator.scala rename to src/main/scala/inox/datagen/ModelEnumerator.scala index f1cf05af188cc060cca568ec8cadb23dbec0a069..3d433936dcb8ca51af970180a58f1a7d74901b42 100644 --- a/src/main/scala/inox/utils/ModelEnumerator.scala +++ b/src/main/scala/inox/datagen/ModelEnumerator.scala @@ -1,25 +1,22 @@ /* Copyright 2009-2016 EPFL, Lausanne */ -package leon -package utils - -import purescala.Definitions._ -import purescala.Common._ -import purescala.Expressions._ -import purescala.Constructors._ -import purescala.ExprOps._ -import purescala.Types._ +package inox +package datagen + import evaluators._ import solvers._ -class ModelEnumerator(sf: SolverFactory[Solver]) { - val program = sf.program +class ModelEnumerator(factory: SolverFactory[Solver]) { + val program: factory.program.type = factory.program + import program._ import program.trees._ + import program.symbols._ - protected val evaluator = new DefaultEvaluator(ctx, pgm) + protected val evaluator: RecursiveEvaluator { val program: ModelEnumerator.this.program.type } = + RecursiveEvaluator(program) - def enumSimple(ids: Seq[Identifier], satisfying: Expr): FreeableIterator[Map[Identifier, Expr]] = { - enumVarying0(ids, satisfying, None, -1) + def enumSimple(vs: Seq[Variable], satisfying: Expr): FreeableIterator[Map[Variable, Expr]] = { + enumVarying0(vs, satisfying, None, -1) } /** @@ -34,7 +31,7 @@ class ModelEnumerator(sf: SolverFactory[Solver]) { } private[this] def enumVarying0(ids: Seq[Identifier], satisfying: Expr, measure: Option[Expr], nPerMeasure: Int = 1): FreeableIterator[Map[Identifier, Expr]] = { - val s = sf.getNewSolver + val s = factory.getNewSolver s.assertCnstr(satisfying) @@ -82,7 +79,7 @@ class ModelEnumerator(sf: SolverFactory[Solver]) { } def free() { - sf.reclaim(s) + 
factory.reclaim(s) } } } diff --git a/src/main/scala/inox/datagen/NaiveDataGen.scala b/src/main/scala/inox/datagen/NaiveDataGen.scala deleted file mode 100644 index e40359e686e76030fb29171fe18c6eab82482898..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/NaiveDataGen.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package leon -package datagen - -import purescala.Common._ -import purescala.Expressions._ -import purescala.Types._ -import purescala.Definitions._ -import purescala.Quantification._ -import utils.StreamUtils._ - -import evaluators._ - -import scala.collection.mutable.{Map=>MutableMap} - -/** Utility functions to generate values of a given type. - * In fact, it could be used to generate *terms* of a given type, - * e.g. by passing trees representing variables for the "bounds". */ -@deprecated("Stream-based datagen is deprecated, use GrammarDataGen with ValueGrammar instead", "3.0") -class NaiveDataGen(ctx: LeonContext, p: Program, evaluator: Evaluator, _bounds : Option[Map[TypeTree,Seq[Expr]]] = None) extends DataGenerator { - - val bounds = _bounds.getOrElse(Map()) - - private def tpStream(tp: TypeParameter, i: Int = 1): Stream[Expr] = Stream.cons(GenericValue(tp, i), tpStream(tp, i+1)) - - private val streamCache : MutableMap[TypeTree,Stream[Expr]] = MutableMap.empty - def generate(tpe : TypeTree) : Stream[Expr] = { - try { - streamCache.getOrElse(tpe, { - val s = generate0(tpe) - streamCache(tpe) = s - s - }) - } catch { - case so: StackOverflowError => - Stream.empty - } - } - - private def generate0(tpe: TypeTree): Stream[Expr] = bounds.get(tpe).map(_.toStream).getOrElse { - tpe match { - case BooleanType => - BooleanLiteral(true) #:: BooleanLiteral(false) #:: Stream.empty - - case Int32Type => - IntLiteral(0) #:: IntLiteral(1) #:: IntLiteral(2) #:: IntLiteral(-1) #:: Stream.empty - - case tp: TypeParameter => - tpStream(tp) - - case TupleType(bses) => - cartesianProduct(bses.map(generate)).map(Tuple) - - case act : AbstractClassType => - // We prioritize base cases among the children. - // Otherwise we run the risk of infinite recursion when - // generating lists. - val ccChildren = act.knownCCDescendants - - val (leafs,conss) = ccChildren.partition(_.fields.isEmpty) - - // FIXME: Will not work for mutually recursive types - val sortedConss = conss sortBy { _.fields.count{ _.getType.isInstanceOf[ClassType]}} - - // The stream for leafs... - val leafsStream = leafs.toStream.flatMap(generate) - - // ...to which we append the streams for constructors. 
- leafsStream.append(interleave(sortedConss.map(generate))) - - case cct : CaseClassType => - if(cct.fields.isEmpty) { - Stream.cons(CaseClass(cct, Nil), Stream.empty) - } else { - val fieldTypes = cct.fieldsTypes - cartesianProduct(fieldTypes.map(generate)).map(CaseClass(cct, _)) - } - - case _ => Stream.empty - } - } - - def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid : Int, maxEnumerated : Int) : Iterator[Seq[Expr]] = { - val filtering = if (satisfying == BooleanLiteral(true)) { - { (e: Seq[Expr]) => true } - } else { - evaluator.compile(satisfying, ins).map { evalFun => - val sat = EvaluationResults.Successful(BooleanLiteral(true)) - - { (e: Seq[Expr]) => evalFun(new solvers.Model((ins zip e).toMap)) == sat } - } getOrElse { - { (e: Seq[Expr]) => false } - } - } - - cartesianProduct(ins.map(id => generate(id.getType))) - .take(maxEnumerated) - .takeWhile(s => !interrupted.get) - .filter{filtering} - .take(maxValid) - .iterator - } -} diff --git a/src/main/scala/inox/datagen/SolverDataGen.scala b/src/main/scala/inox/datagen/SolverDataGen.scala index 37dcfba061b7ef9fd71bb2d31e57754950eed5d5..f3eff883b9194ee0183a75f115474ae46feba96a 100644 --- a/src/main/scala/inox/datagen/SolverDataGen.scala +++ b/src/main/scala/inox/datagen/SolverDataGen.scala @@ -1,78 +1,79 @@ /* Copyright 2009-2016 EPFL, Lausanne */ -package leon +package inox package datagen -import purescala.Expressions._ -import purescala.Types._ -import purescala.Definitions._ -import purescala.Common._ -import purescala.Constructors._ import solvers._ import utils._ -class SolverDataGen(ctx: LeonContext, pgm: Program, sf: SolverFactory[Solver]) extends DataGenerator { - implicit val ctx0 = ctx +trait SolverDataGen extends DataGenerator { + import program._ + import program.trees._ + import program.symbols._ - def generate(tpe: TypeTree): FreeableIterator[Expr] = { - generateFor(Seq(FreshIdentifier("tmp", tpe)), BooleanLiteral(true), 20, 20).map(_.head).takeWhile(_ => !interrupted.get) + def factory(p: Program): SolverFactory { val program: p.type } + + def generate(tpe: Type): FreeableIterator[Expr] = { + generateFor(Seq(Variable(FreshIdentifier("tmp"), tpe)), + BooleanLiteral(true), 20, 20).map(_.head).takeWhile(_ => !interrupted.get) } - def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int): FreeableIterator[Seq[Expr]] = { + def generateFor(ins: Seq[Variable], satisfying: Expr, maxValid: Int, maxEnumerated: Int): FreeableIterator[Seq[Expr]] = { if (ins.isEmpty) { FreeableIterator.empty } else { - var fds = Map[ClassDef, FunDef]() - - def sizeFor(of: Expr): Expr = of.getType match { - case AbstractClassType(acd, tps) => - val fd = fds.getOrElse(acd, { - val actDef = AbstractClassType(acd, acd.tparams.map(_.tp)) - - val e = FreshIdentifier("e", actDef) - - val fd: FunDef = new FunDef(FreshIdentifier("sizeOf", Untyped), acd.tparams, Seq(ValDef(e)), IntegerType) - - fds += acd -> fd - - - fd.body = Some(MatchExpr(e.toVariable, - actDef.knownCCDescendants.map { cct => - val fields = cct.fieldsTypes.map ( t => FreshIdentifier("field", t)) + var cdToId: Map[ClassDef, Identifier] = Map.empty + var fds: Seq[FunDef] = Seq.empty + + def sizeFor(of: Expr): Expr = bestRealType(of.getType) match { + case ct: ClassType => + val tcd = ct.tcd + val root = tcd.cd.root + val id = cdToId.getOrElse(root, { + val id = FreshIdentifier("sizeOf", true) + val tparams = root.tparams.map(_.freshen) + cdToId += root -> id + + def typed(ccd: CaseClassDef) = TypedCaseClassDef(ccd, tparams.map(_.tp)) + def 
sizeOfCaseClass(ccd: CaseClassDef, expr: Expr): Expr = + typed(ccd).fields.foldLeft(IntegerLiteral(1): Expr) { (i, f) => + plus(i, sizeFor(expr.getField(f.id))) + } - val rhs = fields.foldLeft(InfiniteIntegerLiteral(1): Expr) { (i, f) => - plus(i, sizeFor(f.toVariable)) + import dsl._ + val x = Variable(FreshIdentifier("x", true), tcd.root.toType) + fds +:= new FunDef(id, tparams, Seq(x.toVal), IntegerType, Some(root match { + case acd: AbstractClassDef => + val (child +: rest) = acd.descendants + def sizeOf(ccd: CaseClassDef) = sizeOfCaseClass(ccd, x.asInstOf(typed(ccd).toType)) + rest.foldLeft(sizeOf(child)) { (elze, ccd) => + if_ (x.isInstOf(typed(ccd).toType)) { sizeOf(ccd) } else_ { elze } } - MatchCase(CaseClassPattern(None, cct, fields.map(f => WildcardPattern(Some(f)))), None, rhs) - } - )) - - fd + case ccd: CaseClassDef => + sizeOfCaseClass(ccd, x) + }), Set.empty) }) - FunctionInvocation(fd.typed(tps), Seq(of)) + FunctionInvocation(id, ct.tps, Seq(of)) case tt @ TupleType(tps) => val exprs = for ((t,i) <- tps.zipWithIndex) yield { sizeFor(tupleSelect(of, i+1, tps.size)) } - exprs.foldLeft(InfiniteIntegerLiteral(1): Expr)(plus) + exprs.foldLeft(IntegerLiteral(1): Expr)(plus) case _ => - InfiniteIntegerLiteral(1) + IntegerLiteral(1) } - val sizeOf = sizeFor(tupleWrap(ins.map(_.toVariable))) + val sizeOf = sizeFor(tupleWrap(ins)) // We need to synthesize a size function for ins' types. - val pgm1 = Program(pgm.units :+ UnitDef(FreshIdentifier("new"), List( - ModuleDef(FreshIdentifier("new"), fds.values.toSeq, false) - ))) - - val modelEnum = new ModelEnumerator(ctx, pgm1, sf) + val pgm1 = program.extend(functions = fds) + val modelEnum = new ModelEnumerator(factory(pgm1)) val enum = modelEnum.enumVarying(ins, satisfying, sizeOf, 5) diff --git a/src/main/scala/inox/datagen/VanuatooDataGen.scala b/src/main/scala/inox/datagen/VanuatooDataGen.scala deleted file mode 100644 index ccba55af919489697d9d9e6a3aa59768d29dbab2..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/VanuatooDataGen.scala +++ /dev/null @@ -1,409 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package leon -package datagen - -import purescala.Common._ -import purescala.Definitions._ -import purescala.ExprOps._ -import purescala.Expressions._ -import purescala.Types._ -import purescala.Extractors._ -import purescala.Constructors._ - -import codegen.CompilationUnit -import codegen.CodeGenParams -import codegen.runtime.StdMonitor -import vanuatoo.{Pattern => VPattern, _} - -import evaluators._ - -class VanuatooDataGen(ctx: LeonContext, p: Program, bank: EvaluationBank = new EvaluationBank) extends DataGenerator { - val unit = new CompilationUnit(ctx, p, bank, CodeGenParams.default.copy(doInstrument = true)) - - val ints = (for (i <- Set(0, 1, 2, 3)) yield { - i -> Constructor[Expr, TypeTree](List(), Int32Type, s => IntLiteral(i), ""+i) - }).toMap - - val bigInts = (for (i <- Set(0, 1, 2, 3)) yield { - i -> Constructor[Expr, TypeTree](List(), IntegerType, s => InfiniteIntegerLiteral(i), ""+i) - }).toMap - - val booleans = (for (b <- Set(true, false)) yield { - b -> Constructor[Expr, TypeTree](List(), BooleanType, s => BooleanLiteral(b), ""+b) - }).toMap - - val chars = (for (c <- Set('a', 'b', 'c', 'd')) yield { - c -> Constructor[Expr, TypeTree](List(), CharType, s => CharLiteral(c), ""+c) - }).toMap - - val rationals = (for (n <- Set(0, 1, 2, 3); d <- Set(1,2,3,4)) yield { - (n, d) -> Constructor[Expr, TypeTree](List(), RealType, s => FractionalLiteral(n, d), "" + n + "/" + d) - }).toMap - 
- val strings = (for (b <- Set("", "a", "foo", "bar")) yield { - b -> Constructor[Expr, TypeTree](List(), StringType, s => StringLiteral(b), b) - }).toMap - - - def intConstructor(i: Int) = ints(i) - - def bigIntConstructor(i: Int) = bigInts(i) - - def boolConstructor(b: Boolean) = booleans(b) - - def charConstructor(c: Char) = chars(c) - - def rationalConstructor(n: Int, d: Int) = rationals(n -> d) - - def stringConstructor(s: String) = strings(s) - - lazy val stubValues = ints.values ++ bigInts.values ++ booleans.values ++ chars.values ++ rationals.values ++ strings.values - - def cPattern(c: Constructor[Expr, TypeTree], args: Seq[VPattern[Expr, TypeTree]]) = { - ConstructorPattern[Expr, TypeTree](c, args) - } - - private var constructors = Map[TypeTree, List[Constructor[Expr, TypeTree]]]() - - private def getConstructorFor(t: CaseClassType, act: AbstractClassType): Constructor[Expr, TypeTree] = { - // We "up-cast" the returnType of the specific caseclass generator to match its superclass - getConstructors(t).head.copy(retType = act) - } - - private def getConstructors(t: TypeTree): List[Constructor[Expr, TypeTree]] = t match { - case UnitType => - constructors.getOrElse(t, { - val cs = List(Constructor[Expr, TypeTree](List(), t, s => UnitLiteral(), "()")) - constructors += t -> cs - cs - }) - - case at @ ArrayType(sub) => - constructors.getOrElse(at, { - val cs = for (size <- List(0, 1, 2, 5)) yield { - Constructor[Expr, TypeTree]( - (1 to size).map(i => sub).toList, - at, - s => finiteArray(s, None, sub), - at.asString(ctx)+"@"+size - ) - } - constructors += at -> cs - cs - }) - - case st @ SetType(sub) => - constructors.getOrElse(st, { - val cs = for (size <- List(0, 1, 2, 5)) yield { - Constructor[Expr, TypeTree]( - (1 to size).map(i => sub).toList, - st, - s => FiniteSet(s.toSet, sub), - st.asString(ctx)+"@"+size - ) - } - constructors += st -> cs - cs - }) - - case bt @ BagType(sub) => - constructors.getOrElse(bt, { - val cs = for (size <- List(0, 1, 2, 5)) yield { - val subs = (1 to size).flatMap(i => List(sub, IntegerType)).toList - Constructor[Expr, TypeTree](subs, bt, s => FiniteBag(s.grouped(2).map { - case Seq(k, i @ InfiniteIntegerLiteral(v)) => - k -> (if (v > 0) i else InfiniteIntegerLiteral(-v + 1)) - }.toMap, sub), bt.asString(ctx)+"@"+size) - } - constructors += bt -> cs - cs - }) - - case tt @ TupleType(parts) => - constructors.getOrElse(tt, { - val cs = List(Constructor[Expr, TypeTree](parts, tt, s => tupleWrap(s), tt.asString(ctx))) - constructors += tt -> cs - cs - }) - - case mt @ MapType(from, to) => - constructors.getOrElse(mt, { - val cs = for (size <- List(0, 1, 2, 5)) yield { - val subs = (1 to size).flatMap(i => List(from, to)).toList - Constructor[Expr, TypeTree](subs, mt, s => FiniteMap(s.grouped(2).map(t => (t(0), t(1))).toMap, from, to), mt.asString(ctx)+"@"+size) - } - constructors += mt -> cs - cs - }) - - case ft @ FunctionType(from, to) => - constructors.getOrElse(ft, { - val cs = for (size <- List(1, 2, 3, 5)) yield { - val subs = (1 to size).flatMap(_ => from :+ to).toList - Constructor[Expr, TypeTree](subs, ft, { s => - val grouped = s.grouped(from.size + 1).toSeq - val mapping = grouped.init.map { case args :+ res => (args -> res) } - FiniteLambda(mapping, grouped.last.last, ft) - }, ft.asString(ctx) + "@" + size) - } - constructors += ft -> cs - cs - }) - - case tp: TypeParameter => - constructors.getOrElse(tp, { - val cs = for (i <- List(1, 2)) yield { - Constructor[Expr, TypeTree](List(), tp, s => GenericValue(tp, i), tp.id+"#"+i) - } - 
constructors += tp -> cs - cs - }) - - case act: AbstractClassType => - constructors.getOrElse(act, { - val cs = act.knownCCDescendants.map { - cct => getConstructorFor(cct, act) - }.toList - - constructors += act -> cs - - cs - }) - - case cct: CaseClassType => - constructors.getOrElse(cct, { - val c = List(Constructor[Expr, TypeTree](cct.fieldsTypes, cct, s => CaseClass(cct, s), cct.id.name)) - constructors += cct -> c - c - }) - - case _ => - ctx.reporter.error("Unknown type to generate constructor for: "+t) - Nil - } - - // Returns the pattern and whether it is fully precise - private def valueToPattern(v: AnyRef, expType: TypeTree): (VPattern[Expr, TypeTree], Boolean) = (v, expType) match { - case (i: Integer, Int32Type) => - (cPattern(intConstructor(i), List()), true) - - case (i: Integer, IntegerType) => - (cPattern(bigIntConstructor(i), List()), true) - - case (b: java.lang.Boolean, BooleanType) => - (cPattern(boolConstructor(b), List()), true) - - case (c: java.lang.Character, CharType) => - (cPattern(charConstructor(c), List()), true) - - case (b: java.lang.String, StringType) => - (cPattern(stringConstructor(b), List()), true) - - case (cc: codegen.runtime.CaseClass, ct: ClassType) => - val r = cc.__getRead() - - unit.jvmClassToLeonClass(cc.getClass.getName) match { - case Some(ccd: CaseClassDef) => - val cct = CaseClassType(ccd, ct.tps) - val c = ct match { - case act : AbstractClassType => - getConstructorFor(cct, act) - case cct : CaseClassType => - getConstructors(cct).head - } - - val fields = cc.productElements() - - val elems = for (i <- 0 until fields.length) yield { - if (((r >> i) & 1) == 1) { - // has been read - valueToPattern(fields(i), cct.fieldsTypes(i)) - } else { - (AnyPattern[Expr, TypeTree](), false) - } - } - - (ConstructorPattern(c, elems.map(_._1)), elems.forall(_._2)) - - case _ => - ctx.reporter.error("Could not retrieve type for :"+cc.getClass.getName) - (AnyPattern[Expr, TypeTree](), false) - } - - case (t: codegen.runtime.Tuple, tpe) => - val r = t.__getRead() - - val parts = unwrapTupleType(tpe, t.getArity) - - val c = getConstructors(tpe).head - - val elems = for (i <- 0 until t.getArity) yield { - if (((r >> i) & 1) == 1) { - // has been read - valueToPattern(t.get(i), parts(i)) - } else { - (AnyPattern[Expr, TypeTree](), false) - } - } - - (ConstructorPattern(c, elems.map(_._1)), elems.forall(_._2)) - - case (gv: GenericValue, t: TypeParameter) => - (cPattern(getConstructors(t)(gv.id-1), List()), true) - - case (v, t) => - ctx.reporter.debug("Unsupported value, can't paternify : "+v+" ("+v.getClass+") : "+t) - (AnyPattern[Expr, TypeTree](), false) - } - - type InstrumentedResult = (EvaluationResults.Result[Expr], Option[vanuatoo.Pattern[Expr, TypeTree]]) - - def compile(expression: Expr, argorder: Seq[Identifier]) : Option[Expr=>InstrumentedResult] = { - import leon.codegen.runtime.LeonCodeGenRuntimeException - import leon.codegen.runtime.LeonCodeGenEvaluationException - - try { - val ttype = tupleTypeWrap(argorder.map(_.getType)) - val tid = FreshIdentifier("tup", ttype) - - val map = argorder.zipWithIndex.map{ case (id, i) => id -> tupleSelect(Variable(tid), i + 1, argorder.size) }.toMap - - val newExpr = replaceFromIDs(map, expression) - - val ce = unit.compileExpression(newExpr, Seq(tid))(ctx) - - Some((args : Expr) => { - try { - val monitor = new StdMonitor(unit, unit.params.maxFunctionInvocations, Map()) - - val jvmArgs = ce.argsToJVM(Seq(args), monitor) - - val result = ce.evalFromJVM(jvmArgs, monitor) - - // jvmArgs is getting updated by 
evaluating - val pattern = valueToPattern(jvmArgs.head, ttype) - - (EvaluationResults.Successful(result), if (!pattern._2) Some(pattern._1) else None) - } catch { - case e : StackOverflowError => - (EvaluationResults.RuntimeError(e.getMessage), None) - - case e : ClassCastException => - (EvaluationResults.RuntimeError(e.getMessage), None) - - case e : ArithmeticException => - (EvaluationResults.RuntimeError(e.getMessage), None) - - case e : ArrayIndexOutOfBoundsException => - (EvaluationResults.RuntimeError(e.getMessage), None) - - case e : LeonCodeGenRuntimeException => - (EvaluationResults.RuntimeError(e.getMessage), None) - - case e : LeonCodeGenEvaluationException => - (EvaluationResults.EvaluatorError(e.getMessage), None) - } - }) - } catch { - case t: Throwable => - ctx.reporter.warning("Error while compiling expression: "+t.getMessage); t.printStackTrace() - None - } - } - - def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] = { - // Split conjunctions - val TopLevelAnds(ands) = satisfying - - val runners = ands.flatMap(a => compile(a, ins) match { - case Some(runner) => Some(runner) - case None => - ctx.reporter.error("Could not compile predicate " + a) - None - }) - - val gen = new StubGenerator[Expr, TypeTree](stubValues.toSeq, - Some(getConstructors _), - treatEmptyStubsAsChildless = true) - - /** - * Gather at most <n> isomoprhic models before skipping them - * - Too little means skipping many excluding patterns - * - Too large means repetitive (and not useful models) before reaching maxEnumerated - */ - - val maxIsomorphicModels = maxValid+1 - - val it = gen.enumerate(tupleTypeWrap(ins.map(_.getType))) - - new Iterator[Seq[Expr]] { - var total = 0 - var found = 0 - - var theNext: Option[Seq[Expr]] = None - - def hasNext = { - if (total == 0) { - theNext = computeNext() - } - - theNext.isDefined - } - - def next() = { - val res = theNext.get - theNext = computeNext() - res - } - - - def computeNext(): Option[Seq[Expr]] = { - //return None - while (total < maxEnumerated && found < maxValid && it.hasNext && !interrupted.get) { - val model = it.next() - it.hasNext // FIXME: required for some reason by StubGenerator or will return false during loop check - - if (model eq null) { - total = maxEnumerated - } else { - total += 1 - - var failed = false - - for (r <- runners) r(model) match { - case (EvaluationResults.Successful(BooleanLiteral(true)), _) => - - case (_, Some(pattern)) => - failed = true - it.exclude(pattern) - - case (_, None) => - failed = true; - } - - if (!failed) { - //println("Got model:") - //for ((i, v) <- (ins zip model.exprs)) { - // println(" - "+i+" -> "+v) - //} - - found += 1 - - if (found % maxIsomorphicModels == 0) { - it.skipIsomorphic() - } - - return Some(unwrapTuple(model, ins.size)) - } - - //if (total % 1000 == 0) { - // println("... 
"+total+" ...") - //} - } - } - None - } - } - } -} diff --git a/src/main/scala/inox/evaluators/Evaluator.scala b/src/main/scala/inox/evaluators/Evaluator.scala index b9069f65f681a8c094c1bee6d5eeff7dbd92445f..ab91667481ef692cae37b80c2b37fa49967d0766 100644 --- a/src/main/scala/inox/evaluators/Evaluator.scala +++ b/src/main/scala/inox/evaluators/Evaluator.scala @@ -5,6 +5,8 @@ package evaluators case class EvaluatorOptions(options: Seq[InoxOption[Any]]) extends InoxOptions + + trait Evaluator { val program: Program val options: EvaluatorOptions diff --git a/src/main/scala/inox/evaluators/RecursiveEvaluator.scala b/src/main/scala/inox/evaluators/RecursiveEvaluator.scala index 0ae72adf1157c017f4516a73aaaeb27f7f39393c..c62f0cff846f988a2666c3e41c55d90566c2235e 100644 --- a/src/main/scala/inox/evaluators/RecursiveEvaluator.scala +++ b/src/main/scala/inox/evaluators/RecursiveEvaluator.scala @@ -15,8 +15,7 @@ trait RecursiveEvaluator import program.symbols._ import program.trees.exprOps._ - val name = "evaluator" - val description = "Recursive interpreter for Inox expressions" + val name = "Recursive Evaluator" private def shift(b: BitSet, size: Int, i: Int): BitSet = b.map(_ + i).filter(bit => bit >= 1 && bit <= size) @@ -48,7 +47,6 @@ trait RecursiveEvaluator case Let(i,ex,b) => val first = e(ex) - //println(s"Eval $i to $first") e(b)(rctx.withNewVar(i, first), gctx) case Assume(cond, body) => @@ -530,3 +528,10 @@ trait RecursiveEvaluator } } +object RecursiveEvaluator { + def apply(p: Program)(opts: EvaluatorOptions): RecursiveEvaluator { val program: p.type } = new { + val program: p.type = p + } with RecursiveEvaluator with HasDefaultGlobalContext with HasDefaultRecContext { + val maxSteps = 50000 + } +} diff --git a/src/main/scala/inox/solvers/smtlib/SMTLIBTarget.scala b/src/main/scala/inox/solvers/smtlib/SMTLIBTarget.scala index e3473a3b38e0207966a8457c9ce79341aa45b87e..52c3ec00c3dc3fe8f3ed432c804e4e90328a64c9 100644 --- a/src/main/scala/inox/solvers/smtlib/SMTLIBTarget.scala +++ b/src/main/scala/inox/solvers/smtlib/SMTLIBTarget.scala @@ -129,7 +129,7 @@ trait SMTLIBTarget extends Interruptible with ADTManagers { import scala.language.implicitConversions protected implicit def symbolToQualifiedId(s: SSymbol): QualifiedIdentifier = SimpleSymbol(s) - protected val adtManager = new ADTManager(ctx) + protected val adtManager = new ADTManager protected def id2sym(id: Identifier): SSymbol = { SSymbol(id.uniqueNameDelimited("!").replace("|", "$pipe").replace("\\", "$backslash")) @@ -178,37 +178,33 @@ trait SMTLIBTarget extends Interruptible with ADTManagers { // Returns a quantified term where all free variables in the body have been quantified protected def quantifiedTerm(quantifier: (SortedVar, Seq[SortedVar], Term) => Term, body: Expr) - (implicit bindings: Map[Identifier, Term]) - : Term = { + (implicit bindings: Map[Identifier, Term]): Term = { quantifiedTerm(quantifier, exprOps.variablesOf(body).toSeq.map(_.toVal), body) } - protected def declareSort(t: Type): Sort = { - val tpe = normalizeType(t) - sorts.cachedB(tpe) { - tpe match { - case BooleanType => Core.BoolSort() - case IntegerType => Ints.IntSort() - case RealType => Reals.RealSort() - case Int32Type => FixedSizeBitVectors.BitVectorSort(32) - case CharType => FixedSizeBitVectors.BitVectorSort(32) + protected def declareSort(t: Type): Sort = bestRealType(t) match { + case BooleanType => Core.BoolSort() + case IntegerType => Ints.IntSort() + case RealType => Reals.RealSort() + case Int32Type => FixedSizeBitVectors.BitVectorSort(32) + 
case CharType => FixedSizeBitVectors.BitVectorSort(32) - case MapType(from, to) => - Sort(SMTIdentifier(SSymbol("Array")), Seq(declareSort(from), declareSort(to))) + case mt @ MapType(from, to) => + sorts.cachedB(mt) { + Sort(SMTIdentifier(SSymbol("Array")), Seq(declareSort(from), declareSort(to))) + } - case FunctionType(from, to) => - Ints.IntSort() + case FunctionType(from, to) => + Ints.IntSort() - case _: ClassType | _: TupleType | _: TypeParameter | UnitType => - declareStructuralSort(tpe) + case tpe @ (_: ClassType | _: TupleType | _: TypeParameter | UnitType) => + declareStructuralSort(tpe) - case other => - unsupported(other, s"Could not transform $other into an SMT sort") - } - } + case other => + unsupported(other, s"Could not transform $other into an SMT sort") } - protected def declareDatatypes(datatypes: Map[Type, DataType]): Unit = { + protected def declareDatatypes(datatypes: Seq[(Type, DataType)]): Unit = { // We pre-declare ADTs for ((tpe, DataType(sym, _)) <- datatypes) { sorts += tpe -> Sort(SMTIdentifier(id2sym(sym))) @@ -239,16 +235,8 @@ trait SMTLIBTarget extends Interruptible with ADTManagers { } protected def declareStructuralSort(t: Type): Sort = { - // Populates the dependencies of the structural type to define. - adtManager.defineADT(t) match { - case Left(adts) => - declareDatatypes(adts) - sorts.toB(normalizeType(t)) - - case Right(conflicts) => - conflicts.foreach { declareStructuralSort } - declareStructuralSort(t) - } + adtManager.declareADTs(t, declareDatatypes) + sorts.toB(bestRealType(t)) } protected def declareVariable(v: Variable): SSymbol = { @@ -408,10 +396,8 @@ trait SMTLIBTarget extends Interruptible with ADTManagers { val dyn = declareLambda(caller.getType.asInstanceOf[FunctionType]) FunctionApplication(dyn, (caller +: args).map(toSMT)) - case Not(u) => u.getType match { - case BooleanType => Core.Not(toSMT(u)) - case Int32Type => FixedSizeBitVectors.Not(toSMT(u)) - } + case Not(u) => Core.Not(toSMT(u)) + case UMinus(u) => u.getType match { case IntegerType => Ints.Neg(toSMT(u)) case Int32Type => FixedSizeBitVectors.Neg(toSMT(u)) @@ -481,6 +467,7 @@ trait SMTLIBTarget extends Interruptible with ADTManagers { case CharType => FixedSizeBitVectors.SGreaterEquals(toSMT(a), toSMT(b)) } + case BVNot(u) => FixedSizeBitVectors.Not(toSMT(u)) case BVAnd(a, b) => FixedSizeBitVectors.And(toSMT(a), toSMT(b)) case BVOr(a, b) => FixedSizeBitVectors.Or(toSMT(a), toSMT(b)) case BVXOr(a, b) => FixedSizeBitVectors.XOr(toSMT(a), toSMT(b)) diff --git a/src/main/scala/inox/solvers/z3/AbstractZ3Solver.scala b/src/main/scala/inox/solvers/z3/AbstractZ3Solver.scala index e9008fbc583925606df4b0a6a7f270e0f43610f8..cffc247a4e3874348d4cf366fd14863b63d9f816 100644 --- a/src/main/scala/inox/solvers/z3/AbstractZ3Solver.scala +++ b/src/main/scala/inox/solvers/z3/AbstractZ3Solver.scala @@ -226,9 +226,7 @@ trait AbstractZ3Solver sorts(oldtt) case tpe @ (_: ClassType | _: TupleType | _: TypeParameter | UnitType) => - sorts.cached(tpe) { - declareStructuralSort(tpe) - } + sorts.getOrElse(tpe, declareStructuralSort(tpe)) case tt @ SetType(base) => sorts.cached(tt) { diff --git a/src/main/scala/inox/solvers/z3/Z3UnrollingSolver.scala b/src/main/scala/inox/solvers/z3/Z3UnrollingSolver.scala deleted file mode 100644 index 454cc37dc2de78e38b038f835a2ec38f24c8d539..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/solvers/z3/Z3UnrollingSolver.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package leon -package solvers -package z3 - -import 
purescala.Definitions._ - -import unrolling._ -import theories._ - -class Z3UnrollingSolver(context: SolverContext, program: Program, underlying: Z3Solver) - extends UnrollingSolver(context, program, underlying, new StringEncoder(context.context, program)) - with Z3Solver
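
The refactoring above consistently replaces constructor parameters (LeonContext, Program, SolverFactory) with a path-dependent `val program` on the data-generation traits, following the anonymous-refinement factory introduced for RecursiveEvaluator. Below is a minimal wiring sketch of how a caller could instantiate the refactored GrammarDataGen for a concrete program. It is based only on the signatures visible in this diff; `p` and `opts` are assumed to be supplied by the caller, and it assumes RecursiveEvaluator satisfies the DeterministicEvaluator bound on `evaluator`.

import inox._
import inox.datagen._
import inox.evaluators._

object DataGenWiring {
  // Sketch: build a GrammarDataGen whose evaluator is constructed from the
  // same program value, so program.trees and evaluator.program.trees are
  // statically the same trees (the point of the path-dependent refactor).
  def grammarDataGen(p: Program)(opts: EvaluatorOptions): GrammarDataGen { val program: p.type } =
    new {
      val program: p.type = p
    } with GrammarDataGen {
      val evaluator = RecursiveEvaluator(p)(opts)  // factory added in this patch
    }

  // Hypothetical usage: `vs: Seq[p.trees.Variable]` and `pred: p.trees.Expr`
  // are placeholders the caller provides.
  //   grammarDataGen(p)(opts).generateFor(vs, pred, maxValid = 10, maxEnumerated = 1000)
}

The same pattern carries over to SolverDataGen: because `program.extend(functions = fds)` returns a Program over the same `trees`, the synthesized size functions can be handed to a fresh solver factory without translating expressions between unrelated tree instances.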