diff --git a/build.sbt b/build.sbt index 4330f51b8296c7a78f31f760efb738998c2722c5..9036f22d0f5fef46c70c93ebf0b3c0148a3d392a 100644 --- a/build.sbt +++ b/build.sbt @@ -95,7 +95,6 @@ def ghProject(repo: String, version: String) = RootProject(uri(s"git://$repo#$ve def svnProject(repo: String, version: String, user: String, pass: String) = RootProject(uri(s"svn+credentials://$repo#username=$user&password=$pass&$version")) -lazy val bonsai = ghProject("github.com/colder/bonsai.git", "10eaaee4ea0ff6567f4f866922cb871bae2da0ac") lazy val scalaSmtlib = ghProject("github.com/regb/scala-smtlib.git", "850580ae86e299a1baa0eaef9e24eed905fefe58") lazy val princess = svnProject("hal4.it.uu.se/princess/interpolation/trunk", "2703", "anonymous", "anonymous") @@ -121,13 +120,12 @@ lazy val root = (project in file(".")) parallelExecution := false )) : _*) .settings(compile <<= (compile in Compile) dependsOn script) - .dependsOn(bonsai) .dependsOn(scalaSmtlib) .dependsOn(princess) .settings(classpathSettings : _*) .settings( // @nv: ignore warnings from projects that are out of our control logLevel in (scalaSmtlib, Compile) := Level.Error, - logLevel in (princess, Compile) := Level.Error + logLevel in (princess, Compile) := Level.Error ) diff --git a/src/main/scala/inox/Main.scala b/src/main/scala/inox/Main.scala index 80959c1d15fd10525ee9f8cb3db19aa227826b95..2e27cf6789c11a531136609588562f20e1113cb6 100644 --- a/src/main/scala/inox/Main.scala +++ b/src/main/scala/inox/Main.scala @@ -8,7 +8,6 @@ trait MainHelpers { protected def getDebugSections: Set[DebugSection] = Set( ast.DebugSectionTrees, - datagen.DebugSectionDataGen, solvers.DebugSectionSolver ) diff --git a/src/main/scala/inox/datagen/DataGenerator.scala b/src/main/scala/inox/datagen/DataGenerator.scala deleted file mode 100644 index d8a80bed63a318661ee9e6e86a9accd5a85a57b1..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/DataGenerator.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package datagen - -import utils._ - -import java.util.concurrent.atomic.AtomicBoolean - -object DebugSectionDataGen extends DebugSection("datagen") - -trait DataGenerator extends Interruptible { - val program: Program - import program.trees._ - - implicit val debugSection = DebugSectionDataGen - - def generateFor(ins: Seq[ValDef], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] - - protected val interrupted: AtomicBoolean = new AtomicBoolean(false) - - def interrupt(): Unit = { - interrupted.set(true) - } - - def recoverInterrupt(): Unit = { - interrupted.set(false) - } -} diff --git a/src/main/scala/inox/datagen/GrammarDataGen.scala b/src/main/scala/inox/datagen/GrammarDataGen.scala deleted file mode 100644 index 7b9280831cfd33e9549e653d003df960bc90fcbe..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/GrammarDataGen.scala +++ /dev/null @@ -1,95 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package datagen - -import evaluators._ -import bonsai.enumerators._ - -import utils.UniqueCounter -import utils.SeqUtils.cartesianProduct -import grammars._ - -/** Utility functions to generate values of a given type. - * In fact, it could be used to generate *terms* of a given type, - * e.g. by passing trees representing variables for the "bounds". 
*/ -trait GrammarDataGen extends DataGenerator { self => - val grammars: GrammarsUniverse { val program: self.program.type } - import grammars._ - val evaluator: DeterministicEvaluator { val program: self.program.type } - val grammar: ExpressionGrammar - - import program._ - import program.trees._ - import program.symbols._ - import exprOps._ - - // Assume e contains generic values with index 0. - // Return a series of expressions with all normalized combinations of generic values. - private def expandGenerics(e: Expr): Seq[Expr] = { - val c = new UniqueCounter[TypeParameter] - val withUniqueCounters: Expr = postMap { - case GenericValue(t, _) => - Some(GenericValue(t, c.next(t))) - case _ => None - }(e) - - val indices = c.current - - val (tps, substInt) = (for { - tp <- indices.keySet.toSeq - } yield tp -> (for { - from <- 0 to indices(tp) - to <- 0 to from - } yield (from, to))).unzip - - val combos = cartesianProduct(substInt) - - val substitutions = combos map { subst => - tps.zip(subst).map { case (tp, (from, to)) => - (GenericValue(tp, from): Expr) -> (GenericValue(tp, to): Expr) - }.toMap - } - - substitutions map (replace(_, withUniqueCounters)) - } - - def generate(tpe: Type): Iterator[Expr] = { - val enum = new MemoizedEnumerator[Label, Expr, ProductionRule[Label, Expr]](grammar.getProductions) - enum.iterator(Label(tpe)).flatMap(expandGenerics).takeWhile(_ => !interrupted.get) - } - - def generateFor(ins: Seq[ValDef], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] = { - - def filterCond(vs: Seq[Expr]): Boolean = satisfying match { - case BooleanLiteral(true) => - true - case e => - // in -> e should be enough. We shouldn't find any subexpressions of in. - evaluator.eval(e, (ins zip vs).toMap) match { - case EvaluationResults.Successful(BooleanLiteral(true)) => true - case _ => false - } - } - - if (ins.isEmpty) { - Iterator(Seq[Expr]()).filter(filterCond) - } else { - val values = generate(tupleTypeWrap(ins.map{ _.getType })) - - val detupled = values.map { - v => unwrapTuple(v, ins.size) - } - - detupled.take(maxEnumerated) - .filter(filterCond) - .take(maxValid) - .takeWhile(_ => !interrupted.get) - } - } - - def generateMapping(ins: Seq[ValDef], satisfying: Expr, maxValid: Int, maxEnumerated: Int) = { - generateFor(ins, satisfying, maxValid, maxEnumerated) map (ins zip _) - } - -} diff --git a/src/main/scala/inox/datagen/ModelEnumerator.scala b/src/main/scala/inox/datagen/ModelEnumerator.scala deleted file mode 100644 index 6cfbc50d884955c874bd29db5eb1190f132b17c8..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/ModelEnumerator.scala +++ /dev/null @@ -1,212 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package datagen - -import evaluators._ -import solvers._ -import utils._ - -trait ModelEnumerator { - val program: Program - val factory: SolverFactory { val program: ModelEnumerator.this.program.type } - val evaluator: DeterministicEvaluator { val program: ModelEnumerator.this.program.type } - - import program._ - import program.trees._ - import program.symbols._ - - import SolverResponses._ - - def enumSimple(vs: Seq[ValDef], satisfying: Expr): FreeableIterator[Map[ValDef, Expr]] = { - enumVarying0(vs, satisfying, None, -1) - } - - /** - * Enumerate at most `nPerCaracteristic` models with the same value for - * `caracteristic`. - * - * Note: there is no guarantee that the models enumerated consecutively share the - * same `caracteristic`. 
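/* The enumeration strategy used throughout ModelEnumerator above (ask the
 * solver for a model, then assert the negation of that model, a "blocking
 * clause", so the next query must return a different one) can be sketched
 * standalone. This is a simplified illustration with a brute-force stand-in
 * for the solver; it is not Inox's API and all names here are hypothetical. */
object BlockingEnumerationSketch {
  type Model = Map[String, Boolean]

  // Toy "solver": finds any model of `constraint` that has not been blocked yet.
  def solve(vars: Seq[String], constraint: Model => Boolean, blocked: Set[Model]): Option[Model] = {
    val assignments = vars.foldLeft(Seq(Map.empty[String, Boolean])) { (ms, v) =>
      ms.flatMap(m => Seq(m + (v -> true), m + (v -> false)))
    }
    assignments.find(m => constraint(m) && !blocked(m))
  }

  // Enumerate all models by blocking each one as soon as it is found,
  // mirroring the `s.assertCnstr(not(andJoin(...)))` step in enumVarying0.
  def enumerate(vars: Seq[String], constraint: Model => Boolean): Iterator[Model] =
    Iterator.iterate((Option.empty[Model], Set.empty[Model])) { case (_, blocked) =>
      val next = solve(vars, constraint, blocked)
      (next, blocked ++ next)
    }.drop(1).map(_._1).takeWhile(_.isDefined).map(_.get)

  def main(args: Array[String]): Unit = {
    // All three models of (a || b) are produced exactly once.
    enumerate(Seq("a", "b"), m => m("a") || m("b")).foreach(println)
  }
}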
- */ - def enumVarying(vs: Seq[ValDef], satisfying: Expr, measure: Expr, nPerMeasure: Int = 1) = { - enumVarying0(vs, satisfying, Some(measure), nPerMeasure) - } - - private[this] def enumVarying0(vs: Seq[ValDef], satisfying: Expr, measure: Option[Expr], nPerMeasure: Int = 1): FreeableIterator[Map[ValDef, Expr]] = { - val s = factory.getNewSolver - - s.assertCnstr(satisfying) - - val m = measure match { - case Some(ms) => - val m = Variable(FreshIdentifier("measure"), ms.getType) - s.assertCnstr(Equals(m, ms)) - m - case None => - Variable(FreshIdentifier("noop"), BooleanType) - } - - var perMeasureRem = Map[Expr, Int]().withDefaultValue(nPerMeasure) - - new FreeableIterator[Map[ValDef, Expr]] { - def computeNext() = { - s.check(Model) match { - case SatWithModel(model) => - val fullModel = vs.map { v => - v -> model.getOrElse(v, simplestValue(v.getType)) - }.toMap - - // Vary the model - s.assertCnstr(not(andJoin(fullModel.toSeq.sortBy(_._1).map { - case (k, v) => equality(k.toVariable, v) - }))) - - measure match { - case Some(ms) => - val mValue = evaluator.eval(ms, fullModel).result.get - - perMeasureRem += (mValue -> (perMeasureRem(mValue) - 1)) - - if (perMeasureRem(mValue) <= 0) { - s.assertCnstr(not(equality(m, mValue))) - } - - case None => - } - - Some(fullModel) - - case _ => - None - } - } - - def free() { - factory.reclaim(s) - } - } - } - - def enumMinimizing(vs: Seq[ValDef], cnstr: Expr, measure: Expr) = { - enumOptimizing(vs, cnstr, measure, Down) - } - - def enumMaximizing(vs: Seq[ValDef], cnstr: Expr, measure: Expr) = { - enumOptimizing(vs, cnstr, measure, Up) - } - - abstract class SearchDirection - case object Up extends SearchDirection - case object Down extends SearchDirection - - private[this] def enumOptimizing(vs: Seq[ValDef], satisfying: Expr, measure: Expr, dir: SearchDirection): FreeableIterator[Map[ValDef, Expr]] = { - assert(measure.getType == IntegerType) - - val s = factory.getNewSolver - - s.assertCnstr(satisfying) - - val m = Variable(FreshIdentifier("measure"), measure.getType) - s.assertCnstr(Equals(m, measure)) - - // Search Range - var ub: Option[BigInt] = None - var lb: Option[BigInt] = None - - def rangeEmpty() = (lb, ub) match { - case (Some(l), Some(u)) => u-l <= 1 - case _ => false - } - - def getPivot(): Option[BigInt] = (lb, ub, dir) match { - // Bisection Method - case (Some(l), Some(u), _) => Some(l + (u-l)/2) - // No bound yet, let the solver find at least one bound - case (None, None, _) => None - - // Increase lower bound - case (Some(l), None, Up) => Some(l + l.abs + 1) - // Decrease upper bound - case (None, Some(u), Down) => Some(u - u.abs - 1) - - // This shouldn't happen - case _ => None - } - - new FreeableIterator[Map[ValDef, Expr]] { - def computeNext(): Option[Map[ValDef, Expr]] = { - if (rangeEmpty()) { - None - } else { - // Assert a new pivot point - val thisTry = getPivot().map { t => - s.push() - dir match { - case Up => - s.assertCnstr(GreaterThan(m, IntegerLiteral(t))) - case Down => - s.assertCnstr(LessThan(m, IntegerLiteral(t))) - } - t - } - - s.check(Model) match { - case SatWithModel(model) => - val fullModel = vs.map { v => - v -> model.getOrElse(v, simplestValue(v.getType)) - }.toMap - - evaluator.eval(measure, fullModel).result match { - case Some(IntegerLiteral(measureVal)) => - // Positive result - dir match { - case Up => lb = Some(measureVal) - case Down => ub = Some(measureVal) - } - - Some(fullModel) - - case _ => - ctx.reporter.warning("Evaluator failed to evaluate measure!") - None - } - - case Unsat => - // 
Negative result - thisTry match { - case Some(t) => - s.pop() - - dir match { - case Up => ub = Some(t) - case Down => lb = Some(t) - } - computeNext() - - case None => - None - } - - case Unknown => - None - } - } - } - - def free() { - factory.reclaim(s) - } - } - } -} - -object ModelEnumerator { - def apply(p: Program) - (sf: SolverFactory { val program: p.type }, ev: DeterministicEvaluator { val program: p.type }): - ModelEnumerator { val program: p.type; val factory: sf.type; val evaluator: ev.type } = new { - val program: p.type = p - val factory: sf.type = sf - val evaluator: ev.type = ev - } with ModelEnumerator -} diff --git a/src/main/scala/inox/datagen/SolverDataGen.scala b/src/main/scala/inox/datagen/SolverDataGen.scala deleted file mode 100644 index 2576742684986384139a28a7b875d26e79a1d168..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/datagen/SolverDataGen.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package datagen - -import evaluators._ -import solvers._ -import utils._ - -trait SolverDataGen extends DataGenerator { self => - import program._ - import program.trees._ - import program.symbols._ - - def factory(p: Program { val trees: self.program.trees.type }): SolverFactory { val program: p.type } - def evaluator(p: Program { val trees: self.program.trees.type }): DeterministicEvaluator { val program: p.type } - - def generate(tpe: Type): FreeableIterator[Expr] = { - generateFor(Seq(ValDef(FreshIdentifier("tmp"), tpe)), - BooleanLiteral(true), 20, 20).map(_.head).takeWhile(_ => !interrupted.get) - } - - def generateFor(ins: Seq[ValDef], satisfying: Expr, maxValid: Int, maxEnumerated: Int): FreeableIterator[Seq[Expr]] = { - if (ins.isEmpty) { - FreeableIterator.empty - } else { - - var cdToId: Map[ADTDefinition, Identifier] = Map.empty - var fds: Seq[FunDef] = Seq.empty - - def sizeFor(of: Expr): Expr = bestRealType(of.getType) match { - case adt: ADTType => - val tadt = adt.getADT - val root = tadt.definition.root - val id = cdToId.getOrElse(root, { - import dsl._ - - val id = FreshIdentifier("sizeOf", true) - val tparams = root.tparams.map(_.freshen) - cdToId += root -> id - - def typed(cons: ADTConstructor) = TypedADTConstructor(cons, tparams.map(_.tp)) - def sizeOfConstructor(cons: ADTConstructor, expr: Expr): Expr = - typed(cons).fields.foldLeft(IntegerLiteral(1): Expr) { (i, f) => - plus(i, sizeFor(expr.getField(f.id))) - } - - val x = Variable(FreshIdentifier("x", true), tadt.root.toType) - fds +:= new FunDef(id, tparams, Seq(x.toVal), IntegerType, root match { - case sort: ADTSort => - val (child +: rest) = sort.constructors - def sizeOf(cons: ADTConstructor) = sizeOfConstructor(cons, x.asInstOf(typed(cons).toType)) - rest.foldLeft(sizeOf(child)) { (elze, ccd) => - if_ (x.isInstOf(typed(ccd).toType)) { sizeOf(ccd) } else_ { elze } - } - - case cons: ADTConstructor => - sizeOfConstructor(cons, x) - }, Set.empty) - - id - }) - - FunctionInvocation(id, adt.tps, Seq(of)) - - case tt @ TupleType(tps) => - val exprs = for ((t,i) <- tps.zipWithIndex) yield { - sizeFor(tupleSelect(of, i+1, tps.size)) - } - - exprs.foldLeft(IntegerLiteral(1): Expr)(plus) - - case _ => - IntegerLiteral(1) - } - - val sizeOf = sizeFor(tupleWrap(ins.map(_.toVariable))) - - // We need to synthesize a size function for ins' types. 
- val pgm1 = program.withFunctions(fds) - val modelEnum = ModelEnumerator(pgm1)(factory(pgm1), evaluator(pgm1)) - - val enum = modelEnum.enumVarying(ins, satisfying, sizeOf, 5) - - enum.take(maxValid).map(model => ins.map(model)).takeWhile(_ => !interrupted.get) - } - } -} - -object SolverDataGen { - def apply(p: InoxProgram): SolverDataGen { val program: p.type } = new SolverDataGen { - val program: p.type = p - def factory(p: InoxProgram): SolverFactory { val program: p.type } = SolverFactory.default(p) - def evaluator(p: InoxProgram): RecursiveEvaluator { val program: p.type } = RecursiveEvaluator.default(p) - } -} diff --git a/src/main/scala/inox/grammars/AllGrammars.scala b/src/main/scala/inox/grammars/AllGrammars.scala deleted file mode 100644 index d999404ac7fde5a7d8eab46740e4bbd1a8d538d7..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/AllGrammars.scala +++ /dev/null @@ -1,11 +0,0 @@ -package inox.grammars - -trait AllGrammars extends ElementaryGrammars - with BaseGrammars - with ValueGrammars - with ConstantGrammars - with ClosureGrammars - with EqualityGrammars - with FunctionCallsGrammars { - self: GrammarsUniverse => -} diff --git a/src/main/scala/inox/grammars/Aspects.scala b/src/main/scala/inox/grammars/Aspects.scala deleted file mode 100644 index 7ef23f7b1d7d46962ff62daca8bba3c2347b3bcf..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/Aspects.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars - -/** - * An Aspect applies to a label, and attaches information to it. - * - * For instance, the "size" aspect provides information about the size of - * expressions the label represents, (displayed as |5|). - * - * Int|5| is thus a Label "Int" with aspect "Sized(5)". The applyTo method of - * the aspect can filter/modify/generate sub-productions: - * If the grammar contains a Int -> Int + Int production, then - * Int|5| will generate productions by doing: |5|.applyTo(Int + Int), - * which itself returns: - * - Int|1| + Int|3| - * - Int|3| + Int|1| - * - Int|2| + Int|2| - * - */ - -trait Aspects { self: GrammarsUniverse => - import program._ - import trees._ - - trait Aspect extends Printable { - final type Production = ProductionRule[Label, Expr] - - def applyTo(l: Label, ps: Seq[Production]): Seq[Production] - } -} diff --git a/src/main/scala/inox/grammars/BaseGrammars.scala b/src/main/scala/inox/grammars/BaseGrammars.scala deleted file mode 100644 index 47d88d0445b27d0cd8bb347209002f233fb9e420..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/BaseGrammars.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait BaseGrammars { self: GrammarsUniverse => - import program._ - import trees._ - import symbols._ - - /** The basic grammar for Inox expressions. - * Generates the most obvious expressions for a given type, - * without regard of context (variables in scope, current function etc.) - * Also does some trivial simplifications. 
- */ - case object BaseGrammar extends SimpleExpressionGrammar { - - def computeProductions(t: Type): Seq[Prod] = t match { - case BooleanType => - List( - terminal(BooleanLiteral(false), BooleanC), - terminal(BooleanLiteral(true), BooleanC), - nonTerminal(List(BooleanType), { case Seq(a) => not(a) }, Not), - nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => and(a, b) }, And), - nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => or(a, b) }, Or ), - nonTerminal(List(Int32Type, Int32Type), { case Seq(a, b) => LessThan(a, b) }), - nonTerminal(List(Int32Type, Int32Type), { case Seq(a, b) => LessEquals(a, b) }), - nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessThan(a, b) }), - nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessEquals(a, b) }) - ) - case Int32Type => - List( - terminal(IntLiteral(0), Zero), - terminal(IntLiteral(1), One ), - nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => plus(a, b) }, Plus ), - nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => minus(a, b) }, Minus), - nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => times(a, b) }, Times) - ) - - case IntegerType => - List( - terminal(IntegerLiteral(0), Zero), - terminal(IntegerLiteral(1), One ), - nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => plus(a, b) }, Plus ), - nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => minus(a, b) }, Minus), - nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => times(a, b) }, Times)//, - //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Modulo(a, b) }, Mod), - //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Division(a, b) }, Div) - ) - - case TupleType(stps) => - List( - nonTerminal(stps, Tuple, Constructor(isTerminal = false)) - ) - - case adt: ADTType => - adt.getADT match { - case tcons: TypedADTConstructor => - List( - nonTerminal(tcons.fields.map(_.getType), ADT(adt, _), tagOf(tcons.definition) ) - ) - - case tsort: TypedADTSort => - tsort.constructors.map { tcons => - nonTerminal(tcons.fields.map(_.getType), ADT(tcons.toType, _), tagOf(tcons.definition) ) - } - } - - case st @ SetType(base) => - List( - terminal(FiniteSet(Seq(), base), Constant), - nonTerminal(List(base), { case elems => FiniteSet(elems, base) }, Constructor(isTerminal = false)), - nonTerminal(List(st, st), { case Seq(a, b) => SetUnion(a, b) }), - nonTerminal(List(st, st), { case Seq(a, b) => SetIntersection(a, b) }), - nonTerminal(List(st, st), { case Seq(a, b) => SetDifference(a, b) }) - ) - - case UnitType => - List( - terminal(UnitLiteral(), Constant) - ) - - case _ => - Nil - } - } -} diff --git a/src/main/scala/inox/grammars/ClosureGrammars.scala b/src/main/scala/inox/grammars/ClosureGrammars.scala deleted file mode 100644 index cbf304ab5b434009b58c0c8f28cde92229b914c4..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/ClosureGrammars.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars - -trait ClosureGrammars { self: GrammarsUniverse => - import program._ - import trees._ - import symbols._ - - case object Closures extends ExpressionGrammar { - def computeProductions(lab: Label): Seq[ProductionRule[Label, Expr]] = lab.getType match { - case FunctionType(argsTpes, ret) => - val args = argsTpes.zipWithIndex.map { case (tpe, i) => - ValDef(FreshIdentifier("a"+i), tpe) - } - - val rlab = Label(ret).withAspect(ExtraTerminals(args.map(_.toVariable).toSet)) - - 
applyAspects(lab, List( - ProductionRule(List(rlab), { case List(body) => Lambda(args, body) }, Top, 1) - )) - - case _ => - Nil - } - } -} diff --git a/src/main/scala/inox/grammars/ConstantGrammars.scala b/src/main/scala/inox/grammars/ConstantGrammars.scala deleted file mode 100644 index f4b33ade7c89750fa47aec0d77cb5c7aaaabd983..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/ConstantGrammars.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait ConstantGrammars { self: GrammarsUniverse => - import program._ - import trees._ - import exprOps.collect - - /** Generates constants found in an [[inox.ast.Expressions.Expr]]. - * Some constants that are generated by other grammars (like 0, 1) will be excluded - */ - case class Constants(e: Expr) extends SimpleExpressionGrammar { - - private val excluded: Set[Expr] = Set( - IntegerLiteral(1), - IntegerLiteral(0), - IntLiteral(1), - IntLiteral(0), - BooleanLiteral(true), - BooleanLiteral(false) - ) - - def computeProductions(t: Type): Seq[Prod] = { - val literals = collect[Expr]{ - case IsTyped(l:Literal[_], `t`) => Set(l) - case _ => Set() - }(e) - - (literals -- excluded map (terminal(_, Constant))).toSeq - } - } -} diff --git a/src/main/scala/inox/grammars/ElementaryGrammars.scala b/src/main/scala/inox/grammars/ElementaryGrammars.scala deleted file mode 100644 index 520d29c6e4e1e45c66b7b175c4a62a725a7a74f8..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/ElementaryGrammars.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait ElementaryGrammars { self: GrammarsUniverse => - import program.trees._ - import program.symbols._ - - /** Generates one production rule for each expression in a sequence that has compatible type */ - case class OneOf(inputs: Seq[Expr]) extends SimpleExpressionGrammar { - def computeProductions(lab: Type): Seq[Prod] = { - inputs.collect { - case i if isSubtypeOf(i.getType, lab.getType) => - terminal(i) - } - } - } - - case class Union(gs: Seq[ExpressionGrammar]) extends ExpressionGrammar { - val subGrammars: Seq[ExpressionGrammar] = gs.flatMap { - case u: Union => u.subGrammars - case g => Seq(g) - } - - def computeProductions(label: Label): Seq[ProductionRule[Label, Expr]] = - subGrammars.flatMap(_.computeProductions(label)) - } - - /** The empty expression grammar */ - case class Empty() extends ExpressionGrammar { - def computeProductions(l: Label) = Nil - } - -} - diff --git a/src/main/scala/inox/grammars/EqualityGrammars.scala b/src/main/scala/inox/grammars/EqualityGrammars.scala deleted file mode 100644 index e5a7bc7297ac558d463d3192152643d94b8475ba..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/EqualityGrammars.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait EqualityGrammars { self: GrammarsUniverse => - import program._ - import trees._ - import symbols._ - - /** A grammar of equalities - * - * @param types The set of types for which equalities will be generated - */ - case class EqualityGrammar(types: Set[Type]) extends SimpleExpressionGrammar { - def computeProductions(t: Type): Seq[Prod] = t match { - case BooleanType => - types.toList map { tp => - nonTerminal(List(tp, tp), { case Seq(a, b) => equality(a, b) }, Equals) - } - - case _ => Nil - } - } -} diff --git a/src/main/scala/inox/grammars/ExpressionGrammars.scala 
b/src/main/scala/inox/grammars/ExpressionGrammars.scala deleted file mode 100644 index 0bc1bf8754e24ff21251876cd9f87c98d50a7094..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/ExpressionGrammars.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -import scala.collection.mutable.{HashMap => MutableMap} - -/** Represents a context-free grammar of expressions */ -trait ExpressionGrammars { self: GrammarsUniverse => - import program._ - import trees._ - - trait ExpressionGrammar { - private[this] val cache = new MutableMap[Label, Seq[ProductionRule[Label, Expr]]]() - - /** The list of production rules for this grammar for a given nonterminal. - * - * @param lab The nonterminal for which production rules will be generated - * @note This is the cached version of [[computeProductions]]. Clients should use this method. - */ - final def getProductions(lab: Label) = { - cache.getOrElse(lab, { - val res = applyAspects(lab, computeProductions(lab)) - cache += lab -> res - res - }) - } - - /** The list of production rules for this grammar for a given nonterminal. - * - * @param lab The nonterminal for which production rules will be generated - * @note Clients should use the cached version, [[getProductions]] instead - */ - def computeProductions(lab: Label): Seq[ProductionRule[Label, Expr]] - - protected def applyAspects(lab: Label, ps: Seq[ProductionRule[Label, Expr]]) = { - lab.aspects.foldLeft(ps) { - case (ps, a) => a.applyTo(lab, ps) - } - } - - /** Returns the union of two generators. */ - //final def ||(that: ExpressionGrammar): ExpressionGrammar = { - // Union(Seq(this, that)) - //} - - final def printProductions(printer: String => Unit) { - def sorter(lp1: (Label, Seq[ProductionRule[Label, Expr]]), lp2: (Label, Seq[ProductionRule[Label, Expr]])): Boolean = { - val l1 = lp1._1 - val l2 = lp2._1 - - val os1 = l1.aspects.collectFirst { case Sized(size) => size } - val os2 = l2.aspects.collectFirst { case Sized(size) => size } - - (os1, os2) match { - case (Some(s1), Some(s2)) => - if (s1 > s2) { - true - } else if (s1 == s2) { - l1.asString < l2.asString - } else { - false - } - case _ => l1.asString < l2.asString - } - } - - for ((lab, gs) <- cache.toSeq.sortWith(sorter)) { - val lhs = f"${Console.BOLD}${lab.asString}%50s${Console.RESET} ::= " - - if (gs.isEmpty) { - printer(s"${lhs}ε") - } else { - val rhs = for (g <- gs) yield { - val subs = g.subTrees.map { t => - new Variable( - FreshIdentifier(Console.BOLD + t.asString + Console.RESET), - t.getType - ) - } - - g.builder(subs).asString - } - printer(lhs + rhs.mkString("\n" + " " * 55)) - } - } - } - } -} diff --git a/src/main/scala/inox/grammars/FunctionCallsGrammars.scala b/src/main/scala/inox/grammars/FunctionCallsGrammars.scala deleted file mode 100644 index c82da4bd2fae57568b5754b8b3dc2a5eab07c14b..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/FunctionCallsGrammars.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait FunctionCallsGrammars extends utils.Helpers { self: GrammarsUniverse => - import program._ - import trees._ - import exprOps._ - import symbols._ - - /** Generates non-recursive function calls - * - * @param currentFunction The currend function for which no calls will be generated - * @param types The candidate real type parameters for [[currentFunction]] - * @param exclude An additional set of functions for which no calls will be 
generated - */ - case class FunctionCalls(currentFunction: FunDef, types: Seq[Type], exclude: Set[FunDef]) extends SimpleExpressionGrammar { - def computeProductions(t: Type): Seq[Prod] = { - - def getCandidates(fd: FunDef): Seq[TypedFunDef] = { - // Prevents recursive calls - val cfd = currentFunction - - val isRecursiveCall = (transitiveCallers(cfd) + cfd) contains fd - - val isDet = true // TODO FIXME fd.body.exists(isDeterministic) - - if (!isRecursiveCall && isDet) { - canBeSubtypeOf(fd.returnType, t) match { - case Some(tpsMap) => - val free = fd.tparams.map(_.tp) - val tfd = fd.typed(free.map(tp => tpsMap.getOrElse(tp, tp))) - - if (tpsMap.size < free.size) { - /* Some type params remain free, we want to assign them: - * - * List[T] => Int, for instance, will be found when - * requesting Int, but we need to assign T to viable - * types. For that we use list of input types as heuristic, - * and look for instantiations of T such that input <?: - * List[T]. - */ - types.distinct.flatMap { (atpe: Type) => - var finalFree = free.toSet -- tpsMap.keySet - var finalMap = tpsMap - - for (ptpe <- tfd.params.map(_.getType).distinct) { - unify(atpe, ptpe, finalFree.toSeq) match { // FIXME!!!! this may allow weird things if lub!=ptpe - case Some(ntpsMap) => - finalFree --= ntpsMap.keySet - finalMap ++= ntpsMap - case _ => - } - } - - if (finalFree.isEmpty) { - List(fd.typed(free.map(tp => finalMap.getOrElse(tp, tp)))) - } else { - Nil - } - } - } else { - // All type parameters that used to be free are assigned - List(tfd) - } - case None => - Nil - } - } else { - Nil - } - } - - val filter = (tfd:TypedFunDef) => /* TODO: Reimplement this somehow tfd.fd.isSynthetic || tfd.fd.isInner || */ (exclude contains tfd.fd) - - val funcs = functionsAvailable.toSeq.sortBy(_.id).flatMap(getCandidates).filterNot(filter) - - funcs.map{ tfd => - nonTerminal(tfd.params.map(_.getType), FunctionInvocation(tfd.id, tfd.tps, _))//, tagOf(tfd.fd, isSafe = false)) - } - - } - } - -} - diff --git a/src/main/scala/inox/grammars/GrammarsUniverse.scala b/src/main/scala/inox/grammars/GrammarsUniverse.scala deleted file mode 100644 index 96145201005badc44d779c5068b8522a436c57d2..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/GrammarsUniverse.scala +++ /dev/null @@ -1,15 +0,0 @@ -package inox -package grammars - -trait GrammarsUniverse - extends Tags - with Productions - with Aspects - with aspects.PersistentAspects - with aspects.AllAspects - with Labels - with ExpressionGrammars - with SimpleExpressionGrammars - with AllGrammars { - val program: Program -} diff --git a/src/main/scala/inox/grammars/Labels.scala b/src/main/scala/inox/grammars/Labels.scala deleted file mode 100644 index 65628b63c730f2000afb08a49d8532d4ae313b90..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/Labels.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars - -trait Labels { self: GrammarsUniverse => - import program.{printerOpts => _, _} - import trees._ - - case class Label(tpe: Type, aspects: List[Aspect] = Nil) extends Typed { - def getType(implicit s: Symbols) = tpe - - def asString(implicit opts: PrinterOptions): String = { - val ts = tpe.asString - - ts + aspects.map(_.asString).mkString - } - - def withAspect(a: Aspect) = Label(tpe, aspects :+ a) - } -} diff --git a/src/main/scala/inox/grammars/Productions.scala b/src/main/scala/inox/grammars/Productions.scala deleted file mode 100644 index 
00a5344f564049d3a4b1c10512fb8494a6047ef1..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/Productions.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -import bonsai.Generator - -/** Represents a production rule of a non-terminal symbol of an [[ExpressionGrammar]]. - * - * @param subTrees The nonterminals that are used in the right-hand side of this [[ProductionRule]] - * (and will generate deeper syntax trees). - * @param builder A function that builds the syntax tree that this [[ProductionRule]] represents from nested trees. - * @param tag Gives information about the nature of this production rule. - * @tparam T The type of nonterminal symbols of the grammar - * @tparam R The type of syntax trees of the grammar - */ -trait Productions { self: GrammarsUniverse => - - case class ProductionRule[T, R](override val subTrees: Seq[T], override val builder: Seq[R] => R, tag: Tag, cost: Int) - extends Generator[T,R](subTrees, builder) -} diff --git a/src/main/scala/inox/grammars/SimpleExpressionGrammars.scala b/src/main/scala/inox/grammars/SimpleExpressionGrammars.scala deleted file mode 100644 index 6282c52542ca3cadd9c0a941c2a6fbe54d8dc9b2..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/SimpleExpressionGrammars.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars - -/** An [[ExpressionGrammar]] whose productions for a given [[Label]] - * depend only on the underlying [[Type]] of the label - */ -trait SimpleExpressionGrammars { self: GrammarsUniverse => - import program.trees._ - import program.symbols - - trait SimpleExpressionGrammar extends ExpressionGrammar { - type Prod = ProductionRule[Type, Expr] - - /** Generates a [[ProductionRule]] without nonterminal symbols */ - def terminal(builder: => Expr, tag: Tag = Top, cost: Int = 1) = { - ProductionRule[Type, Expr](Nil, { (subs: Seq[Expr]) => builder }, tag, cost) - } - - /** Generates a [[ProductionRule]] with nonterminal symbols */ - def nonTerminal(subs: Seq[Type], builder: (Seq[Expr] => Expr), tag: Tag = Top, cost: Int = 1) = { - ProductionRule[Type, Expr](subs, builder, tag, cost) - } - - def filter(f: Prod => Boolean) = { - new SimpleExpressionGrammar { - def computeProductions(lab: Type) = { - SimpleExpressionGrammar.this.computeProductions(lab).filter(f) - } - } - } - - // Finalize this to depend only on the type of the label - final def computeProductions(lab: Label): Seq[ProductionRule[Label, Expr]] = { - computeProductions(lab.getType).map { p => - ProductionRule(p.subTrees.map(Label(_)), p.builder, p.tag, p.cost) - } - } - - /** Version of [[ExpressionGrammar.computeProductions]] which depends only a [[Type]] */ - def computeProductions(tpe: Type): Seq[Prod] - } -} diff --git a/src/main/scala/inox/grammars/Tags.scala b/src/main/scala/inox/grammars/Tags.scala deleted file mode 100644 index 919b40d58bb62d1d83dd8cec48a3f6dfb1d9aff4..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/Tags.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait Tags { self: GrammarsUniverse => - import program._ - import trees.ADTConstructor - import trees.FunDef - - /** A class for tags that tag a [[ProductionRule]] with the kind of expression in generates. 
*/ - abstract class Tag - case object Top extends Tag // Tag for the top-level of the grammar (default) - case object Zero extends Tag // Tag for 0 - case object One extends Tag // Tag for 1 - case object BooleanC extends Tag // Tag for boolean constants - case object Constant extends Tag // Tag for other constants - case object And extends Tag // Tags for boolean operations - case object Or extends Tag - case object Not extends Tag - case object Plus extends Tag // Tags for arithmetic operations - case object Minus extends Tag - case object Times extends Tag - case object Mod extends Tag - case object Div extends Tag - case object Variable extends Tag // Tag for variables - case object Equals extends Tag // Tag for equality - /** Constructors like Tuple, CaseClass... - * - * @param isTerminal If true, this constructor represents a terminal symbol - * (in practice, case class with 0 fields) - */ - case class Constructor(isTerminal: Boolean) extends Tag - /** Tag for function calls - * - * @param isMethod Whether the function called is a method - * @param isSafe Whether this constructor represents a safe function call. - * We need this because this call implicitly contains a variable, - * so we want to allow constants in all arguments. - */ - //case class FunCall(isMethod: Boolean, isSafe: Boolean) extends Tag - - /** The set of tags that represent constants */ - val isConst: Set[Tag] = Set(Zero, One, Constant, BooleanC, Constructor(true)) - - /** The set of tags that represent commutative operations */ - val isCommut: Set[Tag] = Set(Plus, Times, Equals) - - /** The set of tags which have trivial results for equal arguments */ - val symmetricTrivial = Set(Minus, And, Or, Equals, Div, Mod) - - /** Tags which allow constants in all their operands - * - * In reality, the current version never allows that: it is only allowed in safe function calls - * which by construction contain a hidden reference to a variable. - * TODO: Experiment with different conditions, e.g. are constants allowed in - * top-level/ general function calls/ constructors/...? 
- */ - def allConstArgsAllowed(t: Tag) = t match { - //case FunCall(_, true) => true - case _ => false - } - - def tagOf(cons: ADTConstructor) = Constructor(cons.fields.isEmpty) - //def tagOf(fd: FunDef, isSafe: Boolean) = FunCall(fd.methodOwner.isDefined, isSafe) -} diff --git a/src/main/scala/inox/grammars/ValueGrammars.scala b/src/main/scala/inox/grammars/ValueGrammars.scala deleted file mode 100644 index cf3f60cc8aceaf66b607308cfe4f61d3ca3fca1c..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/ValueGrammars.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars - -trait ValueGrammars { self: GrammarsUniverse => - import program._ - import trees._ - import symbols._ - - /** A grammar of values (ground terms) */ - case object ValueGrammar extends SimpleExpressionGrammar { - def computeProductions(t: Type): Seq[Prod] = t match { - case BooleanType => - List( - terminal(BooleanLiteral(true), One), - terminal(BooleanLiteral(false), Zero) - ) - case Int32Type => - List( - terminal(IntLiteral(0), Zero), - terminal(IntLiteral(1), One), - terminal(IntLiteral(5), Constant) - ) - case IntegerType => - List( - terminal(IntegerLiteral(0), Zero), - terminal(IntegerLiteral(1), One), - terminal(IntegerLiteral(5), Constant) - ) - case CharType => - List( - terminal(CharLiteral('a'), Constant), - terminal(CharLiteral('b'), Constant), - terminal(CharLiteral('0'), Constant) - ) - case RealType => - List( - terminal(FractionLiteral(0, 1), Zero), - terminal(FractionLiteral(1, 1), One), - terminal(FractionLiteral(-1, 2), Constant), - terminal(FractionLiteral(555, 42), Constant) - ) - case StringType => - List( - terminal(StringLiteral(""), Constant), - terminal(StringLiteral("a"), Constant), - terminal(StringLiteral("foo"), Constant), - terminal(StringLiteral("bar"), Constant) - ) - - case tp: TypeParameter => - List( - terminal(GenericValue(tp, 0)) - ) - - case TupleType(stps) => - List( - nonTerminal(stps, Tuple, Constructor(stps.isEmpty)) - ) - - case adt: ADTType => - adt.getADT match { - case tcons: TypedADTConstructor => - List( - nonTerminal(tcons.fields.map(_.getType), ADT(adt, _), tagOf(tcons.definition)) - ) - - case tsort: TypedADTSort => - tsort.constructors.map { tcons => - nonTerminal(tcons.fields.map(_.getType), ADT(tcons.toType, _), tagOf(tcons.definition)) - } - } - - case st @ SetType(base) => - List( - terminal(FiniteSet(Seq(), base), Constant), - nonTerminal(List(base), { elems => FiniteSet(elems, base) }, Constructor(isTerminal = false)), - nonTerminal(List(base, base), { elems => FiniteSet(elems, base) }, Constructor(isTerminal = false)) - ) - - case UnitType => - List( - terminal(UnitLiteral(), Constant) - ) - - case FunctionType(from, to) => - val args = from map (tp => ValDef(FreshIdentifier("x", true), tp)) - List( - nonTerminal(Seq(to), { case Seq(e) => Lambda(args, e) }) - ) - - case _ => - Nil - } - } -} - diff --git a/src/main/scala/inox/grammars/aspects/AllAspects.scala b/src/main/scala/inox/grammars/aspects/AllAspects.scala deleted file mode 100644 index 57e7cda88599ad37cacaac25b5cf09d7c35e4948..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/AllAspects.scala +++ /dev/null @@ -1,12 +0,0 @@ -package inox.grammars -package aspects - -trait AllAspects extends SizeAspects - with ExtraTerminalsAspects - with SimilarToAspects - with DepthBoundAspects - with TypeDepthBoundAspects - with TaggedAspects -{ - self: GrammarsUniverse => -} diff --git 
a/src/main/scala/inox/grammars/aspects/DepthBoundAspects.scala b/src/main/scala/inox/grammars/aspects/DepthBoundAspects.scala deleted file mode 100644 index 3f617782af59f8cd8a339303f0120716b6c914a9..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/DepthBoundAspects.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -trait DepthBoundAspects { self: GrammarsUniverse => - import program.trees.PrinterOptions - - /** Limits a grammar by depth */ - case class DepthBound(depth: Int) extends Aspect { - require(depth >= 0) - - def asString(implicit opts: PrinterOptions): String = s"D$depth" - - def applyTo(l: Label, ps: Seq[Production]) = { - if (depth == 0) Nil - else if (depth == 1) ps.filter(_.isTerminal) - else { - ps map { prod => - prod.copy(subTrees = prod.subTrees.map(_.withAspect(DepthBound(depth - 1)))) - } - } - } - } -} - diff --git a/src/main/scala/inox/grammars/aspects/ExtraTerminalsAspects.scala b/src/main/scala/inox/grammars/aspects/ExtraTerminalsAspects.scala deleted file mode 100644 index 2090376987a0f593e0a3258eefcf83cbdc2ee489..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/ExtraTerminalsAspects.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -trait ExtraTerminalsAspects { self: GrammarsUniverse => - import program._ - import trees._ - import exprOps.formulaSize - import symbols._ - - /** - * Informs sub-productions that there are extra terminals available (used by - * grammars.ExtraTerminals). - */ - case class ExtraTerminals(s: Set[Expr]) extends PersistentAspect { - def asString(implicit opts: PrinterOptions) = { - s.toList.map(_.asString(opts)).mkString("{", ",", "}") - } - - override def applyTo(lab: Label, ps: Seq[Production]) = { - super.applyTo(lab, ps) ++ { - s.filter(e => isSubtypeOf(e.getType, lab.getType)).map { e => - ProductionRule[Label, Expr](Nil, { (es: Seq[Expr]) => e }, Top, formulaSize(e)) - } - } - } - } -} - diff --git a/src/main/scala/inox/grammars/aspects/PersistentAspects.scala b/src/main/scala/inox/grammars/aspects/PersistentAspects.scala deleted file mode 100644 index ff54abec841b4a995fd0e758702585456a2fba6c..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/PersistentAspects.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -trait PersistentAspects { self: GrammarsUniverse => - - /** - * Persistent aspects allow label information to be propagated down: - * Int{e} means (Int with a terminal 'e'). And thus, the Closure grammar - * is able to have, as production: - * Int=>Int := (e: Int) => Int{e} - * In turn, all Int productions, e.g. 
Int := Int + Int, gets transformed by the - * aspect and generate: - * Int{e} := Int{e} + Int{e} - * - * This with the ExtraTerminals grammar, enables the generation of expressions - * like: - * e + 1 - */ - abstract class PersistentAspect extends Aspect { - def applyTo(lab: Label, ps: Seq[Production]) = { - ps.map { p => - p.copy(subTrees = p.subTrees.map(lab => lab.withAspect(this))) - } - } - } -} diff --git a/src/main/scala/inox/grammars/aspects/SimilarToAspects.scala b/src/main/scala/inox/grammars/aspects/SimilarToAspects.scala deleted file mode 100644 index 9c226caf5c886eb041d1fac0104c44de5aae7dd5..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/SimilarToAspects.scala +++ /dev/null @@ -1,144 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -import inox.utils.SeqUtils._ - -trait SimilarToAspects { self: GrammarsUniverse => - import program._ - import trees.{Minus => EMinus, Plus => EPlus, Times => ETimes, _} - import symbols._ - import exprOps._ - - /** Generates expressions similar to a [[Seq]] of given expressions - * - * @param es The expressions for which similar ones will be generated - */ - case class SimilarTo(es: Seq[Expr]) extends Aspect { - type Prods = Seq[ProductionRule[Label, Expr]] - - def asString(implicit opts: PrinterOptions) = es.map(_.asString(opts)).mkString("~", "~", "~") - - def term(e: Expr, tag: Tag = Top, cost: Int = 1): ProductionRule[Label, Expr] = { - ProductionRule(Nil, { case Seq() => e }, tag, cost) - } - - /** - * ~f(a,b)~ ::= f(~a~, b) - * f(a, ~b~) - * f(b, a) // if non-commut - */ - def applyTo(lab: Label, ps: Seq[Production]) = { - def isCommutative(e: Expr) = e match { - case _: EPlus | _: ETimes => true - case _ => false - } - - val similarProds: Prods = es.filter(e => isSubtypeOf(e.getType, lab.getType)).flatMap { e => - val swaps: Prods = e match { - case Deconstructor(as, b) if as.nonEmpty && !isCommutative(e) => - val ast = as.zipWithIndex.groupBy(_._1.getType).mapValues(_.map(_._2).toList) - - val perms = ast.values.map { is => - is.permutations.toList.filter(p => p != is).map(ps => (is zip ps).toMap) - }.filter(_.nonEmpty).toList - - //println("Perms:") - //for (ps <- perms) { - // println(" - "+ps.mkString(", ")) - //} - - for (ps <- cartesianProduct(perms)) yield { - val perm = ps.foldLeft(Map[Int, Int]())( _ ++ _ ) - - //println("Trying permutation "+perm+": "+ - // b(as.indices.map { i => - // as(perm.getOrElse(i, i)) - // })) - - term(b(as.indices.map { i => as(perm.getOrElse(i, i)) })) - } - case _ => - Nil - } - - val subs: Prods = e match { - case Operator(as, b) if as.nonEmpty => - for ((a, i) <- as.zipWithIndex) yield { - ProductionRule[Label, Expr]( - List(Label(a.getType).withAspect(SimilarTo(Seq(a)))), - { case Seq(e) => - b(as.updated(i, e)) - }, - Top, - 1 - ) - } - case _ => - Nil - } - - val typeVariations: Prods = e match { - case IntegerLiteral(v) => - List( - term(IntegerLiteral(v + 1)), - term(IntegerLiteral(v - 1)) - ) - - case IntLiteral(v) => - List( - term(IntLiteral(v + 1)), - term(IntLiteral(v - 1)) - ) - - case BooleanLiteral(v) => - List( - term(BooleanLiteral(!v)) - ) - - case IsTyped(e, IntegerType) => - List( - term(EPlus(e, IntegerLiteral(1))), - term(EMinus(e, IntegerLiteral(1))) - ) - case IsTyped(e, Int32Type) => - List( - term(EPlus(e, IntLiteral(1))), - term(EMinus(e, IntLiteral(1))) - ) - case IsTyped(e, BooleanType) => - List( - term(not(e)) - ) - - case _ => - Nil - } - - val ccVariations: Prods = e match { - 
case ADT(adt, args) => - val resType = adt.getADT.toConstructor - val neighbors = resType.root match { - case tsort: TypedADTSort => - tsort.constructors diff Seq(resType) - case tcons: TypedADTConstructor => - Nil - } - - for (scct <- neighbors if scct.fieldsTypes == resType.fieldsTypes) yield { - term(ADT(scct.toType, args)) - } - case _ => - Nil - } - - swaps ++ subs ++ typeVariations ++ ccVariations - } - - ps ++ similarProds - } - } -} - diff --git a/src/main/scala/inox/grammars/aspects/SizeAspects.scala b/src/main/scala/inox/grammars/aspects/SizeAspects.scala deleted file mode 100644 index 911b79798739d6a5faa06883a72eb2d6bc0b84f3..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/SizeAspects.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -import inox.utils.SeqUtils._ - -/** - * Attach sizes to labels and transmit them down accordingly - */ -trait SizeAspects { self: GrammarsUniverse => - import program.trees.PrinterOptions - - case class Sized(size: Int) extends Aspect { - def asString(implicit opts: PrinterOptions) = "|"+size+"|" - - def applyTo(lab: Label, ps: Seq[Production]) = { - val optimizeCommut = true - - ps.flatMap { p => - if (size <= 0) { - Nil - } else if (p.arity == 0) { - if (size == p.cost) { - List(p) - } else { - Nil - } - } else { - val sizes = if(optimizeCommut && isCommut(p.tag) && p.subTrees.toSet.size == 1) { - sumToOrdered(size - p.cost, p.arity) - } else { - sumTo(size - p.cost, p.arity) - } - - for (ss <- sizes) yield { - val newSubTrees = (p.subTrees zip ss).map { - case (lab, s) => lab.withAspect(Sized(s)) - } - - ProductionRule(newSubTrees, p.builder, p.tag, p.cost) - } - } - } - } - - } - -} - diff --git a/src/main/scala/inox/grammars/aspects/TaggedAspects.scala b/src/main/scala/inox/grammars/aspects/TaggedAspects.scala deleted file mode 100644 index 34126a2c5555a03af7fc5b6a61d3f626294febbf..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/TaggedAspects.scala +++ /dev/null @@ -1,92 +0,0 @@ -/* Copyright 2009-2015 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -trait TaggedAspects { self: GrammarsUniverse => - import program.trees.PrinterOptions - - case class Tagged(tag: Tag, pos: Int, isConst: Option[Boolean]) extends Aspect { - private val cString = isConst match { - case Some(true) => "↓" - case Some(false) => "↑" - case None => "○" - } - - /** [[isConst]] is printed as follows: ↓ for constants only, ↑ for nonconstants only, - * ○ for anything allowed. - */ - def asString(implicit opts: PrinterOptions): String = s"#$tag$cString@$pos" - - def applyTo(lab: Label, ps: Seq[Production]) = { - - // Tags to avoid depending on parent aspect - val excludedTags: Set[Tag] = (tag, pos) match { - case (Top, _) => Set() - case (And, 0) => Set(And, BooleanC) - case (And, 1) => Set(BooleanC) - case (Or, 0) => Set(Or, BooleanC) - case (Or, 1) => Set(BooleanC) - case (Plus, 0) => Set(Plus, Zero, One) - case (Plus, 1) => Set(Zero) - case (Minus, 1) => Set(Zero) - case (Not, _) => Set(Not, BooleanC) - case (Times, 0) => Set(Times, Zero, One) - case (Times, 1) => Set(Zero, One) - case (Equals,_) => Set(Not, BooleanC) - case (Div | Mod, 0 | 1) => Set(Zero, One) - //case (FunCall(true, _), 0) => Set(Constructor(true)) // Don't allow Nil().size etc. 
- case _ => Set() - } - - def powerSet[A](t: Set[A]): Set[Set[A]] = { - @scala.annotation.tailrec - def pwr(t: Set[A], ps: Set[Set[A]]): Set[Set[A]] = - if (t.isEmpty) ps - else pwr(t.tail, ps ++ (ps map (_ + t.head))) - - pwr(t, Set(Set.empty[A])) - } - - - ps.flatMap { p => - val tagsValid = !(excludedTags contains p.tag) - - // If const-ness is explicit, make sure the production has similar const-ness - val constValid = isConst match { - case Some(b) => TaggedAspects.this.isConst(p.tag) == b - case None => true - } - - if (constValid && tagsValid) { - val subAspects = if (p.isTerminal || allConstArgsAllowed(p.tag)) { - Seq(p.subTrees.indices.map { i => - Tagged(p.tag, i, None) - }) - } else { - // All positions are either forced to be const or forced to be - // non-const. We don't want all consts though. - val indices = p.subTrees.indices.toSet - - (powerSet(indices) - indices) map { isConst => - p.subTrees.indices.map { i => - Tagged(p.tag, i, Some(isConst(i))) - } - } - } - - for (as <- subAspects) yield { - val newSubTrees = (p.subTrees zip as).map { case (lab, a) => - lab.withAspect(a) - } - - ProductionRule(newSubTrees, p.builder, p.tag, p.cost) - } - } else { - Nil - } - } - } - } -} diff --git a/src/main/scala/inox/grammars/aspects/TypeDepthBoundAspects.scala b/src/main/scala/inox/grammars/aspects/TypeDepthBoundAspects.scala deleted file mode 100644 index c1fc410a4c2efd8a64e4a702e55dd33787d47e80..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/aspects/TypeDepthBoundAspects.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars -package aspects - -trait TypeDepthBoundAspects { self: GrammarsUniverse => - import program._ - import trees._ - import symbols.typeOps.depth - - case class TypeDepthBound(bound: Int) extends PersistentAspect { - override def asString(implicit opts: PrinterOptions): String = "" // This is just debug pollution to print - - override def applyTo(lab: Label, ps: Seq[Production]) = { - if (depth(lab.getType) > bound) Nil - else super.applyTo(lab, ps) - } - - } -} diff --git a/src/main/scala/inox/grammars/utils/Helpers.scala b/src/main/scala/inox/grammars/utils/Helpers.scala deleted file mode 100644 index 08e91da1a71434423bfc1fe53c3d9529b79e2ec7..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/grammars/utils/Helpers.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package grammars -package utils - -trait Helpers { self: GrammarsUniverse => - import program._ - import trees.{ Minus => EMinus, Plus => EPlus, Variable => EVariable, _ } - import exprOps._ - import symbols._ - - /** - * Filter functions potentially returning target type - * - * If the function takes type parameters, it will try to find an assignment - * such that the function returns the target type. - * - * The return is thus a set of typed functions. - */ - def functionsReturning(fds: Set[FunDef], tpe: Type): Set[TypedFunDef] = { - fds.flatMap { fd => - canBeSubtypeOf(fd.returnType, tpe) match { - case Some(tpsMap) => - Some(fd.typed(fd.typeArgs.map(tp => tpsMap.getOrElse(tp, tp)))) - case None => - None - } - } - } - - /** Given an initial set of function calls provided by a list of [[Terminating]], - * returns function calls that will hopefully be safe to call recursively from within this initial function calls. - * - * For each returned call, one argument is substituted by a "smaller" one, while the rest are left as holes. 
- * - * @param prog The current program - * @param ws Helper predicates that contain [[Terminating]]s with the initial calls - * @param pc The path condition - * @param tpe The expected type for the returned function calls. If absent, all types are permitted. - * @return A list of pairs (safe function call, holes), - * where holes stand for the rest of the arguments of the function. - */ - def terminatingCalls(prog: Program, wss: Seq[FunctionInvocation], pc: Path, tpe: Option[Type], introduceHoles: Boolean): List[(FunctionInvocation, Option[Set[Identifier]])] = { - - def subExprsOf(expr: Expr, v: EVariable): Option[(EVariable, Expr)] = expr match { - case ADTSelector(r, _) => subExprsOf(r, v) - case (r: EVariable) if leastUpperBound(r.getType, v.getType).isDefined => Some(r -> v) - case _ => None - } - - val z = IntegerLiteral(0) - val one = IntegerLiteral(1) - val knownSmallers = (pc.bindings.flatMap { - // @nv: used to check both Equals(id, selector) and Equals(selector, id) - case (id, s @ ADTSelector(r, _)) => subExprsOf(s, id.toVariable) - case _ => None - } ++ pc.conditions.flatMap { - case GreaterThan(v: EVariable, `z`) => - Some(v -> EMinus(v, one)) - case LessThan(`z`, v: EVariable) => - Some(v -> EMinus(v, one)) - case LessThan(v: EVariable, `z`) => - Some(v -> EPlus(v, one)) - case GreaterThan(`z`, v: EVariable) => - Some(v -> EPlus(v, one)) - case _ => None - }).groupBy(_._1).mapValues(v => v.map(_._2)) - - def argsSmaller(e: Expr, tpe: Type): Seq[Expr] = e match { - case ADT(adt, args) => - (adt.getADT.toConstructor.fields.map(_.getType) zip args).collect { - case (t, e) if isSubtypeOf(t, tpe) => - List(e) ++ argsSmaller(e, tpe) - }.flatten - case v: EVariable => - knownSmallers.getOrElse(v, Seq()) - case _ => Nil - } - - val res = wss.flatMap { - case FunctionInvocation(fid, tps, args) => - val resFun = getFunction(fid) - if (tpe forall (isSubtypeOf(resFun.returnType, _))) Nil else { - val ids = resFun.params.map(vd => EVariable(FreshIdentifier("<hole>", true), vd.getType)).toList - - for (((a, i), tpe) <- args.zipWithIndex zip resFun.params.map(_.getType); - smaller <- argsSmaller(a, tpe)) yield { - val newArgs = (if (introduceHoles) ids else args).updated(i, smaller) - val newFd = FunctionInvocation(fid, tps, newArgs) - val freeIds = if(introduceHoles) Some((ids.toSet - ids(i)).map(_.id)) else None - (newFd, freeIds) - } - } - } - - res.toList - } - - - /** - * All functions we call use in synthesis, which includes: - * - all functions in main units - * - all functions imported, or methods of classes imported - */ - def functionsAvailable: Set[FunDef] = program.symbols.functions.values.toSet -} diff --git a/src/main/scala/inox/solvers/EnumerationSolver.scala b/src/main/scala/inox/solvers/EnumerationSolver.scala deleted file mode 100644 index 51a7b7db089db80e5bd4c401dd3f73769b1dba99..0000000000000000000000000000000000000000 --- a/src/main/scala/inox/solvers/EnumerationSolver.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* Copyright 2009-2016 EPFL, Lausanne */ - -package inox -package solvers - -import evaluators._ -import SolverResponses._ -import grammars.GrammarsUniverse -import utils._ -import datagen._ - -trait EnumerationSolver extends Solver { self => - - val grammars: GrammarsUniverse { val program: self.program.type } - val evaluator: DeterministicEvaluator { val program: self.program.type } - - import program._ - import trees._ - import symbols._ - import exprOps._ - - def name = "Enum" - - val maxTried = 10000 - - var datagen: Option[DataGenerator { val program: 
self.program.type }] = None - - private var interrupted = false - - val freeVars = new IncrementalSet[ValDef]() - val constraints = new IncrementalSeq[Expr]() - - def assertCnstr(expression: Expr): Unit = { - constraints += expression - freeVars ++= variablesOf(expression).map(_.toVal) - } - - def push() = { - freeVars.push() - constraints.push() - } - - def pop() = { - freeVars.pop() - constraints.pop() - } - - def reset() = { - freeVars.clear() - constraints.clear() - interrupted = false - datagen = None - } - - def check(config: CheckConfiguration): config.Response[Model, Assumptions] = config.cast { - val res: SolverResponse[Model, Assumptions] = try { - datagen = Some(new GrammarDataGen { - val grammars: self.grammars.type = self.grammars - val evaluator: DeterministicEvaluator { val program: self.program.type } = self.evaluator - val grammar: grammars.ExpressionGrammar = grammars.ValueGrammar - val program: self.program.type = self.program - }) - - if (interrupted) { - Unknown - } else { - val allFreeVars = freeVars.toSeq.sortBy(_.id.name) - val allConstraints = constraints.toSeq - - val it: Iterator[Seq[Expr]] = datagen.get.generateFor(allFreeVars, andJoin(allConstraints), 1, maxTried) - - if (it.hasNext) { - if (config.withModel) { - val varModels = it.next - SatWithModel(allFreeVars.zip(varModels).toMap) - } else { - Sat - } - } else { - val cardinalities = allFreeVars.map(vd => typeCardinality(vd.tpe)) - if (cardinalities.forall(_.isDefined) && cardinalities.flatten.product < maxTried) { - Unsat - } else { - Unknown - } - } - } - } catch { - case e: Throwable => - Unknown - } - datagen = None - res - } - - def checkAssumptions(config: Configuration)(assumptions: Set[Expr]): config.Response[Model, Assumptions] = { - push() - for (c <- assumptions) assertCnstr(c) - val res: config.Response[Model, Assumptions] = config.cast { - (check(Model min config) : SolverResponse[Model, Assumptions]) match { - case Unsat if config.withUnsatAssumptions => UnsatWithAssumptions(Set.empty) - case c => c - } - } - pop() - res - } - - def free() = { - constraints.clear() - } - - def interrupt(): Unit = { - interrupted = true - datagen.foreach(_.interrupt()) - } - - def recoverInterrupt(): Unit = { - interrupted = false - datagen.foreach(_.recoverInterrupt()) - } -} diff --git a/src/main/scala/inox/solvers/SolverFactory.scala b/src/main/scala/inox/solvers/SolverFactory.scala index afbd829eb4f4f12d10054214acb908f591ebedf6..34b253974671af029e6a4d9f35fc9de0c8ac2419 100644 --- a/src/main/scala/inox/solvers/SolverFactory.scala +++ b/src/main/scala/inox/solvers/SolverFactory.scala @@ -3,8 +3,6 @@ package inox package solvers -import inox.grammars.GrammarsUniverse - trait SolverFactory { val program: Program @@ -64,13 +62,13 @@ object SolverFactory { import evaluators._ import combinators._ + import unrolling._ val solverNames = Map( "nativez3" -> "Native Z3 with z3-templates for unrolling", "unrollz3" -> "Native Z3 with inox-templates for unrolling", "smt-cvc4" -> "CVC4 through SMT-LIB", "smt-z3" -> "Z3 through SMT-LIB", - "enum" -> "Enumeration-based counter-example finder", "princess" -> "Princess with inox unrolling" ) @@ -78,9 +76,12 @@ object SolverFactory { (p: Program, opts: Options) (ev: DeterministicEvaluator with SolvingEvaluator { val program: p.type }, enc: ast.ProgramTransformer { - val sourceProgram: p.type - val targetProgram: Program { val trees: inox.trees.type } - }): SolverFactory { val program: p.type; type S <: TimeoutSolver { val program: p.type } } = { + val sourceProgram: p.type + 
val targetProgram: Program { val trees: inox.trees.type } + }): SolverFactory { + val program: p.type + type S <: TimeoutSolver { val program: p.type } + } = { name match { case "nativez3" => create(p)(name, () => new { val program: p.type = p @@ -94,7 +95,7 @@ object SolverFactory { val program: p.type = p val options = opts val encoder = enc - } with unrolling.UnrollingSolver with theories.Z3Theories with TimeoutSolver with tip.TipDebugger { + } with UnrollingSolver with theories.Z3Theories with TimeoutSolver with tip.TipDebugger { val evaluator = ev lazy val modelEvaluator = RecursiveEvaluator(targetProgram, options + optIgnoreContracts(true)) @@ -108,7 +109,7 @@ object SolverFactory { val program: p.type = p val options = opts val encoder = enc - } with unrolling.UnrollingSolver with theories.CVC4Theories with TimeoutSolver with tip.TipDebugger { + } with UnrollingSolver with theories.CVC4Theories with TimeoutSolver with tip.TipDebugger { val evaluator = ev lazy val modelEvaluator = RecursiveEvaluator(targetProgram, options + optIgnoreContracts(true)) @@ -122,7 +123,7 @@ object SolverFactory { val program: p.type = p val options = opts val encoder = enc - } with unrolling.UnrollingSolver with theories.Z3Theories with TimeoutSolver with tip.TipDebugger { + } with UnrollingSolver with theories.Z3Theories with TimeoutSolver with tip.TipDebugger { val evaluator = ev lazy val modelEvaluator = RecursiveEvaluator(targetProgram, options + optIgnoreContracts(true)) @@ -142,16 +143,6 @@ object SolverFactory { val evaluator = ev }) - case "enum" => create(p)(name, () => new { - val program: p.type = p - val options = opts - } with EnumerationSolver with TimeoutSolver { - val evaluator = ev - val grammars: GrammarsUniverse {val program: p.type} = new GrammarsUniverse { - val program: p.type = p - } - }) - case _ => throw FatalError("Unknown solver: " + name) } } diff --git a/src/main/scala/inox/solvers/combinators/TimeoutSolver.scala b/src/main/scala/inox/solvers/combinators/TimeoutSolver.scala index e9b03ebc46ad3af55090d0d5d19f9201333dddbd..39ac8f13348425b658ae4c4eaea964eef90ce0cb 100644 --- a/src/main/scala/inox/solvers/combinators/TimeoutSolver.scala +++ b/src/main/scala/inox/solvers/combinators/TimeoutSolver.scala @@ -25,7 +25,7 @@ trait TimeoutSolver extends Solver { this } - abstract override def check(config: CheckConfiguration) = { + abstract override def check(config: CheckConfiguration): config.Response[Model, Assumptions] = { optTimeout match { case Some(to) => ti.interruptAfter(to) { @@ -37,7 +37,7 @@ trait TimeoutSolver extends Solver { } abstract override def checkAssumptions(config: Configuration) - (assumptions: Set[Expr]) = { + (assumptions: Set[Expr]): config.Response[Model, Assumptions] = { optTimeout match { case Some(to) => ti.interruptAfter(to) {
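/* The signatures touched above lean heavily on path-dependent refinements,
 * for example `SolverFactory { val program: p.type }` ties the factory's
 * `program` member to the exact `p` that was passed in. A minimal standalone
 * sketch of that pattern (illustrative names only, not Inox's types): */
object RefinementSketch {
  trait Program { def name: String }

  trait SolverFactory {
    val program: Program
    def describe: String = "factory for " + program.name
  }

  // The refined result type records that `program` is the given `p` itself.
  def create(p: Program): SolverFactory { val program: p.type } =
    new SolverFactory { val program: p.type = p }

  def main(args: Array[String]): Unit = {
    val prog = new Program { def name = "demo" }
    val sf = create(prog)
    // Thanks to the refinement, the compiler knows sf.program has prog's singleton type.
    val same: prog.type = sf.program
    println(sf.describe + ", same instance: " + (same eq prog))
  }
}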