From 324a83d6800c6c9345a9da69706788bb235a1d47 Mon Sep 17 00:00:00 2001
From: Etienne Kneuss <ekneuss@gmail.com>
Date: Fri, 4 Mar 2016 11:33:32 +0100
Subject: [PATCH] Refactor grammars, introduce Labels, and Label Aspects

 * Expression grammars are now always 'Labels -> Expr', simple grammars
   that are 'Type -> Expr' can simply extend SimpleExpressionGrammar

 * Labels now have Aspects. A label aspect describes a particular
   feature of the label (e.g. its size), as well as how this aspect
   applies to sub-productions.

   Aspects describe how sub-productions are generated/filtered/modified.
   See comments in Aspect for more information
---
 .../scala/leon/datagen/GrammarDataGen.scala   |   6 +-
 src/main/scala/leon/grammars/Aspect.scala     |  34 ++
 .../scala/leon/grammars/BaseGrammar.scala     |   2 +-
 src/main/scala/leon/grammars/Closures.scala   |  29 ++
 src/main/scala/leon/grammars/Constants.scala  |   4 +-
 src/main/scala/leon/grammars/Empty.scala      |   4 +-
 .../scala/leon/grammars/EqualityGrammar.scala |   2 +-
 .../leon/grammars/ExpressionGrammar.scala     |  69 ++--
 .../scala/leon/grammars/FunctionCalls.scala   |   2 +-
 src/main/scala/leon/grammars/Grammars.scala   |   9 +-
 src/main/scala/leon/grammars/Label.scala      |  20 ++
 src/main/scala/leon/grammars/OneOf.scala      |   6 +-
 .../leon/grammars/SafeRecursiveCalls.scala    |   2 +-
 src/main/scala/leon/grammars/SimilarTo.scala  | 310 +++++++++---------
 .../grammars/SimpleExpressionGrammar.scala    |  42 +++
 src/main/scala/leon/grammars/Union.scala      |  17 +
 .../scala/leon/grammars/ValueGrammar.scala    |   2 +-
 .../grammars/aspects/ExtraTerminals.scala     |  28 ++
 .../grammars/aspects/PersistantAspect.scala   |  28 ++
 .../leon/grammars/aspects/SimilarTo.scala     | 136 ++++++++
 .../scala/leon/grammars/aspects/Sized.scala   |  45 +++
 .../scala/leon/grammars/aspects/Tagged.scala  |  93 ++++++
 .../transformers/DepthBoundedGrammar.scala    |  21 --
 .../transformers/EmbeddedGrammar.scala        |  20 --
 .../transformers/SizeBoundedGrammar.scala     |  64 ----
 .../grammars/transformers/TaggedGrammar.scala | 109 ------
 .../leon/grammars/transformers/Union.scala    |  17 -
 .../disambiguation/QuestionBuilder.scala      |   6 +-
 .../scala/leon/synthesis/rules/CEGIS.scala    |  18 +-
 .../leon/synthesis/rules/CEGISLike.scala      |  32 +-
 .../scala/leon/synthesis/rules/CEGLESS.scala  |   6 +-
 .../rules/unused/BottomUpTegis.scala          |  16 +-
 .../leon/synthesis/rules/unused/TEGIS.scala   |   4 +-
 .../synthesis/rules/unused/TEGISLike.scala    |   8 +-
 .../leon/synthesis/rules/unused/TEGLESS.scala |   7 +-
 .../integration/grammars/GrammarsSuite.scala  | 243 ++++++++++++++
 .../leon/test/helpers/ExpressionsDSL.scala    |  14 +
 .../synthesis/current/HOFs/FilterNonNeg.scala |  26 ++
 .../synthesis/current/HOFs/MapPlus1.scala     |  42 +++
 39 files changed, 1058 insertions(+), 485 deletions(-)
 create mode 100644 src/main/scala/leon/grammars/Aspect.scala
 create mode 100644 src/main/scala/leon/grammars/Closures.scala
 create mode 100644 src/main/scala/leon/grammars/Label.scala
 create mode 100644 src/main/scala/leon/grammars/SimpleExpressionGrammar.scala
 create mode 100644 src/main/scala/leon/grammars/Union.scala
 create mode 100644 src/main/scala/leon/grammars/aspects/ExtraTerminals.scala
 create mode 100644 src/main/scala/leon/grammars/aspects/PersistantAspect.scala
 create mode 100644 src/main/scala/leon/grammars/aspects/SimilarTo.scala
 create mode 100644 src/main/scala/leon/grammars/aspects/Sized.scala
 create mode 100644 src/main/scala/leon/grammars/aspects/Tagged.scala
 delete mode 100644 src/main/scala/leon/grammars/transformers/DepthBoundedGrammar.scala
 delete mode 100644 src/main/scala/leon/grammars/transformers/EmbeddedGrammar.scala
 delete mode 100644 src/main/scala/leon/grammars/transformers/SizeBoundedGrammar.scala
 delete mode 100644 src/main/scala/leon/grammars/transformers/TaggedGrammar.scala
 delete mode 100644 src/main/scala/leon/grammars/transformers/Union.scala
 create mode 100644 src/test/scala/leon/integration/grammars/GrammarsSuite.scala
 create mode 100644 testcases/synthesis/current/HOFs/FilterNonNeg.scala
 create mode 100644 testcases/synthesis/current/HOFs/MapPlus1.scala

diff --git a/src/main/scala/leon/datagen/GrammarDataGen.scala b/src/main/scala/leon/datagen/GrammarDataGen.scala
index 22cf17f6e..619b76606 100644
--- a/src/main/scala/leon/datagen/GrammarDataGen.scala
+++ b/src/main/scala/leon/datagen/GrammarDataGen.scala
@@ -19,7 +19,7 @@ import utils.SeqUtils.cartesianProduct
 /** Utility functions to generate values of a given type.
   * In fact, it could be used to generate *terms* of a given type,
   * e.g. by passing trees representing variables for the "bounds". */
-class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar[TypeTree] = ValueGrammar) extends DataGenerator {
+class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar = ValueGrammar) extends DataGenerator {
   implicit val ctx = evaluator.context
 
   // Assume e contains generic values with index 0.
@@ -54,8 +54,8 @@ class GrammarDataGen(evaluator: Evaluator, grammar: ExpressionGrammar[TypeTree]
   }
 
   def generate(tpe: TypeTree): Iterator[Expr] = {
-    val enum = new MemoizedEnumerator[TypeTree, Expr, ProductionRule[TypeTree, Expr]](grammar.getProductions)
-    enum.iterator(tpe).flatMap(expandGenerics)
+    val enum = new MemoizedEnumerator[Label, Expr, ProductionRule[Label, Expr]](grammar.getProductions)
+    enum.iterator(Label(tpe)).flatMap(expandGenerics)
   }
 
   def generateFor(ins: Seq[Identifier], satisfying: Expr, maxValid: Int, maxEnumerated: Int): Iterator[Seq[Expr]] = {
diff --git a/src/main/scala/leon/grammars/Aspect.scala b/src/main/scala/leon/grammars/Aspect.scala
new file mode 100644
index 000000000..a7761faab
--- /dev/null
+++ b/src/main/scala/leon/grammars/Aspect.scala
@@ -0,0 +1,34 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+
+import purescala.Expressions.Expr
+import utils.SeqUtils._
+import Tags._
+
+/**
+ * An Aspect applies to a label, and attaches information to it.
+ *
+ * For instance, the "size" aspect provides information about the size of
+ * expressions the label represents (displayed as |5|).
+ *
+ * Int|5| is thus a Label "Int" with aspect "Sized(5)". The applyTo method of
+ * the aspect can filter/modify/generate sub-productions:
+ * If the grammar contains a Int -> Int + Int production, then
+ * Int|5| will generate productions by doing: |5|.applyTo(Int + Int),
+ * which itself returns:
+ *   - Int|1| + Int|3|
+ *   - Int|3| + Int|1|
+ *   - Int|2| + Int|2|
+ *
+ */
+
+
+abstract class Aspect {
+  type Production = ProductionRule[Label, Expr]
+
+  def asString(implicit ctx: LeonContext): String
+
+  def applyTo(l: Label, ps: Seq[Production])(implicit ctx: LeonContext): Seq[Production]
+}
diff --git a/src/main/scala/leon/grammars/BaseGrammar.scala b/src/main/scala/leon/grammars/BaseGrammar.scala
index b10b5a92a..ee3a85791 100644
--- a/src/main/scala/leon/grammars/BaseGrammar.scala
+++ b/src/main/scala/leon/grammars/BaseGrammar.scala
@@ -12,7 +12,7 @@ import purescala.Constructors._
   * without regard of context (variables in scope, current function etc.)
   * Also does some trivial simplifications.
   */
-case object BaseGrammar extends ExpressionGrammar[TypeTree] {
+case object BaseGrammar extends SimpleExpressionGrammar {
 
   def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
     case BooleanType =>
diff --git a/src/main/scala/leon/grammars/Closures.scala b/src/main/scala/leon/grammars/Closures.scala
new file mode 100644
index 000000000..101534302
--- /dev/null
+++ b/src/main/scala/leon/grammars/Closures.scala
@@ -0,0 +1,29 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+
+import purescala.Types._
+import purescala.TypeOps._
+import purescala.Definitions._
+import purescala.Expressions._
+import purescala.Common._
+import purescala.Constructors._
+
+case object Closures extends ExpressionGrammar {
+  def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] = lab.getType match {
+    case FunctionType(argsTpes, ret) =>
+      val args = argsTpes.zipWithIndex.map { case (tpe, i) =>
+        ValDef(FreshIdentifier("a"+i, tpe))
+      }
+
+      val rlab = Label(ret).withAspect(aspects.ExtraTerminals(args.map(_.toVariable).toSet))
+
+      applyAspects(lab, List(
+        ProductionRule(List(rlab), { case List(body) => Lambda(args, body) }, Tags.Top, 1)
+      ))
+
+    case _ =>
+      Nil
+  }
+}
diff --git a/src/main/scala/leon/grammars/Constants.scala b/src/main/scala/leon/grammars/Constants.scala
index 5decc853c..a542a9a0e 100644
--- a/src/main/scala/leon/grammars/Constants.scala
+++ b/src/main/scala/leon/grammars/Constants.scala
@@ -11,7 +11,7 @@ import purescala.Extractors.IsTyped
 /** Generates constants found in an [[leon.purescala.Expressions.Expr]].
   * Some constants that are generated by other grammars (like 0, 1) will be excluded
   */
-case class Constants(e: Expr) extends ExpressionGrammar[TypeTree] {
+case class Constants(e: Expr) extends SimpleExpressionGrammar {
 
   private val excluded: Set[Expr] = Set(
     InfiniteIntegerLiteral(1),
@@ -30,4 +30,4 @@ case class Constants(e: Expr) extends ExpressionGrammar[TypeTree] {
 
     (literals -- excluded map (terminal(_, Tags.Constant))).toSeq
   }
-}
\ No newline at end of file
+}
diff --git a/src/main/scala/leon/grammars/Empty.scala b/src/main/scala/leon/grammars/Empty.scala
index ebf35ab52..79f391820 100644
--- a/src/main/scala/leon/grammars/Empty.scala
+++ b/src/main/scala/leon/grammars/Empty.scala
@@ -6,6 +6,6 @@ package grammars
 import purescala.Types.Typed
 
 /** The empty expression grammar */
-case class Empty[T <: Typed]() extends ExpressionGrammar[T] {
-  def computeProductions(t: T)(implicit ctx: LeonContext): Seq[Prod] = Nil
+case class Empty() extends ExpressionGrammar {
+  def computeProductions(l: Label)(implicit ctx: LeonContext) = Nil
 }
diff --git a/src/main/scala/leon/grammars/EqualityGrammar.scala b/src/main/scala/leon/grammars/EqualityGrammar.scala
index 2120fdde0..8ffa26878 100644
--- a/src/main/scala/leon/grammars/EqualityGrammar.scala
+++ b/src/main/scala/leon/grammars/EqualityGrammar.scala
@@ -10,7 +10,7 @@ import purescala.Constructors._
   *
   * @param types The set of types for which equalities will be generated
   */
-case class EqualityGrammar(types: Set[TypeTree]) extends ExpressionGrammar[TypeTree] {
+case class EqualityGrammar(types: Set[TypeTree]) extends SimpleExpressionGrammar {
   def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
     case BooleanType =>
       types.toList map { tp =>
diff --git a/src/main/scala/leon/grammars/ExpressionGrammar.scala b/src/main/scala/leon/grammars/ExpressionGrammar.scala
index 3e3d3c898..8c5e304e5 100644
--- a/src/main/scala/leon/grammars/ExpressionGrammar.scala
+++ b/src/main/scala/leon/grammars/ExpressionGrammar.scala
@@ -6,63 +6,70 @@ package grammars
 import purescala.Expressions._
 import purescala.Types._
 import purescala.Common._
-import transformers.Union
 
 import scala.collection.mutable.{HashMap => MutableMap}
 
 /** Represents a context-free grammar of expressions
-  *
-  * @tparam T The type of nonterminal symbols for this grammar
   */
-abstract class ExpressionGrammar[T <: Typed] {
+abstract class ExpressionGrammar {
 
-  type Prod = ProductionRule[T, Expr]
-
-  private[this] val cache = new MutableMap[T, Seq[Prod]]()
-
-  /** Generates a [[ProductionRule]] without nonterminal symbols */
-  def terminal(builder: => Expr, tag: Tags.Tag = Tags.Top, cost: Int = 1) = {
-    ProductionRule[T, Expr](Nil, { (subs: Seq[Expr]) => builder }, tag, cost)
-  }
-
-  /** Generates a [[ProductionRule]] with nonterminal symbols */
-  def nonTerminal(subs: Seq[T], builder: (Seq[Expr] => Expr), tag: Tags.Tag = Tags.Top, cost: Int = 1): ProductionRule[T, Expr] = {
-    ProductionRule[T, Expr](subs, builder, tag, cost)
-  }
+  private[this] val cache = new MutableMap[Label, Seq[ProductionRule[Label, Expr]]]()
 
   /** The list of production rules for this grammar for a given nonterminal.
     * This is the cached version of [[getProductions]] which clients should use.
     *
-    * @param t The nonterminal for which production rules will be generated
+    * @param lab The nonterminal for which production rules will be generated
     */
-  def getProductions(t: T)(implicit ctx: LeonContext): Seq[Prod] = {
-    cache.getOrElse(t, {
-      val res = computeProductions(t)
-      cache += t -> res
+  def getProductions(lab: Label)(implicit ctx: LeonContext) = {
+    cache.getOrElse(lab, {
+      val res = applyAspects(lab, computeProductions(lab))
+      cache += lab -> res
       res
     })
   }
 
   /** The list of production rules for this grammar for a given nonterminal
     *
-    * @param t The nonterminal for which production rules will be generated
+    * @param lab The nonterminal for which production rules will be generated
     */
-  def computeProductions(t: T)(implicit ctx: LeonContext): Seq[Prod]
+  def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]]
+
 
-  def filter(f: Prod => Boolean) = {
-    new ExpressionGrammar[T] {
-      def computeProductions(t: T)(implicit ctx: LeonContext) = ExpressionGrammar.this.computeProductions(t).filter(f)
+  def applyAspects(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+    lab.aspects.foldLeft(ps) {
+      case (ps, a) => a.applyTo(lab, ps)
     }
   }
 
-  final def ||(that: ExpressionGrammar[T]): ExpressionGrammar[T] = {
+  final def ||(that: ExpressionGrammar): ExpressionGrammar = {
     Union(Seq(this, that))
   }
 
-
   final def printProductions(printer: String => Unit)(implicit ctx: LeonContext) {
-    for ((t, gs) <- cache.toSeq.sortBy(_._1.asString)) {
-      val lhs = f"${Console.BOLD}${t.asString}%50s${Console.RESET} ::= "
+    def sorter(lp1: (Label, Seq[ProductionRule[Label, Expr]]), lp2: (Label, Seq[ProductionRule[Label, Expr]])): Boolean = {
+      val l1 = lp1._1
+      val l2 = lp2._1
+
+      val os1 = l1.aspects.collectFirst { case aspects.Sized(size) => size }
+      val os2 = l2.aspects.collectFirst { case aspects.Sized(size) => size }
+
+      (os1, os2) match {
+        case (Some(s1), Some(s2)) =>
+          if (s1 > s2) {
+            true
+          } else if (s1 == s2) {
+            l1.asString < l2.asString
+          } else {
+            false
+          }
+        case _ => l1.asString < l2.asString
+      }
+    }
+
+
+    for ((lab, gs) <- cache.toSeq.sortWith(sorter)) {
+      val lhs = f"${Console.BOLD}${lab.asString}%50s${Console.RESET} ::= "
+
       if (gs.isEmpty) {
         printer(s"${lhs}ε")
       } else {
diff --git a/src/main/scala/leon/grammars/FunctionCalls.scala b/src/main/scala/leon/grammars/FunctionCalls.scala
index 27d9c5197..c8110a020 100644
--- a/src/main/scala/leon/grammars/FunctionCalls.scala
+++ b/src/main/scala/leon/grammars/FunctionCalls.scala
@@ -16,7 +16,7 @@ import purescala.Expressions._
   * @param types The candidate real type parameters for [[currentFunction]]
   * @param exclude An additional set of functions for which no calls will be generated
   */
-case class FunctionCalls(prog: Program, currentFunction: FunDef, types: Seq[TypeTree], exclude: Set[FunDef]) extends ExpressionGrammar[TypeTree] {
+case class FunctionCalls(prog: Program, currentFunction: FunDef, types: Seq[TypeTree], exclude: Set[FunDef]) extends SimpleExpressionGrammar {
   def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
 
      def getCandidates(fd: FunDef): Seq[TypedFunDef] = {
diff --git a/src/main/scala/leon/grammars/Grammars.scala b/src/main/scala/leon/grammars/Grammars.scala
index fcbc628c4..7ed0760f4 100644
--- a/src/main/scala/leon/grammars/Grammars.scala
+++ b/src/main/scala/leon/grammars/Grammars.scala
@@ -15,8 +15,9 @@ import synthesis.{SynthesisContext, Problem}
 
 object Grammars {
 
-  def default(prog: Program, inputs: Seq[Expr], currentFunction: FunDef, exclude: Set[FunDef]): ExpressionGrammar[TypeTree] = {
+  def default(prog: Program, inputs: Seq[Expr], currentFunction: FunDef, exclude: Set[FunDef]): ExpressionGrammar = {
     BaseGrammar ||
+    Closures ||
     EqualityGrammar(Set(IntegerType, Int32Type, BooleanType) ++ inputs.map { _.getType }) ||
     OneOf(inputs) ||
     Constants(currentFunction.fullBody) ||
@@ -24,14 +25,10 @@ object Grammars {
     FunctionCalls(prog, currentFunction, inputs.map(_.getType), exclude)
   }
 
-  def default(sctx: SynthesisContext, p: Problem, extraHints: Seq[Expr] = Seq()): ExpressionGrammar[TypeTree] = {
+  def default(sctx: SynthesisContext, p: Problem, extraHints: Seq[Expr] = Seq()): ExpressionGrammar = {
     val TopLevelAnds(ws) = p.ws
     val hints = ws.collect{ case Hint(e) if formulaSize(e) >= 4 => e }
     default(sctx.program, p.as.map(_.toVariable) ++ hints ++ extraHints, sctx.functionContext, sctx.settings.functionsToIgnore)
   }
-
-  def typeDepthBound[T <: Typed](g: ExpressionGrammar[T], b: Int) = {
-    g.filter(g => g.subTrees.forall(t => typeDepth(t.getType) <= b))
-  }
 }
 
diff --git a/src/main/scala/leon/grammars/Label.scala b/src/main/scala/leon/grammars/Label.scala
new file mode 100644
index 000000000..dc3e98ea9
--- /dev/null
+++ b/src/main/scala/leon/grammars/Label.scala
@@ -0,0 +1,20 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+
+import purescala.Types._
+import purescala.Expressions._
+import purescala.Constructors._
+
+case class Label(tpe: TypeTree, aspects: List[Aspect] = Nil) extends Typed {
+  val getType = tpe
+
+  def asString(implicit ctx: LeonContext): String = {
+    val ts = tpe.asString
+
+    ts + aspects.map(_.asString).mkString
+  }
+
+  def withAspect(a: Aspect) = Label(tpe, aspects :+ a)
+}
diff --git a/src/main/scala/leon/grammars/OneOf.scala b/src/main/scala/leon/grammars/OneOf.scala
index 3e77de8fa..cad657b83 100644
--- a/src/main/scala/leon/grammars/OneOf.scala
+++ b/src/main/scala/leon/grammars/OneOf.scala
@@ -8,10 +8,10 @@ import purescala.TypeOps._
 import purescala.Types.TypeTree
 
 /** Generates one production rule for each expression in a sequence that has compatible type */
-case class OneOf(inputs: Seq[Expr]) extends ExpressionGrammar[TypeTree] {
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
+case class OneOf(inputs: Seq[Expr]) extends SimpleExpressionGrammar {
+  def computeProductions(lab: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
     inputs.collect {
-      case i if isSubtypeOf(i.getType, t) =>
+      case i if isSubtypeOf(i.getType, lab.getType) =>
         terminal(i)
     }
   }
diff --git a/src/main/scala/leon/grammars/SafeRecursiveCalls.scala b/src/main/scala/leon/grammars/SafeRecursiveCalls.scala
index cbfd3947d..679f6f764 100644
--- a/src/main/scala/leon/grammars/SafeRecursiveCalls.scala
+++ b/src/main/scala/leon/grammars/SafeRecursiveCalls.scala
@@ -14,7 +14,7 @@ import synthesis.utils.Helpers._
   * @param ws An expression that contains the known set [[synthesis.Witnesses.Terminating]] expressions
   * @param pc The path condition for the generated [[Expr]] by this grammar
   */
-case class SafeRecursiveCalls(prog: Program, ws: Expr, pc: Expr) extends ExpressionGrammar[TypeTree] {
+case class SafeRecursiveCalls(prog: Program, ws: Expr, pc: Expr) extends SimpleExpressionGrammar {
   def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
     val calls = terminatingCalls(prog,ws, pc, Some(t), true)
 
diff --git a/src/main/scala/leon/grammars/SimilarTo.scala b/src/main/scala/leon/grammars/SimilarTo.scala
index 6b2f3f538..295f18311 100644
--- a/src/main/scala/leon/grammars/SimilarTo.scala
+++ b/src/main/scala/leon/grammars/SimilarTo.scala
@@ -3,7 +3,6 @@
 package leon
 package grammars
 
-import transformers._
 import purescala.Types._
 import purescala.TypeOps._
 import purescala.Extractors._
@@ -15,158 +14,159 @@ import synthesis._
 /** A grammar that generates expressions by inserting small variations in [[e]]
  * @param e The [[Expr]] to which small variations will be inserted
  */
-case class SimilarTo(e: Expr, sctx: SynthesisContext, p: Problem) extends ExpressionGrammar[NonTerminal[String]] {
-
-  val excludeFCalls = sctx.settings.functionsToIgnore
-
-  val normalGrammar: ExpressionGrammar[NonTerminal[String]] = DepthBoundedGrammar(EmbeddedGrammar(
-    Grammars.default(sctx, p, Seq(e)),
-    (t: TypeTree) => NonTerminal(t, "B", None),
-    (l: NonTerminal[String]) => l.getType
-  ), 1)
-
-  type L = NonTerminal[String]
-
-  val getNext: () => Int = {
-    var counter = -1
-    () => {
-      counter += 1
-      counter
-    }
-  }
-
-  private[this] var similarCache: Option[Map[L, Seq[Prod]]] = None
-
-  def computeProductions(t: L)(implicit ctx: LeonContext): Seq[Prod] = {
-    t match {
-      case NonTerminal(_, "B", _) => normalGrammar.computeProductions(t)
-      case _                =>
-
-        val allSimilar = similarCache.getOrElse {
-          val res = computeSimilar(e).groupBy(_._1).mapValues(_.map(_._2))
-          similarCache = Some(res)
-          res
-        }
-
-        allSimilar.getOrElse(t, Nil)
-    }
-  }
-
-  def computeSimilar(e : Expr)(implicit ctx: LeonContext): Seq[(L, Prod)] = {
-
-    def getLabel(t: TypeTree) = {
-      val tpe = bestRealType(t)
-      val c = getNext()
-      NonTerminal(tpe, "G"+c)
-    }
-
-    def isCommutative(e: Expr) = e match {
-      case _: Plus | _: Times => true
-      case _ => false
-    }
-
-    def rec(e: Expr, gl: L): Seq[(L, Prod)] = {
-
-      def gens(e: Expr, gl: L, subs: Seq[Expr], builder: (Seq[Expr] => Expr)): Seq[(L, Prod)] = {
-        val subGls = subs.map { s => getLabel(s.getType) }
-
-        // All the subproductions for sub gl
-        val allSubs = (subs zip subGls).flatMap { case (e, gl) => rec(e, gl) }
-
-        // Inject fix at one place
-        val injectG = for ((sgl, i) <- subGls.zipWithIndex) yield {
-          gl -> nonTerminal(Seq(sgl), { case Seq(ge) => builder(subs.updated(i, ge)) } )
-        }
-
-        val swaps = if (subs.size > 1 && !isCommutative(e)) {
-          (for (i <- subs.indices;
-                j <- i+1 until subs.size) yield {
-
-            if (subs(i).getType == subs(j).getType) {
-              val swapSubs = subs.updated(i, subs(j)).updated(j, subs(i))
-              Some(gl -> terminal(builder(swapSubs)))
-            } else {
-              None
-            }
-          }).flatten
-        } else {
-          Nil
-        }
-
-        allSubs ++ injectG ++ swaps
-      }
-
-      def cegis(gl: L): Seq[(L, Prod)] = {
-        normalGrammar.getProductions(gl).map(gl -> _)
-      }
-
-      def int32Variations(gl: L, e : Expr): Seq[(L, Prod)] = {
-        Seq(
-          gl -> terminal(BVMinus(e, IntLiteral(1))),
-          gl -> terminal(BVPlus (e, IntLiteral(1)))
-        )
-      }
-
-      def intVariations(gl: L, e : Expr): Seq[(L, Prod)] = {
-        Seq(
-          gl -> terminal(Minus(e, InfiniteIntegerLiteral(1))),
-          gl -> terminal(Plus (e, InfiniteIntegerLiteral(1)))
-        )
-      }
-
-      // Find neighbor case classes that are compatible with the arguments:
-      // Turns And(e1, e2) into Or(e1, e2)...
-      def ccVariations(gl: L, cc: CaseClass): Seq[(L, Prod)] = {
-        val CaseClass(cct, args) = cc
-
-        val neighbors = cct.root.knownCCDescendants diff Seq(cct)
-
-        for (scct <- neighbors if scct.fieldsTypes == cct.fieldsTypes) yield {
-          gl -> terminal(CaseClass(scct, args))
-        }
-      }
-
-      val funFilter = (fd: FunDef) => fd.isSynthetic || (excludeFCalls contains fd)
-      val subs: Seq[(L, Prod)] = e match {
-        
-        case _: Terminal | _: Let | _: LetDef | _: MatchExpr =>
-          gens(e, gl, Nil, { _ => e }) ++ cegis(gl)
-
-        case cc @ CaseClass(cct, exprs) =>
-          gens(e, gl, exprs, { case ss => CaseClass(cct, ss) }) ++ ccVariations(gl, cc)
-
-        case FunctionInvocation(TypedFunDef(fd, _), _) if funFilter(fd) =>
-          // We allow only exact call, and/or cegis extensions
-          /*Seq(el -> Generator[L, Expr](Nil, { _ => e })) ++*/ cegis(gl)
-
-        case Operator(subs, builder) =>
-          gens(e, gl, subs, { case ss => builder(ss) })
-      }
-
-      val terminalsMatching = p.as.collect {
-        case IsTyped(term, tpe) if tpe == gl.getType && Variable(term) != e =>
-          gl -> terminal(Variable(term))
-      }
-
-      val variations = e.getType match {
-        case IntegerType => intVariations(gl, e)
-        case Int32Type => int32Variations(gl, e)
-        case _ => Nil
-      }
-
-      subs ++ terminalsMatching ++ variations
-    }
-
-    val gl = getLabel(e.getType)
-
-    val res = rec(e, gl)
-
-    //for ((t, g) <- res) {
-    //  val subs = g.subTrees.map { t => FreshIdentifier(t.asString, t.getType).toVariable}
-    //  val gen = g.builder(subs)
-
-    //  println(f"$t%30s ::= "+gen)
-    //}
-    res
-  }
+case class SimilarTo(e: Expr, sctx: SynthesisContext, p: Problem) extends ExpressionGrammar {
+
+  def computeProductions(l: Label)(implicit ctx: LeonContext) = Nil
+//  val excludeFCalls = sctx.settings.functionsToIgnore
+//
+//  val normalGrammar: ExpressionGrammar[NonTerminal[String]] = DepthBoundedGrammar(EmbeddedGrammar(
+//    Grammars.default(sctx, p, Seq(e)),
+//    (t: TypeTree) => NonTerminal(t, "B", None),
+//    (l: NonTerminal[String]) => l.getType
+//  ), 1)
+//
+//  type L = NonTerminal[String]
+//
+//  val getNext: () => Int = {
+//    var counter = -1
+//    () => {
+//      counter += 1
+//      counter
+//    }
+//  }
+//
+//  private[this] var similarCache: Option[Map[L, Seq[Prod]]] = None
+//
+//  def computeProductions(t: L)(implicit ctx: LeonContext): Seq[Prod] = {
+//    t match {
+//      case NonTerminal(_, "B", _) => normalGrammar.computeProductions(t)
+//      case _                =>
+//
+//        val allSimilar = similarCache.getOrElse {
+//          val res = computeSimilar(e).groupBy(_._1).mapValues(_.map(_._2))
+//          similarCache = Some(res)
+//          res
+//        }
+//
+//        allSimilar.getOrElse(t, Nil)
+//    }
+//  }
+//
+//  def computeSimilar(e : Expr)(implicit ctx: LeonContext): Seq[(L, Prod)] = {
+//
+//    def getLabel(t: TypeTree) = {
+//      val tpe = bestRealType(t)
+//      val c = getNext()
+//      NonTerminal(tpe, "G"+c)
+//    }
+//
+//    def isCommutative(e: Expr) = e match {
+//      case _: Plus | _: Times => true
+//      case _ => false
+//    }
+//
+//    def rec(e: Expr, gl: L): Seq[(L, Prod)] = {
+//
+//      def gens(e: Expr, gl: L, subs: Seq[Expr], builder: (Seq[Expr] => Expr)): Seq[(L, Prod)] = {
+//        val subGls = subs.map { s => getLabel(s.getType) }
+//
+//        // All the subproductions for sub gl
+//        val allSubs = (subs zip subGls).flatMap { case (e, gl) => rec(e, gl) }
+//
+//        // Inject fix at one place
+//        val injectG = for ((sgl, i) <- subGls.zipWithIndex) yield {
+//          gl -> nonTerminal(Seq(sgl), { case Seq(ge) => builder(subs.updated(i, ge)) } )
+//        }
+//
+//        val swaps = if (subs.size > 1 && !isCommutative(e)) {
+//          (for (i <- subs.indices;
+//                j <- i+1 until subs.size) yield {
+//
+//            if (subs(i).getType == subs(j).getType) {
+//              val swapSubs = subs.updated(i, subs(j)).updated(j, subs(i))
+//              Some(gl -> terminal(builder(swapSubs)))
+//            } else {
+//              None
+//            }
+//          }).flatten
+//        } else {
+//          Nil
+//        }
+//
+//        allSubs ++ injectG ++ swaps
+//      }
+//
+//      def cegis(gl: L): Seq[(L, Prod)] = {
+//        normalGrammar.getProductions(gl).map(gl -> _)
+//      }
+//
+//      def int32Variations(gl: L, e : Expr): Seq[(L, Prod)] = {
+//        Seq(
+//          gl -> terminal(BVMinus(e, IntLiteral(1))),
+//          gl -> terminal(BVPlus (e, IntLiteral(1)))
+//        )
+//      }
+//
+//      def intVariations(gl: L, e : Expr): Seq[(L, Prod)] = {
+//        Seq(
+//          gl -> terminal(Minus(e, InfiniteIntegerLiteral(1))),
+//          gl -> terminal(Plus (e, InfiniteIntegerLiteral(1)))
+//        )
+//      }
+//
+//      // Find neighbor case classes that are compatible with the arguments:
+//      // Turns And(e1, e2) into Or(e1, e2)...
+//      def ccVariations(gl: L, cc: CaseClass): Seq[(L, Prod)] = {
+//        val CaseClass(cct, args) = cc
+//
+//        val neighbors = cct.root.knownCCDescendants diff Seq(cct)
+//
+//        for (scct <- neighbors if scct.fieldsTypes == cct.fieldsTypes) yield {
+//          gl -> terminal(CaseClass(scct, args))
+//        }
+//      }
+//
+//      val funFilter = (fd: FunDef) => fd.isSynthetic || (excludeFCalls contains fd)
+//      val subs: Seq[(L, Prod)] = e match {
+//        
+//        case _: Terminal | _: Let | _: LetDef | _: MatchExpr =>
+//          gens(e, gl, Nil, { _ => e }) ++ cegis(gl)
+//
+//        case cc @ CaseClass(cct, exprs) =>
+//          gens(e, gl, exprs, { case ss => CaseClass(cct, ss) }) ++ ccVariations(gl, cc)
+//
+//        case FunctionInvocation(TypedFunDef(fd, _), _) if funFilter(fd) =>
+//          // We allow only exact call, and/or cegis extensions
+//          /*Seq(el -> Generator[L, Expr](Nil, { _ => e })) ++*/ cegis(gl)
+//
+//        case Operator(subs, builder) =>
+//          gens(e, gl, subs, { case ss => builder(ss) })
+//      }
+//
+//      val terminalsMatching = p.as.collect {
+//        case IsTyped(term, tpe) if tpe == gl.getType && Variable(term) != e =>
+//          gl -> terminal(Variable(term))
+//      }
+//
+//      val variations = e.getType match {
+//        case IntegerType => intVariations(gl, e)
+//        case Int32Type => int32Variations(gl, e)
+//        case _ => Nil
+//      }
+//
+//      subs ++ terminalsMatching ++ variations
+//    }
+//
+//    val gl = getLabel(e.getType)
+//
+//    val res = rec(e, gl)
+//
+//    //for ((t, g) <- res) {
+//    //  val subs = g.subTrees.map { t => FreshIdentifier(t.asString, t.getType).toVariable}
+//    //  val gen = g.builder(subs)
+//
+//    //  println(f"$t%30s ::= "+gen)
+//    //}
+//    res
+//  }
 }
diff --git a/src/main/scala/leon/grammars/SimpleExpressionGrammar.scala b/src/main/scala/leon/grammars/SimpleExpressionGrammar.scala
new file mode 100644
index 000000000..f12849521
--- /dev/null
+++ b/src/main/scala/leon/grammars/SimpleExpressionGrammar.scala
@@ -0,0 +1,42 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+
+import purescala.Expressions._
+import purescala.Types._
+
+abstract class SimpleExpressionGrammar extends ExpressionGrammar {
+
+  type Prod = ProductionRule[TypeTree, Expr]
+
+  /** Generates a [[ProductionRule]] without nonterminal symbols */
+  def terminal(builder: => Expr, tag: Tags.Tag = Tags.Top, cost: Int = 1) = {
+    ProductionRule[TypeTree, Expr](Nil, { (subs: Seq[Expr]) => builder }, tag, cost)
+  }
+
+  /** Generates a [[ProductionRule]] with nonterminal symbols */
+  def nonTerminal(subs: Seq[TypeTree], builder: (Seq[Expr] => Expr), tag: Tags.Tag = Tags.Top, cost: Int = 1) = {
+    ProductionRule[TypeTree, Expr](subs, builder, tag, cost)
+  }
+
+  def filter(f: Prod => Boolean) = {
+    new SimpleExpressionGrammar {
+      def computeProductions(lab: TypeTree)(implicit ctx: LeonContext) = {
+        SimpleExpressionGrammar.this.computeProductions(lab).filter(f)
+      }
+    }
+  }
+
+  /** The list of production rules for this grammar for a given nonterminal
+    *
+    * @param lab The nonterminal for which production rules will be generated
+    */
+  def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] = {
+    computeProductions(lab.getType).map { p =>
+      ProductionRule(p.subTrees.map(Label(_)), p.builder, p.tag, p.cost)
+    }
+  }
+
+  def computeProductions(tpe: TypeTree)(implicit ctx: LeonContext): Seq[ProductionRule[TypeTree, Expr]]
+}
diff --git a/src/main/scala/leon/grammars/Union.scala b/src/main/scala/leon/grammars/Union.scala
new file mode 100644
index 000000000..8d5d5a42f
--- /dev/null
+++ b/src/main/scala/leon/grammars/Union.scala
@@ -0,0 +1,17 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+
+import purescala.Expressions.Expr
+import purescala.Types.Typed
+
+case class Union(gs: Seq[ExpressionGrammar]) extends ExpressionGrammar {
+  val subGrammars: Seq[ExpressionGrammar] = gs.flatMap {
+    case u: Union => u.subGrammars
+    case g => Seq(g)
+  }
+
+  def computeProductions(label: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] =
+    subGrammars.flatMap(_.computeProductions(label))
+}
diff --git a/src/main/scala/leon/grammars/ValueGrammar.scala b/src/main/scala/leon/grammars/ValueGrammar.scala
index c97ed9a14..996debb85 100644
--- a/src/main/scala/leon/grammars/ValueGrammar.scala
+++ b/src/main/scala/leon/grammars/ValueGrammar.scala
@@ -7,7 +7,7 @@ import purescala.Types._
 import purescala.Expressions._
 
 /** A grammar of values (ground terms) */
-case object ValueGrammar extends ExpressionGrammar[TypeTree] {
+case object ValueGrammar extends SimpleExpressionGrammar {
   def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
     case BooleanType =>
       List(
diff --git a/src/main/scala/leon/grammars/aspects/ExtraTerminals.scala b/src/main/scala/leon/grammars/aspects/ExtraTerminals.scala
new file mode 100644
index 000000000..f2d0f46d0
--- /dev/null
+++ b/src/main/scala/leon/grammars/aspects/ExtraTerminals.scala
@@ -0,0 +1,28 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+package aspects
+
+import purescala.Expressions.Expr
+import purescala.ExprOps.formulaSize
+import purescala.TypeOps.isSubtypeOf
+
+/**
+ * Informs sub-productions that there are extra terminals available (used by
+ * grammars.ExtraTerminals).
+ */
+case class ExtraTerminals(s: Set[Expr]) extends PersistantAspect {
+  def asString(implicit ctx: LeonContext) = {
+    s.toList.map(_.asString).mkString("{", ",", "}")
+  }
+
+
+  override def applyTo(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+    super.applyTo(lab, ps) ++ {
+      s.filter(e => isSubtypeOf(e.getType, lab.getType)).map { e =>
+        ProductionRule[Label, Expr](Nil, { (es: Seq[Expr]) => e }, Tags.Top, formulaSize(e))
+      }
+    }
+  }
+}
diff --git a/src/main/scala/leon/grammars/aspects/PersistantAspect.scala b/src/main/scala/leon/grammars/aspects/PersistantAspect.scala
new file mode 100644
index 000000000..4d8670bd4
--- /dev/null
+++ b/src/main/scala/leon/grammars/aspects/PersistantAspect.scala
@@ -0,0 +1,28 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+package aspects
+
+import purescala.Expressions.Expr
+
+/**
+ * Persistent aspects allow label information to be propagated down:
+ * Int{e} means (Int with a terminal 'e'). And thus, the Closure grammar
+ * is able to have, as production:
+ *   Int=>Int  :=  (e: Int) => Int{e}
+ * In turn, all Int productions, e.g. Int := Int + Int, gets transformed by the
+ * aspect and generate:
+ *   Int{e}  :=  Int{e} + Int{e}
+ *
+ * This, together with the ExtraTerminals grammar, enables the generation of expressions
+ * like:
+ *   e + 1
+ */
+abstract class PersistantAspect extends Aspect {
+  def applyTo(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+    ps.map { p =>
+      p.copy(subTrees = p.subTrees.map(lab => lab.withAspect(this)))
+    }
+  }
+}
diff --git a/src/main/scala/leon/grammars/aspects/SimilarTo.scala b/src/main/scala/leon/grammars/aspects/SimilarTo.scala
new file mode 100644
index 000000000..4685f9a3c
--- /dev/null
+++ b/src/main/scala/leon/grammars/aspects/SimilarTo.scala
@@ -0,0 +1,136 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+package aspects
+
+import purescala.Expressions._
+import purescala.Types._
+import purescala.TypeOps._
+import purescala.Constructors._
+import purescala.Extractors._
+import utils.SeqUtils._
+
+/**
+ * Derives production rules for expressions similar to a given expression e (e.g. ~f(a,b)~ ::= f(~a~, b))
+ */
+case class SimilarTo(e: Expr) extends Aspect {
+  type Prods = Seq[ProductionRule[Label, Expr]]
+
+  def asString(implicit ctx: LeonContext) = "~"+e.asString+"~"
+
+  def term(e: Expr, tag: Tags.Tag = Tags.Top, cost: Int = 1): ProductionRule[Label, Expr] = {
+    ProductionRule(Nil, { case Seq() => e }, tag, cost)
+  }
+
+  /**
+   * ~f(a,b)~  ::=  f(~a~, b)
+   *                f(a, ~b~)
+   *                f(b, a)   // if non-commut
+   */
+  def applyTo(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+    def isCommutative(e: Expr) = e match {
+      case _: Plus | _: Times => true
+      case _ => false
+    }
+
+    val similarProds: Prods = if (isSubtypeOf(e.getType, lab.getType)) {
+      val swaps: Prods = e match {
+        case Operator(as, b) if as.nonEmpty && !isCommutative(e) =>
+          val ast = as.zipWithIndex.groupBy(_._1.getType).mapValues(_.map(_._2).toList)
+
+          val perms = ast.values.map { is =>
+            is.permutations.toList.filter(p => p != is).map(ps => (is zip ps).toMap)
+          }.filter(_.nonEmpty).toList
+
+          //println("Perms:")
+          //for (ps <- perms) {
+          //  println(" - "+ps.mkString(", "))
+          //}
+
+          for (ps <- cartesianProduct(perms)) yield {
+            val perm = ps.foldLeft(Map[Int, Int]())( _ ++ _ )
+
+            //println("Trying permutation "+perm+": "+
+            //    b(as.indices.map { i =>
+            //      as(perm.getOrElse(i, i))
+            //    }))
+
+            term(b(as.indices.map { i => as(perm.getOrElse(i, i)) }))
+          }
+        case _ =>
+          Nil
+      }
+
+      val subs: Prods = e match {
+        case Operator(as, b) if as.size > 0 =>
+          for ((a, i) <- as.zipWithIndex) yield {
+            ProductionRule[Label, Expr](
+              List(Label(a.getType).withAspect(SimilarTo(a))),
+              { case Seq(e) =>
+                b(as.updated(i, e))
+              },
+              Tags.Top,
+              1
+            )
+          }
+        case _ =>
+          Nil
+      }
+
+      val typeVariations: Prods = e match {
+        case InfiniteIntegerLiteral(v) =>
+          List(
+            term(InfiniteIntegerLiteral(v + 1)),
+            term(InfiniteIntegerLiteral(v - 1))
+          )
+
+        case IntLiteral(v) =>
+          List(
+            term(IntLiteral(v + 1)),
+            term(IntLiteral(v - 1))
+          )
+
+        case BooleanLiteral(v) =>
+          List(
+            term(BooleanLiteral(!v))
+          )
+
+        case IsTyped(e, IntegerType) =>
+          List(
+            term(Plus(e, InfiniteIntegerLiteral(1))),
+            term(Minus(e, InfiniteIntegerLiteral(1)))
+          )
+        case IsTyped(e, Int32Type) =>
+          List(
+            term(BVPlus(e, IntLiteral(1))),
+            term(BVMinus(e, IntLiteral(1)))
+          )
+        case IsTyped(e, BooleanType) =>
+          List(
+            term(not(e))
+          )
+
+        case _ =>
+          Nil
+      }
+
+      val ccVariations: Prods = e match {
+        case CaseClass(cct, args) =>
+          val neighbors = cct.root.knownCCDescendants diff Seq(cct)
+
+          for (scct <- neighbors if scct.fieldsTypes == cct.fieldsTypes) yield {
+            term(CaseClass(scct, args))
+          }
+        case _ =>
+          Nil
+      }
+
+      swaps ++ subs ++ typeVariations ++ ccVariations
+    } else {
+      Nil
+    }
+
+    ps ++ similarProds
+  }
+}
diff --git a/src/main/scala/leon/grammars/aspects/Sized.scala b/src/main/scala/leon/grammars/aspects/Sized.scala
new file mode 100644
index 000000000..664852578
--- /dev/null
+++ b/src/main/scala/leon/grammars/aspects/Sized.scala
@@ -0,0 +1,45 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+package aspects
+
+import purescala.Expressions.Expr
+import utils.SeqUtils._
+
+/**
+ * Attach sizes to labels and transmit them down accordingly
+ */
+case class Sized(size: Int) extends Aspect {
+  def asString(implicit ctx: LeonContext) = "|"+size+"|"
+
+  def applyTo(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+    val optimizeCommut = true
+
+    ps.flatMap { p =>
+      if (size <= 0) {
+        Nil
+      } else if (p.arity == 0) {
+        if (size == p.cost) {
+          List(p)
+        } else {
+          Nil
+        }
+      } else {
+        val sizes = if(optimizeCommut && Tags.isCommut(p.tag) && p.subTrees.toSet.size == 1) {
+          sumToOrdered(size - p.cost, p.arity)
+        } else {
+          sumTo(size - p.cost, p.arity)
+        }
+
+        for (ss <- sizes) yield {
+          val newSubTrees = (p.subTrees zip ss).map {
+            case (lab, s) => lab.withAspect(Sized(s))
+          }
+
+          ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+        }
+      }
+    }
+  }
+}
diff --git a/src/main/scala/leon/grammars/aspects/Tagged.scala b/src/main/scala/leon/grammars/aspects/Tagged.scala
new file mode 100644
index 000000000..be097ac12
--- /dev/null
+++ b/src/main/scala/leon/grammars/aspects/Tagged.scala
@@ -0,0 +1,93 @@
+/* Copyright 2009-2015 EPFL, Lausanne */
+
+package leon
+package grammars
+package aspects
+
+import purescala.Expressions.Expr
+import utils.SeqUtils._
+import Tags._
+
+case class Tagged(tag: Tag, pos: Int, isConst: Option[Boolean]) extends Aspect {
+  private val cString = isConst match {
+    case Some(true) => "↓"
+    case Some(false) => "↑"
+    case None => "○"
+  }
+
+  /** [[isConst]] is printed as follows: ↓ for constants only, ↑ for nonconstants only,
+    * ○ for anything allowed.
+    */
+  override def asString(implicit ctx: LeonContext): String = s"$tag@$pos$cString"
+
+
+  override def applyTo(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
+
+    // Tags to avoid depending on parent aspect
+    val excludedTags: Set[Tag] = (tag, pos) match {
+      case (Top,   _)             => Set()
+      case (And,   0)             => Set(And, BooleanC)
+      case (And,   1)             => Set(BooleanC)
+      case (Or,    0)             => Set(Or, BooleanC)
+      case (Or,    1)             => Set(BooleanC)
+      case (Plus,  0)             => Set(Plus, Zero, One)
+      case (Plus,  1)             => Set(Zero)
+      case (Minus, 1)             => Set(Zero)
+      case (Not,   _)             => Set(Not, BooleanC)
+      case (Times, 0)             => Set(Times, Zero, One)
+      case (Times, 1)             => Set(Zero, One)
+      case (Equals,_)             => Set(Not, BooleanC)
+      case (Div | Mod, 0 | 1)     => Set(Zero, One)
+      case (FunCall(true, _), 0)  => Set(Constructor(true)) // Don't allow Nil().size etc.
+      case _                      => Set()
+    }
+
+    def powerSet[A](t: Set[A]): Set[Set[A]] = {
+      @scala.annotation.tailrec
+      def pwr(t: Set[A], ps: Set[Set[A]]): Set[Set[A]] =
+        if (t.isEmpty) ps
+        else pwr(t.tail, ps ++ (ps map (_ + t.head)))
+
+      pwr(t, Set(Set.empty[A]))
+    }
+
+
+    ps.flatMap { p =>
+      val tagsValid = !(excludedTags contains p.tag)
+
+      // If const-ness is explicit, make sure the production has similar const-ness
+      val constValid = isConst match {
+        case Some(b) => Tags.isConst(p.tag) == b
+        case None    => true
+      }
+
+      if (constValid && tagsValid) {
+        val subAspects = if (p.isTerminal || Tags.allConstArgsAllowed(p.tag)) {
+          Seq(p.subTrees.indices.map { i =>
+            Tagged(p.tag, i, None)
+          })
+        } else {
+          // All positions are either forced to be const or forced to be
+          // non-const. We don't want all consts though.
+          val indices = p.subTrees.indices.toSet
+
+          (powerSet(indices) - indices) map { isConst =>
+            p.subTrees.indices.map { i =>
+              Tagged(p.tag, i, Some(isConst(i)))
+            }
+          }
+        }
+
+        for (as <- subAspects) yield {
+          val newSubTrees = (p.subTrees zip as).map { case (lab, a) =>
+            lab.withAspect(a)
+          }
+
+          ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+        }
+      } else {
+        Nil
+      }
+    }
+  }
+}
diff --git a/src/main/scala/leon/grammars/transformers/DepthBoundedGrammar.scala b/src/main/scala/leon/grammars/transformers/DepthBoundedGrammar.scala
deleted file mode 100644
index 10a7f314c..000000000
--- a/src/main/scala/leon/grammars/transformers/DepthBoundedGrammar.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-package transformers
-
-/** Limits a grammar to a specific expression depth */
-case class DepthBoundedGrammar[L](g: ExpressionGrammar[NonTerminal[L]], bound: Int) extends ExpressionGrammar[NonTerminal[L]] {
-  def computeProductions(l: NonTerminal[L])(implicit ctx: LeonContext): Seq[Prod] = g.computeProductions(l).flatMap {
-    case gen =>
-      if (l.depth == Some(bound) && gen.isNonTerminal) {
-        Nil
-      } else if (l.depth.exists(_ > bound)) {
-        Nil
-      } else {
-        List (
-          gen.copy(subTrees = gen.subTrees.map(sl => sl.copy(depth = l.depth.map(_+1).orElse(Some(1)))))
-        )
-      }
-  }
-}
diff --git a/src/main/scala/leon/grammars/transformers/EmbeddedGrammar.scala b/src/main/scala/leon/grammars/transformers/EmbeddedGrammar.scala
deleted file mode 100644
index cb5bff3f5..000000000
--- a/src/main/scala/leon/grammars/transformers/EmbeddedGrammar.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-package transformers
-
-import leon.purescala.Types.Typed
-
-/**
- * Embed a grammar Li->Expr within a grammar Lo->Expr
- * 
- * We rely on a bijection between Li and Lo labels
- */
-case class EmbeddedGrammar[Ti <: Typed, To <: Typed](innerGrammar: ExpressionGrammar[Ti], iToo: Ti => To, oToi: To => Ti) extends ExpressionGrammar[To] {
-  def computeProductions(t: To)(implicit ctx: LeonContext): Seq[Prod] = {
-    innerGrammar.computeProductions(oToi(t)).map { innerGen =>
-      innerGen.copy(subTrees = innerGen.subTrees.map(iToo))
-    }
-  }
-}
diff --git a/src/main/scala/leon/grammars/transformers/SizeBoundedGrammar.scala b/src/main/scala/leon/grammars/transformers/SizeBoundedGrammar.scala
deleted file mode 100644
index b506ac82b..000000000
--- a/src/main/scala/leon/grammars/transformers/SizeBoundedGrammar.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-package transformers
-
-import purescala.Types.Typed
-import utils.SeqUtils._
-
-/** Adds information about size to a nonterminal symbol */
-case class SizedNonTerm[T <: Typed](underlying: T, size: Int) extends Typed {
-  val getType = underlying.getType
-
-  override def asString(implicit ctx: LeonContext) = underlying.asString+"|"+size+"|"
-}
-
-/** Limits a grammar by producing expressions of size bounded by the [[SizedNonTerm.size]] of a given [[SizedNonTerm]].
-  *
-  * In case of commutative operations, the grammar will produce trees skewed to the right
-  * (i.e. the right subtree will always be larger). Notice we do not lose generality in case of
-  * commutative operations.
-  */
-case class SizeBoundedGrammar[T <: Typed](g: ExpressionGrammar[T], optimizeCommut: Boolean) extends ExpressionGrammar[SizedNonTerm[T]] {
-  def computeProductions(sl: SizedNonTerm[T])(implicit ctx: LeonContext): Seq[Prod] = {
-
-
-    if (sl.size <= 0) {
-      Nil
-    } else {
-      val (terminals, nonTerminals) = g.getProductions(sl.underlying).partition(_.isTerminal)
-
-      val termProds = terminals.filter(_.cost == sl.size).map { gen =>
-        terminal(gen.builder(Seq()), gen.tag, gen.cost)
-      }
-
-      val recProds = nonTerminals.filter(_.cost < sl.size).flatMap { gen =>
-        // Ad-hoc equality that does not take into account position etc.of TaggedNonTerminal's
-        // TODO: Ugly and hacky
-        def characteristic(t: T): Typed = t match {
-          case TaggedNonTerm(underlying, _, _, _) =>
-            underlying
-          case other =>
-            other
-        }
-
-        // Optimization: When we have a commutative operation and all the labels are the same,
-        // we can skew the expression to always be right-heavy
-        val sizes = if(optimizeCommut && Tags.isCommut(gen.tag) && gen.subTrees.map(characteristic).toSet.size == 1) {
-          sumToOrdered(sl.size-gen.cost, gen.arity)
-        } else {
-          sumTo(sl.size-gen.cost, gen.arity)
-        }
-
-        for (ss <- sizes) yield {
-          val subSizedLabels = (gen.subTrees zip ss) map (s => SizedNonTerm(s._1, s._2))
-
-          gen.copy(subTrees = subSizedLabels)
-        }
-      }
-
-      termProds ++ recProds
-    }
-  }
-}
diff --git a/src/main/scala/leon/grammars/transformers/TaggedGrammar.scala b/src/main/scala/leon/grammars/transformers/TaggedGrammar.scala
deleted file mode 100644
index 2d0d73fd3..000000000
--- a/src/main/scala/leon/grammars/transformers/TaggedGrammar.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-package transformers
-
-import leon.purescala.Types.Typed
-import Tags._
-
-/** Adds to a nonterminal information about about the tag of its parent's [[leon.grammars.ProductionRule.tag]]
-  * and additional information.
-  * 
-  * @param underlying The underlying nonterminal
-  * @param tag The tag of the parent of this nonterminal
-  * @param pos The index of this nonterminal in its father's production rule
-  * @param isConst Whether this nonterminal is obliged to generate/not generate constants.
-  *
-  */
-case class TaggedNonTerm[T <: Typed](underlying: T, tag: Tag, pos: Int, isConst: Option[Boolean]) extends Typed {
-  val getType = underlying.getType
-
-  private val cString = isConst match {
-    case Some(true) => "↓"
-    case Some(false) => "↑"
-    case None => "â—‹"
-  }
-
-  /** [[isConst]] is printed as follows: ↓ for constants only, ↑ for nonconstants only,
-    * â—‹ for anything allowed.
-    */
-  override def asString(implicit ctx: LeonContext): String = s"$underlying%$tag@$pos$cString"
-}
-
-/** Constraints a grammar to reduce redundancy by utilizing information provided by the [[TaggedNonTerm]].
-  *
-  * 1) In case of associative operations, right associativity is enforced.
-  * 2) Does not generate
-  *    - neutral and absorbing elements (incl. boolean equality)
-  *    - nested negations
-  * 3) Excludes method calls on nullary case objects, e.g. Nil().size
-  * 4) Enforces that no constant trees are generated (and recursively for each subtree)
-  *
-  * @param g The underlying untagged grammar
-  */
-case class TaggedGrammar[T <: Typed](g: ExpressionGrammar[T]) extends ExpressionGrammar[TaggedNonTerm[T]] {
-
-  private def exclude(tag: Tag, pos: Int): Set[Tag] = (tag, pos) match {
-    case (Top,   _) => Set()
-    case (And,   0) => Set(And, BooleanC)
-    case (And,   1) => Set(BooleanC)
-    case (Or,    0) => Set(Or, BooleanC)
-    case (Or,    1) => Set(BooleanC)
-    case (Plus,  0) => Set(Plus, Zero, One)
-    case (Plus,  1) => Set(Zero)
-    case (Minus, 1) => Set(Zero)
-    case (Not,   _) => Set(Not, BooleanC)
-    case (Times, 0) => Set(Times, Zero, One)
-    case (Times, 1) => Set(Zero, One)
-    case (Equals,_) => Set(Not, BooleanC)
-    case (Div | Mod, 0 | 1) => Set(Zero, One)
-    case (FunCall(true, _), 0) => Set(Constructor(true)) // Don't allow Nil().size etc.
-    case _ => Set()
-  }
-
-  def computeProductions(t: TaggedNonTerm[T])(implicit ctx: LeonContext): Seq[Prod] = {
-
-    // Point (4) for this level
-    val constFilter: g.Prod => Boolean = t.isConst match {
-      case Some(b) =>
-        innerGen => isConst(innerGen.tag) == b
-      case None =>
-        _ => true
-    }
-
-    g.computeProductions(t.underlying)
-      // Include only constants iff constants are forced, only non-constants iff they are forced
-      .filter(constFilter)
-      // Points (1), (2). (3)
-      .filterNot { innerGen => exclude(t.tag, t.pos)(innerGen.tag) }
-      .flatMap   { innerGen =>
-
-        def nt(isConst: Int => Option[Boolean]) = innerGen.copy(
-          subTrees = innerGen.subTrees.zipWithIndex.map {
-            case (t, pos) => TaggedNonTerm(t, innerGen.tag, pos, isConst(pos))
-          }
-        )
-
-        def powerSet[A](t: Set[A]): Set[Set[A]] = {
-          @scala.annotation.tailrec
-          def pwr(t: Set[A], ps: Set[Set[A]]): Set[Set[A]] =
-            if (t.isEmpty) ps
-            else pwr(t.tail, ps ++ (ps map (_ + t.head)))
-
-          pwr(t, Set(Set.empty[A]))
-        }
-
-        // Allow constants everywhere if this is allowed, otherwise demand at least 1 variable.
-        // Aka. tag subTrees correctly so point (4) is enforced in the lower level
-        // (also, make sure we treat terminals correctly).
-        if (innerGen.isTerminal || allConstArgsAllowed(innerGen.tag)) {
-          Seq(nt(_ => None))
-        } else {
-          val indices = innerGen.subTrees.indices.toSet
-          (powerSet(indices) - indices) map (indices => nt(x => Some(indices(x))))
-        }
-      }
-  }
-
-}
diff --git a/src/main/scala/leon/grammars/transformers/Union.scala b/src/main/scala/leon/grammars/transformers/Union.scala
deleted file mode 100644
index ab61004fa..000000000
--- a/src/main/scala/leon/grammars/transformers/Union.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-package transformers
-
-import purescala.Types.Typed
-
-case class Union[T <: Typed](gs: Seq[ExpressionGrammar[T]]) extends ExpressionGrammar[T] {
-  val subGrammars: Seq[ExpressionGrammar[T]] = gs.flatMap {
-    case u: Union[T] => u.subGrammars
-    case g => Seq(g)
-  }
-
-  def computeProductions(t: T)(implicit ctx: LeonContext): Seq[Prod] =
-    subGrammars.flatMap(_.getProductions(t))
-}
diff --git a/src/main/scala/leon/synthesis/disambiguation/QuestionBuilder.scala b/src/main/scala/leon/synthesis/disambiguation/QuestionBuilder.scala
index ccbdd8f15..8e3947da0 100644
--- a/src/main/scala/leon/synthesis/disambiguation/QuestionBuilder.scala
+++ b/src/main/scala/leon/synthesis/disambiguation/QuestionBuilder.scala
@@ -69,7 +69,7 @@ object QuestionBuilder {
   }
   
   /** Specific enumeration of strings, which can be used with the QuestionBuilder#setValueEnumerator method */
-  object SpecialStringValueGrammar extends ExpressionGrammar[TypeTree] {
+  object SpecialStringValueGrammar extends SimpleExpressionGrammar {
     def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
       case StringType =>
         List(
@@ -109,7 +109,7 @@ class QuestionBuilder[T <: Expr](
   private var solutionsToTake = 30
   private var expressionsToTake = 30
   private var keepEmptyAlternativeQuestions: T => Boolean = Set()
-  private var value_enumerator: ExpressionGrammar[TypeTree] = ValueGrammar
+  private var value_enumerator: ExpressionGrammar = ValueGrammar
 
   /** Sets the way to sort questions. See [[QuestionSortingType]] */
   def setSortQuestionBy(questionSorMethod: QuestionSortingType) = { _questionSorMethod = questionSorMethod; this }
@@ -124,7 +124,7 @@ class QuestionBuilder[T <: Expr](
   /** Sets if when there is no alternative, the question should be kept. */
   def setKeepEmptyAlternativeQuestions(b: T => Boolean) = {keepEmptyAlternativeQuestions = b; this }
   /** Sets the way to enumerate expressions */
-  def setValueEnumerator(v: ExpressionGrammar[TypeTree]) = value_enumerator = v
+  def setValueEnumerator(v: ExpressionGrammar) = value_enumerator = v
   
   private def run(s: Solution, elems: Seq[(Identifier, Expr)]): Option[Expr] = {
     val newProgram = DefOps.addFunDefs(p, s.defs, p.definedFunctions.head)
diff --git a/src/main/scala/leon/synthesis/rules/CEGIS.scala b/src/main/scala/leon/synthesis/rules/CEGIS.scala
index aad3d9cc3..2ec177fc9 100644
--- a/src/main/scala/leon/synthesis/rules/CEGIS.scala
+++ b/src/main/scala/leon/synthesis/rules/CEGIS.scala
@@ -5,15 +5,18 @@ package synthesis
 package rules
 
 import grammars._
-import grammars.transformers._
+import grammars.Tags
+import grammars.aspects._
 import purescala.Types.TypeTree
 
 /** Basic implementation of CEGIS that uses a naive grammar */
-case object NaiveCEGIS extends CEGISLike[TypeTree]("Naive CEGIS") {
+case object NaiveCEGIS extends CEGISLike("Naive CEGIS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
     CegisParams(
+      //grammar = Grammars.typeDepthBound(Grammars.default(sctx, p), 2), // This limits type depth
+      //rootLabel = {(tpe: TypeTree) => tpe },
       grammar = Grammars.default(sctx, p), // This limits type depth
-      rootLabel = {(tpe: TypeTree) => tpe },
+      rootLabel = Label(_),
       optimizations = false
     )
   }
@@ -22,12 +25,13 @@ case object NaiveCEGIS extends CEGISLike[TypeTree]("Naive CEGIS") {
 /** More advanced implementation of CEGIS that uses a less permissive grammar
   * and some optimizations
   */
-case object CEGIS extends CEGISLike[TaggedNonTerm[TypeTree]]("CEGIS") {
+case object CEGIS extends CEGISLike("CEGIS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
-    val base = NaiveCEGIS.getParams(sctx,p).grammar
     CegisParams(
-      grammar = TaggedGrammar(base),
-      rootLabel = TaggedNonTerm(_, Tags.Top, 0, None),
+      //grammar = TaggedGrammar(base),
+      //rootLabel = TaggedNonTerm(_, Tags.Top, 0, None),
+      grammar = NaiveCEGIS.getParams(sctx,p).grammar,
+      rootLabel = Label(_).withAspect(Tagged(Tags.Top, 0, None)),
       optimizations = true
     )
   }
diff --git a/src/main/scala/leon/synthesis/rules/CEGISLike.scala b/src/main/scala/leon/synthesis/rules/CEGISLike.scala
index 09a22476a..7ac1efc04 100644
--- a/src/main/scala/leon/synthesis/rules/CEGISLike.scala
+++ b/src/main/scala/leon/synthesis/rules/CEGISLike.scala
@@ -15,7 +15,7 @@ import purescala.TypeOps.typeDepth
 
 import solvers._
 import grammars._
-import grammars.transformers._
+import grammars.aspects._
 import leon.utils._
 
 import evaluators._
@@ -24,11 +24,11 @@ import codegen.CodeGenParams
 
 import scala.collection.mutable.{HashMap => MutableMap}
 
-abstract class CEGISLike[T <: Typed](name: String) extends Rule(name) {
+abstract class CEGISLike(name: String) extends Rule(name) {
 
   case class CegisParams(
-    grammar: ExpressionGrammar[T],
-    rootLabel: TypeTree => T,
+    grammar: ExpressionGrammar,
+    rootLabel: TypeTree => Label,
     optimizations: Boolean,
     maxSize: Option[Int] = None
   )
@@ -73,12 +73,10 @@ abstract class CEGISLike[T <: Typed](name: String) extends Rule(name) {
 
       private val targetType = tupleTypeWrap(p.xs.map(_.getType))
 
-      val grammar = SizeBoundedGrammar(
-        Grammars.typeDepthBound(params.grammar, typeDepth(targetType)),
-        params.optimizations
-      )
+      val grammar = params.grammar
 
-      def rootLabel = SizedNonTerm(params.rootLabel(targetType), termSize)
+      //def rootLabel = SizedNonTerm(params.rootLabel(tupleTypeWrap(p.xs.map(_.getType))), termSize)
+      def rootLabel = params.rootLabel(targetType).withAspect(Sized(termSize))
 
       def init(): Unit = {
         updateCTree()
@@ -113,19 +111,19 @@ abstract class CEGISLike[T <: Typed](name: String) extends Rule(name) {
 
       // Generator of fresh cs that minimizes labels
       class CGenerator {
-        private var buffers = Map[SizedNonTerm[T], Stream[Identifier]]()
+        private var buffers = Map[Label, Stream[Identifier]]()
 
-        private var slots = Map[SizedNonTerm[T], Int]().withDefaultValue(0)
+        private var slots = Map[Label, Int]().withDefaultValue(0)
 
-        private def streamOf(t: SizedNonTerm[T]): Stream[Identifier] = Stream.continually(
+        private def streamOf(t: Label): Stream[Identifier] = Stream.continually(
           FreshIdentifier(t.asString, t.getType, true)
         )
 
         def rewind(): Unit = {
-          slots = Map[SizedNonTerm[T], Int]().withDefaultValue(0)
+          slots = Map[Label, Int]().withDefaultValue(0)
         }
 
-        def getNext(t: SizedNonTerm[T]) = {
+        def getNext(t: Label) = {
           if (!(buffers contains t)) {
             buffers += t -> streamOf(t)
           }
@@ -151,7 +149,7 @@ abstract class CEGISLike[T <: Typed](name: String) extends Rule(name) {
           id
         }
 
-        def defineCTreeFor(l: SizedNonTerm[T], c: Identifier): Unit = {
+        def defineCTreeFor(l: Label, c: Identifier): Unit = {
           if (!(cTree contains c)) {
             val cGen = new CGenerator()
 
@@ -203,9 +201,9 @@ abstract class CEGISLike[T <: Typed](name: String) extends Rule(name) {
 
       // Returns a count of all possible programs
       val allProgramsCount: () => Int = {
-        var nAltsCache = Map[SizedNonTerm[T], Int]()
+        var nAltsCache = Map[Label, Int]()
 
-        def countAlternatives(l: SizedNonTerm[T]): Int = {
+        def countAlternatives(l: Label): Int = {
           if (!(nAltsCache contains l)) {
             val count = grammar.getProductions(l).map { gen =>
               gen.subTrees.map(countAlternatives).product
diff --git a/src/main/scala/leon/synthesis/rules/CEGLESS.scala b/src/main/scala/leon/synthesis/rules/CEGLESS.scala
index d59e99011..e29c6d2bc 100644
--- a/src/main/scala/leon/synthesis/rules/CEGLESS.scala
+++ b/src/main/scala/leon/synthesis/rules/CEGLESS.scala
@@ -4,14 +4,13 @@ package leon
 package synthesis
 package rules
 
-import leon.grammars.transformers.Union
 import purescala.ExprOps._
 import purescala.Types._
 import purescala.Extractors._
 import grammars._
 import Witnesses._
 
-case object CEGLESS extends CEGISLike[NonTerminal[String]]("CEGLESS") {
+case object CEGLESS extends CEGISLike("CEGLESS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
     val TopLevelAnds(clauses) = p.ws
 
@@ -30,7 +29,8 @@ case object CEGLESS extends CEGISLike[NonTerminal[String]]("CEGLESS") {
 
     CegisParams(
       grammar = guidedGrammar,
-      rootLabel = { (tpe: TypeTree) => NonTerminal(tpe, "G0") },
+      //rootLabel = { (tpe: TypeTree) => NonTerminal(tpe, "G0") },
+      rootLabel = { (tpe: TypeTree) => Label(tpe) },
       optimizations = false,
       maxSize = Some((0 +: guides.map(depth(_) + 1)).max)
     )
diff --git a/src/main/scala/leon/synthesis/rules/unused/BottomUpTegis.scala b/src/main/scala/leon/synthesis/rules/unused/BottomUpTegis.scala
index 8f82395a5..9f5dff95e 100644
--- a/src/main/scala/leon/synthesis/rules/unused/BottomUpTegis.scala
+++ b/src/main/scala/leon/synthesis/rules/unused/BottomUpTegis.scala
@@ -15,18 +15,18 @@ import grammars._
 
 import bonsai.enumerators._
 
-case object BottomUpTEGIS extends BottomUpTEGISLike[TypeTree]("BU TEGIS") {
+case object BottomUpTEGIS extends BottomUpTEGISLike("BU TEGIS") {
   def getGrammar(sctx: SynthesisContext, p: Problem) = {
     Grammars.default(sctx, p)
   }
 
-  def getRootLabel(tpe: TypeTree): TypeTree = tpe
+  def getRootLabel(tpe: TypeTree): Label = Label(tpe)
 }
 
-abstract class BottomUpTEGISLike[T <: Typed](name: String) extends Rule(name) {
-  def getGrammar(sctx: SynthesisContext, p: Problem): ExpressionGrammar[T]
+abstract class BottomUpTEGISLike(name: String) extends Rule(name) {
+  def getGrammar(sctx: SynthesisContext, p: Problem): ExpressionGrammar
 
-  def getRootLabel(tpe: TypeTree): T
+  def getRootLabel(tpe: TypeTree): Label
 
   def instantiateOn(implicit hctx: SearchContext, p: Problem): Traversable[RuleInstantiation] = {
 
@@ -47,13 +47,13 @@ abstract class BottomUpTEGISLike[T <: Typed](name: String) extends Rule(name) {
 
           val nTests = tests.size
 
-          var compiled = Map[ProductionRule[T, Expr], Vector[Vector[Expr]] => Option[Vector[Expr]]]()
+          var compiled = Map[ProductionRule[Label, Expr], Vector[Vector[Expr]] => Option[Vector[Expr]]]()
 
           /**
            * Compile Generators to functions from Expr to Expr. The compiled
            * generators will be passed to the enumerator
            */
-          def compile(gen: ProductionRule[T, Expr]): Vector[Vector[Expr]] => Option[Vector[Expr]] = {
+          def compile(gen: ProductionRule[Label, Expr]): Vector[Vector[Expr]] => Option[Vector[Expr]] = {
             compiled.getOrElse(gen, {
               val executor = if (gen.subTrees.isEmpty) {
 
@@ -104,7 +104,7 @@ abstract class BottomUpTEGISLike[T <: Typed](name: String) extends Rule(name) {
           val targetType   = tupleTypeWrap(p.xs.map(_.getType))
           val wrappedTests = tests.map { case (is, os) => (is, tupleWrap(os))}
 
-          val enum = new BottomUpEnumerator[T, Expr, Expr, ProductionRule[T, Expr]](
+          val enum = new BottomUpEnumerator[Label, Expr, Expr, ProductionRule[Label, Expr]](
             grammar.getProductions(_)(hctx),
             wrappedTests,
             { (vecs, gen) =>
diff --git a/src/main/scala/leon/synthesis/rules/unused/TEGIS.scala b/src/main/scala/leon/synthesis/rules/unused/TEGIS.scala
index 7b08304dc..886d57719 100644
--- a/src/main/scala/leon/synthesis/rules/unused/TEGIS.scala
+++ b/src/main/scala/leon/synthesis/rules/unused/TEGIS.scala
@@ -7,11 +7,11 @@ package rules.unused
 import purescala.Types._
 import grammars._
 
-case object TEGIS extends TEGISLike[TypeTree]("TEGIS") {
+case object TEGIS extends TEGISLike("TEGIS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
     TegisParams(
       grammar = Grammars.default(sctx, p),
-      rootLabel = {(tpe: TypeTree) => tpe }
+      rootLabel = Label(_)
     )
   }
 }
diff --git a/src/main/scala/leon/synthesis/rules/unused/TEGISLike.scala b/src/main/scala/leon/synthesis/rules/unused/TEGISLike.scala
index 0f0a76a96..16eb701f9 100644
--- a/src/main/scala/leon/synthesis/rules/unused/TEGISLike.scala
+++ b/src/main/scala/leon/synthesis/rules/unused/TEGISLike.scala
@@ -18,10 +18,10 @@ import scala.collection.mutable.{HashMap => MutableMap}
 
 import bonsai.enumerators._
 
-abstract class TEGISLike[T <: Typed](name: String) extends Rule(name) {
+abstract class TEGISLike(name: String) extends Rule(name) {
   case class TegisParams(
-    grammar: ExpressionGrammar[T],
-    rootLabel: TypeTree => T,
+    grammar: ExpressionGrammar,
+    rootLabel: TypeTree => Label,
     enumLimit: Int = 10000,
     reorderInterval: Int = 50
   )
@@ -65,7 +65,7 @@ abstract class TEGISLike[T <: Typed](name: String) extends Rule(name) {
           val evalParams = CodeGenParams.default.copy(maxFunctionInvocations = 2000)
           val evaluator  = new DualEvaluator(hctx, hctx.program, evalParams)
 
-          val enum = new MemoizedEnumerator[T, Expr, ProductionRule[T, Expr]](grammar.getProductions)
+          val enum = new MemoizedEnumerator[Label, Expr, ProductionRule[Label, Expr]](grammar.getProductions)
 
           val targetType = tupleTypeWrap(p.xs.map(_.getType))
 
diff --git a/src/main/scala/leon/synthesis/rules/unused/TEGLESS.scala b/src/main/scala/leon/synthesis/rules/unused/TEGLESS.scala
index a58a0b138..924e9567b 100644
--- a/src/main/scala/leon/synthesis/rules/unused/TEGLESS.scala
+++ b/src/main/scala/leon/synthesis/rules/unused/TEGLESS.scala
@@ -9,7 +9,7 @@ import purescala.Extractors._
 import Witnesses._
 import grammars._
 
-case object TEGLESS extends TEGISLike[NonTerminal[String]]("TEGLESS") {
+case object TEGLESS extends TEGISLike("TEGLESS") {
   def getParams(sctx: SynthesisContext, p: Problem) = {
 
     val TopLevelAnds(clauses) = p.ws
@@ -25,11 +25,12 @@ case object TEGLESS extends TEGISLike[NonTerminal[String]]("TEGLESS") {
       }
     }
 
-    val guidedGrammar = guides.map(SimilarTo(_, sctx, p)).foldLeft[ExpressionGrammar[NonTerminal[String]]](Empty())(_ || _)
+    val guidedGrammar = guides.map(SimilarTo(_, sctx, p)).foldLeft[ExpressionGrammar](Empty())(_ || _)
 
     TegisParams(
       grammar = guidedGrammar,
-      rootLabel = { (tpe: TypeTree) => NonTerminal(tpe, "G0") }
+      //rootLabel = { (tpe: TypeTree) => NonTerminal(tpe, "G0") }
+      rootLabel = { (tpe: TypeTree) => Label(tpe) }
     )
   }
 }
diff --git a/src/test/scala/leon/integration/grammars/GrammarsSuite.scala b/src/test/scala/leon/integration/grammars/GrammarsSuite.scala
new file mode 100644
index 000000000..71fc0cb55
--- /dev/null
+++ b/src/test/scala/leon/integration/grammars/GrammarsSuite.scala
@@ -0,0 +1,243 @@
+/* Copyright 2009-2016 EPFL, Lausanne */
+
+package leon.integration.grammars
+
+import leon._
+import leon.test._
+import leon.test.helpers._
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.Constructors._
+import leon.purescala.Expressions._
+import leon.purescala.Types._
+import leon.codegen._
+import synthesis._
+import bonsai.enumerators._
+import grammars._
+import grammars.aspects.SimilarTo
+
+class SimilarToSuite extends LeonTestSuiteWithProgram with ExpressionsDSL {
+
+  val sources = List(
+    """|import leon.lang._
+       |import leon.annotation._
+       |import leon.collection._
+       |import leon._
+       |
+       |object Trees {
+       |  abstract class Expr
+       |  case class Plus(lhs: Expr, rhs: Expr) extends Expr
+       |  case class Minus(lhs: Expr, rhs: Expr) extends Expr
+       |  case class LessThan(lhs: Expr, rhs: Expr) extends Expr
+       |  case class And(lhs: Expr, rhs: Expr) extends Expr
+       |  case class Or(lhs: Expr, rhs: Expr) extends Expr
+       |  case class Not(e : Expr) extends Expr
+       |  case class Eq(lhs: Expr, rhs: Expr) extends Expr
+       |  case class Ite(cond: Expr, thn: Expr, els: Expr) extends Expr
+       |  case class IntLiteral(v: BigInt) extends Expr
+       |  case class BoolLiteral(b : Boolean) extends Expr
+       |}
+       |object STrees {
+       |  abstract class Expr
+       |  case class Plus(lhs : Expr, rhs : Expr) extends Expr
+       |  case class Neg(arg : Expr) extends Expr
+       |  case class Ite(cond : Expr, thn : Expr, els : Expr) extends Expr
+       |  case class Eq(lhs : Expr, rhs : Expr) extends Expr
+       |  case class LessThan(lhs : Expr, rhs : Expr) extends Expr
+       |  case class Literal(i : BigInt) extends Expr
+       |}
+       |
+       |object Desugar {
+       |  def desugar(e: Trees.Expr): STrees.Expr = {
+       |    STrees.Literal(0)
+       |  }
+       |} """.stripMargin,
+
+    """|import leon.lang._
+       |import leon.collection._
+       |
+       |object Heaps {
+       |
+       |  sealed abstract class Heap {
+       |    val rank : BigInt = this match {
+       |      case Leaf() => 0
+       |      case Node(_, l, r) => 
+       |        1 + max(l.rank, r.rank)
+       |    }
+       |    def content : Set[BigInt] = this match {
+       |      case Leaf() => Set[BigInt]()
+       |      case Node(v,l,r) => l.content ++ Set(v) ++ r.content
+       |    }
+       |  }
+       |  case class Leaf() extends Heap
+       |  case class Node(value:BigInt, left: Heap, right: Heap) extends Heap
+       |
+       |  def max(i1 : BigInt, i2 : BigInt) = if (i1 >= i2) i1 else i2
+       |
+       |  def hasHeapProperty(h : Heap) : Boolean = h match {
+       |    case Leaf() => true
+       |    case Node(v, l, r) => 
+       |      ( l match {
+       |        case Leaf() => true
+       |        case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n)
+       |      }) && 
+       |      ( r match {
+       |        case Leaf() => true
+       |        case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n)
+       |      })
+       |  }
+       |
+       |  def hasLeftistProperty(h: Heap) : Boolean = h match {
+       |    case Leaf() => true
+       |    case Node(_,l,r) => 
+       |      hasLeftistProperty(l) && 
+       |      hasLeftistProperty(r) && 
+       |      l.rank >= r.rank 
+       |  }
+       |
+       |  def heapSize(t: Heap): BigInt = { t match {
+       |    case Leaf() => BigInt(0)
+       |    case Node(v, l, r) => heapSize(l) + 1 + heapSize(r)
+       |  }} ensuring(_ >= 0)
+       |
+       |  private def merge(h1: Heap, h2: Heap) : Heap = {
+       |    require(
+       |      hasLeftistProperty(h1) && hasLeftistProperty(h2) && 
+       |      hasHeapProperty(h1) && hasHeapProperty(h2)
+       |    )
+       |    (h1,h2) match {
+       |      case (Leaf(), _) => h2
+       |      case (_, Leaf()) => h1
+       |      case (Node(v1, l1, r1), Node(v2, l2, r2)) =>
+       |        //if(v1 >= v2) // FIXME
+       |          makeN(v1, l1, merge(r1, h2))
+       |        //else
+       |        //  makeN(v2, l2, merge(h1, r2))
+       |    }
+       |  } ensuring { res => 
+       |    hasLeftistProperty(res) && hasHeapProperty(res) &&
+       |    heapSize(h1) + heapSize(h2) == heapSize(res) &&
+       |    h1.content ++ h2.content == res.content 
+       |  }
+       |
+       |  private def makeN(value: BigInt, left: Heap, right: Heap) : Heap = {
+       |    require(
+       |      hasLeftistProperty(left) && hasLeftistProperty(right)
+       |    )
+       |    if(left.rank >= right.rank)
+       |      Node(value, left, right)
+       |    else
+       |      Node(value, right, left)
+       |  } ensuring { res =>
+       |    hasLeftistProperty(res)  }
+       |}""".stripMargin
+
+  )
+
+  def runTests(tests: List[(List[Variable], Expr, Expr)], ofd: Option[FunDef] = None)(implicit fix: (LeonContext, Program)): Unit = {
+    for ((vs, from, exp) <- tests) {
+      // SimilarTo(<from>) should produce <exp>
+
+      val sctx = new SynthesisContext(fix._1, SynthesisSettings(), ofd.getOrElse(fix._2.definedFunctions.head), fix._2)
+      val p = Problem(vs.map(_.id), BooleanLiteral(true), BooleanLiteral(true), BooleanLiteral(true), Nil, ExamplesBank.empty)
+
+      val g = OneOf(vs)
+      val enum = new MemoizedEnumerator[Label, Expr, ProductionRule[Label, Expr]](g.getProductions)
+      val exprs = enum.iterator(Label(exp.getType).withAspect(SimilarTo(from)))
+
+      //println(s"SimilarTo(${from.asString}):")
+
+      if (!(exprs.exists { e => 
+        //println(s" - ${e.asString}")
+        e == exp
+      })) {
+        info("Productions: ")
+        g.printProductions(info(_))
+
+        fail(s"Unable to find ${exp.asString} in SimilarTo(${from.asString})")
+      }
+    }
+  }
+
+  test("Basic") { implicit fix =>
+
+    def _None = CaseClass(caseClassDef("leon.lang.None").typed(Seq(IntegerType)), Nil)
+    def _Some(e: Expr) = CaseClass(caseClassDef("leon.lang.Some").typed(Seq(IntegerType)), List(e))
+
+    val tests = List[(List[Variable], Expr, Expr)](
+      (List(x, y), GreaterThan(x, y), GreaterThan(y, x)),
+      (List(x, y), GreaterThan(x, y), not(GreaterThan(x, y))),
+      (List(x, y), equality(x, y), not(equality(x, y))),
+      (List(x, y), x, y),
+      (List(x), _Some(x), _Some(Plus(x, bi(1)))),
+      (List(x), _Some(Plus(x, bi(2))), _Some(Plus(x, bi(1))))
+    )
+
+    runTests(tests)
+  }
+
+  test("Desugar") { implicit fix =>
+    val e    = id("e", "Trees.Expr").toVariable
+    val cond = id("cond", "Trees.Expr").toVariable
+    val thn  = id("thn", "Trees.Expr").toVariable
+    val els  = id("els", "Trees.Expr").toVariable
+
+    def Ite(es: Expr*) = cc("STrees.Ite")(es: _*)
+    def Literal(es: Expr*) = cc("STrees.Literal")(es: _*)
+
+    val desugarfd = funDef("Desugar.desugar")
+    def desugar(es: Expr*) = fcall("Desugar.desugar")(es: _*)
+
+    val tests = List[(List[Variable], Expr, Expr)](
+      (List(cond, thn, els),
+       Ite(desugar(cond), desugar(els), desugar(thn)),
+       Ite(desugar(cond), desugar(thn), desugar(els))
+      ),
+      (List(e),
+       Ite(desugar(e), Literal(bi(1)), Literal(bi(1))),
+       Ite(desugar(e), Literal(bi(0)), Literal(bi(1)))
+      )
+    )
+
+    runTests(tests, Some(desugarfd))
+  }
+
+  test("Heap") { implicit fix =>
+    val v1   = id("v1", IntegerType).toVariable
+    val h1   = id("h1", "Heaps.Heap").toVariable
+    val l1   = id("l1", "Heaps.Heap").toVariable
+    val r1   = id("r1", "Heaps.Heap").toVariable
+
+    val v2   = id("v2", IntegerType).toVariable
+    val h2   = id("h2", "Heaps.Heap").toVariable
+    val l2   = id("l2", "Heaps.Heap").toVariable
+    val r2   = id("r2", "Heaps.Heap").toVariable
+
+    val mergefd = funDef("Heaps.merge")
+    def merge(es: Expr*) = fcall("Heaps.merge")(es: _*)
+
+    def makeN(es: Expr*) = fcall("Heaps.makeN")(es: _*)
+
+    def Node(es: Expr*) = cc("Heaps.Node")(es: _*)
+    def Leaf(es: Expr*) = cc("Heaps.Leaf")(es: _*)
+
+    def rank(es: Expr*) = fcall("Heap.rank")(es: _*)
+
+    val tests = List[(List[Variable], Expr, Expr)](
+      (List(h1, v1, l1, r1, h2, v2, l2, r2),
+       makeN(v2, l1, merge(h1, r2)),
+       makeN(v2, l2, merge(h1, r2))
+      ),
+      (List(v1, h1),
+       merge(Node(Plus(v1, bi(1)), Leaf(), Leaf()), h1),
+      merge(Node(v1, Leaf(), Leaf()), h1)
+      ),
+      (List(h1, h2),
+       GreaterThan(rank(h1), Plus(rank(h2), bi(1))),
+       GreaterThan(rank(h1), rank(h2))
+      )
+    )
+
+    runTests(tests, Some(mergefd))
+  }
+}
diff --git a/src/test/scala/leon/test/helpers/ExpressionsDSL.scala b/src/test/scala/leon/test/helpers/ExpressionsDSL.scala
index 8d14906c5..fa118cbcc 100644
--- a/src/test/scala/leon/test/helpers/ExpressionsDSL.scala
+++ b/src/test/scala/leon/test/helpers/ExpressionsDSL.scala
@@ -36,6 +36,13 @@ trait ExpressionsDSL {
   val q = FreshIdentifier("q", BooleanType).toVariable
   val r = FreshIdentifier("r", BooleanType).toVariable
 
+  def id(name: String, tpe: TypeTree)(implicit pgm: Program): Identifier = {
+    FreshIdentifier(name, tpe)
+  }
+
+  def id(name: String, tpeName: String, tps: Seq[TypeTree] = Nil)(implicit pgm: Program): Identifier = {
+    id(name, classType(tpeName, tps))
+  }
 
   def funDef(name: String)(implicit pgm: Program): FunDef = {
     pgm.lookupAll(name).collect {
@@ -53,6 +60,13 @@ trait ExpressionsDSL {
     }
   }
 
+  def classType(name: String, tps: Seq[TypeTree] = Nil)(implicit pgm: Program): ClassType = {
+    classDef(name) match {
+      case acd: AbstractClassDef => AbstractClassType(acd, tps)
+      case ccd: CaseClassDef => CaseClassType(ccd, tps)
+    }
+  }
+
   def caseClassDef(name: String)(implicit pgm: Program): CaseClassDef = {
     pgm.lookupAll(name).collect {
       case ccd: CaseClassDef => ccd
diff --git a/testcases/synthesis/current/HOFs/FilterNonNeg.scala b/testcases/synthesis/current/HOFs/FilterNonNeg.scala
new file mode 100644
index 000000000..3d17b5d14
--- /dev/null
+++ b/testcases/synthesis/current/HOFs/FilterNonNeg.scala
@@ -0,0 +1,26 @@
+import leon.lang._
+import leon.lang.synthesis._
+
+object FilterNonNeg {
+
+  abstract class List
+  case object Nil extends List
+  case class Cons(h: Int, t: List) extends List
+
+  def filter(l: List, f: Int => Boolean): List = {
+    l match {
+      case Cons(h, t) => if (f(h)) Cons(h, filter(t, f)) else filter(t, f)
+      case Nil => Nil
+    }
+  }
+
+  def test(in: List): List = {
+    ???[List]
+  } ensuring {
+    out => (in, out) passes {
+      case Cons(1, Cons(2, Cons(3, Cons(4, Nil))))    => Cons(1, Cons(2, Cons(3, Cons(4, Nil))))
+      case Cons(-1, Cons(-2, Cons(3, Cons(-4, Nil)))) => Cons(3, Nil)
+      case Cons(1, Cons(-2, Cons(3, Cons(-4, Nil))))  => Cons(1, Cons(3, Nil))
+    }
+  }
+}
diff --git a/testcases/synthesis/current/HOFs/MapPlus1.scala b/testcases/synthesis/current/HOFs/MapPlus1.scala
new file mode 100644
index 000000000..e14451893
--- /dev/null
+++ b/testcases/synthesis/current/HOFs/MapPlus1.scala
@@ -0,0 +1,42 @@
+import leon.lang._
+import leon.lang.synthesis._
+
+
+object MapPlus1 {
+  abstract class List
+  case object Nil extends List
+  case class Cons(h: BigInt, t: List) extends List
+
+  def size(l: List): BigInt = {
+    l match {
+      case Cons(h, t) =>  size(t) + 1
+      case Nil => BigInt(0)
+    }
+  } ensuring { res => res >= 0 }
+
+  def sum(l: List): BigInt = {
+    l match {
+      case Cons(h, t) =>  h + sum(t)
+      case Nil => BigInt(0)
+    }
+  }
+
+
+  def map(l: List, f: BigInt => BigInt): List = {
+    l match {
+      case Nil => Nil
+      case Cons(h, t) => Cons(f(h), map(t, f))
+    }
+  } ensuring { res =>
+    size(res) == size(l)
+  }
+
+
+  def test(l: List): List = {
+      ???[List]
+  } ensuring { res =>
+    (sum(res) == sum(l) + size(l)) &&
+    (size(res) == size(l))
+  }
+
+}
-- 
GitLab