diff --git a/src/main/scala/inox/grammars/AllGrammars.scala b/src/main/scala/inox/grammars/AllGrammars.scala
new file mode 100644
index 0000000000000000000000000000000000000000..265892bfbf1d480372eb7cf60ffa6060c56b4037
--- /dev/null
+++ b/src/main/scala/inox/grammars/AllGrammars.scala
@@ -0,0 +1,11 @@
+package inox.grammars
+
+trait AllGrammars extends ElementaryGrammars
+  with BaseGrammars
+  with ValueGrammars
+  with ConstantGrammars
+  with ClosureGrammars
+  with EqualityGrammars
+  with FunctionCallsGrammars
+{ self: GrammarsUniverse =>
+}
diff --git a/src/main/scala/inox/grammars/Aspect.scala b/src/main/scala/inox/grammars/Aspect.scala
index 69ed03380f556ebc7ac6ac40ab2815a2a88b5cca..027e423f59e34010814960332dad50219f5a94ea 100644
--- a/src/main/scala/inox/grammars/Aspect.scala
+++ b/src/main/scala/inox/grammars/Aspect.scala
@@ -1,12 +1,8 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Expressions.Expr
-import utils.SeqUtils._
-import Tags._
-
 /**
  * An Aspect applies to a label, and attaches information to it.
  *
@@ -24,9 +20,13 @@ import Tags._
  *
  */
 
+trait Aspects { self: GrammarsUniverse =>
+  import program._
+  import trees._
 
-abstract class Aspect extends Printable {
-  final type Production = ProductionRule[Label, Expr]
+  trait Aspect extends Printable {
+    final type Production = ProductionRule[Label, Expr]
 
-  def applyTo(l: Label, ps: Seq[Production])(implicit ctx: LeonContext): Seq[Production]
+    def applyTo(l: Label, ps: Seq[Production])(implicit ctx: InoxContext): Seq[Production]
+  }
 }
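To make the contract concrete, here is a hypothetical extra aspect written against this trait; the trait name CheapestOnlyAspects and the pruning behaviour are illustrative only, and the shape mirrors DepthBound further down in this patch.

package inox
package grammars

trait CheapestOnlyAspects { self: GrammarsUniverse =>
  import program.trees.PrinterOptions

  /** Keeps only the cheapest productions attached to a label. */
  case object CheapestOnly extends Aspect {
    def asString(implicit opts: PrinterOptions): String = "cheapest"

    def applyTo(l: Label, ps: Seq[Production])(implicit ctx: InoxContext): Seq[Production] = {
      if (ps.isEmpty) ps else {
        val minCost = ps.map(_.cost).min
        ps.filter(_.cost == minCost)
      }
    }
  }
}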
diff --git a/src/main/scala/inox/grammars/BaseGrammar.scala b/src/main/scala/inox/grammars/BaseGrammar.scala
index ee3a85791f7bbcbc620736188caa5de0f1df2d84..f1d4559b61e852c2fccd04644a267df328d333b4 100644
--- a/src/main/scala/inox/grammars/BaseGrammar.scala
+++ b/src/main/scala/inox/grammars/BaseGrammar.scala
@@ -1,82 +1,87 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types._
-import purescala.Expressions._
-import purescala.Constructors._
+trait BaseGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols._
 
-/** The basic grammar for Leon expressions.
-  * Generates the most obvious expressions for a given type,
-  * without regard of context (variables in scope, current function etc.)
-  * Also does some trivial simplifications.
-  */
-case object BaseGrammar extends SimpleExpressionGrammar {
+  /** The basic grammar for Inox expressions.
+    * Generates the most obvious expressions for a given type,
+    * without regard to context (variables in scope, current function, etc.)
+    * Also does some trivial simplifications.
+    */
+  case object BaseGrammar extends SimpleExpressionGrammar {
 
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
-    case BooleanType =>
-      List(
-        terminal(BooleanLiteral(false), Tags.BooleanC),
-        terminal(BooleanLiteral(true),  Tags.BooleanC),
-        nonTerminal(List(BooleanType), { case Seq(a) => not(a) }, Tags.Not),
-        nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => and(a, b) }, Tags.And),
-        nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => or(a, b)  }, Tags.Or ),
-        nonTerminal(List(Int32Type,   Int32Type),   { case Seq(a, b) => LessThan(a, b)   }),
-        nonTerminal(List(Int32Type,   Int32Type),   { case Seq(a, b) => LessEquals(a, b) }),
-        nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessThan(a, b)   }),
-        nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessEquals(a, b) })
-      )
-    case Int32Type =>
-      List(
-        terminal(IntLiteral(0), Tags.Zero),
-        terminal(IntLiteral(1), Tags.One ),
-        nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => plus(a, b)  }, Tags.Plus ),
-        nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => minus(a, b) }, Tags.Minus),
-        nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => times(a, b) }, Tags.Times)
-      )
+    def computeProductions(t: Type)(implicit ctx: InoxContext): Seq[Prod] = t match {
+      case BooleanType =>
+        List(
+          terminal(BooleanLiteral(false), BooleanC),
+          terminal(BooleanLiteral(true),  BooleanC),
+          nonTerminal(List(BooleanType), { case Seq(a) => not(a) }, Not),
+          nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => and(a, b) }, And),
+          nonTerminal(List(BooleanType, BooleanType), { case Seq(a, b) => or(a, b)  }, Or ),
+          nonTerminal(List(Int32Type,   Int32Type),   { case Seq(a, b) => LessThan(a, b)   }),
+          nonTerminal(List(Int32Type,   Int32Type),   { case Seq(a, b) => LessEquals(a, b) }),
+          nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessThan(a, b)   }),
+          nonTerminal(List(IntegerType, IntegerType), { case Seq(a, b) => LessEquals(a, b) })
+        )
+      case Int32Type =>
+        List(
+          terminal(IntLiteral(0), Zero),
+          terminal(IntLiteral(1), One ),
+          nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => plus(a, b)  }, Plus ),
+          nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => minus(a, b) }, Minus),
+          nonTerminal(List(Int32Type, Int32Type), { case Seq(a,b) => times(a, b) }, Times)
+        )
 
-    case IntegerType =>
-      List(
-        terminal(InfiniteIntegerLiteral(0), Tags.Zero),
-        terminal(InfiniteIntegerLiteral(1), Tags.One ),
-        nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => plus(a, b)  }, Tags.Plus ),
-        nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => minus(a, b) }, Tags.Minus),
-        nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => times(a, b) }, Tags.Times)//,
-        //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Modulo(a, b)   }, Tags.Mod),
-        //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Division(a, b) }, Tags.Div)
-      )
+      case IntegerType =>
+        List(
+          terminal(IntegerLiteral(0), Zero),
+          terminal(IntegerLiteral(1), One ),
+          nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => plus(a, b)  }, Plus ),
+          nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => minus(a, b) }, Minus),
+          nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => times(a, b) }, Times)//,
+          //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Modulo(a, b)   }, Mod),
+          //nonTerminal(List(IntegerType, IntegerType), { case Seq(a,b) => Division(a, b) }, Div)
+        )
 
-    case TupleType(stps) =>
-      List(
-        nonTerminal(stps, Tuple, Tags.Constructor(isTerminal = false))
-      )
+      case TupleType(stps) =>
+        List(
+          nonTerminal(stps, Tuple, Constructor(isTerminal = false))
+        )
 
-    case cct: CaseClassType =>
-      List(
-        nonTerminal(cct.fields.map(_.getType), CaseClass(cct, _), Tags.tagOf(cct) )
-      )
+      case ct: ClassType =>
+        ct.tcd match {
+          case cct: TypedCaseClassDef =>
+            List(
+              nonTerminal(cct.fields.map(_.getType), CaseClass(ct, _), tagOf(cct.cd) )
+            )
 
-    case act: AbstractClassType =>
-      act.knownCCDescendants.map { cct =>
-        nonTerminal(cct.fields.map(_.getType), CaseClass(cct, _), Tags.tagOf(cct) )
-      }
+          case act: TypedAbstractClassDef =>
+            act.descendants.map { cct =>
+              nonTerminal(cct.fields.map(_.getType), CaseClass(cct.toType, _), tagOf(cct.cd) )
+            }
+        }
 
-    case st @ SetType(base) =>
-      List(
-        terminal(FiniteSet(Set(), base), Tags.Constant),
-        nonTerminal(List(base),   { case elems     => FiniteSet(elems.toSet, base) }, Tags.Constructor(isTerminal = false)),
-        nonTerminal(List(st, st), { case Seq(a, b) => SetUnion(a, b) }),
-        nonTerminal(List(st, st), { case Seq(a, b) => SetIntersection(a, b) }),
-        nonTerminal(List(st, st), { case Seq(a, b) => SetDifference(a, b) })
-      )
+      case st @ SetType(base) =>
+        List(
+          terminal(FiniteSet(Seq(), base), Constant),
+          nonTerminal(List(base),   { case elems     => FiniteSet(elems, base) }, Constructor(isTerminal = false)),
+          nonTerminal(List(st, st), { case Seq(a, b) => SetUnion(a, b) }),
+          nonTerminal(List(st, st), { case Seq(a, b) => SetIntersection(a, b) }),
+          nonTerminal(List(st, st), { case Seq(a, b) => SetDifference(a, b) })
+        )
 
-    case UnitType =>
-      List(
-        terminal(UnitLiteral(), Tags.Constant)
-      )
+      case UnitType =>
+        List(
+          terminal(UnitLiteral(), Constant)
+        )
 
-    case _ =>
-      Nil
+      case _ =>
+        Nil
+    }
   }
 }
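A brief usage sketch for the grammar above, assuming a concrete universe (a val universe: GrammarsUniverse over some Inox program) and an implicit InoxContext in scope; neither is provided by this patch.

import universe._
import universe.program.trees._

// For Boolean, the productions are the two literals, not/and/or and the
// integer comparisons listed above.
val boolProds = BaseGrammar.getProductions(Label(BooleanType))

// Each production records the nonterminal labels it consumes and its cost.
boolProds.foreach(p => println(p.subTrees.map(_.tpe) -> p.cost))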
diff --git a/src/main/scala/inox/grammars/Closures.scala b/src/main/scala/inox/grammars/Closures.scala
index 1d1d8c9cc8197b02534fa5391038e3959550c1c6..5695a724ffc44dbdecc22b1e795dad9a63e75d6f 100644
--- a/src/main/scala/inox/grammars/Closures.scala
+++ b/src/main/scala/inox/grammars/Closures.scala
@@ -1,27 +1,28 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types._
-import purescala.Definitions._
-import purescala.Expressions._
-import purescala.Common._
+trait ClosureGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols._
 
-case object Closures extends ExpressionGrammar {
-  def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] = lab.getType match {
-    case FunctionType(argsTpes, ret) =>
-      val args = argsTpes.zipWithIndex.map { case (tpe, i) =>
-        ValDef(FreshIdentifier("a"+i, tpe))
-      }
+  case object Closures extends ExpressionGrammar {
+    def computeProductions(lab: Label)(implicit ctx: InoxContext): Seq[ProductionRule[Label, Expr]] = lab.getType match {
+      case FunctionType(argsTpes, ret) =>
+        val args = argsTpes.zipWithIndex.map { case (tpe, i) =>
+          ValDef(FreshIdentifier("a"+i), tpe)
+        }
 
-      val rlab = Label(ret).withAspect(aspects.ExtraTerminals(args.map(_.toVariable).toSet))
+        val rlab = Label(ret).withAspect(ExtraTerminals(args.map(_.toVariable).toSet))
 
-      applyAspects(lab, List(
-        ProductionRule(List(rlab), { case List(body) => Lambda(args, body) }, Tags.Top, 1)
-      ))
+        applyAspects(lab, List(
+          ProductionRule(List(rlab), { case List(body) => Lambda(args, body) }, Top, 1)
+        ))
 
-    case _ =>
-      Nil
+      case _ =>
+        Nil
+    }
   }
 }
diff --git a/src/main/scala/inox/grammars/Constants.scala b/src/main/scala/inox/grammars/Constants.scala
index a542a9a0e5bcd22468d88bfdfa2f4e953c9bd43c..88aa7d5b06fda5f58d69521164e235ba67a6fc98 100644
--- a/src/main/scala/inox/grammars/Constants.scala
+++ b/src/main/scala/inox/grammars/Constants.scala
@@ -1,33 +1,34 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Expressions._
-import purescala.Types.TypeTree
-import purescala.ExprOps.collect
-import purescala.Extractors.IsTyped
+trait ConstantGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import exprOps.collect
 
-/** Generates constants found in an [[leon.purescala.Expressions.Expr]].
-  * Some constants that are generated by other grammars (like 0, 1) will be excluded
-  */
-case class Constants(e: Expr) extends SimpleExpressionGrammar {
+  /** Generates constants found in an [[inox.ast.Expressions.Expr]].
+    * Some constants that are generated by other grammars (like 0, 1) will be excluded
+    */
+  case class Constants(e: Expr) extends SimpleExpressionGrammar {
 
-  private val excluded: Set[Expr] = Set(
-    InfiniteIntegerLiteral(1),
-    InfiniteIntegerLiteral(0),
-    IntLiteral(1),
-    IntLiteral(0),
-    BooleanLiteral(true),
-    BooleanLiteral(false)
-  )
+    private val excluded: Set[Expr] = Set(
+      IntegerLiteral(1),
+      IntegerLiteral(0),
+      IntLiteral(1),
+      IntLiteral(0),
+      BooleanLiteral(true),
+      BooleanLiteral(false)
+    )
 
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
-    val literals = collect[Expr]{
-      case IsTyped(l:Literal[_], `t`) => Set(l)
-      case _ => Set()
-    }(e)
+    def computeProductions(t: Type)(implicit ctx: InoxContext): Seq[Prod] = {
+      val literals = collect[Expr]{
+        case IsTyped(l:Literal[_], `t`) => Set(l)
+        case _ => Set()
+      }(e)
 
-    (literals -- excluded map (terminal(_, Tags.Constant))).toSeq
+      (literals -- excluded map (terminal(_, Constant))).toSeq
+    }
   }
 }
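For example (same assumptions as the previous sketch: a concrete universe and an implicit InoxContext), a seed expression containing 42 and 0 contributes only a terminal for 42, since 0 is in the excluded set.

import universe._
import universe.program.trees._

val seed  = LessThan(IntegerLiteral(42), IntegerLiteral(0))
val prods = Constants(seed).getProductions(Label(IntegerType))
// expected: a single terminal production rebuilding IntegerLiteral(42)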
diff --git a/src/main/scala/inox/grammars/Empty.scala b/src/main/scala/inox/grammars/Empty.scala
deleted file mode 100644
index 639c0f9562d1d23ef546e8899eca26db131ecfd5..0000000000000000000000000000000000000000
--- a/src/main/scala/inox/grammars/Empty.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-
-/** The empty expression grammar */
-case class Empty() extends ExpressionGrammar {
-  def computeProductions(l: Label)(implicit ctx: LeonContext) = Nil
-}
diff --git a/src/main/scala/inox/grammars/EqualityGrammar.scala b/src/main/scala/inox/grammars/EqualityGrammar.scala
index 8ffa268788fd94f96d9d4f89079a1f56a90601bd..45a031405c3e4537b87da3e118f880907542bbac 100644
--- a/src/main/scala/inox/grammars/EqualityGrammar.scala
+++ b/src/main/scala/inox/grammars/EqualityGrammar.scala
@@ -1,22 +1,25 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types._
-import purescala.Constructors._
+trait EqualityGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols._
 
-/** A grammar of equalities
-  *
-  * @param types The set of types for which equalities will be generated
-  */
-case class EqualityGrammar(types: Set[TypeTree]) extends SimpleExpressionGrammar {
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
-    case BooleanType =>
-      types.toList map { tp =>
-        nonTerminal(List(tp, tp), { case Seq(a, b) => equality(a, b) }, Tags.Equals)
-      }
+  /** A grammar of equalities
+    *
+    * @param types The set of types for which equalities will be generated
+    */
+  case class EqualityGrammar(types: Set[Type]) extends SimpleExpressionGrammar {
+    def computeProductions(t: Type)(implicit ctx: InoxContext): Seq[Prod] = t match {
+      case BooleanType =>
+        types.toList map { tp =>
+          nonTerminal(List(tp, tp), { case Seq(a, b) => equality(a, b) }, Equals)
+        }
 
-    case _ => Nil
+      case _ => Nil
+    }
   }
 }
diff --git a/src/main/scala/inox/grammars/ExpressionGrammar.scala b/src/main/scala/inox/grammars/ExpressionGrammar.scala
index 7c31a80c91ae69111e87b61c0b1898cc457e4dda..1962be286a2403b98ebee8fda03ac1203b710f99 100644
--- a/src/main/scala/inox/grammars/ExpressionGrammar.scala
+++ b/src/main/scala/inox/grammars/ExpressionGrammar.scala
@@ -1,85 +1,91 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Expressions._
-import purescala.Common._
-
+import ast.Trees
 import scala.collection.mutable.{HashMap => MutableMap}
 
 /** Represents a context-free grammar of expressions */
-abstract class ExpressionGrammar {
+trait ExpressionGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols._
 
-  private[this] val cache = new MutableMap[Label, Seq[ProductionRule[Label, Expr]]]()
+  trait ExpressionGrammar {
+    private[this] val cache = new MutableMap[Label, Seq[ProductionRule[Label, Expr]]]()
 
-  /** The list of production rules for this grammar for a given nonterminal.
-    *
-    * @param lab The nonterminal for which production rules will be generated
-    * @note This is the cached version of [[computeProductions]]. Clients should use this method.
-    */
-  final def getProductions(lab: Label)(implicit ctx: LeonContext) = {
-    cache.getOrElse(lab, {
-      val res = applyAspects(lab, computeProductions(lab))
-      cache += lab -> res
-      res
-    })
-  }
+    /** The list of production rules for this grammar for a given nonterminal.
+      *
+      * @param lab The nonterminal for which production rules will be generated
+      * @note This is the cached version of [[computeProductions]]. Clients should use this method.
+      */
+    final def getProductions(lab: Label)(implicit ctx: InoxContext) = {
+      cache.getOrElse(lab, {
+        val res = applyAspects(lab, computeProductions(lab))
+        cache += lab -> res
+        res
+      })
+    }
 
-  /** The list of production rules for this grammar for a given nonterminal.
-    *
-    * @param lab The nonterminal for which production rules will be generated
-    * @note Clients should use the cached version, [[getProductions]] instead
-    */
-  def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]]
+    /** The list of production rules for this grammar for a given nonterminal.
+      *
+      * @param lab The nonterminal for which production rules will be generated
+      * @note Clients should use the cached version, [[getProductions]], instead
+      */
+    def computeProductions(lab: Label)(implicit ctx: InoxContext): Seq[ProductionRule[Label, Expr]]
 
-  protected def applyAspects(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: LeonContext) = {
-    lab.aspects.foldLeft(ps) {
-      case (ps, a) => a.applyTo(lab, ps)
+    protected def applyAspects(lab: Label, ps: Seq[ProductionRule[Label, Expr]])(implicit ctx: InoxContext) = {
+      lab.aspects.foldLeft(ps) {
+        case (ps, a) => a.applyTo(lab, ps)
+      }
     }
-  }
 
-  /** Returns the union of two generators. */
-  final def ||(that: ExpressionGrammar): ExpressionGrammar = {
-    Union(Seq(this, that))
-  }
+    /** Returns the union of two generators. */
+    //final def ||(that: ExpressionGrammar): ExpressionGrammar = {
+    //  Union(Seq(this, that))
+    //}
 
-  final def printProductions(printer: String => Unit)(implicit ctx: LeonContext) {
-    def sorter(lp1: (Label, Seq[ProductionRule[Label, Expr]]), lp2: (Label, Seq[ProductionRule[Label, Expr]])): Boolean = {
-      val l1 = lp1._1
-      val l2 = lp2._1
+    final def printProductions(printer: String => Unit)(implicit ctx: InoxContext) {
+      def sorter(lp1: (Label, Seq[ProductionRule[Label, Expr]]), lp2: (Label, Seq[ProductionRule[Label, Expr]])): Boolean = {
+        val l1 = lp1._1
+        val l2 = lp2._1
 
-      val os1 = l1.aspects.collectFirst { case aspects.Sized(size) => size }
-      val os2 = l2.aspects.collectFirst { case aspects.Sized(size) => size }
+        val os1 = l1.aspects.collectFirst { case Sized(size) => size }
+        val os2 = l2.aspects.collectFirst { case Sized(size) => size }
 
-      (os1, os2) match {
-        case (Some(s1), Some(s2)) =>
-          if (s1 > s2) {
-            true
-          } else if (s1 == s2) {
-            l1.asString < l2.asString
-          } else {
-            false
-          }
-        case _ => l1.asString < l2.asString
+        (os1, os2) match {
+          case (Some(s1), Some(s2)) =>
+            if (s1 > s2) {
+              true
+            } else if (s1 == s2) {
+              l1.asString < l2.asString
+            } else {
+              false
+            }
+          case _ => l1.asString < l2.asString
+        }
       }
-    }
 
-    for ((lab, gs) <- cache.toSeq.sortWith(sorter)) {
-      val lhs = f"${Console.BOLD}${lab.asString}%50s${Console.RESET} ::= "
+      for ((lab, gs) <- cache.toSeq.sortWith(sorter)) {
+        val lhs = f"${Console.BOLD}${lab.asString}%50s${Console.RESET} ::= "
 
-      if (gs.isEmpty) {
-        printer(s"${lhs}ε")
-      } else {
-        val rhs = for (g <- gs) yield {
-        val subs = g.subTrees.map { t =>
-          FreshIdentifier(Console.BOLD + t.asString + Console.RESET, t.getType).toVariable
-        }
+        if (gs.isEmpty) {
+          printer(s"${lhs}ε")
+        } else {
+          val rhs = for (g <- gs) yield {
+            val subs = g.subTrees.map { t =>
+              new Variable(
+                FreshIdentifier(Console.BOLD + t.asString + Console.RESET),
+                t.getType
+              )
+            }
 
-          g.builder(subs).asString
+            g.builder(subs).asString
+          }
+          printer(lhs + rhs.mkString("\n" + " " * 55))
+        }
       }
-        printer(lhs + rhs.mkString("\n" + " " * 55))
     }
   }
-  }
 }
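A sketch of how caching and composition are used, under the same assumed universe and implicit InoxContext; Union is the variant now defined in ElementaryGrammars further down in this patch.

import universe._
import universe.program.trees._

// Grammars compose structurally; nested unions are flattened.
val g = Union(Seq(BaseGrammar, ValueGrammar))

val lab = Label(IntegerType)
val p1  = g.getProductions(lab) // computed once: aspects applied, then cached
val p2  = g.getProductions(lab) // served from the per-grammar cache
assert(p1 eq p2)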
diff --git a/src/main/scala/inox/grammars/FunctionCalls.scala b/src/main/scala/inox/grammars/FunctionCalls.scala
index 0c235d66836c3c7da55d44f0ff1e300d75349c8b..52b2de1d1f66196f2d739578a5c58d964d936e8a 100644
--- a/src/main/scala/inox/grammars/FunctionCalls.scala
+++ b/src/main/scala/inox/grammars/FunctionCalls.scala
@@ -1,84 +1,87 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types._
-import purescala.TypeOps._
-import purescala.Definitions._
-import purescala.ExprOps._
-import purescala.DefOps._
-import purescala.Expressions._
-
-/** Generates non-recursive function calls
-  *
-  * @param currentFunction The currend function for which no calls will be generated
-  * @param types The candidate real type parameters for [[currentFunction]]
-  * @param exclude An additional set of functions for which no calls will be generated
-  */
-case class FunctionCalls(prog: Program, currentFunction: FunDef, types: Seq[TypeTree], exclude: Set[FunDef]) extends SimpleExpressionGrammar {
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
-
-    def getCandidates(fd: FunDef): Seq[TypedFunDef] = {
-      // Prevents recursive calls
-      val cfd = currentFunction
-
-      val isRecursiveCall = (prog.callGraph.transitiveCallers(cfd) + cfd) contains fd
-
-      val isDet = fd.body.exists(isDeterministic)
-
-      if (!isRecursiveCall && isDet) {
-        canBeSubtypeOf(fd.returnType, t) match {
-          case Some(tpsMap) =>
-            val free = fd.tparams.map(_.tp)
-            val tfd = fd.typed(free.map(tp => tpsMap.getOrElse(tp, tp)))
-
-            if (tpsMap.size < free.size) {
-              /* Some type params remain free, we want to assign them:
-               *
-               * List[T] => Int, for instance, will be found when
-               * requesting Int, but we need to assign T to viable
-               * types. For that we use list of input types as heuristic,
-               * and look for instantiations of T such that input <?:
-               * List[T].
-               */
-              types.distinct.flatMap { (atpe: TypeTree) =>
-                var finalFree = free.toSet -- tpsMap.keySet
-                var finalMap = tpsMap
-
-                for (ptpe <- tfd.params.map(_.getType).distinct) {
-                  unify(atpe, ptpe, finalFree.toSeq) match { // FIXME!!!! this may allow weird things if lub!=ptpe
-                    case Some(ntpsMap) =>
-                      finalFree --= ntpsMap.keySet
-                      finalMap  ++= ntpsMap
-                    case _ =>
+trait FunctionCallsGrammars extends utils.Helpers { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import exprOps._
+  import symbols._
+
+  /** Generates non-recursive function calls
+    *
+    * @param currentFunction The current function for which no calls will be generated
+    * @param types The candidate real type parameters for [[currentFunction]]
+    * @param exclude An additional set of functions for which no calls will be generated
+    */
+  case class FunctionCalls(prog: Program, currentFunction: FunDef, types: Seq[Type], exclude: Set[FunDef]) extends SimpleExpressionGrammar {
+    def computeProductions(t: Type)(implicit ctx: InoxContext): Seq[Prod] = {
+
+      def getCandidates(fd: FunDef): Seq[TypedFunDef] = {
+        // Prevents recursive calls
+        val cfd = currentFunction
+
+        val isRecursiveCall = (transitiveCallers(cfd) + cfd) contains fd
+
+        val isDet = true // TODO FIXME fd.body.exists(isDeterministic)
+
+        if (!isRecursiveCall && isDet) {
+          canBeSubtypeOf(fd.returnType, t) match {
+            case Some(tpsMap) =>
+              val free = fd.tparams.map(_.tp)
+              val tfd = fd.typed(free.map(tp => tpsMap.getOrElse(tp, tp)))
+
+              if (tpsMap.size < free.size) {
+                /* Some type params remain free, we want to assign them:
+                 *
+                 * List[T] => Int, for instance, will be found when
+                 * requesting Int, but we need to assign T to viable
+                 * types. For that we use list of input types as heuristic,
+                 * and look for instantiations of T such that input <?:
+                 * List[T].
+                 */
+                types.distinct.flatMap { (atpe: Type) =>
+                  var finalFree = free.toSet -- tpsMap.keySet
+                  var finalMap = tpsMap
+
+                  for (ptpe <- tfd.params.map(_.getType).distinct) {
+                    unify(atpe, ptpe, finalFree.toSeq) match { // FIXME!!!! this may allow weird things if lub!=ptpe
+                      case Some(ntpsMap) =>
+                        finalFree --= ntpsMap.keySet
+                        finalMap  ++= ntpsMap
+                      case _ =>
+                    }
                   }
-                }
 
-                if (finalFree.isEmpty) {
-                  List(fd.typed(free.map(tp => finalMap.getOrElse(tp, tp))))
-                } else {
-                  Nil
+                  if (finalFree.isEmpty) {
+                    List(fd.typed(free.map(tp => finalMap.getOrElse(tp, tp))))
+                  } else {
+                    Nil
+                  }
                 }
+              } else {
+                // All type parameters that used to be free are assigned
+                List(tfd)
               }
-            } else {
-              // All type parameters that used to be free are assigned
-              List(tfd)
-            }
-          case None =>
-            Nil
+            case None =>
+              Nil
+          }
+        } else {
+          Nil
         }
-      } else {
-        Nil
       }
-    }
 
-    val filter = (tfd:TypedFunDef) => tfd.fd.isSynthetic || tfd.fd.isInner || (exclude contains tfd.fd)
+      val filter = (tfd:TypedFunDef) => /* TODO: Reimplement this somehow tfd.fd.isSynthetic || tfd.fd.isInner || */ (exclude contains tfd.fd)
 
-    val funcs = visibleFunDefsFromMain(prog).toSeq.sortBy(_.id).flatMap(getCandidates).filterNot(filter)
+      val funcs = functionsAvailable(prog).toSeq.sortBy(_.id).flatMap(getCandidates).filterNot(filter)
+
+      funcs.map{ tfd =>
+        nonTerminal(tfd.params.map(_.getType), FunctionInvocation(tfd.id, tfd.tps, _))//, tagOf(tfd.fd, isSafe = false))
+      }
 
-    funcs.map{ tfd =>
-      nonTerminal(tfd.params.map(_.getType), FunctionInvocation(tfd, _), Tags.tagOf(tfd.fd, isSafe = false))
     }
   }
+
 }
+
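A usage sketch under the usual assumptions (concrete universe, implicit InoxContext), plus a hypothetical FunDef fd under synthesis and a list inputTypes of its input types.

import universe._
import universe.program.trees._

// Calls to fd itself, or to anything that (transitively) calls back into it,
// are never produced.
val calls = FunctionCalls(program, fd, inputTypes, exclude = Set.empty)
val prods = calls.getProductions(Label(IntegerType))
// each production invokes a visible, non-recursive function returning IntegerType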
diff --git a/src/main/scala/inox/grammars/GrammarsUniverse.scala b/src/main/scala/inox/grammars/GrammarsUniverse.scala
new file mode 100644
index 0000000000000000000000000000000000000000..74971a28e64a6e425aa520fbe5ac1cbb84609fd1
--- /dev/null
+++ b/src/main/scala/inox/grammars/GrammarsUniverse.scala
@@ -0,0 +1,15 @@
+package inox
+package grammars
+
+trait GrammarsUniverse extends Tags
+  with Productions
+  with Aspects
+  with aspects.PersistentAspects
+  with aspects.AllAspects
+  with Labels
+  with ExpressionGrammars
+  with SimpleExpressionGrammars
+  with AllGrammars
+{
+  val program: Program
+}
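Hypothetical wiring, not introduced by this patch: given some inox.Program value p, a single object fixes the program and mixes all of the grammar and aspect traits together.

object MyGrammars extends GrammarsUniverse {
  val program: inox.Program = p
}

// Everything then lives under one path and shares p.trees / p.symbols:
//   MyGrammars.BaseGrammar, MyGrammars.ValueGrammar, MyGrammars.Label, ...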
diff --git a/src/main/scala/inox/grammars/Label.scala b/src/main/scala/inox/grammars/Label.scala
index efb36387d0b4bfd67e62c81100b29cbfb1b9fc50..088ca068a4e6c9c5b552ad6ff59a3433cc9b3e74 100644
--- a/src/main/scala/inox/grammars/Label.scala
+++ b/src/main/scala/inox/grammars/Label.scala
@@ -1,18 +1,23 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types._
+import ast.Trees
 
-case class Label(tpe: TypeTree, aspects: List[Aspect] = Nil) extends Typed {
-  val getType = tpe
+trait Labels { self: GrammarsUniverse =>
+  import program.{printerOpts => _, _}
+  import trees._
 
-  def asString(implicit ctx: LeonContext): String = {
-    val ts = tpe.asString
+  case class Label(tpe: Type, aspects: List[Aspect] = Nil) extends Typed {
+    def getType(implicit s: Symbols) = tpe
 
-    ts + aspects.map(_.asString).mkString
-  }
+    def asString(implicit opts: PrinterOptions): String = {
+      val ts = tpe.asString
+
+      ts + aspects.map(_.asString).mkString
+    }
 
-  def withAspect(a: Aspect) = Label(tpe, aspects :+ a)
+    def withAspect(a: Aspect) = Label(tpe, aspects :+ a)
+  }
 }
diff --git a/src/main/scala/inox/grammars/OneOf.scala b/src/main/scala/inox/grammars/OneOf.scala
index cad657b839ced11133f0dabb1d7b53bd806ff7a9..d38b2956a6410e0f29f2c5729f81482ad894c557 100644
--- a/src/main/scala/inox/grammars/OneOf.scala
+++ b/src/main/scala/inox/grammars/OneOf.scala
@@ -1,18 +1,36 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Expressions.Expr
-import purescala.TypeOps._
-import purescala.Types.TypeTree
+trait ElementaryGrammars { self: GrammarsUniverse =>
+  import program.trees._
+  import program.symbols._
 
-/** Generates one production rule for each expression in a sequence that has compatible type */
-case class OneOf(inputs: Seq[Expr]) extends SimpleExpressionGrammar {
-  def computeProductions(lab: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
-    inputs.collect {
-      case i if isSubtypeOf(i.getType, lab.getType) =>
-        terminal(i)
+  /** Generates one production rule for each expression in a sequence that has compatible type */
+  case class OneOf(inputs: Seq[Expr]) extends SimpleExpressionGrammar {
+    def computeProductions(lab: Type)(implicit ctx: InoxContext): Seq[Prod] = {
+      inputs.collect {
+        case i if isSubtypeOf(i.getType, lab.getType) =>
+          terminal(i)
+      }
     }
   }
+
+  case class Union(gs: Seq[ExpressionGrammar]) extends ExpressionGrammar {
+    val subGrammars: Seq[ExpressionGrammar] = gs.flatMap {
+      case u: Union => u.subGrammars
+      case g => Seq(g)
+    }
+
+    def computeProductions(label: Label)(implicit ctx: InoxContext): Seq[ProductionRule[Label, Expr]] =
+      subGrammars.flatMap(_.computeProductions(label))
+  }
+
+  /** The empty expression grammar */
+  case class Empty() extends ExpressionGrammar {
+    def computeProductions(l: Label)(implicit ctx: InoxContext) = Nil
+  }
+
 }
+
diff --git a/src/main/scala/inox/grammars/ProductionRule.scala b/src/main/scala/inox/grammars/ProductionRule.scala
index ad740484708a2e8a1a2a6b4224a8ee17b762c2c4..00a5344f564049d3a4b1c10512fb8494a6047ef1 100644
--- a/src/main/scala/inox/grammars/ProductionRule.scala
+++ b/src/main/scala/inox/grammars/ProductionRule.scala
@@ -1,6 +1,6 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
 import bonsai.Generator
@@ -14,5 +14,8 @@ import bonsai.Generator
  *  @tparam T The type of nonterminal symbols of the grammar
  *  @tparam R The type of syntax trees of the grammar
  */
-case class ProductionRule[T, R](override val subTrees: Seq[T], override val builder: Seq[R] => R, tag: Tags.Tag, cost: Int)
-  extends Generator[T,R](subTrees, builder)
+trait Productions { self: GrammarsUniverse =>
+
+  case class ProductionRule[T, R](override val subTrees: Seq[T], override val builder: Seq[R] => R, tag: Tag, cost: Int)
+    extends Generator[T,R](subTrees, builder)
+}
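A hand-written rule, to make the fields concrete (assumes the usual universe with its trees imported; LessThan and IntegerType come from the program's trees).

import universe._
import universe.program.trees._

// From two Integer nonterminals, build a strict comparison (default-style tag and cost).
val ltRule = ProductionRule[Label, Expr](
  Seq(Label(IntegerType), Label(IntegerType)),
  { case Seq(a, b) => LessThan(a, b) },
  Top,
  1
)

val e = ltRule.builder(Seq(IntegerLiteral(1), IntegerLiteral(2))) // LessThan(1, 2)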
diff --git a/src/main/scala/inox/grammars/SafeRecursiveCalls.scala b/src/main/scala/inox/grammars/SafeRecursiveCalls.scala
deleted file mode 100644
index fed42d36c57c7b0d3a39b7487391c462ba0ce2b0..0000000000000000000000000000000000000000
--- a/src/main/scala/inox/grammars/SafeRecursiveCalls.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* Copyright 2009-2016 EPFL, Lausanne */
-
-package leon
-package grammars
-
-import purescala.Path
-import purescala.Types._
-import purescala.Definitions._
-import purescala.ExprOps._
-import purescala.Expressions._
-import synthesis.utils.Helpers._
-
-/** Generates recursive calls that will not trivially result in non-termination.
-  *
-  * @param ws An expression that contains the known set [[synthesis.Witnesses.Terminating]] expressions
-  * @param pc The path condition for the generated [[Expr]] by this grammar
-  */
-case class SafeRecursiveCalls(prog: Program, ws: Expr, pc: Path) extends SimpleExpressionGrammar {
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = {
-    val calls = terminatingCalls(prog,ws, pc, Some(t), true)
-
-    calls.map { c => (c: @unchecked) match {
-      case (fi, Some(free)) =>
-        val freeSeq = free.toSeq
-
-        nonTerminal(
-          freeSeq.map(_.getType),
-          sub => replaceFromIDs(freeSeq.zip(sub).toMap, fi),
-          Tags.tagOf(fi.tfd.fd, isSafe = true),
-          formulaSize(fi) - freeSeq.size
-        )
-    }}
-  }
-}
diff --git a/src/main/scala/inox/grammars/SimpleExpressionGrammar.scala b/src/main/scala/inox/grammars/SimpleExpressionGrammar.scala
index 41b293f8101316351e2df3f23ed285d117326754..b571a32548b2c4035770d381892657869b7bc3eb 100644
--- a/src/main/scala/inox/grammars/SimpleExpressionGrammar.scala
+++ b/src/main/scala/inox/grammars/SimpleExpressionGrammar.scala
@@ -1,43 +1,44 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Expressions.Expr
-import purescala.Types.TypeTree
-
 /** An [[ExpressionGrammar]] whose productions for a given [[Label]]
-  * depend only on the underlying [[TypeTree]] of the label
+  * depend only on the underlying [[Type]] of the label
   */
-abstract class SimpleExpressionGrammar extends ExpressionGrammar {
+trait SimpleExpressionGrammars { self: GrammarsUniverse =>
+  import program.trees._
+  import program.symbols
 
-  type Prod = ProductionRule[TypeTree, Expr]
+  trait SimpleExpressionGrammar extends ExpressionGrammar {
+    type Prod = ProductionRule[Type, Expr]
 
-  /** Generates a [[ProductionRule]] without nonterminal symbols */
-  def terminal(builder: => Expr, tag: Tags.Tag = Tags.Top, cost: Int = 1) = {
-    ProductionRule[TypeTree, Expr](Nil, { (subs: Seq[Expr]) => builder }, tag, cost)
-  }
+    /** Generates a [[ProductionRule]] without nonterminal symbols */
+    def terminal(builder: => Expr, tag: Tag = Top, cost: Int = 1) = {
+      ProductionRule[Type, Expr](Nil, { (subs: Seq[Expr]) => builder }, tag, cost)
+    }
 
-  /** Generates a [[ProductionRule]] with nonterminal symbols */
-  def nonTerminal(subs: Seq[TypeTree], builder: (Seq[Expr] => Expr), tag: Tags.Tag = Tags.Top, cost: Int = 1) = {
-    ProductionRule[TypeTree, Expr](subs, builder, tag, cost)
-  }
+    /** Generates a [[ProductionRule]] with nonterminal symbols */
+    def nonTerminal(subs: Seq[Type], builder: (Seq[Expr] => Expr), tag: Tag = Top, cost: Int = 1) = {
+      ProductionRule[Type, Expr](subs, builder, tag, cost)
+    }
 
-  def filter(f: Prod => Boolean) = {
-    new SimpleExpressionGrammar {
-      def computeProductions(lab: TypeTree)(implicit ctx: LeonContext) = {
-        SimpleExpressionGrammar.this.computeProductions(lab).filter(f)
+    def filter(f: Prod => Boolean) = {
+      new SimpleExpressionGrammar {
+        def computeProductions(lab: Type)(implicit ctx: InoxContext) = {
+          SimpleExpressionGrammar.this.computeProductions(lab).filter(f)
+        }
       }
     }
-  }
 
-  // Finalize this to depend only on the type of the label
-  final def computeProductions(lab: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] = {
-    computeProductions(lab.getType).map { p =>
-      ProductionRule(p.subTrees.map(Label(_)), p.builder, p.tag, p.cost)
+    // Finalize this to depend only on the type of the label
+    final def computeProductions(lab: Label)(implicit ctx: InoxContext): Seq[ProductionRule[Label, Expr]] = {
+      computeProductions(lab.getType).map { p =>
+        ProductionRule(p.subTrees.map(Label(_)), p.builder, p.tag, p.cost)
+      }
     }
-  }
 
-  /** Version of [[ExpressionGrammar.computeProductions]] which depends only a [[TypeTree]] */
-  def computeProductions(tpe: TypeTree)(implicit ctx: LeonContext): Seq[Prod]
+    /** Version of [[ExpressionGrammar.computeProductions]] which depends only on a [[Type]] */
+    def computeProductions(tpe: Type)(implicit ctx: InoxContext): Seq[Prod]
+  }
 }
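A minimal custom instance, following the same pattern as the grammars in this patch; the trait name BitGrammars and the grammar itself are illustrative only.

package inox
package grammars

trait BitGrammars { self: GrammarsUniverse =>
  import program.trees._

  /** A toy grammar producing only the two Boolean literals. */
  case object Bits extends SimpleExpressionGrammar {
    def computeProductions(tpe: Type)(implicit ctx: InoxContext): Seq[Prod] = tpe match {
      case BooleanType => Seq(terminal(BooleanLiteral(true)), terminal(BooleanLiteral(false)))
      case _           => Nil
    }
  }
}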
diff --git a/src/main/scala/inox/grammars/Tags.scala b/src/main/scala/inox/grammars/Tags.scala
index 3686fd462e9560b457fa535a8fe88afb0a5816b4..a6de1b1fbb6c4d16f544711c088bcb7f84f25c6a 100644
--- a/src/main/scala/inox/grammars/Tags.scala
+++ b/src/main/scala/inox/grammars/Tags.scala
@@ -1,12 +1,13 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import purescala.Types.CaseClassType
-import purescala.Definitions.FunDef
+trait Tags { self: GrammarsUniverse =>
+  import program._
+  import trees.CaseClassDef
+  import trees.FunDef
 
-object Tags {
   /** A class for tags that tag a [[ProductionRule]] with the kind of expression it generates. */
   abstract class Tag
   case object Top       extends Tag // Tag for the top-level of the grammar (default)
@@ -37,7 +38,7 @@ object Tags {
     *               We need this because this call implicitly contains a variable,
     *               so we want to allow constants in all arguments.
     */
-  case class FunCall(isMethod: Boolean, isSafe: Boolean) extends Tag
+  //case class FunCall(isMethod: Boolean, isSafe: Boolean) extends Tag
 
   /** The set of tags that represent constants */
   val isConst: Set[Tag] = Set(Zero, One, Constant, BooleanC, Constructor(true))
@@ -56,10 +57,10 @@ object Tags {
     * top-level/ general function calls/ constructors/...?
     */
   def allConstArgsAllowed(t: Tag) = t match {
-    case FunCall(_, true) => true
+    //case FunCall(_, true) => true
     case _ => false
   }
 
-  def tagOf(cct: CaseClassType) = Constructor(cct.fields.isEmpty)
-  def tagOf(fd: FunDef, isSafe: Boolean) = FunCall(fd.methodOwner.isDefined, isSafe)
-}
\ No newline at end of file
+  def tagOf(cct: CaseClassDef) = Constructor(cct.fields.isEmpty)
+  //def tagOf(fd: FunDef, isSafe: Boolean) = FunCall(fd.methodOwner.isDefined, isSafe)
+}
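Tags are consumed as metadata when filtering or weighting productions; a sketch under the same assumed universe and implicit context:

import universe._
import universe.program.trees._

// Drop constant-only productions, keeping the arithmetic ones.
val noConsts = BaseGrammar.filter(p => !isConst(p.tag))
val prods    = noConsts.getProductions(Label(IntegerType))
// 0 and 1 (tagged Zero/One) are gone; the Plus/Minus/Times productions remain.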
diff --git a/src/main/scala/inox/grammars/Union.scala b/src/main/scala/inox/grammars/Union.scala
deleted file mode 100644
index 8d5d5a42f2f8b5313e9c6aafb64d31f18bf47be5..0000000000000000000000000000000000000000
--- a/src/main/scala/inox/grammars/Union.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* Copyright 2009-2015 EPFL, Lausanne */
-
-package leon
-package grammars
-
-import purescala.Expressions.Expr
-import purescala.Types.Typed
-
-case class Union(gs: Seq[ExpressionGrammar]) extends ExpressionGrammar {
-  val subGrammars: Seq[ExpressionGrammar] = gs.flatMap {
-    case u: Union => u.subGrammars
-    case g => Seq(g)
-  }
-
-  def computeProductions(label: Label)(implicit ctx: LeonContext): Seq[ProductionRule[Label, Expr]] =
-    subGrammars.flatMap(_.computeProductions(label))
-}
diff --git a/src/main/scala/inox/grammars/ValueGrammar.scala b/src/main/scala/inox/grammars/ValueGrammar.scala
index a713445839ff3622ec0bb3778c57a941edbf09d2..ea38e87b6c0a37a7e169f8a6edc6f63e6d82728f 100644
--- a/src/main/scala/inox/grammars/ValueGrammar.scala
+++ b/src/main/scala/inox/grammars/ValueGrammar.scala
@@ -1,93 +1,98 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 
-import leon.purescala.Common.FreshIdentifier
-import leon.purescala.Definitions.ValDef
-import purescala.Types._
-import purescala.Expressions._
+trait ValueGrammars { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols._
 
-/** A grammar of values (ground terms) */
-case object ValueGrammar extends SimpleExpressionGrammar {
-  def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
-    case BooleanType =>
-      List(
-        terminal(BooleanLiteral(true), Tags.One),
-        terminal(BooleanLiteral(false), Tags.Zero)
-      )
-    case Int32Type =>
-      List(
-        terminal(IntLiteral(0), Tags.Zero),
-        terminal(IntLiteral(1), Tags.One),
-        terminal(IntLiteral(5), Tags.Constant)
-      )
-    case IntegerType =>
-      List(
-        terminal(InfiniteIntegerLiteral(0), Tags.Zero),
-        terminal(InfiniteIntegerLiteral(1), Tags.One),
-        terminal(InfiniteIntegerLiteral(5), Tags.Constant)
-      )
-    case CharType =>
-      List(
-        terminal(CharLiteral('a'), Tags.Constant),
-        terminal(CharLiteral('b'), Tags.Constant),
-        terminal(CharLiteral('0'), Tags.Constant)
-      )
-    case RealType =>
-      List(
-        terminal(FractionalLiteral(0, 1), Tags.Zero),
-        terminal(FractionalLiteral(1, 1), Tags.One),
-        terminal(FractionalLiteral(-1, 2), Tags.Constant),
-        terminal(FractionalLiteral(555, 42), Tags.Constant)
-      )
-    case StringType =>
-      List(
-        terminal(StringLiteral(""), Tags.Constant),
-        terminal(StringLiteral("a"), Tags.Constant),
-        terminal(StringLiteral("foo"), Tags.Constant),
-        terminal(StringLiteral("bar"), Tags.Constant)
-      )
+  /** A grammar of values (ground terms) */
+  case object ValueGrammar extends SimpleExpressionGrammar {
+    def computeProductions(t: Type)(implicit ctx: InoxContext): Seq[Prod] = t match {
+      case BooleanType =>
+        List(
+          terminal(BooleanLiteral(true), One),
+          terminal(BooleanLiteral(false), Zero)
+        )
+      case Int32Type =>
+        List(
+          terminal(IntLiteral(0), Zero),
+          terminal(IntLiteral(1), One),
+          terminal(IntLiteral(5), Constant)
+        )
+      case IntegerType =>
+        List(
+          terminal(IntegerLiteral(0), Zero),
+          terminal(IntegerLiteral(1), One),
+          terminal(IntegerLiteral(5), Constant)
+        )
+      case CharType =>
+        List(
+          terminal(CharLiteral('a'), Constant),
+          terminal(CharLiteral('b'), Constant),
+          terminal(CharLiteral('0'), Constant)
+        )
+      case RealType =>
+        List(
+          terminal(FractionLiteral(0, 1), Zero),
+          terminal(FractionLiteral(1, 1), One),
+          terminal(FractionLiteral(-1, 2), Constant),
+          terminal(FractionLiteral(555, 42), Constant)
+        )
+      case StringType =>
+        List(
+          terminal(StringLiteral(""), Constant),
+          terminal(StringLiteral("a"), Constant),
+          terminal(StringLiteral("foo"), Constant),
+          terminal(StringLiteral("bar"), Constant)
+        )
 
-    case tp: TypeParameter =>
-      List(
-        terminal(GenericValue(tp, 0))
-      )
+      case tp: TypeParameter =>
+        List(
+          terminal(GenericValue(tp, 0))
+        )
 
-    case TupleType(stps) =>
-      List(
-        nonTerminal(stps, Tuple, Tags.Constructor(stps.isEmpty))
-      )
+      case TupleType(stps) =>
+        List(
+          nonTerminal(stps, Tuple, Constructor(stps.isEmpty))
+        )
 
-    case cct: CaseClassType =>
-      List(
-        nonTerminal(cct.fields.map(_.getType), CaseClass(cct, _), Tags.tagOf(cct))
-      )
+      case ct: ClassType =>
+        ct.tcd match {
+          case cct: TypedCaseClassDef =>
+            List(
+              nonTerminal(cct.fields.map(_.getType), CaseClass(cct.toType, _), tagOf(cct.cd))
+            )
 
-    case act: AbstractClassType =>
-      act.knownCCDescendants.map { cct =>
-        nonTerminal(cct.fields.map(_.getType), CaseClass(cct, _), Tags.tagOf(cct))
-      }
+          case act: TypedAbstractClassDef =>
+            act.descendants.map { cct =>
+              nonTerminal(cct.fields.map(_.getType), CaseClass(cct.toType, _), tagOf(cct.cd))
+            }
+        }
 
-    case st @ SetType(base) =>
-      List(
-        terminal(FiniteSet(Set(), base), Tags.Constant),
-        nonTerminal(List(base),       { elems => FiniteSet(elems.toSet, base) }, Tags.Constructor(isTerminal = false)),
-        nonTerminal(List(base, base), { elems => FiniteSet(elems.toSet, base) }, Tags.Constructor(isTerminal = false))
-      )
-    
-    case UnitType =>
-      List(
-        terminal(UnitLiteral(), Tags.Constant)
-      )
+      case st @ SetType(base) =>
+        List(
+          terminal(FiniteSet(Seq(), base), Constant),
+          nonTerminal(List(base),       { elems => FiniteSet(elems, base) }, Constructor(isTerminal = false)),
+          nonTerminal(List(base, base), { elems => FiniteSet(elems, base) }, Constructor(isTerminal = false))
+        )
+      
+      case UnitType =>
+        List(
+          terminal(UnitLiteral(), Constant)
+        )
 
-    case FunctionType(from, to) =>
-      val args = from map (tp => ValDef(FreshIdentifier("x", tp, true)))
-      List(
-        nonTerminal(Seq(to), { case Seq(e) => Lambda(args, e) })
-      )
+      case FunctionType(from, to) =>
+        val args = from map (tp => ValDef(FreshIdentifier("x", true), tp))
+        List(
+          nonTerminal(Seq(to), { case Seq(e) => Lambda(args, e) })
+        )
 
-    case _ =>
-      Nil
+      case _ =>
+        Nil
+    }
   }
 }
+
diff --git a/src/main/scala/inox/grammars/aspects/AllAspects.scala b/src/main/scala/inox/grammars/aspects/AllAspects.scala
new file mode 100644
index 0000000000000000000000000000000000000000..57e7cda88599ad37cacaac25b5cf09d7c35e4948
--- /dev/null
+++ b/src/main/scala/inox/grammars/aspects/AllAspects.scala
@@ -0,0 +1,12 @@
+package inox.grammars
+package aspects
+
+trait AllAspects extends SizeAspects
+  with ExtraTerminalsAspects
+  with SimilarToAspects
+  with DepthBoundAspects
+  with TypeDepthBoundAspects
+  with TaggedAspects
+{
+  self: GrammarsUniverse =>
+}
diff --git a/src/main/scala/inox/grammars/aspects/DepthBound.scala b/src/main/scala/inox/grammars/aspects/DepthBound.scala
index b694b5403b404e4298a73a4bd471f1927157f451..82c35c28cba616ab5162403fcd8a57c7f17aee86 100644
--- a/src/main/scala/inox/grammars/aspects/DepthBound.scala
+++ b/src/main/scala/inox/grammars/aspects/DepthBound.scala
@@ -1,23 +1,27 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-/** Limits a grammar by depth */
-case class DepthBound(depth: Int) extends Aspect {
-  require(depth >= 0)
+trait DepthBoundAspects { self: GrammarsUniverse =>
+  import program.trees.PrinterOptions
 
-  def asString(implicit ctx: LeonContext): String = s"D$depth"
+  /** Limits a grammar by depth */
+  case class DepthBound(depth: Int) extends Aspect {
+    require(depth >= 0)
 
-  def applyTo(l: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    if (depth == 0) Nil
-    else if (depth == 1) ps.filter(_.isTerminal)
-    else {
-      ps map { prod =>
-        prod.copy(subTrees = prod.subTrees.map(_.withAspect(DepthBound(depth - 1))))
+    def asString(implicit opts: PrinterOptions): String = s"D$depth"
+
+    def applyTo(l: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      if (depth == 0) Nil
+      else if (depth == 1) ps.filter(_.isTerminal)
+      else {
+        ps map { prod =>
+          prod.copy(subTrees = prod.subTrees.map(_.withAspect(DepthBound(depth - 1))))
+        }
       }
     }
   }
-
 }
+
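Effect-wise (assuming the usual universe and implicit context): requesting depth 2 keeps one layer of operators whose sub-labels then only admit terminals.

import universe._
import universe.program.trees._

val lab   = Label(IntegerType).withAspect(DepthBound(2))
val prods = BaseGrammar.getProductions(lab)

// every nonterminal of every production has been re-labelled with DepthBound(1)
assert(prods.forall(_.subTrees.forall(_.aspects contains DepthBound(1))))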
diff --git a/src/main/scala/inox/grammars/aspects/ExtraTerminals.scala b/src/main/scala/inox/grammars/aspects/ExtraTerminals.scala
index 213ea7445af150e55a105e1090e4b0c21d15e4ef..a64294ae8858ad3fc460902fc04283f3c6a5f177 100644
--- a/src/main/scala/inox/grammars/aspects/ExtraTerminals.scala
+++ b/src/main/scala/inox/grammars/aspects/ExtraTerminals.scala
@@ -1,28 +1,31 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-import purescala.Expressions.Expr
-import purescala.ExprOps.formulaSize
-import purescala.TypeOps.isSubtypeOf
-
-/**
- * Informs sub-productions that there are extra terminals available (used by
- * grammars.ExtraTerminals).
- */
-case class ExtraTerminals(s: Set[Expr]) extends PersistentAspect {
-  def asString(implicit ctx: LeonContext) = {
-    s.toList.map(_.asString).mkString("{", ",", "}")
-  }
+trait ExtraTerminalsAspects { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import exprOps.formulaSize
+  import symbols._
 
+  /**
+   * Informs sub-productions that there are extra terminals available (used by
+   * grammars.ExtraTerminals).
+   */
+  case class ExtraTerminals(s: Set[Expr]) extends PersistentAspect {
+    def asString(implicit opts: PrinterOptions) = {
+      s.toList.map(_.asString(opts)).mkString("{", ",", "}")
+    }
 
-  override def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    super.applyTo(lab, ps) ++ {
-      s.filter(e => isSubtypeOf(e.getType, lab.getType)).map { e =>
-        ProductionRule[Label, Expr](Nil, { (es: Seq[Expr]) => e }, Tags.Top, formulaSize(e))
+    override def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      super.applyTo(lab, ps) ++ {
+        s.filter(e => isSubtypeOf(e.getType, lab.getType)).map { e =>
+          ProductionRule[Label, Expr](Nil, { (es: Seq[Expr]) => e }, Top, formulaSize(e))
+        }
       }
     }
   }
 }
+
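A usage sketch, hedged as before; x stands for some integer-typed Variable of the program's trees, assumed to be in scope.

import universe._
import universe.program.trees._

val lab   = Label(IntegerType).withAspect(ExtraTerminals(Set(x)))
val prods = BaseGrammar.getProductions(lab)
// prods now contains a zero-subtree production that simply rebuilds x,
// alongside the usual 0, 1, plus, minus and times productions; because this
// is a PersistentAspect, the {x} annotation is also pushed onto every sub-label.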
diff --git a/src/main/scala/inox/grammars/aspects/PersistentAspect.scala b/src/main/scala/inox/grammars/aspects/PersistentAspect.scala
index 05f17f73642eb7d109de95269aeb200ea5a2ce2c..6f35c218ba6bb7b57546b0dc083b843277dc0d07 100644
--- a/src/main/scala/inox/grammars/aspects/PersistentAspect.scala
+++ b/src/main/scala/inox/grammars/aspects/PersistentAspect.scala
@@ -1,26 +1,29 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-/**
- * Persistent aspects allow label information to be propagated down:
- * Int{e} means (Int with a terminal 'e'). And thus, the Closure grammar
- * is able to have, as production:
- *   Int=>Int  :=  (e: Int) => Int{e}
- * In turn, all Int productions, e.g. Int := Int + Int, gets transformed by the
- * aspect and generate:
- *   Int{e}  :=  Int{e} + Int{e}
- *
- * This with the ExtraTerminals grammar, enables the generation of expressions
- * like:
- *   e + 1
- */
-abstract class PersistentAspect extends Aspect {
-  def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    ps.map { p =>
-      p.copy(subTrees = p.subTrees.map(lab => lab.withAspect(this)))
+trait PersistentAspects { self: GrammarsUniverse =>
+
+  /**
+   * Persistent aspects allow label information to be propagated down:
+   * Int{e} means (Int with a terminal 'e'). And thus, the Closure grammar
+   * is able to have, as production:
+   *   Int=>Int  :=  (e: Int) => Int{e}
+   * In turn, all Int productions, e.g. Int := Int + Int, get transformed by the
+   * aspect and generate:
+   *   Int{e}  :=  Int{e} + Int{e}
+   *
+   * This, together with the ExtraTerminals grammar, enables the generation of expressions
+   * like:
+   *   e + 1
+   */
+  abstract class PersistentAspect extends Aspect {
+    def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      ps.map { p =>
+        p.copy(subTrees = p.subTrees.map(lab => lab.withAspect(this)))
+      }
     }
   }
 }
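A hypothetical second persistent aspect, to show the intended extension point; the cost-cap behaviour is illustrative only, and super.applyTo is what performs the propagation.

package inox
package grammars
package aspects

trait CostCapAspects { self: GrammarsUniverse =>
  import program.trees.PrinterOptions

  /** Propagates itself to all sub-labels and drops productions above a given cost. */
  case class CostCap(max: Int) extends PersistentAspect {
    def asString(implicit opts: PrinterOptions) = s"<=$max"

    override def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) =
      super.applyTo(lab, ps).filter(_.cost <= max) // super re-tags the sub-labels
  }
}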
diff --git a/src/main/scala/inox/grammars/aspects/SimilarTo.scala b/src/main/scala/inox/grammars/aspects/SimilarTo.scala
index fc500814826f8cf33f79f17f639a56069d82f43b..abbbe29d75bdf77c8d3564d1870f6585bae4436d 100644
--- a/src/main/scala/inox/grammars/aspects/SimilarTo.scala
+++ b/src/main/scala/inox/grammars/aspects/SimilarTo.scala
@@ -1,134 +1,142 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-import purescala.Expressions._
-import purescala.Types._
-import purescala.TypeOps._
-import purescala.Constructors._
-import purescala.Extractors._
-import utils.SeqUtils._
+import inox.utils.SeqUtils._
 
-/** Generates expressions similar to a [[Seq]] of given expressions
-  * @param es The expressions for which similar ones will be generated
-  */
-case class SimilarTo(es: Seq[Expr]) extends Aspect {
-  type Prods = Seq[ProductionRule[Label, Expr]]
+trait SimilarToAspects { self: GrammarsUniverse =>
+  import program._
+  import trees.{Minus => EMinus, Plus => EPlus, Times => ETimes, _}
+  import symbols._
 
-  def asString(implicit ctx: LeonContext) = es.mkString("~", "~", "~")
+  /** Generates expressions similar to a [[Seq]] of given expressions
+    * @param es The expressions for which similar ones will be generated
+    */
+  case class SimilarTo(es: Seq[Expr]) extends Aspect {
+    type Prods = Seq[ProductionRule[Label, Expr]]
 
-  def term(e: Expr, tag: Tags.Tag = Tags.Top, cost: Int = 1): ProductionRule[Label, Expr] = {
-    ProductionRule(Nil, { case Seq() => e }, tag, cost)
-  }
+    def asString(implicit opts: PrinterOptions) = es.map(_.asString(opts)).mkString("~", "~", "~")
 
-  /**
-   * ~f(a,b)~  ::=  f(~a~, b)
-   *                f(a, ~b~)
-   *                f(b, a)   // if non-commut
-   */
-  def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    def isCommutative(e: Expr) = e match {
-      case _: Plus | _: Times => true
-      case _ => false
+    def term(e: Expr, tag: Tag = Top, cost: Int = 1): ProductionRule[Label, Expr] = {
+      ProductionRule(Nil, { case Seq() => e }, tag, cost)
     }
 
-    val similarProds: Prods = es.filter(e => isSubtypeOf(e.getType, lab.getType)).flatMap { e =>
-      val swaps: Prods = e match {
-        case Operator(as, b) if as.nonEmpty && !isCommutative(e) =>
-          val ast = as.zipWithIndex.groupBy(_._1.getType).mapValues(_.map(_._2).toList)
-
-          val perms = ast.values.map { is =>
-            is.permutations.toList.filter(p => p != is).map(ps => (is zip ps).toMap)
-          }.filter(_.nonEmpty).toList
-
-          //println("Perms:")
-          //for (ps <- perms) {
-          //  println(" - "+ps.mkString(", "))
-          //}
-
-          for (ps <- cartesianProduct(perms)) yield {
-            val perm = ps.foldLeft(Map[Int, Int]())( _ ++ _ )
-
-            //println("Trying permutation "+perm+": "+
-            //    b(as.indices.map { i =>
-            //      as(perm.getOrElse(i, i))
-            //    }))
-
-            term(b(as.indices.map { i => as(perm.getOrElse(i, i)) }))
-          }
-        case _ =>
-          Nil
+    /**
+     * ~f(a,b)~  ::=  f(~a~, b)
+     *                f(a, ~b~)
+     *                f(b, a)   // if non-commut
+     */
+    def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      def isCommutative(e: Expr) = e match {
+        case _: EPlus | _: ETimes => true
+        case _ => false
       }
 
-      val subs: Prods = e match {
-        case Operator(as, b) if as.nonEmpty =>
-          for ((a, i) <- as.zipWithIndex) yield {
-            ProductionRule[Label, Expr](
-              List(Label(a.getType).withAspect(SimilarTo(Seq(a)))),
-              { case Seq(e) =>
-                b(as.updated(i, e))
-              },
-              Tags.Top,
-              1
+      val similarProds: Prods = es.filter(e => isSubtypeOf(e.getType, lab.getType)).flatMap { e =>
+        val swaps: Prods = e match {
+          case Operator(as, b) if as.nonEmpty && !isCommutative(e) =>
+            val ast = as.zipWithIndex.groupBy(_._1.getType).mapValues(_.map(_._2).toList)
+
+            val perms = ast.values.map { is =>
+              is.permutations.toList.filter(p => p != is).map(ps => (is zip ps).toMap)
+            }.filter(_.nonEmpty).toList
+
+            //println("Perms:")
+            //for (ps <- perms) {
+            //  println(" - "+ps.mkString(", "))
+            //}
+
+            for (ps <- cartesianProduct(perms)) yield {
+              val perm = ps.foldLeft(Map[Int, Int]())( _ ++ _ )
+
+              //println("Trying permutation "+perm+": "+
+              //    b(as.indices.map { i =>
+              //      as(perm.getOrElse(i, i))
+              //    }))
+
+              term(b(as.indices.map { i => as(perm.getOrElse(i, i)) }))
+            }
+          case _ =>
+            Nil
+        }
+
+        val subs: Prods = e match {
+          case Operator(as, b) if as.nonEmpty =>
+            for ((a, i) <- as.zipWithIndex) yield {
+              ProductionRule[Label, Expr](
+                List(Label(a.getType).withAspect(SimilarTo(Seq(a)))),
+                { case Seq(e) =>
+                  b(as.updated(i, e))
+                },
+                Top,
+                1
+              )
+            }
+          case _ =>
+            Nil
+        }
+
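+        // Small type-directed perturbations: integer-valued literals and expressions
+        // are shifted by one, boolean ones are negated.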
+        val typeVariations: Prods = e match {
+          case IntegerLiteral(v) =>
+            List(
+              term(IntegerLiteral(v + 1)),
+              term(IntegerLiteral(v - 1))
             )
-          }
-        case _ =>
-          Nil
-      }
 
-      val typeVariations: Prods = e match {
-        case InfiniteIntegerLiteral(v) =>
-          List(
-            term(InfiniteIntegerLiteral(v + 1)),
-            term(InfiniteIntegerLiteral(v - 1))
-          )
-
-        case IntLiteral(v) =>
-          List(
-            term(IntLiteral(v + 1)),
-            term(IntLiteral(v - 1))
-          )
-
-        case BooleanLiteral(v) =>
-          List(
-            term(BooleanLiteral(!v))
-          )
-
-        case IsTyped(e, IntegerType) =>
-          List(
-            term(Plus(e, InfiniteIntegerLiteral(1))),
-            term(Minus(e, InfiniteIntegerLiteral(1)))
-          )
-        case IsTyped(e, Int32Type) =>
-          List(
-            term(BVPlus(e, IntLiteral(1))),
-            term(BVMinus(e, IntLiteral(1)))
-          )
-        case IsTyped(e, BooleanType) =>
-          List(
-            term(not(e))
-          )
-
-        case _ =>
-          Nil
-      }
+          case IntLiteral(v) =>
+            List(
+              term(IntLiteral(v + 1)),
+              term(IntLiteral(v - 1))
+            )
+
+          case BooleanLiteral(v) =>
+            List(
+              term(BooleanLiteral(!v))
+            )
 
-      val ccVariations: Prods = e match {
-        case CaseClass(cct, args) =>
-          val neighbors = cct.root.knownCCDescendants diff Seq(cct)
+          case IsTyped(e, IntegerType) =>
+            List(
+              term(EPlus(e, IntegerLiteral(1))),
+              term(EMinus(e, IntegerLiteral(1)))
+            )
+          case IsTyped(e, Int32Type) =>
+            List(
+              term(EPlus(e, IntLiteral(1))),
+              term(EMinus(e, IntLiteral(1)))
+            )
+          case IsTyped(e, BooleanType) =>
+            List(
+              term(not(e))
+            )
 
-          for (scct <- neighbors if scct.fieldsTypes == cct.fieldsTypes) yield {
-            term(CaseClass(scct, args))
-          }
-        case _ =>
-          Nil
+          case _ =>
+            Nil
+        }
+
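+        // Swap the constructor for a sibling case class with exactly the same field
+        // types, keeping the original arguments.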
+        val ccVariations: Prods = e match {
+          case CaseClass(cct, args) =>
+            val resType = cct.tcd.toCase
+            val neighbors = resType.root match {
+              case acd: TypedAbstractClassDef =>
+                acd.descendants diff Seq(resType)
+              case ccd: TypedCaseClassDef =>
+                Nil
+            }
+
+            for (scct <- neighbors if scct.fieldsTypes == resType.fieldsTypes) yield {
+              term(CaseClass(scct.toType, args))
+            }
+          case _ =>
+            Nil
+        }
+
+        swaps ++ subs ++ typeVariations ++ ccVariations
       }
 
-      swaps ++ subs ++ typeVariations ++ ccVariations
+      ps ++ similarProds
     }
-
-    ps ++ similarProds
   }
 }
+
diff --git a/src/main/scala/inox/grammars/aspects/Sized.scala b/src/main/scala/inox/grammars/aspects/Sized.scala
index e2ed3c0e835c6c1c6b53857ae7d85a69080f4a94..2d39d2feeed3b0237e38838b6b9ad463894ff84b 100644
--- a/src/main/scala/inox/grammars/aspects/Sized.scala
+++ b/src/main/scala/inox/grammars/aspects/Sized.scala
@@ -1,44 +1,51 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-import utils.SeqUtils._
+import inox.utils.SeqUtils._
 
 /**
  * Attach sizes to labels and transmit them down accordingly
  */
-case class Sized(size: Int) extends Aspect {
-  def asString(implicit ctx: LeonContext) = "|"+size+"|"
-
-  def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    val optimizeCommut = true
-
-    ps.flatMap { p =>
-      if (size <= 0) {
-        Nil
-      } else if (p.arity == 0) {
-        if (size == p.cost) {
-          List(p)
-        } else {
+trait SizeAspects { self: GrammarsUniverse =>
+  import program.trees.PrinterOptions
+
+  case class Sized(size: Int) extends Aspect {
+    def asString(implicit opts: PrinterOptions) = "|"+size+"|"
+
+    def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      val optimizeCommut = true
+
+      ps.flatMap { p =>
+        if (size <= 0) {
           Nil
-        }
-      } else {
-        val sizes = if(optimizeCommut && Tags.isCommut(p.tag) && p.subTrees.toSet.size == 1) {
-          sumToOrdered(size - p.cost, p.arity)
+        } else if (p.arity == 0) {
+          if (size == p.cost) {
+            List(p)
+          } else {
+            Nil
+          }
         } else {
-          sumTo(size - p.cost, p.arity)
-        }
-
-        for (ss <- sizes) yield {
-          val newSubTrees = (p.subTrees zip ss).map {
-            case (lab, s) => lab.withAspect(Sized(s))
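+          // Split the remaining size (size - p.cost) among the p.arity sub-labels, e.g.
+          // with size = 3, cost = 1 and arity = 2 the only split is (1, 1). For commutative
+          // productions whose sub-labels are all identical, only ordered splits are kept
+          // to avoid enumerating symmetric duplicates.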
+          val sizes = if(optimizeCommut && isCommut(p.tag) && p.subTrees.toSet.size == 1) {
+            sumToOrdered(size - p.cost, p.arity)
+          } else {
+            sumTo(size - p.cost, p.arity)
           }
 
-          ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+          for (ss <- sizes) yield {
+            val newSubTrees = (p.subTrees zip ss).map {
+              case (lab, s) => lab.withAspect(Sized(s))
+            }
+
+            ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+          }
         }
       }
     }
+  
   }
+
 }
+
diff --git a/src/main/scala/inox/grammars/aspects/Tagged.scala b/src/main/scala/inox/grammars/aspects/Tagged.scala
index c6838ab2fb31c906866bb896965293f8211f0f03..ab97369384af02d0c493edd80635ab1afdfd047f 100644
--- a/src/main/scala/inox/grammars/aspects/Tagged.scala
+++ b/src/main/scala/inox/grammars/aspects/Tagged.scala
@@ -1,89 +1,91 @@
 /* Copyright 2009-2015 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-import Tags._
+trait TaggedAspects { self: GrammarsUniverse =>
+  import program.trees.PrinterOptions
 
-case class Tagged(tag: Tag, pos: Int, isConst: Option[Boolean]) extends Aspect {
-  private val cString = isConst match {
-    case Some(true) => "↓"
-    case Some(false) => "↑"
-    case None => "○"
-  }
+  case class Tagged(tag: Tag, pos: Int, isConst: Option[Boolean]) extends Aspect {
+    private val cString = isConst match {
+      case Some(true) => "↓"
+      case Some(false) => "↑"
+      case None => "○"
+    }
 
-  /** [[isConst]] is printed as follows: ↓ for constants only, ↑ for nonconstants only,
-    * ○ for anything allowed.
-    */
-  def asString(implicit ctx: LeonContext): String = s"#$tag$cString@$pos"
+    /** [[isConst]] is printed as follows: ↓ for constants only, ↑ for nonconstants only,
+      * ○ for anything allowed.
+      */
+    def asString(implicit opts: PrinterOptions): String = s"#$tag$cString@$pos"
 
-  def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
+    def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
 
-    // Tags to avoid depending on parent aspect
-    val excludedTags: Set[Tag] = (tag, pos) match {
-      case (Top,   _)             => Set()
-      case (And,   0)             => Set(And, BooleanC)
-      case (And,   1)             => Set(BooleanC)
-      case (Or,    0)             => Set(Or, BooleanC)
-      case (Or,    1)             => Set(BooleanC)
-      case (Plus,  0)             => Set(Plus, Zero, One)
-      case (Plus,  1)             => Set(Zero)
-      case (Minus, 1)             => Set(Zero)
-      case (Not,   _)             => Set(Not, BooleanC)
-      case (Times, 0)             => Set(Times, Zero, One)
-      case (Times, 1)             => Set(Zero, One)
-      case (Equals,_)             => Set(Not, BooleanC)
-      case (Div | Mod, 0 | 1)     => Set(Zero, One)
-      case (FunCall(true, _), 0)  => Set(Constructor(true)) // Don't allow Nil().size etc.
-      case _                      => Set()
-    }
+      // Tags to avoid depending on parent aspect
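+      // e.g. as the left (position 0) child of an And we exclude And itself and boolean
+      // constants, so that (a && b) && c or true && x are not regenerated.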
+      val excludedTags: Set[Tag] = (tag, pos) match {
+        case (Top,   _)             => Set()
+        case (And,   0)             => Set(And, BooleanC)
+        case (And,   1)             => Set(BooleanC)
+        case (Or,    0)             => Set(Or, BooleanC)
+        case (Or,    1)             => Set(BooleanC)
+        case (Plus,  0)             => Set(Plus, Zero, One)
+        case (Plus,  1)             => Set(Zero)
+        case (Minus, 1)             => Set(Zero)
+        case (Not,   _)             => Set(Not, BooleanC)
+        case (Times, 0)             => Set(Times, Zero, One)
+        case (Times, 1)             => Set(Zero, One)
+        case (Equals,_)             => Set(Not, BooleanC)
+        case (Div | Mod, 0 | 1)     => Set(Zero, One)
+        //case (FunCall(true, _), 0)  => Set(Constructor(true)) // Don't allow Nil().size etc.
+        case _                      => Set()
+      }
 
-    def powerSet[A](t: Set[A]): Set[Set[A]] = {
-      @scala.annotation.tailrec
-      def pwr(t: Set[A], ps: Set[Set[A]]): Set[Set[A]] =
-        if (t.isEmpty) ps
-        else pwr(t.tail, ps ++ (ps map (_ + t.head)))
+      def powerSet[A](t: Set[A]): Set[Set[A]] = {
+        @scala.annotation.tailrec
+        def pwr(t: Set[A], ps: Set[Set[A]]): Set[Set[A]] =
+          if (t.isEmpty) ps
+          else pwr(t.tail, ps ++ (ps map (_ + t.head)))
 
-      pwr(t, Set(Set.empty[A]))
-    }
+        pwr(t, Set(Set.empty[A]))
+      }
 
 
-    ps.flatMap { p =>
-      val tagsValid = !(excludedTags contains p.tag)
+      ps.flatMap { p =>
+        val tagsValid = !(excludedTags contains p.tag)
 
-      // If const-ness is explicit, make sure the production has similar const-ness
-      val constValid = isConst match {
-        case Some(b) => Tags.isConst(p.tag) == b
-        case None    => true
-      }
+        // If const-ness is explicit, make sure the production has similar const-ness
+        val constValid = isConst match {
+          case Some(b) => TaggedAspects.this.isConst(p.tag) == b
+          case None    => true
+        }
 
-      if (constValid && tagsValid) {
-        val subAspects = if (p.isTerminal || Tags.allConstArgsAllowed(p.tag)) {
-          Seq(p.subTrees.indices.map { i =>
-            Tagged(p.tag, i, None)
-          })
-        } else {
-          // All positions are either forced to be const or forced to be
-          // non-const. We don't want all consts though.
-          val indices = p.subTrees.indices.toSet
+        if (constValid && tagsValid) {
+          val subAspects = if (p.isTerminal || allConstArgsAllowed(p.tag)) {
+            Seq(p.subTrees.indices.map { i =>
+              Tagged(p.tag, i, None)
+            })
+          } else {
+            // All positions are either forced to be const or forced to be
+            // non-const. We don't want all consts though.
+            val indices = p.subTrees.indices.toSet
 
-          (powerSet(indices) - indices) map { isConst =>
-            p.subTrees.indices.map { i =>
-              Tagged(p.tag, i, Some(isConst(i)))
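+            // powerSet(indices) - indices enumerates the sets of positions forced to be
+            // constant, e.g. {}, {0} and {1} for a binary production (never all of them).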
+            (powerSet(indices) - indices) map { isConst =>
+              p.subTrees.indices.map { i =>
+                Tagged(p.tag, i, Some(isConst(i)))
+              }
             }
           }
-        }
 
-        for (as <- subAspects) yield {
-          val newSubTrees = (p.subTrees zip as).map { case (lab, a) =>
-            lab.withAspect(a)
-          }
+          for (as <- subAspects) yield {
+            val newSubTrees = (p.subTrees zip as).map { case (lab, a) =>
+              lab.withAspect(a)
+            }
 
-          ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+            ProductionRule(newSubTrees, p.builder, p.tag, p.cost)
+          }
+        } else {
+          Nil
         }
-      } else {
-        Nil
       }
     }
   }
diff --git a/src/main/scala/inox/grammars/aspects/TypeDepthBound.scala b/src/main/scala/inox/grammars/aspects/TypeDepthBound.scala
index c46e51f4bd4d0ec2c0b750a89abd8692435c749f..5338f213b45266be379d76d2ac2dd8d5b44ba474 100644
--- a/src/main/scala/inox/grammars/aspects/TypeDepthBound.scala
+++ b/src/main/scala/inox/grammars/aspects/TypeDepthBound.scala
@@ -1,17 +1,21 @@
 /* Copyright 2009-2016 EPFL, Lausanne */
 
-package leon
+package inox
 package grammars
 package aspects
 
-import purescala.TypeOps.depth
+trait TypeDepthBoundAspects { self: GrammarsUniverse =>
+  import program._
+  import trees._
+  import symbols.typeOps.depth
 
-case class TypeDepthBound(bound: Int) extends PersistentAspect {
-  override def asString(implicit ctx: LeonContext): String = "" // This is just debug pollution to print
+  case class TypeDepthBound(bound: Int) extends PersistentAspect {
+    override def asString(implicit opts: PrinterOptions): String = "" // This is just debug pollution to print
 
-  override def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: LeonContext) = {
-    if (depth(lab.getType) > bound) Nil
-    else super.applyTo(lab, ps)
-  }
+    override def applyTo(lab: Label, ps: Seq[Production])(implicit ctx: InoxContext) = {
+      if (depth(lab.getType) > bound) Nil
+      else super.applyTo(lab, ps)
+    }
 
+  }
 }
diff --git a/src/main/scala/inox/grammars/utils/Helpers.scala b/src/main/scala/inox/grammars/utils/Helpers.scala
new file mode 100644
index 0000000000000000000000000000000000000000..40611043515530c4c496a3a3ca429d9a0865c4dd
--- /dev/null
+++ b/src/main/scala/inox/grammars/utils/Helpers.scala
@@ -0,0 +1,111 @@
+/* Copyright 2009-2016 EPFL, Lausanne */
+
+package inox
+package grammars
+package utils
+
+trait Helpers { self: GrammarsUniverse =>
+  import program._
+  import trees.{ Minus => EMinus, Plus => EPlus, Variable => EVariable, _ }
+  import exprOps._
+  import symbols._
+
+  /**
+   * Filter functions potentially returning target type
+   *
+   * If the function takes type parameters, it will try to find an assignment
+   * such that the function returns the target type.
+   *
+   * The return is thus a set of typed functions.
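+   *
+   * Sketch (hypothetical declaration): given `def wrap[T](x: T): Box[T]` and the target
+   * type `Box[BigInt]`, the assignment `T := BigInt` is found and `wrap[BigInt]` is returned.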
+   */
+  def functionsReturning(fds: Set[FunDef], tpe: Type): Set[TypedFunDef] = {
+    fds.flatMap { fd =>
+      canBeSubtypeOf(fd.returnType, tpe) match {
+        case Some(tpsMap) =>
+          Some(fd.typed(fd.typeArgs.map(tp => tpsMap.getOrElse(tp, tp))))
+        case None =>
+          None
+      }
+    }
+  }
+
+  /** Given an initial set of function calls provided by a list of [[Terminating]] predicates,
+    * returns function calls that are hopefully safe to call recursively from within these initial calls.
+    *
+    * For each returned call, one argument is substituted by a "smaller" one, while the rest are left as holes.
+    *
+    * @param prog The current program
+    * @param wss The initial function calls, as found in the [[Terminating]] helper predicates
+    * @param pc The path condition
+    * @param tpe The expected type for the returned function calls. If absent, all types are permitted.
+    * @return A list of pairs (safe function call, holes),
+    *         where holes stand for the rest of the arguments of the function.
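+    *
+    * Sketch (hypothetical list ADT): with initial call `size(l)` and a binding
+    * `t := l.tail` in the path condition, the smaller call `size(t)` is returned.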
+    */
+  def terminatingCalls(prog: Program, wss: Seq[FunctionInvocation], pc: Path, tpe: Option[Type], introduceHoles: Boolean): List[(FunctionInvocation, Option[Set[Identifier]])] = {
+
+    def subExprsOf(expr: Expr, v: EVariable): Option[(EVariable, Expr)] = expr match {
+      case CaseClassSelector(cct, r, _) => subExprsOf(r, v)
+      case (r: EVariable) if leastUpperBound(r.getType, v.getType).isDefined => Some(r -> v)
+      case _ => None
+    }
+
+    val z   = IntegerLiteral(0)
+    val one = IntegerLiteral(1)
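+    // Expressions known to be strictly "smaller" than a given variable: fields selected
+    // from it in the path-condition bindings, and v - 1 (resp. v + 1) for an integer
+    // variable v known to be strictly positive (resp. strictly negative).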
+    val knownSmallers = (pc.bindings.flatMap {
+      // @nv: used to check both Equals(id, selector) and Equals(selector, id)
+      case (id, s @ CaseClassSelector(cct, r, _)) => subExprsOf(s, id.toVariable)
+      case _ => None
+    } ++ pc.conditions.flatMap {
+      case GreaterThan(v: EVariable, `z`) =>
+        Some(v -> EMinus(v, one))
+      case LessThan(`z`, v: EVariable) =>
+        Some(v -> EMinus(v, one))
+      case LessThan(v: EVariable, `z`) =>
+        Some(v -> EPlus(v, one))
+      case GreaterThan(`z`, v: EVariable) =>
+        Some(v -> EPlus(v, one))
+      case _ => None
+    }).groupBy(_._1).mapValues(v => v.map(_._2))
+
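+    // Sub-expressions of e conforming to tpe that are structurally smaller than e:
+    // case-class arguments (recursively) and the known-smaller substitutes of a variable.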
+    def argsSmaller(e: Expr, tpe: Type): Seq[Expr] = e match {
+      case CaseClass(cct, args) =>
+        (cct.tcd.asInstanceOf[TypedCaseClassDef].fields.map(_.getType) zip args).collect {
+          case (t, e) if isSubtypeOf(t, tpe) =>
+            List(e) ++ argsSmaller(e, tpe) 
+        }.flatten
+      case v: EVariable =>
+        knownSmallers.getOrElse(v, Seq())
+      case _ => Nil
+    }
+
+    val res = wss.flatMap {
+      case FunctionInvocation(fid, tps, args) =>
+        val resFun = getFunction(fid)
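+        // Keep only calls whose return type can conform to the expected type, if any.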
+        if (!(tpe forall (isSubtypeOf(resFun.returnType, _)))) Nil else {
+          val ids = resFun.params.map(vd => EVariable(FreshIdentifier("<hole>", true), vd.getType)).toList
+
+          for (((a, i), tpe) <- args.zipWithIndex zip resFun.params.map(_.getType);
+                smaller <- argsSmaller(a, tpe)) yield {
+            val newArgs = (if (introduceHoles) ids else args).updated(i, smaller)
+            val newFd = FunctionInvocation(fid, tps, newArgs)
+            val freeIds = if(introduceHoles) Some((ids.toSet - ids(i)).map(_.id)) else None
+            (newFd, freeIds)
+          }
+        }
+    }
+
+    res.toList
+  }
+
+
+  /**
+   * All functions we can use in synthesis, which includes:
+   *  - all functions in main units
+   *  - all functions imported, or methods of classes imported
+   */
+  def functionsAvailable(p: Program): Set[FunDef] // = {
+  //  visibleFunDefsFromMain(p)
+  //}
+
+
+}