diff --git a/src/main/scala/leon/invariant/engine/InferenceContext.scala b/src/main/scala/leon/invariant/engine/InferenceContext.scala
index 7400aeb67dc7368a38d7d42893abdec56a4fc2f5..d94c46fd637ae23561c848c3a182aa0420767d56 100644
--- a/src/main/scala/leon/invariant/engine/InferenceContext.scala
+++ b/src/main/scala/leon/invariant/engine/InferenceContext.scala
@@ -24,7 +24,7 @@ class InferenceContext(val initProgram: Program, val leonContext: LeonContext) {
   // get  options from ctx or initialize them to default values
   // the following options are enabled by default
   val targettedUnroll = !(leonContext.findOption(optFunctionUnroll).getOrElse(false))
-  val autoInference = leonContext.findOption(optDisableInfer).getOrElse(true)
+  val autoInference = !(leonContext.findOption(optDisableInfer).getOrElse(false))
   val assumepre = leonContext.findOption(optAssumePre).getOrElse(false)
 
   // the following options are disabled by default
diff --git a/src/main/scala/leon/invariant/engine/InferenceEngine.scala b/src/main/scala/leon/invariant/engine/InferenceEngine.scala
index 5b0dde4e6365f895add8c217815dcd64dad1b7d4..e42a545babaecf90854104fd6a0626ef559264b1 100644
--- a/src/main/scala/leon/invariant/engine/InferenceEngine.scala
+++ b/src/main/scala/leon/invariant/engine/InferenceEngine.scala
@@ -1,6 +1,7 @@
 package leon
 package invariant.engine
 
+import purescala._
 import purescala.Definitions._
 import purescala.ExprOps._
 import java.io._
@@ -21,6 +22,8 @@ import ProgramUtil._
  */
 class InferenceEngine(ctx: InferenceContext) extends Interruptible {
 
+  val debugBottomupIterations = false
+
   val ti = new TimeoutFor(this)
 
   def interrupt() = {
@@ -122,8 +125,8 @@ class InferenceEngine(ctx: InferenceContext) extends Interruptible {
    * we an get rid of dependence on origFd in many places.
    */
   def analyseProgram(startProg: Program, functionsToAnalyze: Seq[FunDef],
-      vcSolver: (FunDef, Program) => FunctionTemplateSolver,
-      progressCallback: Option[InferenceCondition => Unit]): Map[FunDef, InferenceCondition] = {
+                     vcSolver: (FunDef, Program) => FunctionTemplateSolver,
+                     progressCallback: Option[InferenceCondition => Unit]): Map[FunDef, InferenceCondition] = {
     val reporter = ctx.reporter
     val funToTmpl =
       if (ctx.autoInference) {
@@ -143,6 +146,12 @@ class InferenceEngine(ctx: InferenceContext) extends Interruptible {
         (fd.annotations contains "theoryop")
     }).foldLeft(progWithTemplates) { (prog, origFun) =>
 
+      if (debugBottomupIterations) {
+        println("Current Program: \n" +
+          ScalaPrinter.apply(prog, purescala.PrinterOptions(printUniqueIds = true)))
+        scala.io.StdIn.readLine()
+      }
+
       if (ctx.abort) {
         reporter.info("- Aborting analysis of " + origFun.id.name)
         val ic = new InferenceCondition(Seq(), origFun)
@@ -165,49 +174,64 @@ class InferenceEngine(ctx: InferenceContext) extends Interruptible {
             val funcTime = (System.currentTimeMillis() - t1) / 1000.0
             infRes match {
               case Some(InferResult(true, model, inferredFuns)) =>
-                // create a inference condition for reporting
-                var first = true
-                inferredFuns.foreach { fd =>
-                  val origFd = functionByName(fd.id.name, startProg).get
-                  val inv = if (origFd.hasTemplate) {
-                    TemplateInstantiator.getAllInvariants(model.get,
-                      Map(origFd -> origFd.getTemplate), prettyInv = true)(origFd)
-                  } else {
-                    val currentInv = TemplateInstantiator.getAllInvariants(model.get,
-                      Map(fd -> fd.getTemplate), prettyInv = true)(fd)
-                    // map result variable  in currentInv
-                    val repInv = replace(Map(getResId(fd).get.toVariable -> getResId(origFd).get.toVariable), currentInv)
-                    translateExprToProgram(repInv, prog, startProg)
-                  }
-                  // record the inferred invariants
-                  val inferCond = if (analyzedSet.contains(origFd)) {
-                    val ic = analyzedSet(origFd)
-                    ic.addInv(Seq(inv))
-                    ic
-                  } else {
-                    val ic = new InferenceCondition(Seq(inv), origFd)
-                    ic.time = if (first) Some(funcTime) else Some(0.0)
-                    // update analyzed set
-                    analyzedSet += (origFd -> ic)
-                    first = false
-                    ic
-                  }
-                  progressCallback.foreach(cb => cb(inferCond))
-                }
+                val origFds = inferredFuns.map { fd =>
+                  (fd -> functionByName(fd.id.name, startProg).get)
+                }.toMap
+                // find functions in the source that have a user-defined template,
+                // were solved in this step, and were not previously solved
                 val funsWithTemplates = inferredFuns.filter { fd =>
-                  val origFd = functionByName(fd.id.name, startProg).get
+                  val origFd = origFds(fd)
                   !analyzedSet.contains(origFd) && origFd.hasTemplate
                 }
+                // now the templates of these functions will be replaced by inferred invariants
                 val invs = TemplateInstantiator.getAllInvariants(model.get,
                   funsWithTemplates.collect {
                     case fd if fd.hasTemplate => fd -> fd.getTemplate
                   }.toMap)
-                // create a new program that has the inferred templates
+                // collect templates of remaining functions
                 val funToTmpl = prog.definedFunctions.collect {
                   case fd if !invs.contains(fd) && fd.hasTemplate =>
                     fd -> fd.getTemplate
                 }.toMap
-                assignTemplateAndCojoinPost(funToTmpl, prog, invs)
+                /*println("Inferred Funs: " + inferredFuns)
+                println("inv map: " + invs)
+                println("Templ map: " + funToTmpl)*/
+                val nextProg = assignTemplateAndCojoinPost(funToTmpl, prog, invs)
+
+                // create an inference condition for reporting
+                var first = true
+                inferredFuns.foreach { fd =>
+                  val origFd = origFds(fd)
+                  val invOpt = if (funsWithTemplates.contains(fd)) {
+                    Some(TemplateInstantiator.getAllInvariants(model.get,
+                      Map(origFd -> origFd.getTemplate), prettyInv = true)(origFd))
+                  } else if (fd.hasTemplate) {
+                    val currentInv = TemplateInstantiator.getAllInvariants(model.get,
+                      Map(fd -> fd.getTemplate), prettyInv = true)(fd)
+                    // map the result variable in currentInv
+                    val repInv = replace(Map(getResId(fd).get.toVariable -> getResId(origFd).get.toVariable), currentInv)
+                    Some(translateExprToProgram(repInv, prog, startProg))
+                  } else None
+                  invOpt match {
+                    case Some(inv) =>
+                      // record the inferred invariants
+                      val inferCond = if (analyzedSet.contains(origFd)) {
+                        val ic = analyzedSet(origFd)
+                        ic.addInv(Seq(inv))
+                        ic
+                      } else {
+                        val ic = new InferenceCondition(Seq(inv), origFd)
+                        ic.time = if (first) Some(funcTime) else Some(0.0)
+                        // update analyzed set
+                        analyzedSet += (origFd -> ic)
+                        first = false
+                        ic
+                      }
+                      progressCallback.foreach(cb => cb(inferCond))
+                    case _ =>
+                  }
+                }
+                nextProg
 
               case _ =>
                 reporter.info("- Exhausted all templates, cannot infer invariants")
diff --git a/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala b/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
index b1112a36da5a4e9ee6e3622c2a107ab70f1f9b9e..7f60be3ae639a29a051ec501e13a4d96e26b3174 100644
--- a/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
+++ b/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
@@ -60,7 +60,7 @@ class NLTemplateSolver(ctx: InferenceContext, program: Program,
   private val startFromEarlierModel = true
   private val disableCegis = true
   private val useIncrementalSolvingForVCs = true
-  private val useCVCToCheckVCs = false
+  private val useCVCToCheckVCs = true
 
   //this is private mutable state used by initialized during every call to 'solve' and used by 'solveUNSAT'
   protected var funcVCs = Map[FunDef, Expr]()
diff --git a/src/main/scala/leon/invariant/util/TreeUtil.scala b/src/main/scala/leon/invariant/util/TreeUtil.scala
index c9891dc5050e61b5deb7dbbcbc3d5a071577a0d7..e73d4871b07a59c0f0be0c7a53b4c6be9c9bf7f5 100644
--- a/src/main/scala/leon/invariant/util/TreeUtil.scala
+++ b/src/main/scala/leon/invariant/util/TreeUtil.scala
@@ -132,7 +132,6 @@ object ProgramUtil {
       }
     }
 
-    // FIXME: This with createAnd (which performs simplifications) gives an error during composition.
     val mapExpr = mapFunctionsInExpr(funMap) _
     for ((from, to) <- funMap) {
       to.fullBody = if (!funToTmpl.contains(from)) {
@@ -286,7 +285,7 @@ object PredicateUtil {
     case base =>
       (e => e, base)
   }
-  
+
   /**
    * Checks if the input expression has only template variables as free variables
    */
diff --git a/testcases/lazy-datastructures/RealTimeQueue.scala b/testcases/lazy-datastructures/RealTimeQueue.scala
index 6145bb5c85813e72bdcd8f9617658be1bd60bc81..0c4c3a6db5a361212d15ab8a6fdbc80f59de4932 100644
--- a/testcases/lazy-datastructures/RealTimeQueue.scala
+++ b/testcases/lazy-datastructures/RealTimeQueue.scala
@@ -42,9 +42,7 @@ object RealTimeQueue {
     })
   }
 
-  def reverse
-
-   @invstate
+  @invstate
   def rotate[T](f: $[Stream[T]], r: List[T], a: $[Stream[T]]): Stream[T] = { // doesn't change state
     require(r.size == ssize(f) + 1 && isConcrete(f))
     (f.value, r) match {
diff --git a/testcases/lazy-datastructures/withOrb/BottomUpMegeSort.scala b/testcases/lazy-datastructures/withOrb/BottomUpMegeSort.scala
new file mode 100644
index 0000000000000000000000000000000000000000..bf9387fd80f40f8c3a9605a521f1a986d4d02515
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/BottomUpMegeSort.scala
@@ -0,0 +1,179 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.$._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+/**
+ * TODO: Multiple instantiations of type parameters are not supported yet,
+ * since they require the creation of multiple states, one for each instantiation.
+ */
+
+/**
+ * A version of merge sort that operates bottom-up. That allows
+ * accessing the first element in the sorted list in constant time.
+ */
+object BottomUpMergeSort {
+
+  /**
+   * A list of integers that have to be sorted
+   */
+  sealed abstract class IList {
+    def size: BigInt = {
+      this match {
+        case ICons(_, xs) => 1 + xs.size
+        case _            => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class ICons(x: BigInt, tail: IList) extends IList
+  case class INil() extends IList
+
+  /**
+   * A stream of integers (the tail is lazy)
+   */
+  sealed abstract class IStream {
+    def size: BigInt = {
+      this match {
+        case SCons(_, xs) => 1 + ssize(xs)
+        case _            => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons(x: BigInt, tail: $[IStream]) extends IStream
+  case class SNil() extends IStream
+  def ssize(l: $[IStream]): BigInt = (l*).size
+
+  /**
+   * A list of suspensions
+   */
+  sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case LNil()      => BigInt(0)
+        case LCons(_, t) => 1 + t.size
+      }
+    } ensuring (_ >= 0)
+
+    def valid: Boolean = {
+      this match {
+        case LNil()      => true
+        case LCons(l, t) => ssize(l) > 0 && t.valid
+      }
+    }
+
+    def fullSize: BigInt = {
+      this match {
+        case LNil()      => BigInt(0)
+        case LCons(l, t) => ssize(l) + t.fullSize
+      }
+    } ensuring (_ >= 0)
+  }
+  case class LCons(x: $[IStream], tail: LList) extends LList
+  case class LNil() extends LList
+
+  /**
+   * A function that, given a list of (lazy) sorted lists,
+   * groups them into pairs and lazily invokes the 'merge' function
+   * on each pair.
+   * Takes time linear in the size of the input list.
+   */
+  def pairs(l: LList): LList = {
+    require(l.valid)
+    l match {
+      case LNil()           => LNil()
+      case LCons(_, LNil()) => l
+      case LCons(l1, LCons(l2, rest)) =>
+        LCons($(merge(l1, l2)), pairs(rest))
+    }
+  } ensuring (res => res.size <= (l.size + 1) / 2 &&
+    l.fullSize == res.fullSize &&
+    res.valid &&
+    time <= ? * l.size + ?)
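+
+  // A small usage sketch (hypothetical helper, not part of the verified benchmark):
+  // `pairs` halves the number of (lazy) lists while preserving the total number of
+  // elements, e.g. four singleton streams become two suspended merges.
+  def pairsExample(): LList = {
+    val s = IListToLList(ICons(BigInt(3), ICons(BigInt(1),
+      ICons(BigInt(4), ICons(BigInt(2), INil())))))
+    pairs(s) // LCons($(merge(_, _)), LCons($(merge(_, _)), LNil())): length halved
+  }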
+
+  /**
+   * Create a linearized tree of merges e.g. merge(merge(2, 1), merge(17, 19)).
+   * Takes time linear in the size of the input list.
+   */
+  def constructMergeTree(l: LList): LList = {
+    require(l.valid)
+    l match {
+      case LNil()           => LNil()
+      case LCons(_, LNil()) => l
+      case _ =>
+        constructMergeTree(pairs(l))
+    }
+  } ensuring (res => res.size <= 1 && res.fullSize == l.fullSize &&
+    (res match {
+      case LCons(il, LNil()) =>
+        res.fullSize == ssize(il) // this is implied by the previous conditions
+      case _ => true
+    }) &&
+    res.valid &&
+    time <= ? * l.size + ?)
+
+  /**
+   *  A function that merges two sorted streams of integers.
+   *  Note: the sorted stream of integers may be recursively constructed using merge.
+   *  Takes time linear in the size of the streams (non-trivial to prove due to cascading of lazy calls)
+   */
+  def merge(a: $[IStream], b: $[IStream]): IStream = {
+    require(((a*) != SNil() || b.isEvaluated) && // if one of the arguments is Nil then the other is evaluated
+        ((b*) != SNil() || a.isEvaluated) &&
+        ((a*) != SNil() || (b*) != SNil())) // at least one of the arguments is not Nil
+    b.value match {
+      case SNil() => a.value
+      case bl @ SCons(x, xs) =>
+        a.value match {
+          case SNil() => bl
+          case SCons(y, ys) =>
+            if (y < x)
+              SCons(y, $(merge(ys, b)))
+            else
+              SCons(x, $(merge(a, xs)))
+        }
+    }
+  } ensuring (res => ssize(a) + ssize(b) == res.size &&
+       time <= ? * res.size + ?) // note: res.size >= 1; here, the stack usage is the max over 'a' and 'b'
+
+  /**
+   * Converts a list of integers to a list of streams of integers
+   */
+  def IListToLList(l: IList): LList = {
+    l match {
+      case INil() => LNil()
+      case ICons(x, xs) =>
+        LCons(SCons(x, SNil()), IListToLList(xs))
+    }
+  } ensuring (res => res.fullSize == l.size &&
+    res.size == l.size &&
+    res.valid &&
+    time <= ? * l.size + ?)
+
+  /**
+   * Takes a list of integers and returns a sorted stream of integers.
+   * Takes time linear in the size of the input since it sorts lazily.
+   */
+  def mergeSort(l: IList): IStream = {
+    l match {
+      case INil() => SNil()
+      case _ =>
+        constructMergeTree(IListToLList(l)) match {
+          case LCons(r, LNil()) => r.value
+        }
+    }
+  } ensuring (res => time <= ? * l.size + ?)
+
+  /**
+   * A function that accesses the first element of a list using lazy sorting.
+   */
+  def firstMin(l: IList): BigInt = {
+    require(l != INil())
+    mergeSort(l) match {
+      case SCons(x, rest) => x
+    }
+  } ensuring (res => time <= ? * l.size + ?)
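+
+  // A small usage sketch (hypothetical helper, not part of the verified benchmark):
+  // the lazy sort lets us read off the minimum of a list without fully sorting it.
+  def firstMinExample(): BigInt = {
+    firstMin(ICons(BigInt(3), ICons(BigInt(1), ICons(BigInt(2), INil())))) // returns 1
+  }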
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/Concat.scala b/testcases/lazy-datastructures/withOrb/Concat.scala
similarity index 85%
rename from testcases/lazy-datastructures/ManualnOutdated/Concat.scala
rename to testcases/lazy-datastructures/withOrb/Concat.scala
index 8a881fc0177518b673ee1ce62a4bdecc338d5fb3..3b961a1bb22b708a8ee738c8676e43452158dd92 100644
--- a/testcases/lazy-datastructures/ManualnOutdated/Concat.scala
+++ b/testcases/lazy-datastructures/withOrb/Concat.scala
@@ -1,10 +1,13 @@
+package withOrb
+
 import leon.lazyeval._
 import leon.lang._
 import leon.annotation._
 import leon.instrumentation._
 import leon.collection._
 import leon.invariant._
-//import leon.invariant._
+import scala.BigInt
+import scala.math.BigInt.int2bigInt
 
 object Concat {
   sealed abstract class LList[T] {
@@ -24,7 +27,7 @@ object Concat {
       case Cons(x, xs) => SCons(x, $(concat(xs, l2)))
       case Nil() => SNil[T]()
     }
-  } ensuring(_ => time <= 20)
+  } ensuring(_ => time <= ?)
 
   def kthElem[T](l: $[LList[T]], k: BigInt): Option[T] = {
     require(k >= 1)
@@ -35,10 +38,10 @@ object Concat {
           kthElem(xs, k - 1)
       case SNil() => None[T]
     }
-  } //ensuring (_ => time <= ? * k)
+  } ensuring (_ => time <= ? * k)
 
   def concatnSelect[T](l1: List[T], l2: List[T], k: BigInt) : Option[T] = {
     require(k >= 1)
     kthElem($(concat(l1, l2)), k)
-  } //ensuring (_ => time <= ? * k)
+  } ensuring (_ => time <= ? * k)
 }
diff --git a/testcases/lazy-datastructures/withOrb/Deque.scala b/testcases/lazy-datastructures/withOrb/Deque.scala
new file mode 100644
index 0000000000000000000000000000000000000000..870eed380bc729186c5eeafd718cd4938f16c2d7
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/Deque.scala
@@ -0,0 +1,348 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.$._
+import leon.lang._
+import leon.annotation._
+import leon.collection._
+import leon.instrumentation._
+import leon.math._
+import leon.invariant._
+
+/**
+ * A constant-time deque based on Okasaki's implementation: Fig. 8.4, Pg. 112.
+ * Here, both front and rear streams are scheduled.
+ * We require both the front and the rear streams to be of almost equal
+ * size. If not, we lazily rotate the streams.
+ * The invariants are a lot more complex than in `RealTimeQueue`.
+ * The program also fixes a bug in Okasaki's implementation: see function `rotateDrop`.
+ */
+object RealTimeDeque {
+
+  sealed abstract class Stream[T] {
+    @inline
+    def isEmpty: Boolean = {
+      this match {
+        case SNil() => true
+        case _      => false
+      }
+    }
+
+    @inline
+    def isCons: Boolean = {
+      this match {
+        case SCons(_, _) => true
+        case _           => false
+      }
+    }
+
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + (t*).size
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons[T](x: T, tail: $[Stream[T]]) extends Stream[T]
+  case class SNil[T]() extends Stream[T]
+
+  @inline
+  def ssize[T](l: $[Stream[T]]): BigInt = (l*).size
+
+  def isConcrete[T](l: $[Stream[T]]): Boolean = {
+    l.isEvaluated && (l* match {
+      case SCons(_, tail) =>
+        isConcrete(tail)
+      case _ => true
+    })
+  }
+
+  @invstate
+  def revAppend[T](l1: $[Stream[T]], l2: $[Stream[T]]): $[Stream[T]] = {
+    require(isConcrete(l1) && isConcrete(l2))
+    l1.value match {
+      case SNil() => l2
+      case SCons(x, tail) =>
+        val nt: $[Stream[T]] = SCons[T](x, l2)
+        revAppend(tail, nt)
+    }
+  } ensuring(res => ssize(res) == ssize(l1) + ssize(l2) &&
+      isConcrete(res) &&
+      (ssize(l1) >= 1 ==> (res*).isCons) &&
+      time <= ? * ssize(l1) + ?)
+
+  @invstate
+  def drop[T](n: BigInt, l: $[Stream[T]]): $[Stream[T]] = {
+    require(n >= 0 && isConcrete(l) && ssize(l) >= n)
+    if (n == 0) {
+      l
+    } else {
+      l.value match {
+        case SNil()         => l
+        case SCons(x, tail) => drop(n - 1, tail)
+      }
+    }
+  } ensuring(res => isConcrete(res) &&
+      ssize(res) == ssize(l) - n &&
+      time <= ? * n + ?)
+
+  @invstate
+  def take[T](n: BigInt, l: $[Stream[T]]): $[Stream[T]] = {
+    require(n >= 0 && isConcrete(l) && ssize(l) >= n)
+    val r: $[Stream[T]] =
+      if (n == 0) {
+        SNil[T]()
+      } else {
+        l.value match {
+          case SNil() => l
+          case SCons(x, tail) =>
+            SCons[T](x, take(n - 1, tail))
+        }
+      }
+    r
+  } ensuring(res => isConcrete(res) &&
+      ssize(res) == n &&
+      time <= ? * n + ?)
+
+  @invstate
+  def takeLazy[T](n: BigInt, l: $[Stream[T]]): Stream[T] = {
+    require(isConcrete(l) && n >= 1 && ssize(l) >= n)
+    l.value match {
+      case SCons(x, tail) =>
+        if (n == 1)
+          SCons[T](x, SNil[T]())
+        else
+          SCons[T](x, $(takeLazy(n - 1, tail)))
+    }
+  } ensuring(res => res.size == n && res.isCons &&
+      time <= ?)
+
+  @invstate
+  def rotateRev[T](r: $[Stream[T]], f: $[Stream[T]], a: $[Stream[T]]): Stream[T] = {
+    require(isConcrete(r) && isConcrete(f) && isConcrete(a) &&
+      {
+        val lenf = ssize(f)
+        val lenr = ssize(r)
+        (lenf <= 2 * lenr + 3 && lenf >= 2 * lenr + 1)
+      })
+    r.value match {
+      case SNil() => revAppend(f, a).value // |f| <= 3
+      case SCons(x, rt) =>
+        SCons(x, $(rotateRev(rt, drop(2, f), revAppend(take(2, f), a))))
+    }  // here, it doesn't matter whether 'f' has i elements or not, what we want is |drop(2,f)| + |take(2,f)| == |f|
+  } ensuring (res => res.size == (r*).size + (f*).size + (a*).size &&
+      res.isCons &&
+      time <= ?)
+
+  @invstate
+  def rotateDrop[T](r: $[Stream[T]], i: BigInt, f: $[Stream[T]]): Stream[T] = {
+    require(isConcrete(r) && isConcrete(f) && i >= 0 && {
+      val lenf = ssize(f)
+      val lenr = ssize(r)
+      (lenf >= 2 * lenr + 2 && lenf <= 2 * lenr + 3) && // size invariant between 'f' and 'r'
+      lenf > i
+    })
+    val rval = r.value
+    if(i < 2 || rval == SNil[T]()) { // A bug in Okasaki's implementation: we must also check that 'rval == SNil()'
+      val a: $[Stream[T]] = SNil[T]()
+      rotateRev(r, drop(i, f), a)
+    } else {
+      rval match {
+        case SCons(x, rt) =>
+          SCons(x, $(rotateDrop(rt, i - 2, drop(2, f))))
+      }
+    }
+  } ensuring(res => res.size == (r*).size + (f*).size - i &&
+      res.isCons && time <= ?)
+
+  def firstUneval[T](l: $[Stream[T]]): $[Stream[T]] = {
+    if (l.isEvaluated) {
+      l* match {
+        case SCons(_, tail) =>
+          firstUneval(tail)
+        case _ => l
+      }
+    } else
+      l
+  } ensuring (res => (!(res*).isEmpty || isConcrete(l)) &&
+    ((res*).isEmpty || !res.isEvaluated) && // if the return value is not a Nil closure then it would not have been evaluated
+    (res.value match {
+      case SCons(_, tail) =>
+        firstUneval(l) == firstUneval(tail) // after evaluating the firstUneval closure in 'l' we can access the next unevaluated closure
+      case _ => true
+    }))
+
+  case class Queue[T](f: $[Stream[T]], lenf: BigInt, sf: $[Stream[T]],
+      r: $[Stream[T]], lenr: BigInt, sr: $[Stream[T]]) {
+    def isEmpty = lenf + lenr == 0
+    def valid = {
+      (firstUneval(f) == firstUneval(sf)) &&
+        (firstUneval(r) == firstUneval(sr)) &&
+        (lenf == ssize(f) && lenr == ssize(r)) &&
+        (lenf <= 2*lenr + 1 && lenr <= 2*lenf + 1) &&
+        {
+          val mind = min(2*lenr-lenf+2, 2*lenf-lenr+2)
+          ssize(sf) <= mind && ssize(sr) <= mind
+        }
+    }
+  }
+
+  /**
+   * A function that takes streams whose front and rear sizes violate
+   * the balance invariant, and restores the balance.
+   */
+  def createQueue[T](f: $[Stream[T]], lenf: BigInt, sf: $[Stream[T]],
+      r: $[Stream[T]], lenr: BigInt, sr: $[Stream[T]]): Queue[T] = {
+    require(firstUneval(f) == firstUneval(sf) &&
+        firstUneval(r) == firstUneval(sr) &&
+        (lenf == ssize(f) && lenr == ssize(r)) &&
+        ((lenf - 1 <= 2*lenr + 1 && lenr <= 2*lenf + 1) ||
+          (lenf <= 2*lenr + 1 && lenr - 2 <= 2*lenf + 1)) &&
+        {
+          val mind = max(min(2*lenr-lenf+2, 2*lenf-lenr+2), 0)
+          ssize(sf) <= mind && ssize(sr) <= mind
+        })
+    if(lenf > 2*lenr + 1) {
+      val i = (lenf + lenr) / 2
+      val j = lenf + lenr - i
+      val nr = rotateDrop(r, i, f)
+      val nf = takeLazy(i, f)
+      Queue(nf, i, nf, nr, j, nr)
+    } else if(lenr > 2*lenf + 1) {
+      val i = (lenf + lenr) / 2
+      val j = lenf + lenr - i
+      val nf = rotateDrop(f, j, r) // here, both 'r' and 'f' are concretized
+      val nr = takeLazy(j, r)
+      Queue(nf, i, nf, nr, j, nr)
+    } else
+      Queue(f, lenf, sf, r, lenr, sr)
+  } ensuring(res => res.valid &&
+      time <= ?)
+
+  /**
+   * Forces the schedules, and ensures that `firstUneval` equality is preserved
+   */
+  def force[T](tar: $[Stream[T]], htar: $[Stream[T]], other: $[Stream[T]], hother: $[Stream[T]]): $[Stream[T]] = {
+    require(firstUneval(tar) == firstUneval(htar) &&
+      firstUneval(other) == firstUneval(hother))
+    tar.value match {
+      case SCons(_, tail) => tail
+      case _              => tar
+    }
+  } ensuring (res => {
+    //lemma instantiations
+    val in = $.inState[Stream[T]]
+    val out = $.outState[Stream[T]]
+    funeMonotone(tar, htar, in, out) &&
+      funeMonotone(other, hother, in, out) && {
+      //properties
+        val rsize = ssize(res)
+          firstUneval(htar) == firstUneval(res) && // follows from post of fune
+            firstUneval(other) == firstUneval(hother) &&
+            (rsize == 0 || rsize == ssize(tar) - 1)
+      } && time <= ?
+  })
+
+  /**
+   * Forces the schedules in the queue twice and ensures the `firstUneval` property.
+   */
+  def forceTwice[T](q: Queue[T]): ($[Stream[T]], $[Stream[T]]) = {
+    require(q.valid)
+    val nsf = force(force(q.sf, q.f, q.r, q.sr), q.f, q.r, q.sr) // forces q.sf twice
+    val nsr = force(force(q.sr, q.r, q.f, nsf), q.r, q.f, nsf) // forces q.sr twice
+    (nsf, nsr)
+  }
+  // the following properties are ensured, but need not be stated
+  /*ensuring (res => {
+    val nsf = res._1
+    val nsr = res._2
+    firstUneval(q.f) == firstUneval(nsf) &&
+      firstUneval(q.r) == firstUneval(nsr) &&
+      (ssize(nsf) == 0 || ssize(nsf) == ssize(q.sf) - 2) &&
+      (ssize(nsr) == 0 || ssize(nsr) == ssize(q.sr) - 2) &&
+      time <= 1500
+  })*/
+
+  def empty[T] = {
+    val nil: $[Stream[T]] = SNil[T]()
+    Queue(nil, 0, nil, nil, 0, nil)
+  }
+
+  /**
+   * Adding an element to the front of the list
+   */
+  def cons[T](x: T, q: Queue[T]): Queue[T] = {
+    require(q.valid)
+    val nf: Stream[T] = SCons[T](x, q.f)
+    // force the front and rear scheds once
+    val nsf = force(q.sf, q.f, q.r, q.sr)
+    val nsr = force(q.sr, q.r, q.f, nsf)
+    createQueue(nf, q.lenf + 1, nsf, q.r, q.lenr, nsr)
+  } ensuring (res => res.valid && time <= ?)
+
+  /**
+   * Removing the element at the front, and returning the tail
+   */
+  def tail[T](q: Queue[T]): Queue[T] = {
+    require(!q.isEmpty && q.valid)
+    force(q.f, q.sf, q.r, q.sr) match { // force 'f'
+      case _ =>
+        tailSub(q)
+    }
+  } ensuring(res => res.valid && time <= ?)
+
+  def tailSub[T](q: Queue[T]): Queue[T] = {
+    require(!q.isEmpty && q.valid && q.f.isEvaluated)
+    q.f.value match {
+      case SCons(x, nf) =>
+        val (nsf, nsr) = forceTwice(q)
+        // here, sf and sr have shrunk by 2, so the schedule invariant still holds
+        createQueue(nf, q.lenf - 1, nsf, q.r, q.lenr, nsr)
+      case SNil() =>
+        // in this case, 'r' has only one element by the invariant
+        empty[T]
+    }
+  } ensuring(res => res.valid && time <= ?)
+
+  /**
+   * Reversing a list. Takes constant time.
+   * This implies that data structure is a `deque`.
+   */
+  def reverse[T](q: Queue[T]): Queue[T] = {
+    require(q.valid)
+    Queue(q.r, q.lenr, q.sr, q.f, q.lenf, q.sf)
+  } ensuring(res => res.valid && time <= ?)
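+
+  // A small usage sketch (hypothetical helper, illustrative only and possibly in
+  // need of extra hints to verify): every operation below, including `reverse`,
+  // runs in constant time by the bounds above.
+  def exampleOps[T](x: T, y: T): Queue[T] = {
+    val q = cons(y, cons(x, empty[T])) // y is at the front, x at the back
+    tail(reverse(q))                   // drop x, which is now at the front
+  }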
+
+  // Properties of `firstUneval`. We use `fune` as a shorthand for `firstUneval`.
+  /**
+   * st1.subsetOf(st2) ==> fune(l, st2) == fune(fune(l, st1), st2)
+   */
+  @traceInduct
+  def funeCompose[T](l1: $[Stream[T]], st1: Set[$[Stream[T]]], st2: Set[$[Stream[T]]]): Boolean = {
+    require(st1.subsetOf(st2))
+    // property
+    (firstUneval(l1) withState st2) == (firstUneval(firstUneval(l1) withState st1) withState st2)
+  } holds
+
+  /**
+   * st1.subsetOf(st2) && fune(la,st1) == fune(lb,st1) ==> fune(la,st2) == fune(lb,st2)
+   * The `fune` equality is preserved by evaluation of lazy closures.
+   * This is a kind of frame axiom for `fune` but is slightly different in that
+   * it doesn't require (st2 \ st1) to be disjoint from la and lb.
+   */
+  def funeMonotone[T](l1: $[Stream[T]], l2: $[Stream[T]], st1: Set[$[Stream[T]]], st2: Set[$[Stream[T]]]): Boolean = {
+    require((firstUneval(l1) withState st1) == (firstUneval(l2) withState st1) &&
+        st1.subsetOf(st2))
+     funeCompose(l1, st1, st2) && // lemma instantiations
+     funeCompose(l2, st1, st2) &&
+     // induction scheme
+    (if (l1.isEvaluated withState st1) {
+      l1* match {
+        case SCons(_, tail) =>
+          funeMonotone(tail, l2, st1, st2)
+        case _ => true
+      }
+    } else true) &&
+      (firstUneval(l1) withState st2) == (firstUneval(l2) withState st2) // property
+  } holds
+}
diff --git a/testcases/lazy-datastructures/withOrb/Esop15Benchmarks.scala b/testcases/lazy-datastructures/withOrb/Esop15Benchmarks.scala
new file mode 100644
index 0000000000000000000000000000000000000000..ee381e608d9301725586dffe5c20e391920583bb
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/Esop15Benchmarks.scala
@@ -0,0 +1,58 @@
+package lazybenchmarks
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.lazyeval.$._
+import leon.collection._
+import leon.invariant._
+
+/**
+ * This file collects the programs from the ESOP 2015 paper.
+ * Note that this benchmark is very good for finding counter-examples:
+ * decreasing the time bounds slightly will produce counter-examples.
+ */
+object Esop15Benchmarks {
+  sealed abstract class IStream
+  case class SCons(x: BigInt, tail: $[IStream]) extends IStream
+  case class SNil() extends IStream
+
+  sealed abstract class StreamPairs
+  case class PCons(pair: (BigInt, BigInt), tail: $[StreamPairs]) extends StreamPairs
+  case class PNil() extends StreamPairs
+
+  def zipWith(xs: $[IStream], ys: $[IStream]): StreamPairs = {
+    (xs.value, ys.value) match {
+      case (SCons(x, xtail), SCons(y, ytail)) =>
+        PCons((x, y), $(zipWith(xtail, ytail)))
+      case _ =>
+        PNil()
+    }
+  } ensuring(_ => time <= ?)
+
+  def nextFib(x: BigInt, y: BigInt, n: BigInt): IStream = {
+    if (n <= 0)
+      SNil()
+    else {
+      val next = x + y
+      SCons(next, $(nextFib(y, next, n - 1)))
+    }
+  } ensuring(_ => time <= ?)
+
+  def fibStream(n: BigInt) : IStream = {
+    SCons(0, SCons(1, $(nextFib(0, 1, n))))
+  }
+
+  def nthFib(n: BigInt, fs: $[IStream]): BigInt = {
+    require(n >= 0)
+    fs.value match {
+      case SCons(x, tail) =>
+        if (n == 0)
+          x
+        else
+          nthFib(n - 1, tail)
+      case SNil() => BigInt(-1)
+    }
+  } ensuring(_ => time <= ? * n + ?) // you get a counter-example for 20*n + 20
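+
+  // A small usage sketch (hypothetical helper, illustrative only): reading the
+  // n-th Fibonacci number off the lazily constructed stream is linear in n.
+  def nthFibExample(n: BigInt): BigInt = {
+    require(n >= 0)
+    nthFib(n, $(fibStream(n)))
+  }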
+}
\ No newline at end of file
diff --git a/testcases/lazy-datastructures/withOrb/FibonacciMemoized.scala b/testcases/lazy-datastructures/withOrb/FibonacciMemoized.scala
new file mode 100644
index 0000000000000000000000000000000000000000..89ce222e49424eb2f609a15952801f6562a447be
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/FibonacciMemoized.scala
@@ -0,0 +1,25 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.Mem._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+object FibMem {
+  sealed abstract class IList
+  case class Cons(x: BigInt, tail: IList) extends IList
+  case class Nil() extends IList
+
+  @memoize
+  def fibRec(n: BigInt): BigInt = {
+    require(n >= 0)
+        if(n <= 2)
+          BigInt(1)
+        else
+          fibRec(n-1) + fibRec(n-2) // postcondition implies that the second call would be cached
+  } ensuring(_ =>
+    (n <= 2 || (fibRec(n-1).isCached &&
+        fibRec(n-2).isCached))  && time <= ? * n + ?)
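+
+  // A small usage sketch (hypothetical helper, illustrative only): with memoization
+  // each sub-result is computed once, which is what gives the linear time bound.
+  def fibExample(): BigInt = fibRec(BigInt(10))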
+}
diff --git a/testcases/lazy-datastructures/withOrb/HammingMemoized.scala b/testcases/lazy-datastructures/withOrb/HammingMemoized.scala
new file mode 100644
index 0000000000000000000000000000000000000000..4b214ed9045b7875d01c1d4b8b046e2aa11cb772
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/HammingMemoized.scala
@@ -0,0 +1,89 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.Mem._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+/**
+ * A memoized version of the implementation of the Hamming problem shown in
+ * Section 4.3 of "Type-based allocation analysis for Co-recursion".
+ */
+object Hamming {
+  sealed abstract class IList
+  case class Cons(x: BigInt, tail: IList) extends IList
+  case class Nil() extends IList
+
+  case class Data(v: BigInt, i2: BigInt, i3: BigInt, i5: BigInt)
+
+  @invstate
+  @memoize
+  def ham(n: BigInt): Data = {
+    require(n == 0 || (n > 0 && depsEval(n - 1)))
+    if(n == BigInt(0)) Data(1, 0, 0, 0)
+    else {
+      val Data(x, i2, i3, i5) = ham(n-1)
+      val a = ham(i2).i2 * 2
+      val b = ham(i3).i3 * 3
+      val c = ham(i5).i5 * 5
+      val (v, ni, nj, nk) = threeWayMerge(a, b, c, i2, i3, i5)
+      Data(v, ni, nj, nk)
+    }
+  } ensuring(res =>  res.i2 <= n && res.i3 <= n && res.i5 <= n &&
+      res.i3 >= 0 && res.i5 >= 0 && res.i2 >= 0 &&
+      depsLem(res.i2, n) && depsLem(res.i3, n) && depsLem(res.i5, n) && // instantiations
+      time <= ?)
+
+  def depsEval(i: BigInt): Boolean = {
+    require(i >= 0)
+    ham(i).isCached && (if (i <= 0) true else depsEval(i - 1))
+  }
+
+  @traceInduct
+  def depsEvalMono(i: BigInt, st1: Set[Mem[Data]], st2: Set[Mem[Data]]) = {
+    require(i >= 0)
+    (st1.subsetOf(st2) && (depsEval(i) withState st1)) ==> (depsEval(i) withState st2)
+  } holds
+
+  @traceInduct
+  def depsLem(x: BigInt, y: BigInt) = {
+    require(x >= 0 && y >= 0)
+    (x <= y && depsEval(y)) ==> depsEval(x)
+  } holds
+
+  def invoke(n: BigInt) = {
+    require(n == 0 || n > 0 && depsEval(n - 1))
+    ham(n).v
+  } ensuring (res => {
+    val in = Mem.inState[Data]
+    val out = Mem.outState[Data]
+    (n == 0 || depsEvalMono(n-1, in, out)) && // instantiation
+      time <= ?
+  })
+
+  /**
+   * Returns a list of Hamming numbers up to 'n'.
+   */
+  def hammingList(n: BigInt): IList = {
+    require(n >= 0)
+    if(n == 0) {
+        Cons(invoke(n), Nil())
+    } else {
+      val tailRes =  hammingList(n-1)
+      Cons(invoke(n), tailRes)
+    }
+  } ensuring(_ => depsEval(n) && time <= ? * (n + 1))
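+
+  // A small usage sketch (hypothetical helper, illustrative only): the Hamming
+  // numbers at indices 0 to 5, computed bottom-up so that the dependencies of
+  // each `ham` call are already cached.
+  def hammingExample(): IList = hammingList(BigInt(5))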
+
+  @inline
+   def threeWayMerge(a: BigInt, b: BigInt, c: BigInt, i2: BigInt, i3: BigInt, i5: BigInt) = {
+      if(a == b && b == c)      (a, i2 + 1, i3 + 1, i5 + 1)
+      else if(a == b && a < c)  (a, i2 + 1, i3 + 1, i5    )
+      else if(a == c && a < b)  (a, i2 + 1, i3    , i5 + 1)
+      else if(b == c && b < a)  (b, i2    , i3 + 1, i5 + 1)
+      else if(a < b && a < c)   (a, i2 + 1, i3    , i5    )
+      else if(b < c && b < a)   (b, i2    , i3 + 1, i5    )
+      else/*if(c < a && c < b)*/(c, i2    , i3    , i5 + 1)
+   }
+}
diff --git a/testcases/lazy-datastructures/withOrb/Knapsack.scala b/testcases/lazy-datastructures/withOrb/Knapsack.scala
new file mode 100644
index 0000000000000000000000000000000000000000..e1645b545160e455e7878e23e4ce1292ef018b9a
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/Knapsack.scala
@@ -0,0 +1,107 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.Mem._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+object Knapscak {
+  sealed abstract class IList {
+    def size: BigInt = {
+      this match {
+        case Cons(_, tail) => 1 + tail.size
+        case Nil() => BigInt(0)
+      }
+    } ensuring(_ >= 0)
+  }
+  case class Cons(x: (BigInt, BigInt), tail: IList) extends IList // a list of pairs of weights and values
+  case class Nil() extends IList
+
+  def deps(i: BigInt, items: IList): Boolean = {
+    require(i >= 0)
+    knapSack(i, items).isCached && // if the 'isCached' check were done only along the else branch, we would get a counter-example
+      (if (i <= 0) true
+      else {
+        deps(i - 1, items)
+      })
+  }
+
+  @invstate
+  def maxValue(items: IList, w: BigInt, currList: IList): BigInt = {
+    require((w == 0 || (w > 0 && deps(w - 1, items))) &&
+      // lemma inst
+      (currList match {
+        case Cons((wi, vi), _) =>
+          if (wi <= w && wi > 0) depsLem(w - wi, w - 1, items)
+          else true
+        case Nil() => true
+      }))
+    currList match {
+      case Cons((wi, vi), tail) =>
+        val oldMax = maxValue(items, w, tail)
+        if (wi <= w && wi > 0) {
+          val choiceVal = vi + knapSack(w - wi, items)
+          if (choiceVal >= oldMax)
+            choiceVal
+          else
+            oldMax
+        } else oldMax
+      case Nil() => BigInt(0)
+    }
+  } ensuring(_ => time <= ? * currList.size + ?) // interchanging currList and items in the bound will produce a counter-example
+
+  @memoize
+  def knapSack(w: BigInt, items: IList): BigInt = {
+    require(w >= 0 && (w == 0 || deps(w - 1, items)))
+    if (w == 0) BigInt(0)
+    else {
+      maxValue(items, w, items)
+    }
+  } ensuring(_ => time <= ? * items.size + ?)
+
+  def invoke(i: BigInt, items: IList) = {
+    require(i == 0 || (i > 0 && deps(i - 1, items)))
+    knapSack(i, items)
+  } ensuring (res => {
+    (i == 0 || depsMono(i - 1, items, inState[BigInt], outState[BigInt]) && // lemma inst
+        deps(i - 1, items)) &&
+      time <= ? * items.size + ?
+  })
+
+  def bottomup(i: BigInt, w: BigInt, items: IList): IList = {
+    require(w >= i && (i == 0 || i > 0 && deps(i - 1, items)))
+    val ri = invoke(i, items)
+    if (i == w)
+      Cons((i,ri), Nil())
+    else {
+      Cons((i,ri), bottomup(i + 1, w, items))
+    }
+  } ensuring(items.size <= 10 ==> time <= ? * (w - i + 1))
+
+  /**
+   * Computes the list of optimal solutions of all weights up to 'w'
+   */
+  def knapSackSol(w: BigInt, items: IList) = {
+    require(w >= 0 && items.size <= 10) //  the second requirement is only to keep the bounds linear for z3 to work
+    bottomup(0, w, items)
+  } ensuring(time <= ? * w + ?)
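+
+  // A small usage sketch (hypothetical helper, illustrative only): optimal values
+  // for all capacities up to 4, with two items given as (weight, value) pairs.
+  def knapsackExample(): IList =
+    knapSackSol(BigInt(4), Cons((BigInt(1), BigInt(2)), Cons((BigInt(3), BigInt(5)), Nil())))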
+
+  /**
+   * Lemmas of deps
+   */
+  // deps is monotonic
+  @traceInduct
+  def depsMono(i: BigInt, items: IList, st1: Set[Mem[BigInt]], st2: Set[Mem[BigInt]]) = {
+    require(i >= 0)
+    (st1.subsetOf(st2) && (deps(i, items) withState st1)) ==> (deps(i, items) withState st2)
+  } holds
+
+  // forall x: x <= y && deps(y, items) ==> deps(x, items)
+  @traceInduct
+  def depsLem(x: BigInt, y: BigInt, items: IList) = {
+    require(x >= 0 && y >= 0)
+    (x <= y && deps(y, items)) ==> deps(x, items)
+  } holds
+}
diff --git a/testcases/lazy-datastructures/withOrb/RealTimeQueue.scala b/testcases/lazy-datastructures/withOrb/RealTimeQueue.scala
new file mode 100644
index 0000000000000000000000000000000000000000..5ae81cbc13502a6b42e1a91ae71d6f7095c24fe3
--- /dev/null
+++ b/testcases/lazy-datastructures/withOrb/RealTimeQueue.scala
@@ -0,0 +1,112 @@
+package orb
+
+import leon.lazyeval._
+import leon.lazyeval.$._
+import leon.lang._
+import leon.annotation._
+import leon.collection._
+import leon.instrumentation._
+import leon.invariant._
+
+object RealTimeQueue {
+
+  sealed abstract class Stream[T] {
+    def isEmpty: Boolean = {
+      this match {
+        case SNil() => true
+        case _      => false
+      }
+    }
+
+    def isCons: Boolean = {
+      this match {
+        case SCons(_, _) => true
+        case _           => false
+      }
+    }
+
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + ssize(t)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons[T](x: T, tail: $[Stream[T]]) extends Stream[T]
+  case class SNil[T]() extends Stream[T]
+
+  def ssize[T](l: $[Stream[T]]): BigInt = (l*).size
+
+  def isConcrete[T](l: $[Stream[T]]): Boolean = {
+    l.isEvaluated && (l* match {
+      case SCons(_, tail) =>
+        isConcrete(tail)
+      case _ => true
+    })
+  }
+
+  @invstate
+  def rotate[T](f: $[Stream[T]], r: List[T], a: $[Stream[T]]): Stream[T] = { // doesn't change state
+    require(r.size == ssize(f) + 1 && isConcrete(f))
+    (f.value, r) match {
+      case (SNil(), Cons(y, _)) => // in this case 'y' is the only element in 'r'
+        SCons[T](y, a)
+      case (SCons(x, tail), Cons(y, r1)) =>
+        val newa: Stream[T] = SCons[T](y, a)
+        val rot = $(rotate(tail, r1, newa)) //this creates a lazy rotate operation
+        SCons[T](x, rot)
+    }
+  } ensuring (res => res.size == (f*).size + r.size + (a*).size && res.isCons &&
+                     time <= ?)
+
+  /**
+   * Returns the first element of the stream that is not evaluated.
+   */
+  def firstUnevaluated[T](l: $[Stream[T]]): $[Stream[T]] = {
+    if (l.isEvaluated) {
+      l* match {
+        case SCons(_, tail) =>
+          firstUnevaluated(tail)
+        case _ => l
+      }
+    } else
+      l
+  } ensuring (res => (!(res*).isEmpty || isConcrete(l)) && // if there are no lazy closures, the stream is concrete
+    (res.value match {
+      case SCons(_, tail) =>
+        firstUnevaluated(l) == firstUnevaluated(tail) // after evaluating the firstUnevaluated closure in 'l' we can access the next unevaluated closure
+      case _ => true
+    }))
+
+  case class Queue[T](f: $[Stream[T]], r: List[T], s: $[Stream[T]]) {
+    def isEmpty = (f*).isEmpty
+    def valid = {
+      (firstUnevaluated(f) == firstUnevaluated(s)) &&
+        ssize(s) == ssize(f) - r.size //invariant: |s| = |f| - |r|
+    }
+  }
+
+  @inline
+  def createQ[T](f: $[Stream[T]], r: List[T], s: $[Stream[T]]) = {
+    s.value match {
+      case SCons(_, tail) => Queue(f, r, tail)
+      case SNil() =>
+        val newa: Stream[T] = SNil()
+        val rotres = $(rotate(f, r, newa))
+        Queue(rotres, Nil(), rotres)
+    }
+  }
+
+  def enqueue[T](x: T, q: Queue[T]): Queue[T] = {
+    require(q.valid)
+    createQ(q.f, Cons(x, q.r), q.s)
+  } ensuring (res => res.valid && time <= ?)
+
+  def dequeue[T](q: Queue[T]): Queue[T] = {
+    require(!q.isEmpty && q.valid)
+    q.f.value match {
+      case SCons(x, nf) =>
+        createQ(nf, q.r, q.s)
+    }
+  } ensuring (res => res.valid && time <= ?)
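+
+  // A small usage sketch (hypothetical helper, illustrative only): two constant-time
+  // enqueues followed by a constant-time dequeue, starting from the empty queue.
+  def exampleQueue[T](x: T, y: T): Queue[T] = {
+    val nil: $[Stream[T]] = SNil[T]()
+    val q0 = Queue[T](nil, Nil(), nil) // the empty queue is trivially valid
+    dequeue(enqueue(y, enqueue(x, q0)))
+  }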
+}
diff --git a/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala b/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
index 384655b83c58273dce59a27efa226a9a2465b478..94acdf9caaae8dd289262048d9d3bf7e3ad69c9f 100644
--- a/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
+++ b/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
@@ -5,7 +5,6 @@ import leon.lang._
 import leon.annotation._
 import leon.instrumentation._
 import leon.invariant._
-import scala.math.BigInt.int2bigInt
 
 object SortingnConcat {