diff --git a/src/main/scala/leon/Main.scala b/src/main/scala/leon/Main.scala
index 542fb6835080d36803605f318e786352a16cf86b..facc21c135aae2959cfe58e3c5fad3a52a0f1f0d 100644
--- a/src/main/scala/leon/Main.scala
+++ b/src/main/scala/leon/Main.scala
@@ -159,7 +159,6 @@ object Main {
     import invariant.engine.InferInvariantsPhase
     import transformations._
 
-
     val helpF        = ctx.findOptionOrDefault(optHelp)
     val noopF        = ctx.findOptionOrDefault(optNoop)
     val synthesisF   = ctx.findOptionOrDefault(optSynthesis)
@@ -196,7 +195,7 @@ object Main {
         else if (inferInvF) InferInvariantsPhase
         else if (instrumentF) InstrumentationPhase andThen FileOutputPhase
         else if (lazyevalF) LazinessEliminationPhase
-        else analysis
+        else InstrumentationPhase andThen analysis
       }
 
       pipeBegin andThen
diff --git a/src/main/scala/leon/invariant/engine/SpecInstatiator.scala b/src/main/scala/leon/invariant/engine/SpecInstatiator.scala
index 9961df29357d1de52c98fe135f106993ab1e388a..ba965fab9a39a244f261d5bac63925a35abc9506 100644
--- a/src/main/scala/leon/invariant/engine/SpecInstatiator.scala
+++ b/src/main/scala/leon/invariant/engine/SpecInstatiator.scala
@@ -140,7 +140,7 @@ class SpecInstantiator(ctx: InferenceContext, ctrTracker: ConstraintTracker) {
       }
       //flatten functions
       //TODO: should we freshen locals here ??
-      Some(ExpressionTransformer.normalizeExpr(template, ctx.multOp))
+      Some(ExpressionTransformer.normalizeExpr(matchToIfThenElse(template), ctx.multOp))
     } else None
   }
 
diff --git a/src/main/scala/leon/invariant/util/Util.scala b/src/main/scala/leon/invariant/util/Util.scala
index 2df7ac71ce0264bffbba3a9c35f890d474604341..5554411a5c734e3559bdd0b67372ec54c433c904 100644
--- a/src/main/scala/leon/invariant/util/Util.scala
+++ b/src/main/scala/leon/invariant/util/Util.scala
@@ -608,7 +608,7 @@ object Util {
    * Apply an expression operation on all expressions contained in a FunDef
    */
   def applyOnFunDef(operation: Expr => Expr)(funDef: FunDef): FunDef = {
-    val newFunDef = funDef.duplicate
+    val newFunDef = funDef.duplicate()
     newFunDef.fullBody = operation(funDef.fullBody)
     newFunDef
   }
diff --git a/src/main/scala/leon/laziness/ClosurePreAsserter.scala b/src/main/scala/leon/laziness/ClosurePreAsserter.scala
index a0f68b29a77476feb2533f8bae35918e680feabd..73f2bf486d38441a9883a261ee3e20627a2c957d 100644
--- a/src/main/scala/leon/laziness/ClosurePreAsserter.scala
+++ b/src/main/scala/leon/laziness/ClosurePreAsserter.scala
@@ -56,7 +56,7 @@ class ClosurePreAsserter(p: Program) {
           val lemmaid = FreshIdentifier(ccd.id.name + fd.id.name + "Lem", Untyped, true)
           val params = variablesOf(vc).toSeq.map(v => ValDef(v))
           val tparams = params.flatMap(p => getTypeParameters(p.getType)).distinct map TypeParameterDef
-          val lemmafd = new FunDef(lemmaid, tparams, BooleanType, params)
+          val lemmafd = new FunDef(lemmaid, tparams, params, BooleanType)
           // reset the types of locals
           val initGamma = params.map(vd => vd.id -> vd.getType).toMap
           lemmafd.body = Some(TypeChecker.inferTypesOfLocals(vc, initGamma))
@@ -75,4 +75,3 @@ class ClosurePreAsserter(p: Program) {
     else p
   }
 }
-
diff --git a/src/main/scala/leon/laziness/LazinessEliminationPhase.scala b/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
index 39d7e1b7a582d0da9e624d1a48b10966a03d2df9..12ae3e67ae65cc5f6ac6424a6e84b942e399ec1a 100644
--- a/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
+++ b/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
@@ -99,7 +99,7 @@ object LazinessEliminationPhase extends TransformationPhase {
                 } else {
                   //construct type parameters for the function
                   val nfun = new FunDef(FreshIdentifier("lazyarg", arg.getType, true),
-                    tparams map TypeParameterDef.apply, arg.getType, freevars.map(ValDef(_)))
+                    tparams map TypeParameterDef.apply, freevars.map(ValDef(_)), arg.getType)
                   nfun.body = Some(arg)
                   newfuns += (argstruc -> (nfun, md))
                   nfun
@@ -109,7 +109,7 @@ object LazinessEliminationPhase extends TransformationPhase {
             case _ => None
           }(fd)
           (fd -> Some(nfd))
-        case fd => fd -> Some(fd.duplicate)
+        case fd => fd -> Some(fd.duplicate())
       }
     }.toMap
     // map  the new functions to themselves
@@ -138,7 +138,7 @@ object LazinessEliminationPhase extends TransformationPhase {
     val functions = Seq() // Seq("--functions=rotate-time")
     val solverOptions = if (debugSolvers) Seq("--debug=solver") else Seq()
     val ctx = Main.processOptions(Seq("--solvers=smt-cvc4,smt-z3") ++ solverOptions ++ functions)
-    val report = AnalysisPhase.run(ctx)(prog)
+    val report = AnalysisPhase.apply(ctx, prog)
     println(report.summaryString)
     /*val timeout = 10
     val rep = ctx.reporter
diff --git a/src/main/scala/leon/laziness/LazinessUtil.scala b/src/main/scala/leon/laziness/LazinessUtil.scala
index 3383d8d52cf1d43ee1b8158cf6e8cec9ff23d756..cecd2a8bedcffb74ab2fb8995c5cc977b1aefa39 100644
--- a/src/main/scala/leon/laziness/LazinessUtil.scala
+++ b/src/main/scala/leon/laziness/LazinessUtil.scala
@@ -48,8 +48,8 @@ object LazinessUtil {
       try {
         val out = new BufferedWriter(new FileWriter(outputFile))
         // remove '@' from the end of the identifier names
-        val pat = new Regex("""(\S+)(@)""", "base", "suffix")
-        val pgmText = pat.replaceAllIn(ScalaPrinter.apply(p), m => m.group("base"))
+        val pat = new Regex("""(\w+)([@*]+)(\w*)""", "base", "AtorStar", "rest")
+        val pgmText = pat.replaceAllIn(ScalaPrinter.apply(p), m => m.group("base") + m.group("rest"))
         out.write(pgmText)
         out.close()
       } catch {
diff --git a/src/main/scala/leon/laziness/LazyClosureConverter.scala b/src/main/scala/leon/laziness/LazyClosureConverter.scala
index 1b6992ae1cd0ba615294eb7c5c11f23dd495fc83..353bbcb0f6aa949a119e1a420da72787b6873662 100644
--- a/src/main/scala/leon/laziness/LazyClosureConverter.scala
+++ b/src/main/scala/leon/laziness/LazyClosureConverter.scala
@@ -69,10 +69,10 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
             nretType
         // the type parameters will be unified later
         new FunDef(FreshIdentifier(fd.id.name, Untyped),
-          fd.tparams ++ newTParams, retTypeWithState, nparams ++ stParams)
+          fd.tparams ++ newTParams, nparams ++ stParams, retTypeWithState)
         // body of these functions are defined later
       } else {
-        new FunDef(FreshIdentifier(fd.id.name, Untyped), fd.tparams, nretType, nparams)
+        new FunDef(FreshIdentifier(fd.id.name, Untyped), fd.tparams, nparams, nretType)
       }
       // copy annotations
       fd.flags.foreach(nfd.addFlag(_))
@@ -86,7 +86,7 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
   lazy val uninterpretedTargets = funMap.map {
     case (k, v) =>
       val ufd = new FunDef(FreshIdentifier(v.id.name, v.id.getType, true),
-        v.tparams, v.returnType, v.params)
+        v.tparams, v.params, v.returnType)
       (k -> ufd)
   }.toMap
 
@@ -135,7 +135,7 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
 
     // create a eval function
     val dfun = new FunDef(FreshIdentifier(evalFunctionName(absdef.id.name), Untyped),
-      tparamDefs, retType, Seq(ValDef(param1), ValDef(param2)))
+      tparamDefs, Seq(ValDef(param1), ValDef(param2)), retType)
 
     // assign body of the eval fucntion
     // create a match case to switch over the possible class defs and invoke the corresponding functions
@@ -159,6 +159,7 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
           // do not make all targets uninterpreted
           uninterpretedTargets(op)
         } else funMap(op)
+      //println(s"invoking function $targetFun with args $args")
       val invoke = FunctionInvocation(TypedFunDef(targetFun, tparams), args ++ stArgs) // we assume that the type parameters of lazy ops are same as absdefs
       val invPart = if (funsRetStates(op)) {
         TupleSelect(invoke, 1) // we are only interested in the value
@@ -180,7 +181,7 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
       val tpe = closureFactory.lazyType(tname)
       val param1 = evalfd.params.head
       val fun = new FunDef(FreshIdentifier(evalfd.id.name + "*", Untyped),
-        evalfd.tparams, tpe, Seq(param1))
+        evalfd.tparams, Seq(param1), tpe)
       val invoke = FunctionInvocation(TypedFunDef(evalfd, evalfd.tparams.map(_.tp)),
         Seq(param1.id.toVariable, FiniteSet(Set(),
           getStateType(tname, getTypeParameters(tpe)).base)))
@@ -200,8 +201,8 @@ class LazyClosureConverter(p: Program, closureFactory: LazyClosureFactory) {
     val stType = SetType(param1Type)
     val param2 = FreshIdentifier("st@", stType)
     val tparamDefs = adt.tparams
-    val fun = new FunDef(FreshIdentifier(closureConsName(tname)), adt.tparams, param1Type,
-      Seq(ValDef(param1), ValDef(param2)))
+    val fun = new FunDef(FreshIdentifier(closureConsName(tname)), adt.tparams,
+      Seq(ValDef(param1), ValDef(param2)), param1Type)
     fun.body = Some(param1.toVariable)
     val resid = FreshIdentifier("res", param1Type)
     val postbody = Not(ElementOfSet(resid.toVariable, param2.toVariable))
diff --git a/src/main/scala/leon/laziness/TypeRectifier.scala b/src/main/scala/leon/laziness/TypeRectifier.scala
index 171dd4f3dcb7d2db8eea0c4bb948b3fc86e5d27c..97ea6011fe9ae7e8fa9a42e8f57a02a951087aca 100644
--- a/src/main/scala/leon/laziness/TypeRectifier.scala
+++ b/src/main/scala/leon/laziness/TypeRectifier.scala
@@ -31,7 +31,7 @@ import leon.invariant.datastructure.DisjointSets
 /**
  * This performs a little bit of Hindley-Milner type Inference
  * to correct the local types and also unify type parameters
- * @param placeHolderParameter Expected to returns true iff a type parameter 
+ * @param placeHolderParameter Expected to return true iff a type parameter
  * 														is meant as a placeholder and cannot be used
  * 														to represent a unified type
  */
@@ -69,7 +69,7 @@ class TypeRectifier(p: Program, placeHolderParameter: TypeParameter => Boolean)
   }
 
   val equivTypeParams = typeClasses.toMap
-  
+
   val fdMap = p.definedFunctions.collect {
     case fd if !fd.isLibrary =>
       val (tempTPs, otherTPs) = fd.tparams.map(_.tp).partition {
@@ -98,8 +98,8 @@ class TypeRectifier(p: Program, placeHolderParameter: TypeParameter => Boolean)
           (id -> FreshIdentifier(id.name, instf(vd.getType)))
       }.toMap
       val ntparams = tpMap.values.toSeq.distinct.collect { case tp: TypeParameter => tp } map TypeParameterDef
-      val nfd = new FunDef(fd.id.freshen, ntparams, instf(fd.returnType),
-        fd.params.map(vd => ValDef(paramMap(vd.id))))
+      val nfd = new FunDef(fd.id.freshen, ntparams, fd.params.map(vd => ValDef(paramMap(vd.id))),
+          instf(fd.returnType))
       fd -> (nfd, tpMap, paramMap)
   }.toMap
 
@@ -149,4 +149,3 @@ class TypeRectifier(p: Program, placeHolderParameter: TypeParameter => Boolean)
     })
   }
 }
-
diff --git a/src/main/scala/leon/solvers/z3/AbstractZ3Solver.scala b/src/main/scala/leon/solvers/z3/AbstractZ3Solver.scala
index 9a06cff266e221b0459b7d23e9417b4940ec5743..b4cacc43a0496f73990928a1ad338faaf180c980 100644
--- a/src/main/scala/leon/solvers/z3/AbstractZ3Solver.scala
+++ b/src/main/scala/leon/solvers/z3/AbstractZ3Solver.scala
@@ -224,7 +224,6 @@ trait AbstractZ3Solver extends Solver {
         declareStructuralSort(tpe)
       }
 
-
     case tt @ SetType(base) =>
       sorts.cachedB(tt) {
         z3.mkSetSort(typeToSort(base))
@@ -534,7 +533,6 @@ trait AbstractZ3Solver extends Solver {
           z3.mkStore(m, rec(k), rec(CaseClass(library.someType(t), Seq(v))))
         }
 
-
       case gv @ GenericValue(tp, id) =>
         z3.mkApp(genericValueToDecl(gv))
 
diff --git a/testcases/lazy-datastructures/BottomUpMegeSort.scala b/testcases/lazy-datastructures/BottomUpMegeSort.scala
new file mode 100644
index 0000000000000000000000000000000000000000..94ab6a0705e373d41eef8a547919ee725404a03e
--- /dev/null
+++ b/testcases/lazy-datastructures/BottomUpMegeSort.scala
@@ -0,0 +1,151 @@
+package lazybenchmarks
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+//import leon.invariant._
+
+object BottomUpMergeSort {
+
+  sealed abstract class IList {
+    def size: BigInt = {
+      this match {
+        case ICons(_, xs) => 1 + xs.size
+        case _            => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class ICons(x: BigInt, tail: IList) extends IList
+  case class INil() extends IList
+
+  sealed abstract class ILList {
+    def size: BigInt = {
+      this match {
+        case LCons(_, xs) => 1 + ssize(xs)
+        case _            => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class LCons(x: BigInt, tail: $[ILList]) extends ILList
+  case class LNil() extends ILList
+  def ssize(l: $[ILList]): BigInt = (l*).size
+
+  // TODO: making this parametric will break many things. Fix them
+  sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(_, t) => 1 + t.size
+      }
+    } ensuring (_ >= 0)
+
+    def valid: Boolean = {
+      this match {
+        case SNil()      => true
+        case SCons(l, t) => ssize(l) > 0 && t.valid
+      }
+    }
+
+    def fullSize: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(l, t) => ssize(l) + t.fullSize
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons(x: $[ILList], tail: LList) extends LList
+  case class SNil() extends LList
+
+  // takes O(n) time
+  def pairs(l: LList): LList = {
+    require(l.valid)
+    l match {
+      case SNil()           => SNil()
+      case SCons(_, SNil()) => l
+      case SCons(l1, SCons(l2, rest)) =>
+        SCons($(merge(l1, l2)), pairs(rest))
+    }
+  } ensuring (res => res.size <= (l.size + 1) / 2 &&
+    l.fullSize == res.fullSize &&
+    res.valid &&
+    time <= 10 * l.size + 4)
+
+  def constructMergeTree(l: LList): LList = {
+    require(l.valid)
+    l match {
+      case SNil()           => SNil()
+      case SCons(_, SNil()) => l
+      case _ =>
+        constructMergeTree(pairs(l))
+    }
+  } ensuring (res => res.size <= 1 && res.fullSize == l.fullSize &&
+    (res match {
+      case SCons(il, SNil()) =>
+        res.fullSize == ssize(il) // this is implied by the previous conditions
+      case _ => true
+    }) &&
+    res.valid &&
+    time <= 42 * l.size + 4)
+
+  // here size of res is required to be >= 1
+  def merge(a: $[ILList], b: $[ILList]): ILList = {
+    require(((a*) != LNil() || b.isEvaluated) && // if one of them is Nil then the other is evaluated
+        ((b*) != LNil() || a.isEvaluated) &&
+        ((a*) != LNil() || (b*) != LNil())) // one of them is not Nil
+    b.value match {
+      case LNil() => a.value
+      case bl @ LCons(x, xs) =>
+        a.value match {
+          case LNil() => bl
+          case LCons(y, ys) =>
+            if (y < x)
+              LCons(y, $(merge(ys, b)))
+            else
+              LCons(x, $(merge(a, xs)))
+        }
+    }
+  } ensuring (res => ssize(a) + ssize(b) == res.size &&
+       time <= 300 * res.size - 100)
+
+  // this will take O(n) time
+  def IListToLList(l: IList): LList = {
+    l match {
+      case INil() => SNil()
+      case ICons(x, xs) =>
+        SCons($(lsingle(x)), IListToLList(xs))
+    }
+  } ensuring (res => res.fullSize == l.size &&
+    res.size == l.size &&
+    res.valid &&
+    time <= 9 * l.size + 3)
+
+  def mergeSort(l: IList): ILList = {
+    l match {
+      case INil() => LNil()
+      case _ =>
+        constructMergeTree(IListToLList(l)) match {
+          case SCons(r, SNil()) => r.value
+        }
+    }
+  } ensuring (res => time <= 400 * l.size + 10)
+
+  /**
+   * Order statistics
+   */
+  def firstMin(l: IList) : BigInt ={
+    require(l != INil())
+    mergeSort(l) match {
+      case LCons(x, rest) => x
+    }
+  } ensuring (res => time <= 400 * l.size + 20)
+
+  // TODO: inlining this creates problems. why ?
+  def lempty: ILList = {
+    LNil()
+  }
+
+  def lsingle(x: BigInt): ILList = {
+    LCons(x, $(lempty))
+  }
+}
diff --git a/testcases/lazy-datastructures/LazyMegeSort-edited.scala b/testcases/lazy-datastructures/LazyMegeSort-edited.scala
new file mode 100644
index 0000000000000000000000000000000000000000..f11476bd1eb3d9b1924c8e7aee00903fa3b6c055
--- /dev/null
+++ b/testcases/lazy-datastructures/LazyMegeSort-edited.scala
@@ -0,0 +1,230 @@
+package output
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+object MergeSort {
+
+  case class Set1[T]()
+
+  abstract class IList {
+    def size: BigInt = {
+      this match {
+        case ICons(_, xs) =>
+          BigInt(1) + xs.size
+        case _ =>
+          BigInt(0)
+      }
+    } ensuring {
+      (x$1: BigInt) => x$1 >= BigInt(0)
+    }
+  }
+
+  case class ICons(x: BigInt, tail: IList) extends IList
+
+  case class INil() extends IList
+
+  abstract class ILList {
+    /*def size: BigInt = {
+      this match {
+        case LCons(_, xs37) =>
+          BigInt(1) + ssize(xs37)
+        case _ =>
+          BigInt(0)
+      }
+    } ensuring {
+      (x$218: BigInt) => x$218 >= BigInt(0)
+    }*/
+  }
+
+  case class LCons(x: BigInt, tail: LazyILList) extends ILList
+
+  case class LNil() extends ILList
+
+  abstract class LList {
+    def size: BigInt = {
+      this match {
+        case SNil() =>
+          BigInt(0)
+        case SCons(_, t) =>
+          BigInt(1) + t.size
+      }
+    } ensuring {
+      (x$3: BigInt) => x$3 >= BigInt(0)
+    }
+  }
+
+  case class SCons(x: LazyILList, tail: LList) extends LList
+
+  case class SNil() extends LList
+
+ /* def ssize(l: LazyILList): BigInt = {
+    evalLazyILList2(l).size
+  } ensuring {
+    (res: BigInt) => true
+  }
+
+  def fullSize(l: LList): BigInt = {
+    l match {
+      case SNil() =>
+        BigInt(0)
+      case SCons(l49, t) =>
+        ssize(l49) + fullSize(t)
+    }
+  } ensuring {
+    (x$46: BigInt) => x$46 >= BigInt(0)
+  }*/
+
+  /*def pairs(l: LList, st: Set1[LazyILList]): (LList, Set1[LazyILList]) = {
+    l match {
+      case SNil() =>
+        (SNil(), st)
+      case SCons(_, SNil()) =>
+        (l, st)
+      case SCons(l118, SCons(l215, rest)) =>
+        val a77 = pairs(rest, st)
+        (SCons(newILList(Merge(l118, l215), st), a77._1), a77._2)
+    }
+  } ensuring {
+    (res69: (LList, Set1[LazyILList])) =>
+      res69._1.size <= (l.size + BigInt(1)) / BigInt(2) &&
+      fullSize(l) == fullSize(res69._1) &&
+      time <= 10 * l.size + 4
+  }*/
+
+  abstract class LazyILList
+
+  case class Merge(a: LazyILList, b: LazyILList) extends LazyILList
+
+  case class Lsingle(x: BigInt) extends LazyILList
+
+  case class Lempty() extends LazyILList
+
+  //@library
+  def newILList(cc: LazyILList, st: Set1[LazyILList]): LazyILList = {
+    cc
+  } /*ensuring {
+    (res: LazyILList) => !st.contains(res)
+  }*/
+
+  //@library
+  /*def evalLazyILList(cl: LazyILList, st: Set1[LazyILList]): (ILList, Set1[LazyILList]) = {
+    cl match {
+      case t: Merge =>
+        (merge(t.a, t.b, Set1[LazyILList]()), Set1[LazyILList]())
+      case t: Lsingle =>
+        (lsingle(t.x, Set1[LazyILList]()), Set1[LazyILList]())
+      case t: Lempty =>
+        (lempty, Set1[LazyILList]())
+    }
+  } ensuring {
+    (res: (ILList, Set1[LazyILList])) =>
+      cl match {
+        case t: Merge =>
+          ssize(t.a) + ssize(t.b) == res._1.size &&
+          time <= 300 * res._1.size  - 100
+        case t: Lsingle =>
+          true
+        case t: Lempty =>
+          true
+      }
+  }
+
+  def evalLazyILList2(cl: LazyILList): ILList = {
+    evalLazyILList(cl, Set1[LazyILList]())._1
+  } ensuring {
+    (res: ILList) => true
+  }*/
+
+/*  def constructMergeTree(l: LList, st: Set1[LazyILList]): (LList, Set1[LazyILList]) = {
+    l match {
+      case SNil() =>
+        (SNil(), st)
+      case SCons(_, SNil()) =>
+        (l, st)
+      case _ =>
+        val a76 = pairs(l, st)
+        constructMergeTree(a76._1, a76._2)
+    }
+  } ensuring {
+    (res: (LList, Set1[LazyILList])) =>
+      res._1.size <= BigInt(1) && fullSize(res._1) == fullSize(l) && (res._1 match {
+        case SCons(il1, SNil()) =>
+          fullSize(res._1) == ssize(il1)
+        case _ =>
+          true
+      }) &&
+      time <= 42 * l.size + 4
+  }*/
+
+  /*  def merge(a: LazyILList, b: LazyILList, st: Set1[LazyILList]): ILList = {
+    require(evalLazyILList2(a) != LNil() && evalLazyILList2(b) != LNil())
+    evalLazyILList(b, st)._1 match {
+      case LNil() =>
+        evalLazyILList(a, st)._1
+      case bl @ LCons(x, xs36) =>
+        evalLazyILList(a, st)._1 match {
+          case LNil() =>
+            bl
+          case LCons(y, ys2) =>
+            if (y < x) {
+              LCons(y, Merge(ys2, b))
+            } else {
+              LCons(x, Merge(a, xs36))
+            }
+        }
+    }
+  } ensuring {
+    (res70 : ILList) => ssize(a) + ssize(b) == res70.size &&
+    time <= 300 * res70.size  - 100
+//    (res70 match {
+//      case _ if res70.size == 1 =>
+//        time <= 300 * res70.size  + 100
+//      case _ =>
+//        time <= 300 * res70.size  - 100
+//    })
+  }*/
+
+  def IListToLList(l: IList, st: Set1[LazyILList]): LList = {
+    l match {
+      case INil() =>
+        SNil()
+      case ICons(x, xs) =>
+        SCons(newILList(Lsingle(x), st), IListToLList(xs, st))
+    }
+  } ensuring {
+    (res: LList) =>
+      //fullSize(res) == l.size && res.size == l.size &&
+        time <= 9 * l.size + 3
+  }
+
+//  def mergeSort(l: IList, st: Set1[LazyILList]): (ILList, Set1[LazyILList]) = {
+//    l match {
+//      case INil() =>
+//        (LNil(), st)
+//      case _ =>
+//        val scr = constructMergeTree(IListToLList(l, st), st)
+//        scr._1 match {
+//          case SCons(r13, SNil()) =>
+//            val dres = evalLazyILList(r13, scr._2)
+//            (dres._1, dres._2)
+//        }
+//    }
+//  } ensuring {
+//    (res: (ILList, Set1[LazyILList])) => true
+//  }
+
+  def lempty(): ILList = {
+    LNil()
+  } ensuring {
+    (res: ILList) => true
+  }
+
+  def lsingle(x: BigInt, st: Set1[LazyILList]): ILList = {
+    LCons(x, newILList(Lempty(), st))
+  } ensuring {
+    (res: ILList) => true
+  }
+}
diff --git a/testcases/lazy-datastructures/LazyMegeSort.scala b/testcases/lazy-datastructures/LazyMegeSort.scala
new file mode 100644
index 0000000000000000000000000000000000000000..ca78492c416ea962bc08b8b894b3f5aef958aa5e
--- /dev/null
+++ b/testcases/lazy-datastructures/LazyMegeSort.scala
@@ -0,0 +1,92 @@
+package lazybenchmarks
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+//import leon.invariant._
+
+object MergeSort {
+
+  // TODO: making this parametric will break many things. Fix them
+  sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + ssize(t)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons(x: BigInt, tail: $[LList]) extends LList
+  case class SNil() extends LList
+  def ssize(l: $[LList]): BigInt = (l*).size
+
+  sealed abstract class List {
+    def size: BigInt = {
+      this match {
+        case Cons(_, xs) => 1 + xs.size
+        case _           => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class Cons(x: BigInt, tail: List) extends List
+  case class Nil() extends List
+
+  def length(l: List): BigInt = {
+    l match {
+      case Nil()       => BigInt(0)
+      case Cons(x, xs) => 1 + length(xs)
+    }
+  } ensuring (res => res >= 0 && res == l.size && stack <= 14 * l.size + 15)
+
+  def split(l: List, n: BigInt): (List, List) = {
+    require(n > 0 && n < l.size)
+    l match {
+      case Nil() => (Nil(), l)
+      case Cons(x, xs) =>
+        if (n == 1) {
+          (Cons(x, Nil()), xs)
+        } else {
+          val (fst, snd) = split(xs, n - 1)
+          (Cons(x, fst), snd)
+        }
+    }
+  } ensuring (res => res._2.size == l.size - n && res._1.size == n && stack <= 25 * l.size - 1)
+
+  /*
+   * Note: merge is not recursive due to closures.
+   * However, concretizing merge may invoke merge or msort recursively.
+   * So proving standalone bound for merge requires preconditions.
+   */
+  def merge(a: $[LList], b: $[LList]): LList = (b.value match {
+    case SNil() => a.value
+    case bl @ SCons(x, xs) =>
+      a.value match {
+        case SNil() => bl
+        case SCons(y, ys) =>
+          if (y < x)
+            SCons(y, $(merge(ys, b)))
+          else
+            SCons(x, $(merge(a, xs)))
+      }
+  }) //ensuring (res => ssize(a) + ssize(b) == res.size)
+
+  /**
+   * For proving time, we need a term of order \Omega(n) with strict
+   * inverse triangle inequality i.e, f(a + b) > f(a) + f(b)
+   * Log satisfies this but it is very expensive. Is there another function ?
+   */
+  def mergeSort(l: List): LList = (l match {
+    case Nil()          => SNil()
+    case Cons(x, Nil()) => SCons(x, $(empty))
+    case _ =>
+      val (fst, snd) = split(l, length(l) / 2)
+      merge($(mergeSort(fst)), $(mergeSort(snd)))
+
+  }) ensuring (res => stack <= 81 * l.size + 35) // res.size == l.size
+
+  // TODO: inlining this creates problems. why ?
+  def empty: LList = {
+    SNil()
+  }
+}