diff --git a/leon.out/Conqueue-edited.scala b/leon.out/Conqueue-edited.scala
deleted file mode 100644
index 3e9ba4df1a1526d86e5c46c10131cd230cd32121..0000000000000000000000000000000000000000
--- a/leon.out/Conqueue-edited.scala
+++ /dev/null
@@ -1,465 +0,0 @@
-package lazybenchmarks
-import leon.lazyeval._
-import leon.lang._
-import leon.annotation._
-import leon.instrumentation._
-import leon.lang.synthesis._
-import ConcTrees._
-import ConQ._
-import Conqueue._
-import conc.Conqueue
-import conc.ConcTrees
-
-object ConcTrees {
-  abstract class Conc[T] {
-    def isEmpty : Boolean = this == Empty[T]()
-
-    def level : BigInt = {
-      this match {
-        case Empty() =>
-          BigInt(0)
-        case Single(x) =>
-          BigInt(0)
-        case CC(l, r) =>
-          BigInt(1) + max(l.level, r.level)
-      }
-    } ensuring {
-      (x$1 : BigInt) => x$1 >= BigInt(0)
-    }
-  }
-  
-  case class Empty[T]() extends Conc[T]
-  
-  case class Single[T](x : T) extends Conc[T]
-  
-  case class CC[T](left : Conc[T], right : Conc[T]) extends Conc[T]
-  
-  def max(x : BigInt, y : BigInt): BigInt = if (x >= y) {
-    x
-  } else {
-    y
-  }
-  
-  def abs(x : BigInt): BigInt = if (x < BigInt(0)) {
-    -x
-  } else {
-    x
-  }
-}
-
-object Conqueue {
-  abstract class ConQ[T] {
-    def isSpine : Boolean = this match {
-      case Spine(_, _) =>
-        true
-      case _ =>
-        false
-    }
-    def isTip : Boolean = !this.isSpine
-  }
-  
-  case class Tip[T](t : Conc[T]) extends ConQ[T]
-  
-  case class Spine[T](head : Conc[T], rear : LazyConQ[T]) extends ConQ[T]
-  
-  abstract class Scheds[T]
-  
-  case class Cons[T](h : LazyConQ[T], tail : Scheds[T]) extends Scheds[T]
-  
-  case class Nil[T]() extends Scheds[T]
-  
-  case class Wrapper[T](queue : LazyConQ[T], schedule : Scheds[T]) {
-    def valid(st : Set[LazyConQ[T]]): Boolean = zeroPreceedsLazy[T](this.queue, st) && schedulesProperty(this.queue, this.schedule, st)
-  }
-  
-  def zeroPreceedsLazy[T](q : LazyConQ[T], st : Set[LazyConQ[T]]): Boolean = {
-    if (st.contains(q)) {
-      evalLazyConQS[T](q) match {
-        case Spine(Empty(), rear10) =>
-          true
-        case Spine(h, rear11) =>
-          zeroPreceedsLazy[T](rear11, st)
-        case Tip(_) =>
-          true
-      }
-    } else {
-      false
-    }
-  } ensuring { res =>
-    res || !st.contains(q) || {
-      val x = firstUnevaluated(q, st)
-      val (_, nst) = evalLazyConQ(x, st)
-      zeroPreceedsLazy(q, nst)
-    }
-  }
-
-  def zeroPredLazyMonotone[T](st1 : Set[LazyConQ[T]], st2 : Set[LazyConQ[T]], q : LazyConQ[T]): Boolean = {
-    require(st1.subsetOf(st2) && zeroPreceedsLazy(q, st1))
-    zeroPreceedsLazy(q, st2) &&
-    // induction scheme
-    (evalLazyConQS[T](q) match {
-      case Spine(Empty(), _) =>
-        true
-      case Spine(h, rear) =>
-        zeroPredLazyMonotone(st1, st2, rear)
-      case Tip(_) =>
-        true
-    })
-  } holds
-  
-  def isConcrete[T](l : LazyConQ[T], st : Set[LazyConQ[T]]): Boolean = st.contains(l) && (evalLazyConQS[T](l) match {
-    case Spine(_, tail13) =>
-      isConcrete[T](tail13, st)
-    case _ =>
-      true
-  }) || evalLazyConQS[T](l).isTip
-  
-  @library   
-  def streamLemma[T](l : LazyConQ[T], st : Set[LazyConQ[T]]): Boolean =  {
-    st.contains(l) || (evalLazyConQS[T](l) match {
-      case Spine(_, tail14) =>
-        l != tail14 && !st.contains(tail14)
-      case _ =>
-        true
-    })
-  } ensuring {
-    (holds : Boolean) => holds
-  }
-  
-  def firstUnevaluated[T](l : LazyConQ[T], st : Set[LazyConQ[T]]): LazyConQ[T] =  {
-    if (st.contains(l)) {
-      evalLazyConQS[T](l) match {
-        case Spine(_, tail15) =>
-          firstUnevaluated[T](tail15, st)
-        case _ =>
-          l
-      }
-    } else {
-      l
-    }
-  } ensuring {
-    (res65 : LazyConQ[T]) => {
-      val dres4 = evalLazyConQ[T](res65, st)
-      (evalLazyConQS[T](res65).isSpine || isConcrete[T](l, st)) && (evalLazyConQS[T](res65).isTip || !st.contains(res65)) && streamLemma[T](res65, st) && (dres4._1 match {
-        case Spine(_, tail16) =>
-          ((firstUnevaluated[T](l, dres4._2) == tail16), dres4._2)
-        case _ =>
-          (true, dres4._2)
-      })._1
-    }
-  }
-  
-  // def nextUnevaluated[T](l : LazyConQ[T], st : Set[LazyConQ[T]]): LazyConQ[T] = evalLazyConQS[T](l) match {
-  //   case Spine(_, tail17) =>
-  //     firstUnevaluated[T](tail17, st)
-  //   case _ =>
-  //     l
-  // }
-  
-  def schedulesProperty[T](q : LazyConQ[T], schs : Scheds[T], st : Set[LazyConQ[T]]): Boolean = schs match {
-    /*case Cons(head, Nil()) =>  	
-      firstUnevaluated[T](q, st) == head // here, head can be tip or cons i.e, q should be concrete*/
-    case Cons(head5, tail) =>  	
-      /*evalLazyConQS[T](head5).isSpine &&*/ firstUnevaluated[T](q, st) == head5 && schedulesProperty[T](pushUntilZero[T](head5), tail, st)
-    case Nil() =>
-      isConcrete[T](q, st)
-  }
-  
-  def pushUntilZero[T](q : LazyConQ[T]): LazyConQ[T] = evalLazyConQS[T](q) match {
-    case Spine(Empty(), rear12) =>
-      pushUntilZero[T](rear12)
-    case Spine(h, rear13) =>
-      rear13
-    case Tip(_) =>
-      q
-  }
-  
-  def pushLeft[T](ys : Single[T], xs : LazyConQ[T], st : Set[LazyConQ[T]]): (ConQ[T], Set[LazyConQ[T]]) = {
-    require(zeroPreceedsLazy[T](xs, st) && ys.isInstanceOf[Single[T]])
-    val dres5 = evalLazyConQ[T](xs, st)
-    dres5._1 match {
-      case Tip(CC(_, _)) =>
-        (Spine[T](ys, xs), dres5._2)
-      case Tip(Empty()) =>
-        (Tip[T](ys), dres5._2)
-      case Tip(t @ Single(_)) =>
-        (Tip[T](CC[T](ys, t)), dres5._2)
-      case s @ Spine(_, _) =>
-        pushLeftLazy[T](ys, xs, dres5._2)
-    }
-  }
-
-  def pushLeftLazyUI[T](ys : Conc[T], xs : LazyConQ[T], st : Set[LazyConQ[T]]): (ConQ[T], Set[LazyConQ[T]]) = ???[(ConQ[T], Set[LazyConQ[T]])]
-  
-//  @library
-  def pushLeftLazy[T](ys : Conc[T], xs : LazyConQ[T], st : Set[LazyConQ[T]]): (ConQ[T], Set[LazyConQ[T]]) =  {
-    require(!ys.isEmpty && zeroPreceedsLazy[T](xs, st) && evalLazyConQS[T](xs).isSpine)
-    val dres = evalLazyConQ[T](xs, st)
-    dres._1 match {
-      case Spine(Empty(), rear14) =>
-        (Spine[T](ys, rear14), dres._2)
-      case Spine(head, rear15) =>
-        val carry = CC[T](head, ys)
-        val dres2 = evalLazyConQ[T](rear15, dres._2)
-        dres2._1 match {
-          case s @ Spine(_, _) =>
-            (Spine[T](Empty[T](), newConQ[T](PushLeftLazy[T](carry, rear15), dres2._2)), dres2._2)
-          case t @ Tip(tree) =>
-            if (tree.level > carry.level) { // can this happen? this would mean tree's level is at least two greater than rear's
-              val x : ConQ[T] = t
-              val y : ConQ[T] = Spine[T](carry, newConQ[T](Lazyarg1[T](x), dres._2))
-              (Spine[T](Empty[T](), newConQ[T](Lazyarg1[T](y), dres2._2)), dres2._2)
-            } else { // here tree level and carry level are equal
-              val x : ConQ[T] = Tip[T](CC[T](tree, carry))
-              val y : ConQ[T] = Spine[T](Empty[T](), newConQ[T](Lazyarg1[T](x), dres._2))
-              (Spine[T](Empty[T](), newConQ[T](Lazyarg1[T](y), dres2._2)), dres2._2)
-            }
-        }
-    }
-  } ensuring {
-    (res66 : (Spine[T], Set[LazyConQ[T]])) => res66._1.isSpine && (res66._2 == st) && (res66._1 match {
-      case Spine(Empty(), rear16) =>
-        evalLazyConQS[T](rear16).isSpine && !res66._2.contains(rear16) &&
-          (firstUnevaluated[T](pushUntilZero[T](rear16), res66._2) == firstUnevaluated[T](xs, res66._2)) ||
-          (isConcrete(xs, res66._2))
-      case Spine(h, rear17) =>
-        firstUnevaluated[T](rear17, res66._2) == firstUnevaluated[T](xs, res66._2)
-      case _ =>
-        true
-    }) && (evalLazyConQS[T](xs) match {
-      case Spine(Empty(), rear) => true
-      case Spine(h, rear) =>
-        firstUnevaluated[T](xs, res66._2) == firstUnevaluated[T](rear, res66._2)
-      case _ => true
-    })
-  }
-  
-  /*def PushLeftLazypushLeftLazyLem[T](rear15 : LazyConQ[T], head : Conc[T], dres : (ConQ[T], Set[LazyConQ[T]]), st : Set[LazyConQ[T]], xs : LazyConQ[T], s : Spine[T], dres : (ConQ[T], Set[LazyConQ[T]]), carry : CC[T], ys : Conc[T]): Boolean =  {
-    (!ys.isEmpty && zeroPreceedsLazy[T](xs, st) && evalLazyConQS[T](xs).isSpine && dres == evalLazyConQ[T](xs, st) && (!dres._1.isInstanceOf[Spine[T]] || !dres._1.head.isInstanceOf[Empty[T]]) && dres._1.isInstanceOf[Spine[T]] && head == dres._1.head && rear15 == dres._1.rear && carry == CC[T](head, ys) && dres == evalLazyConQ[T](rear15, dres._2) && (!dres._1.isInstanceOf[Spine[T]] || !dres._1.head.isInstanceOf[Empty[T]]) && dres._1.isInstanceOf[Spine[T]] && s == dres._1) ==> (!carry.isEmpty && zeroPreceedsLazy[T](rear15, dres._2) && evalLazyConQS[T](rear15).isSpine)
-  } ensuring {
-    (holds : Boolean) => holds
-  }*/
-
-  def streamContains[T](l: LazyConQ[T], newl: LazyConQ[T]) : Boolean = {
-    (l == newl) || (evalLazyConQS[T](l) match {
-      case Spine(_, tail) =>
-        streamContains(tail, newl)
-      case _ => false
-    })
-  }
-  
-  // monotonicity of firstUnevaluated (fune)
-  def funeMonotone[T](st1 : Set[LazyConQ[T]], st2 : Set[LazyConQ[T]], l : LazyConQ[T], newl : LazyConQ[T]): Boolean = {
-    require(st2 == st1 ++ Set(newl) &&
-      !streamContains(l, newl))
-    (firstUnevaluated(l, st1) == firstUnevaluated(l, st2)) && // property
-      // induction scheme
-      (evalLazyConQS[T](l) match {
-        case Spine(_, tail) =>
-          funeMonotone(st1, st2, tail, newl)
-        case _ =>
-          true
-      })
-  } holds
-
-  // isConcrete monotonicity
-  def concreteMonotone[T](st1 : Set[LazyConQ[T]], st2 : Set[LazyConQ[T]], l : LazyConQ[T]) : Boolean = {   
-    ((isConcrete(l, st1) && st1.subsetOf(st2)) ==> isConcrete(l, st2)) && {
-      // induction scheme
-      evalLazyConQS[T](l) match {
-        case Spine(_, tail) =>
-          concreteMonotone[T](st1, st2, tail)
-        case _ =>
-          true
-      }
-    }
-  } holds
-
-  @library // To be proven
-  def schedMonotone[T](st1: Set[LazyConQ[T]], st2 : Set[LazyConQ[T]], scheds: Scheds[T], l : LazyConQ[T], newl: LazyConQ[T]) : Boolean = {  
-    require((st2 == st1 ++ Set(newl)) && 
-      !streamContains(l, newl) && // newl is not contained in 'l'
-      schedulesProperty(l, scheds, st1)
-      ) 
-    concreteMonotone(st1, st2, l) && funeMonotone(st1, st2, l, newl) &&   //instantiations   
-       schedulesProperty(l, scheds, st2) &&  //property
-       //induction scheme
-       (scheds match {
-          case Cons(head, tail) => 
-            schedMonotone(st1, st2, tail, pushUntilZero(head), newl)
-          case Nil() => true
-        })      
-  } holds
-  
-  //@library
-  def pushLeftWrapperSub[T](ys : Single[T], w : Wrapper[T], st : Set[LazyConQ[T]]): ((ConQ[T], Scheds[T]), Set[LazyConQ[T]]) = {
-    require(w.valid(st) && ys.isInstanceOf[Single[T]])
-    val nq2 = pushLeft[T](ys, w.queue, st)    
-    val nsched = nq2._1 match {
-      case Spine(Empty(), rear18) =>
-        Cons[T](rear18, w.schedule)
-      case Spine(_, _) =>
-        w.schedule
-      case t : Tip[T] =>
-        Nil[T]() // we can ignore the tip here
-    }
-    ((nq2._1, nsched), nq2._2)
-  } ensuring { res =>
-    res._1._2 match {
-      case Cons(head, tail) =>
-        res._1._1 match {
-          case Spine(_, rear) =>
-            /*evalLazyConQS[T](head).isSpine &&*/ firstUnevaluated[T](rear, res._2) == head && schedulesProperty[T](pushUntilZero[T](head), tail, res._2)
-        }
-      case _ =>
-        res._1._1 match {
-          case Spine(h, rear) =>
-            isConcrete[T](rear, res._2)
-          case _ => true
-        }
-    }
-  }
-
-  @library
-  def newLazyCons[T](q: ConQ[T], st : Set[LazyConQ[T]]) : LazyConQ[T] = {
-    newConQ[T](Lazyarg1(q), st)
-  } ensuring(r => q match {
-      case Spine(_, rear) =>
-        !streamContains(rear, r)
-      case _ => true
-    })
-
-//  @library
-  def pushLeftWrapper[T](ys : Single[T], w : Wrapper[T], st : Set[LazyConQ[T]]): (LazyConQ[T], Scheds[T], Set[LazyConQ[T]]) = {
-    require(w.valid(st) && ys.isInstanceOf[Single[T]])
-    val ((nq, nsched), nst) = pushLeftWrapperSub(ys, w, st)
-    val lq = newLazyCons(nq, nst)
-    val (_, rst) = evalLazyConQ(lq, nst)
-    (lq, nsched, rst)
-  } ensuring { res =>
-    schedulesProperty(res._1, res._2, res._3) &&
-      // instantiations
-      (evalLazyConQS(res._1) match {
-        case Spine(_, rear) =>
-          schedMonotone(st, res._3, res._2, rear, res._1)
-        case _ => true
-      })
-  }
-
-  @library
-  def dummyAxiom[T](l: LazyConQ[T], nl: LazyConQ[T]) : Boolean = {
-    !streamContains(l, nl)
-  } holds
-
-  def funeCompose[T](st1: Set[LazyConQ[T]], st2: Set[LazyConQ[T]], q : LazyConQ[T]) : Boolean = {
-    require(st1.subsetOf(st2))
-    (firstUnevaluated(firstUnevaluated(q, st1), st2) == firstUnevaluated(q, st2)) && //property
-    //induction scheme
-      (evalLazyConQS[T](q) match {
-        case Spine(_, tail) =>
-          funeCompose(st1, st2, tail)
-        case _ =>
-          true
-      })
-  } holds
-  
-  //@library
-  def Pay[T](q : LazyConQ[T], scheds : Scheds[T], st : Set[LazyConQ[T]]): (Scheds[T], Set[LazyConQ[T]]) = {
-    require(schedulesProperty(q, scheds, st) && st.contains(q))    
-    val (nschs, rst) = scheds match {
-      case c @ Cons(head, rest) =>
-        val (headval, st2) = evalLazyConQ(head, st)
-        (headval match {
-          case Spine(Empty(), rear) =>
-            Cons(rear, rest) 
-          case Spine(_, _) =>
-            rest // in this case: firstUnevaluated[T](rear, st) == rhead
-          case _ =>
-            Nil[T]()
-        }, st2)
-      case Nil() =>        
-        (scheds, st)
-    }
-    (nschs, rst)
-  } ensuring {res => schedulesProperty(q, res._1, res._2) &&      
-     // instantiations (relating rhead and head)
-     (res._1 match {
-      case Cons(rhead, rtail) =>                  
-          (scheds match {
-              case Cons(head, _) =>                            
-                dummyAxiom(pushUntilZero(rhead), head) &&   
-                schedMonotone(st, res._2, rtail, pushUntilZero(rhead), head) && 
-                funeCompose(st, res._2, q)
-              case _ => true 
-          })        
-      case _ => true          
-     }) /*&&
-     zeroPreceedsLazy(q, res._2) && 
-     // instantiations for zeroPreceedsLazy
-     zeroPredLazyMonotone(st, res._2, q)*/
-  }
-
-  def pushLeftAndPay[T](ys : Single[T], w : Wrapper[T], st : Set[LazyConQ[T]]): (Wrapper[T], Set[LazyConQ[T]]) = {
-    require(w.valid(st) && ys.isInstanceOf[Single[T]])
-    val (q, scheds, nst) =  pushLeftWrapper(ys, w, st)
-    val (nscheds, fst) = Pay(q, scheds, nst)
-    (Wrapper(q, nscheds), fst)
-  } ensuring {res =>  res._1.valid(res._2) }
-  
-  def lazyarg1[T](x : ConQ[T]): ConQ[T] = x
-}
-
-object ConQ {  
-  
-  abstract class LazyConQ[T1]
-  
-  case class Lazyarg1[T](x : ConQ[T]) extends LazyConQ[T]
-  
-  case class PushLeftLazy[T](ys : Conc[T], xs : LazyConQ[T]) extends LazyConQ[T]
-  
-  @library
-  def newConQ[T1](cc : LazyConQ[T1], st : Set[LazyConQ[T1]]): LazyConQ[T1] =  {
-    cc
-  } ensuring {
-    (res : LazyConQ[T1]) => !st.contains(res)
-  }
-  
-  @library
-  def evalLazyConQ[T](cl : LazyConQ[T], st : Set[LazyConQ[T]]): (ConQ[T], Set[LazyConQ[T]]) =  {
-    cl match {
-      case t : Lazyarg1[T] =>
-        (t.x, (st ++ Set[LazyConQ[T]](t)))
-      case t : PushLeftLazy[T] =>
-        (pushLeftLazyUI[T](t.ys, t.xs, Set[LazyConQ[T]]())._1, (st ++ Set[LazyConQ[T]](t)))
-    }
-  } ensuring {
-    (res : (ConQ[T], Set[LazyConQ[T]])) => (cl match {
-      case t : Lazyarg1[T] =>
-        //res._1.isSpine 
-        res._1 match {
-          case _ : Tip[T] => true
-          case Spine(Empty(), _) => true //TODO: make this inferrable
-          case _ =>  false
-        }        
-      case t : PushLeftLazy[T] =>
-        res._1.isSpine && res._2 == st && (res._1 match {
-          case Spine(Empty(), rear19) =>
-            (firstUnevaluated[T](pushUntilZero[T](rear19), res._2) == firstUnevaluated[T](t.xs, res._2)) ||
-              isConcrete(t.xs, res._2)
-          case Spine(h, rear20) =>
-            firstUnevaluated[T](rear20, res._2) == firstUnevaluated[T](t.xs, res._2) 
-          case _ =>
-            true
-        })
-     }) && (
-      res._1 match {
-        case Spine(_, rear) => 
-          !res._2.contains(rear) // rear would remain unevaluated
-        case _ => true
-      }
-    )
-  }
-  
-  def evalLazyConQS[T](cl : LazyConQ[T]): ConQ[T] = evalLazyConQ[T](cl, Set[LazyConQ[T]]())._1
- 
-}
diff --git a/src/main/scala/leon/invariant/util/CallGraph.scala b/src/main/scala/leon/invariant/util/CallGraph.scala
index 9c8d1359b0f833d613aded2b517cd467e80ba759..7f49738744db2aa663dc1ca6bdc891b444c8dc66 100644
--- a/src/main/scala/leon/invariant/util/CallGraph.scala
+++ b/src/main/scala/leon/invariant/util/CallGraph.scala
@@ -60,12 +60,12 @@ class CallGraph {
 
   /**
    * Sorting functions in reverse topological order.
-   * For functions within an SCC, we preserve the initial order 
+   * For functions within an SCC, we preserve the initial order
    * given as input
    */
-  def reverseTopologicalOrder(initOrder: Seq[FunDef]): Seq[FunDef] = {  
+  def reverseTopologicalOrder(initOrder: Seq[FunDef]): Seq[FunDef] = {
     val orderMap = initOrder.zipWithIndex.toMap
-    graph.sccs.flatMap{scc => scc.sortWith((f1, f2) => orderMap(f1) <= orderMap(f2)) }    
+    graph.sccs.flatMap{scc => scc.sortWith((f1, f2) => orderMap(f1) <= orderMap(f2)) }
   }
 
   override def toString: String = {
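Aside: the hunk above only strips trailing whitespace, but the surrounding doc comment states the ordering contract of reverseTopologicalOrder. A minimal, self-contained sketch of that idea (the object name ReverseTopoSketch, the use of String in place of FunDef, and the hard-coded SCC list are assumptions for illustration, not Leon's API): SCCs are emitted in the order the graph provides them, assumed to be reverse topological, and the caller-supplied order is preserved inside each SCC.

object ReverseTopoSketch {
  def reverseTopologicalOrder(initOrder: Seq[String], sccs: Seq[Set[String]]): Seq[String] = {
    val orderMap = initOrder.zipWithIndex.toMap      // position of each function in the input order
    sccs.flatMap(scc => scc.toSeq.sortBy(orderMap))  // keep the input order within each SCC
  }

  def main(args: Array[String]): Unit = {
    // f and g are mutually recursive (one SCC); both call h (a singleton SCC).
    val sccs = Seq(Set("h"), Set("f", "g"))          // callees first, i.e. reverse topological
    println(reverseTopologicalOrder(Seq("f", "g", "h"), sccs)) // List(h, f, g)
  }
}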
diff --git a/src/main/scala/leon/invariant/util/TreeUtil.scala b/src/main/scala/leon/invariant/util/TreeUtil.scala
index b5650b7b2e1a65461664eeb6365f4f3b7fbf2379..88dabc505faf476cef5e6fae516e6d90e093d676 100644
--- a/src/main/scala/leon/invariant/util/TreeUtil.scala
+++ b/src/main/scala/leon/invariant/util/TreeUtil.scala
@@ -189,7 +189,7 @@ object ProgramUtil {
                  uniqueIdDisplay: Boolean = true, excludeLibrary: Boolean = true): Program = {
 
     val funMap = functionsWOFields(prog.definedFunctions).foldLeft(Map[FunDef, FunDef]()) {
-      case (accMap, fd) if fd.isTheoryOperation || fd.isLibrary =>
+      case (accMap, fd) if fd.isTheoryOperation || fd.isLibrary || fd.isInvariant =>
         accMap + (fd -> fd)
       case (accMap, fd) => {
         val freshId = FreshIdentifier(fd.id.name, fd.returnType, uniqueIdDisplay)
@@ -232,7 +232,7 @@ object ProgramUtil {
   }
 
   def functionsWOFields(fds: Seq[FunDef]): Seq[FunDef] = {
-    fds.filter(_.isRealFunction)
+    fds.filter(fd => fd.isRealFunction)
   }
 
   def translateExprToProgram(ine: Expr, currProg: Program, newProg: Program): Expr = {
@@ -307,12 +307,12 @@ object PredicateUtil {
   def isTemplateExpr(expr: Expr): Boolean = {
     var foundVar = false
     postTraversal {
-      case e @ Variable(id) => 
+      case e @ Variable(id) =>
         if (!TemplateIdFactory.IsTemplateIdentifier(id))
-          foundVar = true              
-      case e @ ResultVariable(_) => 
-        foundVar = true              
-      case e => 
+          foundVar = true
+      case e @ ResultVariable(_) =>
+        foundVar = true
+      case e =>
     }(expr)
     !foundVar
   }
diff --git a/src/main/scala/leon/laziness/LazinessEliminationPhase.scala b/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
index 8a5347ad279139b808d72bc967ce2dcfe3d76a15..9c1abab5381882451a3a0769b590301c94d4f81a 100644
--- a/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
+++ b/src/main/scala/leon/laziness/LazinessEliminationPhase.scala
@@ -38,13 +38,13 @@ import leon.invariant.engine._
 import LazyVerificationPhase._
 import utils._
 import java.io.
-_
+  _
 /**
  * TODO: Function names are assumed to be small case. Fix this!!
  */
 object LazinessEliminationPhase extends TransformationPhase {
-  val debugLifting = false
   val dumpInputProg = false
+  val debugLifting = false
   val dumpLiftProg = false
   val dumpProgramWithClosures = false
   val dumpTypeCorrectProg = false
@@ -78,50 +78,43 @@ object LazinessEliminationPhase extends TransformationPhase {
 
     // refEq is by default false
     val nprog = LazyExpressionLifter.liftLazyExpressions(prog, ctx.findOption(optRefEquality).getOrElse(false))
-    if (dumpLiftProg) {
+    if (dumpLiftProg)
       prettyPrintProgramToFile(nprog, ctx, "-lifted")
-    }
 
     val funsManager = new LazyFunctionsManager(nprog)
     val closureFactory = new LazyClosureFactory(nprog)
     val progWithClosures = (new LazyClosureConverter(nprog, ctx, closureFactory, funsManager)).apply
-    if (dumpProgramWithClosures) {
-      //println("After closure conversion: \n" + ScalaPrinter.apply(progWithClosures, purescala.PrinterOptions(printUniqueIds = true)))
+    if (dumpProgramWithClosures)
       prettyPrintProgramToFile(progWithClosures, ctx, "-closures")
-    }
 
     //Rectify type parameters and local types
     val typeCorrectProg = (new TypeRectifier(progWithClosures, closureFactory)).apply
     if (dumpTypeCorrectProg)
-      println("After rectifying types: \n" + ScalaPrinter.apply(typeCorrectProg))
+      prettyPrintProgramToFile(typeCorrectProg, ctx, "-typed")
 
-    val progWithPre =  (new ClosurePreAsserter(typeCorrectProg)).apply
-    if (dumpProgWithPreAsserts) {
-      //println("After asserting closure preconditions: \n" + ScalaPrinter.apply(progWithPre))
+    val progWithPre = (new ClosurePreAsserter(typeCorrectProg)).apply
+    if (dumpProgWithPreAsserts)
       prettyPrintProgramToFile(progWithPre, ctx, "-withpre", uniqueIds = true)
-    }
 
     // verify the contracts that do not use resources
     val progWOInstSpecs = InliningPhase.apply(ctx, removeInstrumentationSpecs(progWithPre))
-    if (dumpProgWOInstSpecs) {
-      //println("After removing instrumentation specs: \n" + ScalaPrinter.apply(progWOInstSpecs))
+    if (dumpProgWOInstSpecs)
       prettyPrintProgramToFile(progWOInstSpecs, ctx, "-woinst")
-    }
+
     val checkCtx = contextForChecks(ctx)
     if (!skipStateVerification)
       checkSpecifications(progWOInstSpecs, checkCtx)
 
     // instrument the program for resources (note: we avoid checking preconditions again here)
-    val instrumenter = new LazyInstrumenter(InliningPhase.apply(ctx, typeCorrectProg), closureFactory)
+    val instrumenter = new LazyInstrumenter(InliningPhase.apply(ctx, typeCorrectProg), ctx, closureFactory)
     val instProg = instrumenter.apply
-    if (dumpInstrumentedProgram) {
-      //println("After instrumentation: \n" + ScalaPrinter.apply(instProg))
+    if (dumpInstrumentedProgram)
       prettyPrintProgramToFile(instProg, ctx, "-withinst", uniqueIds = true)
-    }
+
     // check specifications (to be moved to a different phase)
     if (!skipResourceVerification)
       checkInstrumentationSpecs(instProg, checkCtx,
-          checkCtx.findOption(LazinessEliminationPhase.optUseOrb).getOrElse(false))
+        checkCtx.findOption(LazinessEliminationPhase.optUseOrb).getOrElse(false))
     // dump stats
     if (ctx.findOption(SharedOptions.optBenchmark).getOrElse(false)) {
       val modid = prog.units.find(_.isMainUnit).get.id
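Aside: the calls in this hunk chain the laziness-elimination phases (expression lifting, closure conversion, type rectification, closure-precondition assertion, inlining, instrumentation) and optionally pretty-print each intermediate program. A minimal, hypothetical sketch of that pipeline shape, not Leon's API (PipelineSketch, Phase, and the String stand-in for Program are assumptions for illustration):

object PipelineSketch {
  type Program = String                                        // stand-in for Leon's Program
  final case class Phase(name: String, dump: Boolean, run: Program => Program)

  def runPipeline(prog: Program, phases: Seq[Phase]): Program =
    phases.foldLeft(prog) { (p, phase) =>
      val next = phase.run(p)
      if (phase.dump) println(s"After ${phase.name}:\n" + next) // analogous to prettyPrintProgramToFile
      next
    }

  def main(args: Array[String]): Unit = {
    val phases = Seq(
      Phase("lifting", dump = false, p => p + " |lifted"),
      Phase("closures", dump = true, p => p + " |closures"),
      Phase("typing", dump = false, p => p + " |typed"))
    println(runPipeline("prog", phases))                       // prog |lifted |closures |typed
  }
}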
diff --git a/src/main/scala/leon/laziness/LazinessUtil.scala b/src/main/scala/leon/laziness/LazinessUtil.scala
index 39c7505c534d7615ee9536f4dd10a1b14546b168..903ef54e9959cc5165f2d3fdda4cedae8ad23186 100644
--- a/src/main/scala/leon/laziness/LazinessUtil.scala
+++ b/src/main/scala/leon/laziness/LazinessUtil.scala
@@ -54,7 +54,7 @@ object LazinessUtil {
         val out = new BufferedWriter(new FileWriter(outputFile))
         // remove '@' from the end of the identifier names
         val pat = new Regex("""(\w+)(@)(\w*)(\*?)(\S*)""", "base", "at", "mid", "star", "rest")
-        val pgmText = pat.replaceAllIn(ScalaPrinter.apply(p, purescala.PrinterOptions(printUniqueIds = uniqueIds)),
+        val pgmText = pat.replaceAllIn(ScalaPrinter.apply(u, purescala.PrinterOptions(printUniqueIds = uniqueIds)),
           m => m.group("base") + m.group("mid") + (
             if (!m.group("star").isEmpty()) "S" else "") + m.group("rest"))
         //val pgmText = ScalaPrinter.apply(p)
@@ -171,7 +171,7 @@ object LazinessUtil {
     case ctype @ CaseClassType(_, Seq(innerType)) if isLazyType(ctype) || isMemType(ctype) =>
       Some(innerType)
     case _ => None
-  }  
+  }
 
   def opNameToCCName(name: String) = {
     name.capitalize + "@"
diff --git a/src/main/scala/leon/laziness/LazyClosureConverter.scala b/src/main/scala/leon/laziness/LazyClosureConverter.scala
index 84d6344176d44f50ebe5df6e528340b8e22f6fec..a47855f267eeabfe2d30587a3b0a7720a1372f27 100644
--- a/src/main/scala/leon/laziness/LazyClosureConverter.scala
+++ b/src/main/scala/leon/laziness/LazyClosureConverter.scala
@@ -63,7 +63,7 @@ class LazyClosureConverter(p: Program, ctx: LeonContext,
   }
 
   val funMap = p.definedFunctions.collect {
-    case fd if (fd.hasBody && !fd.isLibrary) =>
+    case fd if (fd.hasBody && !fd.isLibrary && !fd.isInvariant) => // skipping class invariants for now.
       // replace lazy types in parameters and return values
       val nparams = fd.params map { vd =>
         val nparam = makeIdOfType(vd.id, replaceLazyTypes(vd.getType))
@@ -135,7 +135,7 @@ class LazyClosureConverter(p: Program, ctx: LeonContext,
         Equals(valres.id.toVariable,
           FunctionInvocation(TypedFunDef(ufd, tparams), params.map(_.id.toVariable))), // res = funUI(..)
         Lambda(Seq(ValDef(resid)), resid.toVariable)) // holds
-      pred.addFlag(Annotation("axiom", Seq())) // mark it as @library
+      pred.addFlag(Annotation("axiom", Seq())) // @axiom is similar to @library
       (k -> (ufd, Some(pred)))
 
     case (k, v) if lazyops(k) =>
diff --git a/src/main/scala/leon/laziness/LazyExpressionLifter.scala b/src/main/scala/leon/laziness/LazyExpressionLifter.scala
index acc5598e3a3f96abf1cb27673aeb4cecb61b7708..96683d6114b2d58344b1d091098a431962cd1cb2 100644
--- a/src/main/scala/leon/laziness/LazyExpressionLifter.scala
+++ b/src/main/scala/leon/laziness/LazyExpressionLifter.scala
@@ -62,7 +62,7 @@ object LazyExpressionLifter {
 
     var newfuns = Map[ExprStructure, (FunDef, ModuleDef)]()
     val fdmap = prog.definedFunctions.collect {
-      case fd if !fd.isLibrary =>
+      case fd if !fd.isLibrary && !fd.isInvariant =>
         val nname = FreshIdentifier(fd.id.name)
         val nfd =
           if (createUniqueIds && needsId(fd)) {
@@ -152,7 +152,7 @@ object LazyExpressionLifter {
         case e => e
       }
       md.definedFunctions.foreach {
-        case fd if fd.hasBody && !fd.isLibrary =>
+        case fd if fd.hasBody && !fd.isLibrary && !fd.isInvariant =>
           // create a free list iterator
           val nfd = fdmap(fd)
           val fliter =
diff --git a/src/main/scala/leon/laziness/LazyInstrumenter.scala b/src/main/scala/leon/laziness/LazyInstrumenter.scala
index c3b6c7ae438b7cdbfd74986ef957ffdd5c0067fe..764a5606f74ab86edf11d6df81a07681519d6b87 100644
--- a/src/main/scala/leon/laziness/LazyInstrumenter.scala
+++ b/src/main/scala/leon/laziness/LazyInstrumenter.scala
@@ -26,12 +26,11 @@ import leon.Main
 import leon.transformations._
 import LazinessUtil._
 
-class LazyInstrumenter(p: Program, clFactory: LazyClosureFactory) {
+class LazyInstrumenter(p: Program, ctx: LeonContext, clFactory: LazyClosureFactory) {
 
   val exprInstFactory = (x: Map[FunDef, FunDef], y: SerialInstrumenter, z: FunDef) =>
       new LazyExprInstrumenter(x, y)(z)
   val serialInst = new SerialInstrumenter(p, Some(exprInstFactory))
-
   /*def funsWithInstSpecs  = {
     serialInst.instToInstrumenter.values.flatMap{inst =>
       inst.getRootFuncs(p)
diff --git a/src/main/scala/leon/laziness/LazyVerificationPhase.scala b/src/main/scala/leon/laziness/LazyVerificationPhase.scala
index 242e07338f1832fe9c5d898b1c6c64a4a857f4cf..dcc0384d73175de68d1c4984e594c275dbd3e99b 100644
--- a/src/main/scala/leon/laziness/LazyVerificationPhase.scala
+++ b/src/main/scala/leon/laziness/LazyVerificationPhase.scala
@@ -39,7 +39,7 @@ import leon.invariant.engine._
 object LazyVerificationPhase {
 
   val debugInstVCs = false
-  val debugInferProgram = true
+  val debugInferProgram = false
 
   def removeInstrumentationSpecs(p: Program): Program = {
     def hasInstVar(e: Expr) = {
@@ -136,7 +136,7 @@ object LazyVerificationPhase {
    * Note: we also skip verification of uninterpreted functions
    */
   def shouldGenerateVC(fd: FunDef) = {
-    !fd.isLibrary && InstUtil.isInstrumented(fd) && fd.hasBody &&
+    !fd.isInvariant && !fd.isLibrary && InstUtil.isInstrumented(fd) && fd.hasBody &&
       fd.postcondition.exists { post =>
         val Lambda(Seq(resdef), pbody) = post
         accessesSecondRes(pbody, resdef.id)
diff --git a/src/main/scala/leon/laziness/TypeRectifier.scala b/src/main/scala/leon/laziness/TypeRectifier.scala
index ac5c69d9ae7a50f7897d2f5d548386d6ee97a611..a048a969afd4fa9dcf091c5465494e2880bbc5a8 100644
--- a/src/main/scala/leon/laziness/TypeRectifier.scala
+++ b/src/main/scala/leon/laziness/TypeRectifier.scala
@@ -41,15 +41,9 @@ import invariant.util.ProgramUtil._
 class TypeRectifier(p: Program, clFactory: LazyClosureFactory) {
 
   val typeClasses = {
-    var tc = new DisjointSets[TypeTree]() /*{
-      override def union(x: TypeTree, y: TypeTree) {
-        if (!x.isInstanceOf[TypeParameter] || !y.isInstanceOf[TypeParameter])
-          throw new IllegalStateException(s"Unifying: $x and $y")
-        super.union(x, y)
-      }
-    }*/
+    var tc = new DisjointSets[TypeTree]()
     p.definedFunctions.foreach {
-      case fd if fd.hasBody && !fd.isLibrary =>
+      case fd if fd.hasBody && !fd.isLibrary && !fd.isInvariant =>
         postTraversal {
           case call @ FunctionInvocation(TypedFunDef(fd, tparams), args) =>
             // unify formal type parameters with actual type arguments
@@ -83,8 +77,6 @@ class TypeRectifier(p: Program, clFactory: LazyClosureFactory) {
                       tc.union(t1, t2)
                     case _ =>
                   }
-                //                                case (tf: TypeParameter, ta: TypeParameter) =>
-                //                                  tc.union(tf, ta)
                 case (t1, t2) =>
                 /*throw new IllegalStateException(s"Types of formal and actual parameters: ($tf, $ta)"
                     + s"do not match for call: $call")*/
@@ -114,7 +106,7 @@ class TypeRectifier(p: Program, clFactory: LazyClosureFactory) {
   val equivTypeParams = typeClasses.toMap
 
   val fdMap = p.definedFunctions.collect {
-    case fd if !fd.isLibrary =>
+    case fd if !fd.isLibrary && !fd.isInvariant =>
       val (tempTPs, otherTPs) = fd.tparams.map(_.tp).partition {
         case tp if isPlaceHolderTParam(tp) => true
         case _ => false
@@ -145,7 +137,6 @@ class TypeRectifier(p: Program, clFactory: LazyClosureFactory) {
       } map TypeParameterDef
       val nfd = new FunDef(fd.id.freshen, ntparams, fd.params.map(vd => ValDef(paramMap(vd.id))),
         instf(fd.returnType))
-      //println("New fd: "+nfd)
       fd -> (nfd, tpMap, paramMap)
   }.toMap
 
diff --git a/testcases/lazy-datastructures/memoization/WeightedScheduling.scala b/testcases/lazy-datastructures/memoization/WeightedScheduling.scala
index 4ddbf9e29fe0c6b6846dfc55c9f89f896f45f051..cf2fa7508ef1f3ffa9fe686a18278c3a270ef925 100644
--- a/testcases/lazy-datastructures/memoization/WeightedScheduling.scala
+++ b/testcases/lazy-datastructures/memoization/WeightedScheduling.scala
@@ -69,7 +69,7 @@ object WeightedSched {
   @memoize
   def sched(jobIndex: BigInt): BigInt = {
     require(depsEval(jobIndex) &&
-        (jobIndex == 0 || evalLem(prevCompatibleJob(jobIndex), jobIndex-1)))
+        (jobIndex == 0 || prevCompatibleJob(jobIndex) >= 0)) //evalLem(prevCompatibleJob(jobIndex), jobIndex-1)))
     val (st, fn, w) = jobInfo(jobIndex)
     if(jobIndex == 0) w
     else {
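Aside: the tweak above only weakens the precondition of the memoized sched; the benchmark appears to implement the classic weighted interval scheduling recurrence. A hedged, self-contained sketch of that recurrence (plain Scala memoization over jobs sorted by finish time; the names and the example data are illustrative assumptions, not the benchmark's code):

object WeightedSchedSketch {
  // jobs sorted by finish time: (start, finish, weight)
  val jobs = Vector((0, 3, 4), (1, 5, 2), (4, 6, 4), (5, 8, 7))

  // index of the latest job finishing no later than jobs(j) starts, or -1 if none
  def prevCompatibleJob(j: Int): Int =
    (0 until j).lastIndexWhere(i => jobs(i)._2 <= jobs(j)._1)

  val memo = scala.collection.mutable.Map[Int, BigInt]()

  // best total weight achievable using jobs 0..j
  def sched(j: Int): BigInt =
    if (j < 0) BigInt(0)
    else memo.getOrElse(j, {
      val take = BigInt(jobs(j)._3) + sched(prevCompatibleJob(j)) // schedule job j
      val skip = sched(j - 1)                                     // or leave it out
      val best = take max skip
      memo(j) = best
      best
    })

  def main(args: Array[String]): Unit =
    println(sched(jobs.length - 1)) // 11: jobs 0 and 3 (weights 4 + 7)
}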
diff --git a/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala b/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
index 78eff9bbe6c7f0a2c0ffa7cdba3ac385c4f11649..76f6bae6bba6e3d7f24a5741cbbd9c638642e29d 100644
--- a/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
+++ b/testcases/lazy-datastructures/withOrb/SortingnConcat-orb.scala
@@ -71,15 +71,14 @@ object SortingnConcat {
     }
   } ensuring (_ => time <= ? * ssize(l) + ?)
 
-  // Orb can prove this
-  def kthMin(l: $[LList], k: BigInt): BigInt = {
+/*  def kthMin(l: $[LList], k: BigInt): BigInt = {
     require(k >= 1)
     l.value match {
       case SCons(x, xs) =>
         if (k == 1) x
         else
           kthMin(xs, k - 1)
-      case SNil() => BigInt(0) // None[BigInt]
+      case SNil() => BigInt(0)
     }
-  } ensuring (_ => time <= ? * k * ssize(l) + ? * k + ?)
+  } ensuring (_ => time <= ? * k * ssize(l) + ? * k + ?)*/
 }