diff --git a/src/main/scala/leon/invariant/structure/Constraint.scala b/src/main/scala/leon/invariant/structure/Constraint.scala
index 39fd20bca11971ba0feadf26e20331970f9a9ca7..2a12f69494e20ae531c5f326b5d92eae5b8b60ce 100644
--- a/src/main/scala/leon/invariant/structure/Constraint.scala
+++ b/src/main/scala/leon/invariant/structure/Constraint.scala
@@ -250,6 +250,49 @@ case class Call(retexpr: Expr, fi: FunctionInvocation) extends Constraint {
   override def toExpr = expr
 }
 
+object SetConstraint {
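+  // A set constraint is an equality whose left-hand side is a set-typed
+  // variable and whose right-hand side is one of the supported set operations
+  // (union, finite-set literal, membership, subset) or another variable;
+  // isSetConstraint additionally accepts a top-level negation of such an
+  // equality. For example (illustration only), `s == s1 ++ s2` with `s` of
+  // set type is recognized, `++` on sets being SetUnion.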
+  def setConstraintOfBase(e: Expr) = e match {
+    case Equals(lhs@Variable(_), rhs) if lhs.getType.isInstanceOf[SetType] =>
+      rhs match {
+        case SetUnion(_, _) | FiniteSet(_, _) | ElementOfSet(_, _) | SubsetOf(_, _) | Variable(_) =>
+          true
+        case _ => false
+      }
+    case _ => false
+  }
+
+  def isSetConstraint(e: Expr) = {
+    val base = e match {
+      case Not(b) => b
+      case _ => e
+    }
+    setConstraintOfBase(base)
+  }
+}
+
+case class SetConstraint(expr: Expr) extends Constraint {
+  var union = false
+  var newset = false
+  var equal = false
+  var elemof = false
+  var subset = false
+  // TODO: add more operations here
+  // strip a possible top-level negation, since isSetConstraint (and hence
+  // ConstraintUtil) may pass in a negated set equality
+  (expr match {
+    case Not(b) => b
+    case _ => expr
+  }) match {
+    case Equals(Variable(_), rhs) =>
+      rhs match {
+        case SetUnion(_, _) => union = true
+        case FiniteSet(_, _) => newset = true
+        case ElementOfSet(_, _) => elemof = true
+        case SubsetOf(_, _) => subset = true
+        case Variable(_) => equal = true
+      }
+  }
+  override def toString(): String = {
+    expr.toString
+  }
+  override def toExpr = expr
+}
+
 object ConstraintUtil {
 
   def createConstriant(ie: Expr): Constraint = {
@@ -264,6 +307,9 @@ object ConstraintUtil {
 
         ADTConstraint(ie)
       }
+      case _ if SetConstraint.isSetConstraint(ie) =>
+        SetConstraint(ie)
+      // every other equality will be considered an ADT constraint (including TypeParameter equalities)
       case Equals(lhs, rhs) if (lhs.getType != Int32Type && lhs.getType != RealType && lhs.getType != IntegerType) => {
         //println("ADT constraint: "+ie)
         ADTConstraint(ie)
diff --git a/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala b/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
index 90cffa3f181d11deb499a06ec538a515ed164ac1..b1112a36da5a4e9ee6e3622c2a107ab70f1f9b9e 100644
--- a/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
+++ b/src/main/scala/leon/invariant/templateSolvers/NLTemplateSolver.scala
@@ -589,6 +589,7 @@ class NLTemplateSolver(ctx: InferenceContext, program: Program,
     satCtrs.foreach {
       case t: Call => calls += t
       case t: ADTConstraint if (t.cons.isDefined) => cons += t.cons.get
+      // TODO: ignoring all set constraints here, fix this
       case _ => ;
     }
     val callExprs = calls.map(_.toExpr)
diff --git a/src/main/scala/leon/invariant/templateSolvers/TemplateSolver.scala b/src/main/scala/leon/invariant/templateSolvers/TemplateSolver.scala
index a85a7d4f5b9f55af812da50d577ec36d350b7ce5..032ace5c3a38e2d1ebf9b2d7a86e5844605cac78 100644
--- a/src/main/scala/leon/invariant/templateSolvers/TemplateSolver.scala
+++ b/src/main/scala/leon/invariant/templateSolvers/TemplateSolver.scala
@@ -25,7 +25,7 @@ abstract class TemplateSolver(ctx: InferenceContext, val rootFun: FunDef,
   protected val tru = BooleanLiteral(true)
   //protected val zero = IntLiteral(0)
 
-  private val dumpVCtoConsole = false
+  private val dumpVCtoConsole = true
   private val dumpVCasText = false
 
   /**
diff --git a/src/main/scala/leon/invariant/util/ExpressionTransformer.scala b/src/main/scala/leon/invariant/util/ExpressionTransformer.scala
index 9bf2889ed103f0e14083a031e5b80c700e5c89d0..58131d249ed07e93b48c2d57b6056735a2f241e1 100644
--- a/src/main/scala/leon/invariant/util/ExpressionTransformer.scala
+++ b/src/main/scala/leon/invariant/util/ExpressionTransformer.scala
@@ -274,6 +274,20 @@ object ExpressionTransformer {
 
           (freshResVar, newConjuncts)
         }
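+        // The set cases below follow the same flattening scheme as other
+        // compound terms: flatten the arguments, bind the rebuilt operation to
+        // a fresh temporary of the expression's type, and record the binding
+        // equality among the generated conjuncts.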
+        case SetUnion(_, _) | ElementOfSet(_, _) | SubsetOf(_, _) =>
+          val Operator(args, op) = e
+          val (Seq(a1, a2), newcjs) = flattenArgs(args, true)
+          val newexpr = op(Seq(a1, a2))
+          val freshResVar = Variable(TVarFactory.createTemp("set", e.getType))
+          (freshResVar, newcjs + Equals(freshResVar, newexpr))
+
+        case fs@FiniteSet(es, typ) =>
+          val args = es.toSeq
+          val (nargs, newcjs) = flattenArgs(args, true)
+          val newexpr = FiniteSet(nargs.toSet, typ)
+          val freshResVar = Variable(TVarFactory.createTemp("fset", fs.getType))
+          (freshResVar, newcjs + Equals(freshResVar, newexpr))
+
         case _ => conjoinWithinClause(e, flattenFunc, insideFunction)
       }
     }
diff --git a/src/test/resources/regression/verification/purescala/invalid/PreInSpecs.scala b/src/test/resources/regression/verification/purescala/invalid/PreInSpecs.scala
new file mode 100644
index 0000000000000000000000000000000000000000..4f184bac1d597ccaac1eee8e9e2a5cdb6f5cae26
--- /dev/null
+++ b/src/test/resources/regression/verification/purescala/invalid/PreInSpecs.scala
@@ -0,0 +1,25 @@
+import leon.lang._
+
+object PreInSpecs {
+
+  def f(i : BigInt): Boolean =  {
+    require(i >= 0)
+    i > 0
+  }
+
+  def g(i : BigInt): Boolean =  {
+    require(i >= 0)
+    i >= -1
+  } holds
+
+  def invoke(i : BigInt): BigInt =  {
+    require(i == 0 || i > 0 && f(i - 1))
+    i + 1
+  } ensuring(res => g(i - 1))  
+
+  def invoke2(i: BigInt): BigInt = {
+    require(g(i))
+    0
+  }
+
+}
diff --git a/src/test/resources/regression/verification/purescala/valid/PreInSpecs.scala b/src/test/resources/regression/verification/purescala/valid/PreInSpecs.scala
new file mode 100644
index 0000000000000000000000000000000000000000..988f7331aac7f839ff01d7550cfa9f72b90ecc71
--- /dev/null
+++ b/src/test/resources/regression/verification/purescala/valid/PreInSpecs.scala
@@ -0,0 +1,25 @@
+import leon.lang._
+
+object PreInSpecs {
+
+  def f(i : BigInt): Boolean =  {
+    require(i >= 0)
+    i > 0
+  }
+
+  def g(i : BigInt): Boolean =  {
+    require(i > 0)
+    i >= -1
+  } holds
+
+  def invoke(i : BigInt): BigInt =  {
+    require(i == 0 || i > 0 && f(i - 1))
+    i + 1
+  } ensuring(res => g(i + 1))  
+
+  def invoke2(i: BigInt): BigInt = {
+    require(i > 0 && g(i))
+    0
+  }
+
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/LazyMegeSort1-Time.scala b/testcases/lazy-datastructures/ManualnOutdated/LazyMegeSort1-Time.scala
new file mode 100644
index 0000000000000000000000000000000000000000..119e95040f7b02ce9b2bb6a8d7b1da3a8d729ab6
--- /dev/null
+++ b/testcases/lazy-datastructures/ManualnOutdated/LazyMegeSort1-Time.scala
@@ -0,0 +1,92 @@
+package lazybenchmarks
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+//import leon.invariant._
+
+object MergeSort1 {
+
+  // TODO: making this parametric will break many things. Fix them
+  sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + ssize(t)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons(x: BigInt, tail: $[LList]) extends LList
+  case class SNil() extends LList
+  def ssize(l: $[LList]): BigInt = (l*).size
+
+  sealed abstract class List {
+    def size: BigInt = {
+      this match {
+        case Cons(_, xs) => 1 + xs.size
+        case _           => BigInt(0)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class Cons(x: BigInt, tail: List) extends List
+  case class Nil() extends List
+
+  def length(l: List): BigInt = {
+    l match {
+      case Nil()       => BigInt(0)
+      case Cons(x, xs) => 1 + length(xs)
+    }
+  } ensuring (res => res >= 0 && res == l.size)
+
+  def split(l: List, n: BigInt): (List, List) = {
+    require(n > 0 && n < l.size)
+    l match {
+      case Nil() => (Nil(), l)
+      case Cons(x, xs) =>
+        if (n == 1) {
+          (Cons(x, Nil()), xs)
+        } else {
+          val (fst, snd) = split(xs, n - 1)
+          (Cons(x, fst), snd)
+        }
+    }
+  } ensuring (res => res._2.size == l.size - n && res._1.size == n && stack <= 25 * l.size - 1)
+
+  /*
+   * Note: merge itself is not recursive, since its recursive calls are
+   * suspended inside closures. However, concretizing those closures may
+   * invoke merge or mergeSort recursively, so proving a standalone bound
+   * for merge requires preconditions.
+   */
+  def merge(a: $[LList], b: $[LList]): LList = (b.value match {
+    case SNil() => a.value
+    case bl @ SCons(x, xs) =>
+      a.value match {
+        case SNil() => bl
+        case SCons(y, ys) =>
+          if (y < x)
+            SCons(y, $(merge(ys, b)))
+          else
+            SCons(x, $(merge(a, xs)))
+      }
+  }) //ensuring (res => ssize(a) + ssize(b) == res.size)
+
+  /**
+   * For proving time, we need a term of order \Omega(n) that satisfies the
+   * strict inverse triangle inequality, i.e., f(a + b) > f(a) + f(b).
+   * Log satisfies this but is very expensive. Is there another function?
+   */
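+  // One candidate (illustration only, not necessarily solver-friendly):
+  // f(n) = n * n is \Omega(n) and strictly superadditive, since
+  // (a + b)^2 = a^2 + 2*a*b + b^2 > a^2 + b^2 whenever a, b > 0.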
+  def mergeSort(l: List): LList = (l match {
+    case Nil()          => SNil()
+    case Cons(x, Nil()) => SCons(x, $(empty))
+    case _ =>
+      val (fst, snd) = split(l, length(l) / 2)
+      merge($(mergeSort(fst)), $(mergeSort(snd)))
+
+  }) ensuring (res => stack <= 81 * l.size + 35) // res.size == l.size
+
+  // TODO: inlining this creates problems. why ?
+  def empty: LList = {
+    SNil()
+  }
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/TestArray.scala b/testcases/lazy-datastructures/ManualnOutdated/TestArray.scala
new file mode 100644
index 0000000000000000000000000000000000000000..61d729a5e02e1455164ad127721a8646a80c5f69
--- /dev/null
+++ b/testcases/lazy-datastructures/ManualnOutdated/TestArray.scala
@@ -0,0 +1,49 @@
+package ManualnOutdated
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import scala.math.BigInt.int2bigInt
+//import leon.invariant._
+
+object TestArray {
+
+  sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + ssize(t)
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons(x: BigInt, tail: $[LList]) extends LList
+  case class SNil() extends LList
+  def ssize(l: $[LList]): BigInt = (l*).size
+
+  sealed abstract class List {
+    def size: BigInt = this match {
+      case Cons(_, xs) => 1 + xs.size
+      case _           => BigInt(0)
+    }
+  }
+  case class Cons(x: BigInt, tail: List) extends List
+  case class Nil() extends List
+
+  def concat(l1: List, l2: LList) : LList = {
+    l1 match {
+      case Cons(x, xs) => SCons(x, $(concat(xs, l2)))
+      case Nil() => SNil()
+    }
+  } ensuring(res => time <= 15)
+
+  @ignore
+  var arr = Array[BigInt]()
+
+  @extern
+  def arrayFun(l1: List, l2: LList, i: BigInt): BigInt = {
+    //require(i >= 0)
+    //Array(concat(l1, l2))
+    arr(i.toInt)
+  }
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/TestGenerics.scala b/testcases/lazy-datastructures/ManualnOutdated/TestGenerics.scala
new file mode 100644
index 0000000000000000000000000000000000000000..4a5971e296dd72a7fda878b71ac84ff6597557bb
--- /dev/null
+++ b/testcases/lazy-datastructures/ManualnOutdated/TestGenerics.scala
@@ -0,0 +1,29 @@
+package ManualnOutdated
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+object TestGenerics {
+
+  sealed abstract class List[T] {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + t.size
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons[T](x: T, tail: List[T]) extends List[T]
+  case class SNil[T]() extends List[T]
+
+  def concat[T](l1: List[T], l2: List[T]) : List[T] = {
+    l1 match {
+      case SCons(x, xs) => SCons(x, concat(xs, l2))
+      case SNil() => SNil[T]()
+    }
+  } ensuring(res => time <= ? * l1.size + ?)
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/TestSets.scala b/testcases/lazy-datastructures/ManualnOutdated/TestSets.scala
new file mode 100644
index 0000000000000000000000000000000000000000..f4a34ee33f13da911b07534063236abbd84a9516
--- /dev/null
+++ b/testcases/lazy-datastructures/ManualnOutdated/TestSets.scala
@@ -0,0 +1,29 @@
+package ManualnOutdated
+
+import leon.lazyeval._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+import leon.invariant._
+
+object TestSets {
+
+  sealed abstract class List[T] {
+    def size: BigInt = {
+      this match {
+        case SNil()      => BigInt(0)
+        case SCons(x, t) => 1 + t.size
+      }
+    } ensuring (_ >= 0)
+  }
+  case class SCons[T](x: T, tail: List[T]) extends List[T]
+  case class SNil[T]() extends List[T]
+
+  def concat[T](l1: List[T], l2: List[T], st: Set[T]) : List[T] = {
+    l1 match {
+      case SCons(x, xs) => SCons(x, concat(xs, l2, st ++ Set[T](x)))
+      case SNil() => SNil[T]()
+    }
+  } ensuring(res => time <= ? * l1.size + ?)
+}
diff --git a/testcases/lazy-datastructures/ManualnOutdated/WeightedScheduling-strategy2.scala b/testcases/lazy-datastructures/ManualnOutdated/WeightedScheduling-strategy2.scala
new file mode 100644
index 0000000000000000000000000000000000000000..198c3988e1e023e3d95fe9bd98ab25bf5fb19e41
--- /dev/null
+++ b/testcases/lazy-datastructures/ManualnOutdated/WeightedScheduling-strategy2.scala
@@ -0,0 +1,135 @@
+import leon.lazyeval._
+import leon.lazyeval.Mem._
+import leon.lang._
+import leon.annotation._
+import leon.instrumentation._
+//import leon.invariant._
+
+object WeightedSched {
+  sealed abstract class IList {
+    def size: BigInt = {
+      this match {
+        case Cons(_, tail) => 1 + tail.size
+        case Nil() => BigInt(0)
+      }
+    } ensuring(_ >= 0)
+  }
+  case class Cons(x: BigInt, tail: IList) extends IList // a list of BigInts; the (start, finish, weight) job data lives in the external `jobs` array
+  case class Nil() extends IList
+
+  /*sealed abstract class LList {
+    def size: BigInt = {
+      this match {
+        case LCons(_, _, tail) => 1 + tail.size
+        case LNil() => BigInt(0)
+      }
+    } ensuring(_ >= 0)
+  }
+  case class LCons(l: IList, pl: LList, tail: LList) extends LList // a list of pointers into IList, and LList
+  case class LNil() extends LList*/
+
+  /**
+   * array of jobs
+   * (a) each job has a start time, finish time, and weight
+   * (b) Jobs are sorted in ascending order of finish times
+   */
+  @ignore
+  var jobs = Array[(BigInt, BigInt, BigInt)]()
+
+  /**
+   * A precomputed mapping from each job i to the previous job j it is compatible with.
+   */
+  @ignore
+  var p = Array[BigInt]()
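+  // Hypothetical illustration (not part of the benchmark): with
+  // jobs = Array((1, 2, 3), (3, 4, 2), (5, 6, 1)), sorted by finish time and
+  // pairwise compatible, we would have p = Array(0, 0, 1); the entry at
+  // index 0 is a placeholder, since prevCompatibleJob is only queried for
+  // indices greater than 0.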
+
+  @extern
+  def jobInfo(i: BigInt) = {
+    jobs(i.toInt)
+  } ensuring(_ => time <= 1)
+
+  @extern
+  def prevCompatibleJob(i: BigInt): BigInt = {
+    //require(i >= 1)
+    p(i.toInt)
+  } ensuring(res => res >= 0 && res < i && time <= 1)
+
+//  @library
+//  def prevCompatLem(i: BigInt) : Boolean ={
+//    require(i >= 1)
+//    val r = prevCompatibleJob(i)
+//    r >= 0 && r < i
+//  } holds
+
+  @inline
+  def max(x: BigInt, y: BigInt) = if (x >= y) x else y
+
+  def depsEval(i: BigInt) = i == 0 || (i > 0 && allEval(i-1))
+
+  def allEval(i: BigInt): Boolean = {
+    require(i >= 0)
+    sched(i).isCached &&
+      (if (i == 0) true
+      else allEval(i - 1))
+  }
+
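+  // evalMono: growing the memoization state preserves allEval; evalLem: if all
+  // results up to y are cached, so are all results up to any x <= y.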
+  @traceInduct
+  def evalMono(i: BigInt, st1: Set[Mem[BigInt]], st2: Set[Mem[BigInt]]) = {
+    require(i >= 0)
+    (st1.subsetOf(st2) && (allEval(i) withState st1)) ==> (allEval(i) withState st2)
+  } holds
+
+  @traceInduct
+  def evalLem(x: BigInt, y: BigInt) = {
+    require(x >= 0 && y >= 0)
+    (x <= y && allEval(y)) ==> allEval(x)
+  } holds
+
+  /**
+   * (a) assuming that jobs are sorted in ascending order of finish times,
+   *     as described for the `jobs` array above
+   * (b) 'prev' refers to prevCompatibleJob, the latest earlier job that is
+   *     compatible with the current one
+   */
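+  // The body below implements the standard weighted-interval-scheduling
+  // recurrence:
+  //   sched(0) = weight(0)
+  //   sched(i) = max(weight(i) + sched(prevCompatibleJob(i)), sched(i - 1)), i > 0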
+  @invstate
+  @memoize
+  def sched(jobIndex: BigInt): BigInt = {
+    require(depsEval(jobIndex) &&
+        (jobIndex == 0 || evalLem(prevCompatibleJob(jobIndex), jobIndex-1)))
+    val (st, fn, w) = jobInfo(jobIndex)
+    if(jobIndex == 0) w
+    else {
+      // we may either include the current job or not:
+      // if we include it, we must skip every job that overlaps with it,
+      // i.e., jump to prevCompatibleJob(jobIndex)
+      val tailValue = sched(jobIndex - 1)
+      val prevCompatVal = sched(prevCompatibleJob(jobIndex))
+      max(w + prevCompatVal, tailValue)
+    }
+  } ensuring(_ => time <= 100)
+//  } ensuring(res => {
+//    val in = Mem.inState[BigInt]
+//    val out = Mem.outState[BigInt]
+//      (( withState in) ==> (in == out)) //&&
+//      (jobIndex == 0 || evalMono(jobIndex-1, out, out ++ Set(Mem(sched(jobIndex)))) &&
+////          allEval(jobIndex-1))
+//          //evalMono(tail, out, out ++ Set(Mem(optValOfSched(jobs)))) // lemma instantiation
+//  })
+
+  def invoke(jobIndex: BigInt) = {
+    require(depsEval(jobIndex))
+    sched(jobIndex)
+  } ensuring (res => {
+    val in = Mem.inState[BigInt]
+    val out = Mem.outState[BigInt]
+    (jobIndex == 0 || evalMono(jobIndex-1, in, out)) &&
+      time <= 150
+  })
+
+  def schedBU(jobi: BigInt): IList = {
+    require(jobi >= 0)
+    if(jobi == 0) {
+      Cons(invoke(jobi), Nil())
+    } else {
+      val tailRes =  schedBU(jobi-1)
+      Cons(invoke(jobi), tailRes)
+    }
+  } ensuring(_ => allEval(jobi) &&
+      time <= 200 * (jobi + 1))
+}