diff --git a/.gitignore b/.gitignore
index 2d08b39e3194e66d69f2e673162136c0f1d36f34..85f840fd53901b688c0351348cba2504ebf24d52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,3 +48,6 @@ out-classes
 
 #travis
 travis/builds
+
+# Isabelle
+contrib
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000000000000000000000000000000000..58bbf387e15f357b463b2321b735282df397fe9f
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "unmanaged/isabelle/protocol"]
+	path = unmanaged/isabelle/protocol
+	url = https://github.com/larsrh/libisabelle-protocol.git
diff --git a/ROOTS b/ROOTS
new file mode 100644
index 0000000000000000000000000000000000000000..011708995db7c4dd771f19c5d366d74059e0abde
--- /dev/null
+++ b/ROOTS
@@ -0,0 +1,2 @@
+unmanaged/isabelle/protocol
+src/main/isabelle
diff --git a/build.sbt b/build.sbt
index b82ddddc4a417ae7fd69f2e07f896de24bd739ed..f279c4fc003ca0c05d962e1da1766ecd0ce02b2e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -35,6 +35,9 @@ libraryDependencies ++= Seq(
   "org.scala-lang" % "scala-compiler" % "2.11.6",
   "org.scalatest" %% "scalatest" % "2.2.4" % "test",
   "com.typesafe.akka" %% "akka-actor" % "2.3.4",
+  "info.hupel" %% "libisabelle" % "0.1",
+  "info.hupel" %% "libisabelle-setup" % "0.1",
+  "info.hupel" %% "pide-2015" % "0.1",
   "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.6.0-rc2"
 )
 
@@ -66,11 +69,13 @@ script := {
       s.log.info("Generating '"+f.getName+"' script ("+(if(is64) "64b" else "32b")+")...")
     }
     val paths = (res.getAbsolutePath +: out.getAbsolutePath +: cps.map(_.data.absolutePath)).mkString(System.getProperty("path.separator"))
+    val base = baseDirectory.value.getAbsolutePath
     IO.write(f, s"""|#!/bin/bash --posix
                     |
                     |SCALACLASSPATH="$paths"
+                    |BASEDIRECTORY="$base"
                     |
-                    |java -Xmx2G -Xms512M -Xss64M -classpath $${SCALACLASSPATH} -Dscala.usejavacp=false scala.tools.nsc.MainGenericRunner -classpath $${SCALACLASSPATH} leon.Main $$@ 2>&1 | tee -i last.log
+                    |java -Xmx2G -Xms512M -Xss64M -classpath "$${SCALACLASSPATH}" -Dleon.base="$${BASEDIRECTORY}" -Dscala.usejavacp=false scala.tools.nsc.MainGenericRunner -classpath "$${SCALACLASSPATH}" leon.Main $$@ 2>&1 | tee -i last.log
                     |""".stripMargin)
     f.setExecutable(true)
   } catch {
@@ -83,12 +88,12 @@ sourceGenerators in Compile <+= Def.task {
   val libFiles = ((baseDirectory.value / "library") ** "*.scala").getPaths
   val build = (sourceManaged in Compile).value / "leon" / "Build.scala";
   IO.write(build, s"""|package leon;
-                    |
-                    |object Build {
-                    |  val libFiles = List(
-                    |    ${libFiles.mkString("\"\"\"", "\"\"\",\n    \"\"\"", "\"\"\"")}
-                    |  )
-                    |}""".stripMargin)
+                      |
+                      |object Build {
+                      |  val libFiles = List(
+                      |    ${libFiles.mkString("\"\"\"", "\"\"\",\n    \"\"\"", "\"\"\"")}
+                      |  )
+                      |}""".stripMargin)
   Seq(build)
 }
 
@@ -134,5 +139,3 @@ lazy val root = (project in file(".")).
   settings(inConfig(RegressionTest)(Defaults.testTasks ++ testSettings): _*).
   settings(inConfig(IntegrTest)(Defaults.testTasks ++ testSettings): _*).
   settings(inConfig(Test)(Defaults.testTasks ++ testSettings): _*)
-
-
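
Note: the generated run script now exports the checkout directory to the JVM as the leon.base system property. Presumably the new Isabelle component uses it to locate the theory sources under src/main/isabelle; a minimal, hypothetical sketch of such a lookup (the fallback to the working directory is an assumption, not part of this patch):

    // Hypothetical sketch: resolving the Isabelle theory directory from the
    // leon.base property that the generated script now sets.
    val base = Option(System.getProperty("leon.base")).getOrElse(".")
    val theoryDir = new java.io.File(base, "src/main/isabelle")
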
diff --git a/library/annotation/isabelle.scala b/library/annotation/isabelle.scala
new file mode 100644
index 0000000000000000000000000000000000000000..bfb4b39ec1499f270907a861c4d339d82a2d963d
--- /dev/null
+++ b/library/annotation/isabelle.scala
@@ -0,0 +1,31 @@
+package leon.annotation
+
+import scala.annotation.StaticAnnotation
+
+object isabelle {
+
+  @ignore
+  class typ(name: String) extends StaticAnnotation
+
+  @ignore
+  class constructor(name: String) extends StaticAnnotation
+
+  @ignore
+  class function(term: String) extends StaticAnnotation
+
+  @ignore
+  class script(name: String, source: String) extends StaticAnnotation
+
+  @ignore
+  class proof(method: String, kind: String = "") extends StaticAnnotation
+
+  @ignore
+  class fullBody() extends StaticAnnotation
+
+  @ignore
+  class noBody() extends StaticAnnotation
+
+  @ignore
+  class lemma(about: String) extends StaticAnnotation
+
+}
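
These annotations are the user-facing surface of the Isabelle integration: they let library (and user) definitions be mapped onto existing Isabelle/HOL types, constructors and constants instead of being re-proved from scratch. A small self-contained sketch of their intended use, mirroring the List mappings applied later in this diff (the MyList/MyCons/MyNil names are illustrative only):

    import leon.annotation._

    // Map a Leon ADT onto Isabelle's List.list, reusing its constructors
    // and expressing size via the existing List.length function.
    @isabelle.typ(name = "List.list")
    sealed abstract class MyList[T] {
      @isabelle.function(term = "Int.int o List.length")
      def size: BigInt = this match {
        case MyNil() => BigInt(0)
        case MyCons(_, t) => 1 + t.size
      }
    }

    @isabelle.constructor(name = "List.list.Cons")
    case class MyCons[T](h: T, t: MyList[T]) extends MyList[T]

    @isabelle.constructor(name = "List.list.Nil")
    case class MyNil[T]() extends MyList[T]
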
diff --git a/library/collection/List.scala b/library/collection/List.scala
index e75b588c74be5ee9ba3cdfeb22ba857da1b155a7..ef22189d776ac2b4e0f36a629c96f4cc79e4d8d7 100644
--- a/library/collection/List.scala
+++ b/library/collection/List.scala
@@ -9,8 +9,10 @@ import leon.math._
 import leon.proof._
 
 @library
+@isabelle.typ(name = "List.list")
 sealed abstract class List[T] {
 
+  @isabelle.function(term = "Int.int o List.length")
   def size: BigInt = (this match {
     case Nil() => BigInt(0)
     case Cons(h, t) => 1 + t.size
@@ -18,16 +20,19 @@ sealed abstract class List[T] {
 
   def length = size
 
+  @isabelle.function(term = "List.list.set")
   def content: Set[T] = this match {
     case Nil() => Set()
     case Cons(h, t) => Set(h) ++ t.content
   }
 
+  @isabelle.function(term = "List.member")
   def contains(v: T): Boolean = (this match {
     case Cons(h, t) => h == v || t.contains(v)
     case Nil() => false
   }) ensuring { _ == (content contains v) }
 
+  @isabelle.function(term = "List.append")
   def ++(that: List[T]): List[T] = (this match {
     case Nil() => that
     case Cons(x, xs) => Cons(x, xs ++ that)
@@ -49,6 +54,7 @@ sealed abstract class List[T] {
     t
   }
 
+  @isabelle.fullBody
   def apply(index: BigInt): T = {
     require(0 <= index && index < size)
     if (index == BigInt(0)) {
@@ -58,8 +64,10 @@ sealed abstract class List[T] {
     }
   }
 
+  @isabelle.function(term = "%xs x. x # xs")
   def ::(t:T): List[T] = Cons(t, this)
 
+  @isabelle.function(term = "%xs x. xs @ [x]")
   def :+(t:T): List[T] = {
     this match {
       case Nil() => Cons(t, this)
@@ -67,6 +75,7 @@ sealed abstract class List[T] {
     }
   } ensuring(res => (res.size == size + 1) && (res.content == content ++ Set(t)))
 
+  @isabelle.function(term = "List.rev")
   def reverse: List[T] = {
     this match {
       case Nil() => this
@@ -152,6 +161,7 @@ sealed abstract class List[T] {
     chunk0(s, this, Nil(), Nil(), s)
   }
 
+  @isabelle.function(term = "List.zip")
   def zip[B](that: List[B]): List[(T, B)] = { (this, that) match {
     case (Cons(h1, t1), Cons(h2, t2)) =>
       Cons((h1, h2), t1.zip(t2))
@@ -161,6 +171,7 @@ sealed abstract class List[T] {
     if (this.size <= that.size) this.size else that.size
   )}
 
+  @isabelle.function(term = "%xs x. removeAll x xs")
   def -(e: T): List[T] = { this match {
     case Cons(h, t) =>
       if (e == h) {
@@ -383,6 +394,7 @@ sealed abstract class List[T] {
     res.size == this.size
   }
 
+  @isabelle.function(term = "List.null")
   def isEmpty = this match {
     case Nil() => true
     case _ => false
@@ -391,16 +403,19 @@ sealed abstract class List[T] {
   def nonEmpty = !isEmpty
 
   // Higher-order API
+  @isabelle.function(term = "%xs f. List.list.map f xs")
   def map[R](f: T => R): List[R] = { this match {
     case Nil() => Nil[R]()
     case Cons(h, t) => f(h) :: t.map(f)
   }} ensuring { _.size == this.size }
 
+  @isabelle.function(term = "%bs a f. List.foldl f a bs")
   def foldLeft[R](z: R)(f: (R,T) => R): R = this match {
     case Nil() => z
     case Cons(h,t) => t.foldLeft(f(z,h))(f)
   }
 
+  @isabelle.function(term = "%as b f. List.foldr f as b")
   def foldRight[R](z: R)(f: (T,R) => R): R = this match {
     case Nil() => z
     case Cons(h, t) => f(h, t.foldRight(z)(f))
@@ -418,6 +433,7 @@ sealed abstract class List[T] {
       f(h, h1) :: rest
   }} ensuring { !_.isEmpty }
 
+  @isabelle.function(term = "List.bind")
   def flatMap[R](f: T => List[R]): List[R] =
     ListOps.flatten(this map f)
 
@@ -452,17 +468,19 @@ sealed abstract class List[T] {
   // In case we implement for-comprehensions
   def withFilter(p: T => Boolean) = filter(p)
 
+  @isabelle.function(term = "%xs P. List.list_all P xs")
   def forall(p: T => Boolean): Boolean = this match {
     case Nil() => true
     case Cons(h, t) => p(h) && t.forall(p)
   }
 
+  @isabelle.function(term = "%xs P. List.list_ex P xs")
   def exists(p: T => Boolean) = !forall(!p(_))
 
+  @isabelle.function(term = "%xs P. List.find P xs")
   def find(p: T => Boolean): Option[T] = { this match {
     case Nil() => None[T]()
-    case Cons(h, t) if p(h) => Some(h)
-    case Cons(_, t) => t.find(p)
+    case Cons(h, t) => if (p(h)) Some(h) else t.find(p)
   }} ensuring { res => res match {
     case Some(r) => (content contains r) && p(r)
     case None() => true
@@ -522,7 +540,10 @@ sealed abstract class List[T] {
 
 }
 
+@isabelle.constructor(name = "List.list.Cons")
 case class Cons[T](h: T, t: List[T]) extends List[T]
+
+@isabelle.constructor(name = "List.list.Nil")
 case class Nil[T]() extends List[T]
 
 object List {
@@ -545,6 +566,7 @@ object List {
 
 @library
 object ListOps {
+  @isabelle.function(term = "List.concat")
   def flatten[T](ls: List[List[T]]): List[T] = ls match {
     case Cons(h, t) => h ++ flatten(t)
     case Nil() => Nil()
@@ -604,6 +626,7 @@ object ListSpecs {
   }.holds
 
   @induct
+  @isabelle.lemma(about = "leon.collection.List.apply")
   def consIndex[T](h: T, t: List[T], i: BigInt): Boolean = {
     require(0 <= i && i < t.size + 1)
     (h :: t).apply(i) == (if (i == 0) h else t.apply(i - 1))
diff --git a/library/lang/Dummy.scala b/library/lang/Dummy.scala
new file mode 100644
index 0000000000000000000000000000000000000000..41dbe4f90ccea298a17de70caa2f3d04ebdc0def
--- /dev/null
+++ b/library/lang/Dummy.scala
@@ -0,0 +1,3 @@
+package leon.lang
+
+case class Dummy[T]()
diff --git a/library/lang/Map.scala b/library/lang/Map.scala
index 368c028abd1622c8236615121c3f324e54feffc6..aff6f26f80d5e4abc7d8fa70d3a6553ebdff1c17 100644
--- a/library/lang/Map.scala
+++ b/library/lang/Map.scala
@@ -3,12 +3,13 @@ import leon.annotation._
 
 object Map {
   @library
+  @isabelle.function(term = "Map.empty")
   def empty[A,B] = Map[A,B]()
 
-   @ignore
-   def apply[A,B](elems: (A,B)*) = {
-     new Map[A,B](scala.collection.immutable.Map[A,B](elems : _*))
-   }
+  @ignore
+  def apply[A,B](elems: (A,B)*) = {
+    new Map[A,B](scala.collection.immutable.Map[A,B](elems : _*))
+  }
 }
 
 @ignore
diff --git a/library/lang/Option.scala b/library/lang/Option.scala
index 0efcc4ababecc3f32f2fe2af8e9b6e84ef685336..a6f09db68f3fecc7ad8095506a49d21e4d8ac7ad 100644
--- a/library/lang/Option.scala
+++ b/library/lang/Option.scala
@@ -6,6 +6,7 @@ import leon.annotation._
 import leon.collection._
 
 @library
+@isabelle.typ(name = "Option.option")
 sealed abstract class Option[T] {
 
   def get : T = {
@@ -38,11 +39,13 @@ sealed abstract class Option[T] {
 
 
   // Higher-order API
+  @isabelle.function(term = "%x f. Option.map_option f x")
   def map[R](f: T => R) = { this match {
     case None() => None[R]()
     case Some(x) => Some(f(x))
   }} ensuring { _.isDefined == this.isDefined }
 
+  @isabelle.function(term = "Option.bind")
   def flatMap[R](f: T => Option[R]) = this match {
     case None() => None[R]()
     case Some(x) => f(x)
@@ -74,5 +77,8 @@ sealed abstract class Option[T] {
   }
 }
 
+@isabelle.constructor(name = "Option.option.Some")
 case class Some[T](v: T) extends Option[T]
+
+@isabelle.constructor(name = "Option.option.None")
 case class None[T]() extends Option[T]
diff --git a/library/monads/state/State.scala b/library/monads/state/State.scala
index ece710774a3a707e3ea8740eeee7a6e4989b498a..8e268c79ed7870302bc9a7fb6eeaa22c77ee60cb 100644
--- a/library/monads/state/State.scala
+++ b/library/monads/state/State.scala
@@ -47,6 +47,7 @@ case class State[S, A](runState: S => (A, S)) {
   def >:: (s: S) = eval(s)
 
   /** Helpers */
+  @isabelle.noBody
   def forever[B]: State[S, B] = this >> forever
 
   def apply(s: S) = runState(s)
@@ -94,7 +95,7 @@ object State {
   def mapM[A, B, S](f: A => State[S, B], l: List[A]): State[S, List[B]] = {
     l match {
       case Nil() => unit(Nil[B]())
-      case x :: xs => for {
+      case Cons(x, xs) => for {
         mx <- f(x)
         mxs <- mapM(f,xs)
       } yield mx :: mxs
@@ -104,7 +105,7 @@ object State {
   def mapM_ [A, B, S] (f: A => State[S, B], l: List[A]): State[S, Unit] = {
     l match {
       case Nil() => unit(())
-      case x :: xs => for {
+      case Cons(x, xs) => for {
         mx <- f(x)
         mxs <- mapM_ (f, xs)
       } yield ()
@@ -123,7 +124,7 @@ object State {
   def filterM[S, A](f: A => State[S, Boolean], l: List[A]): State[S, List[A]] = {
     l match {
       case Nil() => unit(Nil[A]())
-      case x :: xs => for {
+      case Cons(x, xs) => for {
         flg <- f(x)
         rest <- filterM(f, xs)
       } yield (if (flg) x :: rest else rest)
@@ -134,7 +135,7 @@ object State {
   def foldLeftM[S, A, B](f: (B, A) => State[S, B], z: B, l: List[A]): State[S, B] = {
     l match {
       case Nil() => unit(z)
-      case x :: xs =>
+      case Cons(x, xs) =>
         f(z, x) >>= ( z0 => foldLeftM(f,z0,xs) )
     }
   }
@@ -143,7 +144,7 @@ object State {
   def foldLeftM_ [S, A](f: A => State[S, Unit], l: List[A]): State[S, Unit] = {
     l match {
       case Nil() => unit(())
-      case x :: xs =>
+      case Cons(x, xs) =>
         f(x) >> foldLeftM_ (f, xs)
     }
   }
diff --git a/src/main/isabelle/.gitignore b/src/main/isabelle/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..c0810538385bb019c4ca5566f2e0a4b28d52b3c5
--- /dev/null
+++ b/src/main/isabelle/.gitignore
@@ -0,0 +1 @@
+Scratch.thy
diff --git a/src/main/isabelle/Leon.thy b/src/main/isabelle/Leon.thy
new file mode 100644
index 0000000000000000000000000000000000000000..60ffbc20801be58656c2f01f339bd65fdb7f991e
--- /dev/null
+++ b/src/main/isabelle/Leon.thy
@@ -0,0 +1,5 @@
+theory Leon
+imports Leon_Ops Leon_Library
+begin
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/Leon_Library.thy b/src/main/isabelle/Leon_Library.thy
new file mode 100644
index 0000000000000000000000000000000000000000..62d7108d45ecd4371b450e5b2ba123292647d687
--- /dev/null
+++ b/src/main/isabelle/Leon_Library.thy
@@ -0,0 +1,29 @@
+theory Leon_Library
+imports Leon_Ops
+begin
+
+axiomatization
+  error :: "nat \<Rightarrow> 'a"
+
+declare [[code abort: error]]
+
+declare null_rec[simp]
+declare List.null_def[simp]
+declare List.bind_def[simp]
+declare Let_def[leon_unfold]
+declare Product_Type.split[leon_unfold]
+declare comp_apply[simp del]
+declare comp_def[simp]
+declare member_rec[simp]
+
+lemma [simp]: "list_ex P xs = (\<not> list_all (Not \<circ> P) xs)"
+by (induct xs) auto
+
+syntax "_leon_var" :: "idt \<Rightarrow> 'a" ("<var _>")
+syntax "_leon_const" :: "idt \<Rightarrow> 'a" ("<const _>")
+
+ML_file "leon_syntax.ML"
+
+setup \<open>Leon_Syntax.setup\<close>
+
+end
\ No newline at end of file
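
The <var _> and <const _> syntax declared above (and implemented in leon_syntax.ML below) lets proof hints refer to Leon variables and functions by their original names rather than the mangled Isabelle names. A hedged sketch of how this presumably combines with the @isabelle.proof annotation introduced in this patch (the concrete lemma and method string are illustrative only):

    import leon.annotation._
    import leon.lang._
    import leon.collection._

    object SizeLemma {
      // Illustrative: ask Isabelle to discharge this property by induction
      // on the list argument, which the hint names via <var xs>.
      @isabelle.proof(method = """(induct "<var xs>", auto)""")
      def sizeNonNegative[T](xs: List[T]): Boolean = {
        xs.size >= 0
      }.holds
    }
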
diff --git a/src/main/isabelle/Leon_Ops.thy b/src/main/isabelle/Leon_Ops.thy
new file mode 100644
index 0000000000000000000000000000000000000000..8ed524cc66c6dd60bf5284f686f36f7097453423
--- /dev/null
+++ b/src/main/isabelle/Leon_Ops.thy
@@ -0,0 +1,47 @@
+theory Leon_Ops
+imports
+  Protocol
+  Codec_Class
+  "~~/src/HOL/Word/Word"
+  "~~/src/HOL/Library/Simps_Case_Conv"
+  Complex_Main
+begin
+
+named_theorems leon_unfold
+
+ML_file "~~/src/HOL/TPTP/sledgehammer_tactics.ML"
+
+sledgehammer_params [timeout = 5, provers = cvc4 z3 spass e]
+
+lemma if_to_cond_simps:
+  assumes "f = (if p then x else y)"
+  shows "p \<Longrightarrow> f = x" "\<not> p \<Longrightarrow> f = y"
+using assms by auto
+
+ML_file "tactics.ML"
+ML_file "util.ML"
+ML_file "stateless_ops.ML"
+ML_file "stateful_ops.ML"
+
+operation_setup (auto) numeral_literal = \<open>Stateless_Ops.numeral\<close>
+operation_setup (auto) word_literal = \<open>Stateless_Ops.word\<close>
+operation_setup (auto) map_literal = \<open>Stateless_Ops.map\<close>
+operation_setup (auto) serial_nat = \<open>Stateless_Ops.serial_nat\<close>
+operation_setup (auto) load = \<open>Stateful_Ops.load\<close>
+operation_setup (auto) "oracle" = \<open>Stateful_Ops.oracle\<close>
+operation_setup (auto) fresh = \<open>Stateful_Ops.fresh\<close>
+operation_setup (auto) prove = \<open>Stateful_Ops.prove\<close>
+operation_setup (auto) equivalences = \<open>Stateful_Ops.equivalences\<close>
+operation_setup (auto) assume_equivalences = \<open>Stateful_Ops.assume_equivalences\<close>
+operation_setup (auto) "lemmas" = \<open>Stateful_Ops.lemmas\<close>
+operation_setup (auto) assume_lemmas = \<open>Stateful_Ops.assume_lemmas\<close>
+operation_setup (auto) "declare" = \<open>Stateful_Ops.declare\<close>
+operation_setup (auto) pretty = \<open>Stateful_Ops.pretty\<close>
+operation_setup (auto) report = \<open>Stateful_Ops.report\<close>
+operation_setup (auto) dump = \<open>Stateful_Ops.dump\<close>
+operation_setup (auto) lookup_datatype = \<open>Stateful_Ops.lookup_datatype\<close>
+operation_setup (auto) "datatypes" = \<open>Stateful_Ops.datatypes\<close>
+operation_setup (auto) "functions" = \<open>Stateful_Ops.functions\<close>
+operation_setup (auto) cases = \<open>Stateful_Ops.cases\<close>
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/ROOT b/src/main/isabelle/ROOT
new file mode 100644
index 0000000000000000000000000000000000000000..5c659007c62c6eaf46ea0a690c2e5aad05574bf8
--- /dev/null
+++ b/src/main/isabelle/ROOT
@@ -0,0 +1,8 @@
+session Leon_Deps = "HOL-Protocol2015" +
+  theories
+    "~~/src/HOL/Word/Word"
+    "~~/src/HOL/Library/Simps_Case_Conv"
+
+session Leon = Leon_Deps +
+  theories
+    Leon
diff --git a/src/main/isabelle/leon_syntax.ML b/src/main/isabelle/leon_syntax.ML
new file mode 100644
index 0000000000000000000000000000000000000000..cc0c704a08cdbe12de2c6c061f4228e4098afaaa
--- /dev/null
+++ b/src/main/isabelle/leon_syntax.ML
@@ -0,0 +1,82 @@
+signature LEON_SYNTAX = sig
+  val setup: theory -> theory
+end
+
+structure Leon_Syntax: LEON_SYNTAX = struct
+
+fun is_digit #"0" = true
+  | is_digit #"1" = true
+  | is_digit #"2" = true
+  | is_digit #"3" = true
+  | is_digit #"4" = true
+  | is_digit #"5" = true
+  | is_digit #"6" = true
+  | is_digit #"7" = true
+  | is_digit #"8" = true
+  | is_digit #"9" = true
+  | is_digit _ = false
+
+fun is_leon_name base id name =
+  let
+    val prefix = Long_Name.append base (id ^ "'")
+    val exploded = String.explode name
+  in
+    String.isPrefix prefix name andalso List.all is_digit (drop (String.size prefix) exploded)
+  end
+
+fun find_leon_name base id names =
+  case filter (is_leon_name base id) names of
+    [name] => SOME name
+  | _ => NONE
+
+fun translation const ctxt args =
+  let
+    val thy = Proof_Context.theory_of ctxt
+    val thy_name = Context.theory_name thy
+    val fixes = Variable.dest_fixes ctxt
+    val {constants = consts, ...} = Consts.dest (Sign.consts_of thy)
+    fun guess id =
+      if const then
+        case find_leon_name thy_name id (map fst consts) of
+          SOME name => Const (name, dummyT)
+        | NONE => raise TERM ("unknown or ambiguous Leon constant " ^ id, args)
+      else
+        case find_leon_name "" id (map fst fixes) of
+          SOME name => Free (name, dummyT)
+        | NONE => raise TERM ("unknown or ambiguous Leon variable " ^ id, args)
+  in
+    case args of
+      [(Const (@{syntax_const "_constrain"}, _)) $ Free (s, _) $ _] => guess s
+    | _ => impossible "Leon_Syntax.translation" @{here}
+  end
+
+val const_translation = translation true
+val var_translation = translation false
+
+val translation_setup = Sign.parse_translation
+  [(@{syntax_const "_leon_var"}, var_translation),
+   (@{syntax_const "_leon_const"}, const_translation)]
+
+
+fun get_induct id thy =
+  let
+    val functions = Stateful_Ops.get_functions thy
+    val names = Symtab.keys functions
+  in
+    case find_leon_name "" id names of
+      SOME name =>
+        (case the (Symtab.lookup functions name) of
+          (NONE, _) => error ("no induction rule for " ^ id)
+        | (SOME thm, _) => thm)
+    | NONE => error ("unknown or ambiguous Leon function " ^ id)
+  end
+
+val attrib_setup =
+  Attrib.setup @{binding leon_induct}
+    (Scan.lift Args.name >>
+      (fn name => Thm.mixed_attribute (fn (context, _) => (context, get_induct name (Context.theory_of context)))))
+    "induction rule for Leon function"
+
+val setup = translation_setup #> attrib_setup
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/stateful_ops.ML b/src/main/isabelle/stateful_ops.ML
new file mode 100644
index 0000000000000000000000000000000000000000..2289e3e6e6311b9fa35d819db29794b433c897a9
--- /dev/null
+++ b/src/main/isabelle/stateful_ops.ML
@@ -0,0 +1,565 @@
+signature STATEFUL_OPS = sig
+  val load: string * string * (string * string) list -> string list
+  val oracle: unit -> unit
+  val fresh: string -> string
+  val prove: term list * string option -> string option
+  val declare: string list -> unit
+  val cases: (term * (string * typ) list) * (typ * (term * term) list) -> term
+  val equivalences: (string * string) list -> string list
+  val assume_equivalences: (string * string) list -> unit
+  val lemmas: (string * term) list -> string list
+  val assume_lemmas: (string * term) list -> unit
+  val functions: (string * (string * typ) list * (term * typ)) list -> string option
+  val datatypes: (string * string list * (string * (string * typ) list) list) list -> string option
+  val lookup_datatype: string * (string * string) list -> (string, (string * (term * term * term list)) list) Codec.either
+  val pretty: term -> string
+  val report: unit -> (string * string) list
+  val dump: unit -> (string * string) list
+
+  (* internal *)
+  val reset: theory option -> unit
+  val peek: unit -> local_theory option
+  val get_functions: theory -> (thm option * thm list) Symtab.table
+end
+
+structure Stateful_Ops: STATEFUL_OPS = struct
+
+fun try_timeout' f = try_timeout 5 f (* global timeout for auxiliary operations *)
+
+fun err_timeout msg f x =
+  case try_timeout 5 f x of
+    Exn.Res r => r
+  | Exn.Exn TimeLimit.TimeOut => raise Timeout msg
+  | Exn.Exn exn => reraise exn
+
+datatype report =
+  Function of {names: string list, raws: string list, specs: string list, inducts: (string * string) list} |
+  Datatype of string |
+  Theory of {import: string, scripts: (string * string) list} |
+  Equivalence of string |
+  Lemma of {name: string, prop: string} |
+  Oracle
+
+structure Reports = Theory_Data
+(
+  type T = report list
+  val empty = []
+  fun merge _ = impossible "Reports.merge" @{here}
+  val extend = I
+)
+
+structure Functions = Theory_Data
+(
+  type T = (thm option * thm list) Symtab.table
+  val empty = Symtab.empty
+  fun merge _ = impossible "Functions.merge" @{here}
+  val extend = I
+)
+
+structure Oracle = Theory_Data
+(
+  type T = (cterm -> thm) option
+  val empty = NONE
+  fun merge _ = impossible "Oracle.merge" @{here}
+  val extend = I
+)
+
+fun add_report report = Reports.map (fn list => report :: list)
+val add_fun = Functions.map o Symtab.update_new
+val add_funs = fold add_fun
+fun set_oracle thy =
+  let
+    val ((_, oracle), thy') = Thm.add_oracle (@{binding leon_oracle}, I) thy
+  in
+    Oracle.map (K (SOME oracle)) thy'
+    |> add_report Oracle
+  end
+val get_oracle =
+  Proof_Context.theory_of
+  #> Oracle.get
+  #> the_error "expected oracle" @{here}
+
+val get_functions = Functions.get
+
+type state = local_theory option
+val empty_state = NONE: state
+
+val state = Synchronized.var "libisabelle.leon_state" empty_state
+
+fun unset_parallel_proofs () =
+  Goal.parallel_proofs := 0
+
+fun reset thy =
+  (unset_parallel_proofs (); Synchronized.change state (K (Option.map (Named_Target.theory_init) thy)))
+
+fun peek () =
+  Synchronized.value state
+
+local
+  fun not_loaded pos =
+    invalid_state "no theory loaded" pos
+  fun already_loaded lthy =
+    invalid_state ("theory " ^ Context.theory_name (Proof_Context.theory_of lthy) ^ "already loaded")
+in
+
+fun access_loaded f =
+  case Synchronized.value state of
+    SOME thy => f thy
+  | NONE => not_loaded @{here}
+
+fun update_loaded f =
+  let
+    fun upd NONE = not_loaded @{here}
+      | upd (SOME thy) = let val (a, thy') = f thy in (a, SOME thy') end
+  in
+    Synchronized.change_result state upd
+  end
+
+fun load (root_name, thy_name, scripts) =
+  let
+    val _ = unset_parallel_proofs ()
+    val _ =
+      if Execution.is_running_exec Document_ID.none then ()
+      else invalid_state "Must not be running in interactive mode" @{here}
+    val root = Thy_Info.get_theory root_name
+
+    fun load_script (name, txt) =
+      let
+        val thy = Theory.begin_theory (name, Position.none) [root]
+      in (name, Exn.interruptible_capture (script_thy Position.none txt) thy) end
+
+    fun classify (_, Exn.Res thy) = ([thy], [])
+      | classify (name, Exn.Exn _) = ([], [name])
+
+    fun upd (SOME lthy) = already_loaded lthy @{here}
+      | upd NONE =
+          let
+            val (good, bad) = map classify (Par_List.map load_script scripts)
+              |> split_list ||> flat |>> flat
+
+            fun thy () =
+              Theory.begin_theory (thy_name, Position.none) (root :: good)
+              |> add_report (Theory {import = root_name, scripts = scripts})
+
+            val res =
+              if null bad then
+                SOME (Named_Target.theory_init (thy ()))
+              else
+                NONE
+          in (bad, res) end
+  in Synchronized.change_result state upd end
+
+end
+
+fun oracle () =
+  update_loaded (Local_Theory.background_theory set_oracle #> pair ())
+
+fun fresh n =
+  update_loaded (Variable.variant_fixes [n] #>> hd)
+
+fun prove (ts, method) =
+  access_loaded (fn lthy =>
+    let
+      val (ts', lthy) = register_terms ts lthy
+      val prop = Balanced_Tree.make HOLogic.mk_disj ts'
+      fun tac ctxt =
+        case method of
+          NONE => prove_tac ctxt
+        | SOME src => HEADGOAL (method_tac @{here} src ctxt)
+    in
+      (* Assumption: all proofs are sequential *)
+      try (Goal.prove lthy [] [] prop) (fn {context, ...} => tac context)
+      |> Option.map (print_thm lthy)
+    end)
+
+fun gen_lemmas prove props =
+  update_loaded (fn lthy =>
+    let
+      fun process (name, prop) lthy =
+        let
+          val ([prop'], lthy') = register_terms [prop] lthy
+          val binding = Binding.qualified true "lemma" (Binding.make (name, @{here}))
+        in
+          case prove lthy' prop' of
+            NONE => ([name], lthy)
+          | SOME thm =>
+              lthy (* sic *)
+              |> Local_Theory.note ((binding, @{attributes [simp]}), [thm]) |> snd
+              |> Local_Theory.background_theory (add_report (Lemma {name = name, prop = print_thm lthy thm}))
+              |> pair []
+        end
+    in
+      fold_map process props lthy
+      |>> flat
+    end)
+
+val lemmas =
+  gen_lemmas (fn ctxt => fn goal =>
+    try_timeout' (Goal.prove ctxt [] [] goal) (fn {context, ...} => prove_tac context) |> Exn.get_res)
+
+val assume_lemmas =
+  gen_lemmas (fn ctxt => fn goal =>
+    SOME (get_oracle ctxt (Thm.cterm_of ctxt goal)))
+  #> K ()
+
+fun declare names =
+  update_loaded (fold Variable.declare_names (map (Free o rpair dummyT) names) #> pair ())
+
+fun cases ((raw_scr, bounds), (expected_typ, clauses)) =
+  access_loaded (fn ctxt =>
+    let
+      fun dest_abs (name, typ) t = Term.dest_abs (name, typ, t)
+      val unbound = fold_map dest_abs bounds #> snd
+
+      val scr = Syntax.check_term ctxt (unbound raw_scr)
+      val scr_typ = fastype_of scr
+      val names = Variable.names_of ctxt
+
+      fun process_clause (raw_pat, raw_rhs) =
+        let
+          val pat = Syntax.check_term ctxt (Type.constraint scr_typ raw_pat)
+          val frees = all_undeclared_frees ctxt pat
+          val (free_names', ctxt') = Variable.add_fixes (map fst frees) ctxt
+          val frees' = map Free (free_names' ~~ map snd frees)
+          val frees = map Free frees
+          val (pat', raw_rhs') = apply2 (subst_free (frees ~~ frees')) (pat, unbound raw_rhs)
+
+          val rhs' =
+            Type.constraint expected_typ raw_rhs'
+            |> Syntax.check_term (Variable.declare_term pat' ctxt')
+        in
+          (pat', rhs')
+        end
+
+      val term =
+         map process_clause clauses
+         |> Case_Translation.make_case ctxt Case_Translation.Quiet names scr
+         |> Syntax.check_term ctxt
+
+      fun find_type (t $ u) name = merge_options (find_type t name, find_type u name)
+        | find_type (Abs (_, _, t)) name = find_type t name
+        | find_type (Free (name', typ)) name = if name = name' then SOME typ else NONE
+        | find_type _ _ = NONE
+
+      fun get_type (name, typ) =
+        if typ = dummyT then
+          the_default dummyT (find_type term name)
+        else
+          typ
+
+      val new_bounds = map fst bounds ~~ map get_type bounds
+    in
+      fold (Term.lambda o Free) new_bounds term |> strip_abs_body
+    end)
+
+fun gen_equivalences prove eqs =
+  update_loaded (fn lthy =>
+    let
+      fun prepare (lhs, rhs) =
+        let
+          val lhs' = qualify lthy lhs
+
+          val (rule, specs) =
+            Symtab.lookup (Functions.get (Proof_Context.theory_of lthy)) lhs
+            |> the_error "equivalences" @{here}
+
+          val goal = mk_untyped_eq (Const (lhs', dummyT), Syntax.parse_term lthy rhs)
+            |> HOLogic.mk_Trueprop
+            |> Syntax.check_term lthy
+        in
+          (lhs, specs, goal, rule)
+        end
+
+      val prepared = map prepare eqs
+
+      fun process (name, specs, goal, rule) lthy =
+        let
+          val binding = Binding.qualified true "equiv" (Binding.make (name, @{here}))
+        in
+          case prove lthy goal rule of
+            NONE => (([name], []), lthy)
+          | SOME thm =>
+              lthy
+              |> Local_Theory.note ((Binding.empty, @{attributes [simp del]}), specs) |> snd
+              |> Local_Theory.note ((binding, @{attributes [simp]}), [thm]) |> snd
+              |> pair ([], [thm])
+        end
+
+      val ((bad, good), lthy') =
+        fold_map process prepared lthy
+        |>> split_list |>> apfst flat |>> apsnd flat
+
+      val lthy'' =
+        Local_Theory.background_theory (fold (add_report o Equivalence o print_thm lthy') good) lthy'
+    in
+      (bad, lthy'')
+    end)
+
+val assume_equivalences =
+  gen_equivalences (fn lthy => fn goal => fn _ =>
+    SOME (get_oracle lthy (Thm.cterm_of lthy goal)))
+  #> K ()
+
+val equivalences =
+  gen_equivalences (fn lthy => fn goal => fn rule =>
+    try_timeout' (Goal.prove lthy [] [] goal) (fn {context, ...} => equiv_tac rule context) |> Exn.get_res)
+
+fun functions raw_funs =
+  update_loaded (fn lthy =>
+    let
+      fun transform (name, raw_args, (raw_rhs, expected_typ)) =
+        let
+          val lhs = list_comb (Free (name, dummyT), map Free raw_args)
+          val rhs = Type.constraint expected_typ raw_rhs
+        in
+          ((Binding.make (name, @{here}), NONE, NoSyn),
+           HOLogic.mk_Trueprop (Const (@{const_name HOL.eq}, dummyT) $ lhs $ rhs))
+        end
+
+      val no_args = exists (equal 0 o length o #2) raw_funs
+
+      val specs =
+        map transform raw_funs
+        |> split_list
+        ||> map (Syntax.check_term lthy)
+        ||> length raw_funs > 1 ? homogenize_raw_types lthy
+        ||> map (pair (Binding.empty, []))
+        |> op ~~
+
+      val recursive =
+        case specs of
+          [(_, (_, prop))] =>
+            let
+              val ((f, _), rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop prop) |>> strip_comb
+            in
+              exists (equal f o Free) (all_frees rhs)
+            end
+        | _ => true
+
+      fun fun_construction lthy =
+        let
+          val (heads, props) = split_list specs
+          val construction = Function.add_function heads props Function_Fun.fun_config pat_completeness_auto_tac
+          val (_, lthy) = err_timeout "function construction" construction lthy
+          val (info, lthy) = err_timeout "termination proof" (Function.prove_termination NONE (termination_tac lthy)) lthy
+        in
+          #simps info
+          |> the_error "simps of function definition" @{here}
+          |> rpair lthy
+        end
+      fun def_construction lthy =
+        let val [(head, prop)] = specs in
+          Specification.definition (SOME head, prop) lthy
+          |>> snd |>> snd |>> single
+        end
+
+      fun construction lthy = lthy |>
+        (case (recursive, no_args) of
+          (true, true) => raise Unsupported ("Mutual definition without arguments: " ^ commas (map #1 raw_funs))
+        | (true, false) => fun_construction
+        | (false, _) => def_construction)
+    in
+      case Exn.interruptible_capture construction lthy of
+        Exn.Res (thms, lthy) =>
+          let
+            val names = map #1 raw_funs
+            val lthy' = Local_Theory.restore lthy
+            val thms = Proof_Context.export lthy lthy' thms
+            val simpss = thms
+              |> map (try_case_to_simps lthy' o restore_eq o full_simplify (mk_simp_ctxt lthy'))
+              |> map (homogenize_spec_types lthy')
+            val simps = maps if_to_cond_simps (flat simpss)
+            val specs = names ~~ map (`(try_timeout' (mk_induction_schema lthy') #> Exn.get_res)) simpss
+            fun print_induct (_, (NONE, _)) = NONE
+              | print_induct (name, (SOME rule, _)) = SOME (name, print_thm lthy' rule)
+            val binding = Binding.qualified true "spec" (Binding.make (hd names, @{here}))
+            val report =
+              Function
+                {names = names,
+                 inducts = map_filter print_induct specs,
+                 specs = map (print_thm lthy') simps,
+                 raws = map (print_thm lthy') thms}
+            val lthy'' = lthy'
+              |> Local_Theory.note ((Binding.empty, @{attributes [simp del]}), thms) |> snd
+              |> Local_Theory.note ((binding, @{attributes [simp]}), simps) |> snd
+              |> Local_Theory.background_theory (add_funs specs #> add_report report)
+          in
+            (NONE, lthy'')
+          end
+      | Exn.Exn (exn as Impossible _) =>
+          reraise exn
+      | Exn.Exn exn =>
+          (SOME (print_exn exn), lthy)
+    end)
+
+fun datatypes raw_dts =
+  update_loaded (fn lthy =>
+    let
+      val raw_dts = map (apfst (qualify lthy o #1) o dupl) raw_dts
+
+      fun mk_edges (qname, (_, _, constrs)) =
+        let
+          fun collect_types (Type (name, typs)) = name :: maps collect_types typs
+            | collect_types _ = []
+          val deps = constrs |> maps snd |> maps (collect_types o snd)
+        in map (pair qname) deps end
+
+      fun free tp = TFree ("'" ^ tp, @{sort type})
+
+      fun homogenize ((dt as (_, tps, _)) :: dts) =
+        let
+          fun subst tps' = typ_subst_atomic (map free tps' ~~ map free tps)
+          fun replace (name, tps', constrs) = (name, tps, map (apsnd (map (apsnd (subst tps')))) constrs)
+        in
+          dt :: map replace dts
+        end
+
+      val sccs = compute_sccs raw_dts (maps mk_edges raw_dts)
+
+      fun check_arity_dts dts = length (distinct op = (map (length o #2) dts)) = 1
+      val check_arity = not (exists (equal false) (map check_arity_dts sccs))
+
+      fun transform_tp raw_tp =
+        (SOME (Binding.name raw_tp), (free raw_tp, @{sort type}))
+      fun transform_field (name, typ) =
+        (Binding.name ("s" ^ name), typ)
+      fun transform_constr (name, raw_fields) =
+        (((Binding.name ("d" ^ name), Binding.name ("c" ^ name)), map transform_field raw_fields), NoSyn)
+      fun transform (name, raw_tps, raw_constrs) =
+        let
+          val dt = ((map transform_tp raw_tps, Binding.make (name, @{here})), NoSyn)
+          val constrs = map transform_constr raw_constrs
+        in
+          (((dt, constrs), (@{binding map}, @{binding rel})), [])
+        end
+
+      fun print lthy dts =
+        let
+          fun print_tp tp = "'" ^ tp
+          fun print_tps [] = ""
+            | print_tps tps = enclose "(" ")" (commas (map print_tp tps))
+          fun print_field (name, typ) =
+            "(s" ^ name ^ ": " ^ quote (print_typ lthy typ) ^ ")"
+          fun print_constr (name, fields) =
+            "d" ^ name ^ ": c" ^ name ^ " " ^ space_implode " " (map print_field fields)
+          fun print_one (name, tps, constrs) =
+            print_tps tps ^ " " ^ name ^ " = " ^ space_implode " | " (map print_constr constrs)
+        in
+          "datatype " ^ space_implode " and " (map print_one dts)
+        end
+
+      fun setup_dts scc lthy =
+        let
+          val input = (Ctr_Sugar.default_ctr_options, map transform scc)
+          val lthy =
+            BNF_FP_Def_Sugar.co_datatypes BNF_Util.Least_FP BNF_LFP.construct_lfp input lthy
+            |> Local_Theory.restore
+
+          fun get_sugar (name, _, _) =
+            qualify lthy name |> Ctr_Sugar.ctr_sugar_of lthy
+            |> the_error ("ctr_sugar of " ^ name) @{here}
+
+          val sugars = map get_sugar scc
+
+          fun get_splits {split, split_asm, ...} = [split, split_asm]
+          fun get_cases {case_thms, ...} = case_thms
+        in
+          lthy
+          |> Local_Theory.note ((Binding.empty, @{attributes [split]}), maps get_splits sugars) |> snd
+          |> Local_Theory.note ((Binding.empty, @{attributes [leon_unfold]}), maps get_cases sugars) |> snd
+        end
+    in
+      if check_arity then
+        let
+          val sccs = map homogenize sccs
+          val lthy' = fold setup_dts sccs lthy
+          val lthy'' = Local_Theory.background_theory (fold (add_report o Datatype o print lthy') sccs) lthy'
+        in (NONE, lthy'') end
+      else
+        (SOME "Unsupported feature: mismatched datatype arity", lthy)
+    end)
+
+fun lookup_datatype (name, raw_constructors) =
+  update_loaded (fn lthy =>
+    case Ctr_Sugar.ctr_sugar_of lthy name of
+      NONE =>
+        (Codec.Left ("unknown datatype " ^ name), lthy)
+    | SOME {ctrs, discs, selss, split, split_asm, case_thms, ...} =>
+        let
+          val raw_constructors = sort_wrt snd (raw_constructors)
+          val (names, terms) =
+            map (fst o dest_Const) ctrs ~~ (discs ~~ selss)
+            |> sort_wrt fst
+            |> split_list
+          fun process (internal_name, name) (disc, sels) =
+            (internal_name, (Const (name, dummyT), dummify_types disc, map dummify_types sels))
+
+          val lthy' =
+            lthy
+            |> Local_Theory.note ((Binding.empty, @{attributes [split]}), [split, split_asm]) |> snd
+            |> Local_Theory.note ((Binding.empty, @{attributes [leon_unfold]}), case_thms) |> snd
+        in
+          if map snd raw_constructors <> names then
+            (Codec.Left ("constructor name mismatch: declared constructors are " ^ commas names), lthy)
+          else
+            (Codec.Right (map2 process raw_constructors terms), lthy')
+        end)
+
+fun pretty t =
+  access_loaded (fn lthy => print_term lthy t)
+
+fun report () =
+  access_loaded (fn lthy =>
+    let
+      fun print_report (Theory {scripts, ...}) = map (pair "Script" o fst) scripts
+        | print_report (Equivalence prop) = [("Equivalence", prop)]
+        | print_report (Datatype spec) = [("Datatype", spec)]
+        | print_report (Lemma {name, prop}) = [("Lemma", name ^ ": " ^ prop)]
+        | print_report (Function {raws, inducts, specs, ...}) =
+            map (pair "Function (raw)") raws @
+            map (pair "Function (spec)") specs @
+            map (fn (name, induct) => ("Function (induct)", name ^ ": " ^ induct)) inducts
+        | print_report Oracle = [("Oracle", "enabled")]
+    in
+      Proof_Context.theory_of lthy
+      |> Reports.get
+      |> rev
+      |> maps print_report
+    end)
+
+fun dump () =
+  access_loaded (fn lthy =>
+    let
+      fun print_thy_header name imports =
+        "theory " ^ name ^ "\n" ^
+        "imports " ^ space_implode " " imports ^ "\n" ^
+        "begin\n"
+
+      fun print_thy name import text =
+        print_thy_header name [import] ^ text ^ "\nend\n"
+
+      val name = Context.theory_name (Proof_Context.theory_of lthy)
+
+      fun print_report (Theory {scripts, import}) =
+            (map (fn (script_name, text) => (script_name, print_thy script_name import text)) scripts,
+             print_thy_header name (import :: map fst scripts))
+        | print_report (Equivalence prop) = ([], "lemma [simp]: " ^ quote prop ^ "\nsorry")
+        | print_report (Lemma {prop, ...}) = ([], "lemma [simp]: " ^ quote prop ^ "\nsorry")
+        | print_report (Datatype spec) = ([], spec)
+        | print_report Oracle = ([], "")
+        | print_report (Function {names, specs, ...}) =
+            ([], "fun " ^ space_implode " and " names ^ " where\n" ^ space_implode "|\n" (map quote specs))
+    in
+      Proof_Context.theory_of lthy
+      |> Reports.get
+      |> rev
+      |> map print_report
+      |> split_list
+      |>> flat
+      ||> space_implode "\n\n"
+      ||> pair name
+      |> swap
+      |> op ::
+    end)
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/stateless_ops.ML b/src/main/isabelle/stateless_ops.ML
new file mode 100644
index 0000000000000000000000000000000000000000..31fc63999570922f63dac1972289bd43b1044a5b
--- /dev/null
+++ b/src/main/isabelle/stateless_ops.ML
@@ -0,0 +1,40 @@
+signature STATELESS_OPS = sig
+  val numeral: (int * typ) -> term
+  val serial_nat: unit -> term
+  val word: int -> typ
+  val map: ((term * term) list * (typ * typ)) -> term
+end
+
+structure Stateless_Ops: STATELESS_OPS = struct
+
+fun numeral (n, typ) =
+  let
+    fun wrap numeral = HOLogic.numeral_const typ $ numeral
+  in
+    if n = 0 then
+      Const (@{const_name "zero_class.zero"}, typ)
+    else if n > 0 then
+      wrap (HOLogic.mk_numeral n)
+    else
+      Const (@{const_name "uminus"}, dummyT) $ wrap (HOLogic.mk_numeral (~n))
+  end
+
+val serial_nat =
+  numeral o rpair @{typ nat} o serial
+
+fun word n =
+  Type (@{type_name word}, [Syntax.read_typ @{context} (Markup.print_int n)])
+
+fun map (xs, (fty, tty)) =
+  let
+    val opt_tty = Type (@{type_name option}, [tty])
+    val fun_ty = fty --> opt_tty
+    val init = Abs ("x", fty, Const (@{const_name None}, opt_tty))
+    fun aux (f, t) acc =
+      Const (@{const_name fun_upd}, fun_ty --> fty --> opt_tty --> fun_ty) $ acc $ f $
+        (Const (@{const_name Some}, tty --> opt_tty) $ t)
+  in
+    fold aux xs init
+  end
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/tactics.ML b/src/main/isabelle/tactics.ML
new file mode 100644
index 0000000000000000000000000000000000000000..639c1348b43666f15ee9d5dca347ddd16b6f5a77
--- /dev/null
+++ b/src/main/isabelle/tactics.ML
@@ -0,0 +1,67 @@
+fun mk_simp_ctxt ctxt =
+  (put_simpset HOL_ss ctxt) addsimps Named_Theorems.get ctxt @{named_theorems leon_unfold}
+
+fun prove_tac ctxt =
+  HEADGOAL
+    (SELECT_GOAL (TRY (auto_tac ctxt)) THEN_ALL_NEW
+      (FIRST'
+        [Cooper.tac false [] [] ctxt,
+         Sledgehammer_Tactics.sledgehammer_with_metis_tac ctxt [] Sledgehammer_Fact.no_fact_override []]))
+
+fun equiv_tac rule ctxt =
+  let
+    val solve_tac =
+      TRY (ALLGOALS (REPEAT_ALL_NEW (resolve_tac ctxt @{thms ext}))) THEN
+        Local_Defs.unfold_tac ctxt (Named_Theorems.get ctxt @{named_theorems leon_unfold}) THEN
+        auto_tac ctxt
+
+    fun ext_tac 0 _ = all_tac
+      | ext_tac n k = resolve_tac ctxt @{thms ext} k THEN ext_tac (n - 1) k
+
+    fun induct rule = CSUBGOAL (fn (concl, n) =>
+      let
+        val (p, args) = strip_comb (HOLogic.dest_Trueprop (Thm.concl_of rule))
+
+        val p_inst = Thm.term_of concl
+          |> HOLogic.dest_Trueprop |> HOLogic.dest_eq
+          |> apply2 (rpair (map Bound (length args - 1 downto 0)))
+          |> apply2 list_comb
+          |> HOLogic.mk_eq
+          |> fold (fn _ => fn t => Abs ("x", dummyT, t)) (1 upto length args)
+          |> Syntax.check_term ctxt
+          |> Thm.cterm_of ctxt
+
+        val insts = [(Thm.cterm_of ctxt p, p_inst)]
+        val rule = Drule.cterm_instantiate insts rule
+      in ext_tac (length args) n THEN resolve_tac ctxt [rule] n end)
+
+    val maybe_induct =
+      case rule of
+        NONE => K all_tac
+      | SOME rule => induct rule
+  in
+    HEADGOAL maybe_induct THEN solve_tac
+  end
+
+fun method_tac pos src = Subgoal.FOCUS (fn {context = ctxt, concl, ...} =>
+  let
+    val scan = Scan.finite Token.stopper Method.parse;
+    val ((m, _), []) =
+      src
+      |> Token.explode (Thy_Header.get_keywords (Proof_Context.theory_of ctxt)) pos
+      |> filter_out Token.is_space
+      |> Scan.catch scan
+    val state =
+      Proof.theorem NONE (K I) [[(Thm.term_of concl, [])]] ctxt
+      |> Proof.refine m
+      |> Seq.hd
+    val {goal, ...} = Proof.simple_goal state
+  in
+    HEADGOAL (resolve_tac ctxt [Goal.conclude goal])
+  end)
+
+fun pat_completeness_auto_tac ctxt =
+  Pat_Completeness.pat_completeness_tac ctxt 1 THEN auto_tac ctxt
+
+fun termination_tac ctxt =
+  Function_Common.termination_prover_tac ctxt
\ No newline at end of file
diff --git a/src/main/isabelle/term_codec.ML b/src/main/isabelle/term_codec.ML
new file mode 100644
index 0000000000000000000000000000000000000000..be8e81e96bf557dbfe8cb1e4f4a221b308f9b494
--- /dev/null
+++ b/src/main/isabelle/term_codec.ML
@@ -0,0 +1,75 @@
+datatype ('a, 'b) either = Left of 'a | Right of 'b
+
+structure Codec = struct
+
+open Codec
+
+fun triple a b c =
+  tuple a (tuple b c)
+  |> transform (fn (a, (b, c)) => (a, b, c)) (fn (a, b, c) => (a, (b, c)))
+
+fun either a b =
+  let
+    fun enc (Left l)  = (0, encode a l)
+      | enc (Right r) = (1, encode b r)
+    fun dec 0 = SOME (decode a #> map_result Left)
+      | dec 1 = SOME (decode b #> map_result Right)
+      | dec _ = NONE
+  in Codec.variant enc dec "either" end
+
+end
+
+signature TERM_CODEC = sig
+  val typ : typ codec
+  val term : term codec
+end
+
+structure Term_Codec : TERM_CODEC = struct
+
+val sort : sort codec = Codec.list Codec.string
+val indexname : indexname codec = Codec.tuple Codec.string Codec.int
+
+fun typ () =
+  let
+    fun typ_type () = Codec.tuple Codec.string (Codec.list (typ ()))
+    val typ_tfree = Codec.tuple Codec.string sort
+    val typ_tvar = Codec.tuple indexname sort
+
+    fun enc (Type arg) =  (0, Codec.encode (typ_type ()) arg)
+      | enc (TFree arg) = (1, Codec.encode typ_tfree arg)
+      | enc (TVar arg) =  (2, Codec.encode typ_tvar arg)
+    fun dec 0 = SOME (Codec.decode (typ_type ()) #> Codec.map_result Type)
+      | dec 1 = SOME (Codec.decode typ_tfree #> Codec.map_result TFree)
+      | dec 2 = SOME (Codec.decode typ_tvar #> Codec.map_result TVar)
+      | dec _ = NONE
+  in Codec.variant enc dec "Pure.typ" end
+
+val typ = typ ()
+
+fun term () =
+  let
+    val term_const = Codec.tuple Codec.string typ
+    val term_free = Codec.tuple Codec.string typ
+    val term_var = Codec.tuple indexname typ
+    val term_bound = Codec.int
+    fun term_abs () = Codec.triple Codec.string typ (term ())
+    fun term_app () = Codec.tuple (term ()) (term ())
+
+    fun enc (Const arg) = (0, Codec.encode term_const arg)
+      | enc (Free arg) =  (1, Codec.encode term_free arg)
+      | enc (Var arg) =   (2, Codec.encode term_var arg)
+      | enc (Bound arg) = (3, Codec.encode term_bound arg)
+      | enc (Abs arg) =   (4, Codec.encode (term_abs ()) arg)
+      | enc (op $ arg) =  (5, Codec.encode (term_app ()) arg)
+    fun dec 0 = SOME (Codec.decode term_const #> Codec.map_result Const)
+      | dec 1 = SOME (Codec.decode term_free #> Codec.map_result Free)
+      | dec 2 = SOME (Codec.decode term_var #> Codec.map_result Var)
+      | dec 3 = SOME (Codec.decode term_bound #> Codec.map_result Bound)
+      | dec 4 = SOME (Codec.decode (term_abs ()) #> Codec.map_result Abs)
+      | dec 5 = SOME (Codec.decode (term_app ()) #> Codec.map_result op $)
+      | dec _ = NONE
+  in Codec.variant enc dec "Pure.term" end
+
+val term = term ()
+
+end
\ No newline at end of file
diff --git a/src/main/isabelle/util.ML b/src/main/isabelle/util.ML
new file mode 100644
index 0000000000000000000000000000000000000000..2401cfb4bd8bcacc8120b11e71e5989f244b7e3e
--- /dev/null
+++ b/src/main/isabelle/util.ML
@@ -0,0 +1,281 @@
+fun map_result_exn _ (Exn.Res r) = Exn.Res r
+  | map_result_exn f (Exn.Exn exn) = Exn.Exn (f exn)
+
+fun print_position pos =
+  let
+    val print_file = Path.implode o Path.base o Path.explode
+  in
+    case (Position.file_of pos, Position.line_of pos) of
+      (SOME file, SOME line) => print_file file ^ ":" ^ Markup.print_int line
+    | (SOME file, NONE) => print_file file
+    | _ => "<unknown>"
+  end
+
+exception Unsupported of string
+exception Construction of string * exn
+exception Impossible of string
+exception Invalid_State of string
+exception Timeout of string
+
+local
+  fun format_message msg pos = msg ^ " (at " ^ print_position pos ^ ")"
+in
+
+fun impossible msg pos =
+  raise Impossible (format_message msg pos)
+
+fun invalid_state msg pos =
+  raise Invalid_State (format_message msg pos)
+
+fun the_error msg pos NONE = impossible ("expected SOME: " ^ msg) pos
+  | the_error _   _   (SOME x) = x
+
+fun single_error _   _   [t] = t
+  | single_error msg pos _ = impossible ("expected singleton list: " ^ msg) pos
+
+end
+
+fun print_exn (exn: exn) =
+  YXML.content_of (@{make_string} exn)
+
+fun check true _ = ()
+  | check false f = error (f ())
+
+fun dupl x = (x, x)
+
+val unvarify_typ =
+  let
+    fun aux (TVar ((name, idx), sort)) = TFree (name ^ Markup.print_int idx, sort)
+      | aux t = t
+  in map_atyps aux end
+
+val unvarify =
+  let
+    fun aux (Var ((name, idx), typ)) = Free (name ^ Markup.print_int idx, typ)
+      | aux t = t
+  in map_aterms aux o map_types unvarify_typ end
+
+fun print_term ctxt =
+  let
+    val ctxt' = Config.put show_markup false ctxt
+  in YXML.content_of o Syntax.string_of_term ctxt' o unvarify end
+
+fun print_typ ctxt =
+  let
+    val ctxt' = Config.put show_markup false ctxt
+  in YXML.content_of o Syntax.string_of_typ ctxt' o unvarify_typ end
+
+fun print_thm ctxt thm =
+  let
+    val t = Thm.prop_of thm
+    val {oracle, ...} = Thm.peek_status thm
+    val suffix = if oracle then " [!]" else ""
+  in
+    print_term ctxt t ^ suffix
+  end
+
+fun all_consts (Const (name, typ)) = [(name, typ)]
+  | all_consts (t $ u) = all_consts t @ all_consts u
+  | all_consts (Abs (_, _, t)) = all_consts t
+  | all_consts _ = []
+
+fun all_frees (Free (name, typ)) = [(name, typ)]
+  | all_frees (t $ u) = all_frees t @ all_frees u
+  | all_frees (Abs (_, _, t)) = all_frees t
+  | all_frees _ = []
+
+fun all_undeclared_frees ctxt =
+  all_frees
+  #> distinct op =
+  #> filter (not o Variable.is_declared ctxt o fst)
+
+fun register_terms ts lthy =
+  let
+    val ts' = Syntax.check_terms lthy ts |> map HOLogic.mk_Trueprop
+    val frees = distinct op = (maps (all_undeclared_frees lthy) ts')
+    val (free_names', lthy) = Variable.add_fixes (map fst frees) lthy
+    val frees' = map Free (free_names' ~~ map snd frees)
+    val frees = map Free frees
+    val ts'' = map (subst_free (frees ~~ frees')) ts'
+  in
+    (ts'', fold Variable.declare_term ts'' lthy)
+  end
+
+fun qualify ctxt name =
+  Context.theory_name (Proof_Context.theory_of ctxt) ^ "." ^ name
+
+fun compute_sccs nodes edges =
+  Graph.empty
+  |> fold Graph.new_node nodes
+  |> fold (fn edge => perhaps (try (Graph.add_edge edge))) edges
+  |> (fn graph => map (map (Graph.get_node graph)) (Graph.strong_conn graph))
+  |> rev
+
+val dummify_types = map_types (K dummyT)
+
+fun fold_opt _ [] y = SOME y
+  | fold_opt f (x :: xs) y = Option.mapPartial (fold_opt f xs) (f x y)
+
+fun fold_res _ [] y = Exn.Res y
+  | fold_res f (x :: xs) y = Exn.maps_result (fold_res f xs) (f x y)
+
+fun map_generalized f t =
+  case t of
+    Const (@{const_name Pure.all}, typ) $ Abs (name, typ', t) =>
+      (Const (@{const_name Pure.all}, typ) $ Abs (name, typ', map_generalized f t))
+  | _ =>
+      f t
+
+fun mk_untyped_eq (x, y) =
+  HOLogic.eq_const dummyT $ x $ y
+
+fun strip_first_prem t =
+  let
+    val prems = tl (Logic.strip_imp_prems t)
+    val concl = Logic.strip_imp_concl t
+  in Logic.list_implies (prems, concl) end
+
+fun map_prems f t =
+  let
+    val prems = Logic.strip_imp_prems t
+    val concl = Logic.strip_imp_concl t
+  in Logic.list_implies (map f prems, concl) end
+
+fun mk_induction_schema lthy thms =
+  let
+    val props = map (unvarify o Thm.prop_of) thms
+
+    val ((heads, argss), rhss) =
+      props
+      |> map (HOLogic.dest_eq o HOLogic.dest_Trueprop)
+      |> split_list
+      |>> map strip_comb |>> split_list
+
+    val names =
+      map (fst o dest_Const) heads
+      |> distinct op =
+      |> Name.invent_names (Variable.names_of (fold Variable.declare_names (flat argss) lthy)) "temp"
+      |> map swap
+
+    fun subst (t as Const (name, _)) =
+         (case AList.lookup op = names name of SOME new => Free (new, dummyT) | NONE => t)
+      | subst t = t
+
+    fun mk_prop head args rhs =
+      map_aterms subst rhs
+      |> pair (list_comb (subst head, args))
+      |> mk_untyped_eq
+      |> HOLogic.mk_Trueprop
+
+    fun mk_head name = (Binding.name name, NONE, NoSyn)
+    val props = map (pair (Binding.empty, [])) (Syntax.check_terms lthy (@{map 3} mk_prop heads argss rhss))
+    val heads = map (mk_head o snd) names
+
+    val (info, _) =
+      (* FIXME funny warning *)
+      Function.add_function heads props Function_Common.default_config pat_completeness_auto_tac lthy
+
+    val raw_prop = unvarify (Thm.prop_of (hd (#pinducts info)))
+    val prop = map_prems (map_generalized strip_first_prem) (strip_first_prem raw_prop)
+    val frees = distinct op = (map fst (all_frees prop))
+
+    fun tac ctxt =
+      Induction_Schema.induction_schema_tac ctxt [] THEN
+        HEADGOAL (Pat_Completeness.pat_completeness_tac ctxt) THEN
+        Lexicographic_Order.lexicographic_order_tac false ctxt
+
+    val thm = Goal.prove lthy frees [] prop (fn {context, ...} => tac context)
+    val lthy' = Local_Theory.restore lthy
+  in
+    Morphism.thm (Proof_Context.export_morphism lthy lthy') thm
+  end
+
+(* based on Thy_Info.script_thy *)
+fun script_thy pos txt thy =
+  let
+    val trs = flat
+      [[Toplevel.init_theory (K thy) Toplevel.empty],
+       Outer_Syntax.parse thy pos txt,
+       [Toplevel.exit Toplevel.empty]]
+    val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
+  in
+    Toplevel.end_theory pos end_state
+  end
+
+fun try_timeout secs f x =
+  let
+    val time = Time.fromSeconds secs
+  in
+    Exn.capture (TimeLimit.timeLimit time f) x
+  end
+
+fun try_case_to_simps ctxt thm =
+  the_default [thm] (try (Simps_Case_Conv.to_simps ctxt) thm)
+
+fun restore_eq thm =
+  case Thm.prop_of thm of
+    @{term Trueprop} $ (Const (@{const_name HOL.eq}, _) $ _ $ _) => thm
+  | _ => @{lemma "P \<Longrightarrow> P = True" by simp} OF [thm]
+
+fun homogenize_spec_types ctxt thms =
+  let
+    val get_head = dest_Const o fst o strip_comb o fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of
+    val paired = map (`get_head) thms
+
+    val head = map (fst o fst) paired
+      |> distinct op =
+      |> single_error "heads" @{here}
+
+    val typ = Sign.the_const_type (Proof_Context.theory_of ctxt) head
+
+    fun mk_subst (iname, (sort, inst)) =
+      (Thm.ctyp_of ctxt (TVar (iname, sort)), Thm.ctyp_of ctxt inst)
+
+    fun instantiate ((_, ttyp), thm) =
+      let
+        val subst = Type.raw_match (ttyp, typ) Vartab.empty |> Vartab.dest |> map mk_subst
+      in
+        Thm.instantiate (subst, []) thm
+      end
+  in
+    map instantiate paired
+  end
+
+fun unify_all_types ctxt ts =
+  let
+    val unif = Pattern.unify_types (Context.Proof ctxt)
+    fun aux (t1 :: t2 :: ts) env = aux (t2 :: ts) (unif (t1, t2) env)
+      | aux _ env = env
+  in
+    aux ts
+  end
+
+fun homogenize_raw_types ctxt terms =
+  let
+    val terms = map Logic.varify_types_global terms
+
+    val get_head = fst o dest_Free o fst o strip_comb o fst o HOLogic.dest_eq o HOLogic.dest_Trueprop
+    val heads = map get_head terms
+
+    fun typ_occurences head =
+      maps all_frees terms
+      |> filter (equal head o fst)
+      |> map snd
+
+    val problem = map typ_occurences heads
+    val env = Envir.empty (maxidx_of_typs (flat problem))
+
+    val env' = fold (unify_all_types ctxt) problem env
+
+    val solution = map_types (unvarify_typ o Envir.norm_type (Envir.type_env env'))
+  in
+    map solution terms
+    |> Syntax.check_terms ctxt (* fail early if something's wrong *)
+  end
+
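+(* Split a rule of the form  f x = (if c then a else b)  into conditional
+   rewrite rules, recursing into nested if-expressions on the right-hand side. *)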
+fun if_to_cond_simps thm =
+  case Thm.concl_of thm of
+    @{const Trueprop} $ (Const (@{const_name HOL.eq}, _) $ _ $ (Const (@{const_name If}, _) $ _ $ _ $ _)) =>
+      maps (fn s => if_to_cond_simps (s OF [thm])) @{thms if_to_cond_simps}
+  | _ =>
+      [thm]
\ No newline at end of file
diff --git a/src/main/scala/leon/LeonOption.scala b/src/main/scala/leon/LeonOption.scala
index 57a826d70dcfa04194a3ae98cfa8c6a9c478c328..a0a9c9d92ee78397cce95dd0a52042fa8bc63330 100644
--- a/src/main/scala/leon/LeonOption.scala
+++ b/src/main/scala/leon/LeonOption.scala
@@ -18,7 +18,7 @@ abstract class LeonOptionDef[+A] {
     else s"--$name=$usageRhs"
   }
   def helpString = {
-    f"$usageDesc%-26s" + description.replaceAll("\n", "\n" + " " * 26)
+    f"$usageDesc%-28s" + description.replaceAll("\n", "\n" + " " * 28)
   }
 
   private def parseValue(s: String)(implicit reporter: Reporter): A = {
diff --git a/src/main/scala/leon/Main.scala b/src/main/scala/leon/Main.scala
index d3f6f6d36b83423bd3b0c62cc286ce4d2704e6ab..6e2af3812045f1b7dbc613dabea2162585d30cda 100644
--- a/src/main/scala/leon/Main.scala
+++ b/src/main/scala/leon/Main.scala
@@ -22,13 +22,15 @@ object Main {
       termination.TerminationPhase,
       verification.AnalysisPhase,
       repair.RepairPhase,
-      evaluators.EvaluationPhase
+      evaluators.EvaluationPhase,
+      solvers.isabelle.AdaptationPhase,
+      solvers.isabelle.IsabellePhase
     )
   }
 
   // Add whatever you need here.
   lazy val allComponents : Set[LeonComponent] = allPhases.toSet ++ Set(
-    solvers.z3.FairZ3Component, MainComponent, SharedOptions, solvers.smtlib.SMTLIBCVC4Component
+    solvers.z3.FairZ3Component, MainComponent, SharedOptions, solvers.smtlib.SMTLIBCVC4Component, solvers.isabelle.Component
   )
 
   /*
@@ -43,12 +45,13 @@ object Main {
     val optTermination = LeonFlagOptionDef("termination", "Check program termination. Can be used along --verify", false)
     val optRepair      = LeonFlagOptionDef("repair",      "Repair selected functions",                             false)
     val optSynthesis   = LeonFlagOptionDef("synthesis",   "Partial synthesis of choose() constructs",              false)
+    val optIsabelle    = LeonFlagOptionDef("isabelle",    "Run Isabelle verification",                             false)
     val optNoop        = LeonFlagOptionDef("noop",        "No operation performed, just output program",           false)
     val optVerify      = LeonFlagOptionDef("verify",      "Verify function contracts",                             false)
     val optHelp        = LeonFlagOptionDef("help",        "Show help message",                                     false)
 
     override val definedOptions: Set[LeonOptionDef[Any]] =
-      Set(optTermination, optRepair, optSynthesis, optNoop, optHelp, optEval, optVerify)
+      Set(optTermination, optRepair, optSynthesis, optIsabelle, optNoop, optHelp, optEval, optVerify)
 
   }
 
@@ -144,6 +147,7 @@ object Main {
     import verification.AnalysisPhase
     import repair.RepairPhase
     import evaluators.EvaluationPhase
+    import solvers.isabelle.IsabellePhase
     import MainComponent._
 
     val helpF        = ctx.findOptionOrDefault(optHelp)
@@ -151,6 +155,7 @@ object Main {
     val synthesisF   = ctx.findOptionOrDefault(optSynthesis)
     val xlangF       = ctx.findOptionOrDefault(SharedOptions.optXLang)
     val repairF      = ctx.findOptionOrDefault(optRepair)
+    val isabelleF    = ctx.findOptionOrDefault(optIsabelle)
     val terminationF = ctx.findOptionOrDefault(optTermination)
     val verifyF      = ctx.findOptionOrDefault(optVerify)
     val evalF        = ctx.findOption(optEval).isDefined
@@ -172,6 +177,7 @@ object Main {
         else if (repairF) RepairPhase
         else if (analysisF) Pipeline.both(analysis, TerminationPhase)
         else if (terminationF) TerminationPhase
+        else if (isabelleF) IsabellePhase
         else if (evalF) EvaluationPhase
         else analysis
       }
diff --git a/src/main/scala/leon/purescala/Common.scala b/src/main/scala/leon/purescala/Common.scala
index 848577763f29005047571adfb6760a8ab574bc1b..63ec4a7d1d38245ba7b835524b6c5cd2aec4efed 100644
--- a/src/main/scala/leon/purescala/Common.scala
+++ b/src/main/scala/leon/purescala/Common.scala
@@ -37,7 +37,7 @@ object Common {
     */
   class Identifier private[Common](
     val name: String,
-    private[Common] val globalId: Int,
+    val globalId: Int,
     private[Common] val id: Int,
     private val tpe: TypeTree,
     private val alwaysShowUniqueID: Boolean = false
diff --git a/src/main/scala/leon/solvers/SolverFactory.scala b/src/main/scala/leon/solvers/SolverFactory.scala
index 27f4973e024bc25cabba723d8b566e12d055856e..6c8a0cb8e5f88d59c3cb6a1657d73a38a9a87c37 100644
--- a/src/main/scala/leon/solvers/SolverFactory.scala
+++ b/src/main/scala/leon/solvers/SolverFactory.scala
@@ -39,7 +39,8 @@ object SolverFactory {
     "smt-cvc4-cex"   -> "CVC4 through SMT-LIB, in-solver finite-model-finding, for counter-examples only",
     "unrollz3"       -> "Native Z3 with leon-templates for unfolding",
     "ground"         -> "Only solves ground verification conditions by evaluating them",
-    "enum"           -> "Enumeration-based counter-example-finder"
+    "enum"           -> "Enumeration-based counter-example-finder",
+    "isabelle"       -> "Isabelle2015 through libisabelle with various automated tactics"
   )
 
   val availableSolversPretty = "Available: " +
@@ -104,6 +105,9 @@ object SolverFactory {
     case "smt-cvc4-cex" =>
       SolverFactory(() => new SMTLIBCVC4CounterExampleSolver(ctx, program) with TimeoutSolver)
 
+    case "isabelle" =>
+      new isabelle.IsabelleSolverFactory(ctx, program)
+
     case _ =>
       ctx.reporter.error(s"Unknown solver $name")
       showSolvers(ctx)
diff --git a/src/main/scala/leon/solvers/isabelle/AdaptationPhase.scala b/src/main/scala/leon/solvers/isabelle/AdaptationPhase.scala
new file mode 100644
index 0000000000000000000000000000000000000000..d8acc2a21c12dc2497ab7acd792a403d15858a85
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/AdaptationPhase.scala
@@ -0,0 +1,70 @@
+package leon.solvers.isabelle
+
+import leon._
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.DefOps
+import leon.purescala.Expressions._
+import leon.purescala.TypeOps
+import leon.purescala.Types._
+import leon.utils._
+
+object AdaptationPhase extends TransformationPhase {
+
+  val name = "adaptation"
+  val description = "Function parameter adaptation"
+
+  implicit val debugSection = DebugSectionIsabelle
+
+  def apply(context: LeonContext, program: Program): Program = {
+    val dummy = program.library.Dummy match {
+      case Some(dummy) => dummy
+      case None => context.reporter.internalError("Definition of dummy type not found")
+    }
+
+    def mkDummyTyp(tp: TypeParameter) =
+      CaseClassType(dummy, List(tp))
+
+    def mkDummyParameter(tp: TypeParameter) =
+      ValDef(FreshIdentifier("dummy", mkDummyTyp(tp)), Some(mkDummyTyp(tp)))
+
+    def mkDummyArgument(tree: TypeTree) =
+      CaseClass(CaseClassType(dummy, List(tree)), Nil)
+
+    val enabled =
+      context.findOptionOrDefault(SharedOptions.optSelectedSolvers).contains("isabelle") ||
+      context.findOptionOrDefault(Main.MainComponent.optIsabelle)
+
+    if (!enabled) program
+    else {
+      context.reporter.debug("Running adaptation phase, because Isabelle is selected ...")
+
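+      // A function whose type parameters do not all occur in its parameter or
+      // result types gets one extra Dummy[T] argument per hidden parameter, so
+      // that every type parameter shows up in the translated signature.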
+      val map = program.definedFunctions.flatMap { fd =>
+        val expected = fd.tparams.map(_.tp).toSet
+        val actual = fd.params.map(_.getType).flatMap(TypeOps.typeParamsOf).toSet ++ TypeOps.typeParamsOf(fd.returnType).toSet
+        if (expected == actual)
+          None
+        else {
+          val diff: List[TypeParameter] = (expected -- actual).toList
+          context.reporter.debug(s"Rewriting function definition ${fd.qualifiedName(program)}: adding dummies for hidden type parameter(s) ${diff.map(_.id).mkString(" and ")}")
+          val nid = FreshIdentifier(fd.id.name, FunctionType(fd.params.map(_.getType) ++ diff.map(mkDummyTyp), fd.returnType))
+          val nfd = new FunDef(nid, fd.tparams, fd.returnType, fd.params ++ diff.map(mkDummyParameter))
+          nfd.copyContentFrom(fd)
+          nfd.setPos(fd.getPos)
+          Some(fd -> ((nfd, diff)))
+        }
+      }.toMap
+
+      def mapFunDef(fd: FunDef): Option[FunDef] = map.get(fd).map(_._1)
+
+      def mapFunInvocation(fi: FunctionInvocation, nfd: FunDef): Option[Expr] = map.get(fi.tfd.fd).map { case (_, diff) =>
+        val inst = (nfd.tparams.map(_.tp) zip fi.tfd.tps).toMap
+        val args = diff.map(t => mkDummyArgument(inst(t)))
+        FunctionInvocation(nfd.typed(fi.tfd.tps), fi.args ++ args)
+      }
+
+      DefOps.replaceFunDefs(program)(mapFunDef, mapFunInvocation)._1
+    }
+  }
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/Component.scala b/src/main/scala/leon/solvers/isabelle/Component.scala
new file mode 100644
index 0000000000000000000000000000000000000000..2e4cd5d67c40fbf7b9391f9bd3b63080b6228ca2
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/Component.scala
@@ -0,0 +1,63 @@
+package leon.solvers.isabelle
+
+import java.nio.file.Paths
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import leon._
+
+object Component extends LeonComponent {
+
+  val name = "isabelle"
+  val description = "Isabelle solver"
+
+  val leonBase =
+    Paths.get(Option(System.getProperty("leon.base")).getOrElse(".")).toRealPath()
+
+  val isabelleBase =
+    leonBase.resolve("contrib").toRealPath()
+
+  val optBase = LeonStringOptionDef(
+    name = "isabelle:base",
+    description = "Base directory of the Isabelle installation",
+    default = isabelleBase.toString,
+    usageRhs = "path"
+  )
+
+  val optDownload = LeonFlagOptionDef(
+    name = "isabelle:download",
+    description = "Automatic download of Isabelle",
+    default = false
+  )
+
+  val optBuild = LeonFlagOptionDef(
+    name = "isabelle:build",
+    description = "Automatic build of Isabelle/Leon",
+    default = true
+  )
+
+  val optMapping = LeonFlagOptionDef(
+    name = "isabelle:mapping",
+    description = "Enable processing of type and function mapping annotations",
+    default = true
+  )
+
+  val optDump = LeonStringOptionDef(
+    name = "isabelle:dump",
+    description = "Dump theory source files into the specified directory",
+    default = "",
+    usageRhs = "path"
+  )
+
+  val optStrict = LeonFlagOptionDef(
+    name = "isabelle:strict",
+    description = "Require proofs for indirectly referenced functions",
+    default = true
+  )
+
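+  // Illustrative invocation, assuming Leon's usual --name[=value] option syntax:
+  //   ./leon --isabelle --isabelle:download=true --isabelle:dump=/tmp/theories Input.scala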
+  override val definedOptions: Set[LeonOptionDef[Any]] =
+    Set(optBase, optDownload, optBuild, optMapping, optDump, optStrict)
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/Functions.scala b/src/main/scala/leon/solvers/isabelle/Functions.scala
new file mode 100644
index 0000000000000000000000000000000000000000..bad4b96acc157392278ae80787f0b537b4b13f77
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/Functions.scala
@@ -0,0 +1,152 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.math.BigInt
+
+import leon.LeonContext
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.purescala.ExprOps
+import leon.purescala.Types._
+import leon.utils._
+
+import edu.tum.cs.isabelle._
+
+final class Functions(context: LeonContext, program: Program, types: Types, funs: List[FunDef], system: System)(implicit ec: ExecutionContext) {
+
+  private implicit val debugSection = DebugSectionIsabelle
+  private val strict = context.findOptionOrDefault(Component.optStrict)
+
+  private val translator = new Translator(context, program, types, system)
+
+  private val allLemmas = program.definedFunctions.flatMap(_.checkLemma(program, context)).toMap
+
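+  // Transitively closes the given set of functions under callees and under
+  // lemmas that are about any referenced function, so that every definition
+  // needed by the translation is included.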
+  private def closure(funs: Set[FunDef]): Set[FunDef] = {
+    val referenced = funs.flatMap(program.callGraph.transitiveCallees)
+    val lemmas = allLemmas.filter(_._2.exists(referenced.contains)).keySet
+    if (lemmas.subsetOf(funs) && referenced.subsetOf(funs))
+      funs
+    else {
+      context.reporter.debug(s"Discovered relevant lemma(s): ${lemmas.map(_.qualifiedName(program)).mkString(", ")}")
+      closure(funs ++ referenced ++ lemmas)
+    }
+  }
+
+  private val relevant = closure(funs.toSet).toList
+
+  private val preGroups = program.callGraph.stronglyConnectedComponents.map(_.filter(relevant.contains)).filterNot(_.isEmpty).toSet
+  private val callGraph = preGroups.map { group => group ->
+    preGroups.filter(cand => cand.exists(to => group.exists(from => program.callGraph.calls(from, to))) && cand != group)
+  }.toMap
+
+  private val groups = GraphOps.topologicalSorting(callGraph) match {
+    case Right(seq) => seq.toList
+    case Left(_) => context.reporter.internalError("unexpected cycle in call graph")
+  }
+
+  context.reporter.debug(s"Functions to be defined: ${groups.map(_.map(_.qualifiedName(program)).mkString("{", ", ", "}")).mkString(", ")}")
+
+  private def globalLookup(data: List[FunDef])(fun: FunDef, typ: Typ) =
+    if (data.contains(fun))
+      Const(s"${theory}.${fun.id.mangledName}", typ)
+    else
+      context.reporter.internalError(s"Unknown function ${fun.qualifiedName(program)}")
+
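+  // Functions are declared and defined group by group, in topological order of
+  // the call graph; `data` accumulates the functions defined so far, which
+  // later groups may reference as constants.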
+  private def processGroup(group: Set[FunDef], data: List[FunDef]): Future[List[FunDef]] = {
+    def lookup(fun: FunDef, typ: Typ): Term =
+      if (group.contains(fun))
+        Free(fun.id.mangledName, typ)
+      else if (data.contains(fun))
+        Const(s"${theory}.${fun.id.mangledName}", typ)
+      else
+        context.reporter.internalError(s"Unknown function ${fun.qualifiedName(program)} while defining functions")
+
+    val defs = group.toList
+
+    context.reporter.debug(s"Defining function(s) ${defs.map(_.qualifiedName(program)).mkString(", ")} ...")
+
+    val pairs = defs.flatMap { fun =>
+      val name = fun.qualifiedName(program)
+      fun.extAnnotations.get("isabelle.function") match {
+        case Some(List(Some(name: String))) => Some(fun -> (fun.id.mangledName -> name))
+        case Some(List(_)) =>
+          context.reporter.fatalError(s"In function mapping for function definition $name: expected a string literal as name")
+        case Some(_) =>
+          context.reporter.internalError(s"Function mapping for function definition $name")
+        case None =>
+          None
+      }
+    }
+
+    system.invoke(Declare)(defs.map(_.id.mangledName)).assertSuccess(context).flatMap { case () => Future.traverse(defs) { fun =>
+      val name = fun.id.mangledName
+      val params = Future.traverse(fun.params.toList) { param =>
+        types.typ(param.getType, strict = true).map(param.id.mangledName -> _)
+      }
+
+      val full = fun.annotations.contains("isabelle.fullBody")
+      val none = fun.annotations.contains("isabelle.noBody")
+
+      val body = (full, none) match {
+        case (true, false) =>  translator.term(fun.fullBody, Nil, lookup)
+        case (false, true) =>  translator.mkFreshError(None)
+        case (false, false) => translator.term(fun.body.get, Nil, lookup)
+        case (true, true) =>   context.reporter.fatalError(s"Conflicting body annotations for function definition ${fun.qualifiedName(program)}")
+      }
+
+      for {
+        params <- params
+        body <- body
+        typ <- types.typ(fun.returnType, strict = true)
+      }
+      yield
+        (name, params, (body, typ))
+    }}.flatMap(system.invoke(Functions)).assertSuccess(context).flatMap {
+      case Some(msg) => context.reporter.fatalError(s"In function definition: $msg")
+      case None =>
+        def requireProof(fun: FunDef) = funs.contains(fun) || strict
+
+        val (checked, unchecked) = pairs.partition { case (fun, _) => requireProof(fun) }
+
+        val equivalences =
+          for {
+            failed <- system.invoke(Equivalences)(checked.map(_._2)).assertSuccess(context)
+            () <- system.invoke(AssumeEquivalences)(unchecked.map(_._2)).assertSuccess(context)
+          }
+          yield failed
+
+        equivalences.foreach {
+          case Nil => ()
+          case failed => context.reporter.warning(s"Equivalence check(s) for ${failed.mkString(" and ")} failed")
+        }
+        equivalences.map(_ => data ++ defs)
+    }.flatMap { data =>
+      def generateStatement(fun: FunDef): Option[Future[(String, Term)]] =
+        fun.statement.map(expr => translator.term(expr, Nil, globalLookup(data)).map(fun.id.mangledName -> _))
+
+      val lemmas =
+        Future.sequence(defs.filterNot(funs.contains).flatMap(generateStatement)).flatMap { statements =>
+          if (strict)
+            system.invoke(Lemmas)(statements).assertSuccess(context)
+          else
+            system.invoke(AssumeLemmas)(statements).assertSuccess(context).map(_ => Nil)
+        }
+
+      lemmas.foreach {
+        case Nil => ()
+        case failed => context.reporter.warning(s"Proof(s) of lemma(s) for ${failed.mkString(" and ")} failed")
+      }
+
+      lemmas.map(_ => data)
+    }
+  }
+
+  val data: Future[List[FunDef]] =
+    groups.foldLeft(Future.successful(List.empty[FunDef])) { (acc, group) =>
+      acc.flatMap(processGroup(group, _))
+    }
+
+  def term(expr: Expr): Future[Term] = data.flatMap(data => translator.term(expr, Nil, globalLookup(data)))
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/IsabelleEnvironment.scala b/src/main/scala/leon/solvers/isabelle/IsabelleEnvironment.scala
new file mode 100644
index 0000000000000000000000000000000000000000..33094b93682b53bf6a7e92b8d63280c810ded523
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/IsabelleEnvironment.scala
@@ -0,0 +1,160 @@
+package leon.solvers.isabelle
+
+import java.io.FileWriter
+import java.nio.file.{Files, Paths}
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import leon.LeonContext
+import leon.OptionsHelpers._
+import leon.SharedOptions
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.purescala.Common._
+import leon.solvers._
+import leon.utils._
+
+import edu.tum.cs.isabelle.{impl => impl2015, _}
+import edu.tum.cs.isabelle.api._
+import edu.tum.cs.isabelle.setup._
+
+object IsabelleEnvironment {
+
+  private implicit val debugSection = DebugSectionIsabelle
+
+  def apply(context: LeonContext, program: Program): Future[IsabelleEnvironment] = {
+    val version = Version(isabelleVersion)
+    val base = Paths.get(context.findOptionOrDefault(Component.optBase))
+    val download = context.findOptionOrDefault(Component.optDownload)
+    val build = context.findOptionOrDefault(Component.optBuild)
+    val dump = context.findOptionOrDefault(Component.optDump)
+    val strict = context.findOptionOrDefault(Component.optStrict)
+
+    val funFilter =
+      // FIXME duplicated from AnalysisPhase
+      filterInclusive[FunDef](context.findOption(SharedOptions.optFunctions).map(fdMatcher(program)), Some(_.annotations contains "library"))
+
+    val funs = program.definedFunctions.filter(funFilter)
+
+    val scripts = funs.flatMap { fun =>
+      val name = fun.qualifiedName(program)
+      fun.extAnnotations.get("isabelle.script") match {
+        case Some(List(Some(name: String), Some(source: String))) => Some(name -> source)
+        case Some(List(_, _)) =>
+          context.reporter.fatalError(s"In script for function definition $name: expected a string literal as name and a string literal as source")
+        case Some(_) =>
+          context.reporter.internalError(s"Script for function definition $name")
+        case None =>
+          None
+      }
+    }.toList
+
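+    // Reuse an existing Isabelle installation at `base` if one is detected;
+    // otherwise download and install one, but only if the download option is set.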
+    val setup = Setup.detectSetup(base, version) match {
+      case Some(setup) => Future.successful { setup }
+      case None if !download =>
+        context.reporter.fatalError(s"No $version found at $base. Please install it manually or set the '${Component.optDownload.name}' flag to true.")
+      case _ =>
+        context.reporter.info(s"No $version found at $base")
+        context.reporter.info(s"Preparing $version environment ...")
+        Setup.installTo(Files.createDirectories(base), version)
+    }
+
+    val system = setup.flatMap { setup =>
+      val env = new impl2015.Environment(setup.home)
+      val config = env.Configuration.fromPath(Component.leonBase, "Leon")
+
+      if (build) {
+        context.reporter.info(s"Building session ...")
+        if (!System.build(env)(config))
+          context.reporter.internalError("Build failed")
+      }
+
+      context.reporter.info(s"Starting $version instance ...")
+      System.create(env)(config)
+    }
+
+    val thy = system.flatMap { system =>
+      context.reporter.debug(s"Loading theory as $theory ...")
+
+      system.invoke(Load)(("Leon", theory, scripts)).assertSuccess(context).map {
+        case Nil =>
+          ()
+        case bad =>
+          context.reporter.fatalError(s"Could not process the following scripts: ${bad.mkString(", ")}")
+      }
+    }
+
+    val oracle =
+      if (strict) {
+        context.reporter.debug("Strict mode enabled; background proofs will be replayed in Isabelle")
+        Future.successful { () }
+      }
+      else {
+        context.reporter.warning("Strict mode disabled; proofs may be unsound")
+        thy.flatMap(_ => system.flatMap(_.invoke(Oracle)(()))).assertSuccess(context)
+      }
+
+    val types =
+      for {
+        s <- system
+        () <- thy
+        _ = context.reporter.debug(s"Defining types ...")
+      }
+      yield
+        new Types(context, program, s)
+
+    val functions =
+      for {
+        s <- system
+        t <- types
+        () <- oracle
+        _ <- t.data
+      }
+      yield
+        new Functions(context, program, t, funs, s)
+
+    functions.flatMap(_.data).foreach { _ =>
+      if (dump.isEmpty)
+        system.flatMap(_.invoke(Report)(())).assertSuccess(context).foreach { report =>
+          context.reporter.debug(s"Report for $theory ...")
+          report.foreach { case (key, value) =>
+            context.reporter.debug(s"$key: ${canonicalizeOutput(value)}")
+          }
+        }
+      else
+        system.flatMap(_.invoke(Dump)(())).assertSuccess(context).foreach { output =>
+          context.reporter.debug(s"Dumping theory sources to $dump ...")
+          val path = Files.createDirectories(Paths.get(dump))
+          output.foreach { case (name, content) =>
+            val writer = new FileWriter(path.resolve(s"$name.thy").toFile())
+            writer.write(content)
+            writer.close()
+          }
+        }
+    }
+
+    for {
+      s <- system
+      t <- types
+      f <- functions
+      _ <- t.data
+      _ <- f.data
+    }
+    yield new IsabelleEnvironment(context, program, t, f, s, funs)
+  }
+
+}
+
+final class IsabelleEnvironment private(
+    val context: LeonContext,
+    val program: Program,
+    val types: Types,
+    val functions: Functions,
+    val system: System,
+    val selectedFunDefs: List[FunDef]
+) {
+  def solver: IsabelleSolver with TimeoutSolver =
+    new IsabelleSolver(context, program, types, functions, system) with TimeoutSolver
+}
diff --git a/src/main/scala/leon/solvers/isabelle/IsabellePhase.scala b/src/main/scala/leon/solvers/isabelle/IsabellePhase.scala
new file mode 100644
index 0000000000000000000000000000000000000000..d7f2fcdc1f032ab7758155cc6bb274c57f45bae0
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/IsabellePhase.scala
@@ -0,0 +1,43 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import leon._
+import leon.purescala.Definitions._
+import leon.utils._
+import leon.verification._
+
+object IsabellePhase extends LeonPhase[Program, VerificationReport] {
+
+  object IsabelleVC extends VCKind("Isabelle", "isa")
+
+  val name = "isabelle"
+  val description = "Isabelle verification"
+
+  implicit val debugSection = DebugSectionIsabelle
+
+  def run(context: LeonContext)(program: Program): VerificationReport = {
+    val env = IsabelleEnvironment(context, program)
+
+    val report = env.map { env =>
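+      // Each selected function with a postcondition yields one VC; it is
+      // reported valid if the Prove operation returns a theorem, unknown otherwise.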
+      def prove(fd: FunDef) = fd.statement.map { expr =>
+        context.reporter.debug(s"Proving postcondition of ${fd.qualifiedName(program)} ...")
+        val vc = VC(expr, fd, IsabelleVC).setPos(fd.getPos)
+        val term = env.functions.term(expr)
+        val input = term.map(t => (List(t), fd.proofMethod(vc, context)))
+        val result = Await.result(input.flatMap(env.system.invoke(Prove)).assertSuccess(context), Duration.Inf) match {
+          case Some(thm) => VCResult(VCStatus.Valid, None, None)
+          case None => VCResult(VCStatus.Unknown, None, None)
+        }
+        vc -> Some(result)
+      }
+
+      VerificationReport(program, env.selectedFunDefs.flatMap(prove).toMap)
+    }
+
+    Await.result(report, Duration.Inf)
+  }
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/IsabelleSolver.scala b/src/main/scala/leon/solvers/isabelle/IsabelleSolver.scala
new file mode 100644
index 0000000000000000000000000000000000000000..40c754bc62b26a5bea51c9b00a84f5fb0d868efe
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/IsabelleSolver.scala
@@ -0,0 +1,96 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.concurrent.duration._
+
+import scala.math.BigInt
+
+import leon.LeonContext
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.solvers._
+import leon.utils.Interruptible
+import leon.verification.VC
+
+import edu.tum.cs.isabelle._
+import edu.tum.cs.isabelle.api.ProverResult
+
+class IsabelleSolver(val context: LeonContext, program: Program, types: Types, functions: Functions, system: System) extends Solver with Interruptible { self: TimeoutSolver =>
+
+  context.interruptManager.registerForInterrupts(this)
+
+  import system.executionContext
+
+  private def timeout = optTimeout.map(_.millis).getOrElse(Duration.Inf)
+
+  val name = "isabelle"
+
+  private var pending: List[Future[Term]] = Nil
+  private var method: Option[String] = None
+  private var running: Option[CancellableFuture[_]] = None
+
+  def reset() = { pending = Nil; method = None; running = None }
+  private def addPending(future: Future[Term]) = { pending ::= future }
+  private def sequencePending() = { Future.sequence(pending) }
+
+  override def assertVC(vc: VC): Unit = {
+    val name = vc.fd.qualifiedName(program)
+    addPending(functions.term(vc.condition))
+    (vc.fd.proofMethod(vc, context), method) match {
+      case (None, _) =>
+      case (Some(m1), Some(m2)) if m1 == m2 =>
+      case (Some(m1), Some(m2)) => context.reporter.error(s"In proof hint for function definition $name: can't override existing method $m2 with method $m1")
+      case (Some(m1), None) => method = Some(m1)
+    }
+  }
+
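+  // Sends the pending goals to Isabelle, using the proof method hint if one
+  // was given; a successful proof is reported as Some(false), anything else
+  // (no proof, timeout, cancellation) as None.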
+  def check: Option[Boolean] = {
+    val verdict = sequencePending().flatMapC { ts =>
+      Future.traverse(ts)(system.invoke(Pretty)(_).assertSuccess(context)).foreach { strs =>
+        context.reporter.debug(s"Attempting to prove disjunction of ${canonicalizeOutput(strs.mkString(", "))}")
+      }
+
+      system.cancellableInvoke(Prove)((ts, method))
+    }
+    running = Some(verdict)
+
+    try {
+      Await.result(verdict.future.assertSuccess(context), timeout) match {
+        case Some(thm) =>
+          context.reporter.debug(s"Proved theorem: ${canonicalizeOutput(thm)}")
+          Some(false)
+        case None =>
+          None
+      }
+    }
+    catch {
+      case _: TimeoutException =>
+        context.reporter.info("Isabelle timed out")
+        verdict.cancel()
+        None
+      case _: CancellationException =>
+        None
+    }
+  }
+
+  def free(): Unit = {}
+  def interrupt(): Unit = { running.foreach(_.cancel()) }
+  def recoverInterrupt(): Unit = {}
+
+  def checkAssumptions(assumptions: Set[Expr]): Option[Boolean] = None
+
+  // 'check' never returns 'Some(true)'
+  def getModel = sys.error("impossible")
+
+  // 'checkAssumptions' never returns 'Some'
+  def getUnsatCore = sys.error("impossible")
+
+  // custom 'assertVC'
+  def assertCnstr(expression: Expr): Unit = sys.error("impossible")
+
+  // unimplemented
+  def pop(): Unit = ???
+  def push(): Unit = ???
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/IsabelleSolverFactory.scala b/src/main/scala/leon/solvers/isabelle/IsabelleSolverFactory.scala
new file mode 100644
index 0000000000000000000000000000000000000000..190224cb661991095bd4eb791b79cafbdd426066
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/IsabelleSolverFactory.scala
@@ -0,0 +1,21 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import leon.LeonContext
+import leon.purescala.Definitions._
+import leon.solvers._
+
+final class IsabelleSolverFactory(context: LeonContext, program: Program) extends SolverFactory[TimeoutSolver] {
+
+  private val env = IsabelleEnvironment(context, program)
+
+  override def shutdown() =
+    Await.result(env.flatMap(_.system.dispose), Duration.Inf)
+
+  def getNewSolver() =
+    Await.result(env.map(_.solver), Duration.Inf)
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/Translator.scala b/src/main/scala/leon/solvers/isabelle/Translator.scala
new file mode 100644
index 0000000000000000000000000000000000000000..47d62bb5c7a9677b96e2ebda534794e1e89cc8c0
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/Translator.scala
@@ -0,0 +1,323 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.math.BigInt
+
+import leon.LeonContext
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.purescala.ExprOps
+import leon.purescala.Types._
+import leon.utils._
+
+import edu.tum.cs.isabelle._
+
+final class Translator(context: LeonContext, program: Program, types: Types, system: System)(implicit ec: ExecutionContext) {
+
+  private implicit val debugSection = DebugSectionIsabelle
+
+  private def mkApp(f: Term, args: Term*): Term =
+    args.foldLeft(f) { (acc, arg) => App(acc, arg) }
+
+  private def lookupConstructor(typ: CaseClassType): Future[Constructor] =
+    types.data.map(_.get(Types.findRoot(typ).classDef).flatMap(_.constructors.get(typ.classDef)) match {
+      case Some(constr) => constr
+      case None => context.reporter.internalError(s"$typ not found in program")
+    })
+
+  private def mkFresh(typ: Option[TypeTree]) =
+    system.invoke(Fresh)("fresh''").assertSuccess(context).flatMap { name =>
+      types.optTyp(typ).map(Free(name, _))
+    }
+
+  def mkFreshError(typ: Option[TypeTree]): Future[Term] =
+    system.invoke(SerialNat)(()).assertSuccess(context).flatMap { nat =>
+      types.optTyp(typ).map { typ =>
+        App(Const("Leon_Library.error", Type("fun", List(Typ.dummyT, typ))), nat)
+      }
+    }
+
+  private def arity(typ: TypeTree): Int = typ match {
+    case t: TupleType => t.dimension
+    case _ => context.reporter.internalError(s"Expected tuple type, got $typ")
+  }
+
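+  // Translates a PureScala expression to an Isabelle term. `bounds` lists the
+  // lambda-bound identifiers (innermost first) used for de Bruijn indices;
+  // `consts` resolves function definitions to the corresponding constants.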
+  def term(expr: Expr, bounds: List[Identifier], consts: (FunDef, Typ) => Term): Future[Term] = {
+    def mkAbs(params: List[Identifier], body: Expr, bounds: List[Identifier]): Future[Term] = params match {
+      case Nil => term(body, bounds, consts)
+      case p :: ps =>
+        for {
+          rec <- mkAbs(ps, body, p :: bounds)
+          typ <- types.typ(p.getType)
+        }
+        yield
+          Abs(p.mangledName /* name hint, no logical content */, typ, rec)
+    }
+
+    def nary(const: Term, xs: Expr*) =
+      Future.traverse(xs.toList)(term(_, bounds, consts)).map(ts => mkApp(const, ts: _*))
+
+    def flexary1(const: Term, xs: Seq[Expr]) =
+      Future.traverse(xs.toList)(term(_, bounds, consts)).map(_.reduceRight { (t, acc) => mkApp(const, t, acc) })
+
+    def flexary(const: Term, init: Term, xs: Seq[Expr]) =
+      Future.traverse(xs.toList)(term(_, bounds, consts)).map(_.foldRight(init) { (t, acc) => mkApp(const, t, acc) })
+
+    def inSet(set: Term, elem: Expr) =
+      term(elem, bounds, consts).map(mkApp(Const("Set.member", Typ.dummyT), _, set))
+
+    def lets(bindings: List[(Identifier, Term)], body: Expr) = {
+      val referenced = ExprOps.variablesOf(body)
+      val filtered = bindings.filter { case (id, _) => referenced contains id }
+
+      val (names, rhss) = filtered.unzip
+      rhss.foldLeft(mkAbs(names, body, bounds)) { (term, rhs) =>
+        term.map(mkApp(Const("HOL.Let", Typ.dummyT), rhs, _))
+      }
+    }
+
+    def let(name: Identifier, rhs: Term, body: Expr) =
+      lets(List(name -> rhs), body)
+
+    def precondition(pred: Expr, body: Expr) =
+      for {
+        p <- term(pred, bounds, consts)
+        b <- term(body, bounds, consts)
+        e <- mkFreshError(Some(body.getType))
+      }
+      yield
+        mkApp(Const("HOL.If", Typ.dummyT), p, b, e)
+
+    def postcondition(pred: Expr, body: Expr) =
+      for {
+        p <- term(pred, bounds, consts)
+        b <- term(body, bounds, consts)
+        e <- mkFreshError(Some(body.getType))
+      }
+      yield
+        mkApp(Const("HOL.If", Typ.dummyT), App(p, b), b, e)
+
+    def pattern(pat: Pattern): Future[Option[(Term, List[(Identifier, Term)])]] = {
+      val res = pat match {
+        case CaseClassPattern(_, typ, pats) =>
+          Future.traverse(pats.toList)(pattern).flatMap { _.sequence match {
+            case Some(args) =>
+              val (pats, bindings) = args.unzip
+
+              lookupConstructor(typ).map(_.term).map { head =>
+                Some((mkApp(head, pats: _*), bindings.flatten))
+              }
+            case None =>
+              Future.successful(None)
+          }}
+
+        case InstanceOfPattern(_, typ: CaseClassType) =>
+          lookupConstructor(typ).flatMap { case Constructor(term, _, sels) =>
+            val length = sels.size
+            Future.traverse(1 to length) { _ => mkFresh(None) }.map { vars =>
+              Some((mkApp(term, vars: _*), Nil))
+            }
+          }
+
+        case TuplePattern(_, pats) =>
+          Future.traverse(pats.toList)(pattern).map { _.sequence match {
+            case Some(args) =>
+              val (pats, bindings) = args.unzip
+              val pat = pats.reduceRight { (p, acc) => mkApp(Const("Product_Type.Pair", Typ.dummyT), p, acc) }
+              Some((pat, bindings.flatten))
+            case None =>
+              None
+          }}
+
+        case WildcardPattern(None) =>
+          mkFresh(pat.binder.map(_.getType)).map { term => Some((term, Nil)) }
+
+        case WildcardPattern(Some(id)) =>
+          types.typ(id.getType).map { typ => Some((Free(id.mangledName, typ), Nil)) }
+
+        case _ =>
+          Future.successful(None)
+      }
+
+      val bind = pat match {
+        case WildcardPattern(_) => false
+        case _ => true
+      }
+
+      pat.binder match {
+        case Some(id) if bind => res.map(_.map { case (term, bindings) =>
+          (term, (id -> term) :: bindings)
+        })
+        case _ => res
+      }
+    }
+
+    def clause(clause: MatchCase) = clause.optGuard match {
+      case Some(_) =>
+        Future.successful(None)
+      case None =>
+        pattern(clause.pattern).flatMap {
+          case Some((pat, bindings)) =>
+            lets(bindings, clause.rhs).map { term => Some(pat -> term) }
+          case None =>
+            Future.successful(None)
+        }
+    }
+
+    expr match {
+      case Variable(id) =>
+        types.typ(id.getType) map { typ =>
+          bounds.indexOf(id) match {
+            case -1 => Free(id.mangledName, typ)
+            case n => Bound(n)
+          }
+        }
+
+      case Let(x, t, y) =>
+        term(t, bounds, consts).flatMap { let(x, _, y) }
+
+      case Lambda(args, expr) =>
+        mkAbs(args.map(_.id).toList, expr, bounds)
+
+      case CaseClass(typ, exprs) =>
+        lookupConstructor(typ).map(_.term).flatMap { nary(_, exprs: _*) }
+
+      case CaseClassSelector(typ, expr, sel) =>
+        lookupConstructor(typ).map(_.sels.collectFirst {
+          case (field, term) if field.id == sel => term
+        }).flatMap {
+          case Some(term) => nary(term, expr)
+          case None => context.reporter.internalError(s"$typ or $sel not found in program")
+        }
+
+      case IsInstanceOf(expr, typ: CaseClassType) =>
+        lookupConstructor(typ).map(_.disc).flatMap { nary(_, expr) }
+
+      case AsInstanceOf(expr, _) =>
+        term(expr, bounds, consts)
+
+      case Tuple(exprs) =>
+        flexary1(Const("Product_Type.Pair", Typ.dummyT), exprs)
+
+      case TupleSelect(expr, i) =>
+        def sel(i: Int, len: Int, term: Term): Term = (i, len) match {
+          case (1, _) => App(Const("Product_Type.prod.fst", Typ.dummyT), term)
+          case (2, 2) => App(Const("Product_Type.prod.snd", Typ.dummyT), term)
+          case _ => App(Const("Product_Type.prod.snd", Typ.dummyT), sel(i - 1, len - 1, term))
+        }
+
+        term(expr, bounds, consts).map(sel(i, arity(expr.getType), _))
+
+      case MatchExpr(scrutinee, clauses) =>
+        term(scrutinee, bounds, consts).flatMap { scrutinee =>
+          Future.traverse(clauses.toList)(clause).flatMap { _.sequence match {
+            case Some(clauses) =>
+              val catchall =
+                for {
+                  pat <- mkFresh(None)
+                  rhs <- mkFreshError(Some(expr.getType))
+                }
+                yield (pat, rhs)
+
+              types.typ(expr.getType).flatMap { typ =>
+                catchall.flatMap { catchall =>
+                  Future.traverse(bounds) { id =>
+                    types.typ(id.getType).map(id.mangledName -> _)
+                  }.flatMap { bounds =>
+                    system.invoke(Cases)(((scrutinee, bounds), (typ, clauses :+ catchall))).assertSuccess(context)
+                  }
+                }
+              }
+            case None =>
+              context.reporter.warning("Match uses unsupported features; translating to if-then-else")
+              term(ExprOps.matchToIfThenElse(expr), bounds, consts)
+          }}
+        }
+
+      case FunctionInvocation(fun, args) =>
+        types.functionTyp(args.map(_.getType), expr.getType).flatMap { typ =>
+          nary(consts(fun.fd, typ), args: _*)
+        }
+
+      case Application(fun, args) =>
+        term(fun, bounds, consts).flatMap(nary(_, args: _*))
+
+      case Error(tpe, _) =>
+        mkFreshError(Some(tpe))
+
+      case UnitLiteral() =>         Future.successful { Const("Product_Type.Unity", Typ.dummyT) }
+      case BooleanLiteral(true) =>  Future.successful { Const("HOL.True", Typ.dummyT) }
+      case BooleanLiteral(false) => Future.successful { Const("HOL.False", Typ.dummyT) }
+
+      case InfiniteIntegerLiteral(n) =>
+        system.invoke(NumeralLiteral)((n, Type("Int.int", Nil))).assertSuccess(context)
+
+      case CharLiteral(c) =>
+        types.char.flatMap(typ => system.invoke(NumeralLiteral)((c.toLong, typ))).assertSuccess(context)
+
+      case IntLiteral(n) =>
+        types.typ(expr.getType).flatMap(typ => system.invoke(NumeralLiteral)((n, typ))).assertSuccess(context)
+
+      case Assert(pred, _, body) => precondition(pred, body)
+      case Require(pred, body) =>   precondition(pred, body)
+      case Ensuring(body, pred) =>  postcondition(pred, body)
+
+      case IfExpr(x, y, z) => nary(Const("HOL.If", Typ.dummyT), x, y, z)
+      case Not(x) =>          nary(Const("HOL.Not", Typ.dummyT), x)
+      case Implies(x, y) =>   nary(Const("HOL.implies", Typ.dummyT), x, y)
+      case And(xs) =>         flexary1(Const("HOL.conj", Typ.dummyT), xs)
+      case Or(xs) =>          flexary1(Const("HOL.disj", Typ.dummyT), xs)
+
+      // > and >= are translated via Orderings' less / less_eq with the operands swapped
+      case Equals(x, y) =>        nary(Const("HOL.eq", Typ.dummyT), x, y)
+      case LessThan(x, y) =>      nary(Const("Orderings.ord_class.less", Typ.dummyT), x, y)
+      case LessEquals(x, y) =>    nary(Const("Orderings.ord_class.less_eq", Typ.dummyT), x, y)
+      case GreaterThan(x, y) =>   nary(Const("Orderings.ord_class.less", Typ.dummyT), y, x)
+      case GreaterEquals(x, y) => nary(Const("Orderings.ord_class.less_eq", Typ.dummyT), y, x)
+
+      case Plus(x, y) =>  nary(Const("Groups.plus_class.plus", Typ.dummyT), x, y)
+      case Minus(x, y) => nary(Const("Groups.minus_class.minus", Typ.dummyT), x, y)
+      case UMinus(x) =>   nary(Const("Groups.uminus_class.uminus", Typ.dummyT), x)
+      case Times(x, y) => nary(Const("Groups.times_class.times", Typ.dummyT), x, y)
+
+      case RealPlus(x, y) =>  nary(Const("Groups.plus_class.plus", Typ.dummyT), x, y)
+      case RealMinus(x, y) => nary(Const("Groups.minus_class.minus", Typ.dummyT), x, y)
+      case RealUMinus(x) =>   nary(Const("Groups.uminus_class.uminus", Typ.dummyT), x)
+      case RealTimes(x, y) => nary(Const("Groups.times_class.times", Typ.dummyT), x, y)
+
+      case FiniteSet(elems, _) =>   flexary(Const("Set.insert", Typ.dummyT), Const("Set.empty", Typ.dummyT), elems.toList)
+      case SetUnion(x, y) =>        nary(Const("Lattices.sup_class.sup", Typ.dummyT), x, y)
+      case SetIntersection(x, y) => nary(Const("Lattices.inf_class.inf", Typ.dummyT), x, y)
+      case SetDifference(x, y) =>   nary(Const("Groups.minus_class.minus", Typ.dummyT), x, y)
+      case SubsetOf(x, y) =>        nary(Const("Orderings.ord_class.less_eq", Typ.dummyT), x, y)
+      case ElementOfSet(elem, set) => term(set, bounds, consts).flatMap(inSet(_, elem))
+
+      case FiniteMap(elems, from, to) =>
+        val es = Future.traverse(elems.toList) { case (k, v) => term(k, bounds, consts) zip term(v, bounds, consts) }
+        for {
+          f <- types.typ(from)
+          t <- types.typ(to)
+          es <- es
+          res <- system.invoke(MapLiteral)((es, (f, t))).assertSuccess(context)
+        }
+        yield res
+
+      case MapApply(map, key) =>
+        term(map, bounds, consts).flatMap(nary(_, key)).map(App(Const("Option.option.the", Typ.dummyT), _))
+      case MapIsDefinedAt(map, key) =>
+        term(map, bounds, consts).flatMap { map =>
+          inSet(App(Const("Map.dom", Typ.dummyT), map), key)
+        }
+      case MapUnion(x, y) => nary(Const("Map.map_add", Typ.dummyT), x, y)
+
+      case Choose(pred) =>
+        nary(Const("Hilbert_Choice.Eps", Typ.dummyT), pred)
+
+      case _ =>
+        context.reporter.warning(s"Unknown expression $expr, translating to unspecified term")
+        mkFreshError(Some(expr.getType))
+    }
+  }
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/Types.scala b/src/main/scala/leon/solvers/isabelle/Types.scala
new file mode 100644
index 0000000000000000000000000000000000000000..0e609155104023776b5c53934db9cc15cb09c811
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/Types.scala
@@ -0,0 +1,190 @@
+package leon.solvers.isabelle
+
+import scala.concurrent._
+import scala.math.BigInt
+
+import leon.LeonContext
+import leon.purescala.Common._
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.purescala.Types._
+
+import edu.tum.cs.isabelle._
+
+case class Constructor(term: Term, disc: Term, sels: Map[ValDef, Term])
+case class Datatype(typ: String, constructors: Map[CaseClassDef, Constructor])
+
+object Types {
+
+  def findRoot(typ: ClassType): ClassType =
+    typ.parent.map(findRoot).getOrElse(typ)
+
+}
+
+final class Types(context: LeonContext, program: Program, system: System)(implicit ec: ExecutionContext) {
+
+  lazy val char = system.invoke(WordLiteral)(32).assertSuccess(context)
+
+  private val enableMapping = context.findOptionOrDefault(Component.optMapping)
+
+  private val (mapped, unmapped) =
+    program.classHierarchyRoots.map { root =>
+      val descendants = root match {
+        case ccd: CaseClassDef => List(ccd)
+        case _ => root.knownDescendants.collect { case ccd: CaseClassDef => ccd }
+      }
+
+      root -> descendants
+    }.toMap.partition { case (root, descendants) =>
+      root.annotations.contains("isabelle.typ") && {
+        if (!enableMapping)
+          context.reporter.warning(s"Class definition ${root.id} is mapped, but mapping is disabled")
+        enableMapping
+      }
+    }
+
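+  // Class hierarchies annotated with @isabelle.typ are mapped onto existing
+  // Isabelle datatypes, looked up by name together with their constructors;
+  // all other hierarchies are translated to fresh datatype definitions below.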
+  private val mappedData: Future[Map[ClassDef, Datatype]] = Future.traverse(mapped) { case (root, descendants) =>
+    val name = root.extAnnotations("isabelle.typ") match {
+      case List(Some(name: String)) => name
+      case List(_) => context.reporter.fatalError(s"In type mapping for class definition ${root.id}: expected a string literal as name")
+      case _ => context.reporter.internalError(s"Type mapping for class definition ${root.id}")
+    }
+
+    val constructors = descendants.map { desc =>
+      val name = desc.extAnnotations.get("isabelle.constructor") match {
+        case Some(List(Some(name: String))) => name
+        case Some(List(_)) =>
+          context.reporter.fatalError(s"In constructor mapping for class definition ${desc.id}: expected a string literal as name")
+        case None =>
+          context.reporter.fatalError(s"Expected constructor mapping for class definition ${desc.id}, because it inherits from ${root.id} which has a type mapping")
+        case Some(_) =>
+          context.reporter.internalError(s"Constructor mapping for class definition ${desc.id}")
+      }
+
+      (desc.id.mangledName, name)
+    }.toList
+
+    system.invoke(LookupDatatype)(name -> constructors).assertSuccess(context).map {
+      case Left(err) =>
+        context.reporter.fatalError(s"In mapping for class definition ${root.id}: $err")
+      case Right(constructors) => root ->
+        Datatype(
+          typ = name,
+          constructors =
+            constructors.map { case (name, (term, disc, sels)) =>
+              val desc = descendants.find(_.id.mangledName == name).get
+              if (desc.fields.length != sels.length)
+                context.reporter.fatalError(s"Invalid constructor mapping for ${desc.id}: Isabelle constructor $name has a different number of arguments")
+              desc -> Constructor(term, disc, (desc.fields, sels).zipped.toMap)
+            }.toMap
+        )
+    }
+  }.map(_.toMap)
+
+  def optTyp(tree: Option[TypeTree]): Future[Typ] = tree match {
+    case Some(tree) => typ(tree)
+    case None => Future.successful(Typ.dummyT)
+  }
+
+  def typ(tree: TypeTree, subst: Map[Identifier, Identifier] = Map.empty, strict: Boolean = false): Future[Typ] = {
+    def flexary1(constructor: String, ts: Seq[TypeTree]): Future[Typ] =
+      Future.traverse(ts.toList)(typ(_, subst, strict)).map(_.reduceRight { (t, acc) => Type(constructor, List(t, acc)) })
+
+    tree match {
+      case BooleanType => Future.successful { Type("HOL.bool", Nil) }
+      case IntegerType => Future.successful { Type("Int.int", Nil) }
+      case UnitType => Future.successful { Type("Product_Type.unit", Nil) }
+      case RealType => Future.successful { Type("Real.real", Nil) }
+      case CharType => char
+      case TypeParameter(id) =>
+        val id0 = subst.getOrElse(id, id)
+        Future.successful { TFree(s"'${id0.mangledName}", List("HOL.type")) }
+      case SetType(base) =>
+        typ(base, subst, strict).map { typ => Type("Set.set", List(typ)) }
+      case TupleType(bases) =>
+        flexary1("Product_Type.prod", bases)
+      case FunctionType(args, res) =>
+        for {
+          r <- typ(res, subst, strict)
+          as <- Future.traverse(args)(typ(_, subst, strict))
+        }
+        yield
+          as.foldRight(r) { (t, acc) => Type("fun", List(t, acc)) }
+      case bvt: BitVectorType =>
+        system.invoke(WordLiteral)(bvt.size).assertSuccess(context)
+      case MapType(from, to) =>
+        for {
+          f <- typ(from, subst, strict)
+          t <- typ(to, subst, strict)
+        }
+        yield
+          Type("fun", List(f, Type("Option.option", List(t))))
+      case ct: ClassType =>
+        val root = Types.findRoot(ct)
+        Future.traverse(root.tps.toList) { typ(_, subst, strict) }.flatMap { args =>
+          mappedData.map { _.get(root.classDef) match {
+            case None => s"$theory.${root.id.mangledName}"
+            case Some(datatype) => datatype.typ
+          }}.map { Type(_, args) }
+        }
+      case _ if strict =>
+        context.reporter.fatalError(s"Unsupported type $tree, can't be inferred")
+      case _ =>
+        context.reporter.warning(s"Unknown type $tree, translating to dummy (to be inferred)")
+        Future.successful { Typ.dummyT }
+    }
+  }
+
+  def functionTyp(args: Seq[TypeTree], res: TypeTree): Future[Typ] =
+    typ(res).flatMap { res =>
+      Future.traverse(args.toList)(typ(_)).map(_.foldRight(res) { (t, acc) => Type("fun", List(t, acc)) })
+    }
+
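+  // Unmapped hierarchies become new datatypes in the generated theory; the
+  // c/d/s name prefixes distinguish constructor, discriminator and selector
+  // constants for each case class.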
+  private val unmappedData: Future[Map[ClassDef, Datatype]] = {
+    val entries = unmapped.map { case (root, descendants) =>
+      val name = s"$theory.${root.id.mangledName}"
+
+      if (!enableMapping)
+        descendants.foreach { desc =>
+          if (desc.annotations.contains("isabelle.constructor"))
+            context.reporter.warning(s"Ignoring constructor mapping of class definition ${desc.id}, because its root ${root.id} has no type mapping")
+        }
+
+      val constructors = descendants.map { desc => desc ->
+        Constructor(
+          term = Const(s"$name.c${desc.id.mangledName}", Typ.dummyT),
+          disc = Const(s"$name.d${desc.id.mangledName}", Typ.dummyT),
+          sels = desc.fields.map { field => field ->
+            Const(s"$name.s${field.id.mangledName}", Typ.dummyT)
+          }.toMap
+        )
+      }.toMap
+
+      root -> Datatype(name, constructors)
+    }
+
+    val translation = Future.traverse(entries.toList) { case (cls, Datatype(_, constructors)) =>
+      val tparams = cls.tparams.map(_.id)
+      Future.traverse(constructors.toList) { case (subcls, Constructor(_, _, sels)) =>
+        val subst = (subcls.tparams.map(_.id), tparams).zipped.toMap
+        Future.traverse(subcls.fields.toList) { field =>
+          typ(field.getType, subst, true).map { typ =>
+            field.id.mangledName -> typ
+          }
+        }.map {
+          subcls.id.mangledName -> _
+        }
+      }.map {
+        (cls.id.mangledName, tparams.toList.map(_.mangledName), _)
+      }
+    }
+
+    translation.flatMap(system.invoke(Datatypes)).assertSuccess(context).map {
+      case None => entries
+      case Some(msg) => context.reporter.fatalError(s"In datatype definition: $msg")
+    }
+  }
+
+  val data = for { d1 <- mappedData; d2 <- unmappedData } yield d1 ++ d2
+
+}
diff --git a/src/main/scala/leon/solvers/isabelle/package.scala b/src/main/scala/leon/solvers/isabelle/package.scala
new file mode 100644
index 0000000000000000000000000000000000000000..421d64091da4f2c125dd25a2a52c611210e0e7c3
--- /dev/null
+++ b/src/main/scala/leon/solvers/isabelle/package.scala
@@ -0,0 +1,136 @@
+package leon
+package solvers.isabelle
+
+import scala.concurrent._
+import scala.math.BigInt
+import scala.reflect.NameTransformer
+
+import leon.LeonContext
+import leon.purescala.Common.Identifier
+import leon.purescala.Definitions._
+import leon.purescala.Expressions._
+import leon.verification.VC
+
+import edu.tum.cs.isabelle._
+import edu.tum.cs.isabelle.api.ProverResult
+
+object `package` {
+
+  val theory = "Leon_Runtime"
+  val isabelleVersion = "2015"
+
+
+  implicit class FutureResultOps[A](val future: Future[ProverResult[A]]) extends AnyVal {
+    def assertSuccess(context: LeonContext)(implicit ec: ExecutionContext): Future[A] = future map {
+      case ProverResult.Success(a) => a
+      case ProverResult.Failure(err: Operation.ProverException) => context.reporter.internalError(err.fullMessage)
+      case ProverResult.Failure(err) => context.reporter.internalError(err.getMessage)
+    }
+  }
+
+  implicit class ListOptionOps[A](val list: List[Option[A]]) extends AnyVal {
+    def sequence: Option[List[A]] = list match {
+      case Nil => Some(Nil)
+      case None :: _ => None
+      case Some(x) :: xs => xs.sequence.map(x :: _)
+    }
+  }
+
+  implicit class IdentifierOps(val id: Identifier) extends AnyVal {
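+    // Produces a globally unique, Isabelle-friendly name; for example, an
+    // identifier `foo_bar` with globalId 42 would be mangled to `foo'u'bar'42`
+    // (illustrative values).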
+    def mangledName: String =
+      NameTransformer.encode(id.name)
+        .replace("_", "'u'")
+        .replace("$", "'d'")
+        .replaceAll("^_*", "")
+        .replaceAll("_*$", "")
+        .replaceAll("^'*", "") + "'" + id.globalId
+  }
+
+  implicit class FunDefOps(val fd: FunDef) extends AnyVal {
+    private def name = fd.id.name
+
+    def statement: Option[Expr] = fd.postcondition match {
+      case Some(post) =>
+        val args = fd.params.map(_.id.toVariable)
+        val appliedPost = Application(post, List(FunctionInvocation(fd.typed, args)))
+        Some(fd.precondition match {
+          case None => appliedPost
+          case Some(pre) => Implies(pre, appliedPost)
+        })
+      case None => None
+    }
+
+    def proofMethod(vc: VC, context: LeonContext): Option[String] =
+      fd.extAnnotations.get("isabelle.proof") match {
+        case Some(List(Some(method: String), Some(kind: String))) =>
+          val kinds = kind.split(',')
+          if (kinds.contains(vc.kind.name) || kind == "")
+            Some(method)
+          else {
+            context.reporter.warning(s"Ignoring proof hint for function definition $name: only applicable for VC kinds ${kinds.mkString(", ")}")
+            None
+          }
+        case Some(List(Some(method: String), None)) =>
+          Some(method)
+        case Some(List(_, _)) =>
+          context.reporter.fatalError(s"In proof hint for function definition $name: expected a string literal as method and (optionally) a string literal as kind")
+        case Some(_) =>
+          context.reporter.internalError(s"Proof hint for function definition $name")
+        case None =>
+          None
+      }
+
+    def isLemma: Boolean =
+      fd.annotations.contains("isabelle.lemma")
+
+    def checkLemma(program: Program, context: LeonContext): Option[(FunDef, List[FunDef])] = {
+      val name = fd.qualifiedName(program)
+
+      def error(msg: String) =
+        context.reporter.fatalError(s"In lemma declaration for function definition $name: $msg")
+
+      fd.extAnnotations.get("isabelle.lemma") match {
+        case Some(List(Some(abouts: String))) =>
+          val refs = abouts.split(',').toList.map { about =>
+            program.lookupAll(about) match {
+              case List(fd: FunDef) if !fd.isLemma => fd
+              case List(_: FunDef) => error(s"lemmas can only be about plain functions, but $about is itself a lemma")
+              case Nil | List(_) => error(s"$about is not a function definition")
+              case _ => error(s"$about is ambiguous")
+            }
+          }
+          Some(fd -> refs)
+        case Some(List(_)) => error("expected a string literal for about")
+        case Some(_) => context.reporter.internalError(s"Lemma declaration for function definition $name")
+        case None => None
+      }
+    }
+  }
+
+  def canonicalizeOutput(str: String) =
+    // FIXME expose this via libisabelle
+    // FIXME use this everywhere
+    isabelle.Symbol.decode("""\s+""".r.replaceAllIn(str, " "))
+
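+  // Typed protocol operations; each one is implemented under the same name by
+  // the Isabelle/ML side of the protocol.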
+  val Prove = Operation.implicitly[(List[Term], Option[String]), Option[String]]("prove")
+  val Pretty = Operation.implicitly[Term, String]("pretty")
+  val Load = Operation.implicitly[(String, String, List[(String, String)]), List[String]]("load")
+  val Report = Operation.implicitly[Unit, List[(String, String)]]("report")
+  val Dump = Operation.implicitly[Unit, List[(String, String)]]("dump")
+  val Oracle = Operation.implicitly[Unit, Unit]("oracle")
+  val Fresh = Operation.implicitly[String, String]("fresh")
+  val NumeralLiteral = Operation.implicitly[(BigInt, Typ), Term]("numeral_literal")
+  val Cases = Operation.implicitly[((Term, List[(String, Typ)]), (Typ, List[(Term, Term)])), Term]("cases")
+  val SerialNat = Operation.implicitly[Unit, Term]("serial_nat")
+  val MapLiteral = Operation.implicitly[(List[(Term, Term)], (Typ, Typ)), Term]("map_literal")
+  val Functions = Operation.implicitly[(List[(String, List[(String, Typ)], (Term, Typ))]), Option[String]]("functions")
+  val Declare = Operation.implicitly[List[String], Unit]("declare")
+  val Equivalences = Operation.implicitly[List[(String, String)], List[String]]("equivalences")
+  val AssumeEquivalences = Operation.implicitly[List[(String, String)], Unit]("assume_equivalences")
+  val Lemmas = Operation.implicitly[List[(String, Term)], List[String]]("lemmas")
+  val AssumeLemmas = Operation.implicitly[List[(String, Term)], Unit]("assume_lemmas")
+  val WordLiteral = Operation.implicitly[BigInt, Typ]("word_literal")
+  val Datatypes = Operation.implicitly[List[(String, List[String], List[(String, List[(String, Typ)])])], Option[String]]("datatypes")
+  val LookupDatatype = Operation.implicitly[(String, List[(String, String)]), Either[String, List[(String, (Term, Term, List[Term]))]]]("lookup_datatype")
+
+}
diff --git a/src/main/scala/leon/utils/DebugSections.scala b/src/main/scala/leon/utils/DebugSections.scala
index 9ccabc241de2e1a03dfa49246b944406a550a49f..c18881e47f23fd9c5503320888158cc38f68b10d 100644
--- a/src/main/scala/leon/utils/DebugSections.scala
+++ b/src/main/scala/leon/utils/DebugSections.scala
@@ -22,6 +22,7 @@ case object DebugSectionRepair       extends DebugSection("repair",       1 << 1
 case object DebugSectionLeon         extends DebugSection("leon",         1 << 11)
 case object DebugSectionXLang        extends DebugSection("xlang",        1 << 12)
 case object DebugSectionTypes        extends DebugSection("types",        1 << 13)
+case object DebugSectionIsabelle     extends DebugSection("isabelle",     1 << 14)
 
 object DebugSections {
   val all = Set[DebugSection](
@@ -38,6 +39,7 @@ object DebugSections {
     DebugSectionRepair,
     DebugSectionLeon,
     DebugSectionXLang,
-    DebugSectionTypes
+    DebugSectionTypes,
+    DebugSectionIsabelle
   )
 }
diff --git a/src/main/scala/leon/utils/Library.scala b/src/main/scala/leon/utils/Library.scala
index c38f3b4cd23934588678562a86e80baeffd3fe74..ef386d5de05acb264ad9e5db698e0e626cf63ad0 100644
--- a/src/main/scala/leon/utils/Library.scala
+++ b/src/main/scala/leon/utils/Library.scala
@@ -18,6 +18,8 @@ case class Library(pgm: Program) {
 
   lazy val String = lookup("leon.lang.string.String").collectFirst { case ccd : CaseClassDef => ccd }
 
+  lazy val Dummy  = lookup("leon.lang.Dummy").collectFirst { case ccd : CaseClassDef => ccd }
+
   lazy val setToList = lookup("leon.collection.setToList").collectFirst { case fd : FunDef => fd }
 
   def lookup(name: String): Seq[Definition] = {
diff --git a/src/main/scala/leon/utils/PreprocessingPhase.scala b/src/main/scala/leon/utils/PreprocessingPhase.scala
index 9bcd317d33a600f968c6b9c9cec00ec742b0d84e..fb08be9ce3bc4426997c64ae00f2a1b77fcb727a 100644
--- a/src/main/scala/leon/utils/PreprocessingPhase.scala
+++ b/src/main/scala/leon/utils/PreprocessingPhase.scala
@@ -3,12 +3,12 @@
 package leon
 package utils
 
-import leon.xlang.XLangDesugaringPhase
-import purescala.Definitions.Program
-
 import leon.purescala._
-import synthesis.{ConvertWithOracle, ConvertHoles}
-import verification.InjectAsserts
+import leon.purescala.Definitions.Program
+import leon.solvers.isabelle.AdaptationPhase
+import leon.synthesis.{ConvertWithOracle, ConvertHoles}
+import leon.verification.InjectAsserts
+import leon.xlang.XLangDesugaringPhase
 
 class PreprocessingPhase(private val desugarXLang: Boolean = false) extends TransformationPhase {
 
@@ -45,8 +45,9 @@ class PreprocessingPhase(private val desugarXLang: Boolean = false) extends Tran
 
     val phases =
       pipeBegin andThen
-      pipeX     andThen
+      pipeX andThen
       new FunctionClosure andThen
+      AdaptationPhase andThen
       debugTrees("Program after pre-processing")
 
     phases.run(ctx)(p)
diff --git a/testcases/verification/isabelle/Functions.scala b/testcases/verification/isabelle/Functions.scala
new file mode 100644
index 0000000000000000000000000000000000000000..bb7503d724042e69b6c7db1a3facb1c957cfbdcc
--- /dev/null
+++ b/testcases/verification/isabelle/Functions.scala
@@ -0,0 +1,19 @@
+import leon._
+import leon.annotation._
+import leon.collection._
+import leon.lang._
+
+object Functions {
+
+  def zero: BigInt = 0
+
+  def length[A](xs: List[A]): BigInt = {
+    val ys = xs
+    val zs = ys
+    ys match {
+      case Nil() => zero
+      case Cons(_, xs) => BigInt(1) + length(xs)
+    }
+  } ensuring (_ >= zero)
+
+}
diff --git a/testcases/verification/isabelle/Simple.scala b/testcases/verification/isabelle/Simple.scala
new file mode 100644
index 0000000000000000000000000000000000000000..802f9530ce46c0a472c0169c0a21ceb9f3378909
--- /dev/null
+++ b/testcases/verification/isabelle/Simple.scala
@@ -0,0 +1,18 @@
+import leon.annotation._
+import leon.lang._
+
+object Simple {
+
+  val truth: Boolean = true holds
+  val contradiction: Boolean = false holds
+
+  val equality = { val x = (); val y = (); x == y } holds
+
+  def ints(x: BigInt, y: BigInt) = {
+    require { x > 0 && y > 0 }
+    x + y
+  } ensuring { _ > 0 }
+
+  val chars = { val x = 'a'; val y = 'a'; x == y } holds
+
+}
diff --git a/unmanaged/isabelle/protocol b/unmanaged/isabelle/protocol
new file mode 160000
index 0000000000000000000000000000000000000000..c33db5fef8af6548c494705e55e6b56e976a10a9
--- /dev/null
+++ b/unmanaged/isabelle/protocol
@@ -0,0 +1 @@
+Subproject commit c33db5fef8af6548c494705e55e6b56e976a10a9