Commit fe46ea4a authored by Katja Goltsova, committed by Viktor Kunčak

Move sequent parsing into the scallion parser

Add tests for sequent parsing.
parent ccd5990a
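
A minimal sketch of how the new entry points are meant to be used, based on the diff and the tests added below ("|-" is the ASCII spelling of the turnstile accepted by the lexer; none of this code is part of the commit):

import lisa.utils.Parser
import lisa.kernel.proof.SequentCalculus.Sequent

// "a", "b", "c" parse as arity-0 constant predicates; semicolons separate
// the formulas on each side of the turnstile.
val s1: Sequent = Parser.parseSequent("a; b ⊢ c")
val s2: Sequent = Parser.parseSequent("∀x. ?x = ?x |- ∃y. ?y = ?y")
// Formulas and terms keep their own entry points.
val f = Parser.parseFormula("?phi(?x, ?y)")
val t = Parser.parseTerm("?f(?x)")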
@@ -12,25 +12,22 @@ object Parser {
class ParserException(msg: String) extends Exception(msg)
object UnreachableException extends ParserException("Internal error: expected unreachable")
def parseSequent(s: String): Sequent = s.split("⊢") match {
case Array(fs) => Sequent(Set(), fs.split(";").map(parseFormula).toSet)
case Array(fsl, fsr) => Sequent(fsl.split(";").map(parseFormula).toSet, fsr.split(";").map(parseFormula).toSet)
case _ => throw ParserException("Invalid sequent")
}
private def extractParseResult[T](r: FormulaParser.ParseResult[T]): T = r match {
case FormulaParser.Parsed(value, _) => value
// TODO: at position
// TODO: leaking implementation? Should I sugar tokens?
case FormulaParser.UnexpectedToken(token, _) => throw ParserException(s"Unexpected input: $token")
case FormulaParser.UnexpectedEnd(_) => throw ParserException(s"Unexpected end of input")
}
def parseSequent(s: String): Sequent =
extractParseResult(SequentParser.parseSequent(SequentLexer(s.iterator)))
def parseFormula(s: String): Formula =
extractParseResult(FormulaParser(FormulaLexer(s.iterator)))
extractParseResult(SequentParser.parseFormula(SequentLexer(s.iterator)))
def parseTerm(s: String): Term =
extractParseResult(FormulaParser.parseTerm(FormulaLexer(s.iterator)))
extractParseResult(SequentParser.parseTerm(SequentLexer(s.iterator)))
private def extractParseResult[T](r: SequentParser.ParseResult[T]): T = r match {
case SequentParser.Parsed(value, _) => value
// TODO: at position
// TODO: leaking implementation? Should I sugar tokens? -- Lexer.unapply once it's implemented
case SequentParser.UnexpectedToken(token, _) => throw ParserException(s"Unexpected input: $token")
case SequentParser.UnexpectedEnd(_) => throw ParserException(s"Unexpected end of input")
}
private[Parser] sealed abstract class FormulaToken
case object ForallToken extends FormulaToken
@@ -49,9 +46,11 @@ object Parser {
case class SchematicToken(id: String) extends FormulaToken
case class ParenthesisToken(isOpen: Boolean) extends FormulaToken
case object CommaToken extends FormulaToken
case object SemicolonToken extends FormulaToken
case object SequentToken extends FormulaToken
case object SpaceToken extends FormulaToken
private[Parser] object FormulaLexer extends Lexers with CharLexers {
private[Parser] object SequentLexer extends Lexers with CharLexers {
type Token = FormulaToken
// TODO: add positions ==> ranges to tokens
type Position = Unit
@@ -70,6 +69,8 @@ object Parser {
elem('(') |> ParenthesisToken(true),
elem(')') |> ParenthesisToken(false),
elem(',') |> CommaToken,
elem(';') |> SemicolonToken,
elem('⊢') | word("|-") |> SequentToken,
many1(whiteSpace) |> SpaceToken,
elem('?') ~ many1(elem(_.isLetter) | elem('_') | elem(_.isDigit) | elem('\'')) |> { cs =>
// drop the '?'
@@ -86,7 +87,7 @@
}
}
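
As a rough sketch, not part of the diff: with the new SemicolonToken and SequentToken, the lexer turns a whole sequent string into one flat token stream. SequentLexer is private[Parser], so a check like this would have to live inside lisa.utils.Parser:

// Tokenizing "?a; ?b |- ?c" is expected to yield (SpaceToken entries aside):
//   SchematicToken("a"), SemicolonToken, SchematicToken("b"), SequentToken, SchematicToken("c")
val tokens = SequentLexer("?a; ?b |- ?c".iterator)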
private[Parser] object FormulaParser extends Parsers {
private[Parser] object SequentParser extends Parsers {
sealed abstract class TokenKind
// and, or are both (left) associative and bind tighter than implies, iff
case object AndKind extends TokenKind
@@ -101,6 +102,8 @@ object Parser {
case class ParenthesisKind(isOpen: Boolean) extends TokenKind
case object BinderKind extends TokenKind
case object DotKind extends TokenKind
case object SemicolonKind extends TokenKind
case object SequentSymbolKind extends TokenKind
case object OtherKind extends TokenKind
type Token = lisa.utils.Parser.FormulaToken
@@ -120,6 +123,8 @@ object Parser {
case ParenthesisToken(isOpen) => ParenthesisKind(isOpen)
case ExistsToken | ExistsOneToken | ForallToken => BinderKind
case DotToken => DotKind
case SemicolonToken => SemicolonKind
case SequentToken => SequentSymbolKind
case SpaceToken => OtherKind
}
@@ -134,6 +139,10 @@ object Parser {
val comma: Syntax[Unit] = elem(CommaKind).unit(CommaToken)
val eq: Syntax[Unit] = elem(EqualityKind).unit(EqualityToken)
val semicolon: Syntax[Unit] = elem(SemicolonKind).unit(SemicolonToken)
val sequentSymbol: Syntax[Unit] = elem(SequentSymbolKind).unit(SequentToken)
///////////////////////////////////////////////////////////////////
//////////////////////// LABELS ///////////////////////////////////
@@ -237,14 +246,22 @@ object Parser {
}
}
///////////////////////////////////////////////////////////////////
val sequent: Syntax[Sequent] = repsep(formula, semicolon) ~ opt(sequentSymbol.skip ~ repsep(formula, semicolon)) map {
case left ~ Some(right) => Sequent(left.toSet, right.toSet)
case right ~ None => Sequent(Set(), right.toSet)
}
val parser: Parser[Sequent] = Parser(sequent)
val formulaParser: SequentParser.Parser[FOL.Formula] = Parser(formula)
val parser: FormulaParser.Parser[FOL.Formula] = Parser(formula)
val termParser: SequentParser.Parser[FOL.Term] = Parser(term)
val termParser: FormulaParser.Parser[FOL.Term] = Parser(term)
def apply(it: Iterator[Token]): ParseResult[Sequent] = parseSequent(it)
def apply(it: Iterator[Token]): ParseResult[Formula] = parseFormula(it)
def parseSequent(it: Iterator[Token]): ParseResult[Sequent] = parser(it)
def parseFormula(it: Iterator[Token]): ParseResult[Formula] = parser(it)
def parseFormula(it: Iterator[Token]): ParseResult[Formula] = formulaParser(it)
def parseTerm(it: Iterator[Token]): ParseResult[Term] = termParser(it)
}
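
An illustrative sketch, not part of the diff, of how the sequent syntax above maps the possible input shapes to Sequent values (a, b, c are arity-0 constant predicates, mirroring the tests in the file below):

import lisa.kernel.fol.FOL.{ConstantPredicateLabel, PredicateFormula}
import lisa.kernel.proof.SequentCalculus.Sequent
import lisa.utils.Parser

val a = PredicateFormula(ConstantPredicateLabel("a", 0), Seq())
val b = PredicateFormula(ConstantPredicateLabel("b", 0), Seq())
val c = PredicateFormula(ConstantPredicateLabel("c", 0), Seq())

assert(Parser.parseSequent("a; b ⊢ c") == Sequent(Set(a, b), Set(c))) // left ~ Some(right)
assert(Parser.parseSequent("a; b") == Sequent(Set(), Set(a, b)))      // no turnstile: formulas go to the right-hand side
assert(Parser.parseSequent("⊢ c") == Sequent(Set(), Set(c)))          // empty left-hand side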
package lisa.utils
import lisa.kernel.fol.FOL.*
import lisa.kernel.fol.FOL._
import lisa.kernel.proof.SequentCalculus.Sequent
import lisa.utils.Helpers.*
import org.scalatest.funsuite.AnyFunSuite
class ParserTest extends AnyFunSuite {
val (a, b, c) = (ConstantPredicateLabel("a", 0), ConstantPredicateLabel("b", 0), ConstantPredicateLabel("c", 0))
val (x, y, z) = (VariableLabel("x"), VariableLabel("y"), VariableLabel("z"))
val (x1, y1, z1) = (VariableLabel("x1"), VariableLabel("y1"), VariableLabel("z1"))
val (xPrime, yPrime, zPrime) = (VariableLabel("x'"), VariableLabel("y'"), VariableLabel("z'"))
val (cx, cy, cz) = (ConstantFunctionLabel("x", 0), ConstantFunctionLabel("y", 0), ConstantFunctionLabel("z", 0))
val (f0, f1, f2, f3) = (ConstantFunctionLabel("f", 0), ConstantFunctionLabel("f", 1), ConstantFunctionLabel("f", 2), ConstantFunctionLabel("f", 3))
val (sf1, sf2, sf3) = (SchematicFunctionLabel("f", 1), SchematicFunctionLabel("f", 2), SchematicFunctionLabel("f", 3))
val (sPhi1, sPhi2) = (SchematicNPredicateLabel("phi", 1), SchematicNPredicateLabel("phi", 2))
given Conversion[PredicateLabel, PredicateFormula] = PredicateFormula(_, Seq.empty)
@@ -178,15 +183,43 @@ class ParserTest extends AnyFunSuite {
}
test("sequent") {
fail("not implemented")
val forallEq = BinderFormula(Forall, x, PredicateFormula(equality, Seq(x, x)))
assert(Parser.parseSequent("∀x. ?x = ?x") == Sequent(Set(), Set(forallEq)))
assert(Parser.parseSequent("⊢ ∀x. ?x = ?x") == Sequent(Set(), Set(forallEq)))
assert(Parser.parseSequent("∀x. ?x = ?x ⊢ ∀x. ?x = ?x") == Sequent(Set(forallEq), Set(forallEq)))
val existsXEq = BinderFormula(Exists, x, PredicateFormula(equality, Seq(x, x)))
assert(Parser.parseSequent("∀x. ?x = ?x ⊢ ∃x. ?x = ?x") == Sequent(Set(forallEq), Set(existsXEq)))
val existsYEq = BinderFormula(Exists, y, PredicateFormula(equality, Seq(y, y)))
assert(Parser.parseSequent("∀x. ?x = ?x ⊢ ∃x. ?x = ?x; ∃y. ?y = ?y") == Sequent(Set(forallEq), Set(existsYEq, existsXEq)))
assert(
Parser.parseSequent("p ; ∀x. ?x = ?x ⊢ ∃x. ?x = ?x; ∃y. ?y = ?y") ==
Sequent(Set(forallEq, PredicateFormula(ConstantPredicateLabel("p", 0), Seq())), Set(existsYEq, existsXEq))
)
}
test("sequents from Mapping proof") {
fail("not implemented")
}
test("sequents from Mapping and SetTheory") {
val va = VariableLabel("a")
val leftAndRight = BinderFormula(ExistsOne, x, PredicateFormula(sPhi2, Seq(x, va)))
assert(Parser.parseSequent("∃!x. ?phi(?x, ?a) ⊢ ∃!x. ?phi(?x, ?a)") == Sequent(Set(leftAndRight), Set(leftAndRight)))
test("sequents from Peano proof") {
fail("not implemented")
assert(
Parser.parseSequent("∀x. (?x = ?x1) ↔ ?phi(?x) ⊢ (?z = ?f(?x1)) ⇒ (∃x. (?z = ?f(?x)) ∧ ?phi(?x))") == Sequent(
Set(BinderFormula(Forall, x, ConnectorFormula(Iff, Seq(x === x1, sPhi1(x))))),
Set((z === sf1(x1)) ==> exists(x, (z === sf1(x)) /\ sPhi1(x)))
)
)
assert(
Parser.parseSequent("∃x1. ∀x. (?x = ?x1) ↔ ?phi(?x) ⊢ ∃z1. ∀z. (?z = ?z1) ↔ (∃x. (?z = ?f(?x)) ∧ ?phi(?x))") == (exists(x1, forall(x, (x === x1) <=> (sPhi1(x)))) |- exists(
z1,
forall(z, (z === z1) <=> exists(x, (z === sf1(x)) /\ sPhi1(x)))
))
)
assert(Parser.parseSequent("⊢ (?x = ?x) ∨ (?x = ?y)") == (() |- (x === x) \/ (x === y)))
assert(
Parser.parseSequent("(?x = ?x) ∨ (?x = ?y); (?x = ?x) ∨ (?x = ?y) ↔ (?x = ?x') ∨ (?x = ?y') ⊢ (?x = ?x') ∨ (?x = ?y')") == (Set(
(x === x) \/ (x === y),
((x === x) \/ (x === y)) <=> ((x === xPrime) \/ (x === yPrime))
) |- (x === xPrime) \/ (x === yPrime))
)
}
}
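
Not in the commit, but since the lexer also accepts the ASCII spelling "|-" for ⊢, an analogous assertion should hold (reusing the forallEq and existsXEq definitions from the "sequent" test above):

assert(Parser.parseSequent("∀x. ?x = ?x |- ∃x. ?x = ?x") == Sequent(Set(forallEq), Set(existsXEq)))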