diff --git a/lisa-utils/src/main/scala/lisa/utils/Parser.scala b/lisa-utils/src/main/scala/lisa/utils/Parser.scala
index 279590a9b4698ec8fcb67ddbaeab8595f56e0fc1..c0524e543fb02e98822852ab71a50a640dafe487 100644
--- a/lisa-utils/src/main/scala/lisa/utils/Parser.scala
+++ b/lisa-utils/src/main/scala/lisa/utils/Parser.scala
@@ -15,6 +15,8 @@ object Parser {
   object UnreachableException extends ParserException("Internal error: expected unreachable")
   class PrintFailedException(inp: Sequent | Formula | Term) extends ParserException(s"Printing of $inp failed unexpectedly")
 
+  def isInfixPredicate(id: String): Boolean = Set("=", "∊", "⊂", "⊆").contains(id)
+
   /**
    * Parses a sequent from a string. A sequent consists of the left and right side, separated by `⊢` or `|-`.
    * Left and right sides consist of formulas, separated by `;`.
@@ -131,7 +133,7 @@ object Parser {
 
     case object FalseToken extends FormulaToken("⊥")
 
-    case object EqualityToken extends FormulaToken(equality.id)
+    case class InfixPredicateToken(id: String) extends FormulaToken(id)
 
     // Constant functions and predicates
     case class ConstantToken(id: String) extends FormulaToken(id)
@@ -153,15 +155,23 @@ object Parser {
     // TODO: add positions ==> ranges to tokens
    type Position = Unit
 
-    private val allowedIdentifierCharacters = elem(_.isLetter) | elem(_.isDigit) | oneOf("_\\@#$%^&*><:|_+-=")
+    val escapeChar = '`'
+    val pathSeparator = '$'
     private val schematicSymbol = "'"
 
+    private val letter = elem(_.isLetter)
+    private val variableLike = letter ~ many(elem(c => c.isLetterOrDigit || c == '_'))
+    private val number = many1(elem(_.isDigit))
+    private val escaped = elem(escapeChar) ~ many1(elem(_ != escapeChar)) ~ elem(escapeChar)
+    private val arbitrarySymbol = elem(!_.isWhitespace)
+    private val symbolSequence = many1(oneOf("*/+-^:<>#%&@"))
+    private val path = many1(many1(letter) ~ elem(pathSeparator))
+
     private val lexer = Lexer(
       elem('∀') |> { _ => ForallToken },
       word("∃!") |> { _ => ExistsOneToken },
       elem('∃') |> { _ => ExistsToken },
       elem('.') |> { _ => DotToken },
-      elem('=') |> { _ => EqualityToken },
       elem('∧') | word("/\\") |> { _ => AndToken },
       elem('∨') | word("\\/") |> { _ => OrToken },
       word(Implies.id) | word("=>") | word("==>") | elem('→') |> { _ => ImpliesToken },
@@ -175,18 +185,25 @@ object Parser {
       elem(';') |> SemicolonToken,
       elem('⊢') | word("|-") |> SequentToken,
       many1(whiteSpace) |> SpaceToken,
-      word(schematicSymbol) ~ many1(allowedIdentifierCharacters) |> { cs =>
+      word(schematicSymbol) ~ variableLike |> { cs =>
         // drop the '
         SchematicToken(cs.drop(1).mkString)
       },
-      many1(allowedIdentifierCharacters) |> { cs => ConstantToken(cs.mkString) }
+      // Currently the path is merged into the id on the lexer level. When qualified ids are supported, this should be
+      // lifted into the parser.
+      opt(path) ~ (variableLike | number | arbitrarySymbol | symbolSequence | escaped) |> { cs => ConstantToken(cs.filter(_ != escapeChar).mkString) }
     ) onError { (cs, _) =>
       throw ParserException(s"Unexpected input: ${cs.mkString}")
     }
 
     def apply(it: Iterator[Char]): Iterator[Token] = {
       val source = Source.fromIterator(it, NoPositioner)
-      lexer(source).filter(_ != SpaceToken)
+      lexer(source)
+        .filter(_ != SpaceToken)
+        .map({
+          case ConstantToken(id) if isInfixPredicate(id) => InfixPredicateToken(id)
+          case t => t
+        })
     }
 
     def unapply(tokens: Iterator[Token]): String = {
@@ -202,7 +219,7 @@ object Parser {
           // space after: separators
           case DotToken | CommaToken | SemicolonToken => l :+ t.toString :+ space
           // space before and after: equality, connectors, sequent symbol
-          case EqualityToken | AndToken | OrToken | ImpliesToken | IffToken | SequentToken => l :+ space :+ t.toString :+ space
+          case InfixPredicateToken(_) | AndToken | OrToken | ImpliesToken | IffToken | SequentToken => l :+ space :+ t.toString :+ space
         }
       }
       .mkString
@@ -219,7 +236,7 @@ object Parser {
     case object NegationKind extends TokenKind
     case object BooleanConstantKind extends TokenKind
     case object FunctionOrPredicateKind extends TokenKind
-    case object EqualityKind extends TokenKind
+    case object InfixPredicateKind extends TokenKind
     case object CommaKind extends TokenKind
     case class ParenthesisKind(isOpen: Boolean) extends TokenKind
     case object BinderKind extends TokenKind
@@ -242,7 +259,7 @@ object Parser {
       case NegationToken => NegationKind
       case TrueToken | FalseToken => BooleanConstantKind
       case _: ConstantToken | _: SchematicToken => FunctionOrPredicateKind
-      case EqualityToken => EqualityKind
+      case _: InfixPredicateToken => InfixPredicateKind
       case CommaToken => CommaKind
       case ParenthesisToken(isOpen) => ParenthesisKind(isOpen)
       case ExistsToken | ExistsOneToken | ForallToken => BinderKind
@@ -262,7 +279,17 @@ object Parser {
 
     val comma: Syntax[Unit] = elem(CommaKind).unit(CommaToken)
 
-    val eq: Syntax[Unit] = elem(EqualityKind).unit(EqualityToken)
+    val INFIX_ARITY = 2
+    val infixPredicateLabel: Syntax[ConstantPredicateLabel] = accept(InfixPredicateKind)(
+      {
+        case InfixPredicateToken(id) => ConstantPredicateLabel(id, INFIX_ARITY)
+        case _ => throw UnreachableException
+      },
+      {
+        case ConstantPredicateLabel(id, INFIX_ARITY) if isInfixPredicate(id) => Seq(InfixPredicateToken(id))
+        case _ => throw UnreachableException
+      }
+    )
 
     val semicolon: Syntax[Unit] = elem(SemicolonKind).unit(SemicolonToken)
 
@@ -342,7 +369,7 @@ object Parser {
       }
     )
 
-    val predicate: Syntax[PredicateFormula] = (elem(FunctionOrPredicateKind) ~ opt(args) ~ opt(eq.skip ~ elem(FunctionOrPredicateKind) ~ opt(args))).map(
+    val predicate: Syntax[PredicateFormula] = (elem(FunctionOrPredicateKind) ~ opt(args) ~ opt(infixPredicateLabel ~ term)).map(
       {
         // predicate application
         case ConstantToken(id) ~ maybeArgs ~ None =>
@@ -353,27 +380,21 @@ object Parser {
           PredicateFormula(VariableFormulaLabel(id), Seq())
 
         // equality of two function applications
-        case fun1 ~ args1 ~ Some(fun2 ~ args2) =>
-          PredicateFormula(FOL.equality, Seq(createTerm(fun1, args1.getOrElse(Seq())), createTerm(fun2, args2.getOrElse(Seq()))))
+        case fun1 ~ args1 ~ Some(pred ~ term2) =>
+          PredicateFormula(pred, Seq(createTerm(fun1, args1.getOrElse(Seq())), term2))
 
         case _ => throw UnreachableException
       },
-      { case PredicateFormula(label, args) =>
-        label match {
-          case FOL.equality =>
-            args match {
-              case Seq(first, second) => Seq(invertTerm(first) ~ Some(invertTerm(second)))
-              case _ => Seq()
-            }
-          case other =>
-            val predicateApp = other match {
-              case ConstantPredicateLabel(id, 0) => ConstantToken(id) ~ None
-              case ConstantPredicateLabel(id, _) => ConstantToken(id) ~ Some(args)
-              case VariableFormulaLabel(id) => SchematicToken(id) ~ None
-              case SchematicPredicateLabel(id, _) => SchematicToken(id) ~ Some(args)
-            }
-            Seq(predicateApp ~ None)
-        }
+      {
+        case PredicateFormula(label @ ConstantPredicateLabel(id, INFIX_ARITY), Seq(first, second)) if isInfixPredicate(id) => Seq(invertTerm(first) ~ Some(label ~ second))
+        case PredicateFormula(label, args) =>
+          val prefixApp = label match {
+            case VariableFormulaLabel(id) => SchematicToken(id) ~ None
+            case SchematicPredicateLabel(id, _) => SchematicToken(id) ~ Some(args)
+            case ConstantPredicateLabel(id, 0) => ConstantToken(id) ~ None
+            case ConstantPredicateLabel(id, _) => ConstantToken(id) ~ Some(args)
+          }
+          Seq(prefixApp ~ None)
       }
     )
 
diff --git a/lisa-utils/src/test/scala/lisa/utils/ParserTest.scala b/lisa-utils/src/test/scala/lisa/utils/ParserTest.scala
index c4373935eadb555a125360fd8055aed5c619a441..a8e456080fb1df29db57d9744bd4ed44e03720d4 100644
--- a/lisa-utils/src/test/scala/lisa/utils/ParserTest.scala
+++ b/lisa-utils/src/test/scala/lisa/utils/ParserTest.scala
@@ -219,4 +219,13 @@ class ParserTest extends AnyFunSuite with TestUtils {
       ) |- (x === x1) \/ (x === y1))
     )
   }
+
+  test("infix predicates") {
+    val in = ConstantPredicateLabel("∊", 2)
+    assert(Parser.parseFormula("x∊y") == PredicateFormula(in, Seq(cx, cy)))
+    assert(Parser.parseFormula("x ∊ y") == PredicateFormula(in, Seq(cx, cy)))
+    assert(Parser.parseFormula("'x ∊ 'y") == PredicateFormula(in, Seq(x, y)))
+    assert(Parser.parseFormula("('x ∊ 'y) /\\ a") == ConnectorFormula(And, Seq(PredicateFormula(in, Seq(x, y)), a)))
+    assert(Parser.parseFormula("a \\/ ('x ∊ 'y)") == ConnectorFormula(Or, Seq(a, PredicateFormula(in, Seq(x, y)))))
+  }
 }
diff --git a/lisa-utils/src/test/scala/lisa/utils/PrinterTest.scala b/lisa-utils/src/test/scala/lisa/utils/PrinterTest.scala
index e7c2da63f0d76e90144530547ad3ae85bdbfff0e..a43739e96818e9fa73ee104e6aefa8b559071ed5 100644
--- a/lisa-utils/src/test/scala/lisa/utils/PrinterTest.scala
+++ b/lisa-utils/src/test/scala/lisa/utils/PrinterTest.scala
@@ -257,4 +257,12 @@ class PrinterTest extends AnyFunSuite with TestUtils {
       ) == "'x = 'x ∨ 'x = 'y; 'x = 'x ∨ 'x = 'y ⇔ 'x = 'x' ∨ 'x = 'y' ⊢ 'x = 'x' ∨ 'x = 'y'"
     )
   }
+
+  test("infix predicates") {
+    val in = ConstantPredicateLabel("∊", 2)
+    assert(Parser.printFormula(PredicateFormula(in, Seq(cx, cy))) == "x ∊ y")
+    assert(Parser.printFormula(PredicateFormula(in, Seq(x, y))) == "'x ∊ 'y")
+    assert(Parser.printFormula(ConnectorFormula(And, Seq(PredicateFormula(in, Seq(x, y)), a))) == "'x ∊ 'y ∧ a")
+    assert(Parser.printFormula(ConnectorFormula(Or, Seq(a, PredicateFormula(in, Seq(x, y))))) == "a ∨ 'x ∊ 'y")
+  }
 }
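
Illustrative round-trip sketch of the behaviour introduced above, using only the Parser.parseFormula / Parser.printFormula API exercised by the tests; the object name and import path are assumptions, and this snippet is not part of the diff itself:

    import lisa.utils.Parser

    object InfixRoundTripSketch extends App {
      // "∊" is in the isInfixPredicate set, so the lexer's ConstantToken("∊")
      // is remapped to InfixPredicateToken and parsed as a binary predicate.
      val f = Parser.parseFormula("'x ∊ 'y")
      // Printing inverts the syntax and re-inserts spaces around the infix
      // predicate, matching the expectation in PrinterTest above.
      assert(Parser.printFormula(f) == "'x ∊ 'y")
    }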