diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
new file mode 100644
index 00000000..bf494f6d
--- /dev/null
+++ b/.github/workflows/actions.yml
@@ -0,0 +1,74 @@
+name: ci
+
+on:
+ push:
+ branches:
+ - master
+ tags:
+ - '*'
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ test-js:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v3
+ - uses: coursier/cache-action@v6
+ - name: Run tests
+ run: ./mill -i __.js.__.test
+ test-native:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v3
+ - uses: coursier/cache-action@v6
+ - name: Run tests
+ run: ./mill -i __.native.__.test
+ test-jvm:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v3
+ - uses: coursier/cache-action@v6
+ - name: Run tests
+ run: ./mill -i __.jvm.__.test
+ check-binary-compatibility:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - uses: coursier/cache-action@v6
+ - name: Check Binary Compatibility
+ run: ./mill -i __.mimaReportBinaryIssues
+
+ publish-sonatype:
+ if: github.repository == 'com-lihaoyi/fastparse' && contains(github.ref, 'refs/tags/')
+ needs:
+ - test-jvm
+ - test-js
+ - test-native
+ runs-on: ubuntu-latest
+ env:
+ MILL_SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
+ MILL_SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
+ MILL_PGP_SECRET_BASE64: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY }}
+ MILL_PGP_PASSPHRASE: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY_PASSWORD }}
+ LANG: "en_US.UTF-8"
+ LC_MESSAGES: "en_US.UTF-8"
+ LC_ALL: "en_US.UTF-8"
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Publish to Maven Central
+ run: ./mill -i mill.scalalib.SonatypeCentralPublishModule/
+
+ - name: Create GitHub Release
+ id: create_gh_release
+ uses: actions/create-release@v1.1.4
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
+ with:
+ tag_name: ${{ github.ref }}
+ release_name: ${{ github.ref }}
+ draft: false
diff --git a/.gitignore b/.gitignore
index 2daa223a..41fccdaf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,9 @@ out/
.DS_STORE
*.iml
.idea
+.coursier
+.bloop/
+.metals/
+project/metals.sbt
+.vscode/
+.bsp/
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index dc27a0c1..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-sudo: false
-language: scala
-stages:
- - name: test
- - name: release
- if: (branch = master AND type = push) OR (tag IS present)
-jobs:
- include:
- - script: "./mill __.jvm[2.12.7].test"
- jdk: oraclejdk9
-
- - script: "./mill __.jvm[2.13.0].test"
- jdk: openjdk10
-
- - script: "./mill __.js[2.12.7].test"
- jdk: openjdk10
-
- - script: "./mill __.js[2.13.0].test"
- jdk: openjdk10
-
- - script: "./mill all demo.fastOpt perftests.__.compile"
- jdk: openjdk10
-
- - script: "./mill demo.fullOpt && sbt readme/run"
- jdk: oraclejdk9
-
- - stage: release
- script:
- - echo "$PGP_SECRET" | base64 --decode | gpg --import
- - ./mill __[2.12.7].publish --sonatypeCreds "$SONATYPE_USERNAME:$SONATYPE_PASSWORD" --release true --gpgPassphrase "$PGP_PASSPHRASE"
-
- - stage: release
- script:
- - echo "$PGP_SECRET" | base64 --decode | gpg --import
- - ./mill __[2.13.0].publish --sonatypeCreds "$SONATYPE_USERNAME:$SONATYPE_PASSWORD" --release true --gpgPassphrase "$PGP_PASSPHRASE"
-
-
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..96fe02f7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Li Haoyi (haoyi.sg@gmail.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/README.md b/README.md
index 4b75e0ca..17d02776 100644
--- a/README.md
+++ b/README.md
@@ -4,13 +4,27 @@ FastParse [
+- [Documentation](https://com-lihaoyi.github.io/fastparse)
+
+If you use FastParse and like it, you will probably enjoy the following book by the Author:
+
+- [*Hands-on Scala Programming*](https://www.handsonscala.com/)
+
+*Hands-on Scala* uses FastParse extensively throughout the book, with
+the entirety of *Chapter 19: Parsing Structured Text* dedicated to
+the library and *Chapter 20: Implementing a Programming Language* making heavy
+use of it. *Hands-on Scala* is a great way to level up your skills in Scala
+in general and FastParse in particular.
For a good hands-on tutorial working through the basics of how to use this
library, check out the following blog post:
- [Easy Parsing with Parser Combinators](http://www.lihaoyi.com/post/EasyParsingwithParserCombinators.html)
+The following post gives a good overview of the design of FastParse:
+
+- [Fastparse 2: Even Faster Scala Parser Combinators](https://www.lihaoyi.com/post/Fastparse2EvenFasterScalaParserCombinators.html)
+
This readme contains some developer docs, if you intend on working on the
fastparse repo, not just using it as a library.
@@ -36,18 +50,20 @@ compiled to Scala.js. These all live in `demo/`.
Common Commands
---------------
-- `mill -w fastparse.jvm[2.12.7].test` runs the main testsuite. If you're
+Note: you should use Mill 1.1.0-RC1 or later (the version pinned in `build.mill`).
+
+- `mill -w "fastparse.jvm[2.12.19].test"` runs the main testsuite. If you're
hacking on FastParse, this is often where you want to go
- You can run the other suites via `fastparse.js`, `scalaparse.jvm`, etc. if you
wish, but I typically don't and leave that to CI unless I'm actively working
on the sub-project
-- You can use `mill -w fastparse.jvm[_].test` to run it under different Scala
+- You can use `mill -w "fastparse.jvm[_].test"` to run it under different Scala
versions, but again I usually don't bother
- `mill __.test.test` is the aggregate test-all command, but is pretty slow. You
- can use `mill __.jvm[2.12.7].test` to run all tests only under JVM/Scala-2.12,
+ can use `mill "__.jvm[2.12.19].test"` to run all tests only under JVM/Scala-2.12,
which is much faster and catches most issues
- `mill demo.fullOpt && sbt readme/run` builds the documentation site, which can
@@ -64,7 +80,7 @@ Contribution Guidelines
- **It's entirely possible your changes won't be merged**, or will get ripped out later. This is also the case for my changes, as the Author!
- **Even a rejected/reverted PR is valuable**! It helps explore the solution space, and know what works and what doesn't. For every line in the repo, at least three lines were tried, committed, and reverted/refactored, and more than 10 were tried without committing.
- **Feel free to send Proof-Of-Concept PRs** that you don't intend to get merged.
-- **No binary or source compatibility is guaranteed between any releases**. FastParse is still in the 0.x.y phase of development, which means it's still under rapid development and things do change. On the other hand, upgrading is usually trivial, and I don't expect existing functionality to go away
+- **Binary and source compatibility is maintained between releases following SemVer**. Upgrading is usually trivial, and I don't expect existing functionality to go away
License
=======
diff --git a/build.mill b/build.mill
new file mode 100644
index 00000000..e0a5a481
--- /dev/null
+++ b/build.mill
@@ -0,0 +1,298 @@
+//| mill-version: 1.1.0-RC1
+//| mill-jvm-version: 11
+//| mill-jvm-opts: ["-XX:NonProfiledCodeHeapSize=500m", "-XX:ReservedCodeCacheSize=1000m"]
+//| mvnDeps:
+//| - com.github.lolgab::mill-mima_mill1:0.2.0
+package build
+import mill._
+import scalalib._
+import scalajslib._
+import scalanativelib._
+import publish._
+import mill.api.Result
+import mill.util.Jvm.createJar
+
+import mill.javalib.api.JvmWorkerUtil.isScala3
+
+import mill.util.VcsVersion
+import com.github.lolgab.mill.mima._
+
+val scala3 = "3.3.3"
+val scala213 = "2.13.14"
+val scala212 = "2.12.19"
+val scalaJS1 = "1.16.0"
+val scalaNative05 = "0.5.1"
+val crossVersions = Seq(scala3, scala213, scala212)
+
+object fastparse extends Module{
+ object jvm extends Cross[fastparseJvmModule](crossVersions)
+ trait fastparseJvmModule extends FastparseModule{
+ object test extends ScalaTests with CommonTestModule
+ }
+
+ object js extends Cross[fastparseJsModule](crossVersions)
+ trait fastparseJsModule extends FastparseModule with ScalaJSModule {
+ def scalaJSVersion = scalaJS1
+ private def sourceMapOptions = Task.Anon {
+ val vcsState = VcsVersion.vcsState()
+ vcsState.lastTag.collect {
+ case tag if vcsState.commitsSinceLastTag == 0 =>
+ val baseUrl = pomSettings().url.replace("github.com", "raw.githubusercontent.com")
+ val sourcesOptionName = if(isScala3(crossScalaVersion)) "-scalajs-mapSourceURI" else "-P:scalajs:mapSourceURI"
+ s"$sourcesOptionName:${mill.api.BuildCtx.workspaceRoot.toIO.toURI}->$baseUrl/$tag/"
+ }
+ }
+
+ override def scalacOptions = super.scalacOptions() ++ sourceMapOptions()
+
+ object test extends ScalaJSTests with CommonTestModule
+ }
+
+
+ object native extends Cross[fastparseNativeModule](crossVersions)
+ trait fastparseNativeModule extends FastparseModule with ScalaNativeModule {
+ def scalaNativeVersion = scalaNative05
+
+ object test extends ScalaNativeTests with CommonTestModule
+ }
+}
+
+trait FastparseModule extends CommonCrossModule with Mima{
+ def mvnDeps = Seq(
+ mvn"com.lihaoyi::sourcecode::0.4.0",
+ mvn"com.lihaoyi::geny::1.1.0"
+ )
+
+ def compileMvnDeps =
+ if(isScala3(crossScalaVersion)) Seq.empty[Dep]
+ else Seq(mvn"org.scala-lang:scala-reflect:$crossScalaVersion")
+
+ def generatedSources = Task {
+ val dir = Task.ctx().dest
+ val file = dir/"fastparse"/"SequencerGen.scala"
+ // Only go up to 21, because adding the last element makes it 22
+ val tuples = (2 to 21).map{ i =>
+ val ts = (1 to i) map ("T" + _)
+ val chunks = (1 to i) map { n =>
+ s"t._$n"
+ }
+ val tsD = (ts :+ "D").mkString(",")
+ val anys = ts.map(_ => "Any").mkString(", ")
+ s"""
+ val BaseSequencer$i: Sequencer[($anys), Any, ($anys, Any)] =
+ Sequencer0((t, d) => (${chunks.mkString(", ")}, d))
+ implicit def Sequencer$i[$tsD]: Sequencer[(${ts.mkString(", ")}), D, ($tsD)] =
+ BaseSequencer$i.asInstanceOf[Sequencer[(${ts.mkString(", ")}), D, ($tsD)]]
+ """
+ }
+ val output = s"""
+ package fastparse
+ trait SequencerGen[Sequencer[_, _, _]] extends LowestPriSequencer[Sequencer]{
+ protected[this] def Sequencer0[A, B, C](f: (A, B) => C): Sequencer[A, B, C]
+ ${tuples.mkString("\n")}
+ }
+ trait LowestPriSequencer[Sequencer[_, _, _]]{
+ protected[this] def Sequencer0[A, B, C](f: (A, B) => C): Sequencer[A, B, C]
+ implicit def Sequencer1[T1, T2]: Sequencer[T1, T2, (T1, T2)] = Sequencer0{case (t1, t2) => (t1, t2)}
+ }
+ """.stripMargin
+ os.write(file, output, createFolders = true)
+ Seq(PathRef(file))
+ }
+
+ override def scalacOptions =
+ super.scalacOptions() ++
+ Seq(
+ "-Xfatal-warnings",
+ "-Xlint:unused",
+ "-Wconf:cat=feature:s,cat=deprecation:s"
+ ).filter(_ => scalaVersion() != scala3)
+
+ def mimaReportBinaryIssues() =
+ if (this.isInstanceOf[ScalaNativeModule] || this.isInstanceOf[ScalaJSModule]) Task.Command {}
+ else super.mimaReportBinaryIssues()
+
+ def mimaPreviousVersions = Seq(
+ VcsVersion
+ .vcsState()
+ .lastTag
+ .getOrElse(throw new Exception("Missing last tag"))
+ )
+
+ def mimaPreviousArtifacts =
+ if (isScala3(crossScalaVersion)) Seq.empty[Dep]
+ else super.mimaPreviousArtifacts()
+
+ def mimaBinaryIssueFilters = super.mimaBinaryIssueFilters() ++ Seq(
+ ProblemFilter.exclude[IncompatibleResultTypeProblem]("fastparse.Parsed#Failure.unapply"),
+ )
+}
+
+object scalaparse extends Module{
+ object js extends Cross[ScalaParseJsModule](crossVersions)
+ trait ScalaParseJsModule extends ExampleParseJsModule
+
+ object jvm extends Cross[ScalaParseJvmModule](crossVersions)
+ trait ScalaParseJvmModule extends ExampleParseJvmModule
+
+ object native extends Cross[ScalaParseNativeModule](crossVersions)
+ trait ScalaParseNativeModule extends ExampleParseNativeModule
+}
+
+object cssparse extends Module{
+ object js extends Cross[CssParseJsModule](crossVersions)
+ trait CssParseJsModule extends ExampleParseJsModule
+
+ object jvm extends Cross[CssParseJvmModule](crossVersions)
+ trait CssParseJvmModule extends ExampleParseJvmModule
+
+ object native extends Cross[CssParseNativeModule](crossVersions)
+
+ trait CssParseNativeModule extends ExampleParseNativeModule
+}
+
+object pythonparse extends Module{
+ object js extends Cross[PythonParseJsModule](crossVersions)
+ trait PythonParseJsModule extends ExampleParseJsModule
+
+ object jvm extends Cross[PythonParseJvmModule](crossVersions)
+ trait PythonParseJvmModule extends ExampleParseJvmModule
+
+ object native extends Cross[PythonParseNativeModule](crossVersions)
+ trait PythonParseNativeModule extends ExampleParseNativeModule
+}
+
+trait ExampleParseJsModule extends CommonCrossModule with ScalaJSModule{
+ def moduleDeps = Seq(fastparse.js())
+ def scalaJSVersion = scalaJS1
+
+ object test extends ScalaJSTests with CommonTestModule
+}
+
+trait ExampleParseJvmModule extends CommonCrossModule{
+ def moduleDeps = Seq(fastparse.jvm())
+
+ object test extends ScalaTests with CommonTestModule{
+ def mvnDeps = super.mvnDeps() ++ Seq(
+ mvn"net.sourceforge.cssparser:cssparser:0.9.18",
+ ) ++ Option.when(!isScala3(crossScalaVersion))(
+ mvn"org.scala-lang:scala-compiler:$crossScalaVersion"
+ )
+ }
+}
+
+trait ExampleParseNativeModule extends CommonCrossModule with ScalaNativeModule{
+ def scalaNativeVersion = scalaNative05
+ def moduleDeps = Seq(fastparse.native())
+
+ object test extends ScalaNativeTests with CommonTestModule
+}
+
+trait CommonCrossModule extends CrossScalaModule with PublishModule with PlatformScalaModule{
+ def publishVersion = VcsVersion.vcsState().format()
+
+ def pomSettings = PomSettings(
+ description = artifactName(),
+ organization = "com.lihaoyi",
+ url = "https://github.com/lihaoyi/fastparse",
+ licenses = Seq(License.MIT),
+ versionControl = VersionControl.github(
+ "com-lihaoyi",
+ "fastparse"
+ ),
+ developers = Seq(
+ Developer("lihaoyi", "Li Haoyi","https://github.com/lihaoyi")
+ )
+ )
+
+ def scalaDocPluginClasspath = Task { Seq[PathRef]() }
+
+ def newSources = Task.Sources("src-2.12+")
+ def sources = Task { super.sources() ++ newSources() }
+}
+
+trait CommonTestModule extends ScalaModule with TestModule.Utest {
+ def mvnDeps = Seq(mvn"com.lihaoyi::utest::0.8.3")
+
+ override def scalacOptions =
+ super.scalacOptions() ++
+ Seq(
+ "-Xfatal-warnings",
+ "-Xlint:unused",
+ "-Wconf:cat=feature:s,cat=deprecation:s"
+ ).filter(_ => scalaVersion() != scala3)
+}
+
+object perftests extends Module{
+ object bench2 extends PerfTestModule {
+ def scalaVersion0 = scala213
+ def moduleDeps = Seq(
+ scalaparse.jvm(scala213).test,
+ pythonparse.jvm(scala213).test,
+ cssparse.jvm(scala213).test,
+ fastparse.jvm(scala213).test,
+ )
+ }
+
+ object benchScala33 extends PerfTestModule {
+ def scalaVersion0 = scala3
+ def sources = Task { bench2.sources() }
+ def moduleDeps = Seq(
+ scalaparse.jvm(scala3).test,
+ pythonparse.jvm(scala3).test,
+ cssparse.jvm(scala3).test,
+ fastparse.jvm(scala3).test,
+ )
+ }
+
+
+ object compare extends PerfTestModule {
+ def scalaVersion0 = scala212
+ def moduleDeps = Seq(
+ fastparse.jvm(scala212).test,
+ scalaparse.jvm(scala212).test,
+ pythonparse.jvm(scala212).test
+ )
+
+ def mvnDeps = super.mvnDeps() ++ Seq(
+ mvn"org.json4s::json4s-ast:3.6.0",
+ mvn"org.json4s::json4s-native:3.6.0",
+ mvn"org.json4s::json4s-jackson:3.6.0",
+ mvn"io.circe::circe-parser:0.9.1",
+ mvn"io.argonaut::argonaut:6.2",
+ mvn"com.typesafe.play::play-json:2.6.9",
+ mvn"com.fasterxml.jackson.core:jackson-databind:2.9.4",
+ mvn"com.lihaoyi::ujson:1.1.0",
+ mvn"org.scala-lang.modules::scala-parser-combinators:1.1.1",
+ mvn"org.python:jython:2.7.1b3"
+ )
+ }
+
+ trait PerfTestModule extends ScalaModule with TestModule.Utest {
+ def scalaVersion0: String
+ def scalaVersion = scalaVersion0
+ def scalacOptions = Seq("-opt:l:method")
+ def newResources = Task.Sources(perftests.moduleDir / "resources")
+ def resources = Task {
+ newResources() ++ fastparse.jvm(scalaVersion0).test.resources()
+ }
+
+ def mvnDeps = Seq(mvn"com.lihaoyi::utest::0.8.3")
+ }
+}
+
+object demo extends ScalaJSModule{
+ def scalaJSVersion = scalaJS1
+ def scalaVersion = scala213
+ def moduleDeps = Seq(
+ scalaparse.js(scala213),
+ cssparse.js(scala213),
+ pythonparse.js(scala213),
+ fastparse.js(scala213).test,
+ )
+
+ def mvnDeps = Seq(
+ mvn"org.scala-js::scalajs-dom::0.9.8",
+ mvn"com.lihaoyi::scalatags::0.9.3"
+ )
+}
diff --git a/build.sbt b/build.sbt
index c35077c5..ac078f5e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,7 +5,7 @@ lazy val readme = scalatex.ScalatexReadme(
source = "Readme",
autoResources = List("out.js", "JProfiler.png")
).settings(
- (resources in Compile) += baseDirectory.value/".."/"out"/"demo"/"fullOpt"/"dest"/"out.js",
- scalaVersion := "2.12.7"
+ (Compile / resources) += baseDirectory.value/".."/"out"/"demo"/"fullOpt.dest"/"out.js",
+ scalaVersion := "2.12.19"
)
diff --git a/build.sc b/build.sc
deleted file mode 100644
index f596a30b..00000000
--- a/build.sc
+++ /dev/null
@@ -1,257 +0,0 @@
-import mill._
-import scalalib._
-import scalajslib._
-import publish._
-
-val crossVersions = Seq("2.12.7", "2.13.0")
-object fastparse extends Module{
- object jvm extends Cross[fastparseJvmModule](crossVersions:_*)
- class fastparseJvmModule(val crossScalaVersion: String) extends FastparseModule{
- def platformSegment = "jvm"
- object test extends Tests with CommonTestModule{
- def platformSegment = "jvm"
- }
- }
-
- object js extends Cross[fastparseJsModule](crossVersions:_*)
- class fastparseJsModule(val crossScalaVersion: String) extends FastparseModule with ScalaJSModule {
- def platformSegment = "js"
- def scalaJSVersion = "0.6.28"
- object test extends Tests with CommonTestModule{
- def platformSegment = "js"
- }
- }
-}
-trait FastparseModule extends CommonCrossModule{
- def ivyDeps = Agg(
- ivy"com.lihaoyi::sourcecode::0.1.7",
- ivy"com.lihaoyi::geny::0.4.2"
- )
- def compileIvyDeps = Agg(
- ivy"org.scala-lang:scala-reflect:${scalaVersion()}"
- )
- def generatedSources = T{
- val dir = T.ctx().dest
- val file = dir/"fastparse"/"SequencerGen.scala"
- // Only go up to 21, because adding the last element makes it 22
- val tuples = (2 to 21).map{ i =>
- val ts = (1 to i) map ("T" + _)
- val chunks = (1 to i) map { n =>
- s"t._$n"
- }
- val tsD = (ts :+ "D").mkString(",")
- val anys = ts.map(_ => "Any").mkString(", ")
- s"""
- val BaseSequencer$i: Sequencer[($anys), Any, ($anys, Any)] =
- Sequencer0((t, d) => (${chunks.mkString(", ")}, d))
- implicit def Sequencer$i[$tsD]: Sequencer[(${ts.mkString(", ")}), D, ($tsD)] =
- BaseSequencer$i.asInstanceOf[Sequencer[(${ts.mkString(", ")}), D, ($tsD)]]
- """
- }
- val output = s"""
- package fastparse
- trait SequencerGen[Sequencer[_, _, _]] extends LowestPriSequencer[Sequencer]{
- protected[this] def Sequencer0[A, B, C](f: (A, B) => C): Sequencer[A, B, C]
- ${tuples.mkString("\n")}
- }
- trait LowestPriSequencer[Sequencer[_, _, _]]{
- protected[this] def Sequencer0[A, B, C](f: (A, B) => C): Sequencer[A, B, C]
- implicit def Sequencer1[T1, T2]: Sequencer[T1, T2, (T1, T2)] = Sequencer0{case (t1, t2) => (t1, t2)}
- }
- """.stripMargin
- os.write(file, output, createFolders = true)
- Seq(PathRef(file))
- }
-}
-
-object scalaparse extends Module{
- object js extends Cross[ScalaParseJsModule](crossVersions:_*)
- class ScalaParseJsModule(val crossScalaVersion: String) extends ExampleParseJsModule
-
- object jvm extends Cross[ScalaParseJvmModule](crossVersions:_*)
- class ScalaParseJvmModule(val crossScalaVersion: String) extends ExampleParseJvmModule
-}
-
-
-object cssparse extends Module{
- object js extends Cross[CssParseJsModule](crossVersions:_*)
- class CssParseJsModule(val crossScalaVersion: String) extends ExampleParseJsModule
-
- object jvm extends Cross[CssParseJvmModule](crossVersions:_*)
- class CssParseJvmModule(val crossScalaVersion: String) extends ExampleParseJvmModule
-}
-object pythonparse extends Module{
- object js extends Cross[PythonParseJsModule](crossVersions:_*)
- class PythonParseJsModule(val crossScalaVersion: String) extends ExampleParseJsModule
-
- object jvm extends Cross[PythonParseJvmModule](crossVersions:_*)
- class PythonParseJvmModule(val crossScalaVersion: String) extends ExampleParseJvmModule
-}
-
-trait ExampleParseJsModule extends CommonCrossModule with ScalaJSModule{
- def moduleDeps = Seq(fastparse.js())
- def scalaJSVersion = "0.6.28"
- def platformSegment = "js"
- object test extends Tests with CommonTestModule{
- def platformSegment = "js"
- }
-}
-
-
-trait ExampleParseJvmModule extends CommonCrossModule{
- def moduleDeps = Seq(fastparse.jvm())
- def platformSegment = "jvm"
- object test extends Tests with CommonTestModule{
- def platformSegment = "jvm"
- def ivyDeps = super.ivyDeps() ++ Agg(
- ivy"net.sourceforge.cssparser:cssparser:0.9.18",
- ivy"org.scala-lang:scala-compiler:${scalaVersion()}"
- )
- }
-}
-
-
-// Remove once mill has proper support for 2.13.0-RC1
-object CustomZincWorker extends mill.scalalib.ZincWorkerModule {
- def scalaCompilerBridgeSourceJar(scalaVersion: String, scalaOrganization: String) = {
- val (scalaVersion0, scalaBinaryVersion0, bridgeVersion) = scalaVersion match {
- case s if s.startsWith("2.13.") => ("2.13.0-M2", "2.13.0-M2", "1.2.5")
- case _ => (scalaVersion, mill.scalalib.api.Util.scalaBinaryVersion(scalaVersion), Versions.zinc)
- }
-
- val (bridgeDep, bridgeName) = {
- val org = "org.scala-sbt"
- val name = "compiler-bridge"
- (ivy"$org::$name:$bridgeVersion", s"${name}_$scalaBinaryVersion0")
- }
-
- mill.scalalib.Lib.resolveDependencies(
- repositories,
- Lib.depToDependency(_, scalaVersion0, ""),
- Seq(bridgeDep),
- sources = true
- ).map(deps =>
- mill.scalalib.api.Util.grepJar(deps.map(_.path), bridgeName, bridgeVersion, sources = true)
- )
- }
-}
-
-trait CommonCrossModule extends CrossScalaModule with PublishModule{
- def zincWorker: ZincWorkerModule =
- CustomZincWorker
-
- def publishVersion = "2.2.1"
- def artifactName = millModuleSegments.parts.dropRight(2).mkString("-")
- def pomSettings = PomSettings(
- description = artifactName(),
- organization = "com.lihaoyi",
- url = "https://github.com/lihaoyi/fastparse",
- licenses = Seq(License.MIT),
- scm = SCM(
- "git://github.com/lihaoyi/fastparse.git",
- "scm:git://github.com/lihaoyi/fastparse.git"
- ),
- developers = Seq(
- Developer("lihaoyi", "Li Haoyi","https://github.com/lihaoyi")
- )
- )
-
- def scalaDocPluginClasspath = T{ Agg[PathRef]() }
-// def scalacOptions = T{ if (scalaVersion() == "2.12.7") Seq("-opt:l:method") else Nil }
-
- def platformSegment: String
- def millSourcePath = super.millSourcePath / ammonite.ops.up
- def sources = T.sources(
- millSourcePath / "src",
- millSourcePath / s"src-$platformSegment"
- )
-
-
-}
-trait CommonTestModule extends ScalaModule with TestModule{
- def zincWorker: ZincWorkerModule =
- CustomZincWorker
-
- def platformSegment: String
- def ivyDeps = Agg(
- ivy"com.lihaoyi::utest::0.7.0",
- )
-
-// def scalacOptions = T{ if (scalaVersion() == "2.12.7") Seq("-opt:l:method") else Nil }
-
- def sources = T.sources(
- millSourcePath / "src",
- millSourcePath / s"src-$platformSegment"
- )
- def testFrameworks = Seq("utest.runner.Framework")
-}
-
-object perftests extends Module{
- object bench1 extends PerfTestModule {
- def ivyDeps = super.ivyDeps() ++ Agg(
- ivy"com.lihaoyi::scalaparse:1.0.0",
- ivy"com.lihaoyi::pythonparse:1.0.0",
- ivy"com.lihaoyi::cssparse:1.0.0",
- )
- }
-
- object bench2 extends PerfTestModule {
- def moduleDeps = Seq(
- scalaparse.jvm("2.12.7").test,
- pythonparse.jvm("2.12.7").test,
- cssparse.jvm("2.12.7").test,
- fastparse.jvm("2.12.7").test,
- )
-
- }
-
-
- object compare extends PerfTestModule {
- def moduleDeps = Seq(
- fastparse.jvm("2.12.7").test,
- scalaparse.jvm("2.12.7").test,
- pythonparse.jvm("2.12.7").test
- )
- def ivyDeps = super.ivyDeps() ++ Agg(
- ivy"org.json4s::json4s-ast:3.6.0",
- ivy"org.json4s::json4s-native:3.6.0",
- ivy"org.json4s::json4s-jackson:3.6.0",
- ivy"io.circe::circe-parser:0.9.1",
- ivy"io.argonaut::argonaut:6.2",
- ivy"com.typesafe.play::play-json:2.6.9",
- ivy"com.fasterxml.jackson.core:jackson-databind:2.9.4",
- ivy"com.lihaoyi::ujson:0.6.7",
- ivy"org.scala-lang.modules::scala-parser-combinators:1.1.1",
- ivy"org.python:jython:2.7.1b3"
- )
- }
-
- trait PerfTestModule extends ScalaModule with TestModule{
- def scalaVersion = "2.12.7"
- def scalacOptions = Seq("-opt:l:method")
- def resources = T.sources{
- Seq(PathRef(perftests.millSourcePath / "resources")) ++
- fastparse.jvm("2.12.7").test.resources()
- }
- def testFrameworks = Seq("utest.runner.Framework")
- def ivyDeps = Agg(
- ivy"com.lihaoyi::utest::0.7.1",
- ivy"org.scala-lang:scala-compiler:${scalaVersion()}"
- )
- }
-}
-
-object demo extends ScalaJSModule{
- def scalaJSVersion = "0.6.28"
- def scalaVersion = "2.13.0"
- def moduleDeps = Seq(
- scalaparse.js("2.13.0"),
- cssparse.js("2.13.0"),
- pythonparse.js("2.13.0"),
- fastparse.js("2.13.0").test,
- )
- def ivyDeps = Agg(
- ivy"org.scala-js::scalajs-dom::0.9.7",
- ivy"com.lihaoyi::scalatags::0.7.0"
- )
-}
diff --git a/cssparse/src/cssparse/CssParser.scala b/cssparse/src/cssparse/CssParser.scala
index 617562b6..b9c21c19 100644
--- a/cssparse/src/cssparse/CssParser.scala
+++ b/cssparse/src/cssparse/CssParser.scala
@@ -8,62 +8,62 @@ import fastparse.NoWhitespace._
object CssTokensParser {
- def comment[_: P] = P( "/*" ~/ (!"*/" ~ AnyChar).rep ~/ "*/")
+ def comment[$: P] = P( "/*" ~ (!"*/" ~ AnyChar).rep ~ "*/")
- def newline[_: P] = P( "\n" | "\r\n" | "\r" | "\f")
+ def newline[$: P] = P( "\n" | "\r\n" | "\r" | "\f")
- def whitespace[_: P] = P( " " | "\t" | newline)
+ def whitespace[$: P] = P( " " | "\t" | newline)
- def hexDigit[_: P] = P( CharIn("0-9", "a-f", "A-F") )
+ def hexDigit[$: P] = P( CharIn("0-9", "a-f", "A-F") )
- def escape[_: P] = P( "\\" ~ ((!(newline | hexDigit) ~ AnyChar) | (hexDigit.rep(min=1, max=6) ~ whitespace.?)) )
+ def escape[$: P] = P( "\\" ~ ((!(newline | hexDigit) ~ AnyChar) | (hexDigit.rep(min=1, max=6) ~ whitespace.?)) )
- def whitespaceToken[_: P] = P( comment | whitespace.rep(1) )
+ def whitespaceToken[$: P] = P( comment | whitespace.rep(1) )
- def ws[_: P] = P( whitespaceToken.rep )
+ def ws[$: P] = P( whitespaceToken.rep )
- def identToken[_: P] = {
+ def identToken[$: P] = {
def firstLetter = P( "-".? ~ (CharIn("a-zA-Z_") | escape) )
def rest = P( (CharIn("a-zA-Z0-9\\-_") | escape).rep )
- P( (firstLetter ~ rest).! ).map(Ast.IdentToken)
+ P( (firstLetter ~ rest).! ).map(Ast.IdentToken.apply)
}
- def functionToken[_: P] = P( identToken.! ~ "(" ).map(Ast.FunctionToken)
+ def functionToken[$: P] = P( identToken.! ~ "(" ).map(Ast.FunctionToken.apply)
- def atWordToken[_: P] = P( "@" ~ identToken.! ).map(Ast.AtWordToken)
+ def atWordToken[$: P] = P( "@" ~ identToken.! ).map(Ast.AtWordToken.apply)
- def hashToken[_: P] = P( "#" ~
- (CharIn("a-zA-Z0-9\\-_") | escape).rep(1).! ).map(Ast.HashWordToken)
+ def hashToken[$: P] = P( "#" ~
+ (CharIn("a-zA-Z0-9\\-_") | escape).rep(1).! ).map(Ast.HashWordToken.apply)
- def stringTokenChar[_: P] = P( (!("\"" | "'" | "\\" | newline ) ~ AnyChar) | escape | ("\\" ~ newline) )
+ def stringTokenChar[$: P] = P( (!("\"" | "'" | "\\" | newline ) ~ AnyChar) | escape | ("\\" ~ newline) )
- def stringToken[_: P] = {
+ def stringToken[$: P] = {
def stringQuotes1 = P( "\"" ~ stringTokenChar.rep.! ~ "\"" )
def stringQuotes2 = P( "'" ~ stringTokenChar.rep.! ~ "'" )
- P( stringQuotes1 | stringQuotes2 ).map(Ast.StringToken)
+ P( stringQuotes1 | stringQuotes2 ).map(Ast.StringToken.apply)
}
- def urlUnquoted[_: P] = P( ((!(CharIn("\"\'()\\\\") | whitespace) ~ AnyChar) | escape).rep(1) )
+ def urlUnquoted[$: P] = P( ((!(CharIn("\"\'()\\\\") | whitespace) ~ AnyChar) | escape).rep(1) )
- def urlToken[_: P] = P( "url("http://github.com/~%20(ws%20~%20(urlUnquoted.!%20|%20stringToken.!) ~ ws).?.! ~ ")" ).map(Ast.UrlToken)
+ def urlToken[$: P] = P( "url("http://github.com/~%20(ws%20~%20(urlUnquoted.!%20|%20stringToken.!) ~ ws).?.! ~ ")" ).map(Ast.UrlToken.apply)
- def digit[_: P] = P( CharIn("0-9") )
+ def digit[$: P] = P( CharIn("0-9") )
- def numberToken[_: P] = {
+ def numberToken[$: P] = {
def withPoint = P( digit.rep(1) ~ "." ~ digit.rep(1) )
def withoutPoint = P( digit.rep(1) )
def withE = P( "." ~ digit.rep(1) ~ (CharIn("eE") ~ CharIn("+\\-").? ~ digit.rep(1)).? )
- P( (CharIn("+\\-").? ~ (withPoint | withoutPoint | withE)).! ).map(Ast.NumberToken)
+ P( (CharIn("+\\-").? ~ (withPoint | withoutPoint | withE)).! ).map(Ast.NumberToken.apply)
}
- def dimensionToken[_: P] = P( numberToken.! ~ identToken.! ) map
+ def dimensionToken[$: P] = P( numberToken.! ~ identToken.! ) map
{case (number, ident) => Ast.DimensionToken(number, ident)}
- def percentageToken[_: P] = P( numberToken.! ~ "%" ).map(Ast.PercentageToken)
+ def percentageToken[$: P] = P( numberToken.! ~ "%" ).map(Ast.PercentageToken.apply)
- def unicodeRangeToken[_: P] = {
+ def unicodeRangeToken[$: P] = {
def questionMarks = P( (hexDigit.rep(min=1, max=6) ~ "?".rep(min=1, max=5)).! )
def range = P( hexDigit.rep(min=1, max=6).! ~ "-" ~ hexDigit.rep(min=1, max=6).! )
def regular = P( hexDigit.rep(min=1, max=6).! )
@@ -75,26 +75,26 @@ object CssTokensParser {
}
- def includeMatchToken[_: P] = P( "~=" ).map{ _ => Ast.IncludeMatchToken()}
- def dashMatchToken[_: P] = P( "|=" ).map{ _ => Ast.DashMatchToken()}
- def prefixMatchToken[_: P] = P( "^=" ).map{ _ => Ast.PrefixMatchToken()}
- def suffixMatchToken[_: P] = P( "$=" ).map{_ => Ast.SuffixMatchToken()}
- def substringMatchToken[_: P] = P( "*=" ).map{_ => Ast.SubstringMatchToken()}
- def matchToken[_: P] = P(
+ def includeMatchToken[$: P] = P( "~=" ).map{ _ => Ast.IncludeMatchToken()}
+ def dashMatchToken[$: P] = P( "|=" ).map{ _ => Ast.DashMatchToken()}
+ def prefixMatchToken[$: P] = P( "^=" ).map{ _ => Ast.PrefixMatchToken()}
+ def suffixMatchToken[$: P] = P( "$=" ).map{_ => Ast.SuffixMatchToken()}
+ def substringMatchToken[$: P] = P( "*=" ).map{_ => Ast.SubstringMatchToken()}
+ def matchToken[$: P] = P(
includeMatchToken | dashMatchToken |
prefixMatchToken | suffixMatchToken |
suffixMatchToken | substringMatchToken |
substringMatchToken
)
- def columnToken[_: P] = P( "||" ).map{_ => Ast.ColumnToken()}
- def CDOToken[_: P] = P( "<!--" ).map{_ => Ast.CdoToken()}
- def CDCToken[_: P] = P( "-->" ).map{_ => Ast.CdcToken()}
+ def columnToken[$: P] = P( "||" ).map{_ => Ast.ColumnToken()}
+ def CDOToken[$: P] = P( "<!--" ).map{_ => Ast.CdoToken()}
+ def CDCToken[$: P] = P( "-->" ).map{_ => Ast.CdcToken()}
- def delimToken[_: P] = P( ("::" | CharIn("#$*+,\\-./:<>^~=!")).! ).map(Ast.DelimToken)
+ def delimToken[$: P] = P( ("::" | CharIn("#$*+,\\-./:<>^~=!")).! ).map(Ast.DelimToken.apply)
// any token except functionToken
- def simpleToken[_: P]: P[Option[Ast.SimpleToken]] = P(
+ def simpleToken[$: P]: P[Option[Ast.SimpleToken]] = P(
whitespaceToken | atWordToken |
hashToken | matchToken |
columnToken | CDOToken |
@@ -108,16 +108,16 @@ object CssTokensParser {
case _ => None
}
- def bracketsBlock[_: P] = P( "(" ~ componentValue.rep ~ ")" ).map(values => Ast.BracketsBlock(values.flatten))
- def curlyBracketsBlock[_: P] = P( "{" ~ componentValue.rep ~ "}" ).map(values => Ast.CurlyBracketsBlock(values.flatten))
- def squareBracketsBlock[_: P] = P( "[" ~ componentValue.rep ~ "]" ).map(values => Ast.SquareBracketsBlock(values.flatten))
+ def bracketsBlock[$: P] = P( "(" ~ componentValue.rep ~ ")" ).map(values => Ast.BracketsBlock(values.flatten))
+ def curlyBracketsBlock[$: P] = P( "{" ~ componentValue.rep ~ "}" ).map(values => Ast.CurlyBracketsBlock(values.flatten))
+ def squareBracketsBlock[$: P] = P( "[" ~ componentValue.rep ~ "]" ).map(values => Ast.SquareBracketsBlock(values.flatten))
- def functionBlock[_: P] = P( functionToken ~ componentValue.rep ~ ")").map{
+ def functionBlock[$: P] = P( functionToken ~ componentValue.rep ~ ")").map{
case (Ast.FunctionToken(name), values: Seq[Option[Ast.ComponentValue]]) =>
Ast.FunctionBlock(name, Ast.BracketsBlock(values.flatten))
}
- def componentValue[_: P]: P[Option[Ast.ComponentValue]] = {
+ def componentValue[$: P]: P[Option[Ast.ComponentValue]] = {
def blockOpt = P( bracketsBlock | curlyBracketsBlock | squareBracketsBlock | functionBlock ).map(Some(_))
P( simpleToken | blockOpt )
}
@@ -127,13 +127,13 @@ object CssRulesParser {
import CssTokensParser._
- def allSelector[_: P] = P( "*" ).map{_ => Ast.AllSelector()}
+ def allSelector[$: P] = P( "*" ).map{_ => Ast.AllSelector()}
- def elementSelector[_: P] = P( identToken.! ).map(Ast.ElementSelector)
+ def elementSelector[$: P] = P( identToken.! ).map(Ast.ElementSelector.apply)
- def idSelector[_: P] = P( "#" ~ identToken.! ).map(Ast.IdSelector)
+ def idSelector[$: P] = P( "#" ~ identToken.! ).map(Ast.IdSelector.apply)
- def attributeSelector[_: P] = {
+ def attributeSelector[$: P] = {
def bracket = P( "[" ~ identToken.! ~ (( "=" | matchToken).! ~ (stringToken | identToken)).? ~ "]" )
P( identToken.!.? ~ bracket.rep(1) ).map{
@@ -141,81 +141,83 @@ object CssRulesParser {
case (ident, Some((token, Ast.StringToken(string)))) => (ident, Some(token), Some(string))
case (ident, Some((token, Ast.IdentToken(string)))) => (ident, Some(token), Some(string))
case (ident, None) => (ident, None, None)
+ case _ => throw new IllegalArgumentException("Should not happen.")
})
}
}
- def partSelector[_: P] = P( allSelector | attributeSelector | elementSelector )
+ def partSelector[$: P] = P( allSelector | attributeSelector | elementSelector )
- def classSelectorPart[_: P] = P( "." ~ partSelector ).map(Ast.ClassSelectorPart)
+ def classSelectorPart[$: P] = P( "." ~ partSelector ).map(Ast.ClassSelectorPart.apply)
- def pseudoSelectorPart[_: P] = P( (("::" | ":") ~ identToken).! ~ ("(" ~ componentValue.rep(1) ~ ")").? ).map{
+ def pseudoSelectorPart[$: P] = P( (("::" | ":") ~ identToken).! ~ ("(" ~ componentValue.rep(1) ~ ")").? ).map{
case (name, optValues) =>
Ast.PseudoSelectorPart(name, optValues.toSeq.flatten.flatten)
}
- def complexSelectorPart[_: P] = P( pseudoSelectorPart | classSelectorPart )
+ def complexSelectorPart[$: P] = P( pseudoSelectorPart | classSelectorPart )
- def complexSelector[_: P] = P( partSelector.? ~ complexSelectorPart.rep(1) ).map{
+ def complexSelector[$: P] = P( partSelector.? ~ complexSelectorPart.rep(1) ).map{
case (part, parts) => Ast.ComplexSelector(part, parts)
}
- def singleSelector[_: P]: P[Ast.SingleSelector] = P( complexSelector | partSelector | idSelector | allSelector )
+ def singleSelector[$: P]: P[Ast.SingleSelector] = P( complexSelector | partSelector | idSelector | allSelector )
- def selectorDelim[_: P] = P( (ws ~ CharIn(",>+~").! ~ ws) | whitespaceToken.rep(1).! ).map{
+ def selectorDelim[$: P] = P( (ws ~ CharIn(",>+~").! ~ ws) | whitespaceToken.rep(1).! ).map{
case s if s.startsWith(" ") => " "
case s => s
}
- def multipleSelector[_: P] = P( singleSelector ~ (selectorDelim ~ singleSelector).rep(1) ).map{
+ def multipleSelector[$: P] = P( singleSelector ~ (selectorDelim ~ singleSelector).rep(1) ).map{
case (firstSelector, selectors) => Ast.MultipleSelector(firstSelector, selectors)
}
- def selector[_: P]: P[Ast.Selector] = P( multipleSelector | singleSelector | allSelector )
+ def selector[$: P]: P[Ast.Selector] = P( multipleSelector | singleSelector | allSelector )
- def important[_: P] = P( "!" ~ ws ~ "important" ~ ws)
+ def important[$: P] = P( "!" ~ ws ~ "important" ~ ws)
- def declaration[_: P] = P( identToken.! ~ ws ~ ":" ~ (!CharIn(";}!") ~ componentValue).rep ~ important.!.?).map{
+ def declaration[$: P] = P( identToken.! ~ ws ~ ":" ~ (!CharIn(";}!") ~ componentValue).rep ~ important.!.?).map{
case (ident, values, Some(_)) => Ast.Declaration(ident, values.flatten, isImportant = true)
case (ident, values, None) => Ast.Declaration(ident, values.flatten, isImportant = false)
}
- def simpleAtRule[_: P] = P( atWordToken ~ (!CharIn(";{}") ~ componentValue).rep ).map{
+ def simpleAtRule[$: P] = P( atWordToken ~ (!CharIn(";{}") ~ componentValue).rep ).map{
case (Ast.AtWordToken(name), values) => Ast.AtRule(name, values.flatten, None)
}
- def declarationList[_: P] = P( (ws ~ (simpleAtRule | declaration) ~ ws ~ (&("}") | ";")).rep ).map(
+ def declarationList[$: P] = P( (ws ~ (simpleAtRule | declaration) ~ ws ~ (&("}") | ";")).rep ).map(
s => Ast.DeclarationList(s.map{
case atRule: Ast.AtRule => Right(atRule)
case declaration: Ast.Declaration => Left(declaration)
}))
- def declAtRule[_: P] =
+ def declAtRule[$: P] =
P( atWordToken ~ (!CharIn(";{}") ~ componentValue).rep ~ "{" ~ declarationList ~ ws ~ "}" ).map{
case (Ast.AtWordToken(name), values, block) => Ast.AtRule(name, values.flatten, Some(Left(block)))
}
- def complexAtRule[_: P] =
+ def complexAtRule[$: P] =
P( atWordToken ~ (!CharIn(";{}") ~ componentValue).rep ~ "{" ~ ruleList ~ ws ~ "}" ).map{
case (Ast.AtWordToken(name), values, block) => Ast.AtRule(name, values.flatten, Some(Right(block)))
}
- def atRule[_: P] = P( complexAtRule | declAtRule | (simpleAtRule ~ ";") )
+ def atRule[$: P] = P( complexAtRule | declAtRule | (simpleAtRule ~ ";") )
- def qualifiedRule[_: P] = P( ((selector ~ ws) | (!"{" ~ componentValue).rep) ~ "{" ~ declarationList ~ ws ~ "}" ).map{
- case (values: Seq[Option[Ast.ComponentValue]], block) => Ast.QualifiedRule(Right(values.flatten), block)
+ def qualifiedRule[$: P] = P( ((selector ~ ws) | (!"{" ~ componentValue).rep) ~ "{" ~ declarationList ~ ws ~ "}" ).map{
+ case (values: Seq[Option[Ast.ComponentValue]] @unchecked, block) => Ast.QualifiedRule(Right(values.flatten), block)
case (selector: Ast.Selector, block) => Ast.QualifiedRule(Left(selector), block)
+ case _ => throw new IllegalArgumentException("Should not happen.")
}
- def ruleList[_: P]: P[Ast.RuleList] = P( (whitespaceToken | atRule | qualifiedRule).rep ).map{
+ def ruleList[$: P]: P[Ast.RuleList] = P( (whitespaceToken | atRule | qualifiedRule).rep ).map{
s => Ast.RuleList(s flatMap {
case rule: Ast.Rule => Some(rule)
case _ => None
})
}
- def stylesheet[_: P] = P( (CDOToken | CDCToken | whitespaceToken | atRule | qualifiedRule).rep ).map{
+ def stylesheet[$: P] = P( (CDOToken | CDCToken | whitespaceToken | atRule | qualifiedRule).rep ).map{
s => Ast.Stylesheet(s flatMap {
case rule: Ast.Rule => Some(Left(rule))
case ctoken: Ast.CToken => Some(Right(ctoken))
diff --git a/cssparse/test/src-jvm/cssparse/ProjectTests.scala b/cssparse/test/src-jvm/cssparse/ProjectTests.scala
index a9dcc397..53c05241 100644
--- a/cssparse/test/src-jvm/cssparse/ProjectTests.scala
+++ b/cssparse/test/src-jvm/cssparse/ProjectTests.scala
@@ -27,11 +27,17 @@ object ProjectTests extends TestSuite {
TestUtil.checkPrinting(css, tag = name)
}
- val tests = this {
- Seq("mkdir", "-p", "out/repos").!
+ val tests = Tests {
- "twbs/bootstrap/raw/2c2ac3356425e192f7537227508c809a14aa5850/dist/css/bootstrap.css" - checkCss()
- "twbs/bootstrap/raw/2c2ac3356425e192f7537227508c809a14aa5850/dist/css/bootstrap.min.css" - checkCss()
+
+ test("twbs/bootstrap/raw/2c2ac3356425e192f7537227508c809a14aa5850/dist/css/bootstrap.css") - {
+ Seq("mkdir", "-p", "out/repos").!
+ checkCss()
+ }
+ test("twbs/bootstrap/raw/2c2ac3356425e192f7537227508c809a14aa5850/dist/css/bootstrap.min.css") - {
+ Seq("mkdir", "-p", "out/repos").!
+ checkCss()
+ }
// "primer/primer/raw/2c2ac3356425e192f7537227508c809a14aa5850/css/primer.css" - checkCss()
}
}
diff --git a/cssparse/test/src-jvm/cssparse/TestUtil.scala b/cssparse/test/src-jvm/cssparse/TestUtil.scala
index fdd3bb3d..088d411b 100644
--- a/cssparse/test/src-jvm/cssparse/TestUtil.scala
+++ b/cssparse/test/src-jvm/cssparse/TestUtil.scala
@@ -49,6 +49,7 @@ object TestUtil {
def warning(ex: CSSParseException) = println("WARNING " + ex)
})
val sheet = parser.parseStyleSheet(source, null, null)
+ assert(sheet != null)
errors.toSeq
}
diff --git a/cssparse/test/src/cssparse/CssTests.scala b/cssparse/test/src/cssparse/CssTests.scala
index f705ac2e..a3d5feec 100644
--- a/cssparse/test/src/cssparse/CssTests.scala
+++ b/cssparse/test/src/cssparse/CssTests.scala
@@ -22,7 +22,7 @@ object CssTests extends TestSuite {
|
""".stripMargin,
CssRulesParser.ruleList(_)
- )
+ ) : @unchecked
assert(
value1 ==
@@ -54,7 +54,7 @@ object CssTests extends TestSuite {
| -ms-text-size-adjust: 100%;
|}
|
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
assert(
value2 ==
@@ -81,7 +81,7 @@ object CssTests extends TestSuite {
| box-shadow: none !important;
| }
|
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
val expected = RuleList(Seq(
QualifiedRule(
@@ -108,7 +108,7 @@ object CssTests extends TestSuite {
| background-color: #31b0d5;
| }
|
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
assert(
value4 ==
@@ -128,7 +128,7 @@ object CssTests extends TestSuite {
}
test("test5"){
- val Parsed.Success(value5, index5) = parse(
+ val Parsed.Success(value5, _) = parse(
"""
|
| [hidden],
@@ -136,7 +136,7 @@ object CssTests extends TestSuite {
| display: none;
| }
|
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
assert(value5 == RuleList(Seq(
QualifiedRule(
@@ -148,7 +148,7 @@ object CssTests extends TestSuite {
}
test("test6"){
- val Parsed.Success(value6, index6) = parse(
+ val Parsed.Success(value6, _) = parse(
"""
|
|@media (min-width: 768px) {
@@ -157,7 +157,7 @@ object CssTests extends TestSuite {
| }
| }
|
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
assert(value6 == RuleList(Seq(
AtRule("media", Seq(
@@ -170,7 +170,7 @@ object CssTests extends TestSuite {
}
test("test7"){
- val Parsed.Success(value7, index7) = parse(
+ val Parsed.Success(value7, _) = parse(
"""|
|@rule {
| unicode-range: U+26; /* single codepoint */
@@ -179,7 +179,7 @@ object CssTests extends TestSuite {
| unicode-range: U+4??; /* wildcard range */
| unicode-range: U+0025-00FF, U+4??;
|}
- """.stripMargin, CssRulesParser.ruleList(_))
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
assert(value7 == RuleList(Seq(
AtRule("rule", Seq(), Some(Left(
DeclarationList(Seq(
@@ -191,6 +191,30 @@ object CssTests extends TestSuite {
UnicodeRangeToken("0025", "00FF"), DelimToken(","), UnicodeRangeToken("4??", "4??")), false))))))))))
}
+
+ // https://github.com/com-lihaoyi/fastparse/issues/255
+ test("issue-#255: comments at the end of a block"){
+ val Parsed.Success(value2, index2) = parse(
+ """
+ |p {
+ | font-family: sans-serif;
+ | color: red;
+ | /* test comment */
+ |}
+ |
+ """.stripMargin, CssRulesParser.ruleList(_)) : @unchecked
+
+ assert(
+ value2 ==
+ RuleList(Seq(
+ QualifiedRule(
+ Left(ElementSelector("p")),
+ DeclarationList(Seq(
+ Left(Declaration("font-family", Seq(IdentToken("sans-serif")), false)),
+ Left(Declaration("color", Seq(IdentToken("red")), false))))))),
+ index2 == 80
+ )
+ }
}
}
}
diff --git a/demo/src/demo/DemoMain.scala b/demo/src/demo/DemoMain.scala
index 9256874e..52ce2431 100644
--- a/demo/src/demo/DemoMain.scala
+++ b/demo/src/demo/DemoMain.scala
@@ -10,7 +10,7 @@ import scala.scalajs.js
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
import scala.scalajs.js.typedarray.{ArrayBuffer, Uint8Array}
-@JSExportTopLevel("demo.DemoMain")
+@JSExportTopLevel("DemoMain")
object DemoMain {
@JSExport
def scalaparser(container: html.Div) = {
diff --git a/fastparse/src-2.12/fastparse/internal/NoWarn.scala b/fastparse/src-2.12/fastparse/internal/NoWarn.scala
new file mode 100644
index 00000000..4c94bc45
--- /dev/null
+++ b/fastparse/src-2.12/fastparse/internal/NoWarn.scala
@@ -0,0 +1,6 @@
+package fastparse.internal
+
+object NoWarn {
+ @deprecated("Use scala.annotation.nowarn instead", "3.1.1")
+ class nowarn(msg: String = "")
+}
diff --git a/fastparse/src-2.13/fastparse/internal/NoWarn.scala b/fastparse/src-2.13/fastparse/internal/NoWarn.scala
new file mode 100644
index 00000000..5582414f
--- /dev/null
+++ b/fastparse/src-2.13/fastparse/internal/NoWarn.scala
@@ -0,0 +1,6 @@
+package fastparse.internal
+
+object NoWarn{
+ @deprecated("Use scala.annotation.nowarn instead", "3.1.1")
+ type nowarn = scala.annotation.nowarn
+}
\ No newline at end of file
diff --git a/fastparse/src/fastparse/internal/MacroImpls.scala b/fastparse/src-2/fastparse/internal/MacroImpls.scala
similarity index 90%
rename from fastparse/src/fastparse/internal/MacroImpls.scala
rename to fastparse/src-2/fastparse/internal/MacroImpls.scala
index 1e2dfa03..ec786fa4 100644
--- a/fastparse/src/fastparse/internal/MacroImpls.scala
+++ b/fastparse/src-2/fastparse/internal/MacroImpls.scala
@@ -2,7 +2,6 @@ package fastparse.internal
import fastparse.{EagerOps, Implicits, ParserInput, ParsingRun}
-import scala.annotation.tailrec
import scala.reflect.macros.blackbox.Context
/**
@@ -29,7 +28,7 @@ object MacroImpls {
else if (f.splice(ctx1.successValue.asInstanceOf[T])) ctx1.asInstanceOf[ParsingRun[T]]
else ctx1.freshFailure().asInstanceOf[ParsingRun[T]]
- if (ctx1.verboseFailures) ctx1.aggregateTerminal(startIndex, () => "filter")
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(startIndex, () => "filter")
res
}
}
@@ -52,11 +51,9 @@ object MacroImpls {
ctx1.instrument.afterParse(name.splice.value, ctx0.index, ctx0.isSuccess)
}
if (ctx0.verboseFailures) {
- ctx0.aggregateMsg(
- startIndex,
- Msgs(List(new Lazy(() => name.splice.value))),
- ctx0.failureGroupAggregate,
- startIndex < ctx0.traceIndex
+ ctx0.reportAggregateMsg(
+ () => name.splice.value,
+ forceAggregate = startIndex < ctx0.traceIndex
)
if (!ctx0.isSuccess){
ctx0.failureStack = (name.splice.value -> startIndex) :: ctx0.failureStack
@@ -89,7 +86,7 @@ object MacroImpls {
}else{
ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
}
- if (ctx1.verboseFailures) ctx1.aggregateTerminal(index, () => literalized.splice)
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => literalized.splice)
res
}
@@ -119,7 +116,7 @@ object MacroImpls {
ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
}
if (ctx1.verboseFailures) {
- ctx1.aggregateTerminal(index, () => literalized.splice)
+ ctx1.reportTerminalMsg(index, () => literalized.splice)
}
res
@@ -134,7 +131,7 @@ object MacroImpls {
val res =
if (Util.startsWith(ctx1.input, s1, index)) ctx1.freshSuccessUnit(index + s1.length)
else ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
- if (ctx1.verboseFailures) ctx1.aggregateTerminal(index, () => Util.literalize(s1))
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => Util.literalize(s1))
res
}
}
@@ -160,6 +157,25 @@ object MacroImpls {
}
}
+ def collectMacro[T: c.WeakTypeTag, V: c.WeakTypeTag]
+ (c: Context)
+ (f: c.Expr[PartialFunction[T, V]]): c.Expr[ParsingRun[V]] = {
+ import c.universe._
+
+ val lhs0 = c.prefix.asInstanceOf[c.Expr[EagerOps[T]]]
+ reify {
+ lhs0.splice.parse0 match {
+ case lhs =>
+ if (!lhs.isSuccess) lhs.asInstanceOf[ParsingRun[V]]
+ else {
+ val this2 = lhs.asInstanceOf[ParsingRun[V]]
+ val f2 = f.splice.andThen(v => this2.successValue = v)
+ f2.applyOrElse(this2.successValue.asInstanceOf[T], {_: T => this2.freshFailure()})
+ this2
+ }
+ }
+ }
+ }
def flatMapXMacro[T: c.WeakTypeTag, V: c.WeakTypeTag]
(c: Context)
@@ -177,7 +193,7 @@ object MacroImpls {
def flatMapMacro[T: c.WeakTypeTag, V: c.WeakTypeTag]
(c: Context)
(f: c.Expr[T => ParsingRun[V]])
- (whitespace: c.Expr[ParsingRun[Any] => ParsingRun[Unit]]): c.Expr[ParsingRun[V]] = {
+ (whitespace: c.Expr[fastparse.Whitespace]): c.Expr[ParsingRun[V]] = {
import c.universe._
val lhs0 = c.prefix.asInstanceOf[c.Expr[EagerOps[T]]]
@@ -212,8 +228,8 @@ object MacroImpls {
val startPos = ctx5.index
lhs0.splice
- val lhsMsg = ctx5.shortParserMsg
- val lhsAggregate = ctx5.failureGroupAggregate
+ val lhsMsg = ctx5.shortMsg
+ val lhsAggregate = ctx5.aggregateMsgs
if (ctx5.isSuccess) {
ctx5.cut |= oldCut
ctx5.asInstanceOf[ParsingRun[V]]
@@ -223,16 +239,16 @@ object MacroImpls {
val verboseFailures = ctx5.verboseFailures
ctx5.index = startPos
- if (verboseFailures) ctx5.aggregateMsg(startPos, lhsMsg, lhsAggregate)
+ if (verboseFailures) ctx5.reportAggregateMsg(lhsMsg)
ctx5.cut = false
other.splice
- val rhsMsg = ctx5.shortParserMsg
+ val rhsMsg = ctx5.shortMsg
val rhsCut = ctx5.cut
val endCut = rhsCut | oldCut
if (!ctx5.isSuccess && !rhsCut) ctx5.freshFailure(startPos)
ctx5.cut = endCut
- if (verboseFailures) ctx5.aggregateMsg(startPos, rhsMsg ::: lhsMsg, ctx5.failureGroupAggregate ::: lhsAggregate)
+ if (verboseFailures) ctx5.reportAggregateMsg(rhsMsg ::: lhsMsg, ctx5.aggregateMsgs ::: lhsAggregate)
ctx5.asInstanceOf[ParsingRun[V]]
}
}
@@ -331,7 +347,7 @@ object MacroImpls {
)
}
- val bracketed = "StringIn(" + literals.map(Util.literalize(_)).mkString(", ") + ")"
+ val bracketed = literals.map(Util.literalize(_)).toList
val res = q"""
$ctx match{ case $ctx1 =>
@@ -344,7 +360,7 @@ object MacroImpls {
val res =
if ($output != -1) $ctx1.freshSuccessUnit(index = $output)
else $ctx1.freshFailure()
- if ($ctx1.verboseFailures) $ctx1.setMsg($index, () => $bracketed)
+ if ($ctx1.verboseFailures) $ctx1.reportTerminalMsg($index, $bracketed)
res
}
"""
@@ -410,7 +426,7 @@ object MacroImpls {
case true => ctx1.freshSuccessUnit(index + 1)
case false => ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
}
- if (ctx1.verboseFailures) ctx1.aggregateTerminal(index, () => bracketed.splice)
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => bracketed.splice)
res
}
}
@@ -420,7 +436,7 @@ object MacroImpls {
(c: Context)
(rhs: c.Expr[ParsingRun[V]], cut: Boolean)
(s: c.Expr[Implicits.Sequencer[T, V, R]],
- whitespace: Option[c.Expr[ParsingRun[Any] => ParsingRun[Unit]]],
+ whitespace: Option[c.Expr[fastparse.Whitespace]],
ctx: c.Expr[ParsingRun[_]]): c.Expr[ParsingRun[R]] = {
import c.universe._
@@ -447,8 +463,8 @@ object MacroImpls {
else {
val $preRhsIndex = $ctx1.index
$rhs
- val $rhsAggregate = $ctx1.failureGroupAggregate
- val $rhsMsg = $ctx1.shortParserMsg
+ val $rhsAggregate = $ctx1.aggregateMsgs
+ val $rhsMsg = $ctx1.shortMsg
val $res =
if (!$ctx1.isSuccess) {
$setCut
@@ -469,11 +485,10 @@ object MacroImpls {
)
}
- if ($ctx1.verboseFailures) $ctx1.aggregateMsg(
- $preLhsIndex,
+ if ($ctx1.verboseFailures) $ctx1.reportAggregateMsg(
_root_.fastparse.internal.Util.joinBinOp($lhsMsg, $rhsMsg),
$rhsAggregate ::: $lhsAggregate,
- // We override the failureGroupAggregate to avoid building an `a ~ b`
+ // We override the aggregateMsgs to avoid building an `a ~ b`
// aggregate msg in the specific case where the LHS parser fails to
// make any progress past `startIndex`. This finds cases like `a.? ~ b`
// or `a.rep ~ b` and lets use flatten them out into `a | b`
@@ -503,8 +518,8 @@ object MacroImpls {
if (!$ctx1.isSuccess) $ctx1
else {
val $postLhsIndex = $ctx1.index
- val $lhsAggregate = $ctx1.failureGroupAggregate
- val $lhsMsg = $ctx1.shortParserMsg
+ val $lhsAggregate = $ctx1.aggregateMsgs
+ val $lhsMsg = $ctx1.shortMsg
$setCut
if ($postLhsIndex > $preLhsIndex && $ctx1.checkForDrop()) $input.dropBuffer($postLhsIndex)
@@ -551,7 +566,7 @@ object MacroImpls {
else {
ctx0.freshSuccessUnit(ctx0.index + 1)
}
- if (ctx0.verboseFailures) ctx0.aggregateTerminal(startIndex, () => s"char-pred(${p0})")
+ if (ctx0.verboseFailures) ctx0.reportTerminalMsg(startIndex, () => s"char-pred(${p0})")
res
}
}
@@ -594,7 +609,7 @@ object MacroImpls {
if ($index >= $goal) $ctx1.freshSuccessUnit(index = $index)
else $ctx1.freshFailure()
- if ($ctx1.verboseFailures) $ctx1.aggregateTerminal($start, () => $bracketed)
+ if ($ctx1.verboseFailures) $ctx1.reportTerminalMsg($start, () => $bracketed)
res
}
"""
@@ -622,7 +637,7 @@ object MacroImpls {
val res =
if (index >= goal) ctx0.freshSuccessUnit(index = index)
else ctx0.freshFailure()
- if (ctx0.verboseFailures) ctx0.aggregateTerminal(start, () => s"chars-while($p0, ${min.splice})")
+ if (ctx0.verboseFailures) ctx0.reportTerminalMsg(start, () => s"chars-while($p0, ${min.splice})")
res
}
}
@@ -673,10 +688,9 @@ object MacroImpls {
}
if (ctx1.verboseFailures) {
- val msg = ctx1.shortParserMsg
- val agg = ctx1.failureGroupAggregate
+ val msg = ctx1.shortMsg
if (!postSuccess){
- ctx1.aggregateMsg(startPos, () => msg.render + ".?", agg)
+ ctx1.reportAggregateMsg(() => msg.render + ".?")
}
}
res
@@ -689,7 +703,7 @@ object MacroImpls {
(c: Context)
(other: c.Expr[ParsingRun[V]])
(s: c.Expr[Implicits.Sequencer[T, V, R]],
- whitespace: c.Expr[ParsingRun[Any] => ParsingRun[Unit]],
+ whitespace: c.Expr[fastparse.Whitespace],
ctx: c.Expr[ParsingRun[_]]): c.Expr[ParsingRun[R]] = {
MacroImpls.parsedSequence0[T, V, R](c)(other, false)(s, Some(whitespace), ctx)
}
@@ -698,7 +712,7 @@ object MacroImpls {
(c: Context)
(other: c.Expr[ParsingRun[V]])
(s: c.Expr[Implicits.Sequencer[T, V, R]],
- whitespace: c.Expr[ParsingRun[Any] => ParsingRun[Unit]],
+ whitespace: c.Expr[fastparse.Whitespace],
ctx: c.Expr[ParsingRun[_]]): c.Expr[ParsingRun[R]] = {
MacroImpls.parsedSequence0[T, V, R](c)(other, true)(s, Some(whitespace), ctx)
}
@@ -716,5 +730,4 @@ object MacroImpls {
ctx: c.Expr[ParsingRun[_]]): c.Expr[ParsingRun[R]] = {
MacroImpls.parsedSequence0[T, V, R](c)(other, true)(s, None, ctx)
}
-
}
diff --git a/fastparse/src-2/fastparse/internal/MacroRepImpls.scala b/fastparse/src-2/fastparse/internal/MacroRepImpls.scala
new file mode 100644
index 00000000..77d6ce8c
--- /dev/null
+++ b/fastparse/src-2/fastparse/internal/MacroRepImpls.scala
@@ -0,0 +1,141 @@
+package fastparse.internal
+
+import scala.reflect.macros.blackbox.Context
+
+/**
+ * Implementations of the various `.rep`/`.repX` overloads. The most common
+ * and simple overloads are implemented as macros for performance, while the
+ * more complex/general cases are left as normal methods to avoid code bloat
+ * and allow the use of default/named arguments (which don't work in macros
+ * due to https://github.com/scala/bug/issues/5920).
+ *
+ * Even the normal method overloads are manually-specialized to some extent
+ * for various sorts of inputs as a best-effort attempt ot minimize branching
+ * in the hot paths.
+ */
+object MacroRepImpls{
+ def repXMacro0[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
+ (whitespace: Option[c.Tree], min: Option[c.Tree])
+ (repeater: c.Tree,
+ ctx: c.Tree): c.Tree = {
+ import c.universe._
+ val repeater1 = TermName(c.freshName("repeater"))
+ val ctx1 = TermName(c.freshName("repeater"))
+ val acc = TermName(c.freshName("acc"))
+ val startIndex = TermName(c.freshName("startIndex"))
+ val count = TermName(c.freshName("count"))
+ val beforeSepIndex = TermName(c.freshName("beforeSepIndex"))
+ val rec = TermName(c.freshName("rec"))
+ val originalCut = TermName(c.freshName("originalCut"))
+ val parsedMsg = TermName(c.freshName("parsedMsg"))
+ val lastAgg = TermName(c.freshName("lastAgg"))
+ val parsedAgg = TermName(c.freshName("parsedAgg"))
+ val ((endSnippet, _), minCut) = min match{
+ case None =>
+ q"""
+ $ctx1.freshSuccess($repeater1.result($acc), $startIndex, $originalCut)
+ """ ->
+ q""" "" """ ->
+ q"""false"""
+ case Some(min1) =>
+ q"""
+ if ($count < $min1) $ctx1.augmentFailure($startIndex, $originalCut)
+ else $ctx1.freshSuccess($repeater1.result($acc), $startIndex, $originalCut)
+ """ ->
+ q"""if($min1 == 0) "" else "(" + $min1 + ")"""" ->
+ q"""$originalCut && ($count < $min1)"""
+ }
+
+ val rhsSnippet =
+ q"""
+ if (!$ctx1.isSuccess && $ctx1.cut) $ctx1.asInstanceOf[_root_.fastparse.ParsingRun[scala.Nothing]]
+ else {
+ $ctx1.cut = false
+ $rec($beforeSepIndex, $count + 1, $parsedAgg)
+ }
+ """
+
+ val wsSnippet = whitespace match{
+ case None => q"$rec($beforeSepIndex, $count + 1, $parsedAgg)"
+ case Some(ws) =>
+ if (ws.tpe =:= typeOf[fastparse.NoWhitespace.noWhitespaceImplicit.type])
+ rhsSnippet
+ else
+ q"""
+ _root_.fastparse.internal.Util.consumeWhitespace($ws, $ctx1)
+ $rhsSnippet
+ """
+ }
+
+ q"""
+ $ctx match{ case $ctx1 =>
+ $repeater match {case $repeater1 =>
+ var $originalCut = $ctx1.cut
+ val $acc = $repeater1.initial
+ @_root_.scala.annotation.tailrec
+ def $rec($startIndex: _root_.scala.Int,
+ $count: _root_.scala.Int,
+ $lastAgg: _root_.fastparse.internal.Msgs): _root_.fastparse.P[${c.weakTypeOf[V]}] = {
+ $ctx1.cut = $minCut
+ ${c.prefix}.parse0()
+
+ val $parsedMsg = $ctx1.shortMsg
+ val $parsedAgg = $ctx1.aggregateMsgs
+ $originalCut |= $ctx1.cut
+ if (!$ctx1.isSuccess) {
+ val res =
+ if ($ctx1.cut) $ctx1.asInstanceOf[_root_.fastparse.P[${c.weakTypeOf[V]}]]
+ else $endSnippet
+
+ if ($ctx1.verboseFailures) _root_.fastparse.internal.Util.reportParseMsgInRep(
+ $startIndex,
+ ${min.getOrElse(q"0")},
+ $ctx1,
+ _root_.fastparse.internal.Msgs.empty,
+ $parsedMsg,
+ $lastAgg,
+ true
+ )
+
+ res
+ }else {
+ val $beforeSepIndex = $ctx1.index
+ $repeater1.accumulate($ctx1.successValue.asInstanceOf[${c.weakTypeOf[T]}], $acc)
+ $ctx1.cut = false
+ $wsSnippet
+ }
+ }
+ $rec($ctx1.index, 0, null)
+ }
+ }
+ """
+ }
+
+ def repXMacro1[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
+ (repeater: c.Tree,
+ ctx: c.Tree): c.Tree = {
+ MacroRepImpls.repXMacro0[T, V](c)(None, None)(repeater, ctx)
+ }
+
+ def repXMacro2[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
+ (min: c.Tree)
+ (repeater: c.Tree,
+ ctx: c.Tree): c.Tree = {
+ MacroRepImpls.repXMacro0[T, V](c)(None, Some(min))(repeater, ctx)
+ }
+
+ def repXMacro1ws[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
+ (repeater: c.Tree,
+ whitespace: c.Tree,
+ ctx: c.Tree): c.Tree = {
+ MacroRepImpls.repXMacro0[T, V](c)(Some(whitespace), None)(repeater, ctx)
+ }
+
+ def repXMacro2ws[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
+ (min: c.Tree)
+ (repeater: c.Tree,
+ whitespace: c.Tree,
+ ctx: c.Tree): c.Tree = {
+ MacroRepImpls.repXMacro0[T, V](c)(Some(whitespace), Some(min))(repeater, ctx)
+ }
+}
diff --git a/fastparse/src-2/fastparse/internal/RepImpls.scala b/fastparse/src-2/fastparse/internal/RepImpls.scala
new file mode 100644
index 00000000..cf6d9819
--- /dev/null
+++ b/fastparse/src-2/fastparse/internal/RepImpls.scala
@@ -0,0 +1,277 @@
+package fastparse.internal
+
+
+import fastparse.{Implicits, NoWhitespace, ParsingRun}
+import Util.{reportParseMsgInRep, reportParseMsgPostSep}
+import scala.annotation.tailrec
+
+
+class RepImpls[T](val parse0: () => ParsingRun[T]) extends AnyVal{
+ def repX[V](min: Int = 0,
+ sep: => ParsingRun[_] = null,
+ max: Int = Int.MaxValue,
+ exactly: Int = -1)
+ (implicit repeater: Implicits.Repeater[T, V],
+ ctx: ParsingRun[Any]): ParsingRun[V] = {
+
+ val acc = repeater.initial
+ val actualMin = if(exactly == -1) min else exactly
+ val actualMax = if(exactly == -1) max else exactly
+
+ def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
+ if (count < actualMin) ctx.augmentFailure(index, endCut)
+ else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
+ }
+ @tailrec def rec(startIndex: Int,
+ count: Int,
+ precut: Boolean,
+ outerCut: Boolean,
+ sepMsg: Msgs,
+ lastAgg: Msgs): ParsingRun[V] = {
+ ctx.cut = precut | (count < min && outerCut)
+ if (count == 0 && actualMax == 0) ctx.freshSuccess(repeater.result(acc), startIndex)
+ else {
+ val verboseFailures = ctx.verboseFailures
+ parse0()
+ val parsedMsg = ctx.shortMsg
+ val parsedAgg = ctx.aggregateMsgs
+ val postCut = ctx.cut
+ if (!ctx.isSuccess) {
+ val res =
+ if (postCut) ctx.asInstanceOf[ParsingRun[V]]
+ else end(startIndex, startIndex, count, outerCut | postCut)
+
+ if (verboseFailures) reportParseMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut || postCut)
+ res
+ }else {
+ val beforeSepIndex = ctx.index
+ repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
+ val nextCount = count + 1
+ if (nextCount == actualMax) {
+ val res = end(beforeSepIndex, beforeSepIndex, nextCount, outerCut | postCut)
+ if (verboseFailures) ctx.reportTerminalMsg(startIndex, () => parsedMsg.render + ".repX" + (if(min == 0) "" else s"($min)"))
+ res
+ }
+ else {
+ ctx.cut = false
+ val sep1 = sep
+ val sepCut = ctx.cut
+ val endCut = outerCut | postCut | sepCut
+ if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
+ else {
+ if (ctx.isSuccess) rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortMsg, parsedAgg)
+ else {
+ val res =
+ if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
+ else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
+ if (verboseFailures) reportParseMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
+ res
+ }
+ }
+ }
+ }
+ }
+ }
+ rec(ctx.index, 0, false, ctx.cut, null, null)
+ }
+
+ def repX[V](min: Int,
+ sep: => ParsingRun[_])
+ (implicit repeater: Implicits.Repeater[T, V],
+ ctx: ParsingRun[Any]): ParsingRun[V] = {
+
+ val acc = repeater.initial
+
+ def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
+ if (count < min) ctx.augmentFailure(index, endCut)
+ else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
+ }
+ @tailrec def rec(startIndex: Int,
+ count: Int,
+ precut: Boolean,
+ outerCut: Boolean,
+ sepMsg: Msgs,
+ lastAgg: Msgs): ParsingRun[V] = {
+ ctx.cut = precut | (count < min && outerCut)
+ parse0()
+ val parsedMsg = ctx.shortMsg
+ val parsedAgg = ctx.aggregateMsgs
+ val postCut = ctx.cut
+ val verboseFailures = ctx.verboseFailures
+ if (!ctx.isSuccess) {
+ val res =
+ if (postCut) ctx.asInstanceOf[ParsingRun[V]]
+ else end(startIndex, startIndex, count, outerCut | postCut)
+ if (verboseFailures) reportParseMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut || postCut)
+ res
+ }else {
+ val beforeSepIndex = ctx.index
+ repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
+ val nextCount = count + 1
+ ctx.cut = false
+ val sep1 = sep
+ val sepCut = ctx.cut
+ val endCut = outerCut | postCut | sepCut
+ if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
+ else {
+ if (ctx.isSuccess) rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortMsg, parsedAgg)
+ else {
+ val res =
+ if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
+ else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
+ if (verboseFailures) reportParseMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
+ res
+ }
+ }
+ }
+ }
+ rec(ctx.index, 0, false, ctx.cut, null, null)
+ }
+ def rep[V](min: Int = 0,
+ sep: => ParsingRun[_] = null,
+ max: Int = Int.MaxValue,
+ exactly: Int = -1)
+ (implicit repeater: Implicits.Repeater[T, V],
+ whitespace: fastparse.Whitespace,
+ ctx: ParsingRun[Any]): ParsingRun[V] = {
+
+ val acc = repeater.initial
+ val actualMin = if(exactly == -1) min else exactly
+ val actualMax = if(exactly == -1) max else exactly
+
+ def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
+ if (count < actualMin) ctx.augmentFailure(index, endCut)
+ else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
+ }
+ @tailrec def rec(startIndex: Int,
+ count: Int,
+ precut: Boolean,
+ outerCut: Boolean,
+ sepMsg: Msgs,
+ lastAgg: Msgs): ParsingRun[V] = {
+
+ ctx.cut = precut | (count < min && outerCut)
+
+ if (count == 0 && actualMax == 0) ctx.freshSuccess(repeater.result(acc), startIndex)
+ else {
+ parse0()
+ val parsedMsg = ctx.shortMsg
+ val parsedAgg = ctx.aggregateMsgs
+ val postCut = ctx.cut
+ val verboseFailures = ctx.verboseFailures
+ if (!ctx.isSuccess) {
+ val res =
+ if (postCut) ctx.asInstanceOf[ParsingRun[V]]
+ else end(startIndex, startIndex, count, outerCut | postCut)
+ if (verboseFailures) reportParseMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut || postCut)
+ res
+ } else {
+ val beforeSepIndex = ctx.index
+ repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
+ val nextCount = count + 1
+ if (nextCount == actualMax) {
+ val res = end(beforeSepIndex, beforeSepIndex, nextCount, outerCut | postCut)
+ if (verboseFailures) ctx.reportTerminalMsg(startIndex, () => parsedMsg.render + ".rep" + (if(min == 0) "" else s"($min)"))
+ res
+ }
+ else if (!consumeWhitespace(whitespace, ctx, false)) ctx.asInstanceOf[ParsingRun[Nothing]]
+ else {
+ ctx.cut = false
+ val sep1 = sep
+ val sepCut = ctx.cut
+ val endCut = outerCut | postCut | sepCut
+ if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
+ else if (ctx.isSuccess) {
+ val sepMsg = ctx.shortMsg
+ if (!consumeWhitespace(whitespace, ctx, sepCut)) ctx.asInstanceOf[ParsingRun[Nothing]]
+ else {
+ rec(beforeSepIndex, nextCount, sepCut, endCut, sepMsg, parsedAgg)
+ }
+ }
+ else {
+ val res =
+ if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
+ else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
+
+ if (verboseFailures) reportParseMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
+ res
+ }
+ }
+ }
+ }
+ }
+ rec(ctx.index, 0, false, ctx.cut, null, null)
+ }
+
+ def rep[V](min: Int,
+ sep: => ParsingRun[_])
+ (implicit repeater: Implicits.Repeater[T, V],
+ whitespace: fastparse.Whitespace,
+ ctx: ParsingRun[Any]): ParsingRun[V] = {
+
+ val acc = repeater.initial
+
+ def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
+ if (count < min) ctx.augmentFailure(index, endCut)
+ else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
+ }
+ @tailrec def rec(startIndex: Int,
+ count: Int,
+ precut: Boolean,
+ outerCut: Boolean,
+ sepMsg: Msgs,
+ lastAgg: Msgs): ParsingRun[V] = {
+
+ ctx.cut = precut | (count < min && outerCut)
+ parse0()
+ val parsedMsg = ctx.shortMsg
+ val parsedAgg = ctx.aggregateMsgs
+ val postCut = ctx.cut
+ val verboseFailures = ctx.verboseFailures
+ if (!ctx.isSuccess){
+ val res =
+ if (postCut) ctx.asInstanceOf[ParsingRun[V]]
+ else end(startIndex, startIndex, count, outerCut | postCut)
+ if (verboseFailures) reportParseMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut || postCut)
+ res
+ }else{
+ val beforeSepIndex = ctx.index
+ repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
+ val nextCount = count + 1
+ if (!consumeWhitespace(whitespace, ctx, false)) ctx.asInstanceOf[ParsingRun[Nothing]]
+ else {
+ ctx.cut = false
+ val sep1 = sep
+ val sepCut = ctx.cut
+ val endCut = outerCut | postCut | sepCut
+ if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
+ else if (ctx.isSuccess) {
+ val sepMsg = ctx.shortMsg
+ if (!consumeWhitespace(whitespace, ctx, sepCut)) ctx.asInstanceOf[ParsingRun[Nothing]]
+ else {
+ rec(beforeSepIndex, nextCount, sepCut, endCut, sepMsg, parsedAgg)
+ }
+ }
+ else {
+ val res =
+ if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
+ else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
+
+ if (verboseFailures) reportParseMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
+ res
+ }
+ }
+ }
+ }
+ rec(ctx.index, 0, false, ctx.cut, null, null)
+ }
+
+ private def consumeWhitespace(whitespace: fastparse.Whitespace, ctx: ParsingRun[_], extraCut: Boolean) = {
+ if (whitespace eq NoWhitespace.noWhitespaceImplicit) true
+ else {
+ Util.consumeWhitespace(whitespace, ctx)
+ if (!ctx.isSuccess && (extraCut || ctx.cut)) false
+ else true
+ }
+ }
+}
diff --git a/fastparse/src/fastparse/package.scala b/fastparse/src-2/fastparse/package.scala
similarity index 55%
rename from fastparse/src/fastparse/package.scala
rename to fastparse/src-2/fastparse/package.scala
index 952154ab..3b3633dd 100644
--- a/fastparse/src/fastparse/package.scala
+++ b/fastparse/src-2/fastparse/package.scala
@@ -1,112 +1,36 @@
import fastparse.internal._
-import fastparse.internal.{Instrument, Logger}
-import language.experimental.macros
+import scala.language.experimental.macros
+import scala.language.implicitConversions
-package object fastparse {
+package object fastparse extends fastparse.SharedPackageDefs {
/**
- * Parses the given input [[ParserInput]] using the given parser and returns
- * a [[Parsed]] result containing the success value or failure metadata.
- *
- * Can take either a [[String]], an [[Iterator]] or strings or a
- * [[fastparse.ParserInput]] object
- *
- * @param input the input to parse
- *
- * @param parser the parser method to use to parse the input
- *
- * @param verboseFailures enable this to show a more detailed error message
- * if a parser fails, without needing to run
- * `.traced.trace`. Defaults to `false` as it slows
- * down parsing considerably
- *
- * @param startIndex where in the input to start parsing
- *
- * @param instrument Callbacks that get run before and after every named
- * `P(...)` parser
- *
- *
+ * Delimits a named parser. This name will appear in the parser failure
+ * messages and stack traces, and by default is taken from the name of the
+ * enclosing method.
*/
- def parse[T](input: ParserInputSource,
- parser: P[_] => P[T],
- verboseFailures: Boolean = false,
- startIndex: Int = 0,
- instrument: Instrument = null): Parsed[T] = {
- Parsed.fromParsingRun(input.parseThrough(parseInputRaw[T](
- _,
- parser,
- verboseFailures,
- startIndex,
- -1,
- instrument
- )))
- }
-
- def parseInputRaw[T](input: ParserInput,
- parser: P[_] => P[T],
- verboseFailures: Boolean = false,
- startIndex: Int = 0,
- traceIndex: Int = -1,
- instrument: Instrument = null,
- enableLogging: Boolean = true): ParsingRun[T] = parser(new ParsingRun(
- input = input,
- startIndex = startIndex,
- originalParser = parser,
- traceIndex = traceIndex,
- instrument = instrument,
- failureTerminalAggregate = Msgs.empty,
- failureGroupAggregate = Msgs.empty,
- shortParserMsg = Msgs.empty,
- lastFailureMsg = null,
- failureStack = List.empty,
- isSuccess = true,
- logDepth = if (enableLogging) 0 else -1,
- startIndex,
- true,
- (),
- verboseFailures,
- false,
- collection.mutable.Map.empty
- ))
-
+ def P[T](t: P[T])(implicit name: sourcecode.Name, ctx: P[_]): P[T] = macro MacroImpls.pMacro[T]
/**
- * Shorthand alias for [[ParsingRun]]; this is both the parameter-to and the
- * return type for all Fastparse's parsing methods.
- *
- * @tparam T is the type of the value returned by the parser method on success
- */
+ * Shorthand alias for [[ParsingRun]]; this is both the parameter-to and the
+ * return type for all Fastparse's parsing methods.
+ *
+ * @tparam T is the type of the value returned by the parser method on success
+ */
type P[+T] = ParsingRun[T]
+
val P = ParsingRun
- /**
- * Shorthand for `P[Unit]`
- */
- type P0 = P[Unit]
- /**
- * Delimits a named parser. This name will appear in the parser failure
- * messages and stack traces, and by default is taken from the name of the
- * enclosing method.
- */
- def P[T](t: P[T])(implicit name: sourcecode.Name, ctx: P[_]): P[T] = macro MacroImpls.pMacro[T]
+ implicit def DiscardParserValue(p: P[_]): P[Unit] = {
+ p.successValue = ()
+ p.asInstanceOf[P[Unit]]
+ }
/**
* Parses an exact string value.
*/
implicit def LiteralStr(s: String)(implicit ctx: P[Any]): P[Unit] = macro MacroImpls.literalStrMacro
- /**
- * Parses a string value case-insensitively
- */
- def IgnoreCase(s: String)(implicit ctx: P[Any]): P[Unit] = {
- val startIndex = ctx.index
- val res =
- if (Util.startsWithIgnoreCase(ctx.input, s, ctx.index)) ctx.freshSuccessUnit(ctx.index + s.length)
- else ctx.freshFailure().asInstanceOf[P[Unit]]
- if (ctx.verboseFailures) ctx.aggregateTerminal(startIndex, () => Util.literalize(s))
- res
- }
-
/**
* Provides [[EagerOps]] extension methods on [[String]]
*/
@@ -131,7 +55,7 @@ package object fastparse {
*/
def ~/[V, R](other: P[V])
(implicit s: Implicits.Sequencer[T, V, R],
- whitespace: P[Any] => P[Unit],
+ whitespace: Whitespace,
ctx: P[_]): P[R] = macro MacroImpls.parsedSequenceCut[T, V, R]
/**
@@ -141,7 +65,7 @@ package object fastparse {
*/
def ~[V, R](other: P[V])
(implicit s: Implicits.Sequencer[T, V, R],
- whitespace: P[Any] => P[Unit],
+ whitespace: Whitespace,
ctx: P[_]): P[R] = macro MacroImpls.parsedSequence[T, V, R]
/**
@@ -177,6 +101,15 @@ package object fastparse {
*/
def filter(f: T => Boolean)
(implicit ctx: P[Any]): P[T] = macro MacroImpls.filterMacro[T]
+
+ /**
+ * Transforms the result of this parser using the given partial function,
+ * failing the parse if the partial function is not defined on the result
+     * of the current parser. This is equivalent to
+ * `.filter(f.isDefinedAt).map(f.apply)`
+ */
+ def collect[V](f: PartialFunction[T, V]): P[V] = macro MacroImpls.collectMacro[T, V]
+
/**
* Transforms the result of this parser using the given function into a
* new parser which is applied (after whitespace). Useful for doing
@@ -185,7 +118,7 @@ package object fastparse {
* you next want to parse an array, dictionary or string.
*/
def flatMap[V](f: T => P[V])
- (implicit whitespace: P[Any] => P[Unit]): P[V] = macro MacroImpls.flatMapMacro[T, V]
+ (implicit whitespace: Whitespace): P[V] = macro MacroImpls.flatMapMacro[T, V]
/**
* Transforms the result of this parser using the given function into a
* new parser which is applied (without consuming whitespace). Useful for
@@ -226,7 +159,7 @@ package object fastparse {
/**
* Provides [[ByNameOps]] extension methods on [[P]]s
*/
- implicit def ByNameOps[T](parse0: => P[T]) = new ByNameOps(() => parse0)
+ implicit def ByNameOps[T](parse0: => P[T]): ByNameOps[T] = new ByNameOps(() => parse0)
class ByNameOps[T](val parse0: () => P[T]) extends AnyVal{
/**
* Repeat operator; runs the LHS parser 0 or more times separated by the
@@ -235,7 +168,7 @@ package object fastparse {
* index of the last run.
*/
def rep[V](implicit repeater: Implicits.Repeater[T, V],
- whitespace: P[_] => P[Unit],
+ whitespace: Whitespace,
ctx: P[Any]): P[V] = macro MacroRepImpls.repXMacro1ws[T, V]
/**
* Repeat operator; runs the LHS parser at least `min` to at most `max`
@@ -251,7 +184,7 @@ package object fastparse {
max: Int = Int.MaxValue,
exactly: Int = -1)
(implicit repeater: Implicits.Repeater[T, V],
- whitespace: P[_] => P[Unit],
+ whitespace: Whitespace,
ctx: P[Any]): P[V] =
new RepImpls[T](parse0).rep[V](min, sep, max, exactly)
@@ -264,7 +197,7 @@ package object fastparse {
def rep[V](min: Int,
sep: => P[_])
(implicit repeater: Implicits.Repeater[T, V],
- whitespace: P[_] => P[Unit],
+ whitespace: Whitespace,
ctx: P[Any]): P[V] =
new RepImpls[T](parse0).rep[V](min, sep)
@@ -276,7 +209,7 @@ package object fastparse {
*/
def rep[V](min: Int)
(implicit repeater: Implicits.Repeater[T, V],
- whitespace: P[_] => P[Unit],
+ whitespace: Whitespace,
ctx: P[Any]): P[V] =
macro MacroRepImpls.repXMacro2ws[T, V]
@@ -336,19 +269,7 @@ package object fastparse {
* parsers end up in the failure traces or failure stack to be displayed
* to the user.
*/
- def opaque(msg: String)(implicit ctx: P[Any]): P[T] = {
- val oldIndex = ctx.index
-
- val res = parse0()
-
- val res2 =
- if (res.isSuccess) ctx.freshSuccess(ctx.successValue)
- else ctx.freshFailure(oldIndex)
-
- if (ctx.verboseFailures) ctx.aggregateTerminal(oldIndex, () => msg)
-
- res2.asInstanceOf[P[T]]
- }
+ def opaque(msg: String)(implicit ctx: P[Any]): P[T] = SharedPackageDefs.opaque(parse0, msg)
/**
* Negative lookahead operator: succeeds if the wrapped parser fails and
@@ -360,106 +281,36 @@ package object fastparse {
val startCut = ctx.cut
val oldNoCut = ctx.noDropBuffer
ctx.noDropBuffer = true
- val startTerminals = ctx.failureTerminalAggregate
+ val startTerminals = ctx.terminalMsgs
parse0()
ctx.noDropBuffer = oldNoCut
- val msg = ctx.shortParserMsg
val res =
if (ctx.isSuccess) ctx.freshFailure(startPos)
else ctx.freshSuccessUnit(startPos)
if (ctx.verboseFailures) {
- ctx.failureTerminalAggregate = startTerminals
- ctx.failureGroupAggregate = Msgs.empty
- ctx.setMsg(startPos, () => "!" + msg.render)
+ // Unlike most other data on `ctx`, `terminalMsgs` is normally
+ // append-only. Thus when we're inside the unary_! expression, it
+ // continually appends to `terminalMsgs` sub-parsers that could
+ // have succeeded within it, but are irrelevant to the user because
+ // we *want* the contents of the unary_! to fail! Thus, we reset
+ // `terminalMsgs` once we exit the unary_!, to ensure these do not
+ // end up in error messages
+ ctx.terminalMsgs = startTerminals
+ ctx.reportTerminalMsg(startPos, Msgs.empty)
}
res.cut = startCut
res
}
-
}
- /**
- * Provides logging-related [[LogByNameOps]] implicits on [[String]].
- */
- implicit def LogOpsStr(parse0: String)
- (implicit ctx: P[Any]): fastparse.LogByNameOps[Unit] =
- macro MacroImpls.logOpsStrMacro
- /**
- * Separated out from [[ByNameOps]] because `.log` isn't easy to make an
- * [[AnyVal]] extension method, but it doesn't matter since `.log` calls
- * are only for use in development while the other [[ByNameOps]] operators
- * are more performance-sensitive
- */
- implicit class LogByNameOps[T](parse0: => P[T])(implicit ctx: P[_]) {
- /**
- * Wraps a parser to log when it succeeds and fails, and at what index.
- * Useful for seeing what is going on within your parser. Nicely indents
- * the logs for easy reading
- */
- def log(implicit name: sourcecode.Name, logger: Logger = Logger.stdout): P[T] = {
- if (ctx.logDepth == -1) parse0
- else {
- val msg = name.value
- val output = logger.f
- val indent = " " * ctx.logDepth
-
- output(s"$indent+$msg:${ctx.input.prettyIndex(ctx.index)}${if (ctx.cut) ", cut" else ""}")
- val depth = ctx.logDepth
- ctx.logDepth += 1
- val startIndex = ctx.index
- val oldverboseFailures = ctx.verboseFailures
- ctx.verboseFailures = true
- parse0
- ctx.verboseFailures = oldverboseFailures
- ctx.logDepth = depth
- val prettyIndex = ctx.input.prettyIndex(ctx.index)
- val strRes = if (ctx.isSuccess) {
- s"Success($prettyIndex${if (ctx.cut) ", cut" else ""})"
- } else {
- val trace = Parsed.Failure.formatStack(
- ctx.input,
- ctx.failureStack ++ Seq(ctx.lastFailureMsg.render -> ctx.index)
- )
- val trailing = ctx.input match {
- case c: IndexedParserInput => Parsed.Failure.formatTrailing(ctx.input, startIndex)
- case _ => ""
- }
- s"Failure($trace ...$trailing${if (ctx.cut) ", cut" else ""})"
- }
- output(s"$indent-$msg:${ctx.input.prettyIndex(startIndex)}:$strRes")
- // output(s"$indent-$msg:${repr.prettyIndex(cfg.input, index)}:$strRes")
- ctx.asInstanceOf[P[T]]
- }
- }
-
- /**
- * Prints the given message, nicely indented, after the wrapped parser finishes
- */
- def logAfter(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = {
- val indent = " " * ctx.logDepth
- val res = parse0
- if (ctx.logDepth != -1) logger.f(indent + msg)
- res
- }
-
- /**
- * Prints the given message, nicely indented, before the wrapped parser starts
- */
- def logBefore(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = {
- val indent = " " * ctx.logDepth
- if (ctx.logDepth != -1) logger.f(indent + msg)
- val res = parse0
- res
- }
- }
/**
- * Positive lookahead operator: succeeds if the wrapped parser succeeds and
- * fails if the wrapped parser fails, but in all cases consumes zero
- * characters.
- */
+ * Positive lookahead operator: succeeds if the wrapped parser succeeds and
+ * fails if the wrapped parser fails, but in all cases consumes zero
+ * characters.
+ */
def &(parse: => P[_])(implicit ctx: P[_]): P[Unit] = {
val startPos = ctx.index
@@ -468,136 +319,39 @@ package object fastparse {
ctx.noDropBuffer = true
parse
ctx.noDropBuffer = oldNoCut
- val msg = ctx.shortParserMsg
val res =
if (ctx.isSuccess) ctx.freshSuccessUnit(startPos)
else ctx.asInstanceOf[P[Unit]]
- if (ctx.verboseFailures) {
- ctx.failureGroupAggregate = Msgs.empty
- ctx.setMsg(startPos, () =>
- msg match{
- case Seq(x) => s"&(${msg.render})"
- case xs => s"&${msg.render}"
- }
- )
- }
- res.cut = startCut
- res
- }
-
- /**
- * Parser that is only successful at the end of the input. Useful to ensure
- * your parser parses the whole file.
- */
- def End(implicit ctx: P[_]): P[Unit] = {
- val startIndex = ctx.index
- val res =
- if (!ctx.input.isReachable(startIndex)) ctx.freshSuccessUnit()
- else ctx.freshFailure().asInstanceOf[P[Unit]]
- if (ctx.verboseFailures) ctx.aggregateTerminal(startIndex, () => "end-of-input")
- res
-
- }
- /**
- * Parser that is only successful at the start of the input.
- */
- def Start(implicit ctx: P[_]): P[Unit] = {
- val startIndex = ctx.index
- val res =
- if (startIndex == 0) ctx.freshSuccessUnit()
- else ctx.freshFailure().asInstanceOf[P[Unit]]
- if (ctx.verboseFailures) ctx.aggregateTerminal(startIndex, () => "start-of-input")
- res
- }
-
- /**
- * Wraps a parser and ensures that none of the parsers within it leave
- * failure traces in failureTerminalAggregate, though unlike [[ByNameOps.opaque]]
- * if there is a failure *within* the wrapped parser the failure's location
- * and error message will still be shown
- *
- * Useful for wrapping things like whitespace, code-comment, etc. parsers
- * which can be applied everywhere and are not useful to display to the user
- * as part of the error message.
- */
- def NoTrace[T](p: => P[T])(implicit ctx: P[_]): P[T] = {
-
- val res = p
- if (ctx.verboseFailures) {
- ctx.failureGroupAggregate = Msgs.empty
- ctx.shortParserMsg = Msgs.empty
- }
- res
- }
-
- /**
- * No-op parser that always succeeds, consuming zero characters
- */
- def Pass(implicit ctx: P[_]): P[Unit] = {
- val res = ctx.freshSuccessUnit()
- if (ctx.verboseFailures) ctx.setMsg(ctx.index, () => "Pass")
- res
- }
- /**
- * No-op parser that always succeeds with the given value, consuming zero
- * characters
- */
- def Pass[T](v: T)(implicit ctx: P[_]): P[T] = {
- val res = ctx.freshSuccess(v)
- if (ctx.verboseFailures) ctx.setMsg(ctx.index, () => "Pass")
+ res.cut = startCut
res
}
-
+
/**
- * No-op parser that always fails, consuming zero characters
+ * Provides logging-related [[LogByNameOps]] implicits on [[String]].
*/
- def Fail(implicit ctx: P[_]): P[Nothing] = {
- val res = ctx.freshFailure()
- if (ctx.verboseFailures) ctx.setMsg(ctx.index, () => "fail")
- res
- }
-
+ implicit def LogOpsStr(parse0: String)
+ (implicit ctx: P[Any]): fastparse.LogByNameOps[Unit] =
+ macro MacroImpls.logOpsStrMacro
/**
- * Parser that always succeeds and returns the current index into the parsed
- * input. Useful for e.g. capturing source locations so when downstream
- * valiation raises errors you can tell the user where in the input the
- * error originated from
+ * Separated out from [[ByNameOps]] because `.log` isn't easy to make an
+ * [[AnyVal]] extension method, but it doesn't matter since `.log` calls
+ * are only for use in development while the other [[ByNameOps]] operators
+ * are more performance-sensitive
*/
- def Index(implicit ctx: P[_]): P[Int] = {
- val res = ctx.freshSuccess(ctx.index)
- if (ctx.verboseFailures) ctx.setMsg(ctx.index, () => "Index")
- res
- }
+ implicit class LogByNameOps[T](parse0: => P[T])(implicit ctx: P[_]) {
+ /** Wraps a parser to log when it succeeds and fails, and at what index.
+ * Useful for seeing what is going on within your parser. Nicely indents
+ * the logs for easy reading
+ */
+ def log(implicit name: sourcecode.Name, logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.log(() => parse0)
- /**
- * Parses a single character, any character, as long as there is at least
- * one character for it to parse (i.e. the input isn't at its end)
- */
- def AnyChar(implicit ctx: P[_]): P[Unit] = {
- val startIndex = ctx.index
- val res =
- if (!ctx.input.isReachable(ctx.index)) ctx.freshFailure().asInstanceOf[P[Unit]]
- else ctx.freshSuccessUnit(ctx.index + 1)
- if (ctx.verboseFailures) ctx.aggregateTerminal(startIndex, () => "any-character")
- res
- }
+ /** Prints the given message, nicely indented, after the wrapped parser finishes */
+ def logAfter(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.logAfter(() => parse0, msg)
- /**
- * Like [[AnyChar]], but returns the single character it parses. Useful
- * together with [[EagerOps.flatMapX]] to provide one-character-lookahead
- * style parsing: [[SingleChar]] consumes the single character, and then
- * [[EagerOps.flatMapX]] can `match` on that single character and decide
- * which downstream parser you wish to invoke
- */
- def SingleChar(implicit ctx: P[_]): P[Char] = {
- val startIndex = ctx.index
- val res =
- if (!ctx.input.isReachable(ctx.index)) ctx.freshFailure().asInstanceOf[P[Char]]
- else ctx.freshSuccess(ctx.input(ctx.index), ctx.index + 1)
- if (ctx.verboseFailures) ctx.aggregateTerminal(startIndex, () => "any-character")
- res
+ /** Prints the given message, nicely indented, before the wrapped parser starts */
+ def logBefore(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.logBefore(() => parse0, msg)
}
/**
@@ -637,25 +391,6 @@ package object fastparse {
def CharsWhile(p: Char => Boolean, min: Int)
(implicit ctx: P[_]): P[Unit] = macro MacroImpls.charsWhileMacro
-
- /**
- * Allows backtracking regardless of whether cuts happen within the wrapped
- * parser; this is useful for re-using an existing parser with cuts within
- * it, in other parts of your grammar where backtracking is necessary and
- * unavoidable.
- */
- def NoCut[T](parse: => P[T])(implicit ctx: P[_]): P[T] = {
- val cut = ctx.cut
- val oldNoCut = ctx.noDropBuffer
- ctx.noDropBuffer = true
- val res = parse
- ctx.noDropBuffer = oldNoCut
-
- res.cut = cut
- res
- }
-
-
/**
* Efficiently parses any one of the given [[String]]s; more efficient than
* chaining [[EagerOps.|]] together
diff --git a/fastparse/src-3/fastparse/internal/MacroInlineImpls.scala b/fastparse/src-3/fastparse/internal/MacroInlineImpls.scala
new file mode 100644
index 00000000..d5b449cf
--- /dev/null
+++ b/fastparse/src-3/fastparse/internal/MacroInlineImpls.scala
@@ -0,0 +1,548 @@
+package fastparse.internal
+
+import fastparse.{Implicits, ParserInput, ParsingRun}
+
+import scala.quoted.*
+
+object MacroInlineImpls {
+
+ def literalStrMacro(s: Expr[String])(ctx: Expr[ParsingRun[Any]])(using quotes: Quotes): Expr[ParsingRun[Unit]] = {
+ import quotes.reflect.*
+
+ s.value match {
+ case Some(x) =>
+ val literalized = Expr[String](Util.literalize(x))
+ if (x.length == 0) '{ $ctx.freshSuccessUnit() }
+ else if (x.length == 1) {
+ val charLiteral = Expr[Char](x.charAt(0))
+ '{
+
+ $ctx match {
+ case ctx1 =>
+ val input = ctx1.input
+ val index = ctx1.index
+ val res =
+ if (input.isReachable(index) && input(index) == $charLiteral) {
+ ctx1.freshSuccessUnit(index + 1)
+ } else {
+ ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ }
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => $literalized)
+ res
+ }
+
+ }
+ } else {
+ val xLength = Expr[Int](x.length)
+ '{
+ $ctx match {
+ case ctx1 =>
+ val index = ctx1.index
+ val end = index + $xLength
+ val input = ctx1.input
+ val res =
+ if (input.isReachable(end - 1) && ${
+ x.zipWithIndex
+ .map { case (char, i) => '{ input.apply(index + ${ Expr(i) }) == ${ Expr(char) } } }
+ .reduce[Expr[Boolean]] { case (l, r) => '{ $l && $r } }
+ }) {
+ ctx1.freshSuccessUnit(end)
+ } else {
+ ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ }
+ if (ctx1.verboseFailures) {
+ ctx1.reportTerminalMsg(index, () => $literalized)
+ }
+ res
+
+ }
+ }
+ }
+ case None =>
+ '{
+ val s1 = $s
+ $ctx match {
+ case ctx1 =>
+ val index = ctx1.index
+ val res =
+ if (Util.startsWith(ctx1.input, s1, index)) ctx1.freshSuccessUnit(index + s1.length)
+ else ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => Util.literalize(s1))
+ res
+ }
+ }
+ }
+
+ }
+
+ inline def filterInline[T](inline lhs: ParsingRun[_])(f: T => Boolean)(ctx1: ParsingRun[_]): ParsingRun[T] = {
+ val startIndex = ctx1.index
+ lhs
+ val res: ParsingRun[T] =
+ if (!ctx1.isSuccess) ctx1.asInstanceOf[ParsingRun[T]]
+ else if (f(ctx1.successValue.asInstanceOf[T])) ctx1.asInstanceOf[ParsingRun[T]]
+ else ctx1.freshFailure().asInstanceOf[ParsingRun[T]]
+
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(startIndex, () => "filter")
+ res
+ }
+
+ inline def pInline[T](inline t: ParsingRun[T])(
+ name: sourcecode.Name,
+ ctx1: ParsingRun[_]
+ ): ParsingRun[T] = {
+
+ val startIndex = ctx1.index
+ val instrument = ctx1.instrument != null
+ if (instrument) {
+ ctx1.instrument.beforeParse(name.value, startIndex)
+ }
+ val ctx0 = t
+
+ if (instrument) {
+ ctx1.instrument.afterParse(name.value, ctx0.index, ctx0.isSuccess)
+ }
+ if (ctx0.verboseFailures) {
+ ctx0.reportAggregateMsg(
+ () => name.value,
+ forceAggregate = startIndex < ctx0.traceIndex
+ )
+ if (!ctx0.isSuccess) {
+ ctx0.failureStack = (name.value -> startIndex) :: ctx0.failureStack
+ }
+ }
+ ctx0
+ }
+
+ inline def cutInline[T](inline lhs: ParsingRun[_])(ctx0: ParsingRun[_]): ParsingRun[T] = {
+ val startIndex = ctx0.index
+ val ctx1 = lhs
+ val index = ctx1.index
+ if (!ctx1.isSuccess) ctx1.augmentFailure(index)
+ else {
+ val progress = index > startIndex
+ if (progress && ctx1.checkForDrop()) ctx1.input.dropBuffer(index)
+
+ ctx1.freshSuccess(ctx1.successValue, cut = ctx1.cut | progress).asInstanceOf[ParsingRun[T]]
+ }
+ }
+
+ def parsedSequence0[T: Type, V: Type, R: Type](lhs: Expr[ParsingRun[T]], rhs: Expr[ParsingRun[V]], cut: Boolean)(
+ s: Expr[Implicits.Sequencer[T, V, R]],
+ whitespace: Null | Expr[fastparse.Whitespace],
+ ctx: Expr[ParsingRun[_]]
+ )(using quotes: Quotes): Expr[ParsingRun[R]] = {
+ import quotes.reflect.*
+
+ def setCut(ctx1: Expr[ParsingRun[Any]]): Expr[Unit] = if cut then '{ $ctx1.cut = true }
+ else '{}
+
+ '{
+ {
+ val ctx1 = $ctx
+ val s1 = $s
+ val preLhsIndex = ctx1.index
+ val input = ctx1.input
+ $lhs
+ if (!ctx1.isSuccess) ctx1
+ else {
+ val postLhsIndex = ctx1.index
+ val lhsAggregate = ctx1.aggregateMsgs
+ val lhsMsg = ctx1.shortMsg
+ ${ setCut('{ ctx1 }) }
+
+ if (postLhsIndex > preLhsIndex && ctx1.checkForDrop()) input.dropBuffer(postLhsIndex)
+
+ val lhsValue = ctx1.successValue
+ ${
+
+ val rhsSnippet = '{
+ if (!ctx1.isSuccess && ctx1.cut) ctx1
+ else {
+ val preRhsIndex = ctx1.index
+ $rhs
+ val rhsAggregate = ctx1.aggregateMsgs
+ val rhsMsg = ctx1.shortMsg
+ val res =
+ if (!ctx1.isSuccess) {
+ ${ setCut('{ ctx1 }) }
+ ctx1
+ } else {
+ val postRhsIndex = ctx1.index
+
+ val rhsMadeProgress = postRhsIndex > preRhsIndex
+ val nextIndex =
+ if (!rhsMadeProgress && input.isReachable(postRhsIndex)) postLhsIndex
+ else postRhsIndex
+
+ if (rhsMadeProgress && ctx1.checkForDrop()) input.dropBuffer(postRhsIndex)
+
+ ctx1.freshSuccess(
+ s1.apply(
+ lhsValue.asInstanceOf[T],
+ ctx1.successValue.asInstanceOf[V]
+ ),
+ nextIndex
+ )
+ }
+
+ if (ctx1.verboseFailures) ctx1.reportAggregateMsg(
+ Util.joinBinOp(lhsMsg, rhsMsg),
+ rhsAggregate ::: lhsAggregate,
+ // We override the aggregateMsgs to avoid building an `a ~ b`
+ // aggregate msg in the specific case where the LHS parser fails to
+ // make any progress past `startIndex`. This finds cases like `a.? ~ b`
+            // or `a.rep ~ b` and lets us flatten them out into `a | b`
+ forceAggregate = preRhsIndex == ctx1.traceIndex
+ )
+ res
+ }
+ }
+
+ whitespace match {
+ case null => rhsSnippet
+ case ws =>
+ if (ws.asTerm.tpe =:= TypeRepr.of[fastparse.NoWhitespace.noWhitespaceImplicit.type]) rhsSnippet
+ else {
+ '{
+ Util.consumeWhitespace($ws, ctx1)
+ if (ctx1.isSuccess) $rhsSnippet
+ else ctx1
+ }
+ }
+ }
+ }
+ }
+ }.asInstanceOf[ParsingRun[R]]
+ }
+ }
+
+ inline def optionInline[T, V](inline lhs0: ParsingRun[T])(
+ optioner1: Implicits.Optioner[T, V],
+ ctx1: ParsingRun[Any]
+ ): ParsingRun[V] = {
+ val startPos = ctx1.index
+ val startCut = ctx1.cut
+ ctx1.cut = false
+ lhs0
+ val postSuccess = ctx1.isSuccess
+
+ val res =
+ if (postSuccess) {
+ val res = ctx1.freshSuccess(optioner1.some(ctx1.successValue.asInstanceOf[T]))
+ res.cut |= startCut
+ res
+ } else if (ctx1.cut) ctx1.asInstanceOf[ParsingRun[V]]
+ else {
+ val res = ctx1.freshSuccess(optioner1.none, startPos)
+ res.cut |= startCut
+ res
+ }
+
+ if (ctx1.verboseFailures) {
+ val msg = ctx1.shortMsg
+ if (!postSuccess) {
+ ctx1.reportAggregateMsg(() => msg.render + ".?")
+ }
+ }
+ res
+ }
+
+ inline def mapInline[T, V](lhs: ParsingRun[T])(inline f: T => V): ParsingRun[V] = {
+ if (!lhs.isSuccess) lhs.asInstanceOf[ParsingRun[V]]
+ else {
+ val this2 = lhs.asInstanceOf[ParsingRun[V]]
+ this2.successValue = f(this2.successValue.asInstanceOf[T])
+ this2
+ }
+ }
+
+ inline def collectInline[T, V](lhs: ParsingRun[T])(inline f: PartialFunction[T, V]): ParsingRun[V] = {
+ if (!lhs.isSuccess) lhs.asInstanceOf[ParsingRun[V]]
+ else {
+ val this2 = lhs.asInstanceOf[ParsingRun[V]]
+ val f2 = f.andThen(v => this2.successValue = v)
+ f2.applyOrElse(this2.successValue.asInstanceOf[T], { (_: T) => this2.freshFailure() })
+ this2
+ }
+ }
+
+ inline def flatMapXInline[T, V](lhs: ParsingRun[T])(inline f: T => ParsingRun[V]): ParsingRun[V] = {
+ if (!lhs.isSuccess) lhs.asInstanceOf[ParsingRun[V]]
+ else f(lhs.successValue.asInstanceOf[T])
+ }
+
+ inline def flatMapInline[T, V](
+ lhs: ParsingRun[T]
+ )(inline f: T => ParsingRun[V])(ws: fastparse.Whitespace): ParsingRun[V] = {
+ if (!lhs.isSuccess) lhs.asInstanceOf[ParsingRun[V]]
+ else {
+ val oldCapturing = lhs.noDropBuffer
+ val successValue = lhs.successValue
+ lhs.noDropBuffer = true
+ ws(lhs)
+ lhs.noDropBuffer = oldCapturing
+ if (!lhs.isSuccess && lhs.cut) lhs.asInstanceOf[ParsingRun[V]]
+ else f(successValue.asInstanceOf[T])
+ }
+ }
+
+ inline def eitherInline[T, V >: T](inline lhs0: ParsingRun[T])(inline other: ParsingRun[V])(ctx5: ParsingRun[Any])
+ : ParsingRun[V] = {
+
+ val oldCut = ctx5.cut
+ ctx5.cut = false
+ val startPos = ctx5.index
+
+ lhs0
+ val lhsMsg = ctx5.shortMsg
+ val lhsAggregate = ctx5.aggregateMsgs
+ if (ctx5.isSuccess) {
+ ctx5.cut |= oldCut
+ ctx5.asInstanceOf[ParsingRun[V]]
+ } else if (ctx5.cut) ctx5.asInstanceOf[ParsingRun[V]]
+ else {
+ val verboseFailures = ctx5.verboseFailures
+
+ ctx5.index = startPos
+ if (verboseFailures) ctx5.reportAggregateMsg(lhsMsg)
+
+ ctx5.cut = false
+ other
+ val rhsMsg = ctx5.shortMsg
+ val rhsCut = ctx5.cut
+ val endCut = rhsCut | oldCut
+ if (!ctx5.isSuccess && !rhsCut) ctx5.freshFailure(startPos)
+ ctx5.cut = endCut
+ if (verboseFailures) {
+ ctx5.reportAggregateMsg(rhsMsg ::: lhsMsg, ctx5.aggregateMsgs ::: lhsAggregate)
+ }
+ ctx5.asInstanceOf[ParsingRun[V]]
+ }
+ }
+
+ inline def captureInline(inline lhs0: ParsingRun[Any])(ctx6: ParsingRun[Any]): ParsingRun[String] = {
+ val startPos = ctx6.index
+ val oldCapturing = ctx6.noDropBuffer
+ ctx6.noDropBuffer = true
+ lhs0
+ ctx6.noDropBuffer = oldCapturing
+
+ if (!ctx6.isSuccess) ctx6.asInstanceOf[ParsingRun[String]]
+ else ctx6.freshSuccess(ctx6.input.slice(startPos, ctx6.index))
+ }
+
+ def parseCharCls(char: Expr[Char], ss: Seq[String])(using quotes: Quotes): Expr[Boolean] = {
+ import quotes.reflect.*
+
+ val snippets = for (s <- ss) yield {
+ val output = collection.mutable.Buffer.empty[Either[Char, (Char, Char)]]
+ var i = 0
+ while (i < s.length) {
+ s(i) match {
+ case '\\' =>
+ i += 1
+ output.append(Left(s(i)))
+ case '-' =>
+ i += 1
+ val Left(last) = output.remove(output.length - 1) : @unchecked
+ output.append(Right((last, s(i))))
+ case c => output.append(Left(c))
+ }
+ i += 1
+ }
+
+ (
+ output.collect { case Left(char) => CaseDef(Expr(char).asTerm, None, Expr(true).asTerm) },
+ output.collect { case Right((l, h)) => (l, h) }
+ )
+ }
+
+ val (literals, ranges) = snippets.unzip
+
+ '{
+ val charIn = $char
+ ${
+ Match(
+ '{ charIn }.asTerm,
+ literals.flatten.toList :+ CaseDef(
+ Wildcard(),
+ None,
+ ranges.flatten.map { (l, h) => '{ ${ Expr(l) } <= charIn && charIn <= ${ Expr(h) } } }.reduceOption {
+ (l, r) => '{ $l || $r }
+ }.getOrElse(Expr(false)).asTerm
+ )
+ ).asExprOf[Boolean]
+ }
+ }
+ }
+
+ def charInMacro(s: Expr[Seq[String]])(ctx: Expr[ParsingRun[Any]])(using quotes: Quotes): Expr[ParsingRun[Unit]] = {
+ import quotes.reflect.*
+
+ val literals: Seq[String] = getLiteralStrings(s)
+
+ val parsed = parseCharCls('{ $ctx.input($ctx.index) }, literals)
+ val bracketed = Expr[String](literals.map(l => "[" + Util.literalize(l).drop(1).dropRight(1) + "]").mkString)
+ '{
+ $ctx match {
+ case ctx1 =>
+ val index = ctx1.index
+ val res =
+ if (!ctx1.input.isReachable(index)) {
+ ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ } else $parsed match {
+ case true => ctx1.freshSuccessUnit(index + 1)
+ case false => ctx1.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ }
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(index, () => $bracketed)
+ res
+ }
+ }
+ }
+
+ private def getLiteralStrings(s: Expr[Seq[String]])(using quotes: Quotes): Seq[String] = {
+ import quotes.reflect.*
+ s match {
+ case Varargs(args @ Exprs(argValues)) => argValues
+ case _ =>
+ report.errorAndAbort("Function can only accept constant singleton type", s)
+ }
+ }
+
+ inline def charPredInline(inline p0: Char => Boolean)(ctx0: ParsingRun[Any]): ParsingRun[Unit] = {
+ val startIndex = ctx0.index
+ val res =
+ if (!(ctx0.input.isReachable(ctx0.index) && p0(ctx0.input(ctx0.index)))) {
+ ctx0.freshFailure().asInstanceOf[ParsingRun[Unit]]
+ } else {
+ ctx0.freshSuccessUnit(ctx0.index + 1)
+ }
+ if (ctx0.verboseFailures) ctx0.reportTerminalMsg(startIndex, () => s"char-pred(${p0})")
+ res
+ }
+
+ def charsWhileInMacro(s: Expr[String], min: Expr[Int])(ctx: Expr[ParsingRun[Any]])(using
+ quotes: Quotes
+ ): Expr[ParsingRun[Unit]] = {
+ import quotes.reflect.*
+
+ val literal = s.value.getOrElse(report.errorAndAbort("Function can only accept constant singleton type", s))
+
+ val bracketed = Expr[String]("[" + Util.literalize(literal).drop(1).dropRight(1) + "]")
+
+ '{
+ $ctx match {
+ case ctx1 =>
+ var index = ctx1.index
+ val input = ctx1.input
+ val start = index
+ val goal = $min + start
+ while (
+ input.isReachable(index) &&
+ ${ parseCharCls('{ input(index) }, Seq(literal)) }
+ ) index += 1
+ val res =
+ if (index >= goal) ctx1.freshSuccessUnit(index = index)
+ else ctx1.freshFailure()
+
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(start, () => $bracketed)
+ res
+ }
+ }
+ }
+
+ inline def charsWhileInline(inline p0: Char => Boolean, min: Int)(ctx0: ParsingRun[Any]): ParsingRun[Unit] = {
+ var index = ctx0.index
+ val input = ctx0.input
+ val start = index
+ val goal = min + start
+ while (input.isReachable(index) && p0(input(index))) index += 1
+ val res =
+ if (index >= goal) ctx0.freshSuccessUnit(index = index)
+ else ctx0.freshFailure()
+ if (ctx0.verboseFailures) ctx0.reportTerminalMsg(start, () => s"chars-while($p0, $min)")
+ res
+ }
+
+ def stringInMacro0(
+ ignoreCaseExpr: Expr[Boolean],
+ s: Expr[Seq[String]]
+ )(ctx: Expr[ParsingRun[Any]])(using quotes: Quotes): Expr[ParsingRun[Unit]] = {
+
+ import quotes.reflect.*
+
+ val ignoreCase = ignoreCaseExpr.valueOrAbort
+
+ val literals = getLiteralStrings(s)
+ val trie = new CompactTrieNode(
+ new TrieNode(if (ignoreCase) literals.map(_.toLowerCase) else literals)
+ )
+
+ '{
+ $ctx match {
+ case ctx1 =>
+ val index = ctx1.index
+ val input = ctx1.input
+
+ var output: Int = -1
+ def setOutput(x: Int) = output = x
+
+ ${
+
+ def charAtN(n: Expr[Int]): Expr[Char] =
+ if (ignoreCase) '{ input.apply($n).toLower }
+ else '{ input.apply($n) }
+ def rec(depth: Int, t: CompactTrieNode): Expr[Unit] = '{
+ val n = index + ${ Expr(depth) }
+ ${
+ if t.word
+ then '{ setOutput(n) }
+ else '{}
+ }
+ if (input.isReachable(n)) ${
+ if (t.children.isEmpty) '{ () }
+ else {
+ val casedefs = t.children.map {
+ case (k, ("", v)) => CaseDef(Expr(k).asTerm, None, rec(depth + 1, v).asTerm)
+ case (k, (s, v)) =>
+ val checks = s
+ .zipWithIndex
+ .map { case (char, i) =>
+ '{ ${ charAtN('{ index + ${ Expr(depth + i + 1) } }) } == ${ Expr(char) } }
+ }
+ .reduce[Expr[Boolean]] { case (l, r) => '{ $l && $r } }
+ CaseDef(
+ Expr(k).asTerm,
+ None,
+ '{
+ if (input.isReachable(index + ${ Expr(depth + s.length) }) && $checks) {
+ ${ rec(depth + s.length + 1, v) }
+ }
+ }.asTerm
+ )
+ }
+
+ Match(charAtN('{ n }).asTerm, casedefs.toList :+ CaseDef(Wildcard(), None, '{}.asTerm))
+ .asExprOf[Unit]
+ }
+ }
+ }
+
+ rec(0, trie)
+
+ }
+
+ val res =
+ if (output != -1) ctx1.freshSuccessUnit(output)
+ else ctx1.freshFailure()
+ if (ctx1.verboseFailures) ctx1.reportTerminalMsg(
+ index,
+ Msgs.fromStrings(${ Expr(literals.map(Util.literalize(_)).toList) })
+ )
+ res
+ }
+ }
+ }
+
+}
diff --git a/fastparse/src-3/fastparse/internal/MacroRepImpls.scala b/fastparse/src-3/fastparse/internal/MacroRepImpls.scala
new file mode 100644
index 00000000..36a884f5
--- /dev/null
+++ b/fastparse/src-3/fastparse/internal/MacroRepImpls.scala
@@ -0,0 +1,182 @@
+package fastparse.internal
+
+import fastparse.{Implicits, NoWhitespace, P, ParsingRun}
+
+import scala.annotation.tailrec
+import scala.quoted.*
+
+/** Implementations of the various `.rep`/`.repX` overloads. The most common
+ * and simple overloads are implemented as macros for performance, while the
+ * more complex/general cases are left as normal methods to avoid code bloat
+ * and allow the use of default/named arguments (which don't work in macros
+ * due to https://github.com/scala/bug/issues/5920).
+ *
+ * Even the normal method overloads are manually-specialized to some extent
+ * for various sorts of inputs as a best-effort attempt to minimize branching
+ * in the hot paths.
+ */
+object MacroRepImpls {
+ def repMacro0[T: Type, V: Type](
+ parse0: Expr[ParsingRun[T]],
+ sep: Expr[ParsingRun[_]],
+ whitespace: Expr[fastparse.Whitespace],
+ min: Expr[Int],
+ max: Expr[Int],
+ exactly: Expr[Int],
+ )(repeater0: Expr[Implicits.Repeater[T, V]],
+ ctx0: Expr[ParsingRun[_]])(using quotes: Quotes): Expr[ParsingRun[V]] = {
+ import quotes.reflect.*
+
+ def getInlineExpansionValue[T](t: Term): Term = {
+ t match{
+ case Inlined(a, b, c) => getInlineExpansionValue(c)
+ case Typed(a, b) => getInlineExpansionValue(a)
+ case _ => t
+ }
+ }
+
+ val staticMin0 = getInlineExpansionValue[Int](min.asTerm).asExprOf[Int]
+ val staticMax0 = getInlineExpansionValue[Int](max.asTerm).asExprOf[Int]
+ val staticExactly0 = getInlineExpansionValue[Int](exactly.asTerm).asExprOf[Int]
+
+ val staticActualMin = staticExactly0 match{
+ case '{-1} => staticMin0.value
+ case _ => staticExactly0.value
+ }
+ val staticActualMax = staticExactly0 match{
+ case '{-1} => staticMax0.value
+ case _ => staticExactly0.value
+ }
+
+ '{
+ val ctx = $ctx0
+ val repeater = $repeater0
+ val acc = repeater.initial
+ val actualMin = if ($exactly == -1) $min else $exactly
+ val actualMax = if ($exactly == -1) $max else $exactly
+
+ def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = ${
+ staticActualMin match{
+ case Some(-1) => '{ ctx.freshSuccess(repeater.result(acc), successIndex, endCut) }
+ case _ =>
+ '{
+ if (count < actualMin) ctx.augmentFailure(index, endCut)
+ else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
+ }
+ }
+ }
+
+ @tailrec def rec(startIndex: Int,
+ count: Int,
+ precut: Boolean,
+ outerCut: Boolean,
+ sepMsg: Msgs,
+ lastAgg: Msgs): ParsingRun[V] = ${
+
+ def consumeWhitespace(extraCut: Expr[Boolean])(x: Expr[ParsingRun[V]]) =
+ if whitespace.asTerm.tpe =:= TypeRepr.of[fastparse.NoWhitespace.noWhitespaceImplicit.type]
+ then x
+ else '{
+ Util.consumeWhitespace($whitespace, ctx)
+ if (!ctx.isSuccess && ($extraCut || ctx.cut)) ctx.asInstanceOf[ParsingRun[Nothing]]
+ else { $x }
+ }
+
+ val ctxCut = staticActualMin match{
+ case Some(-1) => '{ precut }
+ case _ => '{ precut | (count < actualMin && outerCut) }
+ }
+
+ val checkMax0 = staticActualMax match{
+ case Some(v) if v != 0 => '{false}
+ case _ => '{ count == 0 && actualMax == 0 }
+ }
+
+ '{
+ ctx.cut = $ctxCut
+ if ($checkMax0) ctx.freshSuccess(repeater.result(acc), startIndex)
+ else {
+ $parse0
+ val parsedMsg = ctx.shortMsg
+ val parsedAgg = ctx.aggregateMsgs
+ val postCut = ctx.cut
+ val verboseFailures = ctx.verboseFailures
+ if (!ctx.isSuccess) {
+ val res =
+ if (postCut) ctx.asInstanceOf[ParsingRun[V]]
+ else end(startIndex, startIndex, count, outerCut | postCut)
+ if (verboseFailures) {
+ Util.reportParseMsgInRep(
+ startIndex,
+ actualMin,
+ ctx,
+ sepMsg,
+ parsedMsg,
+ lastAgg,
+ precut || postCut
+ )
+ }
+ res
+ } else {
+ val beforeSepIndex = ctx.index
+ repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
+ val nextCount = count + 1
+ ${
+ val checkMax2 = staticActualMax match {
+ case Some(Int.MaxValue) => '{ false }
+ case _ => '{ nextCount == actualMax }
+ }
+ '{
+ if ($checkMax2) {
+ val res = end(beforeSepIndex, beforeSepIndex, nextCount, outerCut | postCut)
+ if (verboseFailures) ctx.reportTerminalMsg(startIndex, () => parsedMsg.render + ".rep" + (if (actualMin == 0) "" else s"(${actualMin})"))
+ res
+ }
+ else {
+ ${
+ consumeWhitespace('{false})('{
+ ctx.cut = false
+ ${
+ getInlineExpansionValue(sep.asTerm).asExpr match {
+ case '{ null } =>
+ '{
+ rec(beforeSepIndex, nextCount, false, outerCut | postCut, null, parsedAgg)
+ }
+ case _ =>
+ '{
+ val sep1 = $sep
+ val sepCut = ctx.cut
+ val endCut = outerCut | postCut | sepCut
+ if (ctx.isSuccess) {
+ val postSepMsg = ctx.shortMsg
+ ${
+ consumeWhitespace('{sepCut})('{
+ rec(beforeSepIndex, nextCount, sepCut, endCut, postSepMsg, parsedAgg)
+ })
+ }
+ }
+ else {
+ val res =
+ if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
+ else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
+
+ if (verboseFailures) Util.reportParseMsgPostSep(startIndex, actualMin, ctx, parsedMsg, parsedAgg)
+ res
+ }
+ }
+ }
+ }
+ })
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ rec(ctx.index, 0, false, ctx.cut, null, null)
+ }
+ }
+}
diff --git a/fastparse/src-3/fastparse/internal/NoWarn.scala b/fastparse/src-3/fastparse/internal/NoWarn.scala
new file mode 100644
index 00000000..5e76b160
--- /dev/null
+++ b/fastparse/src-3/fastparse/internal/NoWarn.scala
@@ -0,0 +1,6 @@
+package fastparse.internal
+
+object NoWarn{
+ @deprecated("Use scala.annotation.nowarn instead", "3.1.1")
+ type nowarn = scala.annotation.nowarn
+}
diff --git a/fastparse/src-3/fastparse/package.scala b/fastparse/src-3/fastparse/package.scala
new file mode 100644
index 00000000..638ab909
--- /dev/null
+++ b/fastparse/src-3/fastparse/package.scala
@@ -0,0 +1,353 @@
+import fastparse.internal.{Instrument, Logger, *}
+
+import language.experimental.macros
+
+package object fastparse extends fastparse.SharedPackageDefs {
+
+ /**
+ * Shorthand alias for [[ParsingRun]]; this is both the parameter-to and the
+ * return type for all Fastparse's parsing methods.
+ *
+ * @tparam T is the type of the value returned by the parser method on success
+ */
+ type P[+T] = ParsingRun[T]
+
+ val P = ParsingRun
+
+ implicit def DiscardParserValue(p: P[_]): P[Unit] = {
+ p.successValue = ()
+ p.asInstanceOf[P[Unit]]
+ }
+
+ /** Delimits a named parser. This name will appear in the parser failure
+ * messages and stack traces, and by default is taken from the name of the
+ * enclosing method.
+ */
+ inline def P[T](inline t: P[T])(implicit name: sourcecode.Name, ctx: P[_]): P[T] =
+ MacroInlineImpls.pInline(t)(name, ctx)
+
+ /** Parses an exact string value. */
+ implicit inline def LiteralStr(s: String)(implicit ctx: P[Any]): P[Unit] =
+ ${ MacroInlineImpls.literalStrMacro('{ s })('{ ctx }) }
+
+ extension [T](inline parse0: P[T])
+ /** Tests the output of the parser with a given predicate, failing the
+ * parse if the predicate returns false. Useful for doing local validation
+ * on bits and pieces of your parsed output
+ */
+ inline def filter(f: T => Boolean)(using ctx: P[Any]): P[T] =
+ MacroInlineImpls.filterInline[T](parse0)(f)(ctx)
+
+ /** Either-or operator: tries to parse the left-hand-side, and if that
+ * fails it backtracks and tries to pass the right-hand-side. Can be
+ * chained more than once to parse larger numbers of alternatives.
+ */
+ inline def |[V >: T](inline other: P[V])(using ctx: P[Any]): P[V] =
+ MacroInlineImpls.eitherInline[T, V](parse0)(other)(ctx)
+
+ /** Plain cut operator. Runs the parser, and if it succeeds, backtracking
+ * past that point is now prohibited
+ */
+ inline def /(implicit ctx: P[_]): P[T] = MacroInlineImpls.cutInline[T](parse0)(ctx) // macro MacroImpls.cutMacro[T]
+
+ /** Optional operator. Parses the given input to wrap it in a `Some`, but
+ * if parsing fails backtracks and returns `None`
+ */
+ inline def ?[V](implicit optioner: Implicits.Optioner[T, V], ctx: P[Any]): P[V] =
+ MacroInlineImpls.optionInline[T, V](parse0)(optioner, ctx)
+
+ /** Transforms the result of this parser using the given function. Useful
+ * for turning the [[String]]s captured by [[!]] and the tuples built
+ * by [[~]] into your own case classes or data structure
+ */
+ inline def map[V](inline f: T => V): P[V] = MacroInlineImpls.mapInline[T, V](parse0)(f)
+
+ /** Transforms the result of this parser using the given partial function,
+ * failing the parse if the partial function is not defined on the result
+ * of the current parser. This is equivalent to
+ * `.filter(f.isDefinedAt).map(f.apply)`
+ */
+ inline def collect[V](inline f: PartialFunction[T, V]): P[V] = MacroInlineImpls.collectInline[T, V](parse0)(f)
+
+ /** Transforms the result of this parser using the given function into a
+ * new parser which is applied (without consuming whitespace). Useful for
+ * doing dependent parsing, e.g. when parsing JSON you may first parse a
+ * character to see if it's a `[`, `{`, or `"`, and then deciding whether
+ * you next want to parse an array, dictionary or string.
+ */
+ inline def flatMapX[V](inline f: T => P[V]): P[V] = MacroInlineImpls.flatMapXInline[T, V](parse0)(f)
+
+ /** Transforms the result of this parser using the given function into a
+ * new parser which is applied (after whitespace). Useful for doing
+ * dependent parsing, e.g. when parsing JSON you may first parse a
+ * character to see if it's a `[`, `{`, or `"`, and then deciding whether
+ * you next want to parse an array, dictionary or string.
+ */
+ inline def flatMap[V](f: T => P[V])(using whitespace: Whitespace): P[V] =
+ MacroInlineImpls.flatMapInline[T, V](parse0)(f)(whitespace)
+
+ /** Capture operator; makes the parser return the span of input it parsed
+ * as a [[String]], which can then be processed further using [[~]],
+ * [[map]] or [[flatMapX]]
+ */
+ inline def !(using ctx: P[Any]): P[String] = MacroInlineImpls.captureInline(parse0)(ctx)
+
+ /** Sequence-with-cut operator. Runs two parsers one after the other,
+ * with optional whitespace in between. If the first parser completes
+ * successfully, backtracking is now prohibited. If both parsers
+ * return a value, this returns a tuple.
+ */
+ inline def ~/[V, R](inline other: P[V])(using
+ s: Implicits.Sequencer[T, V, R],
+ whitespace: Whitespace,
+ ctx: P[_]
+ ): P[R] = ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other, true)('s, 'whitespace, 'ctx) }
+
+ /** Sequence operator. Runs two parsers one after the other,
+ * with optional whitespace in between. If both parsers
+ * return a value, this returns a tuple.
+ */
+ inline def ~[V, R](inline other: P[V])(using
+ s: Implicits.Sequencer[T, V, R],
+ whitespace: Whitespace,
+ ctx: P[_]
+ ): P[R] =
+ ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other, false)('s, 'whitespace, 'ctx) }
+
+ /** Raw sequence-with-cut operator. Runs two parsers one after the other,
+ * *without* whitespace in between. If the first parser completes
+ * successfully, backtracking is now prohibited. If both parsers
+ * return a value, this returns a tuple.
+ */
+ inline def ~~/[V, R](inline other: P[V])(using s: Implicits.Sequencer[T, V, R], ctx: P[_]): P[R] =
+ ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other, true)('s, null, 'ctx) }
+
+ /** Raw sequence operator. Runs two parsers one after the other,
+ * *without* whitespace in between. If both parsers return a value,
+ * this returns a tuple.
+ */
+ inline def ~~[V, R](inline other: P[V])(using s: Implicits.Sequencer[T, V, R], ctx: P[_]): P[R] =
+ ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other, false)('s, null, 'ctx) }
+
+ /** Repeat operator; runs the LHS parser 0 or more times separated by the
+ * given whitespace (in implicit scope), and returns
+ * a `Seq[T]` of the parsed values. On failure, backtracks to the starting
+ * index of the last run.
+ */
+ inline def rep[V](using repeater: Implicits.Repeater[T, V], whitespace: Whitespace, ctx: P[Any]): P[V] =
+ ${ MacroRepImpls.repMacro0[T, V]('parse0, '{null}, 'whitespace, '{0}, '{Int.MaxValue}, '{-1})('repeater, 'ctx) }
+
+ /** Raw repeat operator; runs the LHS parser 0 or more times *without*
+ * any whitespace in between, and returns
+ * a `Seq[T]` of the parsed values. On failure, backtracks to the starting
+ * index of the last run.
+ */
+ inline def repX[V](using repeater: Implicits.Repeater[T, V], ctx: P[Any]): P[V] =
+ ${ MacroRepImpls.repMacro0[T, V]('parse0, '{null}, '{fastparse.NoWhitespace.noWhitespaceImplicit}, '{0}, '{Int.MaxValue}, '{-1})('repeater, 'ctx) }
+
+ /// ** Repeat operator; runs the LHS parser at least `min`
+ // * times separated by the given whitespace (in implicit scope),
+ // * and returns a `Seq[T]` of the parsed values. On
+ // * failure, backtracks to the starting index of the last run.
+ // */
+ // inline def rep[V](inline min: Int)(using repeater: Implicits.Repeater[T, V], whitespace: Whitespace, ctx: P[Any]): P[V] =
+ // ${ MacroRepImpls.repXMacro0[T, V]('parse0, 'whitespace, 'min)('repeater, 'ctx) }
+
+ /// ** Raw repeat operator; runs the LHS parser at least `min`
+ // * times *without* any whitespace in between,
+ // * and returns a `Seq[T]` of the parsed values. On
+ // * failure, backtracks to the starting index of the last run.
+ // */
+ // inline def repX[V](min: Int)(implicit repeater: Implicits.Repeater[T, V], ctx: P[Any]): P[V] =
+ // ${ MacroRepImpls.repXMacro0[T, V]('parse0, null, 'min)('repeater, 'ctx) }
+
+ end extension
+
+ extension [T](inline parse0: => P[T])
+
+ /** Repeat operator; runs the LHS parser at least `min` to at most `max`
+ * times separated by the given whitespace (in implicit scope) and
+ * separator `sep`, and returns a `Seq[T]` of the parsed values. On
+ * failure, backtracks to the starting index of the last run.
+ *
+ * The convenience parameter `exactly` is provided to set both `min` and
+ * `max` to the same value.
+ */
+ inline def rep[V](
+ inline min: Int = 0,
+ inline sep: => P[_] = null,
+ inline max: Int = Int.MaxValue,
+ inline exactly: Int = -1
+ )(using
+ repeater: Implicits.Repeater[T, V],
+ whitespace: Whitespace,
+ ctx: P[Any]
+ ): P[V] = ${
+ MacroRepImpls.repMacro0[T, V](
+ 'parse0,
+ 'sep,
+ 'whitespace,
+ 'min,
+ 'max,
+ 'exactly
+ )(
+ 'repeater,
+ 'ctx
+ )
+ }
+
+
+ /** Raw repeat operator; runs the LHS parser at least `min` to at most `max`
+ * times separated by the
+ * separator `sep` *without* any whitespace in between, and returns a `Seq[T]` of the parsed values. On
+ * failure, backtracks to the starting index of the last run.
+ *
+ * The convenience parameter `exactly` is provided to set both `min` and
+ * `max` to the same value.
+ */
+ inline def repX[V](
+ inline min: Int = 0,
+ inline sep: => P[_] = null,
+ inline max: Int = Int.MaxValue,
+ inline exactly: Int = -1
+ )(implicit
+ repeater: Implicits.Repeater[T, V],
+ ctx: P[Any]
+ ): P[V] = ${
+ MacroRepImpls.repMacro0[T, V](
+ 'parse0,
+ 'sep,
+ '{fastparse.NoWhitespace.noWhitespaceImplicit},
+ 'min,
+ 'max,
+ 'exactly
+ )(
+ 'repeater,
+ 'ctx
+ )
+ }
+
+
+
+ /**
+ * Hides the internals of the given parser when it fails, such that it
+ * only succeeds completely or fails completely, and none of its internal
+ * parsers end up in the failure traces or failure stack to be displayed
+ * to the user.
+ */
+ inline def opaque(msg: String)(implicit ctx: P[Any]): P[T] = SharedPackageDefs.opaque(() => parse0, msg)
+
+ /**
+ * Negative lookahead operator: succeeds if the wrapped parser fails and
+ * fails if the wrapped parser succeeds. In all cases, it ends up
+ * consuming zero characters.
+ */
+ inline def unary_!(implicit ctx: P[Any]): P[Unit] = {
+
+ val startPos = ctx.index
+ val startCut = ctx.cut
+ val oldNoCut = ctx.noDropBuffer
+ ctx.noDropBuffer = true
+ val startTerminals = ctx.terminalMsgs
+ parse0
+ ctx.noDropBuffer = oldNoCut
+
+ val res =
+ if (ctx.isSuccess) ctx.freshFailure(startPos)
+ else ctx.freshSuccessUnit(startPos)
+
+ if (ctx.verboseFailures) {
+ // Unlike most other data on `ctx`, `terminalMsgs` is normally
+ // append-only. Thus when we're inside the unary_! expression, it
+ // continually appends to `terminalMsgs` sub-parsers that could
+ // have succeeded within it, but are irrelevant to the user because
+ // we *want* the contents of the unary_! to fail! Thus, we reset
+ // `terminalMsgs` once we exit the unary_!, to ensure these do not
+ // end up in error messages
+ ctx.terminalMsgs = startTerminals
+ ctx.reportTerminalMsg(startPos, Msgs.empty)
+ }
+ res.cut = startCut
+ res
+ }
+
+ /**
+ * Positive lookahead operator: succeeds if the wrapped parser succeeds and
+ * fails if the wrapped parser fails, but in all cases consumes zero
+ * characters.
+ */
+ inline def &(inline parse: => P[_])(implicit ctx: P[_]): P[Unit] = {
+
+ val startPos = ctx.index
+ val startCut = ctx.cut
+ val oldNoCut = ctx.noDropBuffer
+ ctx.noDropBuffer = true
+ parse
+ ctx.noDropBuffer = oldNoCut
+
+ val res =
+ if (ctx.isSuccess) ctx.freshSuccessUnit(startPos)
+ else ctx.asInstanceOf[P[Unit]]
+
+ res.cut = startCut
+ res
+ }
+
+ /** Provides logging-related [[LogByNameOps]] implicits on [[String]]. */
+ implicit def LogOpsStr(parse0: String)(implicit ctx: P[Any]): fastparse.LogByNameOps[Unit] = LogByNameOps(parse0)
+ // ??? // macro MacroImpls.logOpsStrMacro
+ /** Separated out from [[ByNameOps]] because `.log` isn't easy to make an
+ * [[AnyVal]] extension method, but it doesn't matter since `.log` calls
+ * are only for use in development while the other [[ByNameOps]] operators
+ * are more performance-sensitive
+ */
+ implicit class LogByNameOps[T](parse0: => P[T])(implicit ctx: P[_]) {
+
+ /** Wraps a parser to log when it succeeds and fails, and at what index.
+ * Useful for seeing what is going on within your parser. Nicely indents
+ * the logs for easy reading
+ */
+ def log(implicit name: sourcecode.Name, logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.log(() => parse0)
+
+ /** Prints the given message, nicely indented, after the wrapped parser finishes */
+ def logAfter(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.logAfter(() => parse0, msg)
+
+ /** Prints the given message, nicely indented, before the wrapped parser starts */
+ def logBefore(msg: => Any)(implicit logger: Logger = Logger.stdout): P[T] = SharedPackageDefs.logBefore(() => parse0, msg)
+ }
+
+
+ /** Parses a single character satisfying the given predicate */
+ inline def CharPred(inline p: Char => Boolean)(implicit ctx: P[_]): P[Unit] = MacroInlineImpls.charPredInline(p)(ctx)
+
+ /** Parses a single character in one of the input strings representing
+ * character classes
+ */
+ inline def CharIn(inline s: String*)(using ctx: P[_]): P[Unit] =
+ ${ MacroInlineImpls.charInMacro('s)('ctx) }
+
+ /** Parses `min` or more characters as long as they are contained
+ * in one of the input strings representing character classes
+ */
+ inline def CharsWhileIn(inline s: String, min: Int = 1)(implicit ctx: P[_]): P[Unit] =
+ ${ MacroInlineImpls.charsWhileInMacro('s, 'min)('ctx) }
+
+ /** Parses `min` or more characters as long as they satisfy the given
+ * predicate
+ */
+ inline def CharsWhile(inline p: Char => Boolean, min: Int = 1)(implicit ctx: P[_]): P[Unit] =
+ MacroInlineImpls.charsWhileInline(p, min)(ctx)
+
+ /** Efficiently parses any one of the given [[String]]s; more efficient than
+ * chaining [[EagerOps.|]] together
+ */
+ inline def StringIn(inline s: String*)(implicit ctx: P[_]): P[Unit] =
+ ${ MacroInlineImpls.stringInMacro0('false, 's)('ctx) }
+
+ /** Efficiently parses any one of the given [[String]]s, case-insensitively;
+ * more efficient than chaining [[EagerOps.|]] together with [[IgnoreCase]]
+ */
+ inline def StringInIgnoreCase(inline s: String*)(implicit ctx: P[_]): P[Unit] =
+ ${ MacroInlineImpls.stringInMacro0('true, 's)('ctx) }
+
+}
diff --git a/fastparse/src-native/fastparse/CharPredicates.scala b/fastparse/src-native/fastparse/CharPredicates.scala
new file mode 100644
index 00000000..edae664a
--- /dev/null
+++ b/fastparse/src-native/fastparse/CharPredicates.scala
@@ -0,0 +1,32 @@
+package fastparse
+
+/**
+ * Fast, pre-computed character predicates for characters from 0 to 65535
+ *
+ * Useful because FastParse does its parsing character by character, so
+ * although this doesn't have the full range of the java
+ * `Character.getType(c: Int)` functions, it still is good enough for
+ * a wide range of use cases
+ */
+object CharPredicates{
+
+ def isMathSymbol(c: Char) = Character.getType(c) == Character.MATH_SYMBOL
+ def isOtherSymbol(c: Char) = Character.getType(c) == Character.OTHER_SYMBOL
+ def isLetter(c: Char) = {
+ ((((1 << Character.UPPERCASE_LETTER) |
+ (1 << Character.LOWERCASE_LETTER) |
+ (1 << Character.TITLECASE_LETTER) |
+ (1 << Character.MODIFIER_LETTER) |
+ (1 << Character.OTHER_LETTER)) >> Character.getType(c)) & 1) != 0
+ }
+
+ def isPrintableChar(c: Char) = {
+ // Don't bother checking for Unicode SPECIAL block characters
+ // in Scala.js because Scala.js doesn't really support it
+ !java.lang.Character.isISOControl(c) &&
+ !java.lang.Character.isSurrogate(c)
+ }
+ def isDigit(c: Char) = Character.isDigit(c)
+ def isLower(c: Char) = Character.isLowerCase((c))
+ def isUpper(c: Char) = Character.isUpperCase(c)
+}
\ No newline at end of file
diff --git a/fastparse/src/fastparse/Implicits.scala b/fastparse/src/fastparse/Implicits.scala
index 6dc82ece..3203efaf 100644
--- a/fastparse/src/fastparse/Implicits.scala
+++ b/fastparse/src/fastparse/Implicits.scala
@@ -46,7 +46,7 @@ object Implicits {
}
}
trait LowPriRepeater{
- implicit def GenericRepeaterImplicit[T] = GenericRepeatedImplicit0.asInstanceOf[Repeater[T, Seq[T]]]
+ implicit def GenericRepeaterImplicit[T]: Repeater[T, Seq[T]] = GenericRepeatedImplicit0.asInstanceOf[Repeater[T, Seq[T]]]
object GenericRepeatedImplicit0 extends Repeater[Any, Seq[Any]]{
type Acc = mutable.Buffer[Any]
def initial = mutable.Buffer.empty[Any]
@@ -67,7 +67,7 @@ object Implicits {
}
}
trait LowPriOptioner{
- implicit def GenericOptionerImplicit[T] = GenericOptionerImplicit0.asInstanceOf[Optioner[T, Option[T]]]
+ implicit def GenericOptionerImplicit[T]: Optioner[T, Option[T]] = GenericOptionerImplicit0.asInstanceOf[Optioner[T, Option[T]]]
object GenericOptionerImplicit0 extends Optioner[Any, Option[Any]]{
def none = None
def some(value: Any) = Some(value)
diff --git a/fastparse/src/fastparse/Parsed.scala b/fastparse/src/fastparse/Parsed.scala
index 68ade645..75f15f84 100644
--- a/fastparse/src/fastparse/Parsed.scala
+++ b/fastparse/src/fastparse/Parsed.scala
@@ -95,10 +95,8 @@ object Parsed{
object Failure{
def apply(label: String, index: Int, extra: Extra) = new Failure(label, index, extra)
- def unapply(x: Failure): Option[(String, Int, Extra)] = x match{
- case f: Failure => Some((f.label, f.index, f.extra))
- case _ => None
- }
+ def unapply(f: Failure): Some[(String, Int, Extra)] =
+ Some((f.label, f.index, f.extra))
def formatMsg(input: ParserInput, stack: List[(String, Int)], index: Int) = {
"Expected " + Failure.formatStack(input, stack) +
", found " + Failure.formatTrailing(input, index)
@@ -150,8 +148,8 @@ object Parsed{
def fromParsingRun[T](p: ParsingRun[T]) = {
assert(!p.isSuccess)
TracedFailure(
- p.failureTerminalAggregate,
- p.lastFailureMsg ::: p.failureGroupAggregate,
+ p.terminalMsgs,
+ p.aggregateMsgs,
Parsed.fromParsingRun(p).asInstanceOf[Failure]
)
}
@@ -187,7 +185,7 @@ object Parsed{
* Displays the short failure message excluding the parse stack. This shows
* the last parser which failed causing the parse to fail. Note that this
* does not include other parsers which may have failed earlier; see [[terminalsMsg]]
- * and [[aggregateMsg]] for more detailed errors
+ * and [[reportParseMsg]] for more detailed errors
*/
def msg = failure.msg
/**
@@ -223,4 +221,3 @@ object Parsed{
def longAggregateMsg = Failure.formatMsg(input, stack ++ Seq(groupAggregateString -> index), index)
}
}
-
diff --git a/fastparse/src/fastparse/ParserInput.scala b/fastparse/src/fastparse/ParserInput.scala
index 9a06e793..3f06bae2 100644
--- a/fastparse/src/fastparse/ParserInput.scala
+++ b/fastparse/src/fastparse/ParserInput.scala
@@ -28,7 +28,7 @@ object ParserInputSource extends ParserInputSourceLowPri {
}
trait ParserInputSourceLowPri{
- implicit def fromReadable[T](s: T)(implicit f: T => geny.Readable) = FromReadable(
+ implicit def fromReadable[T](s: T)(implicit f: T => geny.Readable): FromReadable = FromReadable(
f(s),
// Default bufferSize of 4096. Somewhat arbitrary, but doesn't seem to matter
// much in benchmarks, e.g. on parsing `GenJSCode.scala`:
@@ -53,8 +53,8 @@ trait ParserInputSourceLowPri{
}
object ParserInput{
- implicit def fromString(s: String) = IndexedParserInput(s)
- implicit def FromIterator(s: Iterator[String]) = IteratorParserInput(s)
+ implicit def fromString(s: String): IndexedParserInput = IndexedParserInput(s)
+ implicit def FromIterator(s: Iterator[String]): IteratorParserInput = IteratorParserInput(s)
}
/**
* ParserInput class represents data that is needed to parse.
@@ -94,7 +94,7 @@ abstract class ParserInput extends IsReachable {
}
case class IndexedParserInput(data: String) extends ParserInput {
- override def apply(index: Int) = data.charAt(index)
+ override def apply(index: Int): Char = data.charAt(index)
/**
* As for `IndexedSeq` mode `dropBuffer` does nothing.
diff --git a/fastparse/src/fastparse/ParsingRun.scala b/fastparse/src/fastparse/ParsingRun.scala
index ce42f65f..3bfac276 100644
--- a/fastparse/src/fastparse/ParsingRun.scala
+++ b/fastparse/src/fastparse/ParsingRun.scala
@@ -1,6 +1,6 @@
package fastparse
-import fastparse.internal.{Instrument, Lazy, Msgs, Util}
+import fastparse.internal.{Instrument, Msgs}
/**
* Models an in-progress parsing run; contains all the mutable state that may
@@ -11,12 +11,12 @@ import fastparse.internal.{Instrument, Lazy, Msgs, Util}
* There are a few patterns that let us program with these mutable variables
* in a sort-of-pure-functional way:
*
- test - If a parser that wishes to ignore changes to a field within their child
+ * - If a parser that wishes to ignore changes to a field within their child
* parsers, a common pattern is to save the value of the field before the
* wrapped parser runs, and then re-set the field. e.g. this can be used to
* backtrack [[index]] after a lookahead parser finishes
*
- test - If a parser wants to read the value of the field "returned" by multiple
+ * - If a parser wants to read the value of the field "returned" by multiple
* child parsers, make sure to read the field into a local variable after
* each child parser is complete to make sure the value you want from an
* earlier child isn't stomped over by a later child
@@ -35,12 +35,12 @@ import fastparse.internal.{Instrument, Lazy, Msgs, Util}
* it with tracing enabled.
* @param traceIndex The index we wish to trace if tracing is enabled, else
* -1. Used to find failure messages to aggregate into
- * `failureTerminalAggregate`
+ * `terminalMsgs`
* @param instrument Callbacks that can be injected before/after every
* `P(...)` parser.
- * @param failureTerminalAggregate When tracing is enabled, this collects up all the
+ * @param terminalMsgs When tracing is enabled, this collects up all the
* upper-most failures that happen at [[traceIndex]]
- * (in [[Lazy]] wrappers) so they can be shown to the
+ * (in [[fastparse.internal.Lazy]] wrappers) so they can be shown to the
* user at end-of-parse as suggestions for what could
* make the parse succeed. For terminal parsers like
* [[LiteralStr]], it just aggregate's the string
@@ -48,9 +48,9 @@ import fastparse.internal.{Instrument, Lazy, Msgs, Util}
* or `!a` which may fail at [[traceIndex]] even
* without any of their wrapped terminal parsers
* failing there, it makes use of the
- * [[shortParserMsg]] as the string representation of
+ * [[shortMsg]] as the string representation of
* the composite parser.
- * @param shortParserMsg When tracing is enabled, this contains string
+ * @param shortMsg When tracing is enabled, this contains string
* representation of the last parser to run. Since
* parsers aren't really objects, we piece together
* the string in the parser body and return store it
@@ -60,7 +60,7 @@ import fastparse.internal.{Instrument, Lazy, Msgs, Util}
* we only aggregate the portion of the parser msg
* that takes place after `traceIndex`
* @param failureStack The stack of named `P(...)` parsers in effect when
- * the failure occured; only constructed when tracing
+ * the failure occurred; only constructed when tracing
* is enabled via `traceIndex != -1`
* @param isSuccess Whether or not the parse is currently successful
* @param logDepth How many nested `.log` calls are currently surrounding us.
@@ -108,9 +108,9 @@ final class ParsingRun[+T](val input: ParserInput,
val traceIndex: Int,
val instrument: Instrument,
// Mutable vars below:
- var failureTerminalAggregate: Msgs,
- var failureGroupAggregate: Msgs,
- var shortParserMsg: Msgs,
+ var terminalMsgs: Msgs,
+ var aggregateMsgs: Msgs,
+ var shortMsg: Msgs,
var lastFailureMsg: Msgs,
var failureStack: List[(String, Int)],
var isSuccess: Boolean,
@@ -122,132 +122,121 @@ final class ParsingRun[+T](val input: ParserInput,
var noDropBuffer: Boolean,
val misc: collection.mutable.Map[Any, Any]){
- // HOW ERROR AGGREGATION WORKS:
- //
- // Fastparse provides two levels of error aggregation that get enabled when
- // calling `.trace()`: `failureTerminalAggregate`, and `failureGroupAggregate`:
- //
- // - `failureTerminalAggregate` lists all low-level terminal parsers which are
- // tried at the given `traceIndex`. This is useful to answer the question
- // "what can I put at the error position to make my parse continue"
- //
- // - `failureGroupAggregate` lists all high-level parsers which are tried at
- // the given `traceIndex`. This is useful to answer the question "What was
- // the parser trying to do when it failed"
- //
- // The implementation of `failureTerminalAggregate` is straightforward: we
- // simply call `aggregateTerminal` in every terminal parser, which collects
- // all the messages in a big list and returns it. The implementation of
- // `failureGroupAggregate` is more interesting, since we need to figure out
- // what are the "high level" parsers that we need to list. We use the
- // following algorithm:
- //
- // - When a parse which started at the given `traceIndex` fails without a cut
- // - Over-write `failureGroupAggregate` with it's `shortParserMsg`
- //
- // - Otherwise:
- // - If we are a terminal parser, we set our `failureGroupAggregate` to Nil
- // - If we are a compound parser, we simply sum up the `failureGroupAggregate`
- // of all our constituent parts
- //
- // The point of this heuristic is to provide the highest-level parsers which
- // failed at the `traceIndex`, but are not already part of the `failureStack`.
- // non-highest-level parsers do successfully write their message to
- // `failureGroupAggregate`, but they are subsequently over-written by the higher
- // level parsers, until it reaches the point where `cut == true`, indicating
- // that any further higher-level parsers will be in `failureStack` and using
- // their message to stomp over the existing parse-failure-messages in
- // `failureGroupAggregate` would be wasteful.
- //
- // These is an edge case where there is no given failure that occurs exactly at
- // `traceIndex` e.g. parsing "ax" with P( ("a" ~ "b") ~ "c" | "a" ~/ "d" ), the
- // final failure `index` and thus `traceIndex` is at offset 1, and we would like
- // to receive the aggregation ("b" | "d"). But ("a" ~ "b")
- // passes from offsets 0-2, "c" fails at offset 2 and ("a" ~ "b") ~ "c" fails
- // from offset 0-2. In such a case, we truncate the `shortParserMsg` at
- // `traceIndex` to only include the portion we're interested in (which directly
- // follows the failure). This then gets aggregated nicely to form the error
- // message from-point-of-failure.
- //
- // A follow-on edge case is parsing "ax" with
- //
- // val inner = P( "a" ~ "b" )
- // P( inner ~ "c" | "a" ~/ "d" )
- //
- // Here, we find that the `inner` parser starts before the `traceIndex` and
- // fails at `traceIndex`, but we want our aggregation to continue being
- // ("b" | "d"), rather than (inner | "d"). Thus, for opaque compound parsers
- // like `inner` which do not expose their internals, we use the `forceAggregate`
- // to force it to expose it's internals when it's range covers the `traceIndex`
- // but it isn't an exact match
- def aggregateMsg(startIndex: Int,
- msgToSet: () => String,
- msgToAggregate: Msgs): Unit = {
- aggregateMsg(startIndex, Msgs(List(new Lazy(msgToSet))), msgToAggregate)
+ /**
+ * Called by non-terminal parsers after completion, success or failure
+ *
+ * This needs to be called for both successful and failed parsers, as we need
+ * to record the msg of a successful parse in case it forms part of a larger
+ * failed parse later.
+ *
+ * For example:
+ *
+ * - Using "a" ~ ("b" ~ "c" | "d") to parse "abe"
+ * - We report that the parser ("b" ~ "c" | "d") failed at index 1
+ * - That msg contains the msg of the parse "b" even though it was successful
+ *
+ * Overloaded to minimize the amount of callsite bytecode, since we do a ton
+ * of inlining in Fastparse, and large amounts of bytecode inlined in a method
+ * can cause JVM performance problems (e.g. JIT compilation may get disabled)
+ */
+ def reportAggregateMsg(newshortMsg: Msgs): Unit = {
+
+ reportAggregateMsg(newshortMsg, aggregateMsgs)
}
+ def reportAggregateMsg(newshortMsg: Msgs,
+ newAggregateMsgs: Msgs): Unit = {
- def aggregateMsg(startIndex: Int,
- msgToSet: Msgs,
- msgToAggregate: Msgs): Unit = {
- aggregateMsg(startIndex, msgToSet, msgToAggregate, false)
+ reportAggregateMsg(newshortMsg, newAggregateMsgs, false)
}
- def aggregateMsg(startIndex: Int,
- msgToSet: Msgs,
- msgToAggregate: Msgs,
- forceAggregate: Boolean): Unit = {
- if (!isSuccess && lastFailureMsg == null) lastFailureMsg = msgToSet
+ def reportAggregateMsg(newshortMsg: Msgs,
+ forceAggregate: Boolean): Unit = {
+ reportAggregateMsg(newshortMsg, aggregateMsgs, forceAggregate)
+ }
- shortParserMsg = msgToSet
+ def reportAggregateMsg(newshortMsg: Msgs,
+ newAggregateMsgs: Msgs,
+ forceAggregate: Boolean): Unit = {
- // There are two cases when aggregating: either we stomp over the entire
- // existing aggregation with `msgToSet`, or we preserve it (with possible
- // additions) with `msgToAggregate`.
- if (checkAggregate(startIndex) && !forceAggregate) failureGroupAggregate = msgToSet
- else failureGroupAggregate = msgToAggregate
+ reportParseMsg0(
+ newshortMsg,
+ newAggregateMsgs,
+ forceAggregate,
+ newAggregateMsgs.value.nonEmpty
+ )
}
- def aggregateTerminal(startIndex: Int, f: () => String): Unit = {
- val f2 = new Lazy(f)
- if (!isSuccess){
- if (index == traceIndex) failureTerminalAggregate ::= f2
- if (lastFailureMsg == null) lastFailureMsg = Msgs(List(f2))
- }
+ /**
+ * Called by any terminal parser; these are parsers for which displaying
+ * sub-failures does not make sense; these include:
+ *
+ * - Individual strings or characters
+ * - Parsers like negation `!p` or `.filter` where the entire parser failing
+ * is not caused by sub-failure
+ * - Parsers like `.opaque`, where sub-failures are intentionally hidden and
+ * not shown to the user
+ *
+ * These "terminal" failures will be stored in the `terminalMsgs` in case
+ * a user wants to know what could have been placed at the failure point to
+ * let the parse progress
+ */
+ def reportTerminalMsg(startIndex: Int, newshortMsg: Msgs): Unit = {
+ // We only care about terminal parsers which failed exactly at the traceIndex
+ if (!isSuccess && index == traceIndex) terminalMsgs :::= newshortMsg
- shortParserMsg = if (startIndex >= traceIndex) Msgs(List(f2)) else Msgs.empty
- failureGroupAggregate = if (checkAggregate(startIndex)) shortParserMsg else Msgs.empty
+ reportParseMsg0(
+ if (startIndex >= traceIndex) newshortMsg else Msgs.empty,
+ if (startIndex >= traceIndex) newshortMsg else Msgs.empty,
+ false,
+ startIndex >= traceIndex
+ )
}
- def setMsg(startIndex: Int, f: () => String): Unit = {
- setMsg(startIndex, Msgs(List(new Lazy(f))))
- }
+ def reportParseMsg0(newshortMsg: Msgs,
+ newAggregateMsgs: Msgs,
+ forceAggregate: Boolean,
+ setShortMsg: Boolean): Unit = {
+ // `lastFailureMsg` ends up being set by the first parser to report a
+ // failure, while returning from the last parser to call `.freshFailure()`
+ // (which nulls it out)
+ if (!isSuccess && lastFailureMsg == null) lastFailureMsg = newshortMsg
- def setMsg(startIndex: Int, f: Msgs): Unit = {
- if (!isSuccess && lastFailureMsg == null) lastFailureMsg = f
- shortParserMsg = if (startIndex >= traceIndex) f else Msgs.empty
- failureGroupAggregate = if (checkAggregate(startIndex)) shortParserMsg else Msgs.empty
- }
+ // We only set the `shortMsg` for some parsers. These include:
+ //
+ // - Terminal parsers which have `startIndex >= traceIndex`
+ //
+ // - Aggregate parsers which have non-empty `newAggregateMsgs`, indicating
+ // that they have either child terminal parsers with `startIndex >= traceIndex`
+ // or they have child aggregate parsers with non-empty `newAggregateMsgs`
+ //
+ // This lets us skip setting `shortMsg` for all parsers, terminal or
+ // aggregate, which run and terminate fully before `traceIndex`, and thus
+ // would be of no interest to a user debugging parse failures at `traceIndex`
+ shortMsg = if (setShortMsg) newshortMsg else Msgs.empty
- /**
- * Conditions under which we want to aggregate the given parse
- */
- def checkAggregate(startIndex: Int) = {
- // We only aggregate if we are not currently past a cut; if we are past a
- // cut, there is no further backtracking and so the error aggregate that has
- // occurred will be the final aggregate shown to the user
- !cut &&
- // Only aggregate failures
- !isSuccess &&
- // We only stomp over the given aggregation with shortParserMsg if the range
- // of the failed parse surrounds `traceIndex`. For parses that occur
- // completely before or after the `traceIndex`, the actual parse doesn't
- // contribute anything to the aggregation.
- startIndex <= traceIndex &&
- traceIndex <= index
+ // There are two cases when aggregating: either we stomp over the entire
+ // existing `aggregateMsgs` with `newshortMsg`, or we preserve it
+ // (with possible additions) with `newAggregateMsgs`.
+ aggregateMsgs =
+ if (forceAggregate) newAggregateMsgs
+ // We only replace the aggregate Msgs if:
+ //
+ // 1. We are not currently past a cut; if we are past a cut, there is no
+ // further backtracking and so the error aggregate that has occurred
+ // will be the final aggregate shown to the user
+ //
+ // 2. Only replace in case of failures
+ //
+ // 3. Only stomp over the given aggregation with shortMsg if the
+ // current parser has failed and the final parse `index` (after any
+ // backtracking) is still at-or-greater-than the `traceIndex`. That
+ // ensures that any parsers which started/ended before the point of
+ // failure are not shown, since they are irrelevant
+ else if (!cut && !isSuccess && traceIndex <= index) shortMsg
+ else newAggregateMsgs
}
-
// Use telescoping methods rather than default arguments to try and minimize
// the amount of bytecode generated at the callsite.
//
@@ -327,7 +316,7 @@ final class ParsingRun[+T](val input: ParserInput,
this.asInstanceOf[ParsingRun[Nothing]]
}
- def checkForDrop() = !noDropBuffer && cut
+ def checkForDrop(): Boolean = !noDropBuffer && cut
}
object ParsingRun{
diff --git a/fastparse/src/fastparse/SharedPackageDefs.scala b/fastparse/src/fastparse/SharedPackageDefs.scala
new file mode 100644
index 00000000..98fde91d
--- /dev/null
+++ b/fastparse/src/fastparse/SharedPackageDefs.scala
@@ -0,0 +1,302 @@
+package fastparse
+import fastparse.internal._
+import fastparse.internal.{Instrument, Logger}
+
+
+trait SharedPackageDefs {
+ /**
+ * Parses the given input [[ParserInput]] using the given parser and returns
+ * a [[Parsed]] result containing the success value or failure metadata.
+ *
+ * Can take either a [[String]], an [[Iterator]] of strings or a
+ * [[fastparse.ParserInput]] object
+ *
+ * @param input the input to parse
+ *
+ * @param parser the parser method to use to parse the input
+ *
+ * @param verboseFailures enable this to show a more detailed error message
+ * if a parser fails, without needing to run
+ * `.traced.trace`. Defaults to `false` as it slows
+ * down parsing considerably
+ *
+ * @param startIndex where in the input to start parsing
+ *
+ * @param instrument Callbacks that get run before and after every named
+ * `P(...)` parser
+ *
+ *
+ */
+ def parse[T](input: ParserInputSource,
+ parser: P[_] => P[T],
+ verboseFailures: Boolean = false,
+ startIndex: Int = 0,
+ instrument: Instrument = null): Parsed[T] = {
+ Parsed.fromParsingRun(input.parseThrough(parseInputRaw[T](
+ _,
+ parser,
+ verboseFailures,
+ startIndex,
+ -1,
+ instrument
+ )))
+ }
+
+ def parseInputRaw[T](input: ParserInput,
+ parser: P[_] => P[T],
+ verboseFailures: Boolean = false,
+ startIndex: Int = 0,
+ traceIndex: Int = -1,
+ instrument: Instrument = null,
+ enableLogging: Boolean = true): ParsingRun[T] = parser(new ParsingRun(
+ input = input,
+ startIndex = startIndex,
+ originalParser = parser,
+ traceIndex = traceIndex,
+ instrument = instrument,
+ terminalMsgs = Msgs.empty,
+ aggregateMsgs = Msgs.empty,
+ shortMsg = Msgs.empty,
+ lastFailureMsg = null,
+ failureStack = List.empty,
+ isSuccess = true,
+ logDepth = if (enableLogging) 0 else -1,
+ startIndex,
+ true,
+ (),
+ verboseFailures,
+ false,
+ collection.mutable.Map.empty
+ ))
+
+ /**
+ * Shorthand for `P[Unit]`
+ */
+ type P0 = P[Unit]
+
+ /**
+ * Parses a string value case-insensitively
+ */
+ def IgnoreCase(s: String)(implicit ctx: P[Any]): P[Unit] = {
+ val startIndex = ctx.index
+ val res =
+ if (Util.startsWithIgnoreCase(ctx.input, s, ctx.index)) ctx.freshSuccessUnit(ctx.index + s.length)
+ else ctx.freshFailure().asInstanceOf[P[Unit]]
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(startIndex, () => Util.literalize(s))
+ res
+ }
+
+
+ /**
+ * Parser that is only successful at the end of the input. Useful to ensure
+ * your parser parses the whole file.
+ */
+ def End(implicit ctx: P[_]): P[Unit] = {
+ val startIndex = ctx.index
+ val res =
+ if (!ctx.input.isReachable(startIndex)) ctx.freshSuccessUnit()
+ else ctx.freshFailure().asInstanceOf[P[Unit]]
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(startIndex, () => "end-of-input")
+ res
+
+ }
+ /**
+ * Parser that is only successful at the start of the input.
+ */
+ def Start(implicit ctx: P[_]): P[Unit] = {
+ val startIndex = ctx.index
+ val res =
+ if (startIndex == 0) ctx.freshSuccessUnit()
+ else ctx.freshFailure().asInstanceOf[P[Unit]]
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(startIndex, () => "start-of-input")
+ res
+ }
+
+ /**
+ * Wraps a parser and ensures that none of the parsers within it leave
+ * failure traces in terminalMsgs, though unlike [[ByNameOps.opaque]]
+ * if there is a failure *within* the wrapped parser the failure's location
+ * and error message will still be shown
+ *
+ * Useful for wrapping things like whitespace, code-comment, etc. parsers
+ * which can be applied everywhere and are not useful to display to the user
+ * as part of the error message.
+ */
+ def NoTrace[T](p: => P[T])(implicit ctx: P[_]): P[T] = {
+
+ val res = p
+ if (ctx.verboseFailures) {
+ ctx.aggregateMsgs = Msgs.empty
+ ctx.shortMsg = Msgs.empty
+ }
+ res
+ }
+
+ /**
+ * No-op parser that always succeeds, consuming zero characters
+ */
+ def Pass(implicit ctx: P[_]): P[Unit] = {
+ val res = ctx.freshSuccessUnit()
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(ctx.index, Msgs.empty)
+ res
+ }
+
+ /**
+ * No-op parser that always succeeds with the given value, consuming zero
+ * characters
+ */
+ def Pass[T](v: T)(implicit ctx: P[_]): P[T] = {
+ val res = ctx.freshSuccess(v)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(ctx.index, () => "Pass")
+ res
+ }
+
+ /**
+ * No-op parser that always fails, consuming zero characters
+ */
+ def Fail(implicit ctx: P[_]): P[Nothing] = Fail("fail")
+
+ /**
+ * No-op parser with a custom error message that always fails, consuming zero characters
+ */
+ def Fail(msg: String)(implicit ctx: P[_]): P[Nothing] = {
+ val res = ctx.freshFailure()
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(ctx.index, () => msg)
+ res
+ }
+ /**
+ * Parser that always succeeds and returns the current index into the parsed
+ * input. Useful for e.g. capturing source locations so when downstream
+ * validation raises errors you can tell the user where in the input the
+ * error originated from
+ */
+ def Index(implicit ctx: P[_]): P[Int] = {
+ val res = ctx.freshSuccess(ctx.index)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(ctx.index, () => "Index")
+ res
+ }
+
+ /**
+ * Parses a single character, any character, as long as there is at least
+ * one character for it to parse (i.e. the input isn't at its end)
+ */
+ def AnyChar(implicit ctx: P[_]): P[Unit] = {
+ val startIndex = ctx.index
+ val res =
+ if (!ctx.input.isReachable(ctx.index)) ctx.freshFailure().asInstanceOf[P[Unit]]
+ else ctx.freshSuccessUnit(ctx.index + 1)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(startIndex, () => "any-char")
+ res
+ }
+
+ /**
+ * Like [[AnyChar]], but returns the single character it parses. Useful
+ * together with [[EagerOps.flatMapX]] to provide one-character-lookahead
+ * style parsing: [[SingleChar]] consumes the single character, and then
+ * [[EagerOps.flatMapX]] can `match` on that single character and decide
+ * which downstream parser you wish to invoke
+ */
+ def SingleChar(implicit ctx: P[_]): P[Char] = {
+ val startIndex = ctx.index
+ val res =
+ if (!ctx.input.isReachable(ctx.index)) ctx.freshFailure().asInstanceOf[P[Char]]
+ else ctx.freshSuccess(ctx.input(ctx.index), ctx.index + 1)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(startIndex, () => "any-char")
+ res
+ }
+
+ /**
+ * Allows backtracking regardless of whether cuts happen within the wrapped
+ * parser; this is useful for re-using an existing parser with cuts within
+ * it, in other parts of your grammar where backtracking is necessary and
+ * unavoidable.
+ */
+ def NoCut[T](parse: => P[T])(implicit ctx: P[_]): P[T] = {
+ val cut = ctx.cut
+ val oldNoCut = ctx.noDropBuffer
+ ctx.noDropBuffer = true
+ val res = parse
+ ctx.noDropBuffer = oldNoCut
+
+ res.cut = cut
+ res
+ }
+
+
+}
+
+object SharedPackageDefs{
+
+ def opaque[T](parse0: () => P[T], msg: String)(implicit ctx: P[Any]): P[T] = {
+ val oldIndex = ctx.index
+ val startTerminals = ctx.terminalMsgs
+ val res = parse0()
+
+ val res2 =
+ if (res.isSuccess) ctx.freshSuccess(ctx.successValue)
+ else ctx.freshFailure(oldIndex)
+
+ if (ctx.verboseFailures) {
+ ctx.terminalMsgs = startTerminals
+ ctx.reportTerminalMsg(oldIndex, () => msg)
+ }
+
+ res2.asInstanceOf[P[T]]
+ }
+
+ /** Wraps a parser to log when it succeeds and fails, and at what index.
+ * Useful for seeing what is going on within your parser. Nicely indents
+ * the logs for easy reading
+ */
+ def log[T](parse0: () => P[T])(implicit ctx: P[_], name: sourcecode.Name, logger: Logger = Logger.stdout): P[T] = {
+ if (ctx.logDepth == -1) parse0()
+ else {
+ val msg = name.value
+ val output = logger.f
+ val indent = " " * ctx.logDepth
+
+ output(s"$indent+$msg:${ctx.input.prettyIndex(ctx.index)}${if (ctx.cut) ", cut" else ""}")
+ val depth = ctx.logDepth
+ ctx.logDepth += 1
+ val startIndex = ctx.index
+ val oldverboseFailures = ctx.verboseFailures
+ ctx.verboseFailures = true
+ parse0()
+ ctx.verboseFailures = oldverboseFailures
+ ctx.logDepth = depth
+ val prettyIndex = ctx.input.prettyIndex(ctx.index)
+ val strRes = if (ctx.isSuccess) {
+ s"Success($prettyIndex${if (ctx.cut) ", cut" else ""})"
+ } else {
+ val trace = Parsed.Failure.formatStack(
+ ctx.input,
+ ctx.failureStack ++ Seq(ctx.lastFailureMsg.render -> ctx.index)
+ )
+ val trailing = ctx.input match {
+ case c: IndexedParserInput => Parsed.Failure.formatTrailing(ctx.input, startIndex)
+ case _ => ""
+ }
+ s"Failure($trace ...$trailing${if (ctx.cut) ", cut" else ""})"
+ }
+ output(s"$indent-$msg:${ctx.input.prettyIndex(startIndex)}:$strRes")
+ // output(s"$indent-$msg:${repr.prettyIndex(cfg.input, index)}:$strRes")
+ ctx.asInstanceOf[P[T]]
+ }
+ }
+
+ /** Prints the given message, nicely indented, after the wrapped parser finishes */
+ def logAfter[T](parse0: () => P[T], msg: => Any)(implicit ctx: P[_], logger: Logger = Logger.stdout): P[T] = {
+ val indent = " " * ctx.logDepth
+ val res = parse0()
+ if (ctx.logDepth != -1) logger.f(indent + msg)
+ res
+ }
+
+ /** Prints the given message, nicely indented, before the wrapped parser starts */
+ def logBefore[T](parse0: () => P[T], msg: => Any)(implicit ctx: P[_], logger: Logger = Logger.stdout): P[T] = {
+ val indent = " " * ctx.logDepth
+ if (ctx.logDepth != -1) logger.f(indent + msg)
+ val res = parse0()
+ res
+ }
+}
diff --git a/fastparse/src/fastparse/Whitespace.scala b/fastparse/src/fastparse/Whitespace.scala
index b02b188c..0028ce17 100644
--- a/fastparse/src/fastparse/Whitespace.scala
+++ b/fastparse/src/fastparse/Whitespace.scala
@@ -1,14 +1,17 @@
package fastparse
-import fastparse._
-import fastparse.internal.Util
+import fastparse.internal.{Msgs, Util}
-import scala.annotation.{Annotation, switch, tailrec}
+import scala.annotation.{switch, tailrec}
+
+trait Whitespace{
+ def apply(ctx: ParsingRun[_]): ParsingRun[Unit]
+}
/**
* No-op whitespace syntax that doesn't consume anything
*/
object NoWhitespace {
- implicit object noWhitespaceImplicit extends (ParsingRun[_] => ParsingRun[Unit]){
+ implicit object noWhitespaceImplicit extends Whitespace{
def apply(ctx: ParsingRun[_]) = ctx.freshSuccessUnit()
}
}
@@ -18,15 +21,18 @@ object NoWhitespace {
* characters.
*/
object SingleLineWhitespace {
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- var index = ctx.index
- val input = ctx.input
-
- while(
- input.isReachable(index) &&
- (input(index) match{ case ' ' | '\t' => true case _ => false})
- ) index += 1
- ctx.freshSuccessUnit(index = index)
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ var index = ctx.index
+ val input = ctx.input
+
+ while(
+ input.isReachable(index) &&
+ (input(index) match{ case ' ' | '\t' => true case _ => false})
+ ) index += 1
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(index, Msgs.empty)
+ ctx.freshSuccessUnit(index = index)
+ }
}
}
/**
@@ -34,15 +40,18 @@ object SingleLineWhitespace {
* "\r" and "\n" whitespace characters.
*/
object MultiLineWhitespace {
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- var index = ctx.index
- val input = ctx.input
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ var index = ctx.index
+ val input = ctx.input
- while(
- input.isReachable(index) &&
- (input(index) match{ case ' ' | '\t' | '\r' | '\n' => true case _ => false})
- ) index += 1
- ctx.freshSuccessUnit(index = index)
+ while(
+ input.isReachable(index) &&
+ (input(index) match{ case ' ' | '\t' | '\r' | '\n' => true case _ => false})
+ ) index += 1
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(index, Msgs.empty)
+ ctx.freshSuccessUnit(index = index)
+ }
}
}
@@ -51,24 +60,31 @@ object MultiLineWhitespace {
* programming languages such as Bash, Ruby, or Python
*/
object ScriptWhitespace{
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- val input = ctx.input
- @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
- if (!input.isReachable(current)) ctx.freshSuccessUnit(current)
- else {
- val currentChar = input(current)
- (state: @switch) match{
- case 0 =>
- (currentChar: @switch) match{
- case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
- case '#' => rec(current + 1, state = 1)
- case _ => ctx.freshSuccessUnit(current)
- }
- case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ val input = ctx.input
+ @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
+ if (!input.isReachable(current)) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ else {
+ val currentChar = input(current)
+ (state: @switch) match{
+ case 0 =>
+ (currentChar: @switch) match{
+ case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
+ case '#' => rec(current + 1, state = 1)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
+ }
}
}
+ rec(current = ctx.index, state = 0)
}
- rec(current = ctx.index, state = 0)
}
}
@@ -78,47 +94,58 @@ object ScriptWhitespace{
* in the Java programming language
*/
object JavaWhitespace{
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- val input = ctx.input
- val startIndex = ctx.index
- @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
- if (!input.isReachable(current)) {
- if (state == 0 || state == 1) ctx.freshSuccessUnit(current)
- else if(state == 2) ctx.freshSuccessUnit(current - 1)
- else {
- ctx.cut = true
- val res = ctx.freshFailure(current)
- if (ctx.verboseFailures) ctx.setMsg(startIndex, () => Util.literalize("*/"))
- res
- }
- } else {
- val currentChar = input(current)
- (state: @switch) match{
- case 0 =>
- (currentChar: @switch) match{
- case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
- case '/' => rec(current + 1, state = 2)
- case _ => ctx.freshSuccessUnit(current)
- }
- case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
- case 2 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = 1)
- case '*' => rec(current + 1, state = 3)
- case _ => ctx.freshSuccessUnit(current - 1)
- }
- case 3 => rec(current + 1, state = if (currentChar == '*') 4 else state)
- case 4 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = 0)
- case '*' => rec(current + 1, state = 4)
- case _ => rec(current + 1, state = 3)
- }
-// rec(current + 1, state = if (currentChar == '/') 0 else 3)
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ val input = ctx.input
+ @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
+ if (!input.isReachable(current)) {
+ if (state == 0 || state == 1) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ else if(state == 2) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ else {
+ ctx.cut = true
+ val res = ctx.freshFailure(current)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, () => Util.literalize("*/"))
+ res
+ }
+ } else {
+ val currentChar = input(current)
+ (state: @switch) match{
+ case 0 =>
+ (currentChar: @switch) match{
+ case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
+ case '/' => rec(current + 1, state = 2)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
+ case 2 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = 1)
+ case '*' => rec(current + 1, state = 3)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ case 3 => rec(current + 1, state = if (currentChar == '*') 4 else state)
+ case 4 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = 0)
+ case '*' => rec(current + 1, state = 4)
+ case _ => rec(current + 1, state = 3)
+ }
+ // rec(current + 1, state = if (currentChar == '/') 0 else 3)
+ }
}
}
+ rec(current = ctx.index, state = 0)
}
- rec(current = ctx.index, state = 0)
}
}
@@ -128,47 +155,59 @@ object JavaWhitespace{
* case in the Jsonnet programming language
*/
object JsonnetWhitespace{
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- val input = ctx.input
- val startIndex = ctx.index
- @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
- if (!input.isReachable(current)) {
- if (state == 0 || state == 1) ctx.freshSuccessUnit(current)
- else if(state == 2) ctx.freshSuccessUnit(current - 1)
- else {
- ctx.cut = true
- val res = ctx.freshFailure(current)
- if (ctx.verboseFailures) ctx.setMsg(startIndex, () => Util.literalize("*/"))
- res
- }
- } else {
- val currentChar = input(current)
- (state: @switch) match{
- case 0 =>
- (currentChar: @switch) match{
- case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
- case '#' => rec(current + 1, state = 1)
- case '/' => rec(current + 1, state = 2)
- case _ => ctx.freshSuccessUnit(current)
- }
- case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
- case 2 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = 1)
- case '*' => rec(current + 1, state = 3)
- case _ => ctx.freshSuccessUnit(current - 1)
- }
- case 3 => rec(current + 1, state = if (currentChar == '*') 4 else state)
- case 4 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = 0)
- case '*' => rec(current + 1, state = 4)
- case _ => rec(current + 1, state = 3)
- }
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ val input = ctx.input
+ @tailrec def rec(current: Int, state: Int): ParsingRun[Unit] = {
+ if (!input.isReachable(current)) {
+ if (state == 0 || state == 1) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ else if(state == 2) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ else {
+ ctx.cut = true
+ val res = ctx.freshFailure(current)
+
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, () => Util.literalize("*/"))
+ res
+ }
+ } else {
+ val currentChar = input(current)
+ (state: @switch) match{
+ case 0 =>
+ (currentChar: @switch) match{
+ case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state)
+ case '#' => rec(current + 1, state = 1)
+ case '/' => rec(current + 1, state = 2)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state)
+ case 2 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = 1)
+ case '*' => rec(current + 1, state = 3)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ case 3 => rec(current + 1, state = if (currentChar == '*') 4 else state)
+ case 4 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = 0)
+ case '*' => rec(current + 1, state = 4)
+ case _ => rec(current + 1, state = 3)
+ }
+ }
}
}
+ rec(current = ctx.index, state = 0)
}
- rec(current = ctx.index, state = 0)
}
}
@@ -178,55 +217,67 @@ object JsonnetWhitespace{
* in the Scala programming language
*/
object ScalaWhitespace {
- implicit val whitespace = {implicit ctx: ParsingRun[_] =>
- val input = ctx.input
- val startIndex = ctx.index
- @tailrec def rec(current: Int, state: Int, nesting: Int): ParsingRun[Unit] = {
- if (!input.isReachable(current)) {
- if (state == 0 || state == 1) ctx.freshSuccessUnit(current)
- else if(state == 2 && nesting == 0) ctx.freshSuccessUnit(current - 1)
- else {
- ctx.cut = true
- val res = ctx.freshFailure(current)
- if (ctx.verboseFailures) ctx.setMsg(startIndex, () => Util.literalize("*/"))
- res
- }
- } else {
- val currentChar = input(current)
- (state: @switch) match{
- case 0 =>
- (currentChar: @switch) match{
- case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state, 0)
- case '/' => rec(current + 1, state = 2, 0)
- case _ => ctx.freshSuccessUnit(current)
- }
- case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state, 0)
- case 2 =>
- (currentChar: @switch) match{
- case '/' =>
- if (nesting == 0) rec(current + 1, state = 1, 0)
- else rec(current + 1, state = 2, nesting)
- case '*' => rec(current + 1, state = 3, nesting + 1)
- case _ =>
- if (nesting == 0) ctx.freshSuccessUnit(current - 1)
- else rec(current + 1, state = 3, nesting)
- }
- case 3 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = 2, nesting)
- case '*' => rec(current + 1, state = 4 , nesting)
- case _ => rec(current + 1, state = state, nesting)
- }
- case 4 =>
- (currentChar: @switch) match{
- case '/' => rec(current + 1, state = if (nesting == 1) 0 else 3 , nesting - 1)
- case '*' => rec(current + 1, state = 4, nesting)
- case _ => rec(current + 1, state = 3, nesting)
- }
+ implicit object whitespace extends Whitespace {
+ def apply(ctx: ParsingRun[_]) = {
+ val input = ctx.input
+ @tailrec def rec(current: Int, state: Int, nesting: Int): ParsingRun[Unit] = {
+ if (!input.isReachable(current)) {
+ if (state == 0 || state == 1) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ else if(state == 2 && nesting == 0) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ else {
+ ctx.cut = true
+ val res = ctx.freshFailure(current)
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, () => Util.literalize("*/"))
+ res
+ }
+ } else {
+ val currentChar = input(current)
+ (state: @switch) match{
+ case 0 =>
+ (currentChar: @switch) match{
+ case ' ' | '\t' | '\n' | '\r' => rec(current + 1, state, 0)
+ case '/' => rec(current + 1, state = 2, 0)
+ case _ =>
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current)
+ }
+ case 1 => rec(current + 1, state = if (currentChar == '\n') 0 else state, 0)
+ case 2 =>
+ (currentChar: @switch) match{
+ case '/' =>
+ if (nesting == 0) rec(current + 1, state = 1, 0)
+ else rec(current + 1, state = 2, nesting)
+ case '*' => rec(current + 1, state = 3, nesting + 1)
+ case _ =>
+ if (nesting == 0) {
+ if (ctx.verboseFailures) ctx.reportTerminalMsg(current, Msgs.empty)
+ ctx.freshSuccessUnit(current - 1)
+ }
+ else rec(current + 1, state = 3, nesting)
+ }
+ case 3 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = 2, nesting)
+ case '*' => rec(current + 1, state = 4 , nesting)
+ case _ => rec(current + 1, state = state, nesting)
+ }
+ case 4 =>
+ (currentChar: @switch) match{
+ case '/' => rec(current + 1, state = if (nesting == 1) 0 else 3 , nesting - 1)
+ case '*' => rec(current + 1, state = 4, nesting)
+ case _ => rec(current + 1, state = 3, nesting)
+ }
+ }
}
}
+ rec(current = ctx.index, state = 0, nesting = 0)
}
- rec(current = ctx.index, state = 0, nesting = 0)
}
}
diff --git a/fastparse/src/fastparse/internal/RepImpls.scala b/fastparse/src/fastparse/internal/RepImpls.scala
deleted file mode 100644
index 0a7a74df..00000000
--- a/fastparse/src/fastparse/internal/RepImpls.scala
+++ /dev/null
@@ -1,445 +0,0 @@
-package fastparse.internal
-
-import fastparse.{Implicits, NoWhitespace, ParsingRun}
-
-import scala.annotation.tailrec
-import scala.reflect.macros.blackbox.Context
-import language.experimental.macros
-
-/**
- * Implementations of the various `.rep`/`.repX` overloads. The most common
- * and simple overloads are implemented as macros for performance, while the
- * more complex/general cases are left as normal methods to avoid code bloat
- * and allow the use of default/named arguments (which don't work in macros
- * due to https://github.com/scala/bug/issues/5920).
- *
- * Even the normal method overloads are manually-specialized to some extent
- * for various sorts of inputs as a best-effort attempt ot minimize branching
- * in the hot paths.
- */
-object MacroRepImpls{
- def repXMacro0[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
- (whitespace: Option[c.Tree], min: Option[c.Tree])
- (repeater: c.Tree,
- ctx: c.Tree): c.Tree = {
- import c.universe._
- val repeater1 = TermName(c.freshName("repeater"))
- val ctx1 = TermName(c.freshName("repeater"))
- val acc = TermName(c.freshName("acc"))
- val startIndex = TermName(c.freshName("startIndex"))
- val count = TermName(c.freshName("count"))
- val beforeSepIndex = TermName(c.freshName("beforeSepIndex"))
- val rec = TermName(c.freshName("rec"))
- val originalCut = TermName(c.freshName("originalCut"))
- val parsedMsg = TermName(c.freshName("parsedMsg"))
- val lastAgg = TermName(c.freshName("lastAgg"))
- val parsedAgg = TermName(c.freshName("parsedAgg"))
- val ((endSnippet, aggregateSnippet), minCut) = min match{
- case None =>
- q"""
- $ctx1.freshSuccess($repeater1.result($acc), $startIndex, $originalCut)
- """ ->
- q""" "" """ ->
- q"""false"""
- case Some(min1) =>
- q"""
- if ($count < $min1) $ctx1.augmentFailure($startIndex, $originalCut)
- else $ctx1.freshSuccess($repeater1.result($acc), $startIndex, $originalCut)
- """ ->
- q"""if($min1 == 0) "" else "(" + $min1 + ")"""" ->
- q"""$originalCut && ($count < $min1)"""
- }
-
- val wsSnippet = whitespace match{
- case None => q"$rec($beforeSepIndex, $count + 1, $parsedAgg)"
- case Some(ws) =>
- q"""
- if ($ws ne _root_.fastparse.NoWhitespace.noWhitespaceImplicit) {
- _root_.fastparse.internal.Util.consumeWhitespace($ws, $ctx1)
- }
- if (!$ctx1.isSuccess && $ctx1.cut) $ctx1.asInstanceOf[_root_.fastparse.ParsingRun[scala.Nothing]]
- else{
- $ctx1.cut = false
- $rec($beforeSepIndex, $count + 1, $parsedAgg)
- }
- """
- }
-
- q"""
- $ctx match{ case $ctx1 =>
- $repeater match {case $repeater1 =>
- var $originalCut = $ctx1.cut
- val $acc = $repeater1.initial
- @_root_.scala.annotation.tailrec
- def $rec($startIndex: _root_.scala.Int,
- $count: _root_.scala.Int,
- $lastAgg: _root_.fastparse.internal.Msgs): _root_.fastparse.P[${c.weakTypeOf[V]}] = {
- $ctx1.cut = $minCut
- ${c.prefix}.parse0()
-
- val $parsedMsg = $ctx1.shortParserMsg
- val $parsedAgg = $ctx1.failureGroupAggregate
- $originalCut |= $ctx1.cut
- if (!$ctx1.isSuccess) {
- val res =
- if ($ctx1.cut) $ctx1.asInstanceOf[_root_.fastparse.P[${c.weakTypeOf[V]}]]
- else $endSnippet
- if ($ctx1.verboseFailures) {
- $ctx1.aggregateMsg(
- $startIndex,
- () => $parsedMsg.render + s".rep" + $aggregateSnippet,
- if ($lastAgg == null) $ctx1.failureGroupAggregate
- else $ctx1.failureGroupAggregate ::: $lastAgg
- )
- }
- res
- }else {
- val $beforeSepIndex = $ctx1.index
- $repeater1.accumulate($ctx1.successValue.asInstanceOf[${c.weakTypeOf[T]}], $acc)
- $ctx1.cut = false
- $wsSnippet
- }
- }
- $rec($ctx1.index, 0, null)
- }
- }
- """
- }
-
- def repXMacro1[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
- (repeater: c.Tree,
- ctx: c.Tree): c.Tree = {
- import c.universe._
- MacroRepImpls.repXMacro0[T, V](c)(None, None)(repeater, ctx)
- }
-
- def repXMacro2[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
- (min: c.Tree)
- (repeater: c.Tree,
- ctx: c.Tree): c.Tree = {
- import c.universe._
- MacroRepImpls.repXMacro0[T, V](c)(None, Some(min))(repeater, ctx)
- }
-
- def repXMacro1ws[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
- (repeater: c.Tree,
- whitespace: c.Tree,
- ctx: c.Tree): c.Tree = {
- import c.universe._
- MacroRepImpls.repXMacro0[T, V](c)(Some(whitespace), None)(repeater, ctx)
- }
-
- def repXMacro2ws[T: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)
- (min: c.Tree)
- (repeater: c.Tree,
- whitespace: c.Tree,
- ctx: c.Tree): c.Tree = {
- import c.universe._
- MacroRepImpls.repXMacro0[T, V](c)(Some(whitespace), Some(min))(repeater, ctx)
- }
-}
-
-class RepImpls[T](val parse0: () => ParsingRun[T]) extends AnyVal{
- def repX[V](min: Int = 0,
- sep: => ParsingRun[_] = null,
- max: Int = Int.MaxValue,
- exactly: Int = -1)
- (implicit repeater: Implicits.Repeater[T, V],
- ctx: ParsingRun[Any]): ParsingRun[V] = {
-
- val acc = repeater.initial
- val actualMin = if(exactly == -1) min else exactly
- val actualMax = if(exactly == -1) max else exactly
-
- def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
- if (count < actualMin) ctx.augmentFailure(index, endCut)
- else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
- }
- @tailrec def rec(startIndex: Int,
- count: Int,
- precut: Boolean,
- outerCut: Boolean,
- sepMsg: Msgs,
- lastAgg: Msgs): ParsingRun[V] = {
- ctx.cut = precut | (count < min && outerCut)
- if (count == 0 && actualMax == 0) ctx.freshSuccess(repeater.result(acc), startIndex)
- else {
- val verboseFailures = ctx.verboseFailures
- parse0()
- val parsedMsg = ctx.shortParserMsg
- val parsedAgg = ctx.failureGroupAggregate
- val postCut = ctx.cut
- if (!ctx.isSuccess) {
- val res =
- if (postCut) ctx.asInstanceOf[ParsingRun[V]]
- else end(startIndex, startIndex, count, outerCut | postCut)
-
- if (verboseFailures) aggregateMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut)
- res
- }else {
- val beforeSepIndex = ctx.index
- repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
- val nextCount = count + 1
- if (nextCount == actualMax) {
- val res = end(beforeSepIndex, beforeSepIndex, nextCount, outerCut | postCut)
- if (verboseFailures) ctx.setMsg(startIndex, () => parsedMsg.render + ".repX" + (if(min == 0) "" else s"($min)"))
- res
- }
- else {
- ctx.cut = false
- val sep1 = sep
- val sepCut = ctx.cut
- val endCut = outerCut | postCut | sepCut
- if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
- else {
- if (ctx.isSuccess) rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortParserMsg, parsedAgg)
- else {
- val res =
- if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
- else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
- if (verboseFailures) aggregateMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
- res
- }
- }
- }
- }
- }
- }
- rec(ctx.index, 0, false, ctx.cut, null, null)
- }
-
- def repX[V](min: Int,
- sep: => ParsingRun[_])
- (implicit repeater: Implicits.Repeater[T, V],
- ctx: ParsingRun[Any]): ParsingRun[V] = {
-
- val acc = repeater.initial
-
- def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
- if (count < min) ctx.augmentFailure(index, endCut)
- else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
- }
- @tailrec def rec(startIndex: Int,
- count: Int,
- precut: Boolean,
- outerCut: Boolean,
- sepMsg: Msgs,
- lastAgg: Msgs): ParsingRun[V] = {
- ctx.cut = precut | (count < min && outerCut)
- parse0()
- val parsedMsg = ctx.shortParserMsg
- val parsedAgg = ctx.failureGroupAggregate
- val postCut = ctx.cut
- val verboseFailures = ctx.verboseFailures
- if (!ctx.isSuccess) {
- val res =
- if (postCut) ctx.asInstanceOf[ParsingRun[V]]
- else end(startIndex, startIndex, count, outerCut | postCut)
- if (verboseFailures) aggregateMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut)
- res
- }else {
- val beforeSepIndex = ctx.index
- repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
- val nextCount = count + 1
- ctx.cut = false
- val sep1 = sep
- val sepCut = ctx.cut
- val endCut = outerCut | postCut | sepCut
- if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
- else {
- if (ctx.isSuccess) rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortParserMsg, parsedAgg)
- else {
- val res =
- if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
- else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
- if (verboseFailures) aggregateMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
- res
- }
- }
- }
- }
- rec(ctx.index, 0, false, ctx.cut, null, null)
- }
- def rep[V](min: Int = 0,
- sep: => ParsingRun[_] = null,
- max: Int = Int.MaxValue,
- exactly: Int = -1)
- (implicit repeater: Implicits.Repeater[T, V],
- whitespace: ParsingRun[_] => ParsingRun[Unit],
- ctx: ParsingRun[Any]): ParsingRun[V] = {
-
- val acc = repeater.initial
- val actualMin = if(exactly == -1) min else exactly
- val actualMax = if(exactly == -1) max else exactly
-
- def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
- if (count < actualMin) ctx.augmentFailure(index, endCut)
- else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
- }
- @tailrec def rec(startIndex: Int,
- count: Int,
- precut: Boolean,
- outerCut: Boolean,
- sepMsg: Msgs,
- lastAgg: Msgs): ParsingRun[V] = {
- ctx.cut = precut | (count < min && outerCut)
- if (count == 0 && actualMax == 0) ctx.freshSuccess(repeater.result(acc), startIndex)
- else {
- parse0()
- val parsedMsg = ctx.shortParserMsg
- val parsedAgg = ctx.failureGroupAggregate
- val postCut = ctx.cut
- val verboseFailures = ctx.verboseFailures
- if (!ctx.isSuccess) {
- val res =
- if (postCut) ctx.asInstanceOf[ParsingRun[V]]
- else end(startIndex, startIndex, count, outerCut | postCut)
- if (verboseFailures) aggregateMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut)
- res
- } else {
- val beforeSepIndex = ctx.index
- repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
- val nextCount = count + 1
- if (nextCount == actualMax) {
- val res = end(beforeSepIndex, beforeSepIndex, nextCount, outerCut | postCut)
- if (verboseFailures) ctx.setMsg(startIndex, () => parsedMsg.render + ".rep" + (if(min == 0) "" else s"($min)"))
- res
- }
- else {
- if (whitespace ne NoWhitespace.noWhitespaceImplicit) Util.consumeWhitespace(whitespace, ctx)
-
- if (!ctx.isSuccess && ctx.cut) ctx.asInstanceOf[ParsingRun[Nothing]]
- else {
- ctx.cut = false
- val sep1 = sep
- val sepCut = ctx.cut
- val endCut = outerCut | postCut | sepCut
- if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
- else if (ctx.isSuccess) {
- if (whitespace ne NoWhitespace.noWhitespaceImplicit) Util.consumeWhitespace(whitespace, ctx)
- if (!ctx.isSuccess && sepCut) ctx.asInstanceOf[ParsingRun[Nothing]]
- else rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortParserMsg, parsedAgg)
- }
- else {
- val res =
- if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
- else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
-
- if (verboseFailures) aggregateMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
- res
- }
- }
- }
- }
- }
- }
- rec(ctx.index, 0, false, ctx.cut, null, null)
- }
- def rep[V](min: Int,
- sep: => ParsingRun[_])
- (implicit repeater: Implicits.Repeater[T, V],
- whitespace: ParsingRun[_] => ParsingRun[Unit],
- ctx: ParsingRun[Any]): ParsingRun[V] = {
-
- val acc = repeater.initial
-
- def end(successIndex: Int, index: Int, count: Int, endCut: Boolean) = {
- if (count < min) ctx.augmentFailure(index, endCut)
- else ctx.freshSuccess(repeater.result(acc), successIndex, endCut)
- }
- @tailrec def rec(startIndex: Int,
- count: Int,
- precut: Boolean,
- outerCut: Boolean,
- sepMsg: Msgs,
- lastAgg: Msgs): ParsingRun[V] = {
-
- ctx.cut = precut | (count < min && outerCut)
- parse0()
- val parsedMsg = ctx.shortParserMsg
- val parsedAgg = ctx.failureGroupAggregate
- val postCut = ctx.cut
- val verboseFailures = ctx.verboseFailures
- if (!ctx.isSuccess){
- val res =
- if (postCut) ctx.asInstanceOf[ParsingRun[V]]
- else end(startIndex, startIndex, count, outerCut | postCut)
- if (verboseFailures) aggregateMsgInRep(startIndex, min, ctx, sepMsg, parsedMsg, lastAgg, precut)
- res
- }else{
- val beforeSepIndex = ctx.index
- repeater.accumulate(ctx.successValue.asInstanceOf[T], acc)
- val nextCount = count + 1
- if (whitespace ne NoWhitespace.noWhitespaceImplicit) Util.consumeWhitespace(whitespace, ctx)
-
- if (!ctx.isSuccess && ctx.cut) ctx.asInstanceOf[ParsingRun[Nothing]]
- else {
- ctx.cut = false
- val sep1 = sep
- val sepCut = ctx.cut
- val endCut = outerCut | postCut | sepCut
- if (sep1 == null) rec(beforeSepIndex, nextCount, false, endCut, null, parsedAgg)
- else if (ctx.isSuccess) {
- if (whitespace ne NoWhitespace.noWhitespaceImplicit) Util.consumeWhitespace(whitespace, ctx)
-
- rec(beforeSepIndex, nextCount, sepCut, endCut, ctx.shortParserMsg, parsedAgg)
- }
- else {
- val res =
- if (sepCut) ctx.augmentFailure(beforeSepIndex, endCut)
- else end(beforeSepIndex, beforeSepIndex, nextCount, endCut)
-
- if (verboseFailures) aggregateMsgPostSep(startIndex, min, ctx, parsedMsg, parsedAgg)
- res
- }
- }
- }
- }
- rec(ctx.index, 0, false, ctx.cut, null, null)
- }
-
- private def aggregateMsgPostSep[V](startIndex: Int,
- min: Int,
- ctx: ParsingRun[Any],
- parsedMsg: Msgs,
- lastAgg: Msgs) = {
- ctx.aggregateMsg(
- startIndex,
- () => parsedMsg.render + s".rep($min)",
- // When we fail on a sep, we collect the failure aggregate of the last
- // non-sep rep body together with the failure aggregate of the sep, since
- // the last non-sep rep body continuing is one of the valid ways of
- // continuing the parse
- ctx.failureGroupAggregate ::: lastAgg
-
- )
- }
-
- private def aggregateMsgInRep[V](startIndex: Int,
- min: Int,
- ctx: ParsingRun[Any],
- sepMsg: Msgs,
- parsedMsg: Msgs,
- lastAgg: Msgs,
- precut: Boolean) = {
- if (sepMsg == null || precut) {
- ctx.aggregateMsg(
- startIndex,
- () => parsedMsg.render + s".rep($min)",
- ctx.failureGroupAggregate
- )
- } else {
- ctx.aggregateMsg(
- startIndex,
- () => parsedMsg.render + s".rep($min)",
- // When we fail on a rep body, we collect both the concatenated
- // sep and failure aggregate of the rep body that we tried (because
- // we backtrack past the sep on failure) as well as the failure
- // aggregate of the previous rep, which we could have continued
- if (lastAgg == null) Util.joinBinOp(sepMsg, parsedMsg)
- else Util.joinBinOp(sepMsg, parsedMsg) ::: lastAgg
- )
- }
- }
-
-}
diff --git a/fastparse/src/fastparse/internal/Util.scala b/fastparse/src/fastparse/internal/Util.scala
index 7115522a..034658fc 100644
--- a/fastparse/src/fastparse/internal/Util.scala
+++ b/fastparse/src/fastparse/internal/Util.scala
@@ -6,16 +6,17 @@ import scala.annotation.{switch, tailrec}
import scala.collection.mutable.ArrayBuffer
object Util {
- def parenthize(fs: Seq[Lazy[String]]) = fs.reverseIterator.map(_()).toSeq.distinct match{
+ def parenthize(fs: List[Lazy[String]]) = fs.reverseIterator.map(_()).toSeq.distinct match{
case Seq(x) => x
case xs => xs.mkString("(", " | ", ")")
}
- def joinBinOp(lhs: Msgs, rhs: Msgs) =
+ def joinBinOp(lhs: Msgs, rhs: Msgs): Msgs = {
if (lhs.value.isEmpty) rhs
else if (rhs.value.isEmpty) lhs
- else Msgs(List(new Lazy(() => lhs.render + " ~ " + rhs.render)))
+ else Msgs.fromFunction(() => lhs.render + " ~ " + rhs.render)
+ }
- def consumeWhitespace[V](whitespace: ParsingRun[_] => ParsingRun[Unit], ctx: ParsingRun[Any]) = {
+ def consumeWhitespace[V](whitespace: fastparse.Whitespace, ctx: ParsingRun[Any]) = {
val oldCapturing = ctx.noDropBuffer // completely disallow dropBuffer
ctx.noDropBuffer = true
whitespace(ctx)
@@ -46,30 +47,31 @@ object Util {
rec(0)
}
def lineNumberLookup(data: String): Array[Int] = {
- val lineStarts = new ArrayBuffer[Int]()
+ val lineStarts = ArrayBuffer[Int](0)
var i = 0
var col = 1
- var cr = false
- var prev: Character = null
+ // Stores the previous char we saw, or -1 if we just saw a \r\n or \n\r pair
+ var state: Int = 0
while (i < data.length){
val char = data(i)
- if (char == '\r') {
- if (prev != '\n' && col == 1) lineStarts.append(i)
- col = 1
- cr = true
- }else if (char == '\n') {
- if (prev != '\r' && col == 1) lineStarts.append(i)
+ if (char == '\r' && state == '\n' || char == '\n' && state == '\r'){
+ col += 1
+ state = -1
+ } else if (state == '\r' || state == '\n' || state == -1) {
+ lineStarts.append(i)
col = 1
- cr = false
+ state = char
}else{
- if (col == 1) lineStarts.append(i)
col += 1
- cr = false
+ state = char
}
- prev = char
+
i += 1
}
- if (col == 1) lineStarts.append(i)
+
+ if (state == '\r' || state == '\n' || state == -1) {
+ lineStarts.append(i)
+ }
lineStarts.toArray
}
@@ -98,6 +100,38 @@ object Util {
sb.result()
}
+
+
+ def reportParseMsgPostSep(startIndex: Int,
+ min: Int,
+ ctx: ParsingRun[Any],
+ parsedMsg: Msgs,
+ lastAgg: Msgs) = {
+ reportParseMsgInRep(startIndex, min, ctx, null, parsedMsg, lastAgg, true)
+ }
+
+ def reportParseMsgInRep(startIndex: Int,
+ min: Int,
+ ctx: ParsingRun[Any],
+ sepMsg: Msgs,
+ parsedMsg: Msgs,
+ lastAgg: Msgs,
+ precut: Boolean) = {
+
+ // When we fail on a rep body, we collect both the concatenated
+ // sep and failure aggregate of the rep body that we tried (because
+ // we backtrack past the sep on failure) as well as the failure
+ // aggregate of the previous rep, which we could have continued
+ val newAgg =
+ if (sepMsg == null || precut) ctx.aggregateMsgs
+ else Util.joinBinOp(sepMsg, parsedMsg)
+
+ ctx.reportAggregateMsg(
+ () => parsedMsg.render + ".rep" + (if (min == 0) "" else s"(${min})"),
+ if (lastAgg == null) newAgg
+ else newAgg ::: lastAgg
+ )
+ }
}
class Lazy[T](calc0: () => T){
@@ -107,7 +141,7 @@ class Lazy[T](calc0: () => T){
case class Logger(f: String => Unit)
object Logger {
- implicit val stdout = Logger(println)
+ implicit val stdout: Logger = Logger(println)
}
trait Instrument{
@@ -148,7 +182,14 @@ final class CompactTrieNode(source: TrieNode){
}
object Msgs{
val empty = Msgs(Nil)
+ implicit def fromFunction(msgToSet: () => String): Msgs = {
+ Msgs(new Lazy(() => msgToSet()):: Nil)
+ }
+ implicit def fromStrings(msgsToSet: List[String]): Msgs = {
+ Msgs(msgsToSet.map(s => new Lazy(() => s)))
+ }
}
+
case class Msgs(value: List[Lazy[String]]){
def :::(other: Msgs) = Msgs(other.value ::: value)
def ::(other: Lazy[String]) = Msgs(other :: value)
diff --git a/fastparse/test/src/fastparse/CustomWhitespaceMathTests.scala b/fastparse/test/src-2.12+/fastparse/CustomWhitespaceMathTests.scala
similarity index 72%
rename from fastparse/test/src/fastparse/CustomWhitespaceMathTests.scala
rename to fastparse/test/src-2.12+/fastparse/CustomWhitespaceMathTests.scala
index 0c419cff..92aea085 100644
--- a/fastparse/test/src/fastparse/CustomWhitespaceMathTests.scala
+++ b/fastparse/test/src-2.12+/fastparse/CustomWhitespaceMathTests.scala
@@ -4,11 +4,14 @@ import fastparse._
import utest._
/**
- * Same as MathTests, but demonstrating the use of whitespace
- */
+ * Same as MathTests, but demonstrating the use of whitespace
+ */
object CustomWhitespaceMathTests extends TestSuite{
- implicit val whitespace = { implicit ctx: ParsingRun[_] =>
- CharsWhileIn(" \t", 0)
+ implicit object whitespace extends Whitespace{
+ def apply(ctx: fastparse.ParsingRun[_]): P[Unit] = {
+ implicit val ctx0 = ctx
+ CharsWhileIn(" \t", 0)
+ }
}
def eval(tree: (Int, Seq[(String, Int)])): Int = {
val (base, ops) = tree
@@ -17,13 +20,13 @@ object CustomWhitespaceMathTests extends TestSuite{
case "*" => left * right case "/" => left / right
}}
}
- def number[_: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
- def parens[_: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
- def factor[_: P]: P[Int] = P( number | parens )
+ def number[$: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
+ def parens[$: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
+ def factor[$: P]: P[Int] = P( number | parens )
- def divMul[_: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
- def addSub[_: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
- def expr[_: P]: P[Int] = P( " ".rep ~ addSub ~ " ".rep ~ End )
+ def divMul[$: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
+ def addSub[$: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
+ def expr[$: P]: P[Int] = P( " ".rep ~ addSub ~ " ".rep ~ End )
val tests = Tests {
test("pass"){
@@ -56,4 +59,4 @@ object CustomWhitespaceMathTests extends TestSuite{
)
}
}
-}
\ No newline at end of file
+}
diff --git a/fastparse/test/src/fastparse/IteratorTests.scala b/fastparse/test/src-2.12+/fastparse/IteratorTests.scala
similarity index 75%
rename from fastparse/test/src/fastparse/IteratorTests.scala
rename to fastparse/test/src-2.12+/fastparse/IteratorTests.scala
index 1760a6fa..ae8bfbdd 100644
--- a/fastparse/test/src/fastparse/IteratorTests.scala
+++ b/fastparse/test/src-2.12+/fastparse/IteratorTests.scala
@@ -5,27 +5,27 @@ import utest._
import scala.collection.mutable
object IteratorTests extends TestSuite {
- def toInput(string: String) = {
+ class LoggedDropsParserInput(data: Iterator[String])
+ extends IteratorParserInput(data) {
- class LoggedDropsParserInput(data: Iterator[String])
- extends IteratorParserInput(data) {
+ val drops = mutable.SortedSet.empty[Int]
- val drops = mutable.SortedSet.empty[Int]
+ override def dropBuffer(index: Int): Unit = {
+ drops.add(index)
+ super.dropBuffer(index)
+ }
- override def dropBuffer(index: Int): Unit = {
- drops.add(index)
- super.dropBuffer(index)
- }
+ override def toString = s"LoggedDropsParserInput($drops)"
+ }
- override def toString = s"LoggedDropsParserInput($drops)"
- }
+ def toInput(string: String): LoggedDropsParserInput = {
new LoggedDropsParserInput(string.grouped(1))
}
val tests = Tests {
test("basic"){
import NoWhitespace._
- def p[_: P] = P( "ab" ~/ "cd".rep().! ~ "ef" | "z" )
+ def p[$: P] = P( "ab" ~/ "cd".rep().! ~ "ef" | "z" )
val Parsed.Success(res, i) = parse(Iterator("ab", "cd", "cd", "cd", "ef"), p(_))
@@ -35,7 +35,7 @@ object IteratorTests extends TestSuite {
test("readable"){
for(bufferSize <- Range(1, 15)){
import NoWhitespace._
- def p[_: P] = P("ab" ~/ "cd".rep().! ~ "ef" | "z")
+ def p[$: P] = P("ab" ~/ "cd".rep().! ~ "ef" | "z")
val Parsed.Success(res, i) = parse(
ParserInputSource.FromReadable("abcdcdcdef", bufferSize),
@@ -48,7 +48,7 @@ object IteratorTests extends TestSuite {
test("immediateCutDrop"){
import NoWhitespace._
- def p[_: P] = P( "ab" ~/ "cd" | "z" ).log
+ def p[$: P] = P( "ab" ~/ "cd" | "z" ).log
val input = toInput("abcdef")
val Parsed.Success(res, i) = parse(input, p(_))
@@ -58,12 +58,13 @@ object IteratorTests extends TestSuite {
}
test("whitespaceImmediateCutDrop"){
- import NoWhitespace._
- implicit def whitespace{implicit ctx: P[_] =>
+ import NoWhitespace.{noWhitespaceImplicit => _}
+ implicit val whitespace: Whitespace = { implicit ctx: P[_] =>
+ import NoWhitespace.noWhitespaceImplicit
" ".? ~ " ".rep
}
- def p[_: P] = P( "ab" ~/ "cd" | "z" )
+ def p[$: P] = P( "ab" ~/ "cd" | "z" )
val input = toInput("abcdef")
val Parsed.Success(res, i) = parse(input, p(_))
@@ -77,8 +78,8 @@ object IteratorTests extends TestSuite {
// Top-level sequences, which are not inside any `|`s or `.rep`s or `.?`s,
// should dropBuffer immediately after every `~`, even without any cuts
- def p[_: P] = P( "a" ~ "b" ~ "c")
- def capt[_ : P] = P( p ~ p ~ p)
+ def p[$: P] = P( "a" ~ "b" ~ "c")
+ def capt[$: P] = P( p ~ p ~ p)
val input = toInput("abcabcabc")
val Parsed.Success(res, i) = parse(input, capt(_))
println(i)
@@ -89,8 +90,8 @@ object IteratorTests extends TestSuite {
test("capturing"){
import NoWhitespace._
- def p[_: P] = P( "a" ~/ "b" ~/ "c")
- def capt[_: P] = P( p.! ~ p.! ~ p.!)
+ def p[$: P] = P( "a" ~/ "b" ~/ "c")
+ def capt[$: P] = P( p.! ~ p.! ~ p.!)
val input = toInput("abcabcabc")
val Parsed.Success(res, i) = parse(input, capt(_))
assert(
@@ -103,8 +104,8 @@ object IteratorTests extends TestSuite {
test("nocut"){
import NoWhitespace._
- def p[_: P] = P( "a" ~/ "b" ~/ "c")
- def nocut[_: P] = P((NoCut(p) ~ NoCut(p) ~/ NoCut(p)) | "abcd")
+ def p[$: P] = P( "a" ~/ "b" ~/ "c")
+ def nocut[$: P] = P((NoCut(p) ~ NoCut(p) ~/ NoCut(p)) | "abcd")
val input1 = toInput("abcabcabc")
val Parsed.Success(_, i1) = parse(input1, nocut(_))
@@ -123,9 +124,9 @@ object IteratorTests extends TestSuite {
test("either"){
import NoWhitespace._
- def p[_: P] = P( "a" ~ "b" ~ "c")
- def either[_: P] = P( (p ~ End) | ("abc" ~ p ~ End) | ("abcabc" ~ p ~ End))
- def eitherCutted[_: P] = P( (p ~ End) | ("abc" ~ p ~ End) | ("abcabc" ~/ p ~ End))
+ def p[$: P] = P( "a" ~ "b" ~ "c")
+ def either[$: P] = P( (p ~ End) | ("abc" ~ p ~ End) | ("abcabc" ~ p ~ End))
+ def eitherCutted[$: P] = P( (p ~ End) | ("abc" ~ p ~ End) | ("abcabc" ~/ p ~ End))
val input1 = toInput("abcabcabc")
val Parsed.Success(_, i1) = parse(input1, either(_))
@@ -163,9 +164,9 @@ object IteratorTests extends TestSuite {
test("rep"){
import NoWhitespace._
- def p[_: P] = P( "a" ~ "b" ~ "c")
- def rep[_: P] = P( (p.rep ~ "d") | (p.rep ~ "e") )
- def repCutted[_: P] = P( (p.rep ~ "d") | (p.rep ~/ "e") )
+ def p[$: P] = P( "a" ~ "b" ~ "c")
+ def rep[$: P] = P( (p.rep ~ "d") | (p.rep ~ "e") )
+ def repCutted[$: P] = P( (p.rep ~ "d") | (p.rep ~/ "e") )
val input1 = toInput("abcabcabcd")
val Parsed.Success(_, i1) = parse(input1, rep(_))
@@ -193,10 +194,10 @@ object IteratorTests extends TestSuite {
test("all"){
import NoWhitespace._
- def p[_: P] = P( "a" ~ "b" ~ "c" ~/ "d")
- def np[_: P] = NoCut(p)
- def pp[_: P] = P( "a" ~ "b" ~ "c" ~ End)
- def all[_: P] = P( pp | (np ~/ np) | p ~ "e" | "abded".! )
+ def p[$: P] = P( "a" ~ "b" ~ "c" ~/ "d")
+ def np[$: P] = NoCut(p)
+ def pp[$: P] = P( "a" ~ "b" ~ "c" ~ End)
+ def all[$: P] = P( pp | (np ~/ np) | p ~ "e" | "abded".! )
val input = toInput("abded")
@@ -209,13 +210,13 @@ object IteratorTests extends TestSuite {
test("whitespaceApi"){
- implicit def whitespace = { implicit ctx: P[_] =>
+ implicit def whitespace: Whitespace = { implicit ctx: P[_] =>
" ".? ~~/ " ".repX
}
- def a[_: P] = P( "aaa" )
- def b[_: P] = P( "bbb" )
- def ab[_: P] = P( a ~ b.? ~~ " " ~~ "ccc" )
+ def a[$: P] = P( "aaa" )
+ def b[$: P] = P( "bbb" )
+ def ab[$: P] = P( a ~ b.? ~~ " " ~~ "ccc" )
val input1 = toInput("aaa bbb ccc")
val Parsed.Success(_, i1) = parse(input1, ab(_))
@@ -236,12 +237,13 @@ object IteratorTests extends TestSuite {
val input3 = toInput("aaa ccc")
// this shows behavior of whitespaceApi which requires quite tricky dropBuffer calls
// it totally ignores first ~ and produces error in the second ~~
- assert(parse(input3, ab(_)).isInstanceOf[Parsed.Failure])
+ val parsed3 = parse(input3, ab(_))
+ assert(parsed3.isInstanceOf[Parsed.Failure])
}
test("zeroDrops"){
import NoWhitespace._
- def p[_: P] = P(
+ def p[$: P] = P(
(("big, " ~ ("another, " ~ ("X".? ~/ "Y".?)) | "small, ") ~ "end") | "other"
)
val input = toInput("big, another, end")
@@ -260,11 +262,12 @@ object IteratorTests extends TestSuite {
test("traceFailure"){
import NoWhitespace._
- def p[_: P] = P("[" ~ "]")
+ def p[$: P] = P("[" ~ "]")
- parse("[ ]", p(_)).asInstanceOf[Parsed.Failure].extra.traced
+ // fails under native if run inside the intercept - utest bug?
+ val t = scala.util.Try{ parse(Iterator("[", " ", "]"), p(_)).asInstanceOf[Parsed.Failure].extra.traced }
val e = intercept[RuntimeException] {
- parse(Iterator("[", " ", "]"), p(_)).asInstanceOf[Parsed.Failure].extra.traced
+ t.get
}
assert(e.getMessage.contains("Cannot perform `.traced` on an `fastparse.IteratorParserInput`"))
}
diff --git a/fastparse/test/src/fastparse/ExampleTests.scala b/fastparse/test/src/fastparse/ExampleTests.scala
index d2f2a763..ab17adb9 100644
--- a/fastparse/test/src/fastparse/ExampleTests.scala
+++ b/fastparse/test/src/fastparse/ExampleTests.scala
@@ -3,19 +3,23 @@ package test.fastparse
import utest._
import fastparse._
import fastparse.internal.Logger
+
+import scala.annotation.nowarn
/**
* Demonstrates simultaneously parsing and
* evaluating simple arithmetic expressions
*/
+@nowarn("msg=comparing values of types Unit and Unit using `==` will always yield true")
object ExampleTests extends TestSuite{
import fastparse.NoWhitespace._
val tests = Tests{
test("basic"){
test("simple"){
- import fastparse._, NoWhitespace._
- def parseA[_: P] = P("a")
+ import fastparse._
+ def parseA[$: P] = P("a")
val Parsed.Success(value, successIndex) = parse("a", parseA(_))
+
assert(value == (), successIndex == 1)
val f @ Parsed.Failure(label, index, extra) = parse("b", parseA(_))
@@ -28,8 +32,8 @@ object ExampleTests extends TestSuite{
test("failures"){
import fastparse._, NoWhitespace._
- def parseEither[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseEither.? ~ "c" )
+ def parseEither[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseEither.? ~ "c" )
val f @ Parsed.Failure(failureString, index, extra) = parse("d", parseA(_))
assert(
@@ -53,6 +57,7 @@ object ExampleTests extends TestSuite{
// aggregateMsg and longAggregateMsg record all parsers
// failing at the position, "a" | "b" | "c",
+
assert(
trace.aggregateMsg == """Expected (parseEither | "c"):1:1, found "d"""",
trace.longAggregateMsg == """Expected parseA:1:1 / (parseEither | "c"):1:1, found "d""""
@@ -60,7 +65,7 @@ object ExampleTests extends TestSuite{
}
test("sequence"){
- def ab[_: P] = P( "a" ~ "b" )
+ def ab[$: P] = P( "a" ~ "b" )
val Parsed.Success(_, 2) = parse("ab", ab(_))
@@ -68,21 +73,21 @@ object ExampleTests extends TestSuite{
}
test("repeat"){
- def ab[_: P] = P( "a".rep ~ "b" )
+ def ab[$: P] = P( "a".rep ~ "b" )
val Parsed.Success(_, 8) = parse("aaaaaaab", ab(_))
val Parsed.Success(_, 4) = parse("aaaba", ab(_))
- def abc[_: P] = P( "a".rep(sep="b") ~ "c")
+ def abc[$: P] = P( "a".rep(sep="b") ~ "c")
val Parsed.Success(_, 8) = parse("abababac", abc(_))
val Parsed.Failure(_, 3, _) = parse("abaabac", abc(_))
- def ab4[_: P] = P( "a".rep(min=2, max=4, sep="b") )
+ def ab4[$: P] = P( "a".rep(min=2, max=4, sep="b") )
val Parsed.Success(_, 7) = parse("ababababababa", ab4(_))
- def ab2exactly[_: P] = P( "ab".rep(exactly=2) )
+ def ab2exactly[$: P] = P( "ab".rep(exactly=2) )
val Parsed.Success(_, 4) = parse("abab", ab2exactly(_))
- def ab4c[_: P] = P ( "a".rep(min=2, max=4, sep="b") ~ "c" )
+ def ab4c[$: P] = P ( "a".rep(min=2, max=4, sep="b") ~ "c" )
val Parsed.Failure(_, 1, _) = parse("ac", ab4c(_))
val Parsed.Success(_, 4) = parse("abac", ab4c(_))
val Parsed.Success(_, 8) = parse("abababac", ab4c(_))
@@ -90,14 +95,14 @@ object ExampleTests extends TestSuite{
}
test("option"){
- def option[_: P] = P( "c".? ~ "a".rep(sep="b").! ~ End)
+ def option[$: P] = P( "c".? ~ "a".rep(sep="b").! ~ End)
val Parsed.Success("aba", 3) = parse("aba", option(_))
val Parsed.Success("aba", 4) = parse("caba", option(_))
}
test("either"){
- def either[_: P] = P( "a".rep ~ ("b" | "c" | "d") ~ End)
+ def either[$: P] = P( "a".rep ~ ("b" | "c" | "d") ~ End)
val Parsed.Success(_, 6) = parse("aaaaab", either(_))
val f @ Parsed.Failure(_, 5, _) = parse("aaaaae", either(_))
@@ -109,15 +114,15 @@ object ExampleTests extends TestSuite{
}
test("end"){
- def noEnd[_: P] = P( "a".rep ~ "b")
- def withEnd[_: P] = P( "a".rep ~ "b" ~ End)
+ def noEnd[$: P] = P( "a".rep ~ "b")
+ def withEnd[$: P] = P( "a".rep ~ "b" ~ End)
val Parsed.Success(_, 4) = parse("aaaba", noEnd(_))
val Parsed.Failure(_, 4, _) = parse("aaaba", withEnd(_))
}
test("start"){
- def ab[_: P] = P( (("a" | Start) ~ "b").rep ~ End).!
+ def ab[$: P] = P( (("a" | Start) ~ "b").rep ~ End).!
val Parsed.Success("abab", 4) = parse("abab", ab(_))
val Parsed.Success("babab", 5) = parse("babab", ab(_))
@@ -128,38 +133,42 @@ object ExampleTests extends TestSuite{
test("passfail"){
val Parsed.Success((), 0) = parse("asdad", Pass(_))
val Parsed.Failure(_, 0, _) = parse("asdad", Fail(_))
+
+ def failWithLabel[$: P] = P( Fail("custom fail msg") )
+ val Parsed.Failure(_, 0, extra) = parse("asdad", failWithLabel(_))
+ assert(extra.trace().longMsg == """Expected failWithLabel:1:1 / custom fail msg:1:1, found "asdad"""")
}
test("index"){
- def finder[_: P] = P( "hay".rep ~ Index ~ "needle" ~ "hay".rep )
+ def finder[$: P] = P( "hay".rep ~ Index ~ "needle" ~ "hay".rep )
val Parsed.Success(9, _) = parse("hayhayhayneedlehay", finder(_))
}
test("capturing"){
- def capture1[_: P] = P( "a".rep.! ~ "b" ~ End)
+ def capture1[$: P] = P( "a".rep.! ~ "b" ~ End)
val Parsed.Success("aaa", 4) = parse("aaab", capture1(_))
- def capture2[_: P] = P( "a".rep.! ~ "b".! ~ End)
+ def capture2[$: P] = P( "a".rep.! ~ "b".! ~ End)
val Parsed.Success(("aaa", "b"), 4) = parse("aaab", capture2(_))
- def capture3[_: P] = P( "a".rep.! ~ "b".! ~ "c".! ~ End)
+ def capture3[$: P] = P( "a".rep.! ~ "b".! ~ "c".! ~ End)
val Parsed.Success(("aaa", "b", "c"), 5) = parse("aaabc", capture3(_))
- def captureRep[_: P] = P( "a".!.rep ~ "b" ~ End)
+ def captureRep[$: P] = P( "a".!.rep ~ "b" ~ End)
val Parsed.Success(Seq("a", "a", "a"), 4) = parse("aaab", captureRep(_))
- def captureOpt[_: P] = P( "a".rep ~ "b".!.? ~ End)
+ def captureOpt[$: P] = P( "a".rep ~ "b".!.? ~ End)
val Parsed.Success(Some("b"), 4) = parse("aaab", captureOpt(_))
}
test("anychar"){
- def ab[_: P] = P( "'" ~ AnyChar.! ~ "'" )
+ def ab[$: P] = P( "'" ~ AnyChar.! ~ "'" )
val Parsed.Success("-", 3) = parse("'-'", ab(_))
@@ -168,15 +177,15 @@ object ExampleTests extends TestSuite{
test("lookahead"){
- def keyword[_: P] = P( ("hello" ~ &(" ")).!.rep )
+ def keyword[$: P] = P( ("hello" ~ &(" ")).!.rep )
val Parsed.Success(Seq("hello"), _) = parse("hello ", keyword(_))
- val Parsed.Success(Seq(), _) = parse("hello", keyword(_))
+ val Parsed.Success(Seq(), _) = parse("hello", keyword(_))
val Parsed.Success(Seq(), _) = parse("helloX", keyword(_))
}
test("neglookahead"){
- def keyword[_: P] = P( "hello" ~ !" " ~ AnyChar ~ "world" ).!
+ def keyword[$: P] = P( "hello" ~ !" " ~ AnyChar ~ "world" ).!
val Parsed.Success("hello-world", _) = parse("hello-world", keyword(_))
val Parsed.Success("hello_world", _) = parse("hello_world", keyword(_))
@@ -186,17 +195,25 @@ object ExampleTests extends TestSuite{
}
test("map"){
- def binary[_: P] = P( ("0" | "1" ).rep.! )
- def binaryNum[_: P] = P( binary.map(Integer.parseInt(_, 2)) )
+ def binary[$: P] = P( ("0" | "1" ).rep.! )
+ def binaryNum[$: P] = P( binary.map(Integer.parseInt(_, 2)) )
val Parsed.Success("1100", _) = parse("1100", binary(_))
val Parsed.Success(12, _) = parse("1100", binaryNum(_))
}
+ test("collect"){
+ def binary[$: P] = P( ("0" | "1" ).rep.! )
+ def binaryNum[$: P] = P( binary.collect { case v if v.size % 2 == 0 => Integer.parseInt(v, 2)} )
+
+ val Parsed.Success("1100", _) = parse("1100", binary(_))
+ val Parsed.Failure(_, _, _) = parse("11001", binaryNum(_))
+ }
+
test("flatMap"){
- def leftTag[_: P] = P( "<" ~ (!">" ~ AnyChar).rep(1).! ~ ">")
- def rightTag[_: P](s: String) = P( "" ~ s.! ~ ">" )
- def xml[_: P] = P( leftTag.flatMap(rightTag) )
+ def leftTag[$: P] = P( "<" ~ (!">" ~ AnyChar).rep(1).! ~ ">")
+ def rightTag[$: P](s: String) = P( "" ~ s.! ~ ">" )
+ def xml[$: P] = P( leftTag.flatMap(rightTag(_)) )
val Parsed.Success("a", _) = parse("", xml(_))
val Parsed.Success("abcde", _) = parse("", xml(_))
@@ -207,9 +224,9 @@ object ExampleTests extends TestSuite{
)
}
test("flatMapFor"){
- def leftTag[_: P] = P( "<" ~ (!">" ~ AnyChar).rep(1).! ~ ">" )
- def rightTag[_: P](s: String) = P( "" ~ s.! ~ ">" )
- def xml[_: P] = P(
+ def leftTag[$: P] = P( "<" ~ (!">" ~ AnyChar).rep(1).! ~ ">" )
+ def rightTag[$: P](s: String) = P( "" ~ s.! ~ ">" )
+ def xml[$: P] = P(
for{
s <- leftTag
right <- rightTag(s)
@@ -225,26 +242,27 @@ object ExampleTests extends TestSuite{
)
}
test("filter"){
- def digits[_: P] = P(CharPred(c => '0' <= c && c <= '9').rep(1).!).map(_.toInt)
- def even[_: P] = P( digits.filter(_ % 2 == 0) )
+ def digits[$: P] = P(CharPred(c => '0' <= c && c <= '9').rep(1).!).map(_.toInt)
+ def even[$: P] = P( digits.filter(_ % 2 == 0) )
val Parsed.Success(12, _) = parse("12", even(_))
val failure = parse("123", even(_)).asInstanceOf[Parsed.Failure]
+ assert(!failure.isSuccess)
}
test("opaque"){
- def digit[_: P] = CharIn("0-9")
- def letter[_: P] = CharIn("A-Z")
- def twice[T, _: P](p: => P[T]) = p ~ p
+ def digit[$: P] = CharIn("0-9")
+ def letter[$: P] = CharIn("A-Z")
+ def twice[T, $: P](p: => P[T]) = p ~ p
def errorMessage[T](p: P[_] => P[T], str: String) =
parse(str, p).asInstanceOf[Parsed.Failure].trace().longAggregateMsg
// Portuguese number plate format since 2006
- def numberPlate[_: P] = P(twice(digit) ~ "-" ~ twice(letter) ~ "-" ~ twice(digit))
+ def numberPlate[$: P] = P(twice(digit) ~ "-" ~ twice(letter) ~ "-" ~ twice(digit))
val err1 = errorMessage(numberPlate(_), "11-A1-22")
assert(err1 == """Expected numberPlate:1:1 / [A-Z]:1:5, found "1-22"""")
// Suppress implementation details from the error message
- def opaqueNumberPlate[_: P] = numberPlate.opaque("")
+ def opaqueNumberPlate[$: P] = numberPlate.opaque("")
val err2 = errorMessage(opaqueNumberPlate(_), "11-A1-22")
assert(err2 == """Expected :1:1, found "11-A1-22"""")
@@ -253,40 +271,40 @@ object ExampleTests extends TestSuite{
test("charX"){
test("charPred"){
- def cp[_: P] = P( CharPred(_.isUpper).rep.! ~ "." ~ End )
+ def cp[$: P] = P( CharPred(_.isUpper).rep.! ~ "." ~ End )
val Parsed.Success("ABC", _) = parse("ABC.", cp(_))
val Parsed.Failure(_, 2, _) = parse("ABc.", cp(_))
}
test("charIn"){
- def ci[_: P] = P( CharIn("abc", "xyz").rep.! ~ End )
+ def ci[$: P] = P( CharIn("abc", "xyz").rep.! ~ End )
val Parsed.Success("aaabbccxyz", _) = parse("aaabbccxyz", ci(_))
val Parsed.Failure(_, 7, _) = parse("aaabbccdxyz.", ci(_))
- def digits[_: P] = P( CharIn("0-9").rep.! )
+ def digits[$: P] = P( CharIn("0-9").rep.! )
val Parsed.Success("12345", _) = parse("12345abcde", digits(_))
val Parsed.Success("123", _) = parse("123abcde45", digits(_))
}
test("charsWhile"){
- def cw[_: P] = P( CharsWhile(_ != ' ').! )
+ def cw[$: P] = P( CharsWhile(_ != ' ').! )
val Parsed.Success("12345", _) = parse("12345", cw(_))
val Parsed.Success("123", _) = parse("123 45", cw(_))
}
test("charsWhileIn"){
- def cw[_: P] = P( CharsWhileIn("123456789").! )
+ def cw[$: P] = P( CharsWhileIn("123456789").! )
val Parsed.Success("12345", _) = parse("12345", cw(_))
val Parsed.Success("123", _) = parse("123 45", cw(_))
}
test("stringIn"){
- def si[_: P] = P( StringIn("cow", "cattle").!.rep(1) )
+ def si[$: P] = P( StringIn("cow", "cattle").!.rep(1) )
val Parsed.Success(Seq("cow", "cattle"), _) = parse("cowcattle", si(_))
val Parsed.Success(Seq("cow"), _) = parse("cowmoo", si(_))
@@ -296,8 +314,8 @@ object ExampleTests extends TestSuite{
test("cuts"){
test("nocut"){
- def alpha[_: P] = P( CharIn("a-z") )
- def nocut[_: P] = P( "val " ~ alpha.rep(1).! | "def " ~ alpha.rep(1).!)
+ def alpha[$: P] = P( CharIn("a-z") )
+ def nocut[$: P] = P( "val " ~ alpha.rep(1).! | "def " ~ alpha.rep(1).!)
val Parsed.Success("abcd", _) = parse("val abcd", nocut(_))
@@ -310,8 +328,8 @@ object ExampleTests extends TestSuite{
}
test("withcut"){
- def alpha[_: P] = P( CharIn("a-z") )
- def nocut[_: P] = P( "val " ~/ alpha.rep(1).! | "def " ~/ alpha.rep(1).!)
+ def alpha[$: P] = P( CharIn("a-z") )
+ def nocut[$: P] = P( "val " ~/ alpha.rep(1).! | "def " ~/ alpha.rep(1).!)
val Parsed.Success("abcd", _) = parse("val abcd", nocut(_))
@@ -324,9 +342,9 @@ object ExampleTests extends TestSuite{
}
test("repnocut"){
- def alpha[_: P] = P( CharIn("a-z") )
- def stmt[_: P] = P( "val " ~ alpha.rep(1).! ~ ";" ~ " ".rep )
- def stmts[_: P] = P( stmt.rep(1) ~ End )
+ def alpha[$: P] = P( CharIn("a-z") )
+ def stmt[$: P] = P( "val " ~ alpha.rep(1).! ~ ";" ~ " ".rep )
+ def stmts[$: P] = P( stmt.rep(1) ~ End )
val Parsed.Success(Seq("abcd"), _) = parse("val abcd;", stmts(_))
val Parsed.Success(Seq("abcd", "efg"), _) = parse("val abcd; val efg;", stmts(_))
@@ -340,9 +358,9 @@ object ExampleTests extends TestSuite{
}
test("repcut"){
- def alpha[_: P] = P( CharIn("a-z") )
- def stmt[_: P] = P( "val " ~/ alpha.rep(1).! ~ ";" ~ " ".rep )
- def stmts[_: P] = P( stmt.rep(1) ~ End )
+ def alpha[$: P] = P( CharIn("a-z") )
+ def stmt[$: P] = P( "val " ~/ alpha.rep(1).! ~ ";" ~ " ".rep )
+ def stmts[$: P] = P( stmt.rep(1) ~ End )
val Parsed.Success(Seq("abcd"), _) = parse("val abcd;", stmts(_))
val Parsed.Success(Seq("abcd", "efg"), _) = parse("val abcd; val efg;", stmts(_))
@@ -356,8 +374,8 @@ object ExampleTests extends TestSuite{
}
test("delimiternocut"){
- def digits[_: P] = P( CharIn("0-9").rep(1) )
- def tuple[_: P] = P( "(" ~ digits.!.rep(sep=",") ~ ")" )
+ def digits[$: P] = P( CharIn("0-9").rep(1) )
+ def tuple[$: P] = P( "(" ~ digits.!.rep(sep=",") ~ ")" )
val Parsed.Success(Seq("1", "23"), _) = parse("(1,23)", tuple(_))
@@ -370,8 +388,8 @@ object ExampleTests extends TestSuite{
}
test("delimitercut"){
- def digits[_: P] = P( CharIn("0-9").rep(1) )
- def tuple[_: P] = P( "(" ~ digits.!.rep(sep=","./) ~ ")" )
+ def digits[$: P] = P( CharIn("0-9").rep(1) )
+ def tuple[$: P] = P( "(" ~ digits.!.rep(sep=","./) ~ ")" )
val Parsed.Success(Seq("1", "23"), _) = parse("(1,23)", tuple(_))
@@ -385,8 +403,8 @@ object ExampleTests extends TestSuite{
}
test("endcut"){
- def digits[_: P] = P( CharIn("0-9").rep(1) )
- def tuple[_: P] = P( "(" ~ digits.!.rep(sep=","./) ~ ")" )
+ def digits[$: P] = P( CharIn("0-9").rep(1) )
+ def tuple[$: P] = P( "(" ~ digits.!.rep(sep=","./) ~ ")" )
val Parsed.Success(Seq("1", "23"), _) = parse("(1,23)", tuple(_))
@@ -399,24 +417,24 @@ object ExampleTests extends TestSuite{
}
test("composecut"){
- def digit[_: P] = P( CharIn("0-9") )
- def time1[_: P] = P( ("1".? ~ digit) ~ ":" ~/ digit ~ digit ~ ("am" | "pm") )
- def time2[_: P] = P( (("1" | "2").? ~ digit) ~ ":" ~/ digit ~ digit )
+ def digit[$: P] = P( CharIn("0-9") )
+ def time1[$: P] = P( ("1".? ~ digit) ~ ":" ~/ digit ~ digit ~ ("am" | "pm") )
+ def time2[$: P] = P( (("1" | "2").? ~ digit) ~ ":" ~/ digit ~ digit )
val Parsed.Success((), _) = parse("12:30pm", time1(_))
val Parsed.Success((), _) = parse("17:45", time2(_))
- def time[_: P] = P( time1 | time2 ).log
+ def time[$: P] = P( time1 | time2 ).log
val Parsed.Success((), _) = parse("12:30pm", time(_))
val failure = parse("17:45", time(_)).asInstanceOf[Parsed.Failure]
assert(failure.index == 5) // Expects am or pm
}
test("composenocut"){
- def digit[_: P] = P( CharIn("0-9") )
- def time1[_: P] = P( ("1".? ~ digit) ~ ":" ~/ digit ~ digit ~ ("am" | "pm") )
- def time2[_: P] = P( (("1" | "2").? ~ digit) ~ ":" ~/ digit ~ digit )
+ def digit[$: P] = P( CharIn("0-9") )
+ def time1[$: P] = P( ("1".? ~ digit) ~ ":" ~/ digit ~ digit ~ ("am" | "pm") )
+ def time2[$: P] = P( (("1" | "2").? ~ digit) ~ ":" ~/ digit ~ digit )
val Parsed.Success((), _) = parse("12:30pm", time1(_))
val Parsed.Success((), _) = parse("17:45", time2(_))
- def time[_: P] = P( NoCut(time1) | time2 )
+ def time[$: P] = P( NoCut(time1) | time2 )
val Parsed.Success((), _) = parse("12:30pm", time(_))
val Parsed.Success((), _) = parse("17:45", time(_))
}
@@ -427,10 +445,10 @@ object ExampleTests extends TestSuite{
test("original"){
object Foo{
- def plus[_: P] = P( "+" )
- def num[_: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
- def side[_: P] = P( "(" ~ expr ~ ")" | num )
- def expr[_: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}
+ def plus[$: P] = P( "+" )
+ def num[$: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
+ def side[$: P] = P( "(" ~ expr ~ ")" | num )
+ def expr[$: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}
}
check(
@@ -442,10 +460,10 @@ object ExampleTests extends TestSuite{
test("cuts"){
object Foo{
- def plus[_: P] = P( "+" )
- def num[_: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
- def side[_: P] = P( "(" ~/ expr ~ ")" | num )
- def expr[_: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}
+ def plus[$: P] = P( "+" )
+ def num[$: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
+ def side[$: P] = P( "(" ~/ expr ~ ")" | num )
+ def expr[$: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}
}
check(
parse("(1+(2+3x))+4", Foo.expr(_)),
@@ -457,8 +475,8 @@ object ExampleTests extends TestSuite{
val logged = collection.mutable.Buffer.empty[String]
implicit val logger = Logger(logged.append(_))
- def DeepFailure[_: P] = P( "C" ).log
- def Foo[_: P] = P( (DeepFailure | "A") ~ "B".!).log
+ def DeepFailure[$: P] = P( "C" ).log
+ def Foo[$: P] = P( (DeepFailure | "A") ~ "B".!).log
parse("AB", Foo(_))
@@ -479,10 +497,10 @@ object ExampleTests extends TestSuite{
implicit val logger = Logger(captured.append(_))
object Foo{
- def plus[_: P] = P( "+" )
- def num[_: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
- def side[_: P] = P( "(" ~/ expr ~ ")" | num ).log
- def expr[_: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}.log
+ def plus[$: P] = P( "+" )
+ def num[$: P] = P( CharIn("0-9").rep(1) ).!.map(_.toInt)
+ def side[$: P] = P( "(" ~/ expr ~ ")" | num ).log
+ def expr[$: P]: P[Int] = P( side ~ plus ~ side ).map{case (l, r) => l + r}.log
}
@@ -505,7 +523,7 @@ object ExampleTests extends TestSuite{
-expr:1:2:Failure(expr:1:2 / side:1:4 / ")":1:8 ..."1+(2+3x))+", cut)
-side:1:1:Failure(side:1:1 / expr:1:2 / side:1:4 / ")":1:8 ..."(1+(2+3x))", cut)
-expr:1:1:Failure(expr:1:1 / side:1:1 / expr:1:2 / side:1:4 / ")":1:8 ..."(1+(2+3x))", cut)
- """).lines.filter(_.trim != "").toSeq
+ """).linesIterator.filter(_.trim != "").toSeq
val minIndent = expected.map(_.takeWhile(_ == ' ').length).min
val expectedString = expected.map(_.drop(minIndent)).mkString("\n")
val capturedString = captured.mkString("\n")
@@ -514,10 +532,10 @@ object ExampleTests extends TestSuite{
}
test("higherorder"){
- def Indexed[_: P, T](p: => P[T]): P[(Int, T, Int)] = P( Index ~ p ~ Index )
+ def Indexed[$: P, T](p: => P[T]): P[(Int, T, Int)] = P( Index ~ p ~ Index )
- def Add[_: P] = P( Num ~ "+" ~ Num )
- def Num[_: P] = Indexed( CharsWhileIn("0-9").rep.! )
+ def Add[$: P] = P( Num ~ "+" ~ Num )
+ def Num[$: P] = Indexed( CharsWhileIn("0-9").rep.! )
val Parsed.Success((0, "1", 1, (2, "2", 3)), _) = parse("1+2", Add(_))
}
@@ -526,9 +544,9 @@ object ExampleTests extends TestSuite{
sealed trait AndOr
case object And extends AndOr
case object Or extends AndOr
- def and[_: P] = P(IgnoreCase("And")).map(_ => And)
- def or[_: P] = P(IgnoreCase("Or")).map(_ => Or)
- def andOr[_: P] = P(and | or)
+ def and[$: P] = P(IgnoreCase("And")).map(_ => And)
+ def or[$: P] = P(IgnoreCase("Or")).map(_ => Or)
+ def andOr[$: P] = P(and | or)
def check(input: String, expectedOutput: String) = {
val folded = parse(input, andOr(_)).fold(
@@ -542,5 +560,25 @@ object ExampleTests extends TestSuite{
check("oR", "Parsed: Or")
check("IllegalBooleanOperation", "Cannot parse IllegalBooleanOperation as an AndOr")
}
+ test("errorHandlingExplanation") {
+ import fastparse._, NoWhitespace._
+ def num[$: P] = P(CharIn("0-9")).log
+ def sum[$: P] = P("(" ~/ expr ~ "+" ~/ expr ~ ")").log
+ def expr[$: P]: P[_] = P(num | sum).log
+
+ val Parsed.Failure(_, _, extra) = fastparse.parse("(1+?)", expr(_))
+ val trace = extra.trace()
+ val longTerminalsMsg = trace.longTerminalsMsg
+ assert(
+ longTerminalsMsg ==
+ """Expected expr:1:1 / sum:1:1 / expr:1:4 / ([0-9] | "("):1:4, found "?)""""
+ )
+ assert(
+ trace.longAggregateMsg ==
+ """Expected expr:1:1 / sum:1:1 / expr:1:4 / (num | sum):1:4, found "?)""""
+ )
+
+ }
}
+
}
diff --git a/fastparse/test/src/fastparse/FailureTests.scala b/fastparse/test/src/fastparse/FailureTests.scala
index e05673c6..1260147d 100644
--- a/fastparse/test/src/fastparse/FailureTests.scala
+++ b/fastparse/test/src/fastparse/FailureTests.scala
@@ -17,8 +17,8 @@ object FailureTests extends TestSuite{
val terminals1 = Option(terminals).getOrElse(expected)
assert(
- trace.failure.label == label,
trace.groupAggregateString == expected,
+ trace.label == label,
trace.terminalAggregateString == terminals1
)
}
@@ -30,76 +30,76 @@ object FailureTests extends TestSuite{
val trace = f.trace(true)
assert(
- trace.terminalAggregateString == """("a" | "b" | "c")""",
- trace.groupAggregateString == """(parseB | "c")"""
+ trace.groupAggregateString == """(parseB | "c")""",
+ trace.terminalAggregateString == """("a" | "b" | "c")"""
)
}
test("either") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( (parseB | "") ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( (parseB | "") ~ "c" )
parseA(_)
}
test("option") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.? ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.? ~ "c" )
parseA(_)
}
test("rep") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.rep ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.rep ~ "c" )
parseA(_)
}
test("repApply") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.rep() ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.rep() ~ "c" )
parseA(_)
}
test("repX") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.repX ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.repX ~ "c" )
parseA(_)
}
test("repXApply") - check{
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.repX() ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.repX() ~ "c" )
parseA(_)
}
test("deep"){
test("option") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.rep(1) )
- def parseA[_: P] = P( parseB.? ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.rep(1) )
+ def parseA[$: P] = P( parseB.? ~ "c" )
parseA(_)
}
test("either") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.rep(1) )
- def parseA[_: P] = P( (parseB | "") ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.rep(1) )
+ def parseA[$: P] = P( (parseB | "") ~ "c" )
parseA(_)
}
test("rep") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.rep(1) )
- def parseA[_: P] = P( parseB.rep ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.rep(1) )
+ def parseA[$: P] = P( parseB.rep ~ "c" )
parseA(_)
}
test("repApply") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.rep(1) )
- def parseA[_: P] = P( parseB.rep() ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.rep(1) )
+ def parseA[$: P] = P( parseB.rep() ~ "c" )
parseA(_)
}
test("repX") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.repX(1) )
- def parseA[_: P] = P( parseB.repX ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.repX(1) )
+ def parseA[$: P] = P( parseB.repX ~ "c" )
parseA(_)
}
test("repXApply") - check{
- def parseC[_: P] = P( "a" | "b" )
- def parseB[_: P] = P( parseC.repX(1) )
- def parseA[_: P] = P( parseB.repX() ~ "c" )
+ def parseC[$: P] = P( "a" | "b" )
+ def parseB[$: P] = P( parseC.repX(1) )
+ def parseA[$: P] = P( parseB.repX() ~ "c" )
parseA(_)
}
}
@@ -108,8 +108,8 @@ object FailureTests extends TestSuite{
test("misc"){
import NoWhitespace._
test("sep"){
- def parseB[_: P] = P( "a" | "b" )
- def parseA[_: P] = P( parseB.rep(sep = ",") ~ "c" )
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.rep(sep = ",") ~ "c" )
val f1 @ Parsed.Failure(_, _, _) = parse("ad", parseA(_))
val trace = f1.trace()
@@ -126,10 +126,20 @@ object FailureTests extends TestSuite{
assert(trace2.groupAggregateString == """("," ~ parseB | "c")""")
f2.index
}
+ test("repTooFew"){
+ def parseB[$: P] = P( "a" | "b" )
+ def parseA[$: P] = P( parseB.rep(5) )
+ val f1 @ Parsed.Failure(_, _, _) = parse("abab", parseA(_))
+
+ val trace = f1.trace()
+
+ assert(trace.groupAggregateString == """("a" | "b")""")
+ assert(trace.terminalAggregateString == """("a" | "b")""")
+ }
test("sepCut"){
- def parseB[_: P] = P( "a" | "b" | "c" )
- def parseA[_: P] = P( parseB.rep(sep = ","./) ~ "d" )
+ def parseB[$: P] = P( "a" | "b" | "c" )
+ def parseA[$: P] = P( parseB.rep(sep = ","./) ~ "d" )
val f1 @ Parsed.Failure(_, _, _) = parse("ax", parseA(_))
val trace = f1.trace()
@@ -148,8 +158,8 @@ object FailureTests extends TestSuite{
expected = """("b" | "c")""",
label = "\"c\"",
parser = {
- def parseB[_: P] = P("a" ~ "b".?)
- def parseA[_: P] = P(parseB ~ "c")
+ def parseB[$: P] = P("a" ~ "b".?)
+ def parseA[$: P] = P(parseB ~ "c")
parseA(_)
}
)
@@ -159,7 +169,7 @@ object FailureTests extends TestSuite{
expected = "\"c\"",
label = "\"c\"",
parser = {
- def parseA[_: P] = P( "a" ~ (("." | ","./) ~ "c").rep ~ "x" )
+ def parseA[$: P] = P( "a" ~ (("." | ","./) ~ "c").rep ~ "x" )
parseA(_)
}
)
@@ -169,10 +179,10 @@ object FailureTests extends TestSuite{
expected = """("a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | "l" | "m" | "n" | "o" | "p" | "q" | "r" | "x")""",
label = "\"x\"",
parser = {
- def parseD[_: P] = P( (("m" | "n") | "o").rep )
- def parseC[_: P] = P( (("g" | "h") | "i").? )
- def parseB[_: P] = P( ("a" | "b") | "c" | "" )
- def parseA[_: P] = P(
+ def parseD[$: P] = P( (("m" | "n") | "o").rep )
+ def parseC[$: P] = P( (("g" | "h") | "i").? )
+ def parseB[$: P] = P( ("a" | "b") | "c" | "" )
+ def parseA[$: P] = P(
parseB ~ ("d" | ("e" | "f") | "") ~
parseC ~ ("j" | ("k" | "l")).? ~
parseD ~ ("p" | ("q" | "r")).rep ~
@@ -187,8 +197,8 @@ object FailureTests extends TestSuite{
label = "\"c\"",
terminals = "\"c\"",
parser = {
- def parseB[_: P] = P( "a" ~ "b".? )
- def parseA[_: P] = P( (parseB ~ Fail).? ~ "c" )
+ def parseB[$: P] = P( "a" ~ "b".? )
+ def parseA[$: P] = P( (parseB ~ Fail).? ~ "c" )
parseA(_)
}
)
@@ -199,16 +209,158 @@ object FailureTests extends TestSuite{
label = "(parseB | parseZ)",
terminals = """("c" | "d" | "x" | "y")""",
parser = {
- def parseD[_: P] = P("d")
- def parseC[_: P] = P("c")
- def parseX[_: P] = P("x")
- def parseY[_: P] = P("y")
- def parseB[_: P] = P(parseC | parseD)
- def parseZ[_: P] = P(parseX | parseY)
- def parseA[_: P] = P(parseB | parseZ)
+ def parseD[$: P] = P("d")
+ def parseC[$: P] = P("c")
+ def parseX[$: P] = P("x")
+ def parseY[$: P] = P("y")
+ def parseB[$: P] = P(parseC | parseD)
+ def parseZ[$: P] = P(parseX | parseY)
+ def parseA[$: P] = P(parseB | parseZ)
parseA(_)
}
)
+ test("repSeparatorIsNotIncludedInFailureMsgWhenCut") - checkOffset(
+ input = "ab aa",
+ expected = "\"b\"",
+ label = "\"b\"",
+ terminals = "\"b\"",
+ parser = {
+ def space[$: P] = P(" ")
+ def token[$: P] = P("a" ~/ "b")
+ def multiple[$: P] = P(token.rep(1, space))
+ multiple(_)
+ }
+ )
+ test("repSeparatorIsNotIncludedInFailureMsgWhenCutX") - checkOffset(
+ input = "ab aa",
+ expected = "\"b\"",
+ label = "\"b\"",
+ terminals = "\"b\"",
+ parser = {
+ def space[$: P] = P(" ")
+ def token[$: P] = P("a" ~/ "b")
+ def multiple[$: P] = P(token.repX(1, space))
+ multiple(_)
+ }
+ )
+ test("repSeparatorsBeforeTraceIndexDontPolluteFailureGroups") - checkOffset(
+ input = "p ii",
+ expected = "\"a\"",
+ label = "\"a\"",
+ terminals = "\"a\"",
+ parser = {
+ def space[$:P] = P( " " )
+ def items[$: P]: P[Unit] = P( "p".rep(sep = " ") ~ space ~ "i" ~ "a" )
+ items(_)
+ }
+ )
+ test("repSeparatorsBeforeTraceIndexDontPolluteFailureGroups2") - checkOffset(
+ input = "p ii",
+ expected = "\"a\"",
+ label = "\"a\"",
+ terminals = "\"a\"",
+ parser = {
+ def space[$: P] = P(" ")
+ def prep[$: P] = P("p".rep(sep = space))
+ def all[$: P] = P(prep ~ AnyChar ~ "i" ~ "a")
+ all(_)
+ }
+ )
+ test("repSeparatorsBeforeTraceIndexDontPolluteFailureGroups3") - checkOffset(
+ input = "pt x_",
+ expected = """("y" | end-of-input)""",
+ label = "end-of-input",
+ terminals = """("y" | end-of-input)""",
+ parser = {
+ def c[$: P] = P( "x".repX(1, "y") )
+ def d[$: P] = P( "p" )
+ def b[$: P] = P( (d ~ "t").repX(1, " ") )
+ def a[$: P] = P( b ~ " " ~ c ~ End )
+ a(_)
+ }
+ )
+ test("repNotEnoughForMin") - {
+ test("afterBody") - checkOffset(
+ input = "0 1 2 3 4 5 6 7",
+ expected = """" """",
+ label = """" """",
+ terminals = """" """",
+ parser = {
+ def parse[$: P] = P( CharIn("0-9").rep(10, " ") ~ End )
+ parse(_)
+ }
+ )
+ test("afterSep") - checkOffset(
+ input = "0 1 2 3 4 ",
+ expected = """[0-9]""",
+ label = """[0-9]""",
+ terminals = """[0-9]""",
+ parser = {
+ def parse[$: P] = P( CharIn("0-9").rep(10, " ") ~ End )
+ parse(_)
+ }
+ )
+ }
+
+ test("lookahead") {
+ // We do not bother showing the enclosing `&()` for positive lookahead
+ // parsers. That is because to a user debugging the parser, it doesn't
+ // matter: whether the parser is `&(foo)` or `foo`, they still need to
+ // put the same input at `traceIndex` to make the parse succeed
+ //
+ // Furthermore, for both positive and negative lookahead which are
+ // typically used in a `&(lhs) ~ rhs` or `!lhs ~ rhs`, we cannot show
+ // the `rhs` even if we wanted to! The parse will already have failed
+ // when parsing the `lhs`, and so there is no opportunity to gather
+ // the `rhs`'s parse messages for display.
+ test("positive") - checkOffset(
+ input = "7",
+ expected = """[0-6]""",
+ label = "[0-6]",
+ terminals = """[0-6]""",
+ parser = {
+ def parse[$: P] = P( &(CharIn("0-6")) ~ CharIn("4-9") ~ End )
+ parse(_)
+ }
+ )
+ // Commented out for now, until we can figure out a better story
+ // around the error reporting of negative lookaheads
+
+// test("negative") - checkOffset(
+// input = "5",
+// expected = """![0-6]""",
+// label = "![0-6]",
+// terminals = """![0-6]""",
+// parser = {
+// def parse[$: P] = P( !CharIn("0-6") ~ CharIn("4-9") ~ End)
+// parse(_)
+// }
+// )
+// test("negative2") - checkOffset(
+// input = "5",
+// expected = """!([0-4] | [5-9])""",
+// label = "!([0-4] | [5-9])",
+// terminals = """!([0-4] | [5-9])""",
+// parser = {
+// // Make sure that the failure if `[0-4]` inside the `!(...)` block
+// // does not end up in our reported terminals. The parser *wants*
+// // the wrapped parser to fail, and giving hints to make its
+// // sub-parsers succeed is counter-productive!
+// def parse[$: P] = P( !(CharIn("0-4") | CharIn("5-9")) ~ End)
+// parse(_)
+// }
+// )
+ test("negative3") - checkOffset(
+ input = "9",
+ expected = """[4-8]""",
+ label = "[4-8]",
+ terminals = """[4-8]""",
+ parser = {
+ def parse[$: P] = P( !CharIn("0-6").log("lhs") ~ CharIn("4-8").log("rhs") ~ End ).log
+ parse(_)
+ }
+ )
+ }
}
test("offset"){
@@ -364,7 +516,7 @@ object FailureTests extends TestSuite{
import NoWhitespace._
// In the case where one branch fails further in than `traceIndex`, we
// collect the partial aggregation from that branch in the
- // `failureGroupAggregate` but ignore that branch's downstream failure in
+ // `aggregateMsgs` but ignore that branch's downstream failure in
// `failureTerminalsAggregate`
def check(parser: P[_] => P[_]) = checkOffset(
@@ -375,24 +527,25 @@ object FailureTests extends TestSuite{
parser = parser
)
- 'opt - check{ implicit c => ("a" ~ ("b" ~ "c")).? ~ "a" ~/ "d" }
- 'optLeft - check{ implicit c => (("a" ~ "b") ~ "c").? ~ "a" ~ "d" }
- 'opt2 - check{ implicit c => ("a".! ~ ("b".! ~ "c".!)).? ~ "a".! ~/ "d".! }
- 'optLeft2 - check{ implicit c => (("a".! ~ "b".!) ~ "c".!).? ~ "a".! ~ "d".! }
+ test("opt") - check{ implicit c => ("a" ~ ("b" ~ "c")).? ~ "a" ~/ "d" }
+ test("optLeft") - check{ implicit c => (("a" ~ "b") ~ "c").? ~ "a" ~ "d" }
+ test("opt2") - check{ implicit c => ("a".! ~ ("b".! ~ "c".!)).? ~ "a".! ~/ "d".! }
+ test("optLeft2") - check{ implicit c => (("a".! ~ "b".!) ~ "c".!).? ~ "a".! ~ "d".! }
- 'either1 - check{ implicit c => (("a" ~ "b") ~ "c") | "a" ~/ "d" }
- 'either2 - check{ implicit c => "a" ~ ("b" ~ "c") | "a" ~/ "d" }
+ test("either1") - check{ implicit c => (("a" ~ "b") ~ "c") | "a" ~/ "d" }
+ test("either2") - check{ implicit c => "a" ~ ("b" ~ "c") | "a" ~/ "d" }
- 'either3 - check{ implicit c => ("a" ~ ("b" ~ "c") | "") ~ "a" ~/ "d" }
+ test("either3") - check{ implicit c => ("a" ~ ("b" ~ "c") | "") ~ "a" ~/ "d" }
- 'rep - check{ implicit c => ("a" ~ ("b" ~ "c")).rep ~ "a" ~/ "d" }
+ test("rep") - check{ implicit c => ("a" ~ ("b" ~ "c")).rep ~ "a" ~/ "d" }
- 'repApply - check{ implicit c => ("a" ~ ("b" ~ "c")).rep() ~ "a" ~/ "d" }
- 'repLeft - check{ implicit c => (("a" ~ "b") ~ "c").rep ~ "a" ~/ "d" }
- 'repX - check{ implicit c => ("a" ~ ("b" ~ "c")).repX ~ "a" ~/ "d" }
- 'repXLeft - check{ implicit c => (("a" ~ "b") ~ "c").repX ~ "a" ~/ "d" }
- 'repSep - check{ implicit c => ("a" ~ ("b" ~ "c")).rep(sep = Pass) ~ "a" ~/ "d" }
+ test("repApply") - check{ implicit c => ("a" ~ ("b" ~ "c")).rep() ~ "a" ~/ "d" }
+ test("repLeft") - check{ implicit c => (("a" ~ "b") ~ "c").rep ~ "a" ~/ "d" }
+ test("repX") - check{ implicit c => ("a" ~ ("b" ~ "c")).repX ~ "a" ~/ "d" }
+ test("repXLeft") - check{ implicit c => (("a" ~ "b") ~ "c").repX ~ "a" ~/ "d" }
+ test("repSep") - check{ implicit c => ("a" ~ ("b" ~ "c")).rep(sep = Pass) ~ "a" ~/ "d" }
test("repSepLeft") - check{ implicit c => (("a" ~ "b") ~ "c").rep(sep = Pass) ~ "a" ~/ "d" }
+
}
test("whitespace"){
@@ -434,5 +587,50 @@ object FailureTests extends TestSuite{
parser = { implicit c => (("a".! ~ "b".!) ~ "c".!).? ~ "a".! ~ "d".! }
)
}
+ test("whitespaceFail"){
+ import ScalaWhitespace._
+ test("noSeparator1") - checkOffset(
+ input = "a a /* */ a a /* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep }
+ )
+ test("noSeparator2") - checkOffset(
+ input = "a a /* */ a a /* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep(1) }
+ )
+ test("afterSeparator1") - checkOffset(
+ input = "a b a b /* */ a b a b a b/* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep(1, sep = "b") }
+ )
+ test("afterSeparator2") - checkOffset(
+ input = "a b a b /* */ a b a b a b/* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep(sep = "b") }
+ )
+ test("beforeSeparator1") - checkOffset(
+ input = "a b a b /* */ a b a b a /* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep(1, sep = "b") }
+ )
+ test("beforeSeparator2") - checkOffset(
+ input = "a b a b /* */ a b a b a /* a a a",
+ expected = """"*/"""",
+ terminals = "\"*/\"",
+ label = "\"*/\"",
+ parser = { implicit c => "a".rep(sep = "b") }
+ )
+ }
}
}
diff --git a/fastparse/test/src/fastparse/GnipSubSyntaxTest.scala b/fastparse/test/src/fastparse/GnipSubSyntaxTest.scala
index 3d57a244..ff334cc3 100644
--- a/fastparse/test/src/fastparse/GnipSubSyntaxTest.scala
+++ b/fastparse/test/src/fastparse/GnipSubSyntaxTest.scala
@@ -16,17 +16,17 @@ object GnipSubSyntaxTest extends TestSuite {
object GnipRuleParser {
import SingleLineWhitespace._
- def keyword[_: P] = P(CharIn("a-z")!)
- def maybeNegatedKeyword[_: P] = P((("-"?) ~~ keyword)!)
+ def keyword[$: P] = P(CharIn("a-z")!)
+ def maybeNegatedKeyword[$: P] = P((("-"?) ~~ keyword)!)
- def keywordGroupWithoutOrClause[_: P] = P((maybeNegatedKeyword | (("-"?) ~~ keywordsInParentheses))!)
- def keywordGroup[_: P] = P(orClause | keywordGroupWithoutOrClause)
+ def keywordGroupWithoutOrClause[$: P] = P((maybeNegatedKeyword | (("-"?) ~~ keywordsInParentheses))!)
+ def keywordGroup[$: P] = P(orClause | keywordGroupWithoutOrClause)
- def keywordsInParentheses[_: P] = P("(" ~ gnipKeywordPhrase ~ ")")
- def orClause[_: P] = P(!(("-" ~~ keywordGroupWithoutOrClause.rep(1)) ~ "OR") ~ keywordGroupWithoutOrClause ~ ("OR"!) ~ gnipKeywordPhrase)
- def gnipKeywordPhrase[_: P]: P[String] = P(keywordGroup.rep(1))!
+ def keywordsInParentheses[$: P] = P("(" ~ gnipKeywordPhrase ~ ")")
+ def orClause[$: P] = P(!(("-" ~~ keywordGroupWithoutOrClause.rep(1)) ~ "OR") ~ keywordGroupWithoutOrClause ~ ("OR"!) ~ gnipKeywordPhrase)
+ def gnipKeywordPhrase[$: P]: P[String] = P(keywordGroup.rep(1))!
- def parse[_: P] = P(Start ~ gnipKeywordPhrase ~ End)
+ def parse[$: P] = P(Start ~ gnipKeywordPhrase ~ End)
}
object GnipRuleValidator {
diff --git a/fastparse/test/src/fastparse/IndentationTests.scala b/fastparse/test/src/fastparse/IndentationTests.scala
index 4c63e848..002b4144 100644
--- a/fastparse/test/src/fastparse/IndentationTests.scala
+++ b/fastparse/test/src/fastparse/IndentationTests.scala
@@ -2,7 +2,6 @@ package test.fastparse
import utest._
import fastparse._
-import fastparse.internal.Util
/**
* Same as MathTests, but demonstrating the use of whitespace
@@ -22,19 +21,19 @@ object IndentationTests extends TestSuite{
* depth of indentation
*/
class Parser(indent: Int){
- def number[_: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
+ def number[$: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
- def deeper[_: P]: P[Int] = P( " ".rep(indent + 1).!.map(_.length) )
- def blockBody[_: P]: P[Seq[Int]] = "\n" ~ deeper.flatMapX(i =>
+ def deeper[$: P]: P[Int] = P( " ".rep(indent + 1).!.map(_.length) )
+ def blockBody[$: P]: P[Seq[Int]] = "\n" ~ deeper.flatMapX(i =>
new Parser(indent = i).factor.rep(1, sep = ("\n" + " " * i)./)
)
- def block[_: P]: P[Int] = P( CharIn("+\\-*/").! ~/ blockBody).map(eval)
+ def block[$: P]: P[Int] = P( CharIn("+\\-*/").! ~/ blockBody).map(eval)
- def factor[_: P]: P[Int] = P( number | block )
+ def factor[$: P]: P[Int] = P( number | block )
- def expr[_: P]: P[Int] = P( block ~ End )
+ def expr[$: P]: P[Int] = P( block ~ End )
}
- def expr[_: P] = new Parser(indent = 0).expr
+ def expr[$: P] = new Parser(indent = 0).expr
val tests = Tests {
test("pass"){
diff --git a/fastparse/test/src/fastparse/JsonTests.scala b/fastparse/test/src/fastparse/JsonTests.scala
index a1c584a6..73eb04e6 100644
--- a/fastparse/test/src/fastparse/JsonTests.scala
+++ b/fastparse/test/src/fastparse/JsonTests.scala
@@ -1,7 +1,6 @@
package test.fastparse
-import fastparse._
import fastparse._
import utest._
@@ -30,43 +29,42 @@ object Js {
case class NamedFunction[T, V](f: T => V, name: String) extends (T => V){
def apply(t: T) = f(t)
override def toString() = name
-
}
object Json{
import fastparse._, NoWhitespace._
def stringChars(c: Char) = c != '\"' && c != '\\'
- def space[_: P] = P( CharsWhileIn(" \r\n", 0) )
- def digits[_: P] = P( CharsWhileIn("0-9") )
- def exponent[_: P] = P( CharIn("eE") ~ CharIn("+\\-").? ~ digits )
- def fractional[_: P] = P( "." ~ digits )
- def integral[_: P] = P( "0" | CharIn("1-9") ~ digits.? )
+ def space[$: P] = P( CharsWhileIn(" \r\n", 0) )
+ def digits[$: P] = P( CharsWhileIn("0-9") )
+ def exponent[$: P] = P( CharIn("eE") ~ CharIn("+\\-").? ~ digits )
+ def fractional[$: P] = P( "." ~ digits )
+ def integral[$: P] = P( "0" | CharIn("1-9") ~ digits.? )
- def number[_: P] = P( CharIn("+\\-").? ~ integral ~ fractional.? ~ exponent.? ).!.map(
+ def number[$: P] = P( CharIn("+\\-").? ~ integral ~ fractional.? ~ exponent.? ).!.map(
x => Js.Num(x.toDouble)
)
- def `null`[_: P] = P( "null" ).map(_ => Js.Null)
- def `false`[_: P] = P( "false" ).map(_ => Js.False)
- def `true`[_: P] = P( "true" ).map(_ => Js.True)
+ def `null`[$: P] = P( "null" ).map(_ => Js.Null)
+ def `false`[$: P] = P( "false" ).map(_ => Js.False)
+ def `true`[$: P] = P( "true" ).map(_ => Js.True)
- def hexDigit[_: P] = P( CharIn("0-9a-fA-F") )
- def unicodeEscape[_: P] = P( "u" ~ hexDigit ~ hexDigit ~ hexDigit ~ hexDigit )
- def escape[_: P] = P( "\\" ~ (CharIn("\"/\\\\bfnrt") | unicodeEscape) )
+ def hexDigit[$: P] = P( CharIn("0-9a-fA-F") )
+ def unicodeEscape[$: P] = P( "u" ~ hexDigit ~ hexDigit ~ hexDigit ~ hexDigit )
+ def escape[$: P] = P( "\\" ~ (CharIn("\"/\\\\bfnrt") | unicodeEscape) )
- def strChars[_: P] = P( CharsWhile(stringChars) )
- def string[_: P] =
- P( space ~ "\"" ~/ (strChars | escape).rep.! ~ "\"").map(Js.Str)
+ def strChars[$: P] = P( CharsWhile(stringChars) )
+ def string[$: P] =
+ P( space ~ "\"" ~/ (strChars | escape).rep.! ~ "\"").map(Js.Str.apply)
- def array[_: P] =
+ def array[$: P] =
P( "[" ~/ jsonExpr.rep(sep=","./) ~ space ~ "]").map(Js.Arr(_:_*))
- def pair[_: P] = P( string.map(_.value) ~/ ":" ~/ jsonExpr )
+ def pair[$: P] = P( string.map(_.value) ~/ ":" ~/ jsonExpr )
- def obj[_: P] =
+ def obj[$: P] =
P( "{" ~/ pair.rep(sep=","./) ~ space ~ "}").map(Js.Obj(_:_*))
- def jsonExpr[_: P]: P[Js.Val] = P(
+ def jsonExpr[$: P]: P[Js.Val] = P(
space ~ (obj | array | string | `true` | `false` | `null` | number) ~ space
)
}
@@ -459,4 +457,4 @@ object JsonTests extends TestSuite{
)
}
}
-}
\ No newline at end of file
+}
diff --git a/fastparse/test/src/fastparse/Main.scala b/fastparse/test/src/fastparse/Main.scala
index 9f337ee9..8365cad9 100644
--- a/fastparse/test/src/fastparse/Main.scala
+++ b/fastparse/test/src/fastparse/Main.scala
@@ -3,9 +3,9 @@ package test.fastparse
object Main {
def main(args: Array[String]): Unit = {
import fastparse._, NoWhitespace._
- def iam[_: P] = P( "i am" )
- def hello[_: P] = P( "hello" )
- def combined[_: P] = P( (iam | hello).? ~ ("cow" | "world") )
+ def iam[$: P] = P( "i am" )
+ def hello[$: P] = P( "hello" )
+ def combined[$: P] = P( (iam | hello).? ~ ("cow" | "world") )
val Parsed.Failure(_, _, extra) = parse("lol", combined(_))
println(extra.trace().longAggregateMsg)
}
diff --git a/fastparse/test/src/fastparse/MathTests.scala b/fastparse/test/src/fastparse/MathTests.scala
index 6af6dac5..d90d12e4 100644
--- a/fastparse/test/src/fastparse/MathTests.scala
+++ b/fastparse/test/src/fastparse/MathTests.scala
@@ -19,13 +19,13 @@ object MathTests extends TestSuite{
}}
}
import fastparse._, NoWhitespace._
- def number[_: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
- def parens[_: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
- def factor[_: P]: P[Int] = P( number | parens )
+ def number[$: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
+ def parens[$: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
+ def factor[$: P]: P[Int] = P( number | parens )
- def divMul[_: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
- def addSub[_: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
- def expr[_: P]: P[Int] = P( addSub ~ End )
+ def divMul[$: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
+ def addSub[$: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
+ def expr[$: P]: P[Int] = P( addSub ~ End )
val tests = Tests {
test("pass"){
@@ -48,6 +48,7 @@ object MathTests extends TestSuite{
val failure = parse(input, expr(_)).asInstanceOf[Parsed.Failure]
val trace = failure.trace()
val index = failure.index
+
assert(
expectedTrace.trim == trace.longAggregateMsg.trim,
expectedTerminalTrace.trim == trace.longTerminalsMsg.trim,
diff --git a/fastparse/test/src/fastparse/ParsingTests.scala b/fastparse/test/src/fastparse/ParsingTests.scala
index 97ce0178..57254e39 100644
--- a/fastparse/test/src/fastparse/ParsingTests.scala
+++ b/fastparse/test/src/fastparse/ParsingTests.scala
@@ -5,7 +5,7 @@ import utest._
object ParsingTests extends TestSuite{
- import Parsed.{Success, Failure}
+ import Parsed.Success
def check[T](parser: P[_] => P[T], input: (String, Int), rhs: Parsed[T]) = {
val (str, index) = input
@@ -93,7 +93,7 @@ object ParsingTests extends TestSuite{
check(implicit c => ("Hello" | "Bye").rep.!, ("HelloBye", 0), Success("HelloBye", 8))
}
test("sequence"){
- def p[_: P] = "Hello".! ~ "Bye".!
+ def p[$: P] = "Hello".! ~ "Bye".!
check(implicit c => p, ("HelloBye", 0), Success(("Hello", "Bye"), 8))
check(implicit c => "Hello".! ~ "Bye".! ~ "!", ("HelloBye!", 0), Success(("Hello", "Bye"), 9))
check(implicit c => "Hello".! ~ "Bye".! ~ "!".!, ("HelloBye!", 0), Success(("Hello", "Bye", "!"), 9))
@@ -103,11 +103,19 @@ object ParsingTests extends TestSuite{
checkFail(implicit c => "Hello" ~ ("omg" | "bbq"), ("Hellookk", 0), 5)
checkFail(implicit c => "Hello" ~ ("omg" | "bbq"), ("ellookk", 0), 0)
}
+ test("fail"){
+ import fastparse._
+ def fail1[T: P] = Fail.!
+ val wat = "Shouldn't success"
+ val Parsed.Failure(_, _, _) = parse(wat, fail1(_))
+ def fail2[T: P]: P[Unit] = Fail.!
+ val Parsed.Failure(_, _, _) = parse(wat, fail2(_))
+ }
test("cut"){
test("local"){
// Make sure that cuts only apply to enclosing
test("either"){
- def parser[_: P] = P("hello" | "world" ~ "x" ~/ ("i" | "am" ~ "a") ~ "cow" | "moo")
+ def parser[$: P] = P("hello" | "world" ~ "x" ~/ ("i" | "am" ~ "a") ~ "cow" | "moo")
// Failing before the cut backtracks all the way out
val Parsed.Failure(_,0,_) = parse("worldlols", parser(_))
@@ -119,7 +127,7 @@ object ParsingTests extends TestSuite{
// Failing *after* the nested `|` block again prevents backtracking
val Parsed.Failure(_,9,_) = parse("worldxama", parser(_))
- def parser2[_: P] = P("hello" | "world" ~ "x" ~ ("i" | "am" ~/ "a" ~ "b") ~ "a" ~ "cow" | "moo")
+ def parser2[$: P] = P("hello" | "world" ~ "x" ~ ("i" | "am" ~/ "a" ~ "b") ~ "a" ~ "cow" | "moo")
// Failing before the cut backtracks all the way out
val Parsed.Failure(_,0,_) = parse("worldlols", parser2(_))
@@ -132,7 +140,7 @@ object ParsingTests extends TestSuite{
val Parsed.Failure(_,11,_) = parse("worldxamaba", parser2(_))
}
test("optional"){
- def parser[_: P] = P("world" ~ "x" ~/ ("am" ~ "a").? ~ "cow").?
+ def parser[$: P] = P("world" ~ "x" ~/ ("am" ~ "a").? ~ "cow").?
// Failing before the cut backtracks all the way out
val Parsed.Success((), 0) = parse("worldlols", parser(_))
@@ -144,7 +152,7 @@ object ParsingTests extends TestSuite{
// Failing *after* the nested `|` block again prevents backtracking
val Parsed.Failure(_,9,_) = parse("worldxama", parser(_))
- def parser2[_: P] = P("world" ~ "x" ~ ("am" ~/ "a" ~ "b").? ~ "a" ~ "cow").?
+ def parser2[$: P] = P("world" ~ "x" ~ ("am" ~/ "a" ~ "b").? ~ "a" ~ "cow").?
// Failing before the cut backtracks all the way out
val Parsed.Success((), 0) = parse("worldlols", parser2(_))
@@ -157,7 +165,7 @@ object ParsingTests extends TestSuite{
val Parsed.Failure(_,11,_) = parse("worldxamaba", parser2(_))
}
test("rep"){
- def parser[_: P] = P("world" ~ "x" ~/ ("am" ~ "a").rep ~ "cow").rep
+ def parser[$: P] = P("world" ~ "x" ~/ ("am" ~ "a").rep ~ "cow").rep
// Failing before the cut backtracks all the way out
val Parsed.Success((), 0) = parse("worldlols", parser(_))
@@ -169,7 +177,7 @@ object ParsingTests extends TestSuite{
// Failing *after* the nested `|` block again prevents backtracking
val Parsed.Failure(_,9,_) = parse("worldxama", parser(_))
- def parser2[_: P] = P("world" ~ "x" ~ ("am" ~/ "a" ~ "b").rep ~ "a" ~ "cow").rep
+ def parser2[$: P] = P("world" ~ "x" ~ ("am" ~/ "a" ~ "b").rep ~ "a" ~ "cow").rep
// Failing before the cut backtracks all the way out
val Parsed.Success((), 0) = parse("worldlols", parser2(_))
@@ -214,7 +222,7 @@ object ParsingTests extends TestSuite{
// &() disables cuts: whether it succeeds or fails, the whole point
// of &() is to backtrack and re-parse things
check(implicit c => &("Hello" ~/ "Bye") ~ "lol" | "", ("HelloBoo", 0), Success((), 0))
- def p[_: P] = P( &("Hello" ~/ "Boo") ~ "lol" | "".log("") )
+ def p[$: P] = P( &("Hello" ~/ "Boo") ~ "lol" | "".log("") )
check(implicit c => p, ("HelloBoo", 0), Success((), 0))
}
}
@@ -224,7 +232,7 @@ object ParsingTests extends TestSuite{
checkFail(implicit c => StringInIgnoreCase("abc","def","ghi"), ("bcde", 0), 0)
}
test("failureMsg"){
- def parser[_: P] = P( "hello" | "world" )
+ def parser[$: P] = P( "hello" | "world" )
val f = parse("cow", parser(_)).asInstanceOf[Parsed.Failure]
val msg = f.trace().msg
msg ==> """Expected ("hello" | "world"):1:1, found "cow" """.trim
@@ -234,11 +242,19 @@ object ParsingTests extends TestSuite{
checkWhitespaceFlatMap()
checkNonWhitespaceFlatMap()
}
+ test("opaqueTerminals"){
+ def Test[$: P] = P("A".opaque("a") ~/ "B".opaque("b") ~/ End)
+ val trace = fastparse.parse("AAB", Test(_)).asInstanceOf[Parsed.Failure].trace()
+
+ assert(trace.longAggregateMsg == """Expected Test:1:1 / b:1:2, found "AB"""")
+ assert(trace.longMsg == """Expected Test:1:1 / b:1:2, found "AB"""")
+ assert(trace.longTerminalsMsg == """Expected Test:1:1 / b:1:2, found "AB"""")
+ }
}
def checkWhitespaceFlatMap() = {
import fastparse._, SingleLineWhitespace._
- def parser[_: P] = P( CharsWhileIn("a").!.flatMap{n => "b" * n.length} ~ End )
+ def parser[$: P] = P( CharsWhileIn("a").!.flatMap{n => "b" * n.length} ~ End )
val Parsed.Success(_, _) = parse("aaa bbb", parser(_))
val Parsed.Success(_, _) = parse("aa bb", parser(_))
val Parsed.Failure(_, _, _) = parse("aaa bb", parser(_))
@@ -247,7 +263,7 @@ object ParsingTests extends TestSuite{
def checkNonWhitespaceFlatMap() = {
import fastparse._, SingleLineWhitespace._
- def parser[_: P] = P( CharsWhileIn("a").!.flatMapX{n => "b" * n.length} ~ End )
+ def parser[$: P] = P( CharsWhileIn("a").!.flatMapX{n => "b" * n.length} ~ End )
val Parsed.Success(_, _) = parse("aaabbb", parser(_))
val Parsed.Success(_, _) = parse("aabb", parser(_))
val Parsed.Failure(_, _, _) = parse("aaa bbb", parser(_))
diff --git a/fastparse/test/src/fastparse/TypeTests.scala b/fastparse/test/src/fastparse/TypeTests.scala
index 167f11ca..a8f7c704 100644
--- a/fastparse/test/src/fastparse/TypeTests.scala
+++ b/fastparse/test/src/fastparse/TypeTests.scala
@@ -3,15 +3,17 @@ package test.fastparse
import fastparse.Implicits
+import scala.annotation.nowarn
+
/**
* Make sure the type-level logic does the right thing. Doesn't
* actually need to execute; compiling is enough!
*/
object TypeTests {
class P[T]{
- def ~[V, R](other: P[V])(implicit sum: Implicits.Sequencer[T, V, R]): P[R] = new P[R]
- def rep[R](implicit rep: Implicits.Repeater[T, R]): P[R] = new P[R]
- def ?[R](implicit rep: Implicits.Optioner[T, R]): P[R] = new P[R]
+ def ~[V, R](other: P[V])(implicit @nowarn("msg=never used") sum: Implicits.Sequencer[T, V, R]): P[R] = new P[R]
+ def rep[R](implicit @nowarn("msg=never used") rep: Implicits.Repeater[T, R]): P[R] = new P[R]
+ def ?[R](implicit @nowarn("msg=never used") rep: Implicits.Optioner[T, R]): P[R] = new P[R]
}
def P[T] = new P[T]
diff --git a/fastparse/test/src/fastparse/UtilTests.scala b/fastparse/test/src/fastparse/UtilTests.scala
index 3cdb8a88..db97a1a2 100644
--- a/fastparse/test/src/fastparse/UtilTests.scala
+++ b/fastparse/test/src/fastparse/UtilTests.scala
@@ -42,5 +42,56 @@ object UtilTests extends TestSuite {
)
assert(pretties == expected)
}
+
+ test("unix"){
+ val txt = Array(
+ "def myScalaVersion = \"2.13.2\"\n",
+ "\n",
+ "//hello\n",
+ "println(doesntExis})"
+ ).mkString
+
+ val lineStarts = fastparse.internal.Util.lineNumberLookup(txt).toList
+
+ assert(lineStarts == List(0, 30, 31, 39))
+ }
+
+ test("carriageReturnOnly") {
+ val txt = Array(
+ "def myScalaVersion = \"2.13.2\"\r",
+ "\r",
+ "//hello\r",
+ "println(doesntExis})"
+ ).mkString
+
+ val lineStarts = fastparse.internal.Util.lineNumberLookup(txt).toList
+
+ assert(lineStarts == List(0, 30, 31, 39))
+ }
+
+ test("windows"){
+ val txt = Array(
+ "def myScalaVersion = \"2.13.2\"\r\n",
+ "\r\n",
+ "//hello\r\n",
+ "println(doesntExis})"
+ ).mkString
+
+ val lineStarts = fastparse.internal.Util.lineNumberLookup(txt).toList
+
+ assert(lineStarts == List(0, 31, 33, 42))
+ }
+ test("reverseWindows"){
+ val txt = Array(
+ "def myScalaVersion = \"2.13.2\"\n\r",
+ "\n\r",
+ "//hello\n\r",
+ "println(doesntExis})"
+ ).mkString
+
+ val lineStarts = fastparse.internal.Util.lineNumberLookup(txt).toList
+
+ assert(lineStarts == List(0, 31, 33, 42))
+ }
}
}
diff --git a/fastparse/test/src/fastparse/WhitespaceMathTests.scala b/fastparse/test/src/fastparse/WhitespaceMathTests.scala
index aeae827f..b8617152 100644
--- a/fastparse/test/src/fastparse/WhitespaceMathTests.scala
+++ b/fastparse/test/src/fastparse/WhitespaceMathTests.scala
@@ -15,13 +15,13 @@ object WhitespaceMathTests extends TestSuite{
case "*" => left * right case "/" => left / right
}}
}
- def number[_: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
- def parens[_: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
- def factor[_: P]: P[Int] = P( number | parens )
+ def number[$: P]: P[Int] = P( CharIn("0-9").rep(1).!.map(_.toInt) )
+ def parens[$: P]: P[Int] = P( "(" ~/ addSub ~ ")" )
+ def factor[$: P]: P[Int] = P( number | parens )
- def divMul[_: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
- def addSub[_: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
- def expr[_: P]: P[Int] = P( " ".rep ~ addSub ~ " ".rep ~ End )
+ def divMul[$: P]: P[Int] = P( factor ~ (CharIn("*/").! ~/ factor).rep ).map(eval)
+ def addSub[$: P]: P[Int] = P( divMul ~ (CharIn("+\\-").! ~/ divMul).rep ).map(eval)
+ def expr[$: P]: P[Int] = P( " ".rep ~ addSub ~ " ".rep ~ End )
val tests = Tests {
test("pass"){
@@ -56,4 +56,4 @@ object WhitespaceMathTests extends TestSuite{
}
}
-}
\ No newline at end of file
+}
diff --git a/fastparse/test/src/fastparse/WhitespaceTests.scala b/fastparse/test/src/fastparse/WhitespaceTests.scala
index d3ef2729..fd11a015 100644
--- a/fastparse/test/src/fastparse/WhitespaceTests.scala
+++ b/fastparse/test/src/fastparse/WhitespaceTests.scala
@@ -7,7 +7,8 @@ import utest._
*/
object WhitespaceTests extends TestSuite{
val tests = Tests {
- def checkCommon(p: P[Any] => P[Unit]) = {
+ def checkCommon(p0: Whitespace) = {
+ val p = p0.apply(_)
val Parsed.Success((), 0) = parse("", p)
val Parsed.Success((), 0) = parse("/", p)
val Parsed.Success((), 1) = parse(" /", p)
@@ -25,20 +26,20 @@ object WhitespaceTests extends TestSuite{
test("scala"){
checkCommon(ScalaWhitespace.whitespace)
// allow nested comments
- val Parsed.Failure(_, 11, _) = parse("/** /* /**/", ScalaWhitespace.whitespace)
- val Parsed.Success((), 8) = parse("/*/**/*/", ScalaWhitespace.whitespace)
+ val Parsed.Failure(_, 11, _) = parse("/** /* /**/", ScalaWhitespace.whitespace.apply(_))
+ val Parsed.Success((), 8) = parse("/*/**/*/", ScalaWhitespace.whitespace.apply(_))
}
test("java"){
checkCommon(JavaWhitespace.whitespace)
// no nested comments
- val Parsed.Success((), 11) = parse("/** /* /**/", JavaWhitespace.whitespace)
- val Parsed.Success((), 6) = parse("/*/**/*/", JavaWhitespace.whitespace)
+ val Parsed.Success((), 11) = parse("/** /* /**/", JavaWhitespace.whitespace.apply(_))
+ val Parsed.Success((), 6) = parse("/*/**/*/", JavaWhitespace.whitespace.apply(_))
}
test("jsonnet"){
checkCommon(JsonnetWhitespace.whitespace)
// no nested comments
- val Parsed.Success((), 11) = parse("/** /* /**/", JsonnetWhitespace.whitespace)
- val Parsed.Success((), 6) = parse("/*/**/*/", JsonnetWhitespace.whitespace)
+ val Parsed.Success((), 11) = parse("/** /* /**/", JsonnetWhitespace.whitespace.apply(_))
+ val Parsed.Success((), 6) = parse("/*/**/*/", JsonnetWhitespace.whitespace.apply(_))
}
}
diff --git a/mill b/mill
index e9690032..22777fc7 100755
--- a/mill
+++ b/mill
@@ -1,37 +1,327 @@
#!/usr/bin/env sh
-# This is a wrapper script, that automatically download mill from GitHub release pages
-# You can give the required mill version with MILL_VERSION env variable
-# If no version is given, it falls back to the value of DEFAULT_MILL_VERSION
-DEFAULT_MILL_VERSION=0.5.0
+# This is a wrapper script, that automatically selects or downloads Mill from Maven Central or GitHub release pages.
+#
+# This script determines the Mill version to use by trying these sources
+# - env-variable `MILL_VERSION`
+# - local file `.mill-version`
+# - local file `.config/mill-version`
+# - `mill-version` from YAML frontmatter of current buildfile
+# - if accessible, find the latest stable version available on Maven Central (https://repo1.maven.org/maven2)
+# - env-variable `DEFAULT_MILL_VERSION`
+#
+# If a version has the suffix '-native' a native binary will be used.
+# If a version has the suffix '-jvm' an executable jar file will be used, requiring an already installed Java runtime.
+# If no such suffix is found, the script will pick a default based on version and platform.
+#
+# Once a version was determined, it tries to use either
+# - a system-installed mill, if found and it's version matches
+# - an already downloaded version under ~/.cache/mill/download
+#
+# If no working mill version was found on the system,
+# this script downloads a binary file from Maven Central or Github Pages (this is version dependent)
+# into a cache location (~/.cache/mill/download).
+#
+# Mill Project URL: https://github.com/com-lihaoyi/mill
+# Script Version: 1.0.0-M1-21-7b6fae-DIRTY892b63e8
+#
+# If you want to improve this script, please also contribute your changes back!
+# This script was generated from: dist/scripts/src/mill.sh
+#
+# Licensed under the Apache License, Version 2.0
set -e
-if [ -z "$MILL_VERSION" ] ; then
+if [ -z "${DEFAULT_MILL_VERSION}" ] ; then
+ DEFAULT_MILL_VERSION=1.0.0-RC1
+fi
+
+
+if [ -z "${GITHUB_RELEASE_CDN}" ] ; then
+ GITHUB_RELEASE_CDN=""
+fi
+
+
+MILL_REPO_URL="https://github.com/com-lihaoyi/mill"
+
+if [ -z "${CURL_CMD}" ] ; then
+ CURL_CMD=curl
+fi
+
+# Explicit commandline argument takes precedence over all other methods
+if [ "$1" = "--mill-version" ] ; then
+ echo "The --mill-version option is no longer supported." 1>&2
+fi
+
+MILL_BUILD_SCRIPT=""
+
+if [ -f "build.mill" ] ; then
+ MILL_BUILD_SCRIPT="build.mill"
+elif [ -f "build.mill.scala" ] ; then
+ MILL_BUILD_SCRIPT="build.mill.scala"
+elif [ -f "build.sc" ] ; then
+ MILL_BUILD_SCRIPT="build.sc"
+fi
+
+# Please note that if a MILL_VERSION is already set in the environment,
+# we reuse its value and skip searching for a value.
+
+# If not already set, read .mill-version file
+if [ -z "${MILL_VERSION}" ] ; then
if [ -f ".mill-version" ] ; then
- MILL_VERSION="$(head -n 1 .mill-version 2> /dev/null)"
- elif [ -f "mill" ] && [ "$BASH_SOURCE" != "mill" ] ; then
- MILL_VERSION=$(grep -F "DEFAULT_MILL_VERSION=" "mill" | head -n 1 | cut -d= -f2)
+ MILL_VERSION="$(tr '\r' '\n' < .mill-version | head -n 1 2> /dev/null)"
+ elif [ -f ".config/mill-version" ] ; then
+ MILL_VERSION="$(tr '\r' '\n' < .config/mill-version | head -n 1 2> /dev/null)"
+ elif [ -n "${MILL_BUILD_SCRIPT}" ] ; then
+ MILL_VERSION="$(cat ${MILL_BUILD_SCRIPT} | grep '//[|] *mill-version: *' | sed 's;//| *mill-version: *;;')"
+ fi
+fi
+
+MILL_USER_CACHE_DIR="${XDG_CACHE_HOME:-${HOME}/.cache}/mill"
+
+if [ -z "${MILL_DOWNLOAD_PATH}" ] ; then
+ MILL_DOWNLOAD_PATH="${MILL_USER_CACHE_DIR}/download"
+fi
+
+# If not already set, try to fetch newest from Github
+if [ -z "${MILL_VERSION}" ] ; then
+ # TODO: try to load latest version from release page
+ echo "No mill version specified." 1>&2
+ echo "You should provide a version via a '//| mill-version: ' comment or a '.mill-version' file." 1>&2
+
+ mkdir -p "${MILL_DOWNLOAD_PATH}"
+ LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest" 2>/dev/null || (
+ # we might be on OSX or BSD which don't have -d option for touch
+ # but probably a -A [-][[hh]mm]SS
+ touch "${MILL_DOWNLOAD_PATH}/.expire_latest"; touch -A -010000 "${MILL_DOWNLOAD_PATH}/.expire_latest"
+ ) || (
+ # in case we still failed, we retry the first touch command with the intention
+ # to show the (previously suppressed) error message
+ LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest"
+ )
+
+ # POSIX shell variant of bash's -nt operator, see https://unix.stackexchange.com/a/449744/6993
+ # if [ "${MILL_DOWNLOAD_PATH}/.latest" -nt "${MILL_DOWNLOAD_PATH}/.expire_latest" ] ; then
+ if [ -n "$(find -L "${MILL_DOWNLOAD_PATH}/.latest" -prune -newer "${MILL_DOWNLOAD_PATH}/.expire_latest")" ]; then
+ # we know a current latest version
+ MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null)
+ fi
+
+ if [ -z "${MILL_VERSION}" ] ; then
+ # we don't know a current latest version
+ echo "Retrieving latest mill version ..." 1>&2
+ LANG=C ${CURL_CMD} -s -i -f -I ${MILL_REPO_URL}/releases/latest 2> /dev/null | grep --ignore-case Location: | sed s'/^.*tag\///' | tr -d '\r\n' > "${MILL_DOWNLOAD_PATH}/.latest"
+ MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null)
+ fi
+
+ if [ -z "${MILL_VERSION}" ] ; then
+ # Last resort
+ MILL_VERSION="${DEFAULT_MILL_VERSION}"
+ echo "Falling back to hardcoded mill version ${MILL_VERSION}" 1>&2
else
- MILL_VERSION=$DEFAULT_MILL_VERSION
+ echo "Using mill version ${MILL_VERSION}" 1>&2
fi
fi
-MILL_DOWNLOAD_PATH="$HOME/.mill/download"
-MILL_EXEC_PATH="${MILL_DOWNLOAD_PATH}/$MILL_VERSION"
+MILL_NATIVE_SUFFIX="-native"
+MILL_JVM_SUFFIX="-jvm"
+FULL_MILL_VERSION=$MILL_VERSION
+ARTIFACT_SUFFIX=""
+set_artifact_suffix(){
+ if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" = "Linux" ]; then
+ if [ "$(uname -m)" = "aarch64" ]; then
+ ARTIFACT_SUFFIX="-native-linux-aarch64"
+ else
+ ARTIFACT_SUFFIX="-native-linux-amd64"
+ fi
+ elif [ "$(uname)" = "Darwin" ]; then
+ if [ "$(uname -m)" = "arm64" ]; then
+ ARTIFACT_SUFFIX="-native-mac-aarch64"
+ else
+ ARTIFACT_SUFFIX="-native-mac-amd64"
+ fi
+ else
+ echo "This native mill launcher supports only Linux and macOS." 1>&2
+ exit 1
+ fi
+}
+
+case "$MILL_VERSION" in
+ *"$MILL_NATIVE_SUFFIX")
+ MILL_VERSION=${MILL_VERSION%"$MILL_NATIVE_SUFFIX"}
+ set_artifact_suffix
+ ;;
+
+ *"$MILL_JVM_SUFFIX")
+ MILL_VERSION=${MILL_VERSION%"$MILL_JVM_SUFFIX"}
+ ;;
+
+ *)
+ case "$MILL_VERSION" in
+ 0.1.*) ;;
+ 0.2.*) ;;
+ 0.3.*) ;;
+ 0.4.*) ;;
+ 0.5.*) ;;
+ 0.6.*) ;;
+ 0.7.*) ;;
+ 0.8.*) ;;
+ 0.9.*) ;;
+ 0.10.*) ;;
+ 0.11.*) ;;
+ 0.12.*) ;;
+ *)
+ set_artifact_suffix
+ esac
+ ;;
+esac
+
+MILL="${MILL_DOWNLOAD_PATH}/$MILL_VERSION$ARTIFACT_SUFFIX"
+
+try_to_use_system_mill() {
+ if [ "$(uname)" != "Linux" ]; then
+ return 0
+ fi
+
+ MILL_IN_PATH="$(command -v mill || true)"
+
+ if [ -z "${MILL_IN_PATH}" ]; then
+ return 0
+ fi
+
+ SYSTEM_MILL_FIRST_TWO_BYTES=$(head --bytes=2 "${MILL_IN_PATH}")
+ if [ "${SYSTEM_MILL_FIRST_TWO_BYTES}" = "#!" ]; then
+ # MILL_IN_PATH is (very likely) a shell script and not the mill
+ # executable, ignore it.
+ return 0
+ fi
+
+ SYSTEM_MILL_PATH=$(readlink -e "${MILL_IN_PATH}")
+ SYSTEM_MILL_SIZE=$(stat --format=%s "${SYSTEM_MILL_PATH}")
+ SYSTEM_MILL_MTIME=$(stat --format=%y "${SYSTEM_MILL_PATH}")
+
+ if [ ! -d "${MILL_USER_CACHE_DIR}" ]; then
+ mkdir -p "${MILL_USER_CACHE_DIR}"
+ fi
+
+ SYSTEM_MILL_INFO_FILE="${MILL_USER_CACHE_DIR}/system-mill-info"
+ if [ -f "${SYSTEM_MILL_INFO_FILE}" ]; then
+ parseSystemMillInfo() {
+ LINE_NUMBER="${1}"
+ # Select the line number of the SYSTEM_MILL_INFO_FILE, cut the
+ # variable definition in that line in two halves and return
+ # the value, and finally remove the quotes.
+ sed -n "${LINE_NUMBER}p" "${SYSTEM_MILL_INFO_FILE}" |\
+ cut -d= -f2 |\
+ sed 's/"\(.*\)"/\1/'
+ }
+
+ CACHED_SYSTEM_MILL_PATH=$(parseSystemMillInfo 1)
+ CACHED_SYSTEM_MILL_VERSION=$(parseSystemMillInfo 2)
+ CACHED_SYSTEM_MILL_SIZE=$(parseSystemMillInfo 3)
+ CACHED_SYSTEM_MILL_MTIME=$(parseSystemMillInfo 4)
+
+ if [ "${SYSTEM_MILL_PATH}" = "${CACHED_SYSTEM_MILL_PATH}" ] \
+ && [ "${SYSTEM_MILL_SIZE}" = "${CACHED_SYSTEM_MILL_SIZE}" ] \
+ && [ "${SYSTEM_MILL_MTIME}" = "${CACHED_SYSTEM_MILL_MTIME}" ]; then
+ if [ "${CACHED_SYSTEM_MILL_VERSION}" = "${MILL_VERSION}" ]; then
+ MILL="${SYSTEM_MILL_PATH}"
+ return 0
+ else
+ return 0
+ fi
+ fi
+ fi
+
+ SYSTEM_MILL_VERSION=$(${SYSTEM_MILL_PATH} --version | head -n1 | sed -n 's/^Mill.*version \(.*\)/\1/p')
+
+ cat < "${SYSTEM_MILL_INFO_FILE}"
+CACHED_SYSTEM_MILL_PATH="${SYSTEM_MILL_PATH}"
+CACHED_SYSTEM_MILL_VERSION="${SYSTEM_MILL_VERSION}"
+CACHED_SYSTEM_MILL_SIZE="${SYSTEM_MILL_SIZE}"
+CACHED_SYSTEM_MILL_MTIME="${SYSTEM_MILL_MTIME}"
+EOF
+
+ if [ "${SYSTEM_MILL_VERSION}" = "${MILL_VERSION}" ]; then
+ MILL="${SYSTEM_MILL_PATH}"
+ fi
+}
+try_to_use_system_mill
+
+# If not already downloaded, download it
+if [ ! -s "${MILL}" ] || [ "$MILL_TEST_DRY_RUN_LAUNCHER_SCRIPT" = "1" ] ; then
+ case $MILL_VERSION in
+ 0.0.* | 0.1.* | 0.2.* | 0.3.* | 0.4.* )
+ DOWNLOAD_SUFFIX=""
+ DOWNLOAD_FROM_MAVEN=0
+ ;;
+ 0.5.* | 0.6.* | 0.7.* | 0.8.* | 0.9.* | 0.10.* | 0.11.0-M* )
+ DOWNLOAD_SUFFIX="-assembly"
+ DOWNLOAD_FROM_MAVEN=0
+ ;;
+ *)
+ DOWNLOAD_SUFFIX="-assembly"
+ DOWNLOAD_FROM_MAVEN=1
+ ;;
+ esac
+ case $MILL_VERSION in
+ 0.12.0 | 0.12.1 | 0.12.2 | 0.12.3 | 0.12.4 | 0.12.5 | 0.12.6 | 0.12.7 | 0.12.8 | 0.12.9 | 0.12.10 | 0.12.11 )
+ DOWNLOAD_EXT="jar"
+ ;;
+ 0.12.* )
+ DOWNLOAD_EXT="exe"
+ ;;
+ 0.* )
+ DOWNLOAD_EXT="jar"
+ ;;
+ *)
+ DOWNLOAD_EXT="exe"
+ ;;
+ esac
+
+ DOWNLOAD_FILE=$(mktemp mill.XXXXXX)
+ if [ "$DOWNLOAD_FROM_MAVEN" = "1" ] ; then
+ DOWNLOAD_URL="https://repo1.maven.org/maven2/com/lihaoyi/mill-dist$%7BARTIFACT_SUFFIX%7D/$%7BMILL_VERSION%7D/mill-dist$%7BARTIFACT_SUFFIX%7D-$%7BMILL_VERSION%7D.$%7BDOWNLOAD_EXT%7D"
+ else
+ MILL_VERSION_TAG=$(echo "$MILL_VERSION" | sed -E 's/([^-]+)(-M[0-9]+)?(-.*)?/\1\2/')
+ DOWNLOAD_URL="${GITHUB_RELEASE_CDN}${MILL_REPO_URL}/releases/download/${MILL_VERSION_TAG}/${MILL_VERSION}${DOWNLOAD_SUFFIX}"
+ unset MILL_VERSION_TAG
+ fi
+
+ if [ "$MILL_TEST_DRY_RUN_LAUNCHER_SCRIPT" = "1" ] ; then
+ echo $DOWNLOAD_URL
+ echo $MILL
+ exit 0
+ fi
+ # TODO: handle command not found
+ echo "Downloading mill ${MILL_VERSION} from ${DOWNLOAD_URL} ..." 1>&2
+ ${CURL_CMD} -f -L -o "${DOWNLOAD_FILE}" "${DOWNLOAD_URL}"
+ chmod +x "${DOWNLOAD_FILE}"
+ mkdir -p "${MILL_DOWNLOAD_PATH}"
+ mv "${DOWNLOAD_FILE}" "${MILL}"
-if [ ! -x "$MILL_EXEC_PATH" ] ; then
- mkdir -p $MILL_DOWNLOAD_PATH
- DOWNLOAD_FILE=$MILL_EXEC_PATH-tmp-download
- MILL_DOWNLOAD_URL="https://github.com/lihaoyi/mill/releases/download/$%7BMILL_VERSION%%-*%7D/$MILL_VERSION-assembly"
- curl --fail -L -o "$DOWNLOAD_FILE" "$MILL_DOWNLOAD_URL"
- chmod +x "$DOWNLOAD_FILE"
- mv "$DOWNLOAD_FILE" "$MILL_EXEC_PATH"
unset DOWNLOAD_FILE
- unset MILL_DOWNLOAD_URL
+ unset DOWNLOAD_SUFFIX
+fi
+
+if [ -z "$MILL_MAIN_CLI" ] ; then
+ MILL_MAIN_CLI="${0}"
+fi
+
+MILL_FIRST_ARG=""
+if [ "$1" = "--bsp" ] || [ "$1" = "-i" ] || [ "$1" = "--interactive" ] || [ "$1" = "--no-server" ] || [ "$1" = "--no-daemon" ] || [ "$1" = "--repl" ] || [ "$1" = "--help" ] ; then
+ # Need to preserve the first position of those listed options
+ MILL_FIRST_ARG=$1
+ shift
fi
unset MILL_DOWNLOAD_PATH
+unset MILL_OLD_DOWNLOAD_PATH
+unset OLD_MILL
unset MILL_VERSION
+unset MILL_REPO_URL
-exec $MILL_EXEC_PATH "$@"
+# -D mill.main.cli is for compatibility with Mill 0.10.9 - 0.13.0-M2
+# We don't quote MILL_FIRST_ARG on purpose, so we can expand the empty value without quotes
+# shellcheck disable=SC2086
+exec "${MILL}" $MILL_FIRST_ARG -D "mill.main.cli=${MILL_MAIN_CLI}" "$@"
diff --git a/perftests/bench1/src/perftests/ScalaParse.scala b/perftests/bench1/src/perftests/ScalaParse.scala
index c14c0e71..4305e81e 100644
--- a/perftests/bench1/src/perftests/ScalaParse.scala
+++ b/perftests/bench1/src/perftests/ScalaParse.scala
@@ -4,7 +4,7 @@ import perftests.Utils
import utest._
import scala.tools.nsc.{Global, Settings}
-import scalaparse.{Scala, ScalacParser}
+import scalaparse.{Scala}
import fastparse.all._
object ScalaParse extends TestSuite{
val genJsCodeStream = getClass.getResourceAsStream("/GenJSCode.scala")
diff --git a/perftests/bench2/src/perftests/CssParse.scala b/perftests/bench2/src/perftests/CssParse.scala
index 48420efc..73ad6941 100644
--- a/perftests/bench2/src/perftests/CssParse.scala
+++ b/perftests/bench2/src/perftests/CssParse.scala
@@ -7,7 +7,7 @@ object CssParse extends TestSuite {
val bootstrapSource = scala.io.Source.fromInputStream(bootstrapStream).mkString
def bootstrapIterator(size: Int) = bootstrapSource.grouped(size)
- def parser[_: P] = cssparse.CssRulesParser.ruleList ~ End
+ def parser[$: P] = cssparse.CssRulesParser.ruleList ~ End
val tests = Tests {
test("Bootstrap"){
Utils.benchmarkAll(
diff --git a/perftests/bench2/src/perftests/Expr.scala b/perftests/bench2/src/perftests/Expr.scala
index fc4e993e..34fe8fe7 100644
--- a/perftests/bench2/src/perftests/Expr.scala
+++ b/perftests/bench2/src/perftests/Expr.scala
@@ -96,7 +96,7 @@ object Expr{
end: Option[Expr],
stride: Option[Expr]) extends Expr
case class Function(offset: Int, params: Params, body: Expr) extends Expr
- case class IfElse(offset: Int, cond: Expr, then: Expr, `else`: Option[Expr]) extends Expr
+ case class IfElse(offset: Int, cond: Expr, `then`: Expr, `else`: Option[Expr]) extends Expr
sealed trait CompSpec extends Expr
case class IfSpec(offset: Int, cond: Expr) extends CompSpec
diff --git a/perftests/bench2/src/perftests/FasterParserParser.scala b/perftests/bench2/src/perftests/FasterParserParser.scala
index 8d5b4030..473cdaba 100644
--- a/perftests/bench2/src/perftests/FasterParserParser.scala
+++ b/perftests/bench2/src/perftests/FasterParserParser.scala
@@ -32,13 +32,13 @@ object fastparseParser{
def idStartChar(c: Char) = c == '_' || ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
- def id[_: P] = P(
+ def id[$: P] = P(
CharIn("_a-zA-Z0-9") ~~
CharsWhileIn("_a-zA-Z0-9", 0)
).!.filter(s => !keywords.contains(s))
- def break[_: P] = P(!CharIn("_a-zA-Z0-9"))
- def number[_: P]: P[Expr.Num] = P(
+ def break[$: P] = P(!CharIn("_a-zA-Z0-9"))
+ def number[$: P]: P[Expr.Num] = P(
Index ~~ (
CharsWhileIn("0-9") ~~
("." ~ CharsWhileIn("0-9")).? ~~
@@ -46,8 +46,8 @@ object fastparseParser{
).!
).map(s => Expr.Num(s._1, s._2.toDouble))
- def escape[_: P] = P( escape0 | escape1 )
- def escape0[_: P] = P("\\" ~~ !"u" ~~ AnyChar.!).map{
+ def escape[$: P] = P( escape0 | escape1 )
+ def escape0[$: P] = P("\\" ~~ !"u" ~~ AnyChar.!).map{
case "\"" => "\""
case "'" => "\'"
case "\\" => "\\"
@@ -58,27 +58,27 @@ object fastparseParser{
case "r" => "\r"
case "t" => "\t"
}
- def escape1[_: P] = P( "\\u" ~~ CharIn("0-9").repX(min=4, max=4).! ).map{
+ def escape1[$: P] = P( "\\u" ~~ CharIn("0-9").repX(min=4, max=4).! ).map{
s => Integer.parseInt(s, 16).toChar.toString
}
- def doubleString[_: P]: P[Seq[String]] =
+ def doubleString[$: P]: P[Seq[String]] =
P( (CharsWhile(x => x != '"' && x != '\\').! | escape).repX ~~ "\"" )
- def singleString[_: P]: P[Seq[String]] =
+ def singleString[$: P]: P[Seq[String]] =
P( (CharsWhile(x => x != '\'' && x != '\\').! | escape).repX ~~ "'" )
- def literalDoubleString[_: P]: P[Seq[String]] =
+ def literalDoubleString[$: P]: P[Seq[String]] =
P( (CharsWhile(_ != '"').! | "\"\"".!.map(_ => "\"")).repX ~~ "\"" )
- def literalSingleString[_: P]: P[Seq[String]] =
+ def literalSingleString[$: P]: P[Seq[String]] =
P( (CharsWhile(_ != '\'').! | "''".!.map(_ => "'")).repX ~~ "'" )
- def tripleBarStringLines[_: P]: P[Seq[String]] = P(
+ def tripleBarStringLines[$: P]: P[Seq[String]] = P(
tripleBarStringHead.flatMapX { case (pre, w, head) =>
tripleBarStringBody(w).map(pre ++ Seq(head, "\n") ++ _)
}
)
- def tripleBarString[_: P]: P[Seq[String]] = P(
+ def tripleBarString[$: P]: P[Seq[String]] = P(
"||"./ ~~ CharsWhileIn(" \t", 0) ~~ "\n" ~~ tripleBarStringLines ~~ "\n" ~~ CharsWhileIn(" \t") ~~ "|||"
)
- def string[_: P]: P[String] = P(
+ def string[$: P]: P[String] = P(
SingleChar.flatMapX{
case '\"' => doubleString
case '\'' => singleString
@@ -92,25 +92,25 @@ object fastparseParser{
}
).map(_.mkString)
- def tripleBarStringHead[_: P] = P(
+ def tripleBarStringHead[$: P] = P(
(CharsWhileIn(" \t", 0) ~~ "\n".!).repX ~~
CharsWhileIn(" \t", 1).! ~~
CharsWhile(_ != '\n').!
)
- def tripleBarBlankHead[_: P]: P[String] =
+ def tripleBarBlankHead[$: P]: P[String] =
P( CharsWhileIn(" \t", 0) ~~ &("\n").map(_ => "\n") )
- def tripleBarBlank[_: P]: P[String] = P( "\n" ~~ tripleBarBlankHead )
+ def tripleBarBlank[$: P]: P[String] = P( "\n" ~~ tripleBarBlankHead )
- def tripleBarStringBody[_: P](w: String): P[Seq[String]] = P (
+ def tripleBarStringBody[$: P](w: String): P[Seq[String]] = P (
(tripleBarBlank | "\n" ~~ w ~~ CharsWhile(_ != '\n').!.map(_ + "\n")).repX
)
- def obj[_: P]: P[Expr] = P( (Index ~~ objinside).map(Expr.Obj.tupled) )
- def arr[_: P]: P[Expr] = P( (Index ~~ &("]")).map(Expr.Arr(_, Nil)) | arrBody )
- def compSuffix[_: P] = P( forspec ~ compspec ).map(Left(_))
- def arrBody[_: P]: P[Expr] = P(
+ def obj[$: P]: P[Expr] = P( (Index ~~ objinside).map((Expr.Obj.apply _).tupled) )
+ def arr[$: P]: P[Expr] = P( (Index ~~ &("]")).map(Expr.Arr(_, Nil)) | arrBody )
+ def compSuffix[$: P] = P( forspec ~ compspec ).map(Left(_))
+ def arrBody[$: P]: P[Expr] = P(
Index ~~ expr ~ (compSuffix | "," ~ (compSuffix | (expr.rep(0, sep = ",") ~ ",".?).map(Right(_)))).?
).map{
case (offset, first, None) => Expr.Arr(offset, Seq(first))
@@ -118,12 +118,12 @@ object fastparseParser{
case (offset, first, Some(Right(rest))) => Expr.Arr(offset, Seq(first) ++ rest)
}
- def assertExpr[_: P](index: Int): P[Expr] = P( assertStmt ~ ";" ~ expr ).map(t => Expr.AssertExpr(index, t._1, t._2))
- def function[_: P](index: Int): P[Expr] = P( "(" ~/ params ~ ")" ~ expr ).map(t => Expr.Function(index, t._1, t._2))
- def ifElse[_: P](index: Int): P[Expr] = P( Index ~~ expr ~ "then" ~~ break ~ expr ~ ("else" ~~ break ~ expr).? ).map(Expr.IfElse.tupled)
- def localExpr[_: P]: P[Expr] = P( Index ~~ bind.rep(min=1, sep = ","./) ~ ";" ~ expr ).map(Expr.LocalExpr.tupled)
+ def assertExpr[$: P](index: Int): P[Expr] = P( assertStmt ~ ";" ~ expr ).map(t => Expr.AssertExpr(index, t._1, t._2))
+ def function[$: P](index: Int): P[Expr] = P( "(" ~/ params ~ ")" ~ expr ).map(t => Expr.Function(index, t._1, t._2))
+ def ifElse[$: P](index: Int): P[Expr] = P( Index ~~ expr ~ "then" ~~ break ~ expr ~ ("else" ~~ break ~ expr).? ).map((Expr.IfElse.apply _).tupled)
+ def localExpr[$: P]: P[Expr] = P( Index ~~ bind.rep(min=1, sep = ","./) ~ ";" ~ expr ).map((Expr.LocalExpr.apply _).tupled)
- def expr[_: P]: P[Expr] = P("" ~ expr1 ~ (Index ~~ binaryop ~/ expr1).rep ~ "").map{ case (pre, fs) =>
+ def expr[$: P]: P[Expr] = P("" ~ expr1 ~ (Index ~~ binaryop ~/ expr1).rep ~ "").map{ case (pre, fs) =>
var remaining = fs
def climb(minPrec: Int, current: Expr): Expr = {
var result = current
@@ -168,11 +168,11 @@ object fastparseParser{
climb(0, pre)
}
- def expr1[_: P]: P[Expr] = P(expr2 ~ exprSuffix2.rep).map{
+ def expr1[$: P]: P[Expr] = P(expr2 ~ exprSuffix2.rep).map{
case (pre, fs) => fs.foldLeft(pre){case (p, f) => f(p) }
}
- def exprSuffix2[_: P]: P[Expr => Expr] = P(
+ def exprSuffix2[$: P]: P[Expr => Expr] = P(
for{
i <- Index
c <- CharIn(".[({").!.map(_(0))
@@ -189,14 +189,14 @@ object fastparseParser{
} yield r
)
- def local[_: P] = P( localExpr )
- def parened[_: P] = P( (Index ~~ expr).map(Expr.Parened.tupled) )
- def importStr[_: P](index: Int) = P( string.map(Expr.ImportStr(index, _)) )
- def `import`[_: P](index: Int) = P( string.map(Expr.Import(index, _)) )
- def error[_: P](index: Int) = P(expr.map(Expr.Error(index, _)) )
- def strExpr[_: P] = P((Index ~~ string).map(Expr.Str.tupled))
- def idExpr[_: P] = P( (Index ~~ id).map(Expr.Id.tupled) )
- def unaryOpExpr[_: P](index: Int, op: Char) = P(
+ def local[$: P] = P( localExpr )
+ def parened[$: P] = P( (Index ~~ expr).map((Expr.Parened.apply _).tupled) )
+ def importStr[$: P](index: Int) = P( string.map(Expr.ImportStr(index, _)) )
+ def `import`[$: P](index: Int) = P( string.map(Expr.Import(index, _)) )
+ def error[$: P](index: Int) = P(expr.map(Expr.Error(index, _)) )
+ def strExpr[$: P] = P((Index ~~ string).map((Expr.Str.apply _).tupled))
+ def idExpr[$: P] = P( (Index ~~ id).map((Expr.Id.apply _).tupled) )
+ def unaryOpExpr[$: P](index: Int, op: Char) = P(
expr1.map{ e =>
def k2 = op match{
case '+' => Expr.UnaryOp.`+`
@@ -210,7 +210,7 @@ object fastparseParser{
def constructString(index: Int, lines: Seq[String]) = Expr.Str(index, lines.mkString)
// Any `expr` that isn't naively left-recursive
- def expr2[_: P]: P[Expr] = P(
+ def expr2[$: P]: P[Expr] = P(
Index.flatMapX{ index =>
SingleChar.flatMapX{ c =>
(c: @switch) match {
@@ -252,7 +252,7 @@ object fastparseParser{
}
)
- def objinside[_: P]: P[Expr.ObjBody] = P(
+ def objinside[$: P]: P[Expr.ObjBody] = P(
member.rep(sep = ",") ~ ",".? ~ (forspec ~ compspec).?
).map{
case (exprs, None) => Expr.ObjBody.MemberList(exprs)
@@ -265,32 +265,32 @@ object fastparseParser{
Expr.ObjBody.ObjComp(preLocals, lhs, rhs, postLocals, comps._1, comps._2)
}
- def member[_: P]: P[Expr.Member] = P( objlocal | assertStmt | field )
- def field[_: P] = P(
+ def member[$: P]: P[Expr.Member] = P( objlocal | assertStmt | field )
+ def field[$: P] = P(
(Index ~~ fieldname ~/ "+".!.? ~ ("(" ~ params ~ ")").? ~ fieldKeySep ~/ expr).map{
case (offset, name, plus, p, h2, e) =>
Expr.Member.Field(offset, name, plus.nonEmpty, p, h2, e)
}
)
- def fieldKeySep[_: P] = P( StringIn(":::", "::", ":") ).!.map{
+ def fieldKeySep[$: P] = P( StringIn(":::", "::", ":") ).!.map{
case ":" => Visibility.Normal
case "::" => Visibility.Hidden
case ":::" => Visibility.Unhide
}
- def objlocal[_: P] = P( "local" ~~ break ~/ bind ).map(Expr.Member.BindStmt)
- def compspec[_: P]: P[Seq[Expr.CompSpec]] = P( (forspec | ifspec).rep )
- def forspec[_: P] = P( Index ~~ "for" ~~ break ~/ id ~ "in" ~~ break ~ expr ).map(Expr.ForSpec.tupled)
- def ifspec[_: P] = P( Index ~~ "if" ~~ break ~/ expr ).map(Expr.IfSpec.tupled)
- def fieldname[_: P] = P( id.map(Expr.FieldName.Fixed) | string.map(Expr.FieldName.Fixed) | "[" ~ expr.map(Expr.FieldName.Dyn) ~ "]" )
- def assertStmt[_: P] = P( "assert" ~~ break ~/ expr ~ (":" ~ expr).? ).map(Expr.Member.AssertStmt.tupled)
- def bind[_: P] = P( Index ~~ id ~ ("(" ~/ params.? ~ ")").?.map(_.flatten) ~ "=" ~ expr ).map(Expr.Bind.tupled)
- def args[_: P] = P( ((id ~ "=").? ~ expr).rep(sep = ",") ~ ",".? ).flatMap{ x =>
+ def objlocal[$: P] = P( "local" ~~ break ~/ bind ).map(Expr.Member.BindStmt)
+ def compspec[$: P]: P[Seq[Expr.CompSpec]] = P( (forspec | ifspec).rep )
+ def forspec[$: P] = P( Index ~~ "for" ~~ break ~/ id ~ "in" ~~ break ~ expr ).map((Expr.ForSpec.apply _).tupled)
+ def ifspec[$: P] = P( Index ~~ "if" ~~ break ~/ expr ).map((Expr.IfSpec.apply _).tupled)
+ def fieldname[$: P] = P( id.map(Expr.FieldName.Fixed) | string.map(Expr.FieldName.Fixed) | "[" ~ expr.map(Expr.FieldName.Dyn) ~ "]" )
+ def assertStmt[$: P] = P( "assert" ~~ break ~/ expr ~ (":" ~ expr).? ).map((Expr.Member.AssertStmt.apply _).tupled)
+ def bind[$: P] = P( Index ~~ id ~ ("(" ~/ params.? ~ ")").?.map(_.flatten) ~ "=" ~ expr ).map((Expr.Bind.apply _).tupled)
+ def args[$: P] = P( ((id ~ "=").? ~ expr).rep(sep = ",") ~ ",".? ).flatMap{ x =>
if (x.sliding(2).exists{case Seq(l, r) => l._1.isDefined && r._1.isEmpty case _ => false}) {
Fail
} else Pass.map(_ => Expr.Args(x))
}
- def params[_: P]: P[Expr.Params] = P( (id ~ ("=" ~ expr).?).rep(sep = ",") ~ ",".? ).flatMap{ x =>
+ def params[$: P]: P[Expr.Params] = P( (id ~ ("=" ~ expr).?).rep(sep = ",") ~ ",".? ).flatMap{ x =>
val seen = collection.mutable.Set.empty[String]
var overlap: String = null
for((k, v) <- x){
@@ -302,7 +302,7 @@ object fastparseParser{
}
- def binaryop[_: P] = P(
+ def binaryop[$: P] = P(
StringIn(
"<<", ">>", "<=", ">=", "in", "==", "!=", "&&", "||",
"*", "/", "%", "+", "-", "<", ">", "&", "^", "|"
@@ -310,7 +310,7 @@ object fastparseParser{
).!
- def unaryop[_: P] = P( CharIn("\\-+!~") ).!
+ def unaryop[$: P] = P( CharIn("\\-+!~") ).!
- def document[_: P]: P[Expr] = P( expr ~ End )
+ def document[$: P]: P[Expr] = P( expr ~ End )
}
diff --git a/perftests/bench2/src/perftests/JsonParse.scala b/perftests/bench2/src/perftests/JsonParse.scala
index 6daa87e9..dcf46ce0 100644
--- a/perftests/bench2/src/perftests/JsonParse.scala
+++ b/perftests/bench2/src/perftests/JsonParse.scala
@@ -5,7 +5,7 @@ object JsonParse extends TestSuite {
val crossValidationSource = scala.io.Source.fromInputStream(crossValidationStream).mkString
def crossValidationIterator(size: Int) = crossValidationSource.grouped(size)
import fastparse._, NoWhitespace._
- def jsonDoc[_: P] = P( _root_.test.fastparse.Json.jsonExpr ~ End )
+ def jsonDoc[$: P] = P( _root_.test.fastparse.Json.jsonExpr ~ End )
val tests = Tests {
test("CrossValidation"){
Utils.benchmarkAll(
diff --git a/perftests/bench2/src/perftests/PythonParse.scala b/perftests/bench2/src/perftests/PythonParse.scala
index cdfcbe8e..8cd78df5 100644
--- a/perftests/bench2/src/perftests/PythonParse.scala
+++ b/perftests/bench2/src/perftests/PythonParse.scala
@@ -7,7 +7,7 @@ object PythonParse extends TestSuite {
val crossValidationStream = getClass.getResourceAsStream("/cross_validation.py")
val crossValidationSource = scala.io.Source.fromInputStream(crossValidationStream).mkString
def crossValidationIterator(size: Int) = crossValidationSource.grouped(size)
- def parser[_: P] = pythonparse.Statements.file_input ~ End
+ def parser[$: P] = pythonparse.Statements.file_input ~ End
val tests = Tests {
test("CrossValidation"){
Utils.benchmarkAll(
diff --git a/perftests/bench2/src/perftests/ScalaParse.scala b/perftests/bench2/src/perftests/ScalaParse.scala
index 45b87c59..e5de6dc8 100644
--- a/perftests/bench2/src/perftests/ScalaParse.scala
+++ b/perftests/bench2/src/perftests/ScalaParse.scala
@@ -3,8 +3,7 @@ package perftests.string
import perftests.Utils
import utest._
-import scala.tools.nsc.{Global, Settings}
-import scalaparse.{Scala, ScalacParser}
+import scalaparse.{Scala}
object ScalaParse extends TestSuite{
val genJsCodeStream = getClass.getResourceAsStream("/GenJSCode.scala")
val genJsCodeSource = scala.io.Source.fromInputStream(genJsCodeStream).mkString
diff --git a/perftests/resources/GenJSCode.scala b/perftests/resources/GenJSCode.scala
index a4465024..5698c712 100644
--- a/perftests/resources/GenJSCode.scala
+++ b/perftests/resources/GenJSCode.scala
@@ -280,7 +280,7 @@ with Compat210Component {
// Optimizer hints
- def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = {
+ def isStdLibClassWithAdHocInlineAnnot(sym: String): Boolean = {
val fullName = sym.fullName
(fullName.startsWith("scala.Tuple") && !fullName.endsWith("$")) ||
(fullName.startsWith("scala.collection.mutable.ArrayOps$of"))
@@ -472,7 +472,7 @@ with Compat210Component {
hashedDefs)(OptimizerHints.empty)
}
- private def genClassInterfaces(sym: Symbol)(
+ private def genClassInterfaces(sym: String)(
implicit pos: Position): List[js.Ident] = {
for {
parent <- sym.info.parents
@@ -643,7 +643,7 @@ with Compat210Component {
result
}
- private def isTrivialConstructor(sym: Symbol, params: List[Symbol],
+ private def isTrivialConstructor(sym: String, params: List[Symbol],
rhs: Tree): Boolean = {
if (!sym.isClassConstructor) {
false
@@ -736,7 +736,7 @@ with Compat210Component {
test - If no proxy exists in the superclass, a proxy is generated for the
* first method with matching signatures.
*/
- def genReflCallProxies(sym: Symbol): List[js.MethodDef] = {
+ def genReflCallProxies(sym: String): List[js.MethodDef] = {
import scala.reflect.internal.Flags
// Flags of members we do not want to consider for reflective call proxys
@@ -747,7 +747,7 @@ with Compat210Component {
)
/** Check if two method symbols conform in name and parameter types */
- def weakMatch(s1: Symbol)(s2: Symbol) = {
+ def weakMatch(s1: String)(s2: String) = {
val p1 = s1.tpe.params
val p2 = s2.tpe.params
s1 == s2 || // Shortcut
@@ -761,7 +761,7 @@ with Compat210Component {
/** Check if the symbol's owner's superclass has a matching member (and
* therefore an existing proxy).
*/
- def superHasProxy(s: Symbol) = {
+ def superHasProxy(s: String) = {
val alts = sym.superClass.tpe.findMember(
name = s.name,
excludedFlags = excludedFlags,
@@ -789,7 +789,7 @@ with Compat210Component {
}
/** actually generates reflective call proxy for the given method symbol */
- private def genReflCallProxy(sym: Symbol): js.MethodDef = {
+ private def genReflCallProxy(sym: String): js.MethodDef = {
implicit val pos = sym.pos
val proxyIdent = encodeMethodSym(sym, reflProxy = true)
@@ -1650,7 +1650,7 @@ with Compat210Component {
}
}
- def genApplyMethodStatically(receiver: js.Tree, method: Symbol,
+ def genApplyMethodStatically(receiver: js.Tree, method: String,
arguments: List[js.Tree])(implicit pos: Position): js.Tree = {
val className = encodeClassFullName(method.owner)
val methodIdent = encodeMethodSym(method)
@@ -1660,7 +1660,7 @@ with Compat210Component {
methodIdent, arguments)(toIRType(method.tpe.resultType))
}
- def genTraitImplApply(method: Symbol, arguments: List[js.Tree])(
+ def genTraitImplApply(method: String, arguments: List[js.Tree])(
implicit pos: Position): js.Tree = {
val implName = encodeClassFullName(method.owner)
val methodIdent = encodeMethodSym(method)
@@ -1759,7 +1759,7 @@ with Compat210Component {
* method in the method info builder.
*/
def genApplyMethod(receiver: js.Tree,
- methodSym: Symbol, arguments: List[js.Tree])(
+ methodSym: String, arguments: List[js.Tree])(
implicit pos: Position): js.Tree = {
genApplyMethod(receiver, encodeMethodSym(methodSym),
arguments, toIRType(methodSym.tpe.resultType))
@@ -1782,7 +1782,7 @@ with Compat210Component {
* method, and that the given constructor is called, in the method info
* builder.
*/
- def genNew(clazz: Symbol, ctor: Symbol, arguments: List[js.Tree])(
+ def genNew(clazz: String, ctor: String, arguments: List[js.Tree])(
implicit pos: Position): js.Tree = {
if (clazz.isAnonymousFunction)
instantiatedAnonFunctions += clazz
@@ -1799,7 +1799,7 @@ with Compat210Component {
* value, which is erased, and one with a String, which is
* equivalent to BoxedClass.valueOf(arg).
*/
- private def genNewHijackedBoxedClass(clazz: Symbol, ctor: Symbol,
+ private def genNewHijackedBoxedClass(clazz: String, ctor: String,
arguments: List[js.Tree])(implicit pos: Position): js.Tree = {
assert(arguments.size == 1)
if (isStringType(ctor.tpe.params.head.tpe)) {
@@ -2265,7 +2265,7 @@ with Compat210Component {
}
if (mustUseAnyComparator) {
- val equalsMethod: Symbol = {
+ val equalsMethod: String = {
val ptfm = platform.asInstanceOf[backend.JavaPlatform with ThisPlatform] // 2.10 compat
if (ltpe <:< BoxedNumberClass.tpe) {
if (rtpe <:< BoxedNumberClass.tpe) ptfm.externalEqualsNumNum
@@ -2516,7 +2516,7 @@ with Compat210Component {
* (result != NoSymbol), we generate a runtime instance check if we are
* dealing with the appropriate primitive type.
*/
- def matchingSymIn(clazz: Symbol) = clazz.tpe.member(sym.name).suchThat { s =>
+ def matchingSymIn(clazz: String) = clazz.tpe.member(sym.name).suchThat { s =>
val sParams = s.tpe.params
!s.isBridge &&
params.size == sParams.size &&
@@ -2562,7 +2562,7 @@ with Compat210Component {
genApplyMethod(callTrg, proxyIdent, arguments, jstpe.AnyType)
if (isArrayLikeOp) {
- def genRTCall(method: Symbol, args: js.Tree*) =
+ def genRTCall(method: String, args: js.Tree*) =
genApplyMethod(genLoadModule(ScalaRunTimeModule),
method, args.toList)
val isArrayTree =
@@ -3152,20 +3152,20 @@ with Compat210Component {
}
/** Gen JS code representing a JS class (subclass of js.Any) */
- private def genPrimitiveJSClass(sym: Symbol)(
+ private def genPrimitiveJSClass(sym: String)(
implicit pos: Position): js.Tree = {
genGlobalJSObject(sym)
}
/** Gen JS code representing a JS module (var of the global scope) */
- private def genPrimitiveJSModule(sym: Symbol)(
+ private def genPrimitiveJSModule(sym: String)(
implicit pos: Position): js.Tree = {
genGlobalJSObject(sym)
}
/** Gen JS code representing a JS object (class or module) in global scope
*/
- private def genGlobalJSObject(sym: Symbol)(
+ private def genGlobalJSObject(sym: String)(
implicit pos: Position): js.Tree = {
jsNameOf(sym).split('.').foldLeft(genLoadGlobal()) { (memo, chunk) =>
js.JSBracketSelect(memo, js.StringLiteral(chunk))
@@ -3178,7 +3178,7 @@ with Compat210Component {
* This tries to optimize repeated arguments (varargs) by turning them
* into js.WrappedArray instead of Scala wrapped arrays.
*/
- private def genActualArgs(sym: Symbol, args: List[Tree])(
+ private def genActualArgs(sym: String, args: List[Tree])(
implicit pos: Position): List[js.Tree] = {
val wereRepeated = exitingPhase(currentRun.typerPhase) {
sym.tpe.params.map(p => isScalaRepeatedParamType(p.tpe))
@@ -3217,7 +3217,7 @@ with Compat210Component {
* Seq is passed to a varargs parameter with the syntax `seq: _*`) will be
* wrapped in a [[js.JSSpread]] node to be expanded at runtime.
*/
- private def genPrimitiveJSArgs(sym: Symbol, args: List[Tree])(
+ private def genPrimitiveJSArgs(sym: String, args: List[Tree])(
implicit pos: Position): List[js.Tree] = {
val wereRepeated = exitingPhase(currentRun.typerPhase) {
for {
@@ -3718,7 +3718,7 @@ with Compat210Component {
JSFunctionToScala(closure, params.size)
}
- private def patchFunBodyWithBoxes(methodSym: Symbol,
+ private def patchFunBodyWithBoxes(methodSym: String,
params: List[js.ParamDef], body: js.Tree)(
implicit pos: Position): (List[js.ParamDef], js.Tree) = {
val methodType = enteringPhase(currentRun.posterasurePhase)(methodSym.tpe)
@@ -3760,7 +3760,7 @@ with Compat210Component {
/** Generate loading of a module value
* Can be given either the module symbol, or its module class symbol.
*/
- def genLoadModule(sym0: Symbol)(implicit pos: Position): js.Tree = {
+ def genLoadModule(sym0: String)(implicit pos: Position): js.Tree = {
require(sym0.isModuleOrModuleClass,
"genLoadModule called with non-module symbol: " + sym0)
val sym1 = if (sym0.isModule) sym0.moduleClass else sym0
@@ -3785,7 +3785,7 @@ with Compat210Component {
js.JSBracketSelect(js.JSEnvInfo(), js.StringLiteral("global"))
/** Generate access to a static member */
- private def genStaticMember(sym: Symbol)(implicit pos: Position) = {
+ private def genStaticMember(sym: String)(implicit pos: Position) = {
/* Actually, there is no static member in Scala.js. If we come here, that
* is because we found the symbol in a Java-emitted .class in the
* classpath. But the corresponding implementation in Scala.js will
@@ -3824,17 +3824,17 @@ with Compat210Component {
tpe.typeSymbol.annotations.find(_.tpe =:= RawJSTypeAnnot.tpe).isDefined
/** Test whether `sym` is the symbol of a raw JS function definition */
- private def isRawJSFunctionDef(sym: Symbol): Boolean =
+ private def isRawJSFunctionDef(sym: String): Boolean =
sym.isAnonymousClass && AllJSFunctionClasses.exists(sym isSubClass _)
- private def isRawJSCtorDefaultParam(sym: Symbol) = {
+ private def isRawJSCtorDefaultParam(sym: String) = {
sym.hasFlag(reflect.internal.Flags.DEFAULTPARAM) &&
sym.owner.isModuleClass &&
isRawJSType(patchedLinkedClassOfClass(sym.owner).tpe) &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
}
- private def patchedLinkedClassOfClass(sym: Symbol): Symbol = {
+ private def patchedLinkedClassOfClass(sym: String): String = {
/* Work around a bug of scalac with linkedClassOfClass where package
* objects are involved (the companion class would somehow exist twice
* in the scope, making an assertion fail in Symbol.suchThat).
@@ -3862,7 +3862,7 @@ with Compat210Component {
* Further, in 2.10.x fields used to implement lazy vals are not marked
* mutable (but assigned to in the accessor).
*/
- private def suspectFieldMutable(sym: Symbol) = {
+ private def suspectFieldMutable(sym: String) = {
import scala.reflect.internal.Flags
sym.hasFlag(Flags.MIXEDIN) || sym.isMutable || sym.isLazy
}
@@ -3912,10 +3912,10 @@ with Compat210Component {
/** Get JS name of Symbol if it was specified with JSName annotation, or
* infers a default from the Scala name. */
- def jsNameOf(sym: Symbol): String =
+ def jsNameOf(sym: String): String =
sym.getAnnotation(JSNameAnnotation).flatMap(_.stringArg(0)).getOrElse(
sym.unexpandedName.decoded)
- def isStaticModule(sym: Symbol): Boolean =
+ def isStaticModule(sym: String): Boolean =
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
}
\ No newline at end of file
diff --git a/project/build.properties b/project/build.properties
index 7c58a83a..e8a1e246 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.2.6
+sbt.version=1.9.7
diff --git a/pythonparse/src/pythonparse/Expressions.scala b/pythonparse/src/pythonparse/Expressions.scala
index 24c27714..477e77a8 100644
--- a/pythonparse/src/pythonparse/Expressions.scala
+++ b/pythonparse/src/pythonparse/Expressions.scala
@@ -11,33 +11,35 @@ import Lexical.kw
* Manually transcribed from https://docs.python.org/2/reference/grammar.html
*/
object Expressions {
- implicit def whitespace(cfg: P[_]): P[Unit] = Lexical.wscomment(cfg)
+ implicit object whitespace extends fastparse.Whitespace {
+ def apply(ctx: P[_]): P[Unit] = Lexical.wscomment(ctx)
+ }
def tuplize(xs: Seq[Ast.expr]) = xs match{
case Seq(x) => x
case xs => Ast.expr.Tuple(xs, Ast.expr_context.Load)
}
- def NAME[_: P]: P[Ast.identifier] = Lexical.identifier
- def NUMBER[_: P]: P[Ast.expr.Num] = P( Lexical.floatnumber | Lexical.longinteger | Lexical.integer | Lexical.imagnumber ).map(Ast.expr.Num)
- def STRING[_: P]: P[Ast.string] = Lexical.stringliteral
+ def NAME[$: P]: P[Ast.identifier] = Lexical.identifier
+ def NUMBER[$: P]: P[Ast.expr.Num] = P( Lexical.floatnumber | Lexical.longinteger | Lexical.integer | Lexical.imagnumber ).map(Ast.expr.Num.apply)
+ def STRING[$: P]: P[Ast.string] = Lexical.stringliteral
- def test[_: P]: P[Ast.expr] = {
+ def test[$: P]: P[Ast.expr] = {
def ternary = P( or_test ~ (kw("if") ~ or_test ~ kw("else") ~ test).? ).map{
case (x, None) => x
case (x, Some((test, neg))) => Ast.expr.IfExp(test, x, neg)
}
P( ternary | lambdef )
}
- def or_test[_: P] = P( and_test.rep(1, sep = kw("or")) ).map{
+ def or_test[$: P] = P( and_test.rep(1, sep = kw("or")) ).map{
case Seq(x) => x
case xs => Ast.expr.BoolOp(Ast.boolop.Or, xs)
}
- def and_test[_: P] = P( not_test.rep(1, sep = kw("and")) ).map{
+ def and_test[$: P] = P( not_test.rep(1, sep = kw("and")) ).map{
case Seq(x) => x
case xs => Ast.expr.BoolOp(Ast.boolop.And, xs)
}
- def not_test[_: P]: P[Ast.expr] = P( (kw("not") ~ not_test).map(Ast.expr.UnaryOp(Ast.unaryop.Not, _)) | comparison )
- def comparison[_: P]: P[Ast.expr] = P( expr ~ (comp_op ~ expr).rep ).map{
+ def not_test[$: P]: P[Ast.expr] = P( (kw("not") ~ not_test).map(Ast.expr.UnaryOp(Ast.unaryop.Not, _)) | comparison )
+ def comparison[$: P]: P[Ast.expr] = P( expr ~ (comp_op ~ expr).rep ).map{
case (lhs, Nil) => lhs
case (lhs, chunks) =>
val (ops, vals) = chunks.unzip
@@ -46,55 +48,55 @@ object Expressions {
// Common operators, mapped from their
// strings to their type-safe representations
- def op[T, _: P](s: => P[Unit], rhs: T) = s.!.map(_ => rhs)
- def LShift[_: P] = op("<<", Ast.operator.LShift)
- def RShift[_: P] = op(">>", Ast.operator.RShift)
- def Lt[_: P] = op("<", Ast.cmpop.Lt)
- def Gt[_: P] = op(">", Ast.cmpop.Gt)
- def Eq[_: P] = op("==", Ast.cmpop.Eq)
- def GtE[_: P] = op(">=", Ast.cmpop.GtE)
- def LtE[_: P] = op("<=", Ast.cmpop.LtE)
- def NotEq[_: P] = op("<>" | "!=", Ast.cmpop.NotEq)
- def In[_: P] = op(kw("in"), Ast.cmpop.In)
- def NotIn[_: P] = op(kw("not") ~ kw("in"), Ast.cmpop.NotIn)
- def Is[_: P] = op(kw("is"), Ast.cmpop.Is)
- def IsNot[_: P] = op(kw("is") ~ kw("not"), Ast.cmpop.IsNot)
- def comp_op[_: P] = P( LtE|GtE|Eq|Gt|Lt|NotEq|In|NotIn|IsNot|Is )
- def Add[_: P] = op("+", Ast.operator.Add)
- def Sub[_: P] = op("-", Ast.operator.Sub)
- def Pow[_: P] = op("**", Ast.operator.Pow)
- def Mult[_: P] = op("*", Ast.operator.Mult)
- def Div[_: P] = op("/", Ast.operator.Div)
- def Mod[_: P] = op("%", Ast.operator.Mod)
- def FloorDiv[_: P] = op("//", Ast.operator.FloorDiv)
- def BitOr[_: P] = op("|", Ast.operator.BitOr)
- def BitAnd[_: P] = op("&", Ast.operator.BitAnd)
- def BitXor[_: P] = op("^", Ast.operator.BitXor)
- def UAdd[_: P] = op("+", Ast.unaryop.UAdd)
- def USub[_: P] = op("-", Ast.unaryop.USub)
- def Invert[_: P] = op("~", Ast.unaryop.Invert)
- def unary_op[_: P] = P ( UAdd | USub | Invert )
-
-
- def Unary[_: P](p: => P[Ast.expr]) =
+ def op[T, $: P](s: => P[Unit], rhs: T) = s.!.map(_ => rhs)
+ def LShift[$: P] = op("<<", Ast.operator.LShift)
+ def RShift[$: P] = op(">>", Ast.operator.RShift)
+ def Lt[$: P] = op("<", Ast.cmpop.Lt)
+ def Gt[$: P] = op(">", Ast.cmpop.Gt)
+ def Eq[$: P] = op("==", Ast.cmpop.Eq)
+ def GtE[$: P] = op(">=", Ast.cmpop.GtE)
+ def LtE[$: P] = op("<=", Ast.cmpop.LtE)
+ def NotEq[$: P] = op("<>" | "!=", Ast.cmpop.NotEq)
+ def In[$: P] = op(kw("in"), Ast.cmpop.In)
+ def NotIn[$: P] = op(kw("not") ~ kw("in"), Ast.cmpop.NotIn)
+ def Is[$: P] = op(kw("is"), Ast.cmpop.Is)
+ def IsNot[$: P] = op(kw("is") ~ kw("not"), Ast.cmpop.IsNot)
+ def comp_op[$: P] = P( LtE|GtE|Eq|Gt|Lt|NotEq|In|NotIn|IsNot|Is )
+ def Add[$: P] = op("+", Ast.operator.Add)
+ def Sub[$: P] = op("-", Ast.operator.Sub)
+ def Pow[$: P] = op("**", Ast.operator.Pow)
+ def Mult[$: P] = op("*", Ast.operator.Mult)
+ def Div[$: P] = op("/", Ast.operator.Div)
+ def Mod[$: P] = op("%", Ast.operator.Mod)
+ def FloorDiv[$: P] = op("//", Ast.operator.FloorDiv)
+ def BitOr[$: P] = op("|", Ast.operator.BitOr)
+ def BitAnd[$: P] = op("&", Ast.operator.BitAnd)
+ def BitXor[$: P] = op("^", Ast.operator.BitXor)
+ def UAdd[$: P] = op("+", Ast.unaryop.UAdd)
+ def USub[$: P] = op("-", Ast.unaryop.USub)
+ def Invert[$: P] = op("~", Ast.unaryop.Invert)
+ def unary_op[$: P] = P ( UAdd | USub | Invert )
+
+
+ def Unary[$: P](p: => P[Ast.expr]) =
(unary_op ~ p).map{ case (op, operand) => Ast.expr.UnaryOp(op, operand) }
- def Chain[_: P](p: => P[Ast.expr], op: => P[Ast.operator]) = P( p ~ (op ~ p).rep ).map{
+ def Chain[$: P](p: => P[Ast.expr], op: => P[Ast.operator]) = P( p ~ (op ~ p).rep ).map{
case (lhs, chunks) =>
chunks.foldLeft(lhs){case (lhs, (op, rhs)) =>
Ast.expr.BinOp(lhs, op, rhs)
}
}
- def expr[_: P]: P[Ast.expr] = P( Chain(xor_expr, BitOr) )
- def xor_expr[_: P]: P[Ast.expr] = P( Chain(and_expr, BitXor) )
- def and_expr[_: P]: P[Ast.expr] = P( Chain(shift_expr, BitAnd) )
- def shift_expr[_: P]: P[Ast.expr] = P( Chain(arith_expr, LShift | RShift) )
+ def expr[$: P]: P[Ast.expr] = P( Chain(xor_expr, BitOr) )
+ def xor_expr[$: P]: P[Ast.expr] = P( Chain(and_expr, BitXor) )
+ def and_expr[$: P]: P[Ast.expr] = P( Chain(shift_expr, BitAnd) )
+ def shift_expr[$: P]: P[Ast.expr] = P( Chain(arith_expr, LShift | RShift) )
- def arith_expr[_: P]: P[Ast.expr] = P( Chain(term, Add | Sub) )
- def term[_: P]: P[Ast.expr] = P( Chain(factor, Mult | FloorDiv | Div | Mod ) )
+ def arith_expr[$: P]: P[Ast.expr] = P( Chain(term, Add | Sub) )
+ def term[$: P]: P[Ast.expr] = P( Chain(factor, Mult | FloorDiv | Div | Mod ) )
- def factor[_: P]: P[Ast.expr] = P( power | Unary(factor) )
- def power[_: P]: P[Ast.expr] = P( atom ~ trailer.rep ~ (Pow ~ factor).? ).map{
+ def factor[$: P]: P[Ast.expr] = P( power | Unary(factor) )
+ def power[$: P]: P[Ast.expr] = P( atom ~ trailer.rep ~ (Pow ~ factor).? ).map{
case (lhs, trailers, rhs) =>
val left = trailers.foldLeft(lhs)((l, t) => t(l))
rhs match{
@@ -102,7 +104,7 @@ object Expressions {
case Some((op, right)) => Ast.expr.BinOp(left, op, right)
}
}
- def atom[_: P]: P[Ast.expr] = {
+ def atom[$: P]: P[Ast.expr] = {
def empty_tuple = ("(" ~ ")").map(_ => Ast.expr.Tuple(Nil, Ast.expr_context.Load))
def empty_list = ("[" ~ "]").map(_ => Ast.expr.List(Nil, Ast.expr_context.Load))
def empty_dict = ("{" ~ "}").map(_ => Ast.expr.Dict(Nil, Nil))
@@ -114,33 +116,33 @@ object Expressions {
"[" ~ (list_comp | list) ~ "]" |
"{" ~ dictorsetmaker ~ "}" |
"`" ~ testlist1.map(x => Ast.expr.Repr(Ast.expr.Tuple(x, Ast.expr_context.Load))) ~ "`" |
- STRING.rep(1).map(_.mkString).map(Ast.expr.Str) |
+ STRING.rep(1).map(_.mkString).map(Ast.expr.Str.apply) |
NAME.map(Ast.expr.Name(_, Ast.expr_context.Load)) |
NUMBER
)
}
- def list_contents[_: P] = P( test.rep(1, ",") ~ ",".? )
- def list[_: P] = P( list_contents ).map(Ast.expr.List(_, Ast.expr_context.Load))
- def tuple_contents[_: P] = P( test ~ "," ~ list_contents.?).map { case (head, rest) => head +: rest.getOrElse(Seq.empty) }
- def tuple[_: P] = P( tuple_contents).map(Ast.expr.Tuple(_, Ast.expr_context.Load))
- def list_comp_contents[_: P] = P( test ~ comp_for.rep(1) )
- def list_comp[_: P] = P( list_comp_contents ).map(Ast.expr.ListComp.tupled)
- def generator[_: P] = P( list_comp_contents ).map(Ast.expr.GeneratorExp.tupled)
+ def list_contents[$: P] = P( test.rep(1, ",") ~ ",".? )
+ def list[$: P] = P( list_contents ).map(Ast.expr.List(_, Ast.expr_context.Load))
+ def tuple_contents[$: P] = P( test ~ "," ~ list_contents.?).map { case (head, rest) => head +: rest.getOrElse(Seq.empty) }
+ def tuple[$: P] = P( tuple_contents).map(Ast.expr.Tuple(_, Ast.expr_context.Load))
+ def list_comp_contents[$: P] = P( test ~ comp_for.rep(1) )
+ def list_comp[$: P] = P( list_comp_contents ).map((Ast.expr.ListComp.apply _).tupled)
+ def generator[$: P] = P( list_comp_contents ).map((Ast.expr.GeneratorExp.apply _).tupled)
- def lambdef[_: P]: P[Ast.expr.Lambda] = P( kw("lambda") ~ varargslist ~ ":" ~ test ).map(Ast.expr.Lambda.tupled)
- def trailer[_: P]: P[Ast.expr => Ast.expr] = {
+ def lambdef[$: P]: P[Ast.expr.Lambda] = P( kw("lambda") ~ varargslist ~ ":" ~ test ).map((Ast.expr.Lambda.apply _).tupled)
+ def trailer[$: P]: P[Ast.expr => Ast.expr] = {
def call = P("(" ~ arglist ~ ")").map{ case (args, (keywords, starargs, kwargs)) => (lhs: Ast.expr) => Ast.expr.Call(lhs, args, keywords, starargs, kwargs)}
def slice = P("[" ~ subscriptlist ~ "]").map(args => (lhs: Ast.expr) => Ast.expr.Subscript(lhs, args, Ast.expr_context.Load))
def attr = P("." ~ NAME).map(id => (lhs: Ast.expr) => Ast.expr.Attribute(lhs, id, Ast.expr_context.Load))
P( call | slice | attr )
}
- def subscriptlist[_: P] = P( subscript.rep(1, ",") ~ ",".? ).map{
+ def subscriptlist[$: P] = P( subscript.rep(1, ",") ~ ",".? ).map{
case Seq(x) => x
case xs => Ast.slice.ExtSlice(xs)
}
- def subscript[_: P]: P[Ast.slice] = {
+ def subscript[$: P]: P[Ast.slice] = {
def ellipses = P( ("." ~ "." ~ ".").map(_ => Ast.slice.Ellipsis) )
- def single = P( test.map(Ast.slice.Index) )
+ def single = P( test.map(Ast.slice.Index.apply) )
def multi = P(test.? ~ ":" ~ test.? ~ sliceop.?).map { case (lower, upper, step) =>
Ast.slice.Slice(
lower,
@@ -151,10 +153,10 @@ object Expressions {
P( ellipses | multi | single )
}
- def sliceop[_: P] = P( ":" ~ test.? )
- def exprlist[_: P]: P[Seq[Ast.expr]] = P( expr.rep(1, sep = ",") ~ ",".? )
- def testlist[_: P]: P[Seq[Ast.expr]] = P( test.rep(1, sep = ",") ~ ",".? )
- def dictorsetmaker[_: P]: P[Ast.expr] = {
+ def sliceop[$: P] = P( ":" ~ test.? )
+ def exprlist[$: P]: P[Seq[Ast.expr]] = P( expr.rep(1, sep = ",") ~ ",".? )
+ def testlist[$: P]: P[Seq[Ast.expr]] = P( test.rep(1, sep = ",") ~ ",".? )
+ def dictorsetmaker[$: P]: P[Ast.expr] = {
def dict_item = P( test ~ ":" ~ test )
def dict: P[Ast.expr.Dict] = P(
(dict_item.rep(1, ",") ~ ",".?).map{x =>
@@ -163,14 +165,14 @@ object Expressions {
}
)
def dict_comp = P(
- (dict_item ~ comp_for.rep(1)).map(Ast.expr.DictComp.tupled)
+ (dict_item ~ comp_for.rep(1)).map((Ast.expr.DictComp.apply _).tupled)
)
- def set: P[Ast.expr.Set] = P( test.rep(1, ",") ~ ",".? ).map(Ast.expr.Set)
- def set_comp = P( test ~ comp_for.rep(1) ).map(Ast.expr.SetComp.tupled)
+ def set: P[Ast.expr.Set] = P( test.rep(1, ",") ~ ",".? ).map(Ast.expr.Set.apply)
+ def set_comp = P( test ~ comp_for.rep(1) ).map((Ast.expr.SetComp.apply _).tupled)
P( dict_comp | dict | set_comp | set)
}
- def arglist[_: P] = {
+ def arglist[$: P] = {
def inits = P( (plain_argument ~ !"=").rep(0, ",") )
def later = P( named_argument.rep(0, ",") ~ ",".? ~ ("*" ~ test).? ~ ",".? ~ ("**" ~ test).? ~ ",".? ~ named_argument.rep(0, ",")).map{
case (named1, dot, star, named2) => (named1 ++ named2, dot, star )
@@ -178,25 +180,25 @@ object Expressions {
P( inits ~ ",".? ~ later )
}
- def plain_argument[_: P] = P( test ~ comp_for.rep ).map{
+ def plain_argument[$: P] = P( test ~ comp_for.rep ).map{
case (x, Nil) => x
case (x, gens) => Ast.expr.GeneratorExp(x, gens)
}
- def named_argument[_: P] = P( NAME ~ "=" ~ test ).map(Ast.keyword.tupled)
+ def named_argument[$: P] = P( NAME ~ "=" ~ test ).map((Ast.keyword.apply _).tupled)
- def comp_for[_: P]: P[Ast.comprehension] = P( kw("for") ~ exprlist ~ kw("in") ~ or_test ~ comp_if.rep ).map{
+ def comp_for[$: P]: P[Ast.comprehension] = P( kw("for") ~ exprlist ~ kw("in") ~ or_test ~ comp_if.rep ).map{
case (targets, test, ifs) => Ast.comprehension(tuplize(targets), test, ifs)
}
- def comp_if[_: P]: P[Ast.expr] = P( kw("if") ~ test )
+ def comp_if[$: P]: P[Ast.expr] = P( kw("if") ~ test )
- def testlist1[_: P]: P[Seq[Ast.expr]] = P( test.rep(1, sep = ",") )
+ def testlist1[$: P]: P[Seq[Ast.expr]] = P( test.rep(1, sep = ",") )
// not used in grammar, but may appear in "node" passed from Parser to Compiler
- // def encoding_decl[_: P]: P0 = P( NAME )
+ // def encoding_decl[$: P]: P0 = P( NAME )
- def yield_expr[_: P]: P[Ast.expr.Yield] = P( kw("yield") ~ testlist.map(tuplize).? ).map(Ast.expr.Yield)
+ def yield_expr[$: P]: P[Ast.expr.Yield] = P( kw("yield") ~ testlist.map(tuplize).? ).map(Ast.expr.Yield.apply)
- def varargslist[_: P]: P[Ast.arguments] = {
+ def varargslist[$: P]: P[Ast.arguments] = {
def named_arg = P( fpdef ~ ("=" ~ test).? )
def x = P( named_arg.rep(sep = ",") ~ ",".? ~ ("*" ~ NAME).? ~ ",".? ~ ("**" ~ NAME).? ).map{
case (normal_args, starargs, kwargs) =>
@@ -206,6 +208,6 @@ object Expressions {
P( x )
}
- def fpdef[_: P]: P[Ast.expr] = P( NAME.map(Ast.expr.Name(_, Ast.expr_context.Param)) | "(" ~ fplist ~ ")" )
- def fplist[_: P]: P[Ast.expr] = P( fpdef.rep(sep = ",") ~ ",".? ).map(Ast.expr.Tuple(_, Ast.expr_context.Param))
+ def fpdef[$: P]: P[Ast.expr] = P( NAME.map(Ast.expr.Name(_, Ast.expr_context.Param)) | "(" ~ fplist ~ ")" )
+ def fplist[$: P]: P[Ast.expr] = P( fpdef.rep(sep = ",") ~ ",".? ).map(Ast.expr.Tuple(_, Ast.expr_context.Param))
}
diff --git a/pythonparse/src/pythonparse/Lexical.scala b/pythonparse/src/pythonparse/Lexical.scala
index 711f64fd..dde955a0 100644
--- a/pythonparse/src/pythonparse/Lexical.scala
+++ b/pythonparse/src/pythonparse/Lexical.scala
@@ -10,17 +10,17 @@ import fastparse.NoWhitespace._
object Lexical {
import fastparse._
- def kw[_: P](s: String) = s ~ !(letter | digit | "_")
- def comment[_: P] = P( "#" ~ CharsWhile(_ != '\n', 0) )
- def wscomment[_: P] = P( (CharsWhileIn(" \n") | Lexical.comment | "\\\n").rep )
- def nonewlinewscomment[_: P] = P( (CharsWhileIn(" ") | Lexical.comment | "\\\n").rep )
+ def kw[$: P](s: String) = s ~ !(letter | digit | "_")
+ def comment[$: P] = P( "#" ~ CharsWhile(_ != '\n', 0) )
+ def wscomment[$: P] = P( (CharsWhileIn(" \n") | Lexical.comment | "\\\n").rep )
+ def nonewlinewscomment[$: P] = P( (CharsWhileIn(" ") | Lexical.comment | "\\\n").rep )
- def identifier[_: P]: P[Ast.identifier] =
- P( (letter|"_") ~ (letter | digit | "_").rep ).!.filter(!keywordList.contains(_)).map(Ast.identifier)
- def letter[_: P] = P( lowercase | uppercase )
- def lowercase[_: P] = P( CharIn("a-z") )
- def uppercase[_: P] = P( CharIn("A-Z") )
- def digit[_: P] = P( CharIn("0-9") )
+ def identifier[$: P]: P[Ast.identifier] =
+ P( (letter|"_") ~ (letter | digit | "_").rep ).!.filter(!keywordList.contains(_)).map(Ast.identifier.apply)
+ def letter[$: P] = P( lowercase | uppercase )
+ def lowercase[$: P] = P( CharIn("a-z") )
+ def uppercase[$: P] = P( CharIn("A-Z") )
+ def digit[$: P] = P( CharIn("0-9") )
val keywordList = Set(
"and", "del", "from", "not", "while",
@@ -32,46 +32,46 @@ object Lexical {
"def", "for", "lambda", "try"
)
- def stringliteral[_: P]: P[String] = P( stringprefix.? ~ (longstring | shortstring) )
- def stringprefix[_: P]: P[Unit] = P(
+ def stringliteral[$: P]: P[String] = P( stringprefix.? ~ (longstring | shortstring) )
+ def stringprefix[$: P]: P[Unit] = P(
"r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR" | "b" | "B" | "br" | "Br" | "bR" | "BR"
)
- def shortstring[_: P]: P[String] = P( shortstring0("'") | shortstring0("\"") )
- def shortstring0[_: P](delimiter: String) = P( delimiter ~ shortstringitem(delimiter).rep.! ~ delimiter)
- def shortstringitem[_: P](quote: String): P[Unit] = P( shortstringchar(quote) | escapeseq )
- def shortstringchar[_: P](quote: String): P[Unit] = P( CharsWhile(!s"\\\n${quote(0)}".contains(_)) )
+ def shortstring[$: P]: P[String] = P( shortstring0("'") | shortstring0("\"") )
+ def shortstring0[$: P](delimiter: String) = P( delimiter ~ shortstringitem(delimiter).rep.! ~ delimiter)
+ def shortstringitem[$: P](quote: String): P[Unit] = P( shortstringchar(quote) | escapeseq )
+ def shortstringchar[$: P](quote: String): P[Unit] = P( CharsWhile(!s"\\\n${quote(0)}".contains(_)) )
- def longstring[_: P]: P[String] = P( longstring0("'''") | longstring0("\"\"\"") )
- def longstring0[_: P](delimiter: String) = P( delimiter ~ longstringitem(delimiter).rep.! ~ delimiter)
- def longstringitem[_: P](quote: String): P[Unit] = P( longstringchar(quote) | escapeseq | !quote ~ quote.take(1) )
- def longstringchar[_: P](quote: String): P[Unit] = P( CharsWhile(!s"\\${quote(0)}".contains(_)) )
+ def longstring[$: P]: P[String] = P( longstring0("'''") | longstring0("\"\"\"") )
+ def longstring0[$: P](delimiter: String) = P( delimiter ~ longstringitem(delimiter).rep.! ~ delimiter)
+ def longstringitem[$: P](quote: String): P[Unit] = P( longstringchar(quote) | escapeseq | !quote ~ quote.take(1) )
+ def longstringchar[$: P](quote: String): P[Unit] = P( CharsWhile(!s"\\${quote(0)}".contains(_)) )
- def escapeseq[_: P]: P[Unit] = P( "\\" ~ AnyChar )
+ def escapeseq[$: P]: P[Unit] = P( "\\" ~ AnyChar )
- def negatable[T, _: P](p: => P[T])(implicit ev: Numeric[T]) = (("+" | "-").?.! ~ p).map {
+ def negatable[T, $: P](p: => P[T])(implicit ev: Numeric[T]) = (("+" | "-").?.! ~ p).map {
case ("-", i) => ev.negate(i)
case (_, i) => i
}
- def longinteger[_: P]: P[BigInt] = P( integer ~ ("l" | "L") )
- def integer[_: P]: P[BigInt] = negatable[BigInt, Any](P( octinteger | hexinteger | bininteger | decimalinteger))
- def decimalinteger[_: P]: P[BigInt] = P( nonzerodigit ~ digit.rep | "0" ).!.map(scala.BigInt(_))
- def octinteger[_: P]: P[BigInt] = P( "0" ~ ("o" | "O") ~ octdigit.rep(1).! | "0" ~ octdigit.rep(1).! ).map(scala.BigInt(_, 8))
- def hexinteger[_: P]: P[BigInt] = P( "0" ~ ("x" | "X") ~ hexdigit.rep(1).! ).map(scala.BigInt(_, 16))
- def bininteger[_: P]: P[BigInt] = P( "0" ~ ("b" | "B") ~ bindigit.rep(1).! ).map(scala.BigInt(_, 2))
- def nonzerodigit[_: P]: P[Unit] = P( CharIn("1-9") )
- def octdigit[_: P]: P[Unit] = P( CharIn("0-7") )
- def bindigit[_: P]: P[Unit] = P( "0" | "1" )
- def hexdigit[_: P]: P[Unit] = P( digit | CharIn("a-f", "A-F") )
+ def longinteger[$: P]: P[BigInt] = P( integer ~ ("l" | "L") )
+ def integer[$: P]: P[BigInt] = negatable[BigInt, Any](P( octinteger | hexinteger | bininteger | decimalinteger))
+ def decimalinteger[$: P]: P[BigInt] = P( nonzerodigit ~ digit.rep | "0" ).!.map(scala.BigInt(_))
+ def octinteger[$: P]: P[BigInt] = P( "0" ~ ("o" | "O") ~ octdigit.rep(1).! | "0" ~ octdigit.rep(1).! ).map(scala.BigInt(_, 8))
+ def hexinteger[$: P]: P[BigInt] = P( "0" ~ ("x" | "X") ~ hexdigit.rep(1).! ).map(scala.BigInt(_, 16))
+ def bininteger[$: P]: P[BigInt] = P( "0" ~ ("b" | "B") ~ bindigit.rep(1).! ).map(scala.BigInt(_, 2))
+ def nonzerodigit[$: P]: P[Unit] = P( CharIn("1-9") )
+ def octdigit[$: P]: P[Unit] = P( CharIn("0-7") )
+ def bindigit[$: P]: P[Unit] = P( "0" | "1" )
+ def hexdigit[$: P]: P[Unit] = P( digit | CharIn("a-f", "A-F") )
- def floatnumber[_: P]: P[BigDecimal] = negatable[BigDecimal, Any](P( pointfloat | exponentfloat ))
- def pointfloat[_: P]: P[BigDecimal] = P( intpart.? ~ fraction | intpart ~ "." ).!.map(BigDecimal(_))
- def exponentfloat[_: P]: P[BigDecimal] = P( (intpart | pointfloat) ~ exponent ).!.map(BigDecimal(_))
- def intpart[_: P]: P[BigDecimal] = P( digit.rep(1) ).!.map(BigDecimal(_))
- def fraction[_: P]: P[Unit] = P( "." ~ digit.rep(1) )
- def exponent[_: P]: P[Unit] = P( ("e" | "E") ~ ("+" | "-").? ~ digit.rep(1) )
+ def floatnumber[$: P]: P[BigDecimal] = negatable[BigDecimal, Any](P( pointfloat | exponentfloat ))
+ def pointfloat[$: P]: P[BigDecimal] = P( intpart.? ~ fraction | intpart ~ "." ).!.map(BigDecimal(_))
+ def exponentfloat[$: P]: P[BigDecimal] = P( (intpart | pointfloat) ~ exponent ).!.map(BigDecimal(_))
+ def intpart[$: P]: P[BigDecimal] = P( digit.rep(1) ).!.map(BigDecimal(_))
+ def fraction[$: P]: P[Unit] = P( "." ~ digit.rep(1) )
+ def exponent[$: P]: P[Unit] = P( ("e" | "E") ~ ("+" | "-").? ~ digit.rep(1) )
- def imagnumber[_: P] = P( (floatnumber | intpart) ~ ("j" | "J") )
+ def imagnumber[$: P] = P( (floatnumber | intpart) ~ ("j" | "J") )
}
diff --git a/pythonparse/src/pythonparse/Statements.scala b/pythonparse/src/pythonparse/Statements.scala
index 218d8686..0946b0fa 100644
--- a/pythonparse/src/pythonparse/Statements.scala
+++ b/pythonparse/src/pythonparse/Statements.scala
@@ -11,23 +11,25 @@ object Statements extends Statements(0)
* Manually transcribed from https://docs.python.org/2/reference/grammar.html
*/
class Statements(indent: Int){
- implicit def whitespace(cfg: P[_]): P[Unit] = Lexical.wscomment(cfg)
- def space[_: P] = P( CharIn(" \n") )
- def NEWLINE[_: P]: P0 = P( "\n" | End )
- def ENDMARKER[_: P]: P0 = P( End )
+ implicit object whitespace extends fastparse.Whitespace {
+ def apply(ctx: P[_]): P[Unit] = Lexical.wscomment(ctx)
+ }
+ def space[$: P] = P( CharIn(" \n") )
+ def NEWLINE[$: P]: P0 = P( "\n" | End )
+ def ENDMARKER[$: P]: P0 = P( End )
- def single_input[_: P]: P[Seq[Ast.stmt]] = P(
+ def single_input[$: P]: P[Seq[Ast.stmt]] = P(
NEWLINE.map(_ => Nil) |
simple_stmt |
compound_stmt.map(Seq(_)) ~ NEWLINE
)
- def indents[_: P] = P( "\n" ~~ " ".repX(indent) )
+ def indents[$: P] = P( "\n" ~~ " ".repX(indent) )
- def spaces[_: P] = P( (Lexical.nonewlinewscomment.? ~~ "\n").repX(1) )
- def file_input[_: P]: P[Seq[Ast.stmt]] =
+ def spaces[$: P] = P( (Lexical.nonewlinewscomment.? ~~ "\n").repX(1) )
+ def file_input[$: P]: P[Seq[Ast.stmt]] =
P( spaces.? ~ stmt.repX(0, spaces) ~ spaces.? ).map(_.flatten)
- def eval_input[_: P]: P[Ast.expr] = P( testlist ~ NEWLINE.rep ~ ENDMARKER ).map(tuplize)
+ def eval_input[$: P]: P[Ast.expr] = P( testlist ~ NEWLINE.rep ~ ENDMARKER ).map(tuplize)
def collapse_dotted_name(name: Seq[Ast.identifier]): Ast.expr = {
name.tail.foldLeft[Ast.expr](Ast.expr.Name(name.head, Ast.expr_context.Load))(
@@ -35,34 +37,34 @@ class Statements(indent: Int){
)
}
- def decorator[_: P]: P[Ast.expr] = P( "@" ~/ dotted_name ~ ("(" ~ arglist ~ ")" ).? ~~ Lexical.nonewlinewscomment.? ~~ NEWLINE).map{
+ def decorator[$: P]: P[Ast.expr] = P( "@" ~/ dotted_name ~ ("(" ~ arglist ~ ")" ).? ~~ Lexical.nonewlinewscomment.? ~~ NEWLINE).map{
case (name, None) => collapse_dotted_name(name)
case (name, Some((args, (keywords, starargs, kwargs)))) =>
val x = collapse_dotted_name(name)
Ast.expr.Call(x, args, keywords, starargs, kwargs)
}
- def decorators[_: P] = P( decorator.rep )
- def decorated[_: P]: P[Ast.stmt] = P( decorators ~ (classdef | funcdef) ).map{case (a, b) => b(a)}
- def classdef[_: P]: P[Seq[Ast.expr] => Ast.stmt.ClassDef] =
+ def decorators[$: P] = P( decorator.rep )
+ def decorated[$: P]: P[Ast.stmt] = P( decorators ~ (classdef | funcdef) ).map{case (a, b) => b(a)}
+ def classdef[$: P]: P[Seq[Ast.expr] => Ast.stmt.ClassDef] =
P( kw("class") ~/ NAME ~ ("(" ~ testlist.? ~ ")").?.map(_.toSeq.flatten.flatten) ~ ":" ~~ suite ).map{
case (a, b, c) => Ast.stmt.ClassDef(a, b, c, _)
}
- def funcdef[_: P]: P[Seq[Ast.expr] => Ast.stmt.FunctionDef] = P( kw("def") ~/ NAME ~ parameters ~ ":" ~~ suite ).map{
+ def funcdef[$: P]: P[Seq[Ast.expr] => Ast.stmt.FunctionDef] = P( kw("def") ~/ NAME ~ parameters ~ ":" ~~ suite ).map{
case (name, args, suite) => Ast.stmt.FunctionDef(name, args, suite, _)
}
- def parameters[_: P]: P[Ast.arguments] = P( "(" ~ varargslist ~ ")" )
+ def parameters[$: P]: P[Ast.arguments] = P( "(" ~ varargslist ~ ")" )
- def stmt[_: P]: P[Seq[Ast.stmt]] = P( compound_stmt.map(Seq(_)) | simple_stmt )
+ def stmt[$: P]: P[Seq[Ast.stmt]] = P( compound_stmt.map(Seq(_)) | simple_stmt )
- def simple_stmt[_: P]: P[Seq[Ast.stmt]] = P( small_stmt.rep(1, sep = ";") ~ ";".? )
- def small_stmt[_: P]: P[Ast.stmt] = P(
+ def simple_stmt[$: P]: P[Seq[Ast.stmt]] = P( small_stmt.rep(1, sep = ";") ~ ";".? )
+ def small_stmt[$: P]: P[Ast.stmt] = P(
print_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | exec_stmt | assert_stmt | expr_stmt
)
- def expr_stmt[_: P]: P[Ast.stmt] = {
+ def expr_stmt[$: P]: P[Ast.stmt] = {
def aug = P( testlist ~ augassign ~ (yield_expr | testlist.map(tuplize)) )
def assign = P( testlist ~ ("=" ~ (yield_expr | testlist.map(tuplize))).rep )
@@ -75,7 +77,7 @@ class Statements(indent: Int){
)
}
- def augassign[_: P]: P[Ast.operator] = P(
+ def augassign[$: P]: P[Ast.operator] = P(
"+=".!.map(_ => Ast.operator.Add) |
"-=".!.map(_ => Ast.operator.Sub) |
"*=".!.map(_ => Ast.operator.Mult) |
@@ -90,64 +92,64 @@ class Statements(indent: Int){
"//=".!.map(_ => Ast.operator.FloorDiv)
)
- def print_stmt[_: P]: P[Ast.stmt.Print] = {
+ def print_stmt[$: P]: P[Ast.stmt.Print] = {
def noDest = P( test.rep(sep = ",") ~ ",".?).map(Ast.stmt.Print(None, _, true))
def dest = P( ">>" ~ test ~ ("," ~ test).rep ~ ",".?).map{case (dest, exprs) => Ast.stmt.Print(Some(dest), exprs, true)}
P( "print" ~~ " ".rep ~~ (noDest | dest) )
}
- def del_stmt[_: P] = P( kw("del") ~~ " ".rep ~~ exprlist ).map(Ast.stmt.Delete)
- def pass_stmt[_: P] = P( kw("pass") ).map(_ => Ast.stmt.Pass)
- def flow_stmt[_: P]: P[Ast.stmt] = P( break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt )
- def break_stmt[_: P] = P( kw("break") ).map(_ => Ast.stmt.Break)
- def continue_stmt[_: P] = P( kw("continue") ).map(_ => Ast.stmt.Continue)
- def return_stmt[_: P] = P( kw("return") ~~ " ".rep ~~ testlist.map(tuplize).? ).map(Ast.stmt.Return)
-
- def yield_stmt[_: P] = P( yield_expr ).map(Ast.stmt.Expr)
- def raise_stmt[_: P]: P[Ast.stmt.Raise] = P( kw("raise") ~~ " ".rep ~~test.? ~ ("," ~ test).? ~ ("," ~ test).? ).map(Ast.stmt.Raise.tupled)
- def import_stmt[_: P]: P[Ast.stmt] = P( import_name | import_from )
- def import_name[_: P]: P[Ast.stmt.Import] = P( kw("import") ~ dotted_as_names ).map(Ast.stmt.Import)
- def import_from[_: P]: P[Ast.stmt.ImportFrom] = {
+ def del_stmt[$: P] = P( kw("del") ~~ " ".rep ~~ exprlist ).map(Ast.stmt.Delete.apply)
+ def pass_stmt[$: P] = P( kw("pass") ).map(_ => Ast.stmt.Pass)
+ def flow_stmt[$: P]: P[Ast.stmt] = P( break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt )
+ def break_stmt[$: P] = P( kw("break") ).map(_ => Ast.stmt.Break)
+ def continue_stmt[$: P] = P( kw("continue") ).map(_ => Ast.stmt.Continue)
+ def return_stmt[$: P] = P( kw("return") ~~ " ".rep ~~ testlist.map(tuplize).? ).map(Ast.stmt.Return.apply)
+
+ def yield_stmt[$: P] = P( yield_expr ).map(Ast.stmt.Expr.apply)
+ def raise_stmt[$: P]: P[Ast.stmt.Raise] = P( kw("raise") ~~ " ".rep ~~test.? ~ ("," ~ test).? ~ ("," ~ test).? ).map((Ast.stmt.Raise.apply _).tupled)
+ def import_stmt[$: P]: P[Ast.stmt] = P( import_name | import_from )
+ def import_name[$: P]: P[Ast.stmt.Import] = P( kw("import") ~ dotted_as_names ).map(Ast.stmt.Import.apply)
+ def import_from[$: P]: P[Ast.stmt.ImportFrom] = {
def named = P( ".".rep(1).!.? ~ dotted_name.!.map(Some(_)) )
def unNamed = P( ".".rep(1).!.map(x => (Some(x), None)) )
def star = P( "*".!.map(_ => Seq(Ast.alias(Ast.identifier("*"), None))) )
P( kw("from") ~ (named | unNamed) ~ kw("import") ~ (star | "(" ~ import_as_names ~ ")" | import_as_names) ).map{
- case (dots, module, names) => Ast.stmt.ImportFrom(module.map(Ast.identifier), names, dots.map(_.length))
+ case (dots, module, names) => Ast.stmt.ImportFrom(module.map(Ast.identifier.apply), names, dots.map(_.length))
}
}
- def import_as_name[_: P]: P[Ast.alias] = P( NAME ~ (kw("as") ~ NAME).? ).map(Ast.alias.tupled)
- def dotted_as_name[_: P]: P[Ast.alias] = P( dotted_name.map(x => Ast.identifier(x.map(_.name).mkString("."))) ~ (kw("as") ~ NAME).? ).map(Ast.alias.tupled)
- def import_as_names[_: P] = P( import_as_name.rep(1, ",") ~ (",").? )
- def dotted_as_names[_: P] = P( dotted_as_name.rep(1, ",") )
- def dotted_name[_: P] = P( NAME.rep(1, ".") )
- def global_stmt[_: P]: P[Ast.stmt.Global] = P( kw("global") ~ NAME.rep(sep = ",") ).map(Ast.stmt.Global)
- def exec_stmt[_: P]: P[Ast.stmt.Exec] = P( kw("exec") ~ expr ~ (kw("in") ~ test ~ ("," ~ test).?).? ).map {
+ def import_as_name[$: P]: P[Ast.alias] = P( NAME ~ (kw("as") ~ NAME).? ).map((Ast.alias.apply _).tupled)
+ def dotted_as_name[$: P]: P[Ast.alias] = P( dotted_name.map(x => Ast.identifier(x.map(_.name).mkString("."))) ~ (kw("as") ~ NAME).? ).map((Ast.alias.apply _).tupled)
+ def import_as_names[$: P] = P( import_as_name.rep(1, ",") ~ (",").? )
+ def dotted_as_names[$: P] = P( dotted_as_name.rep(1, ",") )
+ def dotted_name[$: P] = P( NAME.rep(1, ".") )
+ def global_stmt[$: P]: P[Ast.stmt.Global] = P( kw("global") ~ NAME.rep(sep = ",") ).map(Ast.stmt.Global.apply)
+ def exec_stmt[$: P]: P[Ast.stmt.Exec] = P( kw("exec") ~ expr ~ (kw("in") ~ test ~ ("," ~ test).?).? ).map {
case (expr, None) => Ast.stmt.Exec(expr, None, None)
case (expr, Some((globals, None))) => Ast.stmt.Exec(expr, Some(globals), None)
case (expr, Some((globals, Some(locals)))) => Ast.stmt.Exec(expr, Some(globals), Some(locals))
}
- def assert_stmt[_: P]: P[Ast.stmt.Assert] = P( kw("assert") ~ test ~ ("," ~ test).? ).map(Ast.stmt.Assert.tupled)
+ def assert_stmt[$: P]: P[Ast.stmt.Assert] = P( kw("assert") ~ test ~ ("," ~ test).? ).map((Ast.stmt.Assert.apply _).tupled)
- def compound_stmt[_: P]: P[Ast.stmt] = P( if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | decorated )
- def if_stmt[_: P]: P[Ast.stmt.If] = {
+ def compound_stmt[$: P]: P[Ast.stmt] = P( if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | decorated )
+ def if_stmt[$: P]: P[Ast.stmt.If] = {
def firstIf = P( kw("if") ~/ test ~ ":" ~~ suite )
def elifs = P( (space_indents ~~ kw("elif") ~/ test ~ ":" ~~ suite).repX )
def lastElse = P( (space_indents ~~ kw("else") ~/ ":" ~~ suite).? )
P( firstIf ~~ elifs ~~ lastElse ).map{
case (test, body, elifs, orelse) =>
- val (init :+ last) = (test, body) +: elifs
+ val (init :+ last) = (test, body) +: elifs : @unchecked
val (last_test, last_body) = last
init.foldRight(Ast.stmt.If(last_test, last_body, orelse.toSeq.flatten)){
case ((test, body), rhs) => Ast.stmt.If(test, body, Seq(rhs))
}
}
}
- def space_indents[_: P] = P( spaces.repX ~~ " ".repX(indent) )
- def while_stmt[_: P] = P( kw("while") ~/ test ~ ":" ~~ suite ~~ (space_indents ~~ kw("else") ~/ ":" ~~ suite).?.map(_.toSeq.flatten) ).map(Ast.stmt.While.tupled)
- def for_stmt[_: P]: P[Ast.stmt.For] = P( kw("for") ~/ exprlist ~ kw("in") ~ testlist ~ ":" ~~ suite ~~ (space_indents ~ kw("else") ~/ ":" ~~ suite).? ).map {
+ def space_indents[$: P] = P( spaces.repX ~~ " ".repX(indent) )
+ def while_stmt[$: P] = P( kw("while") ~/ test ~ ":" ~~ suite ~~ (space_indents ~~ kw("else") ~/ ":" ~~ suite).?.map(_.toSeq.flatten) ).map((Ast.stmt.While.apply _).tupled)
+ def for_stmt[$: P]: P[Ast.stmt.For] = P( kw("for") ~/ exprlist ~ kw("in") ~ testlist ~ ":" ~~ suite ~~ (space_indents ~ kw("else") ~/ ":" ~~ suite).? ).map {
case (itervars, generator, body, orelse) =>
Ast.stmt.For(tuplize(itervars), tuplize(generator), body, orelse.toSeq.flatten)
}
- def try_stmt[_: P]: P[Ast.stmt]= {
+ def try_stmt[$: P]: P[Ast.stmt]= {
def `try` = P( kw("try") ~/ ":" ~~ suite )
def excepts: P[Seq[Ast.excepthandler]] = P( (except_clause ~ ":" ~~ suite).map{
case (None, body) => Ast.excepthandler.ExceptHandler(None, None, body)
@@ -168,7 +170,7 @@ class Statements(indent: Int){
)
}
}
- def with_stmt[_: P]: P[Ast.stmt.With] = P( kw("with") ~/ with_item.rep(1, ",")~ ":" ~~ suite ).map{
+ def with_stmt[$: P]: P[Ast.stmt.With] = P( kw("with") ~/ with_item.rep(1, ",")~ ":" ~~ suite ).map{
case (items, body) =>
val (last_expr, last_vars) = items.last
val inner = Ast.stmt.With(last_expr, last_vars, body)
@@ -176,12 +178,12 @@ class Statements(indent: Int){
case ((expr, vars), body) => Ast.stmt.With(expr, vars, Seq(body))
}
}
- def with_item[_: P]: P[(Ast.expr, Option[Ast.expr])] = P( test ~ (kw("as") ~ expr).? )
+ def with_item[$: P]: P[(Ast.expr, Option[Ast.expr])] = P( test ~ (kw("as") ~ expr).? )
// NB compile.c makes sure that the default except clause is last
- def except_clause[_: P] = P( space_indents ~ kw("except") ~/ (test ~ ((kw("as") | ",") ~ test).?).? )
+ def except_clause[$: P] = P( space_indents ~ kw("except") ~/ (test ~ ((kw("as") | ",") ~ test).?).? )
- def suite[_: P]: P[Seq[Ast.stmt]] = {
+ def suite[$: P]: P[Seq[Ast.stmt]] = {
def deeper: P[Int] = {
def commentLine = P("\n" ~~ Lexical.nonewlinewscomment.?.map(_ => 0)).map((_, Some("")))
def endLine = P("\n" ~~ (" "|"\t").repX(indent + 1).!.map(_.length) ~~ Lexical.comment.!.? )
diff --git a/pythonparse/test/src-jvm/pythonparse/ProjectTests.scala b/pythonparse/test/src-jvm/pythonparse/ProjectTests.scala
index 054eacb2..045e979b 100644
--- a/pythonparse/test/src-jvm/pythonparse/ProjectTests.scala
+++ b/pythonparse/test/src-jvm/pythonparse/ProjectTests.scala
@@ -65,15 +65,17 @@ object ProjectTests extends TestSuite{
}
val tests = Tests {
- "dropbox/changes" - check("37e23c3141b75e4785cf398d015e3dbca41bdd56")
- "django/django" - check(
- "399a8db33b14a1f707912ac48a185fb0a1204913",
- ignored = Seq("tests/i18n/test_compilation.py")
- )
- "mitsuhiko/flask" - check("9291ead32e2fc8b13cef825186c968944e9ff344")
- "zulip/zulip" - check("b5c107ed27b337ed833ebe9c754889bf078d743e")
- "ansible/ansible"- check("02cd88169764232fd63c776456178fe61d3a214a")
- "kennethreitz/requests" - check("9713289e741960249c94fcb1686746f80e2f20b5")
+ // Disabled since GitHub Actions dropped support for Python 2.7
+
+// "dropbox/changes" - check("37e23c3141b75e4785cf398d015e3dbca41bdd56")
+// "django/django" - check(
+// "399a8db33b14a1f707912ac48a185fb0a1204913",
+// ignored = Seq("tests/i18n/test_compilation.py")
+// )
+// "mitsuhiko/flask" - check("9291ead32e2fc8b13cef825186c968944e9ff344")
+// "zulip/zulip" - check("b5c107ed27b337ed833ebe9c754889bf078d743e")
+// "ansible/ansible"- check("02cd88169764232fd63c776456178fe61d3a214a")
+// "kennethreitz/requests" - check("9713289e741960249c94fcb1686746f80e2f20b5")
// test("test"){
// val txt = new String(Files.readAllBytes(Paths.get("out/repos/ansible/lib/ansible/modules/cloud/cloudstack/cs_instance.py")))
diff --git a/pythonparse/test/src/pythonparse/RegressionTests.scala b/pythonparse/test/src/pythonparse/RegressionTests.scala
index bf8a8f55..59427b2a 100644
--- a/pythonparse/test/src/pythonparse/RegressionTests.scala
+++ b/pythonparse/test/src/pythonparse/RegressionTests.scala
@@ -1,17 +1,15 @@
package pythonparse
import utest._
-import fastparse._
object RegressionTests extends TestSuite{
import Ast.expr._
import Ast.stmt._
import Ast.expr_context._
- import Ast.cmpop._
import Ast.operator._
import Ast.unaryop._
import Ast._
- implicit def strName(s: Symbol) = Name(identifier(s.name), Load)
- implicit def strIdent(s: Symbol) = identifier(s.name)
+ implicit def strName(s: String): Name = Name(identifier(s), Load)
+ implicit def strIdent(s: String): identifier = identifier(s)
val tests = Tests {
test("multiple_comments") - TestUtils.check(
Statements.file_input(_),
@@ -23,7 +21,7 @@ object RegressionTests extends TestSuite{
test("multiple_newlines") - TestUtils.check(
Statements.file_input(_),
- Seq(Expr('a), Expr('b)),
+ Seq(Expr("a"), Expr("b")),
"""a
|
|b""".stripMargin
@@ -31,7 +29,7 @@ object RegressionTests extends TestSuite{
test("multi_line_function") - TestUtils.check(
Statements.file_input(_),
- Seq(FunctionDef('session_config, arguments(Nil, None, None, Nil), Seq(Expr('a), Expr('b)), Nil)),
+ Seq(FunctionDef("session_config", arguments(Nil, None, None, Nil), Seq(Expr("a"), Expr("b")), Nil)),
"""def session_config():
| a
|
@@ -40,7 +38,7 @@ object RegressionTests extends TestSuite{
test("backslash_breaks") - TestUtils.check(
Statements.file_input(_),
- Seq(Expr(Attribute('a, 'b, Load))),
+ Seq(Expr(Attribute("a", "b", Load))),
"""a\
|.b
|""".stripMargin
@@ -55,7 +53,7 @@ object RegressionTests extends TestSuite{
test("try_finally_no_except") - TestUtils.check(
Statements.file_input(_),
- Seq(TryFinally(Seq(Expr('a)), Seq(Expr('b)))),
+ Seq(TryFinally(Seq(Expr("a")), Seq(Expr("b")))),
"""try:
| a
|finally:
@@ -65,10 +63,10 @@ object RegressionTests extends TestSuite{
)
test("indented_try_except_with_space") - TestUtils.check(
Statements.file_input(_),
- Seq(FunctionDef('f, arguments(Nil, None, None, Nil), Seq(
+ Seq(FunctionDef("f", arguments(Nil, None, None, Nil), Seq(
TryExcept(
Seq(Pass),
- Seq(excepthandler.ExceptHandler(Some('s), None, Seq(Pass))),
+ Seq(excepthandler.ExceptHandler(Some("s"), None, Seq(Pass))),
Nil
)
), Nil)),
@@ -84,9 +82,9 @@ object RegressionTests extends TestSuite{
test("indented_block_with_spaces_and_offset_comments") - TestUtils.check(
Statements.file_input(_),
Seq(FunctionDef(
- 'post,
- arguments(Seq(Name('self, Param)), None, None, Nil),
- Seq(If(Num(1), Seq(Expr('a)), Nil)),
+ "post",
+ arguments(Seq(Name("self", Param)), None, None, Nil),
+ Seq(If(Num(1), Seq(Expr("a")), Nil)),
Nil
)),
"""def post(self):
@@ -99,13 +97,13 @@ object RegressionTests extends TestSuite{
test("indented_block_with_spaces_and_offset_comments") - TestUtils.check(
Statements.file_input(_),
Seq(While(
- 'a,
+ "a",
Seq(
TryExcept(
- Seq(Expr('a)),
+ Seq(Expr("a")),
Seq(
- excepthandler.ExceptHandler(None, None, Seq(Return(Some('a)))),
- excepthandler.ExceptHandler(None, None, Seq(Expr('a)))
+ excepthandler.ExceptHandler(None, None, Seq(Return(Some("a")))),
+ excepthandler.ExceptHandler(None, None, Seq(Expr("a")))
),
Nil
)
@@ -123,7 +121,7 @@ object RegressionTests extends TestSuite{
Statements.file_input(_),
Seq(While(
Num(1),
- Seq(Expr('a)),
+ Seq(Expr("a")),
Nil
)),
"""while 1:
@@ -134,8 +132,8 @@ object RegressionTests extends TestSuite{
test("ident_looking_string") - TestUtils.check(
Statements.file_input(_),
Seq(If(
- Call('match, Seq(Str("^[a-zA-Z0-9]")), Nil, None, None),
- Seq(Expr('a)),
+ Call("match", Seq(Str("^[a-zA-Z0-9]")), Nil, None, None),
+ Seq(Expr("a")),
Nil
)),
"""
@@ -147,9 +145,9 @@ object RegressionTests extends TestSuite{
test("same_line_comment") - TestUtils.check(
Statements.file_input(_),
Seq(If(
- 'b,
+ "b",
Seq(If(
- 'c,
+ "c",
Seq(Pass),
Nil
)),
@@ -164,13 +162,13 @@ object RegressionTests extends TestSuite{
Statements.file_input(_),
Seq(While(Num(1),
Seq(
- If('a,
- Seq(Expr('a)),
+ If("a",
+ Seq(Expr("a")),
Seq(
- If('b,
- Seq(Expr('b)),
- Seq(If('c,
- Seq(Expr('c)),
+ If("b",
+ Seq(Expr("b")),
+ Seq(If("c",
+ Seq(Expr("c")),
Nil
)))
))
@@ -188,7 +186,7 @@ object RegressionTests extends TestSuite{
)
test("bitand") - TestUtils.check(
Statements.file_input(_),
- Seq(Expr(BinOp('a, BitAnd, 'a))),
+ Seq(Expr(BinOp("a", BitAnd, "a"))),
"""a & a
|""".stripMargin
)
@@ -200,7 +198,7 @@ object RegressionTests extends TestSuite{
)
test("comment_after_decorator") - TestUtils.check(
Statements.file_input(_),
- Seq(ClassDef('GenericForeignKeyTests, Nil, Seq(Pass), Seq('override_settings))),
+ Seq(ClassDef("GenericForeignKeyTests", Nil, Seq(Pass), Seq("override_settings"))),
"""@override_settings # ForeignKey(unique=True)
|class GenericForeignKeyTests:
| pass
diff --git a/pythonparse/test/src/pythonparse/TestUtils.scala b/pythonparse/test/src/pythonparse/TestUtils.scala
index 8583b9bd..713f71df 100644
--- a/pythonparse/test/src/pythonparse/TestUtils.scala
+++ b/pythonparse/test/src/pythonparse/TestUtils.scala
@@ -9,7 +9,7 @@ object TestUtils {
import fastparse._
def check[T](rule: P[_] => P[T], expected: T, s: String) = {
import fastparse.NoWhitespace._
- def parseIt[_: P] = rule(P.current) ~ End
+ def parseIt[$: P] = rule(P.current) ~ End
val parsed = parse(s, parseIt(_))
val stringResult = parsed match {
case f: Parsed.Failure => throw new Exception(f.trace().longTerminalsMsg)
diff --git a/pythonparse/test/src/pythonparse/UnitTests.scala b/pythonparse/test/src/pythonparse/UnitTests.scala
index a1cf473d..3d87a671 100644
--- a/pythonparse/test/src/pythonparse/UnitTests.scala
+++ b/pythonparse/test/src/pythonparse/UnitTests.scala
@@ -1,7 +1,7 @@
package pythonparse
import utest._
-import fastparse._
+
/**
* Tests to cover most basic syntactic constructs. It's likely there are
* interactions between the constructs that will cause problems, but these
@@ -16,8 +16,8 @@ object UnitTests extends TestSuite{
import Ast.operator._
import Ast.unaryop._
import Ast._
- implicit def strName(s: Symbol) = Name(identifier(s.name), Load)
- implicit def strIdent(s: Symbol) = identifier(s.name)
+ implicit def strName(s: String): Name = Name(identifier(s), Load)
+ implicit def strIdent(s: String): identifier = identifier(s)
test("exprs"){
def expr(expected: Ast.expr, s: String*) = s.map(TestUtils.check(Expressions.test(_), expected, _)).head
@@ -39,9 +39,9 @@ object UnitTests extends TestSuite{
test("math") - expr(BinOp(Num(1.0), Add, Num(2.0)), "1+2", "1 + 2")
test("ident_math") - expr(
BinOp(
- 'a,
+ "a",
operator.Add,
- 'b
+ "b"
), "a + b")
test("precedence") - expr(
BinOp(
@@ -54,7 +54,7 @@ object UnitTests extends TestSuite{
test("unary") - expr(
UnaryOp(
Not,
- 'a
+ "a"
),
"not a"
)
@@ -63,7 +63,7 @@ object UnitTests extends TestSuite{
Not,
UnaryOp(
Not,
- 'a
+ "a"
)
),
"not not a"
@@ -71,14 +71,14 @@ object UnitTests extends TestSuite{
test("unary_invert") - expr(
UnaryOp(
Invert,
- 'a
+ "a"
),
"~a"
)
test("unary_negation") - expr(
UnaryOp(
USub,
- 'b
+ "b"
),
"-b"
)
@@ -92,48 +92,48 @@ object UnitTests extends TestSuite{
test("unary_add") - expr(
UnaryOp(
UAdd,
- 'c
+ "c"
),
"+c"
)
test("unary_precedence") - expr(
BinOp(
- BinOp(UnaryOp(USub, 'a), Add, 'b),
+ BinOp(UnaryOp(USub, "a"), Add, "b"),
Sub,
- 'c
+ "c"
),
"-a + b - c"
)
test("comparison") - expr(
Compare(
- 'a,
+ "a",
Seq(Lt, LtE, Gt, GtE, Eq, NotEq, In, NotIn),
- Seq('b, 'c, 'd, 'e, 'f, 'g, 'h, 'i)
+ Seq("b", "c", "d", "e", "f", "g", "h", "i")
),
"a < b <= c > d >= e == f != g in h not in i"
)
test("parenthetical_grouping") - expr(
- BinOp(BinOp('a, Add, 'b), Mult, BinOp('c, Sub, 'd)),
+ BinOp(BinOp("a", Add, "b"), Mult, BinOp("c", Sub, "d")),
"(a + b) * (c - d)"
)
}
test("chained"){
test("attributes") - expr(
- Attribute(Attribute('a, 'b, Load), 'c, Load),
+ Attribute(Attribute("a", "b", Load), "c", Load),
"a.b.c"
)
test("function_call") - expr(
Call(
Call(
- Call('a, Nil, Nil, None, None),
- Seq('x), Seq(keyword('y, 'z)), Some('wtf), Some('omg)
+ Call("a", Nil, Nil, None, None),
+ Seq("x"), Seq(keyword("y", "z")), Some("wtf"), Some("omg")
),
- Nil, Nil, None, Some('lol)
+ Nil, Nil, None, Some("lol")
),
"a()(x,y=z, *wtf, **omg)(**lol)"
)
test("slicing") - expr(
- Subscript('abc, slice.ExtSlice(Seq(slice.Index('d), slice.Slice(Some('e), Some('f), Some('None)))), Load),
+ Subscript("abc", slice.ExtSlice(Seq(slice.Index("d"), slice.Slice(Some("e"), Some("f"), Some("None")))), Load),
"abc[d, e:f:]"
)
}
@@ -165,36 +165,36 @@ object UnitTests extends TestSuite{
test("dict") - expr(
Dict(
Seq(Num(1.0), Num(2.0), Str("a")),
- Seq(Str("1"), Str("2"), 'a)
+ Seq(Str("1"), Str("2"), "a")
),
"{1 :'1', 2: '2', 'a': a}"
)
test("list_comp") - expr(
- ListComp('x, Seq(comprehension('y, 'z, Seq('w)))),
+ ListComp("x", Seq(comprehension("y", "z", Seq("w")))),
"[x for y in z if w]"
)
test("list_comp2") - expr(
- ListComp(Tuple(Seq('x, 'y), Load), Seq(
+ ListComp(Tuple(Seq("x", "y"), Load), Seq(
comprehension(
- Tuple(Seq('z, 'a), Load),
- Tuple(Seq('b, 'c), Load),
- Seq('d, 'e)
+ Tuple(Seq("z", "a"), Load),
+ Tuple(Seq("b", "c"), Load),
+ Seq("d", "e")
),
- comprehension('j, 'k, Nil)
+ comprehension("j", "k", Nil)
)),
"[(x, y) for (z, a) in (b, c) if d if e for j in k]"
)
test("set_comp") - expr(
- SetComp('x, Seq(comprehension('y, 'z, Seq('w)))),
+ SetComp("x", Seq(comprehension("y", "z", Seq("w")))),
"{x for y in z if w}"
)
test("dict_comp") - expr(
- DictComp('x, Num(1.0), Seq(comprehension('y, 'z, Seq('w)))),
+ DictComp("x", Num(1.0), Seq(comprehension("y", "z", Seq("w")))),
"{x: 1 for y in z if w}"
)
test("generator") - expr(
- GeneratorExp('x, Seq(comprehension('y, 'z, Seq('w)))),
+ GeneratorExp("x", Seq(comprehension("y", "z", Seq("w")))),
"(x for y in z if w)"
)
}
@@ -221,8 +221,8 @@ object UnitTests extends TestSuite{
test("pyramid") - stmt(
Seq(
Pass, Return(None), Return(Some(Num(1))),
- Delete(Seq('x)), Raise(Some('Foo), None, None),
- Assert('False, None)
+ Delete(Seq("x")), Raise(Some("Foo"), None, None),
+ Assert("False", None)
),
"""pass; return; return 1;
|del x; raise Foo
@@ -235,53 +235,53 @@ object UnitTests extends TestSuite{
"import a.b.c"
)
test("import2") - stmt(
- Seq(Import(Seq(alias(identifier("a.b.c"), Some('d)), alias(identifier("e"), Some('f))))),
+ Seq(Import(Seq(alias(identifier("a.b.c"), Some("d")), alias(identifier("e"), Some("f"))))),
"import a.b.c as d, e as f"
)
test("import3") - stmt(
- Seq(ImportFrom(Some('x), Seq(alias('y, None)), None)),
+ Seq(ImportFrom(Some("x"), Seq(alias("y", None)), None)),
"from x import y"
)
test("import4") - stmt(
- Seq(ImportFrom(Some(identifier("x.y")), Seq(alias('y, Some('z))), None)),
+ Seq(ImportFrom(Some(identifier("x.y")), Seq(alias("y", Some("z"))), None)),
"from x.y import y as z"
)
test("import5") - stmt(
- Seq(ImportFrom(Some(identifier("x.y")), Seq(alias('y, Some('z))), Some(1))),
+ Seq(ImportFrom(Some(identifier("x.y")), Seq(alias("y", Some("z"))), Some(1))),
"from .x.y import y as z"
)
test("import6") - stmt(
- Seq(ImportFrom(None, Seq(alias('y, Some('z))), Some(2))),
+ Seq(ImportFrom(None, Seq(alias("y", Some("z"))), Some(2))),
"from .. import y as z"
)
test("assign") - stmt(
- Seq(Assign(Seq(Name('x, Load)), Num(1))),
+ Seq(Assign(Seq(Name("x", Load)), Num(1))),
"x = 1"
)
test("assign2") - stmt(
- Seq(Assign(Seq('x, Tuple(Seq('y, 'z), Load)), Num(1))),
+ Seq(Assign(Seq("x", Tuple(Seq("y", "z"), Load)), Num(1))),
"x = y, z = 1"
)
test("augassign") - stmt(
- Seq(AugAssign('x, Add, Num(2))),
+ Seq(AugAssign("x", Add, Num(2))),
"x += 2"
)
}
// Statements which can have other statements within them
test("compound"){
test("while") - stmt(
- Seq(While('True, Seq(Pass), Nil)),
+ Seq(While("True", Seq(Pass), Nil)),
"""while True: pass"""
)
test("while2") - stmt(
- Seq(While('True, Seq(Pass, Pass), Nil)),
+ Seq(While("True", Seq(Pass, Pass), Nil)),
"""while True:
| pass
| pass
|""".stripMargin
)
test("while3") - stmt(
- Seq(While('True, Seq(Expr(Call('func, Seq(Num(1)), Nil, None, None)), Pass), Nil), Pass),
+ Seq(While("True", Seq(Expr(Call("func", Seq(Num(1)), Nil, None, None)), Pass), Nil), Pass),
"""while True:
| func(
|1
@@ -291,23 +291,23 @@ object UnitTests extends TestSuite{
|""".stripMargin
)
test("for") - stmt(
- Seq(For(Tuple(Seq('x, 'y), Load), Call('range, Seq(Num(10)), Nil, None, None), Seq(Print(None, Seq('x), true)), Nil)),
+ Seq(For(Tuple(Seq("x", "y"), Load), Call("range", Seq(Num(10)), Nil, None, None), Seq(Print(None, Seq("x"), true)), Nil)),
"""for x, y in range(10):
| print x""".stripMargin
)
test("if") - stmt(
Seq(If(
- 'a,
+ "a",
Seq(If(
- 'b,
+ "b",
Seq(Pass),
Seq(Print(None, Seq(Num(1)), true))
)),
Seq(If(
- 'c,
+ "c",
Seq(Pass),
Seq(If(
- 'd,
+ "d",
Seq(Pass),
Seq(Pass)
))
@@ -326,8 +326,8 @@ object UnitTests extends TestSuite{
)
test("forelse") - stmt(
- Seq(For(Name('w, Load), Tuple(Seq('x, 'y, 'z), Load),
- Seq(For(Tuple(Seq('a, 'b), Load), 'c, Seq(Pass), Nil)),
+ Seq(For(Name("w", Load), Tuple(Seq("x", "y", "z"), Load),
+ Seq(For(Tuple(Seq("a", "b"), Load), "c", Seq(Pass), Nil)),
Seq(Pass)
)),
"""for w in x, y, z:
@@ -338,11 +338,11 @@ object UnitTests extends TestSuite{
""".stripMargin
)
test("class1") - stmt(
- Seq(ClassDef('Foo, Nil, Seq(Pass), Nil)),
+ Seq(ClassDef("Foo", Nil, Seq(Pass), Nil)),
"""class Foo: pass""".stripMargin
)
test("class2") - stmt(
- Seq(ClassDef('Foo, Seq(BinOp('A, BitOr, 'B)), Seq(Pass), Seq('foo, Call(Attribute('bar, 'baz, Load), Seq(Num(1)), Nil, None, None)))),
+ Seq(ClassDef("Foo", Seq(BinOp("A", BitOr, "B")), Seq(Pass), Seq("foo", Call(Attribute("bar", "baz", Load), Seq(Num(1)), Nil, None, None)))),
"""@foo
|@bar.baz(1)
|class Foo(A | B):
@@ -350,17 +350,17 @@ object UnitTests extends TestSuite{
""".stripMargin
)
test("function") - stmt(
- Seq(FunctionDef('foo, arguments(Seq(Name('x, Param)), None, None, Nil), Seq(Return(Some('x))), Nil)),
+ Seq(FunctionDef("foo", arguments(Seq(Name("x", Param)), None, None, Nil), Seq(Return(Some("x"))), Nil)),
"""def foo(x):
| return x
""".stripMargin
)
test("function2") - stmt(
Seq(FunctionDef(
- 'foo,
- arguments(Seq(Name('x, Param), Name('y, Param)), None, Some('z), Seq(Num(1))),
- Seq(Return(Some('x))),
- Seq('dec)
+ "foo",
+ arguments(Seq(Name("x", Param), Name("y", Param)), None, Some("z"), Seq(Num(1))),
+ Seq(Return(Some("x"))),
+ Seq("dec")
)),
"""@dec
|def foo(x, y=1, **z):
@@ -368,14 +368,14 @@ object UnitTests extends TestSuite{
""".stripMargin
)
test("with") - stmt(
- Seq(With('x, Some(Name('y, Load)), Seq(Return(Some('y))))),
+ Seq(With("x", Some(Name("y", Load)), Seq(Return(Some("y"))))),
"with x as y: return y"
)
test("with2") - stmt(
- Seq(With('x, Some(Name('y, Load)), Seq(With('a, Some(Name('b, Load)), Seq(Return(Some(Tuple(Seq('y, 'b), Load)))))))),
+ Seq(With("x", Some(Name("y", Load)), Seq(With("a", Some(Name("b", Load)), Seq(Return(Some(Tuple(Seq("y", "b"), Load)))))))),
"with x as y, a as b: return y, b"
)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/readme/Changelog.scalatex b/readme/Changelog.scalatex
index 788f079d..054f9430 100644
--- a/readme/Changelog.scalatex
+++ b/readme/Changelog.scalatex
@@ -1,7 +1,71 @@
@import Main._
@sect{Change Log}
+ @sect{3.1.1}
+ @ul
+ @li
+ Fix spurious warnings @lnk("#285", "https://github.com/com-lihaoyi/fastparse/issues/285")
+ @sect{3.1.0}
+ @ul
+ @li
+ Support for Scala-Native 0.5.0
+ @li
+ Dropped support for Scala 3.x versions before 3.3.1
+ @li
+ Dropped support for Scala 2.x versions before 2.12.x
- @sect{2.2.1}
+ @sect{3.0.2}
+ @ul
+ @li
+ Release Scala-Native/Scala-3 version
+ @lnk("#288", "https://github.com/lihaoyi/fastparse/issues/288")
+ @sect{3.0.1}
+ @ul
+ @li
+ Fix handling of terminal parser error reporting in @code{.opaque} calls
+ @lnk("#278", "https://github.com/lihaoyi/fastparse/issues/278")
+ @li
+ Remove spurious warnings on @code{.rep} calls
+ @lnk("#281", "https://github.com/lihaoyi/fastparse/issues/281")
+
+ @li
+ Remove spurious warnings on @code{.rep} calls
+ @lnk("#282", "https://github.com/lihaoyi/fastparse/issues/282")
+
+ @sect{3.0.0}
+ @ul
+ @li
+ Scala 3.x support, starting from 3.2.2 @lnk("#271", "https://github.com/lihaoyi/fastparse/issues/271").
+ All functionality works in Scala 3.0.0, though performance may vary slightly due
+ to differences in the implementation.
+ @li
+ Make whitespace handling a proper @code{fastparse.Whitespace} trait,
+ rather than a function type @lnk("#272", "https://github.com/lihaoyi/fastparse/issues/272"),
+ to avoid problems with parse failures being silently discarded
+ @lnk("#261", "https://github.com/lihaoyi/fastparse/issues/261")
+ @li
+ Dependency bumps: @code{com.lihaoyi::geny:1.0.0}, @code{com.lihaoyi::sourcecode:0.3.0}
+ @li
+ @code{Fail} parser now takes an optional message
+ @lnk("#244", "https://github.com/lihaoyi/fastparse/issues/244")
+ @li
+ Significant overhaul of @code{aggregateMsg}/@code{longAggregateMsg}
+ implementation, with code cleanup and bugfixes
+ @lnk("#276", "https://github.com/lihaoyi/fastparse/issues/276").
+ A lot of internal-facing method call were modified or renamed, but
+ this is not expected to require changes in user code.
+ @li
+ Note that this is a binary incompatible change from Fastparse 2.x.
+ It is largely source compatible, except for users who are defining
+ custom whitespace implementations which would need to replace their
+ @hl.scala{implicit val whitespace} with
+ @hl.scala{implicit object whitespace extends Whitespace}
+
+
+ @sect{2.3.1}
+ @ul
+ @li
+ Support for Scala-Native 0.4.0
+ @sect{2.2.2}
@ul
@li
You can now call @code{fastparse.parse} on any
diff --git a/readme/ErrorReportingInternals.scalatex b/readme/ErrorReportingInternals.scalatex
new file mode 100644
index 00000000..cf8e0320
--- /dev/null
+++ b/readme/ErrorReportingInternals.scalatex
@@ -0,0 +1,121 @@
+@import Main._
+@val tests = wd/'fastparse/'test/'src/'fastparse
+@sect{Error Reporting Internals}
+ @p
+ This section goes into detail of how the FastParse error reporting
+ algorithm works. In general, it should "just work" when you call
+ @code{.longMsg}, @code{.longAggregateMsg}, or @code{.longTerminalsMsg}.
+ Nevertheless, it is both complicated as well as important enough that it
+ is worth documenting in detail.
+
+ @p
+ The two levels of error reporting that are most interesting are
+ @code{.longAggregateMsg} and @code{.longTerminalsMsg}. Consider a failed
+ parse of an example simplified arithmetic parser:
+
+ @hl.ref(tests/"ExampleTests.scala", Seq("\"errorHandlingExplanation\"", ""))
+
+ @p
+ This fails on the @code{?} being invalid syntax. The following error reporting
+ levels will treat this as follows:
+ @ul
+ @li
+ @code{terminalMsgs} lists all the lowest-level terminal parsers which are
+ tried at the given @code{traceIndex}, i.e. the character class @code{[0-9]} and the
+ token @hl.scala{"("}. This is useful to answer the question "what token/char can I
+ put at the error position to make my parse continue". The implementation
+ of @code{terminalMsgs} is straightforward: we simply call
+ @code{reportTerminalMsg} in every terminal parser, which collects all the
+ messages in a big list and returns it.
+ @li
+ @code{aggregateMsgs} lists all high-level parsers which are tried at the given
+ @code{traceIndex}, i.e. the named parsers @code{num} and @code{plus}. This is useful to
+ answer the question "What construct was the parser trying to do when it
+ failed"
+ @p
+ The implementation of @code{aggregateMsgs} is more interesting, since we need
+ to define what "high level" parsers means, which is non-obvious.
+
+ @sect{Definition of aggregateMsgs}
+ @p
+ Fastparse uses the following definition for @code{aggregateMsgs}:
+ @ul
+ @li
+ @code{aggregateMsgs} should contain the parsers highest in the call stack,
+ whose failure isn't immediately fatal to the parse (due to them being in
+ @code{|}, @code{.rep}, @code{?}, or other "backtrackable" operators, but
+ not past a @code{cut})
+ @p
+ This is a useful definition because we already have the @code{failureStack}
+ containing all (named) parsers whose failure *is* immediately fatal to the
+ parse, both those at @code{traceIndex} and those earlier in the input. Thus
+ there is no need to duplicate showing any of them in the @code{aggregateMsgs},
+ and we can instead go "one level deeper" to find the highest-level parsers
+ within the deepest parser of the @code{failureStack} and show those instead.
+ Thus, in the combined @code{longAggregateMsg}, the failure stack shows us
+ exactly which parsers failing directly contributed to the failure at
+ @code{traceIndex}, while the @code{longAggregateMsg} tells us what are the
+ highest-level parsers FastParse was trying to parse at @code{traceIndex} before
+ it finally failed.
+ @sect{Implementation of aggregateMsgs}
+ @p
+ To collect the @code{aggregateMsgs}, We use the following algorithm:
+ @ul
+ @li
+ When a parse which started at the given @code{traceIndex} fails without a cut:
+ Over-write @code{aggregateMsgs} with its @code{shortMsg}
+
+ @li
+ Otherwise:
+
+ @ul
+ @li
+ If we are a terminal parser, we set our @code{aggregateMsgs} to Nil
+ @li
+ If we are a compound parser, we simply sum up the @code{aggregateMsgs}
+ of all our constituent parts
+ @p
+ As mentioned earlier, the point of this is to provide the highest-level parsers which
+ failed at the @code{traceIndex}, but are not already part of the @code{failureStack}.
+ Non-highest-level parsers do successfully write their message to
+ @code{aggregateMsgs}, but they are subsequently over-written by the higher
+ level parsers, until it reaches the point where @code{cut == true}, indicating
+ that any further higher-level parsers will be in @code{failureStack} and using
+ their message to stomp over the existing parse-failure-messages in
+ @code{aggregateMsgs} would be wasteful.
+ @sect{Edge Cases}
+ @p
+ There is an edge case where there is no given failure that occurs exactly at
+ @code{traceIndex} e.g.
+ @ul
+ @li
+ Parsing @hl.scala{"ax"} with @hl.scala{P( ("a" ~ "b") ~ "c" | "a" ~/ "d" )}
+ @li
+ The final failure @code{index} and thus @code{traceIndex} is at offset 1
+ @li
+ We would like to receive the aggregation @hl.scala{("b" | "d")}
+ @li
+ But @hl.scala{("a" ~ "b")} passes from offsets 0-2, @hl.scala{"c"} fails
+
+
+ @p
+ In such a case, we truncate the @code{shortMsg} at
+ @code{traceIndex} to only include the portion we're interested in (which directly
+ follows the failure). This then gets aggregated nicely to form the error
+ message from-point-of-failure.
+ @p
+ A follow-on edge case is parsing @hl.scala{"ax"} with
+ @hl.scala
+ val inner = P( "a" ~ "b" )
+ P( inner ~ "c" | "a" ~/ "d" )
+ @ul
+ @li
+ Here, we find that the @code{inner} parser starts before the @code{traceIndex} and
+ fails at @code{traceIndex},
+ @li
+ But we want our aggregation to continue being @hl.scala{("b" | "d")}, rather than
+ @hl.scala{(inner | "d")}.
+
+ Thus, for opaque compound parsers like @code{inner} which do not expose their
+ internals, we use @code{forceAggregate} to force it to expose its internals
+ when its range covers the @code{traceIndex} but it isn't an exact match
diff --git a/readme/ExampleParsers.scalatex b/readme/ExampleParsers.scalatex
index 2cfa331e..79093ddd 100644
--- a/readme/ExampleParsers.scalatex
+++ b/readme/ExampleParsers.scalatex
@@ -1,5 +1,6 @@
@import Main._
@val tests = wd/'fastparse/'test/'src/'fastparse
+@val tests212plus = wd/'fastparse/'test/"src-2.12+"/'fastparse
@val main = wd/'fastparse/'src/'fastparse
@sect{Example Parsers}
@@ -45,7 +46,7 @@
Try it out yourself! Remember that it does not handle whitespace:
@div(id := "mathdiv")
- @script(raw("""demo.DemoMain.math(document.getElementById("mathdiv"))"""))
+ @script(raw("""DemoMain.math(document.getElementById("mathdiv"))"""))
@sect{Whitespace Handling}
@hl.ref(tests/"WhitespaceMathTests.scala", "import SingleLineWhitespace._", "val tests")
@@ -76,7 +77,7 @@
You can also define your own custom whitespace consumer, if none of
bundled ones fit your needs:
- @hl.ref(tests/"CustomWhitespaceMathTests.scala", "implicit val whitespace", "val tests")
+ @hl.ref(tests212plus/"CustomWhitespaceMathTests.scala", "implicit object whitespace", "val tests")
@@ -84,7 +85,7 @@
Or try it yourself:
@div(id := "wsmathdiv")
- @script(raw("""demo.DemoMain.whitespaceMath(document.getElementById("wsmathdiv"))"""))
+ @script(raw("""DemoMain.whitespaceMath(document.getElementById("wsmathdiv"))"""))
@sect{Indentation Grammars}
@hl.ref(tests/"IndentationTests.scala", "def eval", "val tests")
@@ -116,7 +117,7 @@
@p
Try it out!
@div(id := "indentdiv")
- @script(raw("""demo.DemoMain.indentation(document.getElementById("indentdiv"))"""))
+ @script(raw("""DemoMain.indentation(document.getElementById("indentdiv"))"""))
@sect{Json}
@hl.ref(tests/"JsonTests.scala", Seq("object Js {", ""))
@@ -161,12 +162,12 @@
Try it out!
@div(id := "jsondiv")
- @script(raw("""demo.DemoMain.json(document.getElementById("jsondiv"))"""))
+ @script(raw("""DemoMain.json(document.getElementById("jsondiv"))"""))
@sect{ScalaParse}
@div(id := "scaladiv")
- @script(raw("""demo.DemoMain.scalaparser(document.getElementById("scaladiv"))"""))
+ @script(raw("""DemoMain.scalaparser(document.getElementById("scaladiv"))"""))
@p
ScalaParse is a parser for the entire Scala programming language, written using FastParse. This is notable for a few reasons:
@@ -205,7 +206,7 @@
@sect{PythonParse}
@div(id := "pythondiv")
- @script(raw("""demo.DemoMain.python(document.getElementById("pythondiv"))"""))
+ @script(raw("""DemoMain.python(document.getElementById("pythondiv"))"""))
@p
There is now an @a("example Python parser", href:="https://github.com/lihaoyi/fastparse/tree/master/pythonparse/shared/src/main/scala/pythonparse") available under a subproject in the repo. This is a good example of a real-world parser: parsing knotty syntax (including indentation-delimited blocks!), building an AST, and with heavy unit tests.
@@ -236,7 +237,7 @@
the file readable and good-looking.
@div(id := "cssdiv")
- @script(raw("""demo.DemoMain.css(document.getElementById("cssdiv"))"""))
+ @script(raw("""DemoMain.css(document.getElementById("cssdiv"))"""))
@p
As mentioned above, CssParse builds and AST that stores
diff --git a/readme/FastParseInternals.scalatex b/readme/FastParseInternals.scalatex
index 8e5d03d0..162a4ab2 100644
--- a/readme/FastParseInternals.scalatex
+++ b/readme/FastParseInternals.scalatex
@@ -1,7 +1,7 @@
@import Main._
@sect{Internals}
@p
- FastParse 2.0.5 is implemented as a set of methods that perform a
+ FastParse is implemented as a set of methods that perform a
recursive-descent parse on the given input, with all book-keeping
information maintained in the @code{fastparse.ParsingRun[T]} objects
(abbreviated @code{fastparse.P[T]}). @code{ParsingRun}s are mutable,
diff --git a/readme/GettingStarted.scalatex b/readme/GettingStarted.scalatex
index bd56cd85..113fd7a1 100644
--- a/readme/GettingStarted.scalatex
+++ b/readme/GettingStarted.scalatex
@@ -6,7 +6,7 @@
@hl.ref(tests/"MathTests.scala", "import fastparse._", "val tests")
@hl.ref(tests/"MathTests.scala", Seq("\"pass\"", ""))
@div(id := "splashdiv")
-@script(raw("""demo.DemoMain.math(document.getElementById("splashdiv"))"""))
+@script(raw("""DemoMain.math(document.getElementById("splashdiv"))"""))
@p
FastParse is a Scala library for parsing strings and bytes into structured
data. This lets you easily write a parser for any arbitrary textual data
@@ -50,12 +50,12 @@
To begin using FastParse, add the following to your build config
@hl.scala
- "com.lihaoyi" %% "fastparse" % "2.2.1" // SBT
- ivy"com.lihaoyi::fastparse:2.2.1" // Mill
+ "com.lihaoyi" %% "fastparse" % "3.1.1" // SBT
+ ivy"com.lihaoyi::fastparse:3.1.1" // Mill
@p
To use with Scala.js, you'll need
@hl.scala
- "com.lihaoyi" %%% "fastparse" % "2.2.1" // SBT
- ivy"com.lihaoyi::fastparse::2.2.1" // Mill
+ "com.lihaoyi" %%% "fastparse" % "3.1.1" // SBT
+ ivy"com.lihaoyi::fastparse::3.1.1" // Mill
diff --git a/readme/Readme.scalatex b/readme/Readme.scalatex
index a99d0cf1..9b4f7cfa 100644
--- a/readme/Readme.scalatex
+++ b/readme/Readme.scalatex
@@ -20,7 +20,7 @@
)
)
-@sect("FastParse 2.2.1", "Fast to write, Fast running Parsers in Scala")
+@sect("FastParse 3.1.1", "Fast to write, Fast running Parsers in Scala")
@GettingStarted()
@WritingParsers()
@@ -39,5 +39,7 @@
@FastParseInternals()
+ @ErrorReportingInternals()
+
@Changelog()
diff --git a/readme/StreamingParsing.scalatex b/readme/StreamingParsing.scalatex
index c08d7fdc..09c0ff65 100644
--- a/readme/StreamingParsing.scalatex
+++ b/readme/StreamingParsing.scalatex
@@ -1,5 +1,6 @@
@import Main._
@val tests = wd/'fastparse/'test/'src/'fastparse
+@val tests212plus = wd/'fastparse/'test/"src-2.12+"/'fastparse
@val main = wd/'fastparse/'src/'fastparse
@sect{Streaming Parsing}
@@ -9,7 +10,7 @@
@hl.scala{Iterator[String]} or @hl.scala{java.io.InputStream} instead
of a @code{String} to the @hl.scala{fastparse.parse} method.
- @hl.ref(tests/"IteratorTests.scala", Seq("\"basic\"", ""))
+ @hl.ref(tests212plus/"IteratorTests.scala", Seq("\"basic\"", ""))
@p
Streaming parsing still needs to buffer input in-memory: in particular,
diff --git a/readme/WritingParsers.scalatex b/readme/WritingParsers.scalatex
index eada6dbf..c1ef6c4d 100644
--- a/readme/WritingParsers.scalatex
+++ b/readme/WritingParsers.scalatex
@@ -11,6 +11,10 @@
@p
Such a parser returns a @hl.scala{Parsed.Success} if the input matches the string, and otherwise returns a @hl.scala{Parsed.Failure}.
+
+ @p
+ @b{Note}: if using Scala 3.5 or later, calls to the @code{parse} function should include the keyword @code{using}, e.g. @code{parse("a", parseA(using _))}.
+
@p
As you can see, by default the @hl.scala{Parsed.Success} contains
a @hl.scala{(): Unit}, unless you use @sect.ref{Capture} or
diff --git a/scalaparse/src/scalaparse/Core.scala b/scalaparse/src/scalaparse/Core.scala
index 388c44e3..28d6bdf1 100644
--- a/scalaparse/src/scalaparse/Core.scala
+++ b/scalaparse/src/scalaparse/Core.scala
@@ -15,89 +15,91 @@ trait Core extends syntax.Literals{
def repTC[R](min: Int = 0, max: Int = Int.MaxValue, exactly: Int = -1)
(implicit ev: fastparse.Implicits.Repeater[T, R],
ctx: P[_]): P[R] =
- p0.rep[R](min = min, sep = ",", max = max, exactly = exactly) ~ TrailingComma
+ p0.rep(min = min, sep = ",", max = max, exactly = exactly) ~ TrailingComma
}
// Aliases for common things. These things are used in almost every parser
// in the file, so it makes sense to keep them short.
import Key._
// Keywords that match themselves and nothing else
- def `=>`[_: P] = (O("=>") | O("⇒")).opaque("\"=>\"")
- def `<-`[_: P] = O("<-") | O("←").opaque("\"<-\"")
- def `:`[_: P] = O(":")
- def `=`[_: P] = O("=")
- def `@`[_: P] = O("@")
- def `_`[_: P] = W("_")
- def `this`[_: P] = W("this")
- def `type`[_: P] = W("type")
- def `val`[_: P] = W("val")
- def `var`[_: P] = W("var")
- def `def`[_: P] = W("def")
- def `with`[_: P] = W("with")
- def `package`[_: P] = W("package")
- def `object`[_: P] = W("object")
- def `class`[_: P] = W("class")
- def `case`[_: P] = W("case")
- def `trait`[_: P] = W("trait")
- def `extends`[_: P] = W("extends")
- def `implicit`[_: P] = W("implicit")
- def `try`[_: P] = W("try")
- def `new`[_: P] = W("new")
- def `macro`[_: P] = W("macro")
- def `import`[_: P] = W("import")
- def `else`[_: P] = W("else")
- def `super`[_: P] = W("super")
- def `catch`[_: P] = W("catch")
- def `finally`[_: P] = W("finally")
- def `do`[_: P] = W("do")
- def `yield`[_: P] = W("yield")
- def `while`[_: P] = W("while")
- def `<%`[_: P] = O("<%")
- def `override`[_: P] = W("override")
- def `#`[_: P] = O("#")
- def `forSome`[_: P] = W("forSome")
- def `for`[_: P] = W("for")
- def `abstract`[_: P] = W("abstract")
- def `throw`[_: P] = W("throw")
- def `return`[_: P] = W("return")
- def `lazy`[_: P] = W("lazy")
- def `if`[_: P] = W("if")
- def `match`[_: P] = W("match")
- def `>:`[_: P] = O(">:")
- def `<:`[_: P] = O("<:")
- def `final`[_: P] = W("final")
- def `sealed`[_: P] = W("sealed")
- def `private`[_: P] = W("private")
- def `protected`[_: P] = W("protected")
+ def `=>`[$: P] = (O("=>") | O("⇒")).opaque("\"=>\"")
+ def `<-`[$: P] = O("<-") | O("←").opaque("\"<-\"")
+ def `:`[$: P] = O(":")
+ def `=`[$: P] = O("=")
+ def `@`[$: P] = O("@")
+ //def `_`[$: P] = W("_")
+ def Underscore[$: P] = W("_")
+ def `this`[$: P] = W("this")
+ def `type`[$: P] = W("type")
+ def `val`[$: P] = W("val")
+ def `var`[$: P] = W("var")
+ def `def`[$: P] = W("def")
+ def `with`[$: P] = W("with")
+ def `package`[$: P] = W("package")
+ def `object`[$: P] = W("object")
+ def `class`[$: P] = W("class")
+ def `case`[$: P] = W("case")
+ def `trait`[$: P] = W("trait")
+ def `extends`[$: P] = W("extends")
+ def `implicit`[$: P] = W("implicit")
+ def `try`[$: P] = W("try")
+ def `new`[$: P] = W("new")
+ def `macro`[$: P] = W("macro")
+ def `import`[$: P] = W("import")
+ def `else`[$: P] = W("else")
+ def `super`[$: P] = W("super")
+ def `catch`[$: P] = W("catch")
+ def `finally`[$: P] = W("finally")
+ def `do`[$: P] = W("do")
+ def `yield`[$: P] = W("yield")
+ def `while`[$: P] = W("while")
+ def `<%`[$: P] = O("<%")
+ def `override`[$: P] = W("override")
+ def `#`[$: P] = O("#")
+ def `forSome`[$: P] = W("forSome")
+ def `for`[$: P] = W("for")
+ def `abstract`[$: P] = W("abstract")
+ def `throw`[$: P] = W("throw")
+ def `return`[$: P] = W("return")
+ def `lazy`[$: P] = W("lazy")
+ def `if`[$: P] = W("if")
+ def `match`[$: P] = W("match")
+ def `>:`[$: P] = O(">:")
+ def `<:`[$: P] = O("<:")
+ def `final`[$: P] = W("final")
+ def `sealed`[$: P] = W("sealed")
+ def `private`[$: P] = W("private")
+ def `protected`[$: P] = W("protected")
// kinda-sorta keywords that are common patterns even if not
// really-truly keywords
- def `*`[_: P] = O("*")
- def `_*`[_: P] = P( `_` ~ `*` )
- def `}`[_: P] = P( Semis.? ~ "}" )
- def `{`[_: P] = P( "{" ~ Semis.? )
+ def `*`[$: P] = O("*")
+ // def `_*`[$: P] = P( `_` ~ `*` )
+ def `Underscore*`[$: P] = P( Underscore ~ `*` )
+ def `}`[$: P] = P( Semis.? ~ "}" )
+ def `{`[$: P] = P( "{" ~ Semis.? )
/**
* helper printing function
*/
- def Id[_: P] = P( WL ~ Identifiers.Id )
- def VarId[_: P] = P( WL ~ Identifiers.VarId )
- def BacktickId[_: P] = P( WL ~ Identifiers.BacktickId )
- def ExprLiteral[_: P] = P( WL ~ Literals.Expr.Literal )
- def PatLiteral[_: P] = P( WL ~ Literals.Pat.Literal )
+ def Id[$: P] = P( WL ~ Identifiers.Id )
+ def VarId[$: P] = P( WL ~ Identifiers.VarId )
+ def BacktickId[$: P] = P( WL ~ Identifiers.BacktickId )
+ def ExprLiteral[$: P] = P( WL ~ Literals.Expr.Literal )
+ def PatLiteral[$: P] = P( WL ~ Literals.Pat.Literal )
- def QualId[_: P] = P( WL ~ Id.rep(1, sep = ".") )
- def Ids[_: P] = P( Id.rep(1, sep = ",") )
+ def QualId[$: P] = P( WL ~ Id.rep(1, sep = ".") )
+ def Ids[$: P] = P( Id.rep(1, sep = ",") )
/**
* Sketchy way to whitelist a few suffixes that come after a . select;
* apart from these and IDs, everything else is illegal
*/
- def PostDotCheck[_: P]: P[Unit] = P( WL ~ !(`super` | `this` | "{" | `_` | `type`) )
- def ClassQualifier[_: P] = P( "[" ~ Id ~ "]" )
- def ThisSuper[_: P] = P( `this` | `super` ~ ClassQualifier.? )
- def ThisPath[_: P]: P[Unit] = P( ThisSuper ~ ("." ~ PostDotCheck ~/ Id).rep )
- def IdPath[_: P]: P[Unit] = P( Id ~ ("." ~ PostDotCheck ~/ (`this` | Id)).rep ~ ("." ~ ThisPath).? )
- def StableId[_: P]: P[Unit] = P( ThisPath | IdPath )
+ def PostDotCheck[$: P]: P[Unit] = P( WL ~ !(`super` | `this` | "{" | Underscore | `type`) )
+ def ClassQualifier[$: P] = P( "[" ~ Id ~ "]" )
+ def ThisSuper[$: P] = P( `this` | `super` ~ ClassQualifier.? )
+ def ThisPath[$: P]: P[Unit] = P( ThisSuper ~ ("." ~ PostDotCheck ~/ Id).rep )
+ def IdPath[$: P]: P[Unit] = P( Id ~ ("." ~ PostDotCheck ~/ (`this` | Id)).rep ~ ("." ~ ThisPath).? )
+ def StableId[$: P]: P[Unit] = P( ThisPath | IdPath )
}
diff --git a/scalaparse/src/scalaparse/Exprs.scala b/scalaparse/src/scalaparse/Exprs.scala
index b8aa3590..73692b94 100644
--- a/scalaparse/src/scalaparse/Exprs.scala
+++ b/scalaparse/src/scalaparse/Exprs.scala
@@ -2,13 +2,13 @@ package scalaparse
import fastparse._, ScalaWhitespace._
trait Exprs extends Core with Types with Xml{
- def AnonTmpl[_: P]: P[Unit]
- def BlockDef[_: P]: P[Unit]
+ def AnonTmpl[$: P]: P[Unit]
+ def BlockDef[$: P]: P[Unit]
- def Import[_: P]: P[Unit] = {
- def Selector: P[Unit] = P( (Id | `_`) ~ (`=>` ~/ (Id | `_`)).? )
+ def Import[$: P]: P[Unit] = {
+ def Selector: P[Unit] = P( (Id | Underscore) ~ (`=>` ~/ (Id | Underscore)).? )
def Selectors: P[Unit] = P( "{" ~/ Selector.repTC() ~ "}" )
- def ImportExpr: P[Unit] = P( StableId ~ ("." ~/ (`_` | Selectors)).? )
+ def ImportExpr: P[Unit] = P( StableId ~ ("." ~/ (Underscore | Selectors)).? )
P( `import` ~/ ImportExpr.rep(1, sep = ","./) )
}
@@ -23,15 +23,15 @@ trait Exprs extends Core with Types with Xml{
// Expressions directly within a `val x = ...` or `def x = ...`
object FreeCtx extends WsCtx(semiInference=true, arrowTypeAscriptions=true)
- def TypeExpr[_: P] = ExprCtx.Expr
+ def TypeExpr[$: P] = ExprCtx.Expr
class WsCtx(semiInference: Boolean, arrowTypeAscriptions: Boolean){
- def OneSemiMax[_: P] = if (semiInference) OneNLMax else Pass
- def NoSemis[_: P] = if (semiInference) NotNewline else Pass
+ def OneSemiMax[$: P] = if (semiInference) OneNLMax else Pass
+ def NoSemis[$: P] = if (semiInference) NotNewline else Pass
- def Enumerators[_: P] = {
+ def Enumerators[$: P] = {
def Generator = P( `<-` ~/ Expr ~ Guard.? )
def Assign = P( `=` ~/ Expr )
// CuttingSemis is a bit weird, and unlike other places within this parser
@@ -44,7 +44,7 @@ trait Exprs extends Core with Types with Xml{
P( TypeOrBindPattern ~ Generator ~~ Enumerator.repX )
}
- def Expr[_: P]: P[Unit] = {
+ def Expr[$: P]: P[Unit] = {
def If = {
def Else = P( Semis.? ~ `else` ~/ Expr )
P( `if` ~/ "(" ~ ExprCtx.Expr ~ ")" ~ Expr ~ Else.? )
@@ -66,7 +66,7 @@ trait Exprs extends Core with Types with Xml{
def LambdaRhs = if (semiInference) P( BlockChunk ) else P( Expr )
- def ImplicitLambda = P( `implicit` ~ (Id | `_`) ~ (`:` ~ InfixType).? ~ `=>` ~ LambdaRhs.? )
+ def ImplicitLambda = P( `implicit` ~ (Id | Underscore) ~ (`:` ~ InfixType).? ~ `=>` ~ LambdaRhs.? )
def ParenedLambda = P( Parened ~~ (WL ~ `=>` ~ LambdaRhs.? | ExprSuffix ~~ PostfixSuffix ~ SuperPostfixSuffix) )
def PostfixLambda = P( PostfixExpr ~ (`=>` ~ LambdaRhs.? | SuperPostfixSuffix).? )
def SmallerExprOrLambda = P( ParenedLambda | PostfixLambda )
@@ -76,75 +76,75 @@ trait Exprs extends Core with Types with Xml{
)
}
- def SuperPostfixSuffix[_: P] = P( (`=` ~/ Expr).? ~ MatchAscriptionSuffix.? )
- def AscriptionType[_: P] = if (arrowTypeAscriptions) P( Type ) else P( InfixType )
- def Ascription[_: P] = P( `:` ~/ (`_*` | AscriptionType | Annot.rep(1)) )
- def MatchAscriptionSuffix[_: P] = P(`match` ~/ "{" ~ CaseClauses | Ascription)
- def ExprPrefix[_: P] = P( WL ~ CharIn("\\-+!~") ~~ !syntax.Basic.OpChar ~ WS)
- def ExprSuffix[_: P] = P( (WL ~ "." ~/ Id | WL ~ TypeArgs | NoSemis ~ ArgList).repX ~~ (NoSemis ~ `_`).? )
- def PrefixExpr[_: P] = P( ExprPrefix.? ~ SimpleExpr )
+ def SuperPostfixSuffix[$: P] = P( (`=` ~/ Expr).? ~ MatchAscriptionSuffix.? )
+ def AscriptionType[$: P] = if (arrowTypeAscriptions) P( Type ) else P( InfixType )
+ def Ascription[$: P] = P( `:` ~/ (`_*` | AscriptionType | Annot.rep(1)) )
+ def MatchAscriptionSuffix[$: P] = P(`match` ~/ "{" ~ CaseClauses | Ascription)
+ def ExprPrefix[$: P] = P( WL ~ CharIn("\\-+!~") ~~ !syntax.Basic.OpChar ~ WS)
+ def ExprSuffix[$: P] = P( (WL ~ "." ~/ Id | WL ~ TypeArgs | NoSemis ~ ArgList).repX ~~ (NoSemis ~ Underscore).? )
+ def PrefixExpr[$: P] = P( ExprPrefix.? ~ SimpleExpr )
// Intermediate `WL` needs to always be non-cutting, because you need to
// backtrack out of `InfixSuffix` into `PostFixSuffix` if it doesn't work out
- def InfixSuffix[_: P] = P( NoSemis ~~ WL ~~ Id ~ TypeArgs.? ~~ OneSemiMax ~ PrefixExpr ~~ ExprSuffix)
- def PostFix[_: P] = P( NoSemis ~~ WL ~~ Id ~ Newline.? )
+ def InfixSuffix[$: P] = P( NoSemis ~~ WL ~~ Id ~ TypeArgs.? ~~ OneSemiMax ~ PrefixExpr ~~ ExprSuffix)
+ def PostFix[$: P] = P( NoSemis ~~ WL ~~ Id ~ Newline.? )
- def PostfixSuffix[_: P] = P( InfixSuffix.repX ~~ PostFix.?)
+ def PostfixSuffix[$: P] = P( InfixSuffix.repX ~~ PostFix.?)
- def PostfixExpr[_: P]: P[Unit] = P( PrefixExpr ~~ ExprSuffix ~~ PostfixSuffix )
+ def PostfixExpr[$: P]: P[Unit] = P( PrefixExpr ~~ ExprSuffix ~~ PostfixSuffix )
- def Parened[_: P] = P ( "(" ~/ TypeExpr.repTC() ~ ")" )
- def SimpleExpr[_: P]: P[Unit] = {
+ def Parened[$: P] = P ( "(" ~/ TypeExpr.repTC() ~ ")" )
+ def SimpleExpr[$: P]: P[Unit] = {
def New = P( `new` ~/ AnonTmpl )
- P( XmlExpr | New | BlockExpr | ExprLiteral | StableId | `_` | Parened )
+ P( XmlExpr | New | BlockExpr | ExprLiteral | StableId | Underscore | Parened )
}
- def Guard[_: P] : P[Unit] = P( `if` ~/ PostfixExpr )
+ def Guard[$: P] : P[Unit] = P( `if` ~/ PostfixExpr )
}
- def SimplePattern[_: P]: P[Unit] = {
+ def SimplePattern[$: P]: P[Unit] = {
def TupleEx = P( "(" ~/ Pattern.repTC() ~ ")" )
def Extractor = P( StableId ~ TypeArgs.? ~ TupleEx.? )
- def Thingy = P( `_` ~ (`:` ~/ TypePat).? ~ !("*" ~~ !syntax.Basic.OpChar) )
+ def Thingy = P( Underscore ~ (`:` ~/ TypePat).? ~ !("*" ~~ !syntax.Basic.OpChar) )
P( XmlPattern | Thingy | PatLiteral | TupleEx | Extractor | VarId )
}
- def BlockExpr[_: P]: P[Unit] = P( "{" ~/ (CaseClauses | Block ~ "}") )
+ def BlockExpr[$: P]: P[Unit] = P( "{" ~/ (CaseClauses | Block ~ "}") )
- def BlockLambdaHead[_: P]: P[Unit] = P( "(" ~ BlockLambdaHead ~ ")" | `this` | Id | `_` )
+ def BlockLambdaHead[$: P]: P[Unit] = P( "(" ~ BlockLambdaHead ~ ")" | `this` | Id | Underscore )
- def BlockLambda[_: P] = P( BlockLambdaHead ~ (`=>` | `:` ~ InfixType ~ `=>`.?) )
+ def BlockLambda[$: P] = P( BlockLambdaHead ~ (`=>` | `:` ~ InfixType ~ `=>`.?) )
- def BlockChunk[_: P] = {
+ def BlockChunk[$: P] = {
def Prelude = P( Annot.rep ~ LocalMod.rep )
def BlockStat = P( Import | Prelude ~ BlockDef | StatCtx.Expr )
P( BlockLambda.rep ~ BlockStat.rep(sep = Semis) )
}
- def BaseBlock[_: P](end: => P[Unit])(implicit name: sourcecode.Name): P[Unit] = {
+ def BaseBlock[$: P](end: => P[Unit])(implicit name: sourcecode.Name): P[Unit] = {
def BlockEnd = P( Semis.? ~ &(end) )
def Body = P( BlockChunk.repX(sep = Semis) )
P( Semis.? ~ BlockLambda.? ~ Body ~/ BlockEnd )
}
- def Block[_: P] = BaseBlock("}")
- def CaseBlock[_: P] = BaseBlock("}" | `case`)
+ def Block[$: P] = BaseBlock("}")
+ def CaseBlock[$: P] = BaseBlock("}" | `case`)
- def Patterns[_: P]: P[Unit] = P( Pattern.rep(1, sep = ","./) )
- def Pattern[_: P]: P[Unit] = P( (WL ~ TypeOrBindPattern).rep(1, sep = "|"./) )
- def TypePattern[_: P] = P( (`_` | BacktickId | VarId) ~ `:` ~ TypePat )
- def TypeOrBindPattern[_: P]: P[Unit] = P( TypePattern | BindPattern )
- def BindPattern[_: P]: P[Unit] = {
- def InfixPattern = P( SimplePattern ~ (Id ~/ SimplePattern).rep | `_*` )
- def Binding = P( (Id | `_`) ~ `@` )
+ def Patterns[$: P]: P[Unit] = P( Pattern.rep(1, sep = ","./) )
+ def Pattern[$: P]: P[Unit] = P( (WL ~ TypeOrBindPattern).rep(1, sep = "|"./) )
+ def TypePattern[$: P] = P( (Underscore | BacktickId | VarId) ~ `:` ~ TypePat )
+ def TypeOrBindPattern[$: P]: P[Unit] = P( TypePattern | BindPattern )
+ def BindPattern[$: P]: P[Unit] = {
+ def InfixPattern = P( SimplePattern ~ (Id ~/ SimplePattern).rep | `_*` )
+ def Binding = P( (Id | Underscore) ~ `@` )
P( Binding ~ InfixPattern | InfixPattern | VarId )
}
- def TypePat[_: P] = P( CompoundType )
- def ParenArgList[_: P] = P( "(" ~/ (Exprs ~ (`:` ~/ `_*`).?).? ~ TrailingComma ~ ")" )
- def ArgList[_: P]: P[Unit] = P( ParenArgList | OneNLMax ~ BlockExpr )
+ def TypePat[$: P] = P( CompoundType )
+ def ParenArgList[$: P] = P( "(" ~/ (Exprs ~ (`:` ~/ `_*`).?).? ~ TrailingComma ~ ")" )
+ def ArgList[$: P]: P[Unit] = P( ParenArgList | OneNLMax ~ BlockExpr )
- def CaseClauses[_: P]: P[Unit] = {
+ def CaseClauses[$: P]: P[Unit] = {
// Need to lookahead for `class` and `object` because
// the block { case object X } is not a case clause!
def CaseClause: P[Unit] = P( `case` ~ !(`class` | `object`) ~/ Pattern ~ ExprCtx.Guard.? ~ `=>` ~ CaseBlock )
diff --git a/scalaparse/src/scalaparse/Scala.scala b/scalaparse/src/scalaparse/Scala.scala
index b6e7cbef..7e671238 100644
--- a/scalaparse/src/scalaparse/Scala.scala
+++ b/scalaparse/src/scalaparse/Scala.scala
@@ -9,23 +9,23 @@ import fastparse._, ScalaWhitespace._
*/
object Scala extends Core with Types with Exprs with Xml{
- def TmplBody[_: P]: P[Unit] = {
+ def TmplBody[$: P]: P[Unit] = {
def Prelude = P( (Annot ~ OneNLMax).rep ~ Mod./.rep )
def TmplStat = P( Import | Prelude ~ BlockDef | StatCtx.Expr )
P( "{" ~/ BlockLambda.? ~ Semis.? ~ TmplStat.repX(sep = NoCut(Semis)) ~ Semis.? ~ `}` )
}
- def ValVarDef[_: P] = P( BindPattern.rep(1, ","./) ~ (`:` ~/ Type).? ~ (`=` ~/ FreeCtx.Expr).? )
+ def ValVarDef[$: P] = P( BindPattern.rep(1, ","./) ~ (`:` ~/ Type).? ~ (`=` ~/ FreeCtx.Expr).? )
- def FunDef[_: P] = {
+ def FunDef[$: P] = {
def Body = P( WL ~ `=` ~/ `macro`.? ~ StatCtx.Expr | OneNLMax ~ "{" ~ Block ~ "}" )
P( FunSig ~ (`:` ~/ Type).? ~~ Body.? )
}
- def BlockDef[_: P]: P[Unit] = P( Dcl | TraitDef | ClsDef | ObjDef )
+ def BlockDef[$: P]: P[Unit] = P( Dcl | TraitDef | ClsDef | ObjDef )
- def ClsDef[_: P] = {
+ def ClsDef[$: P] = {
def ClsAnnot = P( `@` ~ SimpleType ~ ArgList.? )
def Prelude = P( NotNewline ~ ( ClsAnnot.rep(1) ~ AccessMod.? | AccessMod) )
def ClsArgMod = P( Mod.rep ~ (`val` | `var`) )
@@ -35,29 +35,29 @@ object Scala extends Core with Types with Exprs with Xml{
P( `case`.? ~ `class` ~/ Id ~ TypeArgList.? ~~ Prelude.? ~~ ClsArgs.repX ~ DefTmpl.? )
}
- def Constrs[_: P] = P( (WL ~ Constr).rep(1, `with`./) )
- def EarlyDefTmpl[_: P] = P( TmplBody ~ (`with` ~/ Constr).rep ~ TmplBody.? )
- def NamedTmpl[_: P] = P( Constrs ~ TmplBody.? )
+ def Constrs[$: P] = P( (WL ~ Constr).rep(1, `with`./) )
+ def EarlyDefTmpl[$: P] = P( TmplBody ~ (`with` ~/ Constr).rep ~ TmplBody.? )
+ def NamedTmpl[$: P] = P( Constrs ~ TmplBody.? )
- def DefTmpl[_: P] = P( (`extends` | `<:`) ~ AnonTmpl | TmplBody )
- def AnonTmpl[_: P] = P( EarlyDefTmpl | NamedTmpl | TmplBody )
+ def DefTmpl[$: P] = P( (`extends` | `<:`) ~ AnonTmpl | TmplBody )
+ def AnonTmpl[$: P] = P( EarlyDefTmpl | NamedTmpl | TmplBody )
- def TraitDef[_: P] = P( `trait` ~/ Id ~ TypeArgList.? ~ DefTmpl.? )
+ def TraitDef[$: P] = P( `trait` ~/ Id ~ TypeArgList.? ~ DefTmpl.? )
- def ObjDef[_: P]: P[Unit] = P( `case`.? ~ `object` ~/ Id ~ DefTmpl.? )
+ def ObjDef[$: P]: P[Unit] = P( `case`.? ~ `object` ~/ Id ~ DefTmpl.? )
- def Constr[_: P] = P( AnnotType ~~ (NotNewline ~ ParenArgList ).repX )
+ def Constr[$: P] = P( AnnotType ~~ (NotNewline ~ ParenArgList ).repX )
- def PkgObj[_: P] = P( ObjDef )
- def PkgBlock[_: P] = P( QualId ~/ `{` ~ TopStatSeq.? ~ `}` )
- def Pkg[_: P] = P( `package` ~/ (PkgBlock | PkgObj) )
- def TopStatSeq[_: P]: P[Unit] = {
+ def PkgObj[$: P] = P( ObjDef )
+ def PkgBlock[$: P] = P( QualId ~/ `{` ~ TopStatSeq.? ~ `}` )
+ def Pkg[$: P] = P( `package` ~/ (PkgBlock | PkgObj) )
+ def TopStatSeq[$: P]: P[Unit] = {
def Tmpl = P( (Annot ~~ OneNLMax).rep ~ Mod.rep ~ (TraitDef | ClsDef | ObjDef) )
def TopStat = P( Pkg | Import | Tmpl )
P( TopStat.repX(1, Semis) )
}
- def TopPkgSeq[_: P] = P( ((`package` ~ QualId) ~~ !(WS ~ "{")).repX(1, Semis) )
- def CompilationUnit[_: P]: P[Unit] = {
+ def TopPkgSeq[$: P] = P( ((`package` ~ QualId) ~~ !(WS ~ "{")).repX(1, Semis) )
+ def CompilationUnit[$: P]: P[Unit] = {
def Body = P( TopPkgSeq ~~ (Semis ~ TopStatSeq).? | TopStatSeq )
P( Semis.? ~ Body.? ~~ Semis.? ~ WL0 ~ End )
}
diff --git a/scalaparse/src/scalaparse/Types.scala b/scalaparse/src/scalaparse/Types.scala
index 4b78ed37..476ad8d1 100644
--- a/scalaparse/src/scalaparse/Types.scala
+++ b/scalaparse/src/scalaparse/Types.scala
@@ -3,51 +3,51 @@ package scalaparse
import fastparse._
import fastparse._, ScalaWhitespace._
trait Types extends Core{
- def TypeExpr[_: P]: P[Unit]
- def ValVarDef[_: P]: P[Unit]
- def FunDef[_: P]: P[Unit]
+ def TypeExpr[$: P]: P[Unit]
+ def ValVarDef[$: P]: P[Unit]
+ def FunDef[$: P]: P[Unit]
- def LocalMod[_: P]: P[Unit] = P( `abstract` | `final` | `sealed` | `implicit` | `lazy` )
- def AccessMod[_: P]: P[Unit] = {
+ def LocalMod[$: P]: P[Unit] = P( `abstract` | `final` | `sealed` | `implicit` | `lazy` )
+ def AccessMod[$: P]: P[Unit] = {
def AccessQualifier = P( "[" ~/ (`this` | Id) ~ "]" )
P( (`private` | `protected`) ~ AccessQualifier.? )
}
- def Dcl[_: P]: P[Unit] = {
+ def Dcl[$: P]: P[Unit] = {
P( (`val` | `var`) ~/ ValVarDef | `def` ~/ FunDef | `type` ~/ TypeDef )
}
- def Mod[_: P]: P[Unit] = P( LocalMod | AccessMod | `override` )
+ def Mod[$: P]: P[Unit] = P( LocalMod | AccessMod | `override` )
- def ExistentialClause[_: P] = P( `forSome` ~/ `{` ~ Dcl.repX(1, Semis) ~ `}` )
- def PostfixType[_: P] = P( InfixType ~ (`=>` ~/ Type | ExistentialClause).? )
- def Type[_: P]: P[Unit] = P( `=>`.? ~~ PostfixType ~ TypeBounds ~ `*`.? )
+ def ExistentialClause[$: P] = P( `forSome` ~/ `{` ~ Dcl.repX(1, Semis) ~ `}` )
+ def PostfixType[$: P] = P( InfixType ~ (`=>` ~/ Type | ExistentialClause).? )
+ def Type[$: P]: P[Unit] = P( `=>`.? ~~ PostfixType ~ TypeBounds ~ `*`.? )
// Can't cut after `*` because we may need to backtrack and settle for
// the `*`-postfix rather than an infix type
- def InfixType[_: P] = P( CompoundType ~~ (NotNewline ~ (`*` | Id./) ~~ OneNLMax ~ CompoundType).repX )
+ def InfixType[$: P] = P( CompoundType ~~ (NotNewline ~ (`*` | Id./) ~~ OneNLMax ~ CompoundType).repX )
- def CompoundType[_: P] = {
+ def CompoundType[$: P] = {
def Refinement = P( OneNLMax ~ `{` ~/ Dcl.repX(sep=Semis) ~ `}` )
def NamedType = P( (Pass ~ AnnotType).rep(1, `with`./) )
P( NamedType ~~ Refinement.? | Refinement )
}
- def NLAnnot[_: P] = P( NotNewline ~ Annot )
- def AnnotType[_: P] = P(SimpleType ~~ NLAnnot.repX )
+ def NLAnnot[$: P] = P( NotNewline ~ Annot )
+ def AnnotType[$: P] = P(SimpleType ~~ NLAnnot.repX )
- def TypeId[_: P] = P( StableId )
- def SimpleType[_: P]: P[Unit] = {
+ def TypeId[$: P] = P( StableId )
+ def SimpleType[$: P]: P[Unit] = {
// Can't `cut` after the opening paren, because we might be trying to parse `()`
// or `() => T`! only cut after parsing one type
def TupleType = P( "(" ~/ Type.repTC() ~ ")" )
- def BasicType = P( TupleType | Literals.NoInterp.Literal | TypeId ~ ("." ~ `type`).? | `_` )
+ def BasicType = P( TupleType | Literals.NoInterp.Literal | TypeId ~ ("." ~ `type`).? | Underscore )
P( BasicType ~ (TypeArgs | `#` ~/ Id).rep )
}
- def TypeArgs[_: P] = P( "[" ~/ Type.repTC() ~ "]" )
+ def TypeArgs[$: P] = P( "[" ~/ Type.repTC() ~ "]" )
- def FunSig[_: P]: P[Unit] = {
+ def FunSig[$: P]: P[Unit] = {
def FunArg = P( Annot.rep ~ Id ~ (`:` ~/ Type).? ~ (`=` ~/ TypeExpr).? )
def Args = P( FunArg.repTC(1) )
def FunArgs = P( OneNLMax ~ "(" ~/ `implicit`.? ~ Args.? ~ ")" )
@@ -55,18 +55,18 @@ trait Types extends Core{
P( (Id | `this`) ~ FunTypeArgs.? ~~ FunArgs.rep )
}
- def TypeBounds[_: P]: P[Unit] = P( (`>:` ~/ Type).? ~ (`<:` ~/ Type).? )
- def TypeArg[_: P]: P[Unit] = {
+ def TypeBounds[$: P]: P[Unit] = P( (`>:` ~/ Type).? ~ (`<:` ~/ Type).? )
+ def TypeArg[$: P]: P[Unit] = {
def CtxBounds = P((`<%` ~/ Type).rep ~ (`:` ~/ Type).rep)
- P((Id | `_`) ~ TypeArgList.? ~ TypeBounds ~ CtxBounds)
+ P((Id | Underscore) ~ TypeArgList.? ~ TypeBounds ~ CtxBounds)
}
- def Annot[_: P]: P[Unit] = P( `@` ~/ SimpleType ~ ("(" ~/ (Exprs ~ (`:` ~/ `_*`).?).? ~ TrailingComma ~ ")").rep )
+ def Annot[$: P]: P[Unit] = P( `@` ~/ SimpleType ~ ("(" ~/ (Exprs ~ (`:` ~/ `_*`).?).? ~ TrailingComma ~ ")").rep )
- def TypeArgList[_: P]: P[Unit] = {
+ def TypeArgList[$: P]: P[Unit] = {
def Variant: P[Unit] = P( Annot.rep ~ CharIn("+\\-").? ~ TypeArg )
P( "[" ~/ Variant.repTC(1) ~ "]" )
}
- def Exprs[_: P]: P[Unit] = P( TypeExpr.rep(1, ",") )
- def TypeDef[_: P]: P[Unit] = P( Id ~ TypeArgList.? ~ (`=` ~/ Type | TypeBounds) )
+ def Exprs[$: P]: P[Unit] = P( TypeExpr.rep(1, ",") )
+ def TypeDef[$: P]: P[Unit] = P( Id ~ TypeArgList.? ~ (`=` ~/ Type | TypeBounds) )
}
diff --git a/scalaparse/src/scalaparse/Xml.scala b/scalaparse/src/scalaparse/Xml.scala
index 94e34631..977d62dc 100644
--- a/scalaparse/src/scalaparse/Xml.scala
+++ b/scalaparse/src/scalaparse/Xml.scala
@@ -8,71 +8,71 @@ import scala.language.implicitConversions
import NoWhitespace._
trait Xml {
- def WL[_: P]: P0
- def WS[_: P]: P0
- def Block[_: P]: P0
- def Patterns[_: P]: P[Unit]
- def XmlExpr[_: P] = P( WL ~ Xml.XmlContent.rep(min = 1, sep = WL.?) )
- def XmlPattern[_: P] = P( WL ~ Xml.ElemPattern )
+ def WL[$: P]: P0
+ def WS[$: P]: P0
+ def Block[$: P]: P0
+ def Patterns[$: P]: P[Unit]
+ def XmlExpr[$: P] = P( WL ~ Xml.XmlContent.rep(min = 1, sep = WL.?) )
+ def XmlPattern[$: P] = P( WL ~ Xml.ElemPattern )
private[this] object Xml {
- def Element[_: P] = P( TagHeader ~/ ("/>" | ">" ~/ Content ~/ ETag ) )
- def TagHeader[_: P] = P( "<" ~ Name ~/ (WL ~ Attribute).rep ~ WL.? )
- def ETag[_: P] = P( "</" ~ Name ~ WL.? ~ ">" )
+ def Element[$: P] = P( TagHeader ~/ ("/>" | ">" ~/ Content ~/ ETag ) )
+ def TagHeader[$: P] = P( "<" ~ Name ~/ (WL ~ Attribute).rep ~ WL.? )
+ def ETag[$: P] = P( "</" ~ Name ~ WL.? ~ ">" )
- def Attribute[_: P] = P( Name ~ Eq ~/ AttValue )
- def Eq[_: P] = P( WL.? ~ "=" ~ WL.? )
- def AttValue[_: P] = P(
+ def Attribute[$: P] = P( Name ~ Eq ~/ AttValue )
+ def Eq[$: P] = P( WL.? ~ "=" ~ WL.? )
+ def AttValue[$: P] = P(
"\"" ~/ (CharQ | Reference).rep ~ "\"" |
"'" ~/ (CharA | Reference).rep ~ "'" |
ScalaExpr
)
- def Content[_: P] = P( (CharData | Reference | ScalaExpr | XmlContent).rep )
- def XmlContent[_: P]: P[Unit] = P( Unparsed | CDSect | PI | Comment | Element )
+ def Content[$: P] = P( (CharData | Reference | ScalaExpr | XmlContent).rep )
+ def XmlContent[$: P]: P[Unit] = P( Unparsed | CDSect | PI | Comment | Element )
- def ScalaExpr[_: P] = P( "{" ~ WS ~ Block ~ WL ~ "}" )
+ def ScalaExpr[$: P] = P( "{" ~ WS ~ Block ~ WL ~ "}" )
- def Unparsed[_: P] = P( UnpStart ~/ UnpData ~ UnpEnd )
- def UnpStart[_: P] = P( "<xml:unparsed" ~ (WL ~ Attribute).rep ~ WL.? ~ ">" )
- def UnpEnd[_: P] = P( "</xml:unparsed>" )
- def UnpData[_: P] = P( (!UnpEnd ~ AnyChar).rep )
+ def Unparsed[$: P] = P( UnpStart ~/ UnpData ~ UnpEnd )
+ def UnpStart[$: P] = P( "<xml:unparsed" ~ (WL ~ Attribute).rep ~ WL.? ~ ">" )
+ def UnpEnd[$: P] = P( "</xml:unparsed>" )
+ def UnpData[$: P] = P( (!UnpEnd ~ AnyChar).rep )
- def CDSect[_: P] = P( CDStart ~/ CData ~ CDEnd )
- def CDStart[_: P] = P( "<![CDATA[" )
- def CData[_: P] = P( (!"]]>" ~ Char).rep )
- def CDEnd[_: P] = P( "]]>" )
+ def CDSect[$: P] = P( CDStart ~/ CData ~ CDEnd )
+ def CDStart[$: P] = P( "<![CDATA[" )
+ def CData[$: P] = P( (!"]]>" ~ Char).rep )
+ def CDEnd[$: P] = P( "]]>" )
- def Comment[_: P] = P( "<!--" ~/ ComText ~ "-->" )
- def ComText[_: P] = P( (!"--" ~ Char).rep ~ ("-" ~ &("--")).? )
+ def Comment[$: P] = P( "<!--" ~/ ComText ~ "-->" )
+ def ComText[$: P] = P( (!"--" ~ Char).rep ~ ("-" ~ &("--")).? )
- def PI[_: P] = P( "<?" ~ PITarget ~ PIProcText.? ~ "?>" )
- def PITarget[_: P] = P( !(("X" | "x") ~ ("M" | "m") ~ ("L" | "l")) ~ Name )
- def PIProcText[_: P] = P( WL ~ (!"?>" ~ Char).rep )
+ def PI[$: P] = P( "<?" ~ PITarget ~ PIProcText.? ~ "?>" )
+ def PITarget[$: P] = P( !(("X" | "x") ~ ("M" | "m") ~ ("L" | "l")) ~ Name )
+ def PIProcText[$: P] = P( WL ~ (!"?>" ~ Char).rep )
- def Reference[_: P] = P( EntityRef | CharRef )
- def EntityRef[_: P] = P( "&" ~ Name ~/ ";" )
- def CharRef[_: P] = P( "&#" ~ Num ~/ ";" | "&#x" ~ HexNum ~/ ";" )
- def Num[_: P] = P( CharIn("0-9").rep )
- def HexNum[_: P] = P( CharIn("0-9a-fA-F").rep )
+ def Reference[$: P] = P( EntityRef | CharRef )
+ def EntityRef[$: P] = P( "&" ~ Name ~/ ";" )
+ def CharRef[$: P] = P( "&#" ~ Num ~/ ";" | "&#x" ~ HexNum ~/ ";" )
+ def Num[$: P] = P( CharIn("0-9").rep )
+ def HexNum[$: P] = P( CharIn("0-9a-fA-F").rep )
- def CharData[_: P] = P( (!"{" ~ Char1 | "{{").rep(1) )
+ def CharData[$: P] = P( (!"{" ~ Char1 | "{{").rep(1) )
- def Char[_: P] = P( AnyChar )
- def Char1[_: P] = P( !("<" | "&") ~ Char )
- def CharQ[_: P] = P( !"\"" ~ Char1 )
- def CharA[_: P] = P( !"'" ~ Char1 )
+ def Char[$: P] = P( AnyChar )
+ def Char1[$: P] = P( !("<" | "&") ~ Char )
+ def CharQ[$: P] = P( !"\"" ~ Char1 )
+ def CharA[$: P] = P( !"'" ~ Char1 )
- def Name[_: P] = P( NameStart ~ NameChar.rep )
- def NameStart[_: P] = P( CharPred(isNameStart) ).opaque("NameStart")
- def NameChar[_: P] = P( CharPred(isNameChar) ).opaque("NameChar")
+ def Name[$: P] = P( NameStart ~ NameChar.rep )
+ def NameStart[$: P] = P( CharPred(isNameStart) ).opaque("NameStart")
+ def NameChar[$: P] = P( CharPred(isNameChar) ).opaque("NameChar")
- def ElemPattern[_: P]: P[Unit] = P( TagPHeader ~/ ("/>" | ">" ~/ ContentP ~/ ETag ) )
- def TagPHeader[_: P] = P( "<" ~ Name ~ WL.? )
+ def ElemPattern[$: P]: P[Unit] = P( TagPHeader ~/ ("/>" | ">" ~/ ContentP ~/ ETag ) )
+ def TagPHeader[$: P] = P( "<" ~ Name ~ WL.? )
- def ContentP[_: P]: P[Unit] = P( ( CharDataP | ScalaPatterns | ElemPattern ).rep )
- def ScalaPatterns[_: P] = P( "{" ~ Patterns ~ WL ~ "}" )
- def CharDataP[_: P] = P( "&" ~ CharData.? | CharData ) // matches weirdness of scalac parser on xml reference.
+ def ContentP[$: P]: P[Unit] = P( ( CharDataP | ScalaPatterns | ElemPattern ).rep )
+ def ScalaPatterns[$: P] = P( "{" ~ Patterns ~ WL ~ "}" )
+ def CharDataP[$: P] = P( "&" ~ CharData.? | CharData ) // matches weirdness of scalac parser on xml reference.
//================================================================================
// From `scala.xml.parsing.TokenTests`
diff --git a/scalaparse/src/scalaparse/syntax/Basic.scala b/scalaparse/src/scalaparse/syntax/Basic.scala
index b957c9b5..4f9bf37d 100644
--- a/scalaparse/src/scalaparse/syntax/Basic.scala
+++ b/scalaparse/src/scalaparse/syntax/Basic.scala
@@ -8,21 +8,21 @@ import CharPredicates._
import scalaparse.syntax.Identifiers.NamedFunction
object Basic {
- def UnicodeEscape[_: P] = P( "u" ~ HexDigit ~ HexDigit ~ HexDigit ~ HexDigit )
+ def UnicodeEscape[$: P] = P( "u" ~ HexDigit ~ HexDigit ~ HexDigit ~ HexDigit )
//Numbers and digits
- def Digit[_: P] = P( CharIn("0-9") )
+ def Digit[$: P] = P( CharIn("0-9") )
- def HexDigit[_: P] = P( CharIn("0-9a-fA-F") )
- def HexNum[_: P] = P( "0x" ~ CharsWhileIn("0-9a-fA-F") )
- def DecNum[_: P] = P( CharsWhileIn("0-9") )
- def Exp[_: P] = P( CharIn("Ee") ~ CharIn("+\\-").? ~ DecNum )
- def FloatType[_: P] = P( CharIn("fFdD") )
+ def HexDigit[$: P] = P( CharIn("0-9a-fA-F") )
+ def HexNum[$: P] = P( "0x" ~ CharsWhileIn("0-9a-fA-F") )
+ def DecNum[$: P] = P( CharsWhileIn("0-9") )
+ def Exp[$: P] = P( CharIn("Ee") ~ CharIn("+\\-").? ~ DecNum )
+ def FloatType[$: P] = P( CharIn("fFdD") )
- def WSChars[_: P] = P( NoTrace(CharsWhileIn("\u0020\u0009")) )
- def Newline[_: P] = P( NoTrace(StringIn("\r\n", "\n")) )
- def Semi[_: P] = P( ";" | Newline.rep(1) )
- def OpChar[_: P] = P ( CharPred(isOpChar) )
+ def WSChars[$: P] = P( NoTrace(CharsWhileIn("\u0020\u0009")) )
+ def Newline[$: P] = P( NoTrace(StringIn("\r\n", "\n")) )
+ def Semi[$: P] = P( ";" | Newline.rep(1) )
+ def OpChar[$: P] = P ( CharPred(isOpChar) )
val isOpChar = NamedFunction{
case '!' | '#' | '%' | '&' | '*' | '+' | '-' | '/' |
@@ -38,8 +38,8 @@ object Basic {
)
val UpperChar = NamedFunction(isUpper)
- def Lower[_: P] = P( CharPred(LowerChar) )
- def Upper[_: P] = P( CharPred(UpperChar) )
+ def Lower[$: P] = P( CharPred(LowerChar) )
+ def Upper[$: P] = P( CharPred(UpperChar) )
}
/**
* Most keywords don't just require the correct characters to match,
@@ -48,7 +48,7 @@ object Basic {
* (W) and key-operators (O) which have different non-match criteria.
*/
object Key {
- def W[_: P](s: String) = P( s ~ !CharPred(Basic.LetterDigitDollarUnderscore) )(s"`$s`", implicitly)
+ def W[$: P](s: String) = P( s ~ !CharPred(Basic.LetterDigitDollarUnderscore) )(s"`$s`", implicitly)
// If the operator is followed by a comment, stop early so we can parse the comment
- def O[_: P](s: String) = P( s ~ (!Basic.OpChar | &(NoTrace(StringIn("/*", "//")))) )(s"`$s`", implicitly)
+ def O[$: P](s: String) = P( s ~ (!Basic.OpChar | &(NoTrace(StringIn("/*", "//")))) )(s"`$s`", implicitly)
}
diff --git a/scalaparse/src/scalaparse/syntax/Identifiers.scala b/scalaparse/src/scalaparse/syntax/Identifiers.scala
index 9350f4de..af4ad62a 100644
--- a/scalaparse/src/scalaparse/syntax/Identifiers.scala
+++ b/scalaparse/src/scalaparse/syntax/Identifiers.scala
@@ -15,24 +15,24 @@ object Identifiers{
val OpCharNotSlash = NamedFunction(x => isOpChar(x) && x != '/')
val NotBackTick = NamedFunction(_ != '`')
- def Operator[_: P] = P(
+ def Operator[$: P] = P(
!SymbolicKeywords ~ (!StringIn("/*", "//") ~ (CharsWhile(OpCharNotSlash) | "/")).rep(1)
).opaque("operator")
- def VarId[_: P] = P( VarId0(true) ).opaque("var-id")
+ def VarId[$: P] = P( VarId0(true) ).opaque("var-id")
- def VarId0[_: P](dollar: Boolean) = P( !Keywords ~ Lower ~ IdRest(dollar) )
+ def VarId0[$: P](dollar: Boolean) = P( !Keywords ~ Lower ~ IdRest(dollar) )
- def UppercaseId[_: P](dollar: Boolean) = P( !Keywords ~ Upper ~ IdRest(dollar) )
- def PlainId[_: P] = P( UppercaseId(true) | VarId | Operator ~ (!OpChar | &(StringIn("/*", "//"))) )
+ def UppercaseId[$: P](dollar: Boolean) = P( !Keywords ~ Upper ~ IdRest(dollar) )
+ def PlainId[$: P] = P( UppercaseId(true) | VarId | Operator ~ (!OpChar | &(StringIn("/*", "//"))) )
.opaque("plain-id")
- def PlainIdNoDollar[_: P] = P( UppercaseId(false) | VarId0(false) | Operator ).opaque("plain-id")
+ def PlainIdNoDollar[$: P] = P( UppercaseId(false) | VarId0(false) | Operator ).opaque("plain-id")
- def BacktickId[_: P] = P( "`" ~ CharsWhile(NotBackTick) ~ "`" )
- def Id[_: P]: P[Unit] = P( BacktickId | PlainId ).opaque("id")
+ def BacktickId[$: P] = P( "`" ~ CharsWhile(NotBackTick) ~ "`" )
+ def Id[$: P]: P[Unit] = P( BacktickId | PlainId ).opaque("id")
- def IdRest[_: P](allowDollar: Boolean) = {
+ def IdRest[$: P](allowDollar: Boolean) = {
val IdCharacter =
if(allowDollar) NamedFunction(c => c == '$' || isLetter(c) || isDigit(c))
@@ -42,7 +42,7 @@ object Identifiers{
P( IdUnderscoreChunk.rep ~ (CharsWhileIn("_") ~ CharsWhile(isOpChar, 0)).? )
}
- def AlphabetKeywords[_: P] = P {
+ def AlphabetKeywords[$: P] = P {
StringIn("abstract", "case", "catch", "class", "def", "do", "else",
"extends", "false", "finally", "final", "finally", "forSome",
"for", "if", "implicit", "import", "lazy", "match", "new",
@@ -52,9 +52,9 @@ object Identifiers{
!CharPred(Basic.LetterDigitDollarUnderscore)
}.opaque("AlphabetKeywords")
- def SymbolicKeywords[_: P] = P{
+ def SymbolicKeywords[$: P] = P{
StringIn(":", ";", "=>", "=", "<-", "<:", "<%", ">:", "#", "@", "\u21d2", "\u2190") ~ !OpChar
}.opaque("SymbolicKeywords")
- def Keywords[_: P] = P( AlphabetKeywords | SymbolicKeywords )
+ def Keywords[$: P] = P( AlphabetKeywords | SymbolicKeywords )
}
diff --git a/scalaparse/src/scalaparse/syntax/Literals.scala b/scalaparse/src/scalaparse/syntax/Literals.scala
index bebc9579..14e8985f 100644
--- a/scalaparse/src/scalaparse/syntax/Literals.scala
+++ b/scalaparse/src/scalaparse/syntax/Literals.scala
@@ -6,64 +6,64 @@ import NoWhitespace._
import Identifiers._
trait Literals { l =>
- def Block[_: P]: P[Unit]
+ def Block[$: P]: P[Unit]
/**
* Parses all whitespace, excluding newlines. This is only
* really useful in e.g. {} blocks, where we want to avoid
* capturing newlines so semicolon-inference would work
*/
- def WS[_: P]: P[Unit] = P( NoTrace((Basic.WSChars | Literals.Comment).rep) )
+ def WS[$: P]: P[Unit] = P( NoTrace((Basic.WSChars | Literals.Comment).rep) )
/**
* Parses whitespace, including newlines.
* This is the default for most things
*/
- def WL0[_: P]: P[Unit] = P( ScalaWhitespace.whitespace(P.current) )
- def WL[_: P]: P[Unit] = P( NoCut(WL0) )
+ def WL0[$: P]: P[Unit] = P( ScalaWhitespace.whitespace(P.current) )
+ def WL[$: P]: P[Unit] = P( NoCut(WL0) )
- def Semis[_: P]: P[Unit] = P( NoTrace(NoCut(WS) ~ Basic.Semi.rep(1, NoCut(WS)) ~ NoCut(WS)) )
- def Newline[_: P]: P[Unit] = P( WL ~ Basic.Newline )
+ def Semis[$: P]: P[Unit] = P( NoTrace(NoCut(WS) ~ Basic.Semi.rep(1, NoCut(WS)) ~ NoCut(WS)) )
+ def Newline[$: P]: P[Unit] = P( WL ~ Basic.Newline )
- def NotNewline[_: P]: P[Unit] = P( &( WS ~ !Basic.Newline ) )
- def OneNLMax[_: P]: P[Unit] = {
+ def NotNewline[$: P]: P[Unit] = P( &( WS ~ !Basic.Newline ) )
+ def OneNLMax[$: P]: P[Unit] = {
def ConsumeComments = P( (Basic.WSChars.? ~ NoTrace(Literals.Comment) ~ Basic.WSChars.? ~ Basic.Newline).rep )
P( NoCut(NoTrace(WS ~ Basic.Newline.? ~ ConsumeComments ~ NotNewline) ))
}
- def TrailingComma[_: P]: P[Unit] = P( ("," ~ WS ~ Basic.Newline).? )
- def Pattern[_: P]: P[Unit]
+ def TrailingComma[$: P]: P[Unit] = P( ("," ~ WS ~ Basic.Newline).? )
+ def Pattern[$: P]: P[Unit]
object Literals{
import Basic._
- def Float[_: P] = {
+ def Float[$: P] = {
def LeadingDotFloat = P( "." ~ DecNum ~ Exp.? ~ FloatType.? )
def FloatSuffix = P( LeadingDotFloat | Exp ~ FloatType.? | Exp.? ~ FloatType )
P( LeadingDotFloat | DecNum ~ FloatSuffix )
}
- def Int[_: P] = P( (HexNum | DecNum) ~ ("L" | "l").? )
+ def Int[$: P] = P( (HexNum | DecNum) ~ ("L" | "l").? )
- def Bool[_: P] = P( Key.W("true") | Key.W("false") )
+ def Bool[$: P] = P( Key.W("true") | Key.W("false") )
// Comments cannot have cuts in them, because they appear before every
// terminal node. That means that a comment before any terminal will
// prevent any backtracking from working, which is not what we want!
- def CommentChunk[_: P] = P( CharsWhile(c => c != '/' && c != '*') | MultilineComment | !"*/" ~ AnyChar )
- def MultilineComment[_: P]: P[Unit] = P( "/*" ~/ CommentChunk.rep ~ "*/" )
- def SameLineCharChunks[_: P] = P( CharsWhile(c => c != '\n' && c != '\r') | !Basic.Newline ~ AnyChar )
- def LineComment[_: P] = P( "//" ~ SameLineCharChunks.rep ~ &(Basic.Newline | End) )
- def Comment[_: P]: P[Unit] = P( MultilineComment | LineComment )
+ def CommentChunk[$: P] = P( CharsWhile(c => c != '/' && c != '*') | MultilineComment | !"*/" ~ AnyChar )
+ def MultilineComment[$: P]: P[Unit] = P( "/*" ~/ CommentChunk.rep ~ "*/" )
+ def SameLineCharChunks[$: P] = P( CharsWhile(c => c != '\n' && c != '\r') | !Basic.Newline ~ AnyChar )
+ def LineComment[$: P] = P( "//" ~ SameLineCharChunks.rep ~ &(Basic.Newline | End) )
+ def Comment[$: P]: P[Unit] = P( MultilineComment | LineComment )
- def Null[_: P] = Key.W("null")
+ def Null[$: P] = Key.W("null")
- def OctalEscape[_: P] = P( Digit ~ Digit.? ~ Digit.? )
- def Escape[_: P] = P( "\\" ~/ (CharIn("""btnfr'\\"]""") | OctalEscape | UnicodeEscape ) )
+ def OctalEscape[$: P] = P( Digit ~ Digit.? ~ Digit.? )
+ def Escape[$: P] = P( "\\" ~/ (CharIn("""btnfr'\\"]""") | OctalEscape | UnicodeEscape ) )
// Note that symbols can take on the same values as keywords!
- def Symbol[_: P] = P( Identifiers.PlainId | Identifiers.Keywords )
+ def Symbol[$: P] = P( Identifiers.PlainId | Identifiers.Keywords )
- def Char[_: P] = {
+ def Char[$: P] = {
// scalac 2.10 crashes if PrintableChar below is substituted by its body
def PrintableChar = CharPred(CharPredicates.isPrintableChar)
@@ -71,29 +71,29 @@ trait Literals { l =>
}
class InterpCtx(interp: Option[() => P[Unit]]) {
- def Literal[_: P] = P( ("-".? ~ (Float | Int)) | Bool | String | "'" ~/ (Char | Symbol) | Null )
- def Interp[_: P] = interp match{
+ def Literal[$: P] = P( ("-".? ~ (Float | Int)) | Bool | String | "'" ~/ (Char | Symbol) | Null )
+ def Interp[$: P] = interp match{
case None => P ( Fail )
case Some(p) => P( "$" ~ Identifiers.PlainIdNoDollar | ("${" ~ p() ~ WL ~ "}") | "$$" )
}
- def TQ[_: P] = P( "\"\"\"" )
+ def TQ[$: P] = P( "\"\"\"" )
/**
* Helper to quickly gobble up large chunks of un-interesting
* characters. We break out conservatively, even if we don't know
* it's a "real" escape sequence: worst come to worst it turns out
* to be a dud and we go back into a CharsChunk next rep
*/
- def StringChars[_: P] = P( CharsWhile(c => c != '\n' && c != '"' && c != '\\' && c != '$') )
- def NonTripleQuoteChar[_: P] = P( "\"" ~ "\"".? ~ !"\"" | CharIn("\\\\$\n") )
- def TripleChars[_: P] = P( (StringChars | Interp | NonTripleQuoteChar).rep )
- def TripleTail[_: P] = P( TQ ~ "\"".rep )
- def SingleChars[_: P](allowSlash: Boolean) = {
+ def StringChars[$: P] = P( CharsWhile(c => c != '\n' && c != '"' && c != '\\' && c != '$') )
+ def NonTripleQuoteChar[$: P] = P( "\"" ~ "\"".? ~ !"\"" | CharIn("\\\\$\n") )
+ def TripleChars[$: P] = P( (StringChars | Interp | NonTripleQuoteChar).rep )
+ def TripleTail[$: P] = P( TQ ~ "\"".rep )
+ def SingleChars[$: P](allowSlash: Boolean) = {
def LiteralSlash = P( if(allowSlash) "\\" else Fail )
def NonStringEnd = P( !CharIn("\n\"") ~ AnyChar )
P( (StringChars | Interp | LiteralSlash | Escape | NonStringEnd ).rep )
}
- def String[_: P] = {
+ def String[$: P] = {
P {
Id.filter(_ => interp.isDefined) ~ (
TQ ~/ TripleChars ~ TripleTail |
@@ -105,9 +105,9 @@ trait Literals { l =>
}
}
- def NoInterp[_: P] = new InterpCtx(None)
- def Pat[_: P] = new InterpCtx(Some(() => l.Pattern))
- def Expr[_: P] = new InterpCtx(Some(() => Block))
+ def NoInterp[$: P] = new InterpCtx(None)
+ def Pat[$: P] = new InterpCtx(Some(() => l.Pattern))
+ def Expr[$: P] = new InterpCtx(Some(() => Block))
}
}
diff --git a/scalaparse/test/src-jvm/scalaparse/ProjectTests.scala b/scalaparse/test/src-2-jvm/scalaparse/ProjectTests.scala
similarity index 97%
rename from scalaparse/test/src-jvm/scalaparse/ProjectTests.scala
rename to scalaparse/test/src-2-jvm/scalaparse/ProjectTests.scala
index 35315be9..75ab48ef 100644
--- a/scalaparse/test/src-jvm/scalaparse/ProjectTests.scala
+++ b/scalaparse/test/src-2-jvm/scalaparse/ProjectTests.scala
@@ -1,13 +1,11 @@
package scalaparse
import java.io.File
-import java.nio.file.{Files, Path, Paths}
-
-
-
+import java.nio.file.{Files, Paths}
import concurrent.ExecutionContext.Implicits.global
import utest._
+import scala.annotation.nowarn
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
@@ -16,7 +14,8 @@ object ProjectTests extends TestSuite{
println("running")
def tests = this{
- def checkDir(path: String, filter: String => Boolean = _ => true) = {
+ @nowarn
+ def checkDir(path: String, filter: String => Boolean = _ => true): Unit = {
println("Checking Dir " + path)
def listFiles(s: File): Seq[String] = {
val (dirs, files) = Option(s.listFiles).getOrElse(Array[File]()).partition(_.isDirectory)
@@ -53,7 +52,6 @@ object ProjectTests extends TestSuite{
if (scala.util.Properties.javaVersion.startsWith("1.8")) {
val repo = "https://github.com/" + testPath.value.last
- import sys.process._
val name = repo.split("/").last
println("CLONING?")
val path = Paths.get("out", "repos", name)
@@ -117,7 +115,6 @@ object ProjectTests extends TestSuite{
"mesosphere/marathon" - checkRepo("6d5c5b53ddd63aa2ee2a4e504d1dfd4159dde31c")
"scalatra/scalatra" - checkRepo("d997d15cc791a1043d60969c27d468ea1a2ea1c3")
"slick/slick" - checkRepo("dcd5bcfa1f52192339c6eb9b264673b1eb893560")
- "ensime/ensime-server" - checkRepo("3db871f41b30572225cdce5a33ffa779721f915b")
"GravityLabs/goose" - checkRepo("462f04a0b3d79508266770fd2462b1d4b43f6c54")
"ornicar/lila" - checkRepo(
"3f093d56e5000560a5db83e9d26db8e5143f2a80",
diff --git a/scalaparse/test/src-jvm/scalaparse/TestMain.scala b/scalaparse/test/src-2-jvm/scalaparse/TestMain.scala
similarity index 100%
rename from scalaparse/test/src-jvm/scalaparse/TestMain.scala
rename to scalaparse/test/src-2-jvm/scalaparse/TestMain.scala
diff --git a/scalaparse/test/src-jvm/scalaparse/ScalacParser.scala b/scalaparse/test/src-2.12-jvm/scalaparse/ScalacParser.scala
similarity index 98%
rename from scalaparse/test/src-jvm/scalaparse/ScalacParser.scala
rename to scalaparse/test/src-2.12-jvm/scalaparse/ScalacParser.scala
index 404b8d24..4d43c488 100644
--- a/scalaparse/test/src-jvm/scalaparse/ScalacParser.scala
+++ b/scalaparse/test/src-2.12-jvm/scalaparse/ScalacParser.scala
@@ -27,7 +27,7 @@ object ScalacParser{
val global = new Global(settings)
def checkParseFails(input: String) = this.synchronized{
- val run = new global.Run()
+ new global.Run()
var fail = false
import global.syntaxAnalyzer.Offset
val cu = new global.CompilationUnit(global.newSourceFile(input))
diff --git a/scalaparse/test/src-2.13-jvm/scalaparse/ScalacParser.scala b/scalaparse/test/src-2.13-jvm/scalaparse/ScalacParser.scala
new file mode 100644
index 00000000..cd3c3eb8
--- /dev/null
+++ b/scalaparse/test/src-2.13-jvm/scalaparse/ScalacParser.scala
@@ -0,0 +1,61 @@
+package scalaparse
+
+import scala.reflect.internal.util.CodeAction
+import scala.tools.nsc.{Global, Settings}
+
+object ScalacParser{
+ var current = Thread.currentThread().getContextClassLoader
+ val files = collection.mutable.Buffer.empty[java.io.File]
+ files.appendAll(
+ System.getProperty("sun.boot.class.path")
+ .split(":")
+ .map(new java.io.File(_))
+ )
+ while(current != null){
+ current match{
+ case t: java.net.URLClassLoader =>
+ files.appendAll(t.getURLs.map(u => new java.io.File(u.toURI)))
+ case _ =>
+ }
+ current = current.getParent
+ }
+
+ val settings = new Settings()
+ settings.usejavacp.value = true
+ settings.embeddedDefaults[ScalacParser.type]
+ settings.classpath.append(files.mkString(":"))
+
+ val global = new Global(settings)
+
+ def checkParseFails(input: String) = this.synchronized{
+ new global.Run()
+ var fail = false
+ import global.syntaxAnalyzer.Offset
+ val cu = new global.CompilationUnit(global.newSourceFile(input))
+ val parser = new global.syntaxAnalyzer.UnitParser(cu, Nil){
+ override def newScanner() = new global.syntaxAnalyzer.UnitScanner(cu, Nil){
+ override def error(off: Offset, msg: String) = {
+ fail = true
+ }
+ override def syntaxError(off: Offset, msg: String) = {
+ fail = true
+ }
+ override def incompleteInputError(off: Offset, msg: String) = {
+ fail = true
+ }
+ }
+
+ override def incompleteInputError(msg: String, actions: List[CodeAction]): Unit = {
+ fail = true
+ super.incompleteInputError(msg, actions)
+ }
+
+ override def syntaxError(offset: global.syntaxAnalyzer.Offset, msg: String, actions: List[CodeAction]): Unit = {
+ fail = true
+ super.syntaxError(offset, msg, actions)
+ }
+ }
+ parser.parse()
+ fail
+ }
+}
diff --git a/scalaparse/test/src/scalaparse/TestUtil.scala b/scalaparse/test/src/scalaparse/TestUtil.scala
index 7fdc7349..26ec8dfa 100644
--- a/scalaparse/test/src/scalaparse/TestUtil.scala
+++ b/scalaparse/test/src/scalaparse/TestUtil.scala
@@ -68,7 +68,7 @@ object TestUtil {
def check[T](input: String, tag: String = "", skipIterator: Boolean = false) = {
println("Checking...\n" )
- println(input)
+// println(input)
check0(input, input.length, tag)
if (!skipIterator) {
for(chunkSize <- Seq(1, 5, 18, 67, 260, 1029)) {
diff --git a/scalaparse/test/src/scalaparse/unit/FailureTests.scala b/scalaparse/test/src/scalaparse/unit/FailureTests.scala
index d8e78ba7..e72720c7 100644
--- a/scalaparse/test/src/scalaparse/unit/FailureTests.scala
+++ b/scalaparse/test/src/scalaparse/unit/FailureTests.scala
@@ -9,7 +9,7 @@ object FailureTests extends TestSuite{
test - checkNeg(
"package package",
aggregate = """(QualId | PkgBlock | PkgObj)""",
- terminals = """("`" | var-id | chars-while(OpCharNotSlash, 1) | "/" | operator | plain-id | id | "case" | "object")""",
+ terminals = """(id | "case" | "object")""",
found = "package"
)
@@ -18,8 +18,8 @@ object FailureTests extends TestSuite{
|import a
|import import
""".stripMargin,
- aggregate = """(Semis ~ `package` | Semis ~ TopStat | ThisPath | IdPath)""",
- terminals = """("this" | "super" | "`" | var-id | chars-while(OpCharNotSlash, 1) | "/" | operator | plain-id | id)""",
+ aggregate = """(ThisPath | IdPath)""",
+ terminals = """("this" | "super" | id)""",
found = "import"
)
@@ -34,7 +34,7 @@ object FailureTests extends TestSuite{
|}
""".stripMargin,
aggregate = """(Id | Generator | Assign)""",
- terminals = """("`" | char-pred(UpperChar) | char-pred(LowerChar) | var-id | chars-while(OpCharNotSlash, 1) | "/" | operator | plain-id | id | "<-" | "←" | "=")""",
+ terminals = """ (id | "<-" | "=")""",
found = "} yield x"
)
test - checkNeg(
@@ -43,7 +43,7 @@ object FailureTests extends TestSuite{
|}
""".stripMargin,
aggregate = """(NamedType | Refinement)""",
- terminals = """(chars-while(IdCharacter, 1) | [_] | [ \t] | "/*" | "//" | "(" | "-" | "." | [0-9] | "0x" | "true" | "false" | "`" | char-pred(UpperChar) | char-pred(LowerChar) | var-id | chars-while(OpCharNotSlash, 1) | "/" | operator | plain-id | id | filter | "\"\"\"" | "\"" | "'" | "null" | "this" | "super" | "_" | "{")""",
+ terminals = """([ \t] | "/*" | "//" | "\n" | "\r\n" | "(" | "-" | "." | [0-9] | "0x" | "true" | "false" | id | filter | "\"\"\"" | "\"" | "'" | "null" | "this" | "super" | "_" | "{")""",
found = ")"
)
test - checkNeg(
@@ -70,7 +70,7 @@ object FailureTests extends TestSuite{
| }
|}
""".stripMargin,
- aggregate = """(FunArgs | `:` | Body | Semis | "}")""",
+ aggregate = """(FunArgs | `:` | Body | "}")""",
terminals = null,
found = "](input: S"
)
@@ -93,7 +93,7 @@ object FailureTests extends TestSuite{
| }
|}
""".stripMargin,
- aggregate = """("=>" | `:` | "." | TypeArgs | ArgList | `_` | Id | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """("=>" | `:` | "." | TypeArgs | ArgList | `_` | Id | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found ="1\n"
)
@@ -117,7 +117,7 @@ object FailureTests extends TestSuite{
| filename.asInstanceOf 10
|}
""".stripMargin,
- aggregate = """("." | TypeArgs | ArgList | `_` | Id | "=>" | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """("." | TypeArgs | ArgList | `_` | Id | "=>" | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found = "10"
)
@@ -179,7 +179,7 @@ object FailureTests extends TestSuite{
|import org.parboiled2 _
|
""".stripMargin,
- aggregate = """(Semis ~ `package` | "." | "," | end-of-input)""",
+ aggregate = """("." | "," | end-of-input)""",
terminals = null,
found = "_"
)
@@ -220,7 +220,7 @@ object FailureTests extends TestSuite{
|}
|
""".stripMargin,
- aggregate = """(WL ~ "." | WL ~ TypeArgs | NotNewline ~ ArgList | `_` | InfixSuffix | PostFix | "=>" | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """("." | TypeArgs | ArgList | `_` | InfixSuffix | PostFix | "=>" | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found = ")"
)
@@ -251,7 +251,7 @@ object FailureTests extends TestSuite{
| d = 1
|
""".stripMargin,
- aggregate = """("." | TypeArgs | "=>" | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """("." | TypeArgs | "=>" | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found = ""
)
@@ -361,7 +361,7 @@ object FailureTests extends TestSuite{
| a =:= .c
|}
""".stripMargin,
- aggregate = """(TypeArgs | PrefixExpr | Newline | "=>" | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """(TypeArgs | PrefixExpr | "=>" | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found = ".c"
)
@@ -373,7 +373,7 @@ object FailureTests extends TestSuite{
| )
|}
""".stripMargin,
- aggregate = """(_* | AscriptionType | Annot.rep(1))""",
+ aggregate = """(Underscore* | AscriptionType | Annot.rep(1))""",
terminals = null,
found = ")\n}"
)
@@ -416,7 +416,7 @@ object FailureTests extends TestSuite{
| val trueA = 1
|}
""".stripMargin,
- aggregate = """(DefTmpl | Semis ~ TopStat | end-of-input)""",
+ aggregate = """(DefTmpl | TopStat | end-of-input)""",
terminals = null,
found = "val trueA"
)
@@ -426,7 +426,7 @@ object FailureTests extends TestSuite{
| val null null cow = 1
|}
""".stripMargin,
- aggregate = """(Id | "," | `:` | `=` | Semis | "}")""",
+ aggregate = """(Id | "," | `:` | `=` | "}")""",
terminals = null,
found = "null cow"
)
@@ -436,7 +436,7 @@ object FailureTests extends TestSuite{
| val omg_+_+ = 1
|}
""".stripMargin,
- aggregate = """(`@` | TQ | "\"" | "." | TypeArgs | TupleEx | Id | "," | `:` | `=` | Semis | "}")""",
+ aggregate = """(`@` | TQ | "\"" | "." | TypeArgs | TupleEx | Id | "," | `:` | `=` | "}")""",
terminals = null,
found = "_+ = 1"
)
@@ -447,7 +447,7 @@ object FailureTests extends TestSuite{
| var = 2
|}
""".stripMargin,
- aggregate = """(Semis ~ TmplStat | Binding | InfixPattern | VarId)""",
+ aggregate = """(Binding | InfixPattern | VarId)""",
terminals = null,
found = "= 2"
)
@@ -480,7 +480,7 @@ object FailureTests extends TestSuite{
| a!.b
|}
""".stripMargin,
- aggregate = """(TypeArgs | PrefixExpr | Newline | "=>" | `=` | MatchAscriptionSuffix | Semis | "}")""",
+ aggregate = """(TypeArgs | PrefixExpr | "=>" | `=` | MatchAscriptionSuffix | "}")""",
terminals = null,
found = ".b"
)
@@ -519,7 +519,7 @@ object FailureTests extends TestSuite{
|package omg
|;
""".stripMargin,
- aggregate = """(Semis ~ TopStat | "{")""",
+ aggregate = """("." | "{")""",
terminals = null,
found = ";"
)
@@ -529,7 +529,7 @@ object FailureTests extends TestSuite{
| { a: L = }
|}
""".stripMargin,
- aggregate = """("." | TypeArgs | `#` | Annot | `with` | { | `*` | Id | "=>" | BlockLambda | BlockStat | Semis | "}")""",
+ aggregate = """("." | TypeArgs | `#` | Annot | `with` | { | `*` | Id | "=>" | BlockLambda | BlockStat | "}")""",
terminals = null,
found = "= }"
)
@@ -549,7 +549,7 @@ object FailureTests extends TestSuite{
|}
|
""".stripMargin,
- aggregate = """(PostDotCheck | id)""",
+ aggregate = """id""",
terminals = null,
found = "this"
)
@@ -645,7 +645,7 @@ object FailureTests extends TestSuite{
|}
|
""".stripMargin,
- aggregate = """(TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | "," ~ Type | "," ~ WS ~ Newline | "]")""",
+ aggregate = """(TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | "," ~ Type | "," | "]")""",
terminals = null,
found = ", ]"
)
@@ -669,7 +669,7 @@ object FailureTests extends TestSuite{
| }
|}
""".stripMargin,
- aggregate = """(BlockLambda | BlockStat | Semis | "}")""",
+ aggregate = """(BlockLambda | BlockStat | "}")""",
terminals = null,
found = "case for"
)
@@ -691,7 +691,7 @@ object FailureTests extends TestSuite{
|}
|
""".stripMargin,
- aggregate = """(StringChars | Interp | LiteralSlash | Escape | NonStringEnd | "\"")""",
+ aggregate = """(StringChars | Interp | LiteralSlash | Escape | "\"")""",
terminals = null,
found = "\n"
)
@@ -759,7 +759,7 @@ object FailureTests extends TestSuite{
| val (x,) = 1
|}
""".stripMargin,
- aggregate = """(`:` | `@` | TQ | "\"" | "." | TypeArgs | TupleEx | Id | "|" | "," ~ Pattern | "," ~ WS ~ Newline | ")")""",
+ aggregate = """(`:` | `@` | TQ | "\"" | "." | TypeArgs | TupleEx | Id | "|" | "," ~ Pattern | "," | ")")""",
terminals = null,
found = ",)"
)
@@ -799,7 +799,7 @@ object FailureTests extends TestSuite{
s"""
|object X{def f(x: Int, ) = 1}
""".stripMargin,
- aggregate = """("." | TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | `=` | "," ~ FunArg | "," ~ WS ~ Newline | ")")""",
+ aggregate = """("." | TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | `=` | "," ~ FunArg | "," | ")")""",
terminals = null,
found = ", )"
)
@@ -807,7 +807,7 @@ object FailureTests extends TestSuite{
s"""
|object X{(2,)}
""".stripMargin,
- aggregate = """(FloatSuffix | "L" | "l" | WL ~ "." | WL ~ TypeArgs | Pass ~ ArgList | `_` | InfixSuffix | PostFix | "=>" | `=` | MatchAscriptionSuffix | "," ~ Expr | "," ~ WS ~ Newline | ")")""",
+ aggregate = """(FloatSuffix | "L" | "l" | "." | TypeArgs | ArgList | `_` | InfixSuffix | PostFix | "=>" | `=` | MatchAscriptionSuffix | "," ~ Expr | "," | ")")""",
terminals = null,
found = ",)"
)
@@ -815,7 +815,7 @@ object FailureTests extends TestSuite{
s"""
|object X{f[A,]}
""".stripMargin,
- aggregate = """("." | TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | "," ~ Type | "," ~ WS ~ Newline | "]")""",
+ aggregate = """("." | TypeArgs | `#` | NLAnnot | `with` | Refinement | `*` | Id | "=>" | ExistentialClause | `>:` | `<:` | "," ~ Type | "," | "]")""",
terminals = null,
found = ",]"
)
@@ -831,7 +831,7 @@ object FailureTests extends TestSuite{
s"""
|object X{def f[T, B,] = 1}
""".stripMargin,
- aggregate = """(TypeArgList | `>:` | `<:` | `<%` | `:` | "," ~ Annot.rep ~ TypeArg | "," ~ WS ~ Newline | "]")""",
+ aggregate = """(TypeArgList | `>:` | `<:` | `<%` | `:` | "," ~ Annot.rep ~ TypeArg | "," | "]")""",
terminals = null,
found = ",]"
)
@@ -943,13 +943,13 @@ object FailureTests extends TestSuite{
| for(i <- Nil if x: Int => bar) 1
|}
""".stripMargin,
- aggregate = """(TQ | "\"" | "." | WL ~ "." | WL ~ TypeArgs | Pass ~ ArgList | `_` | InfixSuffix | PostFix | Enumerator | ")")""",
+ aggregate = """(TQ | "\"" | "." | TypeArgs | ArgList | `_` | InfixSuffix | PostFix | Enumerator | ")")""",
terminals = null,
found = ": Int"
)
test - checkNeg(
s"""object Foo{; x: Int => x}""",
- aggregate = """("." | TypeArgs | `#` | Annot | `with` | { | `*` | Id | Semis | "}")""",
+ aggregate = """("." | TypeArgs | `#` | Annot | `with` | { | `*` | Id | "}")""",
terminals = null,
found = "=> x"
)
@@ -978,7 +978,16 @@ object FailureTests extends TestSuite{
| val x = 1
| ;
| """.stripMargin,
- aggregate = """(BlockLambda | BlockStat | Semis | "}")""",
+ aggregate = """(BlockLambda | BlockStat | "}")""",
+ terminals = null,
+ found = ""
+ )
+
+ test - checkNeg(
+ """object foo
+ |/*
+ | """.stripMargin,
+ aggregate = """"*/"""",
terminals = null,
found = ""
)
diff --git a/scalaparse/test/src/scalaparse/unit/SuccessTests.scala b/scalaparse/test/src/scalaparse/unit/SuccessTests.scala
index 6ad76ab6..c823f0c8 100644
--- a/scalaparse/test/src/scalaparse/unit/SuccessTests.scala
+++ b/scalaparse/test/src/scalaparse/unit/SuccessTests.scala
@@ -1,7 +1,6 @@
package scalaparse.unit
-import scalaparse.{TestUtil, Scala}
-import fastparse._
+import scalaparse.TestUtil
import utest._
import TestUtil._
object SuccessTests extends TestSuite{
@@ -1305,7 +1304,6 @@ object SuccessTests extends TestSuite{
val d1 = "dca5"
val d2 = "20ac"
val d3 = "d83c"
- val d4 = "dca5"
test - check(
s"""object U{
| Seq(