Merge pull request #6746 from eed3si9n/wip/sbt-2.x

sbt 2.x (sbt in Scala 3)
This commit is contained in:
eugene yokota 2023-10-21 20:40:20 -04:00 committed by GitHub
commit 11cc8b5020
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
938 changed files with 13587 additions and 12360 deletions

View File

@ -17,15 +17,15 @@ jobs:
distribution: temurin
jobtype: 1
- os: ubuntu-latest
java: 17
java: 11
distribution: temurin
jobtype: 2
- os: ubuntu-latest
java: 17
java: 11
distribution: temurin
jobtype: 3
- os: ubuntu-latest
java: 17
java: 11
distribution: temurin
jobtype: 4
- os: ubuntu-latest
@ -44,7 +44,7 @@ jobs:
java: 8
distribution: adopt
jobtype: 8
- os: windows-latest
- os: windows-2019
java: 8
distribution: adopt
jobtype: 9
@ -93,14 +93,20 @@ jobs:
python-version: 3.7
- name: Coursier cache
uses: coursier/cache-action@v6
- name: Cache sbt
uses: actions/cache@v3
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-cache-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
# - name: Cache sbt
# uses: actions/cache@v3
# with:
# path: ~/.sbt
# key: ${{ runner.os }}-sbt-cache-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
- name: Setup Windows C++ toolchain
uses: ilammy/msvc-dev-cmd@v1
if: ${{ matrix.os == 'windows-latest' }}
if: ${{ matrix.os == 'windows-2019' }}
- name: Pre-test cleanup
shell: bash
run: |
rm -rf "$HOME/.sbt/scripted/" || true
rm -rf "$HOME/.ivy2/local" || true
rm -r $(find $HOME/.sbt/boot -name "*-SNAPSHOT") || true
- name: Build and test (1)
if: ${{ matrix.jobtype == 1 }}
shell: bash
@ -112,48 +118,54 @@ jobs:
./sbt -v --client scalafmtCheckAll
./sbt -v --client scalafmtSbtCheck
./sbt -v --client serverTestProj/scalafmtCheckAll
./sbt -v --client headerCheck
./sbt -v --client "Test/headerCheck"
# ./sbt -v --client headerCheck
# ./sbt -v --client "Test/headerCheck"
./sbt -v --client "Test/compile"
./sbt -v --client publishLocal
./sbt -v --client test
./sbt -v --client "serverTestProj/test"
./sbt -v --client doc
./sbt -v --client "all $UTIL_TESTS"
./sbt -v --client ++$SCALA_213
# ./sbt -v --client "serverTestProj/test"
# ./sbt -v --client doc
./sbt -v --client "all $UTIL_TESTS"
# ./sbt -v --client ++$SCALA_213
# ./sbt -v --client "all $UTIL_TESTS"
- name: Build and test (2)
if: ${{ matrix.jobtype == 2 }}
shell: bash
run: |
./sbt -v "scripted actions/* apiinfo/* compiler-project/* ivy-deps-management/* reporter/* tests/* watch/* classloader-cache/* package/*"
./sbt -v "scripted actions/* apiinfo/* compiler-project/* ivy-deps-management/* reporter/* tests/* classloader-cache/* package/*"
# ./sbt -v "scripted watch/*"
- name: Build and test (3)
if: ${{ matrix.jobtype == 3 }}
shell: bash
run: |
./sbt -v "dependencyTreeProj/publishLocal; scripted dependency-graph/* dependency-management/* plugins/* project-load/* java/* run/* nio/*"
# ./sbt -v "dependencyTreeProj/publishLocal; scripted dependency-graph/*"
./sbt -v --client "scripted dependency-management/* project-load/* java/* run/*"
# ./sbt -v --client "scripted plugins/*"
# ./sbt -v --client "scripted nio/*"
- name: Build and test (4)
if: ${{ matrix.jobtype == 4 }}
shell: bash
run: |
./sbt -v "repoOverrideTest:scripted dependency-management/*; scripted source-dependencies/* project/*"
- name: Build and test (5)
if: ${{ matrix.jobtype == 5 }}
shell: bash
run: |
./sbt -v "++$SCALA_213!; test; ++$SCALA_3!; all utilControl/test utilRelation/test utilPosition/test"
- name: Build and test (6)
if: ${{ matrix.jobtype == 6 }}
shell: bash
run: |
# build from fresh IO, LM, and Zinc
BUILD_VERSION="1.5.0-SNAPSHOT"
cd io
sbt -v -Dsbt.build.version=${BUILD_VERSION} +publishLocal
cd ../
sbt -Dsbtlm.path=$HOME/work/sbt/sbt/librarymanagement -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.version=$BUILD_VERSION -Dsbt.build.fatal=false "+lowerUtils/publishLocal; {librarymanagement}/publishLocal; {zinc}/publishLocal; upperModules/publishLocal"
rm -r $(find $HOME/.sbt/boot -name "*-SNAPSHOT") || true
sbt -v -Dsbt.version=$BUILD_VERSION "++$SCALA_213; all $UTIL_TESTS; ++$SCALA_212; all $UTIL_TESTS; scripted actions/* source-dependencies/*1of3 dependency-management/*1of4 java/*"
# ./sbt -v "repoOverrideTest:scripted dependency-management/*"
./sbt -v "scripted source-dependencies/*"
# ./sbt -v "scripted project/*"
# - name: Build and test (5)
# if: ${{ matrix.jobtype == 5 }}
# shell: bash
# run: |
# ./sbt -v "++$SCALA_213!; test; ++$SCALA_3!; all utilControl/test utilRelation/test utilPosition/test"
# - name: Build and test (6)
# if: ${{ matrix.jobtype == 6 }}
# shell: bash
# run: |
# # build from fresh IO, LM, and Zinc
# BUILD_VERSION="1.5.0-SNAPSHOT"
# cd io
# sbt -v -Dsbt.build.version=${BUILD_VERSION} +publishLocal
# cd ../
# sbt -Dsbtlm.path=$HOME/work/sbt/sbt/librarymanagement -Dsbtzinc.path=$HOME/work/sbt/sbt/zinc -Dsbt.build.version=$BUILD_VERSION -Dsbt.build.fatal=false "+lowerUtils/publishLocal; {librarymanagement}/publishLocal; {zinc}/publishLocal; upperModules/publishLocal"
# rm -r $(find $HOME/.sbt/boot -name "*-SNAPSHOT") || true
# sbt -v -Dsbt.version=$BUILD_VERSION "++$SCALA_213; all $UTIL_TESTS; ++$SCALA_212; all $UTIL_TESTS; scripted actions/* source-dependencies/*1of3 dependency-management/*1of4 java/*"
- name: Build and test (7)
if: ${{ matrix.jobtype == 7 }}
shell: bash

1
.gitignore vendored
View File

@ -5,6 +5,7 @@ node_modules
vscode-sbt-scala/client/server
npm-debug.log
*.vsix
*_pid*.log
!sbt/src/server-test/completions/target
.big
.idea

View File

@ -1,5 +1,6 @@
version = 2.3.2
edition = 2019-10
version = 3.6.0
runner.dialect = scala3
maxColumn = 100
project.git = true
project.excludeFilters = [ "\\Wsbt-test\\W", "\\Winput_sources\\W", "\\Wcontraband-scala\\W" ]
@ -7,7 +8,8 @@ lineEndings = preserve
# https://docs.scala-lang.org/style/scaladoc.html recommends the JavaDoc style.
# scala/scala is written that way too https://github.com/scala/scala/blob/v2.12.2/src/library/scala/Predef.scala
docstrings = JavaDoc
docstrings.style = Asterisk
docstrings.wrap = false
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
@ -18,7 +20,7 @@ align.openParenCallSite = false
align.openParenDefnSite = false
# For better code clarity
danglingParentheses = true
danglingParentheses.preset = true
trailingCommas = preserve

271
build.sbt
View File

@ -10,14 +10,14 @@ import scala.util.Try
// ThisBuild settings take lower precedence,
// but can be shared across the multi projects.
ThisBuild / version := {
val v = "1.8.1-SNAPSHOT"
val v = "2.0.0-alpha6-SNAPSHOT"
nightlyVersion.getOrElse(v)
}
ThisBuild / version2_13 := "2.0.0-SNAPSHOT"
ThisBuild / version2_13 := "2.0.0-alpha1-SNAPSHOT"
ThisBuild / versionScheme := Some("early-semver")
ThisBuild / scalafmtOnCompile := !(Global / insideCI).value
ThisBuild / Test / scalafmtOnCompile := !(Global / insideCI).value
ThisBuild / turbo := true
// ThisBuild / turbo := true
ThisBuild / usePipelining := false // !(Global / insideCI).value
ThisBuild / organization := "org.scala-sbt"
ThisBuild / description := "sbt is an interactive build tool"
@ -53,6 +53,7 @@ Global / excludeLint := (Global / excludeLint).?.value.getOrElse(Set.empty)
Global / excludeLint += componentID
Global / excludeLint += scriptedBufferLog
Global / excludeLint += checkPluginCross
ThisBuild / evictionErrorLevel := Level.Info
def commonBaseSettings: Seq[Setting[_]] = Def.settings(
headerLicense := Some(
@ -180,8 +181,7 @@ def mimaSettingsSince(versions: Seq[String]): Seq[Def.Setting[_]] = Def settings
val scriptedSbtReduxMimaSettings = Def.settings(mimaPreviousArtifacts := Set())
lazy val sbtRoot: Project = (project in file("."))
// .enablePlugins(ScriptedPlugin)
.aggregate(nonRoots: _*)
.aggregate(allProjects.map(p => LocalProject(p.id)): _*)
.settings(
minimalSettings,
onLoadMessage := {
@ -256,49 +256,20 @@ lazy val bundledLauncherProj =
/* ** subproject declarations ** */
val collectionProj = (project in file("internal") / "util-collection")
val collectionProj = (project in file("util-collection"))
.dependsOn(utilPosition)
.settings(
name := "Collections",
testedBaseSettings,
utilCommonSettings,
Util.keywordsSettings,
name := "Collections",
libraryDependencies ++= Seq(sjsonNewScalaJson.value),
libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, major)) if major <= 12 => Seq()
case _ => Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0")
case _ => Seq(scalaPar)
}),
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// Added private[sbt] method to capture State attributes.
exclude[ReversedMissingMethodProblem]("sbt.internal.util.AttributeMap.setCond"),
// Dropped in favour of kind-projector's inline type lambda syntax
exclude[MissingClassProblem]("sbt.internal.util.TypeFunctions$P1of2"),
// Dropped in favour of kind-projector's polymorphic lambda literals
exclude[MissingClassProblem]("sbt.internal.util.Param"),
exclude[MissingClassProblem]("sbt.internal.util.Param$"),
// Dropped in favour of plain scala.Function, and its compose method
exclude[MissingClassProblem]("sbt.internal.util.Fn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.TypeFunctions.toFn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.Types.toFn1"),
// Instead of defining foldr in KList & overriding in KCons,
// it's now abstract in KList and defined in both KCons & KNil.
exclude[FinalMethodProblem]("sbt.internal.util.KNil.foldr"),
exclude[DirectAbstractMethodProblem]("sbt.internal.util.KList.foldr"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.Init*.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.Settings0.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.EvaluateSettings#INode.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.TypeFunctions.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.EvaluateSettings.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.Settings.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.EvaluateSettings#MixedNode.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.EvaluateSettings#BindNode.this"),
exclude[IncompatibleSignatureProblem](
"sbt.internal.util.EvaluateSettings#BindNode.dependsOn"
),
exclude[IncompatibleSignatureProblem]("sbt.internal.util.Types.some")
),
)
.dependsOn(utilPosition)
// Command line-related utilities.
val completeProj = (project in file("internal") / "util-complete")
@ -380,9 +351,9 @@ lazy val utilLogging = (project in file("internal") / "util-logging")
log4jCore,
disruptor,
sjsonNewScalaJson.value,
scalaReflect.value
),
libraryDependencies ++= Seq(scalacheck % "test", scalatest % "test"),
Compile / generateContrabands / contrabandCodecsDependencies := List(sjsonNewCore.value),
Compile / scalacOptions ++= (scalaVersion.value match {
case v if v.startsWith("2.12.") => List("-Ywarn-unused:-locals,-explicits,-privates")
case _ => List()
@ -491,7 +462,8 @@ lazy val testingProj = (project in file("testing"))
scalaXml.value,
testInterface,
launcherInterface,
sjsonNewScalaJson.value
sjsonNewScalaJson.value,
sjsonNewCore.value,
),
Compile / scalacOptions += "-Ywarn-unused:-locals,-explicits,-privates",
Compile / managedSourceDirectories +=
@ -523,7 +495,7 @@ lazy val testingProj = (project in file("testing"))
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestItemEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestStringEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestStringEvent.copy$default$1"),
//no reason to use
// no reason to use
exclude[DirectMissingMethodProblem]("sbt.JUnitXmlTestsListener.testSuite"),
)
)
@ -715,7 +687,7 @@ lazy val protocolProj = (project in file("protocol"))
.settings(
testedBaseSettings,
name := "Protocol",
libraryDependencies ++= Seq(sjsonNewScalaJson.value, ipcSocket),
libraryDependencies ++= Seq(sjsonNewScalaJson.value, sjsonNewCore.value, ipcSocket),
Compile / scalacOptions += "-Ywarn-unused:-locals,-explicits,-privates",
Compile / managedSourceDirectories +=
baseDirectory.value / "src" / "main" / "contraband-scala",
@ -757,7 +729,12 @@ lazy val commandProj = (project in file("main-command"))
.settings(
testedBaseSettings,
name := "Command",
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson.value, templateResolverApi),
libraryDependencies ++= Seq(
launcherInterface,
sjsonNewCore.value,
sjsonNewScalaJson.value,
templateResolverApi
),
Compile / scalacOptions += "-Ywarn-unused:-locals,-explicits,-privates",
Compile / managedSourceDirectories +=
baseDirectory.value / "src" / "main" / "contraband-scala",
@ -816,15 +793,8 @@ lazy val commandProj = (project in file("main-command"))
lazy val coreMacrosProj = (project in file("core-macros"))
.dependsOn(collectionProj)
.settings(
baseSettings :+ (crossScalaVersions := (scala212 :: scala213 :: Nil)),
testedBaseSettings :+ (crossScalaVersions := (scala212 :: scala213 :: Nil)),
name := "Core Macros",
libraryDependencies += {
if (scalaBinaryVersion.value == "3") {
"org.scala-lang" % "scala-compiler" % scala213
} else {
"org.scala-lang" % "scala-compiler" % scalaVersion.value
}
},
SettingKey[Boolean]("exportPipelining") := false,
mimaSettings,
)
@ -836,6 +806,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
commandProj,
stdTaskProj,
coreMacrosProj,
logicProj,
utilLogging,
utilCache,
utilRelation,
@ -909,12 +880,29 @@ lazy val zincLmIntegrationProj = (project in file("zinc-lm-integration"))
)
.configure(addSbtZincCompileCore, addSbtLmCore, addSbtLmIvyTest)
lazy val buildFileProj = (project in file("buildfile"))
.dependsOn(
mainSettingsProj,
)
.settings(
testedBaseSettings,
name := "build file",
libraryDependencies ++= Seq(scalaCompiler),
)
.configure(
addSbtIO,
addSbtLmCore,
addSbtLmIvy,
addSbtCompilerInterface,
addSbtZincCompile
)
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.dependsOn(
logicProj,
actionsProj,
buildFileProj,
mainSettingsProj,
runProj,
commandProj,
@ -934,7 +922,14 @@ lazy val mainProj = (project in file("main"))
}
},
libraryDependencies ++=
(Seq(scalaXml.value, launcherInterface, caffeine, lmCoursierShaded) ++ log4jModules),
(Seq(
scalaXml.value,
sjsonNewScalaJson.value,
sjsonNewCore.value,
launcherInterface,
caffeine,
lmCoursierShaded,
) ++ log4jModules),
libraryDependencies ++= (scalaVersion.value match {
case v if v.startsWith("2.12.") => List()
case _ => List(scalaPar)
@ -945,128 +940,8 @@ lazy val mainProj = (project in file("main"))
Test / testOptions += Tests
.Argument(TestFrameworks.ScalaCheck, "-minSuccessfulTests", "1000"),
SettingKey[Boolean]("usePipelining") := false,
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// New and changed methods on KeyIndex. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"),
// internal
exclude[IncompatibleMethTypeProblem]("sbt.internal.*"),
// Changed signature or removed private[sbt] methods
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"),
exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.StandardMain.shutdownHook"),
exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileBinaryFileInputs"),
exclude[DirectMissingMethodProblem]("sbt.nio.Keys.compileSourceFileInputs"),
exclude[MissingClassProblem]("sbt.internal.ResourceLoaderImpl"),
exclude[IncompatibleSignatureProblem]("sbt.internal.ConfigIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Inspect.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.ProjectIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.BuildIndex.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.server.BuildServerReporter.*"),
exclude[VirtualStaticMemberProblem]("sbt.internal.server.LanguageServerProtocol.*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.librarymanagement.IvyXml.*"),
exclude[IncompatibleSignatureProblem]("sbt.ScriptedPlugin.*Settings"),
exclude[IncompatibleSignatureProblem]("sbt.plugins.SbtPlugin.*Settings"),
// Removed private internal classes
exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$BottomClassLoader"),
exclude[MissingClassProblem](
"sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader$ResourceLoader"
),
exclude[MissingClassProblem]("sbt.internal.ReverseLookupClassLoaderHolder$ClassLoadingLock"),
exclude[MissingClassProblem](
"sbt.internal.ReverseLookupClassLoaderHolder$ReverseLookupClassLoader"
),
exclude[MissingClassProblem]("sbt.internal.LayeredClassLoaderImpl"),
exclude[MissingClassProblem]("sbt.internal.FileManagement"),
exclude[MissingClassProblem]("sbt.internal.FileManagement$"),
exclude[MissingClassProblem]("sbt.internal.FileManagement$CopiedFileTreeRepository"),
exclude[MissingClassProblem]("sbt.internal.server.LanguageServerReporter*"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks$"),
// false positives
exclude[DirectMissingMethodProblem]("sbt.plugins.IvyPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.JUnitXmlReportPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.Giter8TemplatePlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.JvmPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.plugins.SbtPlugin.requires"),
exclude[DirectMissingMethodProblem]("sbt.ResolvedClasspathDependency.apply"),
exclude[DirectMissingMethodProblem]("sbt.ClasspathDependency.apply"),
exclude[IncompatibleSignatureProblem]("sbt.plugins.SemanticdbPlugin.globalSettings"),
// File -> Source
exclude[DirectMissingMethodProblem]("sbt.Defaults.cleanFilesTask"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.resourceConfigPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.sourceConfigPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.configPaths"),
exclude[IncompatibleSignatureProblem]("sbt.Defaults.paths"),
exclude[IncompatibleSignatureProblem]("sbt.Keys.csrPublications"),
exclude[IncompatibleSignatureProblem](
"sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask"
),
exclude[IncompatibleSignatureProblem](
"sbt.coursierint.CoursierArtifactsTasks.coursierPublicationsTask"
),
exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.coursierConfiguration"),
exclude[IncompatibleSignatureProblem]("sbt.coursierint.LMCoursier.publicationsSetting"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inThisBuild"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inConfig"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inTask"),
exclude[IncompatibleSignatureProblem]("sbt.Project.inScope"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inThisBuild"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inConfig"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inTask"),
exclude[IncompatibleSignatureProblem]("sbt.ProjectExtra.inScope"),
exclude[MissingTypesProblem]("sbt.internal.Load*"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Load*"),
exclude[MissingTypesProblem]("sbt.internal.server.NetworkChannel"),
// IvyConfiguration was replaced by InlineIvyConfiguration in the generic
// signature, this does not break compatibility regardless of what
// cast a compiler might have inserted based on the old signature
// since we're returning the same values as before.
exclude[IncompatibleSignatureProblem]("sbt.Classpaths.mkIvyConfiguration"),
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.Definition*"),
exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.LanguageServerProtocol"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocol"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.warnInsecureProtocolInModules"),
exclude[MissingClassProblem]("sbt.internal.ExternalHooks*"),
// This seems to be a mima problem. The older constructor still exists but
// mima seems to incorrectly miss the secondary constructor that provides
// the binary compatible version.
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.NetworkChannel.this"),
exclude[IncompatibleSignatureProblem]("sbt.internal.DeprecatedContinuous.taskDefinitions"),
exclude[MissingClassProblem]("sbt.internal.SettingsGraph*"),
// Tasks include non-Files, but it's ok
exclude[IncompatibleSignatureProblem]("sbt.Defaults.outputConfigPaths"),
// private[sbt]
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedJarProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedDependencies0"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependenciesImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependencyJarsImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"),
exclude[DirectMissingMethodProblem]("sbt.StandardMain.cache"),
// internal logging apis,
exclude[IncompatibleSignatureProblem]("sbt.internal.LogManager*"),
exclude[MissingTypesProblem]("sbt.internal.RelayAppender"),
exclude[MissingClassProblem]("sbt.internal.TaskProgress$ProgressThread"),
// internal implementation
exclude[MissingClassProblem](
"sbt.internal.XMainConfiguration$ModifiedConfiguration$ModifiedAppProvider$ModifiedScalaProvider$"
),
// internal impl
exclude[IncompatibleSignatureProblem]("sbt.internal.Act.configIdent"),
exclude[IncompatibleSignatureProblem]("sbt.internal.Act.taskAxis"),
// private[sbt] method, used to call the correct sourcePositionMapper
exclude[DirectMissingMethodProblem]("sbt.Defaults.foldMappers"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.toAbsoluteSourceMapper"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.earlyArtifactPathSetting"),
exclude[MissingClassProblem]("sbt.internal.server.BuildServerReporter$"),
exclude[IncompatibleTemplateDefProblem]("sbt.internal.server.BuildServerReporter"),
exclude[MissingClassProblem]("sbt.internal.CustomHttp*"),
)
// mimaSettings,
// mimaBinaryIssueFilters ++= Vector(),
)
.configure(
addSbtIO,
@ -1108,13 +983,15 @@ lazy val sbtProj = (project in file("sbt-app"))
Tests.Argument(framework, s"-Dsbt.server.scala.version=${scalaVersion.value}") :: Nil
},
)
.configure(addSbtIO, addSbtCompilerBridge)
.configure(addSbtIO)
// addSbtCompilerBridge
lazy val serverTestProj = (project in file("server-test"))
.dependsOn(sbtProj % "compile->test", scriptedSbtReduxProj % "compile->test")
.settings(
testedBaseSettings,
crossScalaVersions := Seq(baseScalaVersion),
bspEnabled := false,
publish / skip := true,
// make server tests serial
Test / watchTriggers += baseDirectory.value.toGlob / "src" / "server-test" / **,
@ -1139,7 +1016,8 @@ lazy val serverTestProj = (project in file("server-test"))
|}
""".stripMargin
}
val file = (Test / target).value / "generated" / "src" / "test" / "scala" / "testpkg" / "TestProperties.scala"
val file =
(Test / target).value / "generated" / "src" / "test" / "scala" / "testpkg" / "TestProperties.scala"
IO.write(file, content)
file :: Nil
},
@ -1156,7 +1034,6 @@ lazy val sbtClientProj = (project in file("client"))
.dependsOn(commandProj)
.settings(
commonBaseSettings,
scalaVersion := "2.12.11", // The thin client does not build with 2.12.12
publish / skip := true,
name := "sbt-client",
mimaPreviousArtifacts := Set.empty,
@ -1354,6 +1231,7 @@ def scriptedTask(launch: Boolean): Def.Initialize[InputTask[Unit]] = Def.inputTa
(scriptedSbtReduxProj / Test / fullClasspathAsJars).value
.map(_.data)
.filterNot(_.getName.contains("scala-compiler")),
(bundledLauncherProj / Compile / packageBin).value,
streams.value.log
)
}
@ -1382,6 +1260,7 @@ def allProjects =
sbtProj,
bundledLauncherProj,
sbtClientProj,
buildFileProj,
) ++ lowerUtilProjects
// These need to be cross published to 2.12 and 2.13 for Zinc
@ -1402,13 +1281,12 @@ lazy val lowerUtilProjects =
lazy val nonRoots = allProjects.map(p => LocalProject(p.id))
ThisBuild / scriptedBufferLog := true
ThisBuild / scriptedPrescripted := { _ =>
}
ThisBuild / scriptedPrescripted := { _ => }
def otherRootSettings =
Seq(
scripted := scriptedTask(false).evaluated,
scriptedUnpublished := scriptedTask(false).evaluated,
scripted := scriptedTask(true).evaluated,
scriptedUnpublished := scriptedTask(true).evaluated,
scriptedSource := (sbtProj / sourceDirectory).value / "sbt-test",
scripted / watchTriggers += scriptedSource.value.toGlob / **,
scriptedUnpublished / watchTriggers := (scripted / watchTriggers).value,
@ -1471,21 +1349,24 @@ def customCommands: Seq[Setting[_]] = Seq(
import extracted._
val sv = get(scalaVersion)
val projs = structure.allProjectRefs
val ioOpt = projs find { case ProjectRef(_, id) => id == "ioRoot"; case _ => false }
val ioOpt = projs find { case ProjectRef(_, id) => id == "ioRoot"; case _ => false }
val utilOpt = projs find { case ProjectRef(_, id) => id == "utilRoot"; case _ => false }
val lmOpt = projs find { case ProjectRef(_, id) => id == "lmRoot"; case _ => false }
val lmOpt = projs find { case ProjectRef(_, id) => id == "lmRoot"; case _ => false }
val zincOpt = projs find { case ProjectRef(_, id) => id == "zincRoot"; case _ => false }
(ioOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(utilOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(lmOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(zincOpt map {
case ProjectRef(build, _) =>
val zincSv = get((ProjectRef(build, "zinc") / scalaVersion))
val csv = get((ProjectRef(build, "compilerBridge") / crossScalaVersions)).toList
(csv flatMap { bridgeSv =>
s"++$bridgeSv" :: ("{" + build.toString + "}compilerBridge/publishLocal") :: Nil
}) :::
List(s"++$zincSv", "{" + build.toString + "}/publishLocal")
(ioOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(utilOpt map { case ProjectRef(build, _) =>
"{" + build.toString + "}/publishLocal"
}).toList :::
(lmOpt map { case ProjectRef(build, _) =>
"{" + build.toString + "}/publishLocal"
}).toList :::
(zincOpt map { case ProjectRef(build, _) =>
val zincSv = get((ProjectRef(build, "zinc") / scalaVersion))
val csv = get((ProjectRef(build, "compilerBridge") / crossScalaVersions)).toList
(csv flatMap { bridgeSv =>
s"++$bridgeSv" :: ("{" + build.toString + "}compilerBridge/publishLocal") :: Nil
}) :::
List(s"++$zincSv", "{" + build.toString + "}/publishLocal")
}).getOrElse(Nil) :::
List(s"++$sv", "publishLocal") :::
state

View File

@ -0,0 +1,450 @@
package sbt
package internal
import dotty.tools.dotc.ast
import dotty.tools.dotc.ast.{ tpd, untpd }
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.config.ScalaSettings
import dotty.tools.dotc.core.Contexts.{ atPhase, Context }
import dotty.tools.dotc.core.{ Flags, Names, Phases, Symbols, Types }
import dotty.tools.dotc.Driver
import dotty.tools.dotc.parsing.Parsers.Parser
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.Run
import dotty.tools.dotc.util.SourceFile
import dotty.tools.io.{ PlainDirectory, Directory, VirtualDirectory, VirtualFile }
import dotty.tools.repl.AbstractFileClassLoader
import java.io.File
import java.net.URLClassLoader
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths, StandardOpenOption }
import java.security.MessageDigest
import scala.collection.JavaConverters.*
import scala.quoted.*
import sbt.io.Hash
/**
* - nonCpOptions - non-classpath options
* - classpath - classpath used for evaluation
* - backingDir - directory to save `*.class` files
* - mkReporter - an optional factory method to create a reporter
*/
class Eval(
    nonCpOptions: Seq[String],
    classpath: Seq[Path],
    backingDir: Option[Path],
    mkReporter: Option[() => Reporter]
):
  import Eval.*

  // Create the backing directory eagerly so generated *.class / *.cache files can be written.
  backingDir.foreach { dir =>
    Files.createDirectories(dir)
  }

  // Compiler output target: a real directory when persistent caching is enabled,
  // otherwise a purely in-memory virtual directory.
  private val outputDir =
    backingDir match
      case Some(dir) => PlainDirectory(Directory(dir.toString))
      case None => VirtualDirectory("output")

  // Classpath handed to the compiler; the backing dir comes first so previously
  // compiled snippets are resolvable.
  // NOTE(review): ':' is the POSIX path separator — confirm this is never built on Windows.
  private val classpathString = (backingDir.toList ++ classpath)
    .map(_.toString)
    .mkString(":")

  // Driver (and its compiler Context) is built lazily, on first evaluation.
  private lazy val driver: EvalDriver = new EvalDriver

  // Diagnostics reporter; falls back to a storing reporter when none is supplied.
  private lazy val reporter = mkReporter match
    case Some(fn) => fn()
    case None => EvalReporter.store
  /**
   * Thin wrapper over the Scala 3 compiler `Driver` that prepares a fresh compiler
   * `Context` configured with this Eval's options, classpath, output directory and
   * reporter, plus a `Compiler` instance to run compilations with.
   */
  final class EvalDriver extends Driver:
    import dotty.tools.dotc.config.Settings.Setting._
    val compileCtx0 = initCtx.fresh
    // "dummy.scala" satisfies the driver's requirement of at least one source argument;
    // real sources are supplied later via Run.compileSources.
    val options = nonCpOptions ++ Seq("-classpath", classpathString, "dummy.scala")
    val compileCtx1 = setup(options.toArray, compileCtx0) match
      case Some((_, ctx)) => ctx
      case _ => sys.error(s"initialization failed for $options")
    // Redirect compiler output to our (plain or virtual) directory and install the reporter.
    val compileCtx2 = compileCtx1.fresh
      .setSetting(
        compileCtx1.settings.outputDir,
        outputDir
      )
      .setReporter(reporter)
    val compileCtx = compileCtx2
    val compiler = newCompiler(using compileCtx)
  end EvalDriver
  /** Evaluates `expression` with no imports, optionally ascribing the expected type `tpeName`. */
  def eval(expression: String, tpeName: Option[String]): EvalResult =
    eval(expression, noImports, tpeName, "<setting>", Eval.DefaultStartLine)

  /** Evaluates `expression` with no imports, letting the compiler infer the type. */
  def evalInfer(expression: String): EvalResult =
    eval(expression, noImports, None, "<setting>", Eval.DefaultStartLine)

  /** Evaluates `expression` under the given `imports`, letting the compiler infer the type. */
  def evalInfer(expression: String, imports: EvalImports): EvalResult =
    eval(expression, imports, None, "<setting>", Eval.DefaultStartLine)
  /**
   * Compiles and evaluates a single Scala expression.
   *
   * The expression is wrapped in a synthetic `object` whose `$WrapValName` method
   * returns the expression's value; the synthetic source is compiled, the expression's
   * type is extracted after the typer phase, and a deferred accessor to the value is
   * returned (the value itself is only computed when the accessor is invoked).
   *
   * @param expression the Scala expression to evaluate
   * @param imports    import statements prepended to the synthetic source
   * @param tpeName    expected type ascription, or None to let the compiler infer it
   * @param srcName    name reported for the synthetic source file (e.g. "<setting>")
   * @param line       starting line number for the expression.
   *                   NOTE(review): `line` is not referenced in this visible body
   *                   (the start line is recomputed from the header) — confirm intended.
   * @return the expression's type, the generated files, and a lazy value accessor
   */
  def eval(
      expression: String,
      imports: EvalImports,
      tpeName: Option[String],
      srcName: String,
      line: Int
  ): EvalResult =
    val ev = new EvalType[String]:
      override def makeSource(moduleName: String): SourceFile =
        // Optional ": Tpe" ascription on the wrapper method.
        val returnType = tpeName match
          case Some(tpe) => s": $tpe"
          case _ => ""
        val header =
          imports.strings.mkString("\n") +
            s"""
               |object $moduleName {
               |  def $WrapValName${returnType} = {""".stripMargin
        val contents = s"""$header
                          |$expression
                          |  }
                          |}
                          |""".stripMargin
        // Offset so diagnostics point at the user's expression, not the synthetic header.
        val startLine = header.linesIterator.toList.size
        EvalSourceFile(srcName, startLine, contents)
      override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): String =
        // The wrapper method's result type is only known after the typer has run.
        atPhase(Phases.typerPhase.next) {
          (new TypeExtractor).getType(unit.tpdTree)
        }
      override def read(file: Path): String =
        String(Files.readAllBytes(file), StandardCharsets.UTF_8)
      override def write(value: String, file: Path): Unit =
        Files.write(
          file,
          value.getBytes(StandardCharsets.UTF_8),
          StandardOpenOption.CREATE,
          StandardOpenOption.TRUNCATE_EXISTING
        )
      override def extraHash: String = ""
    val inter = evalCommon[String](expression :: Nil, imports, tpeName, ev)
    // Defer actual class loading / value computation until a ClassLoader is supplied.
    val valueFn = (cl: ClassLoader) => getValue[Any](inter.enclosingModule, inter.loader(cl))
    EvalResult(
      tpe = inter.extra,
      getValue = valueFn,
      generated = inter.generated,
    )
  end eval
  /** Evaluates `definitions` with no extra hash contribution to the cache key. */
  def evalDefinitions(
      definitions: Seq[(String, scala.Range)],
      imports: EvalImports,
      srcName: String,
      valTypes: Seq[String],
  ): EvalDefinitions =
    evalDefinitions(definitions, imports, srcName, valTypes, "")
  /**
   * Compiles a set of Scala definitions inside a synthetic `object` and returns
   * handles to the resulting vals.
   *
   * @param definitions the definition sources, each paired with a line range.
   *                    NOTE(review): the `scala.Range` component is not used in this
   *                    visible body — confirm intended.
   * @param imports     import statements prepended to the synthetic source
   * @param srcName     name reported for the synthetic source file
   * @param valTypes    the types of vals to extract from the compiled definitions
   * @param extraHash   extra data folded into the cache key (see evalCommon)
   */
  def evalDefinitions(
      definitions: Seq[(String, scala.Range)],
      imports: EvalImports,
      srcName: String,
      valTypes: Seq[String],
      extraHash: String,
  ): EvalDefinitions =
    // println(s"""evalDefinitions(definitions = $definitions)
    //  backingDir = $backingDir,
    // """)
    require(definitions.nonEmpty, "definitions to evaluate cannot be empty.")
    // Capture before the anonymous class below shadows the name with its `extraHash` member.
    val extraHash0 = extraHash
    val ev = new EvalType[Seq[String]]:
      override def makeSource(moduleName: String): SourceFile =
        val header =
          imports.strings.mkString("\n") +
            s"""
               |object $moduleName {""".stripMargin
        val contents =
          s"""$header
             |${definitions.map(_._1).mkString("\n")}
             |}
             |""".stripMargin
        // Offset so diagnostics point at the user's definitions, not the synthetic header.
        val startLine = header.linesIterator.toList.size
        EvalSourceFile(srcName, startLine, contents)
      override def extract(run: Run, unit: CompilationUnit)(using ctx: Context): Seq[String] =
        // Val names are only resolvable after the typer has run.
        atPhase(Phases.typerPhase.next) {
          (new ValExtractor(valTypes.toSet)).getVals(unit.tpdTree)
        }(using run.runContext)
      override def read(file: Path): Seq[String] =
        new String(Files.readAllBytes(file), StandardCharsets.UTF_8).linesIterator.toList
      override def write(value: Seq[String], file: Path): Unit =
        Files.write(
          file,
          value.mkString("\n").getBytes(StandardCharsets.UTF_8),
          StandardOpenOption.CREATE,
          StandardOpenOption.TRUNCATE_EXISTING
        )
      override def extraHash: String = extraHash0
    val inter = evalCommon[Seq[String]](definitions.map(_._1), imports, tpeName = Some(""), ev)
    EvalDefinitions(inter.loader, inter.generated, inter.enclosingModule, inter.extra.reverse)
  end evalDefinitions
  /**
   * Shared compile-or-load path for expressions and definitions.
   *
   * Derives a module name from a SHA digest of the content, the optional type name,
   * and `ev.extraHash`; when a backing directory already contains a compiled class
   * for that module, the cached metadata is read back instead of recompiling.
   *
   * NOTE(review): `imports` are NOT folded into the digest — two evaluations that
   * differ only in imports would share a cache entry; confirm intended.
   */
  private[this] def evalCommon[A](
      content: Seq[String],
      imports: EvalImports,
      tpeName: Option[String],
      ev: EvalType[A],
  ): EvalIntermediate[A] =
    import Eval.*
    // This is a hot path.
    val digester = MessageDigest.getInstance("SHA")
    content.foreach { c =>
      digester.update(bytes(c))
    }
    tpeName.foreach { tpe =>
      digester.update(bytes(tpe))
    }
    digester.update(bytes(ev.extraHash))
    val d = digester.digest()
    val hash = Hash.toHex(d)
    val moduleName = makeModuleName(hash)
    val (extra, loader) = backingDir match
      // Cache hit: the class file already exists on disk; load it and its cached metadata.
      case Some(backing) if classExists(backing, moduleName) =>
        val loader = (parent: ClassLoader) =>
          (new URLClassLoader(Array(backing.toUri.toURL), parent): ClassLoader)
        val extra = ev.read(cacheFile(backing, moduleName))
        (extra, loader)
      // Cache miss (or no backing dir): compile now.
      case _ => compileAndLoad(ev, moduleName)
    val generatedFiles = getGeneratedFiles(moduleName)
    EvalIntermediate(
      extra = extra,
      loader = loader,
      generated = generatedFiles,
      enclosingModule = moduleName,
    )
/** Path of the cache file holding the extracted type/definition info for `moduleName`. */
private[this] def cacheFile(base: Path, moduleName: String): Path =
  base.resolve(s"$moduleName.cache")
/**
 * Compiles the synthetic module produced by `ev.makeSource`, fails fast if the
 * reporter recorded errors, extracts `ev`'s result from the typed unit, and
 * persists it to the cache dir (when configured).
 *
 * @return the extracted info and a function building a class loader over the
 *         in-memory `outputDir` that holds the compiled classes
 */
private[this] def compileAndLoad[A](
    ev: EvalType[A],
    moduleName: String,
): (A, ClassLoader => ClassLoader) =
  given rootCtx: Context = driver.compileCtx
  val run = driver.compiler.newRun
  val source = ev.makeSource(moduleName)
  run.compileSources(source :: Nil)
  // Abort before touching run.units if compilation reported errors.
  checkError("an error in expression")
  val unit = run.units.head
  val extra: A = ev.extract(run, unit)
  // Persist the extracted info so a later run with identical inputs skips compilation.
  backingDir.foreach { backing =>
    ev.write(extra, cacheFile(backing, moduleName))
  }
  val loader = (parent: ClassLoader) => AbstractFileClassLoader(outputDir, parent)
  (extra, loader)
/**
 * Internal result of `evalCommon`: the extracted info (`extra`), a function
 * producing a class loader for the compiled classes, the generated class and
 * cache files, and the synthetic enclosing module's name.
 */
private[this] final class EvalIntermediate[A](
    val extra: A,
    val loader: ClassLoader => ClassLoader,
    val generated: Seq[Path],
    val enclosingModule: String,
)
/** True when a compiled `name.class` already exists under `dir` (i.e. a cache hit). */
private[this] def classExists(dir: Path, name: String): Boolean =
  Files.exists(dir.resolve(s"$name.class"))
/**
 * Lists the files generated for `moduleName` in the backing directory
 * (compiled classes and the `.cache` file); empty when no cache dir is set.
 *
 * Fix: `Files.list` returns a Stream backed by an open directory handle that
 * must be closed; the previous version leaked it until GC.
 */
private[this] def getGeneratedFiles(moduleName: String): Seq[Path] =
  backingDir match
    case Some(dir) =>
      val stream = Files.list(dir)
      try
        asScala(
          stream
            .filter(!Files.isDirectory(_))
            .filter(_.getFileName.toString.contains(moduleName))
            .iterator
        ).toList
      finally stream.close()
    case None => Nil
private[this] def makeModuleName(hash: String): String = "$Wrap" + hash.take(10)
/** Throws an [[EvalException]] carrying the first reported error, if any were reported. */
private[this] def checkError(label: String)(using ctx: Context): Unit =
  val reporter = ctx.reporter
  if reporter.hasErrors then
    throw new EvalException(label + ": " + reporter.allErrors.head.toString)
end Eval
/**
 * Companion of [[Eval]]: factory overloads, the `Eval[A](expression)` macro,
 * and the helper types used to wrap, compile, and extract evaluated code.
 */
object Eval:
  // Default line offset applied when no explicit start line is given.
  private[sbt] val DefaultStartLine = 0

  /** An empty import set, shared to avoid re-allocating one per evaluation. */
  lazy val noImports = EvalImports(Nil)

  /** Eval over the current JVM classpath with no on-disk cache and the default reporter. */
  def apply(): Eval =
    new Eval(Nil, currentClasspath, None, None)

  /** Eval with a custom reporter factory and no on-disk cache. */
  def apply(mkReporter: () => Reporter): Eval =
    new Eval(Nil, currentClasspath, None, Some(mkReporter))

  /** Eval that caches compiled classes and extracted metadata under `backingDir`. */
  def apply(
      backingDir: Path,
      mkReporter: () => Reporter,
  ): Eval =
    new Eval(Nil, currentClasspath, Some(backingDir), Some(mkReporter))

  /** Eval with extra (non-classpath) compiler options and an on-disk cache. */
  def apply(
      nonCpOptions: Seq[String],
      backingDir: Path,
      mkReporter: () => Reporter,
  ): Eval =
    new Eval(nonCpOptions, currentClasspath, Some(backingDir), Some(mkReporter))

  /** Macro entry point: compiles `expression` at runtime and casts the result to `A`. */
  inline def apply[A](expression: String): A = ${ evalImpl[A]('{ expression }) }

  private def thisClassLoader = this.getClass.getClassLoader

  /** Macro implementation for `apply[A]`: expands to an `Eval().eval(...)` call typed as `A`. */
  def evalImpl[A: Type](expression: Expr[String])(using qctx: Quotes): Expr[A] =
    import quotes.reflect._
    val sym = TypeRepr.of[A].typeSymbol
    // The expected type's full name is passed so `eval` can ascribe the expression.
    val fullName = Expr(sym.fullName)
    '{
      Eval().eval($expression, Some($fullName)).getValue(thisClassLoader).asInstanceOf[A]
    }

  /** The JVM's `java.class.path` entries as paths; empty if the property is unset. */
  def currentClasspath: Seq[Path] =
    val urls = sys.props
      .get("java.class.path")
      .map(_.split(File.pathSeparator))
      .getOrElse(Array.empty[String])
    urls.toVector.map(Paths.get(_))

  /** UTF-8 bytes of `s`; used when hashing evaluation inputs. */
  def bytes(s: String): Array[Byte] = s.getBytes("UTF-8")

  /** The name of the synthetic val in the synthetic module that an expression is assigned to. */
  private[sbt] final val WrapValName = "$sbtdef"

  /**
   * A SourceFile that shifts line numbers by `startLine`, so reported positions
   * map back to the original source rather than the synthetic wrapper.
   */
  class EvalSourceFile(name: String, startLine: Int, contents: String)
      extends SourceFile(
        new VirtualFile(name, contents.getBytes(StandardCharsets.UTF_8)),
        contents.toArray
      ):
    override def lineToOffset(line: Int): Int = super.lineToOffset((line + startLine) max 0)
    override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) - startLine
  end EvalSourceFile

  /** Strategy for one evaluation mode (expression vs. definitions): wrapping, extraction, caching. */
  trait EvalType[A]:
    /** Builds the synthetic source wrapping user code inside module `moduleName`. */
    def makeSource(moduleName: String): SourceFile

    /** Extracts additional information after the compilation unit is evaluated. */
    def extract(run: Run, unit: CompilationUnit)(using ctx: Context): A

    /** Deserializes the extra information for unchanged inputs from a cache file. */
    def read(file: Path): A

    /**
     * Serializes the extra information to a cache file, where it can be `read` back if inputs
     * haven't changed.
     */
    def write(value: A, file: Path): Unit

    /** Extra information to include in the hash'd object name to help avoid collisions. */
    def extraHash: String
  end EvalType

  /** Tree traverser that finds the declared type of the synthetic `WrapValName` definition. */
  class TypeExtractor extends tpd.TreeTraverser:
    private[this] var result = ""
    def getType(t: tpd.Tree)(using ctx: Context): String =
      result = ""
      this((), t)
      result
    override def traverse(tree: tpd.Tree)(using ctx: Context): Unit =
      tree match
        // The wrapper val compiles to a DefDef; record its type's printed form.
        case tpd.DefDef(name, _, tpt, _) if name.toString == WrapValName =>
          result = tpt.typeOpt.show
        case t: tpd.Template => this((), t.body)
        case t: tpd.PackageDef => this((), t.stats)
        case t: tpd.TypeDef => this((), t.rhs)
        case _ => ()
  end TypeExtractor

  /**
   * Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of
   * one of `tpes`.
   */
  class ValExtractor(tpes: Set[String]) extends tpd.TreeTraverser:
    private[this] var vals = List[String]()
    def getVals(t: tpd.Tree)(using ctx: Context): List[String] =
      vals = Nil
      traverse(t)
      vals
    /** True when `tpe` has a base class whose full name is in `tpes`. */
    def isAcceptableType(tpe: Types.Type)(using ctx: Context): Boolean =
      tpe.baseClasses.exists { sym =>
        tpes.contains(sym.fullName.toString)
      }
    def isTopLevelModule(sym: Symbols.Symbol)(using ctx: Context): Boolean =
      (sym is Flags.Module) && (sym.owner is Flags.ModuleClass)
    override def traverse(tree: tpd.Tree)(using ctx: Context): Unit =
      tree match
        case tpd.ValDef(name, tpt, _)
            if isTopLevelModule(tree.symbol.owner) && isAcceptableType(tpt.tpe) =>
          val str = name.mangledString
          // Lazy vals carry a "$lzy" suffix in their mangled name; strip it
          // to recover the source-level name.
          vals ::= (
            if str.contains("$lzy") then str.take(str.indexOf("$"))
            else str
          )
        case t: tpd.Template => this((), t.body)
        case t: tpd.PackageDef => this((), t.stats)
        case t: tpd.TypeDef => this((), t.rhs)
        case _ => ()
  end ValExtractor

  /**
   * Gets the value of the expression wrapped in module `objectName`, which is accessible via
   * `loader`. The module name should not include the trailing `$`.
   */
  def getValue[A](objectName: String, loader: ClassLoader): A =
    val module = getModule(objectName, loader)
    val accessor = module.getClass.getMethod(WrapValName)
    val value = accessor.invoke(module)
    value.asInstanceOf[A]

  /**
   * Gets the top-level module `moduleName` from the provided class `loader`. The module name should
   * not include the trailing `$`.
   */
  def getModule(moduleName: String, loader: ClassLoader): Any =
    val clazz = Class.forName(moduleName + "$", true, loader)
    clazz.getField("MODULE$").get(null)
end Eval
/**
 * The result of evaluating a single Scala expression: the expression's type
 * (`tpe`), a function that loads and returns its value given a parent class
 * loader, and the files generated by compilation.
 */
final class EvalResult(
    val tpe: String,
    val getValue: ClassLoader => Any,
    val generated: Seq[Path],
)
/**
* The result of evaluating a group of Scala definitions. The definitions are wrapped in an
* auto-generated, top-level module named `enclosingModule`. `generated` contains the compiled
* classes and cache files related to the definitions. A new class loader containing the module may
* be obtained from `loader` by passing the parent class loader providing the classes from the
* classpath that the definitions were compiled against. The list of vals with the requested types
* is `valNames`. The values for these may be obtained by providing the parent class loader to
* `values` as is done with `loader`.
*/
final class EvalDefinitions(
    val loader: ClassLoader => ClassLoader,
    val generated: Seq[Path],
    val enclosingModule: String,
    val valNames: Seq[String]
):
  /** Loads `enclosingModule` through `loader(parent)` and returns the value of each val in `valNames`. */
  def values(parent: ClassLoader): Seq[Any] =
    val module = Eval.getModule(enclosingModule, loader(parent))
    valNames.map(n => module.getClass.getMethod(n).invoke(module))
end EvalDefinitions
/** Thrown when compilation of evaluated code reports errors. */
final class EvalException(msg: String) extends RuntimeException(msg)

/** The import statements to place above evaluated code. */
final class EvalImports(val strings: Seq[String])

View File

@ -0,0 +1,19 @@
package sbt
package internal
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.reporting.ConsoleReporter
import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.reporting.StoreReporter
/** Base reporter type for build-definition evaluation. */
abstract class EvalReporter extends Reporter
object EvalReporter:
  /** A reporter that forwards diagnostics to the console. */
  def console: EvalReporter = ForwardingReporter(ConsoleReporter())
  /** A reporter that buffers diagnostics in memory for later inspection. */
  def store: EvalReporter = ForwardingReporter(StoreReporter())
end EvalReporter
/** An [[EvalReporter]] that delegates every diagnostic to `delegate`. */
class ForwardingReporter(delegate: Reporter) extends EvalReporter:
  def doReport(dia: Diagnostic)(using Context): Unit = delegate.doReport(dia)
end ForwardingReporter

View File

@ -18,15 +18,16 @@ import sbt.internal.util.{
}
import java.io.File
import compiler.{ Eval, EvalImports }
import java.nio.file.Path
import sbt.internal.util.complete.DefaultParsers.validID
import Def.{ ScopedKey, Setting }
import Scope.GlobalScope
import sbt.SlashSyntax0._
import sbt.SlashSyntax0.*
import sbt.internal.parser.SbtParser
import sbt.io.IO
import scala.collection.JavaConverters._
import scala.collection.JavaConverters.*
import xsbti.VirtualFile
import xsbti.VirtualFileRef
/**
* This file is responsible for compiling the .sbt files used to configure sbt builds.
@ -39,9 +40,12 @@ import scala.collection.JavaConverters._
*/
private[sbt] object EvaluateConfigurations {
type LazyClassLoaded[T] = ClassLoader => T
type LazyClassLoaded[A] = ClassLoader => A
private[sbt] case class TrackedEvalResult[T](generated: Seq[File], result: LazyClassLoaded[T])
private[sbt] case class TrackedEvalResult[A](
generated: Seq[Path],
result: LazyClassLoaded[A]
)
/**
* This represents the parsed expressions in a build sbt, as well as where they were defined.
@ -61,9 +65,13 @@ private[sbt] object EvaluateConfigurations {
* return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has
* raw sbt-types that can be accessed and used.
*/
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = {
val loadFiles = srcs.sortBy(_.getName) map { src =>
evaluateSbtFile(eval, src, IO.readLines(src), imports, 0)
def apply(
eval: Eval,
srcs: Seq[VirtualFile],
imports: Seq[String],
): LazyClassLoaded[LoadedSbtFile] = {
val loadFiles = srcs.sortBy(_.name) map { src =>
evaluateSbtFile(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0)
}
loader =>
loadFiles.foldLeft(LoadedSbtFile.empty) { (loaded, load) =>
@ -78,10 +86,10 @@ private[sbt] object EvaluateConfigurations {
*/
def evaluateConfiguration(
eval: Eval,
src: File,
src: VirtualFile,
imports: Seq[String]
): LazyClassLoaded[Seq[Setting[_]]] =
evaluateConfiguration(eval, src, IO.readLines(src), imports, 0)
evaluateConfiguration(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0)
/**
* Parses a sequence of build.sbt lines into a [[ParsedFile]]. The result contains
@ -90,7 +98,7 @@ private[sbt] object EvaluateConfigurations {
* @param builtinImports The set of import statements to add to those parsed in the .sbt file.
*/
private[this] def parseConfiguration(
file: File,
file: VirtualFileRef,
lines: Seq[String],
builtinImports: Seq[String],
offset: Int
@ -115,7 +123,7 @@ private[sbt] object EvaluateConfigurations {
*/
def evaluateConfiguration(
eval: Eval,
file: File,
file: VirtualFileRef,
lines: Seq[String],
imports: Seq[String],
offset: Int
@ -136,37 +144,40 @@ private[sbt] object EvaluateConfigurations {
*/
private[sbt] def evaluateSbtFile(
eval: Eval,
file: File,
file: VirtualFileRef,
lines: Seq[String],
imports: Seq[String],
offset: Int
): LazyClassLoaded[LoadedSbtFile] = {
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
val name = file.getPath
val name = file.id
val parsed = parseConfiguration(file, lines, imports, offset)
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty)
else {
val definitions =
evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
val imp = BuildUtilLite.importAllRoot(definitions.enclosingModule :: Nil)
(imp, DefinedSbtValues(definitions))
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map {
case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
val dslEntries = parsed.settings map { case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
}
eval.unlinkDeferred()
// TODO:
// eval.unlinkDeferred()
// Tracks all the files we generated from evaluating the sbt file.
val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated))
val allGeneratedFiles: Seq[Path] = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader => {
val projects = {
val compositeProjects = definitions.values(loader).collect {
case p: CompositeProject => p
val compositeProjects = definitions.values(loader).collect { case p: CompositeProject =>
p
}
CompositeProject.expand(compositeProjects).map(resolveBase(file.getParentFile, _))
// todo: resolveBase?
CompositeProject.expand(compositeProjects) // .map(resolveBase(file.getParentFile, _))
}
val (settingsRaw, manipulationsRaw) =
dslEntries map (_.result apply loader) partition {
@ -177,8 +188,8 @@ private[sbt] object EvaluateConfigurations {
case DslEntry.ProjectSettings(settings) => settings
case _ => Nil
}
val manipulations = manipulationsRaw map {
case DslEntry.ProjectManipulation(f) => f
val manipulations = manipulationsRaw map { case DslEntry.ProjectManipulation(f) =>
f
}
// TODO -get project manipulations.
new LoadedSbtFile(
@ -193,7 +204,8 @@ private[sbt] object EvaluateConfigurations {
}
/** move a project to be relative to this file after we've evaluated it. */
private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base))
private[this] def resolveBase(f: File, p: Project) =
p.copy(base = IO.resolve(f, p.base))
def addOffset(offset: Int, lines: Seq[(String, Int)]): Seq[(String, Int)] =
lines.map { case (s, i) => (s, i + offset) }
@ -205,7 +217,8 @@ private[sbt] object EvaluateConfigurations {
* The name of the class we cast DSL "setting" (vs. definition) lines to.
*/
val SettingsDefinitionName = {
val _ = classOf[DslEntry] // this line exists to try to provide a compile-time error when the following line needs to be changed
val _ =
classOf[DslEntry] // this line exists to try to provide a compile-time error when the following line needs to be changed
"sbt.internal.DslEntry"
}
@ -230,17 +243,18 @@ private[sbt] object EvaluateConfigurations {
): TrackedEvalResult[DslEntry] = {
// TODO - Should we try to namespace these between.sbt files? IF they hash to the same value, they may actually be
// exactly the same setting, so perhaps we don't care?
val result = try {
eval.eval(
expression,
imports = new EvalImports(imports, name),
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start
)
} catch {
case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage)
}
val result =
try {
eval.eval(
expression,
imports = new EvalImports(imports.map(_._1)), // name
srcName = name,
tpeName = Some(SettingsDefinitionName),
line = range.start
)
} catch {
case e: EvalException => throw new MessageOnlyException(e.getMessage)
}
// TODO - keep track of configuration classes defined.
TrackedEvalResult(
result.generated,
@ -283,14 +297,13 @@ private[sbt] object EvaluateConfigurations {
* anything on the right of the tuple is a scala expression (definition or setting).
*/
private[sbt] def splitExpressions(
file: File,
file: VirtualFileRef,
lines: Seq[String]
): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
): (Seq[(String, Int)], Seq[(String, LineRange)]) =
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
(split.imports, split.settings)
}
private[this] def splitSettingsDefinitions(
lines: Seq[(String, LineRange)]
@ -315,29 +328,41 @@ private[sbt] object EvaluateConfigurations {
name: String,
imports: Seq[(String, Int)],
definitions: Seq[(String, LineRange)],
file: Option[File]
): compiler.EvalDefinitions = {
file: Option[VirtualFileRef],
): EvalDefinitions = {
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(
convertedRanges,
new EvalImports(imports, name),
new EvalImports(imports.map(_._1)), // name
name,
file,
// file,
extractedValTypes
)
}
}
object BuildUtilLite:
/** Import just the names. */
def importNames(names: Seq[String]): Seq[String] =
if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil
/** Prepend `_root_` and import just the names. */
def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName)
/** Wildcard import `._` for all values. */
def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" })
def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName)
def rootedName(s: String): String = if (s contains '.') "_root_." + s else s
end BuildUtilLite
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = {
val pairs = data.scopes flatMap (
scope =>
data.data(scope).entries collect {
case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
}
)
val pairs = data.scopes flatMap (scope =>
data.data(scope).entries collect { case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
}
)
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
@ -372,29 +397,38 @@ object Index {
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(
duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", "")
duplicates map { case (k, tps) =>
"'" + k + "' (" + tps.mkString(", ") + ")"
} mkString ("Some keys were defined with the same name but different types: ", ", ", "")
)
}
private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
private[this] type TriggerMap = collection.mutable.HashMap[Task[Any], Seq[Task[Any]]]
def triggers(ss: Settings[Scope]): Triggers[Task] = {
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
ss.data.values foreach (
_.entries foreach {
case AttributeEntry(_, value: Task[_]) =>
case AttributeEntry(_, value: Task[Any]) =>
val as = value.info.attributes
update(runBefore, value, as get Keys.runBefore)
update(triggeredBy, value, as get Keys.triggeredBy)
update(runBefore, value, as.get(Def.runBefore.asInstanceOf))
update(triggeredBy, value, as.get(Def.triggeredBy.asInstanceOf))
case _ => ()
}
)
val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ())
val onComplete = (GlobalScope / Def.onComplete) get ss getOrElse (() => ())
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
}
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =
for (tasks <- tasksOpt; task <- tasks)
private[this] def update(
map: TriggerMap,
base: Task[Any],
tasksOpt: Option[Seq[Task[Any]]]
): Unit =
for {
tasks <- tasksOpt
task <- tasks
}
map(task) = base +: map.getOrElse(task, Nil)
}

View File

@ -10,6 +10,7 @@ package internal
import Def.Setting
import java.io.File
import java.nio.file.Path
/**
* Represents the exported contents of a .sbt file. Currently, that includes the list of settings,
@ -23,7 +24,7 @@ private[sbt] final class LoadedSbtFile(
// TODO - we may want to expose a simpler interface on top of here for the set command,
// rather than what we have now...
val definitions: DefinedSbtValues,
val generatedFiles: Seq[File]
val generatedFiles: Seq[Path]
) {
// We still use merge for now. We track originating sbt file in an alternative manner.
def merge(o: LoadedSbtFile): LoadedSbtFile =
@ -44,7 +45,7 @@ private[sbt] final class LoadedSbtFile(
* Represents the `val`/`lazy val` definitions defined within a build.sbt file
* which we can reference in other settings.
*/
private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinitions]) {
private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[EvalDefinitions]) {
def values(parent: ClassLoader): Seq[Any] =
sbtFiles flatMap (_ values parent)
@ -63,12 +64,12 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit
v <- file.valNames
} yield s"import ${m}.`${v}`"
}
def generated: Seq[File] =
sbtFiles flatMap (_.generated)
def generated: Seq[Path] =
sbtFiles.flatMap(_.generated)
// Returns a classpath for the generated .sbt files.
def classpath: Seq[File] =
generated.map(_.getParentFile).distinct
def classpath: Seq[Path] =
generated.map(_.getParent()).distinct
/**
* Joins the defines of this build.sbt with another.
@ -81,7 +82,7 @@ private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[compiler.EvalDefinit
private[sbt] object DefinedSbtValues {
/** Construct a DefinedSbtValues object directly from the underlying representation. */
def apply(eval: compiler.EvalDefinitions): DefinedSbtValues =
def apply(eval: EvalDefinitions): DefinedSbtValues =
new DefinedSbtValues(Seq(eval))
/** Construct an empty value object. */
@ -91,6 +92,6 @@ private[sbt] object DefinedSbtValues {
private[sbt] object LoadedSbtFile {
/** Represents an empty .sbt file: no Projects, imports, or settings.*/
/** Represents an empty .sbt file: no Projects, imports, or settings. */
def empty = new LoadedSbtFile(Nil, Nil, Nil, Nil, DefinedSbtValues.empty, Nil)
}

View File

@ -0,0 +1,311 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
package parser
import sbt.internal.util.{ LineRange, MessageOnlyException }
import java.io.File
import java.nio.charset.StandardCharsets
import java.util.concurrent.ConcurrentHashMap
import sbt.internal.parser.SbtParser._
import scala.compat.Platform.EOL
import dotty.tools.dotc.ast.Trees.Lazy
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.ast.untpd.Tree
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.Driver
import dotty.tools.dotc.util.NoSourcePosition
import dotty.tools.dotc.util.SourceFile
import dotty.tools.dotc.util.SourcePosition
import dotty.tools.io.VirtualDirectory
import dotty.tools.io.VirtualFile
import dotty.tools.dotc.parsing.*
import dotty.tools.dotc.reporting.ConsoleReporter
import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.reporting.StoreReporter
import scala.util.Random
import scala.util.{ Failure, Success }
import xsbti.VirtualFileRef
import dotty.tools.dotc.printing.Printer
import dotty.tools.dotc.config.Printers
private[sbt] object SbtParser:
val END_OF_LINE_CHAR = '\n'
val END_OF_LINE = String.valueOf(END_OF_LINE_CHAR)
private[parser] val NOT_FOUND_INDEX = -1
private[sbt] val FAKE_FILE = VirtualFileRef.of("fake") // new File("fake")
private[parser] val XML_ERROR = "';' expected but 'val' found."
private val XmlErrorMessage =
"""Probably problem with parsing xml group, please add parens or semicolons:
|Replace:
|val xmlGroup = <a/><b/>
|with:
|val xmlGroup = (<a/><b/>)
|or
|val xmlGroup = <a/><b/>;
""".stripMargin
private final val defaultClasspath =
sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil)
/**
 * True when `ident` parses as a plain Scala identifier. Tested by parsing a
 * tiny snippet that uses it in both definition and reference position.
 *
 * Fix: catch only non-fatal exceptions — `case e: Throwable` also swallowed
 * fatal errors (OutOfMemoryError, InterruptedException) and reported them as
 * "not an identifier".
 */
def isIdentifier(ident: String): Boolean =
  // Doubling the identifier catches tokens that only parse in one position.
  val code = s"val $ident = 0; val ${ident}${ident} = $ident"
  try
    // Parsing throws on invalid input; the parser instance itself is unused.
    val p = SbtParser(FAKE_FILE, List(code))
    true
  catch case scala.util.control.NonFatal(_) => false
/**
* Provides the previous error reporting functionality in
* [[scala.tools.reflect.ToolBox]].
*
* This parser is a wrapper around a collection of reporters that are
* indexed by a unique key. This is used to ensure that the reports of
* one parser don't collide with other ones in concurrent settings.
*
* This parser is a sign that this whole parser should be rewritten.
* There are exceptions everywhere and the logic to work around
* the scalac parser bug heavily relies on them and it's tied
* to the test suite. Ideally, we only want to throw exceptions
* when we know for a fact that the user-provided snippet doesn't
* parse.
*/
private[sbt] class UniqueParserReporter extends Reporter {
private val reporters = new ConcurrentHashMap[String, StoreReporter]()
override def doReport(dia: Diagnostic)(using Context): Unit =
import scala.jdk.OptionConverters.*
val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path
val reporter = getReporter(sourcePath)
reporter.doReport(dia)
override def report(dia: Diagnostic)(using Context): Unit =
import scala.jdk.OptionConverters.*
val sourcePath = dia.position.asScala.getOrElse(sys.error("missing position")).source.path
val reporter = getReporter(sourcePath)
reporter.report(dia)
override def hasErrors: Boolean = {
var result = false
reporters.forEachValue(100, r => if (r.hasErrors) result = true)
result
}
def createReporter(uniqueFileName: String): StoreReporter =
val r = new StoreReporter(null)
reporters.put(uniqueFileName, r)
r
/**
 * Returns the reporter registered for `uniqueFileName`, creating and
 * registering one atomically if absent.
 *
 * Fix: the previous get/createReporter sequence was a check-then-act race —
 * two threads could each install a reporter for the same key and one of them
 * would keep reporting into a reporter that was no longer in the map.
 */
def getOrCreateReporter(uniqueFileName: String): StoreReporter =
  reporters.computeIfAbsent(uniqueFileName, _ => new StoreReporter(null))
private def getReporter(fileName: String) = {
val reporter = reporters.get(fileName)
if (reporter == null) {
scalacGlobalInitReporter.getOrElse(
sys.error(s"sbt forgot to initialize `scalacGlobalInitReporter`.")
)
} else reporter
}
def throwParserErrorsIfAny(reporter: StoreReporter, fileName: String)(using
context: Context
): Unit =
if reporter.hasErrors then {
val seq = reporter.pendingMessages.map { info =>
s"""[$fileName]:${info.pos.line}: ${info.msg}"""
}
val errorMessage = seq.mkString(EOL)
val error: String =
if (errorMessage.contains(XML_ERROR))
s"$errorMessage\n${SbtParser.XmlErrorMessage}"
else errorMessage
throw new MessageOnlyException(error)
} else ()
}
private[sbt] var scalacGlobalInitReporter: Option[ConsoleReporter] = None
private[sbt] val globalReporter = UniqueParserReporter()
private[sbt] val defaultGlobalForParser = ParseDriver()
private[sbt] final class ParseDriver extends Driver:
import dotty.tools.dotc.config.Settings.Setting._
val compileCtx0 = initCtx.fresh
val options = List("-classpath", s"$defaultClasspath", "dummy.scala")
val compileCtx1 = setup(options.toArray, compileCtx0) match
case Some((_, ctx)) => ctx
case _ => sys.error(s"initialization failed for $options")
val outputDir = VirtualDirectory("output")
val compileCtx2 = compileCtx1.fresh
.setSetting(
compileCtx1.settings.outputDir,
outputDir
)
.setReporter(globalReporter)
val compileCtx = compileCtx2
val compiler = newCompiler(using compileCtx)
end ParseDriver
/**
* Parse code reusing the same [[Run]] instance.
*
* @param code The code to be parsed.
* @param filePath The file name where the code comes from.
* @param reporterId0 The reporter id is the key used to get the pertinent
* reporter. Given that the parsing reuses a global
* instance, this reporter id makes sure that every parsing
* session gets its own errors in a concurrent setting.
* The reporter id must be unique per parsing session.
* @return
*/
private[sbt] def parse(
code: String,
filePath: String,
reporterId0: Option[String]
): (List[untpd.Tree], String, SourceFile) =
import defaultGlobalForParser.*
given ctx: Context = compileCtx
val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}")
val reporter = globalReporter.getOrCreateReporter(reporterId)
reporter.removeBufferedMessages
val moduleName = "SyntheticModule"
val wrapCode = s"""object $moduleName {
|$code
|}""".stripMargin
val wrapperFile = SourceFile(
VirtualFile(reporterId, wrapCode.getBytes(StandardCharsets.UTF_8)),
scala.io.Codec.UTF8
)
val parser = Parsers.Parser(wrapperFile)
val t = parser.parse()
val parsedTrees = t match
case untpd.PackageDef(_, List(untpd.ModuleDef(_, untpd.Template(_, _, _, trees)))) =>
trees match
case ts: List[untpd.Tree] => ts
case ts: Lazy[List[untpd.Tree]] => ts.complete
globalReporter.throwParserErrorsIfAny(reporter, filePath)
(parsedTrees, reporterId, wrapperFile)
end SbtParser
/**
 * Warms up the shared parser on a background daemon thread, so the first .sbt
 * parse does not pay compiler-initialization cost on the caller's thread.
 *
 * NOTE(review): `start()` is called inside the anonymous Thread subclass's
 * constructor, leaking `this` before construction completes — presumably
 * benign here since `run` only touches a lazy global, but confirm.
 */
private class SbtParserInit {
  new Thread("sbt-parser-init-thread") {
    setDaemon(true)
    start()
    override def run(): Unit = {
      // Forces initialization of the shared ParseDriver.
      val _ = SbtParser.defaultGlobalForParser
    }
  }
}
/**
* This method solely exists to add scaladoc to members in SbtParser which
* are defined using pattern matching.
*/
sealed trait ParsedSbtFileExpressions:
/** The set of parsed import expressions. */
def imports: Seq[(String, Int)]
/** The set of parsed definitions and/or sbt build settings. */
def settings: Seq[(String, LineRange)]
/** The set of scala tree's for parsed definitions/settings and the underlying string representation.. */
def settingsTrees: Seq[(String, Tree)]
end ParsedSbtFileExpressions
/**
* An initial parser/splitter of .sbt files.
*
* This class is responsible for chunking a `.sbt` file into expression ranges
* which we can then compile using the Scala compiler.
*
* Example:
*
* {{{
* val parser = SbtParser(myFile, IO.readLines(myFile))
* // All import statements
* val imports = parser.imports
* // All other statements (val x =, or raw settings)
* val settings = parser.settings
* }}}
*
* @param file The file we're parsing (may be a dummy file)
* @param lines The parsed "lines" of the file, where each string is a line.
*/
private[sbt] case class SbtParser(path: VirtualFileRef, lines: Seq[String])
extends ParsedSbtFileExpressions:
// settingsTrees,modifiedContent needed for "session save"
// TODO - We should look into splitting out "definitions" vs. "settings" here instead of further string lookups, since we have the
// parsed trees.
val (imports, settings, settingsTrees) = splitExpressions(path, lines)
import SbtParser.defaultGlobalForParser.*
private def splitExpressions(
path: VirtualFileRef,
lines: Seq[String]
): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = {
// import sbt.internal.parser.MissingBracketHandler.findMissingText
val indexedLines = lines.toIndexedSeq
val content = indexedLines.mkString(END_OF_LINE)
val fileName = path.id
val (parsedTrees, reporterId, sourceFile) = parse(content, fileName, None)
given ctx: Context = compileCtx
val (imports: Seq[untpd.Tree], statements: Seq[untpd.Tree]) =
parsedTrees.partition {
case _: untpd.Import => true
case _ => false
}
def convertStatement(tree: untpd.Tree)(using ctx: Context): Option[(String, Tree, LineRange)] =
if tree.span.exists then
// not sure why I need to reconstruct the position myself
val pos = SourcePosition(sourceFile, tree.span)
val statement = String(pos.linesSlice).trim()
val lines = pos.lines
val wrapperLineOffset = 0
Some(
(
statement,
tree,
LineRange(lines.start + wrapperLineOffset, lines.end + wrapperLineOffset)
)
)
else None
val stmtTreeLineRange = statements.flatMap(convertStatement)
val importsLineRange = importsToLineRanges(sourceFile, imports)
(
importsLineRange,
stmtTreeLineRange.map { case (stmt, _, lr) =>
(stmt, lr)
},
stmtTreeLineRange.map { case (stmt, tree, _) =>
(stmt, tree)
}
)
}
private def importsToLineRanges(
sourceFile: SourceFile,
imports: Seq[Tree]
)(using context: Context): Seq[(String, Int)] =
imports.map { tree =>
// not sure why I need to reconstruct the position myself
val pos = SourcePosition(sourceFile, tree.span)
val content = String(pos.linesSlice).trim()
val wrapperLineOffset = 0
(content, pos.line + wrapperLineOffset)
}
end SbtParser

View File

@ -9,11 +9,17 @@ package sbt
package internal
package parser
private[sbt] object SbtRefactorings {
import java.io.File
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.core.Contexts.Context
private[sbt] object SbtRefactorings:
import sbt.internal.parser.SbtParser.{ END_OF_LINE, FAKE_FILE }
import sbt.internal.SessionSettings.{ SessionSetting, SbtConfigFile }
/** A session setting is simply a tuple of a Setting[_] and the strings which define it. */
type SessionSetting = (Def.Setting[_], Seq[String])
type SbtConfigFile = (File, Seq[String])
val emptyString = ""
val reverseOrderingInt = Ordering[Int].reverse
@ -32,6 +38,7 @@ private[sbt] object SbtRefactorings {
): SbtConfigFile = {
val (file, lines) = configFile
val split = SbtParser(FAKE_FILE, lines)
given ctx: Context = SbtParser.defaultGlobalForParser.compileCtx
val recordedCommands = recordCommands(commands, split)
val sortedRecordedCommands = recordedCommands.sortBy(_._1)(reverseOrderingInt)
@ -43,12 +50,11 @@ private[sbt] object SbtRefactorings {
modifiedContent: String,
sortedRecordedCommands: Seq[(Int, String, String)]
) = {
sortedRecordedCommands.foldLeft(modifiedContent) {
case (acc, (from, old, replacement)) =>
val before = acc.substring(0, from)
val after = acc.substring(from + old.length, acc.length)
val afterLast = emptyStringForEmptyString(after)
before + replacement + afterLast
sortedRecordedCommands.foldLeft(modifiedContent) { case (acc, (from, old, replacement)) =>
val before = acc.substring(0, from)
val after = acc.substring(from + old.length, acc.length)
val afterLast = emptyStringForEmptyString(after)
before + replacement + afterLast
}
}
@ -57,44 +63,44 @@ private[sbt] object SbtRefactorings {
if (trimmed.isEmpty) trimmed else text
}
private def recordCommands(commands: Seq[SessionSetting], split: SbtParser) =
commands.flatMap {
case (_, command) =>
val map = toTreeStringMap(command)
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
private def recordCommands(commands: Seq[SessionSetting], split: SbtParser)(using Context) =
commands.flatMap { case (_, command) =>
val map = toTreeStringMap(command)
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
}
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) =
split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) {
case (acc, (st, tree)) =>
val treeName = extractSettingName(tree)
if (name == treeName) {
val replacement =
if (acc.isEmpty) command.mkString(END_OF_LINE)
else emptyString
(tree.pos.start, st, replacement) +: acc
} else {
acc
}
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String])(using
Context
) =
split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) { case (acc, (st, tree)) =>
val treeName = extractSettingName(tree)
if (name == treeName) {
val replacement =
if (acc.isEmpty) command.mkString(END_OF_LINE)
else emptyString
(tree.sourcePos.start, st, replacement) +: acc
} else {
acc
}
}
private def toTreeStringMap(command: Seq[String]) = {
val split = SbtParser(FAKE_FILE, command)
val trees = split.settingsTrees
val seq = trees.map {
case (statement, tree) =>
(extractSettingName(tree), statement)
val seq = trees.map { case (statement, tree) =>
(extractSettingName(tree), statement)
}
seq.toMap
}
import scala.tools.nsc.Global
private def extractSettingName(tree: Global#Tree): String =
tree.children match {
case h :: _ =>
extractSettingName(h)
case _ =>
tree.toString()
}
// todo: revisit
private def extractSettingName(tree: untpd.Tree): String =
tree.toString()
// tree.children match {
// case h :: _ =>
// extractSettingName(h)
// case _ =>
// tree.toString()
// }
}
end SbtRefactorings

View File

@ -0,0 +1,43 @@
package sbt.internal
import sbt.internal.parser.SbtParser
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
object SbtParserTest extends verify.BasicTestSuite:
lazy val testCode: String = """import keys.*
import com.{
keys
}
val x = 1
lazy val foo = project
.settings(x := y)
"""
test("imports with their lines") {
val ref = VirtualFileRef.of("vfile")
val p = SbtParser(ref, testCode.linesIterator.toList)
assert(
p.imports == List(
"import keys.*" -> 1,
"""import com.{
keys
}""" -> 2
)
)
}
test("imports with their lines2") {
val ref = VirtualFileRef.of("vfile")
val p = SbtParser(ref, testCode.linesIterator.toList)
assert(p.settings.size == 2)
assert(p.settings(0) == ("""val x = 1""" -> LineRange(6, 6)))
assert(p.settings(1) == ("""lazy val foo = project
.settings(x := y)""" -> LineRange(7, 8)))
}
test("isIdentifier") {
assert(SbtParser.isIdentifier("1a") == false)
}
end SbtParserTest

View File

@ -10,36 +10,36 @@ package internal
package parser
abstract class CheckIfParsedSpec(
implicit val splitter: SplitExpressions.SplitExpression =
EvaluateConfigurations.splitExpressions
val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
) extends AbstractSpec {
test(s"${this.getClass.getName} should parse sbt file") {
files foreach {
case (content, description, nonEmptyImports, nonEmptyStatements) =>
println(s"""${getClass.getSimpleName}: "$description" """)
val (imports, statements) = split(content)
assert(
nonEmptyStatements == statements.nonEmpty,
s"""$description
files foreach { case (content, description, nonEmptyImports, nonEmptyStatements) =>
println(s"""${getClass.getSimpleName}: "$description" """)
val (imports, statements) = split(content)(splitter)
assert(
nonEmptyStatements == statements.nonEmpty,
s"""$description
|***${shouldContains(nonEmptyStatements)} statements***
|$content """.stripMargin
)
assert(
nonEmptyImports == imports.nonEmpty,
s"""$description
)
assert(
nonEmptyImports == imports.nonEmpty,
s"""$description
|***${shouldContains(nonEmptyImports)} imports***
|$content """.stripMargin
)
)
}
}
private def shouldContains(b: Boolean): String =
s"""Should ${if (b) {
"contain"
} else {
"not contain"
}}"""
s"""Should ${
if (b) {
"contain"
} else {
"not contain"
}
}"""
protected def files: Seq[(String, String, Boolean, Boolean)]

View File

@ -25,7 +25,8 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
false,
true
),
("""
(
"""
|val scmpom = taskKey[xml.NodeBuffer]("Node buffer")
|
|scmpom := <scm>
@ -44,8 +45,13 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
|
|publishMavenStyle := true
|
""".stripMargin, "Wrong Commented xml ", false, true),
("""
""".stripMargin,
"Wrong Commented xml ",
false,
true
),
(
"""
|val scmpom = taskKey[xml.NodeBuffer]("Node buffer")
|
|scmpom := <scm>
@ -64,14 +70,28 @@ object CommentedXmlSpec extends CheckIfParsedSpec {
|
|publishMavenStyle := true
|
""".stripMargin, "Commented xml ", false, true),
("""
""".stripMargin,
"Commented xml ",
false,
true
),
(
"""
|import sbt._
|
|// </a
""".stripMargin, "Xml in comment", true, false),
("""
""".stripMargin,
"Xml in comment",
true,
false
),
(
"""
|// a/>
""".stripMargin, "Xml in comment2", false, false)
""".stripMargin,
"Xml in comment2",
false,
false
)
)
}

View File

@ -9,6 +9,7 @@ package sbt.internal.parser
import sbt.internal.util.MessageOnlyException
/*
object EmbeddedXmlSpec extends CheckIfParsedSpec {
test("File with xml content should Handle last xml part") {
@ -36,6 +37,7 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
try {
split(buildSbt)
sys.error("expected MessageOnlyException")
} catch {
case exception: MessageOnlyException =>
val index = buildSbt.linesIterator.indexWhere(line => line.contains(errorLine)) + 1
@ -47,13 +49,24 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
}
protected val files = Seq(
("""
(
"""
|val p = <a/>
""".stripMargin, "Xml modified closing tag at end of file", false, true),
("""
""".stripMargin,
"Xml modified closing tag at end of file",
false,
true
),
(
"""
|val p = <a></a>
""".stripMargin, "Xml at end of file", false, true),
("""|
""".stripMargin,
"Xml at end of file",
false,
true
),
(
"""|
|
|name := "play-html-compressor"
|
@ -89,8 +102,13 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
|
|val tra = "</scm>"
|
""".stripMargin, "Xml in string", false, true),
("""|
""".stripMargin,
"Xml in string",
false,
true
),
(
"""|
|
|name := "play-html-compressor"
|
@ -119,7 +137,11 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
|<aa/>
| </a></xml:group>
|
| """.stripMargin, "Xml with attributes", false, true),
| """.stripMargin,
"Xml with attributes",
false,
true
),
(
"""
|scalaVersion := "2.10.2"
@ -151,3 +173,4 @@ object EmbeddedXmlSpec extends CheckIfParsedSpec {
)
}
*/

View File

@ -12,48 +12,54 @@ package parser
import java.io.File
import sbt.internal.util.MessageOnlyException
import scala.io.Source
import sbt.internal.inc.PlainVirtualFileConverter
object ErrorSpec extends AbstractSpec {
implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
val converter = PlainVirtualFileConverter.converter
// implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
test("Parser should contains file name and line number") {
val rootPath = getClass.getResource("/error-format/").getPath
println(s"Reading files from: $rootPath")
new File(rootPath).listFiles foreach { file =>
print(s"Processing ${file.getName}: ")
val vf = converter.toVirtualFile(file.toPath())
val buildSbt = Source.fromFile(file).getLines().mkString("\n")
try {
SbtParser(file, buildSbt.linesIterator.toSeq)
SbtParser(vf, buildSbt.linesIterator.toSeq)
} catch {
case exp: MessageOnlyException =>
val message = exp.getMessage
println(s"${exp.getMessage}")
assert(message.contains(file.getName))
}
containsLineNumber(buildSbt)
// todo:
// containsLineNumber(buildSbt)
}
}
test("it should handle wrong parsing") {
intercept[MessageOnlyException] {
val buildSbt =
"""
|libraryDependencies ++= Seq("a" % "b" % "2") map {
|(dependency) =>{
| dependency
| } /* */ //
|}
""".stripMargin
MissingBracketHandler.findMissingText(
buildSbt,
buildSbt.length,
2,
"fake.txt",
new MessageOnlyException("fake")
)
()
}
}
// test("it should handle wrong parsing") {
// intercept[MessageOnlyException] {
// val buildSbt =
// """
// |libraryDependencies ++= Seq("a" % "b" % "2") map {
// |(dependency) =>{
// | dependency
// | } /* */ //
// |}
// """.stripMargin
// MissingBracketHandler.findMissingText(
// buildSbt,
// buildSbt.length,
// 2,
// "fake.txt",
// new MessageOnlyException("fake")
// )
// ()
// }
// }
test("it should handle xml error") {
try {
@ -63,11 +69,12 @@ object ErrorSpec extends AbstractSpec {
|val s = '
""".stripMargin
SbtParser(SbtParser.FAKE_FILE, buildSbt.linesIterator.toSeq)
// sys.error("not supposed to reach here")
} catch {
case exp: MessageOnlyException =>
val message = exp.getMessage
println(s"${exp.getMessage}")
assert(message.contains(SbtParser.FAKE_FILE.getName))
assert(message.contains(SbtParser.FAKE_FILE.id()))
}
}

View File

@ -12,8 +12,10 @@ package parser
import java.io.File
import scala.io.Source
import sbt.internal.inc.PlainVirtualFileConverter
object NewFormatSpec extends AbstractSpec {
val converter = PlainVirtualFileConverter.converter
implicit val splitter: SplitExpressions.SplitExpression = EvaluateConfigurations.splitExpressions
test("New Format should handle lines") {
@ -22,11 +24,15 @@ object NewFormatSpec extends AbstractSpec {
val allFiles = new File(rootPath).listFiles.toList
allFiles foreach { path =>
println(s"$path")
val vf = converter.toVirtualFile(path.toPath())
val lines = Source.fromFile(path).getLines().toList
val (_, statements) = splitter(path, lines)
assert(statements.nonEmpty, s"""
val (_, statements) = splitter(vf, lines)
assert(
statements.nonEmpty,
s"""
|***should contains statements***
|$lines """.stripMargin)
|$lines """.stripMargin
)
}
}
}

View File

@ -9,6 +9,7 @@ package sbt
package internal
package parser
/*
import java.io.{ File, FilenameFilter }
import scala.io.Source
@ -39,12 +40,11 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec
.toList
allFiles foreach { file =>
val originalLines = Source.fromFile(file).getLines().toList
expectedResultAndMap(file) foreach {
case (expectedResultList, commands) =>
val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands)
val expected = SbtParser(file, expectedResultList)
val result = SbtParser(file, resultList._2)
assert(result.settings == expected.settings)
expectedResultAndMap(file) foreach { case (expectedResultList, commands) =>
val resultList = SbtRefactorings.applySessionSettings((file, originalLines), commands)
val expected = SbtParser(file, expectedResultList)
val result = SbtParser(file, resultList._2)
assert(result.settings == expected.settings)
}
}
}
@ -76,3 +76,4 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec
class SessionSettingsSpec extends AbstractSessionSettingsSpec("session-settings")
class SessionSettingsQuickSpec extends AbstractSessionSettingsSpec("session-settings-quick")
*/

View File

@ -9,10 +9,10 @@ package sbt
package internal
package parser
import java.io.File
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
object SplitExpressions {
type SplitExpression = (File, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)])
}
object SplitExpressions:
type SplitExpression =
(VirtualFileRef, Seq[String]) => (Seq[(String, Int)], Seq[(String, LineRange)])
end SplitExpressions

View File

@ -12,20 +12,21 @@ package parser
import java.io.File
import sbt.internal.util.LineRange
import xsbti.VirtualFileRef
trait SplitExpression {
def split(s: String, file: File = new File("noFile"))(
implicit splitter: SplitExpressions.SplitExpression
def split(s: String, file: VirtualFileRef = VirtualFileRef.of("noFile"))(
splitter: SplitExpressions.SplitExpression
): (Seq[(String, Int)], Seq[(String, LineRange)]) = splitter(file, s.split("\n").toSeq)
}
trait SplitExpressionsBehavior extends SplitExpression { this: verify.BasicTestSuite =>
def newExpressionsSplitter(implicit splitter: SplitExpressions.SplitExpression) = {
def newExpressionsSplitter(splitter: SplitExpressions.SplitExpression) = {
test("parse a two settings without intervening blank line") {
val (imports, settings) = split("""version := "1.0"
scalaVersion := "2.10.4"""")
scalaVersion := "2.10.4"""")(splitter)
assert(imports.isEmpty)
assert(settings.size == 2)
@ -34,7 +35,7 @@ scalaVersion := "2.10.4"""")
test("parse a setting and val without intervening blank line") {
val (imports, settings) =
split("""version := "1.0"
lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")
lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")(splitter)
assert(imports.isEmpty)
assert(settings.size == 2)
@ -46,11 +47,10 @@ lazy val root = (project in file(".")).enablePlugins­(PlayScala)""")
import foo.Bar
version := "1.0"
""".stripMargin
)
)(splitter)
assert(imports.size == 2)
assert(settingsAndDefs.size == 1)
}
}
}

View File

@ -9,6 +9,6 @@ package sbt
package internal
package parser
object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior {
object SplitExpressionsTest extends verify.BasicTestSuite with SplitExpressionsBehavior:
newExpressionsSplitter(EvaluateConfigurations.splitExpressions)
}
end SplitExpressionsTest

View File

@ -0,0 +1,267 @@
package sbt
package internal
package util
package appmacro
import scala.collection.mutable.ListBuffer
import scala.reflect.TypeTest
import scala.quoted.*
import sbt.util.Applicative
import sbt.util.Monad
import Types.Id
/**
* Implementation of a macro that provides a direct syntax for applicative functors and monads. It
* is intended to be used in conjunction with another macro that conditions the inputs.
*/
trait Cont:
final val InstanceTCName = "F"
extension [C <: Quotes & Singleton](conv: Convert[C])
/**
* Implementation of a macro that provides a direct syntax for applicative functors. It is
* intended to be used in conjunction with another macro that conditions the inputs.
*/
def contMapN[A: Type, F[_], Effect[_]: Type](
tree: Expr[A],
instanceExpr: Expr[Applicative[F]]
)(using
iftpe: Type[F],
eatpe: Type[Effect[A]],
): Expr[F[Effect[A]]] =
contMapN[A, F, Effect](tree, instanceExpr, conv.idTransform)
/**
* Implementation of a macro that provides a direct syntax for applicative functors. It is
* intended to be used in conjunction with another macro that conditions the inputs.
*/
def contMapN[A: Type, F[_], Effect[_]: Type](
tree: Expr[A],
instanceExpr: Expr[Applicative[F]],
inner: conv.TermTransform[Effect]
)(using
iftpe: Type[F],
eatpe: Type[Effect[A]],
): Expr[F[Effect[A]]] =
contImpl[A, F, Effect](Left(tree), instanceExpr, inner)
/**
* Implementation of a macro that provides a direct syntax for applicative functors. It is
* intended to be used in conjunction with another macro that conditions the inputs.
*/
def contFlatMap[A: Type, F[_], Effect[_]: Type](
tree: Expr[F[A]],
instanceExpr: Expr[Applicative[F]],
)(using
iftpe: Type[F],
eatpe: Type[Effect[A]],
): Expr[F[Effect[A]]] =
contFlatMap[A, F, Effect](tree, instanceExpr, conv.idTransform)
/**
* Implementation of a macro that provides a direct syntax for applicative functors. It is
* intended to be used in conjunction with another macro that conditions the inputs.
*/
def contFlatMap[A: Type, F[_], Effect[_]: Type](
tree: Expr[F[A]],
instanceExpr: Expr[Applicative[F]],
inner: conv.TermTransform[Effect]
)(using
iftpe: Type[F],
eatpe: Type[Effect[A]],
): Expr[F[Effect[A]]] =
contImpl[A, F, Effect](Right(tree), instanceExpr, inner)
def summonAppExpr[F[_]: Type]: Expr[Applicative[F]] =
import conv.qctx
import qctx.reflect.*
given qctx.type = qctx
Expr
.summon[Applicative[F]]
.getOrElse(sys.error(s"Applicative[F] not found for ${TypeRepr.of[F].typeSymbol}"))
/**
* Implementation of a macro that provides a direct syntax for applicative functors and monads.
* It is intended to bcke used in conjunction with another macro that conditions the inputs.
*
* This method processes the Term `t` to find inputs of the form `wrap[A]( input )` This form is
* typically constructed by another macro that pretends to be able to get a value of type `A`
* from a value convertible to `F[A]`. This `wrap(input)` form has two main purposes. First, it
* identifies the inputs that should be transformed. Second, it allows the input trees to be
* wrapped for later conversion into the appropriate `F[A]` type by `convert`. This wrapping is
* necessary because applying the first macro must preserve the original type, but it is useful
* to delay conversion until the outer, second macro is called. The `wrap` method accomplishes
* this by allowing the original `Term` and `Type` to be hidden behind the raw `A` type. This
* method will remove the call to `wrap` so that it is not actually called at runtime.
*
* Each `input` in each expression of the form `wrap[A]( input )` is transformed by `convert`.
* This transformation converts the input Term to a Term of type `F[A]`. The original wrapped
* expression `wrap(input)` is replaced by a reference to a new local `val x: A`, where `x` is a
* fresh name. These converted inputs are passed to `builder` as well as the list of these
* synthetic `ValDef`s. The `TupleBuilder` instance constructs a tuple (Tree) from the inputs
* and defines the right hand side of the vals that unpacks the tuple containing the results of
* the inputs.
*
* The constructed tuple of inputs and the code that unpacks the results of the inputs are then
* passed to the `i`, which is an implementation of `Instance` that is statically accessible. An
* Instance defines a applicative functor associated with a specific type constructor and, if it
* implements MonadInstance as well, a monad. Typically, it will be either a top-level module or
* a stable member of a top-level module (such as a val or a nested module). The `with
* Singleton` part of the type verifies some cases at macro compilation time, while the full
* check for static accessibility is done at macro expansion time. Note: Ideally, the types
* would verify that `i: MonadInstance` when `t.isRight`. With the various dependent types
* involved, this is not worth it.
*
* The `eitherTree` argument is the argument of the macro that will be transformed as described
* above. If the macro that calls this method is for a multi-input map (app followed by map),
* `in` should be the argument wrapped in Left. If this is for multi-input flatMap (app followed
* by flatMap), this should be the argument wrapped in Right.
*/
def contImpl[A: Type, F[_], Effect[_]: Type](
eitherTree: Either[Expr[A], Expr[F[A]]],
instanceExpr: Expr[Applicative[F]],
inner: conv.TermTransform[Effect]
)(using
iftpe: Type[F],
eatpe: Type[Effect[A]],
): Expr[F[Effect[A]]] =
import conv.*
import qctx.reflect.*
given qctx.type = qctx
val fTypeCon = TypeRepr.of[F]
val faTpe = fTypeCon.appliedTo(TypeRepr.of[Effect[A]])
val (expr, treeType) = eitherTree match
case Left(l) => (l, TypeRepr.of[Effect[A]])
case Right(r) => (r, faTpe)
val inputBuf = ListBuffer[Input]()
def makeApp(body: Term, inputs: List[Input]): Expr[F[Effect[A]]] = inputs match
case Nil => pure(body)
case x :: Nil => genMap(body, x)
case xs => genMapN(body, xs)
// no inputs, so construct F[A] via Instance.pure or pure+flatten
def pure(body: Term): Expr[F[Effect[A]]] =
def pure0[A1: Type](body: Expr[A1]): Expr[F[A1]] =
'{
$instanceExpr.pure[A1] { () => $body }
}
eitherTree match
case Left(_) => pure0[Effect[A]](inner(body).asExprOf[Effect[A]])
case Right(_) =>
flatten(pure0[F[Effect[A]]](inner(body).asExprOf[F[Effect[A]]]))
// m should have type F[F[A]]
// the returned Tree will have type F[A]
def flatten(m: Expr[F[F[Effect[A]]]]): Expr[F[Effect[A]]] =
'{
{
val i1 = $instanceExpr.asInstanceOf[Monad[F]]
i1.flatten[Effect[A]]($m.asInstanceOf[F[F[Effect[A]]]])
}
}
def genMap(body: Term, input: Input): Expr[F[Effect[A]]] =
def genMap0[A1: Type](body: Expr[A1]): Expr[F[A1]] =
input.tpe.asType match
case '[a] =>
val tpe =
MethodType(List(input.name))(_ => List(TypeRepr.of[a]), _ => TypeRepr.of[A1])
val lambda = Lambda(
owner = Symbol.spliceOwner,
tpe = tpe,
rhsFn = (sym, params) => {
val param = params.head.asInstanceOf[Term]
// Called when transforming the tree to add an input.
// For `qual` of type F[A], and a `selection` qual.value,
// the call is addType(Type A, Tree qual)
// The result is a Tree representing a reference to
// the bound value of the input.
val substitute = [x] =>
(name: String, tpe: Type[x], qual: Term, replace: Term) =>
given t: Type[x] = tpe
convert[x](name, qual) transform { (tree: Term) =>
typed[x](Ref(param.symbol))
}
transformWrappers(body.asTerm.changeOwner(sym), substitute, sym)
}
).asExprOf[a => A1]
val expr = input.term.asExprOf[F[a]]
typed[F[A1]](
'{
$instanceExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda)
}.asTerm
).asExprOf[F[A1]]
eitherTree match
case Left(_) =>
genMap0[Effect[A]](inner(body).asExprOf[Effect[A]])
case Right(_) =>
flatten(genMap0[F[Effect[A]]](inner(body).asExprOf[F[Effect[A]]]))
def genMapN(body: Term, inputs: List[Input]): Expr[F[Effect[A]]] =
def genMapN0[A1: Type](body: Expr[A1]): Expr[F[A1]] =
val br = makeTuple(inputs)
val lambdaTpe =
MethodType(List("$p0"))(_ => List(br.inputTupleTypeRepr), _ => TypeRepr.of[A1])
val lambda = Lambda(
owner = Symbol.spliceOwner,
tpe = lambdaTpe,
rhsFn = (sym, params) => {
val p0 = params.head.asInstanceOf[Term]
// Called when transforming the tree to add an input.
// For `qual` of type F[A], and a `selection` qual.value,
// the call is addType(Type A, Tree qual)
// The result is a Tree representing a reference to
// the bound value of the input.
val substitute = [x] =>
(name: String, tpe: Type[x], qual: Term, oldTree: Term) =>
given Type[x] = tpe
convert[x](name, qual) transform { (replacement: Term) =>
val idx = inputs.indexWhere(input => input.qual == qual)
Select
.unique(Ref(p0.symbol), "apply")
.appliedToTypes(List(br.inputTupleTypeRepr))
.appliedToArgs(List(Literal(IntConstant(idx))))
}
transformWrappers(body.asTerm.changeOwner(sym), substitute, sym)
}
)
val tupleMapRepr = TypeRepr
.of[Tuple.Map]
.appliedTo(List(br.inputTupleTypeRepr, TypeRepr.of[F]))
tupleMapRepr.asType match
case '[tupleMap] =>
br.inputTupleTypeRepr.asType match
case '[inputTypeTpe] =>
'{
given Applicative[F] = $instanceExpr
AList
.tuple[inputTypeTpe & Tuple]
.mapN[F, A1](${
br.tupleExpr.asInstanceOf[Expr[Tuple.Map[inputTypeTpe & Tuple, F]]]
})(
${ lambda.asExprOf[Tuple.Map[inputTypeTpe & Tuple, Id] => A1] }
)
}
eitherTree match
case Left(_) =>
genMapN0[Effect[A]](inner(body).asExprOf[Effect[A]])
case Right(_) =>
flatten(genMapN0[F[Effect[A]]](inner(body).asExprOf[F[Effect[A]]]))
// Called when transforming the tree to add an input.
// For `qual` of type F[A], and a `selection` qual.value.
val record = [a] =>
(name: String, tpe: Type[a], qual: Term, oldTree: Term) =>
given t: Type[a] = tpe
convert[a](name, qual) transform { (replacement: Term) =>
inputBuf += Input(TypeRepr.of[a], qual, replacement, freshName("q"))
oldTree
}
val tx = transformWrappers(expr.asTerm, record, Symbol.spliceOwner)
val tr = makeApp(tx, inputBuf.toList)
tr
end Cont

View File

@ -1,322 +1,85 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util.appmacro
package sbt.internal.util
package appmacro
import sbt.internal.util.Types.Id
import scala.compiletime.summonInline
import scala.quoted.*
import scala.reflect.TypeTest
import scala.collection.mutable
import scala.reflect._
import macros._
import ContextUtil.{ DynamicDependencyError, DynamicReferenceError }
trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int):
import qctx.reflect.*
given qctx.type = qctx
object ContextUtil {
final val DynamicDependencyError = "Illegal dynamic dependency"
final val DynamicReferenceError = "Illegal dynamic reference"
private var counter: Int = valStart - 1
def freshName(prefix: String): String =
counter = counter + 1
s"$$${prefix}${counter}"
/**
* Constructs an object with utility methods for operating in the provided macro context `c`.
* Callers should explicitly specify the type parameter as `c.type` in order to preserve the path dependent types.
* Constructs a new, synthetic, local var with type `tpe`, a unique name, initialized to
* zero-equivalent (Zero[A]), and owned by `parent`.
*/
def apply[C <: blackbox.Context with Singleton](c: C): ContextUtil[C] = new ContextUtil(c: C)
def freshValDef(parent: Symbol, tpe: TypeRepr, rhs: Term): ValDef =
tpe.asType match
case '[a] =>
val sym =
Symbol.newVal(
parent,
freshName("q"),
tpe,
Flags.Synthetic,
Symbol.noSymbol
)
ValDef(sym, rhs = Some(rhs))
/**
* Helper for implementing a no-argument macro that is introduced via an implicit.
* This method removes the implicit conversion and evaluates the function `f` on the target of the conversion.
*
* Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this
* method, the result of this method is `f(<Tree of someValue>)`.
*/
def selectMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
import c.universe._
def typed[A: Type](value: Term): Term =
Typed(value, TypeTree.of[A])
c.macroApplication match {
case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
}
}
def makeTuple(inputs: List[Input]): BuilderResult =
new BuilderResult:
override def inputTupleTypeRepr: TypeRepr =
tupleTypeRepr(inputs.map(_.tpe))
override def tupleExpr: Expr[Tuple] =
Expr.ofTupleFromSeq(inputs.map(_.term.asExpr))
def unexpectedTree[C <: blackbox.Context](tree: C#Tree): Nothing =
sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree)
}
trait BuilderResult:
def inputTupleTypeRepr: TypeRepr
def tupleExpr: Expr[Tuple]
end BuilderResult
/**
* Utility methods for macros. Several methods assume that the context's universe is a full compiler
* (`scala.tools.nsc.Global`).
* This is not thread safe due to the underlying Context and related data structures not being thread safe.
* Use `ContextUtil[c.type](c)` to construct.
*/
final class ContextUtil[C <: blackbox.Context](val ctx: C) {
import ctx.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
def tupleTypeRepr(param: List[TypeRepr]): TypeRepr =
param match
case x :: xs => TypeRepr.of[scala.*:].appliedTo(List(x, tupleTypeRepr(xs)))
case Nil => TypeRepr.of[EmptyTuple]
val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context]
val global: powerContext.universe.type = powerContext.universe
def callsiteTyper: global.analyzer.Typer = powerContext.callsiteTyper
val initialOwner: Symbol = callsiteTyper.context.owner.asInstanceOf[ctx.universe.Symbol]
final class Input(
val tpe: TypeRepr,
val qual: Term,
val term: Term,
val name: String
):
override def toString: String =
s"Input($tpe, $qual, $term, $name)"
lazy val alistType = ctx.typeOf[AList[KList]]
lazy val alist: Symbol = alistType.typeSymbol.companion
lazy val alistTC: Type = alistType.typeConstructor
trait TermTransform[F[_]]:
def apply(in: Term): Term
end TermTransform
/** Modifiers for a local val.*/
lazy val localModifiers = Modifiers(NoFlags)
def idTransform[F[_]]: TermTransform[F] = in => in
def getPos(sym: Symbol) = if (sym eq null) NoPosition else sym.pos
/**
* Constructs a unique term name with the given prefix within this Context.
* (The current implementation uses Context.freshName, which increments
*/
def freshTermName(prefix: String) = TermName(ctx.freshName("$" + prefix))
/**
* Constructs a new, synthetic, local ValDef Type `tpe`, a unique name,
* Position `pos`, an empty implementation (no rhs), and owned by `owner`.
*/
def freshValDef(tpe: Type, pos: Position, owner: Symbol): ValDef = {
val SYNTHETIC = (1 << 21).toLong.asInstanceOf[FlagSet]
val sym = owner.newTermSymbol(freshTermName("q"), pos, SYNTHETIC)
setInfo(sym, tpe)
val vd = internal.valDef(sym, EmptyTree)
vd.setPos(pos)
vd
}
lazy val parameterModifiers = Modifiers(Flag.PARAM)
/**
* Collects all definitions in the tree for use in checkReferences.
* This excludes definitions in wrapped expressions because checkReferences won't allow nested dereferencing anyway.
*/
def collectDefs(
tree: Tree,
isWrapper: (String, Type, Tree) => Boolean
): collection.Set[Symbol] = {
val defs = new collection.mutable.HashSet[Symbol]
// adds the symbols for all non-Ident subtrees to `defs`.
val process = new Traverser {
override def traverse(t: Tree) = t match {
case _: Ident => ()
case ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil)
if isWrapper(nme.decodedName.toString, tpe.tpe, qual) =>
()
case tree =>
if (tree.symbol ne null) {
defs += tree.symbol
def collectDefs(tree: Term, isWrapper: (String, TypeRepr, Term) => Boolean): Set[Symbol] =
val defs = mutable.HashSet[Symbol]()
object traverser extends TreeTraverser:
override def traverseTree(tree: Tree)(owner: Symbol): Unit =
tree match
case Ident(_) => ()
case Apply(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil)
if isWrapper(nme, tpe.tpe, qual) =>
()
}
super.traverse(tree)
}
}
process.traverse(tree)
defs
}
/**
* A reference is illegal if it is to an M instance defined within the scope of the macro call.
* As an approximation, disallow referenced to any local definitions `defs`.
*/
def illegalReference(defs: collection.Set[Symbol], sym: Symbol, mType: Type): Boolean =
sym != null && sym != NoSymbol && defs.contains(sym) && {
sym match {
case m: MethodSymbol => m.returnType.erasure <:< mType
case _ => sym.typeSignature <:< mType
}
}
/**
* A reference is illegal if it is to an M instance defined within the scope of the macro call.
* As an approximation, disallow referenced to any local definitions `defs`.
*/
def illegalReference(defs: collection.Set[Symbol], sym: Symbol): Boolean =
illegalReference(defs, sym, weakTypeOf[Any])
// Predicate identifying a wrapper call: (decoded method name, type argument, qualifier) => Boolean
type PropertyChecker = (String, Type, Tree) => Boolean

/**
 * A function that checks the provided tree for illegal references to M instances defined in the
 * expression passed to the macro and for illegal dereferencing of M instances.
 */
def checkReferences(
    defs: collection.Set[Symbol],
    isWrapper: PropertyChecker,
    mType: Type
): Tree => Unit = {
  // a wrapper call `f[T](qual)` nested inside the macro argument is a dynamic dependency
  case s @ ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
    if (isWrapper(nme.decodedName.toString, tpe.tpe, qual)) {
      ctx.error(s.pos, DynamicDependencyError)
    }
  // an identifier may not refer to an M instance defined inside the macro argument
  case id @ Ident(name) if illegalReference(defs, id.symbol, mType) =>
    ctx.error(id.pos, DynamicReferenceError + ": " + name)
  case _ => ()
}
/**
 * A function that checks the provided tree for illegal references to M instances defined in the
 * expression passed to the macro and for illegal dereferencing of M instances.
 *
 * Delegates to the three-argument overload using `Any` as the excluded M instance type.
 */
// Fix: the scaladoc must precede the annotation for doc tools to attach it to the definition;
// also corrects the message typo "Use that variant" -> "Use the variant".
@deprecated("Use the variant that specifies the M instance types to exclude", since = "1.3.0")
def checkReferences(defs: collection.Set[Symbol], isWrapper: PropertyChecker): Tree => Unit =
  checkReferences(defs, isWrapper, weakTypeOf[Any])
/** Constructs a ValDef with a parameter modifier, a unique name, with the provided Type and with an empty rhs. */
def freshMethodParameter(tpe: Type): ValDef =
  // EmptyTree rhs: method parameters carry no initializer
  ValDef(parameterModifiers, freshTermName("p"), TypeTree(tpe), EmptyTree)
/** Constructs a ValDef with local modifiers and a unique name, with the given type tree and right-hand side. */
def localValDef(tpt: Tree, rhs: Tree): ValDef =
  ValDef(localModifiers, freshTermName("q"), tpt, rhs)
/** Constructs a tuple value of the right TupleN type from the provided inputs. */
def mkTuple(args: List[Tree]): Tree =
  // delegate to the compiler's tree generator; the casts bridge the macro universe and Global
  global.gen.mkTuple(args.asInstanceOf[List[global.Tree]]).asInstanceOf[ctx.universe.Tree]
/** Assigns `sym` as the symbol of tree `t`, crossing from the macro universe into the compiler's Global. */
def setSymbol[_Tree](t: _Tree, sym: Symbol): Unit = {
  val compilerTree = t.asInstanceOf[global.Tree]
  compilerTree.setSymbol(sym.asInstanceOf[global.Symbol])
  ()
}
/** Assigns type `tpe` as the info of symbol `sym`, crossing from the macro universe into the compiler's Global. */
def setInfo(sym: Symbol, tpe: Type): Unit = {
  val compilerSym = sym.asInstanceOf[global.Symbol]
  compilerSym.setInfo(tpe.asInstanceOf[global.Type])
  ()
}
/** Creates a new, synthetic type variable with the specified `owner`; `prefix` seeds its name. */
def newTypeVariable(owner: Symbol, prefix: String = "T0"): TypeSymbol = {
  val compilerOwner = owner.asInstanceOf[global.Symbol]
  val param = compilerOwner.newSyntheticTypeParam(prefix, 0L)
  param.asInstanceOf[ctx.universe.TypeSymbol]
}
/** The type representing the type constructor `[X] X` */
lazy val idTC: Type = {
  val tvar = newTypeVariable(NoSymbol)
  // a poly type over one fresh variable whose body is the variable itself: [X] X
  internal.polyType(tvar :: Nil, refVar(tvar))
}
/** A Type that references the given type variable (as its type constructor). */
def refVar(variable: TypeSymbol): Type = variable.toTypeConstructor
/**
 * Constructs a new, synthetic type variable that is a type constructor.
 * For example, in type Y[L[x]], L is such a type variable.
 */
def newTCVariable(owner: Symbol): TypeSymbol = {
  val constructor = newTypeVariable(owner)
  val argument = newTypeVariable(constructor, "x")
  // give the constructor one unconstrained type parameter so it has kind * -> *
  constructor.setInfo(internal.polyType(argument :: Nil, emptyTypeBounds))
  constructor
}
/** The unconstrained type bounds `>: Nothing <: Any`. */
def emptyTypeBounds: TypeBounds =
  internal.typeBounds(definitions.NothingClass.toType, definitions.AnyClass.toType)
/** Creates a new anonymous function symbol with Position `pos`. */
def functionSymbol(pos: Position): Symbol =
  // owned by the macro call site's context owner; casts bridge to the compiler's Global
  callsiteTyper.context.owner
    .newAnonymousFunctionValue(pos.asInstanceOf[global.Position])
    .asInstanceOf[ctx.universe.Symbol]
/** Builds the FunctionN type `(args...) => result` via the compiler's definitions. */
def functionType(args: List[Type], result: Type): Type = {
  val compilerArgs = args.asInstanceOf[List[global.Type]]
  val compilerResult = result.asInstanceOf[global.Type]
  global.definitions.functionType(compilerArgs, compilerResult).asInstanceOf[Type]
}
/** Create a Tree that references the `val` represented by `vd`, copying attributes from `replaced`. */
def refVal(replaced: Tree, vd: ValDef): Tree =
  // treeCopy preserves the original tree's position/attributes; the symbol is taken from the val
  treeCopy.Ident(replaced, vd.name).setSymbol(vd.symbol)
/** Creates a Function tree using `functionSym` as the Symbol and changing `initialOwner` to `functionSym` in `body`. */
def createFunction(params: List[ValDef], body: Tree, functionSym: Symbol): Tree = {
  // re-own the body first so symbols defined inside it belong to the new function
  changeOwner(body, initialOwner, functionSym)
  val f = Function(params, body)
  setSymbol(f, functionSym)
  f
}
/** Rewrites ownership in `tree`: every symbol owned by `prev` becomes owned by `next`. */
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit =
  new ChangeOwnerAndModuleClassTraverser(
    prev.asInstanceOf[global.Symbol],
    next.asInstanceOf[global.Symbol]
  ).traverse(tree.asInstanceOf[global.Tree])
// Workaround copied from scala/async: can be removed once https://github.com/scala/scala/pull/3179 is merged.
// In addition to the standard owner change, also re-owns the module class of definition trees.
private[this] class ChangeOwnerAndModuleClassTraverser(
    oldowner: global.Symbol,
    newowner: global.Symbol
) extends global.ChangeOwnerTraverser(oldowner, newowner) {
  override def traverse(tree: global.Tree): Unit = {
    tree match {
      case _: global.DefTree => change(tree.symbol.moduleClass)
      case _                 =>
    }
    super.traverse(tree)
  }
}
/** Returns the Symbol that references the statically accessible singleton `i`. */
def singleton[T <: AnyRef with Singleton](i: T)(implicit it: ctx.TypeTag[i.type]): Symbol =
  it.tpe match {
    // static + not a free term means the singleton is reachable from generated code
    case SingleType(_, sym) if !sym.isFreeTerm && sym.isStatic => sym
    case x => sys.error("Instance must be static (was " + x + ").")
  }
/** Selects the term member `name` on tree `t`. */
def select(t: Tree, name: String): Tree = Select(t, TermName(name))
/** Returns the symbol for the non-private method named `name` for the class/module `obj`. */
def method(obj: Symbol, name: String): Symbol = {
  val ts: Type = obj.typeSignature
  // member lookup happens in the compiler's Global; cast the result back to the macro universe
  val m: global.Symbol = ts.asInstanceOf[global.Type].nonPrivateMember(global.newTermName(name))
  m.asInstanceOf[Symbol]
}
/**
 * Returns a Type representing the type constructor tcp.<name>. For example, given
 * `object Demo { type M[x] = List[x] }`, the call `extractTC(Demo, "M")` will return a type representing
 * the type constructor `[x] List[x]`.
 */
def extractTC(tcp: AnyRef with Singleton, name: String)(
    implicit it: ctx.TypeTag[tcp.type]
): ctx.Type = {
  val itTpe = it.tpe.asInstanceOf[global.Type]
  val m = itTpe.nonPrivateMember(global.newTypeName(name))
  // memberInfo resolves the alias as seen from the singleton's type
  val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type]
  assert(tc != NoType && tc.takesTypeArgs, "Invalid type constructor: " + tc)
  tc
}
/**
 * Substitutes wrappers in tree `t` with the result of `subWrapper`.
 * A wrapper is a Tree of the form `f[T](v)` for which isWrapper(<Tree of f>, <Underlying Type>, <qual>.target) returns true.
 * Typically, `f` is a `Select` or `Ident`.
 * The wrapper is replaced with the result of `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
 */
def transformWrappers(
    t: Tree,
    subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]
): Tree = {
  // the main tree transformer that replaces calls to InputWrapper.wrap(x) with
  // plain Idents that reference the actual input value
  object appTransformer extends Transformer {
    override def transform(tree: Tree): Tree =
      tree match {
        case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) =>
          subWrapper(nme.decodedName.toString, targ.tpe, qual, tree) match {
            case Converted.Success(t, finalTx) =>
              changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150
              finalTx(t)
            case Converted.Failure(p, m) => ctx.abort(p, m)
            // not a recognized wrapper: keep transforming its subtrees
            case _: Converted.NotApplicable[_] => super.transform(tree)
          }
        case _ => super.transform(tree)
      }
  }
  // run with the macro expansion's initial owner as the current owner
  appTransformer.atOwner(initialOwner) {
    appTransformer.transform(t)
  }
}
}
case _ =>
if tree.symbol ne null then defs += tree.symbol
super.traverseTree(tree)(owner)
end traverser
traverser.traverseTree(tree)(Symbol.spliceOwner)
defs.toSet
end ContextUtil

View File

@ -5,47 +5,84 @@
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
package sbt.internal.util.appmacro
import scala.reflect._
import macros._
import Types.idFun
import sbt.internal.util.Types
import scala.quoted.*
/** Scala 2 macro helper: converts a recognized wrapper call `nme(in)` into a `Converted` outcome. */
abstract class Convert {
  def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type]

  /** Views this Convert as a predicate: does the (name, type, tree) triple convert successfully? */
  def asPredicate(c: blackbox.Context): (String, c.Type, c.Tree) => Boolean =
    (n, tpe, tree) => {
      val tag = c.WeakTypeTag(tpe)
      apply(c)(n, tree)(tag).isSuccess
    }
}
/** Result of attempting to convert a wrapper call: Success, Failure, or NotApplicable. */
sealed trait Converted[C <: blackbox.Context with Singleton] {
  def isSuccess: Boolean
  def transform(f: C#Tree => C#Tree): Converted[C]
}
object Converted {
  def NotApplicable[C <: blackbox.Context with Singleton] = new NotApplicable[C]

  /** Conversion failed at `position` with `message`; `transform` keeps the failure. */
  final case class Failure[C <: blackbox.Context with Singleton](
      position: C#Position,
      message: String
  ) extends Converted[C] {
    def isSuccess = false
    def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message)
  }

  /** The name/tree did not match this Convert; `transform` is a no-op. */
  final class NotApplicable[C <: blackbox.Context with Singleton] extends Converted[C] {
    def isSuccess = false
    def transform(f: C#Tree => C#Tree): Converted[C] = this
  }

  /** Successful conversion: `tree` plus a final transformation applied after substitution. */
  final case class Success[C <: blackbox.Context with Singleton](
      tree: C#Tree,
      finalTransform: C#Tree => C#Tree
  ) extends Converted[C] {
    def isSuccess = true
    def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform)
  }
  object Success {
    // default final transform is the identity
    def apply[C <: blackbox.Context with Singleton](tree: C#Tree): Success[C] =
      Success(tree, idFun)
  }
}
/**
 * Convert is a glorified partial function to scan through the AST for the purpose of substituting
 * the matching term with something else.
 *
 * This is driven by calling transformWrappers(...) method. The filtering is limited to the shape of
 * code matched using `appTransformer`, which is a generic function with a single type param and a
 * single term param like `X.wrapInit[A](...)`.
 */
trait Convert[C <: Quotes & Singleton](override val qctx: C) extends ContextUtil[C]:
  import qctx.reflect.*

  /** Attempts to convert the wrapper call `nme(in)`; implemented by concrete Converts. */
  def convert[A: Type](nme: String, in: Term): Converted

  /** Views this Convert as a predicate: does the (name, type, term) triple convert successfully? */
  def asPredicate: (String, TypeRepr, Term) => Boolean =
    (n: String, tpe: TypeRepr, tree: Term) =>
      tpe.asType match
        case '[a] =>
          // NOTE(review): the context-bound Type instance is applied explicitly here — confirm this
          // compiles without a `using` clause on this Scala 3 version
          convert[a](n, tree)(Type.of[a]).isSuccess

  /**
   * Substitutes wrappers in tree `t` with the result of `subWrapper`. A wrapper is a Tree of the
   * form `f[T](v)` for which isWrapper(<Tree of f>, <Underlying Type>, <qual>.target) returns true.
   * Typically, `f` is a `Select` or `Ident`. The wrapper is replaced with the result of
   * `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
   */
  def transformWrappers(
      tree: Term,
      subWrapper: [a] => (String, Type[a], Term, Term) => Converted,
      owner: Symbol,
  ): Term =
    // matches `f[T](v)` where `f` is either a selection or a bare identifier
    object ApplySelectOrIdent:
      def unapply(tree: Term): Option[(String, TypeTree, Term)] = tree match
        case Apply(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => Some((nme, targ, qual))
        case Apply(TypeApply(Ident(nme), targ :: Nil), qual :: Nil)    => Some((nme, targ, qual))
        case _                                                         => None
    end ApplySelectOrIdent

    // the main tree transformer that replaces calls to InputWrapper.wrap(x) with
    // plain Idents that reference the actual input value
    object appTransformer extends TreeMap:
      override def transformTerm(tree: Term)(owner: Symbol): Term =
        tree match
          case ApplySelectOrIdent(nme, targ, qual) =>
            val tpe = targ.tpe.asType
            tpe match
              case '[a] =>
                subWrapper[a](nme, tpe.asInstanceOf[Type[a]], qual, tree) match
                  case Converted.Success(tree, finalTransform) =>
                    finalTransform(tree)
                  case Converted.Failure(position, message) =>
                    report.errorAndAbort(message, position)
                  case _ =>
                    // NotApplicable: keep transforming subtrees
                    super.transformTerm(tree)(owner)
          case _ =>
            super.transformTerm(tree)(owner)
    end appTransformer

    appTransformer.transformTerm(tree)(owner)

  object Converted:
    // default final transform is the identity
    def success(tree: Term) = Converted.Success(tree, Types.idFun)

  /** Result of attempting to convert a wrapper call: Success, Failure, or NotApplicable. */
  enum Converted:
    def isSuccess: Boolean = this match
      case Success(_, _) => true
      case _             => false
    def transform(f: Term => Term): Converted = this match
      case Success(tree, finalTransform) => Success(f(tree), finalTransform)
      case x: Failure                    => x
      case x: NotApplicable              => x
    case Success(tree: Term, finalTransform: Term => Term) extends Converted
    case Failure(position: Position, message: String) extends Converted
    case NotApplicable() extends Converted
  end Converted
end Convert

View File

@ -1,230 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import sbt.internal.util.Classes.Applicative
import sbt.internal.util.Types.Id
/**
 * The separate hierarchy from Applicative/Monad is for two reasons.
 *
 * 1. The type constructor is represented as an abstract type because a TypeTag cannot represent a type constructor directly.
 * 2. The applicative interface is uncurried.
 */
trait Instance {
  // the abstract type constructor this instance works over
  type M[x]
  def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z]
  def map[S, T](in: M[S], f: S => T): M[T]
  def pure[T](t: () => T): M[T]
}
/** Extends `Instance` with `flatten`, making the type constructor a monad. */
trait MonadInstance extends Instance {
  def flatten[T](in: M[M[T]]): M[T]
}
import scala.reflect.macros._
object Instance {
  type Aux[M0[_]] = Instance { type M[x] = M0[x] }
  type Aux2[M0[_], N[_]] = Instance { type M[x] = M0[N[x]] }

  // names of the Instance members invoked by the generated code
  final val ApplyName = "app"
  final val FlattenName = "flatten"
  final val PureName = "pure"
  final val MapName = "map"
  final val InstanceTCName = "M"

  /** One macro input: its underlying type, the converted `M[T]` expression, and the local val holding its value. */
  final class Input[U <: Universe with Singleton](
      val tpe: U#Type,
      val expr: U#Tree,
      val local: U#ValDef
  )

  /** A final transformation applied to the macro argument tree after wrapper substitution. */
  trait Transform[C <: blackbox.Context with Singleton, N[_]] {
    def apply(in: C#Tree): C#Tree
  }

  /** The identity `Transform`. */
  def idTransform[C <: blackbox.Context with Singleton]: Transform[C, Id] = in => in

  /**
   * Implementation of a macro that provides a direct syntax for applicative functors and monads.
   * It is intended to be used in conjunction with another macro that conditions the inputs.
   *
   * This method processes the Tree `t` to find inputs of the form `wrap[T]( input )`
   * This form is typically constructed by another macro that pretends to be able to get a value of type `T`
   * from a value convertible to `M[T]`. This `wrap(input)` form has two main purposes.
   * First, it identifies the inputs that should be transformed.
   * Second, it allows the input trees to be wrapped for later conversion into the appropriate `M[T]` type by `convert`.
   * This wrapping is necessary because applying the first macro must preserve the original type,
   * but it is useful to delay conversion until the outer, second macro is called. The `wrap` method accomplishes this by
   * allowing the original `Tree` and `Type` to be hidden behind the raw `T` type. This method will remove the call to `wrap`
   * so that it is not actually called at runtime.
   *
   * Each `input` in each expression of the form `wrap[T]( input )` is transformed by `convert`.
   * This transformation converts the input Tree to a Tree of type `M[T]`.
   * The original wrapped expression `wrap(input)` is replaced by a reference to a new local `val x: T`, where `x` is a fresh name.
   * These converted inputs are passed to `builder` as well as the list of these synthetic `ValDef`s.
   * The `TupleBuilder` instance constructs a tuple (Tree) from the inputs and defines the right hand side of the vals
   * that unpacks the tuple containing the results of the inputs.
   *
   * The constructed tuple of inputs and the code that unpacks the results of the inputs are then passed to the `i`,
   * which is an implementation of `Instance` that is statically accessible.
   * An Instance defines a applicative functor associated with a specific type constructor and, if it implements MonadInstance as well, a monad.
   * Typically, it will be either a top-level module or a stable member of a top-level module (such as a val or a nested module).
   * The `with Singleton` part of the type verifies some cases at macro compilation time,
   * while the full check for static accessibility is done at macro expansion time.
   * Note: Ideally, the types would verify that `i: MonadInstance` when `t.isRight`.
   * With the various dependent types involved, this is not worth it.
   *
   * The `t` argument is the argument of the macro that will be transformed as described above.
   * If the macro that calls this method is for a multi-input map (app followed by map),
   * `t` should be the argument wrapped in Left.
   * If this is for multi-input flatMap (app followed by flatMap),
   * this should be the argument wrapped in Right.
   */
  def contImpl[T, N[_]](
      c: blackbox.Context,
      i: Instance with Singleton,
      convert: Convert,
      builder: TupleBuilder,
      linter: LinterDSL
  )(
      t: Either[c.Expr[T], c.Expr[i.M[T]]],
      inner: Transform[c.type, N]
  )(
      implicit tt: c.WeakTypeTag[T],
      nt: c.WeakTypeTag[N[T]],
      it: c.TypeTag[i.type]
  ): c.Expr[i.M[N[T]]] = {
    import c.universe.{ Apply => ApplyTree, _ }

    val util = ContextUtil[c.type](c)
    val mTC: Type = util.extractTC(i, InstanceTCName)
    val mttpe: Type = appliedType(mTC, nt.tpe :: Nil).dealias

    // the tree for the macro argument
    val (tree, treeType) = t match {
      case Left(l)  => (l.tree, nt.tpe.dealias)
      case Right(r) => (r.tree, mttpe)
    }
    // the Symbol for the anonymous function passed to the appropriate Instance.map/flatMap/pure method
    // this Symbol needs to be known up front so that it can be used as the owner of synthetic vals
    val functionSym = util.functionSymbol(tree.pos)
    val instanceSym = util.singleton(i)
    // A Tree that references the statically accessible Instance that provides the actual implementations of map, flatMap, ...
    val instance = Ident(instanceSym)
    val isWrapper: (String, Type, Tree) => Boolean = convert.asPredicate(c)

    // Local definitions `defs` in the macro. This is used to ensure references are to M instances defined outside of the macro call.
    // Also `refCount` is the number of references, which is used to create the private, synthetic method containing the body
    val defs = util.collectDefs(tree, isWrapper)
    val checkQual: Tree => Unit = util.checkReferences(defs, isWrapper, mttpe.erasure)

    type In = Input[c.universe.type]
    // accumulated (in reverse) as wrappers are substituted
    var inputs = List[In]()

    // transforms the original tree into calls to the Instance functions pure, map, ...,
    // resulting in a value of type M[T]
    def makeApp(body: Tree): Tree =
      inputs match {
        case Nil      => pure(body)
        case x :: Nil => single(body, x)
        case xs       => arbArity(body, xs)
      }

    // no inputs, so construct M[T] via Instance.pure or pure+flatten
    def pure(body: Tree): Tree = {
      val typeApplied = TypeApply(util.select(instance, PureName), TypeTree(treeType) :: Nil)
      val f = util.createFunction(Nil, body, functionSym)
      val p = ApplyTree(typeApplied, f :: Nil)
      if (t.isLeft) p else flatten(p)
    }
    // m should have type M[M[T]]
    // the returned Tree will have type M[T]
    def flatten(m: Tree): Tree = {
      val typedFlatten = TypeApply(util.select(instance, FlattenName), TypeTree(tt.tpe) :: Nil)
      ApplyTree(typedFlatten, m :: Nil)
    }
    // calls Instance.map or flatmap directly, skipping the intermediate Instance.app that is unnecessary for a single input
    def single(body: Tree, input: In): Tree = {
      val variable = input.local
      val param =
        treeCopy.ValDef(variable, util.parameterModifiers, variable.name, variable.tpt, EmptyTree)
      val typeApplied =
        TypeApply(util.select(instance, MapName), variable.tpt :: (TypeTree(treeType): Tree) :: Nil)
      val f = util.createFunction(param :: Nil, body, functionSym)
      val mapped = ApplyTree(typeApplied, input.expr :: f :: Nil)
      if (t.isLeft) mapped else flatten(mapped)
    }
    // calls Instance.app to get the values for all inputs and then calls Instance.map or flatMap to evaluate the body
    def arbArity(body: Tree, inputs: List[In]): Tree = {
      val result = builder.make(c)(mTC, inputs)
      val param = util.freshMethodParameter(appliedType(result.representationC, util.idTC :: Nil))
      val bindings = result.extract(param)
      val f = util.createFunction(param :: Nil, Block(bindings, body), functionSym)
      val ttt = TypeTree(treeType)
      val typedApp =
        TypeApply(util.select(instance, ApplyName), TypeTree(result.representationC) :: ttt :: Nil)
      val app =
        ApplyTree(ApplyTree(typedApp, result.input :: f :: Nil), result.alistInstance :: Nil)
      if (t.isLeft) app else flatten(app)
    }

    // Called when transforming the tree to add an input.
    // For `qual` of type M[A], and a `selection` qual.value,
    // the call is addType(Type A, Tree qual)
    // The result is a Tree representing a reference to
    // the bound value of the input.
    def addType(tpe: Type, qual: Tree, selection: Tree): Tree = {
      qual.foreach(checkQual)
      val vd = util.freshValDef(tpe, qual.pos, functionSym)
      inputs ::= new Input(tpe, qual, vd)
      util.refVal(selection, vd)
    }
    def sub(name: String, tpe: Type, qual: Tree, replace: Tree): Converted[c.type] = {
      val tag = c.WeakTypeTag[T](tpe)
      convert[T](c)(name, qual)(tag) transform { tree =>
        addType(tpe, tree, replace)
      }
    }

    // applies the transformation
    linter.runLinter(c)(tree)
    val tx = util.transformWrappers(tree, (n, tpe, t, replace) => sub(n, tpe, t, replace))
    // resetting attributes must be: a) local b) done here and not wider or else there are obscure errors
    val tr = makeApp(inner(tx))
    val noWarn = q"""($tr: @_root_.scala.annotation.nowarn("cat=other-pure-statement"))"""
    c.Expr[i.M[N[T]]](noWarn)
  }

  import Types._

  /** Any `Applicative` functor yields an `Instance`. */
  implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance.Aux[A] =
    new Instance {
      type M[x] = A[x]
      def app[K[L[x]], Z](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A, Z](in, f)
      def map[S, T](in: A[S], f: S => T) = ap.map(f, in)
      def pure[S](s: () => S): M[S] = ap.pure(s())
    }

  /** The composition of two applicative `Instance`s is itself applicative. */
  def compose[A[_], B[_]](implicit a: Aux[A], b: Aux[B]): Instance.Aux2[A, B] =
    new Composed[A, B](a, b)

  // made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object
  class Composed[A[_], B[_]](a: Aux[A], b: Aux[B]) extends Instance {
    type M[x] = A[B[x]]
    def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s))
    def map[S, T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f))
    def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit alist: AList[K]): A[B[Z]] = {
      val g: K[B] => B[Z] = in => b.app[K, Z](in, f)
      a.app[AList.SplitK[K, B]#l, B[Z]](in, g)(AList.asplit(alist))
    }
  }
}

View File

@ -1,85 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import scala.reflect._
import macros._
/** A `TupleBuilder` that uses a KList as the tuple representation. */
object KListBuilder extends TupleBuilder {
  def make(
      c: blackbox.Context
  )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
    new BuilderResult[c.type] {
      val ctx: c.type = c
      val util = ContextUtil[c.type](c)
      import c.universe.{ Apply => ApplyTree, _ }
      import util._

      // building blocks: the KNil terminator and the KCons constructor
      val knilType = c.typeOf[KNil]
      val knil = Ident(knilType.typeSymbol.companion)
      val kconsTpe = c.typeOf[KCons[Int, KNil, List]]
      val kcons = kconsTpe.typeSymbol.companion
      val mTC: Type = mt.asInstanceOf[c.universe.Type]
      val kconsTC: Type = kconsTpe.typeConstructor

      /** This is the L in the type function [L[x]] ... */
      val tcVariable: TypeSymbol = newTCVariable(util.initialOwner)

      /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */
      def kconsType(h: Type, t: Type): Type =
        appliedType(kconsTC, h :: t :: refVar(tcVariable) :: Nil)

      // generates the vals that unpack the KList via successive `.head`/`.tail` selections
      def bindKList(prev: ValDef, revBindings: List[ValDef], params: List[ValDef]): List[ValDef] =
        params match {
          case (x @ ValDef(mods, name, tpt, _)) :: xs =>
            val rhs = select(Ident(prev.name), "head")
            val head = treeCopy.ValDef(x, mods, name, tpt, rhs)
            util.setSymbol(head, x.symbol)
            val tail = localValDef(TypeTree(), select(Ident(prev.name), "tail"))
            val base = head :: revBindings
            // the last element does not need a `tail` binding
            bindKList(tail, if (xs.isEmpty) base else tail :: base, xs)
          case Nil => revBindings.reverse
        }

      // folds the (reversed) inputs into nested KCons applications ending in KNil
      private[this] def makeKList(
          revInputs: Inputs[c.universe.type],
          klist: Tree,
          klistType: Type
      ): Tree =
        revInputs match {
          case in :: tail =>
            val next = ApplyTree(
              TypeApply(
                Ident(kcons),
                TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil
              ),
              in.expr :: klist :: Nil
            )
            makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil))
          case Nil => klist
        }

      /** The input trees combined in a KList */
      val klist = makeKList(inputs.reverse, knil, knilType)

      /**
       * The input types combined in a KList type. The main concern is tracking the heterogeneous types.
       * The type constructor is tcVariable, so that it can be applied to [X] X or M later.
       * When applied to `M`, this type gives the type of the `input` KList.
       */
      val klistType: Type = inputs.foldRight(knilType)((in, klist) => kconsType(in.tpe, klist))

      val representationC = internal.polyType(tcVariable :: Nil, klistType)
      val input = klist
      val alistInstance: ctx.universe.Tree =
        TypeApply(select(Ident(alist), "klist"), TypeTree(representationC) :: Nil)
      def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local))
    }
}

View File

@ -1,27 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import scala.reflect._
import macros._
/**
 * A builder that uses `TupleN` as the representation for small numbers of inputs (up to `TupleNBuilder.MaxInputs`)
 * and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.
 */
object MixedBuilder extends TupleBuilder {
  def make(
      c: blackbox.Context
  )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
    // pick the delegate by arity: TupleN when it fits, KList otherwise
    val delegate =
      if (inputs.size > TupleNBuilder.MaxInputs) (KListBuilder: TupleBuilder)
      else (TupleNBuilder: TupleBuilder)
    delegate.make(c)(mt, inputs)
  }
}

View File

@ -7,21 +7,23 @@
package sbt.internal.util.appmacro
import scala.reflect.macros.blackbox
final class StringTypeTag[A](val key: String):
override def toString(): String = key
override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match {
case x: StringTypeTag[_] => (this.key == x.key)
case _ => false
})
override def hashCode: Int = key.##
end StringTypeTag
object StringTypeTag {
def impl[A: c.WeakTypeTag](c: blackbox.Context): c.Tree = {
import c.universe._
val tpe = weakTypeOf[A]
def typeToString(tpe: Type): String = tpe match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val typeCon = tpe.typeSymbol.fullName
val typeArgs = args map typeToString
s"""$typeCon[${typeArgs.mkString(",")}]"""
case _ => tpe.toString
}
object StringTypeTag:
inline given apply[A]: StringTypeTag[A] = ${ applyImpl[A] }
val key = Literal(Constant(typeToString(tpe)))
q"new sbt.internal.util.StringTypeTag[$tpe]($key)"
}
}
def manually[A](key: String): StringTypeTag[A] = new StringTypeTag(key)
import scala.quoted.*
private def applyImpl[A: Type](using qctx: Quotes): Expr[StringTypeTag[A]] =
import qctx.reflect._
val tpe = TypeRepr.of[A]
'{ new StringTypeTag[A](${ Expr(tpe.dealias.show) }) }
end StringTypeTag

View File

@ -1,64 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import scala.reflect._
import macros._
/**
 * A `TupleBuilder` abstracts the work of constructing a tuple data structure such as a `TupleN` or `KList`
 * and extracting values from it. The `Instance` macro implementation will (roughly) traverse the tree of its argument
 * and ultimately obtain a list of expressions with type `M[T]` for different types `T`.
 * The macro constructs an `Input` value for each of these expressions that contains the `Type` for `T`,
 * the `Tree` for the expression, and a `ValDef` that will hold the value for the input.
 *
 * `TupleBuilder.apply` is provided with the list of `Input`s and is expected to provide three values in the returned BuilderResult.
 * First, it returns the constructed tuple data structure Tree in `input`.
 * Next, it provides the type constructor `representationC` that, when applied to M, gives the type of tuple data structure.
 * For example, a builder that constructs a `Tuple3` for inputs `M[Int]`, `M[Boolean]`, and `M[String]`
 * would provide a Type representing `[L[x]] (L[Int], L[Boolean], L[String])`. The `input` method
 * would return a value whose type is that type constructor applied to M, or `(M[Int], M[Boolean], M[String])`.
 *
 * Finally, the `extract` method provides a list of vals that extract information from the applied input.
 * The type of the applied input is the type constructor applied to `Id` (`[X] X`).
 * The returned list of ValDefs should be the ValDefs from `inputs`, but with non-empty right-hand sides.
 */
trait TupleBuilder {

  /** A convenience alias for a list of inputs (associated with a Universe of type U). */
  type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]]

  /** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */
  def make(
      c: blackbox.Context
  )(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type]
}
/** The output of a `TupleBuilder.make` call: the tuple tree, its type constructor, and the unpacking vals. */
trait BuilderResult[C <: blackbox.Context with Singleton] {
  val ctx: C
  import ctx.universe._

  /**
   * Represents the higher-order type constructor `[L[x]] ...` where `...` is the
   * type of the data structure containing the added expressions,
   * except that it is abstracted over the type constructor applied to each heterogeneous part of the type .
   */
  def representationC: PolyType

  /** The instance of AList for the input. For a `representationC` of `[L[x]]`, this `Tree` should have a `Type` of `AList[L]` */
  def alistInstance: Tree

  /** Returns the completed value containing all expressions added to the builder. */
  def input: Tree

  /* The list of definitions that extract values from a value of type `$representationC[Id]`.
   * The returned value should be identical to the `ValDef`s provided to the `TupleBuilder.make` method but with
   * non-empty right hand sides. Each `ValDef` may refer to `param` and previous `ValDef`s in the list. */
  def extract(param: ValDef): List[ValDef]
}

View File

@ -1,67 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import scala.tools.nsc.Global
import scala.reflect._
import macros._
/**
 * A builder that uses a TupleN as the tuple representation.
 * It is limited to tuples of size 2 to `MaxInputs`.
 */
object TupleNBuilder extends TupleBuilder {

  /** The largest number of inputs that this builder can handle. */
  final val MaxInputs = 11
  final val TupleMethodName = "tuple"

  def make(
      c: blackbox.Context
  )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
    new BuilderResult[c.type] {
      val util = ContextUtil[c.type](c)
      import c.universe._
      import util._
      val global: Global = c.universe.asInstanceOf[Global]
      val ctx: c.type = c

      // `[L[x]] (L[T1], ..., L[Tn])` over a fresh type-constructor variable
      val representationC: PolyType = {
        val tcVariable: Symbol = newTCVariable(util.initialOwner)
        val tupleTypeArgs = inputs.map(
          in => internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]
        )
        val tuple = global.definitions.tupleType(tupleTypeArgs)
        internal.polyType(tcVariable :: Nil, tuple.asInstanceOf[Type])
      }

      val input: Tree = mkTuple(inputs.map(_.expr))
      // selects AList.tupleN for the matching arity
      val alistInstance: Tree = {
        val selectTree = select(Ident(alist), TupleMethodName + inputs.size.toString)
        TypeApply(selectTree, inputs.map(in => TypeTree(in.tpe)))
      }
      def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1)

      // generates the vals that unpack the tuple via `._1`, `._2`, ... selections
      def bindTuple(
          param: ValDef,
          revBindings: List[ValDef],
          params: List[ValDef],
          i: Int
      ): List[ValDef] =
        params match {
          case (x @ ValDef(mods, name, tpt, _)) :: xs =>
            val rhs = select(Ident(param.name), "_" + i.toString)
            val newVal = treeCopy.ValDef(x, mods, name, tpt, rhs)
            util.setSymbol(newVal, x.symbol)
            bindTuple(param, newVal :: revBindings, xs, i + 1)
          case Nil => revBindings.reverse
        }
    }
}

View File

@ -0,0 +1,42 @@
package sbt.internal
import sbt.internal.util.appmacro.*
import verify.*
import ContTestMacro.*
import sbt.util.Applicative
/** Tests for the Scala 3 `Cont` macro: maps N wrapped inputs over an Applicative. */
object ContTest extends BasicTestSuite:
  test("pure") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
    // no wrapped inputs: the macro should lift via pure
    val actual = contMapNMacro[List, Int](12)
    assert(actual == List(12))
  }

  test("getMap") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
    // one wrapped input: the macro should use map
    val actual = contMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2)
    assert(actual == List(3))
  }

  test("getMapN") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
    // two wrapped inputs: the macro should combine them applicatively
    val actual = contMapNMacro[List, Int](
      ContTest.wrapInit(List(1))
        + ContTest.wrapInit(List(2)) + 3
    )
    assert(actual == List(6))
  }

  test("getMapN2") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
    // wrapped inputs bound to local vals should also be detected
    val actual = contMapNMacro[List, Int]({
      val x = ContTest.wrapInit(List(1))
      val y = ContTest.wrapInit(List(2))
      x + y + 3
    })
    assert(actual == List(6))
  }

  // This compiles away
  def wrapInit[A](a: List[A]): A = ???
end ContTest

View File

@ -0,0 +1,21 @@
package sbt.internal
import sbt.internal.util.Types.Id
import sbt.internal.util.appmacro.*
import sbt.util.Applicative
import scala.quoted.*
import ConvertTestMacro.InputInitConvert
/** Test macro entry point exercising `Cont.contMapN` with the `InputInitConvert` converter. */
object ContTestMacro:
  inline def contMapNMacro[F[_]: Applicative, A](inline expr: A): List[A] =
    ${ contMapNMacroImpl[F, A]('expr) }

  def contMapNMacroImpl[F[_]: Type, A: Type](expr: Expr[A])(using
      qctx: Quotes
  ): Expr[List[A]] =
    // instantiate the Cont mixin locally to get access to contMapN
    object ContSyntax extends Cont
    import ContSyntax.*
    val convert1: Convert[qctx.type] = new InputInitConvert(qctx)
    convert1.contMapN[A, List, Id](expr, convert1.summonAppExpr[List], convert1.idTransform)
end ContTestMacro

View File

@ -0,0 +1,15 @@
package sbt.internal
import sbt.internal.util.appmacro.*
import verify.*
import ConvertTestMacro._
/**
 * Tests for `ConvertTestMacro.someMacro`, which rewrites `wrapInit(...)` calls
 * in its argument. NOTE(review): the macro operates on the argument's syntax
 * tree, so the expression shapes below must not be refactored.
 */
object ConvertTest extends BasicTestSuite:
  test("convert") {
    // assert(someMacro(ConvertTest.wrapInit(1) == 2))
    // The macro wraps the converted selection in Option(...) (see addTypeCon
    // in ConvertTestMacro), so `.toString` yields "Some(2)".
    assert(someMacro(ConvertTest.wrapInit(1).toString == "Some(2)"))
  }

  // Marker methods recognised *by name* inside the macro; their bodies are
  // placeholders and the constant 2 is what the rewritten tree produces.
  def wrapInitTask[A](a: A): Int = 2
  def wrapInit[A](a: A): Int = 2
end ConvertTest

View File

@ -0,0 +1,43 @@
package sbt.internal
import sbt.internal.util.appmacro.*
import scala.quoted.*
/** Macro used by ConvertTest: rewrites `wrapInit` marker calls inside a Boolean expression. */
object ConvertTestMacro:
  // Names of the marker methods the convert step matches on.
  final val WrapInitName = "wrapInit"
  final val WrapInitTaskName = "wrapInitTask"

  inline def someMacro(inline expr: Boolean): Boolean =
    ${ someMacroImpl('expr) }

  def someMacroImpl(expr: Expr[Boolean])(using qctx: Quotes) =
    val convert1: Convert[qctx.type] = new InputInitConvert(qctx)
    import convert1.qctx.reflect.*
    // Wraps the converted selection in Option(...): this is why ConvertTest
    // observes "Some(2)" rather than 2.
    def addTypeCon[A](tpe: Type[A], qual: Term, selection: Term): Term =
      tpe match
        case '[a] =>
          '{
            Option[a](${ selection.asExprOf[a] })
          }.asTerm
    // Polymorphic substitution handed to transformWrappers: converts each
    // recognised wrapper call and splices the Option-wrapped replacement.
    val substitute = [a] =>
      (name: String, tpe: Type[a], qual: Term, replace: Term) =>
        convert1.convert[Boolean](name, qual) transform { (tree: Term) =>
          addTypeCon(tpe, tree, replace)
        }
    convert1.transformWrappers(expr.asTerm, substitute, Symbol.spliceOwner).asExprOf[Boolean]

  /** Convert instance that accepts `wrapInit` and rejects `wrapInitTask`. */
  class InputInitConvert[C <: Quotes & scala.Singleton](override val qctx: C)
      extends Convert[C](qctx)
      with ContextUtil[C](qctx, 0):
    // with TupleBuilder[C](qctx)
    // with TupleNBuilder[C](qctx):
    import qctx.reflect.*
    // Dispatches purely on the wrapper method's name.
    def convert[A: Type](nme: String, in: Term): Converted =
      nme match
        case WrapInitName     => Converted.success(in)
        case WrapInitTaskName => Converted.Failure(in.pos, initTaskErrorMessage)
        case _                => Converted.NotApplicable()
    private def initTaskErrorMessage = "Internal sbt error: initialize+task wrapper not split"
  end InputInitConvert
end ConvertTestMacro

View File

@ -0,0 +1,18 @@
package sbt.internal
import sbt.internal.util.appmacro.*
import verify.*
/** Verifies that StringTypeTag renders fully-qualified type names. */
object StringTypeTagTest extends BasicTestSuite:
  test("String") {
    val rendered = StringTypeTag[String].toString
    assert(rendered == "java.lang.String")
  }

  test("Int") {
    val rendered = StringTypeTag[Int].toString
    assert(rendered == "scala.Int")
  }

  // Type arguments are rendered fully-qualified as well.
  test("List[Int]") {
    val rendered = StringTypeTag[List[Int]].toString
    assert(rendered == "scala.collection.immutable.List[scala.Int]")
  }
end StringTypeTagTest

View File

@ -1,389 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
import Classes.Applicative
import Types._
/**
 * An abstraction over a higher-order type constructor `K[x[y]]` with the purpose of abstracting
 * over heterogeneous sequences like `KList` and `TupleN` with elements with a common type
 * constructor as well as homogeneous sequences `Seq[M[T]]`.
 *
 * NOTE(review): the `∙` type-composition operator (from TypeFunctions) was dropped by the
 * text rendering in this signature; restored here.
 */
trait AList[K[L[x]]] {
  /** Applies the natural transformation `f` to every element of `value`. */
  def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N]

  /** Maps each element through `f` and sequences the `N` effects applicatively. */
  def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(
      implicit np: Applicative[N]
  ): N[K[P]]

  /** Right fold over the (heterogeneous) elements, with their types erased to `M[_]`. */
  def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A

  /** Collects the elements into an ordinary List, erasing element types. */
  def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)

  /** Sequences the `M` effects and applies `f` to the resulting pure values. */
  def apply[M[_], C](value: K[M], f: K[Id] => C)(implicit a: Applicative[M]): M[C] =
    a.map(f, traverse[M, M, Id](value, idK[M])(a))
}
object AList {
  type Empty = AList[ConstK[Unit]#l]

  /** AList for Unit, which represents a sequence that is always empty. */
  val empty: Empty = new Empty {
    def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
    def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init
    override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] =
      app.pure(f(()))
    def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[Unit] = np.pure(())
  }

  type SeqList[T] = AList[λ[L[x] => List[L[T]]]]

  /** AList for a homogeneous sequence. */
  def seq[T]: SeqList[T] = new SeqList[T] {
    def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
    def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A =
      s.reverse.foldLeft(init)((t, m) => f(m, t))
    override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(
        implicit ap: Applicative[M]
    ): M[C] = {
      // Sequences the effects right-to-left via curried application.
      def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
        in match {
          case Nil => ap.pure(g(Nil))
          case x :: xs =>
            val h = (ts: List[T]) => (t: T) => g(t :: ts)
            ap.apply(loop(xs, h), x)
        }
      loop(s, f)
    }
    def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[List[P[T]]] =
      // Was unimplemented (???), which threw NotImplementedError at runtime.
      // Standard applicative traversal: map each element, cons results together.
      s match {
        case Nil => np.pure(List.empty[P[T]])
        case x :: xs =>
          val cons = (h: P[T]) => (t: List[P[T]]) => h :: t
          np.apply(np.map(cons, f(x)), traverse[M, N, P](xs, f)(np))
      }
  }

  /** AList for the arbitrary arity data structure KList. */
  def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] {
    def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
    def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
    override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] =
      k.apply(f)(app)
    def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[KL[P]] = k.traverse[N, P](f)(np)
    override def toList[M[_]](k: KL[M]) = k.toList
  }

  type Single[A] = AList[λ[L[x] => L[A]]]

  /** AList for a single value. */
  def single[A]: Single[A] = new Single[A] {
    def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
    def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
    def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[P[A]] = f(a)
  }

  /** Example: calling `AList.SplitK[K, Task]#l` returns the type lambda `A[x] => K[A[Task[x]]]`. */
  sealed trait SplitK[K[L[x]], B[x]] { type l[A[x]] = K[(A ∙ B)#l] }
  type ASplit[K[L[x]], B[x]] = AList[SplitK[K, B]#l]

  /** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`. */
  def asplit[K[L[x]], B[x]](base: AList[K]): ASplit[K, B] = new ASplit[K, B] {
    type Split[L[x]] = K[(L ∙ B)#l]
    def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
      base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f))
    def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[Split[P]] = {
      val g = nestCon[M, (N ∙ P)#l, B](f)
      base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np)
    }
    def foldr[M[_], A](value: Split[M], f: (M[_], A) => A, init: A): A =
      base.foldr[(M ∙ B)#l, A](value, f, init)
  }

  // TODO: auto-generate
  sealed trait T2K[A, B] { type l[L[x]] = (L[A], L[B]) }
  type T2List[A, B] = AList[T2K[A, B]#l]

  def tuple2[A, B]: T2List[A, B] = new T2List[A, B] {
    type T2[M[_]] = (M[A], M[B])
    def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
    def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
    def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T2[P]] = {
      val g = (Tuple2.apply[P[A], P[B]] _).curried
      np.apply(np.map(g, f(t._1)), f(t._2))
    }
  }

  sealed trait T3K[A, B, C] { type l[L[x]] = (L[A], L[B], L[C]) }
  type T3List[A, B, C] = AList[T3K[A, B, C]#l]

  def tuple3[A, B, C]: T3List[A, B, C] = new T3List[A, B, C] {
    type T3[M[_]] = (M[A], M[B], M[C])
    def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
    def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
    def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T3[P]] = {
      val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
      np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
    }
  }

  sealed trait T4K[A, B, C, D] { type l[L[x]] = (L[A], L[B], L[C], L[D]) }
  type T4List[A, B, C, D] = AList[T4K[A, B, C, D]#l]

  def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] {
    type T4[M[_]] = (M[A], M[B], M[C], M[D])
    def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
    def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T =
      f(t._1, f(t._2, f(t._3, f(t._4, init))))
    def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T4[P]] = {
      val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
      np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
    }
  }

  sealed trait T5K[A, B, C, D, E] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E]) }
  type T5List[A, B, C, D, E] = AList[T5K[A, B, C, D, E]#l]

  def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] {
    type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
    def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
    def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T =
      f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
    def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T5[P]] = {
      val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
      np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
    }
  }

  sealed trait T6K[A, B, C, D, E, F] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F]) }
  type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l]

  def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] {
    type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
    def transform[M[_], N[_]](t: T6[M], f: M ~> N) =
      (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
    def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T =
      f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
    def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T6[P]] = {
      val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
      np.apply(
        np.apply(
          np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
          f(t._5)
        ),
        f(t._6)
      )
    }
  }

  sealed trait T7K[A, B, C, D, E, F, G] {
    type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G])
  }
  type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l]

  def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] {
    type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
    def transform[M[_], N[_]](t: T7[M], f: M ~> N) =
      (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
    def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T =
      f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
    def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(
        implicit np: Applicative[N]
    ): N[T7[P]] = {
      val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
      np.apply(
        np.apply(
          np.apply(
            np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
            f(t._5)
          ),
          f(t._6)
        ),
        f(t._7)
      )
    }
  }

  sealed trait T8K[A, B, C, D, E, F, G, H] {
    type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H])
  }
  type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]

  def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] =
    new T8List[A, B, C, D, E, F, G, H] {
      type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
      def transform[M[_], N[_]](t: T8[M], f: M ~> N) =
        (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
      def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T =
        f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
      def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(
          implicit np: Applicative[N]
      ): N[T8[P]] = {
        val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
        np.apply(
          np.apply(
            np.apply(
              np.apply(
                np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
                f(t._5)
              ),
              f(t._6)
            ),
            f(t._7)
          ),
          f(t._8)
        )
      }
    }

  sealed trait T9K[A, B, C, D, E, F, G, H, I] {
    type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I])
  }
  type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l]

  def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] =
    new T9List[A, B, C, D, E, F, G, H, I] {
      type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
      def transform[M[_], N[_]](t: T9[M], f: M ~> N) =
        (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
      def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T =
        f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
      def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(
          implicit np: Applicative[N]
      ): N[T9[P]] = {
        val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
        np.apply(
          np.apply(
            np.apply(
              np.apply(
                np.apply(
                  np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
                  f(t._5)
                ),
                f(t._6)
              ),
              f(t._7)
            ),
            f(t._8)
          ),
          f(t._9)
        )
      }
    }

  sealed trait T10K[A, B, C, D, E, F, G, H, I, J] {
    type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J])
  }
  type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l]

  def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] =
    new T10List[A, B, C, D, E, F, G, H, I, J] {
      type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
      def transform[M[_], N[_]](t: T10[M], f: M ~> N) =
        (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
      def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T =
        f(
          t._1,
          f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))
        )
      def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(
          implicit np: Applicative[N]
      ): N[T10[P]] = {
        val g =
          (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
        np.apply(
          np.apply(
            np.apply(
              np.apply(
                np.apply(
                  np.apply(
                    np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
                    f(t._5)
                  ),
                  f(t._6)
                ),
                f(t._7)
              ),
              f(t._8)
            ),
            f(t._9)
          ),
          f(t._10)
        )
      }
    }

  sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] {
    type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K])
  }
  type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]

  def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] =
    new T11List[A, B, C, D, E, F, G, H, I, J, K] {
      type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
      def transform[M[_], N[_]](t: T11[M], f: M ~> N) =
        (
          f(t._1),
          f(t._2),
          f(t._3),
          f(t._4),
          f(t._5),
          f(t._6),
          f(t._7),
          f(t._8),
          f(t._9),
          f(t._10),
          f(t._11)
        )
      def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T =
        f(
          t._1,
          f(
            t._2,
            f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))
          )
        )
      def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(
          implicit np: Applicative[N]
      ): N[T11[P]] = {
        val g = (Tuple11
          .apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
        np.apply(
          np.apply(
            np.apply(
              np.apply(
                np.apply(
                  np.apply(
                    np.apply(
                      np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
                      f(t._5)
                    ),
                    f(t._6)
                  ),
                  f(t._7)
                ),
                f(t._8)
              ),
              f(t._9)
            ),
            f(t._10)
          ),
          f(t._11)
        )
      }
    }
}

View File

@ -1,42 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
object Classes {

  /** Minimal applicative functor: lifting (`pure`), mapping, and effectful application. */
  trait Applicative[M[_]] {
    def apply[S, T](f: M[S => T], v: M[S]): M[T]
    def pure[S](s: => S): M[S]
    def map[S, T](f: S => T, v: M[S]): M[T]
  }

  /** Applicative extended with branching on an `Either` result. */
  trait Selective[M[_]] extends Applicative[M] {
    def select[A, B](fab: M[Either[A, B]])(fn: M[A => B]): M[B]
  }

  /** Applicative extended with `flatten`, giving monadic sequencing. */
  trait Monad[M[_]] extends Applicative[M] {
    def flatten[T](m: M[M[T]]): M[T]
  }

  /** Monad instance for Option: application yields Some only when both sides are Some. */
  implicit val optionMonad: Monad[Option] = new Monad[Option] {
    def apply[A, B](f: Option[A => B], v: Option[A]): Option[B] =
      f.flatMap(fn => v.map(fn))
    def pure[A](a: => A): Option[A] = Some(a)
    def map[A, B](fn: A => B, v: Option[A]): Option[B] = v.map(fn)
    def flatten[A](m: Option[Option[A]]): Option[A] = m.flatten
  }

  /** Monad instance for List: application is the cartesian product, functions-major. */
  implicit val listMonad: Monad[List] = new Monad[List] {
    def apply[A, B](f: List[A => B], v: List[A]): List[B] =
      f.flatMap(fn => v.map(fn))
    def pure[A](a: => A): List[A] = a :: Nil
    def map[A, B](fn: A => B, v: List[A]): List[B] = v.map(fn)
    def flatten[A](m: List[List[A]]): List[A] = m.flatten
  }
}

View File

@ -1,129 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
import collection.mutable
/**
 * Read-only view of a heterogeneous map whose key and value agree on a type
 * parameter: a key `K[T]` is associated with a value `V[T]` for the same `T`.
 */
trait RMap[K[_], V[_]] {
  /** Looks up the value for `k`. Behavior when absent is implementation-defined. */
  def apply[T](k: K[T]): V[T]
  /** Looks up the value for `k`, if present. */
  def get[T](k: K[T]): Option[V[T]]
  def contains[T](k: K[T]): Boolean
  /** All entries with the shared type parameter erased. */
  def toSeq: Seq[(K[_], V[_])]
  // Re-pairs each entry so the shared type parameter `t` is visible to callers.
  // The cast is justified by the map invariant that K[t] only maps to V[t].
  def toTypedSeq: Seq[TPair[_]] = toSeq.map {
    case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]])
  }
  def keys: Iterable[K[_]]
  def values: Iterable[V[_]]
  def isEmpty: Boolean
  /** An entry whose key and value are statically known to agree on `T`. */
  sealed case class TPair[T](key: K[T], value: V[T])
}
/** Immutable heterogeneous map: every update returns a new map instance. */
trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
  def put[T](k: K[T], v: V[T]): IMap[K, V]
  def remove[T](k: K[T]): IMap[K, V]
  /** Replaces the value for `k` with `f` of the current value, or of `init` when absent. */
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V]
  /** Transforms every value by the natural transformation `f`. */
  def mapValues[V2[_]](f: V ~> V2): IMap[K, V2]
  /** Partitions this map into two by routing each value through `f` to Left or Right. */
  def mapSeparate[VL[_], VR[_]](f: V ~> λ[T => Either[VL[T], VR[T]]]): (IMap[K, VL], IMap[K, VR])
}
/** Mutable heterogeneous map: updates happen in place. */
trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
  def update[T](k: K[T], v: V[T]): Unit
  /** Removes `k`, returning the previous value if any. */
  def remove[T](k: K[T]): Option[V[T]]
  /** Returns the value for `k`, computing and storing `make` when absent. */
  def getOrUpdate[T](k: K[T], make: => V[T]): V[T]
  /** Stores and returns `f` of the current value for `k` (or of `init` when absent). */
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T]
}
object PMap {
  // Lets a PMap be used where a plain (erased) function from key to value is expected.
  implicit def toFunction[K[_], V[_]](map: PMap[K, V]): K[_] => V[_] = k => map(k)
  /** A new empty mutable PMap backed by a HashMap. */
  def empty[K[_], V[_]]: PMap[K, V] = new DelegatingPMap[K, V](new mutable.HashMap)
}
object IMap {

  /**
   * Only suitable for K that is invariant in its type parameter.
   * Option and List keys are not suitable, for example,
   * because None <: Option[String] and None <: Option[Int].
   */
  def empty[K[_], V[_]]: IMap[K, V] = new IMap0[K, V](Map.empty)

  // Wraps a Java map without copying; callers must not mutate `map` afterwards.
  private[sbt] def fromJMap[K[_], V[_]](map: java.util.Map[K[_], V[_]]): IMap[K, V] =
    new IMap0[K, V](new WrappedMap(map))

  /**
   * Immutable IMap backed by an untyped Map. Type safety rests entirely on the
   * invariant that a key K[T] is only ever associated with a value V[T].
   */
  private[sbt] class IMap0[K[_], V[_]](val backing: Map[K[_], V[_]])
      extends AbstractRMap[K, V]
      with IMap[K, V] {
    // Cast is safe by the K[T] -> V[T] invariant maintained by put/mapValue.
    def get[T](k: K[T]): Option[V[T]] = (backing get k).asInstanceOf[Option[V[T]]]
    def put[T](k: K[T], v: V[T]) = new IMap0[K, V](backing.updated(k, v))
    def remove[T](k: K[T]) = new IMap0[K, V](backing - k)
    def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]) =
      put(k, f(this get k getOrElse init))
    def mapValues[V2[_]](f: V ~> V2) =
      new IMap0[K, V2](Map(backing.iterator.map { case (k, v) => k -> f(v) }.toArray: _*))
    // Routes each value through `f` into one of two result maps.
    // NOTE(review): `Par` presumably evaluates the foreach concurrently (hence
    // the ConcurrentHashMap accumulators) — confirm against Par's definition.
    def mapSeparate[VL[_], VR[_]](f: V ~> λ[T => Either[VL[T], VR[T]]]) = {
      val left = new java.util.concurrent.ConcurrentHashMap[K[_], VL[_]]
      val right = new java.util.concurrent.ConcurrentHashMap[K[_], VR[_]]
      Par(backing.toVector).foreach {
        case (k, v) =>
          f(v) match {
            case Left(l) => left.put(k, l)
            case Right(r) => right.put(k, r)
          }
      }
      (new IMap0[K, VL](new WrappedMap(left)), new IMap0[K, VR](new WrappedMap(right)))
    }
    def toSeq = backing.toSeq
    def keys = backing.keys
    def values = backing.values
    def isEmpty = backing.isEmpty
    override def toString = backing.toString
  }
}
/** Implements RMap's `apply` and `contains` in terms of the abstract `get`. */
abstract class AbstractRMap[K[_], V[_]] extends RMap[K, V] {
  // Throws NoSuchElementException (via Option.get) when the key is absent.
  def apply[T](k: K[T]): V[T] = get(k).get
  def contains[T](k: K[T]): Boolean = get(k).isDefined
}
/**
 * Mutable PMap that delegates to an untyped mutable.Map.
 *
 * Only suitable for K that is invariant in its type parameter.
 * Option and List keys are not suitable, for example,
 * because None <: Option[String] and None <: Option[Int].
 */
class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]])
    extends AbstractRMap[K, V]
    with PMap[K, V] {
  def get[T](k: K[T]): Option[V[T]] = cast[T](backing.get(k))
  def update[T](k: K[T], v: V[T]): Unit = { backing(k) = v }
  def remove[T](k: K[T]) = cast(backing.remove(k))
  def getOrUpdate[T](k: K[T], make: => V[T]) = cast[T](backing.getOrElseUpdate(k, make))
  // Reads (falling back to init), transforms, stores, and returns the new value.
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = {
    val v = f(this get k getOrElse init)
    update(k, v)
    v
  }
  def toSeq = backing.toSeq
  def keys = backing.keys
  def values = backing.values
  def isEmpty = backing.isEmpty
  // Casts are justified by the invariant that a K[T] key maps only to a V[T] value.
  private[this] def cast[T](v: V[_]): V[T] = v.asInstanceOf[V[T]]
  private[this] def cast[T](o: Option[V[_]]): Option[V[T]] = o map cast[T]
  override def toString = backing.toString
}

View File

@ -1,70 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
/** Type-level building blocks: identity/constant constructors, composition, and basic ~> values. */
trait TypeFunctions {
  import TypeFunctions._
  /** Identity type constructor. */
  type Id[X] = X
  type NothingK[X] = Nothing
  /** Constant type function at kind * -> *. */
  sealed trait Const[A] { type Apply[B] = A }
  /** Constant type function over a type constructor parameter. */
  sealed trait ConstK[A] { type l[L[x]] = A }
  sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] }
  // Infix type-constructor composition: (A ∙ B)#l[T] = A[B[T]].
  // NOTE(review): the `∙` identifier was dropped by the text rendering
  // (leaving invalid `sealed trait [A[_], B[_]]`); restored here.
  sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] }
  private type AnyLeft[T] = Left[T, Nothing]
  private type AnyRight[T] = Right[Nothing, T]
  final val left: Id ~> Left[*, Nothing] =
    λ[Id ~> AnyLeft](Left(_)).setToString("TypeFunctions.left")
  final val right: Id ~> Right[Nothing, *] =
    λ[Id ~> AnyRight](Right(_)).setToString("TypeFunctions.right")
  final val some: Id ~> Some[*] = λ[Id ~> Some](Some(_)).setToString("TypeFunctions.some")
  final def idFun[T]: T => T = ((t: T) => t).setToString("TypeFunctions.id")
  final def const[A, B](b: B): A => B = ((_: A) => b).setToString(s"TypeFunctions.const($b)")
  final def idK[M[_]]: M ~> M = λ[M ~> M](m => m).setToString("TypeFunctions.idK")
  /** Lifts `f` under an inner constructor `G`: (M ∙ G) ~> (N ∙ G). */
  def nestCon[M[_], N[_], G[_]](f: M ~> N): (M ∙ G)#l ~> (N ∙ G)#l =
    f.asInstanceOf[(M ∙ G)#l ~> (N ∙ G)#l] // implemented with a cast to avoid extra object+method call.
  // castless version:
  // λ[(M ∙ G)#l ~> (N ∙ G)#l](f(_))
  type Endo[T] = T => T
  type ~>|[A[_], B[_]] = A ~> Compose[Option, B]#Apply
}
object TypeFunctions extends TypeFunctions {
  // Decorates a natural transformation with a fixed toString while delegating
  // apply/equals/hashCode to the underlying instance (for readable debug output).
  private implicit class Ops[T[_], R[_]](val underlying: T ~> R) extends AnyVal {
    def setToString(string: String): T ~> R = new (T ~> R) {
      override def apply[U](a: T[U]): R[U] = underlying(a)
      override def toString: String = string
      override def equals(o: Any): Boolean = underlying.equals(o)
      override def hashCode: Int = underlying.hashCode
    }
  }
  // Same decoration for ordinary functions.
  private implicit class FunctionOps[A, B](val f: A => B) extends AnyVal {
    def setToString(string: String): A => B = new (A => B) {
      override def apply(a: A): B = f(a)
      override def toString: String = string
      override def equals(o: Any): Boolean = f.equals(o)
      override def hashCode: Int = f.hashCode
    }
  }
}
/** Natural transformation: uniformly maps an `A[T]` to a `B[T]` for every `T`. */
trait ~>[-A[_], +B[_]] { outer =>
  def apply[T](a: A[T]): B[T]

  // Composition is defined directly on ~> because of type inference limitations.
  // NOTE(review): both methods were originally named `∙`; the rendering dropped
  // the symbol (leaving invalid `final def [C[_]]...`); restored here.
  /** Composes this transformation after `g`. */
  final def ∙[C[_]](g: C ~> A): C ~> B = λ[C ~> B](c => outer.apply(g(c)))
  /** Composes this transformation after a plain function whose result embeds into `A`. */
  final def ∙[C, D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i)))
  /** This transformation as an ordinary function at the fixed type `T`. */
  final def fn[T]: A[T] => B[T] = (t: A[T]) => apply[T](t)
}
object ~> {
  import TypeFunctions._
  /** The identity natural transformation. */
  val Id: Id ~> Id = idK[Id]
  // Implicitly provides the identity transformation where an Id ~> Id is expected.
  implicit def tcIdEquals: Id ~> Id = Id
}

View File

@ -125,9 +125,11 @@ object LineReader {
case _: Terminal.ConsoleTerminal => Some(Signals.register(() => terminal.write(-1)))
case _ => None
}
try terminal.withRawInput {
Option(mask.map(reader.readLine(prompt, _)).getOrElse(reader.readLine(prompt)))
} catch {
try
terminal.withRawInput {
Option(mask.map(reader.readLine(prompt, _)).getOrElse(reader.readLine(prompt)))
}
catch {
case e: EndOfFileException =>
if (terminal == Terminal.console && System.console == null) None
else Some("exit")
@ -195,8 +197,8 @@ abstract class JLine extends LineReader {
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
if (handleCONT)
Signals.withHandler(() => resume(), signal = Signals.CONT)(
() => readLineDirectRaw(prompt, mask)
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
readLineDirectRaw(prompt, mask)
)
else
readLineDirectRaw(prompt, mask)
@ -236,31 +238,23 @@ abstract class JLine extends LineReader {
@deprecated("Use LineReader apis", "1.4.0")
private[sbt] object JLine {
@deprecated("For binary compatibility only", "1.4.0")
protected[this] val originalIn = new FileInputStream(FileDescriptor.in)
@deprecated("Handled by Terminal.fixTerminalProperty", "1.4.0")
private[sbt] def fixTerminalProperty(): Unit = ()
@deprecated("For binary compatibility only", "1.4.0")
private[sbt] def makeInputStream(injectThreadSleep: Boolean): InputStream =
if (injectThreadSleep) new InputStreamWrapper(originalIn, 2.milliseconds)
else originalIn
// When calling this, ensure that enableEcho has been or will be called.
// TerminalFactory.get will initialize the terminal to disable echo.
@deprecated("Don't use jline.Terminal directly", "1.4.0")
private[sbt] def terminal: jline.Terminal = Terminal.deprecatedTeminal
/**
* For accessing the JLine Terminal object.
* This ensures synchronized access as well as re-enabling echo after getting the Terminal.
* For accessing the JLine Terminal object. This ensures synchronized access as well as
* re-enabling echo after getting the Terminal.
*/
@deprecated(
"Don't use jline.Terminal directly. Use Terminal.get.withCanonicalIn instead.",
"1.4.0"
)
def usingTerminal[T](f: jline.Terminal => T): T = f(Terminal.get.toJLine)
// @deprecated(
// "Don't use jline.Terminal directly. Use Terminal.get.withCanonicalIn instead.",
// "1.4.0"
// )
// def usingTerminal[T](f: jline.Terminal => T): T = f(Terminal.get.toJLine)
@deprecated("unused", "1.4.0")
def createReader(): ConsoleReader = createReader(None, Terminal.wrappedSystemIn)
@ -296,31 +290,6 @@ private[sbt] object JLine {
val HandleCONT = LineReader.HandleCONT
}
@deprecated("For binary compatibility only", "1.4.0")
// Polling wrapper: each read spins (sleeping `poll` between checks) until the
// underlying stream reports available bytes, so reads never block indefinitely
// inside the underlying stream itself.
private[sbt] class InputStreamWrapper(is: InputStream, val poll: Duration)
    extends FilterInputStream(is) {
  @tailrec final override def read(): Int =
    if (is.available() != 0) is.read()
    else {
      Thread.sleep(poll.toMillis)
      read()
    }
  @tailrec final override def read(b: Array[Byte]): Int =
    if (is.available() != 0) is.read(b)
    else {
      Thread.sleep(poll.toMillis)
      read(b)
    }
  @tailrec final override def read(b: Array[Byte], off: Int, len: Int): Int =
    if (is.available() != 0) is.read(b, off, len)
    else {
      Thread.sleep(poll.toMillis)
      read(b, off, len)
    }
}
final class FullReader(
historyPath: Option[File],
complete: Parser[_],

View File

@ -9,9 +9,8 @@ package sbt.internal.util
package complete
/**
* Represents a set of completions.
* It exists instead of implicitly defined operations on top of Set[Completion]
* for laziness.
* Represents a set of completions. It exists instead of implicitly defined operations on top of
* Set[Completion] for laziness.
*/
sealed trait Completions {
def get: Set[Completion]
@ -46,49 +45,48 @@ object Completions {
def strict(cs: Set[Completion]): Completions = apply(cs)
/**
* No suggested completions, not even the empty Completion.
* This typically represents invalid input.
* No suggested completions, not even the empty Completion. This typically represents invalid
* input.
*/
val nil: Completions = strict(Set.empty)
/**
* Only includes an empty Suggestion.
* This typically represents valid input that either has no completions or accepts no further input.
* Only includes an empty Suggestion. This typically represents valid input that either has no
* completions or accepts no further input.
*/
val empty: Completions = strict(Set.empty + Completion.empty)
/** Returns a strict Completions instance containing only the provided Completion.*/
/** Returns a strict Completions instance containing only the provided Completion. */
def single(c: Completion): Completions = strict(Set.empty + c)
}
/**
* Represents a completion.
* The abstract members `display` and `append` are best explained with an example.
* Represents a completion. The abstract members `display` and `append` are best explained with an
* example.
*
* Assuming space-delimited tokens, processing this:
* am is are w<TAB>
* could produce these Completions:
* Completion { display = "was"; append = "as" }
* Completion { display = "were"; append = "ere" }
* to suggest the tokens "was" and "were".
* Assuming space-delimited tokens, processing this: am is are w<TAB> could produce these
* Completions: Completion { display = "was"; append = "as" } Completion { display = "were"; append
* = "ere" } to suggest the tokens "was" and "were".
*
* In this way, two pieces of information are preserved:
* 1) what needs to be appended to the current input if a completion is selected
* 2) the full token being completed, which is useful for presenting a user with choices to select
* In this way, two pieces of information are preserved: 1) what needs to be appended to the current
* input if a completion is selected 2) the full token being completed, which is useful for
* presenting a user with choices to select
*/
sealed trait Completion {
/** The proposed suffix to append to the existing input to complete the last token in the input.*/
/**
* The proposed suffix to append to the existing input to complete the last token in the input.
*/
def append: String
/** The string to present to the user to represent the full token being suggested.*/
/** The string to present to the user to represent the full token being suggested. */
def display: String
/** True if this Completion is suggesting the empty string.*/
/** True if this Completion is suggesting the empty string. */
def isEmpty: Boolean
/** Appends the completions in `o` with the completions in this Completion.*/
/** Appends the completions in `o` with the completions in this Completion. */
def ++(o: Completion): Completion = Completion.concat(this, o)
final def x(o: Completions): Completions =
@ -160,14 +158,4 @@ object Completion {
def tokenDisplay(append: String, display: String): Completion = new Token(display, append)
def suggestion(value: String): Completion = new Suggestion(value)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def displayOnly(value: => String): Completion = new DisplayOnly(value)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def token(prepend: => String, append: => String): Completion =
new Token(prepend + append, append)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def suggestion(value: => String): Completion = new Suggestion(value)
}

View File

@ -14,9 +14,8 @@ import java.lang.Character.{ toLowerCase => lower }
object EditDistance {
/**
* Translated from the java version at
* http://www.merriampark.com/ld.htm
* which is declared to be public domain.
* Translated from the java version at http://www.merriampark.com/ld.htm which is declared to be
* public domain.
*/
def levenshtein(
s: String,

View File

@ -12,22 +12,25 @@ import java.io.File
import sbt.io.IO
/**
* These sources of examples are used in parsers for user input completion. An example of such a source is the
* [[sbt.internal.util.complete.FileExamples]] class, which provides a list of suggested files to the user as they press the
* TAB key in the console.
* These sources of examples are used in parsers for user input completion. An example of such a
* source is the [[sbt.internal.util.complete.FileExamples]] class, which provides a list of
* suggested files to the user as they press the TAB key in the console.
*/
trait ExampleSource {
/**
* @return a (possibly lazy) list of completion example strings. These strings are continuations of user's input. The
* user's input is incremented with calls to [[withAddedPrefix]].
* @return
* a (possibly lazy) list of completion example strings. These strings are continuations of
* user's input. The user's input is incremented with calls to [[withAddedPrefix]].
*/
def apply(): Iterable[String]
/**
* @param addedPrefix a string that just typed in by the user.
* @return a new source of only those examples that start with the string typed by the user so far (with addition of
* the just added prefix).
* @param addedPrefix
* a string that just typed in by the user.
* @return
* a new source of only those examples that start with the string typed by the user so far (with
* addition of the just added prefix).
*/
def withAddedPrefix(addedPrefix: String): ExampleSource
@ -35,7 +38,8 @@ trait ExampleSource {
/**
* A convenience example source that wraps any collection of strings into a source of examples.
* @param examples the examples that will be displayed to the user when they press the TAB key.
* @param examples
* the examples that will be displayed to the user when they press the TAB key.
*/
sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSource {
override def withAddedPrefix(addedPrefix: String): ExampleSource =
@ -50,8 +54,10 @@ sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSo
/**
* Provides path completion examples based on files in the base directory.
* @param base the directory within which this class will search for completion examples.
* @param prefix the part of the path already written by the user.
* @param base
* the directory within which this class will search for completion examples.
* @param prefix
* the part of the path already written by the user.
*/
class FileExamples(base: File, prefix: String = "") extends ExampleSource {
override def apply(): Stream[String] = files(base).map(_ substring prefix.length)
@ -64,7 +70,9 @@ class FileExamples(base: File, prefix: String = "") extends ExampleSource {
val prefixedDirectChildPaths = childPaths map { IO.relativize(base, _).get } filter {
_ startsWith prefix
}
val dirsToRecurseInto = childPaths filter { _.isDirectory } map { IO.relativize(base, _).get } filter {
val dirsToRecurseInto = childPaths filter { _.isDirectory } map {
IO.relativize(base, _).get
} filter {
dirStartsWithPrefix
}
prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir)))

View File

@ -61,14 +61,12 @@ object HistoryCommands {
{ printHistory(h, MaxLines, show); nil[String].some }
}
lazy val execStr = flag('?') ~ token(any.+.string, "<string>") map {
case (contains, str) =>
execute(h => if (contains) h !? str else h ! str)
lazy val execStr = flag('?') ~ token(any.+.string, "<string>") map { case (contains, str) =>
execute(h => if (contains) h !? str else h ! str)
}
lazy val execInt = flag('-') ~ num map {
case (neg, value) =>
execute(h => if (neg) h !- value else h ! value)
lazy val execInt = flag('-') ~ num map { case (neg, value) =>
execute(h => if (neg) h !- value else h ! value)
}
lazy val help = success((h: History) => { printHelp(); nil[String].some })

View File

@ -81,10 +81,9 @@ object JLineCompletion {
def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = {
val (insert, display) =
cs.foldLeft((Set.empty[String], Set.empty[String])) {
case (t @ (insert, display), comp) =>
if (comp.isEmpty) t
else (appendNonEmpty(insert, comp.append), appendNonEmpty(display, comp.display))
cs.foldLeft((Set.empty[String], Set.empty[String])) { case (t @ (insert, display), comp) =>
if (comp.isEmpty) t
else (appendNonEmpty(insert, comp.append), appendNonEmpty(display, comp.display))
}
(insert.toSeq, display.toSeq.sorted)
}
@ -135,8 +134,8 @@ object JLineCompletion {
}
/**
* `display` is assumed to be the exact strings requested to be displayed.
* In particular, duplicates should have been removed already.
* `display` is assumed to be the exact strings requested to be displayed. In particular,
* duplicates should have been removed already.
*/
def showCompletions(display: Seq[String], reader: ConsoleReader): Unit = {
printCompletions(display, reader)

View File

@ -13,98 +13,111 @@ import sbt.internal.util.Types.{ left, right, some }
import sbt.internal.util.Util.{ makeList, separate }
/**
* A String parser that provides semi-automatic tab completion.
* A successful parse results in a value of type `T`.
* The methods in this trait are what must be implemented to define a new Parser implementation, but are not typically useful for common usage.
* Instead, most useful methods for combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type.
* A String parser that provides semi-automatic tab completion. A successful parse results in a
* value of type `A`. The methods in this trait are what must be implemented to define a new Parser
* implementation, but are not typically useful for common usage. Instead, most useful methods for
* combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type.
*/
trait Parser[+T] {
def derive(i: Char): Parser[T]
def resultEmpty: Result[T]
def result: Option[T]
trait Parser[+A1]:
def derive(i: Char): Parser[A1]
def resultEmpty: Result[A1]
def result: Option[A1]
def completions(level: Int): Completions
def failure: Option[Failure]
def isTokenStart = false
def ifValid[S](p: => Parser[S]): Parser[S]
def ifValid[A2](p: => Parser[A2]): Parser[A2]
def valid: Boolean
}
end Parser
sealed trait RichParser[A] {
/** Apply the original Parser and then apply `next` (in order). The result of both is provided as a pair. */
/**
* Apply the original Parser and then apply `next` (in order). The result of both is provided as a
* pair.
*/
def ~[B](next: Parser[B]): Parser[(A, B)]
/** Apply the original Parser one or more times and provide the non-empty sequence of results.*/
/** Apply the original Parser one or more times and provide the non-empty sequence of results. */
def + : Parser[Seq[A]]
/** Apply the original Parser zero or more times and provide the (potentially empty) sequence of results.*/
/**
* Apply the original Parser zero or more times and provide the (potentially empty) sequence of
* results.
*/
def * : Parser[Seq[A]]
/** Apply the original Parser zero or one times, returning None if it was applied zero times or the result wrapped in Some if it was applied once.*/
/**
* Apply the original Parser zero or one times, returning None if it was applied zero times or the
* result wrapped in Some if it was applied once.
*/
def ? : Parser[Option[A]]
/** Apply either the original Parser or `b`.*/
/** Apply either the original Parser or `b`. */
def |[B >: A](b: Parser[B]): Parser[B]
/** Apply either the original Parser or `b`.*/
/** Apply either the original Parser or `b`. */
def ||[B](b: Parser[B]): Parser[Either[A, B]]
/** Apply the original Parser to the input and then apply `f` to the result.*/
/** Apply the original Parser to the input and then apply `f` to the result. */
def map[B](f: A => B): Parser[B]
/**
* Returns the original parser. This is useful for converting literals to Parsers.
* For example, `'c'.id` or `"asdf".id`
* Returns the original parser. This is useful for converting literals to Parsers. For example,
* `'c'.id` or `"asdf".id`
*/
def id: Parser[A]
/** Apply the original Parser, but provide `value` as the result if it succeeds. */
def ^^^[B](value: B): Parser[B]
/** Apply the original Parser, but provide `alt` as the result if it fails.*/
/** Apply the original Parser, but provide `alt` as the result if it fails. */
def ??[B >: A](alt: B): Parser[B]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of `next`.
* (The arrow points in the direction of the retained result.)
* Produces a Parser that applies the original Parser and then applies `next` (in order),
* discarding the result of `next`. (The arrow points in the direction of the retained result.)
*/
def <~[B](b: Parser[B]): Parser[A]
/**
* Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of the original parser.
* (The arrow points in the direction of the retained result.)
* Produces a Parser that applies the original Parser and then applies `next` (in order),
* discarding the result of the original parser. (The arrow points in the direction of the retained
* result.)
*/
def ~>[B](b: Parser[B]): Parser[B]
/** Uses the specified message if the original Parser fails.*/
/** Uses the specified message if the original Parser fails. */
def !!!(msg: String): Parser[A]
/**
* If an exception is thrown by the original Parser,
* capture it and fail locally instead of allowing the exception to propagate up and terminate parsing.
* If an exception is thrown by the original Parser, capture it and fail locally instead of
* allowing the exception to propagate up and terminate parsing.
*/
def failOnException: Parser[A]
/**
* Apply the original parser, but only succeed if `o` also succeeds.
* Note that `o` does not need to consume the same amount of input to satisfy this condition.
* Apply the original parser, but only succeed if `o` also succeeds. Note that `o` does not need
* to consume the same amount of input to satisfy this condition.
*/
def &(o: Parser[_]): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
/** Explicitly defines the completions for the original Parser. */
def examples(s: String*): Parser[A]
/** Explicitly defines the completions for the original Parser.*/
/** Explicitly defines the completions for the original Parser. */
def examples(s: Set[String], check: Boolean = false): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
* prevent lengthy pauses and avoids bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the
* given parser). Invalid examples will be filtered out and only valid suggestions will
* be displayed.
* @return a new parser with a new source of completions.
* @param exampleSource
* the source of examples when displaying completions to the user.
* @param maxNumberOfExamples
* limits the number of examples that the source of examples should return. This can prevent
* lengthy pauses and avoids bad interactive user experience.
* @param removeInvalidExamples
* indicates whether completion examples should be checked for validity (against the given
* parser). Invalid examples will be filtered out and only valid suggestions will be displayed.
* @return
* a new parser with a new source of completions.
*/
def examples(
exampleSource: ExampleSource,
@ -113,29 +126,35 @@ sealed trait RichParser[A] {
): Parser[A]
/**
* @param exampleSource the source of examples when displaying completions to the user.
* @return a new parser with a new source of completions. It displays at most 25 completion examples and does not
* remove invalid examples.
* @param exampleSource
* the source of examples when displaying completions to the user.
* @return
* a new parser with a new source of completions. It displays at most 25 completion examples and
* does not remove invalid examples.
*/
def examples(exampleSource: ExampleSource): Parser[A] =
examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
/** Converts a Parser returning a Char sequence to a Parser returning a String.*/
/** Converts a Parser returning a Char sequence to a Parser returning a String. */
def string(implicit ev: A <:< Seq[Char]): Parser[String]
/**
* Produces a Parser that filters the original parser.
* If 'f' is not true when applied to the output of the original parser, the Parser returned by this method fails.
* The failure message is constructed by applying `msg` to the String that was successfully parsed by the original parser.
* Produces a Parser that filters the original parser. If 'f' is not true when applied to the
* output of the original parser, the Parser returned by this method fails. The failure message is
* constructed by applying `msg` to the String that was successfully parsed by the original
* parser.
*/
def filter(f: A => Boolean, msg: String => String): Parser[A]
/** Applies the original parser, applies `f` to the result to get the next parser, and applies that parser and uses its result for the overall result. */
/**
* Applies the original parser, applies `f` to the result to get the next parser, and applies that
* parser and uses its result for the overall result.
*/
def flatMap[B](f: A => Parser[B]): Parser[B]
}
/** Contains Parser implementation helper methods not typically needed for using parsers. */
object Parser extends ParserMain {
object Parser extends ParserMain:
sealed abstract class Result[+T] {
def isFailure: Boolean
def isValid: Boolean
@ -251,16 +270,18 @@ object Parser extends ParserMain {
}
def choiceParser[A, B](a: Parser[A], b: Parser[B]): Parser[Either[A, B]] =
if (a.valid)
if (b.valid) new HetParser(a, b) else a.map(left.fn)
else
b.map(right.fn)
if a.valid then
if b.valid then new HetParser(a, b)
else a.map(left[A])
else b.map(right[B])
def opt[T](a: Parser[T]): Parser[Option[T]] =
if (a.valid) new Optional(a) else success(None)
if a.valid then new Optional(a)
else success(None)
def onFailure[T](delegate: Parser[T], msg: String): Parser[T] =
if (delegate.valid) new OnFailure(delegate, msg) else failure(msg)
if delegate.valid then new OnFailure(delegate, msg)
else failure(msg)
def trapAndFail[T](delegate: Parser[T]): Parser[T] =
delegate.ifValid(new TrapAndFail(delegate))
@ -311,11 +332,12 @@ object Parser extends ParserMain {
}
def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
}
end Parser
trait ParserMain {
/** Provides combinators for Parsers.*/
/** Provides combinators for Parsers. */
implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] {
def ~[B](b: Parser[B]) = seqParser(a, b)
def ||[B](b: Parser[B]) = choiceParser(a, b)
@ -357,29 +379,29 @@ trait ParserMain {
implicit def literalRichStringParser(s: String): RichParser[String] = richParser(s)
/**
* Construct a parser that is valid, but has no valid result. This is used as a way
* to provide a definitive Failure when a parser doesn't match empty input. For example,
* in `softFailure(...) | p`, if `p` doesn't match the empty sequence, the failure will come
* from the Parser constructed by the `softFailure` method.
* Construct a parser that is valid, but has no valid result. This is used as a way to provide a
* definitive Failure when a parser doesn't match empty input. For example, in `softFailure(...) |
* p`, if `p` doesn't match the empty sequence, the failure will come from the Parser constructed
* by the `softFailure` method.
*/
private[sbt] def softFailure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
SoftInvalid(mkFailures(msg :: Nil, definitive))
/**
* Defines a parser that always fails on any input with messages `msgs`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
* Defines a parser that always fails on any input with messages `msgs`. If `definitive` is
* `true`, any failures by later alternatives are discarded.
*/
def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] =
Invalid(mkFailures(msgs, definitive))
/**
* Defines a parser that always fails on any input with message `msg`.
* If `definitive` is `true`, any failures by later alternatives are discarded.
* Defines a parser that always fails on any input with message `msg`. If `definitive` is `true`,
* any failures by later alternatives are discarded.
*/
def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] =
invalid(msg :: Nil, definitive)
/** Defines a parser that always succeeds on empty input with the result `value`.*/
/** Defines a parser that always succeeds on empty input with the result `value`. */
def success[T](value: T): Parser[T] = new ValidParser[T] {
override def result = Some(value)
def resultEmpty = Value(value)
@ -388,25 +410,29 @@ trait ParserMain {
override def toString = "success(" + value + ")"
}
/** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/
/**
* Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.
*/
implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] = {
val label = r.map(_.toString).toString
range(r, label)
}
/** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/
/**
* Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.
*/
def range(r: collection.immutable.NumericRange[Char], label: String): Parser[Char] =
charClass(r contains _, label).examples(r.map(_.toString): _*)
/** Defines a Parser that parses a single character only if it is contained in `legal`.*/
/** Defines a Parser that parses a single character only if it is contained in `legal`. */
def chars(legal: String): Parser[Char] = {
val set = legal.toSet
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
}
/**
* Defines a Parser that parses a single character only if the predicate `f` returns true for that character.
* If this parser fails, `label` is used as the failure message.
* Defines a Parser that parses a single character only if the predicate `f` returns true for that
* character. If this parser fails, `label` is used as the failure message.
*/
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] =
new CharacterClass(f, label)
@ -414,24 +440,31 @@ trait ParserMain {
/** Presents a single Char `ch` as a Parser that only parses that exact character. */
implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected '" + ch + "'")
def derive(c: Char) = if (c == ch) success(ch) else new Invalid(resultEmpty)
private[this] lazy val fail = mkFailure("Expected '" + ch + "'")
def resultEmpty = fail
def derive(c: Char) = if (c == ch) success(ch) else new Invalid(fail)
def completions(level: Int) = Completions.single(Completion.suggestion(ch.toString))
override def toString = "'" + ch + "'"
}
/** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/
/**
* Presents a literal String `s` as a Parser that only parses that exact text and provides it as
* the result.
*/
implicit def literal(s: String): Parser[String] = stringLiteral(s, 0)
/** See [[unapply]]. */
object ~ {
/** Convenience for destructuring a tuple that mirrors the `~` combinator.*/
/** Convenience for destructuring a tuple that mirrors the `~` combinator. */
def unapply[A, B](t: (A, B)): Some[(A, B)] = Some(t)
}
/** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccessful, an error message is provided in `Left`.*/
/**
* Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If
* unsuccessful, an error message is provided in `Left`.
*/
def parse[T](str: String, parser: Parser[T]): Either[String, T] =
Parser.result(parser, str).left.map { failures =>
val (msgs, pos) = failures()
@ -439,11 +472,10 @@ trait ParserMain {
}
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str`.
* If `completions` is true, the available completions for the input are displayed.
* Otherwise, the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
* Convenience method to use when developing a parser. `parser` is applied to the input `str`. If
* `completions` is true, the available completions for the input are displayed. Otherwise, the
* result of parsing is printed using the result's `toString` method. If parsing fails, the error
* message is displayed.
*
* See also [[sampleParse]] and [[sampleCompletions]].
*/
@ -451,9 +483,9 @@ trait ParserMain {
if (completions) sampleCompletions(str, parser) else sampleParse(str, parser)
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the result of parsing is printed using the result's `toString` method.
* If parsing fails, the error message is displayed.
* Convenience method to use when developing a parser. `parser` is applied to the input `str` and
* the result of parsing is printed using the result's `toString` method. If parsing fails, the
* error message is displayed.
*/
def sampleParse(str: String, parser: Parser[_]): Unit =
parse(str, parser) match {
@ -462,9 +494,9 @@ trait ParserMain {
}
/**
* Convenience method to use when developing a parser.
* `parser` is applied to the input `str` and the available completions are displayed on separate lines.
* If parsing fails, the error message is displayed.
* Convenience method to use when developing a parser. `parser` is applied to the input `str` and
* the available completions are displayed on separate lines. If parsing fails, the error message
* is displayed.
*/
def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit =
Parser.completions(parser, str, level).get foreach println
@ -481,7 +513,8 @@ trait ParserMain {
val msgs = msgs0()
val nonEmpty = if (msgs.isEmpty) Seq("Unexpected end of input") else msgs
(nonEmpty, ci)
} else
}
else
loop(ci, a derive s(ci))
}
loop(-1, p)
@ -496,10 +529,10 @@ trait ParserMain {
if (p.valid) p.derive(c) else p
/**
* Applies parser `p` to input `s` and returns the completions at verbosity `level`.
* The interpretation of `level` is up to parser definitions, but 0 is the default by convention,
* with increasing positive numbers corresponding to increasing verbosity. Typically no more than
* a few levels are defined.
* Applies parser `p` to input `s` and returns the completions at verbosity `level`. The
* interpretation of `level` is up to parser definitions, but 0 is the default by convention, with
* increasing positive numbers corresponding to increasing verbosity. Typically no more than a few
* levels are defined.
*/
def completions(p: Parser[_], s: String, level: Int): Completions =
// The x Completions.empty removes any trailing token completions where append.isEmpty
@ -509,14 +542,20 @@ trait ParserMain {
examples(a, new FixedSetExamples(completions), completions.size, check)
/**
* @param a the parser to decorate with a source of examples. All validation and parsing is delegated to this parser,
* only [[Parser.completions]] is modified.
* @param completions the source of examples when displaying completions to the user.
* @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can
* prevent lengthy pauses and avoids bad interactive user experience.
* @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the given parser). An
* exception is thrown if the example source contains no valid completion suggestions.
* @tparam A the type of values that are returned by the parser.
* @param a
* the parser to decorate with a source of examples. All validation and parsing is delegated to
* this parser, only [[Parser.completions]] is modified.
* @param completions
* the source of examples when displaying completions to the user.
* @param maxNumberOfExamples
* limits the number of examples that the source of examples should return. This can prevent
* lengthy pauses and avoids bad interactive user experience.
* @param removeInvalidExamples
* indicates whether completion examples should be checked for validity (against the given
* parser). An exception is thrown if the example source contains no valid completion
* suggestions.
* @tparam A
* the type of values that are returned by the parser.
* @return
*/
def examples[A](
@ -548,31 +587,33 @@ trait ParserMain {
}
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, the completions provided by the delegate `t` or a later derivative are appended to
* the prefix String already seen by this parser.
* Establishes delegate parser `t` as a single token of tab completion. When tab completion of
* part of this token is requested, the completions provided by the delegate `t` or a later
* derivative are appended to the prefix String already seen by this parser.
*/
def token[T](t: Parser[T]): Parser[T] = token(t, TokenCompletions.default)
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level.
* Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser.
* Establishes delegate parser `t` as a single token of tab completion. When tab completion of
* part of this token is requested, no completions are returned if `hide` returns true for the
* current tab completion level. Otherwise, the completions provided by the delegate `t` or a
* later derivative are appended to the prefix String already seen by this parser.
*/
def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] =
token(t, TokenCompletions.default.hideWhen(hide))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed.
* Establishes delegate parser `t` as a single token of tab completion. When tab completion of
* part of this token is requested, `description` is displayed for suggestions and no completions
* are ever performed.
*/
def token[T](t: Parser[T], description: String): Parser[T] =
token(t, TokenCompletions.displayOnly(description))
/**
* Establishes delegate parser `t` as a single token of tab completion.
* When tab completion of part of this token is requested, `display` is used as the printed suggestion, but the completions from the delegate
* parser `t` are used to complete if unambiguous.
* Establishes delegate parser `t` as a single token of tab completion. When tab completion of
* part of this token is requested, `display` is used as the printed suggestion, but the
* completions from the delegate parser `t` are used to complete if unambiguous.
*/
def tokenDisplay[T](t: Parser[T], display: String): Parser[T] =
token(t, TokenCompletions.overrideDisplay(display))
@ -603,7 +644,7 @@ trait ParserMain {
def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] = {
val (newErrors, valid) = separate(p) {
case Invalid(f) => Left(f.errors _): Either[() => Seq[String], Parser[T]]
case Invalid(f) => Left(() => f.errors): Either[() => Seq[String], Parser[T]]
case ok => Right(ok): Either[() => Seq[String], Parser[T]]
}
def combinedErrors = errors ++ newErrors.flatMap(_())
@ -842,19 +883,25 @@ private final class Not(delegate: Parser[_], failMessage: String) extends ValidP
}
/**
* This class wraps an existing parser (the delegate), and replaces the delegate's completions with examples from
* the given example source.
* This class wraps an existing parser (the delegate), and replaces the delegate's completions with
* examples from the given example source.
*
* This class asks the example source for a limited amount of examples (to prevent lengthy and expensive
* computations and large amounts of allocated data). It then passes these examples on to the UI.
* This class asks the example source for a limited amount of examples (to prevent lengthy and
* expensive computations and large amounts of allocated data). It then passes these examples on to
* the UI.
*
* @param delegate the parser to decorate with completion examples (i.e., completion of user input).
* @param exampleSource the source from which this class will take examples (potentially filter them with the delegate
* parser), and pass them to the UI.
* @param maxNumberOfExamples the maximum number of completions to read from the example source and pass to the UI. This
* limit prevents lengthy example generation and allocation of large amounts of memory.
* @param removeInvalidExamples indicates whether to remove examples that are deemed invalid by the delegate parser.
* @tparam T the type of value produced by the parser.
* @param delegate
* the parser to decorate with completion examples (i.e., completion of user input).
* @param exampleSource
* the source from which this class will take examples (potentially filter them with the delegate
* parser), and pass them to the UI.
* @param maxNumberOfExamples
* the maximum number of completions to read from the example source and pass to the UI. This
* limit prevents lengthy example generation and allocation of large amounts of memory.
* @param removeInvalidExamples
* indicates whether to remove examples that are deemed invalid by the delegate parser.
* @tparam T
* the type of value produced by the parser.
*/
private final class ParserWithExamples[T](
delegate: Parser[T],
@ -876,8 +923,7 @@ private final class ParserWithExamples[T](
lazy val resultEmpty = delegate.resultEmpty
def completions(level: Int) = {
if (exampleSource().isEmpty)
if (resultEmpty.isValid) Completions.nil else Completions.empty
if (exampleSource().isEmpty) if (resultEmpty.isValid) Completions.nil else Completions.empty
else {
val examplesBasedOnTheResult = filteredExamples.take(maxNumberOfExamples).toSet
Completions(examplesBasedOnTheResult.map(ex => Completion.suggestion(ex)))
@ -902,11 +948,12 @@ private final class StringLiteral(str: String, start: Int) extends ValidParser[S
assert(0 <= start && start < str.length)
def failMsg = "Expected '" + str + "'"
private[this] lazy val fail = mkFailure(failMsg)
def resultEmpty = mkFailure(failMsg)
def result = None
def derive(c: Char) =
if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty)
if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(fail)
def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start)))
override def toString = "\"" + str + "\""
@ -914,16 +961,17 @@ private final class StringLiteral(str: String, start: Int) extends ValidParser[S
private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] {
def result = None
def resultEmpty = mkFailure("Expected " + label)
def derive(c: Char) = if (f(c)) success(c) else Invalid(resultEmpty)
private[this] def fail: Failure = mkFailure("Expected " + label)
def resultEmpty = fail
def derive(c: Char) = if (f(c)) success(c) else Invalid(fail)
def completions(level: Int) = Completions.empty
override def toString = "class(" + label + ")"
}
private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] {
def result = delegate.result map some.fn
private final class Optional[A](delegate: Parser[A]) extends ValidParser[Option[A]] {
def result = delegate.result.map(some[A])
def resultEmpty = Value(None)
def derive(c: Char) = (delegate derive c).map(some.fn)
def derive(c: Char) = (delegate derive c).map(some[A])
def completions(level: Int) = Completion.empty +: delegate.completions(level)
override def toString = delegate.toString + "?"
}

View File

@ -33,10 +33,10 @@ trait Parsers {
/** Parses any single character and provides that character as the result. */
lazy val any: Parser[Char] = charClass(_ => true, "any character")
/** Set that contains each digit in a String representation.*/
/** Set that contains each digit in a String representation. */
lazy val DigitSet = Set("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
/** Parses any single digit and provides that digit as a Char as the result.*/
/** Parses any single digit and provides that digit as a Char as the result. */
lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet
/** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */
@ -57,34 +57,57 @@ trait Parsers {
/** Parses a single letter, according to Char.isLower, into a Char. */
lazy val Lower = charClass(_.isLower, "lower")
/** Parses the first Char in an sbt identifier, which must be a [[Letter]].*/
/** Parses the first Char in an sbt identifier, which must be a [[Letter]]. */
def IDStart = Letter
/** Parses an identifier Char other than the first character. This includes letters, digits, dash `-`, and underscore `_`.*/
/**
* Parses an identifier Char other than the first character. This includes letters, digits, dash
* `-`, and underscore `_`.
*/
lazy val IDChar = charClass(isIDChar, "ID character")
/** Parses an identifier String, which must start with [[IDStart]] and contain zero or more [[IDChar]]s after that. */
/**
* Parses an identifier String, which must start with [[IDStart]] and contain zero or more
* [[IDChar]]s after that.
*/
lazy val ID = identifier(IDStart, IDChar)
/** Parses a single operator Char, as allowed by [[isOpChar]]. */
lazy val OpChar = charClass(isOpChar, "symbol")
/** Parses a non-empty operator String, which consists only of characters allowed by [[OpChar]]. */
/**
* Parses a non-empty operator String, which consists only of characters allowed by [[OpChar]].
*/
lazy val Op = OpChar.+.string
/** Parses either an operator String defined by [[Op]] or a non-symbolic identifier defined by [[ID]]. */
/**
* Parses either an operator String defined by [[Op]] or a non-symbolic identifier defined by
* [[ID]].
*/
lazy val OpOrID = ID | Op
/** Parses a single, non-symbolic Scala identifier Char. Valid characters are letters, digits, and the underscore character `_`. */
/**
* Parses a single, non-symbolic Scala identifier Char. Valid characters are letters, digits, and
* the underscore character `_`.
*/
lazy val ScalaIDChar = charClass(isScalaIDChar, "Scala identifier character")
/** Parses a non-symbolic Scala-like identifier. The identifier must start with [[IDStart]] and contain zero or more [[ScalaIDChar]]s after that.*/
/**
* Parses a non-symbolic Scala-like identifier. The identifier must start with [[IDStart]] and
* contain zero or more [[ScalaIDChar]]s after that.
*/
lazy val ScalaID = identifier(IDStart, ScalaIDChar)
/** Parses a non-symbolic Scala-like identifier. The identifier must start with [[Upper]] and contain zero or more [[ScalaIDChar]]s after that.*/
/**
* Parses a non-symbolic Scala-like identifier. The identifier must start with [[Upper]] and
* contain zero or more [[ScalaIDChar]]s after that.
*/
lazy val CapitalizedID = identifier(Upper, ScalaIDChar)
/** Parses a String that starts with `start` and is followed by zero or more characters parsed by `rep`.*/
/**
* Parses a String that starts with `start` and is followed by zero or more characters parsed by
* `rep`.
*/
def identifier(start: Parser[Char], rep: Parser[Char]): Parser[String] =
start ~ rep.* map { case x ~ xs => (x +: xs).mkString }
@ -102,7 +125,8 @@ trait Parsers {
def isOpType(cat: Int) = cat match {
case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL |
CURRENCY_SYMBOL =>
true; case _ => false
true
case _ => false
}
/** Returns true if `c` is a dash `-`, a letter, digit, or an underscore `_`. */
@ -118,7 +142,7 @@ trait Parsers {
/** Matches a single character that is not a whitespace character. */
lazy val NotSpaceClass = charClass(!_.isWhitespace, "non-whitespace character")
/** Matches a single whitespace character, as determined by Char.isWhitespace.*/
/** Matches a single whitespace character, as determined by Char.isWhitespace. */
lazy val SpaceClass = charClass(_.isWhitespace, "whitespace character")
/** Matches a non-empty String consisting of non-whitespace characters. */
@ -128,21 +152,23 @@ trait Parsers {
lazy val OptNotSpace = NotSpaceClass.*.string
/**
* Matches a non-empty String consisting of whitespace characters.
* The suggested tab completion is a single, constant space character.
* Matches a non-empty String consisting of whitespace characters. The suggested tab completion is
* a single, constant space character.
*/
lazy val Space: Parser[Seq[Char]] = SpaceClass.+.examples(" ")
/**
* Matches a possibly empty String consisting of whitespace characters.
* The suggested tab completion is a single, constant space character.
* Matches a possibly empty String consisting of whitespace characters. The suggested tab
* completion is a single, constant space character.
*/
lazy val OptSpace = SpaceClass.*.examples(" ")
/** Parses a non-empty String that contains only valid URI characters, as defined by [[URIChar]].*/
/**
* Parses a non-empty String that contains only valid URI characters, as defined by [[URIChar]].
*/
lazy val URIClass = URIChar.+.string !!! "Invalid URI"
/** Triple-quotes, as used for verbatim quoting.*/
/** Triple-quotes, as used for verbatim quoting. */
lazy val VerbatimDQuotes = "\"\"\""
/** Double quote character. */
@ -156,15 +182,17 @@ trait Parsers {
/** Matches any character except a double quote or whitespace. */
lazy val NotDQuoteSpaceClass =
charClass({ c: Char =>
(c != DQuoteChar) && !c.isWhitespace
}, "non-double-quote-space character")
charClass(
(c: Char) => { (c != DQuoteChar) && !c.isWhitespace },
"non-double-quote-space character"
)
/** Matches any character except a double quote or backslash. */
lazy val NotDQuoteBackslashClass =
charClass({ c: Char =>
(c != DQuoteChar) && (c != BackslashChar)
}, "non-double-quote-backslash character")
charClass(
(c: Char) => { (c != DQuoteChar) && (c != BackslashChar) },
"non-double-quote-backslash character"
)
/** Matches a single character that is valid somewhere in a URI. */
lazy val URIChar = charClass(alphanum, "alphanum") | chars("_-!.~'()*,;:$&+=?/[]@%#")
@ -174,16 +202,21 @@ trait Parsers {
('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9')
/**
* @param base the directory used for completion proposals (when the user presses the TAB key). Only paths under this
* directory will be proposed.
* @return the file that was parsed from the input string. The returned path may or may not exist.
* @param base
* the directory used for completion proposals (when the user presses the TAB key). Only paths
* under this directory will be proposed.
* @return
* the file that was parsed from the input string. The returned path may or may not exist.
*/
def fileParser(base: File): Parser[File] =
OptSpace ~> StringBasic
.examples(new FileExamples(base))
.map(new File(_))
/** Parses a port number. Currently, this accepts any integer and presents a tab completion suggestion of `<port>`. */
/**
* Parses a port number. Currently, this accepts any integer and presents a tab completion
* suggestion of `<port>`.
*/
lazy val Port = token(IntBasic, "<port>")
/** Parses a signed integer. */
@ -195,44 +228,49 @@ trait Parsers {
private[this] def toInt(neg: Option[Char], digits: Seq[Char]): Int =
(neg.toSeq ++ digits).mkString.toInt
/** Parses the lower-case values `true` and `false` into their corresponding Boolean values. */
/** Parses the lower-case values `true` and `false` into their corresponding Boolean values. */
lazy val Bool = ("true" ^^^ true) | ("false" ^^^ false)
/**
* Parses a potentially quoted String value. The value may be verbatim quoted ([[StringVerbatim]]),
* quoted with interpreted escapes ([[StringEscapable]]), or unquoted ([[NotQuoted]]).
* Parses a potentially quoted String value. The value may be verbatim quoted
* ([[StringVerbatim]]), quoted with interpreted escapes ([[StringEscapable]]), or unquoted
* ([[NotQuoted]]).
*/
lazy val StringBasic = StringVerbatim | StringEscapable | NotQuoted | NotQuotedThenQuoted
/**
* Parses a verbatim quoted String value, discarding the quotes in the result. This kind of quoted text starts with triple quotes `"""`
* and ends at the next triple quotes and may contain any character in between.
* Parses a verbatim quoted String value, discarding the quotes in the result. This kind of quoted
* text starts with triple quotes `"""` and ends at the next triple quotes and may contain any
* character in between.
*/
lazy val StringVerbatim: Parser[String] = VerbatimDQuotes ~>
any.+.string.filter(!_.contains(VerbatimDQuotes), _ => "Invalid verbatim string") <~
VerbatimDQuotes
/**
* Parses a string value, interpreting escapes and discarding the surrounding quotes in the result.
* See [[EscapeSequence]] for supported escapes.
* Parses a string value, interpreting escapes and discarding the surrounding quotes in the
* result. See [[EscapeSequence]] for supported escapes.
*/
lazy val StringEscapable: Parser[String] =
(DQuoteChar ~> (NotDQuoteBackslashClass | EscapeSequence).+.string <~ DQuoteChar |
(DQuoteChar ~ DQuoteChar) ^^^ "")
/**
* Parses a size unit string. For example, `128K` parsers to `128L * 1024`, and `1.25g` parses
* to `1024L * 1024 * 1024 * 5 / 4`.
* Parses a size unit string. For example, `128K` parsers to `128L * 1024`, and `1.25g` parses to
* `1024L * 1024 * 1024 * 5 / 4`.
*/
lazy val Size: Parser[Long] = SizeParser.value
/**
* Parses a brace enclosed string and, if each opening brace is matched with a closing brace,
* it returns the entire string including the braces.
* Parses a brace enclosed string and, if each opening brace is matched with a closing brace, it
* returns the entire string including the braces.
*
* @param open the opening character, e.g. '{'
* @param close the closing character, e.g. '}'
* @return a parser for the brace encloosed string.
* @param open
* the opening character, e.g. '{'
* @param close
* the closing character, e.g. '}'
* @return
* a parser for the brace encloosed string.
*/
private[sbt] def braces(open: Char, close: Char): Parser[String] = {
val notDelim = charClass(c => c != open && c != close).*.string
@ -240,10 +278,10 @@ trait Parsers {
(open ~ (notDelim ~ close).?).flatMap {
case (l, Some((content, r))) => Parser.success(s"$l$content$r")
case (l, None) =>
((notDelim ~ impl()).map {
case (leftPrefix, nestedBraces) => leftPrefix + nestedBraces
}.+ ~ notDelim ~ close).map {
case ((nested, suffix), r) => s"$l${nested.mkString}$suffix$r"
((notDelim ~ impl()).map { case (leftPrefix, nestedBraces) =>
leftPrefix + nestedBraces
}.+ ~ notDelim ~ close).map { case ((nested, suffix), r) =>
s"$l${nested.mkString}$suffix$r"
}
}
}
@ -251,52 +289,56 @@ trait Parsers {
}
/**
* Parses a single escape sequence into the represented Char.
* Escapes start with a backslash and are followed by `u` for a [[UnicodeEscape]] or by `b`, `t`, `n`, `f`, `r`, `"`, `'`, `\` for standard escapes.
* Parses a single escape sequence into the represented Char. Escapes start with a backslash and
* are followed by `u` for a [[UnicodeEscape]] or by `b`, `t`, `n`, `f`, `r`, `"`, `'`, `\` for
* standard escapes.
*/
lazy val EscapeSequence: Parser[Char] =
BackslashChar ~> ('b' ^^^ '\b' | 't' ^^^ '\t' | 'n' ^^^ '\n' | 'f' ^^^ '\f' | 'r' ^^^ '\r' |
'\"' ^^^ '\"' | '\'' ^^^ '\'' | '\\' ^^^ '\\' | UnicodeEscape)
/**
* Parses a single unicode escape sequence into the represented Char.
* A unicode escape begins with a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value.
* Parses a single unicode escape sequence into the represented Char. A unicode escape begins with
* a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value.
*/
lazy val UnicodeEscape: Parser[Char] =
("u" ~> repeat(HexDigit, 4, 4)) map { seq =>
Integer.parseInt(seq.mkString, 16).toChar
}
/** Parses an unquoted, non-empty String value that cannot start with a double quote and cannot contain whitespace.*/
/**
* Parses an unquoted, non-empty String value that cannot start with a double quote and cannot
* contain whitespace.
*/
lazy val NotQuoted = (NotDQuoteSpaceClass ~ OptNotSpace) map { case (c, s) => c.toString + s }
/** Parses a non-empty String value that cannot start with a double quote, but includes double quotes.*/
lazy val NotQuotedThenQuoted = (NotQuoted ~ StringEscapable) map {
case (s1, s2) => s"""$s1\"$s2\""""
/** Parses a non-empty String value that cannot start with a double quote, but includes double quotes. */
lazy val NotQuotedThenQuoted = (NotQuoted ~ StringEscapable) map { case (s1, s2) =>
s"""$s1\"$s2\""""
}
/**
* Applies `rep` zero or more times, separated by `sep`.
* The result is the (possibly empty) sequence of results from the multiple `rep` applications. The `sep` results are discarded.
* Applies `rep` zero or more times, separated by `sep`. The result is the (possibly empty)
* sequence of results from the multiple `rep` applications. The `sep` results are discarded.
*/
def repsep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
rep1sep(rep, sep) ?? nilSeq[T]
/**
* Applies `rep` one or more times, separated by `sep`.
* The result is the non-empty sequence of results from the multiple `rep` applications. The `sep` results are discarded.
* Applies `rep` one or more times, separated by `sep`. The result is the non-empty sequence of
* results from the multiple `rep` applications. The `sep` results are discarded.
*/
def rep1sep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
(rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs }
/** Wraps the result of `p` in `Some`.*/
/** Wraps the result of `p` in `Some`. */
def some[T](p: Parser[T]): Parser[Option[T]] = p map { v =>
Some(v)
}
/**
* Applies `f` to the result of `p`, transforming any exception when evaluating
* `f` into a parse failure with the exception `toString` as the message.
* Applies `f` to the result of `p`, transforming any exception when evaluating `f` into a parse
* failure with the exception `toString` as the message.
*/
def mapOrFail[S, T](p: Parser[S])(f: S => T): Parser[T] =
p flatMap { s =>
@ -306,20 +348,24 @@ trait Parsers {
}
/**
* Parses a space-delimited, possibly empty sequence of arguments.
* The arguments may use quotes and escapes according to [[StringBasic]].
* Parses a space-delimited, possibly empty sequence of arguments. The arguments may use quotes
* and escapes according to [[StringBasic]].
*/
def spaceDelimited(display: String): Parser[Seq[String]] =
(token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.*
/** Applies `p` and uses `true` as the result if it succeeds and turns failure into a result of `false`. */
/**
* Applies `p` and uses `true` as the result if it succeeds and turns failure into a result of
* `false`.
*/
def flag[T](p: Parser[T]): Parser[Boolean] = (p ^^^ true) ?? false
/**
* Defines a sequence parser where the parser used for each part depends on the previously parsed values.
* `p` is applied to the (possibly empty) sequence of already parsed values to obtain the next parser to use.
* The parsers obtained in this way are separated by `sep`, whose result is discarded and only the sequence
* of values from the parsers returned by `p` is used for the result.
* Defines a sequence parser where the parser used for each part depends on the previously parsed
* values. `p` is applied to the (possibly empty) sequence of already parsed values to obtain the
* next parser to use. The parsers obtained in this way are separated by `sep`, whose result is
* discarded and only the sequence of values from the parsers returned by `p` is used for the
* result.
*/
def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] = {
def loop(acc: Seq[A]): Parser[Seq[A]] = {
@ -339,21 +385,24 @@ trait Parsers {
/** Parses a URI that is valid according to the single argument java.net.URI constructor. */
lazy val basicUri = mapOrFail(URIClass)(uri => new URI(uri))
/** Parses a URI that is valid according to the single argument java.net.URI constructor, using `ex` as tab completion examples. */
/**
* Parses a URI that is valid according to the single argument java.net.URI constructor, using
* `ex` as tab completion examples.
*/
def Uri(ex: Set[URI]) = basicUri examples (ex.map(_.toString))
}
/** Provides standard [[Parser]] implementations. */
object Parsers extends Parsers
/** Provides common [[Parser]] implementations and helper methods.*/
/** Provides common [[Parser]] implementations and helper methods. */
object DefaultParsers extends Parsers with ParserMain {
/** Applies parser `p` to input `s` and returns `true` if the parse was successful. */
def matches(p: Parser[_], s: String): Boolean =
apply(p)(s).resultEmpty.isValid
/** Returns `true` if `s` parses successfully according to [[ID]].*/
/** Returns `true` if `s` parses successfully according to [[ID]]. */
def validID(s: String): Boolean = {
// Handwritten version of `matches(ID, s)` because validID turned up in profiling.
def isIdChar(c: Char): Boolean = Character.isLetterOrDigit(c) || (c == '-') || (c == '_')

View File

@ -44,13 +44,12 @@ private[sbt] object SizeParser {
((numberParser <~ SpaceClass
.examples(" ", "b", "B", "g", "G", "k", "K", "m", "M")
.*) ~ unitParser.?)
.map {
case (number, unit) =>
unit match {
case None | Some(Bytes) => multiply(number, right = 1L)
case Some(KiloBytes) => multiply(number, right = 1024L)
case Some(MegaBytes) => multiply(number, right = 1024L * 1024)
case Some(GigaBytes) => multiply(number, right = 1024L * 1024 * 1024)
}
.map { case (number, unit) =>
unit match {
case None | Some(Bytes) => multiply(number, right = 1L)
case Some(KiloBytes) => multiply(number, right = 1024L)
case Some(MegaBytes) => multiply(number, right = 1024L * 1024)
case Some(GigaBytes) => multiply(number, right = 1024L * 1024 * 1024)
}
}
}

View File

@ -12,9 +12,9 @@ import DefaultParsers._
import TypeString._
/**
* Basic representation of types parsed from Manifest.toString.
* This can only represent the structure of parameterized types.
* All other types are represented by a TypeString with an empty `args`.
* Basic representation of types parsed from Manifest.toString. This can only represent the
* structure of parameterized types. All other types are represented by a TypeString with an empty
* `args`.
*/
private[sbt] final class TypeString(val base: String, val args: List[TypeString]) {
override def toString =
@ -28,7 +28,7 @@ private[sbt] final class TypeString(val base: String, val args: List[TypeString]
private[sbt] object TypeString {
/** Makes the string representation of a type as returned by Manifest.toString more readable.*/
/** Makes the string representation of a type as returned by Manifest.toString more readable. */
def cleanup(typeString: String): String =
parse(typeString, typeStringParser) match {
case Right(ts) => ts.toString
@ -36,19 +36,19 @@ private[sbt] object TypeString {
}
/**
* Makes a fully qualified type name provided by Manifest.toString more readable.
* The argument should be just a name (like scala.Tuple2) and not a full type (like scala.Tuple2[Int,Boolean])
* Makes a fully qualified type name provided by Manifest.toString more readable. The argument
* should be just a name (like scala.Tuple2) and not a full type (like scala.Tuple2[Int,Boolean])
*/
def cleanupTypeName(base: String): String =
dropPrefix(base).replace('$', '.')
/**
* Removes prefixes from a fully qualified type name that are unnecessary in the presence of standard imports for an sbt setting.
* This does not use the compiler and is therefore a conservative approximation.
* Removes prefixes from a fully qualified type name that are unnecessary in the presence of
* standard imports for an sbt setting. This does not use the compiler and is therefore a
* conservative approximation.
*/
def dropPrefix(base: String): String =
if (base.startsWith(SbtPrefix))
base.substring(SbtPrefix.length)
if (base.startsWith(SbtPrefix)) base.substring(SbtPrefix.length)
else if (base.startsWith(CollectionPrefix)) {
val simple = base.substring(CollectionPrefix.length)
if (ShortenCollection(simple)) simple else base
@ -75,8 +75,9 @@ private[sbt] object TypeString {
)
/**
* A Parser that extracts basic structure from the string representation of a type from Manifest.toString.
* This is rudimentary and essentially only decomposes the string into names and arguments for parameterized types.
* A Parser that extracts basic structure from the string representation of a type from
* Manifest.toString. This is rudimentary and essentially only decomposes the string into names
* and arguments for parameterized types.
*/
lazy val typeStringParser: Parser[TypeString] = {
def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$'

View File

@ -10,22 +10,23 @@ package complete
sealed trait UpperBound {
/** True if and only if the given value meets this bound.*/
/** True if and only if the given value meets this bound. */
def >=(min: Int): Boolean
/** True if and only if this bound is one.*/
/** True if and only if this bound is one. */
def isOne: Boolean
/** True if and only if this bound is zero.*/
/** True if and only if this bound is zero. */
def isZero: Boolean
/**
* If this bound is zero or Infinite, `decrement` returns this bound.
* Otherwise, this bound is finite and greater than zero and `decrement` returns the bound that is one less than this bound.
* If this bound is zero or Infinite, `decrement` returns this bound. Otherwise, this bound is
* finite and greater than zero and `decrement` returns the bound that is one less than this
* bound.
*/
def decrement: UpperBound
/** True if and only if this is unbounded.*/
/** True if and only if this is unbounded. */
def isInfinite: Boolean
}
@ -45,8 +46,8 @@ case object Infinite extends UpperBound {
}
/**
* Represents a finite upper bound. The maximum allowed value is 'value', inclusive.
* It must positive.
* Represents a finite upper bound. The maximum allowed value is 'value', inclusive. It must
* positive.
*/
final case class Finite(value: Int) extends UpperBound {
assume(value >= 0, "Maximum occurrences must be nonnegative.")

View File

@ -13,9 +13,8 @@ import org.scalacheck._, Gen._, Prop._
object DefaultParsersSpec extends Properties("DefaultParsers") {
import DefaultParsers.{ ID, isIDChar, matches, validID }
property("∀ s ∈ String: validID(s) == matches(ID, s)") = forAll(
(s: String) => validID(s) == matches(ID, s)
)
property("∀ s ∈ String: validID(s) == matches(ID, s)") =
forAll((s: String) => validID(s) == matches(ID, s))
property("∀ s ∈ genID: matches(ID, s)") = forAll(genID)(s => matches(ID, s))
property("∀ s ∈ genID: validID(s)") = forAll(genID)(s => validID(s))

View File

@ -8,6 +8,8 @@
package sbt.internal.util
package complete
import scala.collection.StringOps
object JLineTest {
import DefaultParsers._
@ -153,12 +155,12 @@ object ParserExample {
val an = repeat(a, min = n, max = n)
val ann = aqn ~ an
def r = apply(ann)("a" * (n * 2)).resultEmpty
def r = apply(ann)(new StringOps("a") * (n * 2)).resultEmpty
println(r.isValid)
}
def run2(n: Int): Unit = {
val ab = "ab".?.*
val r = apply(ab)("a" * n).resultEmpty
val r = apply(ab)(new StringOps("a") * n).resultEmpty
println(r)
}
}

View File

@ -16,10 +16,10 @@ class FileExamplesTest extends UnitSpec {
"listing all files in an absolute base directory" should
"produce the entire base directory's contents" in {
withDirectoryStructure() { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.allRelativizedPaths)
withDirectoryStructure() { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.allRelativizedPaths)
}
}
}
"listing files with a prefix that matches none" should "produce an empty list" in {
withDirectoryStructure(withCompletionPrefix = "z") { ds =>

View File

@ -14,57 +14,57 @@ class ParserWithExamplesTest extends UnitSpec {
"listing a limited number of completions" should
"grab only the needed number of elements from the iterable source of examples" in {
val _ = new ParserWithLazyExamples {
parserWithExamples.completions(0)
examples.size shouldEqual maxNumberOfExamples
val _ = new ParserWithLazyExamples {
parserWithExamples.completions(0)
examples.size shouldEqual maxNumberOfExamples
}
}
}
"listing only valid completions" should
"use the delegate parser to remove invalid examples" in {
val _ = new ParserWithValidExamples {
val validCompletions = Completions(
Set(
suggestion("blue"),
suggestion("red")
val _ = new ParserWithValidExamples {
val validCompletions = Completions(
Set(
suggestion("blue"),
suggestion("red")
)
)
)
parserWithExamples.completions(0) shouldEqual validCompletions
parserWithExamples.completions(0) shouldEqual validCompletions
}
}
}
"listing valid completions in a derived parser" should
"produce only valid examples that start with the character of the derivation" in {
val _ = new ParserWithValidExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue")
val _ = new ParserWithValidExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue")
)
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
}
"listing valid and invalid completions" should
"produce the entire source of examples" in {
val _ = new parserWithAllExamples {
val completions = Completions(examples.map(suggestion(_)).toSet)
parserWithExamples.completions(0) shouldEqual completions
val _ = new parserWithAllExamples {
val completions = Completions(examples.map(suggestion(_)).toSet)
parserWithExamples.completions(0) shouldEqual completions
}
}
}
"listing valid and invalid completions in a derived parser" should
"produce only examples that start with the character of the derivation" in {
val _ = new parserWithAllExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue"),
suggestion("lock")
val _ = new parserWithAllExamples {
val derivedCompletions = Completions(
Set(
suggestion("lue"),
suggestion("lock")
)
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
}
class ParserWithLazyExamples
extends ParserExample(

View File

@ -36,8 +36,7 @@ object ErrorHandling {
if (e.getClass == classOf[RuntimeException]) {
val msg = e.getMessage
if (msg == null || msg.isEmpty) e.toString else msg
} else
e.toString
} else e.toString
}
sealed class TranslatedException private[sbt] (msg: String, cause: Throwable)

View File

@ -7,7 +7,7 @@
package sbt.internal.util
/** Defines a function to call as sbt exits.*/
/** Defines a function to call as sbt exits. */
trait ExitHook {
/** Subclasses should implement this method, which is called when this hook is executed. */
@ -21,7 +21,10 @@ object ExitHook {
object ExitHooks {
/** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */
/**
* Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to
* run.
*/
def runExitHooks(exitHooks: Seq[ExitHook]): Seq[Throwable] =
exitHooks.flatMap(hook => ErrorHandling.wideConvert(hook.runBeforeExiting()).left.toOption)

View File

@ -10,19 +10,20 @@ package sbt.internal.util
final class MessageOnlyException(override val toString: String) extends RuntimeException(toString)
/**
* A dummy exception for the top-level exception handler to know that an exception
* has been handled, but is being passed further up to indicate general failure.
* A dummy exception for the top-level exception handler to know that an exception has been handled,
* but is being passed further up to indicate general failure.
*/
final class AlreadyHandledException(val underlying: Throwable) extends RuntimeException
/**
* A marker trait for a top-level exception handler to know that this exception
* doesn't make sense to display.
* A marker trait for a top-level exception handler to know that this exception doesn't make sense
* to display.
*/
trait UnprintableException extends Throwable
/**
* A marker trait that refines UnprintableException to indicate to a top-level exception handler
* that the code throwing this exception has already provided feedback to the user about the error condition.
* that the code throwing this exception has already provided feedback to the user about the error
* condition.
*/
trait FeedbackProvidedException extends UnprintableException

View File

@ -11,9 +11,8 @@ import java.util.concurrent.ConcurrentHashMap
import scala.sys.process.Process
/**
* Manages forked processes created by sbt. Any process registered
* with RunningProcesses can be killed with the killAll method. In
* particular, this can be used in a signal handler to kill these
* Manages forked processes created by sbt. Any process registered with RunningProcesses can be
* killed with the killAll method. In particular, this can be used in a signal handler to kill these
* processes when the user inputs ctrl+c.
*/
private[sbt] object RunningProcesses {

View File

@ -10,8 +10,8 @@ package com.github.ghik.silencer
import scala.annotation.Annotation
/**
* When silencer compiler plugin is enabled, this annotation suppresses all warnings emitted by scalac for some portion
* of source code. It can be applied on any definition (`class`, def`, `val`, `var`, etc.) or on arbitrary expression,
* e.g. {123; 456}: @silent`
* When silencer compiler plugin is enabled, this annotation suppresses all warnings emitted by
* scalac for some portion of source code. It can be applied on any definition (`class`, def`,
* `val`, `var`, etc.) or on arbitrary expression, e.g. {123; 456}: @silent`
*/
class silent extends Annotation

View File

@ -9,7 +9,7 @@ package sbt.internal.util
import sbt.util._
/** Implements the level-setting methods of Logger.*/
/** Implements the level-setting methods of Logger. */
abstract class BasicLogger extends AbstractLogger {
private var traceEnabledVar: Int = java.lang.Integer.MAX_VALUE
private var level: Level.Value = Level.Info

View File

@ -29,11 +29,10 @@ object BufferedAppender {
}
/**
* An appender that can buffer the logging done on it and then can flush the buffer
* to the delegate appender provided in the constructor. Use 'record()' to
* start buffering and then 'play' to flush the buffer to the backing appender.
* The logging level set at the time a message is originally logged is used, not
* the level at the time 'play' is called.
* An appender that can buffer the logging done on it and then can flush the buffer to the delegate
* appender provided in the constructor. Use 'record()' to start buffering and then 'play' to flush
* the buffer to the backing appender. The logging level set at the time a message is originally
* logged is used, not the level at the time 'play' is called.
*/
class BufferedAppender(override val name: String, delegate: Appender) extends Appender {
override def close(): Unit = log4j.get match {
@ -108,8 +107,8 @@ class BufferedAppender(override val name: String, delegate: Appender) extends Ap
}
/**
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate
* so that the messages are written consecutively. The buffer is cleared in the process.
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate so that the
* messages are written consecutively. The buffer is cleared in the process.
*/
def play(): Unit =
synchronized {
@ -131,11 +130,10 @@ class BufferedAppender(override val name: String, delegate: Appender) extends Ap
}
/**
* A logger that can buffer the logging done on it and then can flush the buffer
* to the delegate logger provided in the constructor. Use 'startRecording' to
* start buffering and then 'play' from to flush the buffer to the backing logger.
* The logging level set at the time a message is originally logged is used, not
* the level at the time 'play' is called.
* A logger that can buffer the logging done on it and then can flush the buffer to the delegate
* logger provided in the constructor. Use 'startRecording' to start buffering and then 'play' from
* to flush the buffer to the backing logger. The logging level set at the time a message is
* originally logged is used, not the level at the time 'play' is called.
*
* This class assumes that it is the only client of the delegate logger.
*/
@ -168,8 +166,8 @@ class BufferedLogger(delegate: AbstractLogger) extends BasicLogger {
}
/**
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate
* so that the messages are written consecutively. The buffer is cleared in the process.
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate so that the
* messages are written consecutively. The buffer is cleared in the process.
*/
def play(): Unit = synchronized { delegate.logAll(buffer.toList); buffer.clear() }

View File

@ -40,27 +40,36 @@ object ConsoleLogger {
/**
* A new `ConsoleLogger` that logs to `out`.
*
* @param out Where to log the messages.
* @return A new `ConsoleLogger` that logs to `out`.
* @param out
* Where to log the messages.
* @return
* A new `ConsoleLogger` that logs to `out`.
*/
def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out))
/**
* A new `ConsoleLogger` that logs to `out`.
*
* @param out Where to log the messages.
* @return A new `ConsoleLogger` that logs to `out`.
* @param out
* Where to log the messages.
* @return
* A new `ConsoleLogger` that logs to `out`.
*/
def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out))
/**
* A new `ConsoleLogger` that logs to `out`.
*
* @param out Where to log the messages.
* @param ansiCodesSupported `true` if `out` supports ansi codes, `false` otherwise.
* @param useFormat `true` to show formatting, `false` to remove it from messages.
* @param suppressedMessage How to show suppressed stack traces.
* @return A new `ConsoleLogger` that logs to `out`.
* @param out
* Where to log the messages.
* @param ansiCodesSupported
* `true` if `out` supports ansi codes, `false` otherwise.
* @param useFormat
* `true` to show formatting, `false` to remove it from messages.
* @param suppressedMessage
* How to show suppressed stack traces.
* @return
* A new `ConsoleLogger` that logs to `out`.
*/
def apply(
out: ConsoleOut = ConsoleOut.systemOut,
@ -73,8 +82,7 @@ object ConsoleLogger {
}
/**
* A logger that logs to the console. On supported systems, the level labels are
* colored.
* A logger that logs to the console. On supported systems, the level labels are colored.
*/
class ConsoleLogger private[ConsoleLogger] (
out: ConsoleOut,
@ -144,10 +152,9 @@ object ConsoleAppender {
/**
* Indicates whether formatting has been disabled in environment variables.
* 1. -Dsbt.log.noformat=true means no formatting.
* 2. -Dsbt.color=always/auto/never/true/false
* 3. -Dsbt.colour=always/auto/never/true/false
* 4. -Dsbt.log.format=always/auto/never/true/false
* 1. -Dsbt.log.noformat=true means no formatting.
* 2. -Dsbt.color=always/auto/never/true/false
* 3. -Dsbt.colour=always/auto/never/true/false
* 4. -Dsbt.log.format=always/auto/never/true/false
*/
@deprecated("Use Terminal.isAnsiSupported or Terminal.isColorEnabled", "1.4.0")
lazy val formatEnabledInEnv: Boolean = Terminal.isAnsiSupported
@ -163,58 +170,74 @@ object ConsoleAppender {
/**
* A new `ConsoleAppender` that writes to standard output.
*
* @return A new `ConsoleAppender` that writes to standard output.
* @return
* A new `ConsoleAppender` that writes to standard output.
*/
def apply(): Appender = apply(ConsoleOut.systemOut)
/**
* A new `ConsoleAppender` that appends log message to `out`.
*
* @param out Where to write messages.
* @return A new `ConsoleAppender`.
* @param out
* Where to write messages.
* @return
* A new `ConsoleAppender`.
*/
def apply(out: PrintStream): Appender = apply(ConsoleOut.printStreamOut(out))
/**
* A new `ConsoleAppender` that appends log messages to `out`.
*
* @param out Where to write messages.
* @return A new `ConsoleAppender`.
* @param out
* Where to write messages.
* @return
* A new `ConsoleAppender`.
*/
def apply(out: PrintWriter): Appender = apply(ConsoleOut.printWriterOut(out))
/**
* A new `ConsoleAppender` that writes to `out`.
*
* @param out Where to write messages.
* @return A new `ConsoleAppender` that writes to `out`.
* @param out
* Where to write messages.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(out: ConsoleOut): Appender = apply(generateName(), out)
/**
* A new `ConsoleAppender` identified by `name`, and that writes to standard output.
*
* @param name An identifier for the `ConsoleAppender`.
* @return A new `ConsoleAppender` that writes to standard output.
* @param name
* An identifier for the `ConsoleAppender`.
* @return
* A new `ConsoleAppender` that writes to standard output.
*/
def apply(name: String): Appender = apply(name, ConsoleOut.systemOut)
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param out Where to write messages.
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param out
* Where to write messages.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(name: String, out: ConsoleOut): Appender = apply(name, out, Terminal.isAnsiSupported)
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param out Where to write messages.
* @param suppressedMessage How to handle stack traces.
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param out
* Where to write messages.
* @param suppressedMessage
* How to handle stack traces.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(
name: String,
@ -228,10 +251,14 @@ object ConsoleAppender {
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param out Where to write messages.
* @param useFormat `true` to enable format (color, bold, etc.), `false` to remove formatting.
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param out
* Where to write messages.
* @param useFormat
* `true` to enable format (color, bold, etc.), `false` to remove formatting.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(name: String, out: ConsoleOut, useFormat: Boolean): Appender =
apply(name, out, useFormat || Terminal.isAnsiSupported, useFormat, noSuppressedMessage)
@ -239,9 +266,12 @@ object ConsoleAppender {
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param terminal The terminal to which this appender corresponds
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param terminal
* The terminal to which this appender corresponds
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(name: String, terminal: Terminal): Appender = {
new ConsoleAppender(name, Properties.from(terminal), noSuppressedMessage)
@ -262,10 +292,14 @@ object ConsoleAppender {
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param terminal The terminal to which this appender corresponds
* @param suppressedMessage How to handle stack traces.
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param terminal
* The terminal to which this appender corresponds
* @param suppressedMessage
* How to handle stack traces.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(
name: String,
@ -278,12 +312,16 @@ object ConsoleAppender {
/**
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
*
* @param name An identifier for the `ConsoleAppender`.
* @param out Where to write messages.
* @param ansiCodesSupported `true` if the output stream supports ansi codes, `false` otherwise.
* @param useFormat `true` to enable format (color, bold, etc.), `false` to remove
* formatting.
* @return A new `ConsoleAppender` that writes to `out`.
* @param name
* An identifier for the `ConsoleAppender`.
* @param out
* Where to write messages.
* @param ansiCodesSupported
* `true` if the output stream supports ansi codes, `false` otherwise.
* @param useFormat
* `true` to enable format (color, bold, etc.), `false` to remove formatting.
* @return
* A new `ConsoleAppender` that writes to `out`.
*/
def apply(
name: String,
@ -302,8 +340,10 @@ object ConsoleAppender {
/**
* Converts the Log4J `level` to the corresponding sbt level.
*
* @param level A level, as represented by Log4J.
* @return The corresponding level in sbt's world.
* @param level
* A level, as represented by Log4J.
* @return
* The corresponding level in sbt's world.
*/
def toLevel(level: XLevel): Level.Value =
level match {
@ -319,8 +359,10 @@ object ConsoleAppender {
/**
* Converts the sbt `level` to the corresponding Log4J level.
*
* @param level A level, as represented by sbt.
* @return The corresponding level in Log4J's world.
* @param level
* A level, as represented by sbt.
* @return
* The corresponding level in Log4J's world.
*/
def toXLevel(level: Level.Value): XLevel =
level match {
@ -341,8 +383,7 @@ object ConsoleAppender {
// https://logging.apache.org/log4j/2.x/log4j-core/apidocs/index.html
/**
* A logger that logs to the console. On supported systems, the level labels are
* colored.
* A logger that logs to the console. On supported systems, the level labels are colored.
*
* This logger is not thread-safe.
*/
@ -357,12 +398,17 @@ class ConsoleAppender(
log4j.synchronized {
log4j.get match {
case null =>
val l = new Log4JConsoleAppender(name, properties, suppressedMessage, { event =>
val level = ConsoleAppender.toLevel(event.getLevel)
val message = event.getMessage
try appendMessage(level, message)
catch { case _: ClosedChannelException => }
})
val l = new Log4JConsoleAppender(
name,
properties,
suppressedMessage,
{ event =>
val level = ConsoleAppender.toLevel(event.getLevel)
val message = event.getMessage
try appendMessage(level, message)
catch { case _: ClosedChannelException => }
}
)
log4j.set(l)
l
case l => l
@ -404,11 +450,13 @@ trait Appender extends AutoCloseable {
/**
* Logs the stack trace of `t`, possibly shortening it.
*
* The `traceLevel` parameter configures how the stack trace will be shortened.
* See `StackTrace.trimmed`.
* The `traceLevel` parameter configures how the stack trace will be shortened. See
* `StackTrace.trimmed`.
*
* @param t The `Throwable` whose stack trace to log.
* @param traceLevel How to shorten the stack trace.
* @param t
* The `Throwable` whose stack trace to log.
* @param traceLevel
* How to shorten the stack trace.
*/
def trace(t: => Throwable, traceLevel: Int): Unit = {
if (traceLevel >= 0)
@ -423,8 +471,10 @@ trait Appender extends AutoCloseable {
/**
* Logs a `ControlEvent` to the log.
*
* @param event The kind of `ControlEvent`.
* @param message The message to log.
* @param event
* The kind of `ControlEvent`.
* @param message
* The message to log.
*/
def control(event: ControlEvent.Value, message: => String): Unit =
appendLog(labelColor(Level.Info), Level.Info.toString, BLUE, message)
@ -432,8 +482,10 @@ trait Appender extends AutoCloseable {
/**
* Appends the message `message` to the log at level `level`.
*
* @param level The importance level of the message.
* @param message The message to log.
* @param level
* The importance level of the message.
* @param message
* The message to log.
*/
def appendLog(level: Level.Value, message: => String): Unit = {
appendLog(labelColor(level), level.toString, NO_COLOR, message)
@ -442,8 +494,10 @@ trait Appender extends AutoCloseable {
/**
* Select the right color for the label given `level`.
*
* @param level The label to consider to select the color.
* @return The color to use to color the label.
* @param level
* The label to consider to select the color.
* @return
* The color to use to color the label.
*/
private def labelColor(level: Level.Value): String =
level match {
@ -457,11 +511,14 @@ trait Appender extends AutoCloseable {
* `labelColor` if formatting is enabled. The lines of the messages are colored with
* `messageColor` if formatting is enabled.
*
* @param labelColor The color to use to format the label.
* @param label The label to prefix each line with. The label is shown between square
* brackets.
* @param messageColor The color to use to format the message.
* @param message The message to write.
* @param labelColor
* The color to use to format the label.
* @param label
* The label to prefix each line with. The label is shown between square brackets.
* @param messageColor
* The color to use to format the message.
* @param message
* The message to write.
*/
private def appendLog(
labelColor: String,
@ -535,7 +592,9 @@ trait Appender extends AutoCloseable {
codec.showLines(te).toVector foreach { appendLog(Level.Error, _) }
}
if (traceLevel <= 2) {
suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)) foreach {
suppressedMessage(
new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)
) foreach {
appendLog(Level.Error, _)
}
}
@ -545,7 +604,7 @@ trait Appender extends AutoCloseable {
def appendEvent(oe: ObjectEvent[_]): Unit = {
val contentType = oe.contentType
contentType match {
case "sbt.internal.util.TraceEvent" => appendTraceEvent(oe.message.asInstanceOf[TraceEvent])
case "sbt.internal.util.TraceEvent" => appendTraceEvent(oe.message.asInstanceOf[TraceEvent])
case "sbt.internal.util.ProgressEvent" =>
case _ =>
LogExchange.stringCodec[AnyRef](contentType) match {
@ -597,7 +656,7 @@ private[sbt] class ConsoleAppenderFromLog4J(
delegate.append(new AbstractLogEvent {
override def getLevel(): XLevel = ConsoleAppender.toXLevel(level)
override def getMessage(): Message =
StringFormatterMessageFactory.INSTANCE.newMessage(message.toString, Array.empty)
StringFormatterMessageFactory.INSTANCE.newMessage(message.toString, Array.empty[AnyRef])
})
}
}

View File

@ -51,14 +51,14 @@ object ConsoleOut {
private[this] final val OverwriteLine = "\u001B[A\r\u001B[2K"
/**
* ConsoleOut instance that is backed by System.out. It overwrites the previously printed line
* if the function `f(lineToWrite, previousLine)` returns true.
* ConsoleOut instance that is backed by System.out. It overwrites the previously printed line if
* the function `f(lineToWrite, previousLine)` returns true.
*
* The ConsoleOut returned by this method assumes that the only newlines are from println calls
* and not in the String arguments.
*/
def systemOutOverwrite(f: (String, String) => Boolean): ConsoleOut = new ConsoleOut {
val lockObject = System.out
val lockObject: PrintStream = System.out
private[this] var last: Option[String] = None
private[this] var current = new java.lang.StringBuffer
def print(s: String): Unit = synchronized { current.append(s); () }
@ -91,7 +91,8 @@ object ConsoleOut {
override def toString: String = s"TerminalOut"
}
/** Same as terminalOut but it catches and ignores the ClosedChannelException
/**
* Same as terminalOut but it catches and ignores the ClosedChannelException
*/
def safeTerminalOut(terminal: Terminal): ConsoleOut = {
val out = terminalOut(terminal)
@ -100,7 +101,7 @@ object ConsoleOut {
override def print(s: String): Unit = catchException(out.print(s))
override def println(s: String): Unit = catchException(out.println(s))
override def println(): Unit = catchException(out.println())
override def flush(): Unit = catchException(out.flush)
override def flush(): Unit = catchException(out.flush())
override def toString: String = s"SafeTerminalOut($terminal)"
private def catchException(f: => Unit): Unit = {
try f

View File

@ -16,8 +16,8 @@ object EscHelpers {
final val ESC = '\u001B'
/**
* An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value 126).
* It is the final character in an escape sequence.
* An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value
* 126). It is the final character in an escape sequence.
*
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
*/
@ -29,10 +29,11 @@ object EscHelpers {
*
* see: http://en.wikipedia.org/wiki/ANSI_escape_code
*
* The CSI (control sequence instruction) codes start with ESC + '['. This is for testing the second character.
* The CSI (control sequence instruction) codes start with ESC + '['. This is for testing the
* second character.
*
* There is an additional CSI (one character) that we could test for, but is not frequently used, and we don't
* check for it.
* There is an additional CSI (one character) that we could test for, but is not frequently used,
* and we don't check for it.
*
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
*/
@ -55,13 +56,13 @@ object EscHelpers {
s.indexOf(ESC) >= 0
/**
* Returns the string `s` with escape sequences removed.
* An escape sequence starts with the ESC character (decimal value 27) and ends with an escape terminator.
* @see isEscapeTerminator
* Returns the string `s` with escape sequences removed. An escape sequence starts with the ESC
* character (decimal value 27) and ends with an escape terminator.
* @see
* isEscapeTerminator
*/
def removeEscapeSequences(s: String): String =
if (s.isEmpty || !hasEscapeSequence(s))
s
if (s.isEmpty || !hasEscapeSequence(s)) s
else {
val sb = new java.lang.StringBuilder
nextESC(s, 0, sb)
@ -130,12 +131,15 @@ object EscHelpers {
/**
* Strips ansi escape and color codes from an input string.
*
* @param bytes the input bytes
* @param stripAnsi toggles whether or not to remove general ansi escape codes
* @param stripColor toggles whether or not to remove ansi color codes
* @return a string with the escape and color codes removed depending on the input
* parameter along with the length of the output string (which may be smaller than
* the returned array)
* @param bytes
* the input bytes
* @param stripAnsi
* toggles whether or not to remove general ansi escape codes
* @param stripColor
* toggles whether or not to remove ansi color codes
* @return
* a string with the escape and color codes removed depending on the input parameter along with
* the length of the output string (which may be smaller than the returned array)
*/
def strip(bytes: Array[Byte], stripAnsi: Boolean, stripColor: Boolean): (Array[Byte], Int) = {
val res = Array.fill[Byte](bytes.length)(0)
@ -186,15 +190,17 @@ object EscHelpers {
}
/**
* Removes the ansi escape sequences from a string and makes a best attempt at
* calculating any ansi moves by hand. For example, if the string contains
* a backspace character followed by a character, the output string would
* replace the character preceding the backspaces with the character proceeding it.
* This is in contrast to `strip` which just removes all ansi codes entirely.
* Removes the ansi escape sequences from a string and makes a best attempt at calculating any
* ansi moves by hand. For example, if the string contains a backspace character followed by a
* character, the output string would replace the character preceding the backspaces with the
* character proceeding it. This is in contrast to `strip` which just removes all ansi codes
* entirely.
*
* @param s the input string
* @return a string containing the original characters of the input stream with
* the ansi escape codes removed.
* @param s
* the input string
* @return
* a string containing the original characters of the input stream with the ansi escape codes
* removed.
*/
def stripColorsAndMoves(s: String): String = {
val bytes = s.getBytes
@ -239,7 +245,10 @@ object EscHelpers {
new String(res, 0, limit)
}
/** Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after the ESC that starts the sequence. */
/**
* Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after
* the ESC that starts the sequence.
*/
private[this] def skipESC(s: String, i: Int): Int = {
if (i >= s.length) {
i

View File

@ -11,8 +11,9 @@ import sbt.util._
import scala.annotation.nowarn
/**
* A filter logger is used to delegate messages but not the logging level to another logger. This means
* that messages are logged at the higher of the two levels set by this logger and its delegate.
* A filter logger is used to delegate messages but not the logging level to another logger. This
* means that messages are logged at the higher of the two levels set by this logger and its
* delegate.
*/
class FilterLogger(delegate: AbstractLogger) extends BasicLogger {
@nowarn override lazy val ansiCodesSupported = delegate.ansiCodesSupported

View File

@ -13,11 +13,11 @@ import java.io.{ File, PrintWriter }
/**
* Provides the current global logging configuration.
*
* `full` is the current global logger. It should not be set directly because it is generated as needed from `backing.newLogger`.
* `console` is where all logging from all ConsoleLoggers should go.
* `backed` is the Logger that other loggers should feed into.
* `backing` tracks the files that persist the global logging.
* `newLogger` creates a new global logging configuration from a sink and backing configuration.
* `full` is the current global logger. It should not be set directly because it is generated as
* needed from `backing.newLogger`. `console` is where all logging from all ConsoleLoggers should
* go. `backed` is the Logger that other loggers should feed into. `backing` tracks the files that
* persist the global logging. `newLogger` creates a new global logging configuration from a sink
* and backing configuration.
*/
final case class GlobalLogging(
full: ManagedLogger,
@ -36,21 +36,24 @@ final case class GlobalLogging1(
)
/**
* Tracks the files that persist the global logging.
* `file` is the current backing file. `last` is the previous backing file, if there is one.
* `newBackingFile` creates a new temporary location for the next backing file.
* Tracks the files that persist the global logging. `file` is the current backing file. `last` is
* the previous backing file, if there is one. `newBackingFile` creates a new temporary location for
* the next backing file.
*/
final case class GlobalLogBacking(file: File, last: Option[File], newBackingFile: () => File) {
/** Shifts the current backing file to `last` and sets the current backing to `newFile`. */
def shift(newFile: File) = GlobalLogBacking(newFile, Some(file), newBackingFile)
/** Shifts the current backing file to `last` and sets the current backing to a new temporary file generated by `newBackingFile`. */
/**
* Shifts the current backing file to `last` and sets the current backing to a new temporary file
* generated by `newBackingFile`.
*/
def shiftNew() = shift(newBackingFile())
/**
* If there is a previous backing file in `last`, that becomes the current backing file and the previous backing is cleared.
* Otherwise, no changes are made.
* If there is a previous backing file in `last`, that becomes the current backing file and the
* previous backing is cleared. Otherwise, no changes are made.
*/
def unshift = GlobalLogBacking(last getOrElse file, None, newBackingFile)
@ -58,7 +61,7 @@ final case class GlobalLogBacking(file: File, last: Option[File], newBackingFile
object GlobalLogBacking {
def apply(newBackingFile: => File): GlobalLogBacking =
GlobalLogBacking(newBackingFile, None, newBackingFile _)
GlobalLogBacking(newBackingFile, None, () => newBackingFile)
}
object GlobalLogging {

View File

@ -81,7 +81,7 @@ private[sbt] object JLine3 {
val bytes = new Array[Byte](4)
var i = 0
var res = -2
do {
while (i < 4 && res == -2) {
inputStream.read() match {
case -1 => res = -1
case byte =>
@ -94,8 +94,7 @@ private[sbt] object JLine3 {
if (it.hasNext) res = it.next
} catch { case _: CharacterCodingException => }
}
} while (i < 4 && res == -2)
}
res
}
private[this] def wrapTerminal(term: Terminal): JTerminal = {
@ -210,7 +209,9 @@ private[sbt] object JLine3 {
term.getBooleanCapability(cap.toString)
def getAttributes(): Attributes = attributesFromMap(term.getAttributes)
def getSize(): Size = new Size(term.getWidth, term.getHeight)
def setAttributes(a: Attributes): Unit = {} // don't allow the jline line reader to change attributes
def setAttributes(
a: Attributes
): Unit = {} // don't allow the jline line reader to change attributes
def setSize(size: Size): Unit = term.setSize(size.getColumns, size.getRows)
override def enterRawMode(): Attributes = {

View File

@ -10,8 +10,8 @@ package sbt.internal.util
import sbt.util._
/**
* Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at `level`.
* A line is delimited by `nl`, which is by default the platform line separator.
* Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at
* `level`. A line is delimited by `nl`, which is by default the platform line separator.
*/
class LoggerWriter(
delegate: Logger,

View File

@ -11,6 +11,7 @@ import sbt.internal.util.codec.JsonProtocol._
import sbt.util._
import scala.reflect.runtime.universe.TypeTag
import sjsonnew.JsonFormat
import sbt.internal.util.appmacro.StringTypeTag
private[sbt] trait MiniLogger {
def log[T](level: Level.Value, message: ObjectEvent[T]): Unit
@ -45,7 +46,7 @@ class ManagedLogger(
if (terminal.fold(true)(_.isSuccessEnabled)) {
infoEvent[SuccessEvent](SuccessEvent(message))(
implicitly[JsonFormat[SuccessEvent]],
StringTypeTag.fast[SuccessEvent],
StringTypeTag[SuccessEvent],
)
}
}
@ -54,30 +55,14 @@ class ManagedLogger(
LogExchange.registerStringCodec[A]
}
@deprecated("Use macro-powered StringTypeTag.fast instead", "1.4.0")
final def debugEvent[A](event: => A, f: JsonFormat[A], t: TypeTag[A]): Unit =
debugEvent(event)(f, StringTypeTag.apply(t))
@deprecated("Use macro-powered StringTypeTag.fast instead", "1.4.0")
final def infoEvent[A](event: => A, f: JsonFormat[A], t: TypeTag[A]): Unit =
infoEvent(event)(f, StringTypeTag.apply(t))
@deprecated("Use macro-powered StringTypeTag.fast instead", "1.4.0")
final def warnEvent[A](event: => A, f: JsonFormat[A], t: TypeTag[A]): Unit =
warnEvent(event)(f, StringTypeTag.apply(t))
@deprecated("Use macro-powered StringTypeTag.fast instead", "1.4.0")
final def errorEvent[A](event: => A, f: JsonFormat[A], t: TypeTag[A]): Unit =
errorEvent(event)(f, StringTypeTag.apply(t))
final def debugEvent[A: JsonFormat: StringTypeTag](event: => A): Unit =
logEvent(Level.Debug, event)
final def infoEvent[A: JsonFormat: StringTypeTag](event: => A): Unit = logEvent(Level.Info, event)
final def warnEvent[A: JsonFormat: StringTypeTag](event: => A): Unit = logEvent(Level.Warn, event)
final def errorEvent[A: JsonFormat: StringTypeTag](event: => A): Unit =
logEvent(Level.Error, event)
@deprecated("Use macro-powered StringTypeTag.fast instead", "1.4.0")
def logEvent[A](level: Level.Value, event: => A, f: JsonFormat[A], t: TypeTag[A]): Unit =
logEvent(level, event)(f, StringTypeTag.apply(t))
def logEvent[A: JsonFormat](level: Level.Value, event: => A)(
implicit tag: StringTypeTag[A]
def logEvent[A: JsonFormat](level: Level.Value, event: => A)(implicit
tag: StringTypeTag[A]
): Unit = {
val v: A = event
// println("logEvent " + tag.key)

View File

@ -159,11 +159,10 @@ private[sbt] object ProgressState {
private val SERVER_IS_RUNNING_LENGTH = SERVER_IS_RUNNING.length + 3
/**
* Receives a new task report and replaces the old one. In the event that the new
* report has fewer lines than the previous report, padding lines are added on top
* so that the console log lines remain contiguous. When a console line is printed
* at the info or greater level, we can decrement the padding because the console
* line will have filled in the blank line.
* Receives a new task report and replaces the old one. In the event that the new report has fewer
* lines than the previous report, padding lines are added on top so that the console log lines
* remain contiguous. When a console line is printed at the info or greater level, we can
* decrement the padding because the console line will have filled in the blank line.
*/
private[sbt] def updateProgressState(
pe: ProgressEvent,

View File

@ -14,19 +14,17 @@ object StackTrace {
def isSbtClass(name: String) = name.startsWith("sbt.") || name.startsWith("xsbt.")
/**
* Return a printable representation of the stack trace associated
* with t. Information about t and its Throwable causes is included.
* The number of lines to be included for each Throwable is configured
* via d which should be greater than or equal to 0.
* Return a printable representation of the stack trace associated with t. Information about t and
* its Throwable causes is included. The number of lines to be included for each Throwable is
* configured via d which should be greater than or equal to 0.
*
* - If d is 0, then all elements are included up to (but not including)
* the first element that comes from sbt.
* - If d is greater than 0, then up to that many lines are included,
* where the line for the Throwable is counted plus one line for each stack element.
* Fewer lines will be included if there are not enough stack elements.
* - If d is 0, then all elements are included up to (but not including) the first element that
* comes from sbt.
* - If d is greater than 0, then up to that many lines are included, where the line for the
* Throwable is counted plus one line for each stack element. Fewer lines will be included if
* there are not enough stack elements.
*
* See also ConsoleAppender where d <= 2 is treated specially by
* printing a prepared statement.
* See also ConsoleAppender where d <= 2 is treated specially by printing a prepared statement.
*/
def trimmedLines(t: Throwable, d: Int): List[String] = {
require(d >= 0)
@ -35,8 +33,7 @@ object StackTrace {
def appendStackTrace(t: Throwable, first: Boolean): Unit = {
val include: StackTraceElement => Boolean =
if (d == 0)
element => !isSbtClass(element.getClassName)
if (d == 0) element => !isSbtClass(element.getClassName)
else {
var count = d - 1
(_ => { count -= 1; count >= 0 })
@ -69,16 +66,15 @@ object StackTrace {
}
/**
* Return a printable representation of the stack trace associated
* with t. Information about t and its Throwable causes is included.
* The number of lines to be included for each Throwable is configured
* via d which should be greater than or equal to 0.
* Return a printable representation of the stack trace associated with t. Information about t and
* its Throwable causes is included. The number of lines to be included for each Throwable is
* configured via d which should be greater than or equal to 0.
*
* - If d is 0, then all elements are included up to (but not including)
* the first element that comes from sbt.
* - If d is greater than 0, then up to that many lines are included,
* where the line for the Throwable is counted plus one line for each stack element.
* Fewer lines will be included if there are not enough stack elements.
* - If d is 0, then all elements are included up to (but not including) the first element that
* comes from sbt.
* - If d is greater than 0, then up to that many lines are included, where the line for the
* Throwable is counted plus one line for each stack element. Fewer lines will be included if
* there are not enough stack elements.
*/
def trimmed(t: Throwable, d: Int): String =
trimmedLines(t, d).mkString(IO.Newline)

View File

@ -1,57 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
import scala.language.experimental.macros
import scala.reflect.runtime.universe._
/** This is used to carry type information in JSON. */
final case class StringTypeTag[A](key: String) {
override def toString: String = key
}
object StringTypeTag {
/** Generates a StringTypeTag for any type at compile time. */
implicit def fast[A]: StringTypeTag[A] = macro appmacro.StringTypeTag.impl[A]
@deprecated("Prefer macro generated StringTypeTag", "1.4.0")
def apply[A: TypeTag]: StringTypeTag[A] =
synchronized {
def doApply: StringTypeTag[A] = {
val tag = implicitly[TypeTag[A]]
val tpe = tag.tpe
val k = typeToString(tpe)
// println(tpe.getClass.toString + " " + k)
StringTypeTag[A](k)
}
def retry(n: Int): StringTypeTag[A] =
try {
doApply
} catch {
case e: NullPointerException =>
if (n < 1) throw new RuntimeException("NPE in StringTypeTag", e)
else {
Thread.sleep(1)
retry(n - 1)
}
}
retry(3)
}
@deprecated("Prefer macro generated StringTypeTag", "1.4.0")
def typeToString(tpe: Type): String =
tpe match {
case TypeRef(_, sym, args) =>
if (args.nonEmpty) {
val typeCon = tpe.typeSymbol.fullName
val typeArgs = args map typeToString
s"""$typeCon[${typeArgs.mkString(",")}]"""
} else tpe.toString
case _ => tpe.toString
}
}

Some files were not shown because too many files have changed in this diff Show More