diff --git a/.gitattributes b/.gitattributes index a5d9c6403..f3dbe80d8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,3 +5,7 @@ # to native line endings on checkout. *.scala text *.java text + +# Exclude contraband generated files from diff (by default - you can see it if you want) +**/contraband-scala/**/* -diff merge=ours +**/contraband-scala/**/* linguist-generated=true diff --git a/.scalafmt.conf b/.scalafmt.conf index e4ab36511..a36334e44 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -8,3 +8,11 @@ docstrings = JavaDoc # This also seems more idiomatic to include whitespace in import x.{ yyy } spaces.inImportCurlyBraces = true + +# This is more idiomatic Scala. +# http://docs.scala-lang.org/style/indentation.html#methods-with-numerous-arguments +align.openParenCallSite = false +align.openParenDefnSite = false + +# For better code clarity +danglingParentheses = true diff --git a/.travis.yml b/.travis.yml index 85db736d4..08a61721f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,7 @@ cache: directories: - $HOME/.ivy2/cache - $HOME/.sbt/boot + - $HOME/.jabba language: scala @@ -15,16 +16,26 @@ jdk: matrix: fast_finish: true +matrix: + include: + - env: SBT_CMD="scripted java/*" + before_install: + - curl -sL https://raw.githubusercontent.com/shyiko/jabba/0.10.1/install.sh | bash && . 
~/.jabba/jabba.sh + install: + - /home/travis/.jabba/bin/jabba install openjdk@1.10 + env: + global: + - secure: d3bu2KNwsVHwfhbGgO+gmRfDKBJhfICdCJFGWKf2w3Gv86AJZX9nuTYRxz0KtdvEHO5Xw8WTBZLPb2thSJqhw9OCm4J8TBAVqCP0ruUj4+aqBUFy4bVexQ6WKE6nWHs4JPzPk8c6uC1LG3hMuzlC8RGETXtL/n81Ef1u7NjyXjs= matrix: - - SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests" + - SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;whitesourceCheckPolicies ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests" - SBT_CMD="scripted actions/*" - SBT_CMD="scripted apiinfo/* compiler-project/* ivy-deps-management/*" - SBT_CMD="scripted dependency-management/*1of4" - SBT_CMD="scripted dependency-management/*2of4" - SBT_CMD="scripted dependency-management/*3of4" - SBT_CMD="scripted dependency-management/*4of4" - - SBT_CMD="scripted java/* package/* reporter/* run/* project-load/*" + - SBT_CMD="scripted package/* reporter/* run/* project-load/*" - SBT_CMD="scripted project/*1of2" - SBT_CMD="scripted project/*2of2" - SBT_CMD="scripted source-dependencies/*1of3" @@ -46,5 +57,5 @@ script: - sbt -J-XX:ReservedCodeCacheSize=128m -J-Xmx800M -J-Xms800M -J-server "$SBT_CMD" before_cache: - - find $HOME/.ivy2 -name "ivydata-*.properties" -print -delete - - find $HOME/.sbt -name "*.lock" -print -delete + - find $HOME/.ivy2 -name "ivydata-*.properties" -delete + - find $HOME/.sbt -name "*.lock" -delete diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ce8062617..ed916ffd8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,33 +1,14 @@ [StackOverflow]: http://stackoverflow.com/tags/sbt - [ask]: https://stackoverflow.com/questions/ask?tags=sbt [Setup]: http://www.scala-sbt.org/release/docs/Getting-Started/Setup [Issues]: https://github.com/sbt/sbt/issues - [sbt-dev]: 
https://groups.google.com/d/forum/sbt-dev [sbt-contrib]: https://gitter.im/sbt/sbt-contrib - [Lightbend]: https://www.lightbend.com/ - [subscriptions]: https://www.lightbend.com/platform/subscription [327]: https://github.com/sbt/sbt/issues/327 - [gitter]: https://gitter.im/sbt/sbt [documentation]: https://github.com/sbt/website -Support -======= +Contributing +============ -[Lightbend] sponsors sbt and encourages contributions from the active community. Enterprises can adopt it for mission critical systems with confidence because Lightbend stands behind sbt with commercial support and services. - -For community support please [ask] on StackOverflow with the tag "sbt". - -- State the problem or question clearly and provide enough context. Code examples and `build.sbt` are often useful when appropriately edited. -- There's also [Gitter sbt/sbt room][gitter], but Stackoverflow is recommended so others can benefit from the answers. - -For professional support, [Lightbend], the maintainer of Scala compiler and sbt, provides: - -- [Lightbend Subscriptions][subscriptions], which includes Expert Support -- Training -- Consulting - -How to contribute to sbt -======================== +(For support, see [SUPPORT](./SUPPORT.md)) There are lots of ways to contribute to sbt ecosystem depending on your interests and skill level. @@ -48,9 +29,13 @@ When you find a bug in sbt we want to hear about it. Your bug reports play an im Effective bug reports are more likely to be fixed. These guidelines explain how to write such reports and pull requests. +Please open a GitHub issue when you are 90% sure it's an actual bug. + +If you have an enhancement idea, or a general discussion, bring it up to [sbt-contrib]. + ### Notes about Documentation -Documentation fixes and contributions are as much welcome as to patching the core. Visit [the website project][documentation] to learn about how to contribute. +Documentation fixes and contributions are as much welcome as to patching the core. 
Visit [sbt/website][documentation] to learn about how to contribute. ### Preliminaries @@ -59,35 +44,29 @@ Documentation fixes and contributions are as much welcome as to patching the cor - Open one case for each problem. - Proceed to the next steps for details. -### Where to get help and/or file a bug report - -sbt project uses GitHub Issues as a publicly visible todo list. Please open a GitHub issue when you are 90% sure it's an actual bug. - -- If you need help with sbt, please [ask] on StackOverflow with the tag "sbt" and the name of the sbt plugin if any. -- If you have an enhancement idea, or a general discussion, bring it up to [sbt-contrib]. -- If you need a faster response time, consider one of the [Lightbend subscriptions][subscriptions]. - ### What to report The developers need three things from you: **steps**, **problems**, and **expectations**. -### Steps +The most important thing to remember about bug reporting is to clearly distinguish facts and opinions. -The most important thing to remember about bug reporting is to clearly distinguish facts and opinions. What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors. +#### Steps + +What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors. 
Repro steps are the most important part of a bug report. If we cannot reproduce the problem in one way or the other, the problem can't be fixed. Telling us the error messages is not enough. -### Problems +#### Problems Next, describe the problems, or what *you think* is the problem. It might be "obvious" to you that it's a problem, but it could actually be an intentional behavior for some backward compatibility etc. For compilation errors, include the stack trace. The more raw info the better. -### Expectations +#### Expectations Same as the problems. Describe what *you think* should've happened. -### Notes +#### Notes -Add an optional notes section to describe your analysis. +Add any optional notes section to describe your analysis. ### Subject @@ -121,7 +100,7 @@ See below for the branch to work against. ### Adding notes -All pull requests are required to include a "Notes" file which documents the change. This file should reside in the +Most pull requests should include a "Notes" file which documents the change. This file should reside in the directory: @@ -199,12 +178,96 @@ $ sbt > compile ``` +### Using Jenkins sbt-snapshots nighties + +There is a Jenkins instance for sbt that every night builds and publishes (if successful) a timestamped version +of sbt to http://jenkins.scala-sbt.org/sbt-snapshots and is available for 4-5 weeks. To use it do the following: + +1. Set the `sbt.version` in `project/build.properties` + +```bash +echo "sbt.version=1.2.0-bin-20180423T192044" > project/build.properties +``` + +2. 
Create an sbt repositories file (`./repositories`) that includes that Maven repository: + +```properties +[repositories] + local + local-preloaded-ivy: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/}, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext] + local-preloaded: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/} + maven-central + sbt-maven-releases: https://repo.scala-sbt.org/scalasbt/maven-releases/, bootOnly + sbt-maven-snapshots: https://repo.scala-sbt.org/scalasbt/maven-snapshots/, bootOnly + typesafe-ivy-releases: https://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + sbt-ivy-snapshots: https://repo.scala-sbt.org/scalasbt/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + sbt-snapshots: https://jenkins.scala-sbt.org/sbt-snapshots +``` + +3. Start sbt with a stable launcher and the custom repositories file: + +```bash +$ sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar -Dsbt.repository.config=repositories +Getting org.scala-sbt sbt 1.2.0-bin-20180423T192044 (this may take some time)... +downloading https://jenkins.scala-sbt.org/sbt-snapshots/org/scala-sbt/sbt/1.2.0-bin-20180423T192044/sbt-1.2.0-bin-20180423T192044.jar ... + [SUCCESSFUL ] org.scala-sbt#sbt;1.2.0-bin-20180423T192044!sbt.jar (139ms) +... +[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/936e0f52ed9baf6b6d83/sock +> show sbtVersion +[info] 1.2.0-bin-20180423T192044 +``` + +### Using Jenkins maven-snapshots nightlies + +As an alternative you can request a build that publishes to https://repo.scala-sbt.org/scalasbt/maven-snapshots +and stays there forever by: + +1. Logging into https://jenkins.scala-sbt.org/job/sbt-validator/ +2. Clicking "Build with Parameters" +3. Making sure `deploy_to_bintray` is enabled +4. 
Hitting "Build"
+
+After which, start sbt with a stable launcher: `sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar`
+
 ### Clearing out boot and local cache
 
 When you run a locally built sbt, the JAR artifacts will be now cached under `$HOME/.sbt/boot/scala-2.12.6/org.scala-sbt/sbt/1.$MINOR.$PATCH-SNAPSHOT` directory. To clear this out run: `reboot dev` command from sbt's session of your test application.
 
 One drawback of `-SNAPSHOT` version is that it's slow to resolve as it tries to hit all the resolvers. You can workaround that by using a version name like `1.$MINOR.$PATCH-LOCAL1`. A non-SNAPSHOT artifacts will now be cached under `$HOME/.ivy/cache/` directory, so you need to clear that out using [sbt-dirty-money](https://github.com/sbt/sbt-dirty-money)'s `cleanCache` task.
 
+### Running sbt "from source" - `sbtOn`
+
+In addition to locally publishing a build of sbt, there is an alternative, experimental launcher within sbt/sbt
+to be able to run sbt "from source", that is to compile sbt and run it from its resulting classfiles rather than
+from published jar files.
+
+Such a launcher is available within sbt/sbt's build through a custom `sbtOn` command that takes as its first
+argument the directory on which you want to run sbt, and the remaining arguments are passed _to_ that sbt
+instance. For example:
+
+I have set up a minimal sbt build in the directory `/s/t`, to run sbt on that directory I call:
+
+```bash
+> sbtOn /s/t
+[info] Packaging /d/sbt/scripted/sbt/target/scala-2.12/scripted-sbt_2.12-1.2.0-SNAPSHOT.jar ...
+[info] Done packaging.
+[info] Running (fork) sbt.RunFromSourceMain /s/t
+Listening for transport dt_socket at address: 5005
+[info] Loading settings from idea.sbt,global-plugins.sbt ...
+[info] Loading global plugins from /Users/dnw/.dotfiles/.sbt/1.0/plugins +[info] Loading project definition from /s/t/project +[info] Set current project to t (in build file:/s/t/) +[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/ce9baa494c7598e4d59b/sock +> show baseDirectory +[info] /s/t +> exit +[info] shutting down server +[success] Total time: 19 s, completed 25-Apr-2018 15:04:58 +``` + +Please note that this alternative launcher does _not_ have feature parity with sbt/launcher. (Meta) +contributions welcome! :-D + ### Diagnosing build failures Globally included plugins can interfere building `sbt`; if you are getting errors building sbt, try disabling all globally included plugins and try again. @@ -232,13 +295,17 @@ command. To run a single test, such as the test in sbt "scripted project/global-plugin" +Profiling sbt +------------- + +See [PROFILING](./PROFILING.md) + Other notes for maintainers --------------------------- ### Publishing VS Code Extensions - -https://code.visualstudio.com/docs/extensions/publish-extension +Reference https://code.visualstudio.com/docs/extensions/publish-extension ``` $ sbt @@ -249,3 +316,12 @@ cd vscode-sbt-scala/client $ vsce package $ vsce publish ``` + +## Signing the CLA + +Contributing to sbt requires you or your employer to sign the +[Lightbend Contributor License Agreement](https://www.lightbend.com/contribute/cla). + +To make it easier to respect our license agreements, we have added an sbt task +that takes care of adding the LICENSE headers to new files. Run `headerCreate` +and sbt will put a copyright notice into it. 
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md
index 3fab7acbb..c5d873e6a 100644
--- a/ISSUE_TEMPLATE.md
+++ b/ISSUE_TEMPLATE.md
@@ -1,4 +1,4 @@
-(See the guidelines for contributing, linked above)
+- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines
 
 ## steps
 
diff --git a/PROFILING.md b/PROFILING.md
new file mode 100644
index 000000000..90f29a1b5
--- /dev/null
+++ b/PROFILING.md
@@ -0,0 +1,153 @@
+Profiling sbt
+-------------
+
+There are several ways to profile sbt. The new hotness in profiling is FlameGraph.
+You first collect stack trace samples, and then it is processed into svg graph.
+See:
+
+- [Using FlameGraphs To Illuminate The JVM by Nitsan Wakart](https://www.youtube.com/watch?v=ugRrFdda_JQ)
+- [USENIX ATC '17: Visualizing Performance with Flame Graphs](https://www.youtube.com/watch?v=D53T1Ejig1Q)
+
+### jvm-profiling-tools/async-profiler
+
+The first one I recommend is async-profiler. This is available for macOS and Linux,
+and works fairly well.
+
+1. Download the installer from https://github.com/jvm-profiling-tools/async-profiler/releases/tag/v1.2
+2. Make symbolic link to `build/` and `profiler.sh` to `$HOME/bin`, assuming you have PATH to `$HOME/bin`:
+   `ln -s ~/Applications/async-profiler/profiler.sh $HOME/bin/profiler.sh`
+   `ln -s ~/Applications/async-profiler/build $HOME/bin/build`
+
+Next, close all Java applications and anything that may affect the profiling, and run sbt in one terminal:
+
+```
+$ sbt exit
+```
+
+In another terminal, run:
+
+```
+$ jps
+92746 sbt-launch.jar
+92780 Jps
+```
+
+This tells you the process ID of sbt. In this case, it's 92746. 
While it's running, run
+
+```
+$ profiler.sh -d 60
+Started [cpu] profiling
+--- Execution profile ---
+Total samples: 31602
+Non-Java: 3239 (10.25%)
+GC active: 46 (0.15%)
+Unknown (native): 14667 (46.41%)
+Not walkable (native): 3 (0.01%)
+Unknown (Java): 433 (1.37%)
+Not walkable (Java): 8 (0.03%)
+Thread exit: 1 (0.00%)
+Deopt: 9 (0.03%)
+
+Frame buffer usage: 55.658%
+
+Total: 1932000000 (6.11%) samples: 1932
+  [ 0] java.lang.ClassLoader$NativeLibrary.load
+  [ 1] java.lang.ClassLoader.loadLibrary0
+  [ 2] java.lang.ClassLoader.loadLibrary
+  [ 3] java.lang.Runtime.loadLibrary0
+  [ 4] java.lang.System.loadLibrary
+....
+```
+
+This should show a bunch of stacktraces that are useful.
+To visualize this as a flamegraph, run:
+
+```
+$ profiler.sh -d 60 -f /tmp/flamegraph.svg
+```
+
+This should produce `/tmp/flamegraph.svg` at the end.
+
+![flamegraph](project/flamegraph_svg.png)
+
+See https://gist.github.com/eed3si9n/82d43acc95a002876d357bd8ad5f40d5
+
+### running sbt with standby
+
+One of the tricky things you come across while profiling is figuring out the process ID,
+while wanting to profile the beginning of the application.
+
+For this purpose, we've added `sbt.launcher.standby` JVM flag.
+In the next version of sbt, you should be able to run:
+
+```
+$ sbt -J-Dsbt.launcher.standby=20s exit
+```
+
+This will count down for 20s before doing anything else.
+
+### jvm-profiling-tools/perf-map-agent
+
+If you want to try the mixed flamegraph, you can try perf-map-agent.
+This uses `dtrace` on macOS and `perf` on Linux.
+
+You first have to compile https://github.com/jvm-profiling-tools/perf-map-agent.
+For macOS, here is how to export `JAVA_HOME` before running `cmake .`:
+
+```
+$ export JAVA_HOME=$(/usr/libexec/java_home)
+$ cmake .
+-- The C compiler identification is AppleClang 9.0.0.9000039
+-- The CXX compiler identification is AppleClang 9.0.0.9000039
+...
+$ make
+```
+
+In addition, you have to git clone https://github.com/brendangregg/FlameGraph
+
+In a fresh terminal, run sbt with `-XX:+PreserveFramePointer` flag:
+
+```
+$ sbt -J-Dsbt.launcher.standby=20s -J-XX:+PreserveFramePointer exit
+```
+
+In the terminal that you will run the perf-map:
+
+```
+$ cd quicktest/
+$ export JAVA_HOME=$(/usr/libexec/java_home)
+$ export FLAMEGRAPH_DIR=$HOME/work/FlameGraph
+$ jps
+94592 Jps
+94549 sbt-launch.jar
+$ $HOME/work/perf-map-agent/bin/dtrace-java-flames 94549
+dtrace: system integrity protection is on, some features will not be available
+
+dtrace: description 'profile-99 ' matched 2 probes
+Flame graph SVG written to DTRACE_FLAME_OUTPUT='/Users/xxx/work/quicktest/flamegraph-94549.svg'.
+```
+
+This would produce better flamegraph in theory, but the output looks too messy for `sbt exit` case.
+See https://gist.github.com/eed3si9n/b5856ff3d987655513380d1a551aa0df
+This might be because it assumes that the operations are already JITed.
+
+### ktoso/sbt-jmh
+
+https://github.com/ktoso/sbt-jmh
+
+Due to JIT warmup etc, benchmarking is difficult. JMH runs the same tests multiple times to
+remove these effects and comes closer to measuring the performance of your code.
+
+There's also an integration with jvm-profiling-tools/async-profiler, apparently.
+
+### VisualVM
+
+I'd also mention traditional JVM profiling tool. Since VisualVM is opensource,
+I'll mention this one: https://visualvm.github.io/
+
+1. First, start VisualVM.
+2. Start sbt from a terminal.
+3. You should see `xsbt.boot.Boot` under Local.
+4. Open it, and select either sampler or profiler, and hit CPU button at the point when you want to start.
+
+If you are familiar with YourKit, it also works similarly.
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 0392fc0ee..47947ca33 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -1 +1 @@ -(See the guidelines for contributing, linked above) +- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines diff --git a/README.md b/README.md index dab1a3eee..1d1a8dc08 100644 --- a/README.md +++ b/README.md @@ -21,10 +21,10 @@ sbt is a build tool for Scala, Java, and more. For general documentation, see http://www.scala-sbt.org/. -sbt 1.0.x +sbt 1.x --------- -This is the 1.0.x series of sbt. The source code of sbt is split across +This is the 1.x series of sbt. The source code of sbt is split across several Github repositories, including this one. - [sbt/io][sbt/io] hosts `sbt.io` module. diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 000000000..c3c7d0c44 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,20 @@ + [ask]: https://stackoverflow.com/questions/ask?tags=sbt + [Lightbend]: https://www.lightbend.com/ + [subscriptions]: https://www.lightbend.com/platform/subscription + [gitter]: https://gitter.im/sbt/sbt + +Support +======= + +[Lightbend] sponsors sbt and encourages contributions from the active community. Enterprises can adopt it for mission critical systems with confidence because Lightbend stands behind sbt with commercial support and services. + +For community support please [ask] on StackOverflow with the tag "sbt" (and the name of the sbt plugin(s) if any). + +- State the problem or question clearly and provide enough context. Code examples and `build.sbt` are often useful when appropriately edited. +- There's also [Gitter sbt/sbt room][gitter], but Stackoverflow is recommended so others can benefit from the answers. 
+ +For professional support, for instance if you need faster response times, [Lightbend], the maintainer of Scala compiler and sbt, provides: + +- [Lightbend Subscriptions][subscriptions], which includes Expert Support +- Training +- Consulting diff --git a/build.sbt b/build.sbt index 33480b08c..7ee4bef71 100644 --- a/build.sbt +++ b/build.sbt @@ -9,7 +9,7 @@ def buildLevelSettings: Seq[Setting[_]] = inThisBuild( Seq( organization := "org.scala-sbt", - version := "1.1.5-SNAPSHOT", + version := "1.2.0-SNAPSHOT", description := "sbt is an interactive build tool", bintrayOrganization := Some("sbt"), bintrayRepository := { @@ -24,10 +24,12 @@ def buildLevelSettings: Seq[Setting[_]] = Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")), Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")), Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")), - Developer("gkossakowski", - "Grzegorz Kossakowski", - "@gkossakowski", - url("https://github.com/gkossakowski")), + Developer( + "gkossakowski", + "Grzegorz Kossakowski", + "@gkossakowski", + url("https://github.com/gkossakowski") + ), Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm")) ), homepage := Some(url("https://github.com/sbt/sbt")), @@ -35,35 +37,34 @@ def buildLevelSettings: Seq[Setting[_]] = resolvers += Resolver.mavenLocal, scalafmtOnCompile := true, scalafmtOnCompile in Sbt := false, - scalafmtVersion := "1.3.0", + scalafmtVersion := "1.4.0", )) -def commonSettings: Seq[Setting[_]] = - Seq[SettingsDefinition]( - headerLicense := Some(HeaderLicense.Custom( - """|sbt - |Copyright 2011 - 2017, Lightbend, Inc. 
- |Copyright 2008 - 2010, Mark Harrah - |Licensed under BSD-3-Clause license (see LICENSE) - |""".stripMargin - )), - scalaVersion := baseScalaVersion, - componentID := None, - resolvers += Resolver.typesafeIvyRepo("releases"), - resolvers += Resolver.sonatypeRepo("snapshots"), - resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/", - addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary), - concurrentRestrictions in Global += Util.testExclusiveRestriction, - testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"), - testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"), - javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"), - crossScalaVersions := Seq(baseScalaVersion), - bintrayPackage := (bintrayPackage in ThisBuild).value, - bintrayRepository := (bintrayRepository in ThisBuild).value, - publishArtifact in Test := false, - fork in compile := true, - fork in run := true - ) flatMap (_.settings) +def commonSettings: Seq[Setting[_]] = Def.settings( + headerLicense := Some(HeaderLicense.Custom( + """|sbt + |Copyright 2011 - 2017, Lightbend, Inc. 
+ |Copyright 2008 - 2010, Mark Harrah + |Licensed under BSD-3-Clause license (see LICENSE) + |""".stripMargin + )), + scalaVersion := baseScalaVersion, + componentID := None, + resolvers += Resolver.typesafeIvyRepo("releases"), + resolvers += Resolver.sonatypeRepo("snapshots"), + resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/", + addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary), + concurrentRestrictions in Global += Util.testExclusiveRestriction, + testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"), + testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"), + javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"), + crossScalaVersions := Seq(baseScalaVersion), + bintrayPackage := (bintrayPackage in ThisBuild).value, + bintrayRepository := (bintrayRepository in ThisBuild).value, + publishArtifact in Test := false, + fork in compile := true, + fork in run := true +) def minimalSettings: Seq[Setting[_]] = commonSettings ++ customCommands ++ @@ -83,7 +84,14 @@ val mimaSettings = Def settings ( ).map { v => organization.value % moduleName.value % v cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled) }.toSet - } + }, + mimaBinaryIssueFilters ++= Seq( + // Changes in the internal pacakge + exclude[DirectMissingMethodProblem]("sbt.internal.*"), + exclude[FinalClassProblem]("sbt.internal.*"), + exclude[FinalMethodProblem]("sbt.internal.*"), + exclude[IncompatibleResultTypeProblem]("sbt.internal.*"), + ), ) lazy val sbtRoot: Project = (project in file(".")) @@ -163,6 +171,11 @@ val collectionProj = (project in file("internal") / "util-collection") exclude[MissingClassProblem]("sbt.internal.util.Fn1"), exclude[DirectMissingMethodProblem]("sbt.internal.util.TypeFunctions.toFn1"), exclude[DirectMissingMethodProblem]("sbt.internal.util.Types.toFn1"), + + // Instead of defining foldr in KList & overriding in KCons, 
+ // it's now abstract in KList and defined in both KCons & KNil. + exclude[FinalMethodProblem]("sbt.internal.util.KNil.foldr"), + exclude[DirectAbstractMethodProblem]("sbt.internal.util.KList.foldr"), ), ) .configure(addSbtUtilPosition) @@ -175,6 +188,8 @@ val completeProj = (project in file("internal") / "util-complete") name := "Completion", libraryDependencies += jline, mimaSettings, + mimaBinaryIssueFilters ++= Seq( + ), ) .configure(addSbtIO, addSbtUtilControl) @@ -204,6 +219,10 @@ lazy val testingProj = (project in file("testing")) contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats, mimaSettings, mimaBinaryIssueFilters ++= Seq( + // private[sbt] + exclude[IncompatibleMethTypeProblem]("sbt.TestStatus.write"), + exclude[IncompatibleResultTypeProblem]("sbt.TestStatus.read"), + // copy method was never meant to be public exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy"), exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy$default$*"), @@ -285,22 +304,45 @@ lazy val runProj = (project in file("run")) ) .configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath) +val sbtProjDepsCompileScopeFilter = + ScopeFilter(inDependencies(LocalProject("sbtProj"), includeRoot = false), inConfigurations(Compile)) + lazy val scriptedSbtProj = (project in scriptedPath / "sbt") .dependsOn(commandProj) .settings( baseSettings, name := "Scripted sbt", libraryDependencies ++= Seq(launcherInterface % "provided"), + resourceGenerators in Compile += Def task { + val mainClassDir = (classDirectory in Compile in LocalProject("sbtProj")).value + val testClassDir = (classDirectory in Test in LocalProject("sbtProj")).value + val classDirs = (classDirectory all sbtProjDepsCompileScopeFilter).value + val extDepsCp = (externalDependencyClasspath in Compile in LocalProject("sbtProj")).value + + val cpStrings = (mainClassDir +: testClassDir +: classDirs) ++ extDepsCp.files map 
(_.toString) + + val file = (resourceManaged in Compile).value / "RunFromSource.classpath" + IO.writeLines(file, cpStrings) + List(file) + }, mimaSettings, + mimaBinaryIssueFilters ++= Seq( + // sbt.test package is renamed to sbt.scriptedtest. + exclude[MissingClassProblem]("sbt.test.*"), + ), ) .configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted, addSbtLmCore) lazy val scriptedPluginProj = (project in scriptedPath / "plugin") - .dependsOn(sbtProj) + .dependsOn(mainProj) .settings( baseSettings, name := "Scripted Plugin", mimaSettings, + mimaBinaryIssueFilters ++= Seq( + // scripted plugin has moved into sbt mothership. + exclude[MissingClassProblem]("sbt.ScriptedPlugin*") + ), ) .configure(addSbtCompilerClasspath) @@ -313,9 +355,14 @@ lazy val actionsProj = (project in file("main-actions")) libraryDependencies += sjsonNewScalaJson.value, mimaSettings, mimaBinaryIssueFilters ++= Seq( + // Removed unused private[sbt] nested class + exclude[MissingClassProblem]("sbt.Doc$Scaladoc"), + // Removed no longer used private[sbt] method + exclude[DirectMissingMethodProblem]("sbt.Doc.generate"), + exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.filesModifiedBytes"), exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.fileModifiedBytes"), - ) + ), ) .configure( addSbtIO, @@ -335,6 +382,8 @@ lazy val protocolProj = (project in file("protocol")) .dependsOn(collectionProj) .settings( testedBaseSettings, + scalacOptions -= "-Ywarn-unused", + scalacOptions += "-Xlint:-unused", name := "Protocol", libraryDependencies ++= Seq(sjsonNewScalaJson.value, ipcSocket), managedSourceDirectories in Compile += @@ -384,6 +433,11 @@ lazy val commandProj = (project in file("main-command")) contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats, mimaSettings, mimaBinaryIssueFilters ++= Vector( + // dropped private[sbt] method + exclude[DirectMissingMethodProblem]("sbt.BasicCommands.compatCommands"), + // dropped mainly 
internal command strings holder + exclude[MissingClassProblem]("sbt.BasicCommandStrings$Compat$"), + exclude[DirectMissingMethodProblem]("sbt.BasicCommands.rebootOptionParser"), // Changed the signature of Server method. nacho cheese. exclude[DirectMissingMethodProblem]("sbt.internal.server.Server.*"), @@ -397,6 +451,9 @@ lazy val commandProj = (project in file("main-command")) exclude[MissingClassProblem]("sbt.internal.NG*"), exclude[MissingClassProblem]("sbt.internal.ReferenceCountedFileDescriptor"), + // made private[sbt] method private[this] + exclude[DirectMissingMethodProblem]("sbt.State.handleException"), + // copy method was never meant to be public exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy"), exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy$default$*"), @@ -421,7 +478,7 @@ lazy val commandProj = (project in file("main-command")) lazy val coreMacrosProj = (project in file("core-macros")) .dependsOn(collectionProj) .settings( - commonSettings, + baseSettings, name := "Core Macros", libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value, mimaSettings, @@ -445,6 +502,10 @@ lazy val mainSettingsProj = (project in file("main-settings")) mimaSettings, mimaBinaryIssueFilters ++= Seq( exclude[DirectMissingMethodProblem]("sbt.Scope.display012StyleMasked"), + + // added a method to a sealed trait + exclude[InheritedNewAbstractMethodProblem]("sbt.Scoped.canEqual"), + exclude[InheritedNewAbstractMethodProblem]("sbt.ScopedTaskable.canEqual"), ), ) .configure( @@ -460,7 +521,7 @@ lazy val mainSettingsProj = (project in file("main-settings")) // The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions. 
lazy val mainProj = (project in file("main")) .enablePlugins(ContrabandPlugin) - .dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj) + .dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj, scriptedSbtProj) .settings( testedBaseSettings, name := "Main", @@ -470,17 +531,14 @@ lazy val mainProj = (project in file("main")) sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala", mimaSettings, mimaBinaryIssueFilters ++= Vector( - // Changed the signature of NetworkChannel ctor. internal. - exclude[DirectMissingMethodProblem]("sbt.internal.server.NetworkChannel.*"), - // ctor for ConfigIndex. internal. - exclude[DirectMissingMethodProblem]("sbt.internal.ConfigIndex.*"), // New and changed methods on KeyIndex. internal. exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"), - exclude[DirectMissingMethodProblem]("sbt.internal.KeyIndex.*"), - // Removed unused val. internal. - exclude[DirectMissingMethodProblem]("sbt.internal.RelayAppender.jsonFormat"), - // Removed unused def. internal. 
- exclude[DirectMissingMethodProblem]("sbt.internal.Load.isProjectThis"), + + // Changed signature or removed private[sbt] methods + exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"), + exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"), + exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"), + exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"), ) ) .configure( @@ -508,8 +566,10 @@ lazy val sbtProj = (project in file("sbt")) mimaBinaryIssueFilters ++= sbtIgnoredProblems, BuildInfoPlugin.buildInfoDefaultSettings, addBuildInfoToConfig(Test), + BuildInfoPlugin.buildInfoDefaultSettings, buildInfoObject in Test := "TestBuildInfo", buildInfoKeys in Test := Seq[BuildInfoKey]( + version, // WORKAROUND https://github.com/sbt/sbt-buildinfo/issues/117 BuildInfoKey.map((fullClasspath in Compile).taskValue) { case (ident, cp) => ident -> cp.files }, classDirectory in Compile, @@ -583,35 +643,29 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala")) ) def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { - val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed // publishLocalBinAll.value // TODO: Restore scripted needing only binary jars. publishAll.value - // These two projects need to be visible in a repo even if the default - // local repository is hidden, so we publish them to an alternate location and add - // that alternate repo to the running scripted test (in Scripted.scriptedpreScripted). 
- // (altLocalPublish in interfaceProj).value - // (altLocalPublish in compileInterfaceProj).value + (sbtProj / Test / compile).value // make sure sbt.RunFromSourceMain is compiled Scripted.doScripted( (sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value, (scalaInstance in scriptedSbtProj).value, scriptedSource.value, scriptedBufferLog.value, - result, + Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed, scriptedPrescripted.value, scriptedLaunchOpts.value ) } def scriptedUnpublishedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { - val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed Scripted.doScripted( (sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value, (scalaInstance in scriptedSbtProj).value, scriptedSource.value, scriptedBufferLog.value, - result, + Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed, scriptedPrescripted.value, scriptedLaunchOpts.value ) @@ -648,14 +702,12 @@ def otherRootSettings = scripted := scriptedTask.evaluated, scriptedUnpublished := scriptedUnpublishedTask.evaluated, scriptedSource := (sourceDirectory in sbtProj).value / "sbt-test", - // scriptedPrescripted := { addSbtAlternateResolver _ }, scriptedLaunchOpts := List("-Xmx1500M", "-Xms512M", "-server"), publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value }, publishLocalBinAll := { val _ = (publishLocalBin).all(ScopeFilter(inAnyProject)).value }, aggregate in bintrayRelease := false ) ++ inConfig(Scripted.RepoOverrideTest)( Seq( - scriptedPrescripted := (_ => ()), scriptedLaunchOpts := List( "-Xmx1500M", "-Xms512M", @@ -668,23 +720,6 @@ def otherRootSettings = scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test" )) -// def addSbtAlternateResolver(scriptedRoot: File) = { -// val resolver = scriptedRoot / "project" / "AddResolverPlugin.scala" -// if (!resolver.exists) { -// 
IO.write(resolver, s"""import sbt._ -// |import Keys._ -// | -// |object AddResolverPlugin extends AutoPlugin { -// | override def requires = sbt.plugins.JvmPlugin -// | override def trigger = allRequirements -// | -// | override lazy val projectSettings = Seq(resolvers += alternativeLocalResolver) -// | lazy val alternativeLocalResolver = Resolver.file("$altLocalRepoName", file("$altLocalRepoPath"))(Resolver.ivyStylePatterns) -// |} -// |""".stripMargin) -// } -// } - lazy val docProjects: ScopeFilter = ScopeFilter( inAnyProject -- inProjects(sbtRoot, sbtProj, scriptedSbtProj, scriptedPluginProj), inConfigurations(Compile) @@ -780,3 +815,12 @@ def customCommands: Seq[Setting[_]] = Seq( state } ) + +inThisBuild(Seq( + whitesourceProduct := "Lightbend Reactive Platform", + whitesourceAggregateProjectName := "sbt-master", + whitesourceAggregateProjectToken := "e7a1e55518c0489a98e9c7430c8b2ccd53d9f97c12ed46148b592ebe4c8bf128", + whitesourceIgnoredScopes ++= Seq("plugin", "scalafmt", "sxr"), + whitesourceFailOnError := sys.env.contains("WHITESOURCE_PASSWORD"), // fail if pwd is present + whitesourceForceCheckAllDependencies := true, +)) diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala index b70461e53..aeea0683e 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala @@ -29,13 +29,14 @@ object ContextUtil { * Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this * method, the result of this method is `f()`. 
*/ - def selectMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = { + def selectMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = { import c.universe._ c.macroApplication match { - case s @ Select(Apply(_, t :: Nil), tp) => f(c.Expr[Any](t), s.pos) - case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos) - case x => unexpectedTree(x) + case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos) + case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos) + case x => unexpectedTree(x) } } @@ -211,12 +212,14 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) { def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit = new ChangeOwnerAndModuleClassTraverser( prev.asInstanceOf[global.Symbol], - next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree]) + next.asInstanceOf[global.Symbol] + ).traverse(tree.asInstanceOf[global.Tree]) // Workaround copied from scala/async:can be removed once https://github.com/scala/scala/pull/3179 is merged. - private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol, - newowner: global.Symbol) - extends global.ChangeOwnerTraverser(oldowner, newowner) { + private[this] class ChangeOwnerAndModuleClassTraverser( + oldowner: global.Symbol, + newowner: global.Symbol + ) extends global.ChangeOwnerTraverser(oldowner, newowner) { override def traverse(tree: global.Tree): Unit = { tree match { case _: global.DefTree => change(tree.symbol.moduleClass) @@ -248,7 +251,8 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) { * the type constructor `[x] List[x]`. 
*/ def extractTC(tcp: AnyRef with Singleton, name: String)( - implicit it: ctx.TypeTag[tcp.type]): ctx.Type = { + implicit it: ctx.TypeTag[tcp.type] + ): ctx.Type = { val itTpe = it.tpe.asInstanceOf[global.Type] val m = itTpe.nonPrivateMember(global.newTypeName(name)) val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type] @@ -262,8 +266,10 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) { * Typically, `f` is a `Select` or `Ident`. * The wrapper is replaced with the result of `subWrapper(, , )` */ - def transformWrappers(t: Tree, - subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = { + def transformWrappers( + t: Tree, + subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type] + ): Tree = { // the main tree transformer that replaces calls to InputWrapper.wrap(x) with // plain Idents that reference the actual input value object appTransformer extends Transformer { diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/Convert.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/Convert.scala index 2aae74f3b..36529ec6c 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/Convert.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/Convert.scala @@ -26,9 +26,10 @@ sealed trait Converted[C <: blackbox.Context with Singleton] { } object Converted { def NotApplicable[C <: blackbox.Context with Singleton] = new NotApplicable[C] - final case class Failure[C <: blackbox.Context with Singleton](position: C#Position, - message: String) - extends Converted[C] { + final case class Failure[C <: blackbox.Context with Singleton]( + position: C#Position, + message: String + ) extends Converted[C] { def isSuccess = false def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message) } @@ -36,9 +37,10 @@ object Converted { def isSuccess = false def transform(f: C#Tree => C#Tree): Converted[C] = this } - final case class Success[C <: blackbox.Context with Singleton](tree: 
C#Tree, - finalTransform: C#Tree => C#Tree) - extends Converted[C] { + final case class Success[C <: blackbox.Context with Singleton]( + tree: C#Tree, + finalTransform: C#Tree => C#Tree + ) extends Converted[C] { def isSuccess = true def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform) } diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/Instance.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/Instance.scala index 33e614eab..a2d41f3e4 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/Instance.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/Instance.scala @@ -41,9 +41,11 @@ object Instance { final val MapName = "map" final val InstanceTCName = "M" - final class Input[U <: Universe with Singleton](val tpe: U#Type, - val expr: U#Tree, - val local: U#ValDef) + final class Input[U <: Universe with Singleton]( + val tpe: U#Type, + val expr: U#Tree, + val local: U#ValDef + ) trait Transform[C <: blackbox.Context with Singleton, N[_]] { def apply(in: C#Tree): C#Tree } diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/KListBuilder.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/KListBuilder.scala index 99a210a0c..148b772df 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/KListBuilder.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/KListBuilder.scala @@ -13,8 +13,9 @@ import macros._ /** A `TupleBuilder` that uses a KList as the tuple representation.*/ object KListBuilder extends TupleBuilder { - def make(c: blackbox.Context)(mt: c.Type, - inputs: Inputs[c.universe.type]): BuilderResult[c.type] = + def make( + c: blackbox.Context + )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val ctx: c.type = c val util = ContextUtil[c.type](c) @@ -47,15 +48,20 @@ object KListBuilder extends TupleBuilder { case Nil => revBindings.reverse } - private[this] def 
makeKList(revInputs: Inputs[c.universe.type], - klist: Tree, - klistType: Type): Tree = + private[this] def makeKList( + revInputs: Inputs[c.universe.type], + klist: Tree, + klistType: Type + ): Tree = revInputs match { case in :: tail => val next = ApplyTree( - TypeApply(Ident(kcons), - TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil), - in.expr :: klist :: Nil) + TypeApply( + Ident(kcons), + TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil + ), + in.expr :: klist :: Nil + ) makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil)) case Nil => klist } diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/MixedBuilder.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/MixedBuilder.scala index 472a0446b..990bcb6e5 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/MixedBuilder.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/MixedBuilder.scala @@ -16,8 +16,9 @@ import macros._ * and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs. 
*/ object MixedBuilder extends TupleBuilder { - def make(c: blackbox.Context)(mt: c.Type, - inputs: Inputs[c.universe.type]): BuilderResult[c.type] = { + def make( + c: blackbox.Context + )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = { val delegate = if (inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder delegate.make(c)(mt, inputs) } diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleBuilder.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleBuilder.scala index 0cd12ba44..1c67f9ed8 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleBuilder.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleBuilder.scala @@ -35,8 +35,9 @@ trait TupleBuilder { type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]] /** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */ - def make(c: blackbox.Context)(tcType: c.Type, - inputs: Inputs[c.universe.type]): BuilderResult[c.type] + def make( + c: blackbox.Context + )(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] } trait BuilderResult[C <: blackbox.Context with Singleton] { diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleNBuilder.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleNBuilder.scala index fab552157..6cf541e41 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleNBuilder.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/TupleNBuilder.scala @@ -22,8 +22,9 @@ object TupleNBuilder extends TupleBuilder { final val MaxInputs = 11 final val TupleMethodName = "tuple" - def make(c: blackbox.Context)(mt: c.Type, - inputs: Inputs[c.universe.type]): BuilderResult[c.type] = + def make( + c: blackbox.Context + )(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { val util = ContextUtil[c.type](c) import c.universe._ @@ -34,8 +35,9 
@@ object TupleNBuilder extends TupleBuilder { val ctx: c.type = c val representationC: PolyType = { val tcVariable: Symbol = newTCVariable(util.initialOwner) - val tupleTypeArgs = inputs.map(in => - internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) + val tupleTypeArgs = inputs.map( + in => internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type] + ) val tuple = global.definitions.tupleType(tupleTypeArgs) internal.polyType(tcVariable :: Nil, tuple.asInstanceOf[Type]) } @@ -47,10 +49,12 @@ object TupleNBuilder extends TupleBuilder { } def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1) - def bindTuple(param: ValDef, - revBindings: List[ValDef], - params: List[ValDef], - i: Int): List[ValDef] = + def bindTuple( + param: ValDef, + revBindings: List[ValDef], + params: List[ValDef], + i: Int + ): List[ValDef] = params match { case (x @ ValDef(mods, name, tpt, _)) :: xs => val rhs = select(Ident(param.name), "_" + i.toString) diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/AList.scala b/internal/util-collection/src/main/scala/sbt/internal/util/AList.scala index cc5343d2f..ba2379567 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/AList.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/AList.scala @@ -17,7 +17,9 @@ import Types._ */ trait AList[K[L[x]]] { def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N] - def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[K[P]] + def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[K[P]] def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil) @@ -33,8 +35,11 @@ object AList { val empty: Empty = new Empty { def transform[M[_], N[_]](in: Unit, f: M ~> N) = () def foldr[M[_], T](in: Unit, f: (M[_], T) 
=> T, init: T) = init - override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(())) - def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(()) + override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = + app.pure(f(())) + def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[Unit] = np.pure(()) } type SeqList[T] = AList[λ[L[x] => List[L[T]]]] @@ -42,9 +47,12 @@ object AList { /** AList for a homogeneous sequence. */ def seq[T]: SeqList[T] = new SeqList[T] { def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T]) - def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t)) + def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = + (init /: s.reverse)((t, m) => f(m, t)) - override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = { + override def apply[M[_], C](s: List[M[T]], f: List[T] => C)( + implicit ap: Applicative[M] + ): M[C] = { def loop[V](in: List[M[T]], g: List[T] => V): M[V] = in match { case Nil => ap.pure(g(Nil)) @@ -55,15 +63,20 @@ object AList { loop(s, f) } - def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ??? + def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[List[P[T]]] = ??? } /** AList for the arbitrary arity data structure KList. 
*/ def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] { def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f) def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init) - override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app) - def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np) + override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = + k.apply(f)(app) + def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[KL[P]] = k.traverse[N, P](f)(np) override def toList[M[_]](k: KL[M]) = k.toList } @@ -73,7 +86,9 @@ object AList { def single[A]: Single[A] = new Single[A] { def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a) def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init) - def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a) + def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[P[A]] = f(a) } type ASplit[K[L[x]], B[x]] = AList[λ[L[x] => K[(L ∙ B)#l]]] @@ -85,7 +100,9 @@ object AList { def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] = base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f)) - def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = { + def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[Split[P]] = { val g = nestCon[M, (N ∙ P)#l, B](f) base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np) } @@ -101,7 +118,9 @@ object AList { type T2[M[_]] = (M[A], M[B]) def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2)) def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init)) - def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ 
P)#l)(implicit np: Applicative[N]): N[T2[P]] = { + def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T2[P]] = { val g = (Tuple2.apply[P[A], P[B]] _).curried np.apply(np.map(g, f(t._1)), f(t._2)) } @@ -113,7 +132,9 @@ object AList { type T3[M[_]] = (M[A], M[B], M[C]) def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3)) def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init))) - def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = { + def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T3[P]] = { val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)) } @@ -124,8 +145,11 @@ object AList { def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] { type T4[M[_]] = (M[A], M[B], M[C], M[D]) def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4)) - def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init)))) - def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = { + def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, init)))) + def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T4[P]] = { val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)) } @@ -136,8 +160,11 @@ object AList { def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] { type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E]) def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5)) - def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init))))) - def traverse[M[_], 
N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = { + def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init))))) + def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T5[P]] = { val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)) } @@ -147,71 +174,213 @@ object AList { type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l] def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] { type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F]) - def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6)) - def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init)))))) - def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = { + def transform[M[_], N[_]](t: T6[M], f: M ~> N) = + (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6)) + def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init)))))) + def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T6[P]] = { val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)) + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ) } } - sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) } + sealed trait T7K[A, B, C, D, E, F, G] { + type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) + } type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l] def 
tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] { type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G]) - def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7)) - def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init))))))) - def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = { + def transform[M[_], N[_]](t: T7[M], f: M ~> N) = + (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7)) + def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init))))))) + def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T7[P]] = { val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)) + np.apply( + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ), + f(t._7) + ) } } - sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) } + sealed trait T8K[A, B, C, D, E, F, G, H] { + type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) + } type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l] - def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] { - type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H]) - def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8)) - def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init)))))))) - def 
traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = { - val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)) + def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = + new T8List[A, B, C, D, E, F, G, H] { + type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H]) + def transform[M[_], N[_]](t: T8[M], f: M ~> N) = + (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8)) + def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init)))))))) + def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T8[P]] = { + val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried + np.apply( + np.apply( + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ), + f(t._7) + ), + f(t._8) + ) + } } - } - sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) } + sealed trait T9K[A, B, C, D, E, F, G, H, I] { + type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) + } type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l] - def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] { - type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I]) - def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9)) - def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init))))))))) - def traverse[M[_], N[_], 
P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = { - val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)) + def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = + new T9List[A, B, C, D, E, F, G, H, I] { + type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I]) + def transform[M[_], N[_]](t: T9[M], f: M ~> N) = + (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9)) + def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = + f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init))))))))) + def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T9[P]] = { + val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ), + f(t._7) + ), + f(t._8) + ), + f(t._9) + ) + } } - } - sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) } + sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { + type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) + } type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l] - def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] { - type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) - def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10)) - def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, 
init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))) - def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = { - val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)) + def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = + new T10List[A, B, C, D, E, F, G, H, I, J] { + type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) + def transform[M[_], N[_]](t: T10[M], f: M ~> N) = + (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10)) + def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = + f( + t._1, + f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))) + ) + def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T10[P]] = { + val g = + (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ), + f(t._7) + ), + f(t._8) + ), + f(t._9) + ), + f(t._10) + ) + } } - } - sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) } - type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l] - def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] { - type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) - def transform[M[_], N[_]](t: T11[M], f: M 
~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11)) - def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init))))))))))) - def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = { - val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried - np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11)) - } + sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { + type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) } + type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l] + def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = + new T11List[A, B, C, D, E, F, G, H, I, J, K] { + type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) + def transform[M[_], N[_]](t: T11[M], f: M ~> N) = + ( + f(t._1), + f(t._2), + f(t._3), + f(t._4), + f(t._5), + f(t._6), + f(t._7), + f(t._8), + f(t._9), + f(t._10), + f(t._11) + ) + def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = + f( + t._1, + f( + t._2, + f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init))))))))) + ) + ) + def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)( + implicit np: Applicative[N] + ): N[T11[P]] = { + val g = (Tuple11 + .apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply( + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), + f(t._5) + ), + f(t._6) + ), + f(t._7) + ), + f(t._8) + ), + f(t._9) + ), + 
f(t._10) + ), + f(t._11) + ) + } + } } diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/Attributes.scala b/internal/util-collection/src/main/scala/sbt/internal/util/Attributes.scala index 0dfbea73e..b45eeb4a1 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/Attributes.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/Attributes.scala @@ -31,7 +31,8 @@ sealed trait AttributeKey[T] { def description: Option[String] /** - * In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys. + * In environments that support delegation, looking up this key when it has no associated value + * will delegate to the values associated with these keys. * The delegation proceeds in order the keys are returned here. */ def extend: Seq[AttributeKey[_]] @@ -70,20 +71,26 @@ object AttributeKey { def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] = apply(name, description, Nil) - def apply[T: Manifest: OptJsonWriter](name: String, - description: String, - rank: Int): AttributeKey[T] = + def apply[T: Manifest: OptJsonWriter]( + name: String, + description: String, + rank: Int + ): AttributeKey[T] = apply(name, description, Nil, rank) - def apply[T: Manifest: OptJsonWriter](name: String, - description: String, - extend: Seq[AttributeKey[_]]): AttributeKey[T] = + def apply[T: Manifest: OptJsonWriter]( + name: String, + description: String, + extend: Seq[AttributeKey[_]] + ): AttributeKey[T] = apply(name, description, extend, Int.MaxValue) - def apply[T: Manifest: OptJsonWriter](name: String, - description: String, - extend: Seq[AttributeKey[_]], - rank: Int): AttributeKey[T] = + def apply[T: Manifest: OptJsonWriter]( + name: String, + description: String, + extend: Seq[AttributeKey[_]], + rank: Int + ): AttributeKey[T] = make(name, Some(description), extend, rank) private[sbt] def copyWithRank[T](a: 
AttributeKey[T], rank: Int): AttributeKey[T] = diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/IDSet.scala b/internal/util-collection/src/main/scala/sbt/internal/util/IDSet.scala index 356b6906b..6bd5cff95 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/IDSet.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/IDSet.scala @@ -7,6 +7,8 @@ package sbt.internal.util +import scala.collection.JavaConverters._ + /** A mutable set interface that uses object identity to test for set membership.*/ trait IDSet[T] { def apply(t: T): Boolean @@ -41,7 +43,7 @@ object IDSet { def +=(t: T) = { backing.put(t, Dummy); () } def ++=(t: Iterable[T]) = t foreach += def -=(t: T) = if (backing.remove(t) eq null) false else true - def all = collection.JavaConverters.collectionAsScalaIterable(backing.keySet) + def all = backing.keySet.asScala def toList = all.toList def isEmpty = backing.isEmpty diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/INode.scala b/internal/util-collection/src/main/scala/sbt/internal/util/INode.scala index 939bd9576..2090e0139 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/INode.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/INode.scala @@ -170,8 +170,10 @@ abstract class EvaluateSettings[Scope] { } protected final def setValue(v: T): Unit = { - assert(state != Evaluated, - "Already evaluated (trying to set value to " + v + "): " + toString) + assert( + state != Evaluated, + "Already evaluated (trying to set value to " + v + "): " + toString + ) if (v == null) sys.error("Setting value cannot be null: " + keyString) value = v state = Evaluated diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/KList.scala b/internal/util-collection/src/main/scala/sbt/internal/util/KList.scala index a68b61788..7b7aaf404 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/KList.scala +++ 
b/internal/util-collection/src/main/scala/sbt/internal/util/KList.scala @@ -10,7 +10,7 @@ package sbt.internal.util import Types._ import Classes.Applicative -/** Heterogeneous list with each element having type M[T] for some type T.*/ +/** A higher-kinded heterogeneous list of elements that share the same type constructor `M[_]`. */ sealed trait KList[+M[_]] { type Transform[N[_]] <: KList[N] @@ -18,7 +18,7 @@ sealed trait KList[+M[_]] { def transform[N[_]](f: M ~> N): Transform[N] /** Folds this list using a function that operates on the homogeneous type of the elements of this list. */ - def foldr[B](f: (M[_], B) => B, init: B): B = init // had trouble defining it in KNil + def foldr[B](f: (M[_], B) => B, init: B): B /** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */ def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] @@ -54,13 +54,14 @@ final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KL override def foldr[B](f: (M[_], B) => B, init: B): B = f(head, tail.foldr(f, init)) } -sealed abstract class KNil extends KList[Nothing] { +sealed abstract class KNil extends KList[NothingK] { final type Transform[N[_]] = KNil - final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil + final def transform[N[_]](f: NothingK ~> N): Transform[N] = KNil + final def foldr[B](f: (NothingK[_], B) => B, init: B): B = init final def toList = Nil final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil)) - final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] = + final def traverse[N[_], P[_]](f: NothingK ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] = np.pure(KNil) } diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/Settings.scala b/internal/util-collection/src/main/scala/sbt/internal/util/Settings.scala index 910e1c089..c080fcbe3 100644 --- 
a/internal/util-collection/src/main/scala/sbt/internal/util/Settings.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/Settings.scala @@ -357,7 +357,8 @@ trait Init[Scope] { keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "") new Uninitialized( keys, - prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ") + prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n " + ) } final class Compiled[T]( @@ -374,8 +375,9 @@ trait Init[Scope] { val locals = compiled flatMap { case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil } - val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep => - if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)) + val ordered = Dag.topologicalSort(locals)( + _.dependencies.flatMap(dep => if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil) + ) def flatten( cmap: Map[ScopedKey[_], Flattened], key: ScopedKey[_], @@ -383,7 +385,8 @@ trait Init[Scope] { ): Flattened = new Flattened( key, - deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)) + deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil) + ) val empty = Map.empty[ScopedKey[_], Flattened] @@ -415,7 +418,8 @@ trait Init[Scope] { * Intersects two scopes, returning the more specific one if they intersect, or None otherwise. 
*/ private[sbt] def intersect(s1: Scope, s2: Scope)( - implicit delegates: Scope => Seq[Scope]): Option[Scope] = + implicit delegates: Scope => Seq[Scope] + ): Option[Scope] = if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific else None diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/Signal.scala b/internal/util-collection/src/main/scala/sbt/internal/util/Signal.scala index f1989ae19..661890b88 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/Signal.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/Signal.scala @@ -65,7 +65,7 @@ object Signals { } // Must only be referenced using a -// try { } catch { case e: LinkageError => ... } +// try { } catch { case _: LinkageError => ... } // block to private final class Signals0 { def supported(signal: String): Boolean = { diff --git a/internal/util-collection/src/main/scala/sbt/internal/util/TypeFunctions.scala b/internal/util-collection/src/main/scala/sbt/internal/util/TypeFunctions.scala index 49c29ff11..8ec06890a 100644 --- a/internal/util-collection/src/main/scala/sbt/internal/util/TypeFunctions.scala +++ b/internal/util-collection/src/main/scala/sbt/internal/util/TypeFunctions.scala @@ -9,6 +9,7 @@ package sbt.internal.util trait TypeFunctions { type Id[X] = X + type NothingK[X] = Nothing sealed trait Const[A] { type Apply[B] = A } sealed trait ConstK[A] { type l[L[x]] = A } sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] } diff --git a/internal/util-collection/src/test/scala/SettingsTest.scala b/internal/util-collection/src/test/scala/SettingsTest.scala index 235cf5508..83d444079 100644 --- a/internal/util-collection/src/test/scala/SettingsTest.scala +++ b/internal/util-collection/src/test/scala/SettingsTest.scala @@ -7,8 +7,7 @@ package sbt.internal.util -import org.scalacheck._ -import Prop._ +import org.scalacheck._, Prop._ object SettingsTest extends 
Properties("settings") { val settingsExample: SettingsExample = SettingsExample() @@ -160,7 +159,7 @@ object SettingsTest extends Properties("settings") { final def checkCircularReferences(intermediate: Int): Prop = { val ccr = new CCR(intermediate) try { evaluate(setting(chk, ccr.top) :: Nil); false } catch { - case e: java.lang.Exception => true + case _: java.lang.Exception => true } } @@ -197,18 +196,18 @@ object SettingsTest extends Properties("settings") { def evaluate(settings: Seq[Setting[_]]): Settings[Scope] = try { make(settings)(delegates, scopeLocal, showFullKey) } catch { - case e: Throwable => e.printStackTrace; throw e + case e: Throwable => e.printStackTrace(); throw e } } // This setup is a workaround for module synchronization issues final class CCR(intermediate: Int) { import SettingsTest.settingsExample._ - lazy val top = iterate(value(intermediate), intermediate) - def iterate(init: Initialize[Int], i: Int): Initialize[Int] = + lazy val top = iterate(value(intermediate)) + def iterate(init: Initialize[Int]): Initialize[Int] = bind(init) { t => if (t <= 0) top else - iterate(value(t - 1), t - 1) + iterate(value(t - 1)) } } diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala b/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala index b5e9d53d2..e67ff915c 100644 --- a/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala @@ -49,8 +49,9 @@ abstract class JLine extends LineReader { private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] = if (handleCONT) - Signals.withHandler(() => resume(), signal = Signals.CONT)(() => - readLineDirectRaw(prompt, mask)) + Signals.withHandler(() => resume(), signal = Signals.CONT)( + () => readLineDirectRaw(prompt, mask) + ) else readLineDirectRaw(prompt, mask) @@ -132,7 +133,7 @@ private[sbt] object JLine { def createReader(): 
ConsoleReader = createReader(None, JLine.makeInputStream(true)) def createReader(historyPath: Option[File], in: InputStream): ConsoleReader = - usingTerminal { t => + usingTerminal { _ => val cr = new ConsoleReader(in, System.out) cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650 cr.setBellEnabled(false) diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/complete/EditDistance.scala b/internal/util-complete/src/main/scala/sbt/internal/util/complete/EditDistance.scala index 5a8efe1be..cad2e5002 100644 --- a/internal/util-complete/src/main/scala/sbt/internal/util/complete/EditDistance.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/complete/EditDistance.scala @@ -10,7 +10,7 @@ package complete import java.lang.Character.{ toLowerCase => lower } -/** @author Paul Phillips*/ +/** @author Paul Phillips */ object EditDistance { /** @@ -24,7 +24,6 @@ object EditDistance { insertCost: Int = 1, deleteCost: Int = 1, subCost: Int = 1, - transposeCost: Int = 1, matchCost: Int = 0, caseCost: Int = 1, transpositions: Boolean = false diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/complete/History.scala b/internal/util-complete/src/main/scala/sbt/internal/util/complete/History.scala index 1decc7197..8c63bf592 100644 --- a/internal/util-complete/src/main/scala/sbt/internal/util/complete/History.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/complete/History.scala @@ -11,11 +11,7 @@ package complete import History.number import java.io.File -final class History private ( - val lines: IndexedSeq[String], - val path: Option[File], - error: String => Unit -) { +final class History private (val lines: IndexedSeq[String], val path: Option[File]) { private def reversed = lines.reverse def all: Seq[String] = lines @@ -52,8 +48,8 @@ final class History private ( } object History { - def apply(lines: Seq[String], path: Option[File], error: String => Unit): History = - new 
History(lines.toIndexedSeq, path, sys.error) + def apply(lines: Seq[String], path: Option[File]): History = + new History(lines.toIndexedSeq, path) def number(s: String): Option[Int] = try { Some(s.toInt) } catch { case _: NumberFormatException => None } diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/complete/JLineCompletion.scala b/internal/util-complete/src/main/scala/sbt/internal/util/complete/JLineCompletion.scala index 68d03b841..e0a44deb2 100644 --- a/internal/util-complete/src/main/scala/sbt/internal/util/complete/JLineCompletion.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/complete/JLineCompletion.scala @@ -11,7 +11,7 @@ package complete import jline.console.ConsoleReader import jline.console.completer.{ Completer, CompletionHandler } import scala.annotation.tailrec -import scala.collection.JavaConverters +import scala.collection.JavaConverters._ object JLineCompletion { def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit = @@ -91,7 +91,8 @@ object JLineCompletion { def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add def customCompletor( - f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean = + f: (String, Int) => (Seq[String], Seq[String]) + ): (ConsoleReader, Int) => Boolean = (reader, level) => { val success = complete(beforeCursor(reader), reader => f(reader, level), reader) reader.flush() @@ -154,7 +155,7 @@ object JLineCompletion { if (line.charAt(line.length - 1) != '\n') reader.println() } - reader.printColumns(JavaConverters.seqAsJavaList(columns.map(_.trim))) + reader.printColumns(columns.map(_.trim).asJava) } def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0 diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parser.scala b/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parser.scala index 94417e1c4..09bf9a8a3 100644 --- 
a/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parser.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parser.scala @@ -275,8 +275,10 @@ object Parser extends ParserMain { revAcc: List[T] ): Parser[Seq[T]] = { assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")") - assume(max >= min, - "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")") + assume( + max >= min, + "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")" + ) def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] = repeated match { @@ -836,10 +838,12 @@ private final class ParserWithExamples[T]( ) extends ValidParser[T] { def derive(c: Char) = - examples(delegate derive c, - exampleSource.withAddedPrefix(c.toString), - maxNumberOfExamples, - removeInvalidExamples) + examples( + delegate derive c, + exampleSource.withAddedPrefix(c.toString), + maxNumberOfExamples, + removeInvalidExamples + ) def result = delegate.result diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parsers.scala b/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parsers.scala index b04b61127..ce444e229 100644 --- a/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parsers.scala +++ b/internal/util-complete/src/main/scala/sbt/internal/util/complete/Parsers.scala @@ -12,15 +12,17 @@ import Parser._ import java.io.File import java.net.URI import java.lang.Character.{ - getType, - MATH_SYMBOL, - OTHER_SYMBOL, + CURRENCY_SYMBOL, DASH_PUNCTUATION, - OTHER_PUNCTUATION, + MATH_SYMBOL, MODIFIER_SYMBOL, - CURRENCY_SYMBOL + OTHER_PUNCTUATION, + OTHER_SYMBOL, + getType } +import scala.annotation.tailrec + /** Provides standard implementations of commonly useful [[Parser]]s. */ trait Parsers { @@ -42,7 +44,8 @@ trait Parsers { /** Parses a single hexadecimal digit (0-9, a-f, A-F). 
*/ lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map( - _.toString) + _.toString + ) /** Parses a single letter, according to Char.isLetter, into a Char. */ lazy val Letter = charClass(_.isLetter, "letter") @@ -313,6 +316,16 @@ object DefaultParsers extends Parsers with ParserMain { apply(p)(s).resultEmpty.isValid /** Returns `true` if `s` parses successfully according to [[ID]].*/ - def validID(s: String): Boolean = matches(ID, s) + def validID(s: String): Boolean = { + // Handwritten version of `matches(ID, s)` because validID turned up in profiling. + def isIdChar(c: Char): Boolean = Character.isLetterOrDigit(c) || (c == '-') || (c == '_') + @tailrec def isRestIdChar(cur: Int, s: String, length: Int): Boolean = + if (cur < length) + isIdChar(s.charAt(cur)) && isRestIdChar(cur + 1, s, length) + else + true + + !s.isEmpty && Character.isLetter(s.charAt(0)) && isRestIdChar(1, s, s.length) + } } diff --git a/internal/util-complete/src/test/scala/DefaultParsersSpec.scala b/internal/util-complete/src/test/scala/DefaultParsersSpec.scala new file mode 100644 index 000000000..b62ff8566 --- /dev/null +++ b/internal/util-complete/src/test/scala/DefaultParsersSpec.scala @@ -0,0 +1,29 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt.internal.util +package complete + +import org.scalacheck._, Gen._, Prop._ + +object DefaultParsersSpec extends Properties("DefaultParsers") { + import DefaultParsers.{ ID, isIDChar, matches, validID } + + property("∀ s ∈ String: validID(s) == matches(ID, s)") = forAll( + (s: String) => validID(s) == matches(ID, s) + ) + + property("∀ s ∈ genID: matches(ID, s)") = forAll(genID)(s => matches(ID, s)) + property("∀ s ∈ genID: validID(s)") = forAll(genID)(s => validID(s)) + + private val chars: Seq[Char] = Char.MinValue to Char.MaxValue + private val genID: Gen[String] = + for { + c <- oneOf(chars filter (_.isLetter)) + cs <- listOf(oneOf(chars filter isIDChar)) + } yield (c :: cs).mkString +} diff --git a/internal/util-complete/src/test/scala/sbt/complete/FileExamplesTest.scala b/internal/util-complete/src/test/scala/sbt/complete/FileExamplesTest.scala index 61c6e53b1..f7b79573c 100644 --- a/internal/util-complete/src/test/scala/sbt/complete/FileExamplesTest.scala +++ b/internal/util-complete/src/test/scala/sbt/complete/FileExamplesTest.scala @@ -9,60 +9,66 @@ package sbt.internal.util package complete import java.io.File -import sbt.io.IO._ +import org.scalatest.Assertion +import sbt.io.IO class FileExamplesTest extends UnitSpec { "listing all files in an absolute base directory" should "produce the entire base directory's contents" in { - val _ = new DirectoryStructure { - fileExamples().toList should contain theSameElementsAs (allRelativizedPaths) + withDirectoryStructure() { ds => + ds.fileExamples().toList should contain theSameElementsAs (ds.allRelativizedPaths) } } - "listing files with a prefix that matches none" should - "produce an empty list" in { - val _ = new DirectoryStructure(withCompletionPrefix = "z") { - fileExamples().toList shouldBe empty + "listing files with a prefix that matches none" should "produce an empty list" in { + 
withDirectoryStructure(withCompletionPrefix = "z") { ds => + ds.fileExamples().toList shouldBe empty } } - "listing single-character prefixed files" should - "produce matching paths only" in { - val _ = new DirectoryStructure(withCompletionPrefix = "f") { - fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly) + "listing single-character prefixed files" should "produce matching paths only" in { + withDirectoryStructure(withCompletionPrefix = "f") { ds => + ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly) } } - "listing directory-prefixed files" should - "produce matching paths only" in { - val _ = new DirectoryStructure(withCompletionPrefix = "far") { - fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly) + "listing directory-prefixed files" should "produce matching paths only" in { + withDirectoryStructure(withCompletionPrefix = "far") { ds => + ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly) } } it should "produce sub-dir contents only when appending a file separator to the directory" in { - val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator) { - fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly) + withDirectoryStructure(withCompletionPrefix = "far" + File.separator) { ds => + ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly) } } - "listing files with a sub-path prefix" should - "produce matching paths only" in { - val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") { - fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly) + "listing files with a sub-path prefix" should "produce matching paths only" in { + withDirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") { ds => + ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly) } } - "completing a full path" should - "produce a 
list with an empty string" in { - val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") { - fileExamples().toList shouldEqual List("") + "completing a full path" should "produce a list with an empty string" in { + withDirectoryStructure(withCompletionPrefix = "bazaar") { ds => + ds.fileExamples().toList shouldEqual List("") } } - // TODO: Remove DelayedInit - https://github.com/scala/scala/releases/tag/v2.11.0-RC1 - class DirectoryStructure(withCompletionPrefix: String = "") extends DelayedInit { + def withDirectoryStructure[A](withCompletionPrefix: String = "")( + thunk: DirectoryStructure => Assertion + ): Assertion = { + IO.withTemporaryDirectory { tempDir => + val ds = new DirectoryStructure(withCompletionPrefix) + ds.createSampleDirStructure(tempDir) + ds.fileExamples = new FileExamples(ds.baseDir, withCompletionPrefix) + thunk(ds) + } + } + + final class DirectoryStructure(withCompletionPrefix: String) { var fileExamples: FileExamples = _ var baseDir: File = _ var childFiles: List[File] = _ @@ -72,22 +78,14 @@ class FileExamplesTest extends UnitSpec { def allRelativizedPaths: List[String] = (childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories) - .map(relativize(baseDir, _).get) + .map(IO.relativize(baseDir, _).get) def prefixedPathsOnly: List[String] = allRelativizedPaths .filter(_ startsWith withCompletionPrefix) .map(_ substring withCompletionPrefix.length) - override def delayedInit(testBody: => Unit): Unit = { - withTemporaryDirectory { tempDir => - createSampleDirStructure(tempDir) - fileExamples = new FileExamples(baseDir, withCompletionPrefix) - testBody - } - } - - private def createSampleDirStructure(tempDir: File): Unit = { + def createSampleDirStructure(tempDir: File): Unit = { childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar")) childDirectories = toChildFiles(tempDir, List("moo", "far")) nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2")) diff --git 
a/internal/util-complete/src/test/scala/sbt/complete/ParserWithExamplesTest.scala b/internal/util-complete/src/test/scala/sbt/complete/ParserWithExamplesTest.scala index e142e4a66..c2e7c314b 100644 --- a/internal/util-complete/src/test/scala/sbt/complete/ParserWithExamplesTest.scala +++ b/internal/util-complete/src/test/scala/sbt/complete/ParserWithExamplesTest.scala @@ -27,7 +27,8 @@ class ParserWithExamplesTest extends UnitSpec { Set( suggestion("blue"), suggestion("red") - )) + ) + ) parserWithExamples.completions(0) shouldEqual validCompletions } } @@ -38,7 +39,8 @@ class ParserWithExamplesTest extends UnitSpec { val derivedCompletions = Completions( Set( suggestion("lue") - )) + ) + ) parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions } } @@ -58,7 +60,8 @@ class ParserWithExamplesTest extends UnitSpec { Set( suggestion("lue"), suggestion("lock") - )) + ) + ) parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions } } diff --git a/internal/util-logic/src/test/scala/sbt/logic/Test.scala b/internal/util-logic/src/test/scala/sbt/logic/Test.scala index 51d2f2f67..f83835f39 100644 --- a/internal/util-logic/src/test/scala/sbt/logic/Test.scala +++ b/internal/util-logic/src/test/scala/sbt/logic/Test.scala @@ -24,14 +24,14 @@ object LogicTest extends Properties("Logic") { property("Properly orders results.") = secure(expect(ordering, Set(B, A, C, E, F))) property("Detects cyclic negation") = secure( Logic.reduceAll(badClauses, Set()) match { - case Right(res) => false - case Left(err: Logic.CyclicNegation) => true - case Left(err) => sys.error(s"Expected cyclic error, got: $err") + case Right(_) => false + case Left(_: Logic.CyclicNegation) => true + case Left(err) => sys.error(s"Expected cyclic error, got: $err") } ) def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match { - case Left(err) => false + case Left(_) => false case Right(res) => val actual = res.provenSet if (actual != expected) 
diff --git a/main-actions/src/main/scala/sbt/Console.scala b/main-actions/src/main/scala/sbt/Console.scala index 9bc20d676..acd022ed0 100644 --- a/main-actions/src/main/scala/sbt/Console.scala +++ b/main-actions/src/main/scala/sbt/Console.scala @@ -20,25 +20,30 @@ final class Console(compiler: AnalyzingCompiler) { def apply(classpath: Seq[File], log: Logger): Try[Unit] = apply(classpath, Nil, "", "", log) - def apply(classpath: Seq[File], - options: Seq[String], - initialCommands: String, - cleanupCommands: String, - log: Logger): Try[Unit] = + def apply( + classpath: Seq[File], + options: Seq[String], + initialCommands: String, + cleanupCommands: String, + log: Logger + ): Try[Unit] = apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log) - def apply(classpath: Seq[File], - options: Seq[String], - loader: ClassLoader, - initialCommands: String, - cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] = + def apply( + classpath: Seq[File], + options: Seq[String], + loader: ClassLoader, + initialCommands: String, + cleanupCommands: String + )(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] = apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings) - def apply(classpath: Seq[File], - options: Seq[String], - initialCommands: String, - cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])( - implicit log: Logger): Try[Unit] = { + def apply( + classpath: Seq[File], + options: Seq[String], + initialCommands: String, + cleanupCommands: String + )(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = { def console0() = compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings) JLine.usingTerminal { t => diff --git a/main-actions/src/main/scala/sbt/Doc.scala b/main-actions/src/main/scala/sbt/Doc.scala index db8a70db9..93eafb1e6 100644 --- a/main-actions/src/main/scala/sbt/Doc.scala 
+++ b/main-actions/src/main/scala/sbt/Doc.scala @@ -10,10 +10,6 @@ package sbt import java.io.File import sbt.internal.inc.AnalyzingCompiler -import Predef.{ conforms => _, _ } -import sbt.io.syntax._ -import sbt.io.IO - import sbt.util.CacheStoreFactory import xsbti.Reporter import xsbti.compile.JavaTools @@ -23,93 +19,51 @@ import sbt.internal.util.ManagedLogger object Doc { import RawCompileLike._ - def scaladoc(label: String, - cacheStoreFactory: CacheStoreFactory, - compiler: AnalyzingCompiler): Gen = + + def scaladoc( + label: String, + cacheStoreFactory: CacheStoreFactory, + compiler: AnalyzingCompiler + ): Gen = scaladoc(label, cacheStoreFactory, compiler, Seq()) - def scaladoc(label: String, - cacheStoreFactory: CacheStoreFactory, - compiler: AnalyzingCompiler, - fileInputOptions: Seq[String]): Gen = - cached(cacheStoreFactory, - fileInputOptions, - prepare(label + " Scala API documentation", compiler.doc)) - def javadoc(label: String, - cacheStoreFactory: CacheStoreFactory, - doc: JavaTools, - log: Logger, - reporter: Reporter): Gen = - javadoc(label, cacheStoreFactory, doc, log, reporter, Seq()) - def javadoc(label: String, - cacheStoreFactory: CacheStoreFactory, - doc: JavaTools, - log: Logger, - reporter: Reporter, - fileInputOptions: Seq[String]): Gen = + + def scaladoc( + label: String, + cacheStoreFactory: CacheStoreFactory, + compiler: AnalyzingCompiler, + fileInputOptions: Seq[String] + ): Gen = cached( cacheStoreFactory, fileInputOptions, - prepare( - label + " Java API documentation", - filterSources( - javaSourcesOnly, - (sources: Seq[File], - classpath: Seq[File], - outputDirectory: File, - options: Seq[String], - maxErrors: Int, - log: Logger) => { - // doc.doc - ??? - } - ) - ) + prepare(label + " Scala API documentation", compiler.doc) ) + @deprecated("Going away", "1.1.1") + def javadoc( + label: String, + cacheStoreFactory: CacheStoreFactory, + doc: JavaTools, + log: Logger, + reporter: Reporter, + ): Gen = ??? 
+ + @deprecated("Going away", "1.1.1") + def javadoc( + label: String, + cacheStoreFactory: CacheStoreFactory, + doc: JavaTools, + log: Logger, + reporter: Reporter, + fileInputOptions: Seq[String], + ): Gen = ??? + + @deprecated("Going away", "1.1.1") val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java") - - private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc { - def apply(label: String, - sources: Seq[File], - classpath: Seq[File], - outputDirectory: File, - options: Seq[String], - log: ManagedLogger): Unit = { - generate("Scala", - label, - compiler.doc, - sources, - classpath, - outputDirectory, - options, - maximumErrors, - log) - } - } } +@deprecated("Going away", "1.1.1") sealed trait Doc { + @deprecated("Going away", "1.1.1") type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit - - private[sbt] final def generate(variant: String, - label: String, - docf: Gen, - sources: Seq[File], - classpath: Seq[File], - outputDirectory: File, - options: Seq[String], - maxErrors: Int, - log: ManagedLogger): Unit = { - val logSnip = variant + " API documentation" - if (sources.isEmpty) - log.info("No sources available, skipping " + logSnip + "...") - else { - log.info( - "Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...") - IO.delete(outputDirectory) - IO.createDirectory(outputDirectory) - docf(sources, classpath, outputDirectory, options, maxErrors, log) - log.info(logSnip + " generation successful.") - } - } } diff --git a/main-actions/src/main/scala/sbt/DotGraph.scala b/main-actions/src/main/scala/sbt/DotGraph.scala index d0be921be..c3543770a 100644 --- a/main-actions/src/main/scala/sbt/DotGraph.scala +++ b/main-actions/src/main/scala/sbt/DotGraph.scala @@ -30,29 +30,37 @@ object DotGraph { val toString = packageOnly compose fToString(sourceRoots) apply(relations, outputDirectory, toString, toString) } - def apply(relations: Relations, - 
outputDir: File, - sourceToString: File => String, - externalToString: File => String): Unit = { + def apply( + relations: Relations, + outputDir: File, + sourceToString: File => String, + externalToString: File => String + ): Unit = { def file(name: String) = new File(outputDir, name) IO.createDirectory(outputDir) - generateGraph(file("int-class-deps"), - "dependencies", - relations.internalClassDep, - identity[String], - identity[String]) - generateGraph(file("binary-dependencies"), - "externalDependencies", - relations.libraryDep, - externalToString, - sourceToString) + generateGraph( + file("int-class-deps"), + "dependencies", + relations.internalClassDep, + identity[String], + identity[String] + ) + generateGraph( + file("binary-dependencies"), + "externalDependencies", + relations.libraryDep, + externalToString, + sourceToString + ) } - def generateGraph[K, V](file: File, - graphName: String, - relation: Relation[K, V], - keyToString: K => String, - valueToString: V => String): Unit = { + def generateGraph[K, V]( + file: File, + graphName: String, + relation: Relation[K, V], + keyToString: K => String, + valueToString: V => String + ): Unit = { import scala.collection.mutable.{ HashMap, HashSet } val mappedGraph = new HashMap[String, HashSet[String]] for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values) diff --git a/main-actions/src/main/scala/sbt/ForkTests.scala b/main-actions/src/main/scala/sbt/ForkTests.scala index 5beec08d3..9934eca97 100755 --- a/main-actions/src/main/scala/sbt/ForkTests.scala +++ b/main-actions/src/main/scala/sbt/ForkTests.scala @@ -17,15 +17,18 @@ import sbt.io.IO import sbt.util.Logger import sbt.ConcurrentRestrictions.Tag import sbt.protocol.testing._ +import sbt.internal.util.ConsoleAppender private[sbt] object ForkTests { - def apply(runners: Map[TestFramework, Runner], - tests: Vector[TestDefinition], - config: Execution, - classpath: Seq[File], - fork: ForkOptions, - log: Logger, - tag: Tag): 
Task[TestOutput] = { + def apply( + runners: Map[TestFramework, Runner], + tests: Vector[TestDefinition], + config: Execution, + classpath: Seq[File], + fork: ForkOptions, + log: Logger, + tag: Tag + ): Task[TestOutput] = { val opts = processOptions(config, tests, log) import std.TaskExtra._ @@ -42,12 +45,14 @@ private[sbt] object ForkTests { } } - private[this] def mainTestTask(runners: Map[TestFramework, Runner], - opts: ProcessedOptions, - classpath: Seq[File], - fork: ForkOptions, - log: Logger, - parallel: Boolean): Task[TestOutput] = + private[this] def mainTestTask( + runners: Map[TestFramework, Runner], + opts: ProcessedOptions, + classpath: Seq[File], + fork: ForkOptions, + log: Logger, + parallel: Boolean + ): Task[TestOutput] = std.TaskExtra.task { val server = new ServerSocket(0) val testListeners = opts.testListeners flatMap { @@ -67,7 +72,8 @@ private[sbt] object ForkTests { } catch { case e: java.net.SocketException => log.error( - "Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage) + "Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage + ) log.trace(e) server.close() return @@ -78,15 +84,17 @@ private[sbt] object ForkTests { val is = new ObjectInputStream(socket.getInputStream) try { - val config = new ForkConfiguration(log.ansiCodesSupported, parallel) + val config = new ForkConfiguration(ConsoleAppender.formatEnabledInEnv, parallel) os.writeObject(config) - val taskdefs = opts.tests.map( - t => - new TaskDef(t.name, - forkFingerprint(t.fingerprint), - t.explicitlySpecified, - t.selectors)) + val taskdefs = opts.tests.map { t => + new TaskDef( + t.name, + forkFingerprint(t.fingerprint), + t.explicitlySpecified, + t.selectors + ) + } os.writeObject(taskdefs.toArray) os.writeInt(runners.size) @@ -116,20 +124,27 @@ private[sbt] object ForkTests { val acceptorThread = new Thread(Acceptor) acceptorThread.start() - val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain], - 
IO.classLocationFile[Framework]) - val options = Seq("-classpath", - fullCp mkString File.pathSeparator, - classOf[ForkMain].getCanonicalName, - server.getLocalPort.toString) + val fullCp = classpath ++: Seq( + IO.classLocationFile[ForkMain], + IO.classLocationFile[Framework] + ) + val options = Seq( + "-classpath", + fullCp mkString File.pathSeparator, + classOf[ForkMain].getCanonicalName, + server.getLocalPort.toString + ) val ec = Fork.java(fork, options) val result = if (ec != 0) - TestOutput(TestResult.Error, - Map( - "Running java with options " + options - .mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error), - Iterable.empty) + TestOutput( + TestResult.Error, + Map( + "Running java with options " + options + .mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error + ), + Iterable.empty + ) else { // Need to wait acceptor thread to finish its business acceptorThread.join() @@ -150,11 +165,13 @@ private[sbt] object ForkTests { case _ => sys.error("Unknown fingerprint type: " + f.getClass) } } -private final class React(is: ObjectInputStream, - os: ObjectOutputStream, - log: Logger, - listeners: Seq[TestReportListener], - results: mutable.Map[String, SuiteResult]) { +private final class React( + is: ObjectInputStream, + os: ObjectOutputStream, + log: Logger, + listeners: Seq[TestReportListener], + results: mutable.Map[String, SuiteResult] +) { import ForkTags._ @annotation.tailrec def react(): Unit = is.readObject match { diff --git a/main-actions/src/main/scala/sbt/Package.scala b/main-actions/src/main/scala/sbt/Package.scala index 54d107917..9ce8735b4 100644 --- a/main-actions/src/main/scala/sbt/Package.scala +++ b/main-actions/src/main/scala/sbt/Package.scala @@ -7,7 +7,6 @@ package sbt -import scala.Predef.{ conforms => _, _ } import java.io.File import java.util.jar.{ Attributes, Manifest } import scala.collection.JavaConverters._ @@ -50,9 +49,11 @@ object Package { } } - final class Configuration(val sources: Seq[(File, 
String)], - val jar: File, - val options: Seq[PackageOption]) + final class Configuration( + val sources: Seq[(File, String)], + val jar: File, + val options: Seq[PackageOption] + ) def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = { val manifest = new Manifest val main = manifest.getMainAttributes @@ -66,9 +67,9 @@ object Package { } setVersion(main) + type Inputs = Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") { - (inChanged, - inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) => + (inChanged, inputs: Inputs) => import exists.format val sources :+: _ :+: manifest :+: HNil = inputs outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) => @@ -86,8 +87,10 @@ object Package { } def setVersion(main: Attributes): Unit = { val version = Attributes.Name.MANIFEST_VERSION - if (main.getValue(version) eq null) + if (main.getValue(version) eq null) { main.put(version, "1.0") + () + } } def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = { import Attributes.Name._ @@ -95,16 +98,26 @@ object Package { val attribVals = Seq(name, version, orgName) ManifestAttributes(attribKeys zip attribVals: _*) } - def addImplManifestAttributes(name: String, - version: String, - homepage: Option[java.net.URL], - org: String, - orgName: String): PackageOption = { + def addImplManifestAttributes( + name: String, + version: String, + homepage: Option[java.net.URL], + org: String, + orgName: String + ): PackageOption = { import Attributes.Name._ - val attribKeys = Seq(IMPLEMENTATION_TITLE, - IMPLEMENTATION_VERSION, - IMPLEMENTATION_VENDOR, - IMPLEMENTATION_VENDOR_ID) + + // The ones in Attributes.Name are deprecated saying: + // "Extension mechanism will be removed in a future release. Use class path instead." 
+ val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id") + val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL") + + val attribKeys = Seq( + IMPLEMENTATION_TITLE, + IMPLEMENTATION_VERSION, + IMPLEMENTATION_VENDOR, + IMPLEMENTATION_VENDOR_ID, + ) val attribVals = Seq(name, version, orgName, org) ManifestAttributes((attribKeys zip attribVals) ++ { homepage map (h => (IMPLEMENTATION_URL, h.toString)) diff --git a/main-actions/src/main/scala/sbt/RawCompileLike.scala b/main-actions/src/main/scala/sbt/RawCompileLike.scala index f06e7e3a8..c19d55a26 100644 --- a/main-actions/src/main/scala/sbt/RawCompileLike.scala +++ b/main-actions/src/main/scala/sbt/RawCompileLike.scala @@ -7,10 +7,10 @@ package sbt +import scala.annotation.tailrec import java.io.File import sbt.internal.inc.{ RawCompiler, ScalaInstance } -import Predef.{ conforms => _, _ } import sbt.io.syntax._ import sbt.io.IO @@ -30,7 +30,7 @@ object RawCompileLike { type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = { - @annotation.tailrec + @tailrec def loop(opt: List[String], result: List[File]): List[File] = { opt.dropWhile(!fileInputOpts.contains(_)) match { case List(_, fileOpt, tail @ _*) => { @@ -46,16 +46,20 @@ object RawCompileLike { def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen = cached(cacheStoreFactory, Seq(), doCompile) - def cached(cacheStoreFactory: CacheStoreFactory, - fileInputOpts: Seq[String], - doCompile: Gen): Gen = + + def cached( + cacheStoreFactory: CacheStoreFactory, + fileInputOpts: Seq[String], + doCompile: Gen + ): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => { type Inputs = - FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[ - String] :+: Int :+: HNil + FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: + 
Seq[String] :+: Int :+: HNil val inputs : Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified( - classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil + classpath.toSet + ) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) => inputChanged(cacheStoreFactory make "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => @@ -67,6 +71,7 @@ object RawCompileLike { } cachedComp(inputs)(exists(outputDirectory.allPaths.get.toSet)) } + def prepare(description: String, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => { if (sources.isEmpty) @@ -79,20 +84,24 @@ object RawCompileLike { log.info(description.capitalize + " successful.") } } + def filterSources(f: File => Boolean, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log) def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = - (sources, classpath, outputDirectory, options, maxErrors, log) => { + (sources, classpath, outputDirectory, options, _, log) => { val compiler = new RawCompiler(instance, cpOptions, log) compiler(sources, classpath, outputDirectory, options) } - def compile(label: String, - cacheStoreFactory: CacheStoreFactory, - instance: ScalaInstance, - cpOptions: ClasspathOptions): Gen = + + def compile( + label: String, + cacheStoreFactory: CacheStoreFactory, + instance: ScalaInstance, + cpOptions: ClasspathOptions + ): Gen = cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions))) - val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => () + val nop: Gen = (_, _, _, _, _, _) => () } diff --git a/main-actions/src/main/scala/sbt/Sync.scala b/main-actions/src/main/scala/sbt/Sync.scala index 
ec2eb73e3..663cc04fc 100644 --- a/main-actions/src/main/scala/sbt/Sync.scala +++ b/main-actions/src/main/scala/sbt/Sync.scala @@ -30,10 +30,18 @@ import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError } * It is safe to use for its intended purpose: copying resources to a class output directory. */ object Sync { - def apply(store: CacheStore, - inStyle: FileInfo.Style = FileInfo.lastModified, - outStyle: FileInfo.Style = FileInfo.exists) - : Traversable[(File, File)] => Relation[File, File] = + @deprecated("Use sync, which doesn't take the unused outStyle param", "1.1.1") + def apply( + store: CacheStore, + inStyle: FileInfo.Style = FileInfo.lastModified, + outStyle: FileInfo.Style = FileInfo.exists, + ): Traversable[(File, File)] => Relation[File, File] = + sync(store, inStyle) + + def sync( + store: CacheStore, + inStyle: FileInfo.Style = FileInfo.lastModified, + ): Traversable[(File, File)] => Relation[File, File] = mappings => { val relation = Relation.empty ++ mappings noDuplicateTargets(relation) @@ -63,26 +71,24 @@ object Sync { def copy(source: File, target: File): Unit = if (source.isFile) IO.copyFile(source, target, true) - else if (!target.exists) // we don't want to update the last modified time of an existing directory - { - IO.createDirectory(target) - IO.copyLastModified(source, target) - } + else if (!target.exists) { // we don't want to update the last modified time of an existing directory + IO.createDirectory(target) + IO.copyLastModified(source, target) + () + } def noDuplicateTargets(relation: Relation[File, File]): Unit = { - val dups = relation.reverseMap.filter { - case (_, srcs) => - srcs.size >= 2 && srcs.exists(!_.isDirectory) - } map { - case (target, srcs) => - "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") - } + val dups = relation.reverseMap + .filter { case (_, srcs) => srcs.size >= 2 && srcs.exists(!_.isDirectory) } + .map { case (target, srcs) => "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") } if 
(dups.nonEmpty) sys.error("Duplicate mappings:" + dups.mkString) } - implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]], - bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] = + implicit def relationFormat[A, B]( + implicit af: JsonFormat[Map[A, Set[B]]], + bf: JsonFormat[Map[B, Set[A]]] + ): JsonFormat[Relation[A, B]] = new JsonFormat[Relation[A, B]] { def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] = jsOpt match { @@ -105,15 +111,18 @@ object Sync { } - def writeInfo[F <: FileInfo](store: CacheStore, - relation: Relation[File, File], - info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit = + def writeInfo[F <: FileInfo]( + store: CacheStore, + relation: Relation[File, File], + info: Map[File, F] + )(implicit infoFormat: JsonFormat[F]): Unit = store.write((relation, info)) type RelationInfo[F] = (Relation[File, File], Map[File, F]) - def readInfo[F <: FileInfo](store: CacheStore)( - implicit infoFormat: JsonFormat[F]): RelationInfo[F] = + def readInfo[F <: FileInfo]( + store: CacheStore + )(implicit infoFormat: JsonFormat[F]): RelationInfo[F] = try { readUncaught[F](store)(infoFormat) } catch { case _: IOException => (Relation.empty[File, File], Map.empty[File, F]) case _: ZipException => (Relation.empty[File, File], Map.empty[File, F]) @@ -124,7 +133,8 @@ object Sync { } } - private def readUncaught[F <: FileInfo](store: CacheStore)( - implicit infoFormat: JsonFormat[F]): RelationInfo[F] = + private def readUncaught[F <: FileInfo]( + store: CacheStore + )(implicit infoFormat: JsonFormat[F]): RelationInfo[F] = store.read(default = (Relation.empty[File, File], Map.empty[File, F])) } diff --git a/main-actions/src/main/scala/sbt/TestResultLogger.scala b/main-actions/src/main/scala/sbt/TestResultLogger.scala index 01cc9e03f..5f196f2b5 100644 --- a/main-actions/src/main/scala/sbt/TestResultLogger.scala +++ b/main-actions/src/main/scala/sbt/TestResultLogger.scala @@ -31,13 +31,17 @@ trait TestResultLogger { 
def run(log: Logger, results: Output, taskName: String): Unit /** Only allow invocation if certain criteria is met, else use another `TestResultLogger` (defaulting to nothing) . */ - final def onlyIf(f: (Output, String) => Boolean, - otherwise: TestResultLogger = TestResultLogger.Null) = + final def onlyIf( + f: (Output, String) => Boolean, + otherwise: TestResultLogger = TestResultLogger.Null + ) = TestResultLogger.choose(f, this, otherwise) /** Allow invocation unless a certain predicate passes, in which case use another `TestResultLogger` (defaulting to nothing) . */ - final def unless(f: (Output, String) => Boolean, - otherwise: TestResultLogger = TestResultLogger.Null) = + final def unless( + f: (Output, String) => Boolean, + otherwise: TestResultLogger = TestResultLogger.Null + ) = TestResultLogger.choose(f, otherwise, this) } @@ -69,8 +73,10 @@ object TestResultLogger { * @param f The `TestResultLogger` to choose if the predicate fails. */ def choose(cond: (Output, String) => Boolean, t: TestResultLogger, f: TestResultLogger) = - TestResultLogger((log, results, taskName) => - (if (cond(results, taskName)) t else f).run(log, results, taskName)) + TestResultLogger( + (log, results, taskName) => + (if (cond(results, taskName)) t else f).run(log, results, taskName) + ) /** Transforms the input to be completely silent when the subject module doesn't contain any tests. 
*/ def silenceWhenNoTests(d: Defaults.Main) = @@ -127,32 +133,39 @@ object TestResultLogger { results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty) val printStandard = TestResultLogger((log, results, _) => { - val (skippedCount, - errorsCount, - passedCount, - failuresCount, - ignoredCount, - canceledCount, - pendingCount) = + val ( + skippedCount, + errorsCount, + passedCount, + failuresCount, + ignoredCount, + canceledCount, + pendingCount, + ) = results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) { - case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc), - (name @ _, testEvent)) => - (skippedAcc + testEvent.skippedCount, - errorAcc + testEvent.errorCount, - passedAcc + testEvent.passedCount, - failureAcc + testEvent.failureCount, - ignoredAcc + testEvent.ignoredCount, - canceledAcc + testEvent.canceledCount, - pendingAcc + testEvent.pendingCount) + case (acc, (_, testEvent)) => + val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) = + acc + ( + skippedAcc + testEvent.skippedCount, + errorAcc + testEvent.errorCount, + passedAcc + testEvent.passedCount, + failureAcc + testEvent.failureCount, + ignoredAcc + testEvent.ignoredCount, + canceledAcc + testEvent.canceledCount, + pendingAcc + testEvent.pendingCount, + ) } val totalCount = failuresCount + errorsCount + skippedCount + passedCount val base = s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount" - val otherCounts = Seq("Skipped" -> skippedCount, - "Ignored" -> ignoredCount, - "Canceled" -> canceledCount, - "Pending" -> pendingCount) + val otherCounts = Seq( + "Skipped" -> skippedCount, + "Ignored" -> ignoredCount, + "Canceled" -> canceledCount, + "Pending" -> pendingCount + ) val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" } val postfix = base + extra.mkString @@ -181,6 +194,7 @@ object TestResultLogger { }) val printNoTests = 
TestResultLogger( - (log, results, taskName) => log.info("No tests to run for " + taskName)) + (log, results, taskName) => log.info("No tests to run for " + taskName) + ) } } diff --git a/main-actions/src/main/scala/sbt/Tests.scala b/main-actions/src/main/scala/sbt/Tests.scala index bd69e4c30..cc76b1412 100644 --- a/main-actions/src/main/scala/sbt/Tests.scala +++ b/main-actions/src/main/scala/sbt/Tests.scala @@ -34,6 +34,7 @@ import sbt.util.Logger import sbt.protocol.testing.TestResult sealed trait TestOption + object Tests { /** @@ -43,9 +44,11 @@ object Tests { * @param events The result of each test group (suite) executed during this test run. * @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated. */ - final case class Output(overall: TestResult, - events: Map[String, SuiteResult], - summaries: Iterable[Summary]) + final case class Output( + overall: TestResult, + events: Map[String, SuiteResult], + summaries: Iterable[Summary] + ) /** * Summarizes a test run. 
@@ -137,9 +140,11 @@ object Tests { val cleanup: Vector[ClassLoader => Unit], val testListeners: Vector[TestReportListener] ) - private[sbt] def processOptions(config: Execution, - discovered: Vector[TestDefinition], - log: Logger): ProcessedOptions = { + private[sbt] def processOptions( + config: Execution, + discovered: Vector[TestDefinition], + log: Logger + ): ProcessedOptions = { import collection.mutable.{ HashSet, ListBuffer } val testFilters = new ListBuffer[String => Boolean] var orderedFilters = Seq[String => Boolean]() @@ -167,7 +172,8 @@ object Tests { if (undefinedFrameworks.nonEmpty) log.warn( "Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks - .mkString("\n\t")) + .mkString("\n\t") + ) def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name)) @@ -176,10 +182,12 @@ object Tests { if (orderedFilters.isEmpty) filtered0 else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct val uniqueTests = distinctBy(tests)(_.name) - new ProcessedOptions(uniqueTests.toVector, - setup.toVector, - cleanup.toVector, - testListeners.toVector) + new ProcessedOptions( + uniqueTests.toVector, + setup.toVector, + cleanup.toVector, + testListeners.toVector + ) } private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = { @@ -187,33 +195,39 @@ object Tests { in.filter(t => seen.add(f(t))) } - def apply(frameworks: Map[TestFramework, Framework], - testLoader: ClassLoader, - runners: Map[TestFramework, Runner], - discovered: Vector[TestDefinition], - config: Execution, - log: ManagedLogger): Task[Output] = { + def apply( + frameworks: Map[TestFramework, Framework], + testLoader: ClassLoader, + runners: Map[TestFramework, Runner], + discovered: Vector[TestDefinition], + config: Execution, + log: ManagedLogger + ): Task[Output] = { val o = processOptions(config, discovered, log) - testTask(testLoader, - frameworks, - runners, - o.tests, - 
o.setup, - o.cleanup, - log, - o.testListeners, - config) + testTask( + testLoader, + frameworks, + runners, + o.tests, + o.setup, + o.cleanup, + log, + o.testListeners, + config + ) } - def testTask(loader: ClassLoader, - frameworks: Map[TestFramework, Framework], - runners: Map[TestFramework, Runner], - tests: Vector[TestDefinition], - userSetup: Iterable[ClassLoader => Unit], - userCleanup: Iterable[ClassLoader => Unit], - log: ManagedLogger, - testListeners: Vector[TestReportListener], - config: Execution): Task[Output] = { + def testTask( + loader: ClassLoader, + frameworks: Map[TestFramework, Framework], + runners: Map[TestFramework, Runner], + tests: Vector[TestDefinition], + userSetup: Iterable[ClassLoader => Unit], + userCleanup: Iterable[ClassLoader => Unit], + log: ManagedLogger, + testListeners: Vector[TestReportListener], + config: Execution + ): Task[Output] = { def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*) def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () => a(loader) @@ -227,7 +241,7 @@ object Tests { if (config.parallel) makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join else - makeSerial(loader, runnables, setupTasks, config.tags) + makeSerial(loader, runnables, setupTasks) val taggedMainTasks = mainTasks.tagw(config.tags: _*) taggedMainTasks map processResults flatMap { results => val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall)) @@ -238,31 +252,43 @@ object Tests { } type TestRunnable = (String, TestFunction) - private def createNestedRunnables(loader: ClassLoader, - testFun: TestFunction, - nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] = + private def createNestedRunnables( + loader: ClassLoader, + testFun: TestFunction, + nestedTasks: Seq[TestTask] + ): Seq[(String, TestFunction)] = nestedTasks.view.zipWithIndex map { case (nt, idx) => val testFunDef = testFun.taskDef - (testFunDef.fullyQualifiedName, - 
TestFramework.createTestFunction(loader, - new TaskDef(testFunDef.fullyQualifiedName + "-" + idx, - testFunDef.fingerprint, - testFunDef.explicitlySpecified, - testFunDef.selectors), - testFun.runner, - nt)) + ( + testFunDef.fullyQualifiedName, + TestFramework.createTestFunction( + loader, + new TaskDef( + testFunDef.fullyQualifiedName + "-" + idx, + testFunDef.fingerprint, + testFunDef.explicitlySpecified, + testFunDef.selectors + ), + testFun.runner, + nt + ) + ) } - def makeParallel(loader: ClassLoader, - runnables: Iterable[TestRunnable], - setupTasks: Task[Unit], - tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = + def makeParallel( + loader: ClassLoader, + runnables: Iterable[TestRunnable], + setupTasks: Task[Unit], + tags: Seq[(Tag, Int)] + ): Task[Map[String, SuiteResult]] = toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks) - def toTasks(loader: ClassLoader, - runnables: Seq[TestRunnable], - tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = { + def toTasks( + loader: ClassLoader, + runnables: Seq[TestRunnable], + tags: Seq[(Tag, Int)] + ): Task[Map[String, SuiteResult]] = { val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) } tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) { case (sum, e) => @@ -274,10 +300,12 @@ object Tests { }) } - def toTask(loader: ClassLoader, - name: String, - fun: TestFunction, - tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = { + def toTask( + loader: ClassLoader, + name: String, + fun: TestFunction, + tags: Seq[(Tag, Int)] + ): Task[Map[String, SuiteResult]] = { val base = task { (name, fun.apply()) } val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*) taggedBase flatMap { @@ -294,13 +322,25 @@ object Tests { } } - def makeSerial(loader: ClassLoader, - runnables: Seq[TestRunnable], - setupTasks: Task[Unit], - tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] = { + @deprecated("Use the variant without tags", 
"1.1.1") + def makeSerial( + loader: ClassLoader, + runnables: Seq[TestRunnable], + setupTasks: Task[Unit], + tags: Seq[(Tag, Int)], + ): Task[List[(String, SuiteResult)]] = + makeSerial(loader, runnables, setupTasks) + + def makeSerial( + loader: ClassLoader, + runnables: Seq[TestRunnable], + setupTasks: Task[Unit], + ): Task[List[(String, SuiteResult)]] = { @tailrec - def processRunnable(runnableList: List[TestRunnable], - acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] = + def processRunnable( + runnableList: List[TestRunnable], + acc: List[(String, SuiteResult)] + ): List[(String, SuiteResult)] = runnableList match { case hd :: rst => val testFun = hd._2 @@ -350,9 +390,11 @@ object Tests { ((TestResult.Passed: TestResult) /: results) { (acc, result) => if (severity(acc) < severity(result)) result else acc } - def discover(frameworks: Seq[Framework], - analysis: CompileAnalysis, - log: Logger): (Seq[TestDefinition], Set[String]) = + def discover( + frameworks: Seq[Framework], + analysis: CompileAnalysis, + log: Logger + ): (Seq[TestDefinition], Set[String]) = discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log) def allDefs(analysis: CompileAnalysis) = analysis match { @@ -368,9 +410,11 @@ object Tests { all }.toSeq } - def discover(fingerprints: Seq[Fingerprint], - definitions: Seq[Definition], - log: Logger): (Seq[TestDefinition], Set[String]) = { + def discover( + fingerprints: Seq[Fingerprint], + definitions: Seq[Definition], + log: Logger + ): (Seq[TestDefinition], Set[String]) = { val subclasses = fingerprints collect { case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub) }; @@ -381,9 +425,11 @@ object Tests { log.debug("Annotation fingerprints: " + annotations) def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet - def defined(in: Seq[(String, Boolean, Fingerprint)], - names: Set[String], - IsModule: Boolean): Seq[Fingerprint] = + def defined( + in: Seq[(String, Boolean, 
Fingerprint)], + names: Set[String], + IsModule: Boolean + ): Seq[Fingerprint] = in collect { case (name, IsModule, print) if names(name) => print } def toFingerprints(d: Discovered): Seq[Fingerprint] = diff --git a/main-actions/src/main/scala/sbt/compiler/Eval.scala b/main-actions/src/main/scala/sbt/compiler/Eval.scala index 56fbce162..e410107bd 100644 --- a/main-actions/src/main/scala/sbt/compiler/Eval.scala +++ b/main-actions/src/main/scala/sbt/compiler/Eval.scala @@ -34,10 +34,12 @@ final class EvalImports(val strings: Seq[(String, Int)], val srcName: String) * the module from that class loader. `generated` contains the compiled classes and cache files related * to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`. */ -final class EvalResult(val tpe: String, - val getValue: ClassLoader => Any, - val generated: Seq[File], - val enclosingModule: String) +final class EvalResult( + val tpe: String, + val getValue: ClassLoader => Any, + val generated: Seq[File], + val enclosingModule: String +) /** * The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated, @@ -46,10 +48,12 @@ final class EvalResult(val tpe: String, * from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`. * The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`. 
*/ -final class EvalDefinitions(val loader: ClassLoader => ClassLoader, - val generated: Seq[File], - val enclosingModule: String, - val valNames: Seq[String]) { +final class EvalDefinitions( + val loader: ClassLoader => ClassLoader, + val generated: Seq[File], + val enclosingModule: String, + val valNames: Seq[String] +) { def values(parent: ClassLoader): Seq[Any] = { val module = getModule(enclosingModule, loader(parent)) for (n <- valNames) yield module.getClass.getMethod(n).invoke(module) @@ -58,10 +62,12 @@ final class EvalDefinitions(val loader: ClassLoader => ClassLoader, final class EvalException(msg: String) extends RuntimeException(msg) // not thread safe, since it reuses a Global instance -final class Eval(optionsNoncp: Seq[String], - classpath: Seq[File], - mkReporter: Settings => Reporter, - backing: Option[File]) { +final class Eval( + optionsNoncp: Seq[String], + classpath: Seq[File], + mkReporter: Settings => Reporter, + backing: Option[File] +) { def this(mkReporter: Settings => Reporter, backing: Option[File]) = this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing) def this() = this(s => new ConsoleReporter(s), None) @@ -97,11 +103,13 @@ final class Eval(optionsNoncp: Seq[String], private[this] var toUnlinkLater = List[Symbol]() private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym) - def eval(expression: String, - imports: EvalImports = noImports, - tpeName: Option[String] = None, - srcName: String = "", - line: Int = DefaultStartLine): EvalResult = { + def eval( + expression: String, + imports: EvalImports = noImports, + tpeName: Option[String] = None, + srcName: String = "", + line: Int = DefaultStartLine + ): EvalResult = { val ev = new EvalType[String] { def makeUnit = mkUnit(srcName, line, expression) def unlink = true @@ -121,11 +129,13 @@ final class Eval(optionsNoncp: Seq[String], val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl)) new EvalResult(i.extra, value, i.generated, 
i.enclosingModule) } - def evalDefinitions(definitions: Seq[(String, scala.Range)], - imports: EvalImports, - srcName: String, - file: Option[File], - valTypes: Seq[String]): EvalDefinitions = { + def evalDefinitions( + definitions: Seq[(String, scala.Range)], + imports: EvalImports, + srcName: String, + file: Option[File], + valTypes: Seq[String] + ): EvalDefinitions = { require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.") val ev = new EvalType[Seq[String]] { lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions) @@ -152,10 +162,12 @@ final class Eval(optionsNoncp: Seq[String], new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra) } - private[this] def evalCommon[T](content: Seq[String], - imports: EvalImports, - tpeName: Option[String], - ev: EvalType[T]): EvalIntermediate[T] = { + private[this] def evalCommon[T]( + content: Seq[String], + imports: EvalImports, + tpeName: Option[String], + ev: EvalType[T] + ): EvalIntermediate[T] = { import Eval._ // TODO - We also encode the source of the setting into the hash to avoid conflicts where the exact SAME setting // is defined in multiple evaluated instances with a backing. 
This leads to issues with finding a previous @@ -212,12 +224,14 @@ final class Eval(optionsNoncp: Seq[String], // location of the cached type or definition information private[this] def cacheFile(base: File, moduleName: String): File = new File(base, moduleName + ".cache") - private[this] def compileAndLoad[T](run: Run, - unit: CompilationUnit, - imports: EvalImports, - backing: Option[File], - moduleName: String, - ev: EvalType[T]): (T, ClassLoader => ClassLoader) = { + private[this] def compileAndLoad[T]( + run: Run, + unit: CompilationUnit, + imports: EvalImports, + backing: Option[File], + moduleName: String, + ev: EvalType[T] + ): (T, ClassLoader => ClassLoader) = { global.curRun = run run.currentUnit = unit val dir = outputDirectory(backing) @@ -262,18 +276,22 @@ final class Eval(optionsNoncp: Seq[String], parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent)) //wrap tree in object objectName { def WrapValName = } - def augment(parser: global.syntaxAnalyzer.UnitParser, - imports: Seq[Tree], - tree: Tree, - tpt: Tree, - objectName: String): Tree = { + def augment( + parser: global.syntaxAnalyzer.UnitParser, + imports: Seq[Tree], + tree: Tree, + tpt: Tree, + objectName: String + ): Tree = { val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree) syntheticModule(parser, imports, method :: Nil, objectName) } - private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser, - imports: Seq[Tree], - definitions: List[Tree], - objectName: String): Tree = { + private[this] def syntheticModule( + parser: global.syntaxAnalyzer.UnitParser, + imports: Seq[Tree], + definitions: List[Tree], + objectName: String + ): Tree = { val emptyTypeName = nme.EMPTY.toTypeName def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } def emptyInit = DefDef( @@ -282,8 +300,10 @@ final class Eval(optionsNoncp: Seq[String], Nil, List(Nil), TypeTree(), - Block(List(Apply(Select(Super(This(emptyTypeName), 
emptyTypeName), nme.CONSTRUCTOR), Nil)), - Literal(Constant(()))) + Block( + List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)), + Literal(Constant(())) + ) ) def moduleBody = Template(List(gen.scalaAnyRefConstr), noSelfType, emptyInit :: definitions) @@ -321,10 +341,12 @@ final class Eval(optionsNoncp: Seq[String], private[this] def isTopLevelModule(s: Symbol): Boolean = s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass - private[this] final class EvalIntermediate[T](val extra: T, - val loader: ClassLoader => ClassLoader, - val generated: Seq[File], - val enclosingModule: String) + private[this] final class EvalIntermediate[T]( + val extra: T, + val loader: ClassLoader => ClassLoader, + val generated: Seq[File], + val enclosingModule: String + ) private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists // TODO: use the code from Analyzer @@ -338,10 +360,12 @@ final class Eval(optionsNoncp: Seq[String], (s contains moduleName) } - private[this] class ParseErrorStrings(val base: String, - val extraBlank: String, - val missingBlank: String, - val extraSemi: String) + private[this] class ParseErrorStrings( + val base: String, + val extraBlank: String, + val missingBlank: String, + val extraSemi: String + ) private[this] def definitionErrorStrings = new ParseErrorStrings( base = "Error parsing definition.", extraBlank = " Ensure that there are no blank lines within a definition.", @@ -360,9 +384,11 @@ final class Eval(optionsNoncp: Seq[String], * Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state * to catch errors that are common when the content is embedded in a blank-line-delimited format. 
*/ - private[this] def parse[T](unit: CompilationUnit, - errors: ParseErrorStrings, - f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = { + private[this] def parse[T]( + unit: CompilationUnit, + errors: ParseErrorStrings, + f: syntaxAnalyzer.UnitParser => T + ): (syntaxAnalyzer.UnitParser, T) = { val parser = new syntaxAnalyzer.UnitParser(unit) val tree = f(parser) @@ -463,7 +489,8 @@ final class Eval(optionsNoncp: Seq[String], */ private[this] def mkDefsUnit( srcName: String, - definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = { + definitions: Seq[(String, scala.Range)] + ): (CompilationUnit, Seq[CompilationUnit]) = { def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap)) diff --git a/main-actions/src/test/scala/sbt/CacheIvyTest.scala b/main-actions/src/test/scala/sbt/CacheIvyTest.scala index 1827bc5db..73f145764 100644 --- a/main-actions/src/test/scala/sbt/CacheIvyTest.scala +++ b/main-actions/src/test/scala/sbt/CacheIvyTest.scala @@ -37,19 +37,21 @@ class CacheIvyTest extends Properties("CacheIvy") { content = converter.toJsonUnsafe(value) } - private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)( - implicit cache: SingletonCache[T]): U = { + private def testCache[T: JsonFormat, U]( + f: (SingletonCache[T], CacheStore) => U + )(implicit cache: SingletonCache[T]): U = { val store = new InMemoryStore(Converter) f(cache, store) } - private def cachePreservesEquality[T: JsonFormat](m: T, - eq: (T, T) => Prop, - str: T => String): Prop = testCache[T, Prop] { - (cache, store) => - cache.write(store, m) - val out = cache.read(store) - eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}" + private def cachePreservesEquality[T: JsonFormat]( + m: T, + eq: (T, T) => Prop, + str: T => String + ): Prop = testCache[T, Prop] { (cache, store) => + cache.write(store, m) + val out = cache.read(store) + eq(out, m) :| 
s"Expected: ${str(m)}" :| s"Got: ${str(out)}" } implicit val arbConfigRef: Arbitrary[ConfigRef] = Arbitrary( diff --git a/main-actions/src/test/scala/sbt/compiler/EvalTest.scala b/main-actions/src/test/scala/sbt/compiler/EvalTest.scala index 10600a9a8..a5d6eb387 100644 --- a/main-actions/src/test/scala/sbt/compiler/EvalTest.scala +++ b/main-actions/src/test/scala/sbt/compiler/EvalTest.scala @@ -38,7 +38,8 @@ class EvalTest extends Properties("eval") { val line = math.abs(l) val src = "mismatch" throws(classOf[RuntimeException])( - eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) && + eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src) + ) && hasErrors(line + 1, src) } @@ -78,14 +79,17 @@ val p = { property("explicit import") = forAll(testImport("import math.abs" :: Nil)) property("wildcard import") = forAll(testImport("import math._" :: Nil)) property("comma-separated imports") = forAll( - testImport("import annotation._, math._, meta._" :: Nil)) + testImport("import annotation._, math._, meta._" :: Nil) + ) property("multiple imports") = forAll( - testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil)) + testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil) + ) private[this] def testImport(imports: Seq[String]): Int => Prop = i => value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs( - i) + i + ) private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }" val LocalType = "AnyRef{val i: Int}" diff --git a/main-command/src/main/scala/sbt/BasicCommandStrings.scala b/main-command/src/main/scala/sbt/BasicCommandStrings.scala index e9e73fba2..482612f6f 100644 --- a/main-command/src/main/scala/sbt/BasicCommandStrings.scala +++ b/main-command/src/main/scala/sbt/BasicCommandStrings.scala @@ -21,8 +21,10 @@ object BasicCommandStrings { val TerminateAction: String = Exit def helpBrief = - (HelpCommand, - 
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ').") + ( + HelpCommand, + s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ')." + ) def helpDetailed = s"""$HelpCommand Prints a help summary. @@ -133,8 +135,10 @@ $HelpCommand def Multi = ";" def MultiBrief = - (Multi + " (" + Multi + " )*", - "Runs the provided semicolon-separated commands.") + ( + Multi + " (" + Multi + " )*", + "Runs the provided semicolon-separated commands." + ) def MultiDetailed = Multi + " command1 " + Multi + """ command2 ... @@ -185,20 +189,6 @@ $AliasCommand name= def StashOnFailure = "sbtStashOnFailure" def PopOnFailure = "sbtPopOnFailure" - // commands with poor choices for names since they clash with the usual conventions for command line options - // these are not documented and are mainly internal commands and can be removed without a full deprecation cycle - object Compat { - def OnFailure = "-" - def ClearOnFailure = "--" - def FailureWall = "---" - def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure) - def ClearOnFailureDeprecated = - deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure) - def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall) - private[this] def deprecatedAlias(oldName: String, newName: String): String = - s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in a later version" - } - def FailureWall = "resumeFromFailure" def ClearOnFailure = "sbtClearOnFailure" diff --git a/main-command/src/main/scala/sbt/BasicCommands.scala b/main-command/src/main/scala/sbt/BasicCommands.scala index 97ed9d1d6..eee6311ac 100644 --- a/main-command/src/main/scala/sbt/BasicCommands.scala +++ b/main-command/src/main/scala/sbt/BasicCommands.scala @@ -56,7 +56,7 @@ object BasicCommands { client, read, alias - ) ++ compatCommands + ) def nop: Command = Command.custom(s => success(() => s)) 
def ignore: Command = Command.command(FailureWall)(idFun) @@ -81,7 +81,8 @@ object BasicCommands { val h = (Help.empty /: s.definedCommands)( (a, b) => a ++ (try b.help(s) - catch { case NonFatal(_) => Help.empty })) + catch { case NonFatal(_) => Help.empty }) + ) val helpCommands = h.detail.keySet val spacedArg = singleArgument(helpCommands).? applyEffect(spacedArg)(runHelp(s, h)) @@ -95,10 +96,14 @@ object BasicCommands { } def completionsCommand: Command = - Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)( - runCompletions(_)(_)) + Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(_ => completionsParser)( + runCompletions(_)(_) + ) - def completionsParser(state: State): Parser[String] = { + @deprecated("No longer public", "1.1.1") + def completionsParser(state: State): Parser[String] = completionsParser + + private[this] def completionsParser: Parser[String] = { val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => nq ++ s } val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted) token(quotedOrUnquotedSingleArgument ?? 
"" examples ("", " ")) @@ -116,8 +121,9 @@ object BasicCommands { def multiParser(s: State): Parser[List[String]] = { val nonSemi = token(charClass(_ != ';').+, hide = const(true)) val semi = token(';' ~> OptSpace) - val part = semi flatMap (_ => - matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace)) + val part = semi flatMap ( + _ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace) + ) (part map (_.trim)).+ map (_.toList) } @@ -133,40 +139,26 @@ object BasicCommands { matched(s.combinedParser | token(any, hide = const(true))) def ifLast: Command = - Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) => - if (s.remainingCommands.isEmpty) arg :: s else s) + Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)( + (s, arg) => if (s.remainingCommands.isEmpty) arg :: s else s + ) def append: Command = Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)( - (s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source))) + (s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)) + ) def setOnFailure: Command = - Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)((s, arg) => - s.copy(onFailure = Some(Exec(arg, s.source)))) - - private[sbt] def compatCommands = Seq( - Command.command(Compat.ClearOnFailure) { s => - s.log.warn(Compat.ClearOnFailureDeprecated) - s.copy(onFailure = None) - }, - Command.arb( - s => - token(Compat.OnFailure, hide = const(true)) - .flatMap(_ => otherCommandParser(s))) { (s, arg) => - s.log.warn(Compat.OnFailureDeprecated) - s.copy(onFailure = Some(Exec(arg, s.source))) - }, - Command.command(Compat.FailureWall) { s => - s.log.warn(Compat.FailureWallDeprecated) - s - } - ) + Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)( + (s, arg) => s.copy(onFailure = Some(Exec(arg, s.source))) + ) def clearOnFailure: Command = 
Command.command(ClearOnFailure)(s => s.copy(onFailure = None)) def stashOnFailure: Command = - Command.command(StashOnFailure)(s => - s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)) + Command.command(StashOnFailure)( + s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten) + ) def popOnFailure: Command = Command.command(PopOnFailure) { s => val stack = s.get(OnFailureStack).getOrElse(Nil) @@ -176,19 +168,19 @@ object BasicCommands { } def reboot: Command = - Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootOptionParser) { + Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(_ => rebootOptionParser) { case (s, (full, currentOnly)) => s.reboot(full, currentOnly) } @deprecated("Use rebootOptionParser", "1.1.0") - def rebootParser(s: State): Parser[Boolean] = - rebootOptionParser(s) map { case (full, currentOnly) => full } + def rebootParser(s: State): Parser[Boolean] = rebootOptionParser map { case (full, _) => full } - private[sbt] def rebootOptionParser(s: State): Parser[(Boolean, Boolean)] = - token( - Space ~> (("full" ^^^ ((true, false))) | - ("dev" ^^^ ((false, true))))) ?? ((false, false)) + private[sbt] def rebootOptionParser: Parser[(Boolean, Boolean)] = { + val fullOption = "full" ^^^ ((true, false)) + val devOption = "dev" ^^^ ((false, true)) + token(Space ~> (fullOption | devOption)) ?? 
((false, false)) + } def call: Command = Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) { @@ -211,8 +203,9 @@ object BasicCommands { private[this] def className: Parser[String] = { val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.") def single(s: String) = Completions.single(Completion.displayOnly(s)) - val compl = TokenCompletions.fixed((seen, _) => - if (seen.startsWith("-")) Completions.nil else single("")) + val compl = TokenCompletions.fixed( + (seen, _) => if (seen.startsWith("-")) Completions.nil else single("") + ) token(base, compl) } @@ -237,10 +230,9 @@ object BasicCommands { def historyParser(s: State): Parser[() => State] = Command.applyEffect(HistoryCommands.actionParser) { histFun => - val logError = (msg: String) => s.log.error(msg) - val hp = s get historyPath getOrElse None + val hp = (s get historyPath).flatten val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq - histFun(CHistory(lines, hp, logError)) match { + histFun(CHistory(lines, hp)) match { case Some(commands) => commands foreach println //printing is more appropriate than logging (commands ::: s).continue @@ -401,7 +393,8 @@ object BasicCommands { } def delegateToAlias(name: String, orElse: Parser[() => State])( - state: State): Parser[() => State] = + state: State + ): Parser[() => State] = aliases(state, (nme, _) => nme == name).headOption match { case None => orElse case Some((n, v)) => aliasBody(n, v)(state) diff --git a/main-command/src/main/scala/sbt/BasicKeys.scala b/main-command/src/main/scala/sbt/BasicKeys.scala index 1570d392b..d1fc63f7a 100644 --- a/main-command/src/main/scala/sbt/BasicKeys.scala +++ b/main-command/src/main/scala/sbt/BasicKeys.scala @@ -10,6 +10,7 @@ package sbt import java.io.File import sbt.internal.util.AttributeKey import sbt.internal.inc.classpath.ClassLoaderCache +import sbt.internal.server.ServerHandler import sbt.librarymanagement.ModuleID import sbt.util.Level @@ -17,11 +18,13 @@ 
object BasicKeys { val historyPath = AttributeKey[Option[File]]( "history", "The location where command line history is persisted.", - 40) + 40 + ) val shellPrompt = AttributeKey[State => String]( "shell-prompt", "The function that constructs the command prompt from the current build state.", - 10000) + 10000 + ) val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000) val serverPort = AttributeKey[Int]("server-port", "The port number used by server command.", 10000) @@ -30,20 +33,32 @@ object BasicKeys { AttributeKey[String]("serverHost", "The host used by server command.", 10000) val serverAuthentication = - AttributeKey[Set[ServerAuthentication]]("serverAuthentication", - "Method of authenticating server command.", - 10000) + AttributeKey[Set[ServerAuthentication]]( + "serverAuthentication", + "Method of authenticating server command.", + 10000 + ) val serverConnectionType = - AttributeKey[ConnectionType]("serverConnectionType", - "The wire protocol for the server command.", - 10000) + AttributeKey[ConnectionType]( + "serverConnectionType", + "The wire protocol for the server command.", + 10000 + ) + + val fullServerHandlers = + AttributeKey[Seq[ServerHandler]]( + "fullServerHandlers", + "Combines default server handlers and user-defined handlers.", + 10000 + ) val autoStartServer = AttributeKey[Boolean]( "autoStartServer", "If true, the sbt server will startup automatically during interactive sessions.", - 10000) + 10000 + ) // Unlike other BasicKeys, this is not used directly as a setting key, // and severLog / logLevel is used instead. 
@@ -56,23 +71,28 @@ object BasicKeys { private[sbt] val interactive = AttributeKey[Boolean]( "interactive", "True if commands are currently being entered from an interactive environment.", - 10) + 10 + ) private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache]( "class-loader-cache", "Caches class loaders based on the classpath entries and last modified times.", - 10) + 10 + ) private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]]( "on-failure-stack", "Stack that remembers on-failure handlers.", - 10) + 10 + ) private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean]( "explicit-global-log-levels", "True if the global logging levels were explicitly set by the user.", - 10) + 10 + ) private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]]( "templateResolverInfos", "List of template resolver infos.", - 1000) + 1000 + ) } case class TemplateResolverInfo(module: ModuleID, implementationClass: String) diff --git a/main-command/src/main/scala/sbt/Command.scala b/main-command/src/main/scala/sbt/Command.scala index 53158daa4..49cda7fec 100644 --- a/main-command/src/main/scala/sbt/Command.scala +++ b/main-command/src/main/scala/sbt/Command.scala @@ -67,18 +67,21 @@ object Command { new SimpleCommand(name, help, parser, AttributeMap.empty) def make(name: String, briefHelp: (String, String), detail: String)( - parser: State => Parser[() => State]): Command = + parser: State => Parser[() => State] + ): Command = make(name, Help(name, briefHelp, detail))(parser) // General command construction /** Construct a command with the given name, parser and effect. 
*/ - def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])( - effect: (State, T) => State): Command = + def apply[T](name: String, help: Help = Help.empty)( + parser: State => Parser[T] + )(effect: (State, T) => State): Command = make(name, help)(applyEffect(parser)(effect)) def apply[T](name: String, briefHelp: (String, String), detail: String)( - parser: State => Parser[T])(effect: (State, T) => State): Command = + parser: State => Parser[T] + )(effect: (State, T) => State): Command = apply(name, Help(name, briefHelp, detail))(parser)(effect) // No-argument command construction @@ -97,18 +100,21 @@ object Command { make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state))) def single(name: String, briefHelp: (String, String), detail: String)( - f: (State, String) => State): Command = + f: (State, String) => State + ): Command = single(name, Help(name, briefHelp, detail))(f) // Multi-argument command construction /** Construct a multi-argument command with the given name, tab completion display and effect. 
*/ def args(name: String, display: String, help: Help = Help.empty)( - f: (State, Seq[String]) => State): Command = + f: (State, Seq[String]) => State + ): Command = make(name, help)(state => spaceDelimited(display) map apply1(f, state)) def args(name: String, briefHelp: (String, String), detail: String, display: String)( - f: (State, Seq[String]) => State): Command = + f: (State, Seq[String]) => State + ): Command = args(name, display, Help(name, briefHelp, detail))(f) // create ArbitraryCommand @@ -120,7 +126,8 @@ object Command { customHelp(parser, const(help)) def arb[T](parser: State => Parser[T], help: Help = Help.empty)( - effect: (State, T) => State): Command = + effect: (State, T) => State + ): Command = custom(applyEffect(parser)(effect), help) // misc Command object utilities @@ -129,8 +136,9 @@ object Command { def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p map (t => () => f(t)) - def applyEffect[T](parser: State => Parser[T])( - effect: (State, T) => State): State => Parser[() => State] = + def applyEffect[T]( + parser: State => Parser[T] + )(effect: (State, T) => State): State => Parser[() => State] = s => applyEffect(parser(s))(t => effect(s, t)) def combine(cmds: Seq[Command]): State => Parser[() => State] = { @@ -140,7 +148,8 @@ object Command { } private[this] def separateCommands( - cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) = + cmds: Seq[Command] + ): (Seq[SimpleCommand], Seq[ArbitraryCommand]) = Util.separate(cmds) { case s: SimpleCommand => Left(s); case a: ArbitraryCommand => Right(a) } private[this] def apply1[A, B, C](f: (A, B) => C, a: A): B => () => C = b => () => f(a, b) @@ -155,13 +164,26 @@ object Command { } def simpleParser( - commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] = + commandMap: Map[String, State => Parser[() => State]] + ): State => Parser[() => State] = state => - token(OpOrID examples commandMap.keys.toSet) flatMap (id => - (commandMap 
get id) match { - case None => failure(invalidValue("command", commandMap.keys)(id)) - case Some(c) => c(state) - }) + token(OpOrID examples commandMap.keys.toSet) flatMap ( + id => + (commandMap get id) match { + case None => failure(invalidValue("command", commandMap.keys)(id)) + case Some(c) => c(state) + } + ) + + def process(command: String, state: State): State = { + val parser = combine(state.definedCommands) + parse(command, parser(state)) match { + case Right(s) => s() // apply command. command side effects happen here + case Left(errMsg) => + state.log error errMsg + state.fail + } + } def invalidValue(label: String, allowed: Iterable[String])(value: String): String = s"Not a valid $label: $value" + similar(value, allowed) @@ -171,22 +193,25 @@ object Command { if (suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")") } - def suggestions(a: String, - bs: Seq[String], - maxDistance: Int = 3, - maxSuggestions: Int = 3): Seq[String] = + def suggestions( + a: String, + bs: Seq[String], + maxDistance: Int = 3, + maxSuggestions: Int = 3 + ): Seq[String] = bs map (b => (b, distance(a, b))) filter (_._2 <= maxDistance) sortBy (_._2) take (maxSuggestions) map (_._1) def distance(a: String, b: String): Int = - EditDistance.levenshtein(a, - b, - insertCost = 1, - deleteCost = 1, - subCost = 2, - transposeCost = 1, - matchCost = -1, - caseCost = 1, - transpositions = true) + EditDistance.levenshtein( + a, + b, + insertCost = 1, + deleteCost = 1, + subCost = 2, + matchCost = -1, + caseCost = 1, + transpositions = true + ) def spacedAny(name: String): Parser[String] = spacedC(name, any) @@ -222,9 +247,11 @@ object Help { def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String]): Help = apply(briefHelp, detailedHelp, Set.empty[String]) - def apply(briefHelp: Seq[(String, String)], - detailedHelp: Map[String, String], - more: Set[String]): Help = + def apply( + briefHelp: Seq[(String, String)], + detailedHelp: Map[String, String], + 
more: Set[String] + ): Help = new Help0(briefHelp, detailedHelp, more) def more(name: String, detailedHelp: String): Help = diff --git a/main-command/src/main/scala/sbt/MainControl.scala b/main-command/src/main/scala/sbt/MainControl.scala index e728dd765..e24c657e2 100644 --- a/main-command/src/main/scala/sbt/MainControl.scala +++ b/main-command/src/main/scala/sbt/MainControl.scala @@ -12,21 +12,23 @@ import java.io.File final case class Exit(code: Int) extends xsbti.Exit { require(code >= 0) } -final case class Reboot(scalaVersion: String, - argsList: Seq[String], - app: xsbti.ApplicationID, - baseDirectory: File) - extends xsbti.Reboot { +final case class Reboot( + scalaVersion: String, + argsList: Seq[String], + app: xsbti.ApplicationID, + baseDirectory: File +) extends xsbti.Reboot { def arguments = argsList.toArray } -final case class ApplicationID(groupID: String, - name: String, - version: String, - mainClass: String, - components: Seq[String], - crossVersionedValue: xsbti.CrossValue, - extra: Seq[File]) - extends xsbti.ApplicationID { +final case class ApplicationID( + groupID: String, + name: String, + version: String, + mainClass: String, + components: Seq[String], + crossVersionedValue: xsbti.CrossValue, + extra: Seq[File] +) extends xsbti.ApplicationID { def mainComponents = components.toArray def classpathExtra = extra.toArray def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled @@ -35,11 +37,13 @@ object ApplicationID { def apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID = apply(delegate).copy(version = newVersion) def apply(delegate: xsbti.ApplicationID): ApplicationID = - ApplicationID(delegate.groupID, - delegate.name, - delegate.version, - delegate.mainClass, - delegate.mainComponents, - delegate.crossVersionedValue, - delegate.classpathExtra) + ApplicationID( + delegate.groupID, + delegate.name, + delegate.version, + delegate.mainClass, + delegate.mainComponents, + delegate.crossVersionedValue, + 
delegate.classpathExtra + ) } diff --git a/main-command/src/main/scala/sbt/State.scala b/main-command/src/main/scala/sbt/State.scala index 9d0e401c6..f79b9d70d 100644 --- a/main-command/src/main/scala/sbt/State.scala +++ b/main-command/src/main/scala/sbt/State.scala @@ -238,14 +238,16 @@ object State { def process(f: (Exec, State) => State): State = { def runCmd(cmd: Exec, remainingCommands: List[Exec]) = { log.debug(s"> $cmd") - f(cmd, - s.copy(remainingCommands = remainingCommands, - currentCommand = Some(cmd), - history = cmd :: s.history)) + val s1 = s.copy( + remainingCommands = remainingCommands, + currentCommand = Some(cmd), + history = cmd :: s.history, + ) + f(cmd, s1) } s.remainingCommands match { - case List() => exit(true) - case List(x, xs @ _*) => runCmd(x, xs.toList) + case Nil => exit(true) + case x :: xs => runCmd(x, xs) } } def :::(newCommands: List[String]): State = ++:(newCommands map { Exec(_, s.source) }) @@ -283,10 +285,7 @@ object State { def log = s.globalLogging.full def handleError(t: Throwable): State = handleException(t, s, log) def fail = { - import BasicCommandStrings.Compat.{ FailureWall => CompatFailureWall } - val remaining = - s.remainingCommands.dropWhile(c => - c.commandLine != FailureWall && c.commandLine != CompatFailureWall) + val remaining = s.remainingCommands.dropWhile(c => c.commandLine != FailureWall) if (remaining.isEmpty) applyOnFailure(s, Nil, exit(ok = false)) else @@ -321,7 +320,7 @@ object State { import ExceptionCategory._ - private[sbt] def handleException(t: Throwable, s: State, log: Logger): State = { + private[this] def handleException(t: Throwable, s: State, log: Logger): State = { ExceptionCategory(t) match { case AlreadyHandled => () case m: MessageOnly => log.error(m.message) diff --git a/main-command/src/main/scala/sbt/Watched.scala b/main-command/src/main/scala/sbt/Watched.scala index 4daea053b..053e15082 100644 --- a/main-command/src/main/scala/sbt/Watched.scala +++ 
b/main-command/src/main/scala/sbt/Watched.scala @@ -23,8 +23,8 @@ import scala.util.Properties trait Watched { - /** The files watched when an action is run with a preceeding ~ */ - def watchSources(s: State): Seq[Watched.WatchSource] = Nil + /** The files watched when an action is run with a proceeding ~ */ + def watchSources(@deprecated("unused", "") s: State): Seq[Watched.WatchSource] = Nil def terminateWatch(key: Int): Boolean = Watched.isEnter(key) /** @@ -50,8 +50,13 @@ trait Watched { } object Watched { - val defaultWatchingMessage - : WatchState => String = _.count + ". Waiting for source changes... (press enter to interrupt)" + val defaultWatchingMessage: WatchState => String = ws => + s"${ws.count}. Waiting for source changes... (press enter to interrupt)" + + def projectWatchingMessage(projectId: String): WatchState => String = + ws => + s"${ws.count}. Waiting for source changes in project $projectId... (press enter to interrupt)" + val defaultTriggeredMessage: WatchState => String = const("") val clearWhenTriggered: WatchState => String = const(clearScreen) def clearScreen: String = "\u001b[2J\u001b[0;0H" @@ -76,8 +81,8 @@ object Watched { * @param base The base directory from which to include files. * @return An instance of `Source`. 
*/ - def apply(base: File): Source = - apply(base, AllPassFilter, NothingFilter) + def apply(base: File): Source = apply(base, AllPassFilter, NothingFilter) + } private[this] class AWatched extends Watched @@ -111,11 +116,13 @@ object Watched { (ClearOnFailure :: next :: FailureWall :: repeat :: s) .put( ContinuousEventMonitor, - EventMonitor(WatchState.empty(watched.watchService(), watched.watchSources(s)), - watched.pollInterval, - watched.antiEntropy, - shouldTerminate, - logger) + EventMonitor( + WatchState.empty(watched.watchService(), watched.watchSources(s)), + watched.pollInterval, + watched.antiEntropy, + shouldTerminate, + logger + ) ) case Some(eventMonitor) => printIfDefined(watched watchingMessage eventMonitor.state) @@ -123,8 +130,9 @@ object Watched { catch { case e: Exception => log.error( - "Error occurred obtaining files to watch. Terminating continuous execution...") - State.handleException(e, s, log) + "Error occurred obtaining files to watch. Terminating continuous execution..." + ) + s.handleError(e) false } if (triggered) { @@ -139,16 +147,20 @@ object Watched { } val ContinuousEventMonitor = - AttributeKey[EventMonitor]("watch event monitor", - "Internal: maintains watch state and monitor threads.") + AttributeKey[EventMonitor]( + "watch event monitor", + "Internal: maintains watch state and monitor threads." + ) @deprecated("Superseded by ContinuousEventMonitor", "1.1.5") val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.") @deprecated("Superseded by ContinuousEventMonitor", "1.1.5") val ContinuousWatchService = - AttributeKey[WatchService]("watch service", - "Internal: tracks watch service for continuous execution.") + AttributeKey[WatchService]( + "watch service", + "Internal: tracks watch service for continuous execution." 
+ ) val Configuration = AttributeKey[Watched]("watched-configuration", "Configures continuous execution.") diff --git a/main-command/src/main/scala/sbt/internal/CommandChannel.scala b/main-command/src/main/scala/sbt/internal/CommandChannel.scala index 4fbb9a299..54c65cfd3 100644 --- a/main-command/src/main/scala/sbt/internal/CommandChannel.scala +++ b/main-command/src/main/scala/sbt/internal/CommandChannel.scala @@ -19,12 +19,11 @@ import sjsonnew.JsonFormat */ abstract class CommandChannel { private val commandQueue: ConcurrentLinkedQueue[Exec] = new ConcurrentLinkedQueue() - def append(exec: Exec): Boolean = - commandQueue.add(exec) + def append(exec: Exec): Boolean = commandQueue.add(exec) def poll: Option[Exec] = Option(commandQueue.poll) def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit - def publishEvent[A: JsonFormat](event: A): Unit + final def publishEvent[A: JsonFormat](event: A): Unit = publishEvent(event, None) def publishEventMessage(event: EventMessage): Unit def publishBytes(bytes: Array[Byte]): Unit def shutdown(): Unit diff --git a/main-command/src/main/scala/sbt/internal/ConsoleChannel.scala b/main-command/src/main/scala/sbt/internal/ConsoleChannel.scala index 1ac026e2d..3f039f270 100644 --- a/main-command/src/main/scala/sbt/internal/ConsoleChannel.scala +++ b/main-command/src/main/scala/sbt/internal/ConsoleChannel.scala @@ -40,8 +40,6 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit = () - def publishEvent[A: JsonFormat](event: A): Unit = () - def publishEventMessage(event: EventMessage): Unit = event match { case e: ConsolePromptEvent => @@ -50,7 +48,7 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel case _ => val x = makeAskUserThread(e.state) askUserThread = Some(x) - x.start + x.start() } case e: ConsoleUnpromptEvent => e.lastSource match { @@ -70,7 +68,7 @@ private[sbt] final 
class ConsoleChannel(val name: String) extends CommandChannel def shutdown(): Unit = askUserThread match { case Some(x) if x.isAlive => - x.interrupt + x.interrupt() askUserThread = None case _ => () } diff --git a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala index ff5c5661b..d3cd2dc0e 100644 --- a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala +++ b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala @@ -126,6 +126,7 @@ object NetworkClient { def run(arguments: List[String]): Unit = try { new NetworkClient(arguments) + () } catch { case NonFatal(e) => println(e.getMessage) } diff --git a/main-command/src/main/scala/sbt/internal/server/Server.scala b/main-command/src/main/scala/sbt/internal/server/Server.scala index c2bccb886..c3d130470 100644 --- a/main-command/src/main/scala/sbt/internal/server/Server.scala +++ b/main-command/src/main/scala/sbt/internal/server/Server.scala @@ -40,9 +40,11 @@ private[sbt] object Server { with TokenFileFormats object JsonProtocol extends JsonProtocol - def start(connection: ServerConnection, - onIncomingSocket: (Socket, ServerInstance) => Unit, - log: Logger): ServerInstance = + def start( + connection: ServerConnection, + onIncomingSocket: (Socket, ServerInstance) => Unit, + log: Logger + ): ServerInstance = new ServerInstance { self => import connection._ val running = new AtomicBoolean(false) @@ -67,7 +69,8 @@ private[sbt] object Server { "socket file absolute path too long; " + "either switch to another connection type " + "or define a short \"SBT_GLOBAL_SERVER_DIR\" value. 
" + - s"Current path: ${path}") + s"Current path: ${path}" + ) tryClient(new UnixDomainSocket(path)) prepareSocketfile() addServerError(new UnixDomainServerSocket(path)) @@ -103,7 +106,7 @@ private[sbt] object Server { def tryClient(f: => Socket): Unit = { if (portfile.exists) { Try { f } match { - case Failure(e) => () + case Failure(_) => () case Success(socket) => socket.close() throw new AlreadyRunningException() diff --git a/main-command/src/main/scala/sbt/internal/server/ServerHandler.scala b/main-command/src/main/scala/sbt/internal/server/ServerHandler.scala new file mode 100644 index 000000000..61969a466 --- /dev/null +++ b/main-command/src/main/scala/sbt/internal/server/ServerHandler.scala @@ -0,0 +1,73 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt +package internal +package server + +import sjsonnew.JsonFormat +import sbt.internal.protocol._ +import sbt.util.Logger +import sbt.protocol.{ SettingQuery => Q } + +/** + * ServerHandler allows plugins to extend sbt server. + * It's a wrapper around curried function ServerCallback => JsonRpcRequestMessage => Unit. 
+ */ +final class ServerHandler(val handler: ServerCallback => ServerIntent) { + override def toString: String = s"Serverhandler(...)" +} + +object ServerHandler { + def apply(handler: ServerCallback => ServerIntent): ServerHandler = + new ServerHandler(handler) + + lazy val fallback: ServerHandler = ServerHandler({ handler => + ServerIntent( + { case x => handler.log.debug(s"Unhandled notification received: ${x.method}: $x") }, + { case x => handler.log.debug(s"Unhandled request received: ${x.method}: $x") } + ) + }) +} + +final class ServerIntent( + val onRequest: PartialFunction[JsonRpcRequestMessage, Unit], + val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit] +) { + override def toString: String = s"ServerIntent(...)" +} + +object ServerIntent { + def apply( + onRequest: PartialFunction[JsonRpcRequestMessage, Unit], + onNotification: PartialFunction[JsonRpcNotificationMessage, Unit] + ): ServerIntent = + new ServerIntent(onRequest, onNotification) + + def request(onRequest: PartialFunction[JsonRpcRequestMessage, Unit]): ServerIntent = + new ServerIntent(onRequest, PartialFunction.empty) + + def notify(onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]): ServerIntent = + new ServerIntent(PartialFunction.empty, onNotification) +} + +/** + * Interface to invoke JSON-RPC response. 
+ */ +trait ServerCallback { + def jsonRpcRespond[A: JsonFormat](event: A, execId: Option[String]): Unit + def jsonRpcRespondError(execId: Option[String], code: Long, message: String): Unit + def jsonRpcNotify[A: JsonFormat](method: String, params: A): Unit + def appendExec(exec: Exec): Boolean + def log: Logger + def name: String + + private[sbt] def authOptions: Set[ServerAuthentication] + private[sbt] def authenticate(token: String): Boolean + private[sbt] def setInitialized(value: Boolean): Unit + private[sbt] def onSettingQuery(execId: Option[String], req: Q): Unit +} diff --git a/main-command/src/main/scala/xsbt/IPC.scala b/main-command/src/main/scala/xsbt/IPC.scala index 2b9750438..c964bca7b 100644 --- a/main-command/src/main/scala/xsbt/IPC.scala +++ b/main-command/src/main/scala/xsbt/IPC.scala @@ -10,47 +10,57 @@ package xsbt import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter } import java.net.{ InetAddress, ServerSocket, Socket } +import scala.annotation.tailrec import scala.util.control.NonFatal object IPC { private val portMin = 1025 private val portMax = 65536 - private val loopback = InetAddress.getByName(null) // loopback + private val loopback = InetAddress.getByName(null) - def client[T](port: Int)(f: IPC => T): T = - ipc(new Socket(loopback, port))(f) + def client[T](port: Int)(f: IPC => T): T = ipc(new Socket(loopback, port))(f) def pullServer[T](f: Server => T): T = { val server = makeServer - try { f(new Server(server)) } finally { server.close() } + try f(new Server(server)) + finally server.close() } + def unmanagedServer: Server = new Server(makeServer) + def makeServer: ServerSocket = { val random = new java.util.Random def nextPort = random.nextInt(portMax - portMin + 1) + portMin + def createServer(attempts: Int): ServerSocket = - if (attempts > 0) - try { new ServerSocket(nextPort, 1, loopback) } catch { - case NonFatal(_) => createServer(attempts - 1) - } else - sys.error("Could not connect to socket: 
maximum attempts exceeded") + if (attempts > 0) { + try new ServerSocket(nextPort, 1, loopback) + catch { case NonFatal(_) => createServer(attempts - 1) } + } else sys.error("Could not connect to socket: maximum attempts exceeded") + createServer(10) } + def server[T](f: IPC => Option[T]): T = serverImpl(makeServer, f) + def server[T](port: Int)(f: IPC => Option[T]): T = serverImpl(new ServerSocket(port, 1, loopback), f) + private def serverImpl[T](server: ServerSocket, f: IPC => Option[T]): T = { - def listen(): T = { + @tailrec def listen(): T = { ipc(server.accept())(f) match { case Some(done) => done case None => listen() } } - try { listen() } finally { server.close() } + try listen() + finally server.close() } + private def ipc[T](s: Socket)(f: IPC => T): T = - try { f(new IPC(s)) } finally { s.close() } + try f(new IPC(s)) + finally s.close() final class Server private[IPC] (s: ServerSocket) { def port = s.getLocalPort @@ -59,6 +69,7 @@ object IPC { def connection[T](f: IPC => T): T = IPC.ipc(s.accept())(f) } } + final class IPC private (s: Socket) { def port = s.getLocalPort private val in = new BufferedReader(new InputStreamReader(s.getInputStream)) diff --git a/main-settings/src/main/scala/sbt/Append.scala b/main-settings/src/main/scala/sbt/Append.scala index c9116de97..fb08b5e33 100644 --- a/main-settings/src/main/scala/sbt/Append.scala +++ b/main-settings/src/main/scala/sbt/Append.scala @@ -18,12 +18,14 @@ import sbt.io.{ AllPassFilter, NothingFilter } object Append { @implicitNotFound( - msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") + msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}" + ) trait Value[A, B] { def appendValue(a: A, b: B): A } @implicitNotFound( - msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") + msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}" + ) trait 
Values[A, -B] { def appendValues(a: A, b: B): A } diff --git a/main-settings/src/main/scala/sbt/Def.scala b/main-settings/src/main/scala/sbt/Def.scala index cfde273f9..0fd53e281 100644 --- a/main-settings/src/main/scala/sbt/Def.scala +++ b/main-settings/src/main/scala/sbt/Def.scala @@ -27,42 +27,60 @@ object Def extends Init[Scope] with TaskMacroExtra { val resolvedScoped = SettingKey[ScopedKey[_]]( "resolved-scoped", "The ScopedKey for the referencing setting or task.", - KeyRanks.DSetting) + KeyRanks.DSetting + ) private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]]( "task-definition-key", "Internal: used to map a task back to its ScopedKey.", - Invisible) + Invisible + ) lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None) def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] = Show[ScopedKey[_]]((key: ScopedKey[_]) => displayFull(key, keyNameColor)) + @deprecated("Use showRelativeKey2 which doesn't take the unused multi param", "1.1.1") def showRelativeKey( current: ProjectRef, multi: Boolean, keyNameColor: Option[String] = None ): Show[ScopedKey[_]] = - Show[ScopedKey[_]]( - key => - Scope.display( - key.scope, - withColor(key.key.label, keyNameColor), - ref => displayRelative(current, multi, ref) - )) + showRelativeKey2(current, keyNameColor) - def showBuildRelativeKey( - currentBuild: URI, - multi: Boolean, - keyNameColor: Option[String] = None + def showRelativeKey2( + current: ProjectRef, + keyNameColor: Option[String] = None, ): Show[ScopedKey[_]] = Show[ScopedKey[_]]( key => Scope.display( key.scope, withColor(key.key.label, keyNameColor), - ref => displayBuildRelative(currentBuild, multi, ref) - )) + ref => displayRelative2(current, ref) + ) + ) + + @deprecated("Use showBuildRelativeKey2 which doesn't take the unused multi param", "1.1.1") + def showBuildRelativeKey( + currentBuild: URI, + multi: Boolean, + keyNameColor: Option[String] = None, + ): Show[ScopedKey[_]] = + showBuildRelativeKey2(currentBuild, keyNameColor) + + 
def showBuildRelativeKey2( + currentBuild: URI, + keyNameColor: Option[String] = None, + ): Show[ScopedKey[_]] = + Show[ScopedKey[_]]( + key => + Scope.display( + key.scope, + withColor(key.key.label, keyNameColor), + ref => displayBuildRelative(currentBuild, ref) + ) + ) /** * Returns a String expression for the given [[Reference]] (BuildRef, [[ProjectRef]], etc) @@ -71,17 +89,22 @@ object Def extends Init[Scope] with TaskMacroExtra { def displayRelativeReference(current: ProjectRef, project: Reference): String = displayRelative(current, project, false) - @deprecated("Use displayRelativeReference", "1.1.0") + @deprecated("Use displayRelative2 which doesn't take the unused multi param", "1.1.1") def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String = + displayRelative2(current, project) + + def displayRelative2(current: ProjectRef, project: Reference): String = displayRelative(current, project, true) /** * Constructs the String of a given [[Reference]] relative to current. * Note that this no longer takes "multi" parameter, and omits the subproject id at all times. 
*/ - private[sbt] def displayRelative(current: ProjectRef, - project: Reference, - trailingSlash: Boolean): String = { + private[sbt] def displayRelative( + current: ProjectRef, + project: Reference, + trailingSlash: Boolean + ): String = { val trailing = if (trailingSlash) " /" else "" project match { case BuildRef(current.build) => "ThisBuild" + trailing @@ -91,7 +114,11 @@ object Def extends Init[Scope] with TaskMacroExtra { } } + @deprecated("Use variant without multi", "1.1.1") def displayBuildRelative(currentBuild: URI, multi: Boolean, project: Reference): String = + displayBuildRelative(currentBuild, project) + + def displayBuildRelative(currentBuild: URI, project: Reference): String = project match { case BuildRef(`currentBuild`) => "ThisBuild /" case ProjectRef(`currentBuild`, x) => x + " /" @@ -124,11 +151,14 @@ object Def extends Init[Scope] with TaskMacroExtra { else None) orElse s.dependencies .find(k => k.scope != ThisScope) - .map(k => - s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}") + .map( + k => + s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}" + ) override def intersect(s1: Scope, s2: Scope)( - implicit delegates: Scope => Seq[Scope]): Option[Scope] = + implicit delegates: Scope => Seq[Scope] + ): Option[Scope] = if (s2 == GlobalScope) Some(s1) // s1 is more specific else if (s1 == GlobalScope) Some(s2) // s2 is more specific else super.intersect(s1, s2) @@ -173,16 +203,31 @@ object Def extends Init[Scope] with TaskMacroExtra { // The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to // be used in task and setting macros as inputs with an ultimate result of type T - implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ??? - implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ??? - implicit def macroValueIInT[T](in: Initialize[InputTask[T]]): InputEvaluated[T] = ??? 
- implicit def taskMacroValueIT[T](in: Initialize[Task[T]]): MacroTaskValue[T] = ??? - implicit def macroPrevious[T](in: TaskKey[T]): MacroPrevious[T] = ??? + implicit def macroValueI[T](@deprecated("unused", "") in: Initialize[T]): MacroValue[T] = ??? - // The following conversions enable the types Parser[T], Initialize[Parser[T]], and Initialize[State => Parser[T]] to - // be used in the inputTask macro as an input with an ultimate result of type T - implicit def parserInitToInput[T](p: Initialize[Parser[T]]): ParserInput[T] = ??? - implicit def parserInitStateToInput[T](p: Initialize[State => Parser[T]]): ParserInput[T] = ??? + implicit def macroValueIT[T](@deprecated("unused", "") in: Initialize[Task[T]]): MacroValue[T] = + ??? + + implicit def macroValueIInT[T]( + @deprecated("unused", "") in: Initialize[InputTask[T]] + ): InputEvaluated[T] = ??? + + implicit def taskMacroValueIT[T]( + @deprecated("unused", "") in: Initialize[Task[T]] + ): MacroTaskValue[T] = ??? + + implicit def macroPrevious[T](@deprecated("unused", "") in: TaskKey[T]): MacroPrevious[T] = ??? + + // The following conversions enable the types Parser[T], Initialize[Parser[T]], and + // Initialize[State => Parser[T]] to be used in the inputTask macro as an input with an ultimate + // result of type T + implicit def parserInitToInput[T]( + @deprecated("unused", "") p: Initialize[Parser[T]] + ): ParserInput[T] = ??? + + implicit def parserInitStateToInput[T]( + @deprecated("unused", "") p: Initialize[State => Parser[T]] + ): ParserInput[T] = ??? 
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T] def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T] @@ -190,27 +235,43 @@ object Def extends Init[Scope] with TaskMacroExtra { private[sbt] def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) = (TaskKey[T](name, description, DTask), dummyTask(name)) + private[sbt] def dummyTask[T](name: String): Task[T] = { import std.TaskExtra.{ task => newTask, _ } val base: Task[T] = newTask( - sys.error("Dummy task '" + name + "' did not get converted to a full task.")) named name + sys.error("Dummy task '" + name + "' did not get converted to a full task.") + ) named name base.copy(info = base.info.set(isDummyTask, true)) } + private[sbt] def isDummy(t: Task[_]): Boolean = t.info.attributes.get(isDummyTask) getOrElse false + private[sbt] val isDummyTask = AttributeKey[Boolean]( "is-dummy-task", "Internal: used to identify dummy tasks. sbt injects values for these tasks at the start of task execution.", - Invisible) + Invisible + ) + private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.") + private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]]( "streams-manager", - "Streams manager, which provides streams for different contexts.") + "Streams manager, which provides streams for different contexts." + ) } -// these need to be mixed into the sbt package object because the target doesn't involve Initialize or anything in Def + +// these need to be mixed into the sbt package object +// because the target doesn't involve Initialize or anything in Def trait TaskMacroExtra { - implicit def macroValueT[T](in: Task[T]): std.MacroValue[T] = ??? - implicit def macroValueIn[T](in: InputTask[T]): std.InputEvaluated[T] = ??? - implicit def parserToInput[T](in: Parser[T]): std.ParserInput[T] = ??? 
- implicit def stateParserToInput[T](in: State => Parser[T]): std.ParserInput[T] = ??? + implicit def macroValueT[T](@deprecated("unused", "") in: Task[T]): std.MacroValue[T] = ??? + + implicit def macroValueIn[T](@deprecated("unused", "") in: InputTask[T]): std.InputEvaluated[T] = + ??? + + implicit def parserToInput[T](@deprecated("unused", "") in: Parser[T]): std.ParserInput[T] = ??? + + implicit def stateParserToInput[T]( + @deprecated("unused", "") in: State => Parser[T] + ): std.ParserInput[T] = ??? } diff --git a/main-settings/src/main/scala/sbt/DelegateIndex.scala b/main-settings/src/main/scala/sbt/DelegateIndex.scala index a5a6f64c7..4915a7886 100644 --- a/main-settings/src/main/scala/sbt/DelegateIndex.scala +++ b/main-settings/src/main/scala/sbt/DelegateIndex.scala @@ -26,6 +26,8 @@ private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) exte case None => Select(conf) :: Zero :: Nil } } -private final class ProjectDelegates(val ref: ProjectRef, - val refs: Seq[ScopeAxis[ResolvedReference]], - val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]]) +private final class ProjectDelegates( + val ref: ProjectRef, + val refs: Seq[ScopeAxis[ResolvedReference]], + val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]] +) diff --git a/main-settings/src/main/scala/sbt/InputTask.scala b/main-settings/src/main/scala/sbt/InputTask.scala index fbdb4a221..c10315d0c 100644 --- a/main-settings/src/main/scala/sbt/InputTask.scala +++ b/main-settings/src/main/scala/sbt/InputTask.scala @@ -22,13 +22,15 @@ final class InputTask[T] private (val parser: State => Parser[Task[T]]) { new InputTask[T](s => Parser(parser(s))(in)) def fullInput(in: String): InputTask[T] = - new InputTask[T](s => - Parser.parse(in, parser(s)) match { - case Right(v) => Parser.success(v) - case Left(msg) => - val indented = msg.lines.map(" " + _).mkString("\n") - Parser.failure(s"Invalid programmatic input:\n$indented") - }) + new InputTask[T]( + s => + Parser.parse(in, parser(s)) match 
{ + case Right(v) => Parser.success(v) + case Left(msg) => + val indented = msg.lines.map(" " + _).mkString("\n") + Parser.failure(s"Invalid programmatic input:\n$indented") + } + ) } object InputTask { @@ -38,19 +40,28 @@ object InputTask { import std.FullInstance._ def toTask(in: String): Initialize[Task[T]] = flatten( - (Def.stateKey zipWith i)((sTask, it) => - sTask map (s => - Parser.parse(in, it.parser(s)) match { - case Right(t) => Def.value(t) - case Left(msg) => - val indented = msg.lines.map(" " + _).mkString("\n") - sys.error(s"Invalid programmatic input:\n$indented") - })) + (Def.stateKey zipWith i)( + (sTask, it) => + sTask map ( + s => + Parser.parse(in, it.parser(s)) match { + case Right(t) => Def.value(t) + case Left(msg) => + val indented = msg.lines.map(" " + _).mkString("\n") + sys.error(s"Invalid programmatic input:\n$indented") + } + ) + ) ) } - implicit def inputTaskParsed[T](in: InputTask[T]): std.ParserInputTask[T] = ??? - implicit def inputTaskInitParsed[T](in: Initialize[InputTask[T]]): std.ParserInputTask[T] = ??? + implicit def inputTaskParsed[T]( + @deprecated("unused", "") in: InputTask[T] + ): std.ParserInputTask[T] = ??? + + implicit def inputTaskInitParsed[T]( + @deprecated("unused", "") in: Initialize[InputTask[T]] + ): std.ParserInputTask[T] = ??? 
def make[T](p: State => Parser[Task[T]]): InputTask[T] = new InputTask[T](p) @@ -62,12 +73,14 @@ object InputTask { def free[I, T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c) - def separate[I, T](p: State => Parser[I])( - action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = + def separate[I, T]( + p: State => Parser[I] + )(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = separate(Def value p)(action) - def separate[I, T](p: Initialize[State => Parser[I]])( - action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = + def separate[I, T]( + p: Initialize[State => Parser[I]] + )(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = p.zipWith(action)((parser, act) => free(parser)(act)) /** Constructs an InputTask that accepts no user input. */ @@ -81,8 +94,9 @@ object InputTask { * a) a Parser constructed using other Settings, but not Tasks * b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing. 
*/ - def createDyn[I, T](p: Initialize[State => Parser[I]])( - action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] = + def createDyn[I, T]( + p: Initialize[State => Parser[I]] + )(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] = separate(p)(std.FullInstance.flattenFun[I, T](action)) /** A dummy parser that consumes no input and produces nothing useful (unit).*/ @@ -98,8 +112,9 @@ object InputTask { i(Types.const) @deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0") - def apply[I, T](p: Initialize[State => Parser[I]])( - action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = { + def apply[I, T]( + p: Initialize[State => Parser[I]] + )(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = { val dummyKey = localKey[Task[I]] val (marker, dummy) = dummyTask[I] val it = action(TaskKey(dummyKey)) mapConstant subResultForDummy(dummyKey, dummy) @@ -136,9 +151,11 @@ object InputTask { (key, t) } - private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]], - value: I, - task: Task[T]): Task[T] = { + private[this] def subForDummy[I, T]( + marker: AttributeKey[Option[I]], + value: I, + task: Task[T] + ): Task[T] = { val seen = new java.util.IdentityHashMap[Task[_], Task[_]] lazy val f: Task ~> Task = new (Task ~> Task) { def apply[A](t: Task[A]): Task[A] = { diff --git a/main-settings/src/main/scala/sbt/Previous.scala b/main-settings/src/main/scala/sbt/Previous.scala index 62bfd705f..a23afbcbf 100644 --- a/main-settings/src/main/scala/sbt/Previous.scala +++ b/main-settings/src/main/scala/sbt/Previous.scala @@ -53,11 +53,13 @@ object Previous { private[sbt] val references = SettingKey[References]( "previous-references", "Collects all static references to previous values of tasks.", - KeyRanks.Invisible) + KeyRanks.Invisible + ) private[sbt] val cache = TaskKey[Previous]( "previous-cache", "Caches previous values of tasks read from disk for the 
duration of a task execution.", - KeyRanks.Invisible) + KeyRanks.Invisible + ) /** Records references to previous task value. This should be completely populated after settings finish loading. */ private[sbt] final class References { @@ -72,9 +74,11 @@ object Previous { } /** Persists values of tasks t where there is some task referencing it via t.previous. */ - private[sbt] def complete(referenced: References, - results: RMap[Task, Result], - streams: Streams): Unit = { + private[sbt] def complete( + referenced: References, + results: RMap[Task, Result], + streams: Streams + ): Unit = { val map = referenced.getReferences def impl[T](key: ScopedKey[_], result: T): Unit = for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) { diff --git a/main-settings/src/main/scala/sbt/Remove.scala b/main-settings/src/main/scala/sbt/Remove.scala index ad2a4a0fe..60c395050 100644 --- a/main-settings/src/main/scala/sbt/Remove.scala +++ b/main-settings/src/main/scala/sbt/Remove.scala @@ -11,12 +11,14 @@ import scala.annotation.implicitNotFound object Remove { @implicitNotFound( - msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}") + msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}" + ) trait Value[A, B] extends Any { def removeValue(a: A, b: B): A } @implicitNotFound( - msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}") + msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}" + ) trait Values[A, -B] extends Any { def removeValues(a: A, b: B): A } diff --git a/main-settings/src/main/scala/sbt/Scope.scala b/main-settings/src/main/scala/sbt/Scope.scala index 0bd3e27c6..4e8747d10 100644 --- a/main-settings/src/main/scala/sbt/Scope.scala +++ b/main-settings/src/main/scala/sbt/Scope.scala @@ -13,10 +13,12 @@ import sbt.internal.util.{ AttributeKey, AttributeMap, Dag } import sbt.io.IO -final case class Scope(project: 
ScopeAxis[Reference], - config: ScopeAxis[ConfigKey], - task: ScopeAxis[AttributeKey[_]], - extra: ScopeAxis[AttributeMap]) { +final case class Scope( + project: ScopeAxis[Reference], + config: ScopeAxis[ConfigKey], + task: ScopeAxis[AttributeKey[_]], + extra: ScopeAxis[AttributeMap] +) { def in(project: Reference, config: ConfigKey): Scope = copy(project = Select(project), config = Select(config)) def in(config: ConfigKey, task: AttributeKey[_]): Scope = @@ -106,17 +108,21 @@ object Scope { else IO.directoryURI(current resolve uri) - def resolveReference(current: URI, - rootProject: URI => String, - ref: Reference): ResolvedReference = + def resolveReference( + current: URI, + rootProject: URI => String, + ref: Reference + ): ResolvedReference = ref match { case br: BuildReference => resolveBuildRef(current, br) case pr: ProjectReference => resolveProjectRef(current, rootProject, pr) } - def resolveProjectRef(current: URI, - rootProject: URI => String, - ref: ProjectReference): ProjectRef = + def resolveProjectRef( + current: URI, + rootProject: URI => String, + ref: ProjectReference + ): ProjectRef = ref match { case LocalRootProject => ProjectRef(current, rootProject(current)) case LocalProject(id) => ProjectRef(current, id) @@ -164,10 +170,12 @@ object Scope { def displayMasked(scope: Scope, sep: String, mask: ScopeMask, showZeroConfig: Boolean): String = displayMasked(scope, sep, showProject, mask, showZeroConfig) - def displayMasked(scope: Scope, - sep: String, - showProject: Reference => String, - mask: ScopeMask): String = + def displayMasked( + scope: Scope, + sep: String, + showProject: Reference => String, + mask: ScopeMask + ): String = displayMasked(scope, sep, showProject, mask, false) /** @@ -177,11 +185,13 @@ object Scope { * Technically speaking an unspecified configuration axis defaults to * the scope delegation (first configuration defining the key, then Zero). 
*/ - def displayMasked(scope: Scope, - sep: String, - showProject: Reference => String, - mask: ScopeMask, - showZeroConfig: Boolean): String = { + def displayMasked( + scope: Scope, + sep: String, + showProject: Reference => String, + mask: ScopeMask, + showZeroConfig: Boolean + ): String = { import scope.{ project, config, task, extra } val zeroConfig = if (showZeroConfig) "Zero /" else "" val configPrefix = config.foldStrict(display, zeroConfig, "./") @@ -190,57 +200,68 @@ object Scope { val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")") if (scope == GlobalScope) "Global / " + sep + postfix else - mask.concatShow(appendSpace(projectPrefix(project, showProject)), - appendSpace(configPrefix), - appendSpace(taskPrefix), - sep, - postfix) + mask.concatShow( + appendSpace(projectPrefix(project, showProject)), + appendSpace(configPrefix), + appendSpace(taskPrefix), + sep, + postfix + ) } private[sbt] def appendSpace(s: String): String = if (s == "") "" else s + " " - // sbt 0.12 style - def display012StyleMasked(scope: Scope, - sep: String, - showProject: Reference => String, - mask: ScopeMask): String = { - import scope.{ project, config, task, extra } - val configPrefix = config.foldStrict(displayConfigKey012Style, "*:", ".:") - val taskPrefix = task.foldStrict(_.label + "::", "", ".::") - val extras = extra.foldStrict(_.entries.map(_.toString).toList, Nil, Nil) - val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")") - mask.concatShow(projectPrefix012Style(project, showProject012Style), - configPrefix, - taskPrefix, - sep, - postfix) - } - def equal(a: Scope, b: Scope, mask: ScopeMask): Boolean = (!mask.project || a.project == b.project) && (!mask.config || a.config == b.config) && (!mask.task || a.task == b.task) && (!mask.extra || a.extra == b.extra) - def projectPrefix(project: ScopeAxis[Reference], - show: Reference => String = showProject): String = + def projectPrefix( + project: ScopeAxis[Reference], + show: 
Reference => String = showProject + ): String = project.foldStrict(show, "Zero /", "./") - def projectPrefix012Style(project: ScopeAxis[Reference], - show: Reference => String = showProject): String = + def projectPrefix012Style( + project: ScopeAxis[Reference], + show: Reference => String = showProject + ): String = project.foldStrict(show, "*/", "./") def showProject = (ref: Reference) => Reference.display(ref) + " /" def showProject012Style = (ref: Reference) => Reference.display(ref) + "/" + @deprecated("No longer used", "1.1.3") def transformTaskName(s: String) = { val parts = s.split("-+") (parts.take(1) ++ parts.drop(1).map(_.capitalize)).mkString } + @deprecated("Use variant without extraInherit", "1.1.1") + def delegates[Proj]( + refs: Seq[(ProjectRef, Proj)], + configurations: Proj => Seq[ConfigKey], + resolve: Reference => ResolvedReference, + rootProject: URI => String, + projectInherit: ProjectRef => Seq[ProjectRef], + configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey], + taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], + extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap] + ): Scope => Seq[Scope] = + delegates( + refs, + configurations, + resolve, + rootProject, + projectInherit, + configInherit, + taskInherit, + ) + // *Inherit functions should be immediate delegates and not include argument itself. 
Transitivity will be provided by this method def delegates[Proj]( refs: Seq[(ProjectRef, Proj)], @@ -250,19 +271,27 @@ object Scope { projectInherit: ProjectRef => Seq[ProjectRef], configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey], taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], - extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap] ): Scope => Seq[Scope] = { val index = delegates(refs, configurations, projectInherit, configInherit) scope => - indexedDelegates(resolve, index, rootProject, taskInherit, extraInherit)(scope) + indexedDelegates(resolve, index, rootProject, taskInherit)(scope) } + @deprecated("Use variant without extraInherit", "1.1.1") def indexedDelegates( resolve: Reference => ResolvedReference, index: DelegateIndex, rootProject: URI => String, taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap] + )(rawScope: Scope): Seq[Scope] = + indexedDelegates(resolve, index, rootProject, taskInherit)(rawScope) + + def indexedDelegates( + resolve: Reference => ResolvedReference, + index: DelegateIndex, + rootProject: URI => String, + taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], )(rawScope: Scope): Seq[Scope] = { val scope = Scope.replaceThis(GlobalScope)(rawScope) @@ -319,27 +348,32 @@ object Scope { } private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])( projectInherit: ProjectRef => Seq[ProjectRef], - configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = { + configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey] + ): ProjectDelegates = { val refDelegates = withRawBuilds(linearize(Select(ref), false)(projectInherit)) val configs = confs map { c => axisDelegates(configInherit, ref, c) } new ProjectDelegates(ref, refDelegates, configs.toMap) } - def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T], - ref: ResolvedReference, - init: T): (T, Seq[ScopeAxis[T]]) = + def 
axisDelegates[T]( + direct: (ResolvedReference, T) => Seq[T], + ref: ResolvedReference, + init: T + ): (T, Seq[ScopeAxis[T]]) = (init, linearize(Select(init))(direct(ref, _))) def linearize[T](axis: ScopeAxis[T], appendZero: Boolean = true)( - inherit: T => Seq[T]): Seq[ScopeAxis[T]] = + inherit: T => Seq[T] + ): Seq[ScopeAxis[T]] = axis match { case Select(x) => topologicalSort[T](x, appendZero)(inherit) case Zero | This => if (appendZero) Zero :: Nil else Nil } def topologicalSort[T](node: T, appendZero: Boolean)( - dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = { + dependencies: T => Seq[T] + ): Seq[ScopeAxis[T]] = { val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply) if (appendZero) o ::: Zero :: Nil else o diff --git a/main-settings/src/main/scala/sbt/Structure.scala b/main-settings/src/main/scala/sbt/Structure.scala index 3c643f00d..4830cfc54 100644 --- a/main-settings/src/main/scala/sbt/Structure.scala +++ b/main-settings/src/main/scala/sbt/Structure.scala @@ -17,7 +17,18 @@ import sbt.Def.{ Initialize, KeyedInitialize, ScopedKey, Setting, setting } import std.TaskExtra.{ task => mktask, _ } /** An abstraction on top of Settings for build configuration and task definition. 
*/ -sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] } +sealed trait Scoped extends Equals { + def scope: Scope + val key: AttributeKey[_] + + override def equals(that: Any) = + (this eq that.asInstanceOf[AnyRef]) || (that match { + case that: Scoped => scope == that.scope && key == that.key && canEqual(that) + case _ => false + }) + + override def hashCode() = (scope, key).## +} /** A common type for SettingKey and TaskKey so that both can be used as inputs to tasks.*/ sealed trait ScopedTaskable[T] extends Scoped { @@ -95,6 +106,8 @@ sealed abstract class SettingKey[T] final def withRank(rank: Int): SettingKey[T] = SettingKey(AttributeKey.copyWithRank(key, rank)) + + def canEqual(that: Any): Boolean = that.isInstanceOf[SettingKey[_]] } /** @@ -163,6 +176,8 @@ sealed abstract class TaskKey[T] final def withRank(rank: Int): TaskKey[T] = TaskKey(AttributeKey.copyWithRank(key, rank)) + + def canEqual(that: Any): Boolean = that.isInstanceOf[TaskKey[_]] } /** @@ -195,6 +210,8 @@ sealed trait InputKey[T] final def withRank(rank: Int): InputKey[T] = InputKey(AttributeKey.copyWithRank(key, rank)) + + def canEqual(that: Any): Boolean = that.isInstanceOf[InputKey[_]] } /** Methods and types related to constructing settings, including keys, scopes, and initializations. */ @@ -320,10 +337,14 @@ object Scoped { def transform(f: S => S, source: SourcePosition): Setting[Task[S]] = set(scopedKey(_ map f), source) - @deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.", - "0.13.2") + @deprecated( + "No longer needed with new task syntax and SettingKey inheriting from Initialize.", + "0.13.2" + ) def task: SettingKey[Task[S]] = scopedSetting(scope, key) + def toSettingKey: SettingKey[Task[S]] = scopedSetting(scope, key) + def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key) def ? : Initialize[Task[Option[S]]] = Def.optional(scopedKey) { @@ -336,6 +357,11 @@ object Scoped { (this.? 
zipWith i)((x, y) => (x, y) map { case (a, b) => a getOrElse b }) } + /** Enriches `Initialize[Task[S]]` types. + * + * @param i the original `Initialize[Task[S]]` value to enrich + * @tparam S the type of the underlying value + */ final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] { protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f @@ -365,22 +391,36 @@ object Scoped { } } + /** Enriches `Initialize[InputTask[S]]` types. + * + * @param i the original `Initialize[InputTask[S]]` value to enrich + * @tparam S the type of the underlying value + */ final class RichInitializeInputTask[S](i: Initialize[InputTask[S]]) extends RichInitTaskBase[S, InputTask] { + protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f) def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = { import TupleSyntax._ - (i, Initialize.joinAny[Task](tasks))((thisTask, deps) => - thisTask.mapTask(_.dependsOn(deps: _*))) + (i, Initialize.joinAny[Task](tasks))( + (thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)) + ) } } + /** Enriches `Initialize[R[S]]` types. Abstracts over the specific task-like type constructor. 
+ * @tparam S the type of the underlying value + * @tparam R the task-like type constructor (either Task or InputTask) + */ sealed abstract class RichInitTaskBase[S, R[_]] { protected def onTask[T](f: Task[S] => Task[T]): Initialize[R[T]] - def flatMap[T](f: S => Task[T]): Initialize[R[T]] = flatMapR(f compose successM) - def map[T](f: S => T): Initialize[R[T]] = mapR(f compose successM) + def flatMap[T](f: S => Task[T]): Initialize[R[T]] = + onTask(_.result flatMap (f compose successM)) + + def map[T](f: S => T): Initialize[R[T]] = onTask(_.result map (f compose successM)) def andFinally(fin: => Unit): Initialize[R[S]] = onTask(_ andFinally fin) def doFinally(t: Task[Unit]): Initialize[R[S]] = onTask(_ doFinally t) @@ -392,23 +432,28 @@ object Scoped { @deprecated( "Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.", - "0.13.0") - def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_ flatMapR f) + "0.13.0" + ) + def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_.result flatMap f) @deprecated( "Use the `result` method to create a task that returns the full Result of this task. 
Then, call `map` on the new task.", - "0.13.0") - def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_ mapR f) + "0.13.0" + ) + def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_.result map f) @deprecated( "Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.", - "0.13.0") - def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = flatMapR(f compose failM) + "0.13.0" + ) + def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = + onTask(_.result flatMap (f compose failM)) @deprecated( "Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.", - "0.13.0") - def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = mapR(f compose failM) + "0.13.0" + ) + def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = onTask(_.result map (f compose failM)) } type AnyInitTask = Initialize[Task[T]] forSome { type T } @@ -565,7 +610,7 @@ object Scoped { /** The sbt 0.10 style DSL was deprecated in 0.13.13, favouring the use of the '.value' macro. * - * See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html for how to migrate. + * See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html#Migrating+from+sbt+0.12+style for how to migrate. 
*/ trait TupleSyntax { import Scoped._ @@ -628,7 +673,7 @@ object InputKey { apply(AttributeKey[InputTask[T]](label, description, extendScoped(extend1, extendN), rank)) def apply[T](akey: AttributeKey[InputTask[T]]): InputKey[T] = - new InputKey[T] { val key = akey; def scope = Scope.ThisScope } + Scoped.scopedInput(Scope.ThisScope, akey) } /** Constructs TaskKeys, which are associated with tasks to define a setting.*/ @@ -657,8 +702,7 @@ object TaskKey { ): TaskKey[T] = apply(AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN), rank)) - def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = - new TaskKey[T] { val key = akey; def scope = Scope.ThisScope } + def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = Scoped.scopedTask(Scope.ThisScope, akey) def local[T: Manifest]: TaskKey[T] = apply[T](AttributeKey.local[Task[T]]) } @@ -689,8 +733,7 @@ object SettingKey { ): SettingKey[T] = apply(AttributeKey[T](label, description, extendScoped(extend1, extendN), rank)) - def apply[T](akey: AttributeKey[T]): SettingKey[T] = - new SettingKey[T] { val key = akey; def scope = Scope.ThisScope } + def apply[T](akey: AttributeKey[T]): SettingKey[T] = Scoped.scopedSetting(Scope.ThisScope, akey) def local[T: Manifest: OptJsonWriter]: SettingKey[T] = apply[T](AttributeKey.local[T]) } diff --git a/main-settings/src/main/scala/sbt/std/InputConvert.scala b/main-settings/src/main/scala/sbt/std/InputConvert.scala index 3437af145..f7d581946 100644 --- a/main-settings/src/main/scala/sbt/std/InputConvert.scala +++ b/main-settings/src/main/scala/sbt/std/InputConvert.scala @@ -8,11 +8,11 @@ package sbt package std -import reflect.macros._ +import scala.reflect.macros._ -import Def.Initialize import sbt.internal.util.complete.Parser import sbt.internal.util.appmacro.{ Convert, Converted } +import Def.Initialize object InputInitConvert extends Convert { def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] = @@ -46,14 +46,13 @@ 
object TaskConvert extends Convert { /** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/ object FullConvert extends Convert { - import InputWrapper._ def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] = nme match { - case WrapInitTaskName => Converted.Success[c.type](in) - case WrapPreviousName => Converted.Success[c.type](in) - case WrapInitName => wrapInit[T](c)(in) - case WrapTaskName => wrapTask[T](c)(in) - case _ => Converted.NotApplicable[c.type] + case InputWrapper.WrapInitTaskName => Converted.Success[c.type](in) + case InputWrapper.WrapPreviousName => Converted.Success[c.type](in) + case InputWrapper.WrapInitName => wrapInit[T](c)(in) + case InputWrapper.WrapTaskName => wrapTask[T](c)(in) + case _ => Converted.NotApplicable[c.type] } private def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = { diff --git a/main-settings/src/main/scala/sbt/std/InputWrapper.scala b/main-settings/src/main/scala/sbt/std/InputWrapper.scala index b6dcc7b46..c1cc5dbac 100644 --- a/main-settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main-settings/src/main/scala/sbt/std/InputWrapper.scala @@ -8,9 +8,10 @@ package sbt package std -import language.experimental.macros -import reflect.macros._ -import reflect.internal.annotations.compileTimeOnly +import scala.language.experimental.macros + +import scala.annotation.compileTimeOnly +import scala.reflect.macros._ import Def.Initialize import sbt.internal.util.appmacro.ContextUtil @@ -30,28 +31,34 @@ object InputWrapper { private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603" @compileTimeOnly( - "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.") - def wrapTask_\u2603\u2603[T](in: Any): T = implDetailError + "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or 
Def.task." + ) + def wrapTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError @compileTimeOnly( - "`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") - def wrapInit_\u2603\u2603[T](in: Any): T = implDetailError + "`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting." + ) + def wrapInit_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError @compileTimeOnly( - "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.") - def wrapInitTask_\u2603\u2603[T](in: Any): T = implDetailError + "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task." + ) + def wrapInitTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError @compileTimeOnly( - "`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") - def wrapInputTask_\u2603\u2603[T](in: Any): T = implDetailError + "`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask." + ) + def wrapInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError @compileTimeOnly( - "`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") - def wrapInitInputTask_\u2603\u2603[T](in: Any): T = implDetailError + "`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask." 
+ ) + def wrapInitInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError @compileTimeOnly( - "`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.") - def wrapPrevious_\u2603\u2603[T](in: Any): T = implDetailError + "`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask." + ) + def wrapPrevious_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError private[this] def implDetailError = sys.error("This method is an implementation detail and should not be referenced.") @@ -160,11 +167,12 @@ object InputWrapper { } /** Translates .previous(format) to Previous.runtime()(format).value*/ - def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = { + def previousMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = { import c.universe._ c.macroApplication match { - case a @ Apply(Select(Apply(_, t :: Nil), tp), fmt) => + case a @ Apply(Select(Apply(_, t :: Nil), _), _) => if (t.tpe <:< c.weakTypeOf[TaskKey[T]]) { val tsTyped = c.Expr[TaskKey[T]](t) val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) } @@ -181,35 +189,42 @@ object InputWrapper { sealed abstract class MacroTaskValue[T] { @compileTimeOnly( - "`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.") + "`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting." 
+ ) def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T] } sealed abstract class MacroValue[T] { @compileTimeOnly( - "`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") + "`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting." + ) def value: T = macro InputWrapper.valueMacroImpl[T] } sealed abstract class ParserInput[T] { @compileTimeOnly( - "`parsed` can only be used within an input task macro, such as := or Def.inputTask.") + "`parsed` can only be used within an input task macro, such as := or Def.inputTask." + ) def parsed: T = macro ParserInput.parsedMacroImpl[T] } sealed abstract class InputEvaluated[T] { @compileTimeOnly( - "`evaluated` can only be used within an input task macro, such as := or Def.inputTask.") + "`evaluated` can only be used within an input task macro, such as := or Def.inputTask." + ) def evaluated: T = macro InputWrapper.valueMacroImpl[T] @compileTimeOnly( - "`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask.") + "`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask." + ) def inputTaskValue: InputTask[T] = macro InputWrapper.inputTaskValueMacroImpl[T] } sealed abstract class ParserInputTask[T] { @compileTimeOnly( - "`parsed` can only be used within an input task macro, such as := or Def.inputTask.") + "`parsed` can only be used within an input task macro, such as := or Def.inputTask." + ) def parsed: Task[T] = macro ParserInput.parsedInputMacroImpl[T] } sealed abstract class MacroPrevious[T] { @compileTimeOnly( - "`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.") + "`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task." 
+ ) def previous(implicit format: sjsonnew.JsonFormat[T]): Option[T] = macro InputWrapper.previousMacroImpl[T] } @@ -223,24 +238,29 @@ object ParserInput { private[std] val WrapInitName = "initParser_\u2603\u2603" @compileTimeOnly( - "`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def parser_\u2603\u2603[T](i: Any): T = + "`parsed` can only be used within an input task macro, such as := or Def.inputTask." + ) + def parser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T = sys.error("This method is an implementation detail and should not be referenced.") @compileTimeOnly( - "`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def initParser_\u2603\u2603[T](i: Any): T = + "`parsed` can only be used within an input task macro, such as := or Def.inputTask." + ) + def initParser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T = sys.error("This method is an implementation detail and should not be referenced.") - private[std] def wrap[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any], - pos: c.Position): c.Expr[T] = + private[std] def wrap[T: c.WeakTypeTag]( + c: blackbox.Context + )(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapName)(ts, pos) - private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any], - pos: c.Position): c.Expr[T] = + private[std] def wrapInit[T: c.WeakTypeTag]( + c: blackbox.Context + )(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapInitName)(ts, pos) - private[std] def inputParser[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] = + private[std] def inputParser[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] = c.universe.reify(t.splice.parser) def parsedInputMacroImpl[T: c.WeakTypeTag](c: blackbox.Context): 
c.Expr[Task[T]] = @@ -260,8 +280,9 @@ object ParserInput { wrap[Task[T]](c)(inputParser(c)(e), pos) } - private def wrapInitInputTask[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree, - pos: c.Position) = { + private def wrapInitInputTask[T: c.WeakTypeTag]( + c: blackbox.Context + )(tree: c.Tree, pos: c.Position) = { val e = c.Expr[Initialize[InputTask[T]]](tree) wrapInit[Task[T]](c)(c.universe.reify { Def.toIParser(e.splice) }, pos) } diff --git a/main-settings/src/main/scala/sbt/std/KeyMacro.scala b/main-settings/src/main/scala/sbt/std/KeyMacro.scala index aa08c14e6..4a57fbb2d 100644 --- a/main-settings/src/main/scala/sbt/std/KeyMacro.scala +++ b/main-settings/src/main/scala/sbt/std/KeyMacro.scala @@ -14,18 +14,21 @@ import scala.reflect.macros._ import sbt.util.OptJsonWriter private[sbt] object KeyMacro { - def settingKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)( - description: c.Expr[String]): c.Expr[SettingKey[T]] = + def settingKeyImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(description: c.Expr[String]): c.Expr[SettingKey[T]] = keyImpl2[T, SettingKey[T]](c) { (name, mf, ojw) => c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice, ojw.splice) } } - def taskKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)( - description: c.Expr[String]): c.Expr[TaskKey[T]] = + def taskKeyImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(description: c.Expr[String]): c.Expr[TaskKey[T]] = keyImpl[T, TaskKey[T]](c) { (name, mf) => c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) } } - def inputKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)( - description: c.Expr[String]): c.Expr[InputKey[T]] = + def inputKeyImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(description: c.Expr[String]): c.Expr[InputKey[T]] = keyImpl[T, InputKey[T]](c) { (name, mf) => c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) } } @@ -45,7 +48,8 @@ private[sbt] object KeyMacro { val enclosingValName = definingValName( c, 
methodName => - s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""") + s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""" + ) c.Expr[String](Literal(Constant(enclosingValName))) } @@ -61,10 +65,10 @@ private[sbt] object KeyMacro { n.decodedName.toString.trim // trim is not strictly correct, but macros don't expose the API necessary @tailrec def enclosingVal(trees: List[c.Tree]): String = { trees match { - case vd @ ValDef(_, name, _, _) :: ts => processName(name) + case ValDef(_, name, _, _) :: _ => processName(name) case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs) // lazy val x: X = has this form for some reason (only when the explicit type is present, though) - case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: xs if mods.hasFlag(Flag.LAZY) => + case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: _ if mods.hasFlag(Flag.LAZY) => processName(name) case _ => c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decodedName.toString)) diff --git a/main-settings/src/main/scala/sbt/std/SettingMacro.scala b/main-settings/src/main/scala/sbt/std/SettingMacro.scala index 23b9e51e0..d80b85242 100644 --- a/main-settings/src/main/scala/sbt/std/SettingMacro.scala +++ b/main-settings/src/main/scala/sbt/std/SettingMacro.scala @@ -46,11 +46,13 @@ object InitializeConvert extends Convert { Converted.Success(t) } - private def failTask[C <: blackbox.Context with Singleton](c: C)( - pos: c.Position): Converted[c.type] = + private def failTask[C <: blackbox.Context with Singleton]( + c: C + )(pos: c.Position): Converted[c.type] = Converted.Failure(pos, "A setting cannot depend on a task") - private def failPrevious[C <: blackbox.Context with Singleton](c: C)( - pos: c.Position): Converted[c.type] = + private def failPrevious[C <: blackbox.Context with Singleton]( + c: C + )(pos: c.Position): Converted[c.type] = Converted.Failure(pos, "A 
setting cannot depend on a task's previous value.") } @@ -59,11 +61,14 @@ object SettingMacro { def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] = Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)( Left(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) - def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] = + def settingDynMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] = Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)( Right(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) } diff --git a/main-settings/src/main/scala/sbt/std/TaskLinterDSL.scala b/main-settings/src/main/scala/sbt/std/TaskLinterDSL.scala index ec6b04288..98e26d4fe 100644 --- a/main-settings/src/main/scala/sbt/std/TaskLinterDSL.scala +++ b/main-settings/src/main/scala/sbt/std/TaskLinterDSL.scala @@ -24,9 +24,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL { val isTask = convert.asPredicate(ctx) class traverser extends Traverser { private val unchecked = symbolOf[sbt.sbtUnchecked].asClass - private val taskKeyType = typeOf[sbt.TaskKey[_]] - private val settingKeyType = typeOf[sbt.SettingKey[_]] - private val inputKeyType = typeOf[sbt.InputKey[_]] + private val initializeType = typeOf[sbt.Def.Initialize[_]] private val uncheckedWrappers = MutableSet.empty[Tree] var insideIf: Boolean = false var insideAnon: Boolean = false @@ -48,6 +46,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL { case _ => exprAtUseSite } uncheckedWrappers.add(removedSbtWrapper) + () } case _ => } @@ -55,8 +54,8 @@ abstract class BaseTaskLinterDSL extends LinterDSL { } } - @inline def isKey(tpe: Type): Boolean = - tpe <:< taskKeyType || tpe <:< settingKeyType || tpe <:< inputKeyType + @inline def isKey(tpe: Type): Boolean 
= isInitialize(tpe) + @inline def isInitialize(tpe: Type): Boolean = tpe <:< initializeType def detectAndErrorOnKeyMissingValue(i: Ident): Unit = { if (isKey(i.tpe)) { @@ -65,6 +64,20 @@ abstract class BaseTaskLinterDSL extends LinterDSL { } else () } + def detectAndErrorOnKeyMissingValue(s: Select): Unit = { + if (isKey(s.tpe)) { + val keyName = s.name.decodedName.toString + ctx.error(s.pos, TaskLinterDSLFeedback.missingValueForKey(keyName)) + } else () + } + + def detectAndErrorOnKeyMissingValue(a: Apply): Unit = { + if (isInitialize(a.tpe)) { + val expr = "X / y" + ctx.error(a.pos, TaskLinterDSLFeedback.missingValueForInitialize(expr)) + } else () + } + override def traverse(tree: ctx.universe.Tree): Unit = { tree match { case ap @ Apply(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) => @@ -73,7 +86,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL { val (qualName, isSettingKey) = Option(qual.symbol) .map(sym => (sym.name.decodedName.toString, qual.tpe <:< typeOf[SettingKey[_]])) - .getOrElse((ap.pos.lineContent, false)) + .getOrElse((ap.pos.source.lineToString(ap.pos.line - 1), false)) if (!isSettingKey && !shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) { if (insideIf && !isDynamicTask) { @@ -117,11 +130,15 @@ abstract class BaseTaskLinterDSL extends LinterDSL { // TODO: Consider using unused names analysis to be able to report on more cases case ValDef(_, valName, _, rhs) if valName == termNames.WILDCARD => rhs match { - case i: Ident => detectAndErrorOnKeyMissingValue(i) - case _ => () + case i: Ident => detectAndErrorOnKeyMissingValue(i) + case s: Select => detectAndErrorOnKeyMissingValue(s) + case a: Apply => detectAndErrorOnKeyMissingValue(a) + case _ => () } - case i: Ident => detectAndErrorOnKeyMissingValue(i) - case _ => () + case i: Ident => detectAndErrorOnKeyMissingValue(i) + case s: Select => detectAndErrorOnKeyMissingValue(s) + case a: Apply => detectAndErrorOnKeyMissingValue(a) + case _ => () } } traverseTrees(stmts) @@ -160,14 
+177,13 @@ object TaskLinterDSLFeedback { private final val startGreen = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.GREEN else "" private final val reset = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.RESET else "" - private final val ProblemHeader = s"${startRed}Problem${reset}" - private final val SolutionHeader = s"${startGreen}Solution${reset}" + private final val ProblemHeader = s"${startRed}problem${reset}" + private final val SolutionHeader = s"${startGreen}solution${reset}" def useOfValueInsideAnon(task: String) = s"""${startBold}The evaluation of `$task` inside an anonymous function is prohibited.$reset | |${ProblemHeader}: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not. - | |${SolutionHeader}: | 1. Make `$task` evaluation explicit outside of the function body if you don't care about its evaluation. | 2. Use a dynamic task to evaluate `$task` and pass that value as a parameter to an anonymous function. @@ -178,7 +194,6 @@ object TaskLinterDSLFeedback { | |${ProblemHeader}: `$task` is inside the if expression of a regular task. | Regular tasks always evaluate task inside the bodies of if expressions. - | |${SolutionHeader}: | 1. If you only want to evaluate it when the if predicate is true or false, use a dynamic task. | 2. Otherwise, make the static evaluation explicit by evaluating `$task` outside the if expression. @@ -187,8 +202,14 @@ object TaskLinterDSLFeedback { def missingValueForKey(key: String) = s"""${startBold}The key `$key` is not being invoked inside the task definition.$reset | - |${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered. - | + |${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered. |${SolutionHeader}: Replace `$key` by `$key.value` or remove it if unused. 
""".stripMargin + + def missingValueForInitialize(expr: String) = + s"""${startBold}The setting/task `$expr` is not being invoked inside the task definition.$reset + | + |${ProblemHeader}: Settings/tasks missing `.value` are not initialized and their dependency is not registered. + |${SolutionHeader}: Replace `$expr` by `($expr).value` or remove it if unused. + """.stripMargin } diff --git a/main-settings/src/main/scala/sbt/std/TaskMacro.scala b/main-settings/src/main/scala/sbt/std/TaskMacro.scala index e7ffcee63..bf910a687 100644 --- a/main-settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main-settings/src/main/scala/sbt/std/TaskMacro.scala @@ -56,9 +56,11 @@ object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance) with MonadInstance { type SS = sbt.internal.util.Settings[Scope] - val settingsData = TaskKey[SS]("settings-data", - "Provides access to the project data for the build.", - KeyRanks.DTask) + val settingsData = TaskKey[SS]( + "settings-data", + "Provides access to the project data for the build.", + KeyRanks.DTask + ) def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = { import TupleSyntax._ @@ -98,29 +100,35 @@ object TaskMacro { import LinterDSL.{ Empty => EmptyLinter } - def taskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[T]): c.Expr[Initialize[Task[T]]] = + def taskMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] = Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskLinterDSL)( Left(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) - def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] = + def taskDynMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] = Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, 
TaskDynLinterDSL)( Right(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) /** Implementation of := macro for settings. */ - def settingAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - v: c.Expr[T]): c.Expr[Setting[T]] = { + def settingAssignMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[T]): c.Expr[Setting[T]] = { val init = SettingMacro.settingMacroImpl[T](c)(v) val assign = transformMacroImpl(c)(init.tree)(AssignInitName) c.Expr[Setting[T]](assign) } /** Implementation of := macro for tasks. */ - def taskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - v: c.Expr[T]): c.Expr[Setting[Task[T]]] = { + def taskAssignMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = { val init = taskMacroImpl[T](c)(v) val assign = transformMacroImpl(c)(init.tree)(AssignInitName) c.Expr[Setting[Task[T]]](assign) @@ -130,88 +138,106 @@ object TaskMacro { // These macros are there just so we can fail old operators like `<<=` and provide useful migration information. 
def fakeSettingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] = - ContextUtil.selectMacroImpl[Setting[T]](c) { (ts, pos) => - c.abort(pos, assignMigration) - } - def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)( - v: c.Expr[Initialize[V]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[S]] = - ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) => - c.abort(pos, append1Migration) - } - def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)( - vs: c.Expr[Initialize[V]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[S]] = - ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) => - c.abort(pos, appendNMigration) - } + @deprecated("unused", "") app: c.Expr[Initialize[T]] + ): c.Expr[Setting[T]] = + ContextUtil.selectMacroImpl[Setting[T]](c)((_, pos) => c.abort(pos, assignMigration)) + + def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag]( + c: blackbox.Context + )(@deprecated("unused", "") v: c.Expr[Initialize[V]])( + @deprecated("unused", "") a: c.Expr[Append.Value[S, V]] + ): c.Expr[Setting[S]] = + ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, append1Migration)) + + def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag]( + c: blackbox.Context + )(@deprecated("unused", "") vs: c.Expr[Initialize[V]])( + @deprecated("unused", "") a: c.Expr[Append.Values[S, V]] + ): c.Expr[Setting[S]] = + ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, appendNMigration)) + def fakeItaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] = - ContextUtil.selectMacroImpl[Setting[Task[T]]](c) { (ts, pos) => - c.abort(pos, assignMigration) - } - def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)( - v: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[Task[S]]] = - 
ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) => - c.abort(pos, append1Migration) - } - def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)( - vs: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[Task[S]]] = - ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) => - c.abort(pos, appendNMigration) - } + @deprecated("unused", "") app: c.Expr[Initialize[Task[T]]] + ): c.Expr[Setting[Task[T]]] = + ContextUtil.selectMacroImpl[Setting[Task[T]]](c)((_, pos) => c.abort(pos, assignMigration)) - /* Implementations of <<= macro variations for tasks and settings. These just get the source position of the call site.*/ + def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag]( + c: blackbox.Context + )(@deprecated("unused", "") v: c.Expr[Initialize[Task[V]]])( + @deprecated("unused", "") a: c.Expr[Append.Value[S, V]] + ): c.Expr[Setting[Task[S]]] = + ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, append1Migration)) - def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] = + def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag]( + c: blackbox.Context + )(@deprecated("unused", "") vs: c.Expr[Initialize[Task[V]]])( + @deprecated("unused", "") a: c.Expr[Append.Values[S, V]] + ): c.Expr[Setting[Task[S]]] = + ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, appendNMigration)) + + // Implementations of <<= macro variations for tasks and settings. + // These just get the source position of the call site. 
+ + def itaskAssignPosition[T: c.WeakTypeTag]( + c: blackbox.Context + )(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] = settingAssignPosition(c)(app) - def taskAssignPositionT[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] = + def taskAssignPositionT[T: c.WeakTypeTag]( + c: blackbox.Context + )(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] = itaskAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) }) - def taskAssignPositionPure[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[T]): c.Expr[Setting[Task[T]]] = + def taskAssignPositionPure[T: c.WeakTypeTag]( + c: blackbox.Context + )(app: c.Expr[T]): c.Expr[Setting[Task[T]]] = taskAssignPositionT(c)(c.universe.reify { TaskExtra.constant(app.splice) }) - def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)( - f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] = + def taskTransformPosition[S: c.WeakTypeTag]( + c: blackbox.Context + )(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] = c.Expr[Setting[Task[S]]](transformMacroImpl(c)(f.tree)(TransformInitName)) - def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)( - f: c.Expr[S => S]): c.Expr[Setting[S]] = + def settingTransformPosition[S: c.WeakTypeTag]( + c: blackbox.Context + )(f: c.Expr[S => S]): c.Expr[Setting[S]] = c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName)) - def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)( - f: c.Expr[S => S]): c.Expr[Setting[S]] = + def itaskTransformPosition[S: c.WeakTypeTag]( + c: blackbox.Context + )(f: c.Expr[S => S]): c.Expr[Setting[S]] = c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName)) def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] = settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) }) - def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)( - app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] = + def 
settingAssignPosition[T: c.WeakTypeTag]( + c: blackbox.Context + )(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] = c.Expr[Setting[T]](transformMacroImpl(c)(app.tree)(AssignInitName)) /** Implementation of := macro for tasks. */ - def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = { + def inputTaskAssignMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = { val init = inputTaskMacroImpl[T](c)(v) val assign = transformMacroImpl(c)(init.tree)(AssignInitName) c.Expr[Setting[InputTask[T]]](assign) } /** Implementation of += macro for tasks. */ - def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])( - a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = { + def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = { val init = taskMacroImpl[U](c)(v) val append = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName) c.Expr[Setting[Task[T]]](append) } /** Implementation of += macro for settings. 
*/ - def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])( - a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = { + def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = { import c.universe._ val ttpe = c.weakTypeOf[T] val typeArgs = ttpe.typeArgs @@ -221,10 +247,11 @@ object TaskMacro { if typeArgs.nonEmpty && (typeArgs.head weak_<:< c.weakTypeOf[Task[_]]) && (tpe weak_<:< c.weakTypeOf[Initialize[_]]) => c.macroApplication match { - case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) => + case Apply(Apply(TypeApply(Select(preT, _), _), _), _) => val tree = Apply( TypeApply(Select(preT, TermName("+=").encodedName), TypeTree(typeArgs.head) :: Nil), - Select(v.tree, TermName("taskValue").encodedName) :: Nil) + Select(v.tree, TermName("taskValue").encodedName) :: Nil + ) c.Expr[Setting[T]](tree) case x => ContextUtil.unexpectedTree(x) } @@ -236,73 +263,89 @@ object TaskMacro { } /** Implementation of ++= macro for tasks. */ - def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])( - a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = { + def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = { val init = taskMacroImpl[U](c)(vs) val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName) c.Expr[Setting[Task[T]]](append) } /** Implementation of ++= macro for settings. 
*/ - def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])( - a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = { + def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = { val init = SettingMacro.settingMacroImpl[U](c)(vs) val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName) c.Expr[Setting[T]](append) } /** Implementation of -= macro for tasks. */ - def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])( - r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = { + def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = { val init = taskMacroImpl[U](c)(v) val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName) c.Expr[Setting[Task[T]]](remove) } /** Implementation of -= macro for settings. */ - def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])( - r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = { + def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = { val init = SettingMacro.settingMacroImpl[U](c)(v) val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName) c.Expr[Setting[T]](remove) } /** Implementation of --= macro for tasks. 
*/ - def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])( - r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = { + def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = { val init = taskMacroImpl[U](c)(vs) val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName) c.Expr[Setting[Task[T]]](remove) } /** Implementation of --= macro for settings. */ - def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])( - r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = { + def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag]( + c: blackbox.Context + )(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = { val init = SettingMacro.settingMacroImpl[U](c)(vs) val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName) c.Expr[Setting[T]](remove) } - private[this] def appendMacroImpl(c: blackbox.Context)(init: c.Tree, append: c.Tree)( - newName: String): c.Tree = { + private[this] def appendMacroImpl( + c: blackbox.Context + )(init: c.Tree, append: c.Tree)(newName: String): c.Tree = { import c.universe._ c.macroApplication match { - case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) => - Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs), - init :: sourcePosition(c).tree :: Nil), - append :: Nil) + case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) => + Apply( + Apply( + TypeApply(Select(preT, TermName(newName).encodedName), targs), + init :: sourcePosition(c).tree :: Nil + ), + append :: Nil + ) case x => ContextUtil.unexpectedTree(x) } } - private[this] def removeMacroImpl(c: blackbox.Context)(init: c.Tree, remove: c.Tree)( - newName: String): c.Tree = { + private[this] def removeMacroImpl( + c: blackbox.Context + )(init: c.Tree, remove: c.Tree)(newName: String): c.Tree = { import c.universe._ 
c.macroApplication match { - case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), r) => - Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs), - init :: sourcePosition(c).tree :: Nil), - r) + case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) => + Apply( + Apply( + TypeApply(Select(preT, TermName(newName).encodedName), targs), + init :: sourcePosition(c).tree :: Nil + ), + remove :: Nil + ) case x => ContextUtil.unexpectedTree(x) } } @@ -316,8 +359,10 @@ object TaskMacro { case Apply(Select(prefix, _), _) => prefix case x => ContextUtil.unexpectedTree(x) } - Apply.apply(Select(target, TermName(newName).encodedName), - init :: sourcePosition(c).tree :: Nil) + Apply.apply( + Select(target, TermName(newName).encodedName), + init :: sourcePosition(c).tree :: Nil + ) } private[this] def sourcePosition(c: blackbox.Context): c.Expr[SourcePosition] = { @@ -335,7 +380,8 @@ object TaskMacro { private[this] def settingSource(c: blackbox.Context, path: String, name: String): String = { @tailrec def inEmptyPackage(s: c.Symbol): Boolean = s != c.universe.NoSymbol && ( s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage( - s.owner) + s.owner + ) ) c.internal.enclosingOwner match { case ec if !ec.isStatic => name @@ -349,16 +395,19 @@ object TaskMacro { c.Expr[T](Literal(Constant(t))) } - def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = + def inputTaskMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = inputTaskMacro0[T](c)(t) - def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = + def inputTaskDynMacroImpl[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = inputTaskDynMacro0[T](c)(t) - private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: 
blackbox.Context)( - t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = + private[this] def inputTaskMacro0[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = iInitializeMacro(c)(t) { et => val pt = iParserMacro(c)(et) { pt => iTaskMacro(c)(pt) @@ -367,8 +416,8 @@ object TaskMacro { } private[this] def iInitializeMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])( - f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], - mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = { + f: c.Expr[T] => c.Expr[M[T]] + )(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = { val inner: Transform[c.type, M] = new Transform[c.type, M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } @@ -376,7 +425,8 @@ object TaskMacro { Instance .contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder, EmptyLinter)( Left(cond), - inner) + inner + ) } private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree = { @@ -412,25 +462,29 @@ object TaskMacro { } private[this] def iParserMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])( - f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], - mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = { + f: c.Expr[T] => c.Expr[M[T]] + )(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = { val inner: Transform[c.type, M] = new Transform[c.type, M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder, LinterDSL.Empty)( Left(t), - inner) + inner + ) } - private[this] def iTaskMacro[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[T]): c.Expr[Task[T]] = + private[this] def iTaskMacro[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[T]): c.Expr[Task[T]] = Instance .contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, EmptyLinter)( Left(t), - Instance.idTransform) + Instance.idTransform + ) - 
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: blackbox.Context)( - t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = { + private[this] def inputTaskDynMacro0[T: c.WeakTypeTag]( + c: blackbox.Context + )(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = { import c.universe.{ Apply => ApplyTree, _ } import internal.decorators._ @@ -455,7 +509,8 @@ object TaskMacro { if (result.isDefined) { c.error( qual.pos, - "Implementation restriction: a dynamic InputTask can only have a single input parser.") + "Implementation restriction: a dynamic InputTask can only have a single input parser." + ) EmptyTree } else { qual.foreach(checkQual) @@ -514,11 +569,13 @@ object PlainTaskMacro { def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] = Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)( Left(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T] def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] = Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)( Right(t), - Instance.idTransform[c.type]) + Instance.idTransform[c.type] + ) } diff --git a/main-settings/src/test/scala/sbt/BuildSettingsInstances.scala b/main-settings/src/test/scala/sbt/BuildSettingsInstances.scala new file mode 100644 index 000000000..3618fb841 --- /dev/null +++ b/main-settings/src/test/scala/sbt/BuildSettingsInstances.scala @@ -0,0 +1,135 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt.test + +import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._ + +import java.io.File +import sbt.io.IO +import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero } +import sbt.{ + BuildRef, + LocalProject, + LocalRootProject, + ProjectRef, + Reference, + RootProject, + ThisBuild, + ThisProject +} +import sbt.ConfigKey +import sbt.librarymanagement.syntax._ +import sbt.{ InputKey, SettingKey, TaskKey } +import sbt.internal.util.{ AttributeKey, AttributeMap } + +object BuildSettingsInstances { + val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now.. + + implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f))) + + implicit val arbProjectRef: Arbitrary[ProjectRef] = + Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id)) + + implicit val arbLocalProject: Arbitrary[LocalProject] = + Arbitrary(arbitrary[String] map LocalProject) + + implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_))) + + implicit val arbReference: Arbitrary[Reference] = Arbitrary { + Gen.frequency( + 96 -> arbitrary[BuildRef], + 10271 -> ThisBuild, + 325 -> LocalRootProject, + 2283 -> arbitrary[ProjectRef], + 299 -> ThisProject, + 436 -> arbitrary[LocalProject], + 1133 -> arbitrary[RootProject], + ) + } + + implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary { + Gen.frequency( + 2 -> const[ConfigKey](Compile), + 2 -> const[ConfigKey](Test), + 1 -> const[ConfigKey](Runtime), + 1 -> const[ConfigKey](IntegrationTest), + 1 -> const[ConfigKey](Provided), + ) + } + + implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] = + Arbitrary(Gen.identifier map (AttributeKey[A](_))) + + implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary { + Gen.frequency( + 20 -> AttributeMap.empty, + 1 -> { + for (name <- Gen.identifier; isModule 
<- arbitrary[Boolean]) + yield + AttributeMap.empty + .put(AttributeKey[String]("name"), name) + .put(AttributeKey[Boolean]("isModule"), isModule) + } + ) + } + + implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] = + Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_)))) + + implicit def arbScope: Arbitrary[Scope] = Arbitrary( + for { + r <- arbitrary[ScopeAxis[Reference]] + c <- arbitrary[ScopeAxis[ConfigKey]] + t <- arbitrary[ScopeAxis[AttributeKey[_]]] + e <- arbitrary[ScopeAxis[AttributeMap]] + } yield Scope(r, c, t, e) + ) + + type Key = K forSome { type K <: Scoped.ScopingSetting[K] with Scoped } + + final case class Label(value: String) + val genLabel: Gen[Label] = Gen.identifier map Label + implicit def arbLabel: Arbitrary[Label] = Arbitrary(genLabel) + + def genInputKey[A: Manifest]: Gen[InputKey[A]] = genLabel map (x => InputKey[A](x.value)) + def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = genLabel map (x => SettingKey[A](x.value)) + def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = genLabel map (x => TaskKey[A](x.value)) + + def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] = Arbitrary { + Gen.frequency( + 5 -> keyGen, + 1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope) + ) + } + + implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A]) + implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A]) + implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A]) + + implicit def arbKey[A: Manifest]( + implicit + arbInputKey: Arbitrary[InputKey[A]], + arbSettingKey: Arbitrary[SettingKey[A]], + arbTaskKey: Arbitrary[TaskKey[A]], + ): Arbitrary[Key] = Arbitrary { + def convert[T](g: Gen[T]) = g.asInstanceOf[Gen[Key]] + Gen.frequency( + 15431 -> convert(arbitrary[InputKey[A]]), + 19645 -> convert(arbitrary[SettingKey[A]]), + 22867 -> convert(arbitrary[TaskKey[A]]), + ) + } + + 
object WithoutScope { + implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A]) + implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A]) + implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A]) + } + + implicit def arbScoped[A: Manifest]: Arbitrary[Scoped] = Arbitrary(arbitrary[Key]) +} diff --git a/main-settings/src/test/scala/sbt/ScopedSpec.scala b/main-settings/src/test/scala/sbt/ScopedSpec.scala new file mode 100644 index 000000000..5992403b0 --- /dev/null +++ b/main-settings/src/test/scala/sbt/ScopedSpec.scala @@ -0,0 +1,145 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt.test + +import org.scalacheck._, Prop._, util.Pretty + +import sbt.internal.util.AttributeKey +import sbt.util.NoJsonWriter +import sbt.{ InputTask, Scope, Task } +import sbt.{ InputKey, Scoped, SettingKey, TaskKey } + +import BuildSettingsInstances._ + +object ScopedSpec extends Properties("Scoped") { + val intManifest = manifest[Int] + val stringManifest = manifest[String] + + implicit val arbManifest: Arbitrary[Manifest[_]] = + Arbitrary(Gen.oneOf(intManifest, stringManifest)) + + property("setting keys are structurally equal") = { + forAll { (label: Label, manifest: Manifest[_], scope: Scope) => + val k1 = settingKey(label, manifest, scope) + val k2 = settingKey(label, manifest, scope) + expectEq(k1, k2) + } + } + + property("task keys are structurally equal") = { + forAll { (label: Label, manifest: Manifest[_], scope: Scope) => + val k1 = taskKey(label, manifest, scope) + val k2 = taskKey(label, manifest, scope) + expectEq(k1, k2) + } + } + + property("input keys are structurally equal") = { + forAll { (label: Label, manifest: Manifest[_], scope: Scope) => + val k1 = inputKey(label, manifest, scope) + val k2 = inputKey(label, manifest, scope) + expectEq(k1, k2) 
+ } + } + + property("different key types are not equal") = { + forAll { (label: Label, manifest: Manifest[_], scope: Scope) => + val settingKey1 = settingKey(label, manifest, scope) + val taskKey1 = taskKey(label, manifest, scope) + val inputKey1 = inputKey(label, manifest, scope) + + all( + expectNe(settingKey1, taskKey1), + expectNe(settingKey1, inputKey1), + expectNe(taskKey1, inputKey1), + ) + } + } + + property("different key types, with the same manifest, are not equal") = { + forAll { (label: Label, scope: Scope) => + val prop1 = { + val manifest1 = manifest[Task[String]] + val attrKey = attributeKey(label, manifest1) + val k1 = SettingKey(attrKey) in scope + val k2 = TaskKey(attrKey) in scope + expectNeSameManifest(k1, k2) + } + + val prop2 = { + val manifest1 = manifest[InputTask[String]] + val attrKey = attributeKey(label, manifest1) + val k1 = SettingKey(attrKey) in scope + val k2 = InputKey(attrKey) in scope + expectNeSameManifest(k1, k2) + } + + all(prop1, prop2) + } + } + + /// + + def settingKey[A](label: Label, manifest: Manifest[A], scope: Scope): SettingKey[A] = { + val noJsonWriter = NoJsonWriter[A]() + SettingKey[A](label.value)(manifest, noJsonWriter) in scope + } + + def taskKey[A](label: Label, manifest: Manifest[A], s: Scope): TaskKey[A] = + TaskKey[A](label.value)(manifest) in s + + def inputKey[A](label: Label, manifest: Manifest[A], scope: Scope): InputKey[A] = + InputKey[A](label.value)(manifest) in scope + + def attributeKey[A](label: Label, manifest: Manifest[A]): AttributeKey[A] = { + val jsonWriter = NoJsonWriter[A]() + AttributeKey[A](label.value)(manifest, jsonWriter) + } + + /// + + def expectEq(k1: Scoped, k2: Scoped): Prop = + ?=(k1, k2) && ?=(k2, k1) map eqLabels(k1, k2) + + def expectNe(k1: Scoped, k2: Scoped): Prop = + !=(k1, k2) && !=(k2, k1) map eqLabels(k1, k2) + + def expectNeSameManifest(k1: Scoped, k2: Scoped) = { + all( + ?=(k1.key.manifest, k2.key.manifest), // sanity check the manifests are the same + expectNe(k1, 
k2), + ) + } + + def eqLabels(k1: Scoped, k2: Scoped): Prop.Result => Prop.Result = r => { + val eqLabel = k1.key.label == k2.key.label + val eqManifest = k1.key.manifest == k2.key.manifest + val eqScope = k1.scope == k2.scope + r.label(s"label equality: ${k1.key.label} == ${k2.key.label} : $eqLabel") + .label(s"manifest equality: ${k1.key.manifest} == ${k2.key.manifest} : $eqManifest") + .label(s"scope equality: ${k1.scope} == ${k2.scope} : $eqScope") + } + + def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop = + if (x == y) proved + else + falsified :| { + val act = Pretty.pretty[T](x, Pretty.Params(0)) + val exp = Pretty.pretty[T](y, Pretty.Params(0)) + s"Expected $act to be equal to $exp" + } + + def !=[T](x: T, y: T)(implicit pp: T => Pretty): Prop = + if (x == y) falsified + else + proved :| { + val act = Pretty.pretty[T](x, Pretty.Params(0)) + val exp = Pretty.pretty[T](y, Pretty.Params(0)) + s"Expected $act to NOT be equal to $exp" + } +} diff --git a/main-settings/src/test/scala/sbt/SlashSyntaxSpec.scala b/main-settings/src/test/scala/sbt/SlashSyntaxSpec.scala index a4c802934..45d61b6de 100644 --- a/main-settings/src/test/scala/sbt/SlashSyntaxSpec.scala +++ b/main-settings/src/test/scala/sbt/SlashSyntaxSpec.scala @@ -7,290 +7,104 @@ package sbt.test -import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._, Prop._ +import org.scalacheck.{ Test => _, _ }, Prop._ -import java.io.File -import sbt.io.IO import sbt.SlashSyntax -import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }, Scope.{ Global, ThisScope } -import sbt.{ BuildRef, LocalProject, LocalRootProject, ProjectRef, Reference, RootProject, ThisBuild, ThisProject } +import sbt.{ Scope, ScopeAxis, Scoped }, Scope.{ Global, ThisScope } +import sbt.Reference import sbt.ConfigKey -import sbt.librarymanagement.syntax._ -import sbt.{ InputKey, SettingKey, TaskKey } -import sbt.internal.util.{ AttributeKey, AttributeMap } +import sbt.internal.util.AttributeKey -object BuildDSLInstances { - 
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now.. - - implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f))) - - implicit val arbProjectRef: Arbitrary[ProjectRef] = - Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id)) - - implicit val arbLocalProject: Arbitrary[LocalProject] = - Arbitrary(arbitrary[String] map LocalProject) - - implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_))) - - implicit val arbReference: Arbitrary[Reference] = Arbitrary { - Gen.frequency( - 1 -> arbitrary[BuildRef], // 96 - 100 -> ThisBuild, // 10,271 - 3 -> LocalRootProject, // 325 - 23 -> arbitrary[ProjectRef], // 2,283 - 3 -> ThisProject, // 299 - 4 -> arbitrary[LocalProject], // 436 - 11 -> arbitrary[RootProject], // 1,133 - ) - } - - implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary { - Gen.frequency( - 2 -> const[ConfigKey](Compile), - 2 -> const[ConfigKey](Test), - 1 -> const[ConfigKey](Runtime), - 1 -> const[ConfigKey](IntegrationTest), - 1 -> const[ConfigKey](Provided), - ) - } - - implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] = - Arbitrary(Gen.identifier map (AttributeKey[A](_))) - - def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] = - Arbitrary(Gen.frequency( - 5 -> keyGen, - 1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope) - )) - - def genInputKey[A: Manifest]: Gen[InputKey[A]] = Gen.identifier map (InputKey[A](_)) - def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = Gen.identifier map (SettingKey[A](_)) - def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = Gen.identifier map (TaskKey[A](_)) - - implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A]) - implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A]) - implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A]) 
- - implicit def arbScoped[A: Manifest](implicit - arbInputKey: Arbitrary[InputKey[A]], - arbSettingKey: Arbitrary[SettingKey[A]], - arbTaskKey: Arbitrary[TaskKey[A]], - ): Arbitrary[Scoped] = { - Arbitrary(Gen.frequency( - 15 -> arbitrary[InputKey[A]], // 15,431 - 20 -> arbitrary[SettingKey[A]], // 19,645 - 23 -> arbitrary[TaskKey[A]], // 22,867 - )) - } - - object WithoutScope { - implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A]) - implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A]) - implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A]) - } - - implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] = - Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_)))) - - implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary { - Gen.frequency( - 20 -> AttributeMap.empty, - 1 -> (for (name <- Gen.identifier; isModule <- arbitrary[Boolean]) - yield AttributeMap.empty - .put(AttributeKey[String]("name"), name) - .put(AttributeKey[Boolean]("isModule"), isModule) - ) - ) - } - - implicit def arbScope: Arbitrary[Scope] = Arbitrary( - for { - r <- arbitrary[ScopeAxis[Reference]] - c <- arbitrary[ScopeAxis[ConfigKey]] - t <- arbitrary[ScopeAxis[AttributeKey[_]]] - e <- arbitrary[ScopeAxis[AttributeMap]] - } yield Scope(r, c, t, e) - ) -} -import BuildDSLInstances._ - -object CustomEquality { - trait Eq[A] { - def equal(x: A, y: A): Boolean - } - - // Avoid reimplementing equality for other standard classes. 
- trait EqualLowPriority { - implicit def universal[A] = (x: A, y: A) => x == y - } - - object Eq extends EqualLowPriority { - def apply[A: Eq]: Eq[A] = implicitly - - implicit def eqScoped[A <: Scoped]: Eq[A] = (x, y) => x.scope == y.scope && x.key == y.key - } - - implicit class AnyWith_===[A](private val x: A) extends AnyVal { - def ===(y: A)(implicit z: Eq[A]): Boolean = z.equal(x, y) - def =?(y: A)(implicit z: Eq[A]): Prop = { - if (x === y) proved else falsified :| s"Expected $x but got $y" - } - } - - def expectValue[A: Eq](expected: A)(x: A) = expected =? x -} -import CustomEquality._ +import BuildSettingsInstances._ object SlashSyntaxSpec extends Properties("SlashSyntax") with SlashSyntax { - type Key[K] = Scoped.ScopingSetting[K] with Scoped - property("Global / key == key in Global") = { - def check[K <: Key[K]: Arbitrary] = forAll((k: K) => expectValue(k in Global)(Global / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((k: Key) => expectValue(k in Global)(Global / k)) } property("Reference / key == key in Reference") = { - def check[K <: Key[K]: Arbitrary] = forAll((r: Reference, k: K) => expectValue(k in r)(r / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((r: Reference, k: Key) => expectValue(k in r)(r / k)) } property("Reference / Config / key == key in Reference in Config") = { - def check[K <: Key[K]: Arbitrary] = - forAll((r: Reference, c: ConfigKey, k: K) => expectValue(k in r in c)(r / c / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((r: Reference, c: ConfigKey, k: Key) => expectValue(k in r in c)(r / c / k)) } property("Reference / task.key / key == key in Reference in task") = { - def check[K <: Key[K]: Arbitrary] = - forAll((r: Reference, t: Scoped, k: K) => expectValue(k in (r, t))(r / t.key / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((r: 
Reference, t: Scoped, k: Key) => expectValue(k in (r, t))(r / t.key / k)) } property("Reference / task / key ~= key in Reference in task") = { import WithoutScope._ - def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] = - forAll((r: Reference, t: T, k: K) => expectValue(k in (r, t))(r / t / k)) - (true - && check[InputKey[String], InputKey[String]] - && check[InputKey[String], SettingKey[String]] - && check[InputKey[String], TaskKey[String]] - && check[SettingKey[String], InputKey[String]] - && check[SettingKey[String], SettingKey[String]] - && check[SettingKey[String], TaskKey[String]] - && check[TaskKey[String], InputKey[String]] - && check[TaskKey[String], SettingKey[String]] - && check[TaskKey[String], TaskKey[String]] - ) + forAll((r: Reference, t: Key, k: Key) => expectValue(k in (r, t))(r / t / k)) } property("Reference / Config / task.key / key == key in Reference in Config in task") = { - def check[K <: Key[K]: Arbitrary] = - forAll((r: Reference, c: ConfigKey, t: Scoped, k: K) => - expectValue(k in (r, c, t))(r / c / t.key / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll { (r: Reference, c: ConfigKey, t: Scoped, k: Key) => + expectValue(k in (r, c, t))(r / c / t.key / k) + } } property("Reference / Config / task / key ~= key in Reference in Config in task") = { import WithoutScope._ - def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] = - forAll((r: Reference, c: ConfigKey, t: T, k: K) => expectValue(k in (r, c, t))(r / c / t / k)) - (true - && check[InputKey[String], InputKey[String]] - && check[InputKey[String], SettingKey[String]] - && check[InputKey[String], TaskKey[String]] - && check[SettingKey[String], InputKey[String]] - && check[SettingKey[String], SettingKey[String]] - && check[SettingKey[String], TaskKey[String]] - && check[TaskKey[String], InputKey[String]] - && check[TaskKey[String], SettingKey[String]] - && check[TaskKey[String], TaskKey[String]] - ) + forAll { (r: Reference, c: 
ConfigKey, t: Key, k: Key) => + expectValue(k in (r, c, t))(r / c / t / k) + } } property("Config / key == key in Config") = { - def check[K <: Key[K]: Arbitrary] = - forAll((c: ConfigKey, k: K) => expectValue(k in c)(c / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((c: ConfigKey, k: Key) => expectValue(k in c)(c / k)) } property("Config / task.key / key == key in Config in task") = { - def check[K <: Key[K]: Arbitrary] = - forAll((c: ConfigKey, t: Scoped, k: K) => expectValue(k in c in t)(c / t.key / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((c: ConfigKey, t: Scoped, k: Key) => expectValue(k in c in t)(c / t.key / k)) } property("Config / task / key ~= key in Config in task") = { import WithoutScope._ - def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] = - forAll((c: ConfigKey, t: T, k: K) => expectValue(k in c in t)(c / t / k)) - (true - && check[InputKey[String], InputKey[String]] - && check[InputKey[String], SettingKey[String]] - && check[InputKey[String], TaskKey[String]] - && check[SettingKey[String], InputKey[String]] - && check[SettingKey[String], SettingKey[String]] - && check[SettingKey[String], TaskKey[String]] - && check[TaskKey[String], InputKey[String]] - && check[TaskKey[String], SettingKey[String]] - && check[TaskKey[String], TaskKey[String]] - ) + forAll((c: ConfigKey, t: Key, k: Key) => expectValue(k in c in t)(c / t / k)) } property("task.key / key == key in task") = { - def check[K <: Key[K]: Arbitrary] = - forAll((t: Scoped, k: K) => expectValue(k in t)(t.key / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((t: Scoped, k: Key) => expectValue(k in t)(t.key / k)) } property("task / key ~= key in task") = { import WithoutScope._ - def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] = - forAll((t: T, k: K) => expectValue(k in t)(t / k)) - (true - && check[InputKey[String], 
InputKey[String]] - && check[InputKey[String], SettingKey[String]] - && check[InputKey[String], TaskKey[String]] - && check[SettingKey[String], InputKey[String]] - && check[SettingKey[String], SettingKey[String]] - && check[SettingKey[String], TaskKey[String]] - && check[TaskKey[String], InputKey[String]] - && check[TaskKey[String], SettingKey[String]] - && check[TaskKey[String], TaskKey[String]] - ) + forAll((t: Key, k: Key) => expectValue(k in t)(t / k)) } property("Scope / key == key in Scope") = { - def check[K <: Key[K]: Arbitrary] = forAll((s: Scope, k: K) => expectValue(k in s)(s / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll((s: Scope, k: Key) => expectValue(k in s)(s / k)) } property("Reference? / key == key in ThisScope.copy(..)") = { - def check[K <: Key[K]: Arbitrary] = - forAll((r: ScopeAxis[Reference], k: K) => - expectValue(k in ThisScope.copy(project = r))(r / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll { (r: ScopeAxis[Reference], k: Key) => + expectValue(k in ThisScope.copy(project = r))(r / k) + } } property("Reference? / ConfigKey? / key == key in ThisScope.copy(..)") = { - def check[K <: Key[K]: Arbitrary] = - forAll((r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: K) => - expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll( + (r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) => + expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k) + ) } // property("Reference? / AttributeKey? 
/ key == key in ThisScope.copy(..)") = { -// def check[K <: Key[K]: Arbitrary] = -// forAll( -// (r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: K) => -// expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k)) -// check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] +// forAll((r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: AnyKey) => +// expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k)) // } property("Reference? / ConfigKey? / AttributeKey? / key == key in ThisScope.copy(..)") = { - def check[K <: Key[K]: Arbitrary] = - forAll( - (r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: K) => - expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k)) - check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]] + forAll { + (r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: Key) => + expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k) + } + } + + def expectValue(expected: Scoped)(x: Scoped) = { + val equals = x.scope == expected.scope && x.key == expected.key + if (equals) proved else falsified :| s"Expected $expected but got $x" } } diff --git a/main-settings/src/test/scala/sbt/std/TaskPosSpec.scala b/main-settings/src/test/scala/sbt/std/TaskPosSpec.scala index 68d8f9cdf..a7df2aba8 100644 --- a/main-settings/src/test/scala/sbt/std/TaskPosSpec.scala +++ b/main-settings/src/test/scala/sbt/std/TaskPosSpec.scala @@ -10,12 +10,11 @@ package sbt.std class TaskPosSpec { // Dynamic tasks can have task invocations inside if branches locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") val bar = taskKey[String]("") - var condition = true - val baz = Def.taskDyn[String] { + val condition = true + Def.taskDyn[String] { if (condition) foo else bar } @@ -23,23 +22,21 @@ class TaskPosSpec { // Dynamic settings can have 
setting invocations inside if branches locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = settingKey[String]("") val bar = settingKey[String]("") - var condition = true - val baz = Def.settingDyn[String] { + val condition = true + Def.settingDyn[String] { if (condition) foo else bar } } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - var condition = true - val baz = Def.task[String] { + val condition = true + Def.task[String] { val fooAnon = () => foo.value: @sbtUnchecked if (condition) fooAnon() else fooAnon() @@ -47,11 +44,10 @@ class TaskPosSpec { } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - var condition = true - val baz = Def.task[String] { + val condition = true + Def.task[String] { val fooAnon = () => (foo.value: @sbtUnchecked) + "" if (condition) fooAnon() else fooAnon() @@ -59,12 +55,11 @@ class TaskPosSpec { } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") val bar = taskKey[String]("") - var condition = true - val baz = Def.task[String] { + val condition = true + Def.task[String] { if (condition) foo.value: @sbtUnchecked else bar.value: @sbtUnchecked } @@ -72,11 +67,10 @@ class TaskPosSpec { locally { // This is fix 1 for appearance of tasks inside anons - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - var condition = true - val baz = Def.task[String] { + val condition = true + Def.task[String] { val fooResult = foo.value val anon = () => fooResult + " " if (condition) anon() @@ -86,11 +80,10 @@ class TaskPosSpec { locally { // This is fix 2 for appearance of tasks inside anons - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - var condition = true - val baz = Def.taskDyn[String] { + val condition = true + Def.taskDyn[String] { val anon1 = (value: String) => value + " " if (condition) { 
Def.task(anon1(foo.value)) @@ -100,31 +93,27 @@ class TaskPosSpec { locally { // missing .value error should not happen inside task dyn - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - val baz = Def.taskDyn[String] { + Def.taskDyn[String] { foo } } locally { - // missing .value error should not happen inside task dyn - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") val avoidDCE = "" - val baz = Def.task[String] { - foo: @sbtUnchecked + Def.task[String] { + val _ = foo: @sbtUnchecked avoidDCE } } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - val baz = Def.task[String] { + Def.task[String] { def inner(s: KeyedInitialize[_]) = println(s) inner(foo) "" @@ -133,11 +122,10 @@ class TaskPosSpec { locally { // In theory, this should be reported, but missing .value analysis is dumb at the cost of speed - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") def avoidDCE = { println(""); "" } - val baz = Def.task[String] { + Def.task[String] { val (_, _) = "" match { case _ => (foo, 1 + 2) } @@ -146,15 +134,14 @@ class TaskPosSpec { } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = taskKey[String]("") - def avoidDCE = { println(""); "" } - val baz = Def.task[String] { + def avoidDCE(x: TaskKey[String]) = x.toString + Def.task[String] { val hehe = foo // We do not detect `hehe` because guessing that the user did the wrong thing would require // us to run the unused name traverser defined in Typer (and hence proxy it from context util) - avoidDCE + avoidDCE(hehe) } } @@ -168,11 +155,10 @@ class TaskPosSpec { } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = settingKey[String]("") val condition = true - val baz = Def.task[String] { + Def.task[String] { // settings can be evaluated in a condition if (condition) foo.value else "..." 
@@ -180,10 +166,9 @@ class TaskPosSpec { } locally { - import sbt._ - import sbt.Def._ + import sbt._, Def._ val foo = settingKey[String]("") - val baz = Def.task[Seq[String]] { + Def.task[Seq[String]] { (1 to 10).map(_ => foo.value) } } diff --git a/main-settings/src/test/scala/sbt/std/TestUtil.scala b/main-settings/src/test/scala/sbt/std/TestUtil.scala index dc0098f19..37bf50fef 100644 --- a/main-settings/src/test/scala/sbt/std/TestUtil.scala +++ b/main-settings/src/test/scala/sbt/std/TestUtil.scala @@ -7,11 +7,9 @@ package sbt.std -import scala.reflect._ +import scala.tools.reflect.ToolBox object TestUtil { - import tools.reflect.ToolBox - def eval(code: String, compileOptions: String = ""): Any = { val tb = mkToolbox(compileOptions) tb.eval(tb.parse(code)) diff --git a/main-settings/src/test/scala/sbt/std/neg/TaskNegSpec.scala b/main-settings/src/test/scala/sbt/std/neg/TaskNegSpec.scala index 75549783e..e3819d9eb 100644 --- a/main-settings/src/test/scala/sbt/std/neg/TaskNegSpec.scala +++ b/main-settings/src/test/scala/sbt/std/neg/TaskNegSpec.scala @@ -7,15 +7,19 @@ package sbt.std.neg +import scala.tools.reflect.ToolBoxError + import org.scalatest.FunSuite + import sbt.std.TaskLinterDSLFeedback import sbt.std.TestUtil._ class TaskNegSpec extends FunSuite { - import tools.reflect.ToolBoxError - def expectError(errorSnippet: String, - compileOptions: String = "", - baseCompileOptions: String = s"-cp $toolboxClasspath")(code: String) = { + def expectError( + errorSnippet: String, + compileOptions: String = "", + baseCompileOptions: String = s"-cp $toolboxClasspath", + )(code: String) = { val errorMessage = intercept[ToolBoxError] { eval(code, s"$compileOptions $baseCompileOptions") println(s"Test failed -- compilation was successful! 
Expected:\n$errorSnippet") diff --git a/main/src/main/contraband-scala/sbt/JavaVersion.scala b/main/src/main/contraband-scala/sbt/JavaVersion.scala new file mode 100644 index 000000000..4c630e3cd --- /dev/null +++ b/main/src/main/contraband-scala/sbt/JavaVersion.scala @@ -0,0 +1,40 @@ +/** + * This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]]. + */ + +// DO NOT EDIT MANUALLY +package sbt +final class JavaVersion private ( + val numbers: Vector[Long], + val vendor: Option[String]) extends Serializable { + def numberStr: String = numbers.mkString(".") + + + override def equals(o: Any): Boolean = o match { + case x: JavaVersion => (this.numbers == x.numbers) && (this.vendor == x.vendor) + case _ => false + } + override def hashCode: Int = { + 37 * (37 * (37 * (17 + "sbt.JavaVersion".##) + numbers.##) + vendor.##) + } + override def toString: String = { + vendor.map(_ + "@").getOrElse("") + numberStr + } + private[this] def copy(numbers: Vector[Long] = numbers, vendor: Option[String] = vendor): JavaVersion = { + new JavaVersion(numbers, vendor) + } + def withNumbers(numbers: Vector[Long]): JavaVersion = { + copy(numbers = numbers) + } + def withVendor(vendor: Option[String]): JavaVersion = { + copy(vendor = vendor) + } + def withVendor(vendor: String): JavaVersion = { + copy(vendor = Option(vendor)) + } +} +object JavaVersion { + def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version) + def apply(numbers: Vector[Long], vendor: Option[String]): JavaVersion = new JavaVersion(numbers, vendor) + def apply(numbers: Vector[Long], vendor: String): JavaVersion = new JavaVersion(numbers, Option(vendor)) +} diff --git a/main/src/main/contraband/main.contra b/main/src/main/contraband/main.contra index eb9e9f42c..5cabb0cd4 100644 --- a/main/src/main/contraband/main.contra +++ b/main/src/main/contraband/main.contra @@ -17,3 +17,13 @@ enum PluginTrigger { AllRequirements NoTrigger } + +type JavaVersion { + 
numbers: [Long] + vendor: String + + #x def numberStr: String = numbers.mkString(".") + #xtostring vendor.map(_ + "@").getOrElse("") + numberStr + + #xcompanion def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version) +} diff --git a/main/src/main/scala/sbt/BackgroundJobService.scala b/main/src/main/scala/sbt/BackgroundJobService.scala index 5af16058e..d06288b22 100644 --- a/main/src/main/scala/sbt/BackgroundJobService.scala +++ b/main/src/main/scala/sbt/BackgroundJobService.scala @@ -23,7 +23,8 @@ abstract class BackgroundJobService extends Closeable { * then you could process.destroy() for example. */ def runInBackground(spawningTask: ScopedKey[_], state: State)( - start: (Logger, File) => Unit): JobHandle + start: (Logger, File) => Unit + ): JobHandle /** Same as shutown. */ def close(): Unit @@ -51,7 +52,8 @@ object BackgroundJobService { { val stringIdParser: Parser[Seq[String]] = Space ~> token( NotSpace examples handles.map(_.id.toString).toSet, - description = "").+ + description = "" + ).+ stringIdParser.map { strings => strings.map(Integer.parseInt(_)).flatMap(id => handles.find(_.id == id)) } diff --git a/main/src/main/scala/sbt/BuildPaths.scala b/main/src/main/scala/sbt/BuildPaths.scala index b83fe2311..b74bfde44 100644 --- a/main/src/main/scala/sbt/BuildPaths.scala +++ b/main/src/main/scala/sbt/BuildPaths.scala @@ -17,19 +17,25 @@ object BuildPaths { val globalBaseDirectory = AttributeKey[File]( "global-base-directory", "The base directory for global sbt configuration and staging.", - DSetting) - val globalPluginsDirectory = AttributeKey[File]("global-plugins-directory", - "The base directory for global sbt plugins.", - DSetting) - val globalSettingsDirectory = AttributeKey[File]("global-settings-directory", - "The base directory for global sbt settings.", - DSetting) + DSetting + ) + val globalPluginsDirectory = AttributeKey[File]( + "global-plugins-directory", + "The base directory for global sbt plugins.", + 
DSetting + ) + val globalSettingsDirectory = AttributeKey[File]( + "global-settings-directory", + "The base directory for global sbt settings.", + DSetting + ) val stagingDirectory = AttributeKey[File]("staging-directory", "The directory for staging remote projects.", DSetting) val dependencyBaseDirectory = AttributeKey[File]( "dependency-base-directory", "The base directory for caching dependency resolution.", - DSetting) + DSetting + ) val globalZincDirectory = AttributeKey[File]("global-zinc-directory", "The base directory for Zinc internals.", DSetting) @@ -56,7 +62,8 @@ object BuildPaths { def getGlobalPluginsDirectory(state: State, globalBase: File): File = fileSetting(globalPluginsDirectory, GlobalPluginsProperty, defaultGlobalPlugins(globalBase))( - state) + state + ) def getGlobalSettingsDirectory(state: State, globalBase: File): File = fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state) @@ -70,11 +77,13 @@ object BuildPaths { fileSetting(globalZincDirectory, GlobalZincProperty, defaultGlobalZinc(globalBase))(state) private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)( - state: State): File = + state: State + ): File = getFileSetting(stateKey, property, default)(state) def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)( - state: State): File = + state: State + ): File = state get stateKey orElse getFileProperty(property) getOrElse default def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap { diff --git a/main/src/main/scala/sbt/BuildSyntax.scala b/main/src/main/scala/sbt/BuildSyntax.scala index a2ef2c2cd..527481e87 100644 --- a/main/src/main/scala/sbt/BuildSyntax.scala +++ b/main/src/main/scala/sbt/BuildSyntax.scala @@ -11,7 +11,7 @@ import sbt.internal.DslEntry import sbt.librarymanagement.Configuration private[sbt] trait BuildSyntax { - import language.experimental.macros + import scala.language.experimental.macros 
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T] def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T] def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T] diff --git a/main/src/main/scala/sbt/Cross.scala b/main/src/main/scala/sbt/Cross.scala index 745206a27..78571d78b 100644 --- a/main/src/main/scala/sbt/Cross.scala +++ b/main/src/main/scala/sbt/Cross.scala @@ -72,8 +72,7 @@ object Cross { } & spacedFirst(CrossCommand) } - private def crossRestoreSessionParser(state: State): Parser[String] = - token(CrossRestoreSessionCommand) + private def crossRestoreSessionParser: Parser[String] = token(CrossRestoreSessionCommand) private[sbt] def requireSession[T](p: State => Parser[T]): State => Parser[T] = s => if (s get sessionSettings isEmpty) failure("No project loaded") else p(s) @@ -100,14 +99,24 @@ object Cross { } /** - * Parse the given command into either an aggregate command or a command for a project + * Parse the given command into a list of aggregate projects and command to issue. 
*/ - private def parseCommand(command: String): Either[String, (String, String)] = { + private[sbt] def parseSlashCommand( + extracted: Extracted + )(command: String): (Seq[ProjectRef], String) = { + import extracted._ import DefaultParsers._ val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map { - case project ~ cmd => (project, cmd.mkString) + case seg1 ~ cmd => (seg1, cmd.mkString) + } + Parser.parse(command, parser) match { + case Right((seg1, cmd)) => + structure.allProjectRefs.find(_.project == seg1) match { + case Some(proj) => (Seq(proj), cmd) + case _ => (resolveAggregates(extracted), command) + } + case _ => (resolveAggregates(extracted), command) } - Parser.parse(command, parser).left.map(_ => command) } def crossBuild: Command = @@ -116,12 +125,7 @@ object Cross { private def crossBuildCommandImpl(state: State, args: CrossArgs): State = { val x = Project.extract(state) import x._ - - val (aggs, aggCommand) = parseCommand(args.command) match { - case Right((project, cmd)) => - (structure.allProjectRefs.filter(_.project == project), cmd) - case Left(cmd) => (resolveAggregates(x), cmd) - } + val (aggs, aggCommand) = parseSlashCommand(x)(args.command) val projCrossVersions = aggs map { proj => proj -> crossVersions(x, proj) @@ -151,7 +155,8 @@ object Cross { "configuration. This could result in subprojects cross building against Scala versions that they are " + "not compatible with. Try issuing cross building command with tasks instead, since sbt will be able " + "to ensure that cross building is only done using configured project and Scala version combinations " + - "that are configured.") + "that are configured." 
+ ) state.log.debug("Scala versions configuration is:") projCrossVersions.foreach { case (project, versions) => state.log.debug(s"$project: $versions") @@ -175,12 +180,14 @@ object Cross { case (version, projects) if aggCommand.contains(" ") => // If the command contains a space, then the `all` command won't work because it doesn't support issuing // commands with spaces, so revert to running the command on each project one at a time - s"$SwitchCommand $verbose $version" :: projects.map(project => - s"$project/$aggCommand") + s"$SwitchCommand $verbose $version" :: projects + .map(project => s"$project/$aggCommand") case (version, projects) => // First switch scala version, then use the all command to run the command on each project concurrently - Seq(s"$SwitchCommand $verbose $version", - projects.map(_ + "/" + aggCommand).mkString("all ", " ", "")) + Seq( + s"$SwitchCommand $verbose $version", + projects.map(_ + "/" + aggCommand).mkString("all ", " ", "") + ) } } @@ -189,9 +196,11 @@ object Cross { } def crossRestoreSession: Command = - Command.arb(crossRestoreSessionParser, crossRestoreSessionHelp)(crossRestoreSessionImpl) + Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)( + (s, _) => crossRestoreSessionImpl(s) + ) - private def crossRestoreSessionImpl(state: State, arg: String): State = { + private def crossRestoreSessionImpl(state: State): State = { restoreCapturedSession(state, Project.extract(state)) } @@ -216,12 +225,27 @@ object Cross { Command.arb(requireSession(switchParser), switchHelp)(switchCommandImpl) private def switchCommandImpl(state: State, args: Switch): State = { - val switchedState = switchScalaVersion(args, state) + val x = Project.extract(state) + val (switchedState, affectedRefs) = switchScalaVersion(args, state) - args.command.toList ::: switchedState + val strictCmd = + if (args.version.force) { + // The Scala version was forced on the whole build, run as is + args.command + } else { + args.command.map { rawCmd => + 
val (aggs, aggCommand) = parseSlashCommand(x)(rawCmd) + aggs + .intersect(affectedRefs) + .map({ case ProjectRef(_, proj) => s"$proj/$aggCommand" }) + .mkString("all ", " ", "") + } + } + + strictCmd.toList ::: switchedState } - private def switchScalaVersion(switch: Switch, state: State): State = { + private def switchScalaVersion(switch: Switch, state: State): (State, Seq[ResolvedReference]) = { val extracted = Project.extract(state) import extracted._ @@ -291,7 +315,7 @@ object Cross { } } - setScalaVersionForProjects(version, instance, projects, state, extracted) + (setScalaVersionForProjects(version, instance, projects, state, extracted), projects.map(_._1)) } private def setScalaVersionForProjects( diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index c2604a1d3..d8c072cae 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -26,7 +26,12 @@ import sbt.internal.librarymanagement.mavenint.{ PomExtraDependencyAttributes, SbtPomExtraProperties } -import sbt.internal.server.{ LanguageServerReporter, Definition } +import sbt.internal.server.{ + LanguageServerReporter, + Definition, + LanguageServerProtocol, + ServerHandler +} import sbt.internal.testing.TestLogger import sbt.internal.util._ import sbt.internal.util.Attributed.data @@ -64,6 +69,7 @@ import sbt.librarymanagement.syntax._ import sbt.util.InterfaceUtil.{ toJavaFunction => f1 } import sbt.util._ import sbt.util.CacheImplicits._ +import scala.collection.immutable.ListMap import scala.concurrent.duration.FiniteDuration import scala.util.control.NonFatal import scala.xml.NodeSeq @@ -129,13 +135,13 @@ object Defaults extends BuildCommon { def buildCore: Seq[Setting[_]] = thisBuildCore ++ globalCore def thisBuildCore: Seq[Setting[_]] = inScope(GlobalScope.copy(project = Select(ThisBuild)))( - Seq( - managedDirectory := baseDirectory.value / "lib_managed" - )) + managedDirectory := baseDirectory.value / "lib_managed" + 
) private[sbt] lazy val globalCore: Seq[Setting[_]] = globalDefaults( defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq( excludeFilter :== HiddenFileFilter - ) ++ globalIvyCore ++ globalJvmCore) ++ globalSbtCore + ) ++ globalIvyCore ++ globalJvmCore + ) ++ globalSbtCore private[sbt] lazy val globalJvmCore: Seq[Setting[_]] = Seq( @@ -154,6 +160,9 @@ object Defaults extends BuildCommon { scalaHome :== None, apiURL := None, javaHome :== None, + discoveredJavaHomes := CrossJava.discoverJavaHomes, + javaHomes :== ListMap.empty, + fullJavaHomes := CrossJava.expandJavaHomes(discoveredJavaHomes.value ++ javaHomes.value), testForkedParallel :== false, javaOptions :== Nil, sbtPlugin :== false, @@ -242,8 +251,10 @@ object Defaults extends BuildCommon { () => { IO.delete(dir); IO.createDirectory(dir) } }, - Previous.cache := new Previous(Def.streamsManagerKey.value, - Previous.references.value.getReferences), + Previous.cache := new Previous( + Def.streamsManagerKey.value, + Previous.references.value.getReferences + ), Previous.references :== new Previous.References, concurrentRestrictions := defaultRestrictions.value, parallelExecution :== true, @@ -278,15 +289,21 @@ object Defaults extends BuildCommon { if (serverConnectionType.value == ConnectionType.Tcp) Set(ServerAuthentication.Token) else Set() }, + serverHandlers :== Nil, + fullServerHandlers := { + (Vector(LanguageServerProtocol.handler) + ++ serverHandlers.value + ++ Vector(ServerHandler.fallback)) + }, insideCI :== sys.env.contains("BUILD_NUMBER") || sys.env.contains("CI"), - )) + ) + ) def defaultTestTasks(key: Scoped): Seq[Setting[_]] = inTask(key)( - Seq( - tags := Seq(Tags.Test -> 1), - logBuffered := true - )) + tags := Seq(Tags.Test -> 1), + logBuffered := true + ) // TODO: This should be on the new default settings for a project. 
def projectCore: Seq[Setting[_]] = Seq( @@ -314,18 +331,24 @@ object Defaults extends BuildCommon { scalaSource := sourceDirectory.value / "scala", javaSource := sourceDirectory.value / "java", unmanagedSourceDirectories := { - makeCrossSources(scalaSource.value, - javaSource.value, - scalaBinaryVersion.value, - crossPaths.value) ++ - makePluginCrossSources(sbtPlugin.value, - scalaSource.value, - (sbtBinaryVersion in pluginCrossBuild).value, - crossPaths.value) + makeCrossSources( + scalaSource.value, + javaSource.value, + scalaBinaryVersion.value, + crossPaths.value + ) ++ + makePluginCrossSources( + sbtPlugin.value, + scalaSource.value, + (sbtBinaryVersion in pluginCrossBuild).value, + crossPaths.value + ) }, - unmanagedSources := collectFiles(unmanagedSourceDirectories, - includeFilter in unmanagedSources, - excludeFilter in unmanagedSources).value, + unmanagedSources := collectFiles( + unmanagedSourceDirectories, + includeFilter in unmanagedSources, + excludeFilter in unmanagedSources + ).value, watchSources in ConfigGlobal ++= { val baseDir = baseDirectory.value val bases = unmanagedSourceDirectories.value @@ -363,9 +386,11 @@ object Defaults extends BuildCommon { resourceDirectories := Classpaths .concatSettings(unmanagedResourceDirectories, managedResourceDirectories) .value, - unmanagedResources := collectFiles(unmanagedResourceDirectories, - includeFilter in unmanagedResources, - excludeFilter in unmanagedResources).value, + unmanagedResources := collectFiles( + unmanagedResourceDirectories, + includeFilter in unmanagedResources, + excludeFilter in unmanagedResources + ).value, watchSources in ConfigGlobal ++= { val bases = unmanagedResourceDirectories.value val include = (includeFilter in unmanagedResources).value @@ -397,19 +422,24 @@ object Defaults extends BuildCommon { def compileBase = inTask(console)(compilersSetting :: Nil) ++ compileBaseGlobal ++ Seq( incOptions := incOptions.value .withClassfileManagerType( - Option(TransactionalManagerType - 
.of(crossTarget.value / "classes.bak", sbt.util.Logger.Null): ClassFileManagerType).toOptional + Option( + TransactionalManagerType + .of(crossTarget.value / "classes.bak", sbt.util.Logger.Null): ClassFileManagerType + ).toOptional ), scalaInstance := scalaInstanceTask.value, crossVersion := (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled), sbtBinaryVersion in pluginCrossBuild := binarySbtVersion( - (sbtVersion in pluginCrossBuild).value), + (sbtVersion in pluginCrossBuild).value + ), crossSbtVersions := Vector((sbtVersion in pluginCrossBuild).value), - crossTarget := makeCrossTarget(target.value, - scalaBinaryVersion.value, - (sbtBinaryVersion in pluginCrossBuild).value, - sbtPlugin.value, - crossPaths.value), + crossTarget := makeCrossTarget( + target.value, + scalaBinaryVersion.value, + (sbtBinaryVersion in pluginCrossBuild).value, + sbtPlugin.value, + crossPaths.value + ), clean := { val _ = clean.value IvyActions.cleanCachedResolutionCache(ivyModule.value, streams.value.log) @@ -429,7 +459,8 @@ object Defaults extends BuildCommon { derive(crossScalaVersions := Seq(scalaVersion.value)), derive(compilersSetting), derive(scalaBinaryVersion := binaryScalaVersion(scalaVersion.value)) - )) + ) + ) def makeCrossSources( scalaSrcDir: File, @@ -443,10 +474,12 @@ object Defaults extends BuildCommon { Seq(scalaSrcDir, javaSrcDir) } - def makePluginCrossSources(isPlugin: Boolean, - scalaSrcDir: File, - sbtBinaryV: String, - cross: Boolean): Seq[File] = { + def makePluginCrossSources( + isPlugin: Boolean, + scalaSrcDir: File, + sbtBinaryV: String, + cross: Boolean + ): Seq[File] = { if (cross && isPlugin) Vector(scalaSrcDir.getParentFile / s"${scalaSrcDir.name}-sbt-$sbtBinaryV") else Vector() @@ -475,10 +508,12 @@ object Defaults extends BuildCommon { scalaJarsTarget = zincDir, log = streams.value.log ) - val compilers = ZincUtil.compilers(instance = scalaInstance.value, - classpathOptions = classpathOptions.value, - javaHome = javaHome.value, - 
scalac) + val compilers = ZincUtil.compilers( + instance = scalaInstance.value, + classpathOptions = classpathOptions.value, + javaHome = javaHome.value, + scalac + ) val classLoaderCache = state.value.classLoaderCache if (java.lang.Boolean.getBoolean("sbt.disable.interface.classloader.cache")) compilers else { @@ -496,7 +531,8 @@ object Defaults extends BuildCommon { globalDefaults(enableBinaryCompileAnalysis := true) lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)( - compileInputsSettings) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq( + compileInputsSettings + ) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq( compile := compileTask.value, manipulateBytecode := compileIncremental.value, compileIncremental := (compileIncrementalTask tag (Tags.Compile, Tags.CPU)).value, @@ -512,7 +548,7 @@ object Defaults extends BuildCommon { }, compileIncSetup := compileIncSetupTask.value, console := consoleTask.value, - collectAnalyses := Definition.collectAnalysesTask.value, + collectAnalyses := Definition.collectAnalysesTask.map(_ => ()).value, consoleQuick := consoleQuickTask.value, discoveredMainClasses := (compile map discoverMainClasses storeAs discoveredMainClasses xtriggeredBy compile).value, discoveredSbtPlugins := discoverSbtPluginNames.value, @@ -534,7 +570,8 @@ object Defaults extends BuildCommon { initialCommands :== "", cleanupCommands :== "", asciiGraphWidth :== 40 - )) + ) + ) lazy val projectTasks: Seq[Setting[_]] = Seq( cleanFiles := cleanFilesTask.value, @@ -542,6 +579,7 @@ object Defaults extends BuildCommon { clean := (Def.task { IO.delete(cleanFiles.value) } tag (Tags.Clean)).value, consoleProject := consoleProjectTask.value, watchTransitiveSources := watchTransitiveSourcesTask.value, + watchingMessage := Watched.projectWatchingMessage(thisProjectRef.value.project), watch := watchSetting.value ) @@ -662,14 +700,17 @@ object Defaults extends BuildCommon { testOptions 
:== Nil, testResultLogger :== TestResultLogger.Default, testFilter in testOnly :== (selectedFilter _) - )) + ) + ) lazy val testTasks : Seq[Setting[_]] = testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions( - testQuick) ++ testDefaults ++ Seq( + testQuick + ) ++ testDefaults ++ Seq( testLoader := TestFramework.createTestLoader( data(fullClasspath.value), scalaInstance.value, - IO.createUniqueDirectory(taskTemporaryDirectory.value)), + IO.createUniqueDirectory(taskTemporaryDirectory.value) + ), loadedTestFrameworks := { val loader = testLoader.value val log = streams.value.log @@ -687,7 +728,6 @@ object Defaults extends BuildCommon { (testGrouping in test).value, (testExecution in test).value, (fullClasspath in test).value, - (javaHome in test).value, testForkedParallel.value, (javaOptions in test).value ) @@ -717,24 +757,27 @@ object Defaults extends BuildCommon { lazy val ConfigGlobal: Scope = ConfigZero def testTaskOptions(key: Scoped): Seq[Setting[_]] = inTask(key)( - Seq( - testListeners := { - TestLogger.make(streams.value.log, - closeableTestLogger(streamsManager.value, - test in resolvedScoped.value.scope, - logBuffered.value)) +: - new TestStatusReporter(succeededFile(streams.in(test).value.cacheDirectory)) +: - testListeners.in(TaskZero).value - }, - testOptions := Tests.Listeners(testListeners.value) +: (testOptions in TaskZero).value, - testExecution := testExecutionTask(key).value - )) ++ inScope(GlobalScope)( - Seq( - derive(testGrouping := singleTestGroupDefault.value) - )) + testListeners := { + TestLogger.make( + streams.value.log, + closeableTestLogger( + streamsManager.value, + test in resolvedScoped.value.scope, + logBuffered.value + ) + ) +: + new TestStatusReporter(succeededFile(streams.in(test).value.cacheDirectory)) +: + testListeners.in(TaskZero).value + }, + testOptions := Tests.Listeners(testListeners.value) +: (testOptions in TaskZero).value, + testExecution := testExecutionTask(key).value + ) ++ inScope(GlobalScope)( + 
derive(testGrouping := singleTestGroupDefault.value) + ) private[this] def closeableTestLogger(manager: Streams, baseKey: Scoped, buffered: Boolean)( - tdef: TestDefinition): TestLogger.PerTest = { + tdef: TestDefinition + ): TestLogger.PerTest = { val scope = baseKey.scope val extra = scope.extra match { case Select(x) => x; case _ => AttributeMap.empty } val key = ScopedKey(scope.copy(extra = Select(testExtra(extra, tdef))), baseKey.key) @@ -775,9 +818,11 @@ object Defaults extends BuildCommon { def testExecutionTask(task: Scoped): Initialize[Task[Tests.Execution]] = Def.task { - new Tests.Execution((testOptions in task).value, - (parallelExecution in task).value, - (tags in task).value) + new Tests.Execution( + (testOptions in task).value, + (parallelExecution in task).value, + (tags in task).value + ) } def testQuickFilter: Initialize[Task[Seq[String] => Seq[String => Boolean]]] = @@ -845,7 +890,6 @@ object Defaults extends BuildCommon { testGrouping.value, newConfig, fullClasspath.value, - javaHome.value, testForkedParallel.value, javaOptions.value ) @@ -856,9 +900,11 @@ object Defaults extends BuildCommon { } } - def createTestRunners(frameworks: Map[TestFramework, Framework], - loader: ClassLoader, - config: Tests.Execution): Map[TestFramework, Runner] = { + def createTestRunners( + frameworks: Map[TestFramework, Framework], + loader: ClassLoader, + config: Tests.Execution + ): Map[TestFramework, Runner] = { import Tests.Argument val opts = config.options.toList frameworks.map { @@ -872,22 +918,24 @@ object Defaults extends BuildCommon { } } - private[sbt] def allTestGroupsTask(s: TaskStreams, - frameworks: Map[TestFramework, Framework], - loader: ClassLoader, - groups: Seq[Tests.Group], - config: Tests.Execution, - cp: Classpath, - javaHome: Option[File]): Initialize[Task[Tests.Output]] = { - allTestGroupsTask(s, - frameworks, - loader, - groups, - config, - cp, - javaHome, - forkedParallelExecution = false, - javaOptions = Nil) + private[sbt] def 
allTestGroupsTask( + s: TaskStreams, + frameworks: Map[TestFramework, Framework], + loader: ClassLoader, + groups: Seq[Tests.Group], + config: Tests.Execution, + cp: Classpath, + ): Initialize[Task[Tests.Output]] = { + allTestGroupsTask( + s, + frameworks, + loader, + groups, + config, + cp, + forkedParallelExecution = false, + javaOptions = Nil + ) } private[sbt] def allTestGroupsTask( @@ -897,43 +945,47 @@ object Defaults extends BuildCommon { groups: Seq[Tests.Group], config: Tests.Execution, cp: Classpath, - javaHome: Option[File], - forkedParallelExecution: Boolean): Initialize[Task[Tests.Output]] = { - allTestGroupsTask(s, - frameworks, - loader, - groups, - config, - cp, - javaHome, - forkedParallelExecution, - javaOptions = Nil) + forkedParallelExecution: Boolean + ): Initialize[Task[Tests.Output]] = { + allTestGroupsTask( + s, + frameworks, + loader, + groups, + config, + cp, + forkedParallelExecution, + javaOptions = Nil + ) } - private[sbt] def allTestGroupsTask(s: TaskStreams, - frameworks: Map[TestFramework, Framework], - loader: ClassLoader, - groups: Seq[Tests.Group], - config: Tests.Execution, - cp: Classpath, - javaHome: Option[File], - forkedParallelExecution: Boolean, - javaOptions: Seq[String]): Initialize[Task[Tests.Output]] = { + private[sbt] def allTestGroupsTask( + s: TaskStreams, + frameworks: Map[TestFramework, Framework], + loader: ClassLoader, + groups: Seq[Tests.Group], + config: Tests.Execution, + cp: Classpath, + forkedParallelExecution: Boolean, + javaOptions: Seq[String] + ): Initialize[Task[Tests.Output]] = { val runners = createTestRunners(frameworks, loader, config) val groupTasks = groups map { - case Tests.Group(name, tests, runPolicy) => + case Tests.Group(_, tests, runPolicy) => runPolicy match { case Tests.SubProcess(opts) => s.log.debug(s"javaOptions: ${opts.runJVMOptions}") val forkedConfig = config.copy(parallel = config.parallel && forkedParallelExecution) s.log.debug(s"Forking tests - parallelism = 
${forkedConfig.parallel}") - ForkTests(runners, - tests.toVector, - forkedConfig, - cp.files, - opts, - s.log, - Tags.ForkedTestGroup) + ForkTests( + runners, + tests.toVector, + forkedConfig, + cp.files, + opts, + s.log, + Tags.ForkedTestGroup + ) case Tests.InProcess => if (javaOptions.nonEmpty) { s.log.warn("javaOptions will be ignored, fork is set to false") @@ -988,10 +1040,11 @@ object Defaults extends BuildCommon { Seq( packageOptions :== Nil, artifactName :== (Artifact.artifactName _) - )) + ) + ) lazy val packageConfig: Seq[Setting[_]] = - inTask(packageBin)(Seq( + inTask(packageBin)( packageOptions := { val n = name.value val ver = version.value @@ -1003,14 +1056,14 @@ object Defaults extends BuildCommon { Package.addImplManifestAttributes(n, ver, homepage.value, org, orgName) +: main.map(Package.MainClass.apply) ++: old } - )) ++ + ) ++ inTask(packageSrc)( - Seq( - packageOptions := Package.addSpecManifestAttributes( - name.value, - version.value, - organizationName.value) +: packageOptions.value - )) ++ + packageOptions := Package.addSpecManifestAttributes( + name.value, + version.value, + organizationName.value + ) +: packageOptions.value + ) ++ packageTaskSettings(packageBin, packageBinMappings) ++ packageTaskSettings(packageSrc, packageSrcMappings) ++ packageTaskSettings(packageDoc, packageDocMappings) ++ @@ -1033,26 +1086,34 @@ object Defaults extends BuildCommon { (srcs --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat) } def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories) - def relativeMappings(files: ScopedTaskable[Seq[File]], - dirs: ScopedTaskable[Seq[File]]): Initialize[Task[Seq[(File, String)]]] = + def relativeMappings( + files: ScopedTaskable[Seq[File]], + dirs: ScopedTaskable[Seq[File]] + ): Initialize[Task[Seq[(File, String)]]] = Def.task { val rs = files.toTask.value val rdirs = dirs.toTask.value (rs --- rdirs) pair (relativeTo(rdirs) | flat) } - def collectFiles(dirs: 
ScopedTaskable[Seq[File]], - filter: ScopedTaskable[FileFilter], - excludes: ScopedTaskable[FileFilter]): Initialize[Task[Seq[File]]] = + def collectFiles( + dirs: ScopedTaskable[Seq[File]], + filter: ScopedTaskable[FileFilter], + excludes: ScopedTaskable[FileFilter] + ): Initialize[Task[Seq[File]]] = Def.task { dirs.toTask.value.descendantsExcept(filter.toTask.value, excludes.toTask.value).get } def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] = Def.setting { val f = artifactName.value - (crossTarget.value / f(ScalaVersion((scalaVersion in artifactName).value, - (scalaBinaryVersion in artifactName).value), - projectID.value, - art.value)).asFile + (crossTarget.value / f( + ScalaVersion( + (scalaVersion in artifactName).value, + (scalaBinaryVersion in artifactName).value + ), + projectID.value, + art.value + )).asFile } def artifactSetting: Initialize[Artifact] = @@ -1077,9 +1138,11 @@ object Defaults extends BuildCommon { } @deprecated("The configuration(s) should not be decided based on the classifier.", "1.0.0") - def artifactConfigurations(base: Artifact, - scope: Configuration, - classifier: Option[String]): Iterable[Configuration] = + def artifactConfigurations( + base: Artifact, + scope: Configuration, + classifier: Option[String] + ): Iterable[Configuration] = classifier match { case Some(c) => Artifact.classifierConf(c) :: Nil case None => scope :: Nil @@ -1087,14 +1150,13 @@ object Defaults extends BuildCommon { def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) = inTask(key)( - Seq( - key in TaskZero := packageTask.value, - packageConfiguration := packageConfigurationTask.value, - mappings := mappingsTask.value, - packagedArtifact := (artifact.value -> key.value), - artifact := artifactSetting.value, - artifactPath := artifactPathSetting(artifact).value - )) + key in TaskZero := packageTask.value, + packageConfiguration := packageConfigurationTask.value, + mappings := mappingsTask.value, + 
packagedArtifact := (artifact.value -> key.value), + artifact := artifactSetting.value, + artifactPath := artifactPathSetting(artifact).value + ) def packageTask: Initialize[Task[File]] = Def.task { @@ -1119,7 +1181,8 @@ object Defaults extends BuildCommon { classes match { case multiple if multiple.size > 1 => logger.warn( - "Multiple main classes detected. Run 'show discoveredMainClasses' to see the list") + "Multiple main classes detected. Run 'show discoveredMainClasses' to see the list" + ) case _ => } pickMainClass(classes) @@ -1138,7 +1201,8 @@ object Defaults extends BuildCommon { case xs if xs.isEmpty => () case xs => sys.error( - s"cleanKeepFiles contains directory/file that are not directly under cleanFiles: $xs") + s"cleanKeepFiles contains directory/file that are not directly under cleanFiles: $xs" + ) } val toClean = (dirItems filterNot { preserveSet(_) }) ++ fs toClean @@ -1150,8 +1214,9 @@ object Defaults extends BuildCommon { copyClasspath: Initialize[Boolean], scalaRun: Initialize[Task[ScalaRun]] ): Initialize[InputTask[JobHandle]] = { - val parser = Defaults.loadForParser(discoveredMainClasses)((s, names) => - Defaults.runMainParser(s, names getOrElse Nil)) + val parser = Defaults.loadForParser(discoveredMainClasses)( + (s, names) => Defaults.runMainParser(s, names getOrElse Nil) + ) Def.inputTask { val service = bgJobService.value val (mainClass, args) = parser.parsed @@ -1244,12 +1309,16 @@ object Defaults extends BuildCommon { } else { if (options.nonEmpty) { val mask = ScopeMask(project = false) - val showJavaOptions = Scope.displayMasked((javaOptions in resolvedScope).scopedKey.scope, - (javaOptions in resolvedScope).key.label, - mask) - val showFork = Scope.displayMasked((fork in resolvedScope).scopedKey.scope, - (fork in resolvedScope).key.label, - mask) + val showJavaOptions = Scope.displayMasked( + (javaOptions in resolvedScope).scopedKey.scope, + (javaOptions in resolvedScope).key.label, + mask + ) + val showFork = Scope.displayMasked( 
+ (fork in resolvedScope).scopedKey.scope, + (fork in resolvedScope).key.label, + mask + ) s.log.warn(s"$showJavaOptions will be ignored, $showFork is set to false") } new Run(si, trap, tmp) @@ -1294,49 +1363,50 @@ object Defaults extends BuildCommon { def docTaskSettings(key: TaskKey[File] = doc): Seq[Setting[_]] = inTask(key)( - Seq( - apiMappings ++= { - val dependencyCp = dependencyClasspath.value - val log = streams.value.log - if (autoAPIMappings.value) APIMappings.extract(dependencyCp, log).toMap - else Map.empty[File, URL] - }, - fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"), - key in TaskZero := { - val s = streams.value - val cs: Compilers = compilers.value - val srcs = sources.value - val out = target.value - val sOpts = scalacOptions.value - val xapis = apiMappings.value - val hasScala = srcs.exists(_.name.endsWith(".scala")) - val hasJava = srcs.exists(_.name.endsWith(".java")) - val cp = data(dependencyClasspath.value).toList - val label = nameForSrc(configuration.value.name) - val fiOpts = fileInputOptions.value - val reporter = (compilerReporter in compile).value - (hasScala, hasJava) match { - case (true, _) => - val options = sOpts ++ Opts.doc.externalAPI(xapis) - val runDoc = Doc.scaladoc(label, s.cacheStoreFactory sub "scala", cs.scalac match { - case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) - }, fiOpts) - runDoc(srcs, cp, out, options, maxErrors.value, s.log) - case (_, true) => - val javadoc = - sbt.inc.Doc.cachedJavadoc(label, s.cacheStoreFactory sub "java", cs.javaTools) - javadoc.run(srcs.toList, - cp, - out, - javacOptions.value.toList, - IncToolOptionsUtil.defaultIncToolOptions(), - s.log, - reporter) - case _ => () // do nothing - } - out + apiMappings ++= { + val dependencyCp = dependencyClasspath.value + val log = streams.value.log + if (autoAPIMappings.value) APIMappings.extract(dependencyCp, log).toMap + else Map.empty[File, URL] + }, + fileInputOptions := Seq("-doc-root-content", 
"-diagrams-dot-path"), + key in TaskZero := { + val s = streams.value + val cs: Compilers = compilers.value + val srcs = sources.value + val out = target.value + val sOpts = scalacOptions.value + val xapis = apiMappings.value + val hasScala = srcs.exists(_.name.endsWith(".scala")) + val hasJava = srcs.exists(_.name.endsWith(".java")) + val cp = data(dependencyClasspath.value).toList + val label = nameForSrc(configuration.value.name) + val fiOpts = fileInputOptions.value + val reporter = (compilerReporter in compile).value + (hasScala, hasJava) match { + case (true, _) => + val options = sOpts ++ Opts.doc.externalAPI(xapis) + val runDoc = Doc.scaladoc(label, s.cacheStoreFactory sub "scala", cs.scalac match { + case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) + }, fiOpts) + runDoc(srcs, cp, out, options, maxErrors.value, s.log) + case (_, true) => + val javadoc = + sbt.inc.Doc.cachedJavadoc(label, s.cacheStoreFactory sub "java", cs.javaTools) + javadoc.run( + srcs.toList, + cp, + out, + javacOptions.value.toList, + IncToolOptionsUtil.defaultIncToolOptions(), + s.log, + reporter + ) + case _ => () // do nothing } - )) + out + } + ) def mainBgRunTask = mainBgRunTaskForConfig(Select(Runtime)) def mainBgRunMainTask = mainBgRunMainTaskForConfig(Select(Runtime)) @@ -1393,7 +1463,7 @@ object Defaults extends BuildCommon { private[this] def exported(w: PrintWriter, command: String): Seq[String] => Unit = args => w.println((command +: args).mkString(" ")) - private[this] def exported(s: TaskStreams, command: String): Seq[String] => Unit = args => { + private[this] def exported(s: TaskStreams, command: String): Seq[String] => Unit = { val w = s.text(ExportStream) try exported(w, command) finally w.close() // workaround for #937 @@ -1525,15 +1595,17 @@ object Defaults extends BuildCommon { val max = maxErrors.value val spms = sourcePositionMappers.value val problems = - analysis.infos.allInfos.values.flatMap(i => - i.getReportedProblems ++ 
i.getUnreportedProblems) + analysis.infos.allInfos.values + .flatMap(i => i.getReportedProblems ++ i.getUnreportedProblems) val reporter = new ManagedLoggedReporter(max, streams.value.log, foldMappers(spms)) problems.foreach(p => reporter.log(p)) } def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID = - m.extra(PomExtraDependencyAttributes.SbtVersionKey -> sbtV, - PomExtraDependencyAttributes.ScalaVersionKey -> scalaV) + m.extra( + PomExtraDependencyAttributes.SbtVersionKey -> sbtV, + PomExtraDependencyAttributes.ScalaVersionKey -> scalaV + ) .withCrossVersion(Disabled()) def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.taskDyn { @@ -1549,7 +1621,7 @@ object Defaults extends BuildCommon { val cacheStore = s.cacheStoreFactory make "copy-resources" val mappings = (resources.value --- dirs) pair (rebase(dirs, t) | flat(t)) s.log.debug("Copy resource mappings: " + mappings.mkString("\n\t", "\n\t", "")) - Sync(cacheStore)(mappings) + Sync.sync(cacheStore)(mappings) mappings } @@ -1622,7 +1694,11 @@ object Defaults extends BuildCommon { val sv = (sbtVersion in pluginCrossBuild).value val scalaV = (scalaVersion in pluginCrossBuild).value val binVersion = (scalaBinaryVersion in pluginCrossBuild).value - val cross = if (id.crossVersioned) CrossVersion.binary else Disabled() + val cross = id.crossVersionedValue match { + case CrossValue.Disabled => Disabled() + case CrossValue.Full => CrossVersion.full + case CrossValue.Binary => CrossVersion.binary + } val base = ModuleID(id.groupID, id.name, sv).withCrossVersion(cross) CrossVersion(scalaV, binVersion)(base).withCrossVersion(Disabled()) } @@ -1630,22 +1706,23 @@ object Defaults extends BuildCommon { // build.sbt is treated a Scala source of metabuild, so to enable deprecation flag on build.sbt we set the option here. 
lazy val deprecationSettings: Seq[Setting[_]] = inConfig(Compile)( - Seq( - scalacOptions := { - val old = scalacOptions.value - val existing = old.toSet - val d = "-deprecation" - if (sbtPlugin.value && !existing(d)) d :: old.toList - else old - } - )) + scalacOptions := { + val old = scalacOptions.value + val existing = old.toSet + val d = "-deprecation" + if (sbtPlugin.value && !existing(d)) d :: old.toList + else old + } + ) } object Classpaths { import Keys._ import Defaults._ - def concatDistinct[T](a: ScopedTaskable[Seq[T]], - b: ScopedTaskable[Seq[T]]): Initialize[Task[Seq[T]]] = Def.task { + def concatDistinct[T]( + a: ScopedTaskable[Seq[T]], + b: ScopedTaskable[Seq[T]] + ): Initialize[Task[Seq[T]]] = Def.task { (a.toTask.value ++ b.toTask.value).distinct } def concat[T](a: ScopedTaskable[Seq[T]], b: ScopedTaskable[Seq[T]]): Initialize[Task[Seq[T]]] = @@ -1656,10 +1733,12 @@ object Classpaths { lazy val configSettings: Seq[Setting[_]] = classpaths ++ Seq( products := makeProducts.value, productDirectories := classDirectory.value :: Nil, - classpathConfiguration := findClasspathConfig(internalConfigurationMap.value, - configuration.value, - classpathConfiguration.?.value, - update.value) + classpathConfiguration := findClasspathConfig( + internalConfigurationMap.value, + configuration.value, + classpathConfiguration.?.value, + update.value + ) ) private[this] def classpaths: Seq[Setting[_]] = Seq( @@ -1668,9 +1747,11 @@ object Classpaths { fullClasspath := concatDistinct(exportedProducts, dependencyClasspath).value, internalDependencyClasspath := internalDependencies.value, unmanagedClasspath := unmanagedDependencies.value, - managedClasspath := managedJars(classpathConfiguration.value, - classpathTypes.value, - update.value), + managedClasspath := managedJars( + classpathConfiguration.value, + classpathTypes.value, + update.value + ), exportedProducts := trackedExportedProducts(TrackLevel.TrackAlways).value, exportedProductsIfMissing := 
trackedExportedProducts(TrackLevel.TrackIfMissing).value, exportedProductsNoTracking := trackedExportedProducts(TrackLevel.NoTracking).value, @@ -1680,10 +1761,12 @@ object Classpaths { internalDependencyAsJars := internalDependencyJarsTask.value, dependencyClasspathAsJars := concat(internalDependencyAsJars, externalDependencyClasspath).value, fullClasspathAsJars := concatDistinct(exportedProductJars, dependencyClasspathAsJars).value, - unmanagedJars := findUnmanagedJars(configuration.value, - unmanagedBase.value, - includeFilter in unmanagedJars value, - excludeFilter in unmanagedJars value) + unmanagedJars := findUnmanagedJars( + configuration.value, + unmanagedBase.value, + includeFilter in unmanagedJars value, + excludeFilter in unmanagedJars value + ) ).map(exportClasspath) private[this] def exportClasspath(s: Setting[Task[Classpath]]): Setting[Task[Classpath]] = @@ -1700,22 +1783,25 @@ object Classpaths { for (task <- defaultPackageKeys; conf <- Seq(Compile, Test)) yield (task in conf) lazy val defaultArtifactTasks: Seq[TaskKey[File]] = makePom +: defaultPackages - def findClasspathConfig(map: Configuration => Configuration, - thisConfig: Configuration, - delegated: Option[Configuration], - report: UpdateReport): Configuration = { + def findClasspathConfig( + map: Configuration => Configuration, + thisConfig: Configuration, + delegated: Option[Configuration], + report: UpdateReport + ): Configuration = { val defined = report.allConfigurations.toSet val search = map(thisConfig) +: (delegated.toList ++ Seq(Compile, Configurations.Default)) def notFound = sys.error( - "Configuration to use for managed classpath must be explicitly defined when default configurations are not present.") + "Configuration to use for managed classpath must be explicitly defined when default configurations are not present." 
+ ) search find { c => defined contains ConfigRef(c.name) } getOrElse notFound } def packaged(pkgTasks: Seq[TaskKey[File]]): Initialize[Task[Map[Artifact, File]]] = - enabledOnly(packagedArtifact.task, pkgTasks) apply (_.join.map(_.toMap)) + enabledOnly(packagedArtifact.toSettingKey, pkgTasks) apply (_.join.map(_.toMap)) def artifactDefs(pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[Artifact]] = enabledOnly(artifact, pkgTasks) @@ -1725,8 +1811,10 @@ object Classpaths { case (a, true) => a }) - def forallIn[T](key: Scoped.ScopingSetting[SettingKey[T]], - pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] = + def forallIn[T]( + key: Scoped.ScopingSetting[SettingKey[T]], // should be just SettingKey[T] (mea culpa) + pkgTasks: Seq[TaskKey[_]], + ): Initialize[Seq[T]] = pkgTasks.map(pkg => key in pkg.scope in pkg).join private[this] def publishGlobalDefaults = @@ -1735,7 +1823,8 @@ object Classpaths { publishMavenStyle :== true, publishArtifact :== true, publishArtifact in Test :== false - )) + ) + ) val jvmPublishSettings: Seq[Setting[_]] = Seq( artifacts := artifactDefs(defaultArtifactTasks).value, @@ -1756,15 +1845,16 @@ object Classpaths { deliver := deliverTask(makeIvyXmlConfiguration).value, deliverLocal := deliverTask(makeIvyXmlLocalConfiguration).value, makeIvyXml := deliverTask(makeIvyXmlConfiguration).value, - publish := publishTask(publishConfiguration, deliver).value, - publishLocal := publishTask(publishLocalConfiguration, deliverLocal).value, - publishM2 := publishTask(publishM2Configuration, deliverLocal).value + publish := publishTask(publishConfiguration).value, + publishLocal := publishTask(publishLocalConfiguration).value, + publishM2 := publishTask(publishM2Configuration).value ) private[this] def baseGlobalDefaults = Defaults.globalDefaults( Seq( conflictWarning :== ConflictWarning.default("global"), + evictionWarningOptions := EvictionWarningOptions.default, compatibilityWarningOptions :== CompatibilityWarningOptions.default, homepage :== None, 
startYear :== None, @@ -1804,7 +1894,8 @@ object Classpaths { CrossVersion(scalaVersion, binVersion)(base).withCrossVersion(Disabled()) }, shellPrompt := shellPromptFromState - )) + ) + ) val ivyBaseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq( conflictWarning := conflictWarning.value.copy(label = Reference.display(thisProjectRef.value)), @@ -1827,12 +1918,14 @@ object Classpaths { developers.value.toVector ), overrideBuildResolvers := appConfiguration(isOverrideRepositories).value, - externalResolvers := ((externalResolvers.?.value, - resolvers.value, - appResolvers.value, - useJCenter.value) match { + externalResolvers := (( + externalResolvers.?.value, + resolvers.value, + appResolvers.value, + useJCenter.value + ) match { case (Some(delegated), Seq(), _, _) => delegated - case (_, rs, Some(ars), uj) => ars ++ rs + case (_, rs, Some(ars), _) => ars ++ rs case (_, rs, _, uj) => Resolver.combineDefaultResolvers(rs.toVector, uj, mavenCentral = true) }), appResolvers := { @@ -1882,7 +1975,8 @@ object Classpaths { checkExplicit = true, overrideScalaVersion = true ).withScalaOrganization(scalaOrganization.value) - .withScalaArtifacts(scalaArtifacts.value.toVector)) + .withScalaArtifacts(scalaArtifacts.value.toVector) + ) } )).value, artifactPath in makePom := artifactPathSetting(artifact in makePom).value, @@ -1908,7 +2002,8 @@ object Classpaths { .withRetrieveDirectory(managedDirectory.value) .withOutputPattern(retrievePattern.value) .withSync(retrieveManagedSync.value) - .withConfigurationsToRetrieve(configurationsToRetrieve.value map { _.toVector })) + .withConfigurationsToRetrieve(configurationsToRetrieve.value map { _.toVector }) + ) else None }, dependencyResolution := IvyDependencyResolution(ivyConfiguration.value), @@ -1951,8 +2046,10 @@ object Classpaths { if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, packagedArtifacts.in(publish).value.toVector, - 
checksums.in(publish).value.toVector, - getPublishTo(publishTo.value).name, + checksums.in(publish).value.toVector, { //resolvername: not required if publishTo is false + val publishToOption = publishTo.value + if (publishArtifact.value) getPublishTo(publishToOption).name else "local" + }, ivyLoggingLevel.value, isSnapshot.value ) @@ -1998,10 +2095,10 @@ object Classpaths { val suffix = if (crossPaths.value) s"_$binVersion" else "" s"update_cache$suffix" }, - evictionWarningOptions in update := EvictionWarningOptions.default, dependencyPositions := dependencyPositionsTask.value, unresolvedWarningConfiguration in update := UnresolvedWarningConfiguration( - dependencyPositions.value), + dependencyPositions.value + ), update := (updateTask tag (Tags.Update, Tags.Network)).value, update := { val report = update.value @@ -2009,6 +2106,7 @@ object Classpaths { ConflictWarning(conflictWarning.value, report, log) report }, + evictionWarningOptions in update := evictionWarningOptions.value, evictionWarningOptions in evicted := EvictionWarningOptions.full, evicted := { import ShowLines._ @@ -2043,7 +2141,6 @@ object Classpaths { val docTypes = docArtifactTypes.value val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir) val uwConfig = (unresolvedWarningConfiguration in update).value - val scalaModule = scalaModuleInfo.value withExcludes(out, mod.classifiers, lock(app)) { excludes => lm.updateClassifiers( GetClassifiersConfiguration( @@ -2070,11 +2167,11 @@ object Classpaths { autoScalaLibrary.value && scalaHome.value.isEmpty && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, - scalaVersion.value), + scalaVersion.value + ), // Override the default to handle mixing in the sbtPlugin + scala dependencies. 
allDependencies := { val base = projectDependencies.value ++ libraryDependencies.value - val dependency = sbtDependency.value val isPlugin = sbtPlugin.value val sbtdeps = (sbtDependency in pluginCrossBuild).value.withConfigurations(Some(Provided.name)) @@ -2096,7 +2193,8 @@ object Classpaths { val resset = ress.toSet for ((name, r) <- resset groupBy (_.name) if r.size > 1) { log.warn( - "Multiple resolvers having different access mechanism configured with same name '" + name + "'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`).") + "Multiple resolvers having different access mechanism configured with same name '" + name + "'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`)." + ) } } @@ -2112,9 +2210,11 @@ object Classpaths { def pluginProjectID: Initialize[ModuleID] = Def.setting { if (sbtPlugin.value) - sbtPluginExtra(projectID.value, - (sbtBinaryVersion in pluginCrossBuild).value, - (scalaBinaryVersion in pluginCrossBuild).value) + sbtPluginExtra( + projectID.value, + (sbtBinaryVersion in pluginCrossBuild).value, + (scalaBinaryVersion in pluginCrossBuild).value + ) else projectID.value } private[sbt] def ivySbt0: Initialize[Task[IvySbt]] = @@ -2139,86 +2239,88 @@ object Classpaths { Defaults.globalDefaults( Seq( transitiveClassifiers in updateSbtClassifiers ~= (_.filter(_ != DocClassifier)) - )) + ) + ) def sbtClassifiersTasks = sbtClassifiersGlobalDefaults ++ inTask(updateSbtClassifiers)( - Seq( - externalResolvers := { - val explicit = buildStructure.value - .units(thisProjectRef.value.build) - .unit - .plugins - .pluginData - .resolvers - explicit orElse bootRepositories(appConfiguration.value) getOrElse externalResolvers.value - }, - ivyConfiguration := InlineIvyConfiguration( - paths = ivyPaths.value, - resolvers = externalResolvers.value.toVector, - otherResolvers = Vector.empty, - moduleConfigurations = Vector.empty, - lock = 
Option(lock(appConfiguration.value)), - checksums = checksums.value.toVector, - managedChecksums = false, - resolutionCacheDir = Some(crossTarget.value / "resolution-cache"), - updateOptions = UpdateOptions(), - log = streams.value.log - ), - ivySbt := ivySbt0.value, - classifiersModule := classifiersModuleTask.value, - // Redefine scalaVersion and scalaBinaryVersion specifically for the dependency graph used for updateSbtClassifiers task. - // to fix https://github.com/sbt/sbt/issues/2686 - scalaVersion := appConfiguration.value.provider.scalaProvider.version, - scalaBinaryVersion := binaryScalaVersion(scalaVersion.value), - scalaModuleInfo := { - Some( - ScalaModuleInfo( - scalaVersion.value, - scalaBinaryVersion.value, - Vector(), - checkExplicit = false, - filterImplicit = false, - overrideScalaVersion = true).withScalaOrganization(scalaOrganization.value)) - }, - dependencyResolution := IvyDependencyResolution(ivyConfiguration.value), - updateSbtClassifiers in TaskGlobal := (Def.task { - val lm = dependencyResolution.value - val s = streams.value - val is = ivySbt.value - val mod = classifiersModule.value - val c = updateConfiguration.value - val app = appConfiguration.value - val srcTypes = sourceArtifactTypes.value - val docTypes = docArtifactTypes.value - val log = s.log - val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir) - val uwConfig = (unresolvedWarningConfiguration in update).value - val depDir = dependencyCacheDirectory.value - val ivy = scalaModuleInfo.value - val st = state.value - withExcludes(out, mod.classifiers, lock(app)) { - excludes => - // val noExplicitCheck = ivy.map(_.withCheckExplicit(false)) - LibraryManagement.transitiveScratch( - lm, - "sbt", - GetClassifiersConfiguration( - mod, - excludes.toVector, - c.withArtifactFilter(c.artifactFilter.map(af => af.withInverted(!af.inverted))), - srcTypes.toVector, - docTypes.toVector - ), - uwConfig, - log - ) match { - case Left(uw) => ??? 
- case Right(ur) => ur - } - } - } tag (Tags.Update, Tags.Network)).value - )) ++ Seq(bootIvyConfiguration := (ivyConfiguration in updateSbtClassifiers).value) + externalResolvers := { + val explicit = buildStructure.value + .units(thisProjectRef.value.build) + .unit + .plugins + .pluginData + .resolvers + explicit orElse bootRepositories(appConfiguration.value) getOrElse externalResolvers.value + }, + ivyConfiguration := InlineIvyConfiguration( + paths = ivyPaths.value, + resolvers = externalResolvers.value.toVector, + otherResolvers = Vector.empty, + moduleConfigurations = Vector.empty, + lock = Option(lock(appConfiguration.value)), + checksums = checksums.value.toVector, + managedChecksums = false, + resolutionCacheDir = Some(crossTarget.value / "resolution-cache"), + updateOptions = UpdateOptions(), + log = streams.value.log + ), + ivySbt := ivySbt0.value, + classifiersModule := classifiersModuleTask.value, + // Redefine scalaVersion and scalaBinaryVersion specifically for the dependency graph used for updateSbtClassifiers task. 
+ // to fix https://github.com/sbt/sbt/issues/2686 + scalaVersion := appConfiguration.value.provider.scalaProvider.version, + scalaBinaryVersion := binaryScalaVersion(scalaVersion.value), + scalaModuleInfo := { + Some( + ScalaModuleInfo( + scalaVersion.value, + scalaBinaryVersion.value, + Vector(), + checkExplicit = false, + filterImplicit = false, + overrideScalaVersion = true + ).withScalaOrganization(scalaOrganization.value) + ) + }, + dependencyResolution := IvyDependencyResolution(ivyConfiguration.value), + updateSbtClassifiers in TaskGlobal := (Def.task { + val lm = dependencyResolution.value + val s = streams.value + val is = ivySbt.value + val mod = classifiersModule.value + val c = updateConfiguration.value + val app = appConfiguration.value + val srcTypes = sourceArtifactTypes.value + val docTypes = docArtifactTypes.value + val log = s.log + val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir) + val uwConfig = (unresolvedWarningConfiguration in update).value + val depDir = dependencyCacheDirectory.value + val ivy = scalaModuleInfo.value + val st = state.value + withExcludes(out, mod.classifiers, lock(app)) { + excludes => + // val noExplicitCheck = ivy.map(_.withCheckExplicit(false)) + LibraryManagement.transitiveScratch( + lm, + "sbt", + GetClassifiersConfiguration( + mod, + excludes.toVector, + c.withArtifactFilter(c.artifactFilter.map(af => af.withInverted(!af.inverted))), + srcTypes.toVector, + docTypes.toVector + ), + uwConfig, + log + ) match { + case Left(_) => ??? 
+ case Right(ur) => ur + } + } + } tag (Tags.Update, Tags.Network)).value + ) ++ Seq(bootIvyConfiguration := (ivyConfiguration in updateSbtClassifiers).value) def classifiersModuleTask: Initialize[Task[GetClassifiersModule]] = Def.task { @@ -2244,16 +2346,20 @@ object Classpaths { IvyActions.deliver(ivyModule.value, config.value, streams.value.log) } - def publishTask(config: TaskKey[PublishConfiguration], - deliverKey: TaskKey[_]): Initialize[Task[Unit]] = + @deprecated("Use variant without delivery key", "1.1.1") + def publishTask( + config: TaskKey[PublishConfiguration], + deliverKey: TaskKey[_], + ): Initialize[Task[Unit]] = + publishTask(config) + + def publishTask(config: TaskKey[PublishConfiguration]): Initialize[Task[Unit]] = Def.taskDyn { val s = streams.value val skp = (skip in publish).value val ref = thisProjectRef.value if (skp) Def.task { s.log.debug(s"Skipping publish* for ${ref.project}") } else - Def.task { - IvyActions.publish(ivyModule.value, config.value, s.log) - } + Def.task { IvyActions.publish(ivyModule.value, config.value, s.log) } } tag (Tags.Publish, Tags.Network) val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = @@ -2268,7 +2374,8 @@ object Classpaths { } def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)( - f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport): UpdateReport = { + f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport + ): UpdateReport = { import sbt.librarymanagement.LibraryManagementCodec._ import sbt.util.FileBasedStore implicit val isoString: sjsonnew.IsoString[JValue] = @@ -2287,15 +2394,15 @@ object Classpaths { val excludes = store .read[Map[ModuleID, Vector[ConfigRef]]]( - default = Map.empty[ModuleID, Vector[ConfigRef]]) + default = Map.empty[ModuleID, Vector[ConfigRef]] + ) val report = f(excludes) val allExcludes: Map[ModuleID, Vector[ConfigRef]] = excludes ++ IvyActions .extractExcludes(report) .mapValues(cs => cs.map(c => ConfigRef(c)).toVector) store.write(allExcludes) - 
IvyActions.addExcluded(report, - classifiers.toVector, - allExcludes.mapValues(_.map(_.name).toSet)) + IvyActions + .addExcluded(report, classifiers.toVector, allExcludes.mapValues(_.map(_.name).toSet)) } } ) @@ -2420,7 +2527,7 @@ object Classpaths { s.init.evaluate(empty) map { _ -> s.pos } }: _*) } catch { - case NonFatal(e) => Map() + case NonFatal(_) => Map() } val outCacheStore = cacheStoreFactory make "output_dsp" @@ -2480,42 +2587,50 @@ object Classpaths { def getPublishTo(repo: Option[Resolver]): Resolver = repo getOrElse sys.error("Repository for publishing is not specified.") - def publishConfig(publishMavenStyle: Boolean, - deliverIvyPattern: String, - status: String, - configurations: Vector[ConfigRef], - artifacts: Vector[(Artifact, File)], - checksums: Vector[String], - resolverName: String = "local", - logging: UpdateLogging = UpdateLogging.DownloadOnly, - overwrite: Boolean = false) = - PublishConfiguration(publishMavenStyle, - deliverIvyPattern, - status, - configurations, - resolverName, - artifacts, - checksums, - logging, - overwrite) + def publishConfig( + publishMavenStyle: Boolean, + deliverIvyPattern: String, + status: String, + configurations: Vector[ConfigRef], + artifacts: Vector[(Artifact, File)], + checksums: Vector[String], + resolverName: String = "local", + logging: UpdateLogging = UpdateLogging.DownloadOnly, + overwrite: Boolean = false + ) = + PublishConfiguration( + publishMavenStyle, + deliverIvyPattern, + status, + configurations, + resolverName, + artifacts, + checksums, + logging, + overwrite + ) - def makeIvyXmlConfig(publishMavenStyle: Boolean, - deliverIvyPattern: String, - status: String, - configurations: Vector[ConfigRef], - checksums: Vector[String], - logging: sbt.librarymanagement.UpdateLogging = UpdateLogging.DownloadOnly, - overwrite: Boolean = false, - optResolverName: Option[String] = None) = - PublishConfiguration(publishMavenStyle, - Some(deliverIvyPattern), - Some(status), - Some(configurations), - 
optResolverName, - Vector.empty, - checksums, - Some(logging), - overwrite) + def makeIvyXmlConfig( + publishMavenStyle: Boolean, + deliverIvyPattern: String, + status: String, + configurations: Vector[ConfigRef], + checksums: Vector[String], + logging: sbt.librarymanagement.UpdateLogging = UpdateLogging.DownloadOnly, + overwrite: Boolean = false, + optResolverName: Option[String] = None + ) = + PublishConfiguration( + publishMavenStyle, + Some(deliverIvyPattern), + Some(status), + Some(configurations), + optResolverName, + Vector.empty, + checksums, + Some(logging), + overwrite + ) def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath @@ -2534,14 +2649,18 @@ object Classpaths { private[sbt] def depMap: Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] = Def.taskDyn { - depMap(buildDependencies.value classpathTransitiveRefs thisProjectRef.value, - settingsData.value, - streams.value.log) + depMap( + buildDependencies.value classpathTransitiveRefs thisProjectRef.value, + settingsData.value, + streams.value.log + ) } - private[sbt] def depMap(projects: Seq[ProjectRef], - data: Settings[Scope], - log: Logger): Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] = + private[sbt] def depMap( + projects: Seq[ProjectRef], + data: Settings[Scope], + log: Logger + ): Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] = Def.value { projects.flatMap(ivyModule in _ get data).join.map { mod => mod map { _.dependencyMapping(log) } toMap; @@ -2586,14 +2705,16 @@ object Classpaths { .put(configuration.key, config) } private[this] def trackedExportedProductsImplTask( - track: TrackLevel): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + track: TrackLevel + ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = Def.taskDyn { val useJars = exportJars.value if (useJars) trackedJarProductsImplTask(track) else trackedNonJarProductsImplTask(track) } private[this] def trackedNonJarProductsImplTask( - 
track: TrackLevel): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + track: TrackLevel + ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = Def.taskDyn { val dirs = productDirectories.value def containsClassFile(fs: List[File]): Boolean = @@ -2615,15 +2736,18 @@ object Classpaths { Def.task { val analysisOpt = previousCompile.value.analysis.toOption dirs map { x => - (x, - if (analysisOpt.isDefined) analysisOpt.get - else Analysis.empty) + ( + x, + if (analysisOpt.isDefined) analysisOpt.get + else Analysis.empty + ) } } } } private[this] def trackedJarProductsImplTask( - track: TrackLevel): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + track: TrackLevel + ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = Def.taskDyn { val jar = (artifactPath in packageBin).value TrackLevel.intersection(track, exportToInternal.value) match { @@ -2639,9 +2763,11 @@ object Classpaths { Def.task { val analysisOpt = previousCompile.value.analysis.toOption Seq(jar) map { x => - (x, - if (analysisOpt.isDefined) analysisOpt.get - else Analysis.empty) + ( + x, + if (analysisOpt.isDefined) analysisOpt.get + else Analysis.empty + ) } } } @@ -2652,28 +2778,34 @@ object Classpaths { def internalDependencies: Initialize[Task[Classpath]] = Def.taskDyn { - internalDependenciesImplTask(thisProjectRef.value, - classpathConfiguration.value, - configuration.value, - settingsData.value, - buildDependencies.value, - trackInternalDependencies.value) + internalDependenciesImplTask( + thisProjectRef.value, + classpathConfiguration.value, + configuration.value, + settingsData.value, + buildDependencies.value, + trackInternalDependencies.value + ) } def internalDependencyJarsTask: Initialize[Task[Classpath]] = Def.taskDyn { - internalDependencyJarsImplTask(thisProjectRef.value, - classpathConfiguration.value, - configuration.value, - settingsData.value, - buildDependencies.value, - trackInternalDependencies.value) + internalDependencyJarsImplTask( + thisProjectRef.value, + 
classpathConfiguration.value, + configuration.value, + settingsData.value, + buildDependencies.value, + trackInternalDependencies.value + ) } def unmanagedDependencies: Initialize[Task[Classpath]] = Def.taskDyn { - unmanagedDependencies0(thisProjectRef.value, - configuration.value, - settingsData.value, - buildDependencies.value) + unmanagedDependencies0( + thisProjectRef.value, + configuration.value, + settingsData.value, + buildDependencies.value + ) } def mkIvyConfiguration: Initialize[Task[IvyConfiguration]] = Def.task { @@ -2694,10 +2826,12 @@ object Classpaths { import java.util.LinkedHashSet import collection.JavaConverters._ - def interSort(projectRef: ProjectRef, - conf: Configuration, - data: Settings[Scope], - deps: BuildDependencies): Seq[(ProjectRef, String)] = { + def interSort( + projectRef: ProjectRef, + conf: Configuration, + data: Settings[Scope], + deps: BuildDependencies + ): Seq[(ProjectRef, String)] = { val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala def visit(p: ProjectRef, c: Configuration): Unit = { val applicableConfigs = allConfigs(c) @@ -2720,33 +2854,52 @@ object Classpaths { visit(projectRef, conf) visited.toSeq } - private[sbt] def unmanagedDependencies0(projectRef: ProjectRef, - conf: Configuration, - data: Settings[Scope], - deps: BuildDependencies): Initialize[Task[Classpath]] = - Def.value { - interDependencies(projectRef, - deps, - conf, - conf, - data, - TrackLevel.TrackAlways, - true, - unmanagedLibs0) + + def interSortConfigurations( + projectRef: ProjectRef, + conf: Configuration, + data: Settings[Scope], + deps: BuildDependencies + ): Seq[(ProjectRef, ConfigRef)] = + interSort(projectRef, conf, data, deps).map { + case (projectRef, configName) => (projectRef, ConfigRef(configName)) } - private[sbt] def internalDependenciesImplTask(projectRef: ProjectRef, - conf: Configuration, - self: Configuration, - data: Settings[Scope], - deps: BuildDependencies, - track: TrackLevel): Initialize[Task[Classpath]] = + + 
private[sbt] def unmanagedDependencies0( + projectRef: ProjectRef, + conf: Configuration, + data: Settings[Scope], + deps: BuildDependencies + ): Initialize[Task[Classpath]] = + Def.value { + interDependencies( + projectRef, + deps, + conf, + conf, + data, + TrackLevel.TrackAlways, + true, + (dep, conf, data, _) => unmanagedLibs(dep, conf, data), + ) + } + private[sbt] def internalDependenciesImplTask( + projectRef: ProjectRef, + conf: Configuration, + self: Configuration, + data: Settings[Scope], + deps: BuildDependencies, + track: TrackLevel + ): Initialize[Task[Classpath]] = Def.value { interDependencies(projectRef, deps, conf, self, data, track, false, productsTask) } - private[sbt] def internalDependencyJarsImplTask(projectRef: ProjectRef, - conf: Configuration, - self: Configuration, - data: Settings[Scope], - deps: BuildDependencies, - track: TrackLevel): Initialize[Task[Classpath]] = + private[sbt] def internalDependencyJarsImplTask( + projectRef: ProjectRef, + conf: Configuration, + self: Configuration, + data: Settings[Scope], + deps: BuildDependencies, + track: TrackLevel + ): Initialize[Task[Classpath]] = Def.value { interDependencies(projectRef, deps, conf, self, data, track, false, jarProductsTask) } @@ -2758,7 +2911,8 @@ object Classpaths { data: Settings[Scope], track: TrackLevel, includeSelf: Boolean, - f: (ProjectRef, String, Settings[Scope], TrackLevel) => Task[Classpath]): Task[Classpath] = { + f: (ProjectRef, String, Settings[Scope], TrackLevel) => Task[Classpath] + ): Task[Classpath] = { val visited = interSort(projectRef, conf, data, deps) val tasks = (new LinkedHashSet[Task[Classpath]]).asScala for ((dep, c) <- visited) @@ -2768,23 +2922,28 @@ object Classpaths { (tasks.toSeq.join).map(_.flatten.distinct) } - def mapped(confString: Option[String], - masterConfs: Seq[String], - depConfs: Seq[String], - default: String, - defaultMapping: String): String => Seq[String] = { + def mapped( + confString: Option[String], + masterConfs: Seq[String], 
+ depConfs: Seq[String], + default: String, + defaultMapping: String + ): String => Seq[String] = { lazy val defaultMap = parseMapping(defaultMapping, masterConfs, depConfs, _ :: Nil) parseMapping(confString getOrElse default, masterConfs, depConfs, defaultMap) } - def parseMapping(confString: String, - masterConfs: Seq[String], - depConfs: Seq[String], - default: String => Seq[String]): String => Seq[String] = + def parseMapping( + confString: String, + masterConfs: Seq[String], + depConfs: Seq[String], + default: String => Seq[String] + ): String => Seq[String] = union(confString.split(";") map parseSingleMapping(masterConfs, depConfs, default)) def parseSingleMapping( masterConfs: Seq[String], depConfs: Seq[String], - default: String => Seq[String])(confString: String): String => Seq[String] = { + default: String => Seq[String] + )(confString: String): String => Seq[String] = { val ms: Seq[(String, Seq[String])] = trim(confString.split("->", 2)) match { case x :: Nil => for (a <- parseList(x, masterConfs)) yield (a, default(a)) @@ -2819,38 +2978,43 @@ object Classpaths { ivyConfigurations in p get data getOrElse Nil def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] = configurations.find(_.name == conf) - private[sbt] def productsTask(dep: ResolvedReference, - conf: String, - data: Settings[Scope], - track: TrackLevel): Task[Classpath] = + private[sbt] def productsTask( + dep: ResolvedReference, + conf: String, + data: Settings[Scope], + track: TrackLevel + ): Task[Classpath] = track match { case TrackLevel.NoTracking => getClasspath(exportedProductsNoTracking, dep, conf, data) case TrackLevel.TrackIfMissing => getClasspath(exportedProductsIfMissing, dep, conf, data) case TrackLevel.TrackAlways => getClasspath(exportedProducts, dep, conf, data) } - private[sbt] def jarProductsTask(dep: ResolvedReference, - conf: String, - data: Settings[Scope], - track: TrackLevel): Task[Classpath] = + private[sbt] def jarProductsTask( + dep: 
ResolvedReference, + conf: String, + data: Settings[Scope], + track: TrackLevel + ): Task[Classpath] = track match { case TrackLevel.NoTracking => getClasspath(exportedProductJarsNoTracking, dep, conf, data) case TrackLevel.TrackIfMissing => getClasspath(exportedProductJarsIfMissing, dep, conf, data) case TrackLevel.TrackAlways => getClasspath(exportedProductJars, dep, conf, data) } - private[sbt] def unmanagedLibs0(dep: ResolvedReference, - conf: String, - data: Settings[Scope], - track: TrackLevel): Task[Classpath] = - unmanagedLibs(dep, conf, data) + def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] = getClasspath(unmanagedJars, dep, conf, data) - def getClasspath(key: TaskKey[Classpath], - dep: ResolvedReference, - conf: String, - data: Settings[Scope]): Task[Classpath] = + + def getClasspath( + key: TaskKey[Classpath], + dep: ResolvedReference, + conf: String, + data: Settings[Scope] + ): Task[Classpath] = (key in (dep, ConfigKey(conf))) get data getOrElse constant(Nil) + def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration = flatten(defaultConfiguration in p get data) getOrElse Configurations.Default + def flatten[T](o: Option[Option[T]]): Option[T] = o flatMap idFun val sbtIvySnapshots: URLRepository = Resolver.sbtIvyRepo("snapshots") @@ -2860,10 +3024,12 @@ object Classpaths { def modifyForPlugin(plugin: Boolean, dep: ModuleID): ModuleID = if (plugin) dep.withConfigurations(Some(Provided.name)) else dep - def autoLibraryDependency(auto: Boolean, - plugin: Boolean, - org: String, - version: String): Seq[ModuleID] = + def autoLibraryDependency( + auto: Boolean, + plugin: Boolean, + org: String, + version: String + ): Seq[ModuleID] = if (auto) modifyForPlugin(plugin, ModuleID(org, ScalaArtifacts.LibraryID, version)) :: Nil else @@ -2883,7 +3049,7 @@ object Classpaths { up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes)) .toSeq .map { - case (conf, 
module, art, file) => + case (_, module, art, file) => Attributed(file)( AttributeMap.empty .put(artifact.key, art) @@ -2893,20 +3059,25 @@ object Classpaths { } .distinct - def findUnmanagedJars(config: Configuration, - base: File, - filter: FileFilter, - excl: FileFilter): Classpath = + def findUnmanagedJars( + config: Configuration, + base: File, + filter: FileFilter, + excl: FileFilter + ): Classpath = (base * (filter -- excl) +++ (base / config.name).descendantsExcept(filter, excl)).classpath @deprecated( "The method only works for Scala 2, use the overloaded version to support both Scala 2 and Scala 3", - "1.1.5") + "1.1.5" + ) def autoPlugins(report: UpdateReport, internalPluginClasspath: Seq[File]): Seq[String] = autoPlugins(report, internalPluginClasspath, isDotty = false) - def autoPlugins(report: UpdateReport, - internalPluginClasspath: Seq[File], - isDotty: Boolean): Seq[String] = { + def autoPlugins( + report: UpdateReport, + internalPluginClasspath: Seq[File], + isDotty: Boolean + ): Seq[String] = { val pluginClasspath = report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath val plugins = sbt.internal.inc.classpath.ClasspathUtilities.compilerPlugins(pluginClasspath, isDotty) @@ -2918,27 +3089,32 @@ object Classpaths { val ref = thisProjectRef.value val data = settingsData.value val deps = buildDependencies.value - internalDependenciesImplTask(ref, - CompilerPlugin, - CompilerPlugin, - data, - deps, - TrackLevel.TrackAlways) + internalDependenciesImplTask( + ref, + CompilerPlugin, + CompilerPlugin, + data, + deps, + TrackLevel.TrackAlways + ) } lazy val compilerPluginConfig = Seq( scalacOptions := { val options = scalacOptions.value - val newPlugins = autoPlugins(update.value, - internalCompilerPluginClasspath.value.files, - ScalaInstance.isDotty(scalaVersion.value)) + val newPlugins = autoPlugins( + update.value, + internalCompilerPluginClasspath.value.files, + ScalaInstance.isDotty(scalaVersion.value) + ) val existing = 
options.toSet if (autoCompilerPlugins.value) options ++ newPlugins.filterNot(existing) else options } ) def substituteScalaFiles(scalaOrg: String, report: UpdateReport)( - scalaJars: String => Seq[File]): UpdateReport = + scalaJars: String => Seq[File] + ): UpdateReport = report.substitute { (configuration, module, arts) => if (module.organization == scalaOrg) { val jarName = module.name + ".jar" @@ -2990,11 +3166,13 @@ object Classpaths { repo match { case m: xsbti.MavenRepository => MavenRepository(m.id, m.url.toString) case i: xsbti.IvyRepository => - val patterns = Patterns(Vector(i.ivyPattern), - Vector(i.artifactPattern), - mavenCompatible(i), - descriptorOptional(i), - skipConsistencyCheck(i)) + val patterns = Patterns( + Vector(i.ivyPattern), + Vector(i.artifactPattern), + mavenCompatible(i), + descriptorOptional(i), + skipConsistencyCheck(i) + ) i.url.getProtocol match { case "file" => // This hackery is to deal suitably with UNC paths on Windows. Once we can assume Java7, Paths should save us from this. @@ -3013,7 +3191,8 @@ object Classpaths { case Predefined.SonatypeOSSSnapshots => Resolver.sonatypeRepo("snapshots") case unknown => sys.error( - "Unknown predefined resolver '" + unknown + "'. This resolver may only be supported in newer sbt versions.") + "Unknown predefined resolver '" + unknown + "'. This resolver may only be supported in newer sbt versions." + ) } } } @@ -3052,15 +3231,18 @@ trait BuildExtra extends BuildCommon with DefExtra { libraryDependencies += sbtPluginExtra( ModuleID("org.scala-sbt", "sbt-maven-resolver", sbtVersion.value), sbtBinaryVersion.value, - scalaBinaryVersion.value) + scalaBinaryVersion.value + ) /** * Adds `dependency` as an sbt plugin for the specific sbt version `sbtVersion` and Scala version `scalaVersion`. * Typically, use the default values for these versions instead of specifying them explicitly. 
*/ - def addSbtPlugin(dependency: ModuleID, - sbtVersion: String, - scalaVersion: String): Setting[Seq[ModuleID]] = + def addSbtPlugin( + dependency: ModuleID, + sbtVersion: String, + scalaVersion: String + ): Setting[Seq[ModuleID]] = libraryDependencies += sbtPluginExtra(dependency, sbtVersion, scalaVersion) /** @@ -3099,8 +3281,10 @@ trait BuildExtra extends BuildCommon with DefExtra { } /** Constructs a setting that declares a new artifact `artifact` that is generated by `taskDef`. */ - def addArtifact(artifact: Initialize[Artifact], - taskDef: Initialize[Task[File]]): SettingsDefinition = { + def addArtifact( + artifact: Initialize[Artifact], + taskDef: Initialize[Task[File]] + ): SettingsDefinition = { val artLocal = SettingKey.local[Artifact] val taskLocal = TaskKey.local[File] val art = artifacts := artLocal.value +: artifacts.value @@ -3108,22 +3292,30 @@ trait BuildExtra extends BuildCommon with DefExtra { Seq(artLocal := artifact.value, taskLocal := taskDef.value, art, pkgd) } - def externalIvySettings(file: Initialize[File] = inBase("ivysettings.xml"), - addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] = + def externalIvySettings( + file: Initialize[File] = inBase("ivysettings.xml"), + addMultiResolver: Boolean = true + ): Setting[Task[IvyConfiguration]] = externalIvySettingsURI(file(_.toURI), addMultiResolver) - def externalIvySettingsURL(url: URL, - addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] = + def externalIvySettingsURL( + url: URL, + addMultiResolver: Boolean = true + ): Setting[Task[IvyConfiguration]] = externalIvySettingsURI(Def.value(url.toURI), addMultiResolver) - def externalIvySettingsURI(uri: Initialize[URI], - addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] = { + def externalIvySettingsURI( + uri: Initialize[URI], + addMultiResolver: Boolean = true + ): Setting[Task[IvyConfiguration]] = { val other = Def.task { - (baseDirectory.value, - appConfiguration.value, - 
projectResolver.value, - updateOptions.value, - streams.value) + ( + baseDirectory.value, + appConfiguration.value, + projectResolver.value, + updateOptions.value, + streams.value + ) } ivyConfiguration := ((uri zipWith other) { case (u, otherTask) => @@ -3145,25 +3337,33 @@ trait BuildExtra extends BuildCommon with DefExtra { baseDirectory.value / name } - def externalIvyFile(file: Initialize[File] = inBase("ivy.xml"), - iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo) - : Setting[Task[ModuleSettings]] = - moduleSettings := IvyFileConfiguration(ivyValidate.value, - iScala.value, - file.value, - managedScalaInstance.value) + def externalIvyFile( + file: Initialize[File] = inBase("ivy.xml"), + iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo + ): Setting[Task[ModuleSettings]] = + moduleSettings := IvyFileConfiguration( + ivyValidate.value, + iScala.value, + file.value, + managedScalaInstance.value + ) - def externalPom(file: Initialize[File] = inBase("pom.xml"), - iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo) - : Setting[Task[ModuleSettings]] = - moduleSettings := PomConfiguration(ivyValidate.value, - scalaModuleInfo.value, - file.value, - managedScalaInstance.value) + def externalPom( + file: Initialize[File] = inBase("pom.xml"), + iScala: Initialize[Option[ScalaModuleInfo]] = scalaModuleInfo, + ): Setting[Task[ModuleSettings]] = + moduleSettings := PomConfiguration( + ivyValidate.value, + iScala.value, + file.value, + managedScalaInstance.value, + ) - def runInputTask(config: Configuration, - mainClass: String, - baseArguments: String*): Initialize[InputTask[Unit]] = + def runInputTask( + config: Configuration, + mainClass: String, + baseArguments: String* + ): Initialize[InputTask[Unit]] = Def.inputTask { import Def._ val r = (runner in (config, run)).value @@ -3172,9 +3372,11 @@ trait BuildExtra extends BuildCommon with DefExtra { r.run(mainClass, data(cp), baseArguments ++ args, streams.value.log).get } - def 
runTask(config: Configuration, - mainClass: String, - arguments: String*): Initialize[Task[Unit]] = + def runTask( + config: Configuration, + mainClass: String, + arguments: String* + ): Initialize[Task[Unit]] = Def.task { val cp = (fullClasspath in config).value val r = (runner in (config, run)).value @@ -3184,11 +3386,16 @@ trait BuildExtra extends BuildCommon with DefExtra { // public API /** Returns a vector of settings that create custom run input task. */ - def fullRunInputTask(scoped: InputKey[Unit], - config: Configuration, - mainClass: String, - baseArguments: String*): Vector[Setting[_]] = { - // Use Def.inputTask with the `Def.spaceDelimited()` parser + def fullRunInputTask( + scoped: InputKey[Unit], + config: Configuration, + mainClass: String, + baseArguments: String* + ): Vector[Setting[_]] = { + // TODO: Re-write to avoid InputTask.apply which is deprecated + // I tried "Def.spaceDelimited().parsed" (after importing Def.parserToInput) + // but it broke actions/run-task + // Maybe it needs to be defined inside a Def.inputTask? def inputTask[T](f: TaskKey[Seq[String]] => Initialize[Task[T]]): Initialize[InputTask[T]] = InputTask.apply(Def.value((s: State) => Def.spaceDelimited()))(f) @@ -3208,10 +3415,12 @@ trait BuildExtra extends BuildCommon with DefExtra { // public API /** Returns a vector of settings that create custom run task. */ - def fullRunTask(scoped: TaskKey[Unit], - config: Configuration, - mainClass: String, - arguments: String*): Vector[Setting[_]] = + def fullRunTask( + scoped: TaskKey[Unit], + config: Configuration, + mainClass: String, + arguments: String* + ): Vector[Setting[_]] = Vector( scoped := ((initScoped(scoped.scopedKey, runnerInit) .zipWith(Def.task { ((fullClasspath in config).value, streams.value) })) { @@ -3234,16 +3443,17 @@ trait BuildExtra extends BuildCommon with DefExtra { * This is useful for reducing test:compile time when not running test. 
*/ def noTestCompletion(config: Configuration = Test): Setting[_] = - inConfig(config)(Seq(definedTests := detectTests.value)).head + inConfig(config)(definedTests := detectTests.value).head def filterKeys(ss: Seq[Setting[_]], transitive: Boolean = false)( - f: ScopedKey[_] => Boolean): Seq[Setting[_]] = + f: ScopedKey[_] => Boolean + ): Seq[Setting[_]] = ss filter (s => f(s.key) && (!transitive || s.dependencies.forall(f))) } trait DefExtra { private[this] val ts: TaskSequential = new TaskSequential {} - implicit def toTaskSequential(d: Def.type): TaskSequential = ts + implicit def toTaskSequential(@deprecated("unused", "") d: Def.type): TaskSequential = ts } trait BuildCommon { @@ -3251,7 +3461,7 @@ trait BuildCommon { /** * Allows a String to be used where a `NameFilter` is expected. * Asterisks (`*`) in the string are interpreted as wildcards. - * All other characters must match exactly. See [[sbt.GlobFilter]]. + * All other characters must match exactly. See [[sbt.io.GlobFilter]]. */ implicit def globFilter(expression: String): NameFilter = GlobFilter(expression) @@ -3293,24 +3503,29 @@ trait BuildCommon { SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s) def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)( - implicit f: JsonFormat[T]): Option[T] = + implicit f: JsonFormat[T] + ): Option[T] = SessionVar.load(SessionVar.resolveContext(task.scopedKey, context.scope, s), s) // intended for use in constructing InputTasks - def loadForParser[P, T](task: TaskKey[T])(f: (State, Option[T]) => Parser[P])( - implicit format: JsonFormat[T]): Initialize[State => Parser[P]] = + def loadForParser[P, T](task: TaskKey[T])( + f: (State, Option[T]) => Parser[P] + )(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] = loadForParserI(task)(Def value f)(format) - def loadForParserI[P, T](task: TaskKey[T])(init: Initialize[(State, Option[T]) => Parser[P]])( - implicit format: JsonFormat[T]): Initialize[State => 
Parser[P]] = + def loadForParserI[P, T](task: TaskKey[T])( + init: Initialize[(State, Option[T]) => Parser[P]] + )(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] = Def.setting { (s: State) => init.value(s, loadFromContext(task, resolvedScoped.value, s)(format)) } - def getForParser[P, T](task: TaskKey[T])( - init: (State, Option[T]) => Parser[P]): Initialize[State => Parser[P]] = + def getForParser[P, T]( + task: TaskKey[T] + )(init: (State, Option[T]) => Parser[P]): Initialize[State => Parser[P]] = getForParserI(task)(Def value init) - def getForParserI[P, T](task: TaskKey[T])( - init: Initialize[(State, Option[T]) => Parser[P]]): Initialize[State => Parser[P]] = + def getForParserI[P, T]( + task: TaskKey[T] + )(init: Initialize[(State, Option[T]) => Parser[P]]): Initialize[State => Parser[P]] = Def.setting { (s: State) => init.value(s, getFromContext(task, resolvedScoped.value, s)) } diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index 1d17f5cc3..479530f15 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -8,7 +8,7 @@ package sbt import sbt.internal.{ Load, BuildStructure, TaskTimings, TaskName, GCUtil } -import sbt.internal.util.{ Attributed, ErrorHandling, HList, RMap, Signals, Types } +import sbt.internal.util.{ Attributed, ConsoleAppender, ErrorHandling, HList, RMap, Signals, Types } import sbt.util.{ Logger, Show } import sbt.librarymanagement.{ Resolver, UpdateReport } @@ -172,9 +172,11 @@ object EvaluateTask { val SystemProcessors = Runtime.getRuntime.availableProcessors - def extractedTaskConfig(extracted: Extracted, - structure: BuildStructure, - state: State): EvaluateTaskConfig = { + def extractedTaskConfig( + extracted: Extracted, + structure: BuildStructure, + state: State + ): EvaluateTaskConfig = { val rs = restrictions(extracted, structure) val canceller = cancelStrategy(extracted, structure, state) val progress = 
executeProgress(extracted, structure, state) @@ -193,10 +195,12 @@ object EvaluateTask { } def restrictions(extracted: Extracted, structure: BuildStructure): Seq[Tags.Rule] = - getSetting(Keys.concurrentRestrictions, - defaultRestrictions(extracted, structure), - extracted, - structure) + getSetting( + Keys.concurrentRestrictions, + defaultRestrictions(extracted, structure), + extracted, + structure + ) def maxWorkers(extracted: Extracted, structure: BuildStructure): Int = if (getSetting(Keys.parallelExecution, true, extracted, structure)) @@ -207,22 +211,27 @@ object EvaluateTask { def cancelable(extracted: Extracted, structure: BuildStructure): Boolean = getSetting(Keys.cancelable, false, extracted, structure) - def cancelStrategy(extracted: Extracted, - structure: BuildStructure, - state: State): TaskCancellationStrategy = + def cancelStrategy( + extracted: Extracted, + structure: BuildStructure, + state: State + ): TaskCancellationStrategy = getSetting(Keys.taskCancelStrategy, { (_: State) => TaskCancellationStrategy.Null }, extracted, structure)(state) - private[sbt] def executeProgress(extracted: Extracted, - structure: BuildStructure, - state: State): ExecuteProgress[Task] = { + private[sbt] def executeProgress( + extracted: Extracted, + structure: BuildStructure, + state: State + ): ExecuteProgress[Task] = { import Types.const val maker: State => Keys.TaskProgress = getSetting( Keys.executeProgress, const(new Keys.TaskProgress(defaultProgress)), extracted, - structure) + structure + ) maker(state).progress } // TODO - Should this pull from Global or from the project itself? @@ -230,15 +239,19 @@ object EvaluateTask { getSetting(Keys.forcegc in Global, GCUtil.defaultForceGarbageCollection, extracted, structure) // TODO - Should this pull from Global or from the project itself? 
private[sbt] def minForcegcInterval(extracted: Extracted, structure: BuildStructure): Duration = - getSetting(Keys.minForcegcInterval in Global, - GCUtil.defaultMinForcegcInterval, - extracted, - structure) + getSetting( + Keys.minForcegcInterval in Global, + GCUtil.defaultMinForcegcInterval, + extracted, + structure + ) - def getSetting[T](key: SettingKey[T], - default: T, - extracted: Extracted, - structure: BuildStructure): T = + def getSetting[T]( + key: SettingKey[T], + default: T, + extracted: Extracted, + structure: BuildStructure + ): T = key in extracted.currentRef get structure.data getOrElse default def injectSettings: Seq[Setting[_]] = Seq( @@ -247,16 +260,21 @@ object EvaluateTask { (executionRoots in Global) ::= dummyRoots ) - def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData = { + @deprecated("Use variant which doesn't take a logger", "1.1.1") + def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData = + evalPluginDef(pluginDef, state) + + def evalPluginDef(pluginDef: BuildStructure, state: State): PluginData = { val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root)) val pluginKey = pluginData val config = extractedTaskConfig(Project.extract(state), pluginDef, state) val evaluated = apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config) val (newS, result) = evaluated getOrElse sys.error( - "Plugin data does not exist for plugin definition at " + pluginDef.root) + "Plugin data does not exist for plugin definition at " + pluginDef.root + ) Project.runUnloadHooks(newS) // discard states - processResult(result, log) + processResult2(result) } /** @@ -264,26 +282,32 @@ object EvaluateTask { * If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`. * Task execution is configured according to settings defined in the loaded project. 
*/ - def apply[T](structure: BuildStructure, - taskKey: ScopedKey[Task[T]], - state: State, - ref: ProjectRef): Option[(State, Result[T])] = - apply[T](structure, - taskKey, - state, - ref, - extractedTaskConfig(Project.extract(state), structure, state)) + def apply[T]( + structure: BuildStructure, + taskKey: ScopedKey[Task[T]], + state: State, + ref: ProjectRef + ): Option[(State, Result[T])] = + apply[T]( + structure, + taskKey, + state, + ref, + extractedTaskConfig(Project.extract(state), structure, state) + ) /** * Evaluates `taskKey` and returns the new State and the result of the task wrapped in Some. * If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`. * `config` configures concurrency and canceling of task execution. */ - def apply[T](structure: BuildStructure, - taskKey: ScopedKey[Task[T]], - state: State, - ref: ProjectRef, - config: EvaluateTaskConfig): Option[(State, Result[T])] = { + def apply[T]( + structure: BuildStructure, + taskKey: ScopedKey[Task[T]], + state: State, + ref: ProjectRef, + config: EvaluateTaskConfig + ): Option[(State, Result[T])] = { withStreams(structure, state) { str => for ((task, toNode) <- getTask(structure, taskKey, state, str, ref)) yield runTask(task, state, str, structure.index.triggers, config)(toNode) @@ -296,8 +320,8 @@ object EvaluateTask { def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = { val all = Incomplete linearize result - val keyed = for (Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) <- all) - yield (key, msg, ex) + val keyed = + all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) => (key, msg, ex) } import ExceptionCategory._ for ((key, msg, Some(ex)) <- keyed) { @@ -312,7 +336,7 @@ object EvaluateTask { for ((key, msg, ex) <- keyed if (msg.isDefined || ex.isDefined)) { val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t") val log = getStreams(key, 
streams).log - val display = contextDisplay(state, log.ansiCodesSupported) + val display = contextDisplay(state, ConsoleAppender.formatEnabledInEnv) log.error("(" + display.show(key) + ") " + msgString) } } @@ -331,34 +355,41 @@ object EvaluateTask { try { f(str) } finally { str.close() } } - def getTask[T](structure: BuildStructure, - taskKey: ScopedKey[Task[T]], - state: State, - streams: Streams, - ref: ProjectRef): Option[(Task[T], NodeView[Task])] = { + def getTask[T]( + structure: BuildStructure, + taskKey: ScopedKey[Task[T]], + state: State, + streams: Streams, + ref: ProjectRef + ): Option[(Task[T], NodeView[Task])] = { val thisScope = Load.projectScope(ref) val resolvedScope = Scope.replaceThis(thisScope)(taskKey.scope) for (t <- structure.data.get(resolvedScope, taskKey.key)) yield (t, nodeView(state, streams, taskKey :: Nil)) } - def nodeView[HL <: HList](state: State, - streams: Streams, - roots: Seq[ScopedKey[_]], - dummies: DummyTaskMap = DummyTaskMap(Nil)): NodeView[Task] = + def nodeView[HL <: HList]( + state: State, + streams: Streams, + roots: Seq[ScopedKey[_]], + dummies: DummyTaskMap = DummyTaskMap(Nil) + ): NodeView[Task] = Transform( - (dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies) + (dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies + ) def runTask[T]( root: Task[T], state: State, streams: Streams, triggers: Triggers[Task], - config: EvaluateTaskConfig)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = { + config: EvaluateTaskConfig + )(implicit taskToNode: NodeView[Task]): (State, Result[T]) = { import ConcurrentRestrictions.{ completionService, tagged, tagsKey } val log = state.log log.debug( - s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}") + s"Running task... 
Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}" + ) val tags = tagged[Task[_]](_.info get tagsKey getOrElse Map.empty, Tags.predicate(config.restrictions)) val (service, shutdownThreads) = @@ -379,9 +410,11 @@ object EvaluateTask { case _ => true } def run() = { - val x = new Execute[Task](Execute.config(config.checkCycles, overwriteNode), - triggers, - config.progressReporter)(taskToNode) + val x = new Execute[Task]( + Execute.config(config.checkCycles, overwriteNode), + triggers, + config.progressReporter + )(taskToNode) val (newState, result) = try { val results = x.runKeep(root)(service) @@ -406,15 +439,19 @@ object EvaluateTask { finally strat.onTaskEngineFinish(cancelState) } - private[this] def storeValuesForPrevious(results: RMap[Task, Result], - state: State, - streams: Streams): Unit = + private[this] def storeValuesForPrevious( + results: RMap[Task, Result], + state: State, + streams: Streams + ): Unit = for (referenced <- Previous.references in Global get Project.structure(state).data) Previous.complete(referenced, results, streams) - def applyResults[T](results: RMap[Task, Result], - state: State, - root: Task[T]): (State, Result[T]) = + def applyResults[T]( + results: RMap[Task, Result], + state: State, + root: Task[T] + ): (State, Result[T]) = (stateTransform(results)(state), results(root)) def stateTransform(results: RMap[Task, Result]): State => State = Function.chain( @@ -433,12 +470,21 @@ object EvaluateTask { case in @ Incomplete(Some(node: Task[_]), _, _, _, _) => in.copy(node = transformNode(node)) case i => i } + type AnyCyclic = Execute[({ type A[_] <: AnyRef })#A]#CyclicException[_] + def convertCyclicInc: Incomplete => Incomplete = { - case in @ Incomplete(_, _, _, _, Some(c: AnyCyclic)) => + case in @ Incomplete( + _, + _, + _, + _, + Some(c: Execute[({ type A[_] <: AnyRef })#A @unchecked]#CyclicException[_]) + ) => in.copy(directCause = Some(new 
RuntimeException(convertCyclic(c)))) case i => i } + def convertCyclic(c: AnyCyclic): String = (c.caller, c.target) match { case (caller: Task[_], target: Task[_]) => @@ -448,7 +494,7 @@ object EvaluateTask { } def liftAnonymous: Incomplete => Incomplete = { - case i @ Incomplete(node, tpe, None, causes, None) => + case i @ Incomplete(_, _, None, causes, None) => causes.find(inc => inc.node.isEmpty && (inc.message.isDefined || inc.directCause.isDefined)) match { case Some(lift) => i.copy(directCause = lift.directCause, message = lift.message) case None => i @@ -456,12 +502,19 @@ object EvaluateTask { case i => i } + @deprecated("Use processResult2 which doesn't take the unused log param", "1.1.1") def processResult[T](result: Result[T], log: Logger, show: Boolean = false): T = - onResult(result, log) { v => + processResult2(result, show) + + def processResult2[T](result: Result[T], show: Boolean = false): T = + onResult(result) { v => if (show) println("Result: " + v); v } - def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S = + @deprecated("Use variant that doesn't take log", "1.1.1") + def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S = onResult(result)(f) + + def onResult[T, S](result: Result[T])(f: T => S): S = result match { case Value(v) => f(v) case Inc(inc) => throw inc diff --git a/main/src/main/scala/sbt/Extracted.scala b/main/src/main/scala/sbt/Extracted.scala index 4ed0e0c8b..20c2115a1 100644 --- a/main/src/main/scala/sbt/Extracted.scala +++ b/main/src/main/scala/sbt/Extracted.scala @@ -8,7 +8,6 @@ package sbt import sbt.internal.{ Load, BuildStructure, Act, Aggregation, SessionSettings } -import Project._ import Scope.GlobalScope import Def.{ ScopedKey, Setting } import sbt.internal.util.complete.Parser @@ -17,9 +16,11 @@ import sbt.util.Show import std.Transform.DummyTaskMap import sbt.EvaluateTask.extractedTaskConfig -final case class Extracted(structure: BuildStructure, - session: SessionSettings, - currentRef: 
ProjectRef)(implicit val showKey: Show[ScopedKey[_]]) { +final case class Extracted( + structure: BuildStructure, + session: SessionSettings, + currentRef: ProjectRef +)(implicit val showKey: Show[ScopedKey[_]]) { def rootProject = structure.rootProject lazy val currentUnit = structure units currentRef.build lazy val currentProject = currentUnit defined currentRef.project @@ -43,7 +44,7 @@ final case class Extracted(structure: BuildStructure, structure.data.get(inCurrent(key.scope), key.key) private[this] def inCurrent[T](scope: Scope): Scope = - if (scope.project == This) scope.copy(project = Select(currentRef)) else scope + if (scope.project == This) scope in currentRef else scope /** * Runs the task specified by `key` and returns the transformed State and the resulting value of the task. @@ -54,12 +55,12 @@ final case class Extracted(structure: BuildStructure, * See `runAggregated` for that. */ def runTask[T](key: TaskKey[T], state: State): (State, T) = { - val rkey = resolve(key.scopedKey) + val rkey = resolve(key) val config = extractedTaskConfig(this, structure, state) val value: Option[(State, Result[T])] = EvaluateTask(structure, key.scopedKey, state, currentRef, config) val (newS, result) = getOrError(rkey.scope, rkey.key, value) - (newS, EvaluateTask.processResult(result, newS.log)) + (newS, EvaluateTask.processResult2(result)) } /** @@ -72,22 +73,22 @@ final case class Extracted(structure: BuildStructure, * This method requests execution of only the given task and does not aggregate execution. 
*/ def runInputTask[T](key: InputKey[T], input: String, state: State): (State, T) = { - val scopedKey = ScopedKey( + val key2 = Scoped.scopedSetting( Scope.resolveScope(Load.projectScope(currentRef), currentRef.build, rootProject)(key.scope), key.key ) - val rkey = resolve(scopedKey) - val inputTask = get(Scoped.scopedSetting(rkey.scope, rkey.key)) + val rkey = resolve(key2) + val inputTask = get(rkey) val task = Parser.parse(input, inputTask.parser(state)) match { case Right(t) => t case Left(msg) => sys.error(s"Invalid programmatic input:\n$msg") } val config = extractedTaskConfig(this, structure, state) EvaluateTask.withStreams(structure, state) { str => - val nv = EvaluateTask.nodeView(state, str, rkey :: Nil) + val nv = EvaluateTask.nodeView(state, str, rkey.scopedKey :: Nil) val (newS, result) = EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(nv) - (newS, EvaluateTask.processResult(result, newS.log)) + (newS, EvaluateTask.processResult2(result)) } } @@ -98,31 +99,34 @@ final case class Extracted(structure: BuildStructure, * Other axes are resolved to `Zero` if unspecified. 
*/ def runAggregated[T](key: TaskKey[T], state: State): State = { - val rkey = resolve(key.scopedKey) + val rkey = resolve(key) val keys = Aggregation.aggregate(rkey, ScopeMask(), structure.extra) val tasks = Act.keyValues(structure)(keys) - Aggregation.runTasks(state, - structure, - tasks, - DummyTaskMap(Nil), - show = Aggregation.defaultShow(state, false))(showKey) + Aggregation.runTasks( + state, + tasks, + DummyTaskMap(Nil), + show = Aggregation.defaultShow(state, false), + )(showKey) } - private[this] def resolve[T](key: ScopedKey[T]): ScopedKey[T] = - Project.mapScope(Scope.resolveScope(GlobalScope, currentRef.build, rootProject))(key.scopedKey) + private[this] def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K = + key in Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope) private def getOrError[T](scope: Scope, key: AttributeKey[_], value: Option[T])( - implicit display: Show[ScopedKey[_]]): T = + implicit display: Show[ScopedKey[_]] + ): T = value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.") private def getOrError[T](scope: Scope, key: AttributeKey[T])( - implicit display: Show[ScopedKey[_]]): T = - structure.data.get(scope, key) getOrElse sys.error( - display.show(ScopedKey(scope, key)) + " is undefined.") + implicit display: Show[ScopedKey[_]] + ): T = + getOrError(scope, key, structure.data.get(scope, key))(display) @deprecated( "This discards session settings. 
Migrate to appendWithSession or appendWithoutSession.", - "1.2.0") + "1.2.0" + ) def append(settings: Seq[Setting[_]], state: State): State = appendWithoutSession(settings, state) diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index 473bfffba..45d9c2d96 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -42,6 +42,7 @@ import sbt.internal.{ } import sbt.io.{ FileFilter, WatchService } import sbt.internal.io.WatchState +import sbt.internal.server.ServerHandler import sbt.internal.util.{ AttributeKey, SourcePosition } import sbt.librarymanagement.Configurations.CompilerPlugin @@ -136,6 +137,8 @@ object Keys { val serverHost = SettingKey(BasicKeys.serverHost) val serverAuthentication = SettingKey(BasicKeys.serverAuthentication) val serverConnectionType = SettingKey(BasicKeys.serverConnectionType) + val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers) + val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.") val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting) val watch = SettingKey(BasicKeys.watch) @@ -220,6 +223,7 @@ object Keys { val scalaCompilerBridgeSource = settingKey[ModuleID]("Configures the module ID of the sources of the compiler bridge.").withRank(CSetting) val scalaArtifacts = settingKey[Seq[String]]("Configures the list of artifacts which should match the Scala binary version").withRank(CSetting) val enableBinaryCompileAnalysis = settingKey[Boolean]("Writes the analysis file in binary format") + val crossJavaVersions = settingKey[Seq[String]]("The java versions used during JDK cross testing").withRank(BPlusSetting) val clean = taskKey[Unit]("Deletes files produced by the build, such as generated sources, compiled classes, and task caches.").withRank(APlusTask) val console = taskKey[Unit]("Starts the Scala interpreter with the project classes on the 
classpath.").withRank(APlusTask) @@ -269,6 +273,10 @@ object Keys { val outputStrategy = settingKey[Option[sbt.OutputStrategy]]("Selects how to log output when running a main class.").withRank(DSetting) val connectInput = settingKey[Boolean]("If true, connects standard input when running a main class forked.").withRank(CSetting) val javaHome = settingKey[Option[File]]("Selects the Java installation used for compiling and forking. If None, uses the Java installation running the build.").withRank(ASetting) + val discoveredJavaHomes = settingKey[Map[String, File]]("Discovered Java home directories") + val javaHomes = settingKey[Map[String, File]]("The user-defined additional Java home directories") + val fullJavaHomes = settingKey[Map[String, File]]("Combines discoveredJavaHomes and custom javaHomes.").withRank(CTask) + val javaOptions = taskKey[Seq[String]]("Options passed to a new JVM when forking.").withRank(BPlusTask) val envVars = taskKey[Map[String, String]]("Environment variables used when forking a new JVM").withRank(BTask) @@ -447,7 +455,7 @@ object Keys { val sbtDependency = settingKey[ModuleID]("Provides a definition for declaring the current version of sbt.").withRank(BMinusSetting) val sbtVersion = settingKey[String]("Provides the version of sbt. This setting should not be modified.").withRank(AMinusSetting) val sbtBinaryVersion = settingKey[String]("Defines the binary compatibility version substring.").withRank(BPlusSetting) - val skip = taskKey[Boolean]("For tasks that support it (currently only 'compile' and 'update'), setting skip to true will force the task to not to do its work. This exact semantics may vary by task.").withRank(BSetting) + val skip = taskKey[Boolean]("For tasks that support it (currently only 'compile', 'update', and 'publish'), setting skip to true will force the task to not to do its work. 
This exact semantics may vary by task.").withRank(BSetting) val templateResolverInfos = settingKey[Seq[TemplateResolverInfo]]("Template resolvers used for 'new'.").withRank(BSetting) val interactionService = taskKey[InteractionService]("Service used to ask for user input through the current user interface(s).").withRank(CTask) val insideCI = SettingKey[Boolean]("insideCI", "Determines if the SBT is running in a Continuous Integration environment", AMinusSetting) diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 74781030f..db02cc7f3 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -14,6 +14,7 @@ import sbt.internal.{ BuildUnit, CommandExchange, CommandStrings, + CrossJava, DefaultBackgroundJobService, EvaluateConfigurations, Inspect, @@ -40,7 +41,6 @@ import sbt.internal.util.{ Types } import sbt.util.{ Level, Logger, Show } - import sbt.internal.util.complete.{ DefaultParsers, Parser } import sbt.internal.inc.ScalaInstance import sbt.compiler.EvalImports @@ -52,8 +52,6 @@ import xsbti.compile.CompilerCache import scala.annotation.tailrec import sbt.io.IO import sbt.io.syntax._ -import StandardMain._ - import java.io.{ File, IOException } import java.net.URI import java.util.{ Locale, Properties } @@ -69,34 +67,36 @@ final class xMain extends xsbti.AppMain { import BasicCommandStrings.runEarly import BuiltinCommands.defaults import sbt.internal.CommandStrings.{ BootCommand, DefaultsCommand, InitCommand } - val state = initialState( + val state = StandardMain.initialState( configuration, Seq(defaults, early), - runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil) - runManaged(state) + runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil + ) + StandardMain.runManaged(state) } } final class ScriptMain extends xsbti.AppMain { def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { import BasicCommandStrings.runEarly - runManaged( - 
initialState( - configuration, - BuiltinCommands.ScriptCommands, - runEarly(Level.Error.toString) :: Script.Name :: Nil - )) + val state = StandardMain.initialState( + configuration, + BuiltinCommands.ScriptCommands, + runEarly(Level.Error.toString) :: Script.Name :: Nil + ) + StandardMain.runManaged(state) } } final class ConsoleMain extends xsbti.AppMain { - def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = - runManaged( - initialState( - configuration, - BuiltinCommands.ConsoleCommands, - IvyConsole.Name :: Nil - )) + def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { + val state = StandardMain.initialState( + configuration, + BuiltinCommands.ConsoleCommands, + IvyConsole.Name :: Nil + ) + StandardMain.runManaged(state) + } } object StandardMain { @@ -121,13 +121,17 @@ object StandardMain { ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving ")) def initialGlobalLogging: GlobalLogging = - GlobalLogging.initial(MainAppender.globalDefault(console), - File.createTempFile("sbt", ".log"), - console) + GlobalLogging.initial( + MainAppender.globalDefault(console), + File.createTempFile("sbt", ".log"), + console + ) - def initialState(configuration: xsbti.AppConfiguration, - initialDefinitions: Seq[Command], - preCommands: Seq[String]): State = { + def initialState( + configuration: xsbti.AppConfiguration, + initialDefinitions: Seq[Command], + preCommands: Seq[String] + ): State = { // This is to workaround https://github.com/sbt/io/issues/110 sys.props.put("jna.nosys", "true") @@ -184,8 +188,11 @@ object BuiltinCommands { inspect, loadProjectImpl, loadFailed, + oldLoadFailed, Cross.crossBuild, Cross.switchVersion, + CrossJava.switchJavaHome, + CrossJava.crossJavaHome, PluginCross.pluginCross, PluginCross.pluginSwitch, Cross.crossRestoreSession, @@ -198,6 +205,7 @@ object BuiltinCommands { startServer, eval, last, + oldLastGrep, lastGrep, export, boot, @@ -286,26 +294,32 @@ object BuiltinCommands { case _ => 
si.actualVersion } - private[this] def quiet[T](t: => T): Option[T] = try { Some(t) } catch { - case e: Exception => None - } + private[this] def quiet[T](t: => T): Option[T] = + try Some(t) + catch { case _: Exception => None } def settingsCommand: Command = - showSettingLike(SettingsCommand, - settingsPreamble, - KeyRanks.MainSettingCutoff, - key => !isTask(key.manifest)) + showSettingLike( + SettingsCommand, + settingsPreamble, + KeyRanks.MainSettingCutoff, + key => !isTask(key.manifest) + ) def tasks: Command = - showSettingLike(TasksCommand, - tasksPreamble, - KeyRanks.MainTaskCutoff, - key => isTask(key.manifest)) + showSettingLike( + TasksCommand, + tasksPreamble, + KeyRanks.MainTaskCutoff, + key => isTask(key.manifest) + ) - def showSettingLike(command: String, - preamble: String, - cutoff: Int, - keep: AttributeKey[_] => Boolean): Command = + def showSettingLike( + command: String, + preamble: String, + cutoff: Int, + keep: AttributeKey[_] => Boolean + ): Command = Command(command, settingsBrief(command), settingsDetailed(command))(showSettingParser(keep)) { case (s: State, (verbosity: Int, selected: Option[String])) => if (selected.isEmpty) System.out.println(preamble) @@ -316,8 +330,9 @@ object BuiltinCommands { if (prominentOnly) System.out.println(moreAvailableMessage(command, selected.isDefined)) s } - def showSettingParser(keepKeys: AttributeKey[_] => Boolean)( - s: State): Parser[(Int, Option[String])] = + def showSettingParser( + keepKeys: AttributeKey[_] => Boolean + )(s: State): Parser[(Int, Option[String])] = verbosityParser ~ selectedParser(s, keepKeys).? 
def selectedParser(s: State, keepKeys: AttributeKey[_] => Boolean): Parser[String] = singleArgument(allTaskAndSettingKeys(s).filter(keepKeys).map(_.label).toSet) @@ -358,16 +373,19 @@ object BuiltinCommands { def sortByRank(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.rank) def withDescription(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.filter(_.description.isDefined) - def isTask(mf: Manifest[_])(implicit taskMF: Manifest[Task[_]], - inputMF: Manifest[InputTask[_]]): Boolean = + def isTask( + mf: Manifest[_] + )(implicit taskMF: Manifest[Task[_]], inputMF: Manifest[InputTask[_]]): Boolean = mf.runtimeClass == taskMF.runtimeClass || mf.runtimeClass == inputMF.runtimeClass def topNRanked(n: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).take(n) def highPass(rankCutoff: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).takeWhile(_.rank <= rankCutoff) - def tasksHelp(s: State, - filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]], - arg: Option[String]): String = { + def tasksHelp( + s: State, + filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]], + arg: Option[String] + ): String = { val commandAndDescription = taskDetail(filter(allTaskAndSettingKeys(s)), true) arg match { case Some(selected) => detail(selected, commandAndDescription.toMap) @@ -422,7 +440,7 @@ object BuiltinCommands { // For correct behavior, we also need to re-inject a settings logger, as we'll be re-evaluating settings val loggerInject = LogManager.settingsLogger(s) val withLogger = newSession.appendRaw(loggerInject :: Nil) - val show = Project.showContextKey(newSession, structure) + val show = Project.showContextKey2(newSession) val newStructure = Load.reapply(withLogger.mergeSettings, structure)(show) Project.setProject(newSession, newStructure, s) } @@ -446,19 +464,27 @@ object BuiltinCommands { )(cl) val setResult = if (all) SettingCompletions.setAll(extracted, settings) - else SettingCompletions.setThis(s, extracted, settings, arg) + else 
SettingCompletions.setThis(extracted, settings, arg) s.log.info(setResult.quietSummary) s.log.debug(setResult.verboseSummary) reapply(setResult.session, structure, s) } + @deprecated("Use variant that doesn't take a State", "1.1.1") def setThis( s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String ): SetResult = - SettingCompletions.setThis(s, extracted, settings, arg) + setThis(extracted, settings, arg) + + def setThis( + extracted: Extracted, + settings: Seq[Def.Setting[_]], + arg: String + ): SetResult = + SettingCompletions.setThis(extracted, settings, arg) def inspect: Command = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) { case (s, (option, sk)) => @@ -466,14 +492,29 @@ object BuiltinCommands { s } + @deprecated("Use `lastGrep` instead.", "1.2.0") + def oldLastGrep: Command = + lastGrepCommand(OldLastGrepCommand, oldLastGrepBrief, oldLastGrepDetailed, { s => + s.log.warn(deprecationWarningText(OldLastGrepCommand, LastGrepCommand)) + lastGrepParser(s) + }) + def lastGrep: Command = - Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) { + lastGrepCommand(LastGrepCommand, lastGrepBrief, lastGrepDetailed, lastGrepParser) + + private def lastGrepCommand( + name: String, + briefHelp: (String, String), + detail: String, + parser: State => Parser[(String, Option[AnyKeys])] + ): Command = + Command(name, briefHelp, detail)(parser) { case (s, (pattern, Some(sks))) => val (str, _, display) = extractLast(s) - Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display) + Output.lastGrep(sks, str.streams(s), pattern, printLast)(display) keepLastLog(s) case (s, (pattern, None)) => - for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast(s)) + for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast) keepLastLog(s) } @@ -515,7 +556,7 @@ object BuiltinCommands { lastOnly_keys <- keysParser kvs = Act.keyValues(structure)(lastOnly_keys._2) f <- if 
(lastOnly_keys._1) success(() => s) - else Aggregation.evaluatingParser(s, structure, show)(kvs) + else Aggregation.evaluatingParser(s, show)(kvs) } yield () => { def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream)) @@ -538,7 +579,7 @@ object BuiltinCommands { def last: Command = Command(LastCommand, lastBrief, lastDetailed)(aggregatedKeyValueParser) { case (s, Some(sks)) => lastImpl(s, sks, None) case (s, None) => - for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast(s)) + for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast) keepLastLog(s) } @@ -547,7 +588,7 @@ object BuiltinCommands { private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = { val (str, _, display) = extractLast(s) - Output.last(sks, str.streams(s), printLast(s), sid)(display) + Output.last(sks, str.streams(s), printLast, sid)(display) keepLastLog(s) } @@ -572,7 +613,10 @@ object BuiltinCommands { */ def isLastOnly(s: State): Boolean = s.history.previous.forall(_.commandLine == Shell) - def printLast(s: State): Seq[String] => Unit = _ foreach println + @deprecated("Use variant that doesn't take the state", "1.1.1") + def printLast(s: State): Seq[String] => Unit = printLast + + def printLast: Seq[String] => Unit = _ foreach println def autoImports(extracted: Extracted): EvalImports = new EvalImports(imports(extracted), "") @@ -623,8 +667,9 @@ object BuiltinCommands { } def projects: Command = - Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s => - projectsParser(s).?) { + Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)( + s => projectsParser(s).? 
+ ) { case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds) case (s, None) => showProjects(s); s } @@ -642,7 +687,7 @@ object BuiltinCommands { val extraUpdated = Project.updateExtraBuilds(s, f) try doLoadProject(extraUpdated, LoadAction.Current) catch { - case e: Exception => + case _: Exception => s.log.error("Project loading failed: reverting to previous state.") Project.setExtraBuilds(s, original) } @@ -659,11 +704,24 @@ object BuiltinCommands { Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command) def loadFailed: Command = Command(LoadFailed)(loadProjectParser)(doLoadFailed) + @deprecated("Use `loadFailed` instead.", "1.2.0") + def oldLoadFailed: Command = + Command(OldLoadFailed) { s => + s.log.warn( + deprecationWarningText(OldLoadFailed, LoadFailed) + ) + loadProjectParser(s) + }(doLoadFailed) + + private[this] def deprecationWarningText(oldCommand: String, newCommand: String) = { + s"The `$oldCommand` command is deprecated in favor of `$newCommand` and will be removed in a later version" + } @tailrec private[this] def doLoadFailed(s: State, loadArg: String): State = { val result = (SimpleReader.readLine( - "Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? ") getOrElse Quit) + "Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? 
" + ) getOrElse Quit) .toLowerCase(Locale.ENGLISH) def matches(s: String) = !result.isEmpty && (s startsWith result) def retry = loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog @@ -689,8 +747,9 @@ object BuiltinCommands { Nil def loadProject: Command = - Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)((s, arg) => - loadProjectCommands(arg) ::: s) + Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)( + (s, arg) => loadProjectCommands(arg) ::: s + ) private[this] def loadProjectParser: State => Parser[String] = _ => matched(Project.loadActionParser) @@ -712,11 +771,13 @@ object BuiltinCommands { Option(buildProperties.getProperty("sbt.version")) } else None - sbtVersionOpt.foreach(version => - if (version != app.id.version()) { - state.log.warn(s"""sbt version mismatch, current: ${app.id - .version()}, in build.properties: "$version", use 'reboot' to use the new value.""") - }) + sbtVersionOpt.foreach( + version => + if (version != app.id.version()) { + state.log.warn(s"""sbt version mismatch, current: ${app.id + .version()}, in build.properties: "$version", use 'reboot' to use the new value.""") + } + ) } def doLoadProject(s0: State, action: LoadAction.Value): State = { @@ -763,8 +824,10 @@ object BuiltinCommands { exchange publishEventMessage ConsolePromptEvent(s0) val exec: Exec = exchange.blockUntilNextExec val newState = s1 - .copy(onFailure = Some(Exec(Shell, None)), - remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands) + .copy( + onFailure = Some(Exec(Shell, None)), + remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands + ) .setInteractive(true) exchange publishEventMessage ConsoleUnpromptEvent(exec.source) if (exec.commandLine.trim.isEmpty) newState @@ -815,7 +878,7 @@ object BuiltinCommands { if (!java.lang.Boolean.getBoolean("sbt.skip.version.write") && !intendsToInvokeNew(state)) writeSbtVersionUnconditionally(state) - private def WriteSbtVersion = 
"write-sbt-version" + private def WriteSbtVersion = "writeSbtVersion" private def writeSbtVersion: Command = Command.command(WriteSbtVersion) { state => @@ -831,7 +894,7 @@ object BuiltinCommands { state.log info "Executing in batch mode. For better performance use sbt's shell" } - private def NotifyUsersAboutShell = "notify-users-about-shell" + private def NotifyUsersAboutShell = "notifyUsersAboutShell" private def notifyUsersAboutShell: Command = Command.command(NotifyUsersAboutShell) { state => diff --git a/main/src/main/scala/sbt/MainLoop.scala b/main/src/main/scala/sbt/MainLoop.scala index 2ff80e17e..3b007ced4 100644 --- a/main/src/main/scala/sbt/MainLoop.scala +++ b/main/src/main/scala/sbt/MainLoop.scala @@ -7,14 +7,16 @@ package sbt +import java.io.PrintWriter import java.util.Properties + +import jline.TerminalFactory + import scala.annotation.tailrec import scala.util.control.NonFatal -import jline.TerminalFactory import sbt.io.{ IO, Using } import sbt.internal.util.{ ErrorHandling, GlobalLogBacking } -import sbt.internal.util.complete.DefaultParsers import sbt.internal.langserver.ErrorCodes import sbt.util.Logger import sbt.protocol._ @@ -26,15 +28,14 @@ object MainLoop { // We've disabled jline shutdown hooks to prevent classloader leaks, and have been careful to always restore // the jline terminal in finally blocks, but hitting ctrl+c prevents finally blocks from being executed, in that // case the only way to restore the terminal is in a shutdown hook. 
- val shutdownHook = new Thread(new Runnable { - def run(): Unit = TerminalFactory.get().restore() - }) + val shutdownHook = new Thread(() => TerminalFactory.get().restore()) try { Runtime.getRuntime.addShutdownHook(shutdownHook) runLoggedLoop(state, state.globalLogging.backing) } finally { Runtime.getRuntime.removeShutdownHook(shutdownHook) + () } } @@ -66,7 +67,8 @@ object MainLoop { throw new xsbti.FullReload(e.arguments.toArray, false) case NonFatal(e) => System.err.println( - "sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file) + "sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file + ) deleteLastLog(logBacking) throw e } @@ -100,7 +102,7 @@ object MainLoop { /** Runs the next sequence of commands with global logging in place. */ def runWithNewLog(state: State, logBacking: GlobalLogBacking): RunNext = Using.fileWriter(append = true)(logBacking.file) { writer => - val out = new java.io.PrintWriter(writer) + val out = new PrintWriter(writer) val full = state.globalLogging.full val newLogging = state.globalLogging.newAppender(full, out, logBacking) // transferLevels(state, newLogging) @@ -124,7 +126,7 @@ object MainLoop { final class KeepGlobalLog(val state: State) extends RunNext final class Return(val result: xsbti.MainResult) extends RunNext - /** Runs the next sequence of commands that doesn't require global logging changes.*/ + /** Runs the next sequence of commands that doesn't require global logging changes. */ @tailrec def run(state: State): RunNext = state.next match { case State.Continue => run(next(state)) @@ -143,19 +145,10 @@ object MainLoop { /** This is the main function State transfer function of the sbt command processing. 
*/ def processCommand(exec: Exec, state: State): State = { - import DefaultParsers._ val channelName = exec.source map (_.channelName) - StandardMain.exchange publishEventMessage ExecStatusEvent("Processing", - channelName, - exec.execId, - Vector()) - val parser = Command combine state.definedCommands - val newState = parse(exec.commandLine, parser(state)) match { - case Right(s) => s() // apply command. command side effects happen here - case Left(errMsg) => - state.log error errMsg - state.fail - } + StandardMain.exchange publishEventMessage + ExecStatusEvent("Processing", channelName, exec.execId, Vector()) + val newState = Command.process(exec.commandLine, state) val doneEvent = ExecStatusEvent( "Done", channelName, diff --git a/main/src/main/scala/sbt/Opts.scala b/main/src/main/scala/sbt/Opts.scala index 2d6ac2c6c..e788b027e 100644 --- a/main/src/main/scala/sbt/Opts.scala +++ b/main/src/main/scala/sbt/Opts.scala @@ -45,9 +45,11 @@ object Opts { val sonatypeSnapshots = Resolver.sonatypeRepo("snapshots") val sonatypeStaging = MavenRepository( "sonatype-staging", - "https://oss.sonatype.org/service/local/staging/deploy/maven2") + "https://oss.sonatype.org/service/local/staging/deploy/maven2" + ) val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)( - Resolver.defaultPatterns) + Resolver.defaultPatterns + ) val sbtSnapshots = Resolver.bintrayRepo("sbt", "maven-snapshots") val sbtIvySnapshots = Resolver.bintrayIvyRepo("sbt", "ivy-snapshots") } diff --git a/main/src/main/scala/sbt/PluginCross.scala b/main/src/main/scala/sbt/PluginCross.scala index 91e823bbc..ea0d3fda1 100644 --- a/main/src/main/scala/sbt/PluginCross.scala +++ b/main/src/main/scala/sbt/PluginCross.scala @@ -16,7 +16,7 @@ import sbt.internal.Load import sbt.internal.CommandStrings._ import Cross.{ spacedFirst, requireSession } import sbt.librarymanagement.VersionNumber -import Project.{ inScope } +import Project.inScope /** * Module responsible for plugin 
cross building. @@ -24,9 +24,9 @@ import Project.{ inScope } private[sbt] object PluginCross { lazy val pluginSwitch: Command = { def switchParser(state: State): Parser[(String, String)] = { - val knownVersions = Nil - lazy val switchArgs = token(NotSpace.examples(knownVersions: _*)) ~ (token( - Space ~> matched(state.combinedParser)) ?? "") + lazy val switchArgs = token(NotSpace.examples()) ~ (token( + Space ~> matched(state.combinedParser) + ) ?? "") lazy val nextSpaced = spacedFirst(PluginSwitchCommand) token(PluginSwitchCommand ~ OptSpace) flatMap { _ => switchArgs & nextSpaced @@ -47,7 +47,7 @@ private[sbt] object PluginCross { val add = List(sbtVersion in GlobalScope in pluginCrossBuild :== version) ++ List(scalaVersion := scalaVersionSetting.value) ++ inScope(GlobalScope.copy(project = Select(currentRef)))( - Seq(scalaVersion := scalaVersionSetting.value) + scalaVersion := scalaVersionSetting.value ) val cleared = session.mergeSettings.filterNot(crossExclude) val newStructure = Load.reapply(cleared ++ add, structure) @@ -59,8 +59,11 @@ private[sbt] object PluginCross { def crossParser(state: State): Parser[String] = token(PluginCrossCommand <~ OptSpace) flatMap { _ => token( - matched(state.combinedParser & - spacedFirst(PluginCrossCommand))) + matched( + state.combinedParser & + spacedFirst(PluginCrossCommand) + ) + ) } def crossVersions(state: State): List[String] = { val x = Project.extract(state) diff --git a/main/src/main/scala/sbt/Plugins.scala b/main/src/main/scala/sbt/Plugins.scala index 0b65c6aea..7df9f0561 100644 --- a/main/src/main/scala/sbt/Plugins.scala +++ b/main/src/main/scala/sbt/Plugins.scala @@ -111,7 +111,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions { def extraProjects: Seq[Project] = Nil /** The [[Project]]s to add to the current build based on an existing project. 
*/ - def derivedProjects(proj: ProjectDefinition[_]): Seq[Project] = Nil + def derivedProjects(@deprecated("unused", "") proj: ProjectDefinition[_]): Seq[Project] = Nil private[sbt] def unary_! : Exclude = Exclude(this) @@ -202,10 +202,12 @@ object Plugins extends PluginsFunctions { _.head subsetOf knowledge0 }) log.debug( - s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}") + s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}" + ) Logic.reduce( clauses, - (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match { + (flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet + ) match { case Left(problem) => throw AutoPluginException(problem) case Right(results) => log.debug(s" :: deduced result: ${results}") @@ -224,20 +226,21 @@ object Plugins extends PluginsFunctions { _.label }) } - val retval = topologicalSort(selectedPlugins, log) + val retval = topologicalSort(selectedPlugins) // log.debug(s" :: sorted deduced result: ${retval.toString}") retval } } } } - private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = { - // log.debug(s"sorting: ns: ${ns.toString}") + + private[sbt] def topologicalSort(ns: List[AutoPlugin]): List[AutoPlugin] = { @tailrec - def doSort(found0: List[AutoPlugin], - notFound0: List[AutoPlugin], - limit0: Int): List[AutoPlugin] = { - // log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}") + def doSort( + found0: List[AutoPlugin], + notFound0: List[AutoPlugin], + limit0: Int + ): List[AutoPlugin] = { if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically") else if (notFound0.isEmpty) found0 else { @@ -250,16 +253,16 @@ object Plugins extends PluginsFunctions { val (roots, nonRoots) = ns partition (_.isRoot) doSort(roots, nonRoots, ns.size * ns.size + 1) } + private[sbt] def translateMessage(e: LogicException) = e match { 
case ic: InitialContradictions => - s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString( - ic.literals.toSeq)}" + s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}" case io: InitialOverlap => - s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString( - io.literals.toSeq)}" + s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(io.literals.toSeq)}" case cn: CyclicNegation => s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}" } + private[this] def literalsString(lits: Seq[Literal]): String = lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString (", ") @@ -271,9 +274,12 @@ object Plugins extends PluginsFunctions { val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}" throw AutoPluginException(message) } - private[this] def exclusionConflictError(requested: Plugins, - selected: Seq[AutoPlugin], - conflicting: Seq[AutoPlugin]): Unit = { + + private[this] def exclusionConflictError( + requested: Plugins, + selected: Seq[AutoPlugin], + conflicting: Seq[AutoPlugin] + ): Unit = { def listConflicts(ns: Seq[AutoPlugin]) = (ns map { c => val reasons = (if (flatten(requested) contains c) List("requested") @@ -360,14 +366,14 @@ ${listConflicts(conflicting)}""") // This would handle things like !!p or !(p && z) case Exclude(n) => hasInclude(n, p) case And(ns) => ns.forall(n => hasExclude(n, p)) - case b: Basic => false + case _: Basic => false case Empty => false } private[sbt] def hasInclude(n: Plugins, p: AutoPlugin): Boolean = n match { case `p` => true case Exclude(n) => hasExclude(n, p) case And(ns) => ns.forall(n => hasInclude(n, p)) - case b: Basic => false + 
case _: Basic => false case Empty => false } private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match { @@ -425,8 +431,9 @@ ${listConflicts(conflicting)}""") val pluginClazz = ap.getClass existsAutoImportVal(pluginClazz) .orElse( - catching(classOf[ClassNotFoundException]).opt( - Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader))) + catching(classOf[ClassNotFoundException]) + .opt(Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader)) + ) .isDefined } diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala index f292ca0e1..e78a47f14 100755 --- a/main/src/main/scala/sbt/Project.scala +++ b/main/src/main/scala/sbt/Project.scala @@ -27,11 +27,12 @@ import Keys.{ serverPort, serverAuthentication, serverConnectionType, + fullServerHandlers, logLevel, watch } import Scope.{ Global, ThisScope } -import Def.{ Flattened, Initialize, ScopedKey, Setting } +import Def.{ Flattened, Initialize, ScopedKey, Setting, SettingsDefinition } import sbt.internal.{ Load, BuildStructure, @@ -44,6 +45,7 @@ import sbt.internal.{ import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, ~> } import sbt.internal.util.Types.{ const, idFun } import sbt.internal.util.complete.DefaultParsers +import sbt.internal.server.ServerHandler import sbt.librarymanagement.Configuration import sbt.util.{ Show, Level } import sjsonnew.JsonFormat @@ -119,7 +121,45 @@ sealed trait ProjectDefinition[PR <: ProjectReference] { if (ts.isEmpty) Nil else s"$label: $ts" :: Nil } -sealed trait Project extends ProjectDefinition[ProjectReference] { +trait CompositeProject { + def componentProjects: Seq[Project] +} + +private[sbt] object CompositeProject { + + /** + * Expand user defined projects with the component projects of `compositeProjects`. + * + * If two projects with the same id appear in the user defined projects and + * in `compositeProjects.componentProjects`, the user defined project wins. 
+ * This is necessary for backward compatibility with the idioms: + * {{{ + * lazy val foo = crossProject + * lazy val fooJS = foo.js.settings(...) + * lazy val fooJVM = foo.jvm.settings(...) + * }}} + * and the rarer: + * {{{ + * lazy val fooJS = foo.js.settings(...) + * lazy val foo = crossProject + * lazy val fooJVM = foo.jvm.settings(...) + * }}} + */ + def expand(compositeProjects: Seq[CompositeProject]): Seq[Project] = { + val userProjects = compositeProjects.collect { case p: Project => p } + for (p <- compositeProjects.flatMap(_.componentProjects)) yield { + userProjects.find(_.id == p.id) match { + case Some(userProject) => userProject + case None => p + } + } + }.distinct + +} + +sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject { + def componentProjects: Seq[Project] = this :: Nil + private[sbt] def copy( id: String = id, base: File = base, @@ -280,23 +320,29 @@ object Project extends ProjectExtra { showContextKey(state, None) def showContextKey(state: State, keyNameColor: Option[String]): Show[ScopedKey[_]] = - if (isProjectLoaded(state)) showContextKey(session(state), structure(state), keyNameColor) + if (isProjectLoaded(state)) showContextKey2(session(state), keyNameColor) else Def.showFullKey + @deprecated("Use showContextKey2 which doesn't take the unused structure param", "1.1.1") def showContextKey( session: SessionSettings, structure: BuildStructure, keyNameColor: Option[String] = None ): Show[ScopedKey[_]] = - Def.showRelativeKey(session.current, structure.allProjects.size > 1, keyNameColor) + showContextKey2(session, keyNameColor) + + def showContextKey2( + session: SessionSettings, + keyNameColor: Option[String] = None + ): Show[ScopedKey[_]] = + Def.showRelativeKey2(session.current, keyNameColor) def showLoadingKey( loaded: LoadedBuild, keyNameColor: Option[String] = None ): Show[ScopedKey[_]] = - Def.showRelativeKey( + Def.showRelativeKey2( ProjectRef(loaded.root, 
loaded.units(loaded.root).rootProjects.head), - loaded.allProjectRefs.size > 1, keyNameColor ) @@ -407,7 +453,7 @@ object Project extends ProjectExtra { def extract(state: State): Extracted = extract(session(state), structure(state)) private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted = - Extracted(st, se, se.current)(showContextKey(se, st)) + Extracted(st, se, se.current)(showContextKey2(se)) def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] = ref match { case pr: ProjectRef => getProject(pr, structure); case _ => None } @@ -436,7 +482,8 @@ object Project extends ProjectExtra { val newState = unloaded.copy(attributes = newAttrs) // TODO: Fix this onLoad( - updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/ ) + updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/ + ) } def orIdentity[T](opt: Option[T => T]): T => T = opt getOrElse idFun @@ -469,9 +516,12 @@ object Project extends ProjectExtra { val authentication: Option[Set[ServerAuthentication]] = get(serverAuthentication) val connectionType: Option[ConnectionType] = get(serverConnectionType) val srvLogLevel: Option[Level.Value] = (logLevel in (ref, serverLog)).get(structure.data) + val hs: Option[Seq[ServerHandler]] = get(fullServerHandlers) val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true)) - val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(s.definedCommands, - projectCommand) + val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged( + s.definedCommands, + projectCommand + ) val newAttrs = s.attributes .setCond(Watched.Configuration, watched) @@ -485,6 +535,7 @@ object Project extends ProjectExtra { .put(templateResolverInfos.key, trs) .setCond(shellPrompt.key, prompt) .setCond(serverLogLevel, srvLogLevel) + .setCond(fullServerHandlers.key, hs) s.copy( attributes = newAttrs, 
definedCommands = newDefinedCommands @@ -507,7 +558,8 @@ object Project extends ProjectExtra { } } private[this] def overlappingTargets( - targets: Seq[(ProjectRef, File)]): Map[File, Seq[ProjectRef]] = + targets: Seq[(ProjectRef, File)] + ): Map[File, Seq[ProjectRef]] = targets.groupBy(_._2).filter(_._2.size > 1).mapValues(_.map(_._1)) private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = { @@ -540,15 +592,18 @@ object Project extends ProjectExtra { def delegates(structure: BuildStructure, scope: Scope, key: AttributeKey[_]): Seq[ScopedKey[_]] = structure.delegates(scope).map(d => ScopedKey(d, key)) - def scopedKeyData(structure: BuildStructure, - scope: Scope, - key: AttributeKey[_]): Option[ScopedKeyData[_]] = + def scopedKeyData( + structure: BuildStructure, + scope: Scope, + key: AttributeKey[_] + ): Option[ScopedKeyData[_]] = structure.data.get(scope, key) map { v => ScopedKeyData(ScopedKey(scope, key), v) } def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])( - implicit display: Show[ScopedKey[_]]): String = { + implicit display: Show[ScopedKey[_]] + ): String = { val scoped = ScopedKey(scope, key) val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse { @@ -589,20 +644,24 @@ object Project extends ProjectExtra { val reverse = reverseDependencies(cMap, scoped) val derivedReverse = reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet - def printDepScopes(baseLabel: String, - derivedLabel: String, - scopes: Iterable[ScopedKey[_]], - derived: Set[ScopedKey[_]]): String = { + def printDepScopes( + baseLabel: String, + derivedLabel: String, + scopes: Iterable[ScopedKey[_]], + derived: Set[ScopedKey[_]] + ): String = { val label = s"$baseLabel${if (derived.isEmpty) "" else s" (D=$derivedLabel)"}" val prefix: ScopedKey[_] => String = if (derived.isEmpty) const("") else sk => if (derived(sk)) "D " else " " printScopes(label, scopes, prefix = prefix) } - 
def printScopes(label: String, - scopes: Iterable[ScopedKey[_]], - max: Int = Int.MaxValue, - prefix: ScopedKey[_] => String = const("")) = + def printScopes( + label: String, + scopes: Iterable[ScopedKey[_]], + max: Int = Int.MaxValue, + prefix: ScopedKey[_] => String = const("") + ) = if (scopes.isEmpty) "" else { val (limited, more) = @@ -620,23 +679,27 @@ object Project extends ProjectExtra { printScopes("Related", related, 10) } def settingGraph(structure: BuildStructure, basedir: File, scoped: ScopedKey[_])( - implicit display: Show[ScopedKey[_]]): SettingGraph = + implicit display: Show[ScopedKey[_]] + ): SettingGraph = SettingGraph(structure, basedir, scoped, 0) def graphSettings(structure: BuildStructure, basedir: File)( - implicit display: Show[ScopedKey[_]]): Unit = { + implicit display: Show[ScopedKey[_]] + ): Unit = { def graph(actual: Boolean, name: String) = graphSettings(structure, actual, name, new File(basedir, name + ".dot")) graph(true, "actual_dependencies") graph(false, "declared_dependencies") } def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)( - implicit display: Show[ScopedKey[_]]): Unit = { + implicit display: Show[ScopedKey[_]] + ): Unit = { val rel = relation(structure, actual) val keyToString = display.show _ DotGraph.generateGraph(file, graphName, rel, keyToString, keyToString) } def relation(structure: BuildStructure, actual: Boolean)( - implicit display: Show[ScopedKey[_]]): Relation[ScopedKey[_], ScopedKey[_]] = + implicit display: Show[ScopedKey[_]] + ): Relation[ScopedKey[_], ScopedKey[_]] = relation(structure.settings, actual)(structure.delegates, structure.scopeLocal, display) private[sbt] def relation(settings: Seq[Def.Setting[_]], actual: Boolean)( @@ -650,7 +713,8 @@ object Project extends ProjectExtra { } def showDefinitions(key: AttributeKey[_], defs: Seq[Scope])( - implicit display: Show[ScopedKey[_]]): String = + implicit display: Show[ScopedKey[_]] + ): String = 
showKeys(defs.map(scope => ScopedKey(scope, key))) def showUses(defs: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String = @@ -660,17 +724,21 @@ object Project extends ProjectExtra { s.map(display.show).sorted.mkString("\n\t", "\n\t", "\n\n") def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])( - implicit display: Show[ScopedKey[_]]): Seq[Scope] = + implicit display: Show[ScopedKey[_]] + ): Seq[Scope] = relation(structure, actual)(display)._1s.toSeq flatMap { sk => if (sk.key == key) sk.scope :: Nil else Nil } def usedBy(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])( - implicit display: Show[ScopedKey[_]]): Seq[ScopedKey[_]] = + implicit display: Show[ScopedKey[_]] + ): Seq[ScopedKey[_]] = relation(structure, actual)(display).all.toSeq flatMap { case (a, b) => if (b.key == key) List[ScopedKey[_]](a) else Nil } - def reverseDependencies(cMap: Map[ScopedKey[_], Flattened], - scoped: ScopedKey[_]): Iterable[ScopedKey[_]] = + def reverseDependencies( + cMap: Map[ScopedKey[_], Flattened], + scoped: ScopedKey[_] + ): Iterable[ScopedKey[_]] = for ((key, compiled) <- cMap; dep <- compiled.dependencies if dep == scoped) yield key def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings = @@ -678,7 +746,8 @@ object Project extends ProjectExtra { val ExtraBuilds = AttributeKey[List[URI]]( "extra-builds", - "Extra build URIs to load in addition to the ones defined by the project.") + "Extra build URIs to load in addition to the ones defined by the project." 
+ ) def extraBuilds(s: State): List[URI] = getOrNil(s, ExtraBuilds) def getOrNil[T](s: State, key: AttributeKey[List[T]]): List[T] = s get key getOrElse Nil def setExtraBuilds(s: State, extra: List[URI]): State = s.put(ExtraBuilds, extra) @@ -751,7 +820,9 @@ object Project extends ProjectExtra { EvaluateTask(extracted.structure, taskKey, state, extracted.currentRef, config) } - implicit def projectToRef(p: Project): ProjectReference = LocalProject(p.id) + def projectToRef(p: Project): ProjectReference = LocalProject(p.id) + + implicit def projectToLocalProject(p: Project): LocalProject = LocalProject(p.id) final class RichTaskSessionVar[S](i: Def.Initialize[Task[S]]) { import SessionVar.{ persistAndSet, resolveContext, set, transform => tx } @@ -762,15 +833,20 @@ object Project extends ProjectExtra { import TupleSyntax._ (Keys.resolvedScoped, i)( (scoped, task) => - tx(task, - (state, value) => - persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))) + tx( + task, + (state, value) => + persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f) + ) + ) } def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] = { import TupleSyntax._ - (i, Keys.resolvedScoped)((t, scoped) => - tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))) + (i, Keys.resolvedScoped)( + (t, scoped) => + tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)) + ) } } @@ -781,7 +857,8 @@ object Project extends ProjectExtra { val enclosingValName = std.KeyMacro.definingValName( c, methodName => - s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`""") + s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. 
Alternatively, you can use `sbt.Project.apply`""" + ) val name = c.Expr[String](Literal(Constant(enclosingValName))) reify { Project(name.splice, new File(name.splice)) } } @@ -790,8 +867,9 @@ object Project extends ProjectExtra { private[sbt] trait GeneratedRootProject trait ProjectExtra { - implicit def configDependencyConstructor[T](p: T)( - implicit ev: T => ProjectReference): Constructor = + implicit def configDependencyConstructor[T]( + p: T + )(implicit ev: T => ProjectReference): Constructor = new Constructor(p) implicit def classpathDependency[T]( @@ -804,7 +882,8 @@ trait ProjectExtra { new Scoped.RichInitializeTask(init) implicit def richInitializeInputTask[T]( - init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] = + init: Initialize[InputTask[T]] + ): Scoped.RichInitializeInputTask[T] = new Scoped.RichInitializeInputTask(init) implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] = @@ -813,17 +892,19 @@ trait ProjectExtra { implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] = new Project.RichTaskSessionVar(init) - def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] = - inScope(ThisScope.copy(project = Select(ThisBuild)))(ss) + def inThisBuild(ss: SettingsDefinition*): Seq[Setting[_]] = + inScope(ThisScope.copy(project = Select(ThisBuild)))(ss flatMap (_.settings)) - def inConfig(conf: Configuration)(ss: Seq[Setting[_]]): Seq[Setting[_]] = - inScope(ThisScope.copy(config = Select(conf)))((configuration :== conf) +: ss) + def inConfig(conf: Configuration)(ss: SettingsDefinition*): Seq[Setting[_]] = + inScope(ThisScope.copy(config = Select(conf)))( + (configuration :== conf) +: (ss flatMap (_.settings)) + ) - def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] = - inScope(ThisScope.copy(task = Select(t.key)))(ss) + def inTask(t: Scoped)(ss: SettingsDefinition*): Seq[Setting[_]] = + inScope(ThisScope.copy(task = Select(t.key)))(ss flatMap (_.settings)) - def 
inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] = - Project.transform(Scope.replaceThis(scope), ss) + def inScope(scope: Scope)(ss: SettingsDefinition*): Seq[Setting[_]] = + Project.transform(Scope.replaceThis(scope), ss flatMap (_.settings)) private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] = inScope(ThisScope.copy(project = Select(ThisBuild)), i) diff --git a/main/src/main/scala/sbt/Resolvers.scala b/main/src/main/scala/sbt/Resolvers.scala index c574c55aa..36afc0217 100644 --- a/main/src/main/scala/sbt/Resolvers.scala +++ b/main/src/main/scala/sbt/Resolvers.scala @@ -40,7 +40,7 @@ object Resolvers { val to = uniqueSubdirectoryFor(info.uri, in = info.staging) Some { () => - creates(to) { IO.unzipURL(url, to) } + creates(to) { IO.unzipURL(url, to); () } } } diff --git a/main/src/main/scala/sbt/RichURI.scala b/main/src/main/scala/sbt/RichURI.scala index 78a06372d..2e54ca65d 100644 --- a/main/src/main/scala/sbt/RichURI.scala +++ b/main/src/main/scala/sbt/RichURI.scala @@ -17,13 +17,15 @@ class RichURI(uri: URI) { * Note that this method simply passes the individual components of this URI to the URI constructor * that accepts each component individually. It is thus limited by the implementation restrictions of the relevant methods. */ - def copy(scheme: String = uri.getScheme, - userInfo: String = uri.getUserInfo, - host: String = uri.getHost, - port: Int = uri.getPort, - path: String = uri.getPath, - query: String = uri.getQuery, - fragment: String = uri.getFragment) = + def copy( + scheme: String = uri.getScheme, + userInfo: String = uri.getUserInfo, + host: String = uri.getHost, + port: Int = uri.getPort, + path: String = uri.getPath, + query: String = uri.getQuery, + fragment: String = uri.getFragment + ) = new URI(scheme, userInfo, host, port, path, query, fragment) /** Returns `true` if the fragment of the URI is defined. 
*/ diff --git a/main/src/main/scala/sbt/ScopeFilter.scala b/main/src/main/scala/sbt/ScopeFilter.scala index a303ca09a..fea70d23e 100644 --- a/main/src/main/scala/sbt/ScopeFilter.scala +++ b/main/src/main/scala/sbt/ScopeFilter.scala @@ -10,7 +10,7 @@ package sbt import sbt.internal.{ Load, LoadedBuildUnit } import sbt.internal.util.{ AttributeKey, Dag, Types } -import sbt.librarymanagement.Configuration +import sbt.librarymanagement.{ Configuration, ConfigRef } import Types.const import Def.Initialize @@ -30,9 +30,11 @@ object ScopeFilter { * If a task filter is not supplied, global is selected. * Generally, always specify the project axis. */ - def apply(projects: ProjectFilter = inProjects(ThisProject), - configurations: ConfigurationFilter = zeroAxis, - tasks: TaskFilter = zeroAxis): ScopeFilter = + def apply( + projects: ProjectFilter = inProjects(ThisProject), + configurations: ConfigurationFilter = zeroAxis, + tasks: TaskFilter = zeroAxis + ): ScopeFilter = new ScopeFilter { private[sbt] def apply(data: Data): Scope => Boolean = { val pf = projects(data) @@ -104,7 +106,7 @@ object ScopeFilter { /** Selects all scopes that apply to a single project. Zero and build-level scopes are excluded. */ def inAnyProject: ProjectFilter = - selectAxis(const { case p: ProjectRef => true; case _ => false }) + selectAxis(const { case _: ProjectRef => true; case _ => false }) /** Accepts all values for the task axis except Zero. */ def inAnyTask: TaskFilter = selectAny[AttributeKey[_]] @@ -116,27 +118,35 @@ object ScopeFilter { * Selects Scopes that have a project axis that is aggregated by `ref`, transitively if `transitive` is true. * If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected. 
*/ - def inAggregates(ref: ProjectReference, - transitive: Boolean = true, - includeRoot: Boolean = true): ProjectFilter = - byDeps(ref, - transitive = transitive, - includeRoot = includeRoot, - aggregate = true, - classpath = false) + def inAggregates( + ref: ProjectReference, + transitive: Boolean = true, + includeRoot: Boolean = true + ): ProjectFilter = + byDeps( + ref, + transitive = transitive, + includeRoot = includeRoot, + aggregate = true, + classpath = false + ) /** * Selects Scopes that have a project axis that is a dependency of `ref`, transitively if `transitive` is true. * If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected. */ - def inDependencies(ref: ProjectReference, - transitive: Boolean = true, - includeRoot: Boolean = true): ProjectFilter = - byDeps(ref, - transitive = transitive, - includeRoot = includeRoot, - aggregate = false, - classpath = true) + def inDependencies( + ref: ProjectReference, + transitive: Boolean = true, + includeRoot: Boolean = true + ): ProjectFilter = + byDeps( + ref, + transitive = transitive, + includeRoot = includeRoot, + aggregate = false, + classpath = true + ) /** Selects Scopes that have a project axis with one of the provided values.*/ def inProjects(projects: ProjectReference*): ProjectFilter = @@ -154,6 +164,16 @@ object ScopeFilter { selectAxis[ConfigKey](const(c => cs(c.name))) } + def inConfigurationsByKeys(keys: ConfigKey*): ConfigurationFilter = { + val cs = keys.toSet + selectAxis[ConfigKey](const(cs)) + } + + def inConfigurationsByRefs(refs: ConfigRef*): ConfigurationFilter = { + val cs = refs.map(r => ConfigKey(r.name)).toSet + selectAxis[ConfigKey](const(cs)) + } + implicit def settingKeyAll[T](key: Initialize[T]): SettingKeyAll[T] = new SettingKeyAll[T](key) implicit def taskKeyAll[T](key: Initialize[Task[T]]): TaskKeyAll[T] = new TaskKeyAll[T](key) } @@ -162,9 +182,11 @@ object ScopeFilter { * Information provided to Scope filters. 
These provide project relationships, * project reference resolution, and the list of all static Scopes. */ - private final class Data(val units: Map[URI, LoadedBuildUnit], - val resolve: ProjectReference => ProjectRef, - val allScopes: Set[Scope]) + private final class Data( + val units: Map[URI, LoadedBuildUnit], + val resolve: ProjectReference => ProjectRef, + val allScopes: Set[Scope] + ) /** Constructs a Data instance from the list of static scopes and the project relationships.*/ private[this] val getData: Initialize[Data] = @@ -185,20 +207,24 @@ object ScopeFilter { new Data(build.units, resolve, scopes) } - private[this] def getDependencies(structure: Map[URI, LoadedBuildUnit], - classpath: Boolean, - aggregate: Boolean): ProjectRef => Seq[ProjectRef] = + private[this] def getDependencies( + structure: Map[URI, LoadedBuildUnit], + classpath: Boolean, + aggregate: Boolean + ): ProjectRef => Seq[ProjectRef] = ref => Project.getProject(ref, structure).toList flatMap { p => (if (classpath) p.dependencies.map(_.project) else Nil) ++ (if (aggregate) p.aggregate else Nil) } - private[this] def byDeps(ref: ProjectReference, - transitive: Boolean, - includeRoot: Boolean, - aggregate: Boolean, - classpath: Boolean): ProjectFilter = + private[this] def byDeps( + ref: ProjectReference, + transitive: Boolean, + includeRoot: Boolean, + aggregate: Boolean, + classpath: Boolean + ): ProjectFilter = inResolvedProjects { data => val resolvedRef = data.resolve(ref) val direct = getDependencies(data.units, classpath = classpath, aggregate = aggregate) diff --git a/main/src/main/scala/sbt/ScopedKeyData.scala b/main/src/main/scala/sbt/ScopedKeyData.scala index 3574f455d..abfabb2f4 100644 --- a/main/src/main/scala/sbt/ScopedKeyData.scala +++ b/main/src/main/scala/sbt/ScopedKeyData.scala @@ -16,9 +16,11 @@ final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) { def typeName: String = fold(fmtMf("Task[%s]"), fmtMf("InputTask[%s]"), key.manifest.toString) def 
settingValue: Option[Any] = fold(const(None), const(None), Some(value)) def description: String = - fold(fmtMf("Task: %s"), - fmtMf("Input task: %s"), - "Setting: %s = %s" format (key.manifest.toString, value.toString)) + fold( + fmtMf("Task: %s"), + fmtMf("Input task: %s"), + "Setting: %s = %s" format (key.manifest.toString, value.toString) + ) def fold[T](targ: OptManifest[_] => T, itarg: OptManifest[_] => T, s: => T): T = key.manifest.runtimeClass match { case TaskClass => targ(key.manifest.typeArguments.head) diff --git a/main/src/main/scala/sbt/ScriptedPlugin.scala b/main/src/main/scala/sbt/ScriptedPlugin.scala new file mode 100644 index 000000000..f1dfd3434 --- /dev/null +++ b/main/src/main/scala/sbt/ScriptedPlugin.scala @@ -0,0 +1,192 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt + +import java.io.File +import java.lang.reflect.Method + +import sbt.io._ +import sbt.io.syntax._ + +import sbt.internal.util.complete.{ Parser, DefaultParsers } + +import sbt.librarymanagement._ +import sbt.librarymanagement.syntax._ + +import sbt.internal.inc.classpath.ClasspathUtilities +import sbt.internal.inc.ModuleUtilities + +import Def._ +import Keys._ +import Project._ + +object ScriptedPlugin extends AutoPlugin { + object autoImport { + val ScriptedConf = Configurations.config("scripted-sbt") hide + val ScriptedLaunchConf = Configurations.config("scripted-sbt-launch") hide + + val scriptedSbt = settingKey[String]("") + val sbtLauncher = taskKey[File]("") + val sbtTestDirectory = settingKey[File]("") + val scriptedBufferLog = settingKey[Boolean]("") + val scriptedClasspath = taskKey[PathFinder]("") + val scriptedTests = taskKey[AnyRef]("") + val scriptedBatchExecution = + settingKey[Boolean]("Enables or disables batch execution for scripted.") + val scriptedParallelInstances = settingKey[Int]( + "Configures the number of scripted instances for 
parallel testing, only used in batch mode." + ) + val scriptedRun = taskKey[Method]("") + val scriptedLaunchOpts = + settingKey[Seq[String]]("options to pass to jvm launching scripted tasks") + val scriptedDependencies = taskKey[Unit]("") + val scripted = inputKey[Unit]("") + } + import autoImport._ + + override lazy val globalSettings = Seq( + scriptedBufferLog := true, + scriptedLaunchOpts := Seq(), + ) + + override lazy val projectSettings = Seq( + ivyConfigurations ++= Seq(ScriptedConf, ScriptedLaunchConf), + scriptedSbt := (sbtVersion in pluginCrossBuild).value, + sbtLauncher := getJars(ScriptedLaunchConf).map(_.get.head).value, + sbtTestDirectory := sourceDirectory.value / "sbt-test", + libraryDependencies ++= (CrossVersion.partialVersion(scriptedSbt.value) match { + case Some((0, 13)) => + Seq( + "org.scala-sbt" % "scripted-sbt" % scriptedSbt.value % ScriptedConf, + "org.scala-sbt" % "sbt-launch" % scriptedSbt.value % ScriptedLaunchConf + ) + case Some((1, _)) => + Seq( + "org.scala-sbt" %% "scripted-sbt" % scriptedSbt.value % ScriptedConf, + "org.scala-sbt" % "sbt-launch" % scriptedSbt.value % ScriptedLaunchConf + ) + case Some((x, y)) => sys error s"Unknown sbt version ${scriptedSbt.value} ($x.$y)" + case None => sys error s"Unknown sbt version ${scriptedSbt.value}" + }), + scriptedClasspath := getJars(ScriptedConf).value, + scriptedTests := scriptedTestsTask.value, + scriptedParallelInstances := 1, + scriptedBatchExecution := false, + scriptedRun := scriptedRunTask.value, + scriptedDependencies := { + def use[A](@deprecated("unused", "") x: A*): Unit = () // avoid unused warnings + val analysis = (Keys.compile in Test).value + val pub = (publishLocal).value + use(analysis, pub) + }, + scripted := scriptedTask.evaluated + ) + + private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] = + Def.task { + val loader = ClasspathUtilities.toLoader(scriptedClasspath.value, scalaInstance.value.loader) + try { + 
ModuleUtilities.getObject("sbt.scriptedtest.ScriptedTests", loader) + } catch { + case _: ClassNotFoundException => + ModuleUtilities.getObject("sbt.test.ScriptedTests", loader) + } + } + + private[sbt] def scriptedRunTask: Initialize[Task[Method]] = Def.taskDyn { + val fCls = classOf[File] + val bCls = classOf[Boolean] + val asCls = classOf[Array[String]] + val lfCls = classOf[java.util.List[File]] + val iCls = classOf[Int] + + val clazz = scriptedTests.value.getClass + val method = + if (scriptedBatchExecution.value) + clazz.getMethod("runInParallel", fCls, bCls, asCls, fCls, asCls, lfCls, iCls) + else + clazz.getMethod("run", fCls, bCls, asCls, fCls, asCls, lfCls) + + Def.task(method) + } + + private[sbt] final case class ScriptedTestPage(page: Int, total: Int) + + private[sbt] def scriptedParser(scriptedBase: File): Parser[Seq[String]] = { + import DefaultParsers._ + + val scriptedFiles: NameFilter = ("test": NameFilter) | "pending" + val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get map { + (f: File) => + val p = f.getParentFile + (p.getParentFile.getName, p.getName) + } + val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet) + + val id = charClass(c => !c.isWhitespace && c != '/').+.string + val groupP = token(id.examples(pairMap.keySet)) <~ token('/') + + // A parser for page definitions + val pageP: Parser[ScriptedTestPage] = ("*" ~ NatBasic ~ "of" ~ NatBasic) map { + case _ ~ page ~ _ ~ total => ScriptedTestPage(page, total) + } + + // Grabs the filenames from a given test group in the current page definition. 
+ def pagedFilenames(group: String, page: ScriptedTestPage): Seq[String] = { + val files = pairMap(group).toSeq.sortBy(_.toLowerCase) + val pageSize = files.size / page.total + // The last page may loose some values, so we explicitly keep them + val dropped = files.drop(pageSize * (page.page - 1)) + if (page.page == page.total) dropped + else dropped.take(pageSize) + } + + def nameP(group: String) = { + token("*".id | id.examples(pairMap.getOrElse(group, Set.empty[String]))) + } + + val PagedIds: Parser[Seq[String]] = + for { + group <- groupP + page <- pageP + files = pagedFilenames(group, page) + // TODO - Fail the parser if we don't have enough files for the given page size + //if !files.isEmpty + } yield files map (f => s"$group/$f") + + val testID = (for (group <- groupP; name <- nameP(group)) yield (group, name)) + val testIdAsGroup = matched(testID) map (test => Seq(test)) + + //(token(Space) ~> matched(testID)).* + (token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten) + } + + private[sbt] def scriptedTask: Initialize[InputTask[Unit]] = Def.inputTask { + val args = scriptedParser(sbtTestDirectory.value).parsed + scriptedDependencies.value + try { + val method = scriptedRun.value + val scriptedInstance = scriptedTests.value + val dir = sbtTestDirectory.value + val log = Boolean box scriptedBufferLog.value + val launcher = sbtLauncher.value + val opts = scriptedLaunchOpts.value.toArray + val empty = new java.util.ArrayList[File]() + val instances = Int box scriptedParallelInstances.value + + if (scriptedBatchExecution.value) + method.invoke(scriptedInstance, dir, log, args.toArray, launcher, opts, empty, instances) + else method.invoke(scriptedInstance, dir, log, args.toArray, launcher, opts, empty) + () + } catch { case e: java.lang.reflect.InvocationTargetException => throw e.getCause } + } + + private[this] def getJars(config: Configuration): Initialize[Task[PathFinder]] = Def.task { + PathFinder(Classpaths.managedJars(config, classpathTypes.value, 
Keys.update.value).map(_.data)) + } +} diff --git a/main/src/main/scala/sbt/SessionVar.scala b/main/src/main/scala/sbt/SessionVar.scala index adbda41a5..da5e41213 100644 --- a/main/src/main/scala/sbt/SessionVar.scala +++ b/main/src/main/scala/sbt/SessionVar.scala @@ -28,7 +28,8 @@ object SessionVar { def emptyMap = Map(IMap.empty) def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)( - implicit f: JsonFormat[T]): State = { + implicit f: JsonFormat[T] + ): State = { persist(key, state, value)(f) set(key, state, value) } @@ -63,14 +64,15 @@ object SessionVar { def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] = Project.structure(state).streams(state).use(key) { s => - try { Some(s.getInput(key, DefaultDataID).read[T]) } catch { case NonFatal(e) => None } + try { Some(s.getInput(key, DefaultDataID).read[T]) } catch { case NonFatal(_) => None } } def load[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] = get(key, state) orElse read(key, state)(f) def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)( - implicit f: JsonFormat[T]): (State, Option[T]) = + implicit f: JsonFormat[T] + ): (State, Option[T]) = get(key, state) match { case s: Some[T] => (state, s) case None => diff --git a/main/src/main/scala/sbt/TemplateCommand.scala b/main/src/main/scala/sbt/TemplateCommand.scala index b30755c0c..a7759ed91 100644 --- a/main/src/main/scala/sbt/TemplateCommand.scala +++ b/main/src/main/scala/sbt/TemplateCommand.scala @@ -21,9 +21,11 @@ import BasicCommandStrings._, BasicKeys._ private[sbt] object TemplateCommandUtil { def templateCommand: Command = - Command(TemplateCommand, templateBrief, templateDetailed)(templateCommandParser)(runTemplate) + Command(TemplateCommand, templateBrief, templateDetailed)(_ => templateCommandParser)( + runTemplate + ) - private def templateCommandParser(state: State): Parser[Seq[String]] = + private def templateCommandParser: 
Parser[Seq[String]] = (token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF) map (_ => Nil)) private def runTemplate(s0: State, inputArg: Seq[String]): State = { @@ -34,10 +36,12 @@ private[sbt] object TemplateCommandUtil { val templateStage = stagingDirectory / "new" // This moves the target directory to a staging directory // https://github.com/sbt/sbt/issues/2835 - val state = extracted0.appendWithSession(Seq( - Keys.target := templateStage - ), - s0) + val state = extracted0.appendWithSession( + Seq( + Keys.target := templateStage + ), + s0 + ) val infos = (state get templateResolverInfos getOrElse Nil).toList val log = state.globalLogging.full val extracted = (Project extract state) @@ -73,19 +77,25 @@ private[sbt] object TemplateCommandUtil { case None => System.err.println("Template not found for: " + arguments.mkString(" ")) } - private def tryTemplate(info: TemplateResolverInfo, - arguments: List[String], - loader: ClassLoader): Boolean = { + private def tryTemplate( + info: TemplateResolverInfo, + arguments: List[String], + loader: ClassLoader + ): Boolean = { val resultObj = call(info.implementationClass, "isDefined", loader)( classOf[Array[String]] )(arguments.toArray) resultObj.asInstanceOf[Boolean] } - private def runTemplate(info: TemplateResolverInfo, - arguments: List[String], - loader: ClassLoader): Unit = + private def runTemplate( + info: TemplateResolverInfo, + arguments: List[String], + loader: ClassLoader + ): Unit = { call(info.implementationClass, "run", loader)(classOf[Array[String]])(arguments.toArray) + () + } private def infoLoader( info: TemplateResolverInfo, diff --git a/main/src/main/scala/sbt/internal/Act.scala b/main/src/main/scala/sbt/internal/Act.scala index 91051f3ec..953598933 100644 --- a/main/src/main/scala/sbt/internal/Act.scala +++ b/main/src/main/scala/sbt/internal/Act.scala @@ -8,7 +8,7 @@ package sbt package internal -import Def.{ showRelativeKey, ScopedKey } +import Def.{ showRelativeKey2, ScopedKey } import 
Keys.sessionSettings import sbt.internal.util.complete.{ DefaultParsers, Parser } import Aggregation.{ KeyValue, Values } @@ -32,44 +32,56 @@ object Act { token(OptSpace ~> '/' <~ OptSpace).examples("/").map(_ => ()) // this does not take aggregation into account - def scopedKey(index: KeyIndex, - current: ProjectRef, - defaultConfigs: Option[ResolvedReference] => Seq[String], - keyMap: Map[String, AttributeKey[_]], - data: Settings[Scope]): Parser[ScopedKey[_]] = + def scopedKey( + index: KeyIndex, + current: ProjectRef, + defaultConfigs: Option[ResolvedReference] => Seq[String], + keyMap: Map[String, AttributeKey[_]], + data: Settings[Scope] + ): Parser[ScopedKey[_]] = scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key) // the index should be an aggregated index for proper tab completion - def scopedKeyAggregated(current: ProjectRef, - defaultConfigs: Option[ResolvedReference] => Seq[String], - structure: BuildStructure): KeysParser = - for (selected <- scopedKeySelected(structure.index.aggregateKeyIndex, - current, - defaultConfigs, - structure.index.keyMap, - structure.data)) + def scopedKeyAggregated( + current: ProjectRef, + defaultConfigs: Option[ResolvedReference] => Seq[String], + structure: BuildStructure + ): KeysParser = + for (selected <- scopedKeySelected( + structure.index.aggregateKeyIndex, + current, + defaultConfigs, + structure.index.keyMap, + structure.data + )) yield Aggregation.aggregate(selected.key, selected.mask, structure.extra) - def scopedKeySelected(index: KeyIndex, - current: ProjectRef, - defaultConfigs: Option[ResolvedReference] => Seq[String], - keyMap: Map[String, AttributeKey[_]], - data: Settings[Scope]): Parser[ParsedKey] = + def scopedKeySelected( + index: KeyIndex, + current: ProjectRef, + defaultConfigs: Option[ResolvedReference] => Seq[String], + keyMap: Map[String, AttributeKey[_]], + data: Settings[Scope] + ): Parser[ParsedKey] = scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices 
=> - select(choices, data)(showRelativeKey(current, index.buildURIs.size > 1)) + select(choices, data)(showRelativeKey2(current)) } - def scopedKeyFull(index: KeyIndex, - current: ProjectRef, - defaultConfigs: Option[ResolvedReference] => Seq[String], - keyMap: Map[String, AttributeKey[_]]): Parser[Seq[Parser[ParsedKey]]] = { + def scopedKeyFull( + index: KeyIndex, + current: ProjectRef, + defaultConfigs: Option[ResolvedReference] => Seq[String], + keyMap: Map[String, AttributeKey[_]] + ): Parser[Seq[Parser[ParsedKey]]] = { def fullKey = for { rawProject <- optProjectRef(index, current) proj = resolveProject(rawProject, current) - confAmb <- configIdent(index configs proj, - index configIdents proj, - index.fromConfigIdent(proj)) + confAmb <- configIdent( + index configs proj, + index configIdents proj, + index.fromConfigIdent(proj) + ) partialMask = ScopeMask(rawProject.isExplicit, confAmb.isExplicit, false, false) } yield taskKeyExtra(index, defaultConfigs, keyMap, proj, confAmb, partialMask) @@ -78,12 +90,14 @@ object Act { for { g <- globalIdent } yield - taskKeyExtra(index, - defaultConfigs, - keyMap, - None, - ParsedZero, - ScopeMask(true, true, false, false)) + taskKeyExtra( + index, + defaultConfigs, + keyMap, + None, + ParsedZero, + ScopeMask(true, true, false, false) + ) globalKey | fullKey } @@ -100,7 +114,7 @@ object Act { conf <- configs(confAmb, defaultConfigs, proj, index) } yield for { - taskAmb <- taskAxis(conf, index.tasks(proj, conf), keyMap) + taskAmb <- taskAxis(index.tasks(proj, conf), keyMap) task = resolveTask(taskAmb) key <- key(index, proj, conf, task, keyMap) extra <- extraAxis(keyMap, IMap.empty) @@ -109,17 +123,21 @@ object Act { new ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask) } - def makeScopedKey(proj: Option[ResolvedReference], - conf: Option[String], - task: Option[AttributeKey[_]], - extra: ScopeAxis[AttributeMap], - key: AttributeKey[_]): ScopedKey[_] = + def makeScopedKey( + proj: Option[ResolvedReference], + 
conf: Option[String], + task: Option[AttributeKey[_]], + extra: ScopeAxis[AttributeMap], + key: AttributeKey[_] + ): ScopedKey[_] = ScopedKey( Scope(toAxis(proj, Zero), toAxis(conf map ConfigKey.apply, Zero), toAxis(task, Zero), extra), - key) + key + ) def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])( - implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = + implicit show: Show[ScopedKey[_]] + ): Parser[ParsedKey] = seq(allKeys) flatMap { ss => val default = ss.headOption match { case None => noValidKeys @@ -128,7 +146,8 @@ object Act { selectFromValid(ss filter isValid(data), default) } def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])( - implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = + implicit show: Show[ScopedKey[_]] + ): Parser[ParsedKey] = selectByTask(selectByConfig(ss)) match { case Seq() => default case Seq(single) => success(single) @@ -161,6 +180,7 @@ object Act { def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] = p !!! ("Expected " + label) examples exs + def examplesStrict(p: Parser[String], exs: Set[String], label: String): Parser[String] = filterStrings(examples(p, exs, label), exs, label) @@ -168,6 +188,7 @@ object Act { p.? map { opt => toAxis(opt, ifNone) } + def toAxis[T](opt: Option[T], ifNone: ScopeAxis[T]): ScopeAxis[T] = opt match { case Some(t) => Select(t); case None => ifNone } @@ -177,9 +198,11 @@ object Act { } // New configuration parser that's able to parse configuration ident trailed by slash. - private[sbt] def configIdent(confs: Set[String], - idents: Set[String], - fromIdent: String => String): Parser[ParsedAxis[String]] = { + private[sbt] def configIdent( + confs: Set[String], + idents: Set[String], + fromIdent: String => String + ): Parser[ParsedAxis[String]] = { val oldSep: Parser[Char] = ':' val sep: Parser[Unit] = spacedSlash !!! "Expected '/'" token( @@ -193,14 +216,17 @@ object Act { ) ?? 
Omitted } - def configs(explicit: ParsedAxis[String], - defaultConfigs: Option[ResolvedReference] => Seq[String], - proj: Option[ResolvedReference], - index: KeyIndex): Seq[Option[String]] = + def configs( + explicit: ParsedAxis[String], + defaultConfigs: Option[ResolvedReference] => Seq[String], + proj: Option[ResolvedReference], + index: KeyIndex + ): Seq[Option[String]] = explicit match { case Omitted => None +: defaultConfigurations(proj, index, defaultConfigs).flatMap( - nonEmptyConfig(index, proj)) + nonEmptyConfig(index, proj) + ) case ParsedZero | ParsedGlobal => None :: Nil case pv: ParsedValue[x] => Some(pv.value) :: Nil } @@ -212,15 +238,19 @@ object Act { ): Seq[String] = if (index exists proj) defaultConfigs(proj) else Nil - def nonEmptyConfig(index: KeyIndex, - proj: Option[ResolvedReference]): String => Seq[Option[String]] = + def nonEmptyConfig( + index: KeyIndex, + proj: Option[ResolvedReference] + ): String => Seq[Option[String]] = config => if (index.isEmpty(proj, Some(config))) Nil else Some(config) :: Nil - def key(index: KeyIndex, - proj: Option[ResolvedReference], - conf: Option[String], - task: Option[AttributeKey[_]], - keyMap: Map[String, AttributeKey[_]]): Parser[AttributeKey[_]] = { + def key( + index: KeyIndex, + proj: Option[ResolvedReference], + conf: Option[String], + task: Option[AttributeKey[_]], + keyMap: Map[String, AttributeKey[_]] + ): Parser[AttributeKey[_]] = { def dropHyphenated(keys: Set[String]): Set[String] = keys.filterNot(Util.hasHyphen) def keyParser(keys: Set[String]): Parser[AttributeKey[_]] = token(ID !!! "Expected key" examples dropHyphenated(keys)) flatMap { keyString => @@ -231,16 +261,18 @@ object Act { // This queries the key index so tab completion will list the build-level keys. 
val buildKeys: Set[String] = proj match { - case Some(ProjectRef(uri, id)) => index.keys(Some(BuildRef(uri)), conf, task) - case _ => Set() + case Some(ProjectRef(uri, _)) => index.keys(Some(BuildRef(uri)), conf, task) + case _ => Set() } val keys: Set[String] = index.keys(proj, conf, task) ++ buildKeys keyParser(keys) } - def getKey[T](keyMap: Map[String, AttributeKey[_]], - keyString: String, - f: AttributeKey[_] => T): Parser[T] = + def getKey[T]( + keyMap: Map[String, AttributeKey[_]], + keyString: String, + f: AttributeKey[_] => T + ): Parser[T] = keyMap.get(keyString) match { case Some(k) => success(f(k)) case None => failure(Command.invalidValue("key", keyMap.keys)(keyString)) @@ -248,16 +280,19 @@ object Act { val spacedComma = token(OptSpace ~ ',' ~ OptSpace) - def extraAxis(knownKeys: Map[String, AttributeKey[_]], - knownValues: IMap[AttributeKey, Set]): Parser[ScopeAxis[AttributeMap]] = { + def extraAxis( + knownKeys: Map[String, AttributeKey[_]], + knownValues: IMap[AttributeKey, Set] + ): Parser[ScopeAxis[AttributeMap]] = { val extrasP = extrasParser(knownKeys, knownValues) val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')') optionalAxis(extras, Zero) } - def taskAxis(d: Option[String], - tasks: Set[AttributeKey[_]], - allKnown: Map[String, AttributeKey[_]]): Parser[ParsedAxis[AttributeKey[_]]] = { + def taskAxis( + tasks: Set[AttributeKey[_]], + allKnown: Map[String, AttributeKey[_]], + ): Parser[ParsedAxis[AttributeKey[_]]] = { val taskSeq = tasks.toSeq def taskKeys(f: AttributeKey[_] => String): Seq[(String, AttributeKey[_])] = taskSeq.map(key => (f(key), key)) @@ -268,7 +303,8 @@ object Act { (token( value(keyP) | ZeroString ^^^ ParsedZero - | ZeroIdent ^^^ ParsedZero) <~ (token("::".id) | spacedSlash)) ?? Omitted + | ZeroIdent ^^^ ParsedZero + ) <~ (token("::".id) | spacedSlash)) ?? 
Omitted } def resolveTask(task: ParsedAxis[AttributeKey[_]]): Option[AttributeKey[_]] = @@ -280,8 +316,10 @@ object Act { def filterStrings(base: Parser[String], valid: Set[String], label: String): Parser[String] = base.filter(valid, Command.invalidValue(label, valid)) - def extrasParser(knownKeys: Map[String, AttributeKey[_]], - knownValues: IMap[AttributeKey, Set]): Parser[AttributeMap] = { + def extrasParser( + knownKeys: Map[String, AttributeKey[_]], + knownValues: IMap[AttributeKey, Set] + ): Parser[AttributeMap] = { val validKeys = knownKeys.filter { case (_, key) => knownValues get key exists (_.nonEmpty) } if (validKeys.isEmpty) failure("No valid extra keys.") @@ -289,8 +327,10 @@ object Act { rep1sep(extraParser(validKeys, knownValues), spacedComma) map AttributeMap.apply } - def extraParser(knownKeys: Map[String, AttributeKey[_]], - knownValues: IMap[AttributeKey, Set]): Parser[AttributeEntry[_]] = { + def extraParser( + knownKeys: Map[String, AttributeKey[_]], + knownValues: IMap[AttributeKey, Set] + ): Parser[AttributeEntry[_]] = { val keyp = knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace) keyp flatMap { case key: AttributeKey[t] => @@ -318,12 +358,15 @@ object Act { value(resolvedReference(index, currentBuild, trailing)) } - private[sbt] def resolvedReferenceIdent(index: KeyIndex, - currentBuild: URI, - trailing: Parser[_]): Parser[ResolvedReference] = { + private[sbt] def resolvedReferenceIdent( + index: KeyIndex, + currentBuild: URI, + trailing: Parser[_] + ): Parser[ResolvedReference] = { def projectID(uri: URI) = token( - DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing) + DQuoteChar ~> examplesStrict(ID, index projects uri, "project ID") <~ DQuoteChar <~ OptSpace <~ ")" <~ trailing + ) def projectRef(uri: URI) = projectID(uri) map { id => ProjectRef(uri, id) } @@ -333,15 +376,18 @@ object Act { val buildRef = token( "ProjectRef(" ~> OptSpace ~> "uri(" ~> 
OptSpace ~> DQuoteChar ~> - resolvedURI <~ DQuoteChar <~ OptSpace <~ ")" <~ spacedComma) + resolvedURI <~ DQuoteChar <~ OptSpace <~ ")" <~ spacedComma + ) buildRef flatMap { uri => projectRef(uri) } } - def resolvedReference(index: KeyIndex, - currentBuild: URI, - trailing: Parser[_]): Parser[ResolvedReference] = { + def resolvedReference( + index: KeyIndex, + currentBuild: URI, + trailing: Parser[_] + ): Parser[ResolvedReference] = { def projectID(uri: URI) = token(examplesStrict(ID, index projects uri, "project ID") <~ trailing) def projectRef(uri: URI) = projectID(uri) map { id => @@ -360,8 +406,10 @@ object Act { def optProjectRef(index: KeyIndex, current: ProjectRef): Parser[ParsedAxis[ResolvedReference]] = projectRef(index, current.build) ?? Omitted - def resolveProject(parsed: ParsedAxis[ResolvedReference], - current: ProjectRef): Option[ResolvedReference] = + def resolveProject( + parsed: ParsedAxis[ResolvedReference], + current: ProjectRef + ): Option[ResolvedReference] = parsed match { case Omitted => Some(current) case ParsedZero => None @@ -380,7 +428,7 @@ object Act { def evaluate(kvs: Seq[ScopedKey[_]]): Parser[() => State] = { val preparedPairs = anyKeyValues(structure, kvs) val showConfig = Aggregation.defaultShow(state, showTasks = action == ShowAction) - evaluatingParser(state, structure, showConfig)(preparedPairs) map { evaluate => () => + evaluatingParser(state, showConfig)(preparedPairs) map { evaluate => () => { val keyStrings = preparedPairs.map(pp => showKey.show(pp.key)).mkString(", ") state.log.debug("Evaluating tasks: " + keyStrings) @@ -409,11 +457,13 @@ object Act { def scopedKeyParser(extracted: Extracted): Parser[ScopedKey[_]] = scopedKeyParser(extracted.structure, extracted.currentRef) def scopedKeyParser(structure: BuildStructure, currentRef: ProjectRef): Parser[ScopedKey[_]] = - scopedKey(structure.index.keyIndex, - currentRef, - structure.extra.configurationsForAxis, - structure.index.keyMap, - structure.data) + scopedKey( + 
structure.index.keyIndex, + currentRef, + structure.extra.configurationsForAxis, + structure.index.keyMap, + structure.data + ) type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }] def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state) @@ -432,17 +482,21 @@ object Act { KeyValue(key, value) } } - private[this] def anyKeyValues(structure: BuildStructure, - keys: Seq[ScopedKey[_]]): Seq[KeyValue[_]] = + private[this] def anyKeyValues( + structure: BuildStructure, + keys: Seq[ScopedKey[_]] + ): Seq[KeyValue[_]] = keys.flatMap { key => getValue(structure.data, key.scope, key.key) map { value => KeyValue(key, value) } } - private[this] def getValue[T](data: Settings[Scope], - scope: Scope, - key: AttributeKey[T]): Option[T] = + private[this] def getValue[T]( + data: Settings[Scope], + scope: Scope, + key: AttributeKey[T] + ): Option[T] = if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key) else data.get(scope, key) diff --git a/main/src/main/scala/sbt/internal/Aggregation.scala b/main/src/main/scala/sbt/internal/Aggregation.scala index 0d2da79d8..c6a2c9828 100644 --- a/main/src/main/scala/sbt/internal/Aggregation.scala +++ b/main/src/main/scala/sbt/internal/Aggregation.scala @@ -61,11 +61,10 @@ object Aggregation { def applyTasks[T]( s: State, - structure: BuildStructure, ps: Values[Parser[Task[T]]], show: ShowConfig )(implicit display: Show[ScopedKey[_]]): Parser[() => State] = - Command.applyEffect(seqParser(ps))(ts => runTasks(s, structure, ts, DummyTaskMap(Nil), show)) + Command.applyEffect(seqParser(ps))(ts => runTasks(s, ts, DummyTaskMap(Nil), show)) private def showRun[T](complete: Complete[T], show: ShowConfig)( implicit display: Show[ScopedKey[_]] @@ -103,11 +102,12 @@ object Aggregation { Complete(start, stop, result, newS) } - def runTasks[HL <: HList, T](s: State, - structure: BuildStructure, - ts: Values[Task[T]], - extra: DummyTaskMap, - show: ShowConfig)(implicit display: 
Show[ScopedKey[_]]): State = { + def runTasks[HL <: HList, T]( + s: State, + ts: Values[Task[T]], + extra: DummyTaskMap, + show: ShowConfig + )(implicit display: Show[ScopedKey[_]]): State = { val complete = timedRun[T](s, ts, extra) showRun(complete, show) complete.results match { @@ -128,33 +128,26 @@ object Aggregation { key in currentRef get structure.data getOrElse true if (get(showSuccess)) { if (get(showTiming)) { - val msg = timingString(start, stop, "", structure.data, currentRef, log) + val msg = timingString(start, stop, structure.data, currentRef) if (success) log.success(msg) else log.error(msg) } else if (success) log.success("") } } + private def timingString( startTime: Long, endTime: Long, - s: String, data: Settings[Scope], currentRef: ProjectRef, - log: Logger ): String = { val format = timingFormat in currentRef get data getOrElse defaultFormat - timing(format, startTime, endTime, "", log) + timing(format, startTime, endTime) } - def timing( - format: java.text.DateFormat, - startTime: Long, - endTime: Long, - s: String, - log: Logger - ): String = { - val ss = if (s.isEmpty) "" else s + " " + + def timing(format: java.text.DateFormat, startTime: Long, endTime: Long): String = { val nowString = format.format(new java.util.Date(endTime)) - "Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString + "Total time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString } def defaultFormat: DateFormat = { @@ -164,20 +157,19 @@ object Aggregation { def applyDynamicTasks[I]( s: State, - structure: BuildStructure, inputs: Values[InputTask[I]], show: ShowConfig )(implicit display: Show[ScopedKey[_]]): Parser[() => State] = { val parsers = for (KeyValue(k, it) <- inputs) yield it.parser(s).map(v => KeyValue(k, v)) Command.applyEffect(seq(parsers)) { roots => - runTasks(s, structure, roots, DummyTaskMap(Nil), show) + runTasks(s, roots, DummyTaskMap(Nil), show) } } - def evaluatingParser(s: State, structure: 
BuildStructure, show: ShowConfig)( - keys: Seq[KeyValue[_]] - )(implicit display: Show[ScopedKey[_]]): Parser[() => State] = { + def evaluatingParser(s: State, show: ShowConfig)(keys: Seq[KeyValue[_]])( + implicit display: Show[ScopedKey[_]] + ): Parser[() => State] = { // to make the call sites clearer def separate[L](in: Seq[KeyValue[_]])( @@ -210,12 +202,12 @@ object Aggregation { val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n") failure(s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings") } else - applyDynamicTasks(s, structure, maps(inputTasks)(castToAny), show) + applyDynamicTasks(s, maps(inputTasks)(castToAny), show) } else { val base = if (tasks.isEmpty) success(() => s) else - applyTasks(s, structure, maps(tasks)(x => success(castToAny(x))), show) + applyTasks(s, maps(tasks)(x => success(castToAny(x))), show) base.map { res => () => val newState = res() if (show.settingValues && settings.nonEmpty) printSettings(settings, show.print) @@ -236,8 +228,9 @@ object Aggregation { reverse: Boolean ): Seq[ProjectRef] = { val resRef = proj.map(p => extra.projectRefFor(extra.resolveRef(p))) - resRef.toList.flatMap(ref => - if (reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref)) + resRef.toList.flatMap( + ref => if (reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref) + ) } def aggregate[T, Proj]( diff --git a/main/src/main/scala/sbt/internal/BuildDef.scala b/main/src/main/scala/sbt/internal/BuildDef.scala index 96c4641f6..d4a435ab5 100644 --- a/main/src/main/scala/sbt/internal/BuildDef.scala +++ b/main/src/main/scala/sbt/internal/BuildDef.scala @@ -16,8 +16,9 @@ import sbt.internal.util.Attributed import sbt.internal.inc.ReflectUtilities trait BuildDef { - def projectDefinitions(baseDirectory: File): Seq[Project] = projects - def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq + def projectDefinitions(@deprecated("unused", "") 
baseDirectory: File): Seq[Project] = projects + def projects: Seq[Project] = + CompositeProject.expand(ReflectUtilities.allVals[CompositeProject](this).values.toSeq) // TODO: Should we grab the build core settings here or in a plugin? def settings: Seq[Setting[_]] = Defaults.buildCore def buildLoaders: Seq[BuildLoader.Components] = Nil diff --git a/main/src/main/scala/sbt/internal/BuildDependencies.scala b/main/src/main/scala/sbt/internal/BuildDependencies.scala index 6b6709610..437018cf7 100644 --- a/main/src/main/scala/sbt/internal/BuildDependencies.scala +++ b/main/src/main/scala/sbt/internal/BuildDependencies.scala @@ -12,8 +12,10 @@ import sbt.internal.util.Types.idFun import sbt.internal.util.Dag import BuildDependencies._ -final class BuildDependencies private (val classpath: DependencyMap[ClasspathDep[ProjectRef]], - val aggregate: DependencyMap[ProjectRef]) { +final class BuildDependencies private ( + val classpath: DependencyMap[ClasspathDep[ProjectRef]], + val aggregate: DependencyMap[ProjectRef] +) { def classpathRefs(ref: ProjectRef): Seq[ProjectRef] = classpath(ref) map getID def classpathTransitiveRefs(ref: ProjectRef): Seq[ProjectRef] = classpathTransitive(ref) @@ -27,8 +29,10 @@ final class BuildDependencies private (val classpath: DependencyMap[ClasspathDep new BuildDependencies(classpath, aggregate.updated(ref, deps ++ aggregate.getOrElse(ref, Nil))) } object BuildDependencies { - def apply(classpath: DependencyMap[ClasspathDep[ProjectRef]], - aggregate: DependencyMap[ProjectRef]): BuildDependencies = + def apply( + classpath: DependencyMap[ClasspathDep[ProjectRef]], + aggregate: DependencyMap[ProjectRef] + ): BuildDependencies = new BuildDependencies(classpath, aggregate) type DependencyMap[D] = Map[ProjectRef, Seq[D]] diff --git a/main/src/main/scala/sbt/internal/BuildLoader.scala b/main/src/main/scala/sbt/internal/BuildLoader.scala index 11d024337..bdc697b68 100644 --- a/main/src/main/scala/sbt/internal/BuildLoader.scala +++ 
b/main/src/main/scala/sbt/internal/BuildLoader.scala @@ -16,20 +16,24 @@ import sbt.internal.util.Types.{ const, idFun } import sbt.util.Logger import sbt.librarymanagement.ModuleID -final class MultiHandler[S, T](builtIn: S => Option[T], - root: Option[S => Option[T]], - nonRoots: List[(URI, S => Option[T])], - getURI: S => URI, - log: S => Logger) { +final class MultiHandler[S, T]( + builtIn: S => Option[T], + root: Option[S => Option[T]], + nonRoots: List[(URI, S => Option[T])], + getURI: S => URI, + log: S => Logger +) { def applyFun: S => Option[T] = apply def apply(info: S): Option[T] = (baseLoader(info), applyNonRoots(info)) match { case (None, Nil) => None case (None, xs @ (_, nr) :: ignored) => if (ignored.nonEmpty) - warn("Using first of multiple matching non-root build resolvers for " + getURI(info), - log(info), - xs) + warn( + "Using first of multiple matching non-root build resolvers for " + getURI(info), + log(info), + xs + ) Some(nr) case (Some(b), xs) => if (xs.nonEmpty) @@ -72,28 +76,34 @@ object BuildLoader { type Loader = LoadInfo => Option[() => BuildUnit] type TransformAll = PartBuild => PartBuild - final class Components(val resolver: Resolver, - val builder: Builder, - val transformer: Transformer, - val full: Loader, - val transformAll: TransformAll) { + final class Components( + val resolver: Resolver, + val builder: Builder, + val transformer: Transformer, + val full: Loader, + val transformAll: TransformAll + ) { def |(cs: Components): Components = - new Components(resolver | cs.resolver, - builder | cs.builder, - seq(transformer, cs.transformer), - full | cs.full, - transformAll andThen cs.transformAll) + new Components( + resolver | cs.resolver, + builder | cs.builder, + seq(transformer, cs.transformer), + full | cs.full, + transformAll andThen cs.transformAll + ) } def transform(t: Transformer): Components = components(transformer = t) def resolve(r: Resolver): Components = components(resolver = r) def build(b: Builder): Components = 
components(builder = b) def full(f: Loader): Components = components(full = f) def transformAll(t: TransformAll) = components(transformAll = t) - def components(resolver: Resolver = const(None), - builder: Builder = const(None), - transformer: Transformer = _.unit, - full: Loader = const(None), - transformAll: TransformAll = idFun) = + def components( + resolver: Resolver = const(None), + builder: Builder = const(None), + transformer: Transformer = _.unit, + full: Loader = const(None), + transformAll: TransformAll = idFun + ) = new Components(resolver, builder, transformer, full, transformAll) def seq(a: Transformer, b: Transformer): Transformer = info => b(info.setUnit(a(info))) @@ -103,47 +113,55 @@ object BuildLoader { def config: LoadBuildConfiguration def state: State } - final class ResolveInfo(val uri: URI, - val staging: File, - val config: LoadBuildConfiguration, - val state: State) - extends Info - final class BuildInfo(val uri: URI, - val base: File, - val config: LoadBuildConfiguration, - val state: State) - extends Info - final class TransformInfo(val uri: URI, - val base: File, - val unit: BuildUnit, - val config: LoadBuildConfiguration, - val state: State) - extends Info { + final class ResolveInfo( + val uri: URI, + val staging: File, + val config: LoadBuildConfiguration, + val state: State + ) extends Info + final class BuildInfo( + val uri: URI, + val base: File, + val config: LoadBuildConfiguration, + val state: State + ) extends Info + final class TransformInfo( + val uri: URI, + val base: File, + val unit: BuildUnit, + val config: LoadBuildConfiguration, + val state: State + ) extends Info { def setUnit(newUnit: BuildUnit): TransformInfo = new TransformInfo(uri, base, newUnit, config, state) } - final class LoadInfo(val uri: URI, - val staging: File, - val config: LoadBuildConfiguration, - val state: State, - val components: Components) - extends Info + final class LoadInfo( + val uri: URI, + val staging: File, + val config: 
LoadBuildConfiguration, + val state: State, + val components: Components + ) extends Info - def apply(base: Components, - fail: URI => Nothing, - s: State, - config: LoadBuildConfiguration): BuildLoader = { + def apply( + base: Components, + fail: URI => Nothing, + s: State, + config: LoadBuildConfiguration + ): BuildLoader = { def makeMulti[S <: Info, T](base: S => Option[T]) = new MultiHandler[S, T](base, None, Nil, _.uri, _.config.log) - new BuildLoader(fail, - s, - config, - makeMulti(base.resolver), - makeMulti(base.builder), - base.transformer, - makeMulti(base.full), - base.transformAll) + new BuildLoader( + fail, + s, + config, + makeMulti(base.resolver), + makeMulti(base.builder), + base.transformer, + makeMulti(base.full), + base.transformAll + ) } def componentLoader: Loader = (info: LoadInfo) => { diff --git a/main/src/main/scala/sbt/internal/BuildStructure.scala b/main/src/main/scala/sbt/internal/BuildStructure.scala index 9b02e9dfd..04db31933 100644 --- a/main/src/main/scala/sbt/internal/BuildStructure.scala +++ b/main/src/main/scala/sbt/internal/BuildStructure.scala @@ -20,14 +20,16 @@ import sbt.internal.util.Attributed.data import sbt.util.Logger import sjsonnew.shaded.scalajson.ast.unsafe.JValue -final class BuildStructure(val units: Map[URI, LoadedBuildUnit], - val root: URI, - val settings: Seq[Setting[_]], - val data: Settings[Scope], - val index: StructureIndex, - val streams: State => Streams, - val delegates: Scope => Seq[Scope], - val scopeLocal: ScopeLocal) { +final class BuildStructure( + val units: Map[URI, LoadedBuildUnit], + val root: URI, + val settings: Seq[Setting[_]], + val data: Settings[Scope], + val index: StructureIndex, + val streams: State => Streams, + val delegates: Scope => Seq[Scope], + val scopeLocal: ScopeLocal +) { val rootProject: URI => String = Load getRootProject units def allProjects: Seq[ResolvedProject] = units.values.flatMap(_.defined.values).toSeq def allProjects(build: URI): Seq[ResolvedProject] = @@ -59,11 
+61,12 @@ final class StructureIndex( * @param rootProjects The list of project IDs for the projects considered roots of this build. * The first root project is used as the default in several situations where a project is not otherwise selected. */ -final class LoadedBuildUnit(val unit: BuildUnit, - val defined: Map[String, ResolvedProject], - val rootProjects: Seq[String], - val buildSettings: Seq[Setting[_]]) - extends BuildUnitBase { +final class LoadedBuildUnit( + val unit: BuildUnit, + val defined: Map[String, ResolvedProject], + val rootProjects: Seq[String], + val buildSettings: Seq[Setting[_]] +) extends BuildUnitBase { /** * The project to use as the default when one is not otherwise selected. @@ -72,7 +75,8 @@ final class LoadedBuildUnit(val unit: BuildUnit, val root = rootProjects match { case Nil => throw new java.lang.AssertionError( - "assertion failed: No root projects defined for build unit " + unit) + "assertion failed: No root projects defined for build unit " + unit + ) case Seq(root, _*) => root } @@ -154,10 +158,13 @@ case class DetectedAutoPlugin(name: String, value: AutoPlugin, hasAutoImport: Bo * Auto-discovered modules for the build definition project. These include modules defined in build definition sources * as well as modules in binary dependencies. * - * @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. + * @param builds The [[BuildDef]]s detected in the build definition. + * This does not include the default [[BuildDef]] that sbt creates if none is defined. */ -final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin], - val builds: DetectedModules[BuildDef]) { +final class DetectedPlugins( + val autoPlugins: Seq[DetectedAutoPlugin], + val builds: DetectedModules[BuildDef] +) { /** * Sequence of import expressions for the build definition. 
@@ -172,9 +179,7 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin], private[this] lazy val (autoPluginAutoImports, topLevelAutoPluginAutoImports) = autoPlugins .flatMap { - case DetectedAutoPlugin(name, ap, hasAutoImport) => - if (hasAutoImport) Some(name) - else None + case DetectedAutoPlugin(name, _, hasAutoImport) => if (hasAutoImport) Some(name) else None } .partition(nonTopLevelPlugin) @@ -202,10 +207,12 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin], * @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. * @param detected Auto-detected modules in the build definition. */ -final class LoadedPlugins(val base: File, - val pluginData: PluginData, - val loader: ClassLoader, - val detected: DetectedPlugins) { +final class LoadedPlugins( + val base: File, + val pluginData: PluginData, + val loader: ClassLoader, + val detected: DetectedPlugins +) { def fullClasspath: Seq[Attributed[File]] = pluginData.classpath def classpath = data(fullClasspath) } @@ -216,10 +223,12 @@ final class LoadedPlugins(val base: File, * @param localBase The working location of the build on the filesystem. * For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. 
*/ -final class BuildUnit(val uri: URI, - val localBase: File, - val definitions: LoadedDefinitions, - val plugins: LoadedPlugins) { +final class BuildUnit( + val uri: URI, + val localBase: File, + val definitions: LoadedDefinitions, + val plugins: LoadedPlugins +) { override def toString = if (uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase + ")") } @@ -235,11 +244,12 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) { } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] } -final class PartBuildUnit(val unit: BuildUnit, - val defined: Map[String, Project], - val rootProjects: Seq[String], - val buildSettings: Seq[Setting[_]]) - extends BuildUnitBase { +final class PartBuildUnit( + val unit: BuildUnit, + val defined: Map[String, Project], + val rootProjects: Seq[String], + val buildSettings: Seq[Setting[_]] +) extends BuildUnitBase { def resolve(f: Project => ResolvedProject): LoadedBuildUnit = new LoadedBuildUnit(unit, defined mapValues f toMap, rootProjects, buildSettings) def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f) @@ -252,29 +262,37 @@ object BuildStreams { final val BuildUnitPath = "$build" final val StreamsDirectory = "streams" - def mkStreams(units: Map[URI, LoadedBuildUnit], - root: URI, - data: Settings[Scope]): State => Streams = s => { + def mkStreams( + units: Map[URI, LoadedBuildUnit], + root: URI, + data: Settings[Scope] + ): State => Streams = s => { implicit val isoString: sjsonnew.IsoString[JValue] = - sjsonnew.IsoString.iso(sjsonnew.support.scalajson.unsafe.CompactPrinter.apply, - sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe) + sjsonnew.IsoString.iso( + sjsonnew.support.scalajson.unsafe.CompactPrinter.apply, + sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe + ) (s get Keys.stateStreams) getOrElse { - 
std.Streams(path(units, root, data), - displayFull, - LogManager.construct(data, s), - sjsonnew.support.scalajson.unsafe.Converter) + std.Streams( + path(units, root, data), + displayFull, + LogManager.construct(data, s), + sjsonnew.support.scalajson.unsafe.Converter + ) } } def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])( - scoped: ScopedKey[_]): File = + scoped: ScopedKey[_] + ): File = resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped)) def resolvePath(base: File, components: Seq[String]): File = (base /: components)((b, p) => new File(b, p)) def pathComponent[T](axis: ScopeAxis[T], scoped: ScopedKey[_], label: String)( - show: T => String): String = + show: T => String + ): String = axis match { case Zero => GlobalPath case This => @@ -293,10 +311,12 @@ object BuildStreams { a.entries.toSeq.sortBy(_.key.label).map { case AttributeEntry(key, value) => key.label + "=" + value.toString } mkString (" ") - def projectPath(units: Map[URI, LoadedBuildUnit], - root: URI, - scoped: ScopedKey[_], - data: Settings[Scope]): File = + def projectPath( + units: Map[URI, LoadedBuildUnit], + root: URI, + scoped: ScopedKey[_], + data: Settings[Scope] + ): File = scoped.scope.project match { case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath diff --git a/main/src/main/scala/sbt/internal/BuildUtil.scala b/main/src/main/scala/sbt/internal/BuildUtil.scala index 0d1ac38e5..a62511a30 100644 --- a/main/src/main/scala/sbt/internal/BuildUtil.scala +++ b/main/src/main/scala/sbt/internal/BuildUtil.scala @@ -48,10 +48,12 @@ final class BuildUtil[Proj]( refOpt => configurations(projectForAxis(refOpt)).map(_.name) } object BuildUtil { - def apply(root: URI, - units: Map[URI, LoadedBuildUnit], - keyIndex: KeyIndex, - data: Settings[Scope]): BuildUtil[ResolvedProject] = { + def apply( + root: URI, + units: Map[URI, 
LoadedBuildUnit], + keyIndex: KeyIndex, + data: Settings[Scope] + ): BuildUtil[ResolvedProject] = { val getp = (build: URI, project: String) => Load.getProject(units, build, project) val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name)) val aggregates = aggregationRelation(units) @@ -72,8 +74,9 @@ object BuildUtil { def checkCycles(units: Map[URI, LoadedBuildUnit]): Unit = { def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project) - def deps(proj: ResolvedProject)( - base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] = + def deps( + proj: ResolvedProject + )(base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] = Dag.topologicalSort(proj)(p => base(p) map getRef) // check for cycles for ((_, lbu) <- units; proj <- lbu.defined.values) { diff --git a/main/src/main/scala/sbt/internal/CommandExchange.scala b/main/src/main/scala/sbt/internal/CommandExchange.scala index 873946340..9474a83b6 100644 --- a/main/src/main/scala/sbt/internal/CommandExchange.scala +++ b/main/src/main/scala/sbt/internal/CommandExchange.scala @@ -20,6 +20,7 @@ import BasicKeys.{ serverAuthentication, serverConnectionType, serverLogLevel, + fullServerHandlers, logLevel } import java.net.Socket @@ -44,49 +45,43 @@ import sbt.util.{ Level, Logger, LogExchange } * this exchange, which could serve command request from either of the channel. 
*/ private[sbt] final class CommandExchange { - private val autoStartServerSysProp = sys.props.get("sbt.server.autostart") map { - _.toLowerCase == "true" - } getOrElse true - private val lock = new AnyRef {} + private val autoStartServerSysProp = + sys.props get "sbt.server.autostart" forall (_.toLowerCase == "true") private var server: Option[ServerInstance] = None private val firstInstance: AtomicBoolean = new AtomicBoolean(true) private var consoleChannel: Option[ConsoleChannel] = None private val commandQueue: ConcurrentLinkedQueue[Exec] = new ConcurrentLinkedQueue() private val channelBuffer: ListBuffer[CommandChannel] = new ListBuffer() + private val channelBufferLock = new AnyRef {} private val nextChannelId: AtomicInteger = new AtomicInteger(0) private lazy val jsonFormat = new sjsonnew.BasicJsonProtocol with JValueFormats {} def channels: List[CommandChannel] = channelBuffer.toList - def subscribe(c: CommandChannel): Unit = - lock.synchronized { - channelBuffer.append(c) - } + def subscribe(c: CommandChannel): Unit = channelBufferLock.synchronized(channelBuffer.append(c)) // periodically move all messages from all the channels @tailrec def blockUntilNextExec: Exec = { @tailrec def slurpMessages(): Unit = - (((None: Option[Exec]) /: channels) { _ orElse _.poll }) match { + channels.foldLeft(Option.empty[Exec]) { _ orElse _.poll } match { + case None => () case Some(x) => commandQueue.add(x) slurpMessages - case _ => () } slurpMessages() Option(commandQueue.poll) match { case Some(x) => x - case _ => + case None => Thread.sleep(50) blockUntilNextExec } } def run(s: State): State = { - consoleChannel match { - case Some(_) => // do nothing - case _ => - val x = new ConsoleChannel("console0") - consoleChannel = Some(x) - subscribe(x) + if (consoleChannel.isEmpty) { + val console0 = new ConsoleChannel("console0") + consoleChannel = Some(console0) + subscribe(console0) } val autoStartServerAttr = (s get autoStartServer) match { case Some(bool) => bool @@ 
-102,25 +97,13 @@ private[sbt] final class CommandExchange { * Check if a server instance is running already, and start one if it isn't. */ private[sbt] def runServer(s: State): State = { - lazy val port = (s get serverPort) match { - case Some(x) => x - case None => 5001 - } - lazy val host = (s get serverHost) match { - case Some(x) => x - case None => "127.0.0.1" - } - lazy val auth: Set[ServerAuthentication] = (s get serverAuthentication) match { - case Some(xs) => xs - case None => Set(ServerAuthentication.Token) - } - lazy val connectionType = (s get serverConnectionType) match { - case Some(x) => x - case None => ConnectionType.Tcp - } - lazy val level: Level.Value = (s get serverLogLevel) - .orElse(s get logLevel) - .getOrElse(Level.Warn) + lazy val port = s.get(serverPort).getOrElse(5001) + lazy val host = s.get(serverHost).getOrElse("127.0.0.1") + lazy val auth: Set[ServerAuthentication] = + s.get(serverAuthentication).getOrElse(Set(ServerAuthentication.Token)) + lazy val connectionType = s.get(serverConnectionType).getOrElse(ConnectionType.Tcp) + lazy val level = s.get(serverLogLevel).orElse(s.get(logLevel)).getOrElse(Level.Warn) + lazy val handlers = s.get(fullServerHandlers).getOrElse(Nil) def onIncomingSocket(socket: Socket, instance: ServerInstance): Unit = { val name = newNetworkName @@ -133,7 +116,7 @@ private[sbt] final class CommandExchange { log } val channel = - new NetworkChannel(name, socket, Project structure s, auth, instance, logger) + new NetworkChannel(name, socket, Project structure s, auth, instance, handlers, logger) subscribe(channel) } if (server.isEmpty && firstInstance.get) { @@ -164,7 +147,8 @@ private[sbt] final class CommandExchange { server = Some(serverInstance) case Some(Failure(_: AlreadyRunningException)) => s.log.warn( - "sbt server could not start because there's another instance of sbt running on this build.") + "sbt server could not start because there's another instance of sbt running on this build." 
+ ) s.log.warn("Running multiple instances is unsupported") server = None firstInstance.set(false) @@ -181,9 +165,7 @@ private[sbt] final class CommandExchange { } def shutdown(): Unit = { - channels foreach { c => - c.shutdown() - } + channels foreach (_.shutdown()) // interrupt and kill the thread server.foreach(_.shutdown()) server = None @@ -206,7 +188,7 @@ private[sbt] final class CommandExchange { toDel.toList match { case Nil => // do nothing case xs => - lock.synchronized { + channelBufferLock.synchronized { channelBuffer --= xs () } @@ -222,48 +204,30 @@ private[sbt] final class CommandExchange { val params = toLogMessageParams(entry) channels collect { case c: ConsoleChannel => - if (broadcastStringMessage) { + if (broadcastStringMessage || (entry.channelName forall (_ == c.name))) c.publishEvent(event) - } else { - if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) { - c.publishEvent(event) - } - } case c: NetworkChannel => try { // Note that language server's LogMessageParams does not hold the execid, // so this is weaker than the StringMessage. We might want to double-send // in case we have a better client that can utilize the knowledge. 
import sbt.internal.langserver.codec.JsonProtocol._ - if (broadcastStringMessage) { - c.langNotify("window/logMessage", params) - } else { - if (entry.channelName == Some(c.name)) { - c.langNotify("window/logMessage", params) - } - } - } catch { - case _: IOException => - toDel += c - } + if (broadcastStringMessage || (entry.channelName contains c.name)) + c.jsonRpcNotify("window/logMessage", params) + } catch { case _: IOException => toDel += c } } case _ => - channels collect { - case c: ConsoleChannel => - c.publishEvent(event) + channels foreach { + case c: ConsoleChannel => c.publishEvent(event) case c: NetworkChannel => - try { - c.publishEvent(event) - } catch { - case _: IOException => - toDel += c - } + try c.publishEvent(event) + catch { case _: IOException => toDel += c } } } toDel.toList match { case Nil => // do nothing case xs => - lock.synchronized { + channelBufferLock.synchronized { channelBuffer --= xs () } @@ -305,7 +269,7 @@ private[sbt] final class CommandExchange { toDel.toList match { case Nil => // do nothing case xs => - lock.synchronized { + channelBufferLock.synchronized { channelBuffer --= xs () } @@ -315,6 +279,11 @@ private[sbt] final class CommandExchange { // fanout publishEvent def publishEventMessage(event: EventMessage): Unit = { val toDel: ListBuffer[CommandChannel] = ListBuffer.empty + + def tryTo(x: => Unit, c: CommandChannel): Unit = + try x + catch { case _: IOException => toDel += c } + event match { // Special treatment for ConsolePromptEvent since it's hand coded without codec. 
case entry: ConsolePromptEvent => @@ -328,36 +297,21 @@ private[sbt] final class CommandExchange { case entry: ExecStatusEvent => channels collect { case c: ConsoleChannel => - if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) { - c.publishEventMessage(event) - } + if (entry.channelName forall (_ == c.name)) c.publishEventMessage(event) case c: NetworkChannel => - try { - if (entry.channelName == Some(c.name)) { - c.publishEventMessage(event) - } - } catch { - case e: IOException => - toDel += c - } + if (entry.channelName contains c.name) tryTo(c.publishEventMessage(event), c) } case _ => channels collect { - case c: ConsoleChannel => - c.publishEventMessage(event) - case c: NetworkChannel => - try { - c.publishEventMessage(event) - } catch { - case _: IOException => - toDel += c - } + case c: ConsoleChannel => c.publishEventMessage(event) + case c: NetworkChannel => tryTo(c.publishEventMessage(event), c) } } + toDel.toList match { case Nil => // do nothing case xs => - lock.synchronized { + channelBufferLock.synchronized { channelBuffer --= xs () } diff --git a/main/src/main/scala/sbt/internal/CommandStrings.scala b/main/src/main/scala/sbt/internal/CommandStrings.scala index dc070a468..d30a31f29 100644 --- a/main/src/main/scala/sbt/internal/CommandStrings.scala +++ b/main/src/main/scala/sbt/internal/CommandStrings.scala @@ -57,10 +57,23 @@ $ShowCommand def pluginsDetailed = pluginsBrief // TODO: expand val LastCommand = "last" - val LastGrepCommand = "last-grep" + val OldLastGrepCommand = "last-grep" + val LastGrepCommand = "lastGrep" val ExportCommand = "export" val ExportStream = "export" + val oldLastGrepBrief = + (OldLastGrepCommand, "Shows lines from the last output for 'key' that match 'pattern'.") + val oldLastGrepDetailed = + s"""$OldLastGrepCommand + Displays lines from the logging of previous commands that match `pattern`. + +$OldLastGrepCommand [key] + Displays lines from logging associated with `key` that match `pattern`. 
The key typically refers to a task (for example, test:compile). The logging that is displayed is restricted to the logging for that particular task. + + is a regular expression interpreted by java.util.Pattern. Matching text is highlighted (when highlighting is supported and enabled). + See also '$LastCommand'.""" + val lastGrepBrief = (LastGrepCommand, "Shows lines from the last output for 'key' that match 'pattern'.") val lastGrepDetailed = @@ -102,8 +115,10 @@ $LastCommand val InspectCommand = "inspect" val inspectBrief = - (s"$InspectCommand [tree|uses|definitions|actual] ", - "Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies.") + ( + s"$InspectCommand [tree|uses|definitions|actual] ", + "Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies." + ) val inspectDetailed = s""" |$InspectCommand | @@ -265,16 +280,12 @@ $ProjectsCommand remove + def sbtrc = ".sbtrc" - def DefaultsCommand = "add-default-commands" + def DefaultsCommand = "addDefaultCommands" def DefaultsBrief = (DefaultsCommand, DefaultsDetailed) def DefaultsDetailed = "Registers default built-in commands" - def Load = "load" - def LoadLabel = "a project" - def LoadCommand = "load-commands" - def LoadCommandLabel = "commands" - - def LoadFailed = "load-failed" + def LoadFailed = "loadFailed" + def OldLoadFailed = "load-failed" def LoadProjectImpl = "loadp" def LoadProject = "reload" @@ -404,4 +415,29 @@ $SwitchCommand [=][!] [-v] [] See also `help $CrossCommand` """ + + val JavaCrossCommand = "java+" + val JavaSwitchCommand = "java++" + + def javaCrossHelp: Help = Help.more(JavaCrossCommand, JavaCrossDetailed) + def javaSwitchHelp: Help = Help.more(JavaSwitchCommand, JavaSwitchDetailed) + + def JavaCrossDetailed = + s"""$JavaCrossCommand + Runs for each JDK version specified for cross-JDK testing. 
+ For each string in `crossJavaVersions` in the current project, this command sets the + `javaHome` of all projects to the corresponding Java home, reloads the build, + and executes . When finished, it reloads the build with the original + `javaHome`. + Note that `Test / fork := true` is needed for `javaHome` to be effective. + See also `help $JavaSwitchCommand` +""" + + def JavaSwitchDetailed = + s"""$JavaSwitchCommand + Changes the JDK version and runs a command. + Sets the `javaHome` of all projects to and + reloads the build. If is provided, it is then executed. + See also `help $JavaSwitchCommand` +""" } diff --git a/main/src/main/scala/sbt/internal/ConsoleProject.scala b/main/src/main/scala/sbt/internal/ConsoleProject.scala index c2e2edef2..bb1279d8f 100644 --- a/main/src/main/scala/sbt/internal/ConsoleProject.scala +++ b/main/src/main/scala/sbt/internal/ConsoleProject.scala @@ -15,7 +15,8 @@ import xsbti.compile.ClasspathOptionsUtil object ConsoleProject { def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)( - implicit log: Logger): Unit = { + implicit log: Logger + ): Unit = { val extracted = Project extract state val cpImports = new Imports(extracted, state) val bindings = ("currentState" -> state) :: ("extracted" -> extracted) :: ("cpHelpers" -> cpImports) :: Nil @@ -51,7 +52,7 @@ object ConsoleProject { options, initCommands, cleanupCommands - )(Some(unit.loader), bindings) + )(Some(unit.loader), bindings).get } () } diff --git a/main/src/main/scala/sbt/internal/CrossJava.scala b/main/src/main/scala/sbt/internal/CrossJava.scala new file mode 100644 index 000000000..9ee668042 --- /dev/null +++ b/main/src/main/scala/sbt/internal/CrossJava.scala @@ -0,0 +1,365 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt +package internal + +import java.io.File +import scala.collection.immutable.ListMap +import sbt.io.Path +import sbt.io.syntax._ +import sbt.Cross._ +import sbt.Def.{ ScopedKey, Setting } +import sbt.internal.util.complete.DefaultParsers._ +import sbt.internal.util.AttributeKey +import sbt.internal.util.complete.{ DefaultParsers, Parser } +import sbt.internal.CommandStrings.{ + JavaCrossCommand, + JavaSwitchCommand, + javaCrossHelp, + javaSwitchHelp +} + +private[sbt] object CrossJava { + // parses jabba style version number adopt@1.8 + def parseJavaVersion(version: String): JavaVersion = { + def splitDot(s: String): Vector[Long] = + Option(s) match { + case Some(x) => x.split('.').toVector.filterNot(_ == "").map(_.toLong) + case _ => Vector() + } + def splitAt(s: String): Vector[String] = + Option(s) match { + case Some(x) => x.split('@').toVector + case _ => Vector() + } + splitAt(version) match { + case Vector(vendor, rest) => JavaVersion(splitDot(rest), Option(vendor)) + case Vector(rest) => JavaVersion(splitDot(rest), None) + case _ => sys.error(s"Invalid JavaVersion: $version") + } + } + + def lookupJavaHome(jv: String, mappings: Map[String, File]): File = { + val ms = mappings map { case (k, v) => (JavaVersion(k), v) } + lookupJavaHome(JavaVersion(jv), ms) + } + + def lookupJavaHome(jv: JavaVersion, mappings: Map[JavaVersion, File]): File = { + mappings.get(jv) match { + case Some(dir) => dir + + // when looking for "10" it should match "openjdk@10" + case None if jv.vendor.isEmpty => + val noVendors: Map[JavaVersion, File] = mappings map { + case (k, v) => k.withVendor(None) -> v + } + noVendors.get(jv).getOrElse(javaHomeNotFound(jv, mappings)) + case _ => javaHomeNotFound(jv, mappings) + } + } + + private def javaHomeNotFound(version: JavaVersion, mappings: Map[JavaVersion, File]): Nothing = { + sys.error(s"""Java home for $version was not found
in $mappings + | + |use Global / javaHomes += JavaVersion("$version") -> file(...)""".stripMargin) + } + + private case class SwitchTarget(version: Option[JavaVersion], home: Option[File], force: Boolean) + private case class SwitchJavaHome(target: SwitchTarget, verbose: Boolean, command: Option[String]) + + private def switchParser(state: State): Parser[SwitchJavaHome] = { + import DefaultParsers._ + def versionAndCommand(spacePresent: Boolean) = { + val x = Project.extract(state) + import x._ + val javaHomes = getJavaHomesTyped(x, currentRef) + val knownVersions = javaHomes.keysIterator.map(_.numberStr).toVector + val version: Parser[SwitchTarget] = + (token( + (StringBasic <~ "@").? ~ ((NatBasic) ~ ("." ~> NatBasic).*) + .examples(knownVersions: _*) ~ "!".? + ) || token(StringBasic)) + .map { + case Left(((vendor, (v1, vs)), bang)) => + val force = bang.isDefined + val versionArg = (Vector(v1) ++ vs) map { _.toLong } + SwitchTarget(Option(JavaVersion(versionArg, vendor)), None, force) + case Right(home) => + SwitchTarget(None, Option(new File(home)), true) + } + val spacedVersion = + if (spacePresent) version + else version & spacedFirst(JavaSwitchCommand) + val verbose = Parser.opt(token(Space ~> "-v")) + val optionalCommand = Parser.opt(token(Space ~> matched(state.combinedParser))) + (spacedVersion ~ verbose ~ optionalCommand).map { + case v ~ verbose ~ command => + SwitchJavaHome(v, verbose.isDefined, command) + } + } + token(JavaSwitchCommand ~> OptSpace) flatMap { sp => + versionAndCommand(sp.nonEmpty) + } + } + + private def getJavaHomes( + extracted: Extracted, + proj: ResolvedReference + ): Map[String, File] = { + import extracted._ + (Keys.fullJavaHomes in proj get structure.data).get + } + + private def getJavaHomesTyped( + extracted: Extracted, + proj: ResolvedReference + ): Map[JavaVersion, File] = { + getJavaHomes(extracted, proj) map { case (k, v) => (JavaVersion(k), v) } + } + + private def getCrossJavaVersions( + extracted: Extracted, + proj: 
ResolvedReference + ): Seq[String] = { + import extracted._ + import Keys._ + (crossJavaVersions in proj get structure.data).getOrElse(Nil) + } + + private def getCrossJavaHomes(extracted: Extracted, proj: ResolvedReference): Seq[File] = { + import extracted._ + import Keys._ + val fjh = (fullJavaHomes in proj get structure.data).get + (crossJavaVersions in proj get structure.data) map { jvs => + jvs map { jv => + lookupJavaHome(jv, fjh) + } + } getOrElse Vector() + } + + private def switchCommandImpl(state: State, switch: SwitchJavaHome): State = { + val extracted = Project.extract(state) + import extracted._ + import Keys.javaHome + + // filter out subprojects based on switch target e.g. "10" vs what's in crossJavaVersions + // for the subproject. Only if crossJavaVersions is non-empty, and does NOT include "10" + // it will skip the subproject. + val projects: Seq[(ResolvedReference, Seq[String])] = { + val projectJavaVersions = + structure.allProjectRefs.map(proj => proj -> getCrossJavaVersions(extracted, proj)) + if (switch.target.force) projectJavaVersions + else + switch.target.version match { + case None => projectJavaVersions + case Some(v) => + projectJavaVersions flatMap { + case (proj, versions) => + if (versions.isEmpty || versions.contains(v)) Vector(proj -> versions) + else Vector() + } + } + } + + def setJavaHomeForProjects: State = { + val newSettings = projects.flatMap { + case (proj, javaVersions) => + val fjh = getJavaHomesTyped(extracted, proj) + val home = switch.target match { + case SwitchTarget(Some(v), _, _) => lookupJavaHome(v, fjh) + case SwitchTarget(_, Some(h), _) => h + case _ => sys.error(s"unexpected ${switch.target}") + } + val scope = Scope(Select(proj), Zero, Zero, Zero) + Seq( + javaHome in scope := Some(home) + ) + } + + val filterKeys: Set[AttributeKey[_]] = Set(javaHome).map(_.key) + + val projectsContains: Reference => Boolean = projects.map(_._1).toSet.contains + + // Filter out any old javaHome version settings that were 
added, this is just for hygiene. + val filteredRawAppend = session.rawAppend.filter(_.key match { + case ScopedKey(Scope(Select(ref), Zero, Zero, Zero), key) + if filterKeys.contains(key) && projectsContains(ref) => + false + case _ => true + }) + + val newSession = session.copy(rawAppend = filteredRawAppend ++ newSettings) + + BuiltinCommands.reapply(newSession, structure, state) + } + + setJavaHomeForProjects + } + + def switchJavaHome: Command = + Command.arb(requireSession(switchParser), javaSwitchHelp)(switchCommandImpl) + + def crossJavaHome: Command = + Command.arb(requireSession(crossParser), javaCrossHelp)(crossJavaHomeCommandImpl) + + private case class CrossArgs(command: String, verbose: Boolean) + + /** + * Parse the given command into either an aggregate command or a command for a project + */ + private def crossParser(state: State): Parser[CrossArgs] = + token(JavaCrossCommand <~ OptSpace) flatMap { _ => + (token(Parser.opt("-v" <~ Space)) ~ token(matched(state.combinedParser))).map { + case (verbose, command) => CrossArgs(command, verbose.isDefined) + } & spacedFirst(JavaCrossCommand) + } + + private def crossJavaHomeCommandImpl(state: State, args: CrossArgs): State = { + val x = Project.extract(state) + import x._ + val (aggs, aggCommand) = Cross.parseSlashCommand(x)(args.command) + val projCrossVersions = aggs map { proj => + proj -> getCrossJavaHomes(x, proj) + } + // if we support javaHome, projVersions should be cached somewhere since + // running ++2.11.1 is at the root level is going to mess with the scalaVersion for the aggregated subproj + val projVersions = (projCrossVersions flatMap { + case (proj, versions) => versions map { proj.project -> _ } + }).toList + + val verbose = "" + // println(s"projVersions $projVersions") + + if (projVersions.isEmpty) { + state + } else { + // Detect whether a task or command has been issued + val allCommands = Parser.parse(aggCommand, Act.aggregatedKeyParser(x)) match { + case Left(_) => + // It's 
definitely not a task, check if it's a valid command, because we don't want to emit the warning + // message below for typos. + val validCommand = Parser.parse(aggCommand, state.combinedParser).isRight + + val distinctCrossConfigs = projCrossVersions.map(_._2.toSet).distinct + if (validCommand && distinctCrossConfigs.size > 1) { + state.log.warn( + "Issuing a Java cross building command, but not all sub projects have the same cross build " + + "configuration. This could result in subprojects cross building against Java versions that they are " + + "not compatible with. Try issuing cross building command with tasks instead, since sbt will be able " + + "to ensure that cross building is only done using configured project and Java version combinations " + + "that are configured." + ) + state.log.debug("Java versions configuration is:") + projCrossVersions.foreach { + case (project, versions) => state.log.debug(s"$project: $versions") + } + } + + // Execute using a blanket switch + projCrossVersions.toMap.apply(currentRef).flatMap { version => + // Force scala version + Seq(s"$JavaSwitchCommand $verbose $version!", aggCommand) + } + + case Right(_) => + // We have a key, we're likely to be able to cross build this using the per project behaviour. 
+ + // Group all the projects by scala version + projVersions.groupBy(_._2).mapValues(_.map(_._1)).toSeq.flatMap { + case (version, Seq(project)) => + // If only one project for a version, issue it directly + Seq(s"$JavaSwitchCommand $verbose $version", s"$project/$aggCommand") + case (version, projects) if aggCommand.contains(" ") => + // If the command contains a space, then the `all` command won't work because it doesn't support issuing + // commands with spaces, so revert to running the command on each project one at a time + s"$JavaSwitchCommand $verbose $version" :: projects + .map(project => s"$project/$aggCommand") + case (version, projects) => + // First switch scala version, then use the all command to run the command on each project concurrently + Seq( + s"$JavaSwitchCommand $verbose $version", + projects.map(_ + "/" + aggCommand).mkString("all ", " ", "") + ) + } + } + + allCommands.toList ::: captureCurrentSession(state, x) + } + } + + private val JavaCapturedSession = AttributeKey[Seq[Setting[_]]]("javaCrossCapturedSession") + + private def captureCurrentSession(state: State, extracted: Extracted): State = { + state.put(JavaCapturedSession, extracted.session.rawAppend) + } + + def discoverJavaHomes: ListMap[String, File] = { + import JavaDiscoverConfig._ + val configs = Vector(jabba, linux, macOS) + ListMap(configs flatMap { _.javaHomes }: _*) + } + + sealed trait JavaDiscoverConf { + def javaHomes: Vector[(String, File)] + } + + object JavaDiscoverConfig { + val linux = new JavaDiscoverConf { + val base: File = file("/usr") / "lib" / "jvm" + val JavaHomeDir = """java-([0-9]+)-.*""".r + def javaHomes: Vector[(String, File)] = + wrapNull(base.list()).collect { + case dir @ JavaHomeDir(ver) => JavaVersion(ver).toString -> (base / dir) + } + } + + val macOS = new JavaDiscoverConf { + val base: File = file("/Library") / "Java" / "JavaVirtualMachines" + val JavaHomeDir = """jdk-?(1\.)?([0-9]+).*""".r + def javaHomes: Vector[(String, File)] = + 
wrapNull(base.list()).collect { + case dir @ JavaHomeDir(m, n) => + JavaVersion(nullBlank(m) + n).toString -> (base / dir / "Contents" / "Home") + } + } + + // See https://github.com/shyiko/jabba + val jabba = new JavaDiscoverConf { + val base: File = Path.userHome / ".jabba" / "jdk" + val JavaHomeDir = """([\w\-]+)\@(1\.)?([0-9]+).*""".r + def javaHomes: Vector[(String, File)] = + wrapNull(base.list()).collect { + case dir @ JavaHomeDir(vendor, m, n) => + val v = JavaVersion(nullBlank(m) + n).withVendor(vendor).toString + if ((base / dir / "Contents" / "Home").exists) v -> (base / dir / "Contents" / "Home") + else v -> (base / dir) + } + } + } + + def nullBlank(s: String): String = + if (s eq null) "" + else s + + // expand Java versions to 1-20 to 1.x, and vice versa to accept both "1.8" and "8" + private val oneDot = Map((1L to 20L).toVector flatMap { i => + Vector(Vector(i) -> Vector(1L, i), Vector(1L, i) -> Vector(i)) + }: _*) + def expandJavaHomes(hs: Map[String, File]): Map[String, File] = + hs flatMap { + case (k, v) => + val jv = JavaVersion(k) + if (oneDot.contains(jv.numbers)) + Vector(k -> v, jv.withNumbers(oneDot(jv.numbers)).toString -> v) + else Vector(k -> v) + } + + def wrapNull(a: Array[String]): Vector[String] = + if (a eq null) Vector() + else a.toVector +} diff --git a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala index dc73c66fd..873ac2fdb 100644 --- a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala +++ b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala @@ -36,12 +36,15 @@ private[sbt] abstract class BackgroundJob { } def shutdown(): Unit + // this should be true on construction and stay true until // the job is complete def isRunning(): Boolean + // called after stop or on spontaneous exit, closing the result // removes the listener def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable + // do we 
need this or is the spawning task good enough? // def tags: SomeType } @@ -57,8 +60,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe private val serviceTempDir = IO.createTemporaryDirectory // hooks for sending start/stop events - protected def onAddJob(job: JobHandle): Unit = {} - protected def onRemoveJob(job: JobHandle): Unit = {} + protected def onAddJob(@deprecated("unused", "") job: JobHandle): Unit = () + protected def onRemoveJob(@deprecated("unused", "") job: JobHandle): Unit = () // this mutable state could conceptually go on State except // that then every task that runs a background job would have @@ -110,9 +113,11 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe protected def makeContext(id: Long, spawningTask: ScopedKey[_], state: State): ManagedLogger - def doRunInBackground(spawningTask: ScopedKey[_], - state: State, - start: (Logger, File) => BackgroundJob): JobHandle = { + def doRunInBackground( + spawningTask: ScopedKey[_], + state: State, + start: (Logger, File) => BackgroundJob + ): JobHandle = { val id = nextId.getAndIncrement() val logger = makeContext(id, spawningTask, state) val workingDir = serviceTempDir / s"job-$id" @@ -129,7 +134,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe } override def runInBackground(spawningTask: ScopedKey[_], state: State)( - start: (Logger, File) => Unit): JobHandle = { + start: (Logger, File) => Unit + ): JobHandle = { pool.run(this, spawningTask, state)(start) } @@ -152,7 +158,8 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe case _: DeadHandle @unchecked => () // nothing to stop or wait for case other => sys.error( - s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other") + s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other" + ) } private def withHandleTry(job: JobHandle)(f: ThreadJobHandle => 
Try[Unit]): Try[Unit] = @@ -160,8 +167,11 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe case handle: ThreadJobHandle @unchecked => f(handle) case _: DeadHandle @unchecked => Try(()) // nothing to stop or wait for case other => - Try(sys.error( - s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other")) + Try( + sys.error( + s"BackgroundJobHandle does not originate with the current BackgroundJobService: $other" + ) + ) } override def stop(job: JobHandle): Unit = @@ -360,7 +370,8 @@ private[sbt] class BackgroundThreadPool extends java.io.Closeable { } def run(manager: AbstractBackgroundJobService, spawningTask: ScopedKey[_], state: State)( - work: (Logger, File) => Unit): JobHandle = { + work: (Logger, File) => Unit + ): JobHandle = { def start(logger: Logger, workingDir: File): BackgroundJob = { val runnable = new BackgroundRunnable(spawningTask.key.label, { () => work(logger, workingDir) diff --git a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala b/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala index 6b4e77f91..347e23b86 100644 --- a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala +++ b/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala @@ -8,11 +8,18 @@ package sbt package internal -import sbt.internal.util.{ complete, AttributeEntry, AttributeKey, LineRange, MessageOnlyException, RangePosition, Settings } +import sbt.internal.util.{ + AttributeEntry, + AttributeKey, + LineRange, + MessageOnlyException, + RangePosition, + Settings +} import java.io.File import compiler.{ Eval, EvalImports } -import complete.DefaultParsers.validID +import sbt.internal.util.complete.DefaultParsers.validID import Def.{ ScopedKey, Setting } import Scope.GlobalScope import sbt.internal.parser.SbtParser @@ -37,7 +44,11 @@ private[sbt] object EvaluateConfigurations { /** * This represents the parsed expressions in a build sbt, as well as where they were defined. 
*/ - private[this] final class ParsedFile(val imports: Seq[(String, Int)], val definitions: Seq[(String, LineRange)], val settings: Seq[(String, LineRange)]) + private[this] final class ParsedFile( + val imports: Seq[(String, Int)], + val definitions: Seq[(String, LineRange)], + val settings: Seq[(String, LineRange)] + ) /** The keywords we look for when classifying a string as a definition. */ private[this] val DefinitionKeywords = Seq("lazy val ", "def ", "val ") @@ -48,18 +59,26 @@ private[sbt] object EvaluateConfigurations { * return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has * raw sbt-types that can be accessed and used. */ - def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = - { - val loadFiles = srcs.sortBy(_.getName) map { src => evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) } - loader => (LoadedSbtFile.empty /: loadFiles) { (loaded, load) => loaded merge load(loader) } + def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] = { + val loadFiles = srcs.sortBy(_.getName) map { src => + evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) } + loader => + (LoadedSbtFile.empty /: loadFiles) { (loaded, load) => + loaded merge load(loader) + } + } /** * Reads a given .sbt file and evaluates it into a sequence of setting values. * * Note: This ignores any non-Setting[_] values in the file. */ - def evaluateConfiguration(eval: Eval, src: File, imports: Seq[String]): LazyClassLoaded[Seq[Setting[_]]] = + def evaluateConfiguration( + eval: Eval, + src: File, + imports: Seq[String] + ): LazyClassLoaded[Seq[Setting[_]]] = evaluateConfiguration(eval, src, IO.readLines(src), imports, 0) /** @@ -68,13 +87,19 @@ private[sbt] object EvaluateConfigurations { * * @param builtinImports The set of import statements to add to those parsed in the .sbt file. 
*/ - private[this] def parseConfiguration(file: File, lines: Seq[String], builtinImports: Seq[String], offset: Int): ParsedFile = - { - val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines) - val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements) - val (definitions, settings) = splitSettingsDefinitions(addOffsetToRange(offset, settingsAndDefinitions)) - new ParsedFile(allImports, definitions, settings) - } + private[this] def parseConfiguration( + file: File, + lines: Seq[String], + builtinImports: Seq[String], + offset: Int + ): ParsedFile = { + val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines) + val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements) + val (definitions, settings) = splitSettingsDefinitions( + addOffsetToRange(offset, settingsAndDefinitions) + ) + new ParsedFile(allImports, definitions, settings) + } /** * Evaluates a parsed sbt configuration file. @@ -86,11 +111,17 @@ private[sbt] object EvaluateConfigurations { * * @return Just the Setting[_] instances defined in the .sbt file. */ - def evaluateConfiguration(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[Seq[Setting[_]]] = - { - val l = evaluateSbtFile(eval, file, lines, imports, offset) - loader => l(loader).settings - } + def evaluateConfiguration( + eval: Eval, + file: File, + lines: Seq[String], + imports: Seq[String], + offset: Int + ): LazyClassLoaded[Seq[Setting[_]]] = { + val l = evaluateSbtFile(eval, file, lines, imports, offset) + loader => + l(loader).settings + } /** * Evaluates a parsed sbt configuration file. @@ -102,31 +133,41 @@ private[sbt] object EvaluateConfigurations { * @return A function which can take an sbt classloader and return the raw types/configuration * which was compiled/parsed for the given file. 
*/ - private[sbt] def evaluateSbtFile(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[LoadedSbtFile] = - { - // TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do - // detection for which project project manipulations should be applied. - val name = file.getPath - val parsed = parseConfiguration(file, lines, imports, offset) - val (importDefs, definitions) = - if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) else { - val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file)) - val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil) - (imp, DefinedSbtValues(definitions)) - } - val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports - val dslEntries = parsed.settings map { - case (dslExpression, range) => - evaluateDslEntry(eval, name, allImports, dslExpression, range) + private[sbt] def evaluateSbtFile( + eval: Eval, + file: File, + lines: Seq[String], + imports: Seq[String], + offset: Int + ): LazyClassLoaded[LoadedSbtFile] = { + // TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do + // detection for which project project manipulations should be applied. + val name = file.getPath + val parsed = parseConfiguration(file, lines, imports, offset) + val (importDefs, definitions) = + if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) + else { + val definitions = + evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file)) + val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil) + (imp, DefinedSbtValues(definitions)) } - eval.unlinkDeferred() - // Tracks all the files we generated from evaluating the sbt file. 
- val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated)) - loader => { - val projects = - definitions.values(loader).collect { - case p: Project => resolveBase(file.getParentFile, p) + val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports + val dslEntries = parsed.settings map { + case (dslExpression, range) => + evaluateDslEntry(eval, name, allImports, dslExpression, range) + } + eval.unlinkDeferred() + // Tracks all the files we generated from evaluating the sbt file. + val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated)) + loader => + { + val projects = { + val compositeProjects = definitions.values(loader).collect { + case p: CompositeProject => p } + CompositeProject.expand(compositeProjects).map(resolveBase(file.getParentFile, _)) + } val (settingsRaw, manipulationsRaw) = dslEntries map (_.result apply loader) partition { case DslEntry.ProjectSettings(_) => true @@ -140,9 +181,16 @@ private[sbt] object EvaluateConfigurations { case DslEntry.ProjectManipulation(f) => f } // TODO -get project manipulations. - new LoadedSbtFile(settings, projects, importDefs, manipulations, definitions, allGeneratedFiles) + new LoadedSbtFile( + settings, + projects, + importDefs, + manipulations, + definitions, + allGeneratedFiles + ) } - } + } /** move a project to be relative to this file after we've evaluated it. */ private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base)) @@ -173,11 +221,23 @@ private[sbt] object EvaluateConfigurations { * @return A method that given an sbt classloader, can return the actual [[sbt.internal.DslEntry]] defined by * the expression, and the sequence of .class files generated. 
*/ - private[sbt] def evaluateDslEntry(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): TrackedEvalResult[DslEntry] = { + private[sbt] def evaluateDslEntry( + eval: Eval, + name: String, + imports: Seq[(String, Int)], + expression: String, + range: LineRange + ): TrackedEvalResult[DslEntry] = { // TODO - Should we try to namespace these between.sbt files? IF they hash to the same value, they may actually be // exactly the same setting, so perhaps we don't care? val result = try { - eval.eval(expression, imports = new EvalImports(imports, name), srcName = name, tpeName = Some(SettingsDefinitionName), line = range.start) + eval.eval( + expression, + imports = new EvalImports(imports, name), + srcName = name, + tpeName = Some(SettingsDefinitionName), + line = range.start + ) } catch { case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage) } @@ -206,7 +266,13 @@ private[sbt] object EvaluateConfigurations { */ // Build DSL now includes non-Setting[_] type settings. // Note: This method is used by the SET command, so we may want to evaluate that sucker a bit. - def evaluateSetting(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): LazyClassLoaded[Seq[Setting[_]]] = + def evaluateSetting( + eval: Eval, + name: String, + imports: Seq[(String, Int)], + expression: String, + range: LineRange + ): LazyClassLoaded[Seq[Setting[_]]] = evaluateDslEntry(eval, name, imports, expression, range).result andThen { case DslEntry.ProjectSettings(values) => values case _ => Nil @@ -216,44 +282,71 @@ private[sbt] object EvaluateConfigurations { * Splits a set of lines into (imports, expressions). That is, * anything on the right of the tuple is a scala expression (definition or setting). 
*/ - private[sbt] def splitExpressions(file: File, lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) = - { - val split = SbtParser(file, lines) - // TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different - // scala compiler rather than re-parsing. - (split.imports, split.settings) - } + private[sbt] def splitExpressions( + file: File, + lines: Seq[String] + ): (Seq[(String, Int)], Seq[(String, LineRange)]) = { + val split = SbtParser(file, lines) + // TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different + // scala compiler rather than re-parsing. + (split.imports, split.settings) + } - private[this] def splitSettingsDefinitions(lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) = - lines partition { case (line, range) => isDefinition(line) } + private[this] def splitSettingsDefinitions( + lines: Seq[(String, LineRange)] + ): (Seq[(String, LineRange)], Seq[(String, LineRange)]) = + lines partition { case (line, _) => isDefinition(line) } - private[this] def isDefinition(line: String): Boolean = - { - val trimmed = line.trim - DefinitionKeywords.exists(trimmed startsWith _) - } + private[this] def isDefinition(line: String): Boolean = { + val trimmed = line.trim + DefinitionKeywords.exists(trimmed startsWith _) + } private[this] def extractedValTypes: Seq[String] = - Seq(classOf[Project], classOf[InputKey[_]], classOf[TaskKey[_]], classOf[SettingKey[_]]).map(_.getName) + Seq( + classOf[CompositeProject], + classOf[InputKey[_]], + classOf[TaskKey[_]], + classOf[SettingKey[_]] + ).map(_.getName) - private[this] def evaluateDefinitions(eval: Eval, name: String, imports: Seq[(String, Int)], definitions: Seq[(String, LineRange)], file: Option[File]): compiler.EvalDefinitions = - { - val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) } - eval.evalDefinitions(convertedRanges, new 
EvalImports(imports, name), name, file, extractedValTypes) - } + private[this] def evaluateDefinitions( + eval: Eval, + name: String, + imports: Seq[(String, Int)], + definitions: Seq[(String, LineRange)], + file: Option[File] + ): compiler.EvalDefinitions = { + val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) } + eval.evalDefinitions( + convertedRanges, + new EvalImports(imports, name), + name, + file, + extractedValTypes + ) + } } object Index { - def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = - { - // AttributeEntry + the checked type test 'value: Task[_]' ensures that the cast is correct. - // (scalac couldn't determine that 'key' is of type AttributeKey[Task[_]] on its own and a type match still required the cast) - val pairs = for (scope <- data.scopes; AttributeEntry(key, value: Task[_]) <- data.data(scope).entries) yield (value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]])) // unclear why this cast is needed even with a type test in the above filter - pairs.toMap[Task[_], ScopedKey[Task[_]]] - } + def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = { + + val pairs = data.scopes flatMap ( + scope => + data.data(scope).entries collect { + case AttributeEntry(key, value: Task[_]) => + (value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]])) + } + ) + + pairs.toMap[Task[_], ScopedKey[Task[_]]] + } def allKeys(settings: Seq[Setting[_]]): Set[ScopedKey[_]] = - settings.flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies).filter(!_.key.isLocal).toSet + settings + .flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies) + .filter(!_.key.isLocal) + .toSet def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[_]] = settings.data.values.flatMap(_.keys).toSet[AttributeKey[_]] @@ -261,30 +354,38 @@ object Index { def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] = stringToKeyMap0(settings)(_.label) 
- private[this] def stringToKeyMap0(settings: Set[AttributeKey[_]])(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = - { - val multiMap = settings.groupBy(label) - val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect { case (k, xs) if xs.size > 1 => (k, xs) } - if (duplicates.isEmpty) - multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap - else - sys.error(duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", "")) + private[this] def stringToKeyMap0( + settings: Set[AttributeKey[_]] + )(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = { + val multiMap = settings.groupBy(label) + val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect { + case (k, xs) if xs.size > 1 => (k, xs) } + if (duplicates.isEmpty) + multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap + else + sys.error( + duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", "") + ) + } - private[this]type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]] + private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]] - def triggers(ss: Settings[Scope]): Triggers[Task] = - { - val runBefore = new TriggerMap - val triggeredBy = new TriggerMap - for ((_, amap) <- ss.data; AttributeEntry(_, value: Task[_]) <- amap.entries) { - val as = value.info.attributes - update(runBefore, value, as get Keys.runBefore) - update(triggeredBy, value, as get Keys.triggeredBy) + def triggers(ss: Settings[Scope]): Triggers[Task] = { + val runBefore = new TriggerMap + val triggeredBy = new TriggerMap + ss.data.values foreach ( + _.entries foreach { + case AttributeEntry(_, value: Task[_]) => + val as = value.info.attributes + 
update(runBefore, value, as get Keys.runBefore) + update(triggeredBy, value, as get Keys.triggeredBy) + case _ => () } - val onComplete = Keys.onComplete in GlobalScope get ss getOrElse { () => () } - new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map }) - } + ) + val onComplete = Keys.onComplete in GlobalScope get ss getOrElse (() => ()) + new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map }) + } private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit = for (tasks <- tasksOpt; task <- tasks) diff --git a/main/src/main/scala/sbt/internal/GlobalPlugin.scala b/main/src/main/scala/sbt/internal/GlobalPlugin.scala index e71b8ee24..92f237b7d 100644 --- a/main/src/main/scala/sbt/internal/GlobalPlugin.scala +++ b/main/src/main/scala/sbt/internal/GlobalPlugin.scala @@ -41,8 +41,10 @@ object GlobalPlugin { injectInternalClasspath(Runtime, gp.internalClasspath), injectInternalClasspath(Compile, gp.internalClasspath) ) - private[this] def injectInternalClasspath(config: Configuration, - cp: Seq[Attributed[File]]): Setting[_] = + private[this] def injectInternalClasspath( + config: Configuration, + cp: Seq[Attributed[File]] + ): Setting[_] = internalDependencyClasspath in config ~= { prev => (prev ++ cp).distinct } @@ -50,8 +52,10 @@ object GlobalPlugin { def build(base: File, s: State, config: LoadBuildConfiguration): (BuildStructure, State) = { val newInject = config.injectSettings.copy(global = config.injectSettings.global ++ globalPluginSettings) - val globalConfig = config.copy(injectSettings = newInject, - pluginManagement = config.pluginManagement.forGlobalPlugin) + val globalConfig = config.copy( + injectSettings = newInject, + pluginManagement = config.pluginManagement.forGlobalPlugin + ) val (eval, structure) = Load(base, s, globalConfig) val session = Load.initialSession(structure, eval) (structure, Project.setProject(session, structure, s)) @@ -73,46 +77,53 @@ object GlobalPlugin { // If we 
reference it directly (if it's an executionRoot) then it forces an update, which is not what we want. val updateReport = Def.taskDyn { Def.task { update.value } }.value - GlobalPluginData(projectID.value, - projectDependencies.value, - depMap, - resolvers.value.toVector, - (fullClasspath in Runtime).value, - (prods ++ intcp).distinct)(updateReport) + GlobalPluginData( + projectID.value, + projectDependencies.value, + depMap, + resolvers.value.toVector, + (fullClasspath in Runtime).value, + (prods ++ intcp).distinct + )(updateReport) } val resolvedTaskInit = taskInit mapReferenced Project.mapScope(Scope replaceThis p) val task = resolvedTaskInit evaluate data val roots = resolvedTaskInit.dependencies evaluate(state, structure, task, roots) } - def evaluate[T](state: State, - structure: BuildStructure, - t: Task[T], - roots: Seq[ScopedKey[_]]): (State, T) = { + def evaluate[T]( + state: State, + structure: BuildStructure, + t: Task[T], + roots: Seq[ScopedKey[_]] + ): (State, T) = { import EvaluateTask._ withStreams(structure, state) { str => val nv = nodeView(state, str, roots) val config = EvaluateTask.extractedTaskConfig(Project.extract(state), structure, state) val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(nv) - (newS, processResult(result, newS.log)) + (newS, processResult2(result)) } } val globalPluginSettings = Project.inScope(Scope.GlobalScope in LocalRootProject)( - Seq( - organization := SbtArtifacts.Organization, - onLoadMessage := Keys.baseDirectory("Loading global plugins from " + _).value, - name := "global-plugin", - sbtPlugin := true, - version := "0.0" - )) + organization := SbtArtifacts.Organization, + onLoadMessage := Keys.baseDirectory("Loading global plugins from " + _).value, + name := "global-plugin", + sbtPlugin := true, + version := "0.0" + ) } -final case class GlobalPluginData(projectID: ModuleID, - dependencies: Seq[ModuleID], - descriptors: Map[ModuleRevisionId, ModuleDescriptor], - resolvers: 
Vector[Resolver], - fullClasspath: Classpath, - internalClasspath: Classpath)(val updateReport: UpdateReport) -final case class GlobalPlugin(data: GlobalPluginData, - structure: BuildStructure, - inject: Seq[Setting[_]], - base: File) +final case class GlobalPluginData( + projectID: ModuleID, + dependencies: Seq[ModuleID], + descriptors: Map[ModuleRevisionId, ModuleDescriptor], + resolvers: Vector[Resolver], + fullClasspath: Classpath, + internalClasspath: Classpath +)(val updateReport: UpdateReport) +final case class GlobalPlugin( + data: GlobalPluginData, + structure: BuildStructure, + inject: Seq[Setting[_]], + base: File +) diff --git a/main/src/main/scala/sbt/internal/GroupedAutoPlugins.scala b/main/src/main/scala/sbt/internal/GroupedAutoPlugins.scala index c43b5a50b..06564204f 100644 --- a/main/src/main/scala/sbt/internal/GroupedAutoPlugins.scala +++ b/main/src/main/scala/sbt/internal/GroupedAutoPlugins.scala @@ -11,8 +11,10 @@ package internal import Def.Setting import java.net.URI -private[sbt] final class GroupedAutoPlugins(val all: Seq[AutoPlugin], - val byBuild: Map[URI, Seq[AutoPlugin]]) { +private[sbt] final class GroupedAutoPlugins( + val all: Seq[AutoPlugin], + val byBuild: Map[URI, Seq[AutoPlugin]] +) { def globalSettings: Seq[Setting[_]] = all.flatMap(_.globalSettings) def buildSettings(uri: URI): Seq[Setting[_]] = byBuild.getOrElse(uri, Nil).flatMap(_.buildSettings) diff --git a/main/src/main/scala/sbt/internal/IvyConsole.scala b/main/src/main/scala/sbt/internal/IvyConsole.scala index 9cce3a74f..a2cd16b1d 100644 --- a/main/src/main/scala/sbt/internal/IvyConsole.scala +++ b/main/src/main/scala/sbt/internal/IvyConsole.scala @@ -50,19 +50,23 @@ object IvyConsole { logLevel in Global := Level.Warn, showSuccess in Global := false ) - val append = Load.transformSettings(Load.projectScope(currentRef), - currentRef.build, - rootProject, - depSettings) + val append = Load.transformSettings( + Load.projectScope(currentRef), + currentRef.build, + 
rootProject, + depSettings + ) val newStructure = Load.reapply(session.original ++ append, structure) val newState = state.copy(remainingCommands = Exec(Keys.consoleQuick.key.label, None) :: Nil) Project.setProject(session, newStructure, newState) } - final case class Dependencies(managed: Seq[ModuleID], - resolvers: Seq[Resolver], - unmanaged: Seq[File]) + final case class Dependencies( + managed: Seq[ModuleID], + resolvers: Seq[Resolver], + unmanaged: Seq[File] + ) def parseDependencies(args: Seq[String], log: Logger): Dependencies = (Dependencies(Nil, Nil, Nil) /: args)(parseArgument(log)) def parseArgument(log: Logger)(acc: Dependencies, arg: String): Dependencies = diff --git a/main/src/main/scala/sbt/internal/KeyIndex.scala b/main/src/main/scala/sbt/internal/KeyIndex.scala index 78183984e..fc6ef184c 100644 --- a/main/src/main/scala/sbt/internal/KeyIndex.scala +++ b/main/src/main/scala/sbt/internal/KeyIndex.scala @@ -17,19 +17,25 @@ import sbt.librarymanagement.Configuration object KeyIndex { def empty: ExtendableKeyIndex = new KeyIndex0(emptyBuildIndex) - def apply(known: Iterable[ScopedKey[_]], - projects: Map[URI, Set[String]], - configurations: Map[String, Seq[Configuration]]): ExtendableKeyIndex = + def apply( + known: Iterable[ScopedKey[_]], + projects: Map[URI, Set[String]], + configurations: Map[String, Seq[Configuration]] + ): ExtendableKeyIndex = (base(projects, configurations) /: known) { _ add _ } - def aggregate(known: Iterable[ScopedKey[_]], - extra: BuildUtil[_], - projects: Map[URI, Set[String]], - configurations: Map[String, Seq[Configuration]]): ExtendableKeyIndex = + def aggregate( + known: Iterable[ScopedKey[_]], + extra: BuildUtil[_], + projects: Map[URI, Set[String]], + configurations: Map[String, Seq[Configuration]] + ): ExtendableKeyIndex = (base(projects, configurations) /: known) { (index, key) => index.addAggregated(key, extra) } - private[this] def base(projects: Map[URI, Set[String]], - configurations: Map[String, 
Seq[Configuration]]): ExtendableKeyIndex = { + private[this] def base( + projects: Map[URI, Set[String]], + configurations: Map[String, Seq[Configuration]] + ): ExtendableKeyIndex = { val data = for { (uri, ids) <- projects } yield { @@ -78,23 +84,29 @@ trait KeyIndex { // TODO, optimize def isEmpty(proj: Option[ResolvedReference], conf: Option[String]): Boolean = keys(proj, conf).isEmpty - def isEmpty(proj: Option[ResolvedReference], - conf: Option[String], - task: Option[AttributeKey[_]]): Boolean = keys(proj, conf, task).isEmpty + def isEmpty( + proj: Option[ResolvedReference], + conf: Option[String], + task: Option[AttributeKey[_]] + ): Boolean = keys(proj, conf, task).isEmpty def buildURIs: Set[URI] def projects(uri: URI): Set[String] def exists(project: Option[ResolvedReference]): Boolean def configs(proj: Option[ResolvedReference]): Set[String] def tasks(proj: Option[ResolvedReference], conf: Option[String]): Set[AttributeKey[_]] - def tasks(proj: Option[ResolvedReference], - conf: Option[String], - key: String): Set[AttributeKey[_]] + def tasks( + proj: Option[ResolvedReference], + conf: Option[String], + key: String + ): Set[AttributeKey[_]] def keys(proj: Option[ResolvedReference]): Set[String] def keys(proj: Option[ResolvedReference], conf: Option[String]): Set[String] - def keys(proj: Option[ResolvedReference], - conf: Option[String], - task: Option[AttributeKey[_]]): Set[String] + def keys( + proj: Option[ResolvedReference], + conf: Option[String], + task: Option[AttributeKey[_]] + ): Set[String] private[sbt] def configIdents(project: Option[ResolvedReference]): Set[String] private[sbt] def fromConfigIdent(proj: Option[ResolvedReference])(configIdent: String): String } @@ -116,11 +128,15 @@ private[sbt] final class AKeyIndex(val data: Relation[Option[AttributeKey[_]], S * data contains the mapping between a configuration and keys. * identData contains the mapping between a configuration and its identifier. 
*/ -private[sbt] final class ConfigIndex(val data: Map[Option[String], AKeyIndex], - val identData: Map[String, String]) { - def add(config: Option[String], - task: Option[AttributeKey[_]], - key: AttributeKey[_]): ConfigIndex = { +private[sbt] final class ConfigIndex( + val data: Map[Option[String], AKeyIndex], + val identData: Map[String, String] +) { + def add( + config: Option[String], + task: Option[AttributeKey[_]], + key: AttributeKey[_] + ): ConfigIndex = { new ConfigIndex(data updated (config, keyIndex(config).add(task, key)), this.identData) } @@ -141,20 +157,24 @@ private[sbt] final class ConfigIndex(val data: Map[Option[String], AKeyIndex], configIdentsInverse.getOrElse(ident, Scope.unguessConfigIdent(ident)) } private[sbt] final class ProjectIndex(val data: Map[Option[String], ConfigIndex]) { - def add(id: Option[String], - config: Option[String], - task: Option[AttributeKey[_]], - key: AttributeKey[_]): ProjectIndex = + def add( + id: Option[String], + config: Option[String], + task: Option[AttributeKey[_]], + key: AttributeKey[_] + ): ProjectIndex = new ProjectIndex(data updated (id, confIndex(id).add(config, task, key))) def confIndex(id: Option[String]): ConfigIndex = getOr(data, id, emptyConfigIndex) def projects: Set[String] = keySet(data) } private[sbt] final class BuildIndex(val data: Map[Option[URI], ProjectIndex]) { - def add(build: Option[URI], - project: Option[String], - config: Option[String], - task: Option[AttributeKey[_]], - key: AttributeKey[_]): BuildIndex = + def add( + build: Option[URI], + project: Option[String], + config: Option[String], + task: Option[AttributeKey[_]], + key: AttributeKey[_] + ): BuildIndex = new BuildIndex(data updated (build, projectIndex(build).add(project, config, task, key))) def projectIndex(build: Option[URI]): ProjectIndex = getOr(data, build, emptyProjectIndex) def builds: Set[URI] = keySet(data) @@ -176,18 +196,22 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn def 
tasks(proj: Option[ResolvedReference], conf: Option[String]): Set[AttributeKey[_]] = keyIndex(proj, conf).tasks - def tasks(proj: Option[ResolvedReference], - conf: Option[String], - key: String): Set[AttributeKey[_]] = keyIndex(proj, conf).tasks(key) + def tasks( + proj: Option[ResolvedReference], + conf: Option[String], + key: String + ): Set[AttributeKey[_]] = keyIndex(proj, conf).tasks(key) def keys(proj: Option[ResolvedReference]): Set[String] = (Set.empty[String] /: optConfigs(proj)) { (s, c) => s ++ keys(proj, c) } def keys(proj: Option[ResolvedReference], conf: Option[String]): Set[String] = keyIndex(proj, conf).allKeys - def keys(proj: Option[ResolvedReference], - conf: Option[String], - task: Option[AttributeKey[_]]): Set[String] = keyIndex(proj, conf).keys(task) + def keys( + proj: Option[ResolvedReference], + conf: Option[String], + task: Option[AttributeKey[_]] + ): Set[String] = keyIndex(proj, conf).keys(task) def keyIndex(proj: Option[ResolvedReference], conf: Option[String]): AKeyIndex = confIndex(proj).keyIndex(conf) @@ -217,10 +241,12 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn val (build, project) = parts(scoped.scope.project.toOption) add1(build, project, scoped.scope.config, scoped.scope.task, scoped.key) } - private[this] def add1(uri: Option[URI], - id: Option[String], - config: ScopeAxis[ConfigKey], - task: ScopeAxis[AttributeKey[_]], - key: AttributeKey[_]): ExtendableKeyIndex = + private[this] def add1( + uri: Option[URI], + id: Option[String], + config: ScopeAxis[ConfigKey], + task: ScopeAxis[AttributeKey[_]], + key: AttributeKey[_] + ): ExtendableKeyIndex = new KeyIndex0(data.add(uri, id, config.toOption.map(_.name), task.toOption, key)) } diff --git a/main/src/main/scala/sbt/internal/LibraryManagement.scala b/main/src/main/scala/sbt/internal/LibraryManagement.scala index d247a9d99..28a40c5c7 100644 --- a/main/src/main/scala/sbt/internal/LibraryManagement.scala +++ 
b/main/src/main/scala/sbt/internal/LibraryManagement.scala @@ -37,18 +37,12 @@ private[sbt] object LibraryManagement { ): UpdateReport = { /* Resolve the module settings from the inputs. */ - def resolve(inputs: UpdateInputs): UpdateReport = { + def resolve: UpdateReport = { import sbt.util.ShowLines._ log.info(s"Updating $label...") val reportOrUnresolved: Either[UnresolvedWarning, UpdateReport] = - //try { lm.update(module, updateConfig, uwConfig, log) - // } catch { - // case e: Throwable => - // e.printStackTrace - // throw e - // } val report = reportOrUnresolved match { case Right(report0) => report0 case Left(unresolvedWarning) => @@ -96,12 +90,12 @@ private[sbt] object LibraryManagement { import sbt.librarymanagement.LibraryManagementCodec._ val cachedResolve = Tracked.lastOutput[UpdateInputs, UpdateReport](cache) { case (_, Some(out)) if upToDate(inChanged, out) => markAsCached(out) - case _ => resolve(updateInputs) + case _ => resolve } import scala.util.control.Exception.catching catching(classOf[NullPointerException], classOf[OutOfMemoryError]) .withApply { t => - val resolvedAgain = resolve(updateInputs) + val resolvedAgain = resolve val culprit = t.getClass.getSimpleName log.warn(s"Update task caching failed due to $culprit.") log.warn("Report the following output to sbt:") diff --git a/main/src/main/scala/sbt/internal/Load.scala b/main/src/main/scala/sbt/internal/Load.scala index c6264f671..c496a7f32 100755 --- a/main/src/main/scala/sbt/internal/Load.scala +++ b/main/src/main/scala/sbt/internal/Load.scala @@ -61,7 +61,7 @@ private[sbt] object Load { val globalBase = getGlobalBase(state) val base = baseDirectory.getCanonicalFile val rawConfig = defaultPreGlobal(state, base, globalBase, log) - val config0 = defaultWithGlobal(state, base, rawConfig, globalBase, log) + val config0 = defaultWithGlobal(state, base, rawConfig, globalBase) val config = if (isPlugin) enableSbtPlugin(config0) else config0.copy(extraBuilds = topLevelExtras) (base, config) @@ 
-109,7 +109,7 @@ private[sbt] object Load { javaHome = None, scalac ) - val evalPluginDef = EvaluateTask.evalPluginDef(log) _ + val evalPluginDef: (BuildStructure, State) => PluginData = EvaluateTask.evalPluginDef _ val delegates = defaultDelegates val pluginMgmt = PluginManagement(loader) val inject = InjectSettings(injectGlobal(state), Nil, const(Nil)) @@ -145,7 +145,6 @@ private[sbt] object Load { base: File, rawConfig: LoadBuildConfiguration, globalBase: File, - log: Logger ): LoadBuildConfiguration = { val globalPluginsDir = getGlobalPluginsDirectory(state, globalBase) val withGlobal = loadGlobal(state, base, globalPluginsDir, rawConfig) @@ -208,7 +207,6 @@ private[sbt] object Load { project => projectInherit(lb, project), (project, config) => configInherit(lb, project, config, rootProject), task => task.extend, - (project, extra) => Nil ) } @@ -311,8 +309,9 @@ private[sbt] object Load { case _ => None } ) - ss.map(s => - s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining) + ss.map( + s => s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining + ) } def setDefinitionKey[T](tk: Task[T], key: ScopedKey[_]): Task[T] = @@ -561,8 +560,10 @@ private[sbt] object Load { def checkProjectBase(buildBase: File, projectBase: File): Unit = { checkDirectory(projectBase) - assert(buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined, - s"Directory $projectBase is not contained in build root $buildBase") + assert( + buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined, + s"Directory $projectBase is not contained in build root $buildBase" + ) } def checkBuildBase(base: File) = checkDirectory(base) @@ -583,8 +584,10 @@ private[sbt] object Load { } } - def checkAll(referenced: Map[URI, List[ProjectReference]], - builds: Map[URI, PartBuildUnit]): Unit = { + def checkAll( + referenced: Map[URI, List[ProjectReference]], + builds: Map[URI, PartBuildUnit] + ): Unit = { val 
rootProject = getRootProject(builds) for ((uri, refs) <- referenced; ref <- refs) { val ProjectRef(refURI, refID) = Scope.resolveProjectRef(uri, rootProject, ref) @@ -720,12 +723,15 @@ private[sbt] object Load { // here on, so the autogenerated build aggregated can be removed from this code. ( I think) // We may actually want to move it back here and have different flags in loadTransitive... val hasRoot = loadedProjectsRaw.projects.exists(_.base == normBase) || defsScala.exists( - _.rootProject.isDefined) + _.rootProject.isDefined + ) val (loadedProjects, defaultBuildIfNone, keepClassFiles) = if (hasRoot) - (loadedProjectsRaw.projects, - BuildDef.defaultEmpty, - loadedProjectsRaw.generatedConfigClassFiles) + ( + loadedProjectsRaw.projects, + BuildDef.defaultEmpty, + loadedProjectsRaw.generatedConfigClassFiles + ) else { val existingIDs = loadedProjectsRaw.projects.map(_.id) val refs = existingIDs.map(id => ProjectRef(uri, id)) @@ -734,9 +740,11 @@ private[sbt] object Load { val defaultProjects = timed("Load.loadUnit: defaultProjects", log) { loadProjects(projectsFromBuild(b, normBase), false) } - (defaultProjects.projects ++ loadedProjectsRaw.projects, - b, - defaultProjects.generatedConfigClassFiles ++ loadedProjectsRaw.generatedConfigClassFiles) + ( + defaultProjects.projects ++ loadedProjectsRaw.projects, + b, + defaultProjects.generatedConfigClassFiles ++ loadedProjectsRaw.generatedConfigClassFiles + ) } // Now we clean stale class files. 
// TODO - this may cause issues with multiple sbt clients, but that should be deprecated pending sbt-server anyway @@ -909,7 +917,8 @@ private[sbt] object Load { discover(AddSettings.defaultSbtFiles, buildBase) match { case DiscoveredProjects(Some(root), discovered, files, generated) => log.debug( - s"[Loading] Found root project ${root.id} w/ remaining ${discovered.map(_.id).mkString(",")}") + s"[Loading] Found root project ${root.id} w/ remaining ${discovered.map(_.id).mkString(",")}" + ) val (finalRoot, projectLevelExtra) = timed(s"Load.loadTransitive: finalizeProject($root)", log) { finalizeProject(root, files, true) @@ -959,18 +968,22 @@ private[sbt] object Load { } val result = root +: (acc ++ otherProjects.projects) log.debug( - s"[Loading] Done in ${buildBase}, returning: ${result.map(_.id).mkString("(", ", ", ")")}") + s"[Loading] Done in ${buildBase}, returning: ${result.map(_.id).mkString("(", ", ", ")")}" + ) LoadedProjects(result, generated ++ otherGenerated ++ generatedConfigClassFiles) } case Nil => log.debug( - s"[Loading] Done in ${buildBase}, returning: ${acc.map(_.id).mkString("(", ", ", ")")}") + s"[Loading] Done in ${buildBase}, returning: ${acc.map(_.id).mkString("(", ", ", ")")}" + ) LoadedProjects(acc, generatedConfigClassFiles) } } - private[this] def translateAutoPluginException(e: AutoPluginException, - project: Project): AutoPluginException = + private[this] def translateAutoPluginException( + e: AutoPluginException, + project: Project + ): AutoPluginException = e.withPrefix(s"Error determining plugins for project '${project.id}' in ${project.base}:\n") /** @@ -1026,7 +1039,9 @@ private[sbt] object Load { // Grab all the settings we already loaded from sbt files def settings(files: Seq[File]): Seq[Setting[_]] = { if (files.nonEmpty) - log.info(s"${files.map(_.getName).mkString("Loading settings from ", ",", " ...")}") + log.info( + s"${files.map(_.getName).mkString(s"Loading settings for project ${p.id} from ", ",", " ...")}" + ) for { 
file <- files config <- (memoSettings get file).toSeq @@ -1128,22 +1143,21 @@ private[sbt] object Load { /** These are the settings defined when loading a project "meta" build. */ val autoPluginSettings: Seq[Setting[_]] = inScope(GlobalScope in LocalRootProject)( - Seq( - sbtPlugin :== true, - pluginData := { - val prod = (exportedProducts in Configurations.Runtime).value - val cp = (fullClasspath in Configurations.Runtime).value - val opts = (scalacOptions in Configurations.Compile).value - PluginData( - removeEntries(cp, prod), - prod, - Some(fullResolvers.value.toVector), - Some(update.value), - opts - ) - }, - onLoadMessage := ("Loading project definition from " + baseDirectory.value) - )) + sbtPlugin :== true, + pluginData := { + val prod = (exportedProducts in Configurations.Runtime).value + val cp = (fullClasspath in Configurations.Runtime).value + val opts = (scalacOptions in Configurations.Compile).value + PluginData( + removeEntries(cp, prod), + prod, + Some(fullResolvers.value.toVector), + Some(update.value), + opts + ) + }, + onLoadMessage := ("Loading project definition from " + baseDirectory.value) + ) private[this] def removeEntries( cp: Seq[Attributed[File]], @@ -1160,7 +1174,8 @@ private[sbt] object Load { injectSettings = config.injectSettings.copy( global = autoPluginSettings ++ config.injectSettings.global, project = config.pluginManagement.inject ++ config.injectSettings.project - )) + ) + ) def activateGlobalPlugin(config: LoadBuildConfiguration): LoadBuildConfiguration = config.globalPlugin match { @@ -1269,8 +1284,9 @@ private[sbt] object Load { def initialSession(structure: BuildStructure, rootEval: () => Eval, s: State): SessionSettings = { val session = s get Keys.sessionSettings val currentProject = session map (_.currentProject) getOrElse Map.empty - val currentBuild = session map (_.currentBuild) filter (uri => - structure.units.keys exists (uri ==)) getOrElse structure.root + val currentBuild = session map (_.currentBuild) filter ( + 
uri => structure.units.keys exists (uri ==) + ) getOrElse structure.root new SessionSettings( currentBuild, projectMap(structure, currentProject), diff --git a/main/src/main/scala/sbt/internal/LogManager.scala b/main/src/main/scala/sbt/internal/LogManager.scala index 66aa49fce..982b6a45c 100644 --- a/main/src/main/scala/sbt/internal/LogManager.scala +++ b/main/src/main/scala/sbt/internal/LogManager.scala @@ -15,6 +15,7 @@ import Keys.{ logLevel, logManager, persistLogLevel, persistTraceLevel, sLog, tr import scala.Console.{ BLUE, RESET } import sbt.internal.util.{ AttributeKey, + ConsoleAppender, ConsoleOut, Settings, SuppressedTraceContext, @@ -105,16 +106,18 @@ object LogManager { def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[_]): ManagedLogger = { val console = screen(task, state) - LogManager.backgroundLog(data, state, task, console, relay(()), extra(task).toList) + LogManager.backgroundLog(data, state, task, console, relay(())) } } // to change from global being the default to overriding, switch the order of state.get and data.get - def getOr[T](key: AttributeKey[T], - data: Settings[Scope], - scope: Scope, - state: State, - default: T): T = + def getOr[T]( + key: AttributeKey[T], + data: Settings[Scope], + scope: Scope, + state: State, + default: T + ): T = data.get(scope, key) orElse state.get(key) getOrElse default // This is the main function that is used to generate the logger for tasks. 
@@ -191,7 +194,6 @@ object LogManager { console: Appender, /* TODO: backed: Appender,*/ relay: Appender, - extra: List[Appender] ): ManagedLogger = { val scope = task.scope val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info) @@ -205,7 +207,8 @@ object LogManager { val consoleOpt = consoleLocally(state, console) LogExchange.bindLoggerAppenders( loggerName, - (consoleOpt.toList map { _ -> screenLevel }) ::: (relay -> backingLevel) :: Nil) + (consoleOpt.toList map { _ -> screenLevel }) ::: (relay -> backingLevel) :: Nil + ) log } @@ -258,7 +261,7 @@ object LogManager { private[this] def slog: Logger = Option(ref.get) getOrElse sys.error("Settings logger used after project was loaded.") - override val ansiCodesSupported = slog.ansiCodesSupported + override val ansiCodesSupported = ConsoleAppender.formatEnabledInEnv override def trace(t: => Throwable) = slog.trace(t) override def success(message: => String) = slog.success(message) override def log(level: Level.Value, message: => String) = slog.log(level, message) diff --git a/main/src/main/scala/sbt/internal/PluginDiscovery.scala b/main/src/main/scala/sbt/internal/PluginDiscovery.scala index fb82ea5e1..5fe657639 100644 --- a/main/src/main/scala/sbt/internal/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/internal/PluginDiscovery.scala @@ -47,6 +47,8 @@ object PluginDiscovery { "sbt.plugins.IvyPlugin" -> sbt.plugins.IvyPlugin, "sbt.plugins.JvmPlugin" -> sbt.plugins.JvmPlugin, "sbt.plugins.CorePlugin" -> sbt.plugins.CorePlugin, + "sbt.ScriptedPlugin" -> sbt.ScriptedPlugin, + "sbt.plugins.SbtPlugin" -> sbt.plugins.SbtPlugin, "sbt.plugins.JUnitXmlReportPlugin" -> sbt.plugins.JUnitXmlReportPlugin, "sbt.plugins.Giter8TemplatePlugin" -> sbt.plugins.Giter8TemplatePlugin ) @@ -65,7 +67,7 @@ object PluginDiscovery { new DiscoveredNames(discover[AutoPlugin], discover[BuildDef]) } - // TODO: for 0.14.0, consider consolidating into a single file, which would make the classpath search 4x faster + // TODO: 
consider consolidating into a single file, which would make the classpath search 4x faster /** Writes discovered module `names` to zero or more files in `dir` as per [[writeDescriptor]] and returns the list of files written. */ def writeDescriptors(names: DiscoveredNames, dir: File): Seq[File] = { import Paths._ @@ -92,10 +94,12 @@ object PluginDiscovery { * Discovers the names of top-level modules listed in resources named `resourceName` as per [[binaryModuleNames]] or * available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]]. */ - def binarySourceModuleNames(classpath: Seq[Attributed[File]], - loader: ClassLoader, - resourceName: String, - subclasses: String*): Seq[String] = + def binarySourceModuleNames( + classpath: Seq[Attributed[File]], + loader: ClassLoader, + resourceName: String, + subclasses: String* + ): Seq[String] = ( binaryModuleNames(data(classpath), loader, resourceName) ++ (analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*))) @@ -118,9 +122,11 @@ object PluginDiscovery { * `classpath` and `loader` are both required to ensure that `loader` * doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders. 
*/ - def binaryModuleNames(classpath: Seq[File], - loader: ClassLoader, - resourceName: String): Seq[String] = { + def binaryModuleNames( + classpath: Seq[File], + loader: ClassLoader, + resourceName: String + ): Seq[String] = { import collection.JavaConverters._ loader.getResources(resourceName).asScala.toSeq.filter(onClasspath(classpath)) flatMap { u => IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty) @@ -134,7 +140,8 @@ object PluginDiscovery { private[sbt] def binarySourceModules[T]( data: PluginData, loader: ClassLoader, - resourceName: String)(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = { + resourceName: String + )(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = { val classpath = data.classpath val namesAndValues = if (classpath.isEmpty) Nil @@ -146,9 +153,11 @@ object PluginDiscovery { new DetectedModules(namesAndValues) } - private[this] def loadModules[T: reflect.ClassTag](data: PluginData, - names: Seq[String], - loader: ClassLoader): Seq[(String, T)] = + private[this] def loadModules[T: reflect.ClassTag]( + data: PluginData, + names: Seq[String], + loader: ClassLoader + ): Seq[(String, T)] = try ModuleUtilities.getCheckedObjects[T](names, loader) catch { case e: ExceptionInInitializerError => @@ -168,7 +177,8 @@ object PluginDiscovery { if (evictedStrings.isEmpty) "" else "\nNote that conflicts were resolved for some dependencies:\n\t" + evictedStrings.mkString( - "\n\t") + "\n\t" + ) throw new IncompatiblePluginsException(msgBase + msgExtra, t) } } diff --git a/main/src/main/scala/sbt/internal/PluginManagement.scala b/main/src/main/scala/sbt/internal/PluginManagement.scala index e35964040..386648c35 100644 --- a/main/src/main/scala/sbt/internal/PluginManagement.scala +++ b/main/src/main/scala/sbt/internal/PluginManagement.scala @@ -15,17 +15,21 @@ import sbt.librarymanagement.ModuleID import java.net.{ URI, URL, URLClassLoader } -final case class PluginManagement(overrides: Set[ModuleID], - applyOverrides: Set[ModuleID], 
- loader: PluginClassLoader, - initialLoader: ClassLoader, - context: Context) { +final case class PluginManagement( + overrides: Set[ModuleID], + applyOverrides: Set[ModuleID], + loader: PluginClassLoader, + initialLoader: ClassLoader, + context: Context +) { def shift: PluginManagement = - PluginManagement(Set.empty, - overrides, - new PluginClassLoader(initialLoader), - initialLoader, - context) + PluginManagement( + Set.empty, + overrides, + new PluginClassLoader(initialLoader), + initialLoader, + context + ) def addOverrides(os: Set[ModuleID]): PluginManagement = copy(overrides = overrides ++ os) @@ -49,11 +53,13 @@ object PluginManagement { val emptyContext: Context = Context(false, 0) def apply(initialLoader: ClassLoader): PluginManagement = - PluginManagement(Set.empty, - Set.empty, - new PluginClassLoader(initialLoader), - initialLoader, - emptyContext) + PluginManagement( + Set.empty, + Set.empty, + new PluginClassLoader(initialLoader), + initialLoader, + emptyContext + ) def extractOverrides(classpath: Classpath): Set[ModuleID] = classpath flatMap { _.metadata get Keys.moduleID.key map keepOverrideInfo } toSet; diff --git a/main/src/main/scala/sbt/internal/PluginsDebug.scala b/main/src/main/scala/sbt/internal/PluginsDebug.scala index 676a4caff..6462475f4 100644 --- a/main/src/main/scala/sbt/internal/PluginsDebug.scala +++ b/main/src/main/scala/sbt/internal/PluginsDebug.scala @@ -48,8 +48,10 @@ private[sbt] class PluginsDebug( activePrefix + debugDeactivated(notFoundKey, deactivated) } - private[this] def debugDeactivated(notFoundKey: String, - deactivated: Seq[EnableDeactivated]): String = { + private[this] def debugDeactivated( + notFoundKey: String, + deactivated: Seq[EnableDeactivated] + ): String = { val (impossible, possible) = Util.separate(deactivated) { case pi: PluginImpossible => Left(pi) case pr: PluginRequirements => Right(pr) @@ -57,7 +59,7 @@ private[sbt] class PluginsDebug( if (possible.nonEmpty) { val explained = 
possible.map(explainPluginEnable) val possibleString = - if (explained.size > 1) + if (explained.lengthCompare(1) > 0) explained.zipWithIndex .map { case (s, i) => s"$i. $s" } .mkString(s"Multiple plugins are available that can provide $notFoundKey:\n", "\n", "") @@ -111,7 +113,7 @@ private[sbt] class PluginsDebug( } private[this] def multi(strs: Seq[String]): String = - strs.mkString(if (strs.size > 4) "\n\t" else ", ") + strs.mkString(if (strs.lengthCompare(4) > 0) "\n\t" else ", ") } private[sbt] object PluginsDebug { @@ -154,11 +156,13 @@ private[sbt] object PluginsDebug { val perBuild: Map[URI, Set[AutoPlugin]] = structure.units.mapValues(unit => availableAutoPlugins(unit).toSet) val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList - lazy val context = Context(currentProject.plugins, - currentProject.autoPlugins, - Plugins.deducer(pluginsThisBuild), - pluginsThisBuild, - s.log) + lazy val context = Context( + currentProject.plugins, + currentProject.autoPlugins, + Plugins.deducer(pluginsThisBuild), + pluginsThisBuild, + s.log + ) lazy val debug = PluginsDebug(context.available) if (!pluginsThisBuild.contains(plugin)) { val availableInBuilds: List[URI] = perBuild.toList.filter(_._2(plugin)).map(_._1) @@ -222,10 +226,11 @@ private[sbt] object PluginsDebug { sealed abstract class EnableDeactivated extends PluginEnable /** Describes a [[plugin]] that cannot be activated in a [[context]] due to [[contradictions]] in requirements. */ - final case class PluginImpossible(plugin: AutoPlugin, - context: Context, - contradictions: Set[AutoPlugin]) - extends EnableDeactivated + final case class PluginImpossible( + plugin: AutoPlugin, + context: Context, + contradictions: Set[AutoPlugin] + ) extends EnableDeactivated /** * Describes the requirements for activating [[plugin]] in [[context]]. @@ -256,9 +261,11 @@ private[sbt] object PluginsDebug { * affecting the other plugin. If empty, a direct exclusion is required. 
* @param newlySelected If false, this plugin was selected in the original context. */ - final case class DeactivatePlugin(plugin: AutoPlugin, - removeOneOf: Set[AutoPlugin], - newlySelected: Boolean) + final case class DeactivatePlugin( + plugin: AutoPlugin, + removeOneOf: Set[AutoPlugin], + newlySelected: Boolean + ) /** Determines how to enable [[AutoPlugin]] in [[Context]]. */ def pluginEnable(context: Context, plugin: AutoPlugin): PluginEnable = @@ -344,13 +351,15 @@ private[sbt] object PluginsDebug { DeactivatePlugin(d, removeToDeactivate, newlySelected) } - PluginRequirements(plugin, - context, - blockingExcludes, - addToExistingPlugins, - extraPlugins, - willRemove, - deactivate) + PluginRequirements( + plugin, + context, + blockingExcludes, + addToExistingPlugins, + extraPlugins, + willRemove, + deactivate + ) } } @@ -376,13 +385,15 @@ private[sbt] object PluginsDebug { /** String representation of [[PluginEnable]], intended for end users. */ def explainPluginEnable(ps: PluginEnable): String = ps match { - case PluginRequirements(plugin, - context, - blockingExcludes, - enablingPlugins, - extraEnabledPlugins, - toBeRemoved, - deactivate) => + case PluginRequirements( + plugin, + _, + blockingExcludes, + enablingPlugins, + extraEnabledPlugins, + toBeRemoved, + deactivate + ) => def indent(str: String) = if (str.isEmpty) "" else s"\t$str" def note(str: String) = if (str.isEmpty) "" else s"Note: $str" val parts = @@ -393,9 +404,8 @@ private[sbt] object PluginsDebug { note(willRemove(plugin, toBeRemoved.toList)) :: Nil parts.filterNot(_.isEmpty).mkString("\n") - case PluginImpossible(plugin, context, contradictions) => - pluginImpossible(plugin, contradictions) - case PluginActivated(plugin, context) => s"Plugin ${plugin.label} already activated." + case PluginImpossible(plugin, _, contradictions) => pluginImpossible(plugin, contradictions) + case PluginActivated(plugin, _) => s"Plugin ${plugin.label} already activated." 
} /** @@ -491,8 +501,9 @@ private[sbt] object PluginsDebug { s"$s1 $s2 $s3" } - private[this] def pluginImpossibleN(plugin: AutoPlugin)( - contradictions: List[AutoPlugin]): String = { + private[this] def pluginImpossibleN( + plugin: AutoPlugin + )(contradictions: List[AutoPlugin]): String = { val s1 = s"There is no way to enable plugin ${plugin.label}." val s2 = s"It (or its dependencies) requires these plugins to be both present and absent:" val s3 = s"Please report the problem to the plugin's author." diff --git a/main/src/main/scala/sbt/internal/ProjectNavigation.scala b/main/src/main/scala/sbt/internal/ProjectNavigation.scala index 305967d06..b7f04ffd1 100644 --- a/main/src/main/scala/sbt/internal/ProjectNavigation.scala +++ b/main/src/main/scala/sbt/internal/ProjectNavigation.scala @@ -49,7 +49,8 @@ final class ProjectNavigation(s: State) { setProject(uri, to) else fail( - s"Invalid project name '$to' in build $uri (type 'projects' to list available projects).") + s"Invalid project name '$to' in build $uri (type 'projects' to list available projects)." 
+ ) def changeBuild(newBuild: URI): State = if (structure.units contains newBuild) diff --git a/main/src/main/scala/sbt/internal/RelayAppender.scala b/main/src/main/scala/sbt/internal/RelayAppender.scala index 3e0daf474..2898b5bdc 100644 --- a/main/src/main/scala/sbt/internal/RelayAppender.scala +++ b/main/src/main/scala/sbt/internal/RelayAppender.scala @@ -26,7 +26,7 @@ class RelayAppender(name: String) val level = ConsoleAppender.toLevel(event.getLevel) val message = event.getMessage message match { - case o: ObjectMessage => appendEvent(level, o.getParameter) + case o: ObjectMessage => appendEvent(o.getParameter) case p: ParameterizedMessage => appendLog(level, p.getFormattedMessage) case r: RingBufferLogEvent => appendLog(level, r.getFormattedMessage) case _ => appendLog(level, message.toString) @@ -35,7 +35,7 @@ class RelayAppender(name: String) def appendLog(level: Level.Value, message: => String): Unit = { exchange.publishEventMessage(LogEvent(level.toString, message)) } - def appendEvent(level: Level.Value, event: AnyRef): Unit = + def appendEvent(event: AnyRef): Unit = event match { case x: StringEvent => { import JsonProtocol._ diff --git a/main/src/main/scala/sbt/internal/Resolve.scala b/main/src/main/scala/sbt/internal/Resolve.scala index 9797bbab8..761ba5dd3 100644 --- a/main/src/main/scala/sbt/internal/Resolve.scala +++ b/main/src/main/scala/sbt/internal/Resolve.scala @@ -11,21 +11,23 @@ package internal import sbt.internal.util.AttributeKey object Resolve { - def apply(index: BuildUtil[_], - current: ScopeAxis[Reference], - key: AttributeKey[_], - mask: ScopeMask): Scope => Scope = { - val rs = - resolveProject(current, mask) _ :: - resolveExtra(mask) _ :: - resolveTask(mask) _ :: - resolveConfig(index, key, mask) _ :: - Nil + def apply( + index: BuildUtil[_], + current: ScopeAxis[Reference], + key: AttributeKey[_], + mask: ScopeMask, + ): Scope => Scope = { + val rs = ( + resolveProject(current, mask) _ + :: resolveExtra(mask) _ + :: 
resolveTask(mask) _ + :: resolveConfig(index, key, mask) _ + :: Nil + ) scope => - (scope /: rs) { (s, f) => - f(s) - } + rs.foldLeft(scope)((s, f) => f(s)) } + def resolveTask(mask: ScopeMask)(scope: Scope): Scope = if (mask.task) scope else scope.copy(task = Zero) @@ -39,16 +41,16 @@ object Resolve { else scope.copy(extra = Zero) def resolveConfig[P](index: BuildUtil[P], key: AttributeKey[_], mask: ScopeMask)( - scope: Scope): Scope = + scope: Scope, + ): Scope = if (mask.config) scope else { val (resolvedRef, proj) = scope.project match { + case Zero | This => (None, index.rootProject(index.root)) case Select(ref) => val r = index resolveRef ref (Some(r), index.projectFor(r)) - case Zero | This => - (None, index.rootProject(index.root)) } val task = scope.task.toOption val keyIndex = index.keyIndex diff --git a/main/src/main/scala/sbt/internal/Script.scala b/main/src/main/scala/sbt/internal/Script.scala index addf7d4ed..2192785a6 100644 --- a/main/src/main/scala/sbt/internal/Script.scala +++ b/main/src/main/scala/sbt/internal/Script.scala @@ -25,7 +25,8 @@ object Script { lazy val command = Command.command(Name) { state => val scriptArg = state.remainingCommands.headOption map { _.commandLine } getOrElse sys.error( - "No script file specified") + "No script file specified" + ) val scriptFile = new File(scriptArg).getAbsoluteFile val hash = Hash.halve(Hash.toHex(Hash(scriptFile.getAbsolutePath))) val base = new File(CommandUtil.bootDirectory(state), hash) @@ -51,14 +52,18 @@ object Script { } val scriptAsSource = sources in Compile := script :: Nil val asScript = scalacOptions ++= Seq("-Xscript", script.getName.stripSuffix(".scala")) - val scriptSettings = Seq(asScript, - scriptAsSource, - logLevel in Global := Level.Warn, - showSuccess in Global := false) - val append = Load.transformSettings(Load.projectScope(currentRef), - currentRef.build, - rootProject, - scriptSettings ++ embeddedSettings) + val scriptSettings = Seq( + asScript, + scriptAsSource, + logLevel 
in Global := Level.Warn, + showSuccess in Global := false + ) + val append = Load.transformSettings( + Load.projectScope(currentRef), + currentRef.build, + rootProject, + scriptSettings ++ embeddedSettings + ) val newStructure = Load.reapply(session.original ++ append, structure) val arguments = state.remainingCommands.drop(1).map(e => s""""${e.commandLine}"""") diff --git a/main/src/main/scala/sbt/internal/SessionSettings.scala b/main/src/main/scala/sbt/internal/SessionSettings.scala index 30911a384..ddca7d66c 100755 --- a/main/src/main/scala/sbt/internal/SessionSettings.scala +++ b/main/src/main/scala/sbt/internal/SessionSettings.scala @@ -40,8 +40,10 @@ final case class SessionSettings( currentEval: () => Eval ) { - assert(currentProject contains currentBuild, - s"Current build ($currentBuild) not associated with a current project.") + assert( + currentProject contains currentBuild, + s"Current build ($currentBuild) not associated with a current project." + ) /** * Modifiy the current state. @@ -52,9 +54,11 @@ final case class SessionSettings( * @return A new SessionSettings object */ def setCurrent(build: URI, project: String, eval: () => Eval): SessionSettings = - copy(currentBuild = build, - currentProject = currentProject.updated(build, project), - currentEval = eval) + copy( + currentBuild = build, + currentProject = currentProject.updated(build, project), + currentEval = eval + ) /** * @return The current ProjectRef with which we scope settings. @@ -147,7 +151,8 @@ object SessionSettings { val oldSettings = (oldState get Keys.sessionSettings).toList.flatMap(_.append).flatMap(_._2) if (newSession.append.isEmpty && oldSettings.nonEmpty) oldState.log.warn( - "Discarding " + pluralize(oldSettings.size, " session setting") + ". Use 'session save' to persist session settings.") + "Discarding " + pluralize(oldSettings.size, " session setting") + ". Use 'session save' to persist session settings." 
+ ) } def removeRanges[T](in: Seq[T], ranges: Seq[(Int, Int)]): Seq[T] = { @@ -197,10 +202,12 @@ object SessionSettings { reapply(newSession.copy(original = newSession.mergeSettings, append = Map.empty), s) } - def writeSettings(pref: ProjectRef, - settings: List[SessionSetting], - original: Seq[Setting[_]], - structure: BuildStructure): (Seq[SessionSetting], Seq[Setting[_]]) = { + def writeSettings( + pref: ProjectRef, + settings: List[SessionSetting], + original: Seq[Setting[_]], + structure: BuildStructure + ): (Seq[SessionSetting], Seq[Setting[_]]) = { val project = Project.getProject(pref, structure).getOrElse(sys.error("Invalid project reference " + pref)) val writeTo: File = BuildPaths @@ -224,9 +231,10 @@ object SessionSettings { val RangePosition(_, r @ LineRange(start, end)) = s.pos settings find (_._1.key == s.key) match { case Some(ss @ (ns, newLines)) if !ns.init.dependencies.contains(ns.key) => - val shifted = ns withPos RangePosition(path, - LineRange(start - offs, - start - offs + newLines.size)) + val shifted = ns withPos RangePosition( + path, + LineRange(start - offs, start - offs + newLines.size) + ) (offs + end - start - newLines.size, shifted :: olds, ss +: repl) case _ => val shifted = s withPos RangePosition(path, r shift -offs) @@ -324,9 +332,11 @@ save, save-all lazy val parser = token(Space) ~> (token("list-all" ^^^ new Print(true)) | token("list" ^^^ new Print(false)) | token( - "clear" ^^^ new Clear(false)) | + "clear" ^^^ new Clear(false) + ) | token("save-all" ^^^ new Save(true)) | token("save" ^^^ new Save(false)) | token( - "clear-all" ^^^ new Clear(true)) | + "clear-all" ^^^ new Clear(true) + ) | remove) lazy val remove = token("remove") ~> token(Space) ~> natSelect.map(ranges => new Remove(ranges)) diff --git a/main/src/main/scala/sbt/internal/SettingCompletions.scala b/main/src/main/scala/sbt/internal/SettingCompletions.scala index d9bbafbff..1952c5598 100644 --- a/main/src/main/scala/sbt/internal/SettingCompletions.scala +++ 
b/main/src/main/scala/sbt/internal/SettingCompletions.scala @@ -15,7 +15,7 @@ import sbt.librarymanagement.Configuration import Project._ import Def.{ ScopedKey, Setting } import Scope.Global -import Types.{ const, idFun } +import Types.idFun import complete._ import DefaultParsers._ @@ -24,9 +24,11 @@ import DefaultParsers._ * The verbose summary will typically use more vertical space and show full details, * while the quiet summary will be a couple of lines and truncate information. */ -private[sbt] class SetResult(val session: SessionSettings, - val verboseSummary: String, - val quietSummary: String) +private[sbt] class SetResult( + val session: SessionSettings, + val verboseSummary: String, + val quietSummary: String +) /** Defines methods for implementing the `set` command.*/ private[sbt] object SettingCompletions { @@ -41,9 +43,12 @@ private[sbt] object SettingCompletions { val r = relation(extracted.structure, true) val allDefs = Def .flattenLocals( - Def.compiled(extracted.structure.settings, true)(structure.delegates, - structure.scopeLocal, - implicitly[Show[ScopedKey[_]]])) + Def.compiled(extracted.structure.settings, true)( + structure.delegates, + structure.scopeLocal, + implicitly[Show[ScopedKey[_]]] + ) + ) .keys val projectScope = Load.projectScope(currentRef) def resolve(s: Setting[_]): Seq[Setting[_]] = @@ -64,34 +69,38 @@ private[sbt] object SettingCompletions { setResult(session, r, redefined) } - /** Implementation of the `set` command that will reload the current project with `settings` appended to the current settings. */ - def setThis(s: State, - extracted: Extracted, - settings: Seq[Def.Setting[_]], - arg: String): SetResult = { + /** Implementation of the `set` command that will reload the current project with `settings` + * appended to the current settings. 
+ */ + def setThis(extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String): SetResult = { import extracted._ val append = Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings) val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList))) - val r = relation(newSession.mergeSettings, true)(structure.delegates, - structure.scopeLocal, - implicitly) + val r = relation(newSession.mergeSettings, true)( + structure.delegates, + structure.scopeLocal, + implicitly + ) setResult(newSession, r, append) } private[this] def setResult( session: SessionSettings, r: Relation[ScopedKey[_], ScopedKey[_]], - redefined: Seq[Setting[_]])(implicit show: Show[ScopedKey[_]]): SetResult = { + redefined: Seq[Setting[_]], + )(implicit show: Show[ScopedKey[_]]): SetResult = { val redefinedKeys = redefined.map(_.key).toSet val affectedKeys = redefinedKeys.flatMap(r.reverse) def summary(verbose: Boolean): String = setSummary(redefinedKeys, affectedKeys, verbose) new SetResult(session, summary(true), summary(false)) } - private[this] def setSummary(redefined: Set[ScopedKey[_]], - affected: Set[ScopedKey[_]], - verbose: Boolean)(implicit display: Show[ScopedKey[_]]): String = { + private[this] def setSummary( + redefined: Set[ScopedKey[_]], + affected: Set[ScopedKey[_]], + verbose: Boolean, + )(implicit display: Show[ScopedKey[_]]): String = { val QuietLimit = 3 def strings(in: Set[ScopedKey[_]]): Seq[String] = in.toSeq.map(sk => display.show(sk)).sorted def lines(in: Seq[String]): (String, Boolean) = @@ -129,17 +138,17 @@ private[sbt] object SettingCompletions { * when there are fewer choices or tab is pressed multiple times. * The last part of the completion will generate a template for the value or function literal that will initialize the setting or task. 
*/ - def settingParser(settings: Settings[Scope], - rawKeyMap: Map[String, AttributeKey[_]], - context: ResolvedProject): Parser[String] = { - val keyMap - : Map[String, AttributeKey[_]] = rawKeyMap.map { case (k, v) => (keyScalaID(k), v) }.toMap - def inputScopedKey(pred: AttributeKey[_] => Boolean): Parser[ScopedKey[_]] = - scopedKeyParser(keyMap.filter { case (_, k) => pred(k) }, settings, context) + def settingParser( + settings: Settings[Scope], + rawKeyMap: Map[String, AttributeKey[_]], + context: ResolvedProject, + ): Parser[String] = { + val keyMap: Map[String, AttributeKey[_]] = + rawKeyMap.map { case (k, v) => (keyScalaID(k), v) }.toMap val full = for { defineKey <- scopedKeyParser(keyMap, settings, context) a <- assign(defineKey) - _ <- valueParser(defineKey, a, inputScopedKey(keyFilter(defineKey.key))) + _ <- valueParser(defineKey, a) } yield () // parser is currently only for completion and the parsed data structures are not used @@ -147,9 +156,11 @@ private[sbt] object SettingCompletions { } /** Parser for a Scope+AttributeKey (ScopedKey). */ - def scopedKeyParser(keyMap: Map[String, AttributeKey[_]], - settings: Settings[Scope], - context: ResolvedProject): Parser[ScopedKey[_]] = { + def scopedKeyParser( + keyMap: Map[String, AttributeKey[_]], + settings: Settings[Scope], + context: ResolvedProject + ): Parser[ScopedKey[_]] = { val cutoff = KeyRanks.MainCutoff val keyCompletions = fixedCompletions { (seen, level) => completeKey(seen, keyMap, level, cutoff, 10).toSet @@ -167,9 +178,7 @@ private[sbt] object SettingCompletions { * Parser for the initialization expression for the assignment method `assign` on the key `sk`. * `scopedKeyP` is used to parse and complete the input keys for an initialization that depends on other keys. 
*/ - def valueParser(sk: ScopedKey[_], - assign: Assign.Value, - scopedKeyP: Parser[ScopedKey[_]]): Parser[Seq[ScopedKey[_]]] = { + def valueParser(sk: ScopedKey[_], assign: Assign.Value): Parser[Seq[ScopedKey[_]]] = { val fullTypeString = keyTypeString(sk.key) val typeString = if (assignNoAppend(assign)) fullTypeString else "..." if (assign == Assign.Update) { @@ -181,38 +190,35 @@ private[sbt] object SettingCompletions { } } - /** - * For a setting definition `definingKey <<= (..., in, ...) { ... }`, - * `keyFilter(definingKey)(in)` returns true when `in` is an allowed input for `definingKey` based on whether they are settings or not. - * For example, if `definingKey` is for a setting, `in` may only be a setting itself. - */ - def keyFilter(definingKey: AttributeKey[_]): AttributeKey[_] => Boolean = - if (isSetting(definingKey)) isSetting _ else isTaskOrSetting _ - /** * Parser for a Scope for a `key` given the current project `context` and evaluated `settings`. * The completions are restricted to be more useful. Currently, this parser will suggest * only known axis values for configurations and tasks and only in that order. 
*/ - def scopeParser(key: AttributeKey[_], - settings: Settings[Scope], - context: ResolvedProject): Parser[Scope] = { + def scopeParser( + key: AttributeKey[_], + settings: Settings[Scope], + context: ResolvedProject + ): Parser[Scope] = { val data = settings.data val allScopes = data.keys.toSeq val definedScopes = data.toSeq flatMap { case (scope, attrs) => if (attrs contains key) scope :: Nil else Nil } - scope(key, allScopes, definedScopes, context) + scope(allScopes, definedScopes, context) } - private[this] def scope(key: AttributeKey[_], - allScopes: Seq[Scope], - definedScopes: Seq[Scope], - context: ResolvedProject): Parser[Scope] = { - def axisParser[T](axis: Scope => ScopeAxis[T], - name: T => String, - description: T => Option[String], - label: String): Parser[ScopeAxis[T]] = { + private[this] def scope( + allScopes: Seq[Scope], + definedScopes: Seq[Scope], + context: ResolvedProject, + ): Parser[Scope] = { + def axisParser[T]( + axis: Scope => ScopeAxis[T], + name: T => String, + description: T => Option[String], + label: String, + ): Parser[ScopeAxis[T]] = { def getChoice(s: Scope): Seq[(String, T)] = axis(s) match { case Select(t) => (name(t), t) :: Nil case _ => Nil @@ -220,19 +226,23 @@ private[sbt] object SettingCompletions { def getChoices(scopes: Seq[Scope]): Map[String, T] = scopes.flatMap(getChoice).toMap val definedChoices: Set[String] = definedScopes.flatMap(s => axis(s).toOption.map(name)).toSet - val fullChoices: Map[String, T] = getChoices(allScopes.toSeq) + val fullChoices: Map[String, T] = getChoices(allScopes) val completions = fixedCompletions { (seen, level) => completeScope(seen, level, definedChoices, fullChoices)(description).toSet } - Act.optionalAxis(inParser ~> token(Space) ~> token(scalaID(fullChoices, label), completions), - This) + Act.optionalAxis( + inParser ~> token(Space) ~> token(scalaID(fullChoices, label), completions), + This, + ) } val configurations: Map[String, Configuration] = context.configurations.map(c => 
(configScalaID(c.name), c)).toMap - val configParser = axisParser[ConfigKey](_.config, - c => configScalaID(c.name), - ck => configurations.get(ck.name).map(_.description), - "configuration") + val configParser = axisParser[ConfigKey]( + _.config, + c => configScalaID(c.name), + ck => configurations.get(ck.name).map(_.description), + "configuration", + ) val taskParser = axisParser[AttributeKey[_]](_.task, k => keyScalaID(k.label), _.description, "task") val nonGlobal = (configParser ~ taskParser) map { case (c, t) => Scope(This, c, t, Zero) } @@ -242,8 +252,8 @@ private[sbt] object SettingCompletions { /** Parser for the assignment method (such as `:=`) for defining `key`. */ def assign(key: ScopedKey[_]): Parser[Assign.Value] = { - val completions = fixedCompletions { (seen, level) => - completeAssign(seen, level, key).toSet + val completions = fixedCompletions { (seen, _) => + completeAssign(seen, key).toSet } val identifier = Act.filterStrings(Op, Assign.values.map(_.toString), "assignment method") map Assign.withName token(Space) ~> token(optionallyQuoted(identifier), completions) @@ -267,7 +277,7 @@ private[sbt] object SettingCompletions { * Completions for an assignment method for `key` given the tab completion `level` and existing partial string `seen`. * This will filter possible assignment methods based on the underlying type of `key`, so that only `<<=` is shown for input tasks, for example. 
*/ - def completeAssign(seen: String, level: Int, key: ScopedKey[_]): Seq[Completion] = { + def completeAssign(seen: String, key: ScopedKey[_]): Seq[Completion] = { val allowed: Iterable[Assign.Value] = if (appendable(key.key)) Assign.values else assignNoAppend @@ -278,36 +288,41 @@ private[sbt] object SettingCompletions { completeDescribed(seen, true, applicable)(assignDescription) } - def completeKey(seen: String, - keys: Map[String, AttributeKey[_]], - level: Int, - prominentCutoff: Int, - detailLimit: Int): Seq[Completion] = + def completeKey( + seen: String, + keys: Map[String, AttributeKey[_]], + level: Int, + prominentCutoff: Int, + detailLimit: Int + ): Seq[Completion] = completeSelectDescribed(seen, level, keys, detailLimit)(_.description) { - case (k, v) => v.rank <= prominentCutoff + case (_, v) => v.rank <= prominentCutoff } def completeScope[T]( seen: String, level: Int, definedChoices: Set[String], - allChoices: Map[String, T])(description: T => Option[String]): Seq[Completion] = + allChoices: Map[String, T] + )(description: T => Option[String]): Seq[Completion] = completeSelectDescribed(seen, level, allChoices, 10)(description) { - case (k, v) => definedChoices(k) + case (k, _) => definedChoices(k) } def completeSelectDescribed[T](seen: String, level: Int, all: Map[String, T], detailLimit: Int)( - description: T => Option[String])(prominent: (String, T) => Boolean): Seq[Completion] = { - val applicable = all.toSeq.filter { case (k, v) => k startsWith seen } + description: T => Option[String] + )(prominent: (String, T) => Boolean): Seq[Completion] = { + val applicable = all.toSeq.filter { case (k, _) => k startsWith seen } val prominentOnly = applicable filter { case (k, v) => prominent(k, v) } - val showAll = (level >= 3) || (level == 2 && prominentOnly.size <= detailLimit) || prominentOnly.isEmpty + val showAll = (level >= 3) || (level == 2 && prominentOnly.lengthCompare(detailLimit) <= 0) || prominentOnly.isEmpty val showKeys = if (showAll) 
applicable else prominentOnly - val showDescriptions = (level >= 2) || (showKeys.size <= detailLimit) + val showDescriptions = (level >= 2) || showKeys.lengthCompare(detailLimit) <= 0 completeDescribed(seen, showDescriptions, showKeys)(s => description(s).toList.mkString) } def completeDescribed[T](seen: String, showDescriptions: Boolean, in: Seq[(String, T)])( - description: T => String): Seq[Completion] = { + description: T => String + ): Seq[Completion] = { def appendString(id: String): String = id.stripPrefix(seen) + " " if (in.isEmpty) Nil @@ -315,14 +330,11 @@ private[sbt] object SettingCompletions { val withDescriptions = in map { case (id, key) => (id, description(key)) } val padded = CommandUtil.aligned("", " ", withDescriptions) (padded, in).zipped.map { - case (line, (id, key)) => + case (line, (id, _)) => Completion.tokenDisplay(append = appendString(id), display = line + "\n") } } else - in map { - case (id, key) => - Completion.tokenDisplay(display = id, append = appendString(id)) - } + in map { case (id, _) => Completion.tokenDisplay(display = id, append = appendString(id)) } } /** @@ -341,7 +353,8 @@ private[sbt] object SettingCompletions { def keyType[S](key: AttributeKey[_])( onSetting: Manifest[_] => S, onTask: Manifest[_] => S, - onInput: Manifest[_] => S)(implicit tm: Manifest[Task[_]], im: Manifest[InputTask[_]]): S = { + onInput: Manifest[_] => S + )(implicit tm: Manifest[Task[_]], im: Manifest[InputTask[_]]): S = { def argTpe = key.manifest.typeArguments.head val TaskClass = tm.runtimeClass val InputTaskClass = im.runtimeClass @@ -364,18 +377,6 @@ private[sbt] object SettingCompletions { keyType(key)(mfToString, mfToString, mfToString) } - /** True if the `key` represents an input task, false if it represents a task or setting. 
*/ - def isInputTask(key: AttributeKey[_]): Boolean = - keyType(key)(const(false), const(false), const(true)) - - /** True if the `key` represents a setting, false if it represents a task or an input task.*/ - def isSetting(key: AttributeKey[_]): Boolean = - keyType(key)(const(true), const(false), const(false)) - - /** True if the `key` represents a setting or task, false if it is for an input task. */ - def isTaskOrSetting(key: AttributeKey[_]): Boolean = - keyType(key)(const(true), const(true), const(false)) - /** True if the `key` represents a setting or task that may be appended using an assignment method such as `+=`. */ def appendable(key: AttributeKey[_]): Boolean = { val underlying = keyUnderlyingType(key).runtimeClass diff --git a/main/src/main/scala/sbt/internal/SettingGraph.scala b/main/src/main/scala/sbt/internal/SettingGraph.scala index 33f1b9a5c..68fab0353 100644 --- a/main/src/main/scala/sbt/internal/SettingGraph.scala +++ b/main/src/main/scala/sbt/internal/SettingGraph.scala @@ -19,9 +19,11 @@ import sbt.io.IO object SettingGraph { def apply(structure: BuildStructure, basedir: File, scoped: ScopedKey[_], generation: Int)( - implicit display: Show[ScopedKey[_]]): SettingGraph = { + implicit display: Show[ScopedKey[_]] + ): SettingGraph = { val cMap = flattenLocals( - compiled(structure.settings, false)(structure.delegates, structure.scopeLocal, display)) + compiled(structure.settings, false)(structure.delegates, structure.scopeLocal, display) + ) def loop(scoped: ScopedKey[_], generation: Int): SettingGraph = { val key = scoped.key val scope = scoped.scope @@ -34,14 +36,16 @@ object SettingGraph { // val related = cMap.keys.filter(k => k.key == key && k.scope != scope) // val reverse = reverseDependencies(cMap, scoped) - SettingGraph(display.show(scoped), - definedIn, - Project.scopedKeyData(structure, scope, key), - key.description, - basedir, - depends map { (x: ScopedKey[_]) => - loop(x, generation + 1) - }) + SettingGraph( + display.show(scoped), 
+ definedIn, + Project.scopedKeyData(structure, scope, key), + key.description, + basedir, + depends map { (x: ScopedKey[_]) => + loop(x, generation + 1) + } + ) } loop(scoped, generation) } @@ -99,7 +103,7 @@ object Graph { val withBar = childLines.zipWithIndex flatMap { case ((line, withBar), pos) if pos < (cs.size - 1) => (line +: withBar) map { insertBar(_, 2 * (level + 1)) } - case ((line, withBar), pos) if withBar.lastOption.getOrElse(line).trim != "" => + case ((line, withBar), _) if withBar.lastOption.getOrElse(line).trim != "" => (line +: withBar) ++ Vector(twoSpaces * (level + 1)) case ((line, withBar), _) => line +: withBar } diff --git a/main/src/main/scala/sbt/internal/TaskSequential.scala b/main/src/main/scala/sbt/internal/TaskSequential.scala index 8fbab839e..d21fa1b8f 100644 --- a/main/src/main/scala/sbt/internal/TaskSequential.scala +++ b/main/src/main/scala/sbt/internal/TaskSequential.scala @@ -21,60 +21,81 @@ trait TaskSequential { last: Initialize[Task[B]] ): Initialize[Task[B]] = sequential(List(unitTask(task0)), last) - def sequential[A0, A1, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[A0, A1, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential(List(unitTask(task0), unitTask(task1)), last) - def sequential[A0, A1, A2, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[A0, A1, A2, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential(List(unitTask(task0), unitTask(task1), unitTask(task2)), last) - def sequential[A0, A1, A2, A3, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - last: 
Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[A0, A1, A2, A3, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential(List(unitTask(task0), unitTask(task1), unitTask(task2), unitTask(task3)), last) - def sequential[A0, A1, A2, A3, A4, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[A0, A1, A2, A3, A4, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List(unitTask(task0), unitTask(task1), unitTask(task2), unitTask(task3), unitTask(task4)), - last) - def sequential[A0, A1, A2, A3, A4, A5, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - last: Initialize[Task[B]]): Initialize[Task[B]] = - sequential(List(unitTask(task0), - unitTask(task1), - unitTask(task2), - unitTask(task3), - unitTask(task4), - unitTask(task5)), - last) - def sequential[A0, A1, A2, A3, A4, A5, A6, B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - task6: Initialize[Task[A6]], - last: Initialize[Task[B]]): Initialize[Task[B]] = - sequential(List(unitTask(task0), - unitTask(task1), - unitTask(task2), - unitTask(task3), - unitTask(task4), - unitTask(task5), - unitTask(task6)), - last) + last + ) + def sequential[A0, A1, A2, A3, A4, A5, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], 
+ task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = + sequential( + List( + unitTask(task0), + unitTask(task1), + unitTask(task2), + unitTask(task3), + unitTask(task4), + unitTask(task5) + ), + last + ) + def sequential[A0, A1, A2, A3, A4, A5, A6, B]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + task6: Initialize[Task[A6]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = + sequential( + List( + unitTask(task0), + unitTask(task1), + unitTask(task2), + unitTask(task3), + unitTask(task4), + unitTask(task5), + unitTask(task6) + ), + last + ) def sequential[A0, A1, A2, A3, A4, A5, A6, A7, B]( task0: Initialize[Task[A0]], task1: Initialize[Task[A1]], @@ -84,16 +105,21 @@ trait TaskSequential { task5: Initialize[Task[A5]], task6: Initialize[Task[A6]], task7: Initialize[Task[A7]], - last: Initialize[Task[B]]): Initialize[Task[B]] = - sequential(List(unitTask(task0), - unitTask(task1), - unitTask(task2), - unitTask(task3), - unitTask(task4), - unitTask(task5), - unitTask(task6), - unitTask(task7)), - last) + last: Initialize[Task[B]] + ): Initialize[Task[B]] = + sequential( + List( + unitTask(task0), + unitTask(task1), + unitTask(task2), + unitTask(task3), + unitTask(task4), + unitTask(task5), + unitTask(task6), + unitTask(task7) + ), + last + ) def sequential[A0, A1, A2, A3, A4, A5, A6, A7, A8, B]( task0: Initialize[Task[A0]], task1: Initialize[Task[A1]], @@ -104,17 +130,20 @@ trait TaskSequential { task6: Initialize[Task[A6]], task7: Initialize[Task[A7]], task8: Initialize[Task[A8]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( - List(unitTask(task0), - unitTask(task1), - unitTask(task2), - unitTask(task3), - unitTask(task4), - unitTask(task5), - 
unitTask(task6), - unitTask(task7), - unitTask(task8)), + List( + unitTask(task0), + unitTask(task1), + unitTask(task2), + unitTask(task3), + unitTask(task4), + unitTask(task5), + unitTask(task6), + unitTask(task7), + unitTask(task8) + ), last ) def sequential[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, B]( @@ -128,7 +157,8 @@ trait TaskSequential { task7: Initialize[Task[A7]], task8: Initialize[Task[A8]], task9: Initialize[Task[A9]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -156,7 +186,8 @@ trait TaskSequential { task8: Initialize[Task[A8]], task9: Initialize[Task[A9]], task10: Initialize[Task[A10]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -186,7 +217,8 @@ trait TaskSequential { task9: Initialize[Task[A9]], task10: Initialize[Task[A10]], task11: Initialize[Task[A11]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -218,7 +250,8 @@ trait TaskSequential { task10: Initialize[Task[A10]], task11: Initialize[Task[A11]], task12: Initialize[Task[A12]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -252,7 +285,8 @@ trait TaskSequential { task11: Initialize[Task[A11]], task12: Initialize[Task[A12]], task13: Initialize[Task[A13]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -288,7 +322,8 @@ trait TaskSequential { task12: Initialize[Task[A12]], task13: Initialize[Task[A13]], task14: Initialize[Task[A14]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -326,7 +361,8 @@ trait 
TaskSequential { task13: Initialize[Task[A13]], task14: Initialize[Task[A14]], task15: Initialize[Task[A15]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -366,7 +402,8 @@ trait TaskSequential { task14: Initialize[Task[A14]], task15: Initialize[Task[A15]], task16: Initialize[Task[A16]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -433,45 +470,49 @@ trait TaskSequential { ), last ) - def sequential[A0, - A1, - A2, - A3, - A4, - A5, - A6, - A7, - A8, - A9, - A10, - A11, - A12, - A13, - A14, - A15, - A16, - A17, - A18, - B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - task6: Initialize[Task[A6]], - task7: Initialize[Task[A7]], - task8: Initialize[Task[A8]], - task9: Initialize[Task[A9]], - task10: Initialize[Task[A10]], - task11: Initialize[Task[A11]], - task12: Initialize[Task[A12]], - task13: Initialize[Task[A13]], - task14: Initialize[Task[A14]], - task15: Initialize[Task[A15]], - task16: Initialize[Task[A16]], - task17: Initialize[Task[A17]], - task18: Initialize[Task[A18]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[ + A0, + A1, + A2, + A3, + A4, + A5, + A6, + A7, + A8, + A9, + A10, + A11, + A12, + A13, + A14, + A15, + A16, + A17, + A18, + B + ]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + task6: Initialize[Task[A6]], + task7: Initialize[Task[A7]], + task8: Initialize[Task[A8]], + task9: Initialize[Task[A9]], + task10: Initialize[Task[A10]], + task11: Initialize[Task[A11]], + task12: Initialize[Task[A12]], + task13: Initialize[Task[A13]], + task14: 
Initialize[Task[A14]], + task15: Initialize[Task[A15]], + task16: Initialize[Task[A16]], + task17: Initialize[Task[A17]], + task18: Initialize[Task[A18]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -496,47 +537,51 @@ trait TaskSequential { ), last ) - def sequential[A0, - A1, - A2, - A3, - A4, - A5, - A6, - A7, - A8, - A9, - A10, - A11, - A12, - A13, - A14, - A15, - A16, - A17, - A18, - A19, - B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - task6: Initialize[Task[A6]], - task7: Initialize[Task[A7]], - task8: Initialize[Task[A8]], - task9: Initialize[Task[A9]], - task10: Initialize[Task[A10]], - task11: Initialize[Task[A11]], - task12: Initialize[Task[A12]], - task13: Initialize[Task[A13]], - task14: Initialize[Task[A14]], - task15: Initialize[Task[A15]], - task16: Initialize[Task[A16]], - task17: Initialize[Task[A17]], - task18: Initialize[Task[A18]], - task19: Initialize[Task[A19]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[ + A0, + A1, + A2, + A3, + A4, + A5, + A6, + A7, + A8, + A9, + A10, + A11, + A12, + A13, + A14, + A15, + A16, + A17, + A18, + A19, + B + ]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + task6: Initialize[Task[A6]], + task7: Initialize[Task[A7]], + task8: Initialize[Task[A8]], + task9: Initialize[Task[A9]], + task10: Initialize[Task[A10]], + task11: Initialize[Task[A11]], + task12: Initialize[Task[A12]], + task13: Initialize[Task[A13]], + task14: Initialize[Task[A14]], + task15: Initialize[Task[A15]], + task16: Initialize[Task[A16]], + task17: Initialize[Task[A17]], + task18: Initialize[Task[A18]], + task19: Initialize[Task[A19]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = 
sequential( List( unitTask(task0), @@ -562,49 +607,53 @@ trait TaskSequential { ), last ) - def sequential[A0, - A1, - A2, - A3, - A4, - A5, - A6, - A7, - A8, - A9, - A10, - A11, - A12, - A13, - A14, - A15, - A16, - A17, - A18, - A19, - A20, - B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - task6: Initialize[Task[A6]], - task7: Initialize[Task[A7]], - task8: Initialize[Task[A8]], - task9: Initialize[Task[A9]], - task10: Initialize[Task[A10]], - task11: Initialize[Task[A11]], - task12: Initialize[Task[A12]], - task13: Initialize[Task[A13]], - task14: Initialize[Task[A14]], - task15: Initialize[Task[A15]], - task16: Initialize[Task[A16]], - task17: Initialize[Task[A17]], - task18: Initialize[Task[A18]], - task19: Initialize[Task[A19]], - task20: Initialize[Task[A20]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[ + A0, + A1, + A2, + A3, + A4, + A5, + A6, + A7, + A8, + A9, + A10, + A11, + A12, + A13, + A14, + A15, + A16, + A17, + A18, + A19, + A20, + B + ]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + task6: Initialize[Task[A6]], + task7: Initialize[Task[A7]], + task8: Initialize[Task[A8]], + task9: Initialize[Task[A9]], + task10: Initialize[Task[A10]], + task11: Initialize[Task[A11]], + task12: Initialize[Task[A12]], + task13: Initialize[Task[A13]], + task14: Initialize[Task[A14]], + task15: Initialize[Task[A15]], + task16: Initialize[Task[A16]], + task17: Initialize[Task[A17]], + task18: Initialize[Task[A18]], + task19: Initialize[Task[A19]], + task20: Initialize[Task[A20]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -631,51 +680,55 @@ trait TaskSequential { ), last ) - def sequential[A0, - A1, - A2, - A3, - A4, - 
A5, - A6, - A7, - A8, - A9, - A10, - A11, - A12, - A13, - A14, - A15, - A16, - A17, - A18, - A19, - A20, - A21, - B](task0: Initialize[Task[A0]], - task1: Initialize[Task[A1]], - task2: Initialize[Task[A2]], - task3: Initialize[Task[A3]], - task4: Initialize[Task[A4]], - task5: Initialize[Task[A5]], - task6: Initialize[Task[A6]], - task7: Initialize[Task[A7]], - task8: Initialize[Task[A8]], - task9: Initialize[Task[A9]], - task10: Initialize[Task[A10]], - task11: Initialize[Task[A11]], - task12: Initialize[Task[A12]], - task13: Initialize[Task[A13]], - task14: Initialize[Task[A14]], - task15: Initialize[Task[A15]], - task16: Initialize[Task[A16]], - task17: Initialize[Task[A17]], - task18: Initialize[Task[A18]], - task19: Initialize[Task[A19]], - task20: Initialize[Task[A20]], - task21: Initialize[Task[A21]], - last: Initialize[Task[B]]): Initialize[Task[B]] = + def sequential[ + A0, + A1, + A2, + A3, + A4, + A5, + A6, + A7, + A8, + A9, + A10, + A11, + A12, + A13, + A14, + A15, + A16, + A17, + A18, + A19, + A20, + A21, + B + ]( + task0: Initialize[Task[A0]], + task1: Initialize[Task[A1]], + task2: Initialize[Task[A2]], + task3: Initialize[Task[A3]], + task4: Initialize[Task[A4]], + task5: Initialize[Task[A5]], + task6: Initialize[Task[A6]], + task7: Initialize[Task[A7]], + task8: Initialize[Task[A8]], + task9: Initialize[Task[A9]], + task10: Initialize[Task[A10]], + task11: Initialize[Task[A11]], + task12: Initialize[Task[A12]], + task13: Initialize[Task[A13]], + task14: Initialize[Task[A14]], + task15: Initialize[Task[A15]], + task16: Initialize[Task[A16]], + task17: Initialize[Task[A17]], + task18: Initialize[Task[A18]], + task19: Initialize[Task[A19]], + task20: Initialize[Task[A20]], + task21: Initialize[Task[A21]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = sequential( List( unitTask(task0), @@ -704,8 +757,10 @@ trait TaskSequential { last ) - def sequential[B](tasks: Seq[Initialize[Task[Unit]]], - last: Initialize[Task[B]]): Initialize[Task[B]] = 
+ def sequential[B]( + tasks: Seq[Initialize[Task[Unit]]], + last: Initialize[Task[B]] + ): Initialize[Task[B]] = tasks.toList match { case Nil => Def.task { last.value } case x :: xs => diff --git a/main/src/main/scala/sbt/internal/TaskTimings.scala b/main/src/main/scala/sbt/internal/TaskTimings.scala index 2c39ffc43..9e095fdc3 100644 --- a/main/src/main/scala/sbt/internal/TaskTimings.scala +++ b/main/src/main/scala/sbt/internal/TaskTimings.scala @@ -52,16 +52,18 @@ private[sbt] final class TaskTimings(shutdown: Boolean) extends ExecuteProgress[ if (!shutdown) start = System.nanoTime } - def registered(state: Unit, - task: Task[_], - allDeps: Iterable[Task[_]], - pendingDeps: Iterable[Task[_]]) = { + def registered( + state: Unit, + task: Task[_], + allDeps: Iterable[Task[_]], + pendingDeps: Iterable[Task[_]] + ) = { pendingDeps foreach { t => if (transformNode(t).isEmpty) anonOwners.put(t, task) } } def ready(state: Unit, task: Task[_]) = () - def workStarting(task: Task[_]) = timings.put(task, System.nanoTime) + def workStarting(task: Task[_]) = { timings.put(task, System.nanoTime); () } def workFinished[T](task: Task[T], result: Either[Task[T], Result[T]]) = { timings.put(task, System.nanoTime - timings.get(task)) result.left.foreach { t => @@ -81,7 +83,7 @@ private[sbt] final class TaskTimings(shutdown: Boolean) extends ExecuteProgress[ println(s"Total time: $total $unit") import collection.JavaConverters._ def sumTimes(in: Seq[(Task[_], Long)]) = in.map(_._2).sum - val timingsByName = timings.asScala.toSeq.groupBy { case (t, time) => mappedName(t) } mapValues (sumTimes) + val timingsByName = timings.asScala.toSeq.groupBy { case (t, _) => mappedName(t) } mapValues (sumTimes) val times = timingsByName.toSeq .sortBy(_._2) .reverse diff --git a/main/src/main/scala/sbt/internal/parser/SbtParser.scala b/main/src/main/scala/sbt/internal/parser/SbtParser.scala index 1a7f52d57..2b170b570 100644 --- a/main/src/main/scala/sbt/internal/parser/SbtParser.scala +++ 
b/main/src/main/scala/sbt/internal/parser/SbtParser.scala @@ -85,7 +85,8 @@ private[sbt] object SbtParser { val reporter = reporters.get(fileName) if (reporter == null) { scalacGlobalInitReporter.getOrElse( - sys.error(s"Sbt forgot to initialize `scalacGlobalInitReporter`.")) + sys.error(s"Sbt forgot to initialize `scalacGlobalInitReporter`.") + ) } else reporter } @@ -139,9 +140,11 @@ private[sbt] object SbtParser { * The reporter id must be unique per parsing session. * @return */ - private[sbt] def parse(code: String, - filePath: String, - reporterId0: Option[String]): (Seq[Tree], String) = { + private[sbt] def parse( + code: String, + filePath: String, + reporterId0: Option[String] + ): (Seq[Tree], String) = { import defaultGlobalForParser._ val reporterId = reporterId0.getOrElse(s"$filePath-${Random.nextInt}") val reporter = globalReporter.getOrCreateReporter(reporterId) @@ -206,7 +209,8 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed private def splitExpressions( file: File, - lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = { + lines: Seq[String] + ): (Seq[(String, Int)], Seq[(String, LineRange)], Seq[(String, Tree)]) = { import sbt.internal.parser.MissingBracketHandler.findMissingText val indexedLines = lines.toIndexedSeq @@ -226,7 +230,8 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed // Issue errors val positionLine = badTree.pos.line throw new MessageOnlyException( - s"""[$fileName]:$positionLine: Pattern matching in val statements is not supported""".stripMargin) + s"""[$fileName]:$positionLine: Pattern matching in val statements is not supported""".stripMargin + ) } val (imports: Seq[Tree], statements: Seq[Tree]) = parsedTrees partition { @@ -264,9 +269,9 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed } val stmtTreeLineRange = statements flatMap convertStatement val importsLineRange = 
importsToLineRanges(content, imports) - (importsLineRange, - stmtTreeLineRange.map { case (stmt, _, lr) => (stmt, lr) }, - stmtTreeLineRange.map { case (stmt, tree, _) => (stmt, tree) }) + (importsLineRange, stmtTreeLineRange.map { case (stmt, _, lr) => (stmt, lr) }, stmtTreeLineRange.map { + case (stmt, tree, _) => (stmt, tree) + }) } /** @@ -279,7 +284,7 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed modifiedContent: String, imports: Seq[Tree] ): Seq[(String, Int)] = { - val toLineRange = imports map convertImport(modifiedContent) + val toLineRange = imports map convertImport val groupedByLineNumber = toLineRange.groupBy { case (_, lineNumber) => lineNumber } val mergedImports = groupedByLineNumber.map { case (l, seq) => (l, extractLine(modifiedContent, seq)) @@ -288,12 +293,10 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed } /** - * - * @param modifiedContent - modifiedContent * @param t - tree - * @return ((start,end),lineNumber) + * @return ((start, end), lineNumber) */ - private def convertImport(modifiedContent: String)(t: Tree): ((Int, Int), Int) = { + private def convertImport(t: Tree): ((Int, Int), Int) = { val lineNumber = t.pos.line - 1 ((t.pos.start, t.pos.end), lineNumber) } @@ -304,8 +307,10 @@ private[sbt] case class SbtParser(file: File, lines: Seq[String]) extends Parsed * @param importsInOneLine - imports in line * @return - text */ - private def extractLine(modifiedContent: String, - importsInOneLine: Seq[((Int, Int), Int)]): String = { + private def extractLine( + modifiedContent: String, + importsInOneLine: Seq[((Int, Int), Int)] + ): String = { val (begin, end) = importsInOneLine.foldLeft((Int.MaxValue, Int.MinValue)) { case ((min, max), ((start, end), _)) => (min.min(start), max.max(end)) @@ -337,7 +342,8 @@ private[sbt] object MissingBracketHandler { positionLine: Int, fileName: String, originalException: Throwable, - reporterId: Option[String] = 
Some(Random.nextInt.toString)): String = { + reporterId: Option[String] = Some(Random.nextInt.toString) + ): String = { findClosingBracketIndex(content, positionEnd) match { case Some(index) => val text = content.substring(positionEnd, index + 1) @@ -346,16 +352,19 @@ private[sbt] object MissingBracketHandler { case Success(_) => text case Failure(_) => - findMissingText(content, - index + 1, - positionLine, - fileName, - originalException, - reporterId) + findMissingText( + content, + index + 1, + positionLine, + fileName, + originalException, + reporterId + ) } case _ => throw new MessageOnlyException( - s"""[$fileName]:$positionLine: ${originalException.getMessage}""".stripMargin) + s"""[$fileName]:$positionLine: ${originalException.getMessage}""".stripMargin + ) } } diff --git a/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala b/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala index 14e09e2bd..a82eb971e 100644 --- a/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala +++ b/main/src/main/scala/sbt/internal/parser/SbtRefactorings.scala @@ -26,8 +26,10 @@ private[sbt] object SbtRefactorings { * the first will be replaced and the other will be removed. * @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file. 
*/ - def applySessionSettings(configFile: SbtConfigFile, - commands: Seq[SessionSetting]): SbtConfigFile = { + def applySessionSettings( + configFile: SbtConfigFile, + commands: Seq[SessionSetting] + ): SbtConfigFile = { val (file, lines) = configFile val split = SbtParser(FAKE_FILE, lines) val recordedCommands = recordCommands(commands, split) @@ -37,8 +39,10 @@ private[sbt] object SbtRefactorings { (file, newContent.lines.toList) } - private def replaceFromBottomToTop(modifiedContent: String, - sortedRecordedCommands: Seq[(Int, String, String)]) = { + private def replaceFromBottomToTop( + modifiedContent: String, + sortedRecordedCommands: Seq[(Int, String, String)] + ) = { sortedRecordedCommands.foldLeft(modifiedContent) { case (acc, (from, old, replacement)) => val before = acc.substring(0, from) @@ -57,10 +61,7 @@ private[sbt] object SbtRefactorings { commands.flatMap { case (_, command) => val map = toTreeStringMap(command) - map.flatMap { - case (name, statement) => - treesToReplacements(split, name, command) - } + map.flatMap { case (name, _) => treesToReplacements(split, name, command) } } private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) = diff --git a/main/src/main/scala/sbt/internal/server/Definition.scala b/main/src/main/scala/sbt/internal/server/Definition.scala index 0c39d15cd..44e6e9efa 100644 --- a/main/src/main/scala/sbt/internal/server/Definition.scala +++ b/main/src/main/scala/sbt/internal/server/Definition.scala @@ -9,38 +9,47 @@ package sbt package internal package server -import sbt.io.IO -import sbt.internal.inc.MixedAnalyzingCompiler -import sbt.internal.langserver.ErrorCodes -import sbt.util.Logger +import java.io.File +import java.net.URI +import java.nio.file._ + import scala.annotation.tailrec +import scala.collection.JavaConverters._ import scala.concurrent.{ ExecutionContext, Future } -import scala.concurrent.duration.Duration.Inf -import scala.util.matching.Regex.MatchIterator -import 
java.nio.file.{ Files, Paths } -import sbt.StandardMain +import scala.concurrent.duration.Duration +import scala.reflect.NameTransformer +import scala.tools.reflect.{ ToolBox, ToolBoxError } +import scala.util.matching.Regex + +import sjsonnew.JsonFormat +import sjsonnew.shaded.scalajson.ast.unsafe.JValue +import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter } + +import scalacache._ + +import sbt.io.IO +import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler } +import sbt.internal.inc.JavaInterfaceUtil._ +import sbt.internal.protocol.JsonRpcResponseError +import sbt.internal.protocol.codec.JsonRPCProtocol +import sbt.internal.langserver +import sbt.internal.langserver.{ ErrorCodes, Location, Position, Range, TextDocumentPositionParams } +import sbt.util.Logger +import sbt.Keys._ private[sbt] object Definition { - import java.net.URI - import Keys._ - import sbt.internal.inc.Analysis - import sbt.internal.inc.JavaInterfaceUtil._ - val AnalysesKey = "lsp.definition.analyses.key" - - import sjsonnew.JsonFormat def send[A: JsonFormat](source: CommandSource, execId: String)(params: A): Unit = { for { channel <- StandardMain.exchange.channels.collectFirst { case c if c.name == source.channelName => c } - } yield { + } { channel.publishEvent(params, Option(execId)) } } object textProcessor { private val isIdentifier = { - import scala.tools.reflect.{ ToolBox, ToolBoxError } lazy val tb = scala.reflect.runtime.universe .runtimeMirror(this.getClass.getClassLoader) @@ -58,23 +67,14 @@ private[sbt] object Definition { private def findInBackticks(line: String, point: Int): Option[String] = { val (even, odd) = line.zipWithIndex - .collect { - case (char, backtickIndex) if char == '`' => - backtickIndex - } + .collect { case (char, backtickIndex) if char == '`' => backtickIndex } .zipWithIndex - .partition { bs => - val (_, index) = bs - index % 2 == 0 - } + .partition { case (_, index) => index % 2 == 0 } + even - .collect { - case (backtickIndex, _) => 
backtickIndex - } + .collect { case (backtickIndex, _) => backtickIndex } .zip { - odd.collect { - case (backtickIndex, _) => backtickIndex + 1 - } + odd.collect { case (backtickIndex, _) => backtickIndex + 1 } } .collectFirst { case (from, to) if from <= point && point < to => line.slice(from, to) @@ -83,43 +83,43 @@ private[sbt] object Definition { def identifier(line: String, point: Int): Option[String] = findInBackticks(line, point).orElse { val whiteSpaceReg = "(\\s|\\.)+".r + val (zero, end) = fold(Seq.empty)(whiteSpaceReg.findAllIn(line)) .collect { case (white, ind) => (ind, ind + white.length) } - .fold((0, line.length)) { (z, e) => - val (from, to) = e - val (left, right) = z - (if (to > left && to <= point) to else left, - if (from < right && from >= point) from else right) + .fold((0, line.length)) { + case ((left, right), (from, to)) => + val zero = if (to > left && to <= point) to else left + val end = if (from < right && from >= point) from else right + (zero, end) } + val ranges = for { from <- zero to point to <- point to end } yield (from -> to) + ranges - .sortBy { - case (from, to) => -(to - from) - } - .foldLeft(Seq.empty[String]) { (z, e) => - val (from, to) = e - val fragment = line.slice(from, to).trim - z match { - case Nil if fragment.nonEmpty && isIdentifier(fragment) => fragment +: z - case h +: _ if h.length < fragment.length && isIdentifier(fragment) => - Seq(fragment) - case h +: _ if h.length == fragment.length && isIdentifier(fragment) => - fragment +: z - case z => z - } + .sortBy { case (from, to) => -(to - from) } + .foldLeft(List.empty[String]) { + case (z, (from, to)) => + val fragment = line.slice(from, to).trim + if (isIdentifier(fragment)) + z match { + case Nil if fragment.nonEmpty => fragment :: z + case h :: _ if h.length < fragment.length => fragment :: Nil + case h :: _ if h.length == fragment.length => fragment :: z + case _ => z + } else z } .headOption } private def asClassObjectIdentifier(sym: String) = 
Seq(s".$sym", s".$sym$$", s"$$$sym", s"$$$sym$$") + def potentialClsOrTraitOrObj(sym: String): PartialFunction[String, String] = { - import scala.reflect.NameTransformer val encodedSym = NameTransformer.encode(sym.toSeq match { case '`' +: body :+ '`' => body.mkString case noBackticked => noBackticked.mkString @@ -135,17 +135,17 @@ private[sbt] object Definition { } @tailrec - private def fold(z: Seq[(String, Int)])(it: MatchIterator): Seq[(String, Int)] = { + private def fold(z: Seq[(String, Int)])(it: Regex.MatchIterator): Seq[(String, Int)] = { if (!it.hasNext) z else fold(z :+ (it.next() -> it.start))(it) } def classTraitObjectInLine(sym: String)(line: String): Seq[(String, Int)] = { - import scala.util.matching.Regex.quote - val potentials = - Seq(s"object\\s+${quote(sym)}".r, - s"trait\\s+${quote(sym)} *\\[?".r, - s"class\\s+${quote(sym)} *\\[?".r) + val potentials = Seq( + s"object\\s+${Regex quote sym}".r, + s"trait\\s+${Regex quote sym} *\\[?".r, + s"class\\s+${Regex quote sym} *\\[?".r, + ) potentials .flatMap { reg => fold(Seq.empty)(reg.findAllIn(line)) @@ -156,10 +156,7 @@ private[sbt] object Definition { } } - import java.io.File def markPosition(file: File, sym: String): Seq[(File, Long, Long, Long)] = { - import java.nio.file._ - import scala.collection.JavaConverters._ val findInLine = classTraitObjectInLine(sym)(_) Files .lines(file.toPath) @@ -179,43 +176,50 @@ private[sbt] object Definition { } } - import sbt.internal.langserver.TextDocumentPositionParams - import sjsonnew.shaded.scalajson.ast.unsafe.JValue private def getDefinition(jsonDefinition: JValue): Option[TextDocumentPositionParams] = { - import sbt.internal.langserver.codec.JsonProtocol._ - import sjsonnew.support.scalajson.unsafe.Converter + import langserver.codec.JsonProtocol._ Converter.fromJson[TextDocumentPositionParams](jsonDefinition).toOption } - import java.io.File + object AnalysesAccess { + private[this] val AnalysesKey = "lsp.definition.analyses.key" + + private[server] 
type Analyses = Set[((String, Boolean), Option[Analysis])] + + private[server] def getFrom[F[_]]( + cache: Cache[Any] + )(implicit mode: Mode[F], flags: Flags): F[Option[Analyses]] = + mode.M.map(cache.get(AnalysesKey))(_ map (_.asInstanceOf[Analyses])) + + private[server] def putIn[F[_]]( + cache: Cache[Any], + value: Analyses, + ttl: Option[Duration], + )(implicit mode: Mode[F], flags: Flags): F[Any] = + cache.put(AnalysesKey)(value, ttl) + } + private def storeAnalysis(cacheFile: File, useBinary: Boolean): Option[Analysis] = MixedAnalyzingCompiler .staticCachedStore(cacheFile, !useBinary) .get .toOption - .collect { - case contents => - contents.getAnalysis - } - .collect { - case a: Analysis => a - } + .map { _.getAnalysis } + .collect { case a: Analysis => a } - import scalacache._ private[sbt] def updateCache[F[_]](cache: Cache[Any])(cacheFile: String, useBinary: Boolean)( implicit mode: Mode[F], - flags: Flags): F[Any] = { - mode.M.flatMap(cache.get(AnalysesKey)) { + flags: Flags + ): F[Any] = { + mode.M.flatMap(AnalysesAccess.getFrom(cache)) { case None => - cache.put(AnalysesKey)(Set(cacheFile -> useBinary -> None), Option(Inf)) + AnalysesAccess.putIn(cache, Set(cacheFile -> useBinary -> None), Option(Duration.Inf)) case Some(set) => - cache.put(AnalysesKey)( - set.asInstanceOf[Set[((String, Boolean), Option[Analysis])]].filterNot { - case ((file, _), _) => file == cacheFile - } + (cacheFile -> useBinary -> None), - Option(Inf)) - case _ => mode.M.pure(()) + val newSet = set + .filterNot { case ((file, _), _) => file == cacheFile } + .+(cacheFile -> useBinary -> None) + AnalysesAccess.putIn(cache, newSet, Option(Duration.Inf)) } } @@ -228,17 +232,16 @@ private[sbt] object Definition { updateCache(StandardMain.cache)(cacheFile, useBinary) } - private[sbt] def getAnalyses(log: Logger): Future[Seq[Analysis]] = { + private[sbt] def getAnalyses: Future[Seq[Analysis]] = { import scalacache.modes.scalaFuture._ import 
scala.concurrent.ExecutionContext.Implicits.global - StandardMain.cache - .get(AnalysesKey) - .collect { - case Some(a) => a.asInstanceOf[Set[((String, Boolean), Option[Analysis])]] - } + AnalysesAccess + .getFrom(StandardMain.cache) + .collect { case Some(a) => a } .map { caches => - val (working, uninitialized) = caches.partition { cacheAnalysis => - cacheAnalysis._2 != None + val (working, uninitialized) = caches.partition { + case (_, Some(_)) => true + case (_, None) => false } val addToCache = uninitialized.collect { case (title @ (file, useBinary), _) if Files.exists(Paths.get(file)) => @@ -246,7 +249,7 @@ private[sbt] object Definition { } val validCaches = working ++ addToCache if (addToCache.nonEmpty) - StandardMain.cache.put(AnalysesKey)(validCaches, Option(Inf)) + AnalysesAccess.putIn(StandardMain.cache, validCaches, Option(Duration.Inf)) validCaches.toSeq.collect { case (_, Some(analysis)) => analysis @@ -254,19 +257,19 @@ private[sbt] object Definition { } } - def lspDefinition(jsonDefinition: JValue, - requestId: String, - commandSource: CommandSource, - log: Logger)(implicit ec: ExecutionContext): Future[Unit] = Future { + def lspDefinition( + jsonDefinition: JValue, + requestId: String, + commandSource: CommandSource, + log: Logger, + )(implicit ec: ExecutionContext): Future[Unit] = Future { val LspDefinitionLogHead = "lsp-definition" - import sjsonnew.support.scalajson.unsafe.CompactPrinter - log.debug(s"$LspDefinitionLogHead json request: ${CompactPrinter(jsonDefinition)}") - lazy val analyses = getAnalyses(log) - val definition = getDefinition(jsonDefinition) - definition + val jsonDefinitionString = CompactPrinter(jsonDefinition) + log.debug(s"$LspDefinitionLogHead json request: $jsonDefinitionString") + lazy val analyses = getAnalyses + getDefinition(jsonDefinition) .flatMap { definition => val uri = URI.create(definition.textDocument.uri) - import java.nio.file._ Files .lines(Paths.get(uri)) .skip(definition.position.line) @@ -274,11 +277,10 
@@ private[sbt] object Definition { .toOption .flatMap { line => log.debug(s"$LspDefinitionLogHead found line: $line") - textProcessor - .identifier(line, definition.position.character.toInt) + textProcessor.identifier(line, definition.position.character.toInt) } - } - .map { sym => + } match { + case Some(sym) => log.debug(s"symbol $sym") analyses .map { analyses => @@ -291,40 +293,39 @@ private[sbt] object Definition { log.debug(s"$LspDefinitionLogHead potentials: $classes") classes .flatMap { className => - analysis.relations.definesClass(className) ++ analysis.relations - .libraryDefinesClass(className) + analysis.relations.definesClass(className) ++ + analysis.relations.libraryDefinesClass(className) } .flatMap { classFile => textProcessor.markPosition(classFile, sym).collect { case (file, line, from, to) => - import sbt.internal.langserver.{ Location, Position, Range } - Location(IO.toURI(file).toString, - Range(Position(line, from), Position(line, to))) + Location( + IO.toURI(file).toString, + Range(Position(line, from), Position(line, to)), + ) } } }.seq - log.debug(s"$LspDefinitionLogHead locations ${locations}") - import sbt.internal.langserver.codec.JsonProtocol._ + log.debug(s"$LspDefinitionLogHead locations $locations") + import langserver.codec.JsonProtocol._ send(commandSource, requestId)(locations.toArray) } .recover { - case anyException @ _ => - log.warn( - s"Problem with processing analyses $anyException for ${CompactPrinter(jsonDefinition)}") - import sbt.internal.protocol.JsonRpcResponseError - import sbt.internal.protocol.codec.JsonRPCProtocol._ - send(commandSource, requestId)( - JsonRpcResponseError(ErrorCodes.InternalError, - "Problem with processing analyses.", - None)) + case t => + log.warn(s"Problem with processing analyses $t for $jsonDefinitionString") + val rsp = JsonRpcResponseError( + ErrorCodes.InternalError, + "Problem with processing analyses.", + None, + ) + import JsonRPCProtocol._ + send(commandSource, requestId)(rsp) } - } - 
.orElse { - log.info(s"Symbol not found in definition request ${CompactPrinter(jsonDefinition)}") - import sbt.internal.langserver.Location - import sbt.internal.langserver.codec.JsonProtocol._ + () + case None => + log.info(s"Symbol not found in definition request $jsonDefinitionString") + import langserver.codec.JsonProtocol._ send(commandSource, requestId)(Array.empty[Location]) - None - } + } } } diff --git a/main/src/main/scala/sbt/internal/server/LanguageServerProtocol.scala b/main/src/main/scala/sbt/internal/server/LanguageServerProtocol.scala index af5b37337..ba6ee4e85 100644 --- a/main/src/main/scala/sbt/internal/server/LanguageServerProtocol.scala +++ b/main/src/main/scala/sbt/internal/server/LanguageServerProtocol.scala @@ -10,6 +10,7 @@ package internal package server import sjsonnew.JsonFormat +import sjsonnew.shaded.scalajson.ast.unsafe.JValue import sjsonnew.support.scalajson.unsafe.Converter import sbt.protocol.Serialization import sbt.protocol.{ SettingQuery => Q } @@ -19,12 +20,78 @@ import sbt.internal.langserver._ import sbt.internal.util.ObjectEvent import sbt.util.Logger -private[sbt] case class LangServerError(code: Long, message: String) extends Throwable(message) +private[sbt] final case class LangServerError(code: Long, message: String) + extends Throwable(message) -/** - * Implements Language Server Protocol . 
- */ -private[sbt] trait LanguageServerProtocol extends CommandChannel { +private[sbt] object LanguageServerProtocol { + lazy val internalJsonProtocol = new InitializeOptionFormats with sjsonnew.BasicJsonProtocol {} + + lazy val serverCapabilities: ServerCapabilities = { + ServerCapabilities( + textDocumentSync = TextDocumentSyncOptions(true, 0, false, false, SaveOptions(false)), + hoverProvider = false, + definitionProvider = true + ) + } + + lazy val handler: ServerHandler = ServerHandler({ + case callback: ServerCallback => + import callback._ + ServerIntent( + { + import sbt.internal.langserver.codec.JsonProtocol._ + import internalJsonProtocol._ + def json(r: JsonRpcRequestMessage) = + r.params.getOrElse( + throw LangServerError( + ErrorCodes.InvalidParams, + s"param is expected on '${r.method}' method." + ) + ) + + { + case r: JsonRpcRequestMessage if r.method == "initialize" => + if (authOptions(ServerAuthentication.Token)) { + val param = Converter.fromJson[InitializeParams](json(r)).get + val optionJson = param.initializationOptions.getOrElse( + throw LangServerError( + ErrorCodes.InvalidParams, + "initializationOptions is expected on 'initialize' param." 
+ ) + ) + val opt = Converter.fromJson[InitializeOption](optionJson).get + val token = opt.token.getOrElse(sys.error("'token' is missing.")) + if (authenticate(token)) () + else throw LangServerError(ErrorCodes.InvalidRequest, "invalid token") + } else () + setInitialized(true) + appendExec(Exec(s"collectAnalyses", None, Some(CommandSource(name)))) + jsonRpcRespond(InitializeResult(serverCapabilities), Option(r.id)) + + case r: JsonRpcRequestMessage if r.method == "textDocument/definition" => + import scala.concurrent.ExecutionContext.Implicits.global + Definition.lspDefinition(json(r), r.id, CommandSource(name), log) + () + case r: JsonRpcRequestMessage if r.method == "sbt/exec" => + val param = Converter.fromJson[SbtExecParams](json(r)).get + appendExec(Exec(param.commandLine, Some(r.id), Some(CommandSource(name)))) + () + case r: JsonRpcRequestMessage if r.method == "sbt/setting" => + import sbt.protocol.codec.JsonProtocol._ + val param = Converter.fromJson[Q](json(r)).get + onSettingQuery(Option(r.id), param) + } + }, { + case n: JsonRpcNotificationMessage if n.method == "textDocument/didSave" => + appendExec(Exec(";Test/compile; collectAnalyses", None, Some(CommandSource(name)))) + () + } + ) + }) +} + +/** Implements Language Server Protocol . 
*/ +private[sbt] trait LanguageServerProtocol extends CommandChannel { self => lazy val internalJsonProtocol = new InitializeOptionFormats with sjsonnew.BasicJsonProtocol {} @@ -34,51 +101,24 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel { protected def log: Logger protected def onSettingQuery(execId: Option[String], req: Q): Unit - protected def onNotification(notification: JsonRpcNotificationMessage): Unit = { - log.debug(s"onNotification: $notification") - notification.method match { - case "textDocument/didSave" => - append(Exec(";Test/compile; collectAnalyses", None, Some(CommandSource(name)))) - case u => log.debug(s"Unhandled notification received: $u") - } - } + protected lazy val callbackImpl: ServerCallback = new ServerCallback { + def jsonRpcRespond[A: JsonFormat](event: A, execId: Option[String]): Unit = + self.jsonRpcRespond(event, execId) - protected def onRequestMessage(request: JsonRpcRequestMessage): Unit = { - import sbt.internal.langserver.codec.JsonProtocol._ - import internalJsonProtocol._ - def json = - request.params.getOrElse( - throw LangServerError(ErrorCodes.InvalidParams, - s"param is expected on '${request.method}' method.")) - log.debug(s"onRequestMessage: $request") - request.method match { - case "initialize" => - if (authOptions(ServerAuthentication.Token)) { - val param = Converter.fromJson[InitializeParams](json).get - val optionJson = param.initializationOptions.getOrElse( - throw LangServerError(ErrorCodes.InvalidParams, - "initializationOptions is expected on 'initialize' param.")) - val opt = Converter.fromJson[InitializeOption](optionJson).get - val token = opt.token.getOrElse(sys.error("'token' is missing.")) - if (authenticate(token)) () - else throw LangServerError(ErrorCodes.InvalidRequest, "invalid token") - } else () - setInitialized(true) - append(Exec(s"collectAnalyses", None, Some(CommandSource(name)))) - langRespond(InitializeResult(serverCapabilities), Option(request.id)) - case 
"textDocument/definition" => - import scala.concurrent.ExecutionContext.Implicits.global - Definition.lspDefinition(json, request.id, CommandSource(name), log) - case "sbt/exec" => - val param = Converter.fromJson[SbtExecParams](json).get - append(Exec(param.commandLine, Some(request.id), Some(CommandSource(name)))) - case "sbt/setting" => { - import sbt.protocol.codec.JsonProtocol._ - val param = Converter.fromJson[Q](json).get - onSettingQuery(Option(request.id), param) - } - case unhandledRequest => log.debug(s"Unhandled request received: $unhandledRequest") - } + def jsonRpcRespondError(execId: Option[String], code: Long, message: String): Unit = + self.jsonRpcRespondError(execId, code, message) + + def jsonRpcNotify[A: JsonFormat](method: String, params: A): Unit = + self.jsonRpcNotify(method, params) + + def appendExec(exec: Exec): Boolean = self.append(exec) + def log: Logger = self.log + def name: String = self.name + private[sbt] def authOptions: Set[ServerAuthentication] = self.authOptions + private[sbt] def authenticate(token: String): Boolean = self.authenticate(token) + private[sbt] def setInitialized(value: Boolean): Unit = self.setInitialized(value) + private[sbt] def onSettingQuery(execId: Option[String], req: Q): Unit = + self.onSettingQuery(execId, req) } /** @@ -94,7 +134,7 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel { // LanguageServerReporter sends PublishDiagnosticsParams case "sbt.internal.langserver.PublishDiagnosticsParams" => // val p = event.message.asInstanceOf[PublishDiagnosticsParams] - // langNotify("textDocument/publishDiagnostics", p) + // jsonRpcNotify("textDocument/publishDiagnostics", p) case "xsbti.Problem" => () // ignore case _ => @@ -103,62 +143,53 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel { } } - /** - * Respond back to Language Server's client. 
- */ - private[sbt] def langRespond[A: JsonFormat](event: A, execId: Option[String]): Unit = { + /** Respond back to Language Server's client. */ + private[sbt] def jsonRpcRespond[A: JsonFormat](event: A, execId: Option[String]): Unit = { val m = JsonRpcResponseMessage("2.0", execId, Option(Converter.toJson[A](event).get), None) val bytes = Serialization.serializeResponseMessage(m) publishBytes(bytes) } - /** - * Respond back to Language Server's client. - */ - private[sbt] def langError(execId: Option[String], code: Long, message: String): Unit = { - val e = JsonRpcResponseError(code, message, None) + /** Respond back to Language Server's client. */ + private[sbt] def jsonRpcRespondError(execId: Option[String], code: Long, message: String): Unit = + jsonRpcRespondErrorImpl(execId, code, message, None) + + /** Respond back to Language Server's client. */ + private[sbt] def jsonRpcRespondError[A: JsonFormat]( + execId: Option[String], + code: Long, + message: String, + data: A, + ): Unit = + jsonRpcRespondErrorImpl(execId, code, message, Option(Converter.toJson[A](data).get)) + + private[this] def jsonRpcRespondErrorImpl( + execId: Option[String], + code: Long, + message: String, + data: Option[JValue], + ): Unit = { + val e = JsonRpcResponseError(code, message, data) val m = JsonRpcResponseMessage("2.0", execId, None, Option(e)) val bytes = Serialization.serializeResponseMessage(m) publishBytes(bytes) } - /** - * Respond back to Language Server's client. - */ - private[sbt] def langError[A: JsonFormat](execId: Option[String], - code: Long, - message: String, - data: A): Unit = { - val e = JsonRpcResponseError(code, message, Option(Converter.toJson[A](data).get)) - val m = JsonRpcResponseMessage("2.0", execId, None, Option(e)) - val bytes = Serialization.serializeResponseMessage(m) - publishBytes(bytes) - } - - /** - * Notify to Language Server's client. 
- */ - private[sbt] def langNotify[A: JsonFormat](method: String, params: A): Unit = { + /** Notify to Language Server's client. */ + private[sbt] def jsonRpcNotify[A: JsonFormat](method: String, params: A): Unit = { val m = JsonRpcNotificationMessage("2.0", method, Option(Converter.toJson[A](params).get)) - log.debug(s"langNotify: $m") + log.debug(s"jsonRpcNotify: $m") val bytes = Serialization.serializeNotificationMessage(m) publishBytes(bytes) } def logMessage(level: String, message: String): Unit = { import sbt.internal.langserver.codec.JsonProtocol._ - langNotify( + jsonRpcNotify( "window/logMessage", LogMessageParams(MessageType.fromLevelString(level), message) ) } - - private[sbt] lazy val serverCapabilities: ServerCapabilities = { - ServerCapabilities(textDocumentSync = - TextDocumentSyncOptions(true, 0, false, false, SaveOptions(false)), - hoverProvider = false, - definitionProvider = true) - } } diff --git a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala index f8dfdf784..80149dfc5 100644 --- a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala +++ b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala @@ -21,13 +21,15 @@ import sbt.internal.util.codec.JValueFormats import sbt.internal.protocol.{ JsonRpcRequestMessage, JsonRpcNotificationMessage } import sbt.util.Logger -final class NetworkChannel(val name: String, - connection: Socket, - structure: BuildStructure, - auth: Set[ServerAuthentication], - instance: ServerInstance, - val log: Logger) - extends CommandChannel +final class NetworkChannel( + val name: String, + connection: Socket, + structure: BuildStructure, + auth: Set[ServerAuthentication], + instance: ServerInstance, + handlers: Seq[ServerHandler], + val log: Logger +) extends CommandChannel with LanguageServerProtocol { import NetworkChannel._ @@ -45,18 +47,12 @@ final class NetworkChannel(val name: String, private val VsCodeOld = 
"application/vscode-jsonrpc; charset=utf8" private lazy val jsonFormat = new sjsonnew.BasicJsonProtocol with JValueFormats {} - def setContentType(ct: String): Unit = synchronized { - _contentType = ct - } + def setContentType(ct: String): Unit = synchronized { _contentType = ct } def contentType: String = _contentType - protected def authenticate(token: String): Boolean = { - instance.authenticate(token) - } + protected def authenticate(token: String): Boolean = instance.authenticate(token) - protected def setInitialized(value: Boolean): Unit = { - initialized = value - } + protected def setInitialized(value: Boolean): Unit = initialized = value protected def authOptions: Set[ServerAuthentication] = auth @@ -73,10 +69,8 @@ final class NetworkChannel(val name: String, var bytesRead = 0 def resetChannelState(): Unit = { contentLength = 0 - // contentType = "" state = SingleLine } - def tillEndOfLine: Option[Vector[Byte]] = { val delimPos = buffer.indexOf(delimiter) if (delimPos > 0) { @@ -165,6 +159,21 @@ final class NetworkChannel(val name: String, } } + private lazy val intents = { + val cb = callbackImpl + handlers.toVector map { h => + h.handler(cb) + } + } + lazy val onRequestMessage: PartialFunction[JsonRpcRequestMessage, Unit] = + intents.foldLeft(PartialFunction.empty[JsonRpcRequestMessage, Unit]) { + case (f, i) => f orElse i.onRequest + } + lazy val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit] = + intents.foldLeft(PartialFunction.empty[JsonRpcNotificationMessage, Unit]) { + case (f, i) => f orElse i.onNotification + } + def handleBody(chunk: Vector[Byte]): Unit = { if (isLanguageServerProtocol) { Serialization.deserializeJsonMessage(chunk) match { @@ -174,7 +183,7 @@ final class NetworkChannel(val name: String, } catch { case LangServerError(code, message) => log.debug(s"sending error: $code: $message") - langError(Option(req.id), code, message) + jsonRpcRespondError(Option(req.id), code, message) } case Right(ntf: 
JsonRpcNotificationMessage) => try { @@ -182,13 +191,13 @@ final class NetworkChannel(val name: String, } catch { case LangServerError(code, message) => log.debug(s"sending error: $code: $message") - langError(None, code, message) // new id? + jsonRpcRespondError(None, code, message) // new id? } case Right(msg) => log.debug(s"Unhandled message: $msg") case Left(errorDesc) => val msg = s"Got invalid chunk from client (${new String(chunk.toArray, "UTF-8")}): " + errorDesc - langError(None, ErrorCodes.ParseError, msg) + jsonRpcRespondError(None, ErrorCodes.ParseError, msg) } } else { contentType match { @@ -198,7 +207,8 @@ final class NetworkChannel(val name: String, .fold( errorDesc => log.error( - s"Got invalid chunk from client (${new String(chunk.toArray, "UTF-8")}): " + errorDesc), + s"Got invalid chunk from client (${new String(chunk.toArray, "UTF-8")}): " + errorDesc + ), onCommand ) case _ => @@ -230,7 +240,7 @@ final class NetworkChannel(val name: String, private[sbt] def notifyEvent[A: JsonFormat](method: String, params: A): Unit = { if (isLanguageServerProtocol) { - langNotify(method, params) + jsonRpcNotify(method, params) } else { () } @@ -242,11 +252,11 @@ final class NetworkChannel(val name: String, case entry: StringEvent => logMessage(entry.level, entry.message) case entry: ExecStatusEvent => entry.exitCode match { - case None => langRespond(event, entry.execId) - case Some(0) => langRespond(event, entry.execId) - case Some(exitCode) => langError(entry.execId, exitCode, "") + case None => jsonRpcRespond(event, entry.execId) + case Some(0) => jsonRpcRespond(event, entry.execId) + case Some(exitCode) => jsonRpcRespondError(entry.execId, exitCode, "") } - case _ => langRespond(event, execId) + case _ => jsonRpcRespond(event, execId) } } else { contentType match { @@ -258,8 +268,6 @@ final class NetworkChannel(val name: String, } } - def publishEvent[A: JsonFormat](event: A): Unit = publishEvent(event, None) - def publishEventMessage(event: 
EventMessage): Unit = { if (isLanguageServerProtocol) { event match { @@ -336,7 +344,9 @@ final class NetworkChannel(val name: String, private def onExecCommand(cmd: ExecCommand) = { if (initialized) { append( - Exec(cmd.commandLine, cmd.execId orElse Some(Exec.newExecId), Some(CommandSource(name)))) + Exec(cmd.commandLine, cmd.execId orElse Some(Exec.newExecId), Some(CommandSource(name))) + ) + () } else { log.warn(s"ignoring command $cmd before initialization") } @@ -346,8 +356,8 @@ final class NetworkChannel(val name: String, if (initialized) { import sbt.protocol.codec.JsonProtocol._ SettingQuery.handleSettingQueryEither(req, structure) match { - case Right(x) => langRespond(x, execId) - case Left(s) => langError(execId, ErrorCodes.InvalidParams, s) + case Right(x) => jsonRpcRespond(x, execId) + case Left(s) => jsonRpcRespondError(execId, ErrorCodes.InvalidParams, s) } } else { log.warn(s"ignoring query $req before initialization") diff --git a/main/src/main/scala/sbt/internal/server/SettingQuery.scala b/main/src/main/scala/sbt/internal/server/SettingQuery.scala index 25dd1b66a..6ab872f5b 100644 --- a/main/src/main/scala/sbt/internal/server/SettingQuery.scala +++ b/main/src/main/scala/sbt/internal/server/SettingQuery.scala @@ -21,7 +21,7 @@ import sjsonnew.support.scalajson.unsafe._ object SettingQuery { import sbt.internal.util.{ AttributeKey, Settings } import sbt.internal.util.complete.{ DefaultParsers, Parser }, DefaultParsers._ - import sbt.Def.{ showBuildRelativeKey, ScopedKey } + import sbt.Def.{ showBuildRelativeKey2, ScopedKey } // Similar to Act.ParsedAxis / Act.projectRef / Act.resolveProject except you can't omit the project reference @@ -32,8 +32,10 @@ object SettingQuery { new ParsedExplicitValue(v) } - def projectRef(index: KeyIndex, - currentBuild: URI): Parser[ParsedExplicitAxis[ResolvedReference]] = { + def projectRef( + index: KeyIndex, + currentBuild: URI + ): Parser[ParsedExplicitAxis[ResolvedReference]] = { val global = 
token(Act.ZeroString ~ '/') ^^^ ParsedExplicitGlobal val trailing = '/' !!! "Expected '/' (if selecting a project)" global | explicitValue(Act.resolvedReference(index, currentBuild, trailing)) @@ -67,7 +69,7 @@ object SettingQuery { data: Settings[Scope] ): Parser[ParsedKey] = scopedKeyFull(index, currentBuild, defaultConfigs, keyMap) flatMap { choices => - Act.select(choices, data)(showBuildRelativeKey(currentBuild, index.buildURIs.size > 1)) + Act.select(choices, data)(showBuildRelativeKey2(currentBuild)) } def scopedKey( @@ -107,15 +109,21 @@ object SettingQuery { def toJson[A: JsonWriter](x: A): JValue = Converter toJsonUnsafe x - def getSettingJsonValue[A](structure: BuildStructure, - key: Def.ScopedKey[A]): Either[String, JValue] = - getSettingValue(structure, key) flatMap (value => - getJsonWriter(key.key) map { implicit jw: JsonWriter[A] => - toJson(value) - }) + def getSettingJsonValue[A]( + structure: BuildStructure, + key: Def.ScopedKey[A] + ): Either[String, JValue] = + getSettingValue(structure, key) flatMap ( + value => + getJsonWriter(key.key) map { implicit jw: JsonWriter[A] => + toJson(value) + } + ) - def handleSettingQueryEither(req: SettingQuery, - structure: BuildStructure): Either[String, SettingQuerySuccess] = { + def handleSettingQueryEither( + req: SettingQuery, + structure: BuildStructure + ): Either[String, SettingQuerySuccess] = { val key = Parser.parse(req.setting, scopedKeyParser(structure)) for { diff --git a/main/src/main/scala/sbt/plugins/JUnitXmlReportPlugin.scala b/main/src/main/scala/sbt/plugins/JUnitXmlReportPlugin.scala index b65324be7..232265d68 100644 --- a/main/src/main/scala/sbt/plugins/JUnitXmlReportPlugin.scala +++ b/main/src/main/scala/sbt/plugins/JUnitXmlReportPlugin.scala @@ -30,6 +30,6 @@ object JUnitXmlReportPlugin extends AutoPlugin { // It might be a good idea to derive this setting into specific test scopes. 
override lazy val projectSettings: Seq[Setting[_]] = Seq( - testListeners += new JUnitXmlTestsListener(target.value.getAbsolutePath) + testListeners += new JUnitXmlTestsListener(target.value.getAbsolutePath, streams.value.log) ) } diff --git a/main/src/main/scala/sbt/plugins/SbtPlugin.scala b/main/src/main/scala/sbt/plugins/SbtPlugin.scala new file mode 100644 index 000000000..a8a52413c --- /dev/null +++ b/main/src/main/scala/sbt/plugins/SbtPlugin.scala @@ -0,0 +1,19 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt +package plugins + +import Keys._ + +object SbtPlugin extends AutoPlugin { + override def requires = ScriptedPlugin + + override lazy val projectSettings = Seq( + sbtPlugin := true + ) +} diff --git a/main/src/test/scala/DefaultsTest.scala b/main/src/test/scala/DefaultsTest.scala index b029123fc..b9399ece4 100644 --- a/main/src/test/scala/DefaultsTest.scala +++ b/main/src/test/scala/DefaultsTest.scala @@ -30,8 +30,10 @@ object DefaultsTest extends Specification { } "work correctly with excludes" in { - assertFiltered(List("Test*", "-Test2"), - Map("Test1" -> true, "Test2" -> false, "Foo" -> false)) + assertFiltered( + List("Test*", "-Test2"), + Map("Test1" -> true, "Test2" -> false, "Foo" -> false) + ) } "work correctly without includes" in { @@ -43,18 +45,24 @@ object DefaultsTest extends Specification { } "cope with multiple filters" in { - assertFiltered(List("T*1", "T*2", "-F*"), - Map("Test1" -> true, "Test2" -> true, "Foo" -> false)) + assertFiltered( + List("T*1", "T*2", "-F*"), + Map("Test1" -> true, "Test2" -> true, "Foo" -> false) + ) } "cope with multiple exclusion filters, no includes" in { - assertFiltered(List("-A*", "-F*"), - Map("Test1" -> true, "Test2" -> true, "AAA" -> false, "Foo" -> false)) + assertFiltered( + List("-A*", "-F*"), + Map("Test1" -> true, "Test2" -> true, "AAA" -> false, "Foo" -> false) + ) } "cope 
with multiple exclusion filters with includes" in { - assertFiltered(List("T*", "-T*1", "-T*2"), - Map("Test1" -> false, "Test2" -> false, "Test3" -> true)) + assertFiltered( + List("T*", "-T*1", "-T*2"), + Map("Test1" -> false, "Test2" -> false, "Test3" -> true) + ) } } diff --git a/main/src/test/scala/Delegates.scala b/main/src/test/scala/Delegates.scala index 11a511ad4..dd1f48635 100644 --- a/main/src/test/scala/Delegates.scala +++ b/main/src/test/scala/Delegates.scala @@ -48,7 +48,8 @@ object Delegates extends Properties("delegates") { } property("Initial scope present with all combinations of Global axes") = allAxes( - globalCombinations) + (s, ds, _) => globalCombinations(s, ds) + ) property("initial scope first") = forAll { (keys: Keys) => allDelegates(keys) { (scope, ds) => @@ -113,6 +114,7 @@ object Delegates extends Properties("delegates") { all(f(s, ds, _.project), f(s, ds, _.config), f(s, ds, _.task), f(s, ds, _.extra)) } } + def allDelegates(keys: Keys)(f: (Scope, Seq[Scope]) => Prop): Prop = all(keys.scopes map { scope => val delegates = keys.env.delegates(scope) @@ -120,16 +122,20 @@ object Delegates extends Properties("delegates") { ("Delegates:\n\t" + delegates.map(scope => Scope.display(scope, "_")).mkString("\n\t")) |: f(scope, delegates) }: _*) + def alwaysZero(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop = (axis(s) != Zero) || all(ds map { d => (axis(d) == Zero): Prop }: _*) - def globalCombinations(s: Scope, ds: Seq[Scope], axis: Scope => ScopeAxis[_]): Prop = { - val mods = List[Scope => Scope](_.copy(project = Zero), - _.copy(config = Zero), - _.copy(task = Zero), - _.copy(extra = Zero)) + + def globalCombinations(s: Scope, ds: Seq[Scope]): Prop = { + val mods = List[Scope => Scope]( + _.copy(project = Zero), + _.copy(config = Zero), + _.copy(task = Zero), + _.copy(extra = Zero), + ) val modAndIdent = mods.map(_ :: idFun[Scope] :: Nil) def loop(cur: Scope, acc: List[Scope], rem: List[Seq[Scope => Scope]]): Seq[Scope] = diff 
--git a/main/src/test/scala/ParseKey.scala b/main/src/test/scala/ParseKey.scala index 2d6991af7..ecc6374dc 100644 --- a/main/src/test/scala/ParseKey.scala +++ b/main/src/test/scala/ParseKey.scala @@ -8,166 +8,114 @@ package sbt import Def.{ displayFull, displayMasked, ScopedKey } -import sbt.internal.{ TestBuild, Resolve } -import TestBuild._ -import sbt.internal.util.complete._ +import sbt.internal.{ TestBuild, Resolve }, TestBuild._ +import sbt.internal.util.complete.Parser -import org.scalacheck._ -import Gen._ -import Prop._ -import Arbitrary.arbBool +import org.scalacheck._, Arbitrary.arbitrary, Gen._, Prop._ /** * Tests that the scoped key parser in Act can correctly parse a ScopedKey converted by Def.show*Key. * This includes properly resolving omitted components. */ object ParseKey extends Properties("Key parser test") { - final val MaxKeys = 5 - final val MaxScopedKeys = 100 - - implicit val gstructure = genStructure - - property("An explicitly specified axis is always parsed to that explicit value") = - forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) => - import skm.{ structure, key, mask => mask0 } - - val hasZeroConfig = key.scope.config == Zero - val mask = if (hasZeroConfig) mask0.copy(project = true) else mask0 - val expected = resolve(structure, key, mask) - // Note that this explicitly displays the configuration axis set to Zero. - // This is to disambiguate `proj/Zero/name`, which could render potentially - // as `Zero/name`, but could be interpretted as `Zero/Zero/name`. 
- val s = displayMasked(key, mask, hasZeroConfig) - ("Key: " + displayPedantic(key)) |: - parseExpected(structure, s, expected, mask) - } - - property("An unspecified project axis resolves to the current project") = - forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) => - import skm.{ structure, key } - - val mask = skm.mask.copy(project = false) - val string = displayMasked(key, mask) - // skip when config axis is set to Zero - val hasZeroConfig = key.scope.config == Zero - - ("Key: " + displayPedantic(key)) |: - ("Mask: " + mask) |: - ("Current: " + structure.current) |: - parse(structure, string) { - case Left(err) => false - case Right(sk) if hasZeroConfig => true - case Right(sk) => sk.scope.project == Select(structure.current) - } - } - - property("An unspecified task axis resolves to Zero") = forAllNoShrink(structureDefinedKey) { + property("An explicitly specified axis is always parsed to that explicit value") = forAll { (skm: StructureKeyMask) => import skm.{ structure, key } - val mask = skm.mask.copy(task = false) - val string = displayMasked(key, mask) + val hasZeroConfig = key.scope.config == Zero + val mask = if (hasZeroConfig) skm.mask.copy(project = true) else skm.mask + // Note that this explicitly displays the configuration axis set to Zero. + // This is to disambiguate `proj/Zero/name`, which could render potentially + // as `Zero/name`, but could be interpreted as `Zero/Zero/name`. 
+ val expected = ScopedKey( + Resolve(structure.extra, Select(structure.current), key.key, mask)(key.scope), + key.key + ) + parseCheck(structure, key, mask, hasZeroConfig)( + sk => + Project.equal(sk, expected, mask) + :| s"$sk.key == $expected.key: ${sk.key == expected.key}" + :| s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}" + ) :| s"Expected: ${displayFull(expected)}" + } - ("Key: " + displayPedantic(key)) |: - ("Mask: " + mask) |: - parse(structure, string) { - case Left(err) => false - case Right(sk) => sk.scope.task == Zero - } + property("An unspecified project axis resolves to the current project") = forAll { + (skm: StructureKeyMask) => + import skm.{ structure, key } + val mask = skm.mask.copy(project = false) + // skip when config axis is set to Zero + val hasZeroConfig = key.scope.config == Zero + parseCheck(structure, key, mask)( + sk => + (hasZeroConfig || sk.scope.project == Select(structure.current)) + :| s"Current: ${structure.current}" + ) + } + + property("An unspecified task axis resolves to Zero") = forAll { (skm: StructureKeyMask) => + import skm.{ structure, key } + val mask = skm.mask.copy(task = false) + parseCheck(structure, key, mask)(_.scope.task == Zero) } property( - "An unspecified configuration axis resolves to the first configuration directly defining the key or else Zero") = - forAllNoShrink(structureDefinedKey) { (skm: StructureKeyMask) => - import skm.{ structure, key } - val mask = ScopeMask(config = false) - val string = displayMasked(key, mask) - val resolvedConfig = Resolve.resolveConfig(structure.extra, key.key, mask)(key.scope).config - - ("Key: " + displayPedantic(key)) |: - ("Mask: " + mask) |: - ("Expected configuration: " + resolvedConfig.map(_.name)) |: - parse(structure, string) { - case Right(sk) => (sk.scope.config == resolvedConfig) || (sk.scope == Scope.GlobalScope) - case Left(err) => false - } - } - - def displayPedantic(scoped: ScopedKey[_]): String = - 
Scope.displayPedantic(scoped.scope, scoped.key.label) - - lazy val structureDefinedKey: Gen[StructureKeyMask] = structureKeyMask { s => - for (scope <- TestBuild.scope(s.env); key <- oneOf(s.allAttributeKeys.toSeq)) - yield ScopedKey(scope, key) - } - def structureKeyMask(genKey: Structure => Gen[ScopedKey[_]])( - implicit maskGen: Gen[ScopeMask], - structureGen: Gen[Structure]): Gen[StructureKeyMask] = - for (mask <- maskGen; structure <- structureGen; key <- genKey(structure)) - yield new StructureKeyMask(structure, key, mask) - final class StructureKeyMask(val structure: Structure, val key: ScopedKey[_], val mask: ScopeMask) - - def resolve(structure: Structure, key: ScopedKey[_], mask: ScopeMask): ScopedKey[_] = - ScopedKey(Resolve(structure.extra, Select(structure.current), key.key, mask)(key.scope), - key.key) - - def parseExpected(structure: Structure, - s: String, - expected: ScopedKey[_], - mask: ScopeMask): Prop = - ("Expected: " + displayFull(expected)) |: - ("Mask: " + mask) |: - parse(structure, s) { - case Left(err) => false - case Right(sk) => - (s"${sk}.key == ${expected}.key: ${sk.key == expected.key}") |: - (s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}") |: - Project.equal(sk, expected, mask) - } - - def parse(structure: Structure, s: String)(f: Either[String, ScopedKey[_]] => Prop): Prop = { - val parser = makeParser(structure) - val parsed = DefaultParsers.result(parser, s).left.map(_().toString) - val showParsed = parsed.right.map(displayFull) - ("Key string: '" + s + "'") |: - ("Parsed: " + showParsed) |: - ("Structure: " + structure) |: - f(parsed) + "An unspecified configuration axis resolves to the first configuration directly defining the key or else Zero" + ) = forAll { (skm: StructureKeyMask) => + import skm.{ structure, key } + val mask = ScopeMask(config = false) + val resolvedConfig = Resolve.resolveConfig(structure.extra, key.key, mask)(key.scope).config + parseCheck(structure, key, mask)( + sk 
=> (sk.scope.config == resolvedConfig) || (sk.scope == Scope.GlobalScope) + ) :| s"Expected configuration: ${resolvedConfig map (_.name)}" } - // Here we're shadowing the in-scope implicit called `mkEnv` for this method - // so that it will use the passed-in `Gen` rather than the one imported - // from TestBuild. - def genStructure(implicit mkEnv: Gen[Env]): Gen[Structure] = - structureGenF { (scopes: Seq[Scope], env: Env, current: ProjectRef) => - val settings = - for { - scope <- scopes - t <- env.tasks - } yield Def.setting(ScopedKey(scope, t.key), Def.value("")) - TestBuild.structure(env, settings, current) - } - - // Here we're shadowing the in-scope implicit called `mkEnv` for this method - // so that it will use the passed-in `Gen` rather than the one imported - // from TestBuild. - def structureGenF(f: (Seq[Scope], Env, ProjectRef) => Structure)( - implicit mkEnv: Gen[Env]): Gen[Structure] = - structureGen((s, e, p) => Gen.const(f(s, e, p))) - // Here we're shadowing the in-scope implicit called `mkEnv` for this method - // so that it will use the passed-in `Gen` rather than the one imported - // from TestBuild. - def structureGen(f: (Seq[Scope], Env, ProjectRef) => Gen[Structure])( - implicit mkEnv: Gen[Env]): Gen[Structure] = + implicit val arbStructure: Arbitrary[Structure] = Arbitrary { for { env <- mkEnv loadFactor <- choose(0.0, 1.0) scopes <- pickN(loadFactor, env.allFullScopes) current <- oneOf(env.allProjects.unzip._1) - structure <- f(scopes, env, current) + structure <- { + val settings = for (scope <- scopes; t <- env.tasks) + yield Def.setting(ScopedKey(scope, t.key), Def.value("")) + TestBuild.structure(env, settings, current) + } } yield structure + } - // pickN is a function that randomly picks load % items from the from sequence. 
+ final class StructureKeyMask(val structure: Structure, val key: ScopedKey[_], val mask: ScopeMask) + + implicit val arbStructureKeyMask: Arbitrary[StructureKeyMask] = Arbitrary { + for { + mask <- maskGen + structure <- arbitrary[Structure] + key <- for { + scope <- TestBuild.scope(structure.env) + key <- oneOf(structure.allAttributeKeys.toSeq) + } yield ScopedKey(scope, key) + } yield new StructureKeyMask(structure, key, mask) + } + + def parseCheck( + structure: Structure, + key: ScopedKey[_], + mask: ScopeMask, + showZeroConfig: Boolean = false, + )(f: ScopedKey[_] => Prop): Prop = { + val s = displayMasked(key, mask, showZeroConfig) + val parser = makeParser(structure) + val parsed = Parser.result(parser, s).left.map(_().toString) + ( + parsed.fold(_ => falsified, f) + :| s"Key: ${Scope.displayPedantic(key.scope, key.key.label)}" + :| s"Mask: $mask" + :| s"Key string: '$s'" + :| s"Parsed: ${parsed.right.map(displayFull)}" + :| s"Structure: $structure" + ) + } + + // pickN is a function that randomly picks load % items from the "from" sequence. // The rest of the tests expect at least one item, so I changed it to return 1 in case of 0. 
def pickN[T](load: Double, from: Seq[T]): Gen[Seq[T]] = - pick(Math.max((load * from.size).toInt, 1), from) + pick((load * from.size).toInt max 1, from) } diff --git a/main/src/test/scala/PluginCommandTest.scala b/main/src/test/scala/PluginCommandTest.scala index 8262326ae..d40863578 100644 --- a/main/src/test/scala/PluginCommandTest.scala +++ b/main/src/test/scala/PluginCommandTest.scala @@ -38,23 +38,29 @@ object PluginCommandTest extends Specification { "The `plugin` command" should { "should work for plugins within nested in one package" in { - val output = processCommand("plugin sbt.PluginCommandTestPlugin0", - PluginCommandTestPlugin0, - PluginCommandTestPlugin1) + val output = processCommand( + "plugin sbt.PluginCommandTestPlugin0", + PluginCommandTestPlugin0, + PluginCommandTestPlugin1 + ) output must contain("sbt.PluginCommandTestPlugin0 is activated.") } "should work for plugins nested more than one package" in { - val output = processCommand("plugin sbt.subpackage.PluginCommandTestPlugin1", - PluginCommandTestPlugin0, - PluginCommandTestPlugin1) + val output = processCommand( + "plugin sbt.subpackage.PluginCommandTestPlugin1", + PluginCommandTestPlugin0, + PluginCommandTestPlugin1 + ) output must contain("sbt.subpackage.PluginCommandTestPlugin1 is activated.") } "suggest a plugin when given an incorrect plugin with a similar name" in { - val output = processCommand("plugin PluginCommandTestPlugin0", - PluginCommandTestPlugin0, - PluginCommandTestPlugin1) + val output = processCommand( + "plugin PluginCommandTestPlugin0", + PluginCommandTestPlugin0, + PluginCommandTestPlugin1 + ) output must contain( "Not a valid plugin: PluginCommandTestPlugin0 (similar: sbt.PluginCommandTestPlugin0, sbt.subpackage.PluginCommandTestPlugin1)" ) @@ -123,14 +129,16 @@ object FakeState { val loadedBuildUnit = Load.resolveProjects(base.toURI, partBuildUnit, _ => testProject.id) val units = Map(base.toURI -> loadedBuildUnit) - val buildStructure = new BuildStructure(units, - 
base.toURI, - settings, - data, - structureIndex, - streams, - delegates, - scopeLocal) + val buildStructure = new BuildStructure( + units, + base.toURI, + settings, + data, + structureIndex, + streams, + delegates, + scopeLocal + ) val attributes = AttributeMap.empty ++ AttributeMap( AttributeEntry(Keys.sessionSettings, sessionSettings), @@ -145,9 +153,11 @@ object FakeState { List(), State.newHistory, attributes, - GlobalLogging.initial(MainAppender.globalDefault(ConsoleOut.systemOut), - File.createTempFile("sbt", ".log"), - ConsoleOut.systemOut), + GlobalLogging.initial( + MainAppender.globalDefault(ConsoleOut.systemOut), + File.createTempFile("sbt", ".log"), + ConsoleOut.systemOut + ), None, State.Continue ) diff --git a/main/src/test/scala/PluginsTest.scala b/main/src/test/scala/PluginsTest.scala index 0062a7782..52f381cd1 100644 --- a/main/src/test/scala/PluginsTest.scala +++ b/main/src/test/scala/PluginsTest.scala @@ -39,18 +39,20 @@ object PluginsTest extends Specification { } "throw an AutoPluginException on conflicting requirements" in { deducePlugin(S, log) must throwAn[AutoPluginException]( - message = """Contradiction in enabled plugins: - - requested: sbt.AI\$S - - enabled: sbt.AI\$S, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B, sbt.AI\$A - - conflict: sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$S""") + message = s"""Contradiction in enabled plugins: + - requested: sbt.AI\\$$S + - enabled: sbt.AI\\$$S, sbt.AI\\$$Q, sbt.AI\\$$R, sbt.AI\\$$B, sbt.AI\\$$A + - conflict: sbt.AI\\$$R is enabled by sbt.AI\\$$Q; excluded by sbt.AI\\$$S""" + ) } "generates a detailed report on conflicting requirements" in { - deducePlugin(T && U, log) must throwAn[AutoPluginException](message = - """Contradiction in enabled plugins: - - requested: sbt.AI\$T && sbt.AI\$U - - enabled: sbt.AI\$U, sbt.AI\$T, sbt.AI\$A, sbt.AI\$Q, sbt.AI\$R, sbt.AI\$B - - conflict: sbt.AI\$Q is enabled by sbt.AI\$A && sbt.AI\$B; required by sbt.AI\$T, sbt.AI\$R; excluded by sbt.AI\$U - - conflict: 
sbt.AI\$R is enabled by sbt.AI\$Q; excluded by sbt.AI\$T""") + deducePlugin(T && U, log) must throwAn[AutoPluginException]( + message = s"""Contradiction in enabled plugins: + - requested: sbt.AI\\$$T && sbt.AI\\$$U + - enabled: sbt.AI\\$$U, sbt.AI\\$$T, sbt.AI\\$$A, sbt.AI\\$$Q, sbt.AI\\$$R, sbt.AI\\$$B + - conflict: sbt.AI\\$$Q is enabled by sbt.AI\\$$A && sbt.AI\\$$B; required by sbt.AI\\$$T, sbt.AI\\$$R; excluded by sbt.AI\\$$U + - conflict: sbt.AI\\$$R is enabled by sbt.AI\\$$Q; excluded by sbt.AI\\$$T""" + ) } } } diff --git a/main/src/test/scala/sbt/internal/TestBuild.scala b/main/src/test/scala/sbt/internal/TestBuild.scala index 7b53b1c7c..825412d35 100644 --- a/main/src/test/scala/sbt/internal/TestBuild.scala +++ b/main/src/test/scala/sbt/internal/TestBuild.scala @@ -48,11 +48,13 @@ abstract class TestBuild { lazy val delegated = scopes map env.delegates } - sealed case class Structure(env: Env, - current: ProjectRef, - data: Settings[Scope], - keyIndex: KeyIndex, - keyMap: Map[String, AttributeKey[_]]) { + sealed case class Structure( + env: Env, + current: ProjectRef, + data: Settings[Scope], + keyIndex: KeyIndex, + keyMap: Map[String, AttributeKey[_]] + ) { override def toString = env.toString + "\n" + "current: " + current + "\nSettings:\n\t" + showData + keyMap.keys .mkString("All keys:\n\t", ", ", "") @@ -64,13 +66,15 @@ abstract class TestBuild { } val extra: BuildUtil[Proj] = { val getp = (build: URI, project: String) => env.buildMap(build).projectMap(project) - new BuildUtil(keyIndex, - data, - env.root.uri, - env.rootProject, - getp, - _.configurations.map(c => ConfigKey(c.name)), - Relation.empty) + new BuildUtil( + keyIndex, + data, + env.root.uri, + env.rootProject, + getp, + _.configurations.map(c => ConfigKey(c.name)), + Relation.empty + ) } lazy val allAttributeKeys: Set[AttributeKey[_]] = { @@ -88,8 +92,10 @@ abstract class TestBuild { val taskAxesMappings = for ((scope, keys) <- data.data.toIterable; key <- keys.keys) yield - 
(ScopedKey(scope.copy(task = Zero), key), scope.task): (ScopedKey[_], - ScopeAxis[AttributeKey[_]]) + (ScopedKey(scope.copy(task = Zero), key), scope.task): ( + ScopedKey[_], + ScopeAxis[AttributeKey[_]] + ) val taskAxes = Relation.empty ++ taskAxesMappings val zero = new HashSet[ScopedKey[_]] @@ -143,7 +149,6 @@ abstract class TestBuild { inheritProject, inheritConfig, inheritTask, - (ref, mp) => Vector() ) lazy val allFullScopes: Seq[Scope] = for { @@ -214,7 +219,7 @@ abstract class TestBuild { } def structure(env: Env, settings: Seq[Setting[_]], current: ProjectRef): Structure = { - implicit val display = Def.showRelativeKey(current, env.allProjects.size > 1) + implicit val display = Def.showRelativeKey2(current) if (settings.isEmpty) { try { sys.error("settings is empty") @@ -273,10 +278,12 @@ abstract class TestBuild { containerOfN[Vector, T](ig, g) } - implicit def genProjects(build: URI)(implicit genID: Gen[String], - maxDeps: Gen[Int], - count: Gen[Int], - confs: Gen[Seq[Configuration]]): Gen[Seq[Proj]] = + implicit def genProjects(build: URI)( + implicit genID: Gen[String], + maxDeps: Gen[Int], + count: Gen[Int], + confs: Gen[Seq[Configuration]] + ): Gen[Seq[Proj]] = genAcyclic(maxDeps, genID, count) { (id: String) => for (cs <- confs) yield { (deps: Seq[Proj]) => new Proj(id, deps.map { dep => @@ -285,23 +292,30 @@ abstract class TestBuild { } } - def genConfigs(implicit genName: Gen[String], - maxDeps: Gen[Int], - count: Gen[Int]): Gen[Vector[Configuration]] = + def genConfigs( + implicit genName: Gen[String], + maxDeps: Gen[Int], + count: Gen[Int] + ): Gen[Vector[Configuration]] = genAcyclicDirect[Configuration, String](maxDeps, genName, count)( (key, deps) => Configuration .of(key.capitalize, key) - .withExtendsConfigs(deps.toVector)) + .withExtendsConfigs(deps.toVector) + ) - def genTasks(implicit genName: Gen[String], - maxDeps: Gen[Int], - count: Gen[Int]): Gen[Vector[Taskk]] = - genAcyclicDirect[Taskk, String](maxDeps, genName, count)((key, deps) 
=> - new Taskk(AttributeKey[String](key), deps)) + def genTasks( + implicit genName: Gen[String], + maxDeps: Gen[Int], + count: Gen[Int] + ): Gen[Vector[Taskk]] = + genAcyclicDirect[Taskk, String](maxDeps, genName, count)( + (key, deps) => new Taskk(AttributeKey[String](key), deps) + ) def genAcyclicDirect[A, T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])( - make: (T, Vector[A]) => A): Gen[Vector[A]] = + make: (T, Vector[A]) => A + ): Gen[Vector[A]] = genAcyclic[A, T](maxDeps, keyGen, max) { t => Gen.const { deps => make(t, deps.toVector) @@ -309,14 +323,16 @@ abstract class TestBuild { } def genAcyclic[A, T](maxDeps: Gen[Int], keyGen: Gen[T], max: Gen[Int])( - make: T => Gen[Vector[A] => A]): Gen[Vector[A]] = + make: T => Gen[Vector[A] => A] + ): Gen[Vector[A]] = max flatMap { count => containerOfN[Vector, T](count, keyGen) flatMap { keys => genAcyclic(maxDeps, keys.distinct)(make) } } def genAcyclic[A, T](maxDeps: Gen[Int], keys: Vector[T])( - make: T => Gen[Vector[A] => A]): Gen[Vector[A]] = + make: T => Gen[Vector[A] => A] + ): Gen[Vector[A]] = genAcyclic(maxDeps, keys, Vector()) flatMap { pairs => sequence(pairs.map { case (key, deps) => mapMake(key, deps, make) }) flatMap { inputs => val made = new collection.mutable.HashMap[T, A] @@ -331,9 +347,11 @@ abstract class TestBuild { (key, deps, mk) } - def genAcyclic[T](maxDeps: Gen[Int], - names: Vector[T], - acc: Vector[Gen[(T, Vector[T])]]): Gen[Vector[(T, Vector[T])]] = + def genAcyclic[T]( + maxDeps: Gen[Int], + names: Vector[T], + acc: Vector[Gen[(T, Vector[T])]] + ): Gen[Vector[(T, Vector[T])]] = names match { case Vector() => sequence(acc) case Vector(x, xs @ _*) => diff --git a/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala b/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala index c230f8a68..807b31813 100644 --- a/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala +++ b/main/src/test/scala/sbt/internal/parser/CheckIfParsedSpec.scala @@ -11,8 +11,8 @@ 
package parser abstract class CheckIfParsedSpec( implicit val splitter: SplitExpressions.SplitExpression = - EvaluateConfigurations.splitExpressions) - extends AbstractSpec { + EvaluateConfigurations.splitExpressions +) extends AbstractSpec { this.getClass.getName should { diff --git a/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala b/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala index a3cb444a7..95f53e91d 100644 --- a/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala +++ b/main/src/test/scala/sbt/internal/parser/CommentedXmlSpec.scala @@ -44,10 +44,7 @@ class CommentedXmlSpec extends CheckIfParsedSpec { | |publishMavenStyle := true | - """.stripMargin, - "Wrong Commented xml ", - false, - true), + """.stripMargin, "Wrong Commented xml ", false, true), (""" |val scmpom = taskKey[xml.NodeBuffer]("Node buffer") | @@ -67,23 +64,14 @@ class CommentedXmlSpec extends CheckIfParsedSpec { | |publishMavenStyle := true | - """.stripMargin, - "Commented xml ", - false, - true), + """.stripMargin, "Commented xml ", false, true), (""" |import sbt._ | |// - """.stripMargin, - "Xml in comment2", - false, - false) + """.stripMargin, "Xml in comment2", false, false) ) } diff --git a/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala b/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala index d939d5e8c..bc2c78d38 100644 --- a/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala +++ b/main/src/test/scala/sbt/internal/parser/EmbeddedXmlSpec.scala @@ -52,16 +52,10 @@ class EmbeddedXmlSpec extends CheckIfParsedSpec { protected val files = Seq( (""" |val p = - """.stripMargin, - "Xml modified closing tag at end of file", - false, - true), + """.stripMargin, "Xml modified closing tag at end of file", false, true), (""" |val p = - """.stripMargin, - "Xml at end of file", - false, - true), + """.stripMargin, "Xml at end of file", false, true), ("""| | |name := "play-html-compressor" @@ -98,10 +92,7 @@ class 
EmbeddedXmlSpec extends CheckIfParsedSpec { | |val tra = "" | - """.stripMargin, - "Xml in string", - false, - true), + """.stripMargin, "Xml in string", false, true), ("""| | |name := "play-html-compressor" @@ -131,10 +122,7 @@ class EmbeddedXmlSpec extends CheckIfParsedSpec { | | | - | """.stripMargin, - "Xml with attributes", - false, - true), + | """.stripMargin, "Xml with attributes", false, true), ( """ |scalaVersion := "2.10.2" diff --git a/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala b/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala index 6d7cc7b42..77641efcc 100644 --- a/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala +++ b/main/src/test/scala/sbt/internal/parser/ErrorSpec.scala @@ -50,7 +50,8 @@ class ErrorSpec extends AbstractSpec { buildSbt.length, 2, "fake.txt", - new MessageOnlyException("fake")) must throwA[MessageOnlyException] + new MessageOnlyException("fake") + ) must throwA[MessageOnlyException] } "handle xml error " in { @@ -77,8 +78,7 @@ class ErrorSpec extends AbstractSpec { case exception: MessageOnlyException => val error = exception.getMessage """(\d+)""".r.findFirstIn(error) match { - case Some(x) => - true + case Some(_) => true case None => println(s"Number not found in $error") false diff --git a/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala b/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala index 3eb364ed9..8d8876091 100644 --- a/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala +++ b/main/src/test/scala/sbt/internal/parser/SessionSettingsSpec.scala @@ -33,8 +33,9 @@ abstract class AbstractSessionSettingsSpec(folder: String) extends AbstractSpec } } - private def runTestOnFiles(expectedResultAndMap: File => Seq[(List[String], Seq[SessionSetting])]) - : MatchResult[GenTraversableOnce[File]] = { + private def runTestOnFiles( + expectedResultAndMap: File => Seq[(List[String], Seq[SessionSetting])] + ): MatchResult[GenTraversableOnce[File]] = { val allFiles = 
rootDir .listFiles(new FilenameFilter() { diff --git a/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala b/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala index bda88bc0d..c4df5a0ce 100644 --- a/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala +++ b/main/src/test/scala/sbt/internal/parser/SplitExpressionsBehavior.scala @@ -15,7 +15,8 @@ import org.specs2.mutable.SpecificationLike trait SplitExpression { def split(s: String, file: File = new File("noFile"))( - implicit splitter: SplitExpressions.SplitExpression) = splitter(file, s.split("\n").toSeq) + implicit splitter: SplitExpressions.SplitExpression + ) = splitter(file, s.split("\n").toSeq) } trait SplitExpressionsBehavior extends SplitExpression { this: SpecificationLike => diff --git a/main/src/test/scala/sbt/internal/server/DefinitionTest.scala b/main/src/test/scala/sbt/internal/server/DefinitionTest.scala index 534d77083..d09c72e08 100644 --- a/main/src/test/scala/sbt/internal/server/DefinitionTest.scala +++ b/main/src/test/scala/sbt/internal/server/DefinitionTest.scala @@ -9,8 +9,6 @@ package sbt package internal package server -import sbt.internal.inc.Analysis - class DefinitionTest extends org.specs2.mutable.Specification { import Definition.textProcessor @@ -26,7 +24,8 @@ class DefinitionTest extends org.specs2.mutable.Specification { } "find valid standard scala identifier with comma" in { textProcessor.identifier("def foo(a: identifier, b: other) = ???", 13) must beSome( - "identifier") + "identifier" + ) } "find valid standard short scala identifier when caret is set at the start of it" in { textProcessor.identifier("val a = 0", 4) must beSome("a") @@ -90,11 +89,13 @@ class DefinitionTest extends org.specs2.mutable.Specification { } "match class in line version 4" in { textProcessor.classTraitObjectInLine("A")(" class A[A] ") must contain( - ("class A", 3)) + ("class A", 3) + ) } "match class in line version 5" in { 
textProcessor.classTraitObjectInLine("A")(" class A [A] ") must contain( - ("class A", 3)) + ("class A", 3) + ) } "match class in line version 6" in { textProcessor.classTraitObjectInLine("A")("class A[A[_]] {") must contain(("class A", 0)) @@ -113,11 +114,13 @@ class DefinitionTest extends org.specs2.mutable.Specification { } "match trait in line version 4" in { textProcessor.classTraitObjectInLine("A")(" trait A[A] ") must contain( - ("trait A", 3)) + ("trait A", 3) + ) } "match trait in line version 5" in { textProcessor.classTraitObjectInLine("A")(" trait A [A] ") must contain( - ("trait A", 3)) + ("trait A", 3) + ) } "match trait in line version 6" in { textProcessor.classTraitObjectInLine("A")("trait A[A[_]] {") must contain(("trait A", 0)) @@ -126,9 +129,12 @@ class DefinitionTest extends org.specs2.mutable.Specification { textProcessor.classTraitObjectInLine("B")("trait A ") must be empty } } + "definition" should { + import scalacache.caffeine._ import scalacache.modes.sync._ + "cache data in cache" in { val cache = CaffeineCache[Any] val cacheFile = "Test.scala" @@ -136,12 +142,11 @@ class DefinitionTest extends org.specs2.mutable.Specification { Definition.updateCache(cache)(cacheFile, useBinary) - val actual = cache.get(Definition.AnalysesKey) + val actual = Definition.AnalysesAccess.getFrom(cache) - actual.collect { - case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]] - }.get should contain("Test.scala" -> true -> None) + actual.get should contain("Test.scala" -> true -> None) } + "replace cache data in cache" in { val cache = CaffeineCache[Any] val cacheFile = "Test.scala" @@ -151,12 +156,11 @@ class DefinitionTest extends org.specs2.mutable.Specification { Definition.updateCache(cache)(cacheFile, falseUseBinary) Definition.updateCache(cache)(cacheFile, useBinary) - val actual = cache.get(Definition.AnalysesKey) + val actual = Definition.AnalysesAccess.getFrom(cache) - actual.collect { - case s => s.asInstanceOf[Set[((String, 
Boolean), Option[Analysis])]] - }.get should contain("Test.scala" -> true -> None) + actual.get should contain("Test.scala" -> true -> None) } + "cache more data in cache" in { val cache = CaffeineCache[Any] val cacheFile = "Test.scala" @@ -167,11 +171,9 @@ class DefinitionTest extends org.specs2.mutable.Specification { Definition.updateCache(cache)(otherCacheFile, otherUseBinary) Definition.updateCache(cache)(cacheFile, useBinary) - val actual = cache.get(Definition.AnalysesKey) + val actual = Definition.AnalysesAccess.getFrom(cache) - actual.collect { - case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]] - }.get should contain("Test.scala" -> true -> None, "OtherTest.scala" -> false -> None) + actual.get should contain("Test.scala" -> true -> None, "OtherTest.scala" -> false -> None) } } } diff --git a/main/src/test/scala/sbt/internal/server/SettingQueryTest.scala b/main/src/test/scala/sbt/internal/server/SettingQueryTest.scala index 7acf7955f..b60121458 100644 --- a/main/src/test/scala/sbt/internal/server/SettingQueryTest.scala +++ b/main/src/test/scala/sbt/internal/server/SettingQueryTest.scala @@ -122,24 +122,28 @@ object SettingQueryTest extends org.specs2.mutable.Specification { .put(globalBaseDirectory, globalDirFile) val config0 = defaultPreGlobal(state, baseFile, globalDirFile, state.log) - val config = defaultWithGlobal(state, baseFile, config0, globalDirFile, state.log) + val config = defaultWithGlobal(state, baseFile, config0, globalDirFile) val buildUnit: BuildUnit = { val loadedPlugins: LoadedPlugins = - noPlugins(projectStandard(baseFile), - config.copy(pluginManagement = config.pluginManagement.forPlugin)) + noPlugins( + projectStandard(baseFile), + config.copy(pluginManagement = config.pluginManagement.forPlugin) + ) val project: Project = { val project0 = Project("t", baseFile) settings projectSettings val fileToLoadedSbtFileMap = new mutable.HashMap[File, LoadedSbtFile] val autoPlugins = 
loadedPlugins.detected.deducePluginsFromProject(project0, state.log) val injectSettings = config.injectSettings - resolveProject(project0, - autoPlugins, - loadedPlugins, - injectSettings, - fileToLoadedSbtFileMap, - state.log) + resolveProject( + project0, + autoPlugins, + loadedPlugins, + injectSettings, + fileToLoadedSbtFileMap, + state.log + ) } val projects: Seq[Project] = Seq(project) @@ -160,7 +164,8 @@ object SettingQueryTest extends org.specs2.mutable.Specification { val units: Map[URI, LoadedBuildUnit] = loadedBuild.units val settings: Seq[Setting[_]] = finalTransforms( - buildConfigurations(loadedBuild, getRootProject(units), config.injectSettings)) + buildConfigurations(loadedBuild, getRootProject(units), config.injectSettings) + ) val delegates: Scope => Seq[Scope] = defaultDelegates(loadedBuild) val scopeLocal: ScopeLocal = EvaluateTask.injectStreams val display: Show[ScopedKey[_]] = Project showLoadingKey loadedBuild @@ -200,7 +205,8 @@ object SettingQueryTest extends org.specs2.mutable.Specification { "t/startYear" in qok("null", "scala.Option[Int]") "t/scalaArtifacts" in qok( """["scala-library","scala-compiler","scala-reflect","scala-actors","scalap"]""", - "scala.collection.Seq[java.lang.String]") + "scala.collection.Seq[java.lang.String]" + ) "t/libraryDependencies" in qok( """[{"organization":"org.scala-lang","name":"scala-library","revision":"2.12.1","isChanging":false,"isTransitive":true,"isForce":false,"explicitArtifacts":[],"inclusions":[],"exclusions":[],"extraAttributes":{},"crossVersion":{"type":"Disabled"}}]""", @@ -209,8 +215,10 @@ object SettingQueryTest extends org.specs2.mutable.Specification { "scalaVersion" in qko("Not a valid project ID: scalaVersion\\nscalaVersion\\n ^") "t/scalacOptions" in qko( - s"""Key ProjectRef(uri(\\"$baseUri\\"), \\"t\\") / Compile / scalacOptions is a task, can only query settings""") + s"""Key ProjectRef(uri(\\"$baseUri\\"), \\"t\\") / Compile / scalacOptions is a task, can only query settings""" + ) 
"t/fooo" in qko( - "Expected ':' (if selecting a configuration)\\nNot a valid key: fooo (similar: fork)\\nt/fooo\\n ^") + "Expected ':' (if selecting a configuration)\\nNot a valid key: fooo (similar: fork)\\nt/fooo\\n ^" + ) } } diff --git a/notes/1.1.1.markdown b/notes/1.1.1.markdown index 58cba0a73..2115809d3 100644 --- a/notes/1.1.1.markdown +++ b/notes/1.1.1.markdown @@ -17,7 +17,7 @@ ### autoStartServer setting sbt 1.1.1 adds a new global `Boolean` setting called `autoStartServer`, which is set to `true` by default. -When set to `true`, sbt shell will automatically start sbt server. Otherwise, it will not start the server until `startSever` command is issued. This could be used to opt out of server for security reasons. +When set to `true`, sbt shell will automatically start sbt server. Otherwise, it will not start the server until `startServer` command is issued. This could be used to opt out of server for security reasons. [#3922][3922] by [@swaldman][@swaldman] diff --git a/notes/1.1.2/in_configurations_scope_filter_factories.md b/notes/1.1.2/in_configurations_scope_filter_factories.md new file mode 100644 index 000000000..3effa6ac7 --- /dev/null +++ b/notes/1.1.2/in_configurations_scope_filter_factories.md @@ -0,0 +1,10 @@ +### Improvements + +- Adds factory methods for Configuration axis scope filters + +### More ways to create ScopeFilter for Configuration axis + +To create configuration axis `ScopeFilter` one has to provide actual configurations +to filter by. However it's not always possible to get hold of one. For example +`Classpaths.interSort` returns configuration names. 
+For cases like that there are now `inConfigurationsByKeys` and `inConfigurationsByRefs` to create `ScopeFilter`'s diff --git a/notes/1.1.4/improving_loading_settings_messaging.md b/notes/1.1.4/improving_loading_settings_messaging.md new file mode 100644 index 000000000..683846bbb --- /dev/null +++ b/notes/1.1.4/improving_loading_settings_messaging.md @@ -0,0 +1,22 @@ +### Improvements + +Now when loading a project that has multiple build.sbt files the logger shows the path as well. +Before it was: + +```bash +[info] Loading settings from build.sbt ... +[info] Loading settings from build.sbt ... +[info] Loading settings from build.sbt ... +[info] Loading settings from build.sbt ... +``` + +Now it's: + +```bash +[info] Loading settings from /home/user/Work/personal/someProject/build.sbt ... +[info] Loading settings from /home/user/Work/personal/someProject/subProject1/build.sbt ... +[info] Loading settings from /home/user/Work/personal/someProject/subProject2/build.sbt ... +[info] Loading settings from /home/user/Work/personal/someProject/subProject3/build.sbt ... +``` + +This should solve the issue: https://github.com/sbt/sbt/issues/3607 \ No newline at end of file diff --git a/notes/1.2.0/add-project-id-to-watching-message.md b/notes/1.2.0/add-project-id-to-watching-message.md new file mode 100644 index 000000000..7c6a1bf57 --- /dev/null +++ b/notes/1.2.0/add-project-id-to-watching-message.md @@ -0,0 +1,12 @@ +[@dwijnand]: https://github.com/dwijnand + +[#2038]: https://github.com/sbt/sbt/issues/2038 +[#3813]: https://github.com/sbt/sbt/pull/3813 + +### Fixes with compatibility implications + +### Improvements + +- Add the current project's id to `~`'s watching message. 
[#2038][]/[#3813][] by [@dwijnand][] + +### Bug fixes diff --git a/notes/1.2.0/cross-strict-aggregation.md b/notes/1.2.0/cross-strict-aggregation.md new file mode 100644 index 000000000..170207536 --- /dev/null +++ b/notes/1.2.0/cross-strict-aggregation.md @@ -0,0 +1,8 @@ +[@ruippeixotog]: https://github.com/ruippeixotog + +[#3698]: https://github.com/sbt/sbt/issues/3698 +[#3995]: https://github.com/sbt/sbt/pull/3995 + +### Improvements + +- Make `++ ` run `` only on compatible subprojects. [#3698][]/[#3995][] by [@ruippeixotog][] diff --git a/notes/1.2.0/fix-sbt012x-link.md b/notes/1.2.0/fix-sbt012x-link.md new file mode 100644 index 000000000..3e368f8cb --- /dev/null +++ b/notes/1.2.0/fix-sbt012x-link.md @@ -0,0 +1,4 @@ +### Bug fixes + +* Fixes link to SBT upgrade migration page + diff --git a/notes/1.2.0/global-eviction-warnin-options.md b/notes/1.2.0/global-eviction-warnin-options.md new file mode 100644 index 000000000..981bc2477 --- /dev/null +++ b/notes/1.2.0/global-eviction-warnin-options.md @@ -0,0 +1,3 @@ +### Improvements + +- Add the eviction warning options to global, so that one can change the options for all sub projects at a time. 
diff --git a/notes/1.2.0/introduce-CompositeProject.md b/notes/1.2.0/introduce-CompositeProject.md new file mode 100644 index 000000000..759d75a71 --- /dev/null +++ b/notes/1.2.0/introduce-CompositeProject.md @@ -0,0 +1,9 @@ + +[#3042]: https://github.com/sbt/sbt/issues/3042 +[#4056]: https://github.com/sbt/sbt/pull/4056 + +### Introduce CompositeProject + +### Improvements + +- Support for: `lazy val foo = someCompositeProject` (e.g.`CrossProject`) [#3042][]/[#4056][] diff --git a/notes/1.2.0/introduce-projectToLocalProject.md b/notes/1.2.0/introduce-projectToLocalProject.md new file mode 100644 index 000000000..8f856d3e8 --- /dev/null +++ b/notes/1.2.0/introduce-projectToLocalProject.md @@ -0,0 +1,12 @@ +[@dwijnand]: https://github.com/dwijnand + +[#3757]: https://github.com/sbt/sbt/issues/3757 +[#3836]: https://github.com/sbt/sbt/pull/3836 + +### Fixes with compatibility implications + +### Improvements + +- Introduces `projectToLocalProject` to replace `projectToRef`. [#3757][]/[#3836][] by [@dwijnand][] + +### Bug fixes diff --git a/notes/1.2.0/publishTo-not-required-if-not-publishing.md b/notes/1.2.0/publishTo-not-required-if-not-publishing.md new file mode 100644 index 000000000..b2c4ca842 --- /dev/null +++ b/notes/1.2.0/publishTo-not-required-if-not-publishing.md @@ -0,0 +1,12 @@ + +[@mpollmeier]: https://github.com/mpollmeier + +[#3760]: https://github.com/sbt/sbt/pull/3760 + +### Fixes with compatibility implications + +### Improvements + +### Bug fixes + +- Remove requirement on `publishTo` if `publishArtifact` is false. [#3760][] by [@mpollmeier][] diff --git a/notes/1.2.0/scripted-change.md b/notes/1.2.0/scripted-change.md new file mode 100644 index 000000000..63715ecab --- /dev/null +++ b/notes/1.2.0/scripted-change.md @@ -0,0 +1,27 @@ + +### Fixes with compatibility implications + +- In sbt 1.2, `ScriptedPlugin` is no longer triggered automatically. This allows easier use of the plugin in a multi-project build. We recommend migration to `SbtPlugin`. 
[#3514][3514]/[#3875][3875] by [@eed3si9n][@eed3si9n] +- `scriptedBufferLog` and `scriptedLaunchOpts` settings are changed so they are scoped globally. + +### Features + +- Adds `SbtPlugin`. See below. + +### Bug fixes + + +### SbtPlugin + +`SbtPlugin` is a new plugin that represents sbt plugin projects. + + lazy val fooPlugin = (project in file("plugin")) + .enablePlugins(SbtPlugin) + +This sets `sbtPlugin` setting to `true`, and brings in the new non-triggered `ScriptedPlugin`. + +[#3875][3875] by [@eed3si9n][@eed3si9n] + + [@eed3si9n]: https://github.com/eed3si9n + [3514]: https://github.com/sbt/sbt/issues/3514 + [3875]: https://github.com/sbt/sbt/pull/3875 diff --git a/notes/1.1.1/sample.md b/notes/sample.md similarity index 100% rename from notes/1.1.1/sample.md rename to notes/sample.md diff --git a/project/PublishBinPlugin.scala b/project/PublishBinPlugin.scala index a54d9958e..c013e78d6 100644 --- a/project/PublishBinPlugin.scala +++ b/project/PublishBinPlugin.scala @@ -5,14 +5,12 @@ import sbt.librarymanagement.ConfigRef /** This local plugin provides ways of publishing just the binary jar. 
*/ object PublishBinPlugin extends AutoPlugin { - override def requires = plugins.JvmPlugin override def trigger = allRequirements object autoImport { val publishLocalBin = taskKey[Unit]("") val publishLocalBinConfig = taskKey[PublishConfiguration]("") } - import autoImport._ override def globalSettings = Seq(publishLocalBin := (())) @@ -20,22 +18,17 @@ object PublishBinPlugin extends AutoPlugin { override def projectSettings = Def settings ( publishLocalBin := Classpaths.publishTask(publishLocalBinConfig, deliverLocal).value, publishLocalBinConfig := { - val _ = deliverLocal.value Classpaths.publishConfig( - publishMavenStyle.value, - deliverPattern(crossTarget.value), + false, // publishMavenStyle.value, + Classpaths.deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, (packagedArtifacts in publishLocalBin).value.toVector, (checksums in publishLocalBin).value.toVector, - resolverName = "local", logging = ivyLoggingLevel.value, overwrite = isSnapshot.value ) }, packagedArtifacts in publishLocalBin := Classpaths.packaged(Seq(packageBin in Compile)).value ) - - def deliverPattern(outputPath: File): String = - (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath } diff --git a/project/Scripted.scala b/project/Scripted.scala index 788f1d60e..7a46149fc 100644 --- a/project/Scripted.scala +++ b/project/Scripted.scala @@ -1,70 +1,68 @@ +import java.lang.reflect.InvocationTargetException + import sbt._ -import Keys._ -import Def.Initialize import sbt.internal.inc.ScalaInstance -import sbt.internal.inc.classpath +import sbt.internal.inc.classpath.{ ClasspathUtilities, FilteredLoader } -import scala.language.reflectiveCalls - -object ScriptedPlugin extends sbt.AutoPlugin { +object ScriptedPlugin extends AutoPlugin { override def requires = plugins.JvmPlugin + object autoImport extends ScriptedKeys { def scriptedPath = file("scripted") } import autoImport._ - 
import Scripted._ - override def projectSettings = Seq( + + override def globalSettings = super.globalSettings ++ Seq( scriptedBufferLog := true, - scriptedPrescripted := { _ => - } + scriptedPrescripted := { _ => }, ) } trait ScriptedKeys { - lazy val publishAll = TaskKey[Unit]("publish-all") - lazy val publishLocalBinAll = taskKey[Unit]("") - lazy val scripted = InputKey[Unit]("scripted") - lazy val scriptedUnpublished = InputKey[Unit]( - "scripted-unpublished", - "Execute scripted without publishing SBT first. Saves you some time when only your test has changed.") - lazy val scriptedSource = SettingKey[File]("scripted-source") - lazy val scriptedPrescripted = TaskKey[File => Unit]("scripted-prescripted") - lazy val scriptedBufferLog = SettingKey[Boolean]("scripted-buffer-log") - lazy val scriptedLaunchOpts = SettingKey[Seq[String]]( - "scripted-launch-opts", - "options to pass to jvm launching scripted tasks") + val publishAll = taskKey[Unit]("") + val publishLocalBinAll = taskKey[Unit]("") + val scripted = inputKey[Unit]("") + val scriptedUnpublished = inputKey[Unit]("Execute scripted without publishing sbt first. " + + "Saves you some time when only your test has changed") + val scriptedSource = settingKey[File]("") + val scriptedPrescripted = taskKey[File => Unit]("") + val scriptedBufferLog = settingKey[Boolean]("") + val scriptedLaunchOpts = settingKey[Seq[String]]("options to pass to jvm launching scripted tasks") } object Scripted { // This is to workaround https://github.com/sbt/io/issues/110 sys.props.put("jna.nosys", "true") - lazy val MavenResolverPluginTest = config("mavenResolverPluginTest") extend Compile - lazy val RepoOverrideTest = config("repoOverrideTest") extend Compile + val RepoOverrideTest = config("repoOverrideTest") extend Compile import sbt.complete._ - import DefaultParsers._ + // Paging, 1-index based. 
- case class ScriptedTestPage(page: Int, total: Int) + final case class ScriptedTestPage(page: Int, total: Int) + // FIXME: Duplicated with ScriptedPlugin.scriptedParser, this can be // avoided once we upgrade build.properties to 0.13.14 def scriptedParser(scriptedBase: File): Parser[Seq[String]] = { + import DefaultParsers._ + val scriptedFiles: NameFilter = ("test": NameFilter) | "pending" val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get map { (f: File) => val p = f.getParentFile (p.getParentFile.getName, p.getName) } - val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet); + val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet) val id = charClass(c => !c.isWhitespace && c != '/').+.string - val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/') + val groupP = token(id.examples(pairMap.keySet)) <~ token('/') // A parser for page definitions val pageP: Parser[ScriptedTestPage] = ("*" ~ NatBasic ~ "of" ~ NatBasic) map { case _ ~ page ~ _ ~ total => ScriptedTestPage(page, total) } + // Grabs the filenames from a given test group in the current page definition. 
def pagedFilenames(group: String, page: ScriptedTestPage): Seq[String] = { val files = pairMap(group).toSeq.sortBy(_.toLowerCase) @@ -74,9 +72,11 @@ object Scripted { if (page.page == page.total) dropped else dropped.take(pageSize) } + def nameP(group: String) = { token("*".id | id.examples(pairMap.getOrElse(group, Set.empty[String]))) } + val PagedIds: Parser[Seq[String]] = for { group <- groupP @@ -84,55 +84,64 @@ object Scripted { files = pagedFilenames(group, page) // TODO - Fail the parser if we don't have enough files for the given page size //if !files.isEmpty - } yield files map (f => group + '/' + f) + } yield files map (f => s"$group/$f") val testID = (for (group <- groupP; name <- nameP(group)) yield (group, name)) val testIdAsGroup = matched(testID) map (test => Seq(test)) + //(token(Space) ~> matched(testID)).* (token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten) } - // Interface to cross class loader - type SbtScriptedRunner = { - def runInParallel(resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - bootProperties: File, - launchOpts: Array[String], - prescripted: java.util.List[File]): Unit - } - - def doScripted(launcher: File, - scriptedSbtClasspath: Seq[Attributed[File]], - scriptedSbtInstance: ScalaInstance, - sourcePath: File, - bufferLog: Boolean, - args: Seq[String], - prescripted: File => Unit, - launchOpts: Seq[String]): Unit = { + def doScripted( + launcher: File, + scriptedSbtClasspath: Seq[Attributed[File]], + scriptedSbtInstance: ScalaInstance, + sourcePath: File, + bufferLog: Boolean, + args: Seq[String], + prescripted: File => Unit, + launchOpts: Seq[String], + ): Unit = { System.err.println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}") + // Force Log4J to not use a thread context classloader otherwise it throws a CCE sys.props(org.apache.logging.log4j.util.LoaderUtil.IGNORE_TCCL_PROPERTY) = "true" - val noJLine = new classpath.FilteredLoader(scriptedSbtInstance.loader, "jline." 
:: Nil) - val loader = classpath.ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine) - val bridgeClass = Class.forName("sbt.test.ScriptedRunner", true, loader) + + val noJLine = new FilteredLoader(scriptedSbtInstance.loader, "jline." :: Nil) + val loader = ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine) + val bridgeClass = Class.forName("sbt.scriptedtest.ScriptedRunner", true, loader) + + // Interface to cross class loader + type SbtScriptedRunner = { + def runInParallel( + resourceBaseDirectory: File, + bufferLog: Boolean, + tests: Array[String], + bootProperties: File, + launchOpts: Array[String], + prescripted: java.util.List[File], + ): Unit + } + val bridge = bridgeClass.getDeclaredConstructor().newInstance().asInstanceOf[SbtScriptedRunner] + try { // Using java.util.List to encode File => Unit. val callback = new java.util.AbstractList[File] { - override def add(x: File): Boolean = { - prescripted(x) - false - } + override def add(x: File): Boolean = { prescripted(x); false } def get(x: Int): sbt.File = ??? 
def size(): Int = 0 } - bridge.runInParallel(sourcePath, - bufferLog, - args.toArray, - launcher, - launchOpts.toArray, - callback) - } catch { case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause } + import scala.language.reflectiveCalls + bridge.runInParallel( + sourcePath, + bufferLog, + args.toArray, + launcher, + launchOpts.toArray, + callback, + ) + } catch { case ite: InvocationTargetException => throw ite.getCause } } } diff --git a/project/flamegraph_svg.png b/project/flamegraph_svg.png new file mode 100644 index 000000000..2fc2638c3 Binary files /dev/null and b/project/flamegraph_svg.png differ diff --git a/project/plugins.sbt b/project/plugins.sbt index b13554d41..46054b0a0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1,10 @@ scalaVersion := "2.12.6" scalacOptions ++= Seq("-feature", "-language:postfixOps") -addSbtPlugin("org.scala-sbt" % "sbt-houserules" % "0.3.6") -addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.4.0") -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.2") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.8.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.2.0") +addSbtPlugin("org.scala-sbt" % "sbt-houserules" % "0.3.6") +addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.4.0") +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.2") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.8.0") +addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.15") +addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.9") diff --git a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcNotificationMessageFormats.scala b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcNotificationMessageFormats.scala index b00ae5d07..d4fe42afe 100644 --- a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcNotificationMessageFormats.scala +++ b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcNotificationMessageFormats.scala @@ -17,7 +17,8 @@ trait 
JsonRpcNotificationMessageFormats { new JsonFormat[sbt.internal.protocol.JsonRpcNotificationMessage] { override def read[J]( jsOpt: Option[J], - unbuilder: Unbuilder[J]): sbt.internal.protocol.JsonRpcNotificationMessage = { + unbuilder: Unbuilder[J] + ): sbt.internal.protocol.JsonRpcNotificationMessage = { jsOpt match { case Some(js) => unbuilder.beginObject(js) @@ -32,8 +33,10 @@ trait JsonRpcNotificationMessageFormats { deserializationError("Expected JsObject but found None") } } - override def write[J](obj: sbt.internal.protocol.JsonRpcNotificationMessage, - builder: Builder[J]): Unit = { + override def write[J]( + obj: sbt.internal.protocol.JsonRpcNotificationMessage, + builder: Builder[J] + ): Unit = { builder.beginObject() builder.addField("jsonrpc", obj.jsonrpc) builder.addField("method", obj.method) diff --git a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcRequestMessageFormats.scala b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcRequestMessageFormats.scala index 2c1273534..1ed949b31 100644 --- a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcRequestMessageFormats.scala +++ b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcRequestMessageFormats.scala @@ -8,15 +8,17 @@ package sbt.internal.protocol.codec import sjsonnew.shaded.scalajson.ast.unsafe.JValue -import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } +import sjsonnew.{ Builder, DeserializationException, JsonFormat, Unbuilder, deserializationError } trait JsonRpcRequestMessageFormats { self: sbt.internal.util.codec.JValueFormats with sjsonnew.BasicJsonProtocol => implicit lazy val JsonRpcRequestMessageFormat : JsonFormat[sbt.internal.protocol.JsonRpcRequestMessage] = new JsonFormat[sbt.internal.protocol.JsonRpcRequestMessage] { - override def read[J](jsOpt: Option[J], - unbuilder: Unbuilder[J]): sbt.internal.protocol.JsonRpcRequestMessage = { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): 
sbt.internal.protocol.JsonRpcRequestMessage = { jsOpt match { case Some(js) => unbuilder.beginObject(js) @@ -24,7 +26,7 @@ trait JsonRpcRequestMessageFormats { val id = try { unbuilder.readField[String]("id") } catch { - case _: Throwable => { + case _: DeserializationException => { val prefix = "\u2668" // Append prefix to show the original type was Number prefix + unbuilder.readField[Long]("id").toString } @@ -39,8 +41,10 @@ trait JsonRpcRequestMessageFormats { deserializationError("Expected JsObject but found None") } } - override def write[J](obj: sbt.internal.protocol.JsonRpcRequestMessage, - builder: Builder[J]): Unit = { + override def write[J]( + obj: sbt.internal.protocol.JsonRpcRequestMessage, + builder: Builder[J] + ): Unit = { builder.beginObject() builder.addField("jsonrpc", obj.jsonrpc) builder.addField("id", obj.id) diff --git a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseErrorFormats.scala b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseErrorFormats.scala index 110a6cae2..d330ac5bb 100644 --- a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseErrorFormats.scala +++ b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseErrorFormats.scala @@ -15,8 +15,10 @@ trait JsonRpcResponseErrorFormats { implicit lazy val JsonRpcResponseErrorFormat : JsonFormat[sbt.internal.protocol.JsonRpcResponseError] = new JsonFormat[sbt.internal.protocol.JsonRpcResponseError] { - override def read[J](jsOpt: Option[J], - unbuilder: Unbuilder[J]): sbt.internal.protocol.JsonRpcResponseError = { + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): sbt.internal.protocol.JsonRpcResponseError = { jsOpt match { case Some(js) => unbuilder.beginObject(js) @@ -31,8 +33,10 @@ trait JsonRpcResponseErrorFormats { deserializationError("Expected JsObject but found None") } } - override def write[J](obj: sbt.internal.protocol.JsonRpcResponseError, - builder: Builder[J]): Unit = { + override def 
write[J]( + obj: sbt.internal.protocol.JsonRpcResponseError, + builder: Builder[J] + ): Unit = { builder.beginObject() builder.addField("code", obj.code) builder.addField("message", obj.message) diff --git a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseMessageFormats.scala b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseMessageFormats.scala index c9d943296..db967420e 100644 --- a/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseMessageFormats.scala +++ b/protocol/src/main/scala/sbt/internal/protocol/codec/JsonRpcResponseMessageFormats.scala @@ -7,7 +7,13 @@ package sbt.internal.protocol.codec -import _root_.sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError } +import _root_.sjsonnew.{ + Builder, + DeserializationException, + JsonFormat, + Unbuilder, + deserializationError +} import sjsonnew.shaded.scalajson.ast.unsafe._ trait JsonRpcResponseMessageFormats { @@ -19,7 +25,8 @@ trait JsonRpcResponseMessageFormats { new JsonFormat[sbt.internal.protocol.JsonRpcResponseMessage] { override def read[J]( jsOpt: Option[J], - unbuilder: Unbuilder[J]): sbt.internal.protocol.JsonRpcResponseMessage = { + unbuilder: Unbuilder[J] + ): sbt.internal.protocol.JsonRpcResponseMessage = { jsOpt match { case Some(js) => unbuilder.beginObject(js) @@ -27,7 +34,8 @@ trait JsonRpcResponseMessageFormats { val id = try { unbuilder.readField[Option[String]]("id") } catch { - case _: Throwable => unbuilder.readField[Option[Long]]("id") map { _.toString } + case _: DeserializationException => + unbuilder.readField[Option[Long]]("id") map { _.toString } } val result = unbuilder.lookupField("result") map { @@ -43,8 +51,10 @@ trait JsonRpcResponseMessageFormats { deserializationError("Expected JsObject but found None") } } - override def write[J](obj: sbt.internal.protocol.JsonRpcResponseMessage, - builder: Builder[J]): Unit = { + override def write[J]( + obj: sbt.internal.protocol.JsonRpcResponseMessage, + builder: Builder[J] 
+ ): Unit = { // Parse given id to Long or String judging by prefix def parseId(str: String): Either[Long, String] = { if (str.startsWith("\u2668")) Left(str.substring(1).toLong) diff --git a/protocol/src/main/scala/sbt/protocol/Serialization.scala b/protocol/src/main/scala/sbt/protocol/Serialization.scala index 75b9e7c83..21d798b80 100644 --- a/protocol/src/main/scala/sbt/protocol/Serialization.scala +++ b/protocol/src/main/scala/sbt/protocol/Serialization.scala @@ -8,7 +8,7 @@ package sbt package protocol -import sjsonnew.JsonFormat +import sjsonnew.{ JsonFormat, JsonWriter } import sjsonnew.support.scalajson.unsafe.{ Parser, Converter, CompactPrinter } import sjsonnew.shaded.scalajson.ast.unsafe.{ JValue, JObject, JString } import java.nio.ByteBuffer @@ -41,37 +41,31 @@ object Serialization { CompactPrinter(json).getBytes("UTF-8") } - /** - * This formats the message according to JSON-RPC. - * http://www.jsonrpc.org/specification - */ + /** This formats the message according to JSON-RPC. http://www.jsonrpc.org/specification */ private[sbt] def serializeResponseMessage(message: JsonRpcResponseMessage): Array[Byte] = { import sbt.internal.protocol.codec.JsonRPCProtocol._ - val json: JValue = Converter.toJson[JsonRpcResponseMessage](message).get - val body = CompactPrinter(json) - val bodyBytes = body.getBytes("UTF-8") - - (s"Content-Length: ${bodyBytes.size}\r\n" + - s"Content-Type: $VsCode\r\n" + - "\r\n" + - body).getBytes("UTF-8") + serializeResponse(message) } - /** - * This formats the message according to JSON-RPC. - * http://www.jsonrpc.org/specification - */ + /** This formats the message according to JSON-RPC. 
http://www.jsonrpc.org/specification */ private[sbt] def serializeNotificationMessage( - message: JsonRpcNotificationMessage): Array[Byte] = { + message: JsonRpcNotificationMessage, + ): Array[Byte] = { import sbt.internal.protocol.codec.JsonRPCProtocol._ - val json: JValue = Converter.toJson[JsonRpcNotificationMessage](message).get - val body = CompactPrinter(json) - val bodyBytes = body.getBytes("UTF-8") + serializeResponse(message) + } - (s"Content-Length: ${bodyBytes.size}\r\n" + - s"Content-Type: $VsCode\r\n" + - "\r\n" + - body).getBytes("UTF-8") + private[sbt] def serializeResponse[A: JsonWriter](message: A): Array[Byte] = { + val json: JValue = Converter.toJson[A](message).get + val body = CompactPrinter(json) + val bodyLength = body.getBytes("UTF-8").length + + Iterator( + s"Content-Length: $bodyLength", + s"Content-Type: $VsCode", + "", + body + ).mkString("\r\n").getBytes("UTF-8") } /** diff --git a/run/src/main/scala/sbt/Fork.scala b/run/src/main/scala/sbt/Fork.scala index 74b70d024..3fc3e4142 100644 --- a/run/src/main/scala/sbt/Fork.scala +++ b/run/src/main/scala/sbt/Fork.scala @@ -64,9 +64,11 @@ final class Fork(val commandName: String, val runnerClass: Option[String]) { case out: CustomOutput => (process #> out.output).run(connectInput = false) } } - private[this] def makeOptions(jvmOptions: Seq[String], - bootJars: Iterable[File], - arguments: Seq[String]): Seq[String] = { + private[this] def makeOptions( + jvmOptions: Seq[String], + bootJars: Iterable[File], + arguments: Seq[String] + ): Seq[String] = { val boot = if (bootJars.isEmpty) None else diff --git a/run/src/main/scala/sbt/Run.scala b/run/src/main/scala/sbt/Run.scala index ff5426b6d..a1fb598d6 100644 --- a/run/src/main/scala/sbt/Run.scala +++ b/run/src/main/scala/sbt/Run.scala @@ -29,8 +29,8 @@ class ForkRun(config: ForkOptions) extends ScalaRun { if (exitCode == 0) Success(()) else Failure( - new RuntimeException( - s"""Nonzero exit code returned from $label: $exitCode""".stripMargin)) + 
new RuntimeException(s"""Nonzero exit code returned from $label: $exitCode""".stripMargin) + ) val process = fork(mainClass, classpath, options, log) def cancel() = { log.warn("Run canceled.") @@ -77,10 +77,12 @@ class Run(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) extends S if (trapExit) Run.executeTrapExit(execute(), log) else directExecute() } - private def run0(mainClassName: String, - classpath: Seq[File], - options: Seq[String], - log: Logger): Unit = { + private def run0( + mainClassName: String, + classpath: Seq[File], + options: Seq[String], + log: Logger + ): Unit = { log.debug(" Classpath:\n\t" + classpath.mkString("\n\t")) val loader = ClasspathUtilities.makeLoader(classpath, instance, nativeTmp) val main = getMainMethod(mainClassName, loader) @@ -90,7 +92,7 @@ class Run(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) extends S val currentThread = Thread.currentThread val oldLoader = Thread.currentThread.getContextClassLoader currentThread.setContextClassLoader(loader) - try { main.invoke(null, options.toArray[String]) } finally { + try { main.invoke(null, options.toArray[String]); () } finally { currentThread.setContextClassLoader(oldLoader) } } @@ -112,7 +114,8 @@ class Run(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) extends S /** This module is an interface to starting the scala interpreter or runner.*/ object Run { def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger)( - implicit runner: ScalaRun) = + implicit runner: ScalaRun + ) = runner.run(mainClass, classpath, options, log) /** Executes the given function, trapping calls to System.exit. 
*/ diff --git a/run/src/main/scala/sbt/SelectMainClass.scala b/run/src/main/scala/sbt/SelectMainClass.scala index b840bf839..86bcbcdb9 100644 --- a/run/src/main/scala/sbt/SelectMainClass.scala +++ b/run/src/main/scala/sbt/SelectMainClass.scala @@ -9,8 +9,10 @@ package sbt object SelectMainClass { // Some(SimpleReader.readLine _) - def apply(promptIfMultipleChoices: Option[String => Option[String]], - mainClasses: Seq[String]): Option[String] = { + def apply( + promptIfMultipleChoices: Option[String => Option[String]], + mainClasses: Seq[String] + ): Option[String] = { mainClasses.toList match { case Nil => None case head :: Nil => Some(head) diff --git a/run/src/main/scala/sbt/TrapExit.scala b/run/src/main/scala/sbt/TrapExit.scala index d1a16ef52..2dea6d17f 100644 --- a/run/src/main/scala/sbt/TrapExit.scala +++ b/run/src/main/scala/sbt/TrapExit.scala @@ -113,8 +113,9 @@ object TrapExit { * Recurses into the causes of the given exception looking for a cause of type CauseType. If one is found, `withType` is called with that cause. * If not, `notType` is called with the root cause. 
*/ - private def withCause[CauseType <: Throwable, T](e: Throwable)(withType: CauseType => T)( - notType: Throwable => T)(implicit mf: Manifest[CauseType]): T = { + private def withCause[CauseType <: Throwable, T]( + e: Throwable + )(withType: CauseType => T)(notType: Throwable => T)(implicit mf: Manifest[CauseType]): T = { val clazz = mf.runtimeClass if (clazz.isInstance(e)) withType(e.asInstanceOf[CauseType]) @@ -152,7 +153,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM def runManaged(f: Supplier[Unit], xlog: xsbti.Logger): Int = { val _ = running.incrementAndGet() try runManaged0(f, xlog) - finally running.decrementAndGet() + finally { running.decrementAndGet(); () } } private[this] def runManaged0(f: Supplier[Unit], xlog: xsbti.Logger): Int = { val log: Logger = xlog @@ -264,6 +265,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM val old = groups.putIfAbsent(groupID, new WeakReference(g)) if (old.isEmpty) { // wasn't registered threadToApp.put(groupID, this) + () } } @@ -299,6 +301,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM threadToApp.remove(id) threads.remove(id) groups.remove(id) + () } /** Final cleanup for this application after it has terminated. */ @@ -346,8 +349,10 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM // takes a snapshot of the threads in `toProcess`, acquiring nested locks on each group to do so // the thread groups are accumulated in `accum` and then the threads in each are collected all at // once while they are all locked. This is the closest thing to a snapshot that can be accomplished. 
- private[this] def threadsInGroups(toProcess: List[ThreadGroup], - accum: List[ThreadGroup]): List[Thread] = toProcess match { + private[this] def threadsInGroups( + toProcess: List[ThreadGroup], + accum: List[ThreadGroup] + ): List[Thread] = toProcess match { case group :: tail => // ThreadGroup implementation synchronizes on its methods, so by synchronizing here, we can workaround its quirks somewhat group.synchronized { @@ -519,9 +524,10 @@ private final class ExitCode { * The default uncaught exception handler for managed executions. * It logs the thread and the exception. */ -private final class LoggingExceptionHandler(log: Logger, - delegate: Option[Thread.UncaughtExceptionHandler]) - extends Thread.UncaughtExceptionHandler { +private final class LoggingExceptionHandler( + log: Logger, + delegate: Option[Thread.UncaughtExceptionHandler] +) extends Thread.UncaughtExceptionHandler { def uncaughtException(t: Thread, e: Throwable): Unit = { log.error("(" + t.getName + ") " + e.toString) log.trace(e) diff --git a/sbt/src/main/scala/Import.scala b/sbt/src/main/scala/Import.scala index 3a0adc48a..170a39e4a 100644 --- a/sbt/src/main/scala/Import.scala +++ b/sbt/src/main/scala/Import.scala @@ -67,6 +67,7 @@ trait Import { type Cache[I, O] = sbt.util.Cache[I, O] val Cache = sbt.util.Cache val CacheImplicits = sbt.util.CacheImplicits + @deprecated("Use Tracked.inputChanged and Tracked.outputChanged instead", "1.0.1") type Changed[O] = sbt.util.Changed[O] type ChangeReport[T] = sbt.util.ChangeReport[T] val ChangeReport = sbt.util.ChangeReport diff --git a/sbt/src/sbt-test/actions/add-alias/A.scala b/sbt/src/sbt-test/actions/add-alias/A.scala index dbae0c48a..5b2ba478c 100644 --- a/sbt/src/sbt-test/actions/add-alias/A.scala +++ b/sbt/src/sbt-test/actions/add-alias/A.scala @@ -1,3 +1,5 @@ -object A extends App { - if(args(0).toBoolean) () else sys.error("Fail") +object A { + def main(args: Array[String]): Unit = { + if (args(0).toBoolean) () else sys.error("Fail") + } } 
diff --git a/sbt/src/sbt-test/actions/add-alias/build.sbt b/sbt/src/sbt-test/actions/add-alias/build.sbt index 451b72246..cb5461c6b 100644 --- a/sbt/src/sbt-test/actions/add-alias/build.sbt +++ b/sbt/src/sbt-test/actions/add-alias/build.sbt @@ -1,2 +1,2 @@ -addCommandAlias("demo-success", "run true") ++ +addCommandAlias("demo-success", "run true") addCommandAlias("demo-failure", "run false") diff --git a/sbt/src/sbt-test/actions/cross-multiproject/build.sbt b/sbt/src/sbt-test/actions/cross-multiproject/build.sbt index d0eff709c..f7c0b12fc 100644 --- a/sbt/src/sbt-test/actions/cross-multiproject/build.sbt +++ b/sbt/src/sbt-test/actions/cross-multiproject/build.sbt @@ -1,6 +1,6 @@ -inThisBuild(List( +inThisBuild( crossScalaVersions := Seq("2.12.1", "2.11.8") -)) +) lazy val rootProj = (project in file(".")) .aggregate(libProj, fooPlugin) diff --git a/sbt/src/sbt-test/actions/cross-strict-aggregation/build.sbt b/sbt/src/sbt-test/actions/cross-strict-aggregation/build.sbt new file mode 100644 index 000000000..6c9759d4b --- /dev/null +++ b/sbt/src/sbt-test/actions/cross-strict-aggregation/build.sbt @@ -0,0 +1,13 @@ +lazy val root = (project in file(".")) + .aggregate(core, module) + +lazy val core = (project in file("core")) + .settings( + scalaVersion := "2.12.4", + crossScalaVersions := Seq("2.11.11", "2.12.4")) + +lazy val module = (project in file("module")) + .dependsOn(core) + .settings( + scalaVersion := "2.12.4", + crossScalaVersions := Seq("2.12.4")) diff --git a/sbt/src/sbt-test/actions/cross-strict-aggregation/core/A.scala b/sbt/src/sbt-test/actions/cross-strict-aggregation/core/A.scala new file mode 100644 index 000000000..6c542aa30 --- /dev/null +++ b/sbt/src/sbt-test/actions/cross-strict-aggregation/core/A.scala @@ -0,0 +1,5 @@ +package foo + +object Foo { + +} diff --git a/sbt/src/sbt-test/actions/cross-strict-aggregation/module/B.scala b/sbt/src/sbt-test/actions/cross-strict-aggregation/module/B.scala new file mode 100644 index 000000000..d262f7e5d --- 
/dev/null +++ b/sbt/src/sbt-test/actions/cross-strict-aggregation/module/B.scala @@ -0,0 +1,5 @@ +package foo.module + +object FooModule { + +} diff --git a/sbt/src/sbt-test/actions/cross-strict-aggregation/test b/sbt/src/sbt-test/actions/cross-strict-aggregation/test new file mode 100644 index 000000000..fb3a4d9cb --- /dev/null +++ b/sbt/src/sbt-test/actions/cross-strict-aggregation/test @@ -0,0 +1,5 @@ +> ++2.11.11 compile + +$ exists core/target/scala-2.11 +-$ exists module/target/scala-2.11 +-$ exists module/target/scala-2.12 diff --git a/sbt/src/sbt-test/actions/doc-scala-instance/build.sbt b/sbt/src/sbt-test/actions/doc-scala-instance/build.sbt index 68d4abb6e..497ab5d39 100644 --- a/sbt/src/sbt-test/actions/doc-scala-instance/build.sbt +++ b/sbt/src/sbt-test/actions/doc-scala-instance/build.sbt @@ -1,8 +1,8 @@ lazy val a = project.settings( scalaVersion := "2.12.2", - scalaInstance in (Compile,doc) := (scalaInstance in b).value, + scalaInstance in (Compile, doc) := (scalaInstance in b).value, // 2.10.1-only, so this will only succeed if `doc` recognizes the more specific scalaInstance scoped to `doc` - scalacOptions in (Compile,doc) += "-implicits" + scalacOptions in (Compile, doc) += "-implicits" ) lazy val b = project.settings( diff --git a/sbt/src/sbt-test/actions/generator/build.sbt b/sbt/src/sbt-test/actions/generator/build.sbt index c347b414e..04b2a1b48 100644 --- a/sbt/src/sbt-test/actions/generator/build.sbt +++ b/sbt/src/sbt-test/actions/generator/build.sbt @@ -1,13 +1,11 @@ -lazy val buildInfo = taskKey[Seq[File]]("The task that generates the build info.") +scalaVersion := "2.11.8" -lazy val root = (project in file(".")) - .settings( - scalaVersion := "2.11.8", - buildInfo := { - val x = sourceManaged.value / "BuildInfo.scala" - IO.write(x, """object BuildInfo""") - x :: Nil - }, - sourceGenerators in Compile += buildInfo, - sourceGenerators in Compile += Def.task { Nil } - ) +val buildInfo = taskKey[Seq[File]]("generates the build info") 
+buildInfo := { + val file = sourceManaged.value / "BuildInfo.scala" + IO.write(file, "object BuildInfo") + file :: Nil +} + +sourceGenerators in Compile += buildInfo +sourceGenerators in Compile += Def.task { Nil } diff --git a/sbt/src/sbt-test/actions/previous/scopes.sbt b/sbt/src/sbt-test/actions/previous/scopes.sbt index 8062b1df1..a6998efe0 100644 --- a/sbt/src/sbt-test/actions/previous/scopes.sbt +++ b/sbt/src/sbt-test/actions/previous/scopes.sbt @@ -22,7 +22,7 @@ x in subA in Compile := { } -inConfig(Compile)(Seq( +inConfig(Compile)( y in subB := { // verify that the referenced key gets delegated val xty = (x in Test in y).previous getOrElse 0 // 13 @@ -31,7 +31,7 @@ inConfig(Compile)(Seq( println(s"xcy=$xcy, xty=$xty") xty * xcy } -)) +) def parser = { import complete.DefaultParsers._ diff --git a/sbt/src/sbt-test/actions/run-task/A.scala b/sbt/src/sbt-test/actions/run-task/A.scala index f12139e7b..f003b3e20 100644 --- a/sbt/src/sbt-test/actions/run-task/A.scala +++ b/sbt/src/sbt-test/actions/run-task/A.scala @@ -1,7 +1,6 @@ object A { - def main(args: Array[String]) = - { - assert(args(0).toInt == args(1).toInt) - assert(java.lang.Boolean.getBoolean("sbt.check.forked")) - } + def main(args: Array[String]) = { + assert(args(0).toInt == args(1).toInt) + assert(java.lang.Boolean.getBoolean("sbt.check.forked")) + } } diff --git a/sbt/src/sbt-test/actions/run-task/build.sbt b/sbt/src/sbt-test/actions/run-task/build.sbt index 0166f62ed..69fb3e613 100644 --- a/sbt/src/sbt-test/actions/run-task/build.sbt +++ b/sbt/src/sbt-test/actions/run-task/build.sbt @@ -1,13 +1,7 @@ -lazy val root = (project in file(".")). 
- settings( - myRun, - fork in demo := true, - javaOptions in demo := "-Dsbt.check.forked=true" :: Nil, - myIn - ) +val demo = taskKey[Unit]("Demo run task") +fullRunTask(demo, Compile, "A", "1", "1") +fork in demo := true +javaOptions in demo := "-Dsbt.check.forked=true" :: Nil -lazy val demoIn = InputKey[Unit]("demoIn", "Demo run input task", demo) -lazy val demo = taskKey[Unit]("Demo run task") - -def myRun = fullRunTask(demo, Compile, "A", "1", "1") -def myIn = fullRunInputTask(demoIn, Compile, "A", "1") +val demoIn = InputKey[Unit]("demoIn", "Demo run input task", demo) +fullRunInputTask(demoIn, Compile, "A", "1") diff --git a/sbt/src/sbt-test/dependency-management/cache-update/build.sbt b/sbt/src/sbt-test/dependency-management/cache-update/build.sbt index b0f3eec0b..146aca5b5 100644 --- a/sbt/src/sbt-test/dependency-management/cache-update/build.sbt +++ b/sbt/src/sbt-test/dependency-management/cache-update/build.sbt @@ -4,7 +4,7 @@ dependencyOverrides in ThisBuild += "com.github.nscala-time" %% "nscala-time" % lazy val root = (project in file(".")) .dependsOn(p1 % Compile) .settings( - inThisBuild(List( + inThisBuild( organizationName := "eed3si9n", organizationHomepage := Some(url("http://example.com/")), homepage := Some(url("https://github.com/example/example")), @@ -20,7 +20,7 @@ lazy val root = (project in file(".")) version := "0.3.1-SNAPSHOT", description := "An HTTP client for Scala with Async Http Client underneath.", licenses := Seq("Apache 2" -> new URL("http://www.apache.org/licenses/LICENSE-2.0.txt")), - )), + ), ivyPaths := IvyPaths( (baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache") diff --git a/sbt/src/sbt-test/dependency-management/cached-resolution-circular/multi.sbt b/sbt/src/sbt-test/dependency-management/cached-resolution-circular/multi.sbt index b1180c1af..05a8ffae7 100644 --- a/sbt/src/sbt-test/dependency-management/cached-resolution-circular/multi.sbt +++ 
b/sbt/src/sbt-test/dependency-management/cached-resolution-circular/multi.sbt @@ -41,8 +41,8 @@ lazy val c = project. lazy val root = (project in file(".")). settings(commonSettings). - settings(inThisBuild(Seq( + settings(inThisBuild( organization := "org.example", version := "1.0-SNAPSHOT", updateOptions := updateOptions.value.withCachedResolution(true) - ))) + )) diff --git a/sbt/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt b/sbt/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt index 61a573e18..aabebeae1 100644 --- a/sbt/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt +++ b/sbt/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt @@ -1,12 +1,12 @@ // https://github.com/sbt/sbt/issues/1710 // https://github.com/sbt/sbt/issues/1760 -inThisBuild(Seq( +inThisBuild( organization := "com.example", version := "0.1.0", scalaVersion := "2.10.4", updateOptions := updateOptions.value.withCachedResolution(true) -)) +) def commonSettings: Seq[Def.Setting[_]] = Seq( ivyPaths := IvyPaths((baseDirectory in ThisBuild).value, Some((baseDirectory in LocalRootProject).value / "ivy-cache")), diff --git a/sbt/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt b/sbt/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt index 7bca9158b..0a3327fa1 100644 --- a/sbt/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt +++ b/sbt/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt @@ -27,7 +27,7 @@ lazy val b = project. lazy val root = (project in file(".")). aggregate(a, b). - settings(inThisBuild(Seq( + settings(inThisBuild( organization := "org.example", version := "1.0", updateOptions := updateOptions.value.withCachedResolution(true), @@ -45,4 +45,4 @@ lazy val root = (project in file(".")). 
sys.error("commons-io NOT found when it should NOT be excluded") } } - ))) + )) diff --git a/sbt/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt b/sbt/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt index f359cfc76..c21995ecc 100644 --- a/sbt/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt +++ b/sbt/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt @@ -27,7 +27,7 @@ lazy val a = project. lazy val root = (project in file(".")). aggregate(a). - settings(inThisBuild(Seq( + settings(inThisBuild( organization := "org.example", version := "1.0", updateOptions := updateOptions.value.withCachedResolution(true), @@ -49,4 +49,4 @@ lazy val root = (project in file(".")). sys.error("junit NOT found when it should be included: " + atestcp.toString) } } - ))) + )) diff --git a/sbt/src/sbt-test/dependency-management/make-pom-type/build.sbt b/sbt/src/sbt-test/dependency-management/make-pom-type/build.sbt index 6d9005519..a7857d39d 100644 --- a/sbt/src/sbt-test/dependency-management/make-pom-type/build.sbt +++ b/sbt/src/sbt-test/dependency-management/make-pom-type/build.sbt @@ -2,12 +2,12 @@ lazy val p1 = (project in file("p1")). settings( checkTask(expectedMongo), libraryDependencies += "org.mongodb" %% "casbah" % "2.4.1" pomOnly(), - inThisBuild(List( + inThisBuild( organization := "org.example", version := "1.0", scalaVersion := "2.9.2", autoScalaLibrary := false - )) + ) ) lazy val p2 = (project in file("p2")). diff --git a/sbt/src/sbt-test/dependency-management/publish-local/build.sbt b/sbt/src/sbt-test/dependency-management/publish-local/build.sbt index 261aa6ac3..b8ace934d 100644 --- a/sbt/src/sbt-test/dependency-management/publish-local/build.sbt +++ b/sbt/src/sbt-test/dependency-management/publish-local/build.sbt @@ -1,12 +1,12 @@ lazy val root = (project in file(".")). dependsOn(sub). aggregate(sub). 
- settings(inThisBuild(List( + settings(inThisBuild( organization := "A", version := "1.0", ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value, externalResolvers := (baseDirectory map { base => Resolver.file("local", base / "ivy" / "local" asFile)(Resolver.ivyStylePatterns) :: Nil }).value - )), + ), mavenStyle, interProject, name := "Publish Test" diff --git a/sbt/src/sbt-test/dependency-management/publish-local/changes/RetrieveTest.sbt b/sbt/src/sbt-test/dependency-management/publish-local/changes/RetrieveTest.sbt index 795608334..ca033e3b8 100644 --- a/sbt/src/sbt-test/dependency-management/publish-local/changes/RetrieveTest.sbt +++ b/sbt/src/sbt-test/dependency-management/publish-local/changes/RetrieveTest.sbt @@ -1,10 +1,10 @@ lazy val root = (project in file(".")). - settings(inThisBuild(List( + settings(inThisBuild( organization := "A", version := "1.0", ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy" / "cache")) ).value, externalResolvers := (baseDirectory map { base => Resolver.file("local", base / "ivy" / "local" asFile)(Resolver.ivyStylePatterns) :: Nil }).value - )), + ), mavenStyle, name := "Retrieve Test", libraryDependencies := (publishMavenStyle { style => if(style) mavenStyleDependencies else ivyStyleDependencies }).value diff --git a/sbt/src/sbt-test/java/cross/A.scala b/sbt/src/sbt-test/java/cross/A.scala new file mode 100644 index 000000000..e9ff1f72a --- /dev/null +++ b/sbt/src/sbt-test/java/cross/A.scala @@ -0,0 +1,16 @@ +package pkg + +import java.nio.file.{ Paths, Files } +import java.nio.charset.Charset + +object A extends App { + val out = Paths.get("out.txt") + val content = sys.props("java.version") + val w = Files.newBufferedWriter(out, Charset.forName("UTF-8")) + try { + w.write(content) + w.flush() + } finally { + w.close + } +} diff --git a/sbt/src/sbt-test/java/cross/build.sbt b/sbt/src/sbt-test/java/cross/build.sbt new file mode 100644 index 000000000..c71bd179e --- /dev/null +++ 
b/sbt/src/sbt-test/java/cross/build.sbt @@ -0,0 +1,25 @@ +import complete.DefaultParsers._ + +val check = inputKey[Unit]("Runs the check") + +lazy val root = (project in file(".")) + .settings( + ThisBuild / scalaVersion := "2.12.6", + crossJavaVersions := List("1.8"), + + // read out.txt and see if it starts with the passed in number + check := { + val arg1: Int = (Space ~> NatBasic).parsed + file("out.txt") match { + case out if out.exists => + IO.readLines(out).headOption match { + case Some(v) if v startsWith arg1.toString => () + case Some(v) if v startsWith s"1.$arg1" => () + case x => sys.error(s"unexpected value: $x") + } + case out => sys.error(s"$out doesn't exist") + } + }, + + Compile / run / fork := true, + ) diff --git a/sbt/src/sbt-test/java/cross/test b/sbt/src/sbt-test/java/cross/test new file mode 100644 index 000000000..ff76c51bf --- /dev/null +++ b/sbt/src/sbt-test/java/cross/test @@ -0,0 +1,6 @@ +> java+ run +> check 8 + +> java++ 10! +> run +> check 10 diff --git a/sbt/src/sbt-test/java/home-discovery/build.sbt b/sbt/src/sbt-test/java/home-discovery/build.sbt new file mode 100644 index 000000000..7177ca692 --- /dev/null +++ b/sbt/src/sbt-test/java/home-discovery/build.sbt @@ -0,0 +1,5 @@ +Global / javaHomes += "6" -> file("/good/old/times/java-6") + +TaskKey[Unit]("check") := { + assert(fullJavaHomes.value("1.6").getAbsolutePath.contains("java-6")) +} diff --git a/sbt/src/sbt-test/java/home-discovery/test b/sbt/src/sbt-test/java/home-discovery/test new file mode 100644 index 000000000..15675b169 --- /dev/null +++ b/sbt/src/sbt-test/java/home-discovery/test @@ -0,0 +1 @@ +> check diff --git a/sbt/src/sbt-test/project/cross-source/build.sbt b/sbt/src/sbt-test/project/cross-source/build.sbt new file mode 100644 index 000000000..2f3894078 --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/build.sbt @@ -0,0 +1,8 @@ +val commonSettings = Seq( + crossScalaVersions := (0 to 6).map(i => s"2.10.$i") ++ (0 to 11).map(i => s"2.11.$i") ++ (0 to 
2).map(i => s"2.12.$i") +) + +val p1 = project.in(file("p1")).settings(commonSettings) +val p2 = project.in(file("p2")).settings(commonSettings) +val p3 = project.in(file("p3")).settings(commonSettings) +val p4 = project.in(file("p4")).settings(commonSettings) diff --git a/sbt/src/sbt-test/project/cross-source/p1/src/main/scala-2.10/B.scala b/sbt/src/sbt-test/project/cross-source/p1/src/main/scala-2.10/B.scala new file mode 100644 index 000000000..fa8ad30ea --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p1/src/main/scala-2.10/B.scala @@ -0,0 +1,3 @@ +object B { + def show(what: String): String = s"String interpolation is ${what.toUpperCase}!" +} diff --git a/sbt/src/sbt-test/project/cross-source/p1/src/main/scala/A.scala b/sbt/src/sbt-test/project/cross-source/p1/src/main/scala/A.scala new file mode 100644 index 000000000..8b55ead57 --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p1/src/main/scala/A.scala @@ -0,0 +1,3 @@ +class A { + def show(what: String): Unit = println(what) +} diff --git a/sbt/src/sbt-test/project/cross-source/p2/src/main/scala-2.10/B.scala b/sbt/src/sbt-test/project/cross-source/p2/src/main/scala-2.10/B.scala new file mode 100644 index 000000000..fa8ad30ea --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p2/src/main/scala-2.10/B.scala @@ -0,0 +1,3 @@ +object B { + def show(what: String): String = s"String interpolation is ${what.toUpperCase}!" 
+} diff --git a/sbt/src/sbt-test/project/cross-source/p2/src/main/scala/A.scala b/sbt/src/sbt-test/project/cross-source/p2/src/main/scala/A.scala new file mode 100644 index 000000000..8b55ead57 --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p2/src/main/scala/A.scala @@ -0,0 +1,3 @@ +class A { + def show(what: String): Unit = println(what) +} diff --git a/sbt/src/sbt-test/project/cross-source/p3/src/main/scala-2.10/B.scala b/sbt/src/sbt-test/project/cross-source/p3/src/main/scala-2.10/B.scala new file mode 100644 index 000000000..fa8ad30ea --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p3/src/main/scala-2.10/B.scala @@ -0,0 +1,3 @@ +object B { + def show(what: String): String = s"String interpolation is ${what.toUpperCase}!" +} diff --git a/sbt/src/sbt-test/project/cross-source/p3/src/main/scala/A.scala b/sbt/src/sbt-test/project/cross-source/p3/src/main/scala/A.scala new file mode 100644 index 000000000..8b55ead57 --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p3/src/main/scala/A.scala @@ -0,0 +1,3 @@ +class A { + def show(what: String): Unit = println(what) +} diff --git a/sbt/src/sbt-test/project/cross-source/p4/src/main/scala-2.10/B.scala b/sbt/src/sbt-test/project/cross-source/p4/src/main/scala-2.10/B.scala new file mode 100644 index 000000000..fa8ad30ea --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p4/src/main/scala-2.10/B.scala @@ -0,0 +1,3 @@ +object B { + def show(what: String): String = s"String interpolation is ${what.toUpperCase}!" 
+} diff --git a/sbt/src/sbt-test/project/cross-source/p4/src/main/scala/A.scala b/sbt/src/sbt-test/project/cross-source/p4/src/main/scala/A.scala new file mode 100644 index 000000000..8b55ead57 --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/p4/src/main/scala/A.scala @@ -0,0 +1,3 @@ +class A { + def show(what: String): Unit = println(what) +} diff --git a/sbt/src/sbt-test/project/cross-source/test b/sbt/src/sbt-test/project/cross-source/test new file mode 100644 index 000000000..17db5c8da --- /dev/null +++ b/sbt/src/sbt-test/project/cross-source/test @@ -0,0 +1,3 @@ +# https://github.com/sbt/sbt/issues/3143 +> crossScalaVersions +> +version diff --git a/sbt/src/sbt-test/project/derived/build.sbt b/sbt/src/sbt-test/project/derived/build.sbt index 21cfc9207..1d1a5fd5b 100644 --- a/sbt/src/sbt-test/project/derived/build.sbt +++ b/sbt/src/sbt-test/project/derived/build.sbt @@ -23,7 +23,7 @@ globalDepE in Global := "globalE" // ---------------- Derived settings // verify that deriving is transitive -inScope(GlobalScope)(Seq( +inScope(GlobalScope)( Def.derive(customA := customB.value + "-a"), Def.derive(customB := thisProject.value.id + "-b"), // verify that a setting with multiple triggers still only gets added once @@ -36,7 +36,7 @@ inScope(GlobalScope)(Seq( // if customE were added in Global because of name, there would be an error // because description wouldn't be found Def.derive(customE := globalDepE.value + "-" + projectDepE.value) -)) +) // ---------------- Projects diff --git a/sbt/src/sbt-test/project/no-publish/build.sbt b/sbt/src/sbt-test/project/no-publish/build.sbt new file mode 100644 index 000000000..36e1cdfd8 --- /dev/null +++ b/sbt/src/sbt-test/project/no-publish/build.sbt @@ -0,0 +1,3 @@ +// this is supposed to do nothing, and more importantly: not fail +// https://github.com/sbt/sbt/pull/3760 +publishArtifact := false diff --git a/sbt/src/sbt-test/project/no-publish/test b/sbt/src/sbt-test/project/no-publish/test new file mode 100644 index 
000000000..c78ab3f9f --- /dev/null +++ b/sbt/src/sbt-test/project/no-publish/test @@ -0,0 +1 @@ +> publish \ No newline at end of file diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/build.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/build.sbt new file mode 100644 index 000000000..1537b2610 --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/build.sbt @@ -0,0 +1,36 @@ +import sbt.internal.AddSettings +import sbt.CompositeProject + +lazy val check = taskKey[Unit]("check") + +// Based on sbt-file-projects test +lazy val foo = new CompositeProject +{ + val jvm = Project.apply("jvm", new File("jvm")).settings(version := "0.1.0") // this one needs to win + val js = Project.apply("js", new File("js")).settings(version := "0.1.0") // this one needs to win + def componentProjects: Seq[Project] = Seq(jvm, js) +} + +lazy val fooJS = foo.js +lazy val fooJVM = foo.jvm + +lazy val bar = project + .dependsOn(foo.jvm) + +val g = taskKey[Unit]("A task in the root project") +g := println("Hello.") + + +check := { + val verJvm = (version in foo.jvm).?.value + assert (verJvm == Some("0.1.0")) + + val verFooJvm = (version in fooJVM).?.value + assert (verFooJvm == Some("0.1.0")) + + val verJs = (version in foo.js).?.value + assert (verJs == Some("0.1.0")) + + val verFooJs = (version in fooJS).?.value + assert (verFooJs == Some("0.1.0")) +} diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/changes/basic.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/changes/basic.sbt new file mode 100644 index 000000000..c128b140e --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/changes/basic.sbt @@ -0,0 +1 @@ +lazy val root = (project in file(".")) diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadow.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadow.sbt new file mode 100644 index 000000000..0713e7f4b --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadow.sbt 
@@ -0,0 +1,36 @@ +import sbt.internal.AddSettings +import sbt.CompositeProject + +lazy val check = taskKey[Unit]("check") + +// Based on sbt-file-projects test +lazy val foo = new CompositeProject +{ + val jvm = Project.apply("jvm", new File("jvm")).settings(version := "0.1.0") + val js = Project.apply("js", new File("js")).settings(version := "0.1.0") // this one needs to win + def componentProjects: Seq[Project] = Seq(jvm, js) +} + +lazy val fooJS = foo.js +lazy val fooJVM = foo.jvm.settings(version := "0.2.0") // this one needs to win + +lazy val bar = project + .dependsOn(foo.jvm) + +val g = taskKey[Unit]("A task in the root project") +g := println("Hello.") + + +check := { + val verJvm = (version in foo.jvm).?.value + assert (verJvm == Some("0.2.0")) + + val verFooJvm = (version in fooJVM).?.value + assert (verFooJvm == Some("0.2.0")) + + val verJs = (version in foo.js).?.value + assert (verJs == Some("0.1.0")) + + val verFooJs = (version in fooJS).?.value + assert (verFooJs == Some("0.1.0")) +} diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadowLazy.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadowLazy.sbt new file mode 100644 index 000000000..492fbd621 --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/changes/shadowLazy.sbt @@ -0,0 +1,37 @@ +import sbt.internal.AddSettings +import sbt.CompositeProject + +lazy val check = taskKey[Unit]("check") + +lazy val fooJS = foo.js.settings(version := "0.2.1") // this one needs to win + +// Based on sbt-file-projects test +lazy val foo = new CompositeProject +{ + val jvm = Project.apply("jvm", new File("jvm")).settings(version := "0.1.0") + val js = Project.apply("js", new File("js")).settings(version := "0.1.0") + def componentProjects: Seq[Project] = Seq(jvm, js) +} + +lazy val fooJVM = foo.jvm.settings(version := "0.2.0") // this one needs to win + +lazy val bar = project + .dependsOn(foo.jvm) + +val g = taskKey[Unit]("A task in the root project") +g := 
println("Hello.") + + +check := { + val verJvm = (version in foo.jvm).?.value + assert (verJvm == Some("0.2.0")) + + val verFooJvm = (version in fooJVM).?.value + assert (verFooJvm == Some("0.2.0")) + + val verJs = (version in foo.js).?.value + assert (verJs == Some("0.2.1")) + + val verFooJs = (version in fooJS).?.value + assert (verFooJs == Some("0.2.1")) +} diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/js/build.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/js/build.sbt new file mode 100644 index 000000000..0ddd49e30 --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/js/build.sbt @@ -0,0 +1,2 @@ +val h = taskKey[Unit]("A task in project 'js'") +h := println("Hello.") diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/jvm/A.scala b/sbt/src/sbt-test/project/sbt-composite-projects/jvm/A.scala new file mode 100644 index 000000000..528ffce71 --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/jvm/A.scala @@ -0,0 +1 @@ +object A \ No newline at end of file diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/jvm/a.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/jvm/a.sbt new file mode 100644 index 000000000..9b73a55bf --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/jvm/a.sbt @@ -0,0 +1,2 @@ +val aa = taskKey[Unit]("A task in the 'jvm' project") +aa := println("Hello.") diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/other.sbt b/sbt/src/sbt-test/project/sbt-composite-projects/other.sbt new file mode 100644 index 000000000..94e5f2363 --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/other.sbt @@ -0,0 +1 @@ +val c = project diff --git a/sbt/src/sbt-test/project/sbt-composite-projects/test b/sbt/src/sbt-test/project/sbt-composite-projects/test new file mode 100644 index 000000000..34bc8097d --- /dev/null +++ b/sbt/src/sbt-test/project/sbt-composite-projects/test @@ -0,0 +1,40 @@ +> g +-> root/compile +> jvm/compile +> jvm/aa +> js/compile +> 
js/h +> c/compile +> bar/compile + +$ copy-file changes/basic.sbt basic.sbt +> reload +> g +> root/compile +> jvm/compile +> jvm/aa +> js/compile +> js/h +> c/compile +> bar/compile +> check + +$ copy-file changes/shadow.sbt build.sbt +> reload +> jvm/compile +> jvm/aa +> js/compile +> js/h +> c/compile +> bar/compile +> check + +$ copy-file changes/shadowLazy.sbt build.sbt +> reload +> jvm/compile +> jvm/aa +> js/compile +> js/h +> c/compile +> bar/compile +> check \ No newline at end of file diff --git a/sbt/src/sbt-test/project/scripted-plugin/build.sbt b/sbt/src/sbt-test/project/scripted-plugin/build.sbt new file mode 100644 index 000000000..6c91c26b3 --- /dev/null +++ b/sbt/src/sbt-test/project/scripted-plugin/build.sbt @@ -0,0 +1,2 @@ +lazy val root = (project in file(".")) + .enablePlugins(SbtPlugin) diff --git a/sbt/src/sbt-test/project/scripted-skip-incompatible/build.sbt b/sbt/src/sbt-test/project/scripted-skip-incompatible/build.sbt new file mode 100644 index 000000000..6c91c26b3 --- /dev/null +++ b/sbt/src/sbt-test/project/scripted-skip-incompatible/build.sbt @@ -0,0 +1,2 @@ +lazy val root = (project in file(".")) + .enablePlugins(SbtPlugin) diff --git a/sbt/src/sbt-test/project/scripted-skip-incompatible/project/plugins.sbt b/sbt/src/sbt-test/project/scripted-skip-incompatible/project/plugins.sbt deleted file mode 100644 index 529e7d656..000000000 --- a/sbt/src/sbt-test/project/scripted-skip-incompatible/project/plugins.sbt +++ /dev/null @@ -1,3 +0,0 @@ -libraryDependencies += { - "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value -} diff --git a/sbt/src/sbt-test/project/session-update-from-cmd/project/Common.scala b/sbt/src/sbt-test/project/session-update-from-cmd/project/Common.scala index f977fa81a..423d628f6 100644 --- a/sbt/src/sbt-test/project/session-update-from-cmd/project/Common.scala +++ b/sbt/src/sbt-test/project/session-update-from-cmd/project/Common.scala @@ -11,7 +11,7 @@ object Common { val UpdateK1 = Command.command("UpdateK1") { 
st: State => val ex = Project extract st import ex._ - val session2 = BuiltinCommands.setThis(st, ex, Seq(k1 := {}), """k1 := { + val session2 = BuiltinCommands.setThis(ex, Seq(k1 := {}), """k1 := { |// |// |}""".stripMargin).session @@ -24,7 +24,7 @@ object Common { val UpdateK3 = Command.command("UpdateK3") { st: State => val ex = Project extract st import ex._ - val session2 = BuiltinCommands.setThis(st, ex, Seq(k3 := {}), """k3 := { + val session2 = BuiltinCommands.setThis(ex, Seq(k3 := {}), """k3 := { |// |// |}""".stripMargin).session diff --git a/sbt/src/sbt-test/project/unique-settings-computation/pending b/sbt/src/sbt-test/project/unique-settings-computation/disabled similarity index 100% rename from sbt/src/sbt-test/project/unique-settings-computation/pending rename to sbt/src/sbt-test/project/unique-settings-computation/disabled diff --git a/sbt/src/sbt-test/tests/fork-test-group-parallel/build.sbt b/sbt/src/sbt-test/tests/fork-test-group-parallel/build.sbt index 761d141ee..8f4ca2611 100644 --- a/sbt/src/sbt-test/tests/fork-test-group-parallel/build.sbt +++ b/sbt/src/sbt-test/tests/fork-test-group-parallel/build.sbt @@ -1,7 +1,7 @@ scalaVersion in ThisBuild := "2.11.8" concurrentRestrictions in Global := Seq(Tags.limitAll(4)) libraryDependencies += "org.specs2" %% "specs2-core" % "3.8.4" % Test -inConfig(Test)(Seq( +inConfig(Test)( testGrouping := { val home = javaHome.value val strategy = outputStrategy.value @@ -22,4 +22,4 @@ inConfig(Test)(Seq( ))} }, TaskKey[Unit]("test-failure") := test.failure.value -)) +) diff --git a/sbt/src/server-test/handshake/build.sbt b/sbt/src/server-test/handshake/build.sbt index 192730eef..02f5f81fd 100644 --- a/sbt/src/server-test/handshake/build.sbt +++ b/sbt/src/server-test/handshake/build.sbt @@ -1,6 +1,24 @@ +import sbt.internal.server.{ ServerHandler, ServerIntent } + lazy val root = (project in file(".")) .settings( Global / serverLog / logLevel := Level.Debug, + + // custom handler + Global / serverHandlers += 
ServerHandler({ callback => + import callback._ + import sjsonnew.BasicJsonProtocol._ + import sbt.internal.protocol.JsonRpcRequestMessage + ServerIntent( + { + case r: JsonRpcRequestMessage if r.method == "lunar/helo" => + jsonRpcNotify("lunar/oleh", "") + () + }, + PartialFunction.empty + ) + }), + name := "handshake", scalaVersion := "2.12.3", ) diff --git a/sbt/src/test/scala/sbt/RunFromSourceMain.scala b/sbt/src/test/scala/sbt/RunFromSourceMain.scala index 09e9758b1..84c229de0 100644 --- a/sbt/src/test/scala/sbt/RunFromSourceMain.scala +++ b/sbt/src/test/scala/sbt/RunFromSourceMain.scala @@ -14,7 +14,7 @@ import buildinfo.TestBuildInfo import xsbti._ object RunFromSourceMain { - private val sbtVersion = "1.1.0" // "dev" + private val sbtVersion = "1.1.4" // TestBuildInfo.version private val scalaVersion = "2.12.6" def fork(workingDirectory: File): Try[Unit] = { @@ -117,12 +117,12 @@ object RunFromSourceMain { def topLoader = new java.net.URLClassLoader(Array(), null) def globalLock = noGlobalLock def bootDirectory = RunFromSourceMain.bootDirectory + def ivyHome = file(sys.props("user.home")) / ".ivy2" final case class PredefRepo(id: Predefined) extends PredefinedRepository import Predefined._ def ivyRepositories = Array(PredefRepo(Local), PredefRepo(MavenCentral)) def appRepositories = Array(PredefRepo(Local), PredefRepo(MavenCentral)) def isOverrideRepositories = false - def ivyHome = file(sys.props("user.home")) / ".ivy2" def checksums = Array("sha1", "md5") } def version = scalaVersion @@ -147,6 +147,7 @@ object RunFromSourceMain { CrossValue.Disabled, Nil ) + def appHome: File = scalaHome / id.groupID / id.name / id.version def mainClasspath = buildinfo.TestBuildInfo.fullClasspath.toArray def loader = new java.net.URLClassLoader(mainClasspath map (_.toURI.toURL), null) @@ -155,11 +156,33 @@ object RunFromSourceMain { def newMain = new xMain def components = new ComponentProvider { - def componentLocation(id: String) = ??? 
- def component(componentID: String) = ??? - def defineComponent(componentID: String, components: Array[File]) = ??? - def addToComponent(componentID: String, components: Array[File]) = ??? - def lockFile = ??? + def componentLocation(id: String) = appHome / id + def component(id: String) = IO.listFiles(componentLocation(id), _.isFile) + + def defineComponent(id: String, files: Array[File]) = { + val location = componentLocation(id) + if (location.exists) + sys error s"Cannot redefine component. ID: $id, files: ${files mkString ","}" + else { + copy(files.toList, location) + () + } + } + + def addToComponent(id: String, files: Array[File]) = + copy(files.toList, componentLocation(id)) + + def lockFile = appHome / "sbt.components.lock" + + private def copy(files: List[File], toDirectory: File): Boolean = + files exists (copy(_, toDirectory)) + + private def copy(file: File, toDirectory: File): Boolean = { + val to = toDirectory / file.getName + val missing = !to.exists + IO.copyFile(file, to) + missing + } } } } diff --git a/sbt/src/test/scala/testpkg/ServerSpec.scala b/sbt/src/test/scala/testpkg/ServerSpec.scala index 259e5c248..303e0c630 100644 --- a/sbt/src/test/scala/testpkg/ServerSpec.scala +++ b/sbt/src/test/scala/testpkg/ServerSpec.scala @@ -21,7 +21,8 @@ class ServerSpec extends AsyncFreeSpec with Matchers { "server" - { "should start" in withTestServer("handshake") { p => p.writeLine( - """{ "jsonrpc": "2.0", "id": "3", "method": "sbt/setting", "params": { "setting": "root/name" } }""") + """{ "jsonrpc": "2.0", "id": "3", "method": "sbt/setting", "params": { "setting": "root/name" } }""" + ) assert(p.waitForString(10) { s => s contains """"id":"3"""" }) @@ -29,7 +30,8 @@ class ServerSpec extends AsyncFreeSpec with Matchers { "return number id when number id is sent" in withTestServer("handshake") { p => p.writeLine( - """{ "jsonrpc": "2.0", "id": 3, "method": "sbt/setting", "params": { "setting": "root/name" } }""") + """{ "jsonrpc": "2.0", "id": 3, 
"method": "sbt/setting", "params": { "setting": "root/name" } }""" + ) assert(p.waitForString(10) { s => s contains """"id":3""" }) @@ -97,7 +99,8 @@ case class TestServer(baseDirectory: File) { // initiate handshake sendJsonRpc( - """{ "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": { "initializationOptions": { } } }""") + """{ "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": { "initializationOptions": { } } }""" + ) def test(f: TestServer => Future[Assertion]): Future[Assertion] = { f(this) @@ -106,7 +109,8 @@ case class TestServer(baseDirectory: File) { def bye(): Unit = { hostLog("sending exit") sendJsonRpc( - """{ "jsonrpc": "2.0", "id": 9, "method": "sbt/exec", "params": { "commandLine": "exit" } }""") + """{ "jsonrpc": "2.0", "id": 9, "method": "sbt/exec", "params": { "commandLine": "exit" } }""" + ) } def sendJsonRpc(message: String): Unit = { diff --git a/scripted/plugin/src/main/resources/sbt/sbt.autoplugins b/scripted/plugin/src/main/resources/sbt/sbt.autoplugins deleted file mode 100644 index 0077b7635..000000000 --- a/scripted/plugin/src/main/resources/sbt/sbt.autoplugins +++ /dev/null @@ -1 +0,0 @@ -sbt.ScriptedPlugin \ No newline at end of file diff --git a/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala b/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala index 3e38a26ac..93c2d93f4 100644 --- a/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala +++ b/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala @@ -7,145 +7,4 @@ package sbt -import Def.Initialize -import Keys._ -import sbt.internal.util.complete.{ Parser, DefaultParsers } -import sbt.internal.inc.classpath.ClasspathUtilities -import sbt.internal.inc.ModuleUtilities -import java.lang.reflect.Method -import sbt.librarymanagement.CrossVersion.partialVersion - -object ScriptedPlugin extends AutoPlugin { - override def requires = plugins.JvmPlugin - override def trigger = allRequirements - object autoImport { - val ScriptedConf = 
Configurations.config("scripted-sbt") hide - val ScriptedLaunchConf = Configurations.config("scripted-sbt-launch") hide - val scriptedSbt = SettingKey[String]("scripted-sbt") - val sbtLauncher = TaskKey[File]("sbt-launcher") - val sbtTestDirectory = SettingKey[File]("sbt-test-directory") - val scriptedBufferLog = SettingKey[Boolean]("scripted-buffer-log") - val scriptedClasspath = TaskKey[PathFinder]("scripted-classpath") - val scriptedTests = TaskKey[AnyRef]("scripted-tests") - val scriptedRun = TaskKey[Method]("scripted-run") - val scriptedLaunchOpts = SettingKey[Seq[String]]( - "scripted-launch-opts", - "options to pass to jvm launching scripted tasks") - val scriptedDependencies = TaskKey[Unit]("scripted-dependencies") - val scripted = InputKey[Unit]("scripted") - } - import autoImport._ - override lazy val projectSettings = Seq( - ivyConfigurations ++= Seq(ScriptedConf, ScriptedLaunchConf), - scriptedSbt := (sbtVersion in pluginCrossBuild).value, - sbtLauncher := getJars(ScriptedLaunchConf).map(_.get.head).value, - sbtTestDirectory := sourceDirectory.value / "sbt-test", - libraryDependencies ++= (partialVersion(scriptedSbt.value) match { - case Some((0, 13)) => - Seq( - "org.scala-sbt" % "scripted-sbt" % scriptedSbt.value % ScriptedConf, - "org.scala-sbt" % "sbt-launch" % scriptedSbt.value % ScriptedLaunchConf - ) - case Some((1, _)) => - Seq( - "org.scala-sbt" %% "scripted-sbt" % scriptedSbt.value % ScriptedConf, - "org.scala-sbt" % "sbt-launch" % scriptedSbt.value % ScriptedLaunchConf - ) - }), - scriptedBufferLog := true, - scriptedClasspath := getJars(ScriptedConf).value, - scriptedTests := scriptedTestsTask.value, - scriptedRun := scriptedRunTask.value, - scriptedDependencies := { - def use[A](x: A*): Unit = () // avoid unused warnings - val analysis = (compile in Test).value - val pub = (publishLocal).value - use(analysis, pub) - }, - scriptedLaunchOpts := Seq(), - scripted := scriptedTask.evaluated - ) - - def scriptedTestsTask: Initialize[Task[AnyRef]] 
= - Def.task { - val loader = ClasspathUtilities.toLoader(scriptedClasspath.value, scalaInstance.value.loader) - ModuleUtilities.getObject("sbt.test.ScriptedTests", loader) - } - - def scriptedRunTask: Initialize[Task[Method]] = Def.task( - scriptedTests.value.getClass.getMethod("run", - classOf[File], - classOf[Boolean], - classOf[Array[String]], - classOf[File], - classOf[Array[String]], - classOf[java.util.List[File]]) - ) - - import DefaultParsers._ - case class ScriptedTestPage(page: Int, total: Int) - - private[sbt] def scriptedParser(scriptedBase: File): Parser[Seq[String]] = { - - val scriptedFiles: NameFilter = ("test": NameFilter) | "pending" - val pairs = (scriptedBase * AllPassFilter * AllPassFilter * scriptedFiles).get map { - (f: File) => - val p = f.getParentFile - (p.getParentFile.getName, p.getName) - } - val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet); - - val id = charClass(c => !c.isWhitespace && c != '/').+.string - val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/') - - // A parser for page definitions - val pageP: Parser[ScriptedTestPage] = ("*" ~ NatBasic ~ "of" ~ NatBasic) map { - case _ ~ page ~ _ ~ total => ScriptedTestPage(page, total) - } - // Grabs the filenames from a given test group in the current page definition. 
- def pagedFilenames(group: String, page: ScriptedTestPage): Seq[String] = { - val files = pairMap(group).toSeq.sortBy(_.toLowerCase) - val pageSize = files.size / page.total - // The last page may loose some values, so we explicitly keep them - val dropped = files.drop(pageSize * (page.page - 1)) - if (page.page == page.total) dropped - else dropped.take(pageSize) - } - def nameP(group: String) = { - token("*".id | id.examples(pairMap.getOrElse(group, Set.empty[String]))) - } - val PagedIds: Parser[Seq[String]] = - for { - group <- groupP - page <- pageP - files = pagedFilenames(group, page) - // TODO - Fail the parser if we don't have enough files for the given page size - //if !files.isEmpty - } yield files map (f => group + '/' + f) - - val testID = (for (group <- groupP; name <- nameP(group)) yield (group, name)) - val testIdAsGroup = matched(testID) map (test => Seq(test)) - //(token(Space) ~> matched(testID)).* - (token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten) - } - - def scriptedTask: Initialize[InputTask[Unit]] = Def.inputTask { - val args = scriptedParser(sbtTestDirectory.value).parsed - scriptedDependencies.value - try { - scriptedRun.value.invoke( - scriptedTests.value, - sbtTestDirectory.value, - scriptedBufferLog.value: java.lang.Boolean, - args.toArray, - sbtLauncher.value, - scriptedLaunchOpts.value.toArray, - new java.util.ArrayList() - ) - } catch { case e: java.lang.reflect.InvocationTargetException => throw e.getCause } - } - - private[this] def getJars(config: Configuration): Initialize[Task[PathFinder]] = Def.task { - PathFinder(Classpaths.managedJars(config, classpathTypes.value, update.value).map(_.data)) - } -} +// ScriptedPlugin has moved to main. 
diff --git a/scripted/plugin/src/main/scala/sbt/test/ScriptedTests.scala b/scripted/plugin/src/main/scala/sbt/test/ScriptedTests.scala new file mode 100644 index 000000000..727c4bd6c --- /dev/null +++ b/scripted/plugin/src/main/scala/sbt/test/ScriptedTests.scala @@ -0,0 +1,32 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt.test + +import java.io.File + +/** + * This is a bincompat place holder sbt.test package that we are now trying to hide + * because of the name conflict with Keys.test. + */ +@deprecated("Use sbt.scriptedtest.ScriptedRunner.", "1.2.0") +private[sbt] class ScriptedRunner extends sbt.scriptedtest.ScriptedRunner + +/** + * This is a bincompat place holder for sbt.test package that we are now trying to hide + * because of the name conflict with Keys.test. + */ +@deprecated("Use sbt.scriptedtest.ScriptedTests.", "1.2.0") +private[sbt] object ScriptedTests extends ScriptedRunner { + + /** Represents the function that runs the scripted tests, both in single or batch mode. 
*/ + type TestRunner = () => Seq[Option[String]] + + val emptyCallback: File => Unit = _ => () + def main(args: Array[String]): Unit = + sbt.scriptedtest.ScriptedTests.main(args) +} diff --git a/scripted/sbt/src/main/scala/sbt/test/BatchScriptRunner.scala b/scripted/sbt/src/main/scala/sbt/scriptedtest/BatchScriptRunner.scala similarity index 87% rename from scripted/sbt/src/main/scala/sbt/test/BatchScriptRunner.scala rename to scripted/sbt/src/main/scala/sbt/scriptedtest/BatchScriptRunner.scala index ccf9d5148..805059518 100644 --- a/scripted/sbt/src/main/scala/sbt/test/BatchScriptRunner.scala +++ b/scripted/sbt/src/main/scala/sbt/scriptedtest/BatchScriptRunner.scala @@ -6,13 +6,19 @@ */ package sbt -package test +package scriptedtest + +import scala.collection.mutable import sbt.internal.scripted._ -import sbt.test.BatchScriptRunner.States + +private[sbt] object BatchScriptRunner { + type States = mutable.HashMap[StatementHandler, StatementHandler#State] +} /** Defines an alternative script runner that allows batch execution. */ private[sbt] class BatchScriptRunner extends ScriptRunner { + import BatchScriptRunner.States /** Defines a method to run batched execution. 
* @@ -37,9 +43,8 @@ private[sbt] class BatchScriptRunner extends ScriptRunner { def processStatement(handler: StatementHandler, statement: Statement, states: States): Unit = { val state = states(handler).asInstanceOf[handler.State] val nextState = - try { Right(handler(statement.command, statement.arguments, state)) } catch { - case e: Exception => Left(e) - } + try Right(handler(statement.command, statement.arguments, state)) + catch { case e: Exception => Left(e) } nextState match { case Left(err) => if (statement.successExpected) { @@ -58,8 +63,3 @@ private[sbt] class BatchScriptRunner extends ScriptRunner { } } } - -private[sbt] object BatchScriptRunner { - import scala.collection.mutable - type States = mutable.HashMap[StatementHandler, Any] -} diff --git a/scripted/sbt/src/main/scala/sbt/scriptedtest/RemoteSbtCreator.scala b/scripted/sbt/src/main/scala/sbt/scriptedtest/RemoteSbtCreator.scala new file mode 100644 index 000000000..5dbf2ef51 --- /dev/null +++ b/scripted/sbt/src/main/scala/sbt/scriptedtest/RemoteSbtCreator.scala @@ -0,0 +1,67 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt +package scriptedtest + +import java.io.File + +import scala.sys.process.{ BasicIO, Process } + +import sbt.io.IO +import sbt.util.Logger + +import xsbt.IPC + +private[sbt] sealed trait RemoteSbtCreatorKind +private[sbt] object RemoteSbtCreatorKind { + case object LauncherBased extends RemoteSbtCreatorKind + case object RunFromSourceBased extends RemoteSbtCreatorKind +} + +abstract class RemoteSbtCreator private[sbt] { + def newRemote(server: IPC.Server): Process +} + +final class LauncherBasedRemoteSbtCreator( + directory: File, + launcher: File, + log: Logger, + launchOpts: Seq[String] = Nil, +) extends RemoteSbtCreator { + def newRemote(server: IPC.Server) = { + val launcherJar = launcher.getAbsolutePath + val globalBase = "-Dsbt.global.base=" + (new File(directory, "global")).getAbsolutePath + val args = List("<" + server.port) + val cmd = "java" :: launchOpts.toList ::: globalBase :: "-jar" :: launcherJar :: args ::: Nil + val io = BasicIO(false, log).withInput(_.close()) + val p = Process(cmd, directory) run (io) + val thread = new Thread() { override def run() = { p.exitValue(); server.close() } } + thread.start() + p + } +} + +final class RunFromSourceBasedRemoteSbtCreator( + directory: File, + log: Logger, + launchOpts: Seq[String] = Nil, +) extends RemoteSbtCreator { + def newRemote(server: IPC.Server) = { + val globalBase = "-Dsbt.global.base=" + (new File(directory, "global")).getAbsolutePath + val cp = IO readLinesURL (getClass getResource "/RunFromSource.classpath") + val cpString = cp mkString File.pathSeparator + val mainClassName = "sbt.RunFromSourceMain" + val args = List(mainClassName, directory.toString, "<" + server.port) + val cmd = "java" :: launchOpts.toList ::: globalBase :: "-cp" :: cpString :: args ::: Nil + val io = BasicIO(false, log).withInput(_.close()) + val p = Process(cmd, directory) run (io) + val thread = new Thread() 
{ override def run() = { p.exitValue(); server.close() } } + thread.start() + p + } +} diff --git a/scripted/sbt/src/main/scala/sbt/test/SbtHandler.scala b/scripted/sbt/src/main/scala/sbt/scriptedtest/SbtHandler.scala similarity index 52% rename from scripted/sbt/src/main/scala/sbt/test/SbtHandler.scala rename to scripted/sbt/src/main/scala/sbt/scriptedtest/SbtHandler.scala index 04b85e572..bf7174731 100644 --- a/scripted/sbt/src/main/scala/sbt/test/SbtHandler.scala +++ b/scripted/sbt/src/main/scala/sbt/scriptedtest/SbtHandler.scala @@ -6,93 +6,86 @@ */ package sbt -package test +package scriptedtest -import java.io.{ File, IOException } -import xsbt.IPC +import java.io.IOException +import java.net.SocketException + +import scala.sys.process.Process import sbt.internal.scripted.{ StatementHandler, TestFailed } -import sbt.util.Logger -import sbt.util.Logger._ - -import scala.sys.process.{ BasicIO, Process } +import xsbt.IPC final case class SbtInstance(process: Process, server: IPC.Server) -final class SbtHandler(directory: File, - launcher: File, - log: Logger, - launchOpts: Seq[String] = Seq()) - extends StatementHandler { +final class SbtHandler(remoteSbtCreator: RemoteSbtCreator) extends StatementHandler { + type State = Option[SbtInstance] + def initialState = None def apply(command: String, arguments: List[String], i: Option[SbtInstance]): Option[SbtInstance] = - onSbtInstance(i) { (process, server) => + onSbtInstance(i) { (_, server) => send((command :: arguments.map(escape)).mkString(" "), server) - receive(command + " failed", server) + receive(s"$command failed", server) } def onSbtInstance(i: Option[SbtInstance])(f: (Process, IPC.Server) => Unit): Option[SbtInstance] = i match { - case Some(SbtInstance(_, server)) if server.isClosed => - finish(i) - onNewSbtInstance(f) - case Some(SbtInstance(process, server)) => - f(process, server) - i - case None => - onNewSbtInstance(f) + case Some(SbtInstance(_, server)) if server.isClosed => finish(i); 
onNewSbtInstance(f) + case Some(SbtInstance(process, server)) => f(process, server); i + case None => onNewSbtInstance(f) } private[this] def onNewSbtInstance(f: (Process, IPC.Server) => Unit): Option[SbtInstance] = { val server = IPC.unmanagedServer - val p = try newRemote(server) - catch { case e: Throwable => server.close(); throw e } - val ai = Some(SbtInstance(p, server)) + val p = + try newRemote(server) + catch { case e: Throwable => server.close(); throw e } + val i = Some(SbtInstance(p, server)) try f(p, server) catch { case e: Throwable => // TODO: closing is necessary only because StatementHandler uses exceptions for signaling errors - finish(ai); throw e + finish(i) + throw e } - ai + i } - def finish(state: Option[SbtInstance]) = state match { + def finish(state: State) = state match { + case None => case Some(SbtInstance(process, server)) => try { send("exit", server) process.exitValue() + () } catch { case _: IOException => process.destroy() } - case None => } - def send(message: String, server: IPC.Server) = server.connection { _.send(message) } + + def send(message: String, server: IPC.Server) = server.connection(_.send(message)) + def receive(errorMessage: String, server: IPC.Server) = server.connection { ipc => val resultMessage = ipc.receive if (!resultMessage.toBoolean) throw new TestFailed(errorMessage) } + def newRemote(server: IPC.Server): Process = { - val launcherJar = launcher.getAbsolutePath - val globalBase = "-Dsbt.global.base=" + (new File(directory, "global")).getAbsolutePath - val args = "java" :: (launchOpts.toList ++ (globalBase :: "-jar" :: launcherJar :: ("<" + server.port) :: Nil)) - val io = BasicIO(false, log).withInput(_.close()) - val p = Process(args, directory) run (io) - val thread = new Thread() { override def run() = { p.exitValue(); server.close() } } - thread.start() - try { receive("Remote sbt initialization failed", server) } catch { - case _: java.net.SocketException => throw new TestFailed("Remote sbt 
initialization failed") - } + val p = remoteSbtCreator.newRemote(server) + try receive("Remote sbt initialization failed", server) + catch { case _: SocketException => throw new TestFailed("Remote sbt initialization failed") } p } - import java.util.regex.Pattern.{ quote => q } + // if the argument contains spaces, enclose it in quotes, quoting backslashes and quotes - def escape(argument: String) = + def escape(argument: String) = { + import java.util.regex.Pattern.{ quote => q } if (argument.contains(" ")) "\"" + argument.replaceAll(q("""\"""), """\\""").replaceAll(q("\""), "\\\"") + "\"" else argument + } } diff --git a/scripted/sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala b/scripted/sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala new file mode 100644 index 000000000..50883790e --- /dev/null +++ b/scripted/sbt/src/main/scala/sbt/scriptedtest/ScriptedTests.scala @@ -0,0 +1,644 @@ +/* + * sbt + * Copyright 2011 - 2017, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under BSD-3-Clause license (see LICENSE) + */ + +package sbt +package scriptedtest + +import java.io.File +import java.net.SocketException +import java.util.Properties +import java.util.concurrent.ForkJoinPool + +import scala.collection.GenSeq +import scala.collection.mutable +import scala.collection.parallel.ForkJoinTaskSupport +import scala.util.control.NonFatal + +import sbt.internal.scripted._ +import sbt.internal.io.Resources +import sbt.internal.util.{ BufferedLogger, ConsoleLogger, FullLogger } +import sbt.io.syntax._ +import sbt.io.{ DirectoryFilter, HiddenFileFilter, IO } +import sbt.io.FileFilter._ +import sbt.util.{ AbstractLogger, Logger } + +final class ScriptedTests( + resourceBaseDirectory: File, + bufferLog: Boolean, + launcher: File, + launchOpts: Seq[String], +) { + import ScriptedTests.{ TestRunner, emptyCallback } + + private val testResources = new Resources(resourceBaseDirectory) + + val ScriptFilename = "test" + val PendingScriptFilename = 
"pending" + + def scriptedTest(group: String, name: String, log: xsbti.Logger): Seq[TestRunner] = + scriptedTest(group, name, Logger.xlog2Log(log)) + + def scriptedTest(group: String, name: String, log: Logger): Seq[TestRunner] = + singleScriptedTest(group, name, emptyCallback, log) + + /** Returns a sequence of test runners that have to be applied in the call site. */ + def singleScriptedTest( + group: String, + name: String, + prescripted: File => Unit, + log: Logger, + ): Seq[TestRunner] = { + + // Test group and names may be file filters (like '*') + for (groupDir <- (resourceBaseDirectory * group).get; nme <- (groupDir * name).get) yield { + val g = groupDir.getName + val n = nme.getName + val label = s"$g / $n" + () => + { + println(s"Running $label") + val result = testResources.readWriteResourceDirectory(g, n) { testDirectory => + val buffer = new BufferedLogger(new FullLogger(log)) + val singleTestRunner = () => { + val handlers = + createScriptedHandlers(testDirectory, buffer, RemoteSbtCreatorKind.LauncherBased) + val runner = new BatchScriptRunner + val states = new mutable.HashMap[StatementHandler, StatementHandler#State]() + commonRunTest(label, testDirectory, prescripted, handlers, runner, states, buffer) + } + runOrHandleDisabled(label, testDirectory, singleTestRunner, buffer) + } + Seq(result) + } + } + } + + private def createScriptedHandlers( + testDir: File, + buffered: Logger, + remoteSbtCreatorKind: RemoteSbtCreatorKind, + ): Map[Char, StatementHandler] = { + val fileHandler = new FileCommands(testDir) + val remoteSbtCreator = remoteSbtCreatorKind match { + case RemoteSbtCreatorKind.LauncherBased => + new LauncherBasedRemoteSbtCreator(testDir, launcher, buffered, launchOpts) + case RemoteSbtCreatorKind.RunFromSourceBased => + new RunFromSourceBasedRemoteSbtCreator(testDir, buffered, launchOpts) + } + val sbtHandler = new SbtHandler(remoteSbtCreator) + Map('$' -> fileHandler, '>' -> sbtHandler, '#' -> CommentHandler) + } + + /** Returns a 
sequence of test runners that have to be applied in the call site. */ + def batchScriptedRunner( + testGroupAndNames: Seq[(String, String)], + prescripted: File => Unit, + sbtInstances: Int, + log: Logger + ): Seq[TestRunner] = { + // Test group and names may be file filters (like '*') + val groupAndNameDirs = { + for { + (group, name) <- testGroupAndNames + groupDir <- (resourceBaseDirectory * group).get + testDir <- (groupDir * name).get + } yield (groupDir, testDir) + } + + type TestInfo = ((String, String), File) + + val labelsAndDirs = groupAndNameDirs.map { + case (groupDir, nameDir) => + val groupName = groupDir.getName + val testName = nameDir.getName + val testDirectory = testResources.readOnlyResourceDirectory(groupName, testName) + (groupName, testName) -> testDirectory + } + + if (labelsAndDirs.isEmpty) List() + else { + val totalSize = labelsAndDirs.size + val batchSize = totalSize / sbtInstances + + val (launcherBasedTests, runFromSourceBasedTests) = labelsAndDirs.partition { + case (testName, _) => + determineRemoteSbtCreatorKind(testName) match { + case RemoteSbtCreatorKind.LauncherBased => true + case RemoteSbtCreatorKind.RunFromSourceBased => false + } + } + + def logTests(size: Int, how: String) = + log.info( + f"Running $size / $totalSize (${size * 100D / totalSize}%3.2f%%) scripted tests with $how" + ) + logTests(runFromSourceBasedTests.size, "RunFromSourceMain") + logTests(launcherBasedTests.size, "sbt/launcher") + + def createTestRunners( + tests: Seq[TestInfo], + remoteSbtCreatorKind: RemoteSbtCreatorKind, + ): Seq[TestRunner] = { + tests + .grouped(batchSize) + .map { batch => () => + IO.withTemporaryDirectory { + runBatchedTests(batch, _, prescripted, remoteSbtCreatorKind, log) + } + } + .toList + } + + createTestRunners(runFromSourceBasedTests, RemoteSbtCreatorKind.RunFromSourceBased) ++ + createTestRunners(launcherBasedTests, RemoteSbtCreatorKind.LauncherBased) + } + } + + private def determineRemoteSbtCreatorKind(testName: (String, 
String)): RemoteSbtCreatorKind = { + import RemoteSbtCreatorKind._ + val (group, name) = testName + s"$group/$name" match { + case "actions/add-alias" => LauncherBased // sbt/Package$ + case "actions/cross-multiproject" => LauncherBased // tbd + case "actions/external-doc" => LauncherBased // sbt/Package$ + case "actions/input-task" => LauncherBased // sbt/Package$ + case "actions/input-task-dyn" => LauncherBased // sbt/Package$ + case "compiler-project/dotty-compiler-plugin" => LauncherBased // sbt/Package$ + case "compiler-project/run-test" => LauncherBased // sbt/Package$ + case "compiler-project/src-dep-plugin" => LauncherBased // sbt/Package$ + case "dependency-management/artifact" => LauncherBased // tbd + case "dependency-management/cache-classifiers" => LauncherBased // tbd + case "dependency-management/cache-local" => LauncherBased // tbd + case "dependency-management/cache-resolver" => LauncherBased // sbt/Package$ + case "dependency-management/cache-update" => LauncherBased // tbd + case "dependency-management/cached-resolution-circular" => LauncherBased // tbd + case "dependency-management/cached-resolution-classifier" => LauncherBased // tbd + case "dependency-management/cached-resolution-configurations" => LauncherBased // tbd + case "dependency-management/cached-resolution-conflicts" => LauncherBased // tbd + case "dependency-management/cached-resolution-exclude" => LauncherBased // tbd + case "dependency-management/cached-resolution-force" => LauncherBased // tbd + case "dependency-management/cached-resolution-interproj" => LauncherBased // tbd + case "dependency-management/cached-resolution-overrides" => LauncherBased // tbd + case "dependency-management/chainresolver" => LauncherBased // tbd + case "dependency-management/circular-dependency" => LauncherBased // tbd + case "dependency-management/classifier" => LauncherBased // tbd + case "dependency-management/default-resolvers" => LauncherBased // tbd + case 
"dependency-management/deliver-artifacts" => LauncherBased // tbd + case "dependency-management/exclude-transitive" => LauncherBased // tbd + case "dependency-management/extra" => LauncherBased // tbd + case "dependency-management/force" => LauncherBased // tbd + case "dependency-management/info" => LauncherBased // tbd + case "dependency-management/inline-dependencies-a" => LauncherBased // tbd + case "dependency-management/ivy-settings-c" => LauncherBased // sbt/Package$ + case "dependency-management/latest-local-plugin" => LauncherBased // sbt/Package$ + case "dependency-management/metadata-only-resolver" => LauncherBased // tbd + case "dependency-management/no-file-fails-publish" => LauncherBased // tbd + case "dependency-management/override" => LauncherBased // tbd + case "dependency-management/parent-publish" => LauncherBased // sbt/Package$ + case "dependency-management/pom-parent-pom" => LauncherBased // tbd + case "dependency-management/publish-to-maven-local-file" => LauncherBased // sbt/Package$ + case "dependency-management/snapshot-resolution" => LauncherBased // tbd + case "dependency-management/test-artifact" => LauncherBased // sbt/Package$ + case "dependency-management/transitive-version-range" => LauncherBased // tbd + case "dependency-management/update-sbt-classifiers" => LauncherBased // tbd + case "dependency-management/url" => LauncherBased // tbd + case "java/argfile" => LauncherBased // sbt/Package$ + case "java/cross" => LauncherBased // sbt/Package$ + case "java/basic" => LauncherBased // sbt/Package$ + case "java/varargs-main" => LauncherBased // sbt/Package$ + case "package/lazy-name" => LauncherBased // sbt/Package$ + case "package/manifest" => LauncherBased // sbt/Package$ + case "package/resources" => LauncherBased // sbt/Package$ + case "project/Class.forName" => LauncherBased // sbt/Package$ + case "project/binary-plugin" => LauncherBased // sbt/Package$ + case "project/default-settings" => LauncherBased // sbt/Package$ + case 
"project/extra" => LauncherBased // tbd + case "project/flatten" => LauncherBased // sbt/Package$ + case "project/generated-root-no-publish" => LauncherBased // tbd + case "project/lib" => LauncherBased // sbt/Package$ + case "project/scripted-plugin" => LauncherBased // tbd + case "project/scripted-skip-incompatible" => LauncherBased // sbt/Package$ + case "project/session-update-from-cmd" => LauncherBased // tbd + case "project/transitive-plugins" => LauncherBased // tbd + case "run/awt" => LauncherBased // sbt/Package$ + case "run/classpath" => LauncherBased // sbt/Package$ + case "run/daemon" => LauncherBased // sbt/Package$ + case "run/daemon-exit" => LauncherBased // sbt/Package$ + case "run/error" => LauncherBased // sbt/Package$ + case "run/fork" => LauncherBased // sbt/Package$ + case "run/fork-loader" => LauncherBased // sbt/Package$ + case "run/non-local-main" => LauncherBased // sbt/Package$ + case "run/spawn" => LauncherBased // sbt/Package$ + case "run/spawn-exit" => LauncherBased // sbt/Package$ + case "source-dependencies/binary" => LauncherBased // sbt/Package$ + case "source-dependencies/export-jars" => LauncherBased // sbt/Package$ + case "source-dependencies/implicit-search" => LauncherBased // sbt/Package$ + case "source-dependencies/java-basic" => LauncherBased // sbt/Package$ + case "source-dependencies/less-inter-inv" => LauncherBased // sbt/Package$ + case "source-dependencies/less-inter-inv-java" => LauncherBased // sbt/Package$ + case "source-dependencies/linearization" => LauncherBased // sbt/Package$ + case "source-dependencies/named" => LauncherBased // sbt/Package$ + case "source-dependencies/specialized" => LauncherBased // sbt/Package$ + case _ => RunFromSourceBased + } + // sbt/Package$ means: + // java.lang.NoClassDefFoundError: sbt/Package$ (wrong name: sbt/package$) + // Typically from Compile / packageBin / packageOptions + } + + /** Defines an auto plugin that is injected to sbt between every scripted session. 
+ * + * It sets the name of the local root project for those tests run in batch mode. + * + * This is necessary because the current design to run tests in batch mode forces + * scripted tests to share one common sbt dir instead of each one having its own. + * + * Sbt extracts the local root project name from the directory name. So those + * scripted tests that don't set the name for the root and whose test files check + * information based on the name will fail. + * + * The reason why we set the name here and not via `set` is because some tests + * dump the session to check that their settings have been correctly applied. + * + * @param testName The test name used to extract the root project name. + * @return A string-based implementation to run between every reload. + */ + private def createAutoPlugin(testName: String) = + s""" + |import sbt._, Keys._ + |object InstrumentScripted extends AutoPlugin { + | override def trigger = allRequirements + | override def globalSettings: Seq[Setting[_]] = + | Seq(commands += setUpScripted) ++ super.globalSettings + | + | def setUpScripted = Command.command("setUpScripted") { (state0: State) => + | val nameScriptedSetting = name.in(LocalRootProject).:=( + | if (name.value.startsWith("sbt_")) "$testName" else name.value) + | val state1 = Project.extract(state0).append(nameScriptedSetting, state0) + | "initialize" :: state1 + | } + |} + """.stripMargin + + /** Defines the batch execution of scripted tests. + * + * Scripted tests are run one after the other one recycling the handlers, under + * the assumption that handlers do not produce side effects that can change scripted + * tests' behaviours. + * + * In batch mode, the test runner performs these operations between executions: + * + * 1. Delete previous test files in the common test directory. + * 2. Copy over next test files to the common test directory. + * 3. Reload the sbt handler. + * + * @param groupedTests The labels and directories of the tests to run. 
+ * @param tempTestDir The common test directory. + * @param preHook The hook to run before scripted execution. + * @param log The logger. + */ + private def runBatchedTests( + groupedTests: Seq[((String, String), File)], + tempTestDir: File, + preHook: File => Unit, + remoteSbtCreatorKind: RemoteSbtCreatorKind, + log: Logger, + ): Seq[Option[String]] = { + + val runner = new BatchScriptRunner + val buffer = new BufferedLogger(new FullLogger(log)) + val handlers = createScriptedHandlers(tempTestDir, buffer, remoteSbtCreatorKind) + val states = new BatchScriptRunner.States + val seqHandlers = handlers.values.toList + runner.initStates(states, seqHandlers) + + def runBatchTests = { + groupedTests.map { + case ((group, name), originalDir) => + val label = s"$group/$name" + println(s"Running $label") + // Copy test's contents and reload the sbt instance to pick them up + IO.copyDirectory(originalDir, tempTestDir) + + val runTest = () => { + // Reload and initialize (to reload contents of .sbtrc files) + val pluginImplementation = createAutoPlugin(name) + IO.write(tempTestDir / "project" / "InstrumentScripted.scala", pluginImplementation) + def sbtHandlerError = sys error "Missing sbt handler. Scripted is misconfigured." + val sbtHandler = handlers.getOrElse('>', sbtHandlerError) + val commandsToRun = ";reload;setUpScripted" + val statement = Statement(commandsToRun, Nil, successExpected = true, line = -1) + + // Run reload inside the hook to reuse error handling for pending tests + val wrapHook = (file: File) => { + preHook(file) + try runner.processStatement(sbtHandler, statement, states) + catch { + case t: Throwable => + val newMsg = "Reload for scripted batch execution failed." 
+ throw new TestException(statement, newMsg, t) + } + } + + commonRunTest(label, tempTestDir, wrapHook, handlers, runner, states, buffer) + } + + // Run the test and delete files (except global that holds local scala jars) + val result = runOrHandleDisabled(label, tempTestDir, runTest, buffer) + IO.delete(tempTestDir.*("*" -- "global").get) + result + } + } + + try runBatchTests + finally runner.cleanUpHandlers(seqHandlers, states) + } + + private def runOrHandleDisabled( + label: String, + testDirectory: File, + runTest: () => Option[String], + log: Logger + ): Option[String] = { + val existsDisabled = new File(testDirectory, "disabled").isFile + if (!existsDisabled) runTest() + else { + log.info(s"D $label [DISABLED]") + None + } + } + + private val PendingLabel = "[PENDING]" + + private def commonRunTest( + label: String, + testDirectory: File, + preScriptedHook: File => Unit, + handlers: Map[Char, StatementHandler], + runner: BatchScriptRunner, + states: BatchScriptRunner.States, + log: BufferedLogger + ): Option[String] = { + if (bufferLog) log.record() + + val (file, pending) = { + val normal = new File(testDirectory, ScriptFilename) + val pending = new File(testDirectory, PendingScriptFilename) + if (pending.isFile) (pending, true) else (normal, false) + } + + val pendingMark = if (pending) PendingLabel else "" + + def testFailed(t: Throwable): Option[String] = { + if (pending) log.clear() else log.stop() + log.error(s"x $label $pendingMark") + if (!NonFatal(t)) throw t // We make sure fatal errors are rethrown + if (t.isInstanceOf[TestException]) { + t.getCause match { + case null | _: SocketException => log.error(s" Cause of test exception: ${t.getMessage}") + case _ => t.printStackTrace() + } + } + if (pending) None else Some(label) + } + + import scala.util.control.Exception.catching + catching(classOf[TestException]).withApply(testFailed).andFinally(log.clear).apply { + preScriptedHook(testDirectory) + val parser = new TestScriptParser(handlers) + val 
handlersAndStatements = parser.parse(file) + runner.apply(handlersAndStatements, states) + + // Handle successful tests + log.info(s"+ $label $pendingMark") + if (pending) { + log.clear() + log.error(" Pending test passed. Mark as passing to remove this failure.") + Some(label) + } else None + } + } +} + +object ScriptedTests extends ScriptedRunner { + + /** Represents the function that runs the scripted tests, both in single or batch mode. */ + type TestRunner = () => Seq[Option[String]] + + val emptyCallback: File => Unit = _ => () + + def main(args: Array[String]): Unit = { + val directory = new File(args(0)) + val buffer = args(1).toBoolean + // val sbtVersion = args(2) + // val defScalaVersion = args(3) + // val buildScalaVersions = args(4) + val bootProperties = new File(args(5)) + val tests = args.drop(6) + val logger = ConsoleLogger() + run(directory, buffer, tests, logger, bootProperties, Array(), emptyCallback) + } + +} + +/** Runner for `scripted`. Not be confused with ScriptRunner. 
*/ +class ScriptedRunner { + // This is called by project/Scripted.scala + // Using java.util.List[File] to encode File => Unit + def run( + resourceBaseDirectory: File, + bufferLog: Boolean, + tests: Array[String], + bootProperties: File, + launchOpts: Array[String], + prescripted: java.util.List[File], + ): Unit = { + val logger = ConsoleLogger() + val addTestFile = (f: File) => { prescripted.add(f); () } + run(resourceBaseDirectory, bufferLog, tests, logger, bootProperties, launchOpts, addTestFile) + //new FullLogger(Logger.xlog2Log(log))) + } + + // This is called by sbt-scripted 0.13.x and 1.x (see https://github.com/sbt/sbt/issues/3245) + def run( + resourceBaseDirectory: File, + bufferLog: Boolean, + tests: Array[String], + bootProperties: File, + launchOpts: Array[String], + ): Unit = { + val logger = ConsoleLogger() + val prescripted = ScriptedTests.emptyCallback + run(resourceBaseDirectory, bufferLog, tests, logger, bootProperties, launchOpts, prescripted) + } + + def run( + resourceBaseDirectory: File, + bufferLog: Boolean, + tests: Array[String], + logger: AbstractLogger, + bootProperties: File, + launchOpts: Array[String], + prescripted: File => Unit, + ): Unit = { + // Force Log4J to not use a thread context classloader otherwise it throws a CCE + sys.props(org.apache.logging.log4j.util.LoaderUtil.IGNORE_TCCL_PROPERTY) = "true" + + val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, bootProperties, launchOpts) + val sbtVersion = bootProperties.getName.dropWhile(!_.isDigit).dropRight(".jar".length) + val accept = isTestCompatible(resourceBaseDirectory, sbtVersion) _ + val allTests = get(tests, resourceBaseDirectory, accept, logger) flatMap { + case ScriptedTest(group, name) => + runner.singleScriptedTest(group, name, prescripted, logger) + } + runAll(allTests) + } + + def runInParallel( + baseDir: File, + bufferLog: Boolean, + tests: Array[String], + bootProps: File, + launchOpts: Array[String], + prescripted: java.util.List[File], + ): 
Unit = { + runInParallel(baseDir, bufferLog, tests, bootProps, launchOpts, prescripted, 1) + } + + // This is used by sbt-scripted sbt 1.x + def runInParallel( + baseDir: File, + bufferLog: Boolean, + tests: Array[String], + bootProps: File, + launchOpts: Array[String], + prescripted: java.util.List[File], + instances: Int + ): Unit = { + val logger = ConsoleLogger() + val addTestFile = (f: File) => { prescripted.add(f); () } + runInParallel(baseDir, bufferLog, tests, logger, bootProps, launchOpts, addTestFile, instances) + } + + def runInParallel( + baseDir: File, + bufferLog: Boolean, + tests: Array[String], + logger: AbstractLogger, + bootProps: File, + launchOpts: Array[String], + prescripted: File => Unit, + instances: Int + ): Unit = { + val runner = new ScriptedTests(baseDir, bufferLog, bootProps, launchOpts) + val sbtVersion = bootProps.getName.dropWhile(!_.isDigit).dropRight(".jar".length) + val accept = isTestCompatible(baseDir, sbtVersion) _ + // The scripted tests mapped to the inputs that the user wrote after `scripted`. 
+ val scriptedTests = + get(tests, baseDir, accept, logger).map(st => (st.group, st.name)) + val scriptedRunners = runner.batchScriptedRunner(scriptedTests, prescripted, instances, logger) + val parallelRunners = scriptedRunners.toParArray + parallelRunners.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(instances)) + runAll(parallelRunners) + } + + private def reportErrors(errors: GenSeq[String]): Unit = + if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) else () + + def runAll(toRun: GenSeq[ScriptedTests.TestRunner]): Unit = + reportErrors(toRun.flatMap(test => test.apply().flatten)) + + @deprecated("No longer used", "1.1.0") + def get(tests: Seq[String], baseDirectory: File, log: Logger): Seq[ScriptedTest] = + get(tests, baseDirectory, _ => true, log) + + def get( + tests: Seq[String], + baseDirectory: File, + accept: ScriptedTest => Boolean, + log: Logger, + ): Seq[ScriptedTest] = + if (tests.isEmpty) listTests(baseDirectory, accept, log) else parseTests(tests) + + @deprecated("No longer used", "1.1.0") + def listTests(baseDirectory: File, log: Logger): Seq[ScriptedTest] = + listTests(baseDirectory, _ => true, log) + + def listTests( + baseDirectory: File, + accept: ScriptedTest => Boolean, + log: Logger, + ): Seq[ScriptedTest] = + (new ListTests(baseDirectory, accept, log)).listTests + + def parseTests(in: Seq[String]): Seq[ScriptedTest] = + for (testString <- in) yield { + val Array(group, name) = testString.split("/").map(_.trim) + ScriptedTest(group, name) + } + + private def isTestCompatible(resourceBaseDirectory: File, sbtVersion: String)( + test: ScriptedTest + ): Boolean = { + import sbt.internal.librarymanagement.cross.CrossVersionUtil.binarySbtVersion + val buildProperties = new Properties() + val testDir = new File(new File(resourceBaseDirectory, test.group), test.name) + val buildPropertiesFile = new File(new File(testDir, "project"), "build.properties") + + IO.load(buildProperties, buildPropertiesFile) + + 
Option(buildProperties.getProperty("sbt.version")) match { + case Some(version) => binarySbtVersion(version) == binarySbtVersion(sbtVersion) + case None => true + } + } + +} + +final case class ScriptedTest(group: String, name: String) { + override def toString = s"$group/$name" +} + +private[sbt] final class ListTests( + baseDirectory: File, + accept: ScriptedTest => Boolean, + log: Logger, +) { + + def filter = DirectoryFilter -- HiddenFileFilter + + def listTests: Seq[ScriptedTest] = { + IO.listFiles(baseDirectory, filter) flatMap { group => + val groupName = group.getName + listTests(group).map(ScriptedTest(groupName, _)) + } + } + + private[this] def listTests(group: File): Set[String] = { + val groupName = group.getName + val allTests = IO.listFiles(group, filter) + if (allTests.isEmpty) { + log.warn(s"No tests in test group $groupName") + Set.empty + } else { + val (included, skipped) = + allTests.toList.partition(test => accept(ScriptedTest(groupName, test.getName))) + if (included.isEmpty) + log.warn(s"Test group $groupName skipped.") + else if (skipped.nonEmpty) { + log.warn(s"Tests skipped in group $groupName:") + skipped.foreach(testName => log.warn(s" ${testName.getName}")) + } + Set(included.map(_.getName): _*) + } + } +} + +class PendingTestSuccessException(label: String) extends Exception { + override def getMessage: String = + s"The pending test $label succeeded. Mark this test as passing to remove this failure." +} diff --git a/scripted/sbt/src/main/scala/sbt/test/ScriptedTests.scala b/scripted/sbt/src/main/scala/sbt/test/ScriptedTests.scala deleted file mode 100644 index 29b4f2258..000000000 --- a/scripted/sbt/src/main/scala/sbt/test/ScriptedTests.scala +++ /dev/null @@ -1,488 +0,0 @@ -/* - * sbt - * Copyright 2011 - 2017, Lightbend, Inc. 
- * Copyright 2008 - 2010, Mark Harrah - * Licensed under BSD-3-Clause license (see LICENSE) - */ - -package sbt -package test - -import java.io.File -import java.util.Properties - -import scala.util.control.NonFatal -import sbt.internal.scripted._ -import sbt.io.{ DirectoryFilter, HiddenFileFilter, IO } -import sbt.io.IO.wrapNull -import sbt.io.FileFilter._ -import sbt.internal.io.Resources -import sbt.internal.util.{ BufferedLogger, ConsoleLogger, FullLogger } -import sbt.util.{ AbstractLogger, Logger } - -import scala.collection.mutable -import scala.collection.parallel.ForkJoinTaskSupport -import scala.collection.parallel.mutable.ParSeq - -final class ScriptedTests(resourceBaseDirectory: File, - bufferLog: Boolean, - launcher: File, - launchOpts: Seq[String]) { - import sbt.io.syntax._ - import ScriptedTests._ - private val testResources = new Resources(resourceBaseDirectory) - - val ScriptFilename = "test" - val PendingScriptFilename = "pending" - - def scriptedTest(group: String, name: String, log: xsbti.Logger): Seq[TestRunner] = - scriptedTest(group, name, Logger.xlog2Log(log)) - def scriptedTest(group: String, name: String, log: Logger): Seq[TestRunner] = - singleScriptedTest(group, name, emptyCallback, log) - - /** Returns a sequence of test runners that have to be applied in the call site. 
*/ - def singleScriptedTest(group: String, - name: String, - prescripted: File => Unit, - log: Logger): Seq[TestRunner] = { - - // Test group and names may be file filters (like '*') - for (groupDir <- (resourceBaseDirectory * group).get; nme <- (groupDir * name).get) yield { - val g = groupDir.getName - val n = nme.getName - val label = s"$g / $n" - () => - { - println(s"Running $label") - val result = testResources.readWriteResourceDirectory(g, n) { testDirectory => - val buffer = new BufferedLogger(new FullLogger(log)) - val singleTestRunner = () => { - val handlers = createScriptedHandlers(testDirectory, buffer) - val runner = new BatchScriptRunner - val states = new mutable.HashMap[StatementHandler, Any]() - commonRunTest(label, testDirectory, prescripted, handlers, runner, states, buffer) - } - runOrHandleDisabled(label, testDirectory, singleTestRunner, buffer) - } - Seq(result) - } - } - } - - private def createScriptedHandlers( - testDir: File, - buffered: Logger - ): Map[Char, StatementHandler] = { - val fileHandler = new FileCommands(testDir) - val sbtHandler = new SbtHandler(testDir, launcher, buffered, launchOpts) - Map('$' -> fileHandler, '>' -> sbtHandler, '#' -> CommentHandler) - } - - /** Returns a sequence of test runners that have to be applied in the call site. 
*/ - def batchScriptedRunner( - testGroupAndNames: Seq[(String, String)], - prescripted: File => Unit, - sbtInstances: Int, - log: Logger - ): Seq[TestRunner] = { - // Test group and names may be file filters (like '*') - val groupAndNameDirs = { - for { - (group, name) <- testGroupAndNames - groupDir <- resourceBaseDirectory.*(group).get - testDir <- groupDir.*(name).get - } yield (groupDir, testDir) - } - - val labelsAndDirs = groupAndNameDirs.map { - case (groupDir, nameDir) => - val groupName = groupDir.getName - val testName = nameDir.getName - val testDirectory = testResources.readOnlyResourceDirectory(groupName, testName) - (groupName, testName) -> testDirectory - } - - if (labelsAndDirs.isEmpty) List() - else { - val batchSeed = labelsAndDirs.size / sbtInstances - val batchSize = if (batchSeed == 0) labelsAndDirs.size else batchSeed - labelsAndDirs - .grouped(batchSize) - .map(batch => () => IO.withTemporaryDirectory(runBatchedTests(batch, _, prescripted, log))) - .toList - } - } - - /** Defines an auto plugin that is injected to sbt between every scripted session. - * - * It sets the name of the local root project for those tests run in batch mode. - * - * This is necessary because the current design to run tests in batch mode forces - * scripted tests to share one common sbt dir instead of each one having its own. - * - * Sbt extracts the local root project name from the directory name. So those - * scripted tests that don't set the name for the root and whose test files check - * information based on the name will fail. - * - * The reason why we set the name here and not via `set` is because some tests - * dump the session to check that their settings have been correctly applied. - * - * @param testName The test name used to extract the root project name. - * @return A string-based implementation to run between every reload. 
- */ - private def createAutoPlugin(testName: String) = - s""" - |import sbt._, Keys._ - |object InstrumentScripted extends AutoPlugin { - | override def trigger = allRequirements - | override def globalSettings: Seq[Setting[_]] = - | Seq(commands += setUpScripted) ++ super.globalSettings - | - | def setUpScripted = Command.command("setUpScripted") { (state0: State) => - | val nameScriptedSetting = name.in(LocalRootProject).:=( - | if (name.value.startsWith("sbt_")) "$testName" else name.value) - | val state1 = Project.extract(state0).append(nameScriptedSetting, state0) - | "initialize" :: state1 - | } - |} - """.stripMargin - - /** Defines the batch execution of scripted tests. - * - * Scripted tests are run one after the other one recycling the handlers, under - * the assumption that handlers do not produce side effects that can change scripted - * tests' behaviours. - * - * In batch mode, the test runner performs these operations between executions: - * - * 1. Delete previous test files in the common test directory. - * 2. Copy over next test files to the common test directory. - * 3. Reload the sbt handler. - * - * @param groupedTests The labels and directories of the tests to run. - * @param tempTestDir The common test directory. - * @param preHook The hook to run before scripted execution. - * @param log The logger. 
- */ - private def runBatchedTests( - groupedTests: Seq[((String, String), File)], - tempTestDir: File, - preHook: File => Unit, - log: Logger - ): Seq[Option[String]] = { - - val runner = new BatchScriptRunner - val buffer = new BufferedLogger(new FullLogger(log)) - val handlers = createScriptedHandlers(tempTestDir, buffer) - val states = new BatchScriptRunner.States - val seqHandlers = handlers.values.toList - runner.initStates(states, seqHandlers) - - def runBatchTests = { - groupedTests.map { - case ((group, name), originalDir) => - val label = s"$group / $name" - println(s"Running $label") - // Copy test's contents and reload the sbt instance to pick them up - IO.copyDirectory(originalDir, tempTestDir) - - val runTest = () => { - // Reload and initialize (to reload contents of .sbtrc files) - val pluginImplementation = createAutoPlugin(name) - IO.write(tempTestDir / "project" / "InstrumentScripted.scala", pluginImplementation) - val sbtHandlerError = "Missing sbt handler. Scripted is misconfigured." - val sbtHandler = handlers.getOrElse('>', sbtHandlerError).asInstanceOf[SbtHandler] - val commandsToRun = ";reload;setUpScripted" - val statement = Statement(commandsToRun, Nil, successExpected = true, line = -1) - - // Run reload inside the hook to reuse error handling for pending tests - val wrapHook = (file: File) => { - preHook(file) - try runner.processStatement(sbtHandler, statement, states) - catch { - case t: Throwable => - val newMsg = "Reload for scripted batch execution failed." 
- throw new TestException(statement, newMsg, t) - } - } - - commonRunTest(label, tempTestDir, wrapHook, handlers, runner, states, buffer) - } - - // Run the test and delete files (except global that holds local scala jars) - val result = runOrHandleDisabled(label, tempTestDir, runTest, buffer) - IO.delete(tempTestDir.*("*" -- "global").get) - result - } - } - - try runBatchTests - finally runner.cleanUpHandlers(seqHandlers, states) - } - - private def runOrHandleDisabled( - label: String, - testDirectory: File, - runTest: () => Option[String], - log: Logger - ): Option[String] = { - val existsDisabled = new File(testDirectory, "disabled").isFile - if (!existsDisabled) runTest() - else { - log.info(s"D $label [DISABLED]") - None - } - } - - private val PendingLabel = "[PENDING]" - - private def commonRunTest( - label: String, - testDirectory: File, - preScriptedHook: File => Unit, - createHandlers: Map[Char, StatementHandler], - runner: BatchScriptRunner, - states: BatchScriptRunner.States, - log: BufferedLogger - ): Option[String] = { - if (bufferLog) log.record() - - val (file, pending) = { - val normal = new File(testDirectory, ScriptFilename) - val pending = new File(testDirectory, PendingScriptFilename) - if (pending.isFile) (pending, true) else (normal, false) - } - - val pendingMark = if (pending) PendingLabel else "" - def testFailed(t: Throwable): Option[String] = { - if (pending) log.clear() else log.stop() - log.error(s"x $label $pendingMark") - if (!NonFatal(t)) throw t // We make sure fatal errors are rethrown - if (t.isInstanceOf[TestException]) { - t.getCause match { - case null | _: java.net.SocketException => - log.error(" Cause of test exception: " + t.getMessage) - case _ => t.printStackTrace() - } - } - if (pending) None else Some(label) - } - - import scala.util.control.Exception.catching - catching(classOf[TestException]).withApply(testFailed).andFinally(log.clear).apply { - preScriptedHook(testDirectory) - val handlers = createHandlers - val 
parser = new TestScriptParser(handlers) - val handlersAndStatements = parser.parse(file) - runner.apply(handlersAndStatements, states) - - // Handle successful tests - log.info(s"+ $label $pendingMark") - if (pending) { - log.clear() - log.error(" Pending test passed. Mark as passing to remove this failure.") - Some(label) - } else None - } - } -} - -object ScriptedTests extends ScriptedRunner { - - /** Represents the function that runs the scripted tests, both in single or batch mode. */ - type TestRunner = () => Seq[Option[String]] - - val emptyCallback: File => Unit = _ => () - def main(args: Array[String]): Unit = { - val directory = new File(args(0)) - val buffer = args(1).toBoolean - // val sbtVersion = args(2) - // val defScalaVersion = args(3) - // val buildScalaVersions = args(4) - val bootProperties = new File(args(5)) - val tests = args.drop(6) - val logger = ConsoleLogger() - run(directory, buffer, tests, logger, bootProperties, Array(), emptyCallback) - } -} - -class ScriptedRunner { - // This is called by project/Scripted.scala - // Using java.util.List[File] to encode File => Unit - def run(resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - bootProperties: File, - launchOpts: Array[String], - prescripted: java.util.List[File]): Unit = { - - // Force Log4J to not use a thread context classloader otherwise it throws a CCE - sys.props(org.apache.logging.log4j.util.LoaderUtil.IGNORE_TCCL_PROPERTY) = "true" - - run(resourceBaseDirectory, bufferLog, tests, ConsoleLogger(), bootProperties, launchOpts, { - f: File => - prescripted.add(f); () - }) //new FullLogger(Logger.xlog2Log(log))) - } - // This is called by sbt-scripted 0.13.x (the sbt host) when cross-compiling to sbt 0.13.x and 1.0.x - // See https://github.com/sbt/sbt/issues/3245 - def run(resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - bootProperties: File, - launchOpts: Array[String]): Unit = - run(resourceBaseDirectory, - bufferLog, - tests, - 
ConsoleLogger(), - bootProperties, - launchOpts, - ScriptedTests.emptyCallback) - - def run(resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - logger: AbstractLogger, - bootProperties: File, - launchOpts: Array[String], - prescripted: File => Unit): Unit = { - val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, bootProperties, launchOpts) - val sbtVersion = bootProperties.getName.dropWhile(!_.isDigit).dropRight(".jar".length) - val accept = isTestCompatible(resourceBaseDirectory, sbtVersion) _ - val allTests = get(tests, resourceBaseDirectory, accept, logger) flatMap { - case ScriptedTest(group, name) => - runner.singleScriptedTest(group, name, prescripted, logger) - } - runAll(allTests) - } - - def runInParallel(resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - bootProperties: File, - launchOpts: Array[String], - prescripted: java.util.List[File]): Unit = { - val logger = ConsoleLogger() - val addTestFile = (f: File) => { prescripted.add(f); () } - runInParallel(resourceBaseDirectory, - bufferLog, - tests, - logger, - bootProperties, - launchOpts, - addTestFile, - 1) - } - - def runInParallel( - resourceBaseDirectory: File, - bufferLog: Boolean, - tests: Array[String], - logger: AbstractLogger, - bootProperties: File, - launchOpts: Array[String], - prescripted: File => Unit, - instances: Int - ): Unit = { - val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, bootProperties, launchOpts) - // The scripted tests mapped to the inputs that the user wrote after `scripted`. 
- val scriptedTests = get(tests, resourceBaseDirectory, logger).map(st => (st.group, st.name)) - val scriptedRunners = runner.batchScriptedRunner(scriptedTests, prescripted, instances, logger) - val parallelRunners = scriptedRunners.toParArray - val pool = new java.util.concurrent.ForkJoinPool(instances) - parallelRunners.tasksupport = new ForkJoinTaskSupport(pool) - runAllInParallel(parallelRunners) - } - - private def reportErrors(errors: Seq[String]): Unit = - if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) else () - - def runAll(toRun: Seq[ScriptedTests.TestRunner]): Unit = - reportErrors(toRun.flatMap(test => test.apply().flatten.toSeq)) - - // We cannot reuse `runAll` because parallel collections != collections - def runAllInParallel(tests: ParSeq[ScriptedTests.TestRunner]): Unit = { - reportErrors(tests.flatMap(test => test.apply().flatten.toSeq).toList) - } - - @deprecated("No longer used", "1.1.0") - def get(tests: Seq[String], baseDirectory: File, log: Logger): Seq[ScriptedTest] = - get(tests, baseDirectory, _ => true, log) - def get(tests: Seq[String], - baseDirectory: File, - accept: ScriptedTest => Boolean, - log: Logger): Seq[ScriptedTest] = - if (tests.isEmpty) listTests(baseDirectory, accept, log) else parseTests(tests) - - @deprecated("No longer used", "1.1.0") - def listTests(baseDirectory: File, log: Logger): Seq[ScriptedTest] = - listTests(baseDirectory, _ => true, log) - def listTests(baseDirectory: File, - accept: ScriptedTest => Boolean, - log: Logger): Seq[ScriptedTest] = - (new ListTests(baseDirectory, accept, log)).listTests - - def parseTests(in: Seq[String]): Seq[ScriptedTest] = - for (testString <- in) yield { - val Array(group, name) = testString.split("/").map(_.trim) - ScriptedTest(group, name) - } - - private def isTestCompatible(resourceBaseDirectory: File, sbtVersion: String)( - test: ScriptedTest): Boolean = { - import sbt.internal.librarymanagement.cross.CrossVersionUtil.binarySbtVersion - val 
buildProperties = new Properties() - val testDir = new File(new File(resourceBaseDirectory, test.group), test.name) - val buildPropertiesFile = new File(new File(testDir, "project"), "build.properties") - - IO.load(buildProperties, buildPropertiesFile) - - Option(buildProperties.getProperty("sbt.version")) match { - case Some(version) => binarySbtVersion(version) == binarySbtVersion(sbtVersion) - case None => true - } - } - -} - -final case class ScriptedTest(group: String, name: String) { - override def toString = group + "/" + name -} -private[test] object ListTests { - def list(directory: File, filter: java.io.FileFilter) = wrapNull(directory.listFiles(filter)) -} -import ListTests._ -private[test] final class ListTests(baseDirectory: File, - accept: ScriptedTest => Boolean, - log: Logger) { - def filter = DirectoryFilter -- HiddenFileFilter - def listTests: Seq[ScriptedTest] = { - list(baseDirectory, filter) flatMap { group => - val groupName = group.getName - listTests(group).map(ScriptedTest(groupName, _)) - } - } - private[this] def listTests(group: File): Set[String] = { - val groupName = group.getName - val allTests = list(group, filter) - if (allTests.isEmpty) { - log.warn("No tests in test group " + groupName) - Set.empty - } else { - val (included, skipped) = - allTests.toList.partition(test => accept(ScriptedTest(groupName, test.getName))) - if (included.isEmpty) - log.warn("Test group " + groupName + " skipped.") - else if (skipped.nonEmpty) { - log.warn("Tests skipped in group " + group.getName + ":") - skipped.foreach(testName => log.warn(" " + testName.getName)) - } - Set(included.map(_.getName): _*) - } - } -} - -class PendingTestSuccessException(label: String) extends Exception { - override def getMessage: String = - s"The pending test $label succeeded. Mark this test as passing to remove this failure." 
-} diff --git a/tasks-standard/src/main/scala/sbt/Action.scala b/tasks-standard/src/main/scala/sbt/Action.scala index 0be3a8ae5..6cd50824d 100644 --- a/tasks-standard/src/main/scala/sbt/Action.scala +++ b/tasks-standard/src/main/scala/sbt/Action.scala @@ -25,7 +25,7 @@ sealed trait Action[T] { * If `inline` is true, `f` will be evaluated on the scheduler thread without the overhead of normal scheduling when possible. * This is intended as an optimization for already evaluated values or very short pure computations. */ -final case class Pure[T](f: () => T, inline: Boolean) extends Action[T] { +final case class Pure[T](f: () => T, `inline`: Boolean) extends Action[T] { private[sbt] def mapTask(f: Task ~> Task) = this } @@ -74,8 +74,10 @@ final case class Task[T](info: Info[T], work: Action[T]) { * @param attributes Arbitrary user-defined key/value pairs describing this task * @param post a transformation that takes the result of evaluating this task and produces user-defined key/value pairs. 
*/ -final case class Info[T](attributes: AttributeMap = AttributeMap.empty, - post: T => AttributeMap = const(AttributeMap.empty)) { +final case class Info[T]( + attributes: AttributeMap = AttributeMap.empty, + post: T => AttributeMap = const(AttributeMap.empty) +) { import Info._ def name = attributes.get(Name) def description = attributes.get(Description) diff --git a/tasks-standard/src/main/scala/sbt/std/Streams.scala b/tasks-standard/src/main/scala/sbt/std/Streams.scala index 6206b9271..61f97f89e 100644 --- a/tasks-standard/src/main/scala/sbt/std/Streams.scala +++ b/tasks-standard/src/main/scala/sbt/std/Streams.scala @@ -126,10 +126,12 @@ object Streams { synchronized { streams.values.foreach(_.close()); streams.clear() } } - def apply[Key, J: IsoString](taskDirectory: Key => File, - name: Key => String, - mkLogger: (Key, PrintWriter) => ManagedLogger, - converter: SupportConverter[J]): Streams[Key] = new Streams[Key] { + def apply[Key, J: IsoString]( + taskDirectory: Key => File, + name: Key => String, + mkLogger: (Key, PrintWriter) => ManagedLogger, + converter: SupportConverter[J] + ): Streams[Key] = new Streams[Key] { def apply(a: Key): ManagedStreams[Key] = new ManagedStreams[Key] { private[this] var opened: List[Closeable] = Nil @@ -142,8 +144,9 @@ object Streams { make(a, sid)(f => new PlainOutput(new FileOutputStream(f), converter)) def readText(a: Key, sid: String = default): BufferedReader = - make(a, sid)(f => - new BufferedReader(new InputStreamReader(new FileInputStream(f), IO.defaultCharset))) + make(a, sid)( + f => new BufferedReader(new InputStreamReader(new FileInputStream(f), IO.defaultCharset)) + ) def readBinary(a: Key, sid: String = default): BufferedInputStream = make(a, sid)(f => new BufferedInputStream(new FileInputStream(f))) @@ -152,8 +155,13 @@ object Streams { make(a, sid)( f => new PrintWriter( - new DeferredWriter(new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(f), IO.defaultCharset))))) + new DeferredWriter( + 
new BufferedWriter( + new OutputStreamWriter(new FileOutputStream(f), IO.defaultCharset) + ) + ) + ) + ) def binary(sid: String = default): BufferedOutputStream = make(a, sid)(f => new BufferedOutputStream(new FileOutputStream(f))) diff --git a/tasks-standard/src/main/scala/sbt/std/System.scala b/tasks-standard/src/main/scala/sbt/std/System.scala index 060bc0d48..ed2739646 100644 --- a/tasks-standard/src/main/scala/sbt/std/System.scala +++ b/tasks-standard/src/main/scala/sbt/std/System.scala @@ -58,8 +58,9 @@ object Transform { def uniform[T, D](tasks: Seq[Task[D]])(f: Seq[Result[D]] => Either[Task[T], T]): Node[Task, T] = toNode[T, λ[L[x] => List[L[D]]]](tasks.toList)(f)(AList.seq[D]) - def toNode[T, k[L[x]]](inputs: k[Task])(f: k[Result] => Either[Task[T], T])( - implicit a: AList[k]): Node[Task, T] = new Node[Task, T] { + def toNode[T, k[L[x]]]( + inputs: k[Task] + )(f: k[Result] => Either[Task[T], T])(implicit a: AList[k]): Node[Task, T] = new Node[Task, T] { type K[L[x]] = k[L] val in = inputs val alist = a diff --git a/tasks-standard/src/main/scala/sbt/std/TaskExtra.scala b/tasks-standard/src/main/scala/sbt/std/TaskExtra.scala index b94ea5e1c..52e754a81 100644 --- a/tasks-standard/src/main/scala/sbt/std/TaskExtra.scala +++ b/tasks-standard/src/main/scala/sbt/std/TaskExtra.scala @@ -39,22 +39,26 @@ sealed trait SingleInTask[S] { @deprecated( "Use the `result` method to create a task that returns the full Result of this task. 
Then, call `map` on the new task.", - "0.13.0") + "0.13.0" + ) def mapR[T](f: Result[S] => T): Task[T] @deprecated( "Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.", - "0.13.0") + "0.13.0" + ) def flatFailure[T](f: Incomplete => Task[T]): Task[T] @deprecated( "Use the `failure` method to create a task that returns Incomplete when this task fails and then call `mapFailure` on the new task.", - "0.13.0") + "0.13.0" + ) def mapFailure[T](f: Incomplete => T): Task[T] @deprecated( "Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.", - "0.13.0") + "0.13.0" + ) def flatMapR[T](f: Result[S] => Task[T]): Task[T] } sealed trait TaskInfo[S] { @@ -160,10 +164,12 @@ trait TaskExtra { def andFinally(fin: => Unit): Task[S] = mapR(x => Result.tryValue[S]({ fin; x })) def doFinally(t: Task[Unit]): Task[S] = - flatMapR(x => - t.result.map { tx => - Result.tryValues[S](tx :: Nil, x) - }) + flatMapR( + x => + t.result.map { tx => + Result.tryValues[S](tx :: Nil, x) + } + ) def ||[T >: S](alt: Task[T]): Task[T] = flatMapR { case Value(v) => task(v); case Inc(_) => alt } @@ -175,8 +181,9 @@ trait TaskExtra { def named(s: String): Task[S] = in.copy(info = in.info.setName(s)) } - final implicit def pipeToProcess[Key](t: Task[_])(implicit streams: Task[TaskStreams[Key]], - key: Task[_] => Key): ProcessPipe = + final implicit def pipeToProcess[Key]( + t: Task[_] + )(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): ProcessPipe = new ProcessPipe { def #|(p: ProcessBuilder): Task[Int] = pipe0(None, p) def pipe(sid: String)(p: ProcessBuilder): Task[Int] = pipe0(Some(sid), p) @@ -190,8 +197,9 @@ trait TaskExtra { } } - final implicit def binaryPipeTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], - key: Task[_] => Key): BinaryPipe = + final implicit def binaryPipeTask[Key]( + in: Task[_] + )(implicit streams: 
Task[TaskStreams[Key]], key: Task[_] => Key): BinaryPipe = new BinaryPipe { def binary[T](f: BufferedInputStream => T): Task[T] = pipe0(None, f) def binary[T](sid: String)(f: BufferedInputStream => T): Task[T] = pipe0(Some(sid), f) @@ -206,8 +214,9 @@ trait TaskExtra { private def toFile(f: File) = (in: InputStream) => IO.transfer(in, f) } - final implicit def textPipeTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], - key: Task[_] => Key): TextPipe = new TextPipe { + final implicit def textPipeTask[Key]( + in: Task[_] + )(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): TextPipe = new TextPipe { def text[T](f: BufferedReader => T): Task[T] = pipe0(None, f) def text[T](sid: String)(f: BufferedReader => T): Task[T] = pipe0(Some(sid), f) @@ -216,8 +225,9 @@ trait TaskExtra { f(s.readText(key(in), sid)) } } - final implicit def linesTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], - key: Task[_] => Key): TaskLines = new TaskLines { + final implicit def linesTask[Key]( + in: Task[_] + )(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): TaskLines = new TaskLines { def lines: Task[List[String]] = lines0(None) def lines(sid: String): Task[List[String]] = lines0(Some(sid)) diff --git a/tasks-standard/src/test/scala/TaskGen.scala b/tasks-standard/src/test/scala/TaskGen.scala index 8fdace35d..ee7539ef9 100644 --- a/tasks-standard/src/test/scala/TaskGen.scala +++ b/tasks-standard/src/test/scala/TaskGen.scala @@ -24,9 +24,11 @@ object TaskGen extends std.TaskExtra { def run[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): Result[T] = { val (service, shutdown) = CompletionService[Task[_], Completed](maxWorkers) val dummies = std.Transform.DummyTaskMap(Nil) - val x = new Execute[Task](Execute.config(checkCycles), - Execute.noTriggers, - ExecuteProgress.empty[Task])(std.Transform(dummies)) + val x = new Execute[Task]( + Execute.config(checkCycles), + Execute.noTriggers, + ExecuteProgress.empty[Task] + 
)(std.Transform(dummies)) try { x.run(root)(service) } finally { shutdown() } } def tryRun[T](root: Task[T], checkCycles: Boolean, maxWorkers: Int): T = diff --git a/tasks-standard/src/test/scala/TaskRunnerCircular.scala b/tasks-standard/src/test/scala/TaskRunnerCircular.scala index 1350a22b0..3d542a9db 100644 --- a/tasks-standard/src/test/scala/TaskRunnerCircular.scala +++ b/tasks-standard/src/test/scala/TaskRunnerCircular.scala @@ -42,8 +42,9 @@ object TaskRunnerCircularTest extends Properties("TaskRunner Circular") { } try { tryRun(top, true, workers); false } catch { case i: Incomplete => cyclic(i) } } + def cyclic(i: Incomplete) = Incomplete .allExceptions(i) - .exists(_.isInstanceOf[Execute[({ type A[_] <: AnyRef })#A]#CyclicException[_]]) + .exists(_.isInstanceOf[Execute[({ type A[_] <: AnyRef })#A @unchecked]#CyclicException[_]]) } diff --git a/tasks-standard/src/test/scala/TaskRunnerFork.scala b/tasks-standard/src/test/scala/TaskRunnerFork.scala index b469dc38e..639963454 100644 --- a/tasks-standard/src/test/scala/TaskRunnerFork.scala +++ b/tasks-standard/src/test/scala/TaskRunnerFork.scala @@ -31,8 +31,9 @@ object TaskRunnerForkTest extends Properties("TaskRunner Fork") { true } def runDoubleJoin(a: Int, b: Int, workers: Int): Unit = { - def inner(i: Int) = List.range(0, b).map(j => task(j).named(j.toString)).join - tryRun(List.range(0, a).map(inner).join, false, workers) + def inner = List.range(0, b).map(j => task(j).named(j.toString)).join + tryRun(List.range(0, a).map(_ => inner).join, false, workers) + () } property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) { (m: List[Int], workers: Int) => m.nonEmpty ==> { diff --git a/tasks-standard/src/test/scala/TaskSerial.scala b/tasks-standard/src/test/scala/TaskSerial.scala index 43aeea41f..8f2f8bf3c 100644 --- a/tasks-standard/src/test/scala/TaskSerial.scala +++ b/tasks-standard/src/test/scala/TaskSerial.scala @@ -50,9 +50,11 @@ object TaskSerial extends Properties("task serial") { } */ - def 
checkArbitrary(size: Int, - restrictions: ConcurrentRestrictions[Task[_]], - shouldSucceed: Boolean) = { + def checkArbitrary( + size: Int, + restrictions: ConcurrentRestrictions[Task[_]], + shouldSucceed: Boolean + ) = { val latch = task { new CountDownLatch(size) } def mktask = latch map { l => l.countDown() @@ -74,20 +76,26 @@ object TaskSerial extends Properties("task serial") { } object TaskTest { - def run[T](root: Task[T], - checkCycles: Boolean, - restrictions: ConcurrentRestrictions[Task[_]]): Result[T] = { + def run[T]( + root: Task[T], + checkCycles: Boolean, + restrictions: ConcurrentRestrictions[Task[_]] + ): Result[T] = { val (service, shutdown) = completionService[Task[_], Completed](restrictions, (x: String) => System.err.println(x)) - val x = new Execute[Task](Execute.config(checkCycles), - Execute.noTriggers, - ExecuteProgress.empty[Task])(taskToNode(idK[Task])) + val x = new Execute[Task]( + Execute.config(checkCycles), + Execute.noTriggers, + ExecuteProgress.empty[Task] + )(taskToNode(idK[Task])) try { x.run(root)(service) } finally { shutdown() } } - def tryRun[T](root: Task[T], - checkCycles: Boolean, - restrictions: ConcurrentRestrictions[Task[_]]): T = + def tryRun[T]( + root: Task[T], + checkCycles: Boolean, + restrictions: ConcurrentRestrictions[Task[_]] + ): T = run(root, checkCycles, restrictions) match { case Value(v) => v case Inc(i) => throw i diff --git a/tasks-standard/src/test/scala/Test.scala b/tasks-standard/src/test/scala/Test.scala index 0574118ca..ab4fdb1b3 100644 --- a/tasks-standard/src/test/scala/Test.scala +++ b/tasks-standard/src/test/scala/Test.scala @@ -34,12 +34,12 @@ object Test extends std.TaskExtra { val d2 = t3(a, b2, c) mapR f val f2: Values => Task[Any] = { case (Value(aa), Value(bb), Value(cc)) => task(aa + " " + bb + " " + cc) - case x => d3 + case _ => d3 } lazy val d = t3(a, b, c) flatMapR f2 val f3: Values => Task[Any] = { case (Value(aa), Value(bb), Value(cc)) => task(aa + " " + bb + " " + cc) - case x => 
d2 + case _ => d2 } lazy val d3 = t3(a, b, c) flatMapR f3 diff --git a/tasks/src/main/scala/sbt/CompletionService.scala b/tasks/src/main/scala/sbt/CompletionService.scala index 88f00a5e8..f64eae139 100644 --- a/tasks/src/main/scala/sbt/CompletionService.scala +++ b/tasks/src/main/scala/sbt/CompletionService.scala @@ -24,13 +24,13 @@ import java.util.concurrent.{ object CompletionService { def apply[A, T](poolSize: Int): (CompletionService[A, T], () => Unit) = { val pool = Executors.newFixedThreadPool(poolSize) - (apply[A, T](pool), () => pool.shutdownNow()) + (apply[A, T](pool), () => { pool.shutdownNow(); () }) } def apply[A, T](x: Executor): CompletionService[A, T] = apply(new ExecutorCompletionService[T](x)) def apply[A, T](completion: JCompletionService[T]): CompletionService[A, T] = new CompletionService[A, T] { - def submit(node: A, work: () => T) = CompletionService.submit(work, completion) + def submit(node: A, work: () => T) = { CompletionService.submit(work, completion); () } def take() = completion.take().get() } def submit[T](work: () => T, completion: JCompletionService[T]): () => T = { @@ -40,14 +40,16 @@ object CompletionService { () => future.get() } - def manage[A, T](service: CompletionService[A, T])(setup: A => Unit, - cleanup: A => Unit): CompletionService[A, T] = + def manage[A, T]( + service: CompletionService[A, T] + )(setup: A => Unit, cleanup: A => Unit): CompletionService[A, T] = wrap(service) { (node, work) => () => setup(node) try { work() } finally { cleanup(node) } } - def wrap[A, T](service: CompletionService[A, T])( - w: (A, () => T) => (() => T)): CompletionService[A, T] = + def wrap[A, T]( + service: CompletionService[A, T] + )(w: (A, () => T) => (() => T)): CompletionService[A, T] = new CompletionService[A, T] { def submit(node: A, work: () => T) = service.submit(node, w(node, work)) def take() = service.take() diff --git a/tasks/src/main/scala/sbt/ConcurrentRestrictions.scala 
b/tasks/src/main/scala/sbt/ConcurrentRestrictions.scala index 40a15d4fd..b6e011f4c 100644 --- a/tasks/src/main/scala/sbt/ConcurrentRestrictions.scala +++ b/tasks/src/main/scala/sbt/ConcurrentRestrictions.scala @@ -122,22 +122,25 @@ object ConcurrentRestrictions { * Constructs a CompletionService suitable for backing task execution based on the provided restrictions on concurrent task execution. * @return a pair, with _1 being the CompletionService and _2 a function to shutdown the service. * @tparam A the task type - * @tparam G describes a set of tasks * @tparam R the type of data that will be computed by the CompletionService. */ - def completionService[A, R](tags: ConcurrentRestrictions[A], - warn: String => Unit): (CompletionService[A, R], () => Unit) = { + def completionService[A, R]( + tags: ConcurrentRestrictions[A], + warn: String => Unit + ): (CompletionService[A, R], () => Unit) = { val pool = Executors.newCachedThreadPool() - (completionService[A, R](pool, tags, warn), () => pool.shutdownNow()) + (completionService[A, R](pool, tags, warn), () => { pool.shutdownNow(); () }) } /** * Constructs a CompletionService suitable for backing task execution based on the provided restrictions on concurrent task execution * and using the provided Executor to manage execution on threads. 
*/ - def completionService[A, R](backing: Executor, - tags: ConcurrentRestrictions[A], - warn: String => Unit): CompletionService[A, R] = { + def completionService[A, R]( + backing: Executor, + tags: ConcurrentRestrictions[A], + warn: String => Unit + ): CompletionService[A, R] = { /** Represents submitted work for a task.*/ final class Enqueue(val node: A, val work: () => R) @@ -167,6 +170,7 @@ object ConcurrentRestrictions { if (running == 0) errorAddingToIdle() pending.add(new Enqueue(node, work)) } + () } private[this] def submitValid(node: A, work: () => R) = { running += 1 @@ -180,7 +184,8 @@ object ConcurrentRestrictions { tagState = tags.remove(tagState, node) if (!tags.valid(tagState)) warn( - "Invalid restriction: removing a completed node from a valid system must result in a valid system.") + "Invalid restriction: removing a completed node from a valid system must result in a valid system." + ) submitValid(new LinkedList) } private[this] def errorAddingToIdle() = @@ -192,6 +197,7 @@ object ConcurrentRestrictions { if (!tried.isEmpty) { if (running == 0) errorAddingToIdle() pending.addAll(tried) + () } } else { val next = pending.remove() diff --git a/tasks/src/main/scala/sbt/Execute.scala b/tasks/src/main/scala/sbt/Execute.scala index c5d16288e..74e0bcdc5 100644 --- a/tasks/src/main/scala/sbt/Execute.scala +++ b/tasks/src/main/scala/sbt/Execute.scala @@ -27,8 +27,10 @@ private[sbt] object Execute { def config(checkCycles: Boolean, overwriteNode: Incomplete => Boolean = const(false)): Config = new Config(checkCycles, overwriteNode) - final class Config private[sbt] (val checkCycles: Boolean, - val overwriteNode: Incomplete => Boolean) + final class Config private[sbt] ( + val checkCycles: Boolean, + val overwriteNode: Incomplete => Boolean + ) final val checkPreAndPostConditions = sys.props.get("sbt.execute.extrachecks").exists(java.lang.Boolean.parseBoolean) @@ -40,14 +42,17 @@ private[sbt] trait NodeView[A[_]] { def apply[T](a: A[T]): Node[A, T] def 
inline[T](a: A[T]): Option[() => T] } -final class Triggers[A[_]](val runBefore: collection.Map[A[_], Seq[A[_]]], - val injectFor: collection.Map[A[_], Seq[A[_]]], - val onComplete: RMap[A, Result] => RMap[A, Result]) +final class Triggers[A[_]]( + val runBefore: collection.Map[A[_], Seq[A[_]]], + val injectFor: collection.Map[A[_], Seq[A[_]]], + val onComplete: RMap[A, Result] => RMap[A, Result] +) private[sbt] final class Execute[A[_] <: AnyRef]( config: Config, triggers: Triggers[A], - progress: ExecuteProgress[A])(implicit view: NodeView[A]) { + progress: ExecuteProgress[A] +)(implicit view: NodeView[A]) { type Strategy = CompletionService[A[_], Completed] private[this] val forward = idMap[A[_], IDSet[A[_]]] @@ -205,11 +210,12 @@ private[sbt] final class Execute[A[_] <: AnyRef]( val v = register(node) val deps = dependencies(v) ++ runBefore(node) val active = IDSet[A[_]](deps filter notDone) - progressState = progress.registered(progressState, - node, - deps, - active.toList - /** active is mutable, so take a snapshot */ + progressState = progress.registered( + progressState, + node, + deps, + active.toList + /** active is mutable, so take a snapshot */ ) if (active.isEmpty) @@ -283,7 +289,8 @@ private[sbt] final class Execute[A[_] <: AnyRef]( } } private[this] def rewrap[T]( - rawResult: Either[Incomplete, Either[A[T], T]]): Either[A[T], Result[T]] = + rawResult: Either[Incomplete, Either[A[T], T]] + ): Either[A[T], Result[T]] = rawResult match { case Left(i) => Right(Inc(i)) case Right(Right(v)) => Right(Value(v)) @@ -361,8 +368,11 @@ private[sbt] final class Execute[A[_] <: AnyRef]( // cyclic reference checking def snapshotCycleCheck(): Unit = - for ((called: A[c], callers) <- callers.toSeq; caller <- callers) - cycleCheck(caller.asInstanceOf[A[c]], called) + callers.toSeq foreach { + case (called: A[c], callers) => + for (caller <- callers) cycleCheck(caller.asInstanceOf[A[c]], called) + case _ => () + } def cycleCheck[T](node: A[T], target: A[T]): Unit = { 
if (node eq target) cyclic(node, target, "Cannot call self") @@ -374,9 +384,11 @@ private[sbt] final class Execute[A[_] <: AnyRef]( if (all contains target) cyclic(node, target, "Cyclic reference") } def cyclic[T](caller: A[T], target: A[T], msg: String) = - throw new Incomplete(Some(caller), - message = Some(msg), - directCause = Some(new CyclicException(caller, target, msg))) + throw new Incomplete( + Some(caller), + message = Some(msg), + directCause = Some(new CyclicException(caller, target, msg)) + ) final class CyclicException[T](val caller: A[T], val target: A[T], msg: String) extends Exception(msg) diff --git a/tasks/src/main/scala/sbt/Incomplete.scala b/tasks/src/main/scala/sbt/Incomplete.scala index ad8296196..78c042ebd 100644 --- a/tasks/src/main/scala/sbt/Incomplete.scala +++ b/tasks/src/main/scala/sbt/Incomplete.scala @@ -21,12 +21,13 @@ import Incomplete.{ Error, Value => IValue } * @param causes a list of incompletions that prevented `node` from completing * @param directCause the exception that caused `node` to not complete */ -final case class Incomplete(node: Option[AnyRef], - tpe: IValue = Error, - message: Option[String] = None, - causes: Seq[Incomplete] = Nil, - directCause: Option[Throwable] = None) - extends Exception(message.orNull, directCause.orNull) +final case class Incomplete( + node: Option[AnyRef], + tpe: IValue = Error, + message: Option[String] = None, + causes: Seq[Incomplete] = Nil, + directCause: Option[Throwable] = None +) extends Exception(message.orNull, directCause.orNull) with sbt.internal.util.UnprintableException { override def toString = "Incomplete(node=" + node + ", tpe=" + tpe + ", msg=" + message + ", causes=" + causes + ", directCause=" + directCause + ")" diff --git a/testing/src/main/scala/sbt/JUnitXmlTestsListener.scala b/testing/src/main/scala/sbt/JUnitXmlTestsListener.scala index 29a4ebc8c..a59e44ee7 100644 --- a/testing/src/main/scala/sbt/JUnitXmlTestsListener.scala +++ 
b/testing/src/main/scala/sbt/JUnitXmlTestsListener.scala @@ -9,9 +9,14 @@ package sbt import java.io.{ File, IOException, PrintWriter, StringWriter } import java.net.InetAddress +import java.time.LocalDateTime +import java.time.format.DateTimeFormatter +import java.time.temporal.ChronoUnit import java.util.Hashtable +import java.util.concurrent.TimeUnit.NANOSECONDS import scala.collection.mutable.ListBuffer +import scala.util.Properties import scala.xml.{ Elem, Node => XNode, XML } import testing.{ Event => TEvent, @@ -20,6 +25,7 @@ import testing.{ OptionalThrowable, TestSelector } +import util.Logger import sbt.protocol.testing.TestResult /** @@ -27,14 +33,27 @@ import sbt.protocol.testing.TestResult * report format. * @param outputDir path to the dir in which a folder with results is generated */ -class JUnitXmlTestsListener(val outputDir: String) extends TestsListener { +class JUnitXmlTestsListener(val outputDir: String, logger: Logger) extends TestsListener { + // This constructor is for binary compatibility with older versions of sbt. + def this(outputDir: String) = this(outputDir, null) /**Current hostname so we know which machine executed the tests*/ - val hostname = - try InetAddress.getLocalHost.getHostName + val hostname = { + val start = System.nanoTime + val name = try InetAddress.getLocalHost.getHostName catch { case _: IOException => "localhost" } + val elapsed = System.nanoTime - start + if ((NANOSECONDS.toSeconds(elapsed) >= 4) && Properties.isMac && logger != null) { + logger.warn( + s"Getting the hostname $name was slow (${elapsed / 1.0e6} ms). " + + "This is likely because the computer's hostname is not set. You can set the " + + "hostname with the command: scutil --set HostName $(scutil --get LocalHostName)." + ) + } + name + } /**The dir in which we put all result files. 
Is equal to the given dir + "/test-reports"*/ val targetDir = new File(outputDir + "/test-reports/") @@ -59,7 +78,9 @@ class JUnitXmlTestsListener(val outputDir: String) extends TestsListener { * Gathers data for one Test Suite. We map test groups to TestSuites. * Each TestSuite gets its own output file. */ - class TestSuite(val name: String) { + class TestSuite(val name: String, timestamp: LocalDateTime) { + def this(name: String) = this(name, LocalDateTime.now()) + val events: ListBuffer[TEvent] = new ListBuffer() /**Adds one test result to this suite.*/ @@ -79,10 +100,11 @@ class JUnitXmlTestsListener(val outputDir: String) extends TestsListener { /** Junit XML reports don't differentiate between ignored, skipped or pending tests */ val ignoredSkippedPending = count(TStatus.Ignored) + count(TStatus.Skipped) + count( - TStatus.Pending) + TStatus.Pending + ) val result = - + { properties } { for (e <- events) yield - * + * * * * ... @@ -196,6 +219,12 @@ class JUnitXmlTestsListener(val outputDir: String) extends TestsListener { // contort the user into not using spaces. private[this] def normalizeName(s: String) = s.replaceAll("""\s+""", "-") + /** + * Format the date, without milliseconds or the timezone, per the JUnit spec. 
+ */ + private[this] def formatISO8601DateTime(d: LocalDateTime): String = + d.truncatedTo(ChronoUnit.SECONDS).format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + private def writeSuite() = { val file = new File(targetDir, s"${normalizeName(withTestSuite(_.name))}.xml").getAbsolutePath // TODO would be nice to have a logger and log this with level debug diff --git a/testing/src/main/scala/sbt/TestFramework.scala b/testing/src/main/scala/sbt/TestFramework.scala index ffd7ef36d..11b4b51c8 100644 --- a/testing/src/main/scala/sbt/TestFramework.scala +++ b/testing/src/main/scala/sbt/TestFramework.scala @@ -41,9 +41,11 @@ final class TestFramework(val implClassNames: String*) extends Serializable { } @tailrec - private def createFramework(loader: ClassLoader, - log: ManagedLogger, - frameworkClassNames: List[String]): Option[Framework] = { + private def createFramework( + loader: ClassLoader, + log: ManagedLogger, + frameworkClassNames: List[String] + ): Option[Framework] = { frameworkClassNames match { case head :: tail => try { @@ -52,8 +54,8 @@ final class TestFramework(val implClassNames: String*) extends Serializable { case oldFramework: OldFramework => new FrameworkWrapper(oldFramework) }) } catch { - case e: ClassNotFoundException => - log.debug("Framework implementation '" + head + "' not present."); + case _: ClassNotFoundException => + log.debug("Framework implementation '" + head + "' not present.") createFramework(loader, log, tail) } case Nil => @@ -64,10 +66,12 @@ final class TestFramework(val implClassNames: String*) extends Serializable { def create(loader: ClassLoader, log: ManagedLogger): Option[Framework] = createFramework(loader, log, implClassNames.toList) } -final class TestDefinition(val name: String, - val fingerprint: Fingerprint, - val explicitlySpecified: Boolean, - val selectors: Array[Selector]) { +final class TestDefinition( + val name: String, + val fingerprint: Fingerprint, + val explicitlySpecified: Boolean, + val selectors: Array[Selector] 
+) { override def toString = "Test " + name + " : " + TestFramework.toString(fingerprint) override def equals(t: Any) = t match { @@ -87,13 +91,16 @@ final class TestRunner( delegate.tasks( testDefs .map(df => new TaskDef(df.name, df.fingerprint, df.explicitlySpecified, df.selectors)) - .toArray) + .toArray + ) final def run(taskDef: TaskDef, testTask: TestTask): (SuiteResult, Seq[TestTask]) = { - val testDefinition = new TestDefinition(taskDef.fullyQualifiedName, - taskDef.fingerprint, - taskDef.explicitlySpecified, - taskDef.selectors) + val testDefinition = new TestDefinition( + taskDef.fullyQualifiedName, + taskDef.fingerprint, + taskDef.explicitlySpecified, + taskDef.selectors + ) log.debug("Running " + taskDef) val name = testDefinition.name @@ -141,7 +148,8 @@ object TestFramework { it.foreach( i => try f(i) - catch { case NonFatal(e) => log.trace(e); log.error(e.toString) }) + catch { case NonFatal(e) => log.trace(e); log.error(e.toString) } + ) private[sbt] def hashCode(f: Fingerprint): Int = f match { case s: SubclassFingerprint => (s.isModule, s.superclassName).hashCode @@ -180,12 +188,16 @@ object TestFramework { }, mappedTests, tests, log, listeners) } - private[this] def order(mapped: Map[String, TestFunction], - inputs: Vector[TestDefinition]): Vector[(String, TestFunction)] = + private[this] def order( + mapped: Map[String, TestFunction], + inputs: Vector[TestDefinition] + ): Vector[(String, TestFunction)] = for (d <- inputs; act <- mapped.get(d.name)) yield (d.name, act) - private[this] def testMap(frameworks: Seq[Framework], - tests: Seq[TestDefinition]): Map[Framework, Set[TestDefinition]] = { + private[this] def testMap( + frameworks: Seq[Framework], + tests: Seq[TestDefinition] + ): Map[Framework, Set[TestDefinition]] = { import scala.collection.mutable.{ HashMap, HashSet, Set } val map = new HashMap[Framework, Set[TestDefinition]] def assignTest(test: TestDefinition): Unit = { @@ -200,13 +212,14 @@ object TestFramework { 
map.toMap.mapValues(_.toSet) } - private def createTestTasks(loader: ClassLoader, - runners: Map[Framework, TestRunner], - tests: Map[Framework, Set[TestDefinition]], - ordered: Vector[TestDefinition], - log: ManagedLogger, - listeners: Vector[TestReportListener]) - : (() => Unit, Vector[(String, TestFunction)], TestResult => (() => Unit)) = { + private def createTestTasks( + loader: ClassLoader, + runners: Map[Framework, TestRunner], + tests: Map[Framework, Set[TestDefinition]], + ordered: Vector[TestDefinition], + log: ManagedLogger, + listeners: Vector[TestReportListener] + ): (() => Unit, Vector[(String, TestFunction)], TestResult => (() => Unit)) = { val testsListeners = listeners collect { case tl: TestsListener => tl } def foreachListenerSafe(f: TestsListener => Unit): () => Unit = @@ -232,19 +245,23 @@ object TestFramework { Thread.currentThread.setContextClassLoader(loader) try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) } } - def createTestLoader(classpath: Seq[File], - scalaInstance: ScalaInstance, - tempDir: File): ClassLoader = { + def createTestLoader( + classpath: Seq[File], + scalaInstance: ScalaInstance, + tempDir: File + ): ClassLoader = { val interfaceJar = IO.classLocationFile(classOf[testing.Framework]) val interfaceFilter = (name: String) => name.startsWith("org.scalatools.testing.") || name.startsWith("sbt.testing.") val notInterfaceFilter = (name: String) => !interfaceFilter(name) - val dual = new DualLoader(scalaInstance.loader, - notInterfaceFilter, - x => true, - getClass.getClassLoader, - interfaceFilter, - x => false) + val dual = new DualLoader( + scalaInstance.loader, + notInterfaceFilter, + x => true, + getClass.getClassLoader, + interfaceFilter, + x => false + ) val main = ClasspathUtilities.makeLoader(classpath, dual, scalaInstance, tempDir) // TODO - There's actually an issue with the classpath facility such that unmanagedScalaInstances are not added // to the classpath correctly. 
We have a temporary workaround here. @@ -253,20 +270,26 @@ object TestFramework { else scalaInstance.allJars ++ (interfaceJar +: classpath) ClasspathUtilities.filterByClasspath(cp, main) } - def createTestFunction(loader: ClassLoader, - taskDef: TaskDef, - runner: TestRunner, - testTask: TestTask): TestFunction = - new TestFunction(taskDef, - runner, - (r: TestRunner) => withContextLoader(loader) { r.run(taskDef, testTask) }) { + def createTestFunction( + loader: ClassLoader, + taskDef: TaskDef, + runner: TestRunner, + testTask: TestTask + ): TestFunction = + new TestFunction( + taskDef, + runner, + (r: TestRunner) => withContextLoader(loader) { r.run(taskDef, testTask) } + ) { def tags = testTask.tags } } -abstract class TestFunction(val taskDef: TaskDef, - val runner: TestRunner, - fun: (TestRunner) => (SuiteResult, Seq[TestTask])) { +abstract class TestFunction( + val taskDef: TaskDef, + val runner: TestRunner, + fun: (TestRunner) => (SuiteResult, Seq[TestTask]) +) { def apply(): (SuiteResult, Seq[TestTask]) = fun(runner) diff --git a/testing/src/main/scala/sbt/TestReportListener.scala b/testing/src/main/scala/sbt/TestReportListener.scala index 090ddc7ef..12566c707 100644 --- a/testing/src/main/scala/sbt/TestReportListener.scala +++ b/testing/src/main/scala/sbt/TestReportListener.scala @@ -25,7 +25,7 @@ trait TestReportListener { def endGroup(name: String, result: TestResult): Unit /** Used by the test framework for logging test results */ - def contentLogger(test: TestDefinition): Option[ContentLogger] = None + def contentLogger(@deprecated("unused", "") test: TestDefinition): Option[ContentLogger] = None } diff --git a/testing/src/main/scala/sbt/TestStatusReporter.scala b/testing/src/main/scala/sbt/TestStatusReporter.scala index 732e3257c..af5d84465 100644 --- a/testing/src/main/scala/sbt/TestStatusReporter.scala +++ b/testing/src/main/scala/sbt/TestStatusReporter.scala @@ -8,17 +8,19 @@ package sbt import java.io.File -import sbt.io.IO -import 
scala.collection.mutable.Map +import sbt.io.IO import sbt.protocol.testing.TestResult +import java.util.concurrent.ConcurrentHashMap + +import scala.collection.concurrent // Assumes exclusive ownership of the file. private[sbt] class TestStatusReporter(f: File) extends TestsListener { - private lazy val succeeded = TestStatus.read(f) + private lazy val succeeded: concurrent.Map[String, Long] = TestStatus.read(f) def doInit = () - def startGroup(name: String): Unit = { succeeded remove name } + def startGroup(name: String): Unit = { succeeded remove name; () } def testEvent(event: TestEvent): Unit = () def endGroup(name: String, t: Throwable): Unit = () def endGroup(name: String, result: TestResult): Unit = { @@ -32,13 +34,16 @@ private[sbt] class TestStatusReporter(f: File) extends TestsListener { private[sbt] object TestStatus { import java.util.Properties - def read(f: File): Map[String, Long] = { + def read(f: File): concurrent.Map[String, Long] = { import scala.collection.JavaConverters._ val properties = new Properties IO.load(properties, f) - properties.asScala map { case (k, v) => (k, v.toLong) } + val result = new ConcurrentHashMap[String, Long]() + properties.asScala.iterator.foreach { case (k, v) => result.put(k, v.toLong) } + result.asScala } - def write(map: Map[String, Long], label: String, f: File): Unit = { + + def write(map: collection.Map[String, Long], label: String, f: File): Unit = { val properties = new Properties for ((test, lastSuccessTime) <- map) properties.setProperty(test, lastSuccessTime.toString) diff --git a/testing/src/main/scala/sbt/internal/testing/TestLogger.scala b/testing/src/main/scala/sbt/internal/testing/TestLogger.scala index 1cd5ff4b4..6c49f793a 100644 --- a/testing/src/main/scala/sbt/internal/testing/TestLogger.scala +++ b/testing/src/main/scala/sbt/internal/testing/TestLogger.scala @@ -9,7 +9,7 @@ package sbt package internal.testing import testing.{ Logger => TLogger } -import sbt.internal.util.{ ManagedLogger, 
BufferedAppender } +import sbt.internal.util.{ BufferedAppender, ConsoleAppender, ManagedLogger } import sbt.util.{ Level, LogExchange, ShowLines } import sbt.protocol.testing._ import java.util.concurrent.atomic.AtomicInteger @@ -89,7 +89,7 @@ object TestLogger { def debug(s: String) = log(Level.Debug, TestStringEvent(s)) def trace(t: Throwable) = logger.trace(t) private def log(level: Level.Value, event: TestStringEvent) = logger.logEvent(level, event) - def ansiCodesSupported() = logger.ansiCodesSupported + def ansiCodesSupported() = ConsoleAppender.formatEnabledInEnv } private[sbt] def toTestItemEvent(event: TestEvent): TestItemEvent = diff --git a/vscode-sbt-scala/client/package.json b/vscode-sbt-scala/client/package.json index 1e729fc32..f4316539e 100644 --- a/vscode-sbt-scala/client/package.json +++ b/vscode-sbt-scala/client/package.json @@ -1,7 +1,7 @@ { "name": "vscode-sbt-scala", "displayName": "Scala (sbt)", - "version": "0.1.0", + "version": "0.2.0", "author": "Lightbend, Inc.", "license": "BSD-3-Clause", "publisher": "lightbend", diff --git a/vscode-sbt-scala/client/src/extension.ts b/vscode-sbt-scala/client/src/extension.ts index c5d3a1282..ff8206a37 100644 --- a/vscode-sbt-scala/client/src/extension.ts +++ b/vscode-sbt-scala/client/src/extension.ts @@ -1,28 +1,39 @@ 'use strict'; import * as path from 'path'; - -let fs = require('fs'); +import * as url from 'url'; +import * as net from 'net'; +let fs = require('fs'), + os = require('os'); import * as vscode from 'vscode'; import { ExtensionContext, workspace } from 'vscode'; // workspace, import { LanguageClient, LanguageClientOptions, ServerOptions, TransportKind } from 'vscode-languageclient'; +let terminal: vscode.Terminal = null; + +function delay(ms: number) { + return new Promise(resolve => setTimeout(resolve, ms)); +} + +export async function deactivate() { + if (terminal != null) { + terminal.sendText("exit"); + await delay(1000); + terminal.dispose(); + } +} + export async function 
activate(context: ExtensionContext) { // Start sbt - const terminal = vscode.window.createTerminal(`sbt`); + terminal = vscode.window.createTerminal(`sbt`); terminal.show(); terminal.sendText("sbt"); - - function delay(ms: number) { - return new Promise(resolve => setTimeout(resolve, ms)); - } - // Wait for SBT server to start - let retries = 30; + let retries = 60; while (retries > 0) { retries--; await delay(1000); - if (fs.existsSync(path.join(workspace.rootPath, 'project', 'target', 'active.json'))) { + if (isServerUp()) { break; } } @@ -46,7 +57,47 @@ export async function activate(context: ExtensionContext) { return discoverToken(); } } - + + // Don't start VS Code connection until sbt server is confirmed to be up and running. + function isServerUp(): boolean { + let isFileThere = fs.existsSync(path.join(workspace.rootPath, 'project', 'target', 'active.json')); + if (!isFileThere) { + return false; + } else { + let skt = new net.Socket(); + try { + connectSocket(skt); + } catch(e) { + return false; + } + skt.end(); + return true; + } + } + + function connectSocket(socket: net.Socket):  net.Socket { + let u = discoverUrl(); + // let socket = net.Socket(); + if (u.protocol == 'tcp:') { + socket.connect(+u.port, '127.0.0.1'); + } else if (u.protocol == 'local:' && os.platform() == 'win32') { + let pipePath = '\\\\.\\pipe\\' + u.hostname; + socket.connect(pipePath); + } else if (u.protocol == 'local:') { + socket.connect(u.path); + } else { + throw 'Unknown protocol ' + u.protocol; + } + return socket; + } + + // the port file is hardcoded to a particular location relative to the build. + function discoverUrl(): url.Url { + let pf = path.join(process.cwd(), 'project', 'target', 'active.json'); + let portfile = JSON.parse(fs.readFileSync(pf)); + return url.parse(portfile.uri); + } + // the port file is hardcoded to a particular location relative to the build. 
function discoverToken(): any { let pf = path.join(workspace.rootPath, 'project', 'target', 'active.json'); diff --git a/vscode-sbt-scala/server/src/server.ts b/vscode-sbt-scala/server/src/server.ts index 05932d451..8404c7b74 100644 --- a/vscode-sbt-scala/server/src/server.ts +++ b/vscode-sbt-scala/server/src/server.ts @@ -2,32 +2,20 @@ import * as path from 'path'; import * as url from 'url'; -let net = require('net'), - fs = require('fs'), +import * as net from 'net'; +let fs = require('fs'), os = require('os'), stdin = process.stdin, stdout = process.stdout; -let u = discoverUrl(); - -let socket = net.Socket(); +let socket = new net.Socket(); socket.on('data', (chunk: any) => { // send it back to stdout stdout.write(chunk); }).on('end', () => { stdin.pause(); }); - -if (u.protocol == 'tcp:') { - socket.connect(u.port, '127.0.0.1'); -} else if (u.protocol == 'local:' && os.platform() == 'win32') { - let pipePath = '\\\\.\\pipe\\' + u.hostname; - socket.connect(pipePath); -} else if (u.protocol == 'local:') { - socket.connect(u.path); -} else { - throw 'Unknown protocol ' + u.protocol; -} +connectSocket(socket); stdin.resume(); stdin.on('data', (chunk: any) => { @@ -36,6 +24,22 @@ stdin.on('data', (chunk: any) => { socket.end(); }); +function connectSocket(socket: net.Socket): net.Socket { + let u = discoverUrl(); + // let socket = net.Socket(); + if (u.protocol == 'tcp:') { + socket.connect(+u.port, '127.0.0.1'); + } else if (u.protocol == 'local:' && os.platform() == 'win32') { + let pipePath = '\\\\.\\pipe\\' + u.hostname; + socket.connect(pipePath); + } else if (u.protocol == 'local:') { + socket.connect(u.path); + } else { + throw 'Unknown protocol ' + u.protocol; + } + return socket; +} + // the port file is hardcoded to a particular location relative to the build. function discoverUrl(): url.Url { let pf = path.join(process.cwd(), 'project', 'target', 'active.json');