Merge branch '1.x' into sort-and-indent-plugins-output

This commit is contained in:
eugene yokota 2018-06-13 03:21:02 -04:00 committed by GitHub
commit 54ad0d865e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
269 changed files with 8572 additions and 5142 deletions

4
.gitattributes vendored
View File

@ -5,3 +5,7 @@
# to native line endings on checkout.
*.scala text
*.java text
# Exclude contraband generated files from diff (by default - you can see it if you want)
**/contraband-scala/**/* -diff merge=ours
**/contraband-scala/**/* linguist-generated=true

View File

@ -8,3 +8,11 @@ docstrings = JavaDoc
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
# This is more idiomatic Scala.
# http://docs.scala-lang.org/style/indentation.html#methods-with-numerous-arguments
align.openParenCallSite = false
align.openParenDefnSite = false
# For better code clarity
danglingParentheses = true

View File

@ -6,6 +6,7 @@ cache:
directories:
- $HOME/.ivy2/cache
- $HOME/.sbt/boot
- $HOME/.jabba
language: scala
@ -15,16 +16,26 @@ jdk:
matrix:
fast_finish: true
matrix:
include:
- env: SBT_CMD="scripted java/*"
before_install:
- curl -sL https://raw.githubusercontent.com/shyiko/jabba/0.10.1/install.sh | bash && . ~/.jabba/jabba.sh
install:
- /home/travis/.jabba/bin/jabba install openjdk@1.10
env:
global:
- secure: d3bu2KNwsVHwfhbGgO+gmRfDKBJhfICdCJFGWKf2w3Gv86AJZX9nuTYRxz0KtdvEHO5Xw8WTBZLPb2thSJqhw9OCm4J8TBAVqCP0ruUj4+aqBUFy4bVexQ6WKE6nWHs4JPzPk8c6uC1LG3hMuzlC8RGETXtL/n81Ef1u7NjyXjs=
matrix:
- SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests"
- SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;whitesourceCheckPolicies ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests"
- SBT_CMD="scripted actions/*"
- SBT_CMD="scripted apiinfo/* compiler-project/* ivy-deps-management/*"
- SBT_CMD="scripted dependency-management/*1of4"
- SBT_CMD="scripted dependency-management/*2of4"
- SBT_CMD="scripted dependency-management/*3of4"
- SBT_CMD="scripted dependency-management/*4of4"
- SBT_CMD="scripted java/* package/* reporter/* run/* project-load/*"
- SBT_CMD="scripted package/* reporter/* run/* project-load/*"
- SBT_CMD="scripted project/*1of2"
- SBT_CMD="scripted project/*2of2"
- SBT_CMD="scripted source-dependencies/*1of3"
@ -46,5 +57,5 @@ script:
- sbt -J-XX:ReservedCodeCacheSize=128m -J-Xmx800M -J-Xms800M -J-server "$SBT_CMD"
before_cache:
- find $HOME/.ivy2 -name "ivydata-*.properties" -print -delete
- find $HOME/.sbt -name "*.lock" -print -delete
- find $HOME/.ivy2 -name "ivydata-*.properties" -delete
- find $HOME/.sbt -name "*.lock" -delete

View File

@ -1,33 +1,14 @@
[StackOverflow]: http://stackoverflow.com/tags/sbt
[ask]: https://stackoverflow.com/questions/ask?tags=sbt
[Setup]: http://www.scala-sbt.org/release/docs/Getting-Started/Setup
[Issues]: https://github.com/sbt/sbt/issues
[sbt-dev]: https://groups.google.com/d/forum/sbt-dev
[sbt-contrib]: https://gitter.im/sbt/sbt-contrib
[Lightbend]: https://www.lightbend.com/
[subscriptions]: https://www.lightbend.com/platform/subscription
[327]: https://github.com/sbt/sbt/issues/327
[gitter]: https://gitter.im/sbt/sbt
[documentation]: https://github.com/sbt/website
Support
=======
Contributing
============
[Lightbend] sponsors sbt and encourages contributions from the active community. Enterprises can adopt it for mission critical systems with confidence because Lightbend stands behind sbt with commercial support and services.
For community support please [ask] on StackOverflow with the tag "sbt".
- State the problem or question clearly and provide enough context. Code examples and `build.sbt` are often useful when appropriately edited.
- There's also [Gitter sbt/sbt room][gitter], but Stackoverflow is recommended so others can benefit from the answers.
For professional support, [Lightbend], the maintainer of Scala compiler and sbt, provides:
- [Lightbend Subscriptions][subscriptions], which includes Expert Support
- Training
- Consulting
How to contribute to sbt
========================
(For support, see [SUPPORT](./SUPPORT.md))
There are lots of ways to contribute to sbt ecosystem depending on your interests and skill level.
@ -48,9 +29,13 @@ When you find a bug in sbt we want to hear about it. Your bug reports play an im
Effective bug reports are more likely to be fixed. These guidelines explain how to write such reports and pull requests.
Please open a GitHub issue when you are 90% sure it's an actual bug.
If you have an enhancement idea, or a general discussion, bring it up to [sbt-contrib].
### Notes about Documentation
Documentation fixes and contributions are as much welcome as to patching the core. Visit [the website project][documentation] to learn about how to contribute.
Documentation fixes and contributions are as much welcome as to patching the core. Visit [sbt/website][documentation] to learn about how to contribute.
### Preliminaries
@ -59,35 +44,29 @@ Documentation fixes and contributions are as much welcome as to patching the cor
- Open one case for each problem.
- Proceed to the next steps for details.
### Where to get help and/or file a bug report
sbt project uses GitHub Issues as a publicly visible todo list. Please open a GitHub issue when you are 90% sure it's an actual bug.
- If you need help with sbt, please [ask] on StackOverflow with the tag "sbt" and the name of the sbt plugin if any.
- If you have an enhancement idea, or a general discussion, bring it up to [sbt-contrib].
- If you need a faster response time, consider one of the [Lightbend subscriptions][subscriptions].
### What to report
The developers need three things from you: **steps**, **problems**, and **expectations**.
### Steps
The most important thing to remember about bug reporting is to clearly distinguish facts and opinions.
The most important thing to remember about bug reporting is to clearly distinguish facts and opinions. What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors.
#### Steps
What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors.
Repro steps are the most important part of a bug report. If we cannot reproduce the problem in one way or the other, the problem can't be fixed. Telling us the error messages is not enough.
### Problems
#### Problems
Next, describe the problems, or what *you think* is the problem. It might be "obvious" to you that it's a problem, but it could actually be an intentional behavior for some backward compatibility etc. For compilation errors, include the stack trace. The more raw info the better.
### Expectations
#### Expectations
Same as the problems. Describe what *you think* should've happened.
### Notes
#### Notes
Add an optional notes section to describe your analysis.
Add any optional notes section to describe your analysis.
### Subject
@ -121,7 +100,7 @@ See below for the branch to work against.
### Adding notes
All pull requests are required to include a "Notes" file which documents the change. This file should reside in the
Most pull requests should include a "Notes" file which documents the change. This file should reside in the
directory:
<sbt root>
@ -199,12 +178,96 @@ $ sbt
> compile
```
### Using Jenkins sbt-snapshots nightlies
There is a Jenkins instance for sbt that every night builds and publishes (if successful) a timestamped version
of sbt to http://jenkins.scala-sbt.org/sbt-snapshots and is available for 4-5 weeks. To use it do the following:
1. Set the `sbt.version` in `project/build.properties`
```bash
echo "sbt.version=1.2.0-bin-20180423T192044" > project/build.properties
```
2. Create an sbt repositories file (`./repositories`) that includes that Maven repository:
```properties
[repositories]
local
local-preloaded-ivy: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/}, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]
local-preloaded: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/}
maven-central
sbt-maven-releases: https://repo.scala-sbt.org/scalasbt/maven-releases/, bootOnly
sbt-maven-snapshots: https://repo.scala-sbt.org/scalasbt/maven-snapshots/, bootOnly
typesafe-ivy-releases: https://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
sbt-ivy-snapshots: https://repo.scala-sbt.org/scalasbt/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
sbt-snapshots: https://jenkins.scala-sbt.org/sbt-snapshots
```
3. Start sbt with a stable launcher and the custom repositories file:
```bash
$ sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar -Dsbt.repository.config=repositories
Getting org.scala-sbt sbt 1.2.0-bin-20180423T192044 (this may take some time)...
downloading https://jenkins.scala-sbt.org/sbt-snapshots/org/scala-sbt/sbt/1.2.0-bin-20180423T192044/sbt-1.2.0-bin-20180423T192044.jar ...
[SUCCESSFUL ] org.scala-sbt#sbt;1.2.0-bin-20180423T192044!sbt.jar (139ms)
...
[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/936e0f52ed9baf6b6d83/sock
> show sbtVersion
[info] 1.2.0-bin-20180423T192044
```
### Using Jenkins maven-snapshots nightlies
As an alternative you can request a build that publishes to https://repo.scala-sbt.org/scalasbt/maven-snapshots
and stays there forever by:
1. Logging into https://jenkins.scala-sbt.org/job/sbt-validator/
2. Clicking "Build with Parameters"
3. Making sure `deploy_to_bintray` is enabled
4. Hitting "Build"
After which, start sbt with a stable launcher: `sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar`
### Clearing out boot and local cache
When you run a locally built sbt, the JAR artifacts will be now cached under `$HOME/.sbt/boot/scala-2.12.6/org.scala-sbt/sbt/1.$MINOR.$PATCH-SNAPSHOT` directory. To clear this out run: `reboot dev` command from sbt's session of your test application.
One drawback of `-SNAPSHOT` version is that it's slow to resolve as it tries to hit all the resolvers. You can workaround that by using a version name like `1.$MINOR.$PATCH-LOCAL1`. Non-SNAPSHOT artifacts will now be cached under the `$HOME/.ivy2/cache/` directory, so you need to clear that out using [sbt-dirty-money](https://github.com/sbt/sbt-dirty-money)'s `cleanCache` task.
### Running sbt "from source" - `sbtOn`
In addition to locally publishing a build of sbt, there is an alternative, experimental launcher within sbt/sbt
to be able to run sbt "from source", that is to compile sbt and run it from its resulting classfiles rather than
from published jar files.
Such a launcher is available within sbt/sbt's build through a custom `sbtOn` command that takes as its first
argument the directory on which you want to run sbt, and the remaining arguments are passed _to_ that sbt
instance. For example:
I have setup a minimal sbt build in the directory `/s/t`, to run sbt on that directory I call:
```bash
> sbtOn /s/t
[info] Packaging /d/sbt/scripted/sbt/target/scala-2.12/scripted-sbt_2.12-1.2.0-SNAPSHOT.jar ...
[info] Done packaging.
[info] Running (fork) sbt.RunFromSourceMain /s/t
Listening for transport dt_socket at address: 5005
[info] Loading settings from idea.sbt,global-plugins.sbt ...
[info] Loading global plugins from /Users/dnw/.dotfiles/.sbt/1.0/plugins
[info] Loading project definition from /s/t/project
[info] Set current project to t (in build file:/s/t/)
[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/ce9baa494c7598e4d59b/sock
> show baseDirectory
[info] /s/t
> exit
[info] shutting down server
[success] Total time: 19 s, completed 25-Apr-2018 15:04:58
```
Please note that this alternative launcher does _not_ have feature parity with sbt/launcher. (Meta)
contributions welcome! :-D
### Diagnosing build failures
Globally included plugins can interfere building `sbt`; if you are getting errors building sbt, try disabling all globally included plugins and try again.
@ -232,13 +295,17 @@ command. To run a single test, such as the test in
sbt "scripted project/global-plugin"
Profiling sbt
-------------
See [PROFILING](./PROFILING.md)
Other notes for maintainers
---------------------------
### Publishing VS Code Extensions
https://code.visualstudio.com/docs/extensions/publish-extension
Reference https://code.visualstudio.com/docs/extensions/publish-extension
```
$ sbt
@ -249,3 +316,12 @@ cd vscode-sbt-scala/client
$ vsce package
$ vsce publish
```
## Signing the CLA
Contributing to sbt requires you or your employer to sign the
[Lightbend Contributor License Agreement](https://www.lightbend.com/contribute/cla).
To make it easier to respect our license agreements, we have added an sbt task
that takes care of adding the LICENSE headers to new files. Run `headerCreate`
and sbt will put a copyright notice into it.

View File

@ -1,4 +1,4 @@
(See the guidelines for contributing, linked above)
- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines
## steps

153
PROFILING.md Normal file
View File

@ -0,0 +1,153 @@
Profiling sbt
-------------
There are several ways to profile sbt. The new hotness in profiling is FlameGraph.
You first collect stack trace samples, and then it is processed into svg graph.
See:
- [Using FlameGraphs To Illuminate The JVM by Nitsan Wakart](https://www.youtube.com/watch?v=ugRrFdda_JQ)
- [USENIX ATC '17: Visualizing Performance with Flame Graphs](https://www.youtube.com/watch?v=D53T1Ejig1Q)
### jvm-profiling-tools/async-profiler
The first one I recommend is async-profiler. This is available for macOS and Linux,
and works fairly well.
1. Download the installer from https://github.com/jvm-profiling-tools/async-profiler/releases/tag/v1.2
2. Make symbolic link to `build/` and `profiler.sh` to `$HOME/bin`, assuming you have PATH to `$HOME/bin`:
`ln -s ~/Applications/async-profiler/profiler.sh $HOME/bin/profiler.sh`
`ln -s ~/Applications/async-profiler/build $HOME/bin/build`
Next, close all Java applications and anything that may affect the profiling, and run sbt in one terminal:
```
$ sbt exit
```
In another terminal, run:
```
$ jps
92746 sbt-launch.jar
92780 Jps
```
This tells you the process ID of sbt. In this case, it's 92746. While it's running, run
```
$ profiler.sh -d 60 <process id>
Started [cpu] profiling
--- Execution profile ---
Total samples: 31602
Non-Java: 3239 (10.25%)
GC active: 46 (0.15%)
Unknown (native): 14667 (46.41%)
Not walkable (native): 3 (0.01%)
Unknown (Java): 433 (1.37%)
Not walkable (Java): 8 (0.03%)
Thread exit: 1 (0.00%)
Deopt: 9 (0.03%)
Frame buffer usage: 55.658%
Total: 1932000000 (6.11%) samples: 1932
[ 0] java.lang.ClassLoader$NativeLibrary.load
[ 1] java.lang.ClassLoader.loadLibrary0
[ 2] java.lang.ClassLoader.loadLibrary
[ 3] java.lang.Runtime.loadLibrary0
[ 4] java.lang.System.loadLibrary
....
```
This should show a bunch of stacktraces that are useful.
To visualize this as a flamegraph, run:
```
$ profiler.sh -d 60 -f /tmp/flamegraph.svg <process id>
```
This should produce `/tmp/flamegraph.svg` at the end.
![flamegraph](project/flamegraph_svg.png)
See https://gist.github.com/eed3si9n/82d43acc95a002876d357bd8ad5f40d5
### running sbt with standby
One of the tricky things you come across while profiling is figuring out the process ID,
while wanting to profile the beginning of the application.
For this purpose, we've added `sbt.launcher.standby` JVM flag.
In the next version of sbt, you should be able to run:
```
$ sbt -J-Dsbt.launcher.standby=20s exit
```
This will count down for 20s before doing anything else.
### jvm-profiling-tools/perf-map-agent
If you want to try the mixed flamegraph, you can try perf-map-agent.
This uses `dtrace` on macOS and `perf` on Linux.
You first have to compile https://github.com/jvm-profiling-tools/perf-map-agent.
For macOS, here is how to export `JAVA_HOME` before running `cmake .`:
```
$ export JAVA_HOME=$(/usr/libexec/java_home)
$ cmake .
-- The C compiler identification is AppleClang 9.0.0.9000039
-- The CXX compiler identification is AppleClang 9.0.0.9000039
...
$ make
```
In addition, you have to git clone https://github.com/brendangregg/FlameGraph
In a fresh terminal, run sbt with `-XX:+PreserveFramePointer` flag:
```
$ sbt -J-Dsbt.launcher.standby=20s -J-XX:+PreserveFramePointer exit
```
In the terminal that you will run the perf-map:
```
$ cd quicktest/
$ export JAVA_HOME=$(/usr/libexec/java_home)
$ export FLAMEGRAPH_DIR=$HOME/work/FlameGraph
$ jps
94592 Jps
94549 sbt-launch.jar
$ $HOME/work/perf-map-agent/bin/dtrace-java-flames 94549
dtrace: system integrity protection is on, some features will not be available
dtrace: description 'profile-99 ' matched 2 probes
Flame graph SVG written to DTRACE_FLAME_OUTPUT='/Users/xxx/work/quicktest/flamegraph-94549.svg'.
```
This would produce a better flamegraph in theory, but the output looks too messy for the `sbt exit` case.
See https://gist.github.com/eed3si9n/b5856ff3d987655513380d1a551aa0df
This might be because it assumes that the operations are already JITed.
### ktoso/sbt-jmh
https://github.com/ktoso/sbt-jmh
Due to JIT warmup etc, benchmarking is difficult. JMH runs the same tests multiple times to
remove these effects and comes closer to measuring the performance of your code.
There's also an integration with jvm-profiling-tools/async-profiler, apparently.
### VisualVM
I'd also mention traditional JVM profiling tool. Since VisualVM is opensource,
I'll mention this one: https://visualvm.github.io/
1. First, start VisualVM.
2. Start sbt from a terminal.
3. You should see `xsbt.boot.Boot` under Local.
4. Open it, and select either sampler or profiler, and hit CPU button at the point when you want to start.
If you are familiar with YourKit, it also works similarly.

View File

@ -1 +1 @@
(See the guidelines for contributing, linked above)
- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines

View File

@ -21,10 +21,10 @@ sbt is a build tool for Scala, Java, and more.
For general documentation, see http://www.scala-sbt.org/.
sbt 1.0.x
sbt 1.x
---------
This is the 1.0.x series of sbt. The source code of sbt is split across
This is the 1.x series of sbt. The source code of sbt is split across
several Github repositories, including this one.
- [sbt/io][sbt/io] hosts `sbt.io` module.

20
SUPPORT.md Normal file
View File

@ -0,0 +1,20 @@
[ask]: https://stackoverflow.com/questions/ask?tags=sbt
[Lightbend]: https://www.lightbend.com/
[subscriptions]: https://www.lightbend.com/platform/subscription
[gitter]: https://gitter.im/sbt/sbt
Support
=======
[Lightbend] sponsors sbt and encourages contributions from the active community. Enterprises can adopt it for mission critical systems with confidence because Lightbend stands behind sbt with commercial support and services.
For community support please [ask] on StackOverflow with the tag "sbt" (and the name of the sbt plugin(s) if any).
- State the problem or question clearly and provide enough context. Code examples and `build.sbt` are often useful when appropriately edited.
- There's also [Gitter sbt/sbt room][gitter], but Stackoverflow is recommended so others can benefit from the answers.
For professional support, for instance if you need faster response times, [Lightbend], the maintainer of Scala compiler and sbt, provides:
- [Lightbend Subscriptions][subscriptions], which includes Expert Support
- Training
- Consulting

192
build.sbt
View File

@ -9,7 +9,7 @@ def buildLevelSettings: Seq[Setting[_]] =
inThisBuild(
Seq(
organization := "org.scala-sbt",
version := "1.1.5-SNAPSHOT",
version := "1.2.0-SNAPSHOT",
description := "sbt is an interactive build tool",
bintrayOrganization := Some("sbt"),
bintrayRepository := {
@ -24,10 +24,12 @@ def buildLevelSettings: Seq[Setting[_]] =
Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")),
Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")),
Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")),
Developer("gkossakowski",
"Grzegorz Kossakowski",
"@gkossakowski",
url("https://github.com/gkossakowski")),
Developer(
"gkossakowski",
"Grzegorz Kossakowski",
"@gkossakowski",
url("https://github.com/gkossakowski")
),
Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm"))
),
homepage := Some(url("https://github.com/sbt/sbt")),
@ -35,35 +37,34 @@ def buildLevelSettings: Seq[Setting[_]] =
resolvers += Resolver.mavenLocal,
scalafmtOnCompile := true,
scalafmtOnCompile in Sbt := false,
scalafmtVersion := "1.3.0",
scalafmtVersion := "1.4.0",
))
def commonSettings: Seq[Setting[_]] =
Seq[SettingsDefinition](
headerLicense := Some(HeaderLicense.Custom(
"""|sbt
|Copyright 2011 - 2017, Lightbend, Inc.
|Copyright 2008 - 2010, Mark Harrah
|Licensed under BSD-3-Clause license (see LICENSE)
|""".stripMargin
)),
scalaVersion := baseScalaVersion,
componentID := None,
resolvers += Resolver.typesafeIvyRepo("releases"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/",
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"),
javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"),
crossScalaVersions := Seq(baseScalaVersion),
bintrayPackage := (bintrayPackage in ThisBuild).value,
bintrayRepository := (bintrayRepository in ThisBuild).value,
publishArtifact in Test := false,
fork in compile := true,
fork in run := true
) flatMap (_.settings)
def commonSettings: Seq[Setting[_]] = Def.settings(
headerLicense := Some(HeaderLicense.Custom(
"""|sbt
|Copyright 2011 - 2017, Lightbend, Inc.
|Copyright 2008 - 2010, Mark Harrah
|Licensed under BSD-3-Clause license (see LICENSE)
|""".stripMargin
)),
scalaVersion := baseScalaVersion,
componentID := None,
resolvers += Resolver.typesafeIvyRepo("releases"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/",
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"),
javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"),
crossScalaVersions := Seq(baseScalaVersion),
bintrayPackage := (bintrayPackage in ThisBuild).value,
bintrayRepository := (bintrayRepository in ThisBuild).value,
publishArtifact in Test := false,
fork in compile := true,
fork in run := true
)
def minimalSettings: Seq[Setting[_]] =
commonSettings ++ customCommands ++
@ -83,7 +84,14 @@ val mimaSettings = Def settings (
).map { v =>
organization.value % moduleName.value % v cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
}.toSet
}
},
mimaBinaryIssueFilters ++= Seq(
// Changes in the internal package
exclude[DirectMissingMethodProblem]("sbt.internal.*"),
exclude[FinalClassProblem]("sbt.internal.*"),
exclude[FinalMethodProblem]("sbt.internal.*"),
exclude[IncompatibleResultTypeProblem]("sbt.internal.*"),
),
)
lazy val sbtRoot: Project = (project in file("."))
@ -163,6 +171,11 @@ val collectionProj = (project in file("internal") / "util-collection")
exclude[MissingClassProblem]("sbt.internal.util.Fn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.TypeFunctions.toFn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.Types.toFn1"),
// Instead of defining foldr in KList & overriding in KCons,
// it's now abstract in KList and defined in both KCons & KNil.
exclude[FinalMethodProblem]("sbt.internal.util.KNil.foldr"),
exclude[DirectAbstractMethodProblem]("sbt.internal.util.KList.foldr"),
),
)
.configure(addSbtUtilPosition)
@ -175,6 +188,8 @@ val completeProj = (project in file("internal") / "util-complete")
name := "Completion",
libraryDependencies += jline,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
),
)
.configure(addSbtIO, addSbtUtilControl)
@ -204,6 +219,10 @@ lazy val testingProj = (project in file("testing"))
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// private[sbt]
exclude[IncompatibleMethTypeProblem]("sbt.TestStatus.write"),
exclude[IncompatibleResultTypeProblem]("sbt.TestStatus.read"),
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy$default$*"),
@ -285,22 +304,45 @@ lazy val runProj = (project in file("run"))
)
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath)
val sbtProjDepsCompileScopeFilter =
ScopeFilter(inDependencies(LocalProject("sbtProj"), includeRoot = false), inConfigurations(Compile))
lazy val scriptedSbtProj = (project in scriptedPath / "sbt")
.dependsOn(commandProj)
.settings(
baseSettings,
name := "Scripted sbt",
libraryDependencies ++= Seq(launcherInterface % "provided"),
resourceGenerators in Compile += Def task {
val mainClassDir = (classDirectory in Compile in LocalProject("sbtProj")).value
val testClassDir = (classDirectory in Test in LocalProject("sbtProj")).value
val classDirs = (classDirectory all sbtProjDepsCompileScopeFilter).value
val extDepsCp = (externalDependencyClasspath in Compile in LocalProject("sbtProj")).value
val cpStrings = (mainClassDir +: testClassDir +: classDirs) ++ extDepsCp.files map (_.toString)
val file = (resourceManaged in Compile).value / "RunFromSource.classpath"
IO.writeLines(file, cpStrings)
List(file)
},
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// sbt.test package is renamed to sbt.scriptedtest.
exclude[MissingClassProblem]("sbt.test.*"),
),
)
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted, addSbtLmCore)
lazy val scriptedPluginProj = (project in scriptedPath / "plugin")
.dependsOn(sbtProj)
.dependsOn(mainProj)
.settings(
baseSettings,
name := "Scripted Plugin",
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// scripted plugin has moved into sbt mothership.
exclude[MissingClassProblem]("sbt.ScriptedPlugin*")
),
)
.configure(addSbtCompilerClasspath)
@ -313,9 +355,14 @@ lazy val actionsProj = (project in file("main-actions"))
libraryDependencies += sjsonNewScalaJson.value,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// Removed unused private[sbt] nested class
exclude[MissingClassProblem]("sbt.Doc$Scaladoc"),
// Removed no longer used private[sbt] method
exclude[DirectMissingMethodProblem]("sbt.Doc.generate"),
exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.filesModifiedBytes"),
exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.fileModifiedBytes"),
)
),
)
.configure(
addSbtIO,
@ -335,6 +382,8 @@ lazy val protocolProj = (project in file("protocol"))
.dependsOn(collectionProj)
.settings(
testedBaseSettings,
scalacOptions -= "-Ywarn-unused",
scalacOptions += "-Xlint:-unused",
name := "Protocol",
libraryDependencies ++= Seq(sjsonNewScalaJson.value, ipcSocket),
managedSourceDirectories in Compile +=
@ -384,6 +433,11 @@ lazy val commandProj = (project in file("main-command"))
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats,
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// dropped private[sbt] method
exclude[DirectMissingMethodProblem]("sbt.BasicCommands.compatCommands"),
// dropped mainly internal command strings holder
exclude[MissingClassProblem]("sbt.BasicCommandStrings$Compat$"),
exclude[DirectMissingMethodProblem]("sbt.BasicCommands.rebootOptionParser"),
// Changed the signature of Server method. nacho cheese.
exclude[DirectMissingMethodProblem]("sbt.internal.server.Server.*"),
@ -397,6 +451,9 @@ lazy val commandProj = (project in file("main-command"))
exclude[MissingClassProblem]("sbt.internal.NG*"),
exclude[MissingClassProblem]("sbt.internal.ReferenceCountedFileDescriptor"),
// made private[sbt] method private[this]
exclude[DirectMissingMethodProblem]("sbt.State.handleException"),
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy"),
exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy$default$*"),
@ -421,7 +478,7 @@ lazy val commandProj = (project in file("main-command"))
lazy val coreMacrosProj = (project in file("core-macros"))
.dependsOn(collectionProj)
.settings(
commonSettings,
baseSettings,
name := "Core Macros",
libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value,
mimaSettings,
@ -445,6 +502,10 @@ lazy val mainSettingsProj = (project in file("main-settings"))
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
exclude[DirectMissingMethodProblem]("sbt.Scope.display012StyleMasked"),
// added a method to a sealed trait
exclude[InheritedNewAbstractMethodProblem]("sbt.Scoped.canEqual"),
exclude[InheritedNewAbstractMethodProblem]("sbt.ScopedTaskable.canEqual"),
),
)
.configure(
@ -460,7 +521,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj)
.dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj, scriptedSbtProj)
.settings(
testedBaseSettings,
name := "Main",
@ -470,17 +531,14 @@ lazy val mainProj = (project in file("main"))
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// Changed the signature of NetworkChannel ctor. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.server.NetworkChannel.*"),
// ctor for ConfigIndex. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.ConfigIndex.*"),
// New and changed methods on KeyIndex. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"),
exclude[DirectMissingMethodProblem]("sbt.internal.KeyIndex.*"),
// Removed unused val. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.RelayAppender.jsonFormat"),
// Removed unused def. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.Load.isProjectThis"),
// Changed signature or removed private[sbt] methods
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"),
exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"),
)
)
.configure(
@ -508,8 +566,10 @@ lazy val sbtProj = (project in file("sbt"))
mimaBinaryIssueFilters ++= sbtIgnoredProblems,
BuildInfoPlugin.buildInfoDefaultSettings,
addBuildInfoToConfig(Test),
BuildInfoPlugin.buildInfoDefaultSettings,
buildInfoObject in Test := "TestBuildInfo",
buildInfoKeys in Test := Seq[BuildInfoKey](
version,
// WORKAROUND https://github.com/sbt/sbt-buildinfo/issues/117
BuildInfoKey.map((fullClasspath in Compile).taskValue) { case (ident, cp) => ident -> cp.files },
classDirectory in Compile,
@ -583,35 +643,29 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
)
def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
// publishLocalBinAll.value // TODO: Restore scripted needing only binary jars.
publishAll.value
// These two projects need to be visible in a repo even if the default
// local repository is hidden, so we publish them to an alternate location and add
// that alternate repo to the running scripted test (in Scripted.scriptedpreScripted).
// (altLocalPublish in interfaceProj).value
// (altLocalPublish in compileInterfaceProj).value
(sbtProj / Test / compile).value // make sure sbt.RunFromSourceMain is compiled
Scripted.doScripted(
(sbtLaunchJar in bundledLauncherProj).value,
(fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value,
scriptedBufferLog.value,
result,
Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed,
scriptedPrescripted.value,
scriptedLaunchOpts.value
)
}
def scriptedUnpublishedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
Scripted.doScripted(
(sbtLaunchJar in bundledLauncherProj).value,
(fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value,
scriptedBufferLog.value,
result,
Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed,
scriptedPrescripted.value,
scriptedLaunchOpts.value
)
@ -648,14 +702,12 @@ def otherRootSettings =
scripted := scriptedTask.evaluated,
scriptedUnpublished := scriptedUnpublishedTask.evaluated,
scriptedSource := (sourceDirectory in sbtProj).value / "sbt-test",
// scriptedPrescripted := { addSbtAlternateResolver _ },
scriptedLaunchOpts := List("-Xmx1500M", "-Xms512M", "-server"),
publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value },
publishLocalBinAll := { val _ = (publishLocalBin).all(ScopeFilter(inAnyProject)).value },
aggregate in bintrayRelease := false
) ++ inConfig(Scripted.RepoOverrideTest)(
Seq(
scriptedPrescripted := (_ => ()),
scriptedLaunchOpts := List(
"-Xmx1500M",
"-Xms512M",
@ -668,23 +720,6 @@ def otherRootSettings =
scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test"
))
// def addSbtAlternateResolver(scriptedRoot: File) = {
// val resolver = scriptedRoot / "project" / "AddResolverPlugin.scala"
// if (!resolver.exists) {
// IO.write(resolver, s"""import sbt._
// |import Keys._
// |
// |object AddResolverPlugin extends AutoPlugin {
// | override def requires = sbt.plugins.JvmPlugin
// | override def trigger = allRequirements
// |
// | override lazy val projectSettings = Seq(resolvers += alternativeLocalResolver)
// | lazy val alternativeLocalResolver = Resolver.file("$altLocalRepoName", file("$altLocalRepoPath"))(Resolver.ivyStylePatterns)
// |}
// |""".stripMargin)
// }
// }
lazy val docProjects: ScopeFilter = ScopeFilter(
inAnyProject -- inProjects(sbtRoot, sbtProj, scriptedSbtProj, scriptedPluginProj),
inConfigurations(Compile)
@ -780,3 +815,12 @@ def customCommands: Seq[Setting[_]] = Seq(
state
}
)
inThisBuild(Seq(
whitesourceProduct := "Lightbend Reactive Platform",
whitesourceAggregateProjectName := "sbt-master",
whitesourceAggregateProjectToken := "e7a1e55518c0489a98e9c7430c8b2ccd53d9f97c12ed46148b592ebe4c8bf128",
whitesourceIgnoredScopes ++= Seq("plugin", "scalafmt", "sxr"),
whitesourceFailOnError := sys.env.contains("WHITESOURCE_PASSWORD"), // fail if pwd is present
whitesourceForceCheckAllDependencies := true,
))

View File

@ -29,13 +29,14 @@ object ContextUtil {
* Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this
* method, the result of this method is `f(<Tree of someValue>)`.
*/
def selectMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
def selectMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
import c.universe._
c.macroApplication match {
case s @ Select(Apply(_, t :: Nil), tp) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
}
}
@ -211,12 +212,14 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit =
new ChangeOwnerAndModuleClassTraverser(
prev.asInstanceOf[global.Symbol],
next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree])
next.asInstanceOf[global.Symbol]
).traverse(tree.asInstanceOf[global.Tree])
// Workaround copied from scala/async:can be removed once https://github.com/scala/scala/pull/3179 is merged.
private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol,
newowner: global.Symbol)
extends global.ChangeOwnerTraverser(oldowner, newowner) {
private[this] class ChangeOwnerAndModuleClassTraverser(
oldowner: global.Symbol,
newowner: global.Symbol
) extends global.ChangeOwnerTraverser(oldowner, newowner) {
override def traverse(tree: global.Tree): Unit = {
tree match {
case _: global.DefTree => change(tree.symbol.moduleClass)
@ -248,7 +251,8 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* the type constructor `[x] List[x]`.
*/
def extractTC(tcp: AnyRef with Singleton, name: String)(
implicit it: ctx.TypeTag[tcp.type]): ctx.Type = {
implicit it: ctx.TypeTag[tcp.type]
): ctx.Type = {
val itTpe = it.tpe.asInstanceOf[global.Type]
val m = itTpe.nonPrivateMember(global.newTypeName(name))
val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type]
@ -262,8 +266,10 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* Typically, `f` is a `Select` or `Ident`.
* The wrapper is replaced with the result of `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
*/
def transformWrappers(t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = {
def transformWrappers(
t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]
): Tree = {
// the main tree transformer that replaces calls to InputWrapper.wrap(x) with
// plain Idents that reference the actual input value
object appTransformer extends Transformer {

View File

@ -26,9 +26,10 @@ sealed trait Converted[C <: blackbox.Context with Singleton] {
}
object Converted {
def NotApplicable[C <: blackbox.Context with Singleton] = new NotApplicable[C]
final case class Failure[C <: blackbox.Context with Singleton](position: C#Position,
message: String)
extends Converted[C] {
final case class Failure[C <: blackbox.Context with Singleton](
position: C#Position,
message: String
) extends Converted[C] {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message)
}
@ -36,9 +37,10 @@ object Converted {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = this
}
final case class Success[C <: blackbox.Context with Singleton](tree: C#Tree,
finalTransform: C#Tree => C#Tree)
extends Converted[C] {
final case class Success[C <: blackbox.Context with Singleton](
tree: C#Tree,
finalTransform: C#Tree => C#Tree
) extends Converted[C] {
def isSuccess = true
def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform)
}

View File

@ -41,9 +41,11 @@ object Instance {
final val MapName = "map"
final val InstanceTCName = "M"
final class Input[U <: Universe with Singleton](val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef)
final class Input[U <: Universe with Singleton](
val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef
)
trait Transform[C <: blackbox.Context with Singleton, N[_]] {
def apply(in: C#Tree): C#Tree
}

View File

@ -13,8 +13,9 @@ import macros._
/** A `TupleBuilder` that uses a KList as the tuple representation.*/
object KListBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val ctx: c.type = c
val util = ContextUtil[c.type](c)
@ -47,15 +48,20 @@ object KListBuilder extends TupleBuilder {
case Nil => revBindings.reverse
}
private[this] def makeKList(revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type): Tree =
private[this] def makeKList(
revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type
): Tree =
revInputs match {
case in :: tail =>
val next = ApplyTree(
TypeApply(Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil),
in.expr :: klist :: Nil)
TypeApply(
Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil
),
in.expr :: klist :: Nil
)
makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil))
case Nil => klist
}

View File

@ -16,8 +16,9 @@ import macros._
* and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.
*/
object MixedBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
val delegate = if (inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder
delegate.make(c)(mt, inputs)
}

View File

@ -35,8 +35,9 @@ trait TupleBuilder {
type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]]
/** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */
def make(c: blackbox.Context)(tcType: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type]
def make(
c: blackbox.Context
)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type]
}
trait BuilderResult[C <: blackbox.Context with Singleton] {

View File

@ -22,8 +22,9 @@ object TupleNBuilder extends TupleBuilder {
final val MaxInputs = 11
final val TupleMethodName = "tuple"
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val util = ContextUtil[c.type](c)
import c.universe._
@ -34,8 +35,9 @@ object TupleNBuilder extends TupleBuilder {
val ctx: c.type = c
val representationC: PolyType = {
val tcVariable: Symbol = newTCVariable(util.initialOwner)
val tupleTypeArgs = inputs.map(in =>
internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type])
val tupleTypeArgs = inputs.map(
in => internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]
)
val tuple = global.definitions.tupleType(tupleTypeArgs)
internal.polyType(tcVariable :: Nil, tuple.asInstanceOf[Type])
}
@ -47,10 +49,12 @@ object TupleNBuilder extends TupleBuilder {
}
def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1)
def bindTuple(param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int): List[ValDef] =
def bindTuple(
param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int
): List[ValDef] =
params match {
case (x @ ValDef(mods, name, tpt, _)) :: xs =>
val rhs = select(Ident(param.name), "_" + i.toString)

View File

@ -17,7 +17,9 @@ import Types._
*/
trait AList[K[L[x]]] {
def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[K[P]]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[K[P]]
def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A
def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)
@ -33,8 +35,11 @@ object AList {
val empty: Empty = new Empty {
def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(())
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] =
app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[Unit] = np.pure(())
}
type SeqList[T] = AList[λ[L[x] => List[L[T]]]]
@ -42,9 +47,12 @@ object AList {
/** AList for a homogeneous sequence. */
def seq[T]: SeqList[T] = new SeqList[T] {
def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t))
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A =
(init /: s.reverse)((t, m) => f(m, t))
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = {
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(
implicit ap: Applicative[M]
): M[C] = {
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
in match {
case Nil => ap.pure(g(Nil))
@ -55,15 +63,20 @@ object AList {
loop(s, f)
}
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ???
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[List[P[T]]] = ???
}
/** AList for the arbitrary arity data structure KList. */
def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] {
def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] =
k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[KL[P]] = k.traverse[N, P](f)(np)
override def toList[M[_]](k: KL[M]) = k.toList
}
@ -73,7 +86,9 @@ object AList {
def single[A]: Single[A] = new Single[A] {
def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[P[A]] = f(a)
}
type ASplit[K[L[x]], B[x]] = AList[λ[L[x] => K[(L B)#l]]]
@ -85,7 +100,9 @@ object AList {
def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
base.transform[(M B)#l, (N B)#l](value, nestCon[M, N, B](f))
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[Split[P]] = {
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[Split[P]] = {
val g = nestCon[M, (N P)#l, B](f)
base.traverse[(M B)#l, N, (P B)#l](value, g)(np)
}
@ -101,7 +118,9 @@ object AList {
type T2[M[_]] = (M[A], M[B])
def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T2[P]] = {
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T2[P]] = {
val g = (Tuple2.apply[P[A], P[B]] _).curried
np.apply(np.map(g, f(t._1)), f(t._2))
}
@ -113,7 +132,9 @@ object AList {
type T3[M[_]] = (M[A], M[B], M[C])
def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T3[P]] = {
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T3[P]] = {
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
}
@ -124,8 +145,11 @@ object AList {
def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] {
type T4[M[_]] = (M[A], M[B], M[C], M[D])
def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T4[P]] = {
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T4[P]] = {
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
}
@ -136,8 +160,11 @@ object AList {
def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] {
type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T5[P]] = {
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T5[P]] = {
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
}
@ -147,71 +174,213 @@ object AList {
type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l]
def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] {
type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T6[P]] = {
def transform[M[_], N[_]](t: T6[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T6[P]] = {
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
)
}
}
sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) }
sealed trait T7K[A, B, C, D, E, F, G] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G])
}
type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l]
def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] {
type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T7[P]] = {
def transform[M[_], N[_]](t: T7[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T7[P]] = {
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
)
}
}
sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) }
sealed trait T8K[A, B, C, D, E, F, G, H] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H])
}
type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] =
new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
)
}
}
}
sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) }
sealed trait T9K[A, B, C, D, E, F, G, H, I] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I])
}
type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l]
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] =
new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
)
}
}
}
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) }
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J])
}
type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l]
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))))
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T10[P]] = {
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] =
new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))
)
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T10[P]] = {
val g =
(Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
)
}
}
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) }
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11))
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))))
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T11[P]] = {
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K])
}
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] =
new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) =
(
f(t._1),
f(t._2),
f(t._3),
f(t._4),
f(t._5),
f(t._6),
f(t._7),
f(t._8),
f(t._9),
f(t._10),
f(t._11)
)
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(
t._2,
f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))
)
)
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T11[P]] = {
val g = (Tuple11
.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
),
f(t._11)
)
}
}
}

View File

@ -31,7 +31,8 @@ sealed trait AttributeKey[T] {
def description: Option[String]
/**
* In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys.
* In environments that support delegation, looking up this key when it has no associated value
* will delegate to the values associated with these keys.
* The delegation proceeds in order the keys are returned here.
*/
def extend: Seq[AttributeKey[_]]
@ -70,20 +71,26 @@ object AttributeKey {
def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] =
apply(name, description, Nil)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
rank: Int
): AttributeKey[T] =
apply(name, description, Nil, rank)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]]): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]]
): AttributeKey[T] =
apply(name, description, extend, Int.MaxValue)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int
): AttributeKey[T] =
make(name, Some(description), extend, rank)
private[sbt] def copyWithRank[T](a: AttributeKey[T], rank: Int): AttributeKey[T] =

View File

@ -7,6 +7,8 @@
package sbt.internal.util
import scala.collection.JavaConverters._
/** A mutable set interface that uses object identity to test for set membership.*/
trait IDSet[T] {
def apply(t: T): Boolean
@ -41,7 +43,7 @@ object IDSet {
def +=(t: T) = { backing.put(t, Dummy); () }
def ++=(t: Iterable[T]) = t foreach +=
def -=(t: T) = if (backing.remove(t) eq null) false else true
def all = collection.JavaConverters.collectionAsScalaIterable(backing.keySet)
def all = backing.keySet.asScala
def toList = all.toList
def isEmpty = backing.isEmpty

View File

@ -170,8 +170,10 @@ abstract class EvaluateSettings[Scope] {
}
protected final def setValue(v: T): Unit = {
assert(state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString)
assert(
state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString
)
if (v == null) sys.error("Setting value cannot be null: " + keyString)
value = v
state = Evaluated

View File

@ -10,7 +10,7 @@ package sbt.internal.util
import Types._
import Classes.Applicative
/** Heterogeneous list with each element having type M[T] for some type T.*/
/** A higher-kinded heterogeneous list of elements that share the same type constructor `M[_]`. */
sealed trait KList[+M[_]] {
type Transform[N[_]] <: KList[N]
@ -18,7 +18,7 @@ sealed trait KList[+M[_]] {
def transform[N[_]](f: M ~> N): Transform[N]
/** Folds this list using a function that operates on the homogeneous type of the elements of this list. */
def foldr[B](f: (M[_], B) => B, init: B): B = init // had trouble defining it in KNil
def foldr[B](f: (M[_], B) => B, init: B): B
/** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z]
@ -54,13 +54,14 @@ final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KL
override def foldr[B](f: (M[_], B) => B, init: B): B = f(head, tail.foldr(f, init))
}
sealed abstract class KNil extends KList[Nothing] {
sealed abstract class KNil extends KList[NothingK] {
final type Transform[N[_]] = KNil
final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil
final def transform[N[_]](f: NothingK ~> N): Transform[N] = KNil
final def foldr[B](f: (NothingK[_], B) => B, init: B): B = init
final def toList = Nil
final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil))
final def traverse[N[_], P[_]](f: Nothing ~> (N P)#l)(implicit np: Applicative[N]): N[KNil] =
final def traverse[N[_], P[_]](f: NothingK ~> (N P)#l)(implicit np: Applicative[N]): N[KNil] =
np.pure(KNil)
}

View File

@ -357,7 +357,8 @@ trait Init[Scope] {
keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "")
new Uninitialized(
keys,
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ")
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n "
)
}
final class Compiled[T](
@ -374,8 +375,9 @@ trait Init[Scope] {
val locals = compiled flatMap {
case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil
}
val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil))
val ordered = Dag.topologicalSort(locals)(
_.dependencies.flatMap(dep => if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)
)
def flatten(
cmap: Map[ScopedKey[_], Flattened],
key: ScopedKey[_],
@ -383,7 +385,8 @@ trait Init[Scope] {
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil))
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)
)
val empty = Map.empty[ScopedKey[_], Flattened]
@ -415,7 +418,8 @@ trait Init[Scope] {
* Intersects two scopes, returning the more specific one if they intersect, or None otherwise.
*/
private[sbt] def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific
else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific
else None

View File

@ -65,7 +65,7 @@ object Signals {
}
// Must only be referenced using a
// try { } catch { case e: LinkageError => ... }
// try { } catch { case _: LinkageError => ... }
// block to
private final class Signals0 {
def supported(signal: String): Boolean = {

View File

@ -9,6 +9,7 @@ package sbt.internal.util
trait TypeFunctions {
type Id[X] = X
type NothingK[X] = Nothing
sealed trait Const[A] { type Apply[B] = A }
sealed trait ConstK[A] { type l[L[x]] = A }
sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] }

View File

@ -7,8 +7,7 @@
package sbt.internal.util
import org.scalacheck._
import Prop._
import org.scalacheck._, Prop._
object SettingsTest extends Properties("settings") {
val settingsExample: SettingsExample = SettingsExample()
@ -160,7 +159,7 @@ object SettingsTest extends Properties("settings") {
final def checkCircularReferences(intermediate: Int): Prop = {
val ccr = new CCR(intermediate)
try { evaluate(setting(chk, ccr.top) :: Nil); false } catch {
case e: java.lang.Exception => true
case _: java.lang.Exception => true
}
}
@ -197,18 +196,18 @@ object SettingsTest extends Properties("settings") {
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
try { make(settings)(delegates, scopeLocal, showFullKey) } catch {
case e: Throwable => e.printStackTrace; throw e
case e: Throwable => e.printStackTrace(); throw e
}
}
// This setup is a workaround for module synchronization issues
final class CCR(intermediate: Int) {
import SettingsTest.settingsExample._
lazy val top = iterate(value(intermediate), intermediate)
def iterate(init: Initialize[Int], i: Int): Initialize[Int] =
lazy val top = iterate(value(intermediate))
def iterate(init: Initialize[Int]): Initialize[Int] =
bind(init) { t =>
if (t <= 0)
top
else
iterate(value(t - 1), t - 1)
iterate(value(t - 1))
}
}

View File

@ -49,8 +49,9 @@ abstract class JLine extends LineReader {
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
if (handleCONT)
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
readLineDirectRaw(prompt, mask))
Signals.withHandler(() => resume(), signal = Signals.CONT)(
() => readLineDirectRaw(prompt, mask)
)
else
readLineDirectRaw(prompt, mask)
@ -132,7 +133,7 @@ private[sbt] object JLine {
def createReader(): ConsoleReader = createReader(None, JLine.makeInputStream(true))
def createReader(historyPath: Option[File], in: InputStream): ConsoleReader =
usingTerminal { t =>
usingTerminal { _ =>
val cr = new ConsoleReader(in, System.out)
cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650
cr.setBellEnabled(false)

View File

@ -10,7 +10,7 @@ package complete
import java.lang.Character.{ toLowerCase => lower }
/** @author Paul Phillips*/
/** @author Paul Phillips */
object EditDistance {
/**
@ -24,7 +24,6 @@ object EditDistance {
insertCost: Int = 1,
deleteCost: Int = 1,
subCost: Int = 1,
transposeCost: Int = 1,
matchCost: Int = 0,
caseCost: Int = 1,
transpositions: Boolean = false

View File

@ -11,11 +11,7 @@ package complete
import History.number
import java.io.File
final class History private (
val lines: IndexedSeq[String],
val path: Option[File],
error: String => Unit
) {
final class History private (val lines: IndexedSeq[String], val path: Option[File]) {
private def reversed = lines.reverse
def all: Seq[String] = lines
@ -52,8 +48,8 @@ final class History private (
}
object History {
def apply(lines: Seq[String], path: Option[File], error: String => Unit): History =
new History(lines.toIndexedSeq, path, sys.error)
def apply(lines: Seq[String], path: Option[File]): History =
new History(lines.toIndexedSeq, path)
def number(s: String): Option[Int] =
try { Some(s.toInt) } catch { case _: NumberFormatException => None }

View File

@ -11,7 +11,7 @@ package complete
import jline.console.ConsoleReader
import jline.console.completer.{ Completer, CompletionHandler }
import scala.annotation.tailrec
import scala.collection.JavaConverters
import scala.collection.JavaConverters._
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
@ -91,7 +91,8 @@ object JLineCompletion {
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
def customCompletor(
f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
f: (String, Int) => (Seq[String], Seq[String])
): (ConsoleReader, Int) => Boolean =
(reader, level) => {
val success = complete(beforeCursor(reader), reader => f(reader, level), reader)
reader.flush()
@ -154,7 +155,7 @@ object JLineCompletion {
if (line.charAt(line.length - 1) != '\n')
reader.println()
}
reader.printColumns(JavaConverters.seqAsJavaList(columns.map(_.trim)))
reader.printColumns(columns.map(_.trim).asJava)
}
def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0

View File

@ -275,8 +275,10 @@ object Parser extends ParserMain {
revAcc: List[T]
): Parser[Seq[T]] = {
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
assume(max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
assume(
max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")"
)
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
repeated match {
@ -836,10 +838,12 @@ private final class ParserWithExamples[T](
) extends ValidParser[T] {
def derive(c: Char) =
examples(delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples)
examples(
delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples
)
def result = delegate.result

View File

@ -12,15 +12,17 @@ import Parser._
import java.io.File
import java.net.URI
import java.lang.Character.{
getType,
MATH_SYMBOL,
OTHER_SYMBOL,
CURRENCY_SYMBOL,
DASH_PUNCTUATION,
OTHER_PUNCTUATION,
MATH_SYMBOL,
MODIFIER_SYMBOL,
CURRENCY_SYMBOL
OTHER_PUNCTUATION,
OTHER_SYMBOL,
getType
}
import scala.annotation.tailrec
/** Provides standard implementations of commonly useful [[Parser]]s. */
trait Parsers {
@ -42,7 +44,8 @@ trait Parsers {
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
_.toString)
_.toString
)
/** Parses a single letter, according to Char.isLetter, into a Char. */
lazy val Letter = charClass(_.isLetter, "letter")
@ -313,6 +316,16 @@ object DefaultParsers extends Parsers with ParserMain {
apply(p)(s).resultEmpty.isValid
/** Returns `true` if `s` parses successfully according to [[ID]].*/
def validID(s: String): Boolean = matches(ID, s)
def validID(s: String): Boolean = {
// Handwritten version of `matches(ID, s)` because validID turned up in profiling.
def isIdChar(c: Char): Boolean = Character.isLetterOrDigit(c) || (c == '-') || (c == '_')
@tailrec def isRestIdChar(cur: Int, s: String, length: Int): Boolean =
if (cur < length)
isIdChar(s.charAt(cur)) && isRestIdChar(cur + 1, s, length)
else
true
!s.isEmpty && Character.isLetter(s.charAt(0)) && isRestIdChar(1, s, s.length)
}
}

View File

@ -0,0 +1,29 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.internal.util
package complete
import org.scalacheck._, Gen._, Prop._
object DefaultParsersSpec extends Properties("DefaultParsers") {
import DefaultParsers.{ ID, isIDChar, matches, validID }
property("∀ s ∈ String: validID(s) == matches(ID, s)") = forAll(
(s: String) => validID(s) == matches(ID, s)
)
property("∀ s ∈ genID: matches(ID, s)") = forAll(genID)(s => matches(ID, s))
property("∀ s ∈ genID: validID(s)") = forAll(genID)(s => validID(s))
private val chars: Seq[Char] = Char.MinValue to Char.MaxValue
private val genID: Gen[String] =
for {
c <- oneOf(chars filter (_.isLetter))
cs <- listOf(oneOf(chars filter isIDChar))
} yield (c :: cs).mkString
}

View File

@ -9,60 +9,66 @@ package sbt.internal.util
package complete
import java.io.File
import sbt.io.IO._
import org.scalatest.Assertion
import sbt.io.IO
class FileExamplesTest extends UnitSpec {
"listing all files in an absolute base directory" should
"produce the entire base directory's contents" in {
val _ = new DirectoryStructure {
fileExamples().toList should contain theSameElementsAs (allRelativizedPaths)
withDirectoryStructure() { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.allRelativizedPaths)
}
}
"listing files with a prefix that matches none" should
"produce an empty list" in {
val _ = new DirectoryStructure(withCompletionPrefix = "z") {
fileExamples().toList shouldBe empty
"listing files with a prefix that matches none" should "produce an empty list" in {
withDirectoryStructure(withCompletionPrefix = "z") { ds =>
ds.fileExamples().toList shouldBe empty
}
}
"listing single-character prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "f") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing single-character prefixed files" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "f") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"listing directory-prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing directory-prefixed files" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "far") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
it should "produce sub-dir contents only when appending a file separator to the directory" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator) {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
withDirectoryStructure(withCompletionPrefix = "far" + File.separator) { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"listing files with a sub-path prefix" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing files with a sub-path prefix" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"completing a full path" should
"produce a list with an empty string" in {
val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") {
fileExamples().toList shouldEqual List("")
"completing a full path" should "produce a list with an empty string" in {
withDirectoryStructure(withCompletionPrefix = "bazaar") { ds =>
ds.fileExamples().toList shouldEqual List("")
}
}
// TODO: Remove DelayedInit - https://github.com/scala/scala/releases/tag/v2.11.0-RC1
class DirectoryStructure(withCompletionPrefix: String = "") extends DelayedInit {
def withDirectoryStructure[A](withCompletionPrefix: String = "")(
thunk: DirectoryStructure => Assertion
): Assertion = {
IO.withTemporaryDirectory { tempDir =>
val ds = new DirectoryStructure(withCompletionPrefix)
ds.createSampleDirStructure(tempDir)
ds.fileExamples = new FileExamples(ds.baseDir, withCompletionPrefix)
thunk(ds)
}
}
final class DirectoryStructure(withCompletionPrefix: String) {
var fileExamples: FileExamples = _
var baseDir: File = _
var childFiles: List[File] = _
@ -72,22 +78,14 @@ class FileExamplesTest extends UnitSpec {
def allRelativizedPaths: List[String] =
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories)
.map(relativize(baseDir, _).get)
.map(IO.relativize(baseDir, _).get)
def prefixedPathsOnly: List[String] =
allRelativizedPaths
.filter(_ startsWith withCompletionPrefix)
.map(_ substring withCompletionPrefix.length)
override def delayedInit(testBody: => Unit): Unit = {
withTemporaryDirectory { tempDir =>
createSampleDirStructure(tempDir)
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
testBody
}
}
private def createSampleDirStructure(tempDir: File): Unit = {
def createSampleDirStructure(tempDir: File): Unit = {
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))
childDirectories = toChildFiles(tempDir, List("moo", "far"))
nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2"))

View File

@ -27,7 +27,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("blue"),
suggestion("red")
))
)
)
parserWithExamples.completions(0) shouldEqual validCompletions
}
}
@ -38,7 +39,8 @@ class ParserWithExamplesTest extends UnitSpec {
val derivedCompletions = Completions(
Set(
suggestion("lue")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
@ -58,7 +60,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("lue"),
suggestion("lock")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}

View File

@ -24,14 +24,14 @@ object LogicTest extends Properties("Logic") {
property("Properly orders results.") = secure(expect(ordering, Set(B, A, C, E, F)))
property("Detects cyclic negation") = secure(
Logic.reduceAll(badClauses, Set()) match {
case Right(res) => false
case Left(err: Logic.CyclicNegation) => true
case Left(err) => sys.error(s"Expected cyclic error, got: $err")
case Right(_) => false
case Left(_: Logic.CyclicNegation) => true
case Left(err) => sys.error(s"Expected cyclic error, got: $err")
}
)
def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match {
case Left(err) => false
case Left(_) => false
case Right(res) =>
val actual = res.provenSet
if (actual != expected)

View File

@ -20,25 +20,30 @@ final class Console(compiler: AnalyzingCompiler) {
def apply(classpath: Seq[File], log: Logger): Try[Unit] =
apply(classpath, Nil, "", "", log)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger
): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log)
def apply(classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String
)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(
implicit log: Logger): Try[Unit] = {
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
def console0() =
compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings)
JLine.usingTerminal { t =>

View File

@ -10,10 +10,6 @@ package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
import sbt.util.CacheStoreFactory
import xsbti.Reporter
import xsbti.compile.JavaTools
@ -23,93 +19,51 @@ import sbt.internal.util.ManagedLogger
object Doc {
import RawCompileLike._
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler): Gen =
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler
): Gen =
scaladoc(label, cacheStoreFactory, compiler, Seq())
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]): Gen =
cached(cacheStoreFactory,
fileInputOptions,
prepare(label + " Scala API documentation", compiler.doc))
def javadoc(label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter): Gen =
javadoc(label, cacheStoreFactory, doc, log, reporter, Seq())
def javadoc(label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
fileInputOptions: Seq[String]): Gen =
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]
): Gen =
cached(
cacheStoreFactory,
fileInputOptions,
prepare(
label + " Java API documentation",
filterSources(
javaSourcesOnly,
(sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
maxErrors: Int,
log: Logger) => {
// doc.doc
???
}
)
)
prepare(label + " Scala API documentation", compiler.doc)
)
@deprecated("Going away", "1.1.1")
def javadoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
): Gen = ???
@deprecated("Going away", "1.1.1")
def javadoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
fileInputOptions: Seq[String],
): Gen = ???
@deprecated("Going away", "1.1.1")
val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java")
private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc {
def apply(label: String,
sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
log: ManagedLogger): Unit = {
generate("Scala",
label,
compiler.doc,
sources,
classpath,
outputDirectory,
options,
maximumErrors,
log)
}
}
}
@deprecated("Going away", "1.1.1")
sealed trait Doc {
@deprecated("Going away", "1.1.1")
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private[sbt] final def generate(variant: String,
label: String,
docf: Gen,
sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
maxErrors: Int,
log: ManagedLogger): Unit = {
val logSnip = variant + " API documentation"
if (sources.isEmpty)
log.info("No sources available, skipping " + logSnip + "...")
else {
log.info(
"Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...")
IO.delete(outputDirectory)
IO.createDirectory(outputDirectory)
docf(sources, classpath, outputDirectory, options, maxErrors, log)
log.info(logSnip + " generation successful.")
}
}
}

View File

@ -30,29 +30,37 @@ object DotGraph {
val toString = packageOnly compose fToString(sourceRoots)
apply(relations, outputDirectory, toString, toString)
}
def apply(relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String): Unit = {
def apply(
relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String
): Unit = {
def file(name: String) = new File(outputDir, name)
IO.createDirectory(outputDir)
generateGraph(file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String])
generateGraph(file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString)
generateGraph(
file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String]
)
generateGraph(
file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString
)
}
def generateGraph[K, V](file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String): Unit = {
def generateGraph[K, V](
file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String
): Unit = {
import scala.collection.mutable.{ HashMap, HashSet }
val mappedGraph = new HashMap[String, HashSet[String]]
for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values)

View File

@ -17,15 +17,18 @@ import sbt.io.IO
import sbt.util.Logger
import sbt.ConcurrentRestrictions.Tag
import sbt.protocol.testing._
import sbt.internal.util.ConsoleAppender
private[sbt] object ForkTests {
def apply(runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag): Task[TestOutput] = {
def apply(
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag
): Task[TestOutput] = {
val opts = processOptions(config, tests, log)
import std.TaskExtra._
@ -42,12 +45,14 @@ private[sbt] object ForkTests {
}
}
private[this] def mainTestTask(runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean): Task[TestOutput] =
private[this] def mainTestTask(
runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean
): Task[TestOutput] =
std.TaskExtra.task {
val server = new ServerSocket(0)
val testListeners = opts.testListeners flatMap {
@ -67,7 +72,8 @@ private[sbt] object ForkTests {
} catch {
case e: java.net.SocketException =>
log.error(
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage)
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage
)
log.trace(e)
server.close()
return
@ -78,15 +84,17 @@ private[sbt] object ForkTests {
val is = new ObjectInputStream(socket.getInputStream)
try {
val config = new ForkConfiguration(log.ansiCodesSupported, parallel)
val config = new ForkConfiguration(ConsoleAppender.formatEnabledInEnv, parallel)
os.writeObject(config)
val taskdefs = opts.tests.map(
t =>
new TaskDef(t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors))
val taskdefs = opts.tests.map { t =>
new TaskDef(
t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors
)
}
os.writeObject(taskdefs.toArray)
os.writeInt(runners.size)
@ -116,20 +124,27 @@ private[sbt] object ForkTests {
val acceptorThread = new Thread(Acceptor)
acceptorThread.start()
val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework])
val options = Seq("-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString)
val fullCp = classpath ++: Seq(
IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework]
)
val options = Seq(
"-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString
)
val ec = Fork.java(fork, options)
val result =
if (ec != 0)
TestOutput(TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error),
Iterable.empty)
TestOutput(
TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error
),
Iterable.empty
)
else {
// Need to wait acceptor thread to finish its business
acceptorThread.join()
@ -150,11 +165,13 @@ private[sbt] object ForkTests {
case _ => sys.error("Unknown fingerprint type: " + f.getClass)
}
}
private final class React(is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]) {
private final class React(
is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]
) {
import ForkTags._
@annotation.tailrec
def react(): Unit = is.readObject match {

View File

@ -7,7 +7,6 @@
package sbt
import scala.Predef.{ conforms => _, _ }
import java.io.File
import java.util.jar.{ Attributes, Manifest }
import scala.collection.JavaConverters._
@ -50,9 +49,11 @@ object Package {
}
}
final class Configuration(val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption])
final class Configuration(
val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption]
)
def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = {
val manifest = new Manifest
val main = manifest.getMainAttributes
@ -66,9 +67,9 @@ object Package {
}
setVersion(main)
type Inputs = Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil
val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") {
(inChanged,
inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) =>
(inChanged, inputs: Inputs) =>
import exists.format
val sources :+: _ :+: manifest :+: HNil = inputs
outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) =>
@ -86,8 +87,10 @@ object Package {
}
def setVersion(main: Attributes): Unit = {
val version = Attributes.Name.MANIFEST_VERSION
if (main.getValue(version) eq null)
if (main.getValue(version) eq null) {
main.put(version, "1.0")
()
}
}
def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = {
import Attributes.Name._
@ -95,16 +98,26 @@ object Package {
val attribVals = Seq(name, version, orgName)
ManifestAttributes(attribKeys zip attribVals: _*)
}
def addImplManifestAttributes(name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String): PackageOption = {
def addImplManifestAttributes(
name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String
): PackageOption = {
import Attributes.Name._
val attribKeys = Seq(IMPLEMENTATION_TITLE,
IMPLEMENTATION_VERSION,
IMPLEMENTATION_VENDOR,
IMPLEMENTATION_VENDOR_ID)
// The ones in Attributes.Name are deprecated saying:
// "Extension mechanism will be removed in a future release. Use class path instead."
val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id")
val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL")
val attribKeys = Seq(
IMPLEMENTATION_TITLE,
IMPLEMENTATION_VERSION,
IMPLEMENTATION_VENDOR,
IMPLEMENTATION_VENDOR_ID,
)
val attribVals = Seq(name, version, orgName, org)
ManifestAttributes((attribKeys zip attribVals) ++ {
homepage map (h => (IMPLEMENTATION_URL, h.toString))

View File

@ -7,10 +7,10 @@
package sbt
import scala.annotation.tailrec
import java.io.File
import sbt.internal.inc.{ RawCompiler, ScalaInstance }
import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
@ -30,7 +30,7 @@ object RawCompileLike {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = {
@annotation.tailrec
@tailrec
def loop(opt: List[String], result: List[File]): List[File] = {
opt.dropWhile(!fileInputOpts.contains(_)) match {
case List(_, fileOpt, tail @ _*) => {
@ -46,16 +46,20 @@ object RawCompileLike {
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen =
cached(cacheStoreFactory, Seq(), doCompile)
def cached(cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen): Gen =
def cached(
cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen
): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
type Inputs =
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[
String] :+: Int :+: HNil
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+:
Seq[String] :+: Int :+: HNil
val inputs
: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(
classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
classpath.toSet
) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") {
(outChanged, outputs: FilesInfo[PlainFileInfo]) =>
@ -67,6 +71,7 @@ object RawCompileLike {
}
cachedComp(inputs)(exists(outputDirectory.allPaths.get.toSet))
}
def prepare(description: String, doCompile: Gen): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
if (sources.isEmpty)
@ -79,20 +84,24 @@ object RawCompileLike {
log.info(description.capitalize + " successful.")
}
}
def filterSources(f: File => Boolean, doCompile: Gen): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) =>
doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log)
def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
(sources, classpath, outputDirectory, options, _, log) => {
val compiler = new RawCompiler(instance, cpOptions, log)
compiler(sources, classpath, outputDirectory, options)
}
def compile(label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions): Gen =
def compile(
label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions
): Gen =
cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions)))
val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => ()
val nop: Gen = (_, _, _, _, _, _) => ()
}

View File

@ -30,10 +30,18 @@ import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
* It is safe to use for its intended purpose: copying resources to a class output directory.
*/
object Sync {
def apply(store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists)
: Traversable[(File, File)] => Relation[File, File] =
@deprecated("Use sync, which doesn't take the unused outStyle param", "1.1.1")
def apply(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists,
): Traversable[(File, File)] => Relation[File, File] =
sync(store, inStyle)
def sync(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
): Traversable[(File, File)] => Relation[File, File] =
mappings => {
val relation = Relation.empty ++ mappings
noDuplicateTargets(relation)
@ -63,26 +71,24 @@ object Sync {
def copy(source: File, target: File): Unit =
if (source.isFile)
IO.copyFile(source, target, true)
else if (!target.exists) // we don't want to update the last modified time of an existing directory
{
IO.createDirectory(target)
IO.copyLastModified(source, target)
}
else if (!target.exists) { // we don't want to update the last modified time of an existing directory
IO.createDirectory(target)
IO.copyLastModified(source, target)
()
}
def noDuplicateTargets(relation: Relation[File, File]): Unit = {
val dups = relation.reverseMap.filter {
case (_, srcs) =>
srcs.size >= 2 && srcs.exists(!_.isDirectory)
} map {
case (target, srcs) =>
"\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t")
}
val dups = relation.reverseMap
.filter { case (_, srcs) => srcs.size >= 2 && srcs.exists(!_.isDirectory) }
.map { case (target, srcs) => "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") }
if (dups.nonEmpty)
sys.error("Duplicate mappings:" + dups.mkString)
}
implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] =
implicit def relationFormat[A, B](
implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]
): JsonFormat[Relation[A, B]] =
new JsonFormat[Relation[A, B]] {
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] =
jsOpt match {
@ -105,15 +111,18 @@ object Sync {
}
def writeInfo[F <: FileInfo](store: CacheStore,
relation: Relation[File, File],
info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit =
def writeInfo[F <: FileInfo](
store: CacheStore,
relation: Relation[File, File],
info: Map[File, F]
)(implicit infoFormat: JsonFormat[F]): Unit =
store.write((relation, info))
type RelationInfo[F] = (Relation[File, File], Map[File, F])
def readInfo[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
def readInfo[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
try { readUncaught[F](store)(infoFormat) } catch {
case _: IOException => (Relation.empty[File, File], Map.empty[File, F])
case _: ZipException => (Relation.empty[File, File], Map.empty[File, F])
@ -124,7 +133,8 @@ object Sync {
}
}
private def readUncaught[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
private def readUncaught[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
}

View File

@ -31,13 +31,17 @@ trait TestResultLogger {
def run(log: Logger, results: Output, taskName: String): Unit
/** Only allow invocation if certain criteria is met, else use another `TestResultLogger` (defaulting to nothing) . */
final def onlyIf(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def onlyIf(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, this, otherwise)
/** Allow invocation unless a certain predicate passes, in which case use another `TestResultLogger` (defaulting to nothing) . */
final def unless(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def unless(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, otherwise, this)
}
@ -69,8 +73,10 @@ object TestResultLogger {
* @param f The `TestResultLogger` to choose if the predicate fails.
*/
def choose(cond: (Output, String) => Boolean, t: TestResultLogger, f: TestResultLogger) =
TestResultLogger((log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName))
TestResultLogger(
(log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName)
)
/** Transforms the input to be completely silent when the subject module doesn't contain any tests. */
def silenceWhenNoTests(d: Defaults.Main) =
@ -127,32 +133,39 @@ object TestResultLogger {
results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty)
val printStandard = TestResultLogger((log, results, _) => {
val (skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount) =
val (
skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount,
) =
results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) {
case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc),
(name @ _, testEvent)) =>
(skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount)
case (acc, (_, testEvent)) =>
val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) =
acc
(
skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount,
)
}
val totalCount = failuresCount + errorsCount + skippedCount + passedCount
val base =
s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount"
val otherCounts = Seq("Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount)
val otherCounts = Seq(
"Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount
)
val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" }
val postfix = base + extra.mkString
@ -181,6 +194,7 @@ object TestResultLogger {
})
val printNoTests = TestResultLogger(
(log, results, taskName) => log.info("No tests to run for " + taskName))
(log, results, taskName) => log.info("No tests to run for " + taskName)
)
}
}

View File

@ -34,6 +34,7 @@ import sbt.util.Logger
import sbt.protocol.testing.TestResult
sealed trait TestOption
object Tests {
/**
@ -43,9 +44,11 @@ object Tests {
* @param events The result of each test group (suite) executed during this test run.
* @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated.
*/
final case class Output(overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary])
final case class Output(
overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary]
)
/**
* Summarizes a test run.
@ -137,9 +140,11 @@ object Tests {
val cleanup: Vector[ClassLoader => Unit],
val testListeners: Vector[TestReportListener]
)
private[sbt] def processOptions(config: Execution,
discovered: Vector[TestDefinition],
log: Logger): ProcessedOptions = {
private[sbt] def processOptions(
config: Execution,
discovered: Vector[TestDefinition],
log: Logger
): ProcessedOptions = {
import collection.mutable.{ HashSet, ListBuffer }
val testFilters = new ListBuffer[String => Boolean]
var orderedFilters = Seq[String => Boolean]()
@ -167,7 +172,8 @@ object Tests {
if (undefinedFrameworks.nonEmpty)
log.warn(
"Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks
.mkString("\n\t"))
.mkString("\n\t")
)
def includeTest(test: TestDefinition) =
!excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
@ -176,10 +182,12 @@ object Tests {
if (orderedFilters.isEmpty) filtered0
else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct
val uniqueTests = distinctBy(tests)(_.name)
new ProcessedOptions(uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector)
new ProcessedOptions(
uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector
)
}
private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = {
@ -187,33 +195,39 @@ object Tests {
in.filter(t => seen.add(f(t)))
}
def apply(frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger): Task[Output] = {
def apply(
frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger
): Task[Output] = {
val o = processOptions(config, discovered, log)
testTask(testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config)
testTask(
testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config
)
}
def testTask(loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution): Task[Output] = {
def testTask(
loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution
): Task[Output] = {
def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () =>
a(loader)
@ -227,7 +241,7 @@ object Tests {
if (config.parallel)
makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join
else
makeSerial(loader, runnables, setupTasks, config.tags)
makeSerial(loader, runnables, setupTasks)
val taggedMainTasks = mainTasks.tagw(config.tags: _*)
taggedMainTasks map processResults flatMap { results =>
val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall))
@ -238,31 +252,43 @@ object Tests {
}
type TestRunnable = (String, TestFunction)
private def createNestedRunnables(loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] =
private def createNestedRunnables(
loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]
): Seq[(String, TestFunction)] =
nestedTasks.view.zipWithIndex map {
case (nt, idx) =>
val testFunDef = testFun.taskDef
(testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(loader,
new TaskDef(testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors),
testFun.runner,
nt))
(
testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(
loader,
new TaskDef(
testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors
),
testFun.runner,
nt
)
)
}
def makeParallel(loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] =
def makeParallel(
loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] =
toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks)
def toTasks(loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTasks(
loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
case (sum, e) =>
@ -274,10 +300,12 @@ object Tests {
})
}
def toTask(loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTask(
loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val base = task { (name, fun.apply()) }
val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
taggedBase flatMap {
@ -294,13 +322,25 @@ object Tests {
}
}
def makeSerial(loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] = {
@deprecated("Use the variant without tags", "1.1.1")
def makeSerial(
loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)],
): Task[List[(String, SuiteResult)]] =
makeSerial(loader, runnables, setupTasks)
def makeSerial(
loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
): Task[List[(String, SuiteResult)]] = {
@tailrec
def processRunnable(runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] =
def processRunnable(
runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]
): List[(String, SuiteResult)] =
runnableList match {
case hd :: rst =>
val testFun = hd._2
@ -350,9 +390,11 @@ object Tests {
((TestResult.Passed: TestResult) /: results) { (acc, result) =>
if (severity(acc) < severity(result)) result else acc
}
def discover(frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger): (Seq[TestDefinition], Set[String]) =
def discover(
frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger
): (Seq[TestDefinition], Set[String]) =
discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)
def allDefs(analysis: CompileAnalysis) = analysis match {
@ -368,9 +410,11 @@ object Tests {
all
}.toSeq
}
def discover(fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger): (Seq[TestDefinition], Set[String]) = {
def discover(
fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger
): (Seq[TestDefinition], Set[String]) = {
val subclasses = fingerprints collect {
case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub)
};
@ -381,9 +425,11 @@ object Tests {
log.debug("Annotation fingerprints: " + annotations)
def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet
def defined(in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean): Seq[Fingerprint] =
def defined(
in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean
): Seq[Fingerprint] =
in collect { case (name, IsModule, print) if names(name) => print }
def toFingerprints(d: Discovered): Seq[Fingerprint] =

View File

@ -34,10 +34,12 @@ final class EvalImports(val strings: Seq[(String, Int)], val srcName: String)
* the module from that class loader. `generated` contains the compiled classes and cache files related
* to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`.
*/
final class EvalResult(val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String)
final class EvalResult(
val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String
)
/**
* The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated,
@ -46,10 +48,12 @@ final class EvalResult(val tpe: String,
* from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`.
* The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`.
*/
final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]) {
final class EvalDefinitions(
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]
) {
def values(parent: ClassLoader): Seq[Any] = {
val module = getModule(enclosingModule, loader(parent))
for (n <- valNames) yield module.getClass.getMethod(n).invoke(module)
@ -58,10 +62,12 @@ final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
final class EvalException(msg: String) extends RuntimeException(msg)
// not thread safe, since it reuses a Global instance
final class Eval(optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]) {
final class Eval(
optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]
) {
def this(mkReporter: Settings => Reporter, backing: Option[File]) =
this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing)
def this() = this(s => new ConsoleReporter(s), None)
@ -97,11 +103,13 @@ final class Eval(optionsNoncp: Seq[String],
private[this] var toUnlinkLater = List[Symbol]()
private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
def eval(expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine): EvalResult = {
def eval(
expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine
): EvalResult = {
val ev = new EvalType[String] {
def makeUnit = mkUnit(srcName, line, expression)
def unlink = true
@ -121,11 +129,13 @@ final class Eval(optionsNoncp: Seq[String],
val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl))
new EvalResult(i.extra, value, i.generated, i.enclosingModule)
}
def evalDefinitions(definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]): EvalDefinitions = {
def evalDefinitions(
definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]
): EvalDefinitions = {
require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.")
val ev = new EvalType[Seq[String]] {
lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions)
@ -152,10 +162,12 @@ final class Eval(optionsNoncp: Seq[String],
new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra)
}
private[this] def evalCommon[T](content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]): EvalIntermediate[T] = {
private[this] def evalCommon[T](
content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]
): EvalIntermediate[T] = {
import Eval._
// TODO - We also encode the source of the setting into the hash to avoid conflicts where the exact SAME setting
// is defined in multiple evaluated instances with a backing. This leads to issues with finding a previous
@ -212,12 +224,14 @@ final class Eval(optionsNoncp: Seq[String],
// location of the cached type or definition information
private[this] def cacheFile(base: File, moduleName: String): File =
new File(base, moduleName + ".cache")
private[this] def compileAndLoad[T](run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]): (T, ClassLoader => ClassLoader) = {
private[this] def compileAndLoad[T](
run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]
): (T, ClassLoader => ClassLoader) = {
global.curRun = run
run.currentUnit = unit
val dir = outputDirectory(backing)
@ -262,18 +276,22 @@ final class Eval(optionsNoncp: Seq[String],
parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String): Tree = {
def augment(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String
): Tree = {
val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
syntheticModule(parser, imports, method :: Nil, objectName)
}
private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String): Tree = {
private[this] def syntheticModule(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String
): Tree = {
val emptyTypeName = nme.EMPTY.toTypeName
def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
@ -282,8 +300,10 @@ final class Eval(optionsNoncp: Seq[String],
Nil,
List(Nil),
TypeTree(),
Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(())))
Block(
List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(()))
)
)
def moduleBody = Template(List(gen.scalaAnyRefConstr), noSelfType, emptyInit :: definitions)
@ -321,10 +341,12 @@ final class Eval(optionsNoncp: Seq[String],
private[this] def isTopLevelModule(s: Symbol): Boolean =
s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass
private[this] final class EvalIntermediate[T](val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String)
private[this] final class EvalIntermediate[T](
val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String
)
private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists
// TODO: use the code from Analyzer
@ -338,10 +360,12 @@ final class Eval(optionsNoncp: Seq[String],
(s contains moduleName)
}
private[this] class ParseErrorStrings(val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String)
private[this] class ParseErrorStrings(
val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String
)
private[this] def definitionErrorStrings = new ParseErrorStrings(
base = "Error parsing definition.",
extraBlank = " Ensure that there are no blank lines within a definition.",
@ -360,9 +384,11 @@ final class Eval(optionsNoncp: Seq[String],
* Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state
* to catch errors that are common when the content is embedded in a blank-line-delimited format.
*/
private[this] def parse[T](unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = {
private[this] def parse[T](
unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T
): (syntaxAnalyzer.UnitParser, T) = {
val parser = new syntaxAnalyzer.UnitParser(unit)
val tree = f(parser)
@ -463,7 +489,8 @@ final class Eval(optionsNoncp: Seq[String],
*/
private[this] def mkDefsUnit(
srcName: String,
definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = {
definitions: Seq[(String, scala.Range)]
): (CompilationUnit, Seq[CompilationUnit]) = {
def fragmentUnit(content: String, lineMap: Array[Int]) =
new CompilationUnit(fragmentSourceFile(srcName, content, lineMap))

View File

@ -37,19 +37,21 @@ class CacheIvyTest extends Properties("CacheIvy") {
content = converter.toJsonUnsafe(value)
}
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(
implicit cache: SingletonCache[T]): U = {
private def testCache[T: JsonFormat, U](
f: (SingletonCache[T], CacheStore) => U
)(implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}
private def cachePreservesEquality[T: JsonFormat](m: T,
eq: (T, T) => Prop,
str: T => String): Prop = testCache[T, Prop] {
(cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
private def cachePreservesEquality[T: JsonFormat](
m: T,
eq: (T, T) => Prop,
str: T => String
): Prop = testCache[T, Prop] { (cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
}
implicit val arbConfigRef: Arbitrary[ConfigRef] = Arbitrary(

View File

@ -38,7 +38,8 @@ class EvalTest extends Properties("eval") {
val line = math.abs(l)
val src = "mismatch"
throws(classOf[RuntimeException])(
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) &&
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)
) &&
hasErrors(line + 1, src)
}
@ -78,14 +79,17 @@ val p = {
property("explicit import") = forAll(testImport("import math.abs" :: Nil))
property("wildcard import") = forAll(testImport("import math._" :: Nil))
property("comma-separated imports") = forAll(
testImport("import annotation._, math._, meta._" :: Nil))
testImport("import annotation._, math._, meta._" :: Nil)
)
property("multiple imports") = forAll(
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil))
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil)
)
private[this] def testImport(imports: Seq[String]): Int => Prop =
i =>
value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs(
i)
i
)
private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }"
val LocalType = "AnyRef{val i: Int}"

View File

@ -21,8 +21,10 @@ object BasicCommandStrings {
val TerminateAction: String = Exit
def helpBrief =
(HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>').")
(
HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>')."
)
def helpDetailed = s"""$HelpCommand
Prints a help summary.
@ -133,8 +135,10 @@ $HelpCommand <regular expression>
def Multi = ";"
def MultiBrief =
(Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands.")
(
Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands."
)
def MultiDetailed =
Multi + " command1 " + Multi + """ command2 ...
@ -185,20 +189,6 @@ $AliasCommand name=
def StashOnFailure = "sbtStashOnFailure"
def PopOnFailure = "sbtPopOnFailure"
// commands with poor choices for names since they clash with the usual conventions for command line options
// these are not documented and are mainly internal commands and can be removed without a full deprecation cycle
object Compat {
def OnFailure = "-"
def ClearOnFailure = "--"
def FailureWall = "---"
def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure)
def ClearOnFailureDeprecated =
deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure)
def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall)
private[this] def deprecatedAlias(oldName: String, newName: String): String =
s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in a later version"
}
def FailureWall = "resumeFromFailure"
def ClearOnFailure = "sbtClearOnFailure"

View File

@ -56,7 +56,7 @@ object BasicCommands {
client,
read,
alias
) ++ compatCommands
)
def nop: Command = Command.custom(s => success(() => s))
def ignore: Command = Command.command(FailureWall)(idFun)
@ -81,7 +81,8 @@ object BasicCommands {
val h = (Help.empty /: s.definedCommands)(
(a, b) =>
a ++ (try b.help(s)
catch { case NonFatal(_) => Help.empty }))
catch { case NonFatal(_) => Help.empty })
)
val helpCommands = h.detail.keySet
val spacedArg = singleArgument(helpCommands).?
applyEffect(spacedArg)(runHelp(s, h))
@ -95,10 +96,14 @@ object BasicCommands {
}
def completionsCommand: Command =
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)(
runCompletions(_)(_))
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(_ => completionsParser)(
runCompletions(_)(_)
)
def completionsParser(state: State): Parser[String] = {
@deprecated("No longer public", "1.1.1")
def completionsParser(state: State): Parser[String] = completionsParser
private[this] def completionsParser: Parser[String] = {
val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => nq ++ s }
val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted)
token(quotedOrUnquotedSingleArgument ?? "" examples ("", " "))
@ -116,8 +121,9 @@ object BasicCommands {
def multiParser(s: State): Parser[List[String]] = {
val nonSemi = token(charClass(_ != ';').+, hide = const(true))
val semi = token(';' ~> OptSpace)
val part = semi flatMap (_ =>
matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace))
val part = semi flatMap (
_ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace)
)
(part map (_.trim)).+ map (_.toList)
}
@ -133,40 +139,26 @@ object BasicCommands {
matched(s.combinedParser | token(any, hide = const(true)))
def ifLast: Command =
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) =>
if (s.remainingCommands.isEmpty) arg :: s else s)
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)(
(s, arg) => if (s.remainingCommands.isEmpty) arg :: s else s
)
def append: Command =
Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)(
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)))
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source))
)
def setOnFailure: Command =
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)((s, arg) =>
s.copy(onFailure = Some(Exec(arg, s.source))))
private[sbt] def compatCommands = Seq(
Command.command(Compat.ClearOnFailure) { s =>
s.log.warn(Compat.ClearOnFailureDeprecated)
s.copy(onFailure = None)
},
Command.arb(
s =>
token(Compat.OnFailure, hide = const(true))
.flatMap(_ => otherCommandParser(s))) { (s, arg) =>
s.log.warn(Compat.OnFailureDeprecated)
s.copy(onFailure = Some(Exec(arg, s.source)))
},
Command.command(Compat.FailureWall) { s =>
s.log.warn(Compat.FailureWallDeprecated)
s
}
)
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)(
(s, arg) => s.copy(onFailure = Some(Exec(arg, s.source)))
)
def clearOnFailure: Command = Command.command(ClearOnFailure)(s => s.copy(onFailure = None))
def stashOnFailure: Command =
Command.command(StashOnFailure)(s =>
s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten))
Command.command(StashOnFailure)(
s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)
)
def popOnFailure: Command = Command.command(PopOnFailure) { s =>
val stack = s.get(OnFailureStack).getOrElse(Nil)
@ -176,19 +168,19 @@ object BasicCommands {
}
def reboot: Command =
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootOptionParser) {
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(_ => rebootOptionParser) {
case (s, (full, currentOnly)) =>
s.reboot(full, currentOnly)
}
@deprecated("Use rebootOptionParser", "1.1.0")
def rebootParser(s: State): Parser[Boolean] =
rebootOptionParser(s) map { case (full, currentOnly) => full }
def rebootParser(s: State): Parser[Boolean] = rebootOptionParser map { case (full, _) => full }
private[sbt] def rebootOptionParser(s: State): Parser[(Boolean, Boolean)] =
token(
Space ~> (("full" ^^^ ((true, false))) |
("dev" ^^^ ((false, true))))) ?? ((false, false))
private[sbt] def rebootOptionParser: Parser[(Boolean, Boolean)] = {
val fullOption = "full" ^^^ ((true, false))
val devOption = "dev" ^^^ ((false, true))
token(Space ~> (fullOption | devOption)) ?? ((false, false))
}
def call: Command =
Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) {
@ -211,8 +203,9 @@ object BasicCommands {
private[this] def className: Parser[String] = {
val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.")
def single(s: String) = Completions.single(Completion.displayOnly(s))
val compl = TokenCompletions.fixed((seen, _) =>
if (seen.startsWith("-")) Completions.nil else single("<class name>"))
val compl = TokenCompletions.fixed(
(seen, _) => if (seen.startsWith("-")) Completions.nil else single("<class name>")
)
token(base, compl)
}
@ -237,10 +230,9 @@ object BasicCommands {
def historyParser(s: State): Parser[() => State] =
Command.applyEffect(HistoryCommands.actionParser) { histFun =>
val logError = (msg: String) => s.log.error(msg)
val hp = s get historyPath getOrElse None
val hp = (s get historyPath).flatten
val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq
histFun(CHistory(lines, hp, logError)) match {
histFun(CHistory(lines, hp)) match {
case Some(commands) =>
commands foreach println //printing is more appropriate than logging
(commands ::: s).continue
@ -401,7 +393,8 @@ object BasicCommands {
}
def delegateToAlias(name: String, orElse: Parser[() => State])(
state: State): Parser[() => State] =
state: State
): Parser[() => State] =
aliases(state, (nme, _) => nme == name).headOption match {
case None => orElse
case Some((n, v)) => aliasBody(n, v)(state)

View File

@ -10,6 +10,7 @@ package sbt
import java.io.File
import sbt.internal.util.AttributeKey
import sbt.internal.inc.classpath.ClassLoaderCache
import sbt.internal.server.ServerHandler
import sbt.librarymanagement.ModuleID
import sbt.util.Level
@ -17,11 +18,13 @@ object BasicKeys {
val historyPath = AttributeKey[Option[File]](
"history",
"The location where command line history is persisted.",
40)
40
)
val shellPrompt = AttributeKey[State => String](
"shell-prompt",
"The function that constructs the command prompt from the current build state.",
10000)
10000
)
val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000)
val serverPort =
AttributeKey[Int]("server-port", "The port number used by server command.", 10000)
@ -30,20 +33,32 @@ object BasicKeys {
AttributeKey[String]("serverHost", "The host used by server command.", 10000)
val serverAuthentication =
AttributeKey[Set[ServerAuthentication]]("serverAuthentication",
"Method of authenticating server command.",
10000)
AttributeKey[Set[ServerAuthentication]](
"serverAuthentication",
"Method of authenticating server command.",
10000
)
val serverConnectionType =
AttributeKey[ConnectionType]("serverConnectionType",
"The wire protocol for the server command.",
10000)
AttributeKey[ConnectionType](
"serverConnectionType",
"The wire protocol for the server command.",
10000
)
val fullServerHandlers =
AttributeKey[Seq[ServerHandler]](
"fullServerHandlers",
"Combines default server handlers and user-defined handlers.",
10000
)
val autoStartServer =
AttributeKey[Boolean](
"autoStartServer",
"If true, the sbt server will startup automatically during interactive sessions.",
10000)
10000
)
// Unlike other BasicKeys, this is not used directly as a setting key,
// and severLog / logLevel is used instead.
@ -56,23 +71,28 @@ object BasicKeys {
private[sbt] val interactive = AttributeKey[Boolean](
"interactive",
"True if commands are currently being entered from an interactive environment.",
10)
10
)
private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache](
"class-loader-cache",
"Caches class loaders based on the classpath entries and last modified times.",
10)
10
)
private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]](
"on-failure-stack",
"Stack that remembers on-failure handlers.",
10)
10
)
private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean](
"explicit-global-log-levels",
"True if the global logging levels were explicitly set by the user.",
10)
10
)
private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]](
"templateResolverInfos",
"List of template resolver infos.",
1000)
1000
)
}
case class TemplateResolverInfo(module: ModuleID, implementationClass: String)

View File

@ -67,18 +67,21 @@ object Command {
new SimpleCommand(name, help, parser, AttributeMap.empty)
def make(name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[() => State]): Command =
parser: State => Parser[() => State]
): Command =
make(name, Help(name, briefHelp, detail))(parser)
// General command construction
/** Construct a command with the given name, parser and effect. */
def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])(
effect: (State, T) => State): Command =
def apply[T](name: String, help: Help = Help.empty)(
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
make(name, help)(applyEffect(parser)(effect))
def apply[T](name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[T])(effect: (State, T) => State): Command =
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
apply(name, Help(name, briefHelp, detail))(parser)(effect)
// No-argument command construction
@ -97,18 +100,21 @@ object Command {
make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state)))
def single(name: String, briefHelp: (String, String), detail: String)(
f: (State, String) => State): Command =
f: (State, String) => State
): Command =
single(name, Help(name, briefHelp, detail))(f)
// Multi-argument command construction
/** Construct a multi-argument command with the given name, tab completion display and effect. */
def args(name: String, display: String, help: Help = Help.empty)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
make(name, help)(state => spaceDelimited(display) map apply1(f, state))
def args(name: String, briefHelp: (String, String), detail: String, display: String)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
args(name, display, Help(name, briefHelp, detail))(f)
// create ArbitraryCommand
@ -120,7 +126,8 @@ object Command {
customHelp(parser, const(help))
def arb[T](parser: State => Parser[T], help: Help = Help.empty)(
effect: (State, T) => State): Command =
effect: (State, T) => State
): Command =
custom(applyEffect(parser)(effect), help)
// misc Command object utilities
@ -129,8 +136,9 @@ object Command {
def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p map (t => () => f(t))
def applyEffect[T](parser: State => Parser[T])(
effect: (State, T) => State): State => Parser[() => State] =
def applyEffect[T](
parser: State => Parser[T]
)(effect: (State, T) => State): State => Parser[() => State] =
s => applyEffect(parser(s))(t => effect(s, t))
def combine(cmds: Seq[Command]): State => Parser[() => State] = {
@ -140,7 +148,8 @@ object Command {
}
private[this] def separateCommands(
cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
cmds: Seq[Command]
): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
Util.separate(cmds) { case s: SimpleCommand => Left(s); case a: ArbitraryCommand => Right(a) }
private[this] def apply1[A, B, C](f: (A, B) => C, a: A): B => () => C = b => () => f(a, b)
@ -155,13 +164,26 @@ object Command {
}
def simpleParser(
commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] =
commandMap: Map[String, State => Parser[() => State]]
): State => Parser[() => State] =
state =>
token(OpOrID examples commandMap.keys.toSet) flatMap (id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
})
token(OpOrID examples commandMap.keys.toSet) flatMap (
id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
}
)
def process(command: String, state: State): State = {
val parser = combine(state.definedCommands)
parse(command, parser(state)) match {
case Right(s) => s() // apply command. command side effects happen here
case Left(errMsg) =>
state.log error errMsg
state.fail
}
}
def invalidValue(label: String, allowed: Iterable[String])(value: String): String =
s"Not a valid $label: $value" + similar(value, allowed)
@ -171,22 +193,25 @@ object Command {
if (suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")")
}
def suggestions(a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3): Seq[String] =
def suggestions(
a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3
): Seq[String] =
bs map (b => (b, distance(a, b))) filter (_._2 <= maxDistance) sortBy (_._2) take (maxSuggestions) map (_._1)
def distance(a: String, b: String): Int =
EditDistance.levenshtein(a,
b,
insertCost = 1,
deleteCost = 1,
subCost = 2,
transposeCost = 1,
matchCost = -1,
caseCost = 1,
transpositions = true)
EditDistance.levenshtein(
a,
b,
insertCost = 1,
deleteCost = 1,
subCost = 2,
matchCost = -1,
caseCost = 1,
transpositions = true
)
def spacedAny(name: String): Parser[String] = spacedC(name, any)
@ -222,9 +247,11 @@ object Help {
def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String]): Help =
apply(briefHelp, detailedHelp, Set.empty[String])
def apply(briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]): Help =
def apply(
briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]
): Help =
new Help0(briefHelp, detailedHelp, more)
def more(name: String, detailedHelp: String): Help =

View File

@ -12,21 +12,23 @@ import java.io.File
final case class Exit(code: Int) extends xsbti.Exit {
require(code >= 0)
}
final case class Reboot(scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File)
extends xsbti.Reboot {
final case class Reboot(
scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File
) extends xsbti.Reboot {
def arguments = argsList.toArray
}
final case class ApplicationID(groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File])
extends xsbti.ApplicationID {
final case class ApplicationID(
groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File]
) extends xsbti.ApplicationID {
def mainComponents = components.toArray
def classpathExtra = extra.toArray
def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled
@ -35,11 +37,13 @@ object ApplicationID {
def apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID =
apply(delegate).copy(version = newVersion)
def apply(delegate: xsbti.ApplicationID): ApplicationID =
ApplicationID(delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra)
ApplicationID(
delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra
)
}

View File

@ -238,14 +238,16 @@ object State {
def process(f: (Exec, State) => State): State = {
def runCmd(cmd: Exec, remainingCommands: List[Exec]) = {
log.debug(s"> $cmd")
f(cmd,
s.copy(remainingCommands = remainingCommands,
currentCommand = Some(cmd),
history = cmd :: s.history))
val s1 = s.copy(
remainingCommands = remainingCommands,
currentCommand = Some(cmd),
history = cmd :: s.history,
)
f(cmd, s1)
}
s.remainingCommands match {
case List() => exit(true)
case List(x, xs @ _*) => runCmd(x, xs.toList)
case Nil => exit(true)
case x :: xs => runCmd(x, xs)
}
}
def :::(newCommands: List[String]): State = ++:(newCommands map { Exec(_, s.source) })
@ -283,10 +285,7 @@ object State {
def log = s.globalLogging.full
def handleError(t: Throwable): State = handleException(t, s, log)
def fail = {
import BasicCommandStrings.Compat.{ FailureWall => CompatFailureWall }
val remaining =
s.remainingCommands.dropWhile(c =>
c.commandLine != FailureWall && c.commandLine != CompatFailureWall)
val remaining = s.remainingCommands.dropWhile(c => c.commandLine != FailureWall)
if (remaining.isEmpty)
applyOnFailure(s, Nil, exit(ok = false))
else
@ -321,7 +320,7 @@ object State {
import ExceptionCategory._
private[sbt] def handleException(t: Throwable, s: State, log: Logger): State = {
private[this] def handleException(t: Throwable, s: State, log: Logger): State = {
ExceptionCategory(t) match {
case AlreadyHandled => ()
case m: MessageOnly => log.error(m.message)

View File

@ -23,8 +23,8 @@ import scala.util.Properties
trait Watched {
/** The files watched when an action is run with a preceeding ~ */
def watchSources(s: State): Seq[Watched.WatchSource] = Nil
/** The files watched when an action is run with a preceding ~ */
def watchSources(@deprecated("unused", "") s: State): Seq[Watched.WatchSource] = Nil
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
/**
@ -50,8 +50,13 @@ trait Watched {
}
object Watched {
val defaultWatchingMessage
: WatchState => String = _.count + ". Waiting for source changes... (press enter to interrupt)"
val defaultWatchingMessage: WatchState => String = ws =>
s"${ws.count}. Waiting for source changes... (press enter to interrupt)"
def projectWatchingMessage(projectId: String): WatchState => String =
ws =>
s"${ws.count}. Waiting for source changes in project $projectId... (press enter to interrupt)"
val defaultTriggeredMessage: WatchState => String = const("")
val clearWhenTriggered: WatchState => String = const(clearScreen)
def clearScreen: String = "\u001b[2J\u001b[0;0H"
@ -76,8 +81,8 @@ object Watched {
* @param base The base directory from which to include files.
* @return An instance of `Source`.
*/
def apply(base: File): Source =
apply(base, AllPassFilter, NothingFilter)
def apply(base: File): Source = apply(base, AllPassFilter, NothingFilter)
}
private[this] class AWatched extends Watched
@ -111,11 +116,13 @@ object Watched {
(ClearOnFailure :: next :: FailureWall :: repeat :: s)
.put(
ContinuousEventMonitor,
EventMonitor(WatchState.empty(watched.watchService(), watched.watchSources(s)),
watched.pollInterval,
watched.antiEntropy,
shouldTerminate,
logger)
EventMonitor(
WatchState.empty(watched.watchService(), watched.watchSources(s)),
watched.pollInterval,
watched.antiEntropy,
shouldTerminate,
logger
)
)
case Some(eventMonitor) =>
printIfDefined(watched watchingMessage eventMonitor.state)
@ -123,8 +130,9 @@ object Watched {
catch {
case e: Exception =>
log.error(
"Error occurred obtaining files to watch. Terminating continuous execution...")
State.handleException(e, s, log)
"Error occurred obtaining files to watch. Terminating continuous execution..."
)
s.handleError(e)
false
}
if (triggered) {
@ -139,16 +147,20 @@ object Watched {
}
val ContinuousEventMonitor =
AttributeKey[EventMonitor]("watch event monitor",
"Internal: maintains watch state and monitor threads.")
AttributeKey[EventMonitor](
"watch event monitor",
"Internal: maintains watch state and monitor threads."
)
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
val ContinuousState =
AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
val ContinuousWatchService =
AttributeKey[WatchService]("watch service",
"Internal: tracks watch service for continuous execution.")
AttributeKey[WatchService](
"watch service",
"Internal: tracks watch service for continuous execution."
)
val Configuration =
AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")

View File

@ -19,12 +19,11 @@ import sjsonnew.JsonFormat
*/
abstract class CommandChannel {
private val commandQueue: ConcurrentLinkedQueue[Exec] = new ConcurrentLinkedQueue()
def append(exec: Exec): Boolean =
commandQueue.add(exec)
def append(exec: Exec): Boolean = commandQueue.add(exec)
def poll: Option[Exec] = Option(commandQueue.poll)
def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit
def publishEvent[A: JsonFormat](event: A): Unit
final def publishEvent[A: JsonFormat](event: A): Unit = publishEvent(event, None)
def publishEventMessage(event: EventMessage): Unit
def publishBytes(bytes: Array[Byte]): Unit
def shutdown(): Unit

View File

@ -40,8 +40,6 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit = ()
def publishEvent[A: JsonFormat](event: A): Unit = ()
def publishEventMessage(event: EventMessage): Unit =
event match {
case e: ConsolePromptEvent =>
@ -50,7 +48,7 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
case _ =>
val x = makeAskUserThread(e.state)
askUserThread = Some(x)
x.start
x.start()
}
case e: ConsoleUnpromptEvent =>
e.lastSource match {
@ -70,7 +68,7 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
def shutdown(): Unit =
askUserThread match {
case Some(x) if x.isAlive =>
x.interrupt
x.interrupt()
askUserThread = None
case _ => ()
}

View File

@ -126,6 +126,7 @@ object NetworkClient {
def run(arguments: List[String]): Unit =
try {
new NetworkClient(arguments)
()
} catch {
case NonFatal(e) => println(e.getMessage)
}

View File

@ -40,9 +40,11 @@ private[sbt] object Server {
with TokenFileFormats
object JsonProtocol extends JsonProtocol
def start(connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger): ServerInstance =
def start(
connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger
): ServerInstance =
new ServerInstance { self =>
import connection._
val running = new AtomicBoolean(false)
@ -67,7 +69,8 @@ private[sbt] object Server {
"socket file absolute path too long; " +
"either switch to another connection type " +
"or define a short \"SBT_GLOBAL_SERVER_DIR\" value. " +
s"Current path: ${path}")
s"Current path: ${path}"
)
tryClient(new UnixDomainSocket(path))
prepareSocketfile()
addServerError(new UnixDomainServerSocket(path))
@ -103,7 +106,7 @@ private[sbt] object Server {
def tryClient(f: => Socket): Unit = {
if (portfile.exists) {
Try { f } match {
case Failure(e) => ()
case Failure(_) => ()
case Success(socket) =>
socket.close()
throw new AlreadyRunningException()

View File

@ -0,0 +1,73 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
package internal
package server
import sjsonnew.JsonFormat
import sbt.internal.protocol._
import sbt.util.Logger
import sbt.protocol.{ SettingQuery => Q }
/**
 * ServerHandler allows plugins to extend sbt server.
 * It's a wrapper around the curried function ServerCallback => ServerIntent:
 * given a callback for talking back to the client, it produces the intent
 * (request/notification handlers) this handler contributes.
 */
final class ServerHandler(val handler: ServerCallback => ServerIntent) {
  // The wrapped handler is an opaque function, so there is nothing useful to render.
  // (Fixed capitalization: was "Serverhandler(...)"; also no interpolation needed.)
  override def toString: String = "ServerHandler(...)"
}
object ServerHandler {

  /** Wraps the given callback-to-intent function as a [[ServerHandler]]. */
  def apply(handler: ServerCallback => ServerIntent): ServerHandler =
    new ServerHandler(handler)

  /**
   * Catch-all handler intended to run last: logs (at debug level) any request or
   * notification that no other handler consumed.
   *
   * Note: `ServerIntent.apply` takes `(onRequest, onNotification)` in that order,
   * so the first partial function below must log *requests* and the second
   * *notifications* (the original had the two messages swapped).
   */
  lazy val fallback: ServerHandler = ServerHandler({ handler =>
    ServerIntent(
      { case x => handler.log.debug(s"Unhandled request received: ${x.method}: $x") },
      { case x => handler.log.debug(s"Unhandled notification received: ${x.method}: $x") }
    )
  })
}
/**
 * A pair of partial handlers for the two JSON-RPC message categories.
 *
 * @param onRequest      handles incoming JSON-RPC request messages
 * @param onNotification handles incoming JSON-RPC notification messages
 */
final class ServerIntent(
    val onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
    val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
) {
  // Partial functions have no useful string form; interpolator dropped (no-op `s"..."`).
  override def toString: String = "ServerIntent(...)"
}
object ServerIntent {

  /** Builds an intent from both a request handler and a notification handler. */
  def apply(
      onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
      onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
  ): ServerIntent =
    new ServerIntent(onRequest, onNotification)

  /** Intent that reacts to requests only; notifications fall through. */
  def request(onRequest: PartialFunction[JsonRpcRequestMessage, Unit]): ServerIntent =
    apply(onRequest, PartialFunction.empty)

  /** Intent that reacts to notifications only; requests fall through. */
  def notify(onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]): ServerIntent =
    apply(PartialFunction.empty, onNotification)
}
/**
 * Callback surface through which a server handler talks back over JSON-RPC:
 * sending responses, errors, and notifications, and queueing commands.
 */
trait ServerCallback {
  /** Sends a JSON-RPC response carrying `event`, correlated with the id `execId`. */
  def jsonRpcRespond[A: JsonFormat](event: A, execId: Option[String]): Unit

  /** Sends a JSON-RPC error response for `execId` with the given error code and message. */
  def jsonRpcRespondError(execId: Option[String], code: Long, message: String): Unit

  /** Sends a JSON-RPC notification (no response expected) for `method` with `params`. */
  def jsonRpcNotify[A: JsonFormat](method: String, params: A): Unit

  /** Queues a command for execution; returns whether it was accepted. */
  def appendExec(exec: Exec): Boolean

  /** Logger available to handler implementations. */
  def log: Logger

  /** Name identifying this callback/channel. */
  def name: String

  // NOTE(review): the members below are sbt-internal plumbing for server
  // authentication, initialization, and setting queries; semantics are
  // inferred from the names — confirm against the server implementation.
  private[sbt] def authOptions: Set[ServerAuthentication]
  private[sbt] def authenticate(token: String): Boolean
  private[sbt] def setInitialized(value: Boolean): Unit
  private[sbt] def onSettingQuery(execId: Option[String], req: Q): Unit
}

View File

@ -10,47 +10,57 @@ package xsbt
import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter }
import java.net.{ InetAddress, ServerSocket, Socket }
import scala.annotation.tailrec
import scala.util.control.NonFatal
object IPC {
private val portMin = 1025
private val portMax = 65536
private val loopback = InetAddress.getByName(null) // loopback
private val loopback = InetAddress.getByName(null)
def client[T](port: Int)(f: IPC => T): T =
ipc(new Socket(loopback, port))(f)
def client[T](port: Int)(f: IPC => T): T = ipc(new Socket(loopback, port))(f)
def pullServer[T](f: Server => T): T = {
val server = makeServer
try { f(new Server(server)) } finally { server.close() }
try f(new Server(server))
finally server.close()
}
def unmanagedServer: Server = new Server(makeServer)
def makeServer: ServerSocket = {
val random = new java.util.Random
def nextPort = random.nextInt(portMax - portMin + 1) + portMin
def createServer(attempts: Int): ServerSocket =
if (attempts > 0)
try { new ServerSocket(nextPort, 1, loopback) } catch {
case NonFatal(_) => createServer(attempts - 1)
} else
sys.error("Could not connect to socket: maximum attempts exceeded")
if (attempts > 0) {
try new ServerSocket(nextPort, 1, loopback)
catch { case NonFatal(_) => createServer(attempts - 1) }
} else sys.error("Could not connect to socket: maximum attempts exceeded")
createServer(10)
}
def server[T](f: IPC => Option[T]): T = serverImpl(makeServer, f)
def server[T](port: Int)(f: IPC => Option[T]): T =
serverImpl(new ServerSocket(port, 1, loopback), f)
private def serverImpl[T](server: ServerSocket, f: IPC => Option[T]): T = {
def listen(): T = {
@tailrec def listen(): T = {
ipc(server.accept())(f) match {
case Some(done) => done
case None => listen()
}
}
try { listen() } finally { server.close() }
try listen()
finally server.close()
}
private def ipc[T](s: Socket)(f: IPC => T): T =
try { f(new IPC(s)) } finally { s.close() }
try f(new IPC(s))
finally s.close()
final class Server private[IPC] (s: ServerSocket) {
def port = s.getLocalPort
@ -59,6 +69,7 @@ object IPC {
def connection[T](f: IPC => T): T = IPC.ipc(s.accept())(f)
}
}
final class IPC private (s: Socket) {
def port = s.getLocalPort
private val in = new BufferedReader(new InputStreamReader(s.getInputStream))

View File

@ -18,12 +18,14 @@ import sbt.io.{ AllPassFilter, NothingFilter }
object Append {
@implicitNotFound(
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Value[A, B] {
def appendValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Values[A, -B] {
def appendValues(a: A, b: B): A
}

View File

@ -27,42 +27,60 @@ object Def extends Init[Scope] with TaskMacroExtra {
val resolvedScoped = SettingKey[ScopedKey[_]](
"resolved-scoped",
"The ScopedKey for the referencing setting or task.",
KeyRanks.DSetting)
KeyRanks.DSetting
)
private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]](
"task-definition-key",
"Internal: used to map a task back to its ScopedKey.",
Invisible)
Invisible
)
lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None)
def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] =
Show[ScopedKey[_]]((key: ScopedKey[_]) => displayFull(key, keyNameColor))
@deprecated("Use showRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showRelativeKey(
current: ProjectRef,
multi: Boolean,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayRelative(current, multi, ref)
))
showRelativeKey2(current, keyNameColor)
def showBuildRelativeKey(
currentBuild: URI,
multi: Boolean,
keyNameColor: Option[String] = None
def showRelativeKey2(
current: ProjectRef,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayBuildRelative(currentBuild, multi, ref)
))
ref => displayRelative2(current, ref)
)
)
@deprecated("Use showBuildRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showBuildRelativeKey(
currentBuild: URI,
multi: Boolean,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
showBuildRelativeKey2(currentBuild, keyNameColor)
def showBuildRelativeKey2(
currentBuild: URI,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayBuildRelative(currentBuild, ref)
)
)
/**
* Returns a String expression for the given [[Reference]] (BuildRef, [[ProjectRef]], etc)
@ -71,17 +89,22 @@ object Def extends Init[Scope] with TaskMacroExtra {
def displayRelativeReference(current: ProjectRef, project: Reference): String =
displayRelative(current, project, false)
@deprecated("Use displayRelativeReference", "1.1.0")
@deprecated("Use displayRelative2 which doesn't take the unused multi param", "1.1.1")
def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String =
displayRelative2(current, project)
def displayRelative2(current: ProjectRef, project: Reference): String =
displayRelative(current, project, true)
/**
* Constructs the String of a given [[Reference]] relative to current.
* Note that this no longer takes "multi" parameter, and omits the subproject id at all times.
*/
private[sbt] def displayRelative(current: ProjectRef,
project: Reference,
trailingSlash: Boolean): String = {
private[sbt] def displayRelative(
current: ProjectRef,
project: Reference,
trailingSlash: Boolean
): String = {
val trailing = if (trailingSlash) " /" else ""
project match {
case BuildRef(current.build) => "ThisBuild" + trailing
@ -91,7 +114,11 @@ object Def extends Init[Scope] with TaskMacroExtra {
}
}
@deprecated("Use variant without multi", "1.1.1")
def displayBuildRelative(currentBuild: URI, multi: Boolean, project: Reference): String =
displayBuildRelative(currentBuild, project)
def displayBuildRelative(currentBuild: URI, project: Reference): String =
project match {
case BuildRef(`currentBuild`) => "ThisBuild /"
case ProjectRef(`currentBuild`, x) => x + " /"
@ -124,11 +151,14 @@ object Def extends Init[Scope] with TaskMacroExtra {
else None) orElse
s.dependencies
.find(k => k.scope != ThisScope)
.map(k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}")
.map(
k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}"
)
override def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (s2 == GlobalScope) Some(s1) // s1 is more specific
else if (s1 == GlobalScope) Some(s2) // s2 is more specific
else super.intersect(s1, s2)
@ -173,16 +203,31 @@ object Def extends Init[Scope] with TaskMacroExtra {
// The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to
// be used in task and setting macros as inputs with an ultimate result of type T
implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ???
implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ???
implicit def macroValueIInT[T](in: Initialize[InputTask[T]]): InputEvaluated[T] = ???
implicit def taskMacroValueIT[T](in: Initialize[Task[T]]): MacroTaskValue[T] = ???
implicit def macroPrevious[T](in: TaskKey[T]): MacroPrevious[T] = ???
implicit def macroValueI[T](@deprecated("unused", "") in: Initialize[T]): MacroValue[T] = ???
// The following conversions enable the types Parser[T], Initialize[Parser[T]], and Initialize[State => Parser[T]] to
// be used in the inputTask macro as an input with an ultimate result of type T
implicit def parserInitToInput[T](p: Initialize[Parser[T]]): ParserInput[T] = ???
implicit def parserInitStateToInput[T](p: Initialize[State => Parser[T]]): ParserInput[T] = ???
implicit def macroValueIT[T](@deprecated("unused", "") in: Initialize[Task[T]]): MacroValue[T] =
???
implicit def macroValueIInT[T](
@deprecated("unused", "") in: Initialize[InputTask[T]]
): InputEvaluated[T] = ???
implicit def taskMacroValueIT[T](
@deprecated("unused", "") in: Initialize[Task[T]]
): MacroTaskValue[T] = ???
implicit def macroPrevious[T](@deprecated("unused", "") in: TaskKey[T]): MacroPrevious[T] = ???
// The following conversions enable the types Parser[T], Initialize[Parser[T]], and
// Initialize[State => Parser[T]] to be used in the inputTask macro as an input with an ultimate
// result of type T
implicit def parserInitToInput[T](
@deprecated("unused", "") p: Initialize[Parser[T]]
): ParserInput[T] = ???
implicit def parserInitStateToInput[T](
@deprecated("unused", "") p: Initialize[State => Parser[T]]
): ParserInput[T] = ???
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T]
def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T]
@ -190,27 +235,43 @@ object Def extends Init[Scope] with TaskMacroExtra {
private[sbt] def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) =
(TaskKey[T](name, description, DTask), dummyTask(name))
private[sbt] def dummyTask[T](name: String): Task[T] = {
import std.TaskExtra.{ task => newTask, _ }
val base: Task[T] = newTask(
sys.error("Dummy task '" + name + "' did not get converted to a full task.")) named name
sys.error("Dummy task '" + name + "' did not get converted to a full task.")
) named name
base.copy(info = base.info.set(isDummyTask, true))
}
private[sbt] def isDummy(t: Task[_]): Boolean =
t.info.attributes.get(isDummyTask) getOrElse false
private[sbt] val isDummyTask = AttributeKey[Boolean](
"is-dummy-task",
"Internal: used to identify dummy tasks. sbt injects values for these tasks at the start of task execution.",
Invisible)
Invisible
)
private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.")
private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]](
"streams-manager",
"Streams manager, which provides streams for different contexts.")
"Streams manager, which provides streams for different contexts."
)
}
// these need to be mixed into the sbt package object because the target doesn't involve Initialize or anything in Def
// these need to be mixed into the sbt package object
// because the target doesn't involve Initialize or anything in Def
trait TaskMacroExtra {
implicit def macroValueT[T](in: Task[T]): std.MacroValue[T] = ???
implicit def macroValueIn[T](in: InputTask[T]): std.InputEvaluated[T] = ???
implicit def parserToInput[T](in: Parser[T]): std.ParserInput[T] = ???
implicit def stateParserToInput[T](in: State => Parser[T]): std.ParserInput[T] = ???
implicit def macroValueT[T](@deprecated("unused", "") in: Task[T]): std.MacroValue[T] = ???
implicit def macroValueIn[T](@deprecated("unused", "") in: InputTask[T]): std.InputEvaluated[T] =
???
implicit def parserToInput[T](@deprecated("unused", "") in: Parser[T]): std.ParserInput[T] = ???
implicit def stateParserToInput[T](
@deprecated("unused", "") in: State => Parser[T]
): std.ParserInput[T] = ???
}

View File

@ -26,6 +26,8 @@ private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) exte
case None => Select(conf) :: Zero :: Nil
}
}
private final class ProjectDelegates(val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]])
private final class ProjectDelegates(
val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]]
)

View File

@ -22,13 +22,15 @@ final class InputTask[T] private (val parser: State => Parser[Task[T]]) {
new InputTask[T](s => Parser(parser(s))(in))
def fullInput(in: String): InputTask[T] =
new InputTask[T](s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
})
new InputTask[T](
s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
}
)
}
object InputTask {
@ -38,19 +40,28 @@ object InputTask {
import std.FullInstance._
def toTask(in: String): Initialize[Task[T]] = flatten(
(Def.stateKey zipWith i)((sTask, it) =>
sTask map (s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}))
(Def.stateKey zipWith i)(
(sTask, it) =>
sTask map (
s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}
)
)
)
}
implicit def inputTaskParsed[T](in: InputTask[T]): std.ParserInputTask[T] = ???
implicit def inputTaskInitParsed[T](in: Initialize[InputTask[T]]): std.ParserInputTask[T] = ???
implicit def inputTaskParsed[T](
@deprecated("unused", "") in: InputTask[T]
): std.ParserInputTask[T] = ???
implicit def inputTaskInitParsed[T](
@deprecated("unused", "") in: Initialize[InputTask[T]]
): std.ParserInputTask[T] = ???
def make[T](p: State => Parser[Task[T]]): InputTask[T] = new InputTask[T](p)
@ -62,12 +73,14 @@ object InputTask {
def free[I, T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c)
def separate[I, T](p: State => Parser[I])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: State => Parser[I]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
separate(Def value p)(action)
def separate[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
p.zipWith(action)((parser, act) => free(parser)(act))
/** Constructs an InputTask that accepts no user input. */
@ -81,8 +94,9 @@ object InputTask {
* a) a Parser constructed using other Settings, but not Tasks
* b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing.
*/
def createDyn[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
def createDyn[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
separate(p)(std.FullInstance.flattenFun[I, T](action))
/** A dummy parser that consumes no input and produces nothing useful (unit).*/
@ -98,8 +112,9 @@ object InputTask {
i(Types.const)
@deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0")
def apply[I, T](p: Initialize[State => Parser[I]])(
action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
def apply[I, T](
p: Initialize[State => Parser[I]]
)(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
val dummyKey = localKey[Task[I]]
val (marker, dummy) = dummyTask[I]
val it = action(TaskKey(dummyKey)) mapConstant subResultForDummy(dummyKey, dummy)
@ -136,9 +151,11 @@ object InputTask {
(key, t)
}
private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]],
value: I,
task: Task[T]): Task[T] = {
private[this] def subForDummy[I, T](
marker: AttributeKey[Option[I]],
value: I,
task: Task[T]
): Task[T] = {
val seen = new java.util.IdentityHashMap[Task[_], Task[_]]
lazy val f: Task ~> Task = new (Task ~> Task) {
def apply[A](t: Task[A]): Task[A] = {

View File

@ -53,11 +53,13 @@ object Previous {
private[sbt] val references = SettingKey[References](
"previous-references",
"Collects all static references to previous values of tasks.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
private[sbt] val cache = TaskKey[Previous](
"previous-cache",
"Caches previous values of tasks read from disk for the duration of a task execution.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
/** Records references to previous task value. This should be completely populated after settings finish loading. */
private[sbt] final class References {
@ -72,9 +74,11 @@ object Previous {
}
/** Persists values of tasks t where there is some task referencing it via t.previous. */
private[sbt] def complete(referenced: References,
results: RMap[Task, Result],
streams: Streams): Unit = {
private[sbt] def complete(
referenced: References,
results: RMap[Task, Result],
streams: Streams
): Unit = {
val map = referenced.getReferences
def impl[T](key: ScopedKey[_], result: T): Unit =
for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) {

View File

@ -11,12 +11,14 @@ import scala.annotation.implicitNotFound
object Remove {
@implicitNotFound(
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Value[A, B] extends Any {
def removeValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Values[A, -B] extends Any {
def removeValues(a: A, b: B): A
}

View File

@ -13,10 +13,12 @@ import sbt.internal.util.{ AttributeKey, AttributeMap, Dag }
import sbt.io.IO
final case class Scope(project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]) {
final case class Scope(
project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]
) {
def in(project: Reference, config: ConfigKey): Scope =
copy(project = Select(project), config = Select(config))
def in(config: ConfigKey, task: AttributeKey[_]): Scope =
@ -106,17 +108,21 @@ object Scope {
else
IO.directoryURI(current resolve uri)
def resolveReference(current: URI,
rootProject: URI => String,
ref: Reference): ResolvedReference =
def resolveReference(
current: URI,
rootProject: URI => String,
ref: Reference
): ResolvedReference =
ref match {
case br: BuildReference => resolveBuildRef(current, br)
case pr: ProjectReference => resolveProjectRef(current, rootProject, pr)
}
def resolveProjectRef(current: URI,
rootProject: URI => String,
ref: ProjectReference): ProjectRef =
def resolveProjectRef(
current: URI,
rootProject: URI => String,
ref: ProjectReference
): ProjectRef =
ref match {
case LocalRootProject => ProjectRef(current, rootProject(current))
case LocalProject(id) => ProjectRef(current, id)
@ -164,10 +170,12 @@ object Scope {
def displayMasked(scope: Scope, sep: String, mask: ScopeMask, showZeroConfig: Boolean): String =
displayMasked(scope, sep, showProject, mask, showZeroConfig)
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask): String =
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask
): String =
displayMasked(scope, sep, showProject, mask, false)
/**
@ -177,11 +185,13 @@ object Scope {
* Technically speaking an unspecified configuration axis defaults to
* the scope delegation (first configuration defining the key, then Zero).
*/
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean): String = {
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean
): String = {
import scope.{ project, config, task, extra }
val zeroConfig = if (showZeroConfig) "Zero /" else ""
val configPrefix = config.foldStrict(display, zeroConfig, "./")
@ -190,57 +200,68 @@ object Scope {
val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")")
if (scope == GlobalScope) "Global / " + sep + postfix
else
mask.concatShow(appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix)
mask.concatShow(
appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix
)
}
private[sbt] def appendSpace(s: String): String =
if (s == "") ""
else s + " "
// sbt 0.12 style
def display012StyleMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask): String = {
import scope.{ project, config, task, extra }
val configPrefix = config.foldStrict(displayConfigKey012Style, "*:", ".:")
val taskPrefix = task.foldStrict(_.label + "::", "", ".::")
val extras = extra.foldStrict(_.entries.map(_.toString).toList, Nil, Nil)
val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")")
mask.concatShow(projectPrefix012Style(project, showProject012Style),
configPrefix,
taskPrefix,
sep,
postfix)
}
def equal(a: Scope, b: Scope, mask: ScopeMask): Boolean =
(!mask.project || a.project == b.project) &&
(!mask.config || a.config == b.config) &&
(!mask.task || a.task == b.task) &&
(!mask.extra || a.extra == b.extra)
def projectPrefix(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "Zero /", "./")
def projectPrefix012Style(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix012Style(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "*/", "./")
def showProject = (ref: Reference) => Reference.display(ref) + " /"
def showProject012Style = (ref: Reference) => Reference.display(ref) + "/"
@deprecated("No longer used", "1.1.3")
def transformTaskName(s: String) = {
val parts = s.split("-+")
(parts.take(1) ++ parts.drop(1).map(_.capitalize)).mkString
}
@deprecated("Use variant without extraInherit", "1.1.1")
def delegates[Proj](
refs: Seq[(ProjectRef, Proj)],
configurations: Proj => Seq[ConfigKey],
resolve: Reference => ResolvedReference,
rootProject: URI => String,
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
): Scope => Seq[Scope] =
delegates(
refs,
configurations,
resolve,
rootProject,
projectInherit,
configInherit,
taskInherit,
)
// *Inherit functions should be immediate delegates and not include argument itself. Transitivity will be provided by this method
def delegates[Proj](
refs: Seq[(ProjectRef, Proj)],
@ -250,19 +271,27 @@ object Scope {
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
): Scope => Seq[Scope] = {
val index = delegates(refs, configurations, projectInherit, configInherit)
scope =>
indexedDelegates(resolve, index, rootProject, taskInherit, extraInherit)(scope)
indexedDelegates(resolve, index, rootProject, taskInherit)(scope)
}
@deprecated("Use variant without extraInherit", "1.1.1")
def indexedDelegates(
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
)(rawScope: Scope): Seq[Scope] =
indexedDelegates(resolve, index, rootProject, taskInherit)(rawScope)
def indexedDelegates(
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
)(rawScope: Scope): Seq[Scope] = {
val scope = Scope.replaceThis(GlobalScope)(rawScope)
@ -319,27 +348,32 @@ object Scope {
}
private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])(
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = {
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]
): ProjectDelegates = {
val refDelegates = withRawBuilds(linearize(Select(ref), false)(projectInherit))
val configs = confs map { c =>
axisDelegates(configInherit, ref, c)
}
new ProjectDelegates(ref, refDelegates, configs.toMap)
}
def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T): (T, Seq[ScopeAxis[T]]) =
def axisDelegates[T](
direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T
): (T, Seq[ScopeAxis[T]]) =
(init, linearize(Select(init))(direct(ref, _)))
def linearize[T](axis: ScopeAxis[T], appendZero: Boolean = true)(
inherit: T => Seq[T]): Seq[ScopeAxis[T]] =
inherit: T => Seq[T]
): Seq[ScopeAxis[T]] =
axis match {
case Select(x) => topologicalSort[T](x, appendZero)(inherit)
case Zero | This => if (appendZero) Zero :: Nil else Nil
}
def topologicalSort[T](node: T, appendZero: Boolean)(
dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = {
dependencies: T => Seq[T]
): Seq[ScopeAxis[T]] = {
val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply)
if (appendZero) o ::: Zero :: Nil
else o

View File

@ -17,7 +17,18 @@ import sbt.Def.{ Initialize, KeyedInitialize, ScopedKey, Setting, setting }
import std.TaskExtra.{ task => mktask, _ }
/** An abstraction on top of Settings for build configuration and task definition. */
sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] }
sealed trait Scoped extends Equals {
def scope: Scope
val key: AttributeKey[_]
override def equals(that: Any) =
(this eq that.asInstanceOf[AnyRef]) || (that match {
case that: Scoped => scope == that.scope && key == that.key && canEqual(that)
case _ => false
})
override def hashCode() = (scope, key).##
}
/** A common type for SettingKey and TaskKey so that both can be used as inputs to tasks.*/
sealed trait ScopedTaskable[T] extends Scoped {
@ -95,6 +106,8 @@ sealed abstract class SettingKey[T]
final def withRank(rank: Int): SettingKey[T] =
SettingKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[SettingKey[_]]
}
/**
@ -163,6 +176,8 @@ sealed abstract class TaskKey[T]
final def withRank(rank: Int): TaskKey[T] =
TaskKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[TaskKey[_]]
}
/**
@ -195,6 +210,8 @@ sealed trait InputKey[T]
final def withRank(rank: Int): InputKey[T] =
InputKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[InputKey[_]]
}
/** Methods and types related to constructing settings, including keys, scopes, and initializations. */
@ -320,10 +337,14 @@ object Scoped {
def transform(f: S => S, source: SourcePosition): Setting[Task[S]] =
set(scopedKey(_ map f), source)
@deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2")
@deprecated(
"No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2"
)
def task: SettingKey[Task[S]] = scopedSetting(scope, key)
def toSettingKey: SettingKey[Task[S]] = scopedSetting(scope, key)
def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key)
def ? : Initialize[Task[Option[S]]] = Def.optional(scopedKey) {
@ -336,6 +357,11 @@ object Scoped {
(this.? zipWith i)((x, y) => (x, y) map { case (a, b) => a getOrElse b })
}
/** Enriches `Initialize[Task[S]]` types.
*
* @param i the original `Initialize[Task[S]]` value to enrich
* @tparam S the type of the underlying value
*/
final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f
@ -365,22 +391,36 @@ object Scoped {
}
}
/** Enriches `Initialize[InputTask[S]]` types.
*
* @param i the original `Initialize[InputTask[S]]` value to enrich
* @tparam S the type of the underlying value
*/
final class RichInitializeInputTask[S](i: Initialize[InputTask[S]])
extends RichInitTaskBase[S, InputTask] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f)
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = {
import TupleSyntax._
(i, Initialize.joinAny[Task](tasks))((thisTask, deps) =>
thisTask.mapTask(_.dependsOn(deps: _*)))
(i, Initialize.joinAny[Task](tasks))(
(thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*))
)
}
}
/** Enriches `Initialize[R[S]]` types. Abstracts over the specific task-like type constructor.
*
* @tparam S the type of the underlying vault
* @tparam R the task-like type constructor (either Task or InputTask)
*/
sealed abstract class RichInitTaskBase[S, R[_]] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[R[T]]
def flatMap[T](f: S => Task[T]): Initialize[R[T]] = flatMapR(f compose successM)
def map[T](f: S => T): Initialize[R[T]] = mapR(f compose successM)
def flatMap[T](f: S => Task[T]): Initialize[R[T]] =
onTask(_.result flatMap (f compose successM))
def map[T](f: S => T): Initialize[R[T]] = onTask(_.result map (f compose successM))
def andFinally(fin: => Unit): Initialize[R[S]] = onTask(_ andFinally fin)
def doFinally(t: Task[Unit]): Initialize[R[S]] = onTask(_ doFinally t)
@ -392,23 +432,28 @@ object Scoped {
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.",
"0.13.0")
def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_ flatMapR f)
"0.13.0"
)
def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_.result flatMap f)
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `map` on the new task.",
"0.13.0")
def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_ mapR f)
"0.13.0"
)
def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_.result map f)
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.",
"0.13.0")
def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = flatMapR(f compose failM)
"0.13.0"
)
def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] =
onTask(_.result flatMap (f compose failM))
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.",
"0.13.0")
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = mapR(f compose failM)
"0.13.0"
)
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = onTask(_.result map (f compose failM))
}
type AnyInitTask = Initialize[Task[T]] forSome { type T }
@ -565,7 +610,7 @@ object Scoped {
/** The sbt 0.10 style DSL was deprecated in 0.13.13, favouring the use of the '.value' macro.
*
* See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html for how to migrate.
* See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html#Migrating+from+sbt+0.12+style for how to migrate.
*/
trait TupleSyntax {
import Scoped._
@ -628,7 +673,7 @@ object InputKey {
apply(AttributeKey[InputTask[T]](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[InputTask[T]]): InputKey[T] =
new InputKey[T] { val key = akey; def scope = Scope.ThisScope }
Scoped.scopedInput(Scope.ThisScope, akey)
}
/** Constructs TaskKeys, which are associated with tasks to define a setting.*/
@ -657,8 +702,7 @@ object TaskKey {
): TaskKey[T] =
apply(AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] =
new TaskKey[T] { val key = akey; def scope = Scope.ThisScope }
def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = Scoped.scopedTask(Scope.ThisScope, akey)
def local[T: Manifest]: TaskKey[T] = apply[T](AttributeKey.local[Task[T]])
}
@ -689,8 +733,7 @@ object SettingKey {
): SettingKey[T] =
apply(AttributeKey[T](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[T]): SettingKey[T] =
new SettingKey[T] { val key = akey; def scope = Scope.ThisScope }
def apply[T](akey: AttributeKey[T]): SettingKey[T] = Scoped.scopedSetting(Scope.ThisScope, akey)
def local[T: Manifest: OptJsonWriter]: SettingKey[T] = apply[T](AttributeKey.local[T])
}

View File

@ -8,11 +8,11 @@
package sbt
package std
import reflect.macros._
import scala.reflect.macros._
import Def.Initialize
import sbt.internal.util.complete.Parser
import sbt.internal.util.appmacro.{ Convert, Converted }
import Def.Initialize
object InputInitConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
@ -46,14 +46,13 @@ object TaskConvert extends Convert {
/** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/
object FullConvert extends Convert {
import InputWrapper._
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case WrapInitTaskName => Converted.Success[c.type](in)
case WrapPreviousName => Converted.Success[c.type](in)
case WrapInitName => wrapInit[T](c)(in)
case WrapTaskName => wrapTask[T](c)(in)
case _ => Converted.NotApplicable[c.type]
case InputWrapper.WrapInitTaskName => Converted.Success[c.type](in)
case InputWrapper.WrapPreviousName => Converted.Success[c.type](in)
case InputWrapper.WrapInitName => wrapInit[T](c)(in)
case InputWrapper.WrapTaskName => wrapTask[T](c)(in)
case _ => Converted.NotApplicable[c.type]
}
private def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = {

View File

@ -8,9 +8,10 @@
package sbt
package std
import language.experimental.macros
import reflect.macros._
import reflect.internal.annotations.compileTimeOnly
import scala.language.experimental.macros
import scala.annotation.compileTimeOnly
import scala.reflect.macros._
import Def.Initialize
import sbt.internal.util.appmacro.ContextUtil
@ -30,28 +31,34 @@ object InputWrapper {
private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603"
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
def wrapTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
def wrapInit_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def wrapInit_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
def wrapInitTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapInitTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
def wrapInputTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
def wrapInitInputTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInitInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.")
def wrapPrevious_\u2603\u2603[T](in: Any): T = implDetailError
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask."
)
def wrapPrevious_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
private[this] def implDetailError =
sys.error("This method is an implementation detail and should not be referenced.")
@ -160,11 +167,12 @@ object InputWrapper {
}
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value*/
def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
def previousMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
import c.universe._
c.macroApplication match {
case a @ Apply(Select(Apply(_, t :: Nil), tp), fmt) =>
case a @ Apply(Select(Apply(_, t :: Nil), _), _) =>
if (t.tpe <:< c.weakTypeOf[TaskKey[T]]) {
val tsTyped = c.Expr[TaskKey[T]](t)
val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) }
@ -181,35 +189,42 @@ object InputWrapper {
sealed abstract class MacroTaskValue[T] {
@compileTimeOnly(
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.")
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting."
)
def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T]
}
sealed abstract class MacroValue[T] {
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def value: T = macro InputWrapper.valueMacroImpl[T]
}
sealed abstract class ParserInput[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: T = macro ParserInput.parsedMacroImpl[T]
}
sealed abstract class InputEvaluated[T] {
@compileTimeOnly(
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask.")
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask."
)
def evaluated: T = macro InputWrapper.valueMacroImpl[T]
@compileTimeOnly(
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask.")
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask."
)
def inputTaskValue: InputTask[T] = macro InputWrapper.inputTaskValueMacroImpl[T]
}
sealed abstract class ParserInputTask[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: Task[T] = macro ParserInput.parsedInputMacroImpl[T]
}
sealed abstract class MacroPrevious[T] {
@compileTimeOnly(
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.")
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task."
)
def previous(implicit format: sjsonnew.JsonFormat[T]): Option[T] =
macro InputWrapper.previousMacroImpl[T]
}
@ -223,24 +238,29 @@ object ParserInput {
private[std] val WrapInitName = "initParser_\u2603\u2603"
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
def parser_\u2603\u2603[T](i: Any): T =
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
def initParser_\u2603\u2603[T](i: Any): T =
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def initParser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
private[std] def wrap[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrap[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapName)(ts, pos)
private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrapInit[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapInitName)(ts, pos)
private[std] def inputParser[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
private[std] def inputParser[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
c.universe.reify(t.splice.parser)
def parsedInputMacroImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Task[T]] =
@ -260,8 +280,9 @@ object ParserInput {
wrap[Task[T]](c)(inputParser(c)(e), pos)
}
private def wrapInitInputTask[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree,
pos: c.Position) = {
private def wrapInitInputTask[T: c.WeakTypeTag](
c: blackbox.Context
)(tree: c.Tree, pos: c.Position) = {
val e = c.Expr[Initialize[InputTask[T]]](tree)
wrapInit[Task[T]](c)(c.universe.reify { Def.toIParser(e.splice) }, pos)
}

View File

@ -14,18 +14,21 @@ import scala.reflect.macros._
import sbt.util.OptJsonWriter
private[sbt] object KeyMacro {
def settingKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[SettingKey[T]] =
def settingKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[SettingKey[T]] =
keyImpl2[T, SettingKey[T]](c) { (name, mf, ojw) =>
c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice, ojw.splice) }
}
def taskKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[TaskKey[T]] =
def taskKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[TaskKey[T]] =
keyImpl[T, TaskKey[T]](c) { (name, mf) =>
c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) }
}
def inputKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[InputKey[T]] =
def inputKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[InputKey[T]] =
keyImpl[T, InputKey[T]](c) { (name, mf) =>
c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) }
}
@ -45,7 +48,8 @@ private[sbt] object KeyMacro {
val enclosingValName = definingValName(
c,
methodName =>
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""")
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`."""
)
c.Expr[String](Literal(Constant(enclosingValName)))
}
@ -61,10 +65,10 @@ private[sbt] object KeyMacro {
n.decodedName.toString.trim // trim is not strictly correct, but macros don't expose the API necessary
@tailrec def enclosingVal(trees: List[c.Tree]): String = {
trees match {
case vd @ ValDef(_, name, _, _) :: ts => processName(name)
case ValDef(_, name, _, _) :: _ => processName(name)
case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs)
// lazy val x: X = <methodName> has this form for some reason (only when the explicit type is present, though)
case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: xs if mods.hasFlag(Flag.LAZY) =>
case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: _ if mods.hasFlag(Flag.LAZY) =>
processName(name)
case _ =>
c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decodedName.toString))

View File

@ -46,11 +46,13 @@ object InitializeConvert extends Convert {
Converted.Success(t)
}
private def failTask[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failTask[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task")
private def failPrevious[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failPrevious[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task's previous value.")
}
@ -59,11 +61,14 @@ object SettingMacro {
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
def settingDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}

View File

@ -24,9 +24,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
val isTask = convert.asPredicate(ctx)
class traverser extends Traverser {
private val unchecked = symbolOf[sbt.sbtUnchecked].asClass
private val taskKeyType = typeOf[sbt.TaskKey[_]]
private val settingKeyType = typeOf[sbt.SettingKey[_]]
private val inputKeyType = typeOf[sbt.InputKey[_]]
private val initializeType = typeOf[sbt.Def.Initialize[_]]
private val uncheckedWrappers = MutableSet.empty[Tree]
var insideIf: Boolean = false
var insideAnon: Boolean = false
@ -48,6 +46,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
case _ => exprAtUseSite
}
uncheckedWrappers.add(removedSbtWrapper)
()
}
case _ =>
}
@ -55,8 +54,8 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
}
}
@inline def isKey(tpe: Type): Boolean =
tpe <:< taskKeyType || tpe <:< settingKeyType || tpe <:< inputKeyType
@inline def isKey(tpe: Type): Boolean = isInitialize(tpe)
@inline def isInitialize(tpe: Type): Boolean = tpe <:< initializeType
def detectAndErrorOnKeyMissingValue(i: Ident): Unit = {
if (isKey(i.tpe)) {
@ -65,6 +64,20 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
} else ()
}
def detectAndErrorOnKeyMissingValue(s: Select): Unit = {
if (isKey(s.tpe)) {
val keyName = s.name.decodedName.toString
ctx.error(s.pos, TaskLinterDSLFeedback.missingValueForKey(keyName))
} else ()
}
def detectAndErrorOnKeyMissingValue(a: Apply): Unit = {
if (isInitialize(a.tpe)) {
val expr = "X / y"
ctx.error(a.pos, TaskLinterDSLFeedback.missingValueForInitialize(expr))
} else ()
}
override def traverse(tree: ctx.universe.Tree): Unit = {
tree match {
case ap @ Apply(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
@ -73,7 +86,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
val (qualName, isSettingKey) =
Option(qual.symbol)
.map(sym => (sym.name.decodedName.toString, qual.tpe <:< typeOf[SettingKey[_]]))
.getOrElse((ap.pos.lineContent, false))
.getOrElse((ap.pos.source.lineToString(ap.pos.line - 1), false))
if (!isSettingKey && !shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) {
if (insideIf && !isDynamicTask) {
@ -117,11 +130,15 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
// TODO: Consider using unused names analysis to be able to report on more cases
case ValDef(_, valName, _, rhs) if valName == termNames.WILDCARD =>
rhs match {
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case _ => ()
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case s: Select => detectAndErrorOnKeyMissingValue(s)
case a: Apply => detectAndErrorOnKeyMissingValue(a)
case _ => ()
}
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case _ => ()
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case s: Select => detectAndErrorOnKeyMissingValue(s)
case a: Apply => detectAndErrorOnKeyMissingValue(a)
case _ => ()
}
}
traverseTrees(stmts)
@ -160,14 +177,13 @@ object TaskLinterDSLFeedback {
private final val startGreen = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.GREEN else ""
private final val reset = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.RESET else ""
private final val ProblemHeader = s"${startRed}Problem${reset}"
private final val SolutionHeader = s"${startGreen}Solution${reset}"
private final val ProblemHeader = s"${startRed}problem${reset}"
private final val SolutionHeader = s"${startGreen}solution${reset}"
def useOfValueInsideAnon(task: String) =
s"""${startBold}The evaluation of `$task` inside an anonymous function is prohibited.$reset
|
|${ProblemHeader}: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not.
|
|${SolutionHeader}:
| 1. Make `$task` evaluation explicit outside of the function body if you don't care about its evaluation.
| 2. Use a dynamic task to evaluate `$task` and pass that value as a parameter to an anonymous function.
@ -178,7 +194,6 @@ object TaskLinterDSLFeedback {
|
|${ProblemHeader}: `$task` is inside the if expression of a regular task.
| Regular tasks always evaluate task inside the bodies of if expressions.
|
|${SolutionHeader}:
| 1. If you only want to evaluate it when the if predicate is true or false, use a dynamic task.
| 2. Otherwise, make the static evaluation explicit by evaluating `$task` outside the if expression.
@ -187,8 +202,14 @@ object TaskLinterDSLFeedback {
def missingValueForKey(key: String) =
s"""${startBold}The key `$key` is not being invoked inside the task definition.$reset
|
|${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered.
|
|${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered.
|${SolutionHeader}: Replace `$key` by `$key.value` or remove it if unused.
""".stripMargin
def missingValueForInitialize(expr: String) =
s"""${startBold}The setting/task `$expr` is not being invoked inside the task definition.$reset
|
|${ProblemHeader}: Settings/tasks missing `.value` are not initialized and their dependency is not registered.
|${SolutionHeader}: Replace `$expr` by `($expr).value` or remove it if unused.
""".stripMargin
}

View File

@ -56,9 +56,11 @@ object FullInstance
extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance)
with MonadInstance {
type SS = sbt.internal.util.Settings[Scope]
val settingsData = TaskKey[SS]("settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask)
val settingsData = TaskKey[SS](
"settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask
)
def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = {
import TupleSyntax._
@ -98,29 +100,35 @@ object TaskMacro {
import LinterDSL.{ Empty => EmptyLinter }
def taskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
def taskMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
def taskDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
/** Implementation of := macro for settings. */
def settingAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[T]] = {
def settingAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[T]](assign)
}
/** Implementation of := macro for tasks. */
def taskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
def taskAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[Task[T]]](assign)
@ -130,88 +138,106 @@ object TaskMacro {
// These macros are there just so we can fail old operators like `<<=` and provide useful migration information.
def fakeSettingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
ContextUtil.selectMacroImpl[Setting[T]](c) { (ts, pos) =>
c.abort(pos, assignMigration)
}
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[Initialize[V]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) =>
c.abort(pos, append1Migration)
}
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
vs: c.Expr[Initialize[V]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) =>
c.abort(pos, appendNMigration)
}
@deprecated("unused", "") app: c.Expr[Initialize[T]]
): c.Expr[Setting[T]] =
ContextUtil.selectMacroImpl[Setting[T]](c)((_, pos) => c.abort(pos, assignMigration))
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, append1Migration))
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, appendNMigration))
def fakeItaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
ContextUtil.selectMacroImpl[Setting[Task[T]]](c) { (ts, pos) =>
c.abort(pos, assignMigration)
}
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) =>
c.abort(pos, append1Migration)
}
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
vs: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) =>
c.abort(pos, appendNMigration)
}
@deprecated("unused", "") app: c.Expr[Initialize[Task[T]]]
): c.Expr[Setting[Task[T]]] =
ContextUtil.selectMacroImpl[Setting[Task[T]]](c)((_, pos) => c.abort(pos, assignMigration))
/* Implementations of <<= macro variations for tasks and settings. These just get the source position of the call site.*/
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[Task[V]]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, append1Migration))
def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[Task[V]]])(
@deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, appendNMigration))
// Implementations of <<= macro variations for tasks and settings.
// These just get the source position of the call site.
def itaskAssignPosition[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
settingAssignPosition(c)(app)
def taskAssignPositionT[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
def taskAssignPositionT[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
itaskAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def taskAssignPositionPure[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
def taskAssignPositionPure[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
taskAssignPositionT(c)(c.universe.reify { TaskExtra.constant(app.splice) })
def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
def taskTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
c.Expr[Setting[Task[S]]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
def settingTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
def itaskTransformPosition[S: c.WeakTypeTag](
c: blackbox.Context
)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] =
settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
def settingAssignPosition[T: c.WeakTypeTag](
c: blackbox.Context
)(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
c.Expr[Setting[T]](transformMacroImpl(c)(app.tree)(AssignInitName))
/** Implementation of := macro for tasks. */
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
val init = inputTaskMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[InputTask[T]]](assign)
}
/** Implementation of += macro for tasks. */
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(v)
val append = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of += macro for settings. */
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
import c.universe._
val ttpe = c.weakTypeOf[T]
val typeArgs = ttpe.typeArgs
@ -221,10 +247,11 @@ object TaskMacro {
if typeArgs.nonEmpty && (typeArgs.head weak_<:< c.weakTypeOf[Task[_]])
&& (tpe weak_<:< c.weakTypeOf[Initialize[_]]) =>
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) =>
case Apply(Apply(TypeApply(Select(preT, _), _), _), _) =>
val tree = Apply(
TypeApply(Select(preT, TermName("+=").encodedName), TypeTree(typeArgs.head) :: Nil),
Select(v.tree, TermName("taskValue").encodedName) :: Nil)
Select(v.tree, TermName("taskValue").encodedName) :: Nil
)
c.Expr[Setting[T]](tree)
case x => ContextUtil.unexpectedTree(x)
}
@ -236,73 +263,89 @@ object TaskMacro {
}
/** Implementation of ++= macro for tasks. */
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(vs)
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of ++= macro for settings. */
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(vs)
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[T]](append)
}
/** Implementation of -= macro for tasks. */
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(v)
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of -= macro for settings. */
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(v)
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[T]](remove)
}
/** Implementation of --= macro for tasks. */
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[U](c)(vs)
val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of --= macro for settings. */
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[U](c)(vs)
val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName)
c.Expr[Setting[T]](remove)
}
private[this] def appendMacroImpl(c: blackbox.Context)(init: c.Tree, append: c.Tree)(
newName: String): c.Tree = {
private[this] def appendMacroImpl(
c: blackbox.Context
)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) =>
Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil),
append :: Nil)
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
Apply(
Apply(
TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil
),
append :: Nil
)
case x => ContextUtil.unexpectedTree(x)
}
}
private[this] def removeMacroImpl(c: blackbox.Context)(init: c.Tree, remove: c.Tree)(
newName: String): c.Tree = {
private[this] def removeMacroImpl(
c: blackbox.Context
)(init: c.Tree, remove: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), r) =>
Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil),
r)
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
Apply(
Apply(
TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil
),
remove :: Nil
)
case x => ContextUtil.unexpectedTree(x)
}
}
@ -316,8 +359,10 @@ object TaskMacro {
case Apply(Select(prefix, _), _) => prefix
case x => ContextUtil.unexpectedTree(x)
}
Apply.apply(Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil)
Apply.apply(
Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil
)
}
private[this] def sourcePosition(c: blackbox.Context): c.Expr[SourcePosition] = {
@ -335,7 +380,8 @@ object TaskMacro {
private[this] def settingSource(c: blackbox.Context, path: String, name: String): String = {
@tailrec def inEmptyPackage(s: c.Symbol): Boolean = s != c.universe.NoSymbol && (
s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage(
s.owner)
s.owner
)
)
c.internal.enclosingOwner match {
case ec if !ec.isStatic => name
@ -349,16 +395,19 @@ object TaskMacro {
c.Expr[T](Literal(Constant(t)))
}
def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
def inputTaskMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
inputTaskMacro0[T](c)(t)
def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
def inputTaskDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
inputTaskDynMacro0[T](c)(t)
private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
private[this] def inputTaskMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
iInitializeMacro(c)(t) { et =>
val pt = iParserMacro(c)(et) { pt =>
iTaskMacro(c)(pt)
@ -367,8 +416,8 @@ object TaskMacro {
}
private[this] def iInitializeMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
@ -376,7 +425,8 @@ object TaskMacro {
Instance
.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder, EmptyLinter)(
Left(cond),
inner)
inner
)
}
private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree = {
@ -412,25 +462,29 @@ object TaskMacro {
}
private[this] def iParserMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder, LinterDSL.Empty)(
Left(t),
inner)
inner
)
}
private[this] def iTaskMacro[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Task[T]] =
private[this] def iTaskMacro[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance
.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform)
Instance.idTransform
)
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
import c.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
@ -455,7 +509,8 @@ object TaskMacro {
if (result.isDefined) {
c.error(
qual.pos,
"Implementation restriction: a dynamic InputTask can only have a single input parser.")
"Implementation restriction: a dynamic InputTask can only have a single input parser."
)
EmptyTree
} else {
qual.foreach(checkQual)
@ -514,11 +569,13 @@ object PlainTaskMacro {
def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T]
def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}

View File

@ -0,0 +1,135 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.test
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._
import java.io.File
import sbt.io.IO
import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }
import sbt.{
BuildRef,
LocalProject,
LocalRootProject,
ProjectRef,
Reference,
RootProject,
ThisBuild,
ThisProject
}
import sbt.ConfigKey
import sbt.librarymanagement.syntax._
import sbt.{ InputKey, SettingKey, TaskKey }
import sbt.internal.util.{ AttributeKey, AttributeMap }
object BuildSettingsInstances {
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now..
implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f)))
implicit val arbProjectRef: Arbitrary[ProjectRef] =
Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id))
implicit val arbLocalProject: Arbitrary[LocalProject] =
Arbitrary(arbitrary[String] map LocalProject)
implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_)))
implicit val arbReference: Arbitrary[Reference] = Arbitrary {
Gen.frequency(
96 -> arbitrary[BuildRef],
10271 -> ThisBuild,
325 -> LocalRootProject,
2283 -> arbitrary[ProjectRef],
299 -> ThisProject,
436 -> arbitrary[LocalProject],
1133 -> arbitrary[RootProject],
)
}
implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary {
Gen.frequency(
2 -> const[ConfigKey](Compile),
2 -> const[ConfigKey](Test),
1 -> const[ConfigKey](Runtime),
1 -> const[ConfigKey](IntegrationTest),
1 -> const[ConfigKey](Provided),
)
}
implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] =
Arbitrary(Gen.identifier map (AttributeKey[A](_)))
implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary {
Gen.frequency(
20 -> AttributeMap.empty,
1 -> {
for (name <- Gen.identifier; isModule <- arbitrary[Boolean])
yield
AttributeMap.empty
.put(AttributeKey[String]("name"), name)
.put(AttributeKey[Boolean]("isModule"), isModule)
}
)
}
implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] =
Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_))))
implicit def arbScope: Arbitrary[Scope] = Arbitrary(
for {
r <- arbitrary[ScopeAxis[Reference]]
c <- arbitrary[ScopeAxis[ConfigKey]]
t <- arbitrary[ScopeAxis[AttributeKey[_]]]
e <- arbitrary[ScopeAxis[AttributeMap]]
} yield Scope(r, c, t, e)
)
type Key = K forSome { type K <: Scoped.ScopingSetting[K] with Scoped }
final case class Label(value: String)
val genLabel: Gen[Label] = Gen.identifier map Label
implicit def arbLabel: Arbitrary[Label] = Arbitrary(genLabel)
def genInputKey[A: Manifest]: Gen[InputKey[A]] = genLabel map (x => InputKey[A](x.value))
def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = genLabel map (x => SettingKey[A](x.value))
def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = genLabel map (x => TaskKey[A](x.value))
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] = Arbitrary {
Gen.frequency(
5 -> keyGen,
1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope)
)
}
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A])
implicit def arbKey[A: Manifest](
implicit
arbInputKey: Arbitrary[InputKey[A]],
arbSettingKey: Arbitrary[SettingKey[A]],
arbTaskKey: Arbitrary[TaskKey[A]],
): Arbitrary[Key] = Arbitrary {
def convert[T](g: Gen[T]) = g.asInstanceOf[Gen[Key]]
Gen.frequency(
15431 -> convert(arbitrary[InputKey[A]]),
19645 -> convert(arbitrary[SettingKey[A]]),
22867 -> convert(arbitrary[TaskKey[A]]),
)
}
object WithoutScope {
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A])
}
implicit def arbScoped[A: Manifest]: Arbitrary[Scoped] = Arbitrary(arbitrary[Key])
}

View File

@ -0,0 +1,145 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.test
import org.scalacheck._, Prop._, util.Pretty
import sbt.internal.util.AttributeKey
import sbt.util.NoJsonWriter
import sbt.{ InputTask, Scope, Task }
import sbt.{ InputKey, Scoped, SettingKey, TaskKey }
import BuildSettingsInstances._
/**
 * Property-based checks that sbt keys (SettingKey/TaskKey/InputKey) have
 * structural equality: two keys built from the same label, manifest and scope
 * compare equal, while keys of different flavours never do.
 */
object ScopedSpec extends Properties("Scoped") {
val intManifest = manifest[Int]
val stringManifest = manifest[String]
// Restrict generated manifests to two concrete ones; enough to vary the type
// component of a key without an open-ended Manifest generator.
implicit val arbManifest: Arbitrary[Manifest[_]] =
Arbitrary(Gen.oneOf(intManifest, stringManifest))
property("setting keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = settingKey(label, manifest, scope)
val k2 = settingKey(label, manifest, scope)
expectEq(k1, k2)
}
}
property("task keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = taskKey(label, manifest, scope)
val k2 = taskKey(label, manifest, scope)
expectEq(k1, k2)
}
}
property("input keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = inputKey(label, manifest, scope)
val k2 = inputKey(label, manifest, scope)
expectEq(k1, k2)
}
}
property("different key types are not equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val settingKey1 = settingKey(label, manifest, scope)
val taskKey1 = taskKey(label, manifest, scope)
val inputKey1 = inputKey(label, manifest, scope)
all(
expectNe(settingKey1, taskKey1),
expectNe(settingKey1, inputKey1),
expectNe(taskKey1, inputKey1),
)
}
}
// Builds the key pairs from a *shared* AttributeKey so both sides carry the
// exact same manifest, isolating inequality to the key flavour itself.
property("different key types, with the same manifest, are not equal") = {
forAll { (label: Label, scope: Scope) =>
val prop1 = {
val manifest1 = manifest[Task[String]]
val attrKey = attributeKey(label, manifest1)
val k1 = SettingKey(attrKey) in scope
val k2 = TaskKey(attrKey) in scope
expectNeSameManifest(k1, k2)
}
val prop2 = {
val manifest1 = manifest[InputTask[String]]
val attrKey = attributeKey(label, manifest1)
val k1 = SettingKey(attrKey) in scope
val k2 = InputKey(attrKey) in scope
expectNeSameManifest(k1, k2)
}
all(prop1, prop2)
}
}
/// key construction helpers (explicit manifest / JSON writer arguments)
def settingKey[A](label: Label, manifest: Manifest[A], scope: Scope): SettingKey[A] = {
val noJsonWriter = NoJsonWriter[A]()
SettingKey[A](label.value)(manifest, noJsonWriter) in scope
}
def taskKey[A](label: Label, manifest: Manifest[A], s: Scope): TaskKey[A] =
TaskKey[A](label.value)(manifest) in s
def inputKey[A](label: Label, manifest: Manifest[A], scope: Scope): InputKey[A] =
InputKey[A](label.value)(manifest) in scope
def attributeKey[A](label: Label, manifest: Manifest[A]): AttributeKey[A] = {
val jsonWriter = NoJsonWriter[A]()
AttributeKey[A](label.value)(manifest, jsonWriter)
}
/// Prop assertion helpers
// Equality is checked in both directions to also exercise symmetry; the
// label/manifest/scope breakdown is attached to make failures diagnosable.
def expectEq(k1: Scoped, k2: Scoped): Prop =
?=(k1, k2) && ?=(k2, k1) map eqLabels(k1, k2)
def expectNe(k1: Scoped, k2: Scoped): Prop =
!=(k1, k2) && !=(k2, k1) map eqLabels(k1, k2)
def expectNeSameManifest(k1: Scoped, k2: Scoped) = {
all(
?=(k1.key.manifest, k2.key.manifest), // sanity check the manifests are the same
expectNe(k1, k2),
)
}
// Annotates a Prop.Result with per-component equality so a failing property
// reports which of label/manifest/scope disagreed.
def eqLabels(k1: Scoped, k2: Scoped): Prop.Result => Prop.Result = r => {
val eqLabel = k1.key.label == k2.key.label
val eqManifest = k1.key.manifest == k2.key.manifest
val eqScope = k1.scope == k2.scope
r.label(s"label equality: ${k1.key.label} == ${k2.key.label} : $eqLabel")
.label(s"manifest equality: ${k1.key.manifest} == ${k2.key.manifest} : $eqManifest")
.label(s"scope equality: ${k1.scope} == ${k2.scope} : $eqScope")
}
// Prop that x == y, with a pretty-printed message on failure.
def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if (x == y) proved
else
falsified :| {
val act = Pretty.pretty[T](x, Pretty.Params(0))
val exp = Pretty.pretty[T](y, Pretty.Params(0))
s"Expected $act to be equal to $exp"
}
// Prop that x != y. This is an object-level binary method, not an override of
// Any.!= (which is final); it is invoked prefix-style as !=(a, b).
def !=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if (x == y) falsified
else
proved :| {
val act = Pretty.pretty[T](x, Pretty.Params(0))
val exp = Pretty.pretty[T](y, Pretty.Params(0))
s"Expected $act to NOT be equal to $exp"
}
}

View File

@ -7,290 +7,104 @@
package sbt.test
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._, Prop._
import org.scalacheck.{ Test => _, _ }, Prop._
import java.io.File
import sbt.io.IO
import sbt.SlashSyntax
import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }, Scope.{ Global, ThisScope }
import sbt.{ BuildRef, LocalProject, LocalRootProject, ProjectRef, Reference, RootProject, ThisBuild, ThisProject }
import sbt.{ Scope, ScopeAxis, Scoped }, Scope.{ Global, ThisScope }
import sbt.Reference
import sbt.ConfigKey
import sbt.librarymanagement.syntax._
import sbt.{ InputKey, SettingKey, TaskKey }
import sbt.internal.util.{ AttributeKey, AttributeMap }
import sbt.internal.util.AttributeKey
/**
 * ScalaCheck generators / Arbitrary instances for the sbt build DSL types
 * (references, config keys, setting/task/input keys, scope axes, scopes).
 * Frequency weights carry trailing comments that look like raw occurrence
 * counts from a sampled corpus -- presumably where the ratios come from.
 */
object BuildDSLInstances {
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now..
implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f)))
implicit val arbProjectRef: Arbitrary[ProjectRef] =
Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id))
implicit val arbLocalProject: Arbitrary[LocalProject] =
Arbitrary(arbitrary[String] map LocalProject)
implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_)))
// Bare values like ThisBuild rely on ScalaCheck's implicit freqTuple
// conversion (value -> const generator) brought in by the Gen._ import.
implicit val arbReference: Arbitrary[Reference] = Arbitrary {
Gen.frequency(
1 -> arbitrary[BuildRef], // 96
100 -> ThisBuild, // 10,271
3 -> LocalRootProject, // 325
23 -> arbitrary[ProjectRef], // 2,283
3 -> ThisProject, // 299
4 -> arbitrary[LocalProject], // 436
11 -> arbitrary[RootProject], // 1,133
)
}
implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary {
Gen.frequency(
2 -> const[ConfigKey](Compile),
2 -> const[ConfigKey](Test),
1 -> const[ConfigKey](Runtime),
1 -> const[ConfigKey](IntegrationTest),
1 -> const[ConfigKey](Provided),
)
}
implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] =
Arbitrary(Gen.identifier map (AttributeKey[A](_)))
// Lifts a key generator to an Arbitrary that occasionally (1 in 6) rescopes
// the key with an arbitrary Scope.
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] =
Arbitrary(Gen.frequency(
5 -> keyGen,
1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope)
))
def genInputKey[A: Manifest]: Gen[InputKey[A]] = Gen.identifier map (InputKey[A](_))
def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = Gen.identifier map (SettingKey[A](_))
def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = Gen.identifier map (TaskKey[A](_))
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A])
implicit def arbScoped[A: Manifest](implicit
arbInputKey: Arbitrary[InputKey[A]],
arbSettingKey: Arbitrary[SettingKey[A]],
arbTaskKey: Arbitrary[TaskKey[A]],
): Arbitrary[Scoped] = {
Arbitrary(Gen.frequency(
15 -> arbitrary[InputKey[A]], // 15,431
20 -> arbitrary[SettingKey[A]], // 19,645
23 -> arbitrary[TaskKey[A]], // 22,867
))
}
// Instances that never rescope the generated key; import locally to shadow
// the scoped defaults above.
object WithoutScope {
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A])
}
// A scope axis is This, Zero, or a Select of an arbitrary value.
implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] =
Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_))))
// Mostly-empty attribute maps, with an occasional name/isModule pair.
implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary {
Gen.frequency(
20 -> AttributeMap.empty,
1 -> (for (name <- Gen.identifier; isModule <- arbitrary[Boolean])
yield AttributeMap.empty
.put(AttributeKey[String]("name"), name)
.put(AttributeKey[Boolean]("isModule"), isModule)
)
)
}
// A full Scope is assembled from the four independently generated axes.
implicit def arbScope: Arbitrary[Scope] = Arbitrary(
for {
r <- arbitrary[ScopeAxis[Reference]]
c <- arbitrary[ScopeAxis[ConfigKey]]
t <- arbitrary[ScopeAxis[AttributeKey[_]]]
e <- arbitrary[ScopeAxis[AttributeMap]]
} yield Scope(r, c, t, e)
)
}
import BuildDSLInstances._
/**
 * A minimal equality type class for the tests, so sbt keys can be compared
 * structurally (by scope + underlying attribute key) instead of by the
 * default reference-sensitive equality.
 */
object CustomEquality {
trait Eq[A] {
def equal(x: A, y: A): Boolean
}
// Avoid reimplementing equality for other standard classes.
trait EqualLowPriority {
// Fallback instance: plain universal ==. The explicit Eq[A] result type is
// essential -- it makes the lambda SAM-convert to Eq[A]; without it the
// definition would infer (A, A) => Boolean and never satisfy an implicit
// search for Eq[A].
implicit def universal[A]: Eq[A] = (x: A, y: A) => x == y
}
object Eq extends EqualLowPriority {
def apply[A: Eq]: Eq[A] = implicitly
// Keys compare equal when both their scope and attribute key match.
implicit def eqScoped[A <: Scoped]: Eq[A] = (x, y) => x.scope == y.scope && x.key == y.key
}
// Syntax: x === y (Boolean) and x =? y (a labelled ScalaCheck Prop).
implicit class AnyWith_===[A](private val x: A) extends AnyVal {
def ===(y: A)(implicit z: Eq[A]): Boolean = z.equal(x, y)
def =?(y: A)(implicit z: Eq[A]): Prop = {
if (x === y) proved else falsified :| s"Expected $x but got $y"
}
}
def expectValue[A: Eq](expected: A)(x: A) = expected =? x
}
import CustomEquality._
import BuildSettingsInstances._
/**
 * Properties verifying that the unified slash syntax (r / c / t / key) builds
 * the same scoped key as the legacy `key in ...` forms. "~=" in a property
 * name means the task axis is set from an unscoped key rather than key.key.
 *
 * Cleanup: the merge had left each property with its pre-refactor
 * implementation (a local `def check[K ...]` plus an invocation chain) still
 * present as discarded statements before the final forAll, and a local
 * `type Key[K]` alias shadowing the nullary Key imported from
 * BuildSettingsInstances. Those dead definitions are removed; every
 * property's returned Prop is unchanged.
 */
object SlashSyntaxSpec extends Properties("SlashSyntax") with SlashSyntax {
property("Global / key == key in Global") = {
forAll((k: Key) => expectValue(k in Global)(Global / k))
}
property("Reference / key == key in Reference") = {
forAll((r: Reference, k: Key) => expectValue(k in r)(r / k))
}
property("Reference / Config / key == key in Reference in Config") = {
forAll((r: Reference, c: ConfigKey, k: Key) => expectValue(k in r in c)(r / c / k))
}
property("Reference / task.key / key == key in Reference in task") = {
forAll((r: Reference, t: Scoped, k: Key) => expectValue(k in (r, t))(r / t.key / k))
}
property("Reference / task / key ~= key in Reference in task") = {
// Unscoped instances: the task-axis key must not itself carry a scope.
import WithoutScope._
forAll((r: Reference, t: Key, k: Key) => expectValue(k in (r, t))(r / t / k))
}
property("Reference / Config / task.key / key == key in Reference in Config in task") = {
forAll { (r: Reference, c: ConfigKey, t: Scoped, k: Key) =>
expectValue(k in (r, c, t))(r / c / t.key / k)
}
}
property("Reference / Config / task / key ~= key in Reference in Config in task") = {
import WithoutScope._
forAll { (r: Reference, c: ConfigKey, t: Key, k: Key) =>
expectValue(k in (r, c, t))(r / c / t / k)
}
}
property("Config / key == key in Config") = {
forAll((c: ConfigKey, k: Key) => expectValue(k in c)(c / k))
}
property("Config / task.key / key == key in Config in task") = {
forAll((c: ConfigKey, t: Scoped, k: Key) => expectValue(k in c in t)(c / t.key / k))
}
property("Config / task / key ~= key in Config in task") = {
import WithoutScope._
forAll((c: ConfigKey, t: Key, k: Key) => expectValue(k in c in t)(c / t / k))
}
property("task.key / key == key in task") = {
forAll((t: Scoped, k: Key) => expectValue(k in t)(t.key / k))
}
property("task / key ~= key in task") = {
import WithoutScope._
forAll((t: Key, k: Key) => expectValue(k in t)(t / k))
}
property("Scope / key == key in Scope") = {
forAll((s: Scope, k: Key) => expectValue(k in s)(s / k))
}
property("Reference? / key == key in ThisScope.copy(..)") = {
forAll { (r: ScopeAxis[Reference], k: Key) =>
expectValue(k in ThisScope.copy(project = r))(r / k)
}
}
property("Reference? / ConfigKey? / key == key in ThisScope.copy(..)") = {
forAll(
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k)
)
}
// property("Reference? / AttributeKey? / key == key in ThisScope.copy(..)") = {
//   forAll((r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: AnyKey) =>
//     expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k))
// }
property("Reference? / ConfigKey? / AttributeKey? / key == key in ThisScope.copy(..)") = {
forAll {
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k)
}
}
// Structural comparison: two scoped keys match when scope and key agree.
def expectValue(expected: Scoped)(x: Scoped) = {
val equals = x.scope == expected.scope && x.key == expected.key
if (equals) proved else falsified :| s"Expected $expected but got $x"
}
}

View File

@ -10,12 +10,11 @@ package sbt.std
class TaskPosSpec {
// Dynamic tasks can have task invocations inside if branches
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val bar = taskKey[String]("")
var condition = true
val baz = Def.taskDyn[String] {
val condition = true
Def.taskDyn[String] {
if (condition) foo
else bar
}
@ -23,23 +22,21 @@ class TaskPosSpec {
// Dynamic settings can have setting invocations inside if branches
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val bar = settingKey[String]("")
var condition = true
val baz = Def.settingDyn[String] {
val condition = true
Def.settingDyn[String] {
if (condition) foo
else bar
}
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooAnon = () => foo.value: @sbtUnchecked
if (condition) fooAnon()
else fooAnon()
@ -47,11 +44,10 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooAnon = () => (foo.value: @sbtUnchecked) + ""
if (condition) fooAnon()
else fooAnon()
@ -59,12 +55,11 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val bar = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
if (condition) foo.value: @sbtUnchecked
else bar.value: @sbtUnchecked
}
@ -72,11 +67,10 @@ class TaskPosSpec {
locally {
// This is fix 1 for appearance of tasks inside anons
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooResult = foo.value
val anon = () => fooResult + " "
if (condition) anon()
@ -86,11 +80,10 @@ class TaskPosSpec {
locally {
// This is fix 2 for appearance of tasks inside anons
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.taskDyn[String] {
val condition = true
Def.taskDyn[String] {
val anon1 = (value: String) => value + " "
if (condition) {
Def.task(anon1(foo.value))
@ -100,31 +93,27 @@ class TaskPosSpec {
locally {
// missing .value error should not happen inside task dyn
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val baz = Def.taskDyn[String] {
Def.taskDyn[String] {
foo
}
}
locally {
// missing .value error should not happen inside task dyn
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val avoidDCE = ""
val baz = Def.task[String] {
foo: @sbtUnchecked
Def.task[String] {
val _ = foo: @sbtUnchecked
avoidDCE
}
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val baz = Def.task[String] {
Def.task[String] {
def inner(s: KeyedInitialize[_]) = println(s)
inner(foo)
""
@ -133,11 +122,10 @@ class TaskPosSpec {
locally {
// In theory, this should be reported, but missing .value analysis is dumb at the cost of speed
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
def avoidDCE = { println(""); "" }
val baz = Def.task[String] {
Def.task[String] {
val (_, _) = "" match {
case _ => (foo, 1 + 2)
}
@ -146,15 +134,14 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
def avoidDCE = { println(""); "" }
val baz = Def.task[String] {
def avoidDCE(x: TaskKey[String]) = x.toString
Def.task[String] {
val hehe = foo
// We do not detect `hehe` because guessing that the user did the wrong thing would require
// us to run the unused name traverser defined in Typer (and hence proxy it from context util)
avoidDCE
avoidDCE(hehe)
}
}
@ -168,11 +155,10 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val condition = true
val baz = Def.task[String] {
Def.task[String] {
// settings can be evaluated in a condition
if (condition) foo.value
else "..."
@ -180,10 +166,9 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val baz = Def.task[Seq[String]] {
Def.task[Seq[String]] {
(1 to 10).map(_ => foo.value)
}
}

View File

@ -7,11 +7,9 @@
package sbt.std
import scala.reflect._
import scala.tools.reflect.ToolBox
object TestUtil {
import tools.reflect.ToolBox
def eval(code: String, compileOptions: String = ""): Any = {
val tb = mkToolbox(compileOptions)
tb.eval(tb.parse(code))

View File

@ -7,15 +7,19 @@
package sbt.std.neg
import scala.tools.reflect.ToolBoxError
import org.scalatest.FunSuite
import sbt.std.TaskLinterDSLFeedback
import sbt.std.TestUtil._
class TaskNegSpec extends FunSuite {
import tools.reflect.ToolBoxError
def expectError(errorSnippet: String,
compileOptions: String = "",
baseCompileOptions: String = s"-cp $toolboxClasspath")(code: String) = {
def expectError(
errorSnippet: String,
compileOptions: String = "",
baseCompileOptions: String = s"-cp $toolboxClasspath",
)(code: String) = {
val errorMessage = intercept[ToolBoxError] {
eval(code, s"$compileOptions $baseCompileOptions")
println(s"Test failed -- compilation was successful! Expected:\n$errorSnippet")

View File

@ -0,0 +1,40 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt
// NOTE(review): this file is generated by sbt-contraband ("DO NOT EDIT
// MANUALLY" above); these comments will be lost on regeneration -- lasting
// documentation belongs in the contraband schema (#x / #xtostring pragmas).
final class JavaVersion private (
val numbers: Vector[Long],
val vendor: Option[String]) extends Serializable {
// Dotted numeric part only, without the vendor prefix, e.g. "1.8.0".
def numberStr: String = numbers.mkString(".")
override def equals(o: Any): Boolean = o match {
case x: JavaVersion => (this.numbers == x.numbers) && (this.vendor == x.vendor)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.JavaVersion".##) + numbers.##) + vendor.##)
}
// Renders as "vendor@1.8.0" when a vendor is present, else just "1.8.0".
override def toString: String = {
vendor.map(_ + "@").getOrElse("") + numberStr
}
private[this] def copy(numbers: Vector[Long] = numbers, vendor: Option[String] = vendor): JavaVersion = {
new JavaVersion(numbers, vendor)
}
def withNumbers(numbers: Vector[Long]): JavaVersion = {
copy(numbers = numbers)
}
def withVendor(vendor: Option[String]): JavaVersion = {
copy(vendor = vendor)
}
def withVendor(vendor: String): JavaVersion = {
copy(vendor = Option(vendor))
}
}
object JavaVersion {
// Parses strings such as "openjdk@1.10" via CrossJava (see schema pragma).
def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version)
def apply(numbers: Vector[Long], vendor: Option[String]): JavaVersion = new JavaVersion(numbers, vendor)
def apply(numbers: Vector[Long], vendor: String): JavaVersion = new JavaVersion(numbers, Option(vendor))
}

View File

@ -17,3 +17,13 @@ enum PluginTrigger {
AllRequirements
NoTrigger
}
type JavaVersion {
numbers: [Long]
vendor: String
#x def numberStr: String = numbers.mkString(".")
#xtostring vendor.map(_ + "@").getOrElse("") + numberStr
#xcompanion def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version)
}

View File

@ -23,7 +23,8 @@ abstract class BackgroundJobService extends Closeable {
* then you could process.destroy() for example.
*/
def runInBackground(spawningTask: ScopedKey[_], state: State)(
start: (Logger, File) => Unit): JobHandle
start: (Logger, File) => Unit
): JobHandle
/** Same as shutown. */
def close(): Unit
@ -51,7 +52,8 @@ object BackgroundJobService {
{
val stringIdParser: Parser[Seq[String]] = Space ~> token(
NotSpace examples handles.map(_.id.toString).toSet,
description = "<job id>").+
description = "<job id>"
).+
stringIdParser.map { strings =>
strings.map(Integer.parseInt(_)).flatMap(id => handles.find(_.id == id))
}

View File

@ -17,19 +17,25 @@ object BuildPaths {
val globalBaseDirectory = AttributeKey[File](
"global-base-directory",
"The base directory for global sbt configuration and staging.",
DSetting)
val globalPluginsDirectory = AttributeKey[File]("global-plugins-directory",
"The base directory for global sbt plugins.",
DSetting)
val globalSettingsDirectory = AttributeKey[File]("global-settings-directory",
"The base directory for global sbt settings.",
DSetting)
DSetting
)
val globalPluginsDirectory = AttributeKey[File](
"global-plugins-directory",
"The base directory for global sbt plugins.",
DSetting
)
val globalSettingsDirectory = AttributeKey[File](
"global-settings-directory",
"The base directory for global sbt settings.",
DSetting
)
val stagingDirectory =
AttributeKey[File]("staging-directory", "The directory for staging remote projects.", DSetting)
val dependencyBaseDirectory = AttributeKey[File](
"dependency-base-directory",
"The base directory for caching dependency resolution.",
DSetting)
DSetting
)
val globalZincDirectory =
AttributeKey[File]("global-zinc-directory", "The base directory for Zinc internals.", DSetting)
@ -56,7 +62,8 @@ object BuildPaths {
def getGlobalPluginsDirectory(state: State, globalBase: File): File =
fileSetting(globalPluginsDirectory, GlobalPluginsProperty, defaultGlobalPlugins(globalBase))(
state)
state
)
def getGlobalSettingsDirectory(state: State, globalBase: File): File =
fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state)
@ -70,11 +77,13 @@ object BuildPaths {
fileSetting(globalZincDirectory, GlobalZincProperty, defaultGlobalZinc(globalBase))(state)
private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)(
state: State): File =
state: State
): File =
getFileSetting(stateKey, property, default)(state)
def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)(
state: State): File =
state: State
): File =
state get stateKey orElse getFileProperty(property) getOrElse default
def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap {

View File

@ -11,7 +11,7 @@ import sbt.internal.DslEntry
import sbt.librarymanagement.Configuration
private[sbt] trait BuildSyntax {
import language.experimental.macros
import scala.language.experimental.macros
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T]
def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T]
def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T]

View File

@ -72,8 +72,7 @@ object Cross {
} & spacedFirst(CrossCommand)
}
private def crossRestoreSessionParser(state: State): Parser[String] =
token(CrossRestoreSessionCommand)
private def crossRestoreSessionParser: Parser[String] = token(CrossRestoreSessionCommand)
private[sbt] def requireSession[T](p: State => Parser[T]): State => Parser[T] =
s => if (s get sessionSettings isEmpty) failure("No project loaded") else p(s)
@ -100,14 +99,24 @@ object Cross {
}
/**
* Parse the given command into either an aggregate command or a command for a project
* Parse the given command into a list of aggregate projects and command to issue.
*/
private def parseCommand(command: String): Either[String, (String, String)] = {
private[sbt] def parseSlashCommand(
extracted: Extracted
)(command: String): (Seq[ProjectRef], String) = {
import extracted._
import DefaultParsers._
val parser = (OpOrID <~ charClass(_ == '/', "/")) ~ any.* map {
case project ~ cmd => (project, cmd.mkString)
case seg1 ~ cmd => (seg1, cmd.mkString)
}
Parser.parse(command, parser) match {
case Right((seg1, cmd)) =>
structure.allProjectRefs.find(_.project == seg1) match {
case Some(proj) => (Seq(proj), cmd)
case _ => (resolveAggregates(extracted), command)
}
case _ => (resolveAggregates(extracted), command)
}
Parser.parse(command, parser).left.map(_ => command)
}
def crossBuild: Command =
@ -116,12 +125,7 @@ object Cross {
private def crossBuildCommandImpl(state: State, args: CrossArgs): State = {
val x = Project.extract(state)
import x._
val (aggs, aggCommand) = parseCommand(args.command) match {
case Right((project, cmd)) =>
(structure.allProjectRefs.filter(_.project == project), cmd)
case Left(cmd) => (resolveAggregates(x), cmd)
}
val (aggs, aggCommand) = parseSlashCommand(x)(args.command)
val projCrossVersions = aggs map { proj =>
proj -> crossVersions(x, proj)
@ -151,7 +155,8 @@ object Cross {
"configuration. This could result in subprojects cross building against Scala versions that they are " +
"not compatible with. Try issuing cross building command with tasks instead, since sbt will be able " +
"to ensure that cross building is only done using configured project and Scala version combinations " +
"that are configured.")
"that are configured."
)
state.log.debug("Scala versions configuration is:")
projCrossVersions.foreach {
case (project, versions) => state.log.debug(s"$project: $versions")
@ -175,12 +180,14 @@ object Cross {
case (version, projects) if aggCommand.contains(" ") =>
// If the command contains a space, then the `all` command won't work because it doesn't support issuing
// commands with spaces, so revert to running the command on each project one at a time
s"$SwitchCommand $verbose $version" :: projects.map(project =>
s"$project/$aggCommand")
s"$SwitchCommand $verbose $version" :: projects
.map(project => s"$project/$aggCommand")
case (version, projects) =>
// First switch scala version, then use the all command to run the command on each project concurrently
Seq(s"$SwitchCommand $verbose $version",
projects.map(_ + "/" + aggCommand).mkString("all ", " ", ""))
Seq(
s"$SwitchCommand $verbose $version",
projects.map(_ + "/" + aggCommand).mkString("all ", " ", "")
)
}
}
@ -189,9 +196,11 @@ object Cross {
}
def crossRestoreSession: Command =
Command.arb(crossRestoreSessionParser, crossRestoreSessionHelp)(crossRestoreSessionImpl)
Command.arb(_ => crossRestoreSessionParser, crossRestoreSessionHelp)(
(s, _) => crossRestoreSessionImpl(s)
)
private def crossRestoreSessionImpl(state: State, arg: String): State = {
private def crossRestoreSessionImpl(state: State): State = {
restoreCapturedSession(state, Project.extract(state))
}
@ -216,12 +225,27 @@ object Cross {
Command.arb(requireSession(switchParser), switchHelp)(switchCommandImpl)
private def switchCommandImpl(state: State, args: Switch): State = {
val switchedState = switchScalaVersion(args, state)
val x = Project.extract(state)
val (switchedState, affectedRefs) = switchScalaVersion(args, state)
args.command.toList ::: switchedState
val strictCmd =
if (args.version.force) {
// The Scala version was forced on the whole build, run as is
args.command
} else {
args.command.map { rawCmd =>
val (aggs, aggCommand) = parseSlashCommand(x)(rawCmd)
aggs
.intersect(affectedRefs)
.map({ case ProjectRef(_, proj) => s"$proj/$aggCommand" })
.mkString("all ", " ", "")
}
}
strictCmd.toList ::: switchedState
}
private def switchScalaVersion(switch: Switch, state: State): State = {
private def switchScalaVersion(switch: Switch, state: State): (State, Seq[ResolvedReference]) = {
val extracted = Project.extract(state)
import extracted._
@ -291,7 +315,7 @@ object Cross {
}
}
setScalaVersionForProjects(version, instance, projects, state, extracted)
(setScalaVersionForProjects(version, instance, projects, state, extracted), projects.map(_._1))
}
private def setScalaVersionForProjects(

File diff suppressed because it is too large Load Diff

View File

@ -8,7 +8,7 @@
package sbt
import sbt.internal.{ Load, BuildStructure, TaskTimings, TaskName, GCUtil }
import sbt.internal.util.{ Attributed, ErrorHandling, HList, RMap, Signals, Types }
import sbt.internal.util.{ Attributed, ConsoleAppender, ErrorHandling, HList, RMap, Signals, Types }
import sbt.util.{ Logger, Show }
import sbt.librarymanagement.{ Resolver, UpdateReport }
@ -172,9 +172,11 @@ object EvaluateTask {
val SystemProcessors = Runtime.getRuntime.availableProcessors
def extractedTaskConfig(extracted: Extracted,
structure: BuildStructure,
state: State): EvaluateTaskConfig = {
def extractedTaskConfig(
extracted: Extracted,
structure: BuildStructure,
state: State
): EvaluateTaskConfig = {
val rs = restrictions(extracted, structure)
val canceller = cancelStrategy(extracted, structure, state)
val progress = executeProgress(extracted, structure, state)
@ -193,10 +195,12 @@ object EvaluateTask {
}
def restrictions(extracted: Extracted, structure: BuildStructure): Seq[Tags.Rule] =
getSetting(Keys.concurrentRestrictions,
defaultRestrictions(extracted, structure),
extracted,
structure)
getSetting(
Keys.concurrentRestrictions,
defaultRestrictions(extracted, structure),
extracted,
structure
)
def maxWorkers(extracted: Extracted, structure: BuildStructure): Int =
if (getSetting(Keys.parallelExecution, true, extracted, structure))
@ -207,22 +211,27 @@ object EvaluateTask {
def cancelable(extracted: Extracted, structure: BuildStructure): Boolean =
getSetting(Keys.cancelable, false, extracted, structure)
def cancelStrategy(extracted: Extracted,
structure: BuildStructure,
state: State): TaskCancellationStrategy =
def cancelStrategy(
extracted: Extracted,
structure: BuildStructure,
state: State
): TaskCancellationStrategy =
getSetting(Keys.taskCancelStrategy, { (_: State) =>
TaskCancellationStrategy.Null
}, extracted, structure)(state)
private[sbt] def executeProgress(extracted: Extracted,
structure: BuildStructure,
state: State): ExecuteProgress[Task] = {
private[sbt] def executeProgress(
extracted: Extracted,
structure: BuildStructure,
state: State
): ExecuteProgress[Task] = {
import Types.const
val maker: State => Keys.TaskProgress = getSetting(
Keys.executeProgress,
const(new Keys.TaskProgress(defaultProgress)),
extracted,
structure)
structure
)
maker(state).progress
}
// TODO - Should this pull from Global or from the project itself?
@ -230,15 +239,19 @@ object EvaluateTask {
getSetting(Keys.forcegc in Global, GCUtil.defaultForceGarbageCollection, extracted, structure)
// TODO - Should this pull from Global or from the project itself?
private[sbt] def minForcegcInterval(extracted: Extracted, structure: BuildStructure): Duration =
getSetting(Keys.minForcegcInterval in Global,
GCUtil.defaultMinForcegcInterval,
extracted,
structure)
getSetting(
Keys.minForcegcInterval in Global,
GCUtil.defaultMinForcegcInterval,
extracted,
structure
)
def getSetting[T](key: SettingKey[T],
default: T,
extracted: Extracted,
structure: BuildStructure): T =
def getSetting[T](
key: SettingKey[T],
default: T,
extracted: Extracted,
structure: BuildStructure
): T =
key in extracted.currentRef get structure.data getOrElse default
def injectSettings: Seq[Setting[_]] = Seq(
@ -247,16 +260,21 @@ object EvaluateTask {
(executionRoots in Global) ::= dummyRoots
)
def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData = {
@deprecated("Use variant which doesn't take a logger", "1.1.1")
def evalPluginDef(log: Logger)(pluginDef: BuildStructure, state: State): PluginData =
evalPluginDef(pluginDef, state)
def evalPluginDef(pluginDef: BuildStructure, state: State): PluginData = {
val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root))
val pluginKey = pluginData
val config = extractedTaskConfig(Project.extract(state), pluginDef, state)
val evaluated =
apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config)
val (newS, result) = evaluated getOrElse sys.error(
"Plugin data does not exist for plugin definition at " + pluginDef.root)
"Plugin data does not exist for plugin definition at " + pluginDef.root
)
Project.runUnloadHooks(newS) // discard states
processResult(result, log)
processResult2(result)
}
/**
@ -264,26 +282,32 @@ object EvaluateTask {
* If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`.
* Task execution is configured according to settings defined in the loaded project.
*/
def apply[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef): Option[(State, Result[T])] =
apply[T](structure,
taskKey,
state,
ref,
extractedTaskConfig(Project.extract(state), structure, state))
def apply[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef
): Option[(State, Result[T])] =
apply[T](
structure,
taskKey,
state,
ref,
extractedTaskConfig(Project.extract(state), structure, state)
)
/**
* Evaluates `taskKey` and returns the new State and the result of the task wrapped in Some.
* If the task is not defined, None is returned. The provided task key is resolved against the current project `ref`.
* `config` configures concurrency and canceling of task execution.
*/
def apply[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef,
config: EvaluateTaskConfig): Option[(State, Result[T])] = {
def apply[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
ref: ProjectRef,
config: EvaluateTaskConfig
): Option[(State, Result[T])] = {
withStreams(structure, state) { str =>
for ((task, toNode) <- getTask(structure, taskKey, state, str, ref))
yield runTask(task, state, str, structure.index.triggers, config)(toNode)
@ -296,8 +320,8 @@ object EvaluateTask {
def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = {
val all = Incomplete linearize result
val keyed = for (Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) <- all)
yield (key, msg, ex)
val keyed =
all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) => (key, msg, ex) }
import ExceptionCategory._
for ((key, msg, Some(ex)) <- keyed) {
@ -312,7 +336,7 @@ object EvaluateTask {
for ((key, msg, ex) <- keyed if (msg.isDefined || ex.isDefined)) {
val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t")
val log = getStreams(key, streams).log
val display = contextDisplay(state, log.ansiCodesSupported)
val display = contextDisplay(state, ConsoleAppender.formatEnabledInEnv)
log.error("(" + display.show(key) + ") " + msgString)
}
}
@ -331,34 +355,41 @@ object EvaluateTask {
try { f(str) } finally { str.close() }
}
def getTask[T](structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
streams: Streams,
ref: ProjectRef): Option[(Task[T], NodeView[Task])] = {
def getTask[T](
structure: BuildStructure,
taskKey: ScopedKey[Task[T]],
state: State,
streams: Streams,
ref: ProjectRef
): Option[(Task[T], NodeView[Task])] = {
val thisScope = Load.projectScope(ref)
val resolvedScope = Scope.replaceThis(thisScope)(taskKey.scope)
for (t <- structure.data.get(resolvedScope, taskKey.key))
yield (t, nodeView(state, streams, taskKey :: Nil))
}
def nodeView[HL <: HList](state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
dummies: DummyTaskMap = DummyTaskMap(Nil)): NodeView[Task] =
def nodeView[HL <: HList](
state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
dummies: DummyTaskMap = DummyTaskMap(Nil)
): NodeView[Task] =
Transform(
(dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies)
(dummyRoots, roots) :: (Def.dummyStreamsManager, streams) :: (dummyState, state) :: dummies
)
def runTask[T](
root: Task[T],
state: State,
streams: Streams,
triggers: Triggers[Task],
config: EvaluateTaskConfig)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = {
config: EvaluateTaskConfig
)(implicit taskToNode: NodeView[Task]): (State, Result[T]) = {
import ConcurrentRestrictions.{ completionService, tagged, tagsKey }
val log = state.log
log.debug(
s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}")
s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}"
)
val tags =
tagged[Task[_]](_.info get tagsKey getOrElse Map.empty, Tags.predicate(config.restrictions))
val (service, shutdownThreads) =
@ -379,9 +410,11 @@ object EvaluateTask {
case _ => true
}
def run() = {
val x = new Execute[Task](Execute.config(config.checkCycles, overwriteNode),
triggers,
config.progressReporter)(taskToNode)
val x = new Execute[Task](
Execute.config(config.checkCycles, overwriteNode),
triggers,
config.progressReporter
)(taskToNode)
val (newState, result) =
try {
val results = x.runKeep(root)(service)
@ -406,15 +439,19 @@ object EvaluateTask {
finally strat.onTaskEngineFinish(cancelState)
}
private[this] def storeValuesForPrevious(results: RMap[Task, Result],
state: State,
streams: Streams): Unit =
private[this] def storeValuesForPrevious(
results: RMap[Task, Result],
state: State,
streams: Streams
): Unit =
for (referenced <- Previous.references in Global get Project.structure(state).data)
Previous.complete(referenced, results, streams)
def applyResults[T](results: RMap[Task, Result],
state: State,
root: Task[T]): (State, Result[T]) =
def applyResults[T](
results: RMap[Task, Result],
state: State,
root: Task[T]
): (State, Result[T]) =
(stateTransform(results)(state), results(root))
def stateTransform(results: RMap[Task, Result]): State => State =
Function.chain(
@ -433,12 +470,21 @@ object EvaluateTask {
case in @ Incomplete(Some(node: Task[_]), _, _, _, _) => in.copy(node = transformNode(node))
case i => i
}
type AnyCyclic = Execute[({ type A[_] <: AnyRef })#A]#CyclicException[_]
def convertCyclicInc: Incomplete => Incomplete = {
case in @ Incomplete(_, _, _, _, Some(c: AnyCyclic)) =>
case in @ Incomplete(
_,
_,
_,
_,
Some(c: Execute[({ type A[_] <: AnyRef })#A @unchecked]#CyclicException[_])
) =>
in.copy(directCause = Some(new RuntimeException(convertCyclic(c))))
case i => i
}
def convertCyclic(c: AnyCyclic): String =
(c.caller, c.target) match {
case (caller: Task[_], target: Task[_]) =>
@ -448,7 +494,7 @@ object EvaluateTask {
}
def liftAnonymous: Incomplete => Incomplete = {
case i @ Incomplete(node, tpe, None, causes, None) =>
case i @ Incomplete(_, _, None, causes, None) =>
causes.find(inc => inc.node.isEmpty && (inc.message.isDefined || inc.directCause.isDefined)) match {
case Some(lift) => i.copy(directCause = lift.directCause, message = lift.message)
case None => i
@ -456,12 +502,19 @@ object EvaluateTask {
case i => i
}
@deprecated("Use processResult2 which doesn't take the unused log param", "1.1.1")
def processResult[T](result: Result[T], log: Logger, show: Boolean = false): T =
onResult(result, log) { v =>
processResult2(result, show)
def processResult2[T](result: Result[T], show: Boolean = false): T =
onResult(result) { v =>
if (show) println("Result: " + v); v
}
def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S =
@deprecated("Use variant that doesn't take log", "1.1.1")
def onResult[T, S](result: Result[T], log: Logger)(f: T => S): S = onResult(result)(f)
def onResult[T, S](result: Result[T])(f: T => S): S =
result match {
case Value(v) => f(v)
case Inc(inc) => throw inc

View File

@ -8,7 +8,6 @@
package sbt
import sbt.internal.{ Load, BuildStructure, Act, Aggregation, SessionSettings }
import Project._
import Scope.GlobalScope
import Def.{ ScopedKey, Setting }
import sbt.internal.util.complete.Parser
@ -17,9 +16,11 @@ import sbt.util.Show
import std.Transform.DummyTaskMap
import sbt.EvaluateTask.extractedTaskConfig
final case class Extracted(structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef)(implicit val showKey: Show[ScopedKey[_]]) {
final case class Extracted(
structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef
)(implicit val showKey: Show[ScopedKey[_]]) {
def rootProject = structure.rootProject
lazy val currentUnit = structure units currentRef.build
lazy val currentProject = currentUnit defined currentRef.project
@ -43,7 +44,7 @@ final case class Extracted(structure: BuildStructure,
structure.data.get(inCurrent(key.scope), key.key)
private[this] def inCurrent[T](scope: Scope): Scope =
if (scope.project == This) scope.copy(project = Select(currentRef)) else scope
if (scope.project == This) scope in currentRef else scope
/**
* Runs the task specified by `key` and returns the transformed State and the resulting value of the task.
@ -54,12 +55,12 @@ final case class Extracted(structure: BuildStructure,
* See `runAggregated` for that.
*/
def runTask[T](key: TaskKey[T], state: State): (State, T) = {
val rkey = resolve(key.scopedKey)
val rkey = resolve(key)
val config = extractedTaskConfig(this, structure, state)
val value: Option[(State, Result[T])] =
EvaluateTask(structure, key.scopedKey, state, currentRef, config)
val (newS, result) = getOrError(rkey.scope, rkey.key, value)
(newS, EvaluateTask.processResult(result, newS.log))
(newS, EvaluateTask.processResult2(result))
}
/**
@ -72,22 +73,22 @@ final case class Extracted(structure: BuildStructure,
* This method requests execution of only the given task and does not aggregate execution.
*/
def runInputTask[T](key: InputKey[T], input: String, state: State): (State, T) = {
val scopedKey = ScopedKey(
val key2 = Scoped.scopedSetting(
Scope.resolveScope(Load.projectScope(currentRef), currentRef.build, rootProject)(key.scope),
key.key
)
val rkey = resolve(scopedKey)
val inputTask = get(Scoped.scopedSetting(rkey.scope, rkey.key))
val rkey = resolve(key2)
val inputTask = get(rkey)
val task = Parser.parse(input, inputTask.parser(state)) match {
case Right(t) => t
case Left(msg) => sys.error(s"Invalid programmatic input:\n$msg")
}
val config = extractedTaskConfig(this, structure, state)
EvaluateTask.withStreams(structure, state) { str =>
val nv = EvaluateTask.nodeView(state, str, rkey :: Nil)
val nv = EvaluateTask.nodeView(state, str, rkey.scopedKey :: Nil)
val (newS, result) =
EvaluateTask.runTask(task, state, str, structure.index.triggers, config)(nv)
(newS, EvaluateTask.processResult(result, newS.log))
(newS, EvaluateTask.processResult2(result))
}
}
@ -98,31 +99,34 @@ final case class Extracted(structure: BuildStructure,
* Other axes are resolved to `Zero` if unspecified.
*/
def runAggregated[T](key: TaskKey[T], state: State): State = {
val rkey = resolve(key.scopedKey)
val rkey = resolve(key)
val keys = Aggregation.aggregate(rkey, ScopeMask(), structure.extra)
val tasks = Act.keyValues(structure)(keys)
Aggregation.runTasks(state,
structure,
tasks,
DummyTaskMap(Nil),
show = Aggregation.defaultShow(state, false))(showKey)
Aggregation.runTasks(
state,
tasks,
DummyTaskMap(Nil),
show = Aggregation.defaultShow(state, false),
)(showKey)
}
private[this] def resolve[T](key: ScopedKey[T]): ScopedKey[T] =
Project.mapScope(Scope.resolveScope(GlobalScope, currentRef.build, rootProject))(key.scopedKey)
private[this] def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K =
key in Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope)
private def getOrError[T](scope: Scope, key: AttributeKey[_], value: Option[T])(
implicit display: Show[ScopedKey[_]]): T =
implicit display: Show[ScopedKey[_]]
): T =
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
private def getOrError[T](scope: Scope, key: AttributeKey[T])(
implicit display: Show[ScopedKey[_]]): T =
structure.data.get(scope, key) getOrElse sys.error(
display.show(ScopedKey(scope, key)) + " is undefined.")
implicit display: Show[ScopedKey[_]]
): T =
getOrError(scope, key, structure.data.get(scope, key))(display)
@deprecated(
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",
"1.2.0")
"1.2.0"
)
def append(settings: Seq[Setting[_]], state: State): State =
appendWithoutSession(settings, state)

View File

@ -42,6 +42,7 @@ import sbt.internal.{
}
import sbt.io.{ FileFilter, WatchService }
import sbt.internal.io.WatchState
import sbt.internal.server.ServerHandler
import sbt.internal.util.{ AttributeKey, SourcePosition }
import sbt.librarymanagement.Configurations.CompilerPlugin
@ -136,6 +137,8 @@ object Keys {
val serverHost = SettingKey(BasicKeys.serverHost)
val serverAuthentication = SettingKey(BasicKeys.serverAuthentication)
val serverConnectionType = SettingKey(BasicKeys.serverConnectionType)
val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers)
val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.")
val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
val watch = SettingKey(BasicKeys.watch)
@ -220,6 +223,7 @@ object Keys {
val scalaCompilerBridgeSource = settingKey[ModuleID]("Configures the module ID of the sources of the compiler bridge.").withRank(CSetting)
val scalaArtifacts = settingKey[Seq[String]]("Configures the list of artifacts which should match the Scala binary version").withRank(CSetting)
val enableBinaryCompileAnalysis = settingKey[Boolean]("Writes the analysis file in binary format")
val crossJavaVersions = settingKey[Seq[String]]("The java versions used during JDK cross testing").withRank(BPlusSetting)
val clean = taskKey[Unit]("Deletes files produced by the build, such as generated sources, compiled classes, and task caches.").withRank(APlusTask)
val console = taskKey[Unit]("Starts the Scala interpreter with the project classes on the classpath.").withRank(APlusTask)
@ -269,6 +273,10 @@ object Keys {
val outputStrategy = settingKey[Option[sbt.OutputStrategy]]("Selects how to log output when running a main class.").withRank(DSetting)
val connectInput = settingKey[Boolean]("If true, connects standard input when running a main class forked.").withRank(CSetting)
val javaHome = settingKey[Option[File]]("Selects the Java installation used for compiling and forking. If None, uses the Java installation running the build.").withRank(ASetting)
val discoveredJavaHomes = settingKey[Map[String, File]]("Discovered Java home directories")
val javaHomes = settingKey[Map[String, File]]("The user-defined additional Java home directories")
val fullJavaHomes = settingKey[Map[String, File]]("Combines discoveredJavaHomes and custom javaHomes.").withRank(CTask)
val javaOptions = taskKey[Seq[String]]("Options passed to a new JVM when forking.").withRank(BPlusTask)
val envVars = taskKey[Map[String, String]]("Environment variables used when forking a new JVM").withRank(BTask)
@ -447,7 +455,7 @@ object Keys {
val sbtDependency = settingKey[ModuleID]("Provides a definition for declaring the current version of sbt.").withRank(BMinusSetting)
val sbtVersion = settingKey[String]("Provides the version of sbt. This setting should not be modified.").withRank(AMinusSetting)
val sbtBinaryVersion = settingKey[String]("Defines the binary compatibility version substring.").withRank(BPlusSetting)
val skip = taskKey[Boolean]("For tasks that support it (currently only 'compile' and 'update'), setting skip to true will force the task to not to do its work. This exact semantics may vary by task.").withRank(BSetting)
val skip = taskKey[Boolean]("For tasks that support it (currently only 'compile', 'update', and 'publish'), setting skip to true will force the task to not to do its work. This exact semantics may vary by task.").withRank(BSetting)
val templateResolverInfos = settingKey[Seq[TemplateResolverInfo]]("Template resolvers used for 'new'.").withRank(BSetting)
val interactionService = taskKey[InteractionService]("Service used to ask for user input through the current user interface(s).").withRank(CTask)
val insideCI = SettingKey[Boolean]("insideCI", "Determines if the SBT is running in a Continuous Integration environment", AMinusSetting)

View File

@ -14,6 +14,7 @@ import sbt.internal.{
BuildUnit,
CommandExchange,
CommandStrings,
CrossJava,
DefaultBackgroundJobService,
EvaluateConfigurations,
Inspect,
@ -40,7 +41,6 @@ import sbt.internal.util.{
Types
}
import sbt.util.{ Level, Logger, Show }
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.internal.inc.ScalaInstance
import sbt.compiler.EvalImports
@ -52,8 +52,6 @@ import xsbti.compile.CompilerCache
import scala.annotation.tailrec
import sbt.io.IO
import sbt.io.syntax._
import StandardMain._
import java.io.{ File, IOException }
import java.net.URI
import java.util.{ Locale, Properties }
@ -69,34 +67,36 @@ final class xMain extends xsbti.AppMain {
import BasicCommandStrings.runEarly
import BuiltinCommands.defaults
import sbt.internal.CommandStrings.{ BootCommand, DefaultsCommand, InitCommand }
val state = initialState(
val state = StandardMain.initialState(
configuration,
Seq(defaults, early),
runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil)
runManaged(state)
runEarly(DefaultsCommand) :: runEarly(InitCommand) :: BootCommand :: Nil
)
StandardMain.runManaged(state)
}
}
final class ScriptMain extends xsbti.AppMain {
def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
import BasicCommandStrings.runEarly
runManaged(
initialState(
configuration,
BuiltinCommands.ScriptCommands,
runEarly(Level.Error.toString) :: Script.Name :: Nil
))
val state = StandardMain.initialState(
configuration,
BuiltinCommands.ScriptCommands,
runEarly(Level.Error.toString) :: Script.Name :: Nil
)
StandardMain.runManaged(state)
}
}
final class ConsoleMain extends xsbti.AppMain {
def run(configuration: xsbti.AppConfiguration): xsbti.MainResult =
runManaged(
initialState(
configuration,
BuiltinCommands.ConsoleCommands,
IvyConsole.Name :: Nil
))
def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
val state = StandardMain.initialState(
configuration,
BuiltinCommands.ConsoleCommands,
IvyConsole.Name :: Nil
)
StandardMain.runManaged(state)
}
}
object StandardMain {
@ -121,13 +121,17 @@ object StandardMain {
ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving "))
def initialGlobalLogging: GlobalLogging =
GlobalLogging.initial(MainAppender.globalDefault(console),
File.createTempFile("sbt", ".log"),
console)
GlobalLogging.initial(
MainAppender.globalDefault(console),
File.createTempFile("sbt", ".log"),
console
)
def initialState(configuration: xsbti.AppConfiguration,
initialDefinitions: Seq[Command],
preCommands: Seq[String]): State = {
def initialState(
configuration: xsbti.AppConfiguration,
initialDefinitions: Seq[Command],
preCommands: Seq[String]
): State = {
// This is to workaround https://github.com/sbt/io/issues/110
sys.props.put("jna.nosys", "true")
@ -184,8 +188,11 @@ object BuiltinCommands {
inspect,
loadProjectImpl,
loadFailed,
oldLoadFailed,
Cross.crossBuild,
Cross.switchVersion,
CrossJava.switchJavaHome,
CrossJava.crossJavaHome,
PluginCross.pluginCross,
PluginCross.pluginSwitch,
Cross.crossRestoreSession,
@ -198,6 +205,7 @@ object BuiltinCommands {
startServer,
eval,
last,
oldLastGrep,
lastGrep,
export,
boot,
@ -286,26 +294,32 @@ object BuiltinCommands {
case _ => si.actualVersion
}
private[this] def quiet[T](t: => T): Option[T] = try { Some(t) } catch {
case e: Exception => None
}
private[this] def quiet[T](t: => T): Option[T] =
try Some(t)
catch { case _: Exception => None }
def settingsCommand: Command =
showSettingLike(SettingsCommand,
settingsPreamble,
KeyRanks.MainSettingCutoff,
key => !isTask(key.manifest))
showSettingLike(
SettingsCommand,
settingsPreamble,
KeyRanks.MainSettingCutoff,
key => !isTask(key.manifest)
)
def tasks: Command =
showSettingLike(TasksCommand,
tasksPreamble,
KeyRanks.MainTaskCutoff,
key => isTask(key.manifest))
showSettingLike(
TasksCommand,
tasksPreamble,
KeyRanks.MainTaskCutoff,
key => isTask(key.manifest)
)
def showSettingLike(command: String,
preamble: String,
cutoff: Int,
keep: AttributeKey[_] => Boolean): Command =
def showSettingLike(
command: String,
preamble: String,
cutoff: Int,
keep: AttributeKey[_] => Boolean
): Command =
Command(command, settingsBrief(command), settingsDetailed(command))(showSettingParser(keep)) {
case (s: State, (verbosity: Int, selected: Option[String])) =>
if (selected.isEmpty) System.out.println(preamble)
@ -316,8 +330,9 @@ object BuiltinCommands {
if (prominentOnly) System.out.println(moreAvailableMessage(command, selected.isDefined))
s
}
def showSettingParser(keepKeys: AttributeKey[_] => Boolean)(
s: State): Parser[(Int, Option[String])] =
def showSettingParser(
keepKeys: AttributeKey[_] => Boolean
)(s: State): Parser[(Int, Option[String])] =
verbosityParser ~ selectedParser(s, keepKeys).?
def selectedParser(s: State, keepKeys: AttributeKey[_] => Boolean): Parser[String] =
singleArgument(allTaskAndSettingKeys(s).filter(keepKeys).map(_.label).toSet)
@ -358,16 +373,19 @@ object BuiltinCommands {
def sortByRank(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] = keys.sortBy(_.rank)
def withDescription(keys: Seq[AttributeKey[_]]): Seq[AttributeKey[_]] =
keys.filter(_.description.isDefined)
def isTask(mf: Manifest[_])(implicit taskMF: Manifest[Task[_]],
inputMF: Manifest[InputTask[_]]): Boolean =
def isTask(
mf: Manifest[_]
)(implicit taskMF: Manifest[Task[_]], inputMF: Manifest[InputTask[_]]): Boolean =
mf.runtimeClass == taskMF.runtimeClass || mf.runtimeClass == inputMF.runtimeClass
def topNRanked(n: Int) = (keys: Seq[AttributeKey[_]]) => sortByRank(keys).take(n)
def highPass(rankCutoff: Int) =
(keys: Seq[AttributeKey[_]]) => sortByRank(keys).takeWhile(_.rank <= rankCutoff)
def tasksHelp(s: State,
filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]],
arg: Option[String]): String = {
def tasksHelp(
s: State,
filter: Seq[AttributeKey[_]] => Seq[AttributeKey[_]],
arg: Option[String]
): String = {
val commandAndDescription = taskDetail(filter(allTaskAndSettingKeys(s)), true)
arg match {
case Some(selected) => detail(selected, commandAndDescription.toMap)
@ -422,7 +440,7 @@ object BuiltinCommands {
// For correct behavior, we also need to re-inject a settings logger, as we'll be re-evaluating settings
val loggerInject = LogManager.settingsLogger(s)
val withLogger = newSession.appendRaw(loggerInject :: Nil)
val show = Project.showContextKey(newSession, structure)
val show = Project.showContextKey2(newSession)
val newStructure = Load.reapply(withLogger.mergeSettings, structure)(show)
Project.setProject(newSession, newStructure, s)
}
@ -446,19 +464,27 @@ object BuiltinCommands {
)(cl)
val setResult =
if (all) SettingCompletions.setAll(extracted, settings)
else SettingCompletions.setThis(s, extracted, settings, arg)
else SettingCompletions.setThis(extracted, settings, arg)
s.log.info(setResult.quietSummary)
s.log.debug(setResult.verboseSummary)
reapply(setResult.session, structure, s)
}
@deprecated("Use variant that doesn't take a State", "1.1.1")
def setThis(
s: State,
extracted: Extracted,
settings: Seq[Def.Setting[_]],
arg: String
): SetResult =
SettingCompletions.setThis(s, extracted, settings, arg)
setThis(extracted, settings, arg)
def setThis(
extracted: Extracted,
settings: Seq[Def.Setting[_]],
arg: String
): SetResult =
SettingCompletions.setThis(extracted, settings, arg)
def inspect: Command = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
case (s, (option, sk)) =>
@ -466,14 +492,29 @@ object BuiltinCommands {
s
}
@deprecated("Use `lastGrep` instead.", "1.2.0")
def oldLastGrep: Command =
lastGrepCommand(OldLastGrepCommand, oldLastGrepBrief, oldLastGrepDetailed, { s =>
s.log.warn(deprecationWarningText(OldLastGrepCommand, LastGrepCommand))
lastGrepParser(s)
})
def lastGrep: Command =
Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) {
lastGrepCommand(LastGrepCommand, lastGrepBrief, lastGrepDetailed, lastGrepParser)
private def lastGrepCommand(
name: String,
briefHelp: (String, String),
detail: String,
parser: State => Parser[(String, Option[AnyKeys])]
): Command =
Command(name, briefHelp, detail)(parser) {
case (s, (pattern, Some(sks))) =>
val (str, _, display) = extractLast(s)
Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display)
Output.lastGrep(sks, str.streams(s), pattern, printLast)(display)
keepLastLog(s)
case (s, (pattern, None)) =>
for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast(s))
for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast)
keepLastLog(s)
}
@ -515,7 +556,7 @@ object BuiltinCommands {
lastOnly_keys <- keysParser
kvs = Act.keyValues(structure)(lastOnly_keys._2)
f <- if (lastOnly_keys._1) success(() => s)
else Aggregation.evaluatingParser(s, structure, show)(kvs)
else Aggregation.evaluatingParser(s, show)(kvs)
} yield
() => {
def export0(s: State): State = lastImpl(s, kvs, Some(ExportStream))
@ -538,7 +579,7 @@ object BuiltinCommands {
def last: Command = Command(LastCommand, lastBrief, lastDetailed)(aggregatedKeyValueParser) {
case (s, Some(sks)) => lastImpl(s, sks, None)
case (s, None) =>
for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast(s))
for (logFile <- lastLogFile(s)) yield Output.last(logFile, printLast)
keepLastLog(s)
}
@ -547,7 +588,7 @@ object BuiltinCommands {
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = {
val (str, _, display) = extractLast(s)
Output.last(sks, str.streams(s), printLast(s), sid)(display)
Output.last(sks, str.streams(s), printLast, sid)(display)
keepLastLog(s)
}
@ -572,7 +613,10 @@ object BuiltinCommands {
*/
def isLastOnly(s: State): Boolean = s.history.previous.forall(_.commandLine == Shell)
def printLast(s: State): Seq[String] => Unit = _ foreach println
@deprecated("Use variant that doesn't take the state", "1.1.1")
def printLast(s: State): Seq[String] => Unit = printLast
def printLast: Seq[String] => Unit = _ foreach println
def autoImports(extracted: Extracted): EvalImports =
new EvalImports(imports(extracted), "<auto-imports>")
@ -623,8 +667,9 @@ object BuiltinCommands {
}
def projects: Command =
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(s =>
projectsParser(s).?) {
Command(ProjectsCommand, (ProjectsCommand, projectsBrief), projectsDetailed)(
s => projectsParser(s).?
) {
case (s, Some(modifyBuilds)) => transformExtraBuilds(s, modifyBuilds)
case (s, None) => showProjects(s); s
}
@ -642,7 +687,7 @@ object BuiltinCommands {
val extraUpdated = Project.updateExtraBuilds(s, f)
try doLoadProject(extraUpdated, LoadAction.Current)
catch {
case e: Exception =>
case _: Exception =>
s.log.error("Project loading failed: reverting to previous state.")
Project.setExtraBuilds(s, original)
}
@ -659,11 +704,24 @@ object BuiltinCommands {
Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
def loadFailed: Command = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
@deprecated("Use `loadFailed` instead.", "1.2.0")
def oldLoadFailed: Command =
Command(OldLoadFailed) { s =>
s.log.warn(
deprecationWarningText(OldLoadFailed, LoadFailed)
)
loadProjectParser(s)
}(doLoadFailed)
private[this] def deprecationWarningText(oldCommand: String, newCommand: String) = {
s"The `$oldCommand` command is deprecated in favor of `$newCommand` and will be removed in a later version"
}
@tailrec
private[this] def doLoadFailed(s: State, loadArg: String): State = {
val result = (SimpleReader.readLine(
"Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? ") getOrElse Quit)
"Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? "
) getOrElse Quit)
.toLowerCase(Locale.ENGLISH)
def matches(s: String) = !result.isEmpty && (s startsWith result)
def retry = loadProjectCommand(LoadProject, loadArg) :: s.clearGlobalLog
@ -689,8 +747,9 @@ object BuiltinCommands {
Nil
def loadProject: Command =
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)((s, arg) =>
loadProjectCommands(arg) ::: s)
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser)(
(s, arg) => loadProjectCommands(arg) ::: s
)
private[this] def loadProjectParser: State => Parser[String] =
_ => matched(Project.loadActionParser)
@ -712,11 +771,13 @@ object BuiltinCommands {
Option(buildProperties.getProperty("sbt.version"))
} else None
sbtVersionOpt.foreach(version =>
if (version != app.id.version()) {
state.log.warn(s"""sbt version mismatch, current: ${app.id
.version()}, in build.properties: "$version", use 'reboot' to use the new value.""")
})
sbtVersionOpt.foreach(
version =>
if (version != app.id.version()) {
state.log.warn(s"""sbt version mismatch, current: ${app.id
.version()}, in build.properties: "$version", use 'reboot' to use the new value.""")
}
)
}
def doLoadProject(s0: State, action: LoadAction.Value): State = {
@ -763,8 +824,10 @@ object BuiltinCommands {
exchange publishEventMessage ConsolePromptEvent(s0)
val exec: Exec = exchange.blockUntilNextExec
val newState = s1
.copy(onFailure = Some(Exec(Shell, None)),
remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands)
.copy(
onFailure = Some(Exec(Shell, None)),
remainingCommands = exec +: Exec(Shell, None) +: s1.remainingCommands
)
.setInteractive(true)
exchange publishEventMessage ConsoleUnpromptEvent(exec.source)
if (exec.commandLine.trim.isEmpty) newState
@ -815,7 +878,7 @@ object BuiltinCommands {
if (!java.lang.Boolean.getBoolean("sbt.skip.version.write") && !intendsToInvokeNew(state))
writeSbtVersionUnconditionally(state)
private def WriteSbtVersion = "write-sbt-version"
private def WriteSbtVersion = "writeSbtVersion"
private def writeSbtVersion: Command =
Command.command(WriteSbtVersion) { state =>
@ -831,7 +894,7 @@ object BuiltinCommands {
state.log info "Executing in batch mode. For better performance use sbt's shell"
}
private def NotifyUsersAboutShell = "notify-users-about-shell"
private def NotifyUsersAboutShell = "notifyUsersAboutShell"
private def notifyUsersAboutShell: Command =
Command.command(NotifyUsersAboutShell) { state =>

View File

@ -7,14 +7,16 @@
package sbt
import java.io.PrintWriter
import java.util.Properties
import jline.TerminalFactory
import scala.annotation.tailrec
import scala.util.control.NonFatal
import jline.TerminalFactory
import sbt.io.{ IO, Using }
import sbt.internal.util.{ ErrorHandling, GlobalLogBacking }
import sbt.internal.util.complete.DefaultParsers
import sbt.internal.langserver.ErrorCodes
import sbt.util.Logger
import sbt.protocol._
@ -26,15 +28,14 @@ object MainLoop {
// We've disabled jline shutdown hooks to prevent classloader leaks, and have been careful to always restore
// the jline terminal in finally blocks, but hitting ctrl+c prevents finally blocks from being executed, in that
// case the only way to restore the terminal is in a shutdown hook.
val shutdownHook = new Thread(new Runnable {
def run(): Unit = TerminalFactory.get().restore()
})
val shutdownHook = new Thread(() => TerminalFactory.get().restore())
try {
Runtime.getRuntime.addShutdownHook(shutdownHook)
runLoggedLoop(state, state.globalLogging.backing)
} finally {
Runtime.getRuntime.removeShutdownHook(shutdownHook)
()
}
}
@ -66,7 +67,8 @@ object MainLoop {
throw new xsbti.FullReload(e.arguments.toArray, false)
case NonFatal(e) =>
System.err.println(
"sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file)
"sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file
)
deleteLastLog(logBacking)
throw e
}
@ -100,7 +102,7 @@ object MainLoop {
/** Runs the next sequence of commands with global logging in place. */
def runWithNewLog(state: State, logBacking: GlobalLogBacking): RunNext =
Using.fileWriter(append = true)(logBacking.file) { writer =>
val out = new java.io.PrintWriter(writer)
val out = new PrintWriter(writer)
val full = state.globalLogging.full
val newLogging = state.globalLogging.newAppender(full, out, logBacking)
// transferLevels(state, newLogging)
@ -124,7 +126,7 @@ object MainLoop {
final class KeepGlobalLog(val state: State) extends RunNext
final class Return(val result: xsbti.MainResult) extends RunNext
/** Runs the next sequence of commands that doesn't require global logging changes.*/
/** Runs the next sequence of commands that doesn't require global logging changes. */
@tailrec def run(state: State): RunNext =
state.next match {
case State.Continue => run(next(state))
@ -143,19 +145,10 @@ object MainLoop {
/** This is the main function State transfer function of the sbt command processing. */
def processCommand(exec: Exec, state: State): State = {
import DefaultParsers._
val channelName = exec.source map (_.channelName)
StandardMain.exchange publishEventMessage ExecStatusEvent("Processing",
channelName,
exec.execId,
Vector())
val parser = Command combine state.definedCommands
val newState = parse(exec.commandLine, parser(state)) match {
case Right(s) => s() // apply command. command side effects happen here
case Left(errMsg) =>
state.log error errMsg
state.fail
}
StandardMain.exchange publishEventMessage
ExecStatusEvent("Processing", channelName, exec.execId, Vector())
val newState = Command.process(exec.commandLine, state)
val doneEvent = ExecStatusEvent(
"Done",
channelName,

View File

@ -45,9 +45,11 @@ object Opts {
val sonatypeSnapshots = Resolver.sonatypeRepo("snapshots")
val sonatypeStaging = MavenRepository(
"sonatype-staging",
"https://oss.sonatype.org/service/local/staging/deploy/maven2")
"https://oss.sonatype.org/service/local/staging/deploy/maven2"
)
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)(
Resolver.defaultPatterns)
Resolver.defaultPatterns
)
val sbtSnapshots = Resolver.bintrayRepo("sbt", "maven-snapshots")
val sbtIvySnapshots = Resolver.bintrayIvyRepo("sbt", "ivy-snapshots")
}

View File

@ -16,7 +16,7 @@ import sbt.internal.Load
import sbt.internal.CommandStrings._
import Cross.{ spacedFirst, requireSession }
import sbt.librarymanagement.VersionNumber
import Project.{ inScope }
import Project.inScope
/**
* Module responsible for plugin cross building.
@ -24,9 +24,9 @@ import Project.{ inScope }
private[sbt] object PluginCross {
lazy val pluginSwitch: Command = {
def switchParser(state: State): Parser[(String, String)] = {
val knownVersions = Nil
lazy val switchArgs = token(NotSpace.examples(knownVersions: _*)) ~ (token(
Space ~> matched(state.combinedParser)) ?? "")
lazy val switchArgs = token(NotSpace.examples()) ~ (token(
Space ~> matched(state.combinedParser)
) ?? "")
lazy val nextSpaced = spacedFirst(PluginSwitchCommand)
token(PluginSwitchCommand ~ OptSpace) flatMap { _ =>
switchArgs & nextSpaced
@ -47,7 +47,7 @@ private[sbt] object PluginCross {
val add = List(sbtVersion in GlobalScope in pluginCrossBuild :== version) ++
List(scalaVersion := scalaVersionSetting.value) ++
inScope(GlobalScope.copy(project = Select(currentRef)))(
Seq(scalaVersion := scalaVersionSetting.value)
scalaVersion := scalaVersionSetting.value
)
val cleared = session.mergeSettings.filterNot(crossExclude)
val newStructure = Load.reapply(cleared ++ add, structure)
@ -59,8 +59,11 @@ private[sbt] object PluginCross {
def crossParser(state: State): Parser[String] =
token(PluginCrossCommand <~ OptSpace) flatMap { _ =>
token(
matched(state.combinedParser &
spacedFirst(PluginCrossCommand)))
matched(
state.combinedParser &
spacedFirst(PluginCrossCommand)
)
)
}
def crossVersions(state: State): List[String] = {
val x = Project.extract(state)

View File

@ -111,7 +111,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
def extraProjects: Seq[Project] = Nil
/** The [[Project]]s to add to the current build based on an existing project. */
def derivedProjects(proj: ProjectDefinition[_]): Seq[Project] = Nil
def derivedProjects(@deprecated("unused", "") proj: ProjectDefinition[_]): Seq[Project] = Nil
private[sbt] def unary_! : Exclude = Exclude(this)
@ -202,10 +202,12 @@ object Plugins extends PluginsFunctions {
_.head subsetOf knowledge0
})
log.debug(
s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}")
s"deducing auto plugins based on known facts ${knowledge0.toString} and clauses ${clauses.toString}"
)
Logic.reduce(
clauses,
(flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet) match {
(flattenConvert(requestedPlugins) ++ convertAll(alwaysEnabled)).toSet
) match {
case Left(problem) => throw AutoPluginException(problem)
case Right(results) =>
log.debug(s" :: deduced result: ${results}")
@ -224,20 +226,21 @@ object Plugins extends PluginsFunctions {
_.label
})
}
val retval = topologicalSort(selectedPlugins, log)
val retval = topologicalSort(selectedPlugins)
// log.debug(s" :: sorted deduced result: ${retval.toString}")
retval
}
}
}
}
private[sbt] def topologicalSort(ns: List[AutoPlugin], log: Logger): List[AutoPlugin] = {
// log.debug(s"sorting: ns: ${ns.toString}")
private[sbt] def topologicalSort(ns: List[AutoPlugin]): List[AutoPlugin] = {
@tailrec
def doSort(found0: List[AutoPlugin],
notFound0: List[AutoPlugin],
limit0: Int): List[AutoPlugin] = {
// log.debug(s" :: sorting:: found: ${found0.toString} not found ${notFound0.toString}")
def doSort(
found0: List[AutoPlugin],
notFound0: List[AutoPlugin],
limit0: Int
): List[AutoPlugin] = {
if (limit0 < 0) throw AutoPluginException(s"Failed to sort ${ns} topologically")
else if (notFound0.isEmpty) found0
else {
@ -250,16 +253,16 @@ object Plugins extends PluginsFunctions {
val (roots, nonRoots) = ns partition (_.isRoot)
doSort(roots, nonRoots, ns.size * ns.size + 1)
}
private[sbt] def translateMessage(e: LogicException) = e match {
case ic: InitialContradictions =>
s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(
ic.literals.toSeq)}"
s"Contradiction in selected plugins. These plugins were both included and excluded: ${literalsString(ic.literals.toSeq)}"
case io: InitialOverlap =>
s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(
io.literals.toSeq)}"
s"Cannot directly enable plugins. Plugins are enabled when their required plugins are satisfied. The directly selected plugins were: ${literalsString(io.literals.toSeq)}"
case cn: CyclicNegation =>
s"Cycles in plugin requirements cannot involve excludes. The problematic cycle is: ${literalsString(cn.cycle)}"
}
private[this] def literalsString(lits: Seq[Literal]): String =
lits map { case Atom(l) => l; case Negated(Atom(l)) => l } mkString (", ")
@ -271,9 +274,12 @@ object Plugins extends PluginsFunctions {
val message = s"Plugin$ns provided by multiple AutoPlugins:$nl${dupStrings.mkString(nl)}"
throw AutoPluginException(message)
}
private[this] def exclusionConflictError(requested: Plugins,
selected: Seq[AutoPlugin],
conflicting: Seq[AutoPlugin]): Unit = {
private[this] def exclusionConflictError(
requested: Plugins,
selected: Seq[AutoPlugin],
conflicting: Seq[AutoPlugin]
): Unit = {
def listConflicts(ns: Seq[AutoPlugin]) =
(ns map { c =>
val reasons = (if (flatten(requested) contains c) List("requested")
@ -360,14 +366,14 @@ ${listConflicts(conflicting)}""")
// This would handle things like !!p or !(p && z)
case Exclude(n) => hasInclude(n, p)
case And(ns) => ns.forall(n => hasExclude(n, p))
case b: Basic => false
case _: Basic => false
case Empty => false
}
private[sbt] def hasInclude(n: Plugins, p: AutoPlugin): Boolean = n match {
case `p` => true
case Exclude(n) => hasExclude(n, p)
case And(ns) => ns.forall(n => hasInclude(n, p))
case b: Basic => false
case _: Basic => false
case Empty => false
}
private[this] def flattenConvert(n: Plugins): Seq[Literal] = n match {
@ -425,8 +431,9 @@ ${listConflicts(conflicting)}""")
val pluginClazz = ap.getClass
existsAutoImportVal(pluginClazz)
.orElse(
catching(classOf[ClassNotFoundException]).opt(
Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader)))
catching(classOf[ClassNotFoundException])
.opt(Class.forName(s"${pluginClazz.getName}$autoImport$$", false, loader))
)
.isDefined
}

View File

@ -27,11 +27,12 @@ import Keys.{
serverPort,
serverAuthentication,
serverConnectionType,
fullServerHandlers,
logLevel,
watch
}
import Scope.{ Global, ThisScope }
import Def.{ Flattened, Initialize, ScopedKey, Setting }
import Def.{ Flattened, Initialize, ScopedKey, Setting, SettingsDefinition }
import sbt.internal.{
Load,
BuildStructure,
@ -44,6 +45,7 @@ import sbt.internal.{
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, ~> }
import sbt.internal.util.Types.{ const, idFun }
import sbt.internal.util.complete.DefaultParsers
import sbt.internal.server.ServerHandler
import sbt.librarymanagement.Configuration
import sbt.util.{ Show, Level }
import sjsonnew.JsonFormat
@ -119,7 +121,45 @@ sealed trait ProjectDefinition[PR <: ProjectReference] {
if (ts.isEmpty) Nil else s"$label: $ts" :: Nil
}
sealed trait Project extends ProjectDefinition[ProjectReference] {
trait CompositeProject {
def componentProjects: Seq[Project]
}
private[sbt] object CompositeProject {
/**
* Expand user defined projects with the component projects of `compositeProjects`.
*
* If two projects with the same id appear in the user defined projects and
* in `compositeProjects.componentProjects`, the user defined project wins.
* This is necessary for backward compatibility with the idioms:
* {{{
* lazy val foo = crossProject
* lazy val fooJS = foo.js.settings(...)
* lazy val fooJVM = foo.jvm.settings(...)
* }}}
* and the rarer:
* {{{
* lazy val fooJS = foo.js.settings(...)
* lazy val foo = crossProject
* lazy val fooJVM = foo.jvm.settings(...)
* }}}
*/
def expand(compositeProjects: Seq[CompositeProject]): Seq[Project] = {
val userProjects = compositeProjects.collect { case p: Project => p }
for (p <- compositeProjects.flatMap(_.componentProjects)) yield {
userProjects.find(_.id == p.id) match {
case Some(userProject) => userProject
case None => p
}
}
}.distinct
}
sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject {
def componentProjects: Seq[Project] = this :: Nil
private[sbt] def copy(
id: String = id,
base: File = base,
@ -280,23 +320,29 @@ object Project extends ProjectExtra {
showContextKey(state, None)
def showContextKey(state: State, keyNameColor: Option[String]): Show[ScopedKey[_]] =
if (isProjectLoaded(state)) showContextKey(session(state), structure(state), keyNameColor)
if (isProjectLoaded(state)) showContextKey2(session(state), keyNameColor)
else Def.showFullKey
@deprecated("Use showContextKey2 which doesn't take the unused structure param", "1.1.1")
def showContextKey(
session: SessionSettings,
structure: BuildStructure,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Def.showRelativeKey(session.current, structure.allProjects.size > 1, keyNameColor)
showContextKey2(session, keyNameColor)
def showContextKey2(
session: SessionSettings,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Def.showRelativeKey2(session.current, keyNameColor)
def showLoadingKey(
loaded: LoadedBuild,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Def.showRelativeKey(
Def.showRelativeKey2(
ProjectRef(loaded.root, loaded.units(loaded.root).rootProjects.head),
loaded.allProjectRefs.size > 1,
keyNameColor
)
@ -407,7 +453,7 @@ object Project extends ProjectExtra {
def extract(state: State): Extracted = extract(session(state), structure(state))
private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted =
Extracted(st, se, se.current)(showContextKey(se, st))
Extracted(st, se, se.current)(showContextKey2(se))
def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] =
ref match { case pr: ProjectRef => getProject(pr, structure); case _ => None }
@ -436,7 +482,8 @@ object Project extends ProjectExtra {
val newState = unloaded.copy(attributes = newAttrs)
// TODO: Fix this
onLoad(
updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/ )
updateCurrent(newState) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/
)
}
def orIdentity[T](opt: Option[T => T]): T => T = opt getOrElse idFun
@ -469,9 +516,12 @@ object Project extends ProjectExtra {
val authentication: Option[Set[ServerAuthentication]] = get(serverAuthentication)
val connectionType: Option[ConnectionType] = get(serverConnectionType)
val srvLogLevel: Option[Level.Value] = (logLevel in (ref, serverLog)).get(structure.data)
val hs: Option[Seq[ServerHandler]] = get(fullServerHandlers)
val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(s.definedCommands,
projectCommand)
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(
s.definedCommands,
projectCommand
)
val newAttrs =
s.attributes
.setCond(Watched.Configuration, watched)
@ -485,6 +535,7 @@ object Project extends ProjectExtra {
.put(templateResolverInfos.key, trs)
.setCond(shellPrompt.key, prompt)
.setCond(serverLogLevel, srvLogLevel)
.setCond(fullServerHandlers.key, hs)
s.copy(
attributes = newAttrs,
definedCommands = newDefinedCommands
@ -507,7 +558,8 @@ object Project extends ProjectExtra {
}
}
private[this] def overlappingTargets(
targets: Seq[(ProjectRef, File)]): Map[File, Seq[ProjectRef]] =
targets: Seq[(ProjectRef, File)]
): Map[File, Seq[ProjectRef]] =
targets.groupBy(_._2).filter(_._2.size > 1).mapValues(_.map(_._1))
private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
@ -540,15 +592,18 @@ object Project extends ProjectExtra {
def delegates(structure: BuildStructure, scope: Scope, key: AttributeKey[_]): Seq[ScopedKey[_]] =
structure.delegates(scope).map(d => ScopedKey(d, key))
def scopedKeyData(structure: BuildStructure,
scope: Scope,
key: AttributeKey[_]): Option[ScopedKeyData[_]] =
def scopedKeyData(
structure: BuildStructure,
scope: Scope,
key: AttributeKey[_]
): Option[ScopedKeyData[_]] =
structure.data.get(scope, key) map { v =>
ScopedKeyData(ScopedKey(scope, key), v)
}
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): String = {
implicit display: Show[ScopedKey[_]]
): String = {
val scoped = ScopedKey(scope, key)
val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
@ -589,20 +644,24 @@ object Project extends ProjectExtra {
val reverse = reverseDependencies(cMap, scoped)
val derivedReverse = reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet
def printDepScopes(baseLabel: String,
derivedLabel: String,
scopes: Iterable[ScopedKey[_]],
derived: Set[ScopedKey[_]]): String = {
def printDepScopes(
baseLabel: String,
derivedLabel: String,
scopes: Iterable[ScopedKey[_]],
derived: Set[ScopedKey[_]]
): String = {
val label = s"$baseLabel${if (derived.isEmpty) "" else s" (D=$derivedLabel)"}"
val prefix: ScopedKey[_] => String =
if (derived.isEmpty) const("") else sk => if (derived(sk)) "D " else " "
printScopes(label, scopes, prefix = prefix)
}
def printScopes(label: String,
scopes: Iterable[ScopedKey[_]],
max: Int = Int.MaxValue,
prefix: ScopedKey[_] => String = const("")) =
def printScopes(
label: String,
scopes: Iterable[ScopedKey[_]],
max: Int = Int.MaxValue,
prefix: ScopedKey[_] => String = const("")
) =
if (scopes.isEmpty) ""
else {
val (limited, more) =
@ -620,23 +679,27 @@ object Project extends ProjectExtra {
printScopes("Related", related, 10)
}
def settingGraph(structure: BuildStructure, basedir: File, scoped: ScopedKey[_])(
implicit display: Show[ScopedKey[_]]): SettingGraph =
implicit display: Show[ScopedKey[_]]
): SettingGraph =
SettingGraph(structure, basedir, scoped, 0)
def graphSettings(structure: BuildStructure, basedir: File)(
implicit display: Show[ScopedKey[_]]): Unit = {
implicit display: Show[ScopedKey[_]]
): Unit = {
def graph(actual: Boolean, name: String) =
graphSettings(structure, actual, name, new File(basedir, name + ".dot"))
graph(true, "actual_dependencies")
graph(false, "declared_dependencies")
}
def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)(
implicit display: Show[ScopedKey[_]]): Unit = {
implicit display: Show[ScopedKey[_]]
): Unit = {
val rel = relation(structure, actual)
val keyToString = display.show _
DotGraph.generateGraph(file, graphName, rel, keyToString, keyToString)
}
def relation(structure: BuildStructure, actual: Boolean)(
implicit display: Show[ScopedKey[_]]): Relation[ScopedKey[_], ScopedKey[_]] =
implicit display: Show[ScopedKey[_]]
): Relation[ScopedKey[_], ScopedKey[_]] =
relation(structure.settings, actual)(structure.delegates, structure.scopeLocal, display)
private[sbt] def relation(settings: Seq[Def.Setting[_]], actual: Boolean)(
@ -650,7 +713,8 @@ object Project extends ProjectExtra {
}
def showDefinitions(key: AttributeKey[_], defs: Seq[Scope])(
implicit display: Show[ScopedKey[_]]): String =
implicit display: Show[ScopedKey[_]]
): String =
showKeys(defs.map(scope => ScopedKey(scope, key)))
def showUses(defs: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
@ -660,17 +724,21 @@ object Project extends ProjectExtra {
s.map(display.show).sorted.mkString("\n\t", "\n\t", "\n\n")
def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): Seq[Scope] =
implicit display: Show[ScopedKey[_]]
): Seq[Scope] =
relation(structure, actual)(display)._1s.toSeq flatMap { sk =>
if (sk.key == key) sk.scope :: Nil else Nil
}
def usedBy(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(
implicit display: Show[ScopedKey[_]]): Seq[ScopedKey[_]] =
implicit display: Show[ScopedKey[_]]
): Seq[ScopedKey[_]] =
relation(structure, actual)(display).all.toSeq flatMap {
case (a, b) => if (b.key == key) List[ScopedKey[_]](a) else Nil
}
def reverseDependencies(cMap: Map[ScopedKey[_], Flattened],
scoped: ScopedKey[_]): Iterable[ScopedKey[_]] =
def reverseDependencies(
cMap: Map[ScopedKey[_], Flattened],
scoped: ScopedKey[_]
): Iterable[ScopedKey[_]] =
for ((key, compiled) <- cMap; dep <- compiled.dependencies if dep == scoped) yield key
def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings =
@ -678,7 +746,8 @@ object Project extends ProjectExtra {
val ExtraBuilds = AttributeKey[List[URI]](
"extra-builds",
"Extra build URIs to load in addition to the ones defined by the project.")
"Extra build URIs to load in addition to the ones defined by the project."
)
def extraBuilds(s: State): List[URI] = getOrNil(s, ExtraBuilds)
def getOrNil[T](s: State, key: AttributeKey[List[T]]): List[T] = s get key getOrElse Nil
def setExtraBuilds(s: State, extra: List[URI]): State = s.put(ExtraBuilds, extra)
@ -751,7 +820,9 @@ object Project extends ProjectExtra {
EvaluateTask(extracted.structure, taskKey, state, extracted.currentRef, config)
}
implicit def projectToRef(p: Project): ProjectReference = LocalProject(p.id)
def projectToRef(p: Project): ProjectReference = LocalProject(p.id)
implicit def projectToLocalProject(p: Project): LocalProject = LocalProject(p.id)
final class RichTaskSessionVar[S](i: Def.Initialize[Task[S]]) {
import SessionVar.{ persistAndSet, resolveContext, set, transform => tx }
@ -762,15 +833,20 @@ object Project extends ProjectExtra {
import TupleSyntax._
(Keys.resolvedScoped, i)(
(scoped, task) =>
tx(task,
(state, value) =>
persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f)))
tx(
task,
(state, value) =>
persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f)
)
)
}
def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] = {
import TupleSyntax._
(i, Keys.resolvedScoped)((t, scoped) =>
tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)))
(i, Keys.resolvedScoped)(
(t, scoped) =>
tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))
)
}
}
@ -781,7 +857,8 @@ object Project extends ProjectExtra {
val enclosingValName = std.KeyMacro.definingValName(
c,
methodName =>
s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`""")
s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`"""
)
val name = c.Expr[String](Literal(Constant(enclosingValName)))
reify { Project(name.splice, new File(name.splice)) }
}
@ -790,8 +867,9 @@ object Project extends ProjectExtra {
private[sbt] trait GeneratedRootProject
trait ProjectExtra {
implicit def configDependencyConstructor[T](p: T)(
implicit ev: T => ProjectReference): Constructor =
implicit def configDependencyConstructor[T](
p: T
)(implicit ev: T => ProjectReference): Constructor =
new Constructor(p)
implicit def classpathDependency[T](
@ -804,7 +882,8 @@ trait ProjectExtra {
new Scoped.RichInitializeTask(init)
implicit def richInitializeInputTask[T](
init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] =
init: Initialize[InputTask[T]]
): Scoped.RichInitializeInputTask[T] =
new Scoped.RichInitializeInputTask(init)
implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] =
@ -813,17 +892,19 @@ trait ProjectExtra {
implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] =
new Project.RichTaskSessionVar(init)
def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)
def inThisBuild(ss: SettingsDefinition*): Seq[Setting[_]] =
inScope(ThisScope.copy(project = Select(ThisBuild)))(ss flatMap (_.settings))
def inConfig(conf: Configuration)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(config = Select(conf)))((configuration :== conf) +: ss)
def inConfig(conf: Configuration)(ss: SettingsDefinition*): Seq[Setting[_]] =
inScope(ThisScope.copy(config = Select(conf)))(
(configuration :== conf) +: (ss flatMap (_.settings))
)
def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(task = Select(t.key)))(ss)
def inTask(t: Scoped)(ss: SettingsDefinition*): Seq[Setting[_]] =
inScope(ThisScope.copy(task = Select(t.key)))(ss flatMap (_.settings))
def inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.replaceThis(scope), ss)
def inScope(scope: Scope)(ss: SettingsDefinition*): Seq[Setting[_]] =
Project.transform(Scope.replaceThis(scope), ss flatMap (_.settings))
private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] =
inScope(ThisScope.copy(project = Select(ThisBuild)), i)

View File

@ -40,7 +40,7 @@ object Resolvers {
val to = uniqueSubdirectoryFor(info.uri, in = info.staging)
Some { () =>
creates(to) { IO.unzipURL(url, to) }
creates(to) { IO.unzipURL(url, to); () }
}
}

View File

@ -17,13 +17,15 @@ class RichURI(uri: URI) {
* Note that this method simply passes the individual components of this URI to the URI constructor
* that accepts each component individually. It is thus limited by the implementation restrictions of the relevant methods.
*/
def copy(scheme: String = uri.getScheme,
userInfo: String = uri.getUserInfo,
host: String = uri.getHost,
port: Int = uri.getPort,
path: String = uri.getPath,
query: String = uri.getQuery,
fragment: String = uri.getFragment) =
def copy(
scheme: String = uri.getScheme,
userInfo: String = uri.getUserInfo,
host: String = uri.getHost,
port: Int = uri.getPort,
path: String = uri.getPath,
query: String = uri.getQuery,
fragment: String = uri.getFragment
) =
new URI(scheme, userInfo, host, port, path, query, fragment)
/** Returns `true` if the fragment of the URI is defined. */

View File

@ -10,7 +10,7 @@ package sbt
import sbt.internal.{ Load, LoadedBuildUnit }
import sbt.internal.util.{ AttributeKey, Dag, Types }
import sbt.librarymanagement.Configuration
import sbt.librarymanagement.{ Configuration, ConfigRef }
import Types.const
import Def.Initialize
@ -30,9 +30,11 @@ object ScopeFilter {
* If a task filter is not supplied, global is selected.
* Generally, always specify the project axis.
*/
def apply(projects: ProjectFilter = inProjects(ThisProject),
configurations: ConfigurationFilter = zeroAxis,
tasks: TaskFilter = zeroAxis): ScopeFilter =
def apply(
projects: ProjectFilter = inProjects(ThisProject),
configurations: ConfigurationFilter = zeroAxis,
tasks: TaskFilter = zeroAxis
): ScopeFilter =
new ScopeFilter {
private[sbt] def apply(data: Data): Scope => Boolean = {
val pf = projects(data)
@ -104,7 +106,7 @@ object ScopeFilter {
/** Selects all scopes that apply to a single project. Zero and build-level scopes are excluded. */
def inAnyProject: ProjectFilter =
selectAxis(const { case p: ProjectRef => true; case _ => false })
selectAxis(const { case _: ProjectRef => true; case _ => false })
/** Accepts all values for the task axis except Zero. */
def inAnyTask: TaskFilter = selectAny[AttributeKey[_]]
@ -116,27 +118,35 @@ object ScopeFilter {
* Selects Scopes that have a project axis that is aggregated by `ref`, transitively if `transitive` is true.
* If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected.
*/
def inAggregates(ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true): ProjectFilter =
byDeps(ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = true,
classpath = false)
def inAggregates(
ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true
): ProjectFilter =
byDeps(
ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = true,
classpath = false
)
/**
* Selects Scopes that have a project axis that is a dependency of `ref`, transitively if `transitive` is true.
* If `includeRoot` is true, Scopes with `ref` itself as the project axis value are also selected.
*/
def inDependencies(ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true): ProjectFilter =
byDeps(ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = false,
classpath = true)
def inDependencies(
ref: ProjectReference,
transitive: Boolean = true,
includeRoot: Boolean = true
): ProjectFilter =
byDeps(
ref,
transitive = transitive,
includeRoot = includeRoot,
aggregate = false,
classpath = true
)
/** Selects Scopes that have a project axis with one of the provided values.*/
def inProjects(projects: ProjectReference*): ProjectFilter =
@ -154,6 +164,16 @@ object ScopeFilter {
selectAxis[ConfigKey](const(c => cs(c.name)))
}
def inConfigurationsByKeys(keys: ConfigKey*): ConfigurationFilter = {
val cs = keys.toSet
selectAxis[ConfigKey](const(cs))
}
def inConfigurationsByRefs(refs: ConfigRef*): ConfigurationFilter = {
val cs = refs.map(r => ConfigKey(r.name)).toSet
selectAxis[ConfigKey](const(cs))
}
implicit def settingKeyAll[T](key: Initialize[T]): SettingKeyAll[T] = new SettingKeyAll[T](key)
implicit def taskKeyAll[T](key: Initialize[Task[T]]): TaskKeyAll[T] = new TaskKeyAll[T](key)
}
@ -162,9 +182,11 @@ object ScopeFilter {
* Information provided to Scope filters. These provide project relationships,
* project reference resolution, and the list of all static Scopes.
*/
private final class Data(val units: Map[URI, LoadedBuildUnit],
val resolve: ProjectReference => ProjectRef,
val allScopes: Set[Scope])
private final class Data(
val units: Map[URI, LoadedBuildUnit],
val resolve: ProjectReference => ProjectRef,
val allScopes: Set[Scope]
)
/** Constructs a Data instance from the list of static scopes and the project relationships.*/
private[this] val getData: Initialize[Data] =
@ -185,20 +207,24 @@ object ScopeFilter {
new Data(build.units, resolve, scopes)
}
private[this] def getDependencies(structure: Map[URI, LoadedBuildUnit],
classpath: Boolean,
aggregate: Boolean): ProjectRef => Seq[ProjectRef] =
private[this] def getDependencies(
structure: Map[URI, LoadedBuildUnit],
classpath: Boolean,
aggregate: Boolean
): ProjectRef => Seq[ProjectRef] =
ref =>
Project.getProject(ref, structure).toList flatMap { p =>
(if (classpath) p.dependencies.map(_.project) else Nil) ++
(if (aggregate) p.aggregate else Nil)
}
private[this] def byDeps(ref: ProjectReference,
transitive: Boolean,
includeRoot: Boolean,
aggregate: Boolean,
classpath: Boolean): ProjectFilter =
private[this] def byDeps(
ref: ProjectReference,
transitive: Boolean,
includeRoot: Boolean,
aggregate: Boolean,
classpath: Boolean
): ProjectFilter =
inResolvedProjects { data =>
val resolvedRef = data.resolve(ref)
val direct = getDependencies(data.units, classpath = classpath, aggregate = aggregate)

Some files were not shown because too many files have changed in this diff Show More