Merge branch '1.x' into help-sbt-new

This commit is contained in:
eugene yokota 2018-06-27 22:15:02 -04:00 committed by GitHub
commit cf31a11b69
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
382 changed files with 11797 additions and 7356 deletions

26
.appveyor.yml Normal file
View File

@ -0,0 +1,26 @@
build: off
init:
- git config --global core.autocrlf input
install:
- SET JAVA_HOME=C:\Program Files\Java\jdk1.8.0
- SET PATH=%JAVA_HOME%\bin;%PATH%
- ps: |
Add-Type -AssemblyName System.IO.Compression.FileSystem
if (!(Test-Path -Path "C:\sbt" )) {
(new-object System.Net.WebClient).DownloadFile(
'https://github.com/sbt/sbt/releases/download/v1.0.4/sbt-1.0.4.zip',
'C:\sbt-bin.zip'
)
[System.IO.Compression.ZipFile]::ExtractToDirectory("C:\sbt-bin.zip", "C:\sbt")
}
- SET PATH=C:\sbt\sbt\bin;%PATH%
- SET SBT_OPTS=-XX:MaxPermSize=2g -Xmx4g -Dfile.encoding=UTF8
test_script:
- sbt "scripted actions/*" "testOnly sbt.ServerSpec"
cache:
- '%USERPROFILE%\.ivy2\cache'
- '%USERPROFILE%\.sbt'

10
.gitattributes vendored
View File

@ -1,7 +1,3 @@
# Set default behaviour, in case users don't have core.autocrlf set.
* text=auto
# Explicitly declare text files we want to always be normalized and converted
# to native line endings on checkout.
*.scala text
*.java text
# Exclude contraband generated files from diff (by default - you can see it if you want)
**/contraband-scala/**/* -diff merge=ours
**/contraband-scala/**/* linguist-generated=true

1
.gitignore vendored
View File

@ -1,6 +1,5 @@
target/
__pycache__
toolbox.classpath
out
node_modules
vscode-sbt-scala/client/server

3
.sbtopts Normal file
View File

@ -0,0 +1,3 @@
-J-Xms2048M
-J-Xmx2048M
-J-Xss2M

View File

@ -8,3 +8,11 @@ docstrings = JavaDoc
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
# This is more idiomatic Scala.
# http://docs.scala-lang.org/style/indentation.html#methods-with-numerous-arguments
align.openParenCallSite = false
align.openParenDefnSite = false
# For better code clarity
danglingParentheses = true

View File

@ -6,6 +6,7 @@ cache:
directories:
- $HOME/.ivy2/cache
- $HOME/.sbt/boot
- $HOME/.jabba
language: scala
@ -15,18 +16,28 @@ jdk:
matrix:
fast_finish: true
matrix:
include:
- env: SBT_CMD="scripted java/*"
before_install:
- curl -sL https://raw.githubusercontent.com/shyiko/jabba/0.10.1/install.sh | bash && . ~/.jabba/jabba.sh
install:
- /home/travis/.jabba/bin/jabba install openjdk@1.10
env:
global:
- secure: d3bu2KNwsVHwfhbGgO+gmRfDKBJhfICdCJFGWKf2w3Gv86AJZX9nuTYRxz0KtdvEHO5Xw8WTBZLPb2thSJqhw9OCm4J8TBAVqCP0ruUj4+aqBUFy4bVexQ6WKE6nWHs4JPzPk8c6uC1LG3hMuzlC8RGETXtL/n81Ef1u7NjyXjs=
matrix:
- SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests"
- SBT_CMD=";mimaReportBinaryIssues ;scalafmt::test ;test:scalafmt::test ;sbt:scalafmt::test ;headerCheck ;test:headerCheck ;whitesourceCheckPolicies ;test:compile ;mainSettingsProj/test ;safeUnitTests ;otherUnitTests; doc"
- SBT_CMD="scripted actions/*"
- SBT_CMD="scripted apiinfo/* compiler-project/* ivy-deps-management/*"
- SBT_CMD="scripted dependency-management/*1of4"
- SBT_CMD="scripted dependency-management/*2of4"
- SBT_CMD="scripted dependency-management/*3of4"
- SBT_CMD="scripted dependency-management/*4of4"
- SBT_CMD="scripted java/* package/* reporter/* run/* project-load/*"
- SBT_CMD="scripted package/* reporter/* run/* project-load/*"
- SBT_CMD="scripted project/*1of2"
- SBT_CMD="scripted project/*2of2 server/*"
- SBT_CMD="scripted project/*2of2"
- SBT_CMD="scripted source-dependencies/*1of3"
- SBT_CMD="scripted source-dependencies/*2of3"
- SBT_CMD="scripted source-dependencies/*3of3"
@ -39,12 +50,12 @@ notifications:
# Undo _JAVA_OPTIONS environment variable
before_script:
- _JAVA_OPTIONS=
- unset _JAVA_OPTIONS
script:
# It doesn't need that much memory because compile and run are forked
- sbt -J-XX:ReservedCodeCacheSize=128m -J-Xmx800M -J-Xms800M -J-server "$SBT_CMD"
before_cache:
- find $HOME/.ivy2 -name "ivydata-*.properties" -print -delete
- find $HOME/.sbt -name "*.lock" -print -delete
- find $HOME/.ivy2 -name "ivydata-*.properties" -delete
- find $HOME/.sbt -name "*.lock" -delete

View File

@ -1,57 +1,72 @@
[StackOverflow]: http://stackoverflow.com/tags/sbt
[ask]: https://stackoverflow.com/questions/ask?tags=sbt
[Setup]: http://www.scala-sbt.org/release/docs/Getting-Started/Setup
[Issues]: https://github.com/sbt/sbt/issues
[sbt-dev]: https://groups.google.com/d/forum/sbt-dev
[subscriptions]: https://www.lightbend.com/platform/subscription
[sbt-contrib]: https://gitter.im/sbt/sbt-contrib
[327]: https://github.com/sbt/sbt/issues/327
[documentation]: https://github.com/sbt/website
Contributing
============
(For support, see [SUPPORT](./SUPPORT.md))
There are lots of ways to contribute to sbt ecosystem depending on your interests and skill level.
- Help someone at work or online fix their build problem.
- Answer StackOverflow questions.
- Ask StackOverflow questions.
- Create plugins that extend sbt's features.
- Maintain and update [documentation].
- Garden the issue tracker.
- Report issues.
- Patch the core (send pull requests to code).
- On-ramp other contributors.
Issues and Pull Requests
========================
------------------------
When you find a bug in sbt we want to hear about it. Your bug reports play an important part in making sbt more reliable and usable.
Effective bug reports are more likely to be fixed. These guidelines explain how to write such reports and pull requests.
Preliminaries
--------------
Please open a GitHub issue when you are 90% sure it's an actual bug.
If you have an enhancement idea, or a general discussion, bring it up to [sbt-contrib].
### Notes about Documentation
Documentation fixes and contributions are as much welcome as to patching the core. Visit [sbt/website][documentation] to learn about how to contribute.
### Preliminaries
- Make sure your sbt version is up to date.
- Search [StackOverflow] and [Issues] to see whether your bug has already been reported.
- Open one case for each problem.
- Proceed to the next steps for details.
Where to get help and/or file a bug report
------------------------------------------
sbt project uses GitHub Issues as a publicly visible todo list. Please open a GitHub issue only when asked to do so.
- If you need help with sbt, please [ask] on StackOverflow with the tag "sbt" and the name of the sbt plugin if any.
- If you run into an issue, have an enhancement idea, or a general discussion, bring it up to [sbt-dev] Google Group first.
- If you need a faster response time, consider one of the [Lightbend subscriptions][subscriptions].
What to report
--------------
### What to report
The developers need three things from you: **steps**, **problems**, and **expectations**.
### Steps
The most important thing to remember about bug reporting is to clearly distinguish facts and opinions.
The most important thing to remember about bug reporting is to clearly distinguish facts and opinions. What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors.
#### Steps
What we need first is **the exact steps to reproduce your problems on our computers**. This is called *reproduction steps*, which is often shortened to "repro steps" or "steps." Describe your method of running sbt. Provide `build.sbt` that caused the problem and the version of sbt or Scala that was used. Provide sample Scala code if it's to do with incremental compilation. If possible, minimize the problem to reduce non-essential factors.
Repro steps are the most important part of a bug report. If we cannot reproduce the problem in one way or the other, the problem can't be fixed. Telling us the error messages is not enough.
### Problems
#### Problems
Next, describe the problems, or what *you think* is the problem. It might be "obvious" to you that it's a problem, but it could actually be an intentional behavior for some backward compatibility etc. For compilation errors, include the stack trace. The more raw info the better.
### Expectations
#### Expectations
Same as the problems. Describe what *you think* should've happened.
### Notes
#### Notes
Add an optional notes section to describe your analysis.
Add any optional notes section to describe your analysis.
### Subject
@ -81,14 +96,11 @@ Finally, thank you for taking the time to report a problem.
Pull Requests
-------------
### Branch to work against
Whether implementing a new feature, fixing a bug, or modifying documentation, please work against the latest development branch (currently, 1.0.x).
See below for instructions on building sbt from source.
See below for the branch to work against.
### Adding notes
All pull requests are required to include a "Notes" file which documents the change. This file should reside in the
Most pull requests should include a "Notes" file which documents the change. This file should reside in the
directory:
<sbt root>
@ -111,73 +123,164 @@ Make sure you document each commit and squash them appropriately. You can use th
* Scala's documentation on [Git Hygiene](https://github.com/scala/scala/tree/v2.12.0-M3#git-hygiene)
* Play's documentation on [Working with Git](https://www.playframework.com/documentation/2.4.4/WorkingWithGit#Squashing-commits)
Documentation
-------------
Documentation fixes and contributions are as much welcome as to the source code itself. Visit [the website project](https://github.com/sbt/website) to learn about how to contribute.
Build from source
=================
### Branch to work against
sbt uses two branches for development:
- Development branch: `1.x` (this is also called "master")
- Stable branch: `1.$MINOR.x`, where `$MINOR` is current minor version (e.g. `1.1.x` during 1.1.x series)
### Instruction to build all modules from source
1. Install the current stable binary release of sbt (see [Setup]), which will be used to build sbt from source.
2. Get the source code.
$ git clone git://github.com/sbt/sbt.git
$ cd sbt
```
$ mkdir sbt-modules
$ cd sbt-modules
$ for i in sbt io util librarymanagement zinc; do \
git clone https://github.com/sbt/$i.git && (cd $i; git checkout -b 1.x origin/1.x)
done
$ cd sbt
$ ./sbt-allsources.sh
```
3. The default branch is the development branch [1.0.x](https://github.com/sbt/sbt/tree/1.0.x), which contains the latest code for the next major sbt release. To build a specific release or commit, switch to the associated tag. The tag for the latest stable release is [v0.13.13](https://github.com/sbt/sbt/tree/v0.13.13):
3. To build and publish all components locally,
$ git checkout v0.13.13
```
$ ./sbt-allsources.sh
sbt:sbtRoot> publishLocalAllModule
```
Note that sbt is always built with the previous stable release. For example, the [1.0.x](https://github.com/sbt/sbt/tree/1.0.x) branch is built with 0.13.13 and the [v0.13.13](https://github.com/sbt/sbt/tree/v0.13.13) tag is built with 0.13.12.
### Instruction to build just sbt
4. To build the launcher and publish all components locally,
If the change you are making is contained in sbt/sbt, you could publishLocal on sbt/sbt:
$ sbt
> publishLocal
```
$ sbt
sbt:sbtRoot> publishLocal
```
5. To use this locally built version of sbt, copy your stable `~/bin/sbt` script to `~/bin/xsbt` and change it to use the launcher jar at `<sbt>/launch/target/sbt-launch.jar`.
### Using the locally built sbt
Directory `target` is removed by `clean` command. Second solution is using the artifact stored in the local ivy repository.
The `publishLocal` above will build and publish version `1.$MINOR.$PATCH-SNAPSHOT` (e.g. 1.1.2-SNAPSHOT) to your local ivy repository.
The launcher is located in:
To use the locally built sbt, set the version in `build.properties` file in your project to `1.$MINOR.$PATCH-SNAPSHOT` then launch `sbt` (this can be the `sbt` launcher installed in your machine).
$HOME/.ivy2/local/org.scala-sbt/sbt-launch/0.13.9/jars/sbt-launch.jar
```
$ cd $YOUR_OWN_PROJECT
$ sbt
> compile
```
for v0.13.9 tag, or in:
### Using Jenkins sbt-snapshots nightlies
$HOME/.ivy2/local/org.scala-sbt/sbt-launch/0.13.10-SNAPSHOT/jars/sbt-launch.jar
There is a Jenkins instance for sbt that every night builds and publishes (if successful) a timestamped version
of sbt to http://jenkins.scala-sbt.org/sbt-snapshots and is available for 4-5 weeks. To use it do the following:
for the development branch.
1. Set the `sbt.version` in `project/build.properties`
## Modifying sbt
```bash
echo "sbt.version=1.2.0-bin-20180423T192044" > project/build.properties
```
1. When developing sbt itself, run `compile` when checking compilation only.
2. Create an sbt repositories file (`./repositories`) that includes that Maven repository:
2. To use your modified version of sbt in a project locally, run `publishLocal`.
```properties
[repositories]
local
local-preloaded-ivy: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/}, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]
local-preloaded: file:///${sbt.preloaded-${sbt.global.base-${user.home}/.sbt}/preloaded/}
maven-central
sbt-maven-releases: https://repo.scala-sbt.org/scalasbt/maven-releases/, bootOnly
sbt-maven-snapshots: https://repo.scala-sbt.org/scalasbt/maven-snapshots/, bootOnly
typesafe-ivy-releases: https://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
sbt-ivy-snapshots: https://repo.scala-sbt.org/scalasbt/ivy-snapshots/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
sbt-snapshots: https://jenkins.scala-sbt.org/sbt-snapshots
```
3. After each `publishLocal`, clean the `~/.sbt/boot/` directory. Alternatively, if sbt is running and the launcher hasn't changed, run `reboot full` to have sbt do this for you.
3. Start sbt with a stable launcher and the custom repositories file:
4. If a project has `project/build.properties` defined, either delete the file or change `sbt.version` to `1.0.0-SNAPSHOT`.
```bash
$ sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar -Dsbt.repository.config=repositories
Getting org.scala-sbt sbt 1.2.0-bin-20180423T192044 (this may take some time)...
downloading https://jenkins.scala-sbt.org/sbt-snapshots/org/scala-sbt/sbt/1.2.0-bin-20180423T192044/sbt-1.2.0-bin-20180423T192044.jar ...
[SUCCESSFUL ] org.scala-sbt#sbt;1.2.0-bin-20180423T192044!sbt.jar (139ms)
...
[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/936e0f52ed9baf6b6d83/sock
> show sbtVersion
[info] 1.2.0-bin-20180423T192044
```
## Diagnosing build failures
### Using Jenkins maven-snapshots nightlies
As an alternative you can request a build that publishes to https://repo.scala-sbt.org/scalasbt/maven-snapshots
and stays there forever by:
1. Logging into https://jenkins.scala-sbt.org/job/sbt-validator/
2. Clicking "Build with Parameters"
3. Making sure `deploy_to_bintray` is enabled
4. Hitting "Build"
After which, start sbt with a stable launcher: `sbt -sbt-jar ~/.sbt/launchers/1.1.4/sbt-launch.jar`
### Clearing out boot and local cache
When you run a locally built sbt, the JAR artifacts will be now cached under `$HOME/.sbt/boot/scala-2.12.6/org.scala-sbt/sbt/1.$MINOR.$PATCH-SNAPSHOT` directory. To clear this out run: `reboot dev` command from sbt's session of your test application.
One drawback of `-SNAPSHOT` version is that it's slow to resolve as it tries to hit all the resolvers. You can workaround that by using a version name like `1.$MINOR.$PATCH-LOCAL1`. Non-SNAPSHOT artifacts will now be cached under `$HOME/.ivy/cache/` directory, so you need to clear that out using [sbt-dirty-money](https://github.com/sbt/sbt-dirty-money)'s `cleanCache` task.
### Running sbt "from source" - `sbtOn`
In addition to locally publishing a build of sbt, there is an alternative, experimental launcher within sbt/sbt
to be able to run sbt "from source", that is to compile sbt and run it from its resulting classfiles rather than
from published jar files.
Such a launcher is available within sbt/sbt's build through a custom `sbtOn` command that takes as its first
argument the directory on which you want to run sbt, and the remaining arguments are passed _to_ that sbt
instance. For example:
I have setup a minimal sbt build in the directory `/s/t`, to run sbt on that directory I call:
```bash
> sbtOn /s/t
[info] Packaging /d/sbt/scripted/sbt/target/scala-2.12/scripted-sbt_2.12-1.2.0-SNAPSHOT.jar ...
[info] Done packaging.
[info] Running (fork) sbt.RunFromSourceMain /s/t
Listening for transport dt_socket at address: 5005
[info] Loading settings from idea.sbt,global-plugins.sbt ...
[info] Loading global plugins from /Users/dnw/.dotfiles/.sbt/1.0/plugins
[info] Loading project definition from /s/t/project
[info] Set current project to t (in build file:/s/t/)
[info] sbt server started at local:///Users/dnw/.sbt/1.0/server/ce9baa494c7598e4d59b/sock
> show baseDirectory
[info] /s/t
> exit
[info] shutting down server
[success] Total time: 19 s, completed 25-Apr-2018 15:04:58
```
Please note that this alternative launcher does _not_ have feature parity with sbt/launcher. (Meta)
contributions welcome! :-D
### Diagnosing build failures
Globally included plugins can interfere building `sbt`; if you are getting errors building sbt, try disabling all globally included plugins and try again.
Running Tests
=============
### Running Tests
sbt has an extensive test suite of Unit tests and Integration tests!
sbt has a suite of unit tests and integration tests, also known as scripted tests.
Unit / Functional tests
-----------------------
#### Unit / Functional tests
Various functional and unit tests are defined throughout the
project. To run all of them, run `sbt test`. You can run a single test
suite with `sbt testOnly`
Integration tests
-----------------
#### Integration tests
Scripted integration tests reside in `sbt/src/sbt-test` and are
written using the same testing infrastructure sbt plugin authors can
@ -190,25 +293,17 @@ command. To run a single test, such as the test in
sbt "scripted project/global-plugin"
Please note that these tests run PAINFULLY slow if the version set in
`build.sbt` is set to SNAPSHOT, as every time the scripted test boots
up a test instance of sbt, remote mirrors are scanned for possible
updates. It is recommended that you set the version suffix to
`-devel`, as in `1.0.0-devel`.
Profiling sbt
-------------
Building Documentation
======================
See [PROFILING](./PROFILING.md)
The scala-sbt.org site documentation is a separate project [website](https://github.com/sbt/website). Follow [the steps in the README](https://github.com/sbt/website#scala-sbtorg) to generate the documentation.
Other notes for maintainers
---------------------------
### Publishing VS Code Extensions
Note for maintainers
====================
Publishing VS Code Extensions
-----------------------------
https://code.visualstudio.com/docs/extensions/publish-extension
Reference https://code.visualstudio.com/docs/extensions/publish-extension
```
$ sbt
@ -219,3 +314,12 @@ cd vscode-sbt-scala/client
$ vsce package
$ vsce publish
```
## Signing the CLA
Contributing to sbt requires you or your employer to sign the
[Lightbend Contributor License Agreement](https://www.lightbend.com/contribute/cla).
To make it easier to respect our license agreements, we have added an sbt task
that takes care of adding the LICENSE headers to new files. Run `headerCreate`
and sbt will put a copyright notice into it.

View File

@ -1,4 +1,4 @@
(See the guidelines for contributing, linked above)
- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines
## steps

153
PROFILING.md Normal file
View File

@ -0,0 +1,153 @@
Profiling sbt
-------------
There are several ways to profile sbt. The new hotness in profiling is FlameGraph.
You first collect stack trace samples, and then it is processed into svg graph.
See:
- [Using FlameGraphs To Illuminate The JVM by Nitsan Wakart](https://www.youtube.com/watch?v=ugRrFdda_JQ)
- [USENIX ATC '17: Visualizing Performance with Flame Graphs](https://www.youtube.com/watch?v=D53T1Ejig1Q)
### jvm-profiling-tools/async-profiler
The first one I recommend is async-profiler. This is available for macOS and Linux,
and works fairly well.
1. Download the installer from https://github.com/jvm-profiling-tools/async-profiler/releases/tag/v1.2
2. Make symbolic link to `build/` and `profiler.sh` to `$HOME/bin`, assuming you have PATH to `$HOME/bin`:
`ln -s ~/Applications/async-profiler/profiler.sh $HOME/bin/profiler.sh`
`ln -s ~/Applications/async-profiler/build $HOME/bin/build`
Next, close all Java applications and anything that may affect the profiling, and run sbt in one terminal:
```
$ sbt exit
```
In another terminal, run:
```
$ jps
92746 sbt-launch.jar
92780 Jps
```
This tells you the process ID of sbt. In this case, it's 92746. While it's running, run
```
$ profiler.sh -d 60 <process id>
Started [cpu] profiling
--- Execution profile ---
Total samples: 31602
Non-Java: 3239 (10.25%)
GC active: 46 (0.15%)
Unknown (native): 14667 (46.41%)
Not walkable (native): 3 (0.01%)
Unknown (Java): 433 (1.37%)
Not walkable (Java): 8 (0.03%)
Thread exit: 1 (0.00%)
Deopt: 9 (0.03%)
Frame buffer usage: 55.658%
Total: 1932000000 (6.11%) samples: 1932
[ 0] java.lang.ClassLoader$NativeLibrary.load
[ 1] java.lang.ClassLoader.loadLibrary0
[ 2] java.lang.ClassLoader.loadLibrary
[ 3] java.lang.Runtime.loadLibrary0
[ 4] java.lang.System.loadLibrary
....
```
This should show a bunch of stacktraces that are useful.
To visualize this as a flamegraph, run:
```
$ profiler.sh -d 60 -f /tmp/flamegraph.svg <process id>
```
This should produce `/tmp/flamegraph.svg` at the end.
![flamegraph](project/flamegraph_svg.png)
See https://gist.github.com/eed3si9n/82d43acc95a002876d357bd8ad5f40d5
### running sbt with standby
One of the tricky things you come across while profiling is figuring out the process ID,
while wanting to profile the beginning of the application.
For this purpose, we've added `sbt.launcher.standby` JVM flag.
In the next version of sbt, you should be able to run:
```
$ sbt -J-Dsbt.launcher.standby=20s exit
```
This will count down for 20s before doing anything else.
### jvm-profiling-tools/perf-map-agent
If you want to try the mixed flamegraph, you can try perf-map-agent.
This uses `dtrace` on macOS and `perf` on Linux.
You first have to compile https://github.com/jvm-profiling-tools/perf-map-agent.
For macOS, here is how to export `JAVA_HOME` before running `cmake .`:
```
$ export JAVA_HOME=$(/usr/libexec/java_home)
$ cmake .
-- The C compiler identification is AppleClang 9.0.0.9000039
-- The CXX compiler identification is AppleClang 9.0.0.9000039
...
$ make
```
In addition, you have to git clone https://github.com/brendangregg/FlameGraph
In a fresh terminal, run sbt with `-XX:+PreserveFramePointer` flag:
```
$ sbt -J-Dsbt.launcher.standby=20s -J-XX:+PreserveFramePointer exit
```
In the terminal that you will run the perf-map:
```
$ cd quicktest/
$ export JAVA_HOME=$(/usr/libexec/java_home)
$ export FLAMEGRAPH_DIR=$HOME/work/FlameGraph
$ jps
94592 Jps
94549 sbt-launch.jar
$ $HOME/work/perf-map-agent/bin/dtrace-java-flames 94549
dtrace: system integrity protection is on, some features will not be available
dtrace: description 'profile-99 ' matched 2 probes
Flame graph SVG written to DTRACE_FLAME_OUTPUT='/Users/xxx/work/quicktest/flamegraph-94549.svg'.
```
This would produce better flamegraph in theory, but the output looks too messy for `sbt exit` case.
See https://gist.github.com/eed3si9n/b5856ff3d987655513380d1a551aa0df
This might be because it assumes that the operations are already JITed.
### ktoso/sbt-jmh
https://github.com/ktoso/sbt-jmh
Due to JIT warmup etc, benchmarking is difficult. JMH runs the same tests multiple times to
remove these effects and comes closer to measuring the performance of your code.
There's also an integration with jvm-profiling-tools/async-profiler, apparently.
### VisualVM
I'd also mention traditional JVM profiling tool. Since VisualVM is opensource,
I'll mention this one: https://visualvm.github.io/
1. First, start VisualVM.
2. Start sbt from a terminal.
3. You should see `xsbt.boot.Boot` under Local.
4. Open it, and select either sampler or profiler, and hit CPU button at the point when you want to start.
If you are familiar with YourKit, it also works similarly.

View File

@ -1 +1 @@
(See the guidelines for contributing, linked above)
- [ ] I've read the [CONTRIBUTING](https://github.com/sbt/sbt/blob/1.x/CONTRIBUTING.md) guidelines

View File

@ -21,10 +21,10 @@ sbt is a build tool for Scala, Java, and more.
For general documentation, see http://www.scala-sbt.org/.
sbt 1.0.x
sbt 1.x
---------
This is the 1.0.x series of sbt. The source code of sbt is split across
This is the 1.x series of sbt. The source code of sbt is split across
several Github repositories, including this one.
- [sbt/io][sbt/io] hosts `sbt.io` module.

20
SUPPORT.md Normal file
View File

@ -0,0 +1,20 @@
[ask]: https://stackoverflow.com/questions/ask?tags=sbt
[Lightbend]: https://www.lightbend.com/
[subscriptions]: https://www.lightbend.com/platform/subscription
[gitter]: https://gitter.im/sbt/sbt
Support
=======
[Lightbend] sponsors sbt and encourages contributions from the active community. Enterprises can adopt it for mission critical systems with confidence because Lightbend stands behind sbt with commercial support and services.
For community support please [ask] on StackOverflow with the tag "sbt" (and the name of the sbt plugin(s) if any).
- State the problem or question clearly and provide enough context. Code examples and `build.sbt` are often useful when appropriately edited.
- There's also [Gitter sbt/sbt room][gitter], but Stackoverflow is recommended so others can benefit from the answers.
For professional support, for instance if you need faster response times, [Lightbend], the maintainer of Scala compiler and sbt, provides:
- [Lightbend Subscriptions][subscriptions], which includes Expert Support
- Training
- Consulting

368
build.sbt
View File

@ -9,7 +9,7 @@ def buildLevelSettings: Seq[Setting[_]] =
inThisBuild(
Seq(
organization := "org.scala-sbt",
version := "1.0.3-SNAPSHOT",
version := "1.2.0-SNAPSHOT",
description := "sbt is an interactive build tool",
bintrayOrganization := Some("sbt"),
bintrayRepository := {
@ -24,45 +24,47 @@ def buildLevelSettings: Seq[Setting[_]] =
Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")),
Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")),
Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")),
Developer("gkossakowski",
"Grzegorz Kossakowski",
"@gkossakowski",
url("https://github.com/gkossakowski")),
Developer(
"gkossakowski",
"Grzegorz Kossakowski",
"@gkossakowski",
url("https://github.com/gkossakowski")
),
Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm"))
),
homepage := Some(url("https://github.com/sbt/sbt")),
scmInfo := Some(ScmInfo(url("https://github.com/sbt/sbt"), "git@github.com:sbt/sbt.git")),
resolvers += Resolver.mavenLocal,
scalafmtOnCompile := true,
scalafmtVersion := "1.3.0",
scalafmtOnCompile in Sbt := false,
scalafmtVersion := "1.4.0",
))
def commonSettings: Seq[Setting[_]] =
Seq[SettingsDefinition](
headerLicense := Some(HeaderLicense.Custom(
"""|sbt
|Copyright 2011 - 2017, Lightbend, Inc.
|Copyright 2008 - 2010, Mark Harrah
|Licensed under BSD-3-Clause license (see LICENSE)
|""".stripMargin
)),
scalaVersion := baseScalaVersion,
componentID := None,
resolvers += Resolver.typesafeIvyRepo("releases"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/",
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"),
javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"),
crossScalaVersions := Seq(baseScalaVersion),
bintrayPackage := (bintrayPackage in ThisBuild).value,
bintrayRepository := (bintrayRepository in ThisBuild).value,
publishArtifact in Test := false,
fork in compile := true,
fork in run := true
) flatMap (_.settings)
def commonSettings: Seq[Setting[_]] = Def.settings(
headerLicense := Some(HeaderLicense.Custom(
"""|sbt
|Copyright 2011 - 2017, Lightbend, Inc.
|Copyright 2008 - 2010, Mark Harrah
|Licensed under BSD-3-Clause license (see LICENSE)
|""".stripMargin
)),
scalaVersion := baseScalaVersion,
componentID := None,
resolvers += Resolver.typesafeIvyRepo("releases"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/",
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"),
javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"),
crossScalaVersions := Seq(baseScalaVersion),
bintrayPackage := (bintrayPackage in ThisBuild).value,
bintrayRepository := (bintrayRepository in ThisBuild).value,
publishArtifact in Test := false,
fork in compile := true,
fork in run := true
)
def minimalSettings: Seq[Setting[_]] =
commonSettings ++ customCommands ++
@ -75,9 +77,21 @@ def testedBaseSettings: Seq[Setting[_]] =
baseSettings ++ testDependencies
val mimaSettings = Def settings (
mimaPreviousArtifacts := (0 to 4).map { v =>
organization.value % moduleName.value % s"1.0.$v" cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
}.toSet
mimaPreviousArtifacts := {
Seq(
"1.0.0", "1.0.1", "1.0.2", "1.0.3", "1.0.4",
"1.1.0", "1.1.1", "1.1.2",
).map { v =>
organization.value % moduleName.value % v cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
}.toSet
},
mimaBinaryIssueFilters ++= Seq(
// Changes in the internal pacakge
exclude[DirectMissingMethodProblem]("sbt.internal.*"),
exclude[FinalClassProblem]("sbt.internal.*"),
exclude[FinalMethodProblem]("sbt.internal.*"),
exclude[IncompatibleResultTypeProblem]("sbt.internal.*"),
),
)
lazy val sbtRoot: Project = (project in file("."))
@ -157,6 +171,11 @@ val collectionProj = (project in file("internal") / "util-collection")
exclude[MissingClassProblem]("sbt.internal.util.Fn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.TypeFunctions.toFn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.Types.toFn1"),
// Instead of defining foldr in KList & overriding in KCons,
// it's now abstract in KList and defined in both KCons & KNil.
exclude[FinalMethodProblem]("sbt.internal.util.KNil.foldr"),
exclude[DirectAbstractMethodProblem]("sbt.internal.util.KList.foldr"),
),
)
.configure(addSbtUtilPosition)
@ -169,6 +188,8 @@ val completeProj = (project in file("internal") / "util-complete")
name := "Completion",
libraryDependencies += jline,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
),
)
.configure(addSbtIO, addSbtUtilControl)
@ -197,6 +218,30 @@ lazy val testingProj = (project in file("testing"))
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// private[sbt]
exclude[IncompatibleMethTypeProblem]("sbt.TestStatus.write"),
exclude[IncompatibleResultTypeProblem]("sbt.TestStatus.read"),
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupErrorEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.EndTestGroupEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.StartTestGroupEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.StartTestGroupEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestCompleteEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestCompleteEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestInitEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestItemDetail.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestItemDetail.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestItemEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestItemEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestStringEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.testing.TestStringEvent.copy$default$1"),
// no reason to use
exclude[DirectMissingMethodProblem]("sbt.JUnitXmlTestsListener.testSuite"),
)
)
.configure(addSbtIO, addSbtCompilerClasspath, addSbtUtilLogging)
@ -245,8 +290,22 @@ lazy val runProj = (project in file("run"))
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.ForkOptions.copy"),
exclude[DirectMissingMethodProblem]("sbt.ForkOptions.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#BufferedOutput.copy"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#BufferedOutput.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#CustomOutput.copy"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#CustomOutput.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#LoggedOutput.copy"),
exclude[DirectMissingMethodProblem]("sbt.OutputStrategy#LoggedOutput.copy$default$*"),
)
)
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath)
.configure(addSbtIO, addSbtUtilLogging, addSbtUtilControl, addSbtCompilerClasspath)
val sbtProjDepsCompileScopeFilter =
ScopeFilter(inDependencies(LocalProject("sbtProj"), includeRoot = false), inConfigurations(Compile))
lazy val scriptedSbtProj = (project in scriptedPath / "sbt")
.dependsOn(commandProj)
@ -254,16 +313,36 @@ lazy val scriptedSbtProj = (project in scriptedPath / "sbt")
baseSettings,
name := "Scripted sbt",
libraryDependencies ++= Seq(launcherInterface % "provided"),
resourceGenerators in Compile += Def task {
val mainClassDir = (classDirectory in Compile in LocalProject("sbtProj")).value
val testClassDir = (classDirectory in Test in LocalProject("sbtProj")).value
val classDirs = (classDirectory all sbtProjDepsCompileScopeFilter).value
val extDepsCp = (externalDependencyClasspath in Compile in LocalProject("sbtProj")).value
val cpStrings = (mainClassDir +: testClassDir +: classDirs) ++ extDepsCp.files map (_.toString)
val file = (resourceManaged in Compile).value / "RunFromSource.classpath"
IO.writeLines(file, cpStrings)
List(file)
},
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// sbt.test package is renamed to sbt.scriptedtest.
exclude[MissingClassProblem]("sbt.test.*"),
),
)
.configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted, addSbtLmCore)
lazy val scriptedPluginProj = (project in scriptedPath / "plugin")
.dependsOn(sbtProj)
.dependsOn(mainProj)
.settings(
baseSettings,
name := "Scripted Plugin",
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// scripted plugin has moved into sbt mothership.
exclude[MissingClassProblem]("sbt.ScriptedPlugin*")
),
)
.configure(addSbtCompilerClasspath)
@ -275,6 +354,15 @@ lazy val actionsProj = (project in file("main-actions"))
name := "Actions",
libraryDependencies += sjsonNewScalaJson.value,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// Removed unused private[sbt] nested class
exclude[MissingClassProblem]("sbt.Doc$Scaladoc"),
// Removed no longer used private[sbt] method
exclude[DirectMissingMethodProblem]("sbt.Doc.generate"),
exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.filesModifiedBytes"),
exclude[DirectMissingMethodProblem]("sbt.compiler.Eval.fileModifiedBytes"),
),
)
.configure(
addSbtIO,
@ -291,15 +379,43 @@ lazy val actionsProj = (project in file("main-actions"))
lazy val protocolProj = (project in file("protocol"))
.enablePlugins(ContrabandPlugin, JsonCodecPlugin)
.dependsOn(collectionProj)
.settings(
testedBaseSettings,
scalacOptions -= "-Ywarn-unused",
scalacOptions += "-Xlint:-unused",
name := "Protocol",
libraryDependencies ++= Seq(sjsonNewScalaJson.value),
libraryDependencies ++= Seq(sjsonNewScalaJson.value, ipcSocket),
managedSourceDirectories in Compile +=
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats,
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.protocol.ChannelAcceptedEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ChannelAcceptedEvent.copy$default$1"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecCommand.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecCommand.copy$default$1"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecCommand.copy$default$2"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecStatusEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecStatusEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecutionEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.ExecutionEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.InitCommand.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.InitCommand.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.LogEvent.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.LogEvent.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQuery.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQuery.copy$default$1"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQueryFailure.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQueryFailure.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQuerySuccess.copy"),
exclude[DirectMissingMethodProblem]("sbt.protocol.SettingQuerySuccess.copy$default$*"),
// ignore missing methods in sbt.internal
exclude[DirectMissingMethodProblem]("sbt.internal.*"),
)
)
.configure(addSbtUtilLogging)
@ -310,14 +426,19 @@ lazy val commandProj = (project in file("main-command"))
.settings(
testedBaseSettings,
name := "Command",
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson.value, templateResolverApi,
jna, jnaPlatform),
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson.value, templateResolverApi),
managedSourceDirectories in Compile +=
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats,
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// dropped private[sbt] method
exclude[DirectMissingMethodProblem]("sbt.BasicCommands.compatCommands"),
// dropped mainly internal command strings holder
exclude[MissingClassProblem]("sbt.BasicCommandStrings$Compat$"),
exclude[DirectMissingMethodProblem]("sbt.BasicCommands.rebootOptionParser"),
// Changed the signature of Server method. nacho cheese.
exclude[DirectMissingMethodProblem]("sbt.internal.server.Server.*"),
// Added method to ServerInstance. This is also internal.
@ -326,6 +447,21 @@ lazy val commandProj = (project in file("main-command"))
exclude[ReversedMissingMethodProblem]("sbt.internal.CommandChannel.*"),
// Added an overload to reboot. The overload is private[sbt].
exclude[ReversedMissingMethodProblem]("sbt.StateOps.reboot"),
// Replace nailgun socket stuff
exclude[MissingClassProblem]("sbt.internal.NG*"),
exclude[MissingClassProblem]("sbt.internal.ReferenceCountedFileDescriptor"),
// made private[sbt] method private[this]
exclude[DirectMissingMethodProblem]("sbt.State.handleException"),
// copy method was never meant to be public
exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy"),
exclude[DirectMissingMethodProblem]("sbt.CommandSource.copy$default$*"),
exclude[DirectMissingMethodProblem]("sbt.Exec.copy"),
exclude[DirectMissingMethodProblem]("sbt.Exec.copy$default$*"),
// internal
exclude[ReversedMissingMethodProblem]("sbt.internal.client.ServerConnection.*"),
),
unmanagedSources in (Compile, headerCreate) := {
val old = (unmanagedSources in (Compile, headerCreate)).value
@ -345,40 +481,35 @@ lazy val commandProj = (project in file("main-command"))
lazy val coreMacrosProj = (project in file("core-macros"))
.dependsOn(collectionProj)
.settings(
commonSettings,
baseSettings,
name := "Core Macros",
libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value,
mimaSettings,
)
/* Write all the compile-time dependencies of the spores macro to a file,
* in order to read it from the created Toolbox to run the neg tests. */
lazy val generateToolboxClasspath = Def.task {
val classpathAttributes = (dependencyClasspath in Compile).value
val dependenciesClasspath =
classpathAttributes.map(_.data.getAbsolutePath).mkString(":")
val scalaBinVersion = (scalaBinaryVersion in Compile).value
val targetDir = (target in Compile).value
val compiledClassesDir = targetDir / s"scala-$scalaBinVersion/classes"
val testClassesDir = targetDir / s"scala-$scalaBinVersion/test-classes"
val classpath = s"$compiledClassesDir:$testClassesDir:$dependenciesClasspath"
val resourceDir = (resourceDirectory in Compile).value
resourceDir.mkdir() // In case it doesn't exist
val toolboxTestClasspath = resourceDir / "toolbox.classpath"
IO.write(toolboxTestClasspath, classpath)
val result = List(toolboxTestClasspath.getAbsoluteFile)
streams.value.log.success("Wrote the classpath for the macro neg test suite.")
result
}
// Fixes scope=Scope for Setting (core defined in collectionProj) to define the settings system used in build definitions
lazy val mainSettingsProj = (project in file("main-settings"))
.dependsOn(completeProj, commandProj, stdTaskProj, coreMacrosProj)
.settings(
testedBaseSettings,
name := "Main Settings",
resourceGenerators in Compile += generateToolboxClasspath.taskValue,
BuildInfoPlugin.buildInfoDefaultSettings,
addBuildInfoToConfig(Test),
buildInfoObject in Test := "TestBuildInfo",
buildInfoKeys in Test := Seq[BuildInfoKey](
classDirectory in Compile,
classDirectory in Test,
// WORKAROUND https://github.com/sbt/sbt-buildinfo/issues/117
BuildInfoKey.map((dependencyClasspath in Compile).taskValue) { case (ident, cp) => ident -> cp.files },
),
mimaSettings,
mimaBinaryIssueFilters ++= Seq(
exclude[DirectMissingMethodProblem]("sbt.Scope.display012StyleMasked"),
// added a method to a sealed trait
exclude[InheritedNewAbstractMethodProblem]("sbt.Scoped.canEqual"),
exclude[InheritedNewAbstractMethodProblem]("sbt.ScopedTaskable.canEqual"),
),
)
.configure(
addSbtIO,
@ -393,7 +524,7 @@ lazy val mainSettingsProj = (project in file("main-settings"))
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main"))
.enablePlugins(ContrabandPlugin)
.dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj)
.dependsOn(logicProj, actionsProj, mainSettingsProj, runProj, commandProj, collectionProj, scriptedSbtProj)
.settings(
testedBaseSettings,
name := "Main",
@ -403,17 +534,14 @@ lazy val mainProj = (project in file("main"))
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
mimaSettings,
mimaBinaryIssueFilters ++= Vector(
// Changed the signature of NetworkChannel ctor. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.server.NetworkChannel.*"),
// ctor for ConfigIndex. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.ConfigIndex.*"),
// New and changed methods on KeyIndex. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.KeyIndex.*"),
exclude[DirectMissingMethodProblem]("sbt.internal.KeyIndex.*"),
// Removed unused val. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.RelayAppender.jsonFormat"),
// Removed unused def. internal.
exclude[DirectMissingMethodProblem]("sbt.internal.Load.isProjectThis"),
// Changed signature or removed private[sbt] methods
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedLibs0"),
exclude[DirectMissingMethodProblem]("sbt.Defaults.allTestGroupsTask"),
exclude[DirectMissingMethodProblem]("sbt.Plugins.topologicalSort"),
exclude[IncompatibleMethTypeProblem]("sbt.Defaults.allTestGroupsTask"),
)
)
.configure(
@ -430,9 +558,8 @@ lazy val mainProj = (project in file("main"))
// with the sole purpose of providing certain identifiers without qualification (with a package object)
lazy val sbtProj = (project in file("sbt"))
.dependsOn(mainProj, scriptedSbtProj % "test->test")
.enablePlugins(BuildInfoPlugin)
.settings(
baseSettings,
testedBaseSettings,
name := "sbt",
normalizedName := "sbt",
crossScalaVersions := Seq(baseScalaVersion),
@ -440,15 +567,28 @@ lazy val sbtProj = (project in file("sbt"))
javaOptions ++= Seq("-Xdebug", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"),
mimaSettings,
mimaBinaryIssueFilters ++= sbtIgnoredProblems,
BuildInfoPlugin.buildInfoDefaultSettings,
addBuildInfoToConfig(Test),
BuildInfoPlugin.buildInfoDefaultSettings,
buildInfoObject in Test := "TestBuildInfo",
buildInfoKeys in Test := Seq[BuildInfoKey](fullClasspath in Compile),
connectInput in run in Test := true,
buildInfoKeys in Test := Seq[BuildInfoKey](
version,
// WORKAROUND https://github.com/sbt/sbt-buildinfo/issues/117
BuildInfoKey.map((fullClasspath in Compile).taskValue) { case (ident, cp) => ident -> cp.files },
classDirectory in Compile,
classDirectory in Test,
),
Test / run / connectInput := true,
Test / run / outputStrategy := Some(StdoutOutput),
Test / run / fork := true,
)
.configure(addSbtCompilerBridge)
lazy val sbtIgnoredProblems = {
Vector(
exclude[MissingClassProblem]("buildinfo.BuildInfo"),
exclude[MissingClassProblem]("buildinfo.BuildInfo$"),
// Added more items to Import trait.
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$WatchSource_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.WatchSource"),
@ -463,10 +603,9 @@ lazy val sbtIgnoredProblems = {
}
def runNpm(command: String, base: File, log: sbt.internal.util.ManagedLogger) = {
val npm = if (sbt.internal.util.Util.isWindows) "npm.cmd" else "npm"
import scala.sys.process._
try {
val exitCode = Process(s"$npm $command", Option(base)) ! log
val exitCode = Process(s"npm $command", Option(base)) ! log
if (exitCode != 0) throw new Exception("Process returned exit code: " + exitCode)
} catch {
case e: java.io.IOException => log.warn("failed to run npm " + e.getMessage)
@ -507,35 +646,29 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
)
def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
// publishLocalBinAll.value // TODO: Restore scripted needing only binary jars.
publishAll.value
// These two projects need to be visible in a repo even if the default
// local repository is hidden, so we publish them to an alternate location and add
// that alternate repo to the running scripted test (in Scripted.scriptedpreScripted).
// (altLocalPublish in interfaceProj).value
// (altLocalPublish in compileInterfaceProj).value
(sbtProj / Test / compile).value // make sure sbt.RunFromSourceMain is compiled
Scripted.doScripted(
(sbtLaunchJar in bundledLauncherProj).value,
(fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value,
scriptedBufferLog.value,
result,
Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed,
scriptedPrescripted.value,
scriptedLaunchOpts.value
)
}
def scriptedUnpublishedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
Scripted.doScripted(
(sbtLaunchJar in bundledLauncherProj).value,
(fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value,
scriptedBufferLog.value,
result,
Def.setting(Scripted.scriptedParser(scriptedSource.value)).parsed,
scriptedPrescripted.value,
scriptedLaunchOpts.value
)
@ -572,14 +705,12 @@ def otherRootSettings =
scripted := scriptedTask.evaluated,
scriptedUnpublished := scriptedUnpublishedTask.evaluated,
scriptedSource := (sourceDirectory in sbtProj).value / "sbt-test",
// scriptedPrescripted := { addSbtAlternateResolver _ },
scriptedLaunchOpts := List("-Xmx1500M", "-Xms512M", "-server"),
publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value },
publishLocalBinAll := { val _ = (publishLocalBin).all(ScopeFilter(inAnyProject)).value },
aggregate in bintrayRelease := false
) ++ inConfig(Scripted.RepoOverrideTest)(
Seq(
scriptedPrescripted := (_ => ()),
scriptedLaunchOpts := List(
"-Xmx1500M",
"-Xms512M",
@ -592,43 +723,18 @@ def otherRootSettings =
scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test"
))
// def addSbtAlternateResolver(scriptedRoot: File) = {
// val resolver = scriptedRoot / "project" / "AddResolverPlugin.scala"
// if (!resolver.exists) {
// IO.write(resolver, s"""import sbt._
// |import Keys._
// |
// |object AddResolverPlugin extends AutoPlugin {
// | override def requires = sbt.plugins.JvmPlugin
// | override def trigger = allRequirements
// |
// | override lazy val projectSettings = Seq(resolvers += alternativeLocalResolver)
// | lazy val alternativeLocalResolver = Resolver.file("$altLocalRepoName", file("$altLocalRepoPath"))(Resolver.ivyStylePatterns)
// |}
// |""".stripMargin)
// }
// }
lazy val docProjects: ScopeFilter = ScopeFilter(
inAnyProject -- inProjects(sbtRoot, sbtProj, scriptedSbtProj, scriptedPluginProj),
inConfigurations(Compile)
)
lazy val safeUnitTests = taskKey[Unit]("Known working tests (for both 2.10 and 2.11)")
lazy val safeProjects: ScopeFilter = ScopeFilter(
inProjects(mainSettingsProj, mainProj, actionsProj, runProj, stdTaskProj),
inAnyProject -- inProjects(sbtRoot, sbtProj),
inConfigurations(Test)
)
lazy val otherUnitTests = taskKey[Unit]("Unit test other projects")
lazy val otherProjects: ScopeFilter = ScopeFilter(
inProjects(
testingProj,
testAgentProj,
taskProj,
scriptedSbtProj,
scriptedPluginProj,
commandProj,
mainSettingsProj,
mainProj,
sbtProj
),
inConfigurations(Test)
@ -652,6 +758,29 @@ def customCommands: Seq[Setting[_]] = Seq(
"reload" ::
state
},
commands += Command.command("publishLocalAllModule") { state =>
val extracted = Project.extract(state)
import extracted._
val sv = get(scalaVersion)
val projs = structure.allProjectRefs
val ioOpt = projs find { case ProjectRef(_, id) => id == "ioRoot"; case _ => false }
val utilOpt = projs find { case ProjectRef(_, id) => id == "utilRoot"; case _ => false }
val lmOpt = projs find { case ProjectRef(_, id) => id == "lmRoot"; case _ => false }
val zincOpt = projs find { case ProjectRef(_, id) => id == "zincRoot"; case _ => false }
(ioOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(utilOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(lmOpt map { case ProjectRef(build, _) => "{" + build.toString + "}/publishLocal" }).toList :::
(zincOpt map { case ProjectRef(build, _) =>
val zincSv = get(scalaVersion in ProjectRef(build, "zinc"))
val csv = get(crossScalaVersions in ProjectRef(build, "compilerBridge")).toList
(csv flatMap { bridgeSv =>
s"++$bridgeSv" :: ("{" + build.toString + "}compilerBridge/publishLocal") :: Nil
}) :::
List(s"++$zincSv", "{" + build.toString + "}/publishLocal")
}).getOrElse(Nil) :::
List(s"++$sv", "publishLocal") :::
state
},
/** There are several complications with sbt's build.
* First is the fact that interface project is a Java-only project
* that uses source generator from datatype subproject in Scala 2.10.6.
@ -681,3 +810,12 @@ def customCommands: Seq[Setting[_]] = Seq(
state
}
)
inThisBuild(Seq(
whitesourceProduct := "Lightbend Reactive Platform",
whitesourceAggregateProjectName := "sbt-master",
whitesourceAggregateProjectToken := "e7a1e55518c0489a98e9c7430c8b2ccd53d9f97c12ed46148b592ebe4c8bf128",
whitesourceIgnoredScopes ++= Seq("plugin", "scalafmt", "sxr"),
whitesourceFailOnError := sys.env.contains("WHITESOURCE_PASSWORD"), // fail if pwd is present
whitesourceForceCheckAllDependencies := true,
))

View File

@ -29,13 +29,14 @@ object ContextUtil {
* Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this
* method, the result of this method is `f(<Tree of someValue>)`.
*/
def selectMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
def selectMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
import c.universe._
c.macroApplication match {
case s @ Select(Apply(_, t :: Nil), tp) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
}
}
@ -211,12 +212,14 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit =
new ChangeOwnerAndModuleClassTraverser(
prev.asInstanceOf[global.Symbol],
next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree])
next.asInstanceOf[global.Symbol]
).traverse(tree.asInstanceOf[global.Tree])
// Workaround copied from scala/async: can be removed once https://github.com/scala/scala/pull/3179 is merged.
private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol,
newowner: global.Symbol)
extends global.ChangeOwnerTraverser(oldowner, newowner) {
private[this] class ChangeOwnerAndModuleClassTraverser(
oldowner: global.Symbol,
newowner: global.Symbol
) extends global.ChangeOwnerTraverser(oldowner, newowner) {
override def traverse(tree: global.Tree): Unit = {
tree match {
case _: global.DefTree => change(tree.symbol.moduleClass)
@ -248,7 +251,8 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* the type constructor `[x] List[x]`.
*/
def extractTC(tcp: AnyRef with Singleton, name: String)(
implicit it: ctx.TypeTag[tcp.type]): ctx.Type = {
implicit it: ctx.TypeTag[tcp.type]
): ctx.Type = {
val itTpe = it.tpe.asInstanceOf[global.Type]
val m = itTpe.nonPrivateMember(global.newTypeName(name))
val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type]
@ -262,8 +266,10 @@ final class ContextUtil[C <: blackbox.Context](val ctx: C) {
* Typically, `f` is a `Select` or `Ident`.
* The wrapper is replaced with the result of `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
*/
def transformWrappers(t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = {
def transformWrappers(
t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]
): Tree = {
// the main tree transformer that replaces calls to InputWrapper.wrap(x) with
// plain Idents that reference the actual input value
object appTransformer extends Transformer {

View File

@ -26,9 +26,10 @@ sealed trait Converted[C <: blackbox.Context with Singleton] {
}
object Converted {
def NotApplicable[C <: blackbox.Context with Singleton] = new NotApplicable[C]
final case class Failure[C <: blackbox.Context with Singleton](position: C#Position,
message: String)
extends Converted[C] {
final case class Failure[C <: blackbox.Context with Singleton](
position: C#Position,
message: String
) extends Converted[C] {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message)
}
@ -36,9 +37,10 @@ object Converted {
def isSuccess = false
def transform(f: C#Tree => C#Tree): Converted[C] = this
}
final case class Success[C <: blackbox.Context with Singleton](tree: C#Tree,
finalTransform: C#Tree => C#Tree)
extends Converted[C] {
final case class Success[C <: blackbox.Context with Singleton](
tree: C#Tree,
finalTransform: C#Tree => C#Tree
) extends Converted[C] {
def isSuccess = true
def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform)
}

View File

@ -41,9 +41,11 @@ object Instance {
final val MapName = "map"
final val InstanceTCName = "M"
final class Input[U <: Universe with Singleton](val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef)
final class Input[U <: Universe with Singleton](
val tpe: U#Type,
val expr: U#Tree,
val local: U#ValDef
)
trait Transform[C <: blackbox.Context with Singleton, N[_]] {
def apply(in: C#Tree): C#Tree
}

View File

@ -13,8 +13,9 @@ import macros._
/** A `TupleBuilder` that uses a KList as the tuple representation.*/
object KListBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val ctx: c.type = c
val util = ContextUtil[c.type](c)
@ -47,15 +48,20 @@ object KListBuilder extends TupleBuilder {
case Nil => revBindings.reverse
}
private[this] def makeKList(revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type): Tree =
private[this] def makeKList(
revInputs: Inputs[c.universe.type],
klist: Tree,
klistType: Type
): Tree =
revInputs match {
case in :: tail =>
val next = ApplyTree(
TypeApply(Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil),
in.expr :: klist :: Nil)
TypeApply(
Ident(kcons),
TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil
),
in.expr :: klist :: Nil
)
makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil))
case Nil => klist
}

View File

@ -16,8 +16,9 @@ import macros._
* and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.
*/
object MixedBuilder extends TupleBuilder {
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = {
val delegate = if (inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder
delegate.make(c)(mt, inputs)
}

View File

@ -35,8 +35,9 @@ trait TupleBuilder {
type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]]
/** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */
def make(c: blackbox.Context)(tcType: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type]
def make(
c: blackbox.Context
)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type]
}
trait BuilderResult[C <: blackbox.Context with Singleton] {

View File

@ -22,8 +22,9 @@ object TupleNBuilder extends TupleBuilder {
final val MaxInputs = 11
final val TupleMethodName = "tuple"
def make(c: blackbox.Context)(mt: c.Type,
inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
def make(
c: blackbox.Context
)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] =
new BuilderResult[c.type] {
val util = ContextUtil[c.type](c)
import c.universe._
@ -34,8 +35,9 @@ object TupleNBuilder extends TupleBuilder {
val ctx: c.type = c
val representationC: PolyType = {
val tcVariable: Symbol = newTCVariable(util.initialOwner)
val tupleTypeArgs = inputs.map(in =>
internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type])
val tupleTypeArgs = inputs.map(
in => internal.typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]
)
val tuple = global.definitions.tupleType(tupleTypeArgs)
internal.polyType(tcVariable :: Nil, tuple.asInstanceOf[Type])
}
@ -47,10 +49,12 @@ object TupleNBuilder extends TupleBuilder {
}
def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1)
def bindTuple(param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int): List[ValDef] =
def bindTuple(
param: ValDef,
revBindings: List[ValDef],
params: List[ValDef],
i: Int
): List[ValDef] =
params match {
case (x @ ValDef(mods, name, tpt, _)) :: xs =>
val rhs = select(Ident(param.name), "_" + i.toString)

View File

@ -17,7 +17,9 @@ import Types._
*/
trait AList[K[L[x]]] {
def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[K[P]]
def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[K[P]]
def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A
def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil)
@ -33,8 +35,11 @@ object AList {
val empty: Empty = new Empty {
def transform[M[_], N[_]](in: Unit, f: M ~> N) = ()
def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(())
override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] =
app.pure(f(()))
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[Unit] = np.pure(())
}
type SeqList[T] = AList[λ[L[x] => List[L[T]]]]
@ -42,9 +47,12 @@ object AList {
/** AList for a homogeneous sequence. */
def seq[T]: SeqList[T] = new SeqList[T] {
def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T])
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t))
def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A =
(init /: s.reverse)((t, m) => f(m, t))
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = {
override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(
implicit ap: Applicative[M]
): M[C] = {
def loop[V](in: List[M[T]], g: List[T] => V): M[V] =
in match {
case Nil => ap.pure(g(Nil))
@ -55,15 +63,20 @@ object AList {
loop(s, f)
}
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ???
def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[List[P[T]]] = ???
}
/** AList for the arbitrary arity data structure KList. */
def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] {
def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] =
k.apply(f)(app)
def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[KL[P]] = k.traverse[N, P](f)(np)
override def toList[M[_]](k: KL[M]) = k.toList
}
@ -73,7 +86,9 @@ object AList {
def single[A]: Single[A] = new Single[A] {
def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a)
def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a)
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[P[A]] = f(a)
}
type ASplit[K[L[x]], B[x]] = AList[λ[L[x] => K[(L B)#l]]]
@ -85,7 +100,9 @@ object AList {
def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] =
base.transform[(M B)#l, (N B)#l](value, nestCon[M, N, B](f))
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[Split[P]] = {
def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[Split[P]] = {
val g = nestCon[M, (N P)#l, B](f)
base.traverse[(M B)#l, N, (P B)#l](value, g)(np)
}
@ -101,7 +118,9 @@ object AList {
type T2[M[_]] = (M[A], M[B])
def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2))
def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init))
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T2[P]] = {
def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T2[P]] = {
val g = (Tuple2.apply[P[A], P[B]] _).curried
np.apply(np.map(g, f(t._1)), f(t._2))
}
@ -113,7 +132,9 @@ object AList {
type T3[M[_]] = (M[A], M[B], M[C])
def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3))
def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init)))
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T3[P]] = {
def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T3[P]] = {
val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3))
}
@ -124,8 +145,11 @@ object AList {
def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] {
type T4[M[_]] = (M[A], M[B], M[C], M[D])
def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4))
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T4[P]] = {
def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, init))))
def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T4[P]] = {
val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4))
}
@ -136,8 +160,11 @@ object AList {
def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] {
type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E])
def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5))
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T5[P]] = {
def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init)))))
def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T5[P]] = {
val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried
np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5))
}
@ -147,71 +174,213 @@ object AList {
type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l]
def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] {
type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F])
def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T6[P]] = {
def transform[M[_], N[_]](t: T6[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6))
def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init))))))
def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T6[P]] = {
val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6))
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
)
}
}
sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) }
sealed trait T7K[A, B, C, D, E, F, G] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G])
}
type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l]
def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] {
type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G])
def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T7[P]] = {
def transform[M[_], N[_]](t: T7[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7))
def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init)))))))
def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T7[P]] = {
val g = (Tuple7.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7))
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
)
}
}
sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) }
sealed trait T8K[A, B, C, D, E, F, G, H] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H])
}
type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l]
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8))
def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] =
new T8List[A, B, C, D, E, F, G, H] {
type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H])
def transform[M[_], N[_]](t: T8[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8))
def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init))))))))
def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T8[P]] = {
val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
)
}
}
}
sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) }
sealed trait T9K[A, B, C, D, E, F, G, H, I] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I])
}
type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l]
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9))
def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] =
new T9List[A, B, C, D, E, F, G, H, I] {
type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I])
def transform[M[_], N[_]](t: T9[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9))
def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T =
f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init)))))))))
def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T9[P]] = {
val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
)
}
}
}
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) }
sealed trait T10K[A, B, C, D, E, F, G, H, I, J] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J])
}
type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l]
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init))))))))))
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T10[P]] = {
val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10))
def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] =
new T10List[A, B, C, D, E, F, G, H, I, J] {
type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J])
def transform[M[_], N[_]](t: T10[M], f: M ~> N) =
(f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10))
def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))
)
def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T10[P]] = {
val g =
(Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
)
}
}
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) }
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11))
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))))
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[T11[P]] = {
val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11))
}
sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] {
type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K])
}
type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l]
def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] =
new T11List[A, B, C, D, E, F, G, H, I, J, K] {
type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K])
def transform[M[_], N[_]](t: T11[M], f: M ~> N) =
(
f(t._1),
f(t._2),
f(t._3),
f(t._4),
f(t._5),
f(t._6),
f(t._7),
f(t._8),
f(t._9),
f(t._10),
f(t._11)
)
def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T =
f(
t._1,
f(
t._2,
f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init)))))))))
)
)
def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N P)#l)(
implicit np: Applicative[N]
): N[T11[P]] = {
val g = (Tuple11
.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(
np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)),
f(t._5)
),
f(t._6)
),
f(t._7)
),
f(t._8)
),
f(t._9)
),
f(t._10)
),
f(t._11)
)
}
}
}

View File

@ -31,7 +31,8 @@ sealed trait AttributeKey[T] {
def description: Option[String]
/**
* In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys.
* In environments that support delegation, looking up this key when it has no associated value
* will delegate to the values associated with these keys.
* The delegation proceeds in order the keys are returned here.
*/
def extend: Seq[AttributeKey[_]]
@ -70,20 +71,26 @@ object AttributeKey {
def apply[T: Manifest: OptJsonWriter](name: String, description: String): AttributeKey[T] =
apply(name, description, Nil)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
rank: Int
): AttributeKey[T] =
apply(name, description, Nil, rank)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]]): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]]
): AttributeKey[T] =
apply(name, description, extend, Int.MaxValue)
def apply[T: Manifest: OptJsonWriter](name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int): AttributeKey[T] =
def apply[T: Manifest: OptJsonWriter](
name: String,
description: String,
extend: Seq[AttributeKey[_]],
rank: Int
): AttributeKey[T] =
make(name, Some(description), extend, rank)
private[sbt] def copyWithRank[T](a: AttributeKey[T], rank: Int): AttributeKey[T] =

View File

@ -7,6 +7,8 @@
package sbt.internal.util
import scala.collection.JavaConverters._
/** A mutable set interface that uses object identity to test for set membership.*/
trait IDSet[T] {
def apply(t: T): Boolean
@ -41,7 +43,7 @@ object IDSet {
def +=(t: T) = { backing.put(t, Dummy); () }
def ++=(t: Iterable[T]) = t foreach +=
def -=(t: T) = if (backing.remove(t) eq null) false else true
def all = collection.JavaConverters.collectionAsScalaIterable(backing.keySet)
def all = backing.keySet.asScala
def toList = all.toList
def isEmpty = backing.isEmpty

View File

@ -170,8 +170,10 @@ abstract class EvaluateSettings[Scope] {
}
protected final def setValue(v: T): Unit = {
assert(state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString)
assert(
state != Evaluated,
"Already evaluated (trying to set value to " + v + "): " + toString
)
if (v == null) sys.error("Setting value cannot be null: " + keyString)
value = v
state = Evaluated

View File

@ -10,7 +10,7 @@ package sbt.internal.util
import Types._
import Classes.Applicative
/** Heterogeneous list with each element having type M[T] for some type T.*/
/** A higher-kinded heterogeneous list of elements that share the same type constructor `M[_]`. */
sealed trait KList[+M[_]] {
type Transform[N[_]] <: KList[N]
@ -18,7 +18,7 @@ sealed trait KList[+M[_]] {
def transform[N[_]](f: M ~> N): Transform[N]
/** Folds this list using a function that operates on the homogeneous type of the elements of this list. */
def foldr[B](f: (M[_], B) => B, init: B): B = init // had trouble defining it in KNil
def foldr[B](f: (M[_], B) => B, init: B): B
/** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */
def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z]
@ -54,13 +54,14 @@ final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KL
override def foldr[B](f: (M[_], B) => B, init: B): B = f(head, tail.foldr(f, init))
}
sealed abstract class KNil extends KList[Nothing] {
sealed abstract class KNil extends KList[NothingK] {
final type Transform[N[_]] = KNil
final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil
final def transform[N[_]](f: NothingK ~> N): Transform[N] = KNil
final def foldr[B](f: (NothingK[_], B) => B, init: B): B = init
final def toList = Nil
final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil))
final def traverse[N[_], P[_]](f: Nothing ~> (N P)#l)(implicit np: Applicative[N]): N[KNil] =
final def traverse[N[_], P[_]](f: NothingK ~> (N P)#l)(implicit np: Applicative[N]): N[KNil] =
np.pure(KNil)
}

View File

@ -35,10 +35,10 @@ private final class Settings0[Scope](
data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) }.toSeq
def get[T](scope: Scope, key: AttributeKey[T]): Option[T] =
delegates(scope).toStream.flatMap(sc => getDirect(sc, key)).headOption
delegates(scope).flatMap(sc => getDirect(sc, key)).headOption
def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] =
delegates(scope).toStream.find(sc => getDirect(sc, key).isDefined)
delegates(scope).find(sc => getDirect(sc, key).isDefined)
def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] =
(data get scope).flatMap(_ get key)
@ -357,7 +357,8 @@ trait Init[Scope] {
keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "")
new Uninitialized(
keys,
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ")
prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n "
)
}
final class Compiled[T](
@ -374,8 +375,9 @@ trait Init[Scope] {
val locals = compiled flatMap {
case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil
}
val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil))
val ordered = Dag.topologicalSort(locals)(
_.dependencies.flatMap(dep => if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)
)
def flatten(
cmap: Map[ScopedKey[_], Flattened],
key: ScopedKey[_],
@ -383,7 +385,8 @@ trait Init[Scope] {
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil))
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)
)
val empty = Map.empty[ScopedKey[_], Flattened]
@ -415,7 +418,8 @@ trait Init[Scope] {
* Intersects two scopes, returning the more specific one if they intersect, or None otherwise.
*/
private[sbt] def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific
else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific
else None

View File

@ -65,7 +65,7 @@ object Signals {
}
// Must only be referenced using a
// try { } catch { case e: LinkageError => ... }
// try { } catch { case _: LinkageError => ... }
// block to
private final class Signals0 {
def supported(signal: String): Boolean = {

View File

@ -9,6 +9,7 @@ package sbt.internal.util
trait TypeFunctions {
type Id[X] = X
type NothingK[X] = Nothing
sealed trait Const[A] { type Apply[B] = A }
sealed trait ConstK[A] { type l[L[x]] = A }
sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] }

View File

@ -7,8 +7,7 @@
package sbt.internal.util
import org.scalacheck._
import Prop._
import org.scalacheck._, Prop._
object SettingsTest extends Properties("settings") {
val settingsExample: SettingsExample = SettingsExample()
@ -160,7 +159,7 @@ object SettingsTest extends Properties("settings") {
final def checkCircularReferences(intermediate: Int): Prop = {
val ccr = new CCR(intermediate)
try { evaluate(setting(chk, ccr.top) :: Nil); false } catch {
case e: java.lang.Exception => true
case _: java.lang.Exception => true
}
}
@ -197,18 +196,18 @@ object SettingsTest extends Properties("settings") {
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
try { make(settings)(delegates, scopeLocal, showFullKey) } catch {
case e: Throwable => e.printStackTrace; throw e
case e: Throwable => e.printStackTrace(); throw e
}
}
// This setup is a workaround for module synchronization issues
final class CCR(intermediate: Int) {
import SettingsTest.settingsExample._
lazy val top = iterate(value(intermediate), intermediate)
def iterate(init: Initialize[Int], i: Int): Initialize[Int] =
lazy val top = iterate(value(intermediate))
def iterate(init: Initialize[Int]): Initialize[Int] =
bind(init) { t =>
if (t <= 0)
top
else
iterate(value(t - 1), t - 1)
iterate(value(t - 1))
}
}

View File

@ -49,8 +49,9 @@ abstract class JLine extends LineReader {
private[this] def readLineDirect(prompt: String, mask: Option[Char]): Option[String] =
if (handleCONT)
Signals.withHandler(() => resume(), signal = Signals.CONT)(() =>
readLineDirectRaw(prompt, mask))
Signals.withHandler(() => resume(), signal = Signals.CONT)(
() => readLineDirectRaw(prompt, mask)
)
else
readLineDirectRaw(prompt, mask)
@ -132,7 +133,7 @@ private[sbt] object JLine {
def createReader(): ConsoleReader = createReader(None, JLine.makeInputStream(true))
def createReader(historyPath: Option[File], in: InputStream): ConsoleReader =
usingTerminal { t =>
usingTerminal { _ =>
val cr = new ConsoleReader(in, System.out)
cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650
cr.setBellEnabled(false)

View File

@ -10,7 +10,7 @@ package complete
import java.lang.Character.{ toLowerCase => lower }
/** @author Paul Phillips*/
/** @author Paul Phillips */
object EditDistance {
/**
@ -24,7 +24,6 @@ object EditDistance {
insertCost: Int = 1,
deleteCost: Int = 1,
subCost: Int = 1,
transposeCost: Int = 1,
matchCost: Int = 0,
caseCost: Int = 1,
transpositions: Boolean = false

View File

@ -11,11 +11,7 @@ package complete
import History.number
import java.io.File
final class History private (
val lines: IndexedSeq[String],
val path: Option[File],
error: String => Unit
) {
final class History private (val lines: IndexedSeq[String], val path: Option[File]) {
private def reversed = lines.reverse
def all: Seq[String] = lines
@ -52,8 +48,8 @@ final class History private (
}
object History {
def apply(lines: Seq[String], path: Option[File], error: String => Unit): History =
new History(lines.toIndexedSeq, path, sys.error)
def apply(lines: Seq[String], path: Option[File]): History =
new History(lines.toIndexedSeq, path)
def number(s: String): Option[Int] =
try { Some(s.toInt) } catch { case _: NumberFormatException => None }

View File

@ -11,7 +11,7 @@ package complete
import jline.console.ConsoleReader
import jline.console.completer.{ Completer, CompletionHandler }
import scala.annotation.tailrec
import scala.collection.JavaConverters
import scala.collection.JavaConverters._
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
@ -91,7 +91,8 @@ object JLineCompletion {
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
def customCompletor(
f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean =
f: (String, Int) => (Seq[String], Seq[String])
): (ConsoleReader, Int) => Boolean =
(reader, level) => {
val success = complete(beforeCursor(reader), reader => f(reader, level), reader)
reader.flush()
@ -154,7 +155,7 @@ object JLineCompletion {
if (line.charAt(line.length - 1) != '\n')
reader.println()
}
reader.printColumns(JavaConverters.seqAsJavaList(columns.map(_.trim)))
reader.printColumns(columns.map(_.trim).asJava)
}
def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0

View File

@ -209,7 +209,11 @@ object Parser extends ParserMain {
a.ifValid {
a.result match {
case Some(av) => success(f(av))
case None => new MapParser(a, f)
case None =>
a match {
case m: MapParser[_, A] => m.map(f)
case _ => new MapParser(a, f)
}
}
}
@ -275,8 +279,10 @@ object Parser extends ParserMain {
revAcc: List[T]
): Parser[Seq[T]] = {
assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")")
assume(max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")")
assume(
max >= min,
"Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")"
)
def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] =
repeated match {
@ -381,8 +387,8 @@ trait ParserMain {
}
/** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/
implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] =
charClass(r contains _).examples(r.map(_.toString): _*)
implicit def range(r: collection.immutable.NumericRange[Char], label: String): Parser[Char] =
charClass(r contains _, label).examples(r.map(_.toString): _*)
/** Defines a Parser that parses a single character only if it is contained in `legal`.*/
def chars(legal: String): Parser[Char] = {
@ -394,7 +400,7 @@ trait ParserMain {
* Defines a Parser that parses a single character only if the predicate `f` returns true for that character.
* If this parser fails, `label` is used as the failure message.
*/
def charClass(f: Char => Boolean, label: String = "<unspecified>"): Parser[Char] =
def charClass(f: Char => Boolean, label: String): Parser[Char] =
new CharacterClass(f, label)
/** Presents a single Char `ch` as a Parser that only parses that exact character. */
@ -744,6 +750,7 @@ private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser
def completions(level: Int) = a.completions(level)
override def isTokenStart = a.isTokenStart
override def toString = "map(" + a + ")"
def map[C](g: B => C) = new MapParser[A, C](a, f.andThen(g))
}
private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String)
@ -836,10 +843,12 @@ private final class ParserWithExamples[T](
) extends ValidParser[T] {
def derive(c: Char) =
examples(delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples)
examples(
delegate derive c,
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples
)
def result = delegate.result

View File

@ -12,15 +12,17 @@ import Parser._
import java.io.File
import java.net.URI
import java.lang.Character.{
getType,
MATH_SYMBOL,
OTHER_SYMBOL,
CURRENCY_SYMBOL,
DASH_PUNCTUATION,
OTHER_PUNCTUATION,
MATH_SYMBOL,
MODIFIER_SYMBOL,
CURRENCY_SYMBOL
OTHER_PUNCTUATION,
OTHER_SYMBOL,
getType
}
import scala.annotation.tailrec
/** Provides standard implementations of commonly useful [[Parser]]s. */
trait Parsers {
@ -42,7 +44,8 @@ trait Parsers {
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
_.toString)
_.toString
)
/** Parses a single letter, according to Char.isLetter, into a Char. */
lazy val Letter = charClass(_.isLetter, "letter")
@ -163,7 +166,7 @@ trait Parsers {
}, "non-double-quote-backslash character")
/** Matches a single character that is valid somewhere in a URI. */
lazy val URIChar = charClass(alphanum) | chars("_-!.~'()*,;:$&+=?/[]@%#")
lazy val URIChar = charClass(alphanum, "alphanum") | chars("_-!.~'()*,;:$&+=?/[]@%#")
/** Returns true if `c` is an ASCII letter or digit. */
def alphanum(c: Char) =
@ -313,6 +316,16 @@ object DefaultParsers extends Parsers with ParserMain {
apply(p)(s).resultEmpty.isValid
/** Returns `true` if `s` parses successfully according to [[ID]].*/
def validID(s: String): Boolean = matches(ID, s)
def validID(s: String): Boolean = {
// Handwritten version of `matches(ID, s)` because validID turned up in profiling.
def isIdChar(c: Char): Boolean = Character.isLetterOrDigit(c) || (c == '-') || (c == '_')
@tailrec def isRestIdChar(cur: Int, s: String, length: Int): Boolean =
if (cur < length)
isIdChar(s.charAt(cur)) && isRestIdChar(cur + 1, s, length)
else
true
!s.isEmpty && Character.isLetter(s.charAt(0)) && isRestIdChar(1, s, s.length)
}
}

View File

@ -0,0 +1,29 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.internal.util
package complete
import org.scalacheck._, Gen._, Prop._
object DefaultParsersSpec extends Properties("DefaultParsers") {
import DefaultParsers.{ ID, isIDChar, matches, validID }
property("∀ s ∈ String: validID(s) == matches(ID, s)") = forAll(
(s: String) => validID(s) == matches(ID, s)
)
property("∀ s ∈ genID: matches(ID, s)") = forAll(genID)(s => matches(ID, s))
property("∀ s ∈ genID: validID(s)") = forAll(genID)(s => validID(s))
private val chars: Seq[Char] = Char.MinValue to Char.MaxValue
private val genID: Gen[String] =
for {
c <- oneOf(chars filter (_.isLetter))
cs <- listOf(oneOf(chars filter isIDChar))
} yield (c :: cs).mkString
}

View File

@ -121,8 +121,8 @@ object ParserTest extends Properties("Completing Parser") {
property("repeatDep accepts two tokens") = matches(repeat, colors.toSeq.take(2).mkString(" "))
}
object ParserExample {
val ws = charClass(_.isWhitespace).+
val notws = charClass(!_.isWhitespace).+
val ws = charClass(_.isWhitespace, "whitespace").+
val notws = charClass(!_.isWhitespace, "not whitespace").+
val name = token("test")
val options = (ws ~> token("quick" | "failed" | "new")).*

View File

@ -9,60 +9,66 @@ package sbt.internal.util
package complete
import java.io.File
import sbt.io.IO._
import org.scalatest.Assertion
import sbt.io.IO
class FileExamplesTest extends UnitSpec {
"listing all files in an absolute base directory" should
"produce the entire base directory's contents" in {
val _ = new DirectoryStructure {
fileExamples().toList should contain theSameElementsAs (allRelativizedPaths)
withDirectoryStructure() { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.allRelativizedPaths)
}
}
"listing files with a prefix that matches none" should
"produce an empty list" in {
val _ = new DirectoryStructure(withCompletionPrefix = "z") {
fileExamples().toList shouldBe empty
"listing files with a prefix that matches none" should "produce an empty list" in {
withDirectoryStructure(withCompletionPrefix = "z") { ds =>
ds.fileExamples().toList shouldBe empty
}
}
"listing single-character prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "f") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing single-character prefixed files" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "f") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"listing directory-prefixed files" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing directory-prefixed files" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "far") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
it should "produce sub-dir contents only when appending a file separator to the directory" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator) {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
withDirectoryStructure(withCompletionPrefix = "far" + File.separator) { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"listing files with a sub-path prefix" should
"produce matching paths only" in {
val _ = new DirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") {
fileExamples().toList should contain theSameElementsAs (prefixedPathsOnly)
"listing files with a sub-path prefix" should "produce matching paths only" in {
withDirectoryStructure(withCompletionPrefix = "far" + File.separator + "ba") { ds =>
ds.fileExamples().toList should contain theSameElementsAs (ds.prefixedPathsOnly)
}
}
"completing a full path" should
"produce a list with an empty string" in {
val _ = new DirectoryStructure(withCompletionPrefix = "bazaar") {
fileExamples().toList shouldEqual List("")
"completing a full path" should "produce a list with an empty string" in {
withDirectoryStructure(withCompletionPrefix = "bazaar") { ds =>
ds.fileExamples().toList shouldEqual List("")
}
}
// TODO: Remove DelayedInit - https://github.com/scala/scala/releases/tag/v2.11.0-RC1
class DirectoryStructure(withCompletionPrefix: String = "") extends DelayedInit {
def withDirectoryStructure[A](withCompletionPrefix: String = "")(
thunk: DirectoryStructure => Assertion
): Assertion = {
IO.withTemporaryDirectory { tempDir =>
val ds = new DirectoryStructure(withCompletionPrefix)
ds.createSampleDirStructure(tempDir)
ds.fileExamples = new FileExamples(ds.baseDir, withCompletionPrefix)
thunk(ds)
}
}
final class DirectoryStructure(withCompletionPrefix: String) {
var fileExamples: FileExamples = _
var baseDir: File = _
var childFiles: List[File] = _
@ -72,22 +78,14 @@ class FileExamplesTest extends UnitSpec {
def allRelativizedPaths: List[String] =
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories)
.map(relativize(baseDir, _).get)
.map(IO.relativize(baseDir, _).get)
def prefixedPathsOnly: List[String] =
allRelativizedPaths
.filter(_ startsWith withCompletionPrefix)
.map(_ substring withCompletionPrefix.length)
override def delayedInit(testBody: => Unit): Unit = {
withTemporaryDirectory { tempDir =>
createSampleDirStructure(tempDir)
fileExamples = new FileExamples(baseDir, withCompletionPrefix)
testBody
}
}
private def createSampleDirStructure(tempDir: File): Unit = {
def createSampleDirStructure(tempDir: File): Unit = {
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))
childDirectories = toChildFiles(tempDir, List("moo", "far"))
nestedFiles = toChildFiles(childDirectories(1), List("farfile1", "barfile2"))

View File

@ -27,7 +27,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("blue"),
suggestion("red")
))
)
)
parserWithExamples.completions(0) shouldEqual validCompletions
}
}
@ -38,7 +39,8 @@ class ParserWithExamplesTest extends UnitSpec {
val derivedCompletions = Completions(
Set(
suggestion("lue")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}
@ -58,7 +60,8 @@ class ParserWithExamplesTest extends UnitSpec {
Set(
suggestion("lue"),
suggestion("lock")
))
)
)
parserWithExamples.derive('b').completions(0) shouldEqual derivedCompletions
}
}

View File

@ -24,14 +24,14 @@ object LogicTest extends Properties("Logic") {
property("Properly orders results.") = secure(expect(ordering, Set(B, A, C, E, F)))
property("Detects cyclic negation") = secure(
Logic.reduceAll(badClauses, Set()) match {
case Right(res) => false
case Left(err: Logic.CyclicNegation) => true
case Left(err) => sys.error(s"Expected cyclic error, got: $err")
case Right(_) => false
case Left(_: Logic.CyclicNegation) => true
case Left(err) => sys.error(s"Expected cyclic error, got: $err")
}
)
def expect(result: Either[LogicException, Matched], expected: Set[Atom]) = result match {
case Left(err) => false
case Left(_) => false
case Right(res) =>
val actual = res.provenSet
if (actual != expected)

View File

@ -22,6 +22,7 @@
[boot]
directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/}
lock: ${sbt.boot.lock-true}
[ivy]
ivy-home: ${sbt.ivy.home-${user.home}/.ivy2/}

View File

@ -9,8 +9,8 @@ package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import sbt.internal.util.JLine
import sbt.util.Logger
import xsbti.compile.{ Inputs, Compilers }
import scala.util.Try
@ -20,32 +20,39 @@ final class Console(compiler: AnalyzingCompiler) {
def apply(classpath: Seq[File], log: Logger): Try[Unit] =
apply(classpath, Nil, "", "", log)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String,
log: Logger
): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log)
def apply(classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
def apply(
classpath: Seq[File],
options: Seq[String],
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String
)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings)
def apply(classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(
implicit log: Logger): Try[Unit] = {
def apply(
classpath: Seq[File],
options: Seq[String],
initialCommands: String,
cleanupCommands: String
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
def console0() =
compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings)
// TODO: Fix JLine
//JLine.withJLine(Run.executeTrapExit(console0, log))
Run.executeTrapExit(console0, log)
JLine.usingTerminal { t =>
t.init
Run.executeTrapExit(console0, log)
}
}
}
object Console {
def apply(conf: Inputs): Console =
conf.compilers match {

View File

@ -10,10 +10,6 @@ package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
import sbt.util.CacheStoreFactory
import xsbti.Reporter
import xsbti.compile.JavaTools
@ -23,93 +19,51 @@ import sbt.internal.util.ManagedLogger
object Doc {
import RawCompileLike._
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler): Gen =
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler
): Gen =
scaladoc(label, cacheStoreFactory, compiler, Seq())
def scaladoc(label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]): Gen =
cached(cacheStoreFactory,
fileInputOptions,
prepare(label + " Scala API documentation", compiler.doc))
def javadoc(label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter): Gen =
javadoc(label, cacheStoreFactory, doc, log, reporter, Seq())
def javadoc(label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
fileInputOptions: Seq[String]): Gen =
def scaladoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
compiler: AnalyzingCompiler,
fileInputOptions: Seq[String]
): Gen =
cached(
cacheStoreFactory,
fileInputOptions,
prepare(
label + " Java API documentation",
filterSources(
javaSourcesOnly,
(sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
maxErrors: Int,
log: Logger) => {
// doc.doc
???
}
)
)
prepare(label + " Scala API documentation", compiler.doc)
)
@deprecated("Going away", "1.1.1")
def javadoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
): Gen = ???
@deprecated("Going away", "1.1.1")
def javadoc(
label: String,
cacheStoreFactory: CacheStoreFactory,
doc: JavaTools,
log: Logger,
reporter: Reporter,
fileInputOptions: Seq[String],
): Gen = ???
@deprecated("Going away", "1.1.1")
val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java")
private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc {
def apply(label: String,
sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
log: ManagedLogger): Unit = {
generate("Scala",
label,
compiler.doc,
sources,
classpath,
outputDirectory,
options,
maximumErrors,
log)
}
}
}
@deprecated("Going away", "1.1.1")
sealed trait Doc {
@deprecated("Going away", "1.1.1")
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private[sbt] final def generate(variant: String,
label: String,
docf: Gen,
sources: Seq[File],
classpath: Seq[File],
outputDirectory: File,
options: Seq[String],
maxErrors: Int,
log: ManagedLogger): Unit = {
val logSnip = variant + " API documentation"
if (sources.isEmpty)
log.info("No sources available, skipping " + logSnip + "...")
else {
log.info(
"Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...")
IO.delete(outputDirectory)
IO.createDirectory(outputDirectory)
docf(sources, classpath, outputDirectory, options, maxErrors, log)
log.info(logSnip + " generation successful.")
}
}
}

View File

@ -30,29 +30,37 @@ object DotGraph {
val toString = packageOnly compose fToString(sourceRoots)
apply(relations, outputDirectory, toString, toString)
}
def apply(relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String): Unit = {
def apply(
relations: Relations,
outputDir: File,
sourceToString: File => String,
externalToString: File => String
): Unit = {
def file(name: String) = new File(outputDir, name)
IO.createDirectory(outputDir)
generateGraph(file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String])
generateGraph(file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString)
generateGraph(
file("int-class-deps"),
"dependencies",
relations.internalClassDep,
identity[String],
identity[String]
)
generateGraph(
file("binary-dependencies"),
"externalDependencies",
relations.libraryDep,
externalToString,
sourceToString
)
}
def generateGraph[K, V](file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String): Unit = {
def generateGraph[K, V](
file: File,
graphName: String,
relation: Relation[K, V],
keyToString: K => String,
valueToString: V => String
): Unit = {
import scala.collection.mutable.{ HashMap, HashSet }
val mappedGraph = new HashMap[String, HashSet[String]]
for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values)

View File

@ -17,15 +17,18 @@ import sbt.io.IO
import sbt.util.Logger
import sbt.ConcurrentRestrictions.Tag
import sbt.protocol.testing._
import sbt.internal.util.ConsoleAppender
private[sbt] object ForkTests {
def apply(runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag): Task[TestOutput] = {
def apply(
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
config: Execution,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
tag: Tag
): Task[TestOutput] = {
val opts = processOptions(config, tests, log)
import std.TaskExtra._
@ -42,12 +45,14 @@ private[sbt] object ForkTests {
}
}
private[this] def mainTestTask(runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean): Task[TestOutput] =
private[this] def mainTestTask(
runners: Map[TestFramework, Runner],
opts: ProcessedOptions,
classpath: Seq[File],
fork: ForkOptions,
log: Logger,
parallel: Boolean
): Task[TestOutput] =
std.TaskExtra.task {
val server = new ServerSocket(0)
val testListeners = opts.testListeners flatMap {
@ -67,7 +72,8 @@ private[sbt] object ForkTests {
} catch {
case e: java.net.SocketException =>
log.error(
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage)
"Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage
)
log.trace(e)
server.close()
return
@ -78,15 +84,17 @@ private[sbt] object ForkTests {
val is = new ObjectInputStream(socket.getInputStream)
try {
val config = new ForkConfiguration(log.ansiCodesSupported, parallel)
val config = new ForkConfiguration(ConsoleAppender.formatEnabledInEnv, parallel)
os.writeObject(config)
val taskdefs = opts.tests.map(
t =>
new TaskDef(t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors))
val taskdefs = opts.tests.map { t =>
new TaskDef(
t.name,
forkFingerprint(t.fingerprint),
t.explicitlySpecified,
t.selectors
)
}
os.writeObject(taskdefs.toArray)
os.writeInt(runners.size)
@ -116,20 +124,27 @@ private[sbt] object ForkTests {
val acceptorThread = new Thread(Acceptor)
acceptorThread.start()
val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework])
val options = Seq("-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString)
val fullCp = classpath ++: Seq(
IO.classLocationFile[ForkMain],
IO.classLocationFile[Framework]
)
val options = Seq(
"-classpath",
fullCp mkString File.pathSeparator,
classOf[ForkMain].getCanonicalName,
server.getLocalPort.toString
)
val ec = Fork.java(fork, options)
val result =
if (ec != 0)
TestOutput(TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error),
Iterable.empty)
TestOutput(
TestResult.Error,
Map(
"Running java with options " + options
.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error
),
Iterable.empty
)
else {
// Need to wait acceptor thread to finish its business
acceptorThread.join()
@ -150,11 +165,13 @@ private[sbt] object ForkTests {
case _ => sys.error("Unknown fingerprint type: " + f.getClass)
}
}
private final class React(is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]) {
private final class React(
is: ObjectInputStream,
os: ObjectOutputStream,
log: Logger,
listeners: Seq[TestReportListener],
results: mutable.Map[String, SuiteResult]
) {
import ForkTags._
@annotation.tailrec
def react(): Unit = is.readObject match {

View File

@ -7,7 +7,6 @@
package sbt
import scala.Predef.{ conforms => _, _ }
import java.io.File
import java.util.jar.{ Attributes, Manifest }
import scala.collection.JavaConverters._
@ -23,7 +22,7 @@ import sbt.internal.util.HNil
import sbt.internal.util.HListFormats._
import sbt.util.FileInfo.{ exists, lastModified }
import sbt.util.CacheImplicits._
import sbt.util.Tracked.inputChanged
import sbt.util.Tracked.{ inputChanged, outputChanged }
sealed trait PackageOption
object Package {
@ -50,9 +49,11 @@ object Package {
}
}
final class Configuration(val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption])
final class Configuration(
val sources: Seq[(File, String)],
val jar: File,
val options: Seq[PackageOption]
)
def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = {
val manifest = new Manifest
val main = manifest.getMainAttributes
@ -66,27 +67,30 @@ object Package {
}
setVersion(main)
type Inputs = Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil
val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") {
(inChanged,
inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) =>
(inChanged, inputs: Inputs) =>
import exists.format
val sources :+: _ :+: manifest :+: HNil = inputs
inputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) =>
if (inChanged || outChanged)
outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) =>
if (inChanged || outChanged) {
makeJar(sources.toSeq, jar.file, manifest, log)
else
jar.file
} else
log.debug("Jar uptodate: " + jar.file)
}
}
val map = conf.sources.toMap
val inputs = map :+: lastModified(map.keySet) :+: manifest :+: HNil
cachedMakeJar(inputs)(exists(conf.jar))
cachedMakeJar(inputs)(() => exists(conf.jar))
}
def setVersion(main: Attributes): Unit = {
val version = Attributes.Name.MANIFEST_VERSION
if (main.getValue(version) eq null)
if (main.getValue(version) eq null) {
main.put(version, "1.0")
()
}
}
def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = {
import Attributes.Name._
@ -94,16 +98,26 @@ object Package {
val attribVals = Seq(name, version, orgName)
ManifestAttributes(attribKeys zip attribVals: _*)
}
def addImplManifestAttributes(name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String): PackageOption = {
def addImplManifestAttributes(
name: String,
version: String,
homepage: Option[java.net.URL],
org: String,
orgName: String
): PackageOption = {
import Attributes.Name._
val attribKeys = Seq(IMPLEMENTATION_TITLE,
IMPLEMENTATION_VERSION,
IMPLEMENTATION_VENDOR,
IMPLEMENTATION_VENDOR_ID)
// The ones in Attributes.Name are deprecated saying:
// "Extension mechanism will be removed in a future release. Use class path instead."
val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id")
val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL")
val attribKeys = Seq(
IMPLEMENTATION_TITLE,
IMPLEMENTATION_VERSION,
IMPLEMENTATION_VENDOR,
IMPLEMENTATION_VENDOR_ID,
)
val attribVals = Seq(name, version, orgName, org)
ManifestAttributes((attribKeys zip attribVals) ++ {
homepage map (h => (IMPLEMENTATION_URL, h.toString))

View File

@ -7,10 +7,10 @@
package sbt
import scala.annotation.tailrec
import java.io.File
import sbt.internal.inc.{ RawCompiler, ScalaInstance }
import Predef.{ conforms => _, _ }
import sbt.io.syntax._
import sbt.io.IO
@ -30,7 +30,7 @@ object RawCompileLike {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = {
@annotation.tailrec
@tailrec
def loop(opt: List[String], result: List[File]): List[File] = {
opt.dropWhile(!fileInputOpts.contains(_)) match {
case List(_, fileOpt, tail @ _*) => {
@ -46,16 +46,20 @@ object RawCompileLike {
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen =
cached(cacheStoreFactory, Seq(), doCompile)
def cached(cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen): Gen =
def cached(
cacheStoreFactory: CacheStoreFactory,
fileInputOpts: Seq[String],
doCompile: Gen
): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
type Inputs =
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[
String] :+: Int :+: HNil
FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+:
Seq[String] :+: Int :+: HNil
val inputs
: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(
classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
classpath.toSet
) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") {
(outChanged, outputs: FilesInfo[PlainFileInfo]) =>
@ -67,6 +71,7 @@ object RawCompileLike {
}
cachedComp(inputs)(exists(outputDirectory.allPaths.get.toSet))
}
def prepare(description: String, doCompile: Gen): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
if (sources.isEmpty)
@ -79,20 +84,24 @@ object RawCompileLike {
log.info(description.capitalize + " successful.")
}
}
def filterSources(f: File => Boolean, doCompile: Gen): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) =>
doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log)
def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
(sources, classpath, outputDirectory, options, maxErrors, log) => {
(sources, classpath, outputDirectory, options, _, log) => {
val compiler = new RawCompiler(instance, cpOptions, log)
compiler(sources, classpath, outputDirectory, options)
}
def compile(label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions): Gen =
def compile(
label: String,
cacheStoreFactory: CacheStoreFactory,
instance: ScalaInstance,
cpOptions: ClasspathOptions
): Gen =
cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions)))
val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => ()
val nop: Gen = (_, _, _, _, _, _) => ()
}

View File

@ -30,10 +30,18 @@ import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
* It is safe to use for its intended purpose: copying resources to a class output directory.
*/
object Sync {
def apply(store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists)
: Traversable[(File, File)] => Relation[File, File] =
@deprecated("Use sync, which doesn't take the unused outStyle param", "1.1.1")
def apply(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists,
): Traversable[(File, File)] => Relation[File, File] =
sync(store, inStyle)
def sync(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
): Traversable[(File, File)] => Relation[File, File] =
mappings => {
val relation = Relation.empty ++ mappings
noDuplicateTargets(relation)
@ -63,26 +71,24 @@ object Sync {
def copy(source: File, target: File): Unit =
if (source.isFile)
IO.copyFile(source, target, true)
else if (!target.exists) // we don't want to update the last modified time of an existing directory
{
IO.createDirectory(target)
IO.copyLastModified(source, target)
}
else if (!target.exists) { // we don't want to update the last modified time of an existing directory
IO.createDirectory(target)
IO.copyLastModified(source, target)
()
}
def noDuplicateTargets(relation: Relation[File, File]): Unit = {
val dups = relation.reverseMap.filter {
case (_, srcs) =>
srcs.size >= 2 && srcs.exists(!_.isDirectory)
} map {
case (target, srcs) =>
"\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t")
}
val dups = relation.reverseMap
.filter { case (_, srcs) => srcs.size >= 2 && srcs.exists(!_.isDirectory) }
.map { case (target, srcs) => "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") }
if (dups.nonEmpty)
sys.error("Duplicate mappings:" + dups.mkString)
}
implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] =
implicit def relationFormat[A, B](
implicit af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]
): JsonFormat[Relation[A, B]] =
new JsonFormat[Relation[A, B]] {
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] =
jsOpt match {
@ -105,15 +111,18 @@ object Sync {
}
def writeInfo[F <: FileInfo](store: CacheStore,
relation: Relation[File, File],
info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit =
def writeInfo[F <: FileInfo](
store: CacheStore,
relation: Relation[File, File],
info: Map[File, F]
)(implicit infoFormat: JsonFormat[F]): Unit =
store.write((relation, info))
type RelationInfo[F] = (Relation[File, File], Map[File, F])
def readInfo[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
def readInfo[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
try { readUncaught[F](store)(infoFormat) } catch {
case _: IOException => (Relation.empty[File, File], Map.empty[File, F])
case _: ZipException => (Relation.empty[File, File], Map.empty[File, F])
@ -124,7 +133,8 @@ object Sync {
}
}
private def readUncaught[F <: FileInfo](store: CacheStore)(
implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
private def readUncaught[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
}

View File

@ -31,13 +31,17 @@ trait TestResultLogger {
def run(log: Logger, results: Output, taskName: String): Unit
/** Only allow invocation if certain criteria is met, else use another `TestResultLogger` (defaulting to nothing) . */
final def onlyIf(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def onlyIf(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, this, otherwise)
/** Allow invocation unless a certain predicate passes, in which case use another `TestResultLogger` (defaulting to nothing) . */
final def unless(f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null) =
final def unless(
f: (Output, String) => Boolean,
otherwise: TestResultLogger = TestResultLogger.Null
) =
TestResultLogger.choose(f, otherwise, this)
}
@ -69,8 +73,10 @@ object TestResultLogger {
* @param f The `TestResultLogger` to choose if the predicate fails.
*/
def choose(cond: (Output, String) => Boolean, t: TestResultLogger, f: TestResultLogger) =
TestResultLogger((log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName))
TestResultLogger(
(log, results, taskName) =>
(if (cond(results, taskName)) t else f).run(log, results, taskName)
)
/** Transforms the input to be completely silent when the subject module doesn't contain any tests. */
def silenceWhenNoTests(d: Defaults.Main) =
@ -127,32 +133,39 @@ object TestResultLogger {
results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty)
val printStandard = TestResultLogger((log, results, _) => {
val (skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount) =
val (
skippedCount,
errorsCount,
passedCount,
failuresCount,
ignoredCount,
canceledCount,
pendingCount,
) =
results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) {
case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc),
(name @ _, testEvent)) =>
(skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount)
case (acc, (_, testEvent)) =>
val (skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc) =
acc
(
skippedAcc + testEvent.skippedCount,
errorAcc + testEvent.errorCount,
passedAcc + testEvent.passedCount,
failureAcc + testEvent.failureCount,
ignoredAcc + testEvent.ignoredCount,
canceledAcc + testEvent.canceledCount,
pendingAcc + testEvent.pendingCount,
)
}
val totalCount = failuresCount + errorsCount + skippedCount + passedCount
val base =
s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount"
val otherCounts = Seq("Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount)
val otherCounts = Seq(
"Skipped" -> skippedCount,
"Ignored" -> ignoredCount,
"Canceled" -> canceledCount,
"Pending" -> pendingCount
)
val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" }
val postfix = base + extra.mkString
@ -181,6 +194,7 @@ object TestResultLogger {
})
val printNoTests = TestResultLogger(
(log, results, taskName) => log.info("No tests to run for " + taskName))
(log, results, taskName) => log.info("No tests to run for " + taskName)
)
}
}

View File

@ -34,6 +34,7 @@ import sbt.util.Logger
import sbt.protocol.testing.TestResult
sealed trait TestOption
object Tests {
/**
@ -43,9 +44,11 @@ object Tests {
* @param events The result of each test group (suite) executed during this test run.
* @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated.
*/
final case class Output(overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary])
final case class Output(
overall: TestResult,
events: Map[String, SuiteResult],
summaries: Iterable[Summary]
)
/**
* Summarizes a test run.
@ -137,9 +140,11 @@ object Tests {
val cleanup: Vector[ClassLoader => Unit],
val testListeners: Vector[TestReportListener]
)
private[sbt] def processOptions(config: Execution,
discovered: Vector[TestDefinition],
log: Logger): ProcessedOptions = {
private[sbt] def processOptions(
config: Execution,
discovered: Vector[TestDefinition],
log: Logger
): ProcessedOptions = {
import collection.mutable.{ HashSet, ListBuffer }
val testFilters = new ListBuffer[String => Boolean]
var orderedFilters = Seq[String => Boolean]()
@ -167,7 +172,8 @@ object Tests {
if (undefinedFrameworks.nonEmpty)
log.warn(
"Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks
.mkString("\n\t"))
.mkString("\n\t")
)
def includeTest(test: TestDefinition) =
!excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
@ -176,10 +182,12 @@ object Tests {
if (orderedFilters.isEmpty) filtered0
else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct
val uniqueTests = distinctBy(tests)(_.name)
new ProcessedOptions(uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector)
new ProcessedOptions(
uniqueTests.toVector,
setup.toVector,
cleanup.toVector,
testListeners.toVector
)
}
private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = {
@ -187,33 +195,39 @@ object Tests {
in.filter(t => seen.add(f(t)))
}
def apply(frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger): Task[Output] = {
def apply(
frameworks: Map[TestFramework, Framework],
testLoader: ClassLoader,
runners: Map[TestFramework, Runner],
discovered: Vector[TestDefinition],
config: Execution,
log: ManagedLogger
): Task[Output] = {
val o = processOptions(config, discovered, log)
testTask(testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config)
testTask(
testLoader,
frameworks,
runners,
o.tests,
o.setup,
o.cleanup,
log,
o.testListeners,
config
)
}
def testTask(loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution): Task[Output] = {
def testTask(
loader: ClassLoader,
frameworks: Map[TestFramework, Framework],
runners: Map[TestFramework, Runner],
tests: Vector[TestDefinition],
userSetup: Iterable[ClassLoader => Unit],
userCleanup: Iterable[ClassLoader => Unit],
log: ManagedLogger,
testListeners: Vector[TestReportListener],
config: Execution
): Task[Output] = {
def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () =>
a(loader)
@ -227,7 +241,7 @@ object Tests {
if (config.parallel)
makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join
else
makeSerial(loader, runnables, setupTasks, config.tags)
makeSerial(loader, runnables, setupTasks)
val taggedMainTasks = mainTasks.tagw(config.tags: _*)
taggedMainTasks map processResults flatMap { results =>
val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall))
@ -238,31 +252,43 @@ object Tests {
}
type TestRunnable = (String, TestFunction)
private def createNestedRunnables(loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] =
private def createNestedRunnables(
loader: ClassLoader,
testFun: TestFunction,
nestedTasks: Seq[TestTask]
): Seq[(String, TestFunction)] =
nestedTasks.view.zipWithIndex map {
case (nt, idx) =>
val testFunDef = testFun.taskDef
(testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(loader,
new TaskDef(testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors),
testFun.runner,
nt))
(
testFunDef.fullyQualifiedName,
TestFramework.createTestFunction(
loader,
new TaskDef(
testFunDef.fullyQualifiedName + "-" + idx,
testFunDef.fingerprint,
testFunDef.explicitlySpecified,
testFunDef.selectors
),
testFun.runner,
nt
)
)
}
def makeParallel(loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] =
def makeParallel(
loader: ClassLoader,
runnables: Iterable[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] =
toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks)
def toTasks(loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTasks(
loader: ClassLoader,
runnables: Seq[TestRunnable],
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
case (sum, e) =>
@ -274,10 +300,12 @@ object Tests {
})
}
def toTask(loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
def toTask(
loader: ClassLoader,
name: String,
fun: TestFunction,
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val base = task { (name, fun.apply()) }
val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
taggedBase flatMap {
@ -294,13 +322,25 @@ object Tests {
}
}
def makeSerial(loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] = {
@deprecated("Use the variant without tags", "1.1.1")
def makeSerial(
loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
tags: Seq[(Tag, Int)],
): Task[List[(String, SuiteResult)]] =
makeSerial(loader, runnables, setupTasks)
def makeSerial(
loader: ClassLoader,
runnables: Seq[TestRunnable],
setupTasks: Task[Unit],
): Task[List[(String, SuiteResult)]] = {
@tailrec
def processRunnable(runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] =
def processRunnable(
runnableList: List[TestRunnable],
acc: List[(String, SuiteResult)]
): List[(String, SuiteResult)] =
runnableList match {
case hd :: rst =>
val testFun = hd._2
@ -350,9 +390,11 @@ object Tests {
((TestResult.Passed: TestResult) /: results) { (acc, result) =>
if (severity(acc) < severity(result)) result else acc
}
def discover(frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger): (Seq[TestDefinition], Set[String]) =
def discover(
frameworks: Seq[Framework],
analysis: CompileAnalysis,
log: Logger
): (Seq[TestDefinition], Set[String]) =
discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)
def allDefs(analysis: CompileAnalysis) = analysis match {
@ -368,9 +410,11 @@ object Tests {
all
}.toSeq
}
def discover(fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger): (Seq[TestDefinition], Set[String]) = {
def discover(
fingerprints: Seq[Fingerprint],
definitions: Seq[Definition],
log: Logger
): (Seq[TestDefinition], Set[String]) = {
val subclasses = fingerprints collect {
case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub)
};
@ -381,9 +425,11 @@ object Tests {
log.debug("Annotation fingerprints: " + annotations)
def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet
def defined(in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean): Seq[Fingerprint] =
def defined(
in: Seq[(String, Boolean, Fingerprint)],
names: Set[String],
IsModule: Boolean
): Seq[Fingerprint] =
in collect { case (name, IsModule, print) if names(name) => print }
def toFingerprints(d: Discovered): Seq[Fingerprint] =

View File

@ -15,9 +15,10 @@ import ast.parser.Tokens
import reporters.{ ConsoleReporter, Reporter }
import scala.reflect.internal.util.{ AbstractFileClassLoader, BatchSourceFile }
import Tokens.{ EOF, NEWLINE, NEWLINES, SEMI }
import java.io.File
import java.io.{ File, FileNotFoundException }
import java.nio.ByteBuffer
import java.net.URLClassLoader
import java.security.MessageDigest
import Eval.{ getModule, getValue, WrapValName }
import sbt.io.{ DirectoryFilter, FileFilter, GlobFilter, Hash, IO, Path }
@ -33,10 +34,12 @@ final class EvalImports(val strings: Seq[(String, Int)], val srcName: String)
* the module from that class loader. `generated` contains the compiled classes and cache files related
* to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`.
*/
final class EvalResult(val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String)
final class EvalResult(
val tpe: String,
val getValue: ClassLoader => Any,
val generated: Seq[File],
val enclosingModule: String
)
/**
* The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated,
@ -45,10 +48,12 @@ final class EvalResult(val tpe: String,
* from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`.
* The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`.
*/
final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]) {
final class EvalDefinitions(
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String,
val valNames: Seq[String]
) {
def values(parent: ClassLoader): Seq[Any] = {
val module = getModule(enclosingModule, loader(parent))
for (n <- valNames) yield module.getClass.getMethod(n).invoke(module)
@ -57,10 +62,12 @@ final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
final class EvalException(msg: String) extends RuntimeException(msg)
// not thread safe, since it reuses a Global instance
final class Eval(optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]) {
final class Eval(
optionsNoncp: Seq[String],
classpath: Seq[File],
mkReporter: Settings => Reporter,
backing: Option[File]
) {
def this(mkReporter: Settings => Reporter, backing: Option[File]) =
this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing)
def this() = this(s => new ConsoleReporter(s), None)
@ -96,11 +103,13 @@ final class Eval(optionsNoncp: Seq[String],
private[this] var toUnlinkLater = List[Symbol]()
private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
def eval(expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine): EvalResult = {
def eval(
expression: String,
imports: EvalImports = noImports,
tpeName: Option[String] = None,
srcName: String = "<setting>",
line: Int = DefaultStartLine
): EvalResult = {
val ev = new EvalType[String] {
def makeUnit = mkUnit(srcName, line, expression)
def unlink = true
@ -120,11 +129,13 @@ final class Eval(optionsNoncp: Seq[String],
val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl))
new EvalResult(i.extra, value, i.generated, i.enclosingModule)
}
def evalDefinitions(definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]): EvalDefinitions = {
def evalDefinitions(
definitions: Seq[(String, scala.Range)],
imports: EvalImports,
srcName: String,
file: Option[File],
valTypes: Seq[String]
): EvalDefinitions = {
require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.")
val ev = new EvalType[Seq[String]] {
lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions)
@ -151,20 +162,41 @@ final class Eval(optionsNoncp: Seq[String],
new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra)
}
private[this] def evalCommon[T](content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]): EvalIntermediate[T] = {
private[this] def evalCommon[T](
content: Seq[String],
imports: EvalImports,
tpeName: Option[String],
ev: EvalType[T]
): EvalIntermediate[T] = {
import Eval._
// TODO - We also encode the source of the setting into the hash to avoid conflicts where the exact SAME setting
// is defined in multiple evaluated instances with a backing. This leads to issues with finding a previous
// value on the classpath when compiling.
val hash = Hash.toHex(
Hash(bytes(
stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) ::
seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(
tpeName)(bytes) ::
bytes(ev.extraHash) :: Nil)))
// This is a hot path.
val digester = MessageDigest.getInstance("SHA")
content foreach { c =>
digester.update(bytes(c))
}
backing foreach { x =>
digester.update(fileExistsBytes(x))
}
options foreach { o =>
digester.update(bytes(o))
}
classpath foreach { f =>
fileModifiedHash(f, digester)
}
imports.strings.map(_._1) foreach { x =>
digester.update(bytes(x))
}
tpeName foreach { x =>
digester.update(bytes(x))
}
digester.update(bytes(ev.extraHash))
val d = digester.digest()
val hash = Hash.toHex(d)
val moduleName = makeModuleName(hash)
lazy val unit = {
@ -192,12 +224,14 @@ final class Eval(optionsNoncp: Seq[String],
// location of the cached type or definition information
private[this] def cacheFile(base: File, moduleName: String): File =
new File(base, moduleName + ".cache")
private[this] def compileAndLoad[T](run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]): (T, ClassLoader => ClassLoader) = {
private[this] def compileAndLoad[T](
run: Run,
unit: CompilationUnit,
imports: EvalImports,
backing: Option[File],
moduleName: String,
ev: EvalType[T]
): (T, ClassLoader => ClassLoader) = {
global.curRun = run
run.currentUnit = unit
val dir = outputDirectory(backing)
@ -242,18 +276,22 @@ final class Eval(optionsNoncp: Seq[String],
parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String): Tree = {
def augment(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
tree: Tree,
tpt: Tree,
objectName: String
): Tree = {
val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
syntheticModule(parser, imports, method :: Nil, objectName)
}
private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String): Tree = {
private[this] def syntheticModule(
parser: global.syntaxAnalyzer.UnitParser,
imports: Seq[Tree],
definitions: List[Tree],
objectName: String
): Tree = {
val emptyTypeName = nme.EMPTY.toTypeName
def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
@ -262,8 +300,10 @@ final class Eval(optionsNoncp: Seq[String],
Nil,
List(Nil),
TypeTree(),
Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(())))
Block(
List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
Literal(Constant(()))
)
)
def moduleBody = Template(List(gen.scalaAnyRefConstr), noSelfType, emptyInit :: definitions)
@ -301,10 +341,12 @@ final class Eval(optionsNoncp: Seq[String],
private[this] def isTopLevelModule(s: Symbol): Boolean =
s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass
private[this] final class EvalIntermediate[T](val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String)
private[this] final class EvalIntermediate[T](
val extra: T,
val loader: ClassLoader => ClassLoader,
val generated: Seq[File],
val enclosingModule: String
)
private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists
// TODO: use the code from Analyzer
@ -318,10 +360,12 @@ final class Eval(optionsNoncp: Seq[String],
(s contains moduleName)
}
private[this] class ParseErrorStrings(val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String)
private[this] class ParseErrorStrings(
val base: String,
val extraBlank: String,
val missingBlank: String,
val extraSemi: String
)
private[this] def definitionErrorStrings = new ParseErrorStrings(
base = "Error parsing definition.",
extraBlank = " Ensure that there are no blank lines within a definition.",
@ -340,9 +384,11 @@ final class Eval(optionsNoncp: Seq[String],
* Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state
* to catch errors that are common when the content is embedded in a blank-line-delimited format.
*/
private[this] def parse[T](unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = {
private[this] def parse[T](
unit: CompilationUnit,
errors: ParseErrorStrings,
f: syntaxAnalyzer.UnitParser => T
): (syntaxAnalyzer.UnitParser, T) = {
val parser = new syntaxAnalyzer.UnitParser(unit)
val tree = f(parser)
@ -443,7 +489,8 @@ final class Eval(optionsNoncp: Seq[String],
*/
private[this] def mkDefsUnit(
srcName: String,
definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = {
definitions: Seq[(String, scala.Range)]
): (CompilationUnit, Seq[CompilationUnit]) = {
def fragmentUnit(content: String, lineMap: Array[Int]) =
new CompilationUnit(fragmentSourceFile(srcName, content, lineMap))
@ -482,11 +529,26 @@ private[sbt] object Eval {
def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f)
def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte]
def bytes(b: Boolean): Array[Byte] = Array[Byte](if (b) 1 else 0)
def filesModifiedBytes(fs: Array[File]): Array[Byte] =
if (fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes)
def fileModifiedBytes(f: File): Array[Byte] =
(if (f.isDirectory) filesModifiedBytes(f listFiles classDirFilter) else bytes(f.lastModified)) ++
bytes(f.getAbsolutePath)
// fileModifiedBytes is a hot method, taking up 0.85% of reload time
// This is a procedural version
def fileModifiedHash(f: File, digester: MessageDigest): Unit = {
if (f.isDirectory)
(f listFiles classDirFilter) foreach { x =>
fileModifiedHash(x, digester)
} else digester.update(bytes(getModifiedTimeOrZero(f)))
digester.update(bytes(f.getAbsolutePath))
}
// This uses NIO instead of the JNA-based IO.getModifiedTimeOrZero for speed
def getModifiedTimeOrZero(f: File): Long =
try {
sbt.io.JavaMilli.getModifiedTime(f.getPath)
} catch {
case _: FileNotFoundException => 0L
}
def fileExistsBytes(f: File): Array[Byte] =
bytes(f.exists) ++
bytes(f.getAbsolutePath)

View File

@ -37,19 +37,21 @@ class CacheIvyTest extends Properties("CacheIvy") {
content = converter.toJsonUnsafe(value)
}
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(
implicit cache: SingletonCache[T]): U = {
private def testCache[T: JsonFormat, U](
f: (SingletonCache[T], CacheStore) => U
)(implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}
private def cachePreservesEquality[T: JsonFormat](m: T,
eq: (T, T) => Prop,
str: T => String): Prop = testCache[T, Prop] {
(cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
private def cachePreservesEquality[T: JsonFormat](
m: T,
eq: (T, T) => Prop,
str: T => String
): Prop = testCache[T, Prop] { (cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
}
implicit val arbConfigRef: Arbitrary[ConfigRef] = Arbitrary(

View File

@ -38,7 +38,8 @@ class EvalTest extends Properties("eval") {
val line = math.abs(l)
val src = "mismatch"
throws(classOf[RuntimeException])(
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) &&
eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)
) &&
hasErrors(line + 1, src)
}
@ -78,14 +79,17 @@ val p = {
property("explicit import") = forAll(testImport("import math.abs" :: Nil))
property("wildcard import") = forAll(testImport("import math._" :: Nil))
property("comma-separated imports") = forAll(
testImport("import annotation._, math._, meta._" :: Nil))
testImport("import annotation._, math._, meta._" :: Nil)
)
property("multiple imports") = forAll(
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil))
testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil)
)
private[this] def testImport(imports: Seq[String]): Int => Prop =
i =>
value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs(
i)
i
)
private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }"
val LocalType = "AnyRef{val i: Int}"

View File

@ -19,7 +19,7 @@ final class CommandSource private (
override def toString: String = {
"CommandSource(" + channelName + ")"
}
protected[this] def copy(channelName: String = channelName): CommandSource = {
private[this] def copy(channelName: String = channelName): CommandSource = {
new CommandSource(channelName)
}
def withChannelName(channelName: String): CommandSource = {

View File

@ -21,7 +21,7 @@ final class Exec private (
override def toString: String = {
"Exec(" + commandLine + ", " + execId + ", " + source + ")"
}
protected[this] def copy(commandLine: String = commandLine, execId: Option[String] = execId, source: Option[sbt.CommandSource] = source): Exec = {
private[this] def copy(commandLine: String = commandLine, execId: Option[String] = execId, source: Option[sbt.CommandSource] = source): Exec = {
new Exec(commandLine, execId, source)
}
def withCommandLine(commandLine: String): Exec = {
@ -42,8 +42,8 @@ final class Exec private (
}
object Exec {
def newExecId: String = java.util.UUID.randomUUID.toString
def apply(commandLine: String, source: Option[sbt.CommandSource]): Exec = new Exec(commandLine, None, source)
def apply(commandLine: String, source: sbt.CommandSource): Exec = new Exec(commandLine, None, Option(source))
def apply(commandLine: String, source: Option[sbt.CommandSource]): Exec = new Exec(commandLine, source)
def apply(commandLine: String, source: sbt.CommandSource): Exec = new Exec(commandLine, Option(source))
def apply(commandLine: String, execId: Option[String], source: Option[sbt.CommandSource]): Exec = new Exec(commandLine, execId, source)
def apply(commandLine: String, execId: String, source: sbt.CommandSource): Exec = new Exec(commandLine, Option(execId), Option(source))
}

View File

@ -1,178 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainServerSocket.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.concurrent.atomic.AtomicInteger;
import com.sun.jna.LastErrorException;
import com.sun.jna.ptr.IntByReference;
/**
* Implements a {@link ServerSocket} which binds to a local Unix domain socket
* and returns instances of {@link NGUnixDomainSocket} from
* {@link #accept()}.
*/
public class NGUnixDomainServerSocket extends ServerSocket {
private static final int DEFAULT_BACKLOG = 50;
// We use an AtomicInteger to prevent a race in this situation which
// could happen if fd were just an int:
//
// Thread 1 -> NGUnixDomainServerSocket.accept()
// -> lock this
// -> check isBound and isClosed
// -> unlock this
// -> descheduled while still in method
// Thread 2 -> NGUnixDomainServerSocket.close()
// -> lock this
// -> check isClosed
// -> NGUnixDomainSocketLibrary.close(fd)
// -> now fd is invalid
// -> unlock this
// Thread 1 -> re-scheduled while still in method
// -> NGUnixDomainSocketLibrary.accept(fd, which is invalid and maybe re-used)
//
// By using an AtomicInteger, we'll set this to -1 after it's closed, which
// will cause the accept() call above to cleanly fail instead of possibly
// being called on an unrelated fd (which may or may not fail).
private final AtomicInteger fd;
private final int backlog;
private boolean isBound;
private boolean isClosed;
public static class NGUnixDomainServerSocketAddress extends SocketAddress {
private final String path;
public NGUnixDomainServerSocketAddress(String path) {
this.path = path;
}
public String getPath() {
return path;
}
}
/**
* Constructs an unbound Unix domain server socket.
*/
public NGUnixDomainServerSocket() throws IOException {
this(DEFAULT_BACKLOG, null);
}
/**
* Constructs an unbound Unix domain server socket with the specified listen backlog.
*/
public NGUnixDomainServerSocket(int backlog) throws IOException {
this(backlog, null);
}
/**
* Constructs and binds a Unix domain server socket to the specified path.
*/
public NGUnixDomainServerSocket(String path) throws IOException {
this(DEFAULT_BACKLOG, path);
}
/**
* Constructs and binds a Unix domain server socket to the specified path
* with the specified listen backlog.
*/
public NGUnixDomainServerSocket(int backlog, String path) throws IOException {
try {
fd = new AtomicInteger(
NGUnixDomainSocketLibrary.socket(
NGUnixDomainSocketLibrary.PF_LOCAL,
NGUnixDomainSocketLibrary.SOCK_STREAM,
0));
this.backlog = backlog;
if (path != null) {
bind(new NGUnixDomainServerSocketAddress(path));
}
} catch (LastErrorException e) {
throw new IOException(e);
}
}
public synchronized void bind(SocketAddress endpoint) throws IOException {
if (!(endpoint instanceof NGUnixDomainServerSocketAddress)) {
throw new IllegalArgumentException(
"endpoint must be an instance of NGUnixDomainServerSocketAddress");
}
if (isBound) {
throw new IllegalStateException("Socket is already bound");
}
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
NGUnixDomainServerSocketAddress unEndpoint = (NGUnixDomainServerSocketAddress) endpoint;
NGUnixDomainSocketLibrary.SockaddrUn address =
new NGUnixDomainSocketLibrary.SockaddrUn(unEndpoint.getPath());
try {
int socketFd = fd.get();
NGUnixDomainSocketLibrary.bind(socketFd, address, address.size());
NGUnixDomainSocketLibrary.listen(socketFd, backlog);
isBound = true;
} catch (LastErrorException e) {
throw new IOException(e);
}
}
public Socket accept() throws IOException {
// We explicitly do not make this method synchronized, since the
// call to NGUnixDomainSocketLibrary.accept() will block
// indefinitely, causing another thread's call to close() to deadlock.
synchronized (this) {
if (!isBound) {
throw new IllegalStateException("Socket is not bound");
}
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
}
try {
NGUnixDomainSocketLibrary.SockaddrUn sockaddrUn =
new NGUnixDomainSocketLibrary.SockaddrUn();
IntByReference addressLen = new IntByReference();
addressLen.setValue(sockaddrUn.size());
int clientFd = NGUnixDomainSocketLibrary.accept(fd.get(), sockaddrUn, addressLen);
return new NGUnixDomainSocket(clientFd);
} catch (LastErrorException e) {
throw new IOException(e);
}
}
public synchronized void close() throws IOException {
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
try {
// Ensure any pending call to accept() fails.
NGUnixDomainSocketLibrary.close(fd.getAndSet(-1));
isClosed = true;
} catch (LastErrorException e) {
throw new IOException(e);
}
}
}

View File

@ -1,171 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainSocket.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.net.Socket;
/**
* Implements a {@link Socket} backed by a native Unix domain socket.
*
* Instances of this class always return {@code null} for
* {@link Socket#getInetAddress()}, {@link Socket#getLocalAddress()},
* {@link Socket#getLocalSocketAddress()}, {@link Socket#getRemoteSocketAddress()}.
*/
public class NGUnixDomainSocket extends Socket {
private final ReferenceCountedFileDescriptor fd;
private final InputStream is;
private final OutputStream os;
/**
* Creates a Unix domain socket backed by a native file descriptor.
*/
public NGUnixDomainSocket(int fd) {
this.fd = new ReferenceCountedFileDescriptor(fd);
this.is = new NGUnixDomainSocketInputStream();
this.os = new NGUnixDomainSocketOutputStream();
}
public InputStream getInputStream() {
return is;
}
public OutputStream getOutputStream() {
return os;
}
public void shutdownInput() throws IOException {
doShutdown(NGUnixDomainSocketLibrary.SHUT_RD);
}
public void shutdownOutput() throws IOException {
doShutdown(NGUnixDomainSocketLibrary.SHUT_WR);
}
private void doShutdown(int how) throws IOException {
try {
int socketFd = fd.acquire();
if (socketFd != -1) {
NGUnixDomainSocketLibrary.shutdown(socketFd, how);
}
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
public void close() throws IOException {
super.close();
try {
// This might not close the FD right away. In case we are about
// to read or write on another thread, it will delay the close
// until the read or write completes, to prevent the FD from
// being re-used for a different purpose and the other thread
// reading from a different FD.
fd.close();
} catch (LastErrorException e) {
throw new IOException(e);
}
}
private class NGUnixDomainSocketInputStream extends InputStream {
public int read() throws IOException {
ByteBuffer buf = ByteBuffer.allocate(1);
int result;
if (doRead(buf) == 0) {
result = -1;
} else {
// Make sure to & with 0xFF to avoid sign extension
result = 0xFF & buf.get();
}
return result;
}
public int read(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return 0;
}
ByteBuffer buf = ByteBuffer.wrap(b, off, len);
int result = doRead(buf);
if (result == 0) {
result = -1;
}
return result;
}
private int doRead(ByteBuffer buf) throws IOException {
try {
int fdToRead = fd.acquire();
if (fdToRead == -1) {
return -1;
}
return NGUnixDomainSocketLibrary.read(fdToRead, buf, buf.remaining());
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
}
private class NGUnixDomainSocketOutputStream extends OutputStream {
public void write(int b) throws IOException {
ByteBuffer buf = ByteBuffer.allocate(1);
buf.put(0, (byte) (0xFF & b));
doWrite(buf);
}
public void write(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return;
}
ByteBuffer buf = ByteBuffer.wrap(b, off, len);
doWrite(buf);
}
private void doWrite(ByteBuffer buf) throws IOException {
try {
int fdToWrite = fd.acquire();
if (fdToWrite == -1) {
return;
}
int ret = NGUnixDomainSocketLibrary.write(fdToWrite, buf, buf.remaining());
if (ret != buf.remaining()) {
// This shouldn't happen with standard blocking Unix domain sockets.
throw new IOException("Could not write " + buf.remaining() + " bytes as requested " +
"(wrote " + ret + " bytes instead)");
}
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
}
}

View File

@ -1,140 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainSocketLibrary.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import com.sun.jna.Native;
import com.sun.jna.Platform;
import com.sun.jna.Structure;
import com.sun.jna.Union;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
/**
 * Utility class to bridge native Unix domain socket calls to Java using JNA.
 *
 * <p>The static native methods at the bottom are bound directly to the platform C
 * library via {@link Native#register}, so each method maps one-to-one onto the
 * POSIX call of the same name and reports failures by throwing
 * {@link LastErrorException} (carrying errno).
 */
public class NGUnixDomainSocketLibrary {
  /** Protocol family for local (Unix domain) sockets. */
  public static final int PF_LOCAL = 1;
  /** Address family for local (Unix domain) sockets (a.k.a. AF_UNIX). */
  public static final int AF_LOCAL = 1;
  /** Connection-oriented stream socket type for socket(2). */
  public static final int SOCK_STREAM = 1;
  /** shutdown(2) argument: disable further receives. */
  public static final int SHUT_RD = 0;
  /** shutdown(2) argument: disable further sends. */
  public static final int SHUT_WR = 1;
  // Utility class, do not instantiate.
  private NGUnixDomainSocketLibrary() { }
  // BSD platforms write a length byte at the start of struct sockaddr_un.
  private static final boolean HAS_SUN_LEN =
      Platform.isMac() || Platform.isFreeBSD() || Platform.isNetBSD() ||
      Platform.isOpenBSD() || Platform.iskFreeBSD();
  /**
   * Bridges {@code struct sockaddr_un} to and from native code.
   */
  public static class SockaddrUn extends Structure implements Structure.ByReference {
    /**
     * On BSD platforms, the {@code sun_len} and {@code sun_family} values in
     * {@code struct sockaddr_un}.
     */
    public static class SunLenAndFamily extends Structure {
      public byte sunLen;
      public byte sunFamily;
      // JNA lays the struct fields out in exactly this declaration order.
      protected List getFieldOrder() {
        return Arrays.asList(new String[] { "sunLen", "sunFamily" });
      }
    }
    /**
     * On BSD platforms, {@code sunLenAndFamily} will be present.
     * On other platforms, only {@code sunFamily} will be present.
     */
    public static class SunFamily extends Union {
      public SunLenAndFamily sunLenAndFamily;
      public short sunFamily;
    }
    public SunFamily sunFamily = new SunFamily();
    // 104 bytes is the BSD sun_path size; NOTE(review): Linux declares 108 —
    // confirm socket paths used by callers fit within 103 bytes plus NUL.
    public byte[] sunPath = new byte[104];
    /**
     * Constructs an empty {@code struct sockaddr_un}.
     */
    public SockaddrUn() {
      if (HAS_SUN_LEN) {
        sunFamily.sunLenAndFamily = new SunLenAndFamily();
        sunFamily.setType(SunLenAndFamily.class);
      } else {
        sunFamily.setType(Short.TYPE);
      }
      // Re-allocate backing native memory now that the union's active type is set.
      allocateMemory();
    }
    /**
     * Constructs a {@code struct sockaddr_un} with a path whose bytes are encoded
     * using the default encoding of the platform.
     *
     * @param path filesystem path of the socket; must fit in {@code sunPath}
     *     with room for a trailing NUL
     * @throws IOException if the encoded path is too long for the structure
     */
    public SockaddrUn(String path) throws IOException {
      byte[] pathBytes = path.getBytes();
      if (pathBytes.length > sunPath.length - 1) {
        throw new IOException("Cannot fit name [" + path + "] in maximum unix domain socket length");
      }
      System.arraycopy(pathBytes, 0, sunPath, 0, pathBytes.length);
      // NUL-terminate the path as native code expects.
      sunPath[pathBytes.length] = (byte) 0;
      if (HAS_SUN_LEN) {
        // BSD's sun_len holds the total used length of the structure.
        int len = fieldOffset("sunPath") + pathBytes.length;
        sunFamily.sunLenAndFamily = new SunLenAndFamily();
        sunFamily.sunLenAndFamily.sunLen = (byte) len;
        sunFamily.sunLenAndFamily.sunFamily = AF_LOCAL;
        sunFamily.setType(SunLenAndFamily.class);
      } else {
        sunFamily.sunFamily = AF_LOCAL;
        sunFamily.setType(Short.TYPE);
      }
      // Re-allocate backing native memory now that the union's active type is set.
      allocateMemory();
    }
    // JNA lays the struct fields out in exactly this declaration order.
    protected List getFieldOrder() {
      return Arrays.asList(new String[] { "sunFamily", "sunPath" });
    }
  }
  static {
    // Bind the native methods declared below to the platform C library (libc).
    Native.register(Platform.C_LIBRARY_NAME);
  }
  /** socket(2): creates an endpoint for communication; returns a file descriptor. */
  public static native int socket(int domain, int type, int protocol) throws LastErrorException;
  /** bind(2): assigns the given local address to the socket {@code fd}. */
  public static native int bind(int fd, SockaddrUn address, int addressLen)
      throws LastErrorException;
  /** listen(2): marks {@code fd} as a passive socket accepting connections. */
  public static native int listen(int fd, int backlog) throws LastErrorException;
  /** accept(2): accepts a connection, filling in the peer address in/out params. */
  public static native int accept(int fd, SockaddrUn address, IntByReference addressLen)
      throws LastErrorException;
  /** read(2): reads up to {@code count} bytes into {@code buffer}; returns bytes read. */
  public static native int read(int fd, ByteBuffer buffer, int count)
      throws LastErrorException;
  /** write(2): writes up to {@code count} bytes from {@code buffer}; returns bytes written. */
  public static native int write(int fd, ByteBuffer buffer, int count)
      throws LastErrorException;
  /** close(2): releases the file descriptor. */
  public static native int close(int fd) throws LastErrorException;
  /** shutdown(2): disables sends and/or receives per {@code how} (SHUT_RD / SHUT_WR). */
  public static native int shutdown(int fd, int how) throws LastErrorException;
}

View File

@ -1,90 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeLibrary.java
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import java.nio.ByteBuffer;
import com.sun.jna.*;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.platform.win32.WinNT.*;
import com.sun.jna.platform.win32.WinBase.*;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.win32.W32APIOptions;
/**
 * JNA mapping of the subset of kernel32.dll used to serve Win32 named pipes.
 *
 * <p>Each method mirrors the Win32 API function of the same name; consult the
 * Windows API documentation for full parameter and error semantics. Errors are
 * retrieved explicitly via {@link #GetLastError()} rather than thrown.
 */
public interface NGWin32NamedPipeLibrary extends WinNT {
  /** CreateNamedPipe dwOpenMode flag: the pipe is bidirectional. */
  int PIPE_ACCESS_DUPLEX = 3;
  /** CreateNamedPipe nMaxInstances value: no fixed limit on pipe instances. */
  int PIPE_UNLIMITED_INSTANCES = 255;
  /** CreateNamedPipe dwOpenMode flag (0x80000): fail unless this is the first instance. */
  int FILE_FLAG_FIRST_PIPE_INSTANCE = 524288;
  // Singleton bound to kernel32 using the Unicode (W-suffixed) entry points.
  NGWin32NamedPipeLibrary INSTANCE =
      (NGWin32NamedPipeLibrary) Native.loadLibrary(
          "kernel32",
          NGWin32NamedPipeLibrary.class,
          W32APIOptions.UNICODE_OPTIONS);
  /** Creates an instance of a named pipe; returns INVALID_HANDLE_VALUE on failure. */
  HANDLE CreateNamedPipe(
      String lpName,
      int dwOpenMode,
      int dwPipeMode,
      int nMaxInstances,
      int nOutBufferSize,
      int nInBufferSize,
      int nDefaultTimeOut,
      SECURITY_ATTRIBUTES lpSecurityAttributes);
  /** Waits for (or completes) a client connection on a pipe instance. */
  boolean ConnectNamedPipe(
      HANDLE hNamedPipe,
      Pointer lpOverlapped);
  /** Disconnects the server end of a pipe instance from its client. */
  boolean DisconnectNamedPipe(
      HANDLE hObject);
  /** Reads from the pipe, possibly asynchronously when lpOverlapped is non-null. */
  boolean ReadFile(
      HANDLE hFile,
      Memory lpBuffer,
      int nNumberOfBytesToRead,
      IntByReference lpNumberOfBytesRead,
      Pointer lpOverlapped);
  /** Writes to the pipe, possibly asynchronously when lpOverlapped is non-null. */
  boolean WriteFile(
      HANDLE hFile,
      ByteBuffer lpBuffer,
      int nNumberOfBytesToWrite,
      IntByReference lpNumberOfBytesWritten,
      Pointer lpOverlapped);
  /** Closes a kernel object handle. */
  boolean CloseHandle(
      HANDLE hObject);
  /** Retrieves the result (and byte count) of a completed overlapped operation. */
  boolean GetOverlappedResult(
      HANDLE hFile,
      Pointer lpOverlapped,
      IntByReference lpNumberOfBytesTransferred,
      boolean wait);
  /** Cancels pending I/O issued on the handle (optionally a specific operation). */
  boolean CancelIoEx(
      HANDLE hObject,
      Pointer lpOverlapped);
  /** Creates an event object, used here as OVERLAPPED.hEvent completion signals. */
  HANDLE CreateEvent(
      SECURITY_ATTRIBUTES lpEventAttributes,
      boolean bManualReset,
      boolean bInitialState,
      String lpName);
  /** Blocks until the handle is signaled or dwMilliseconds elapse. */
  int WaitForSingleObject(
      HANDLE hHandle,
      int dwMilliseconds
  );
  /** Returns the calling thread's last-error code. */
  int GetLastError();
}

View File

@ -1,173 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeServerSocket.java
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.platform.win32.WinBase;
import com.sun.jna.platform.win32.WinError;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.platform.win32.WinNT.HANDLE;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
/**
 * A {@link ServerSocket} implemented on top of Win32 named pipes.
 *
 * <p>Each {@link #accept()} creates a fresh pipe instance for the given path and
 * performs an overlapped (asynchronous) connect on it. A companion
 * "&lt;path&gt;_lock" pipe created with FILE_FLAG_FIRST_PIPE_INSTANCE ensures only
 * one server exists per path.
 */
public class NGWin32NamedPipeServerSocket extends ServerSocket {
  private static final NGWin32NamedPipeLibrary API = NGWin32NamedPipeLibrary.INSTANCE;
  // All named pipes live under this namespace prefix on Windows.
  private static final String WIN32_PIPE_PREFIX = "\\\\.\\pipe\\";
  private static final int BUFFER_SIZE = 65535;
  // Pipe instances created but not yet connected to a client.
  private final LinkedBlockingQueue<HANDLE> openHandles;
  // Pipe instances currently connected to a client.
  private final LinkedBlockingQueue<HANDLE> connectedHandles;
  // Invoked by each NGWin32NamedPipeSocket on close to release its handle here.
  private final NGWin32NamedPipeSocket.CloseCallback closeCallback;
  private final String path;
  private final int maxInstances;
  // Handle to the "<path>_lock" pipe guaranteeing single-server ownership.
  private final HANDLE lockHandle;
  /**
   * Creates a server for {@code path} with an unlimited number of pipe instances.
   */
  public NGWin32NamedPipeServerSocket(String path) throws IOException {
    this(NGWin32NamedPipeLibrary.PIPE_UNLIMITED_INSTANCES, path);
  }
  /**
   * Creates a server for {@code path}, allowing at most {@code maxInstances}
   * concurrent pipe instances. The WIN32_PIPE_PREFIX is prepended when missing.
   *
   * @throws IOException if the lock pipe cannot be created (e.g. another server
   *     already owns this path) or cannot be disconnected
   */
  public NGWin32NamedPipeServerSocket(int maxInstances, String path) throws IOException {
    this.openHandles = new LinkedBlockingQueue<>();
    this.connectedHandles = new LinkedBlockingQueue<>();
    this.closeCallback = handle -> {
      if (connectedHandles.remove(handle)) {
        closeConnectedPipe(handle, false);
      }
      if (openHandles.remove(handle)) {
        closeOpenPipe(handle);
      }
    };
    this.maxInstances = maxInstances;
    if (!path.startsWith(WIN32_PIPE_PREFIX)) {
      this.path = WIN32_PIPE_PREFIX + path;
    } else {
      this.path = path;
    }
    // FILE_FLAG_FIRST_PIPE_INSTANCE makes creation fail if the lock already exists,
    // so at most one server can own a given path at a time.
    String lockPath = this.path + "_lock";
    lockHandle = API.CreateNamedPipe(
        lockPath,
        NGWin32NamedPipeLibrary.FILE_FLAG_FIRST_PIPE_INSTANCE | NGWin32NamedPipeLibrary.PIPE_ACCESS_DUPLEX,
        0,
        1,
        BUFFER_SIZE,
        BUFFER_SIZE,
        0,
        null);
    if (lockHandle == NGWin32NamedPipeLibrary.INVALID_HANDLE_VALUE) {
      throw new IOException(String.format("Could not create lock for %s, error %d", lockPath, API.GetLastError()));
    } else {
      if (!API.DisconnectNamedPipe(lockHandle)) {
        throw new IOException(String.format("Could not disconnect lock %d", API.GetLastError()));
      }
    }
  }
  /**
   * Not supported: the pipe path is fixed at construction time.
   */
  public void bind(SocketAddress endpoint) throws IOException {
    throw new IOException("Win32 named pipes do not support bind(), pass path to constructor");
  }
  /**
   * Creates a new pipe instance and blocks until a client connects to it.
   *
   * <p>Four outcomes of the overlapped ConnectNamedPipe are handled: immediate
   * success, ERROR_PIPE_CONNECTED (client raced ahead), ERROR_NO_DATA (client
   * already gone), and ERROR_IO_PENDING (wait via GetOverlappedResult).
   */
  public Socket accept() throws IOException {
    HANDLE handle = API.CreateNamedPipe(
        path,
        NGWin32NamedPipeLibrary.PIPE_ACCESS_DUPLEX | WinNT.FILE_FLAG_OVERLAPPED,
        0,
        maxInstances,
        BUFFER_SIZE,
        BUFFER_SIZE,
        0,
        null);
    if (handle == NGWin32NamedPipeLibrary.INVALID_HANDLE_VALUE) {
      throw new IOException(String.format("Could not create named pipe, error %d", API.GetLastError()));
    }
    openHandles.add(handle);
    // Manual-reset event signaled when the overlapped connect completes.
    HANDLE connWaitable = API.CreateEvent(null, true, false, null);
    WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
    olap.hEvent = connWaitable;
    olap.write();
    boolean immediate = API.ConnectNamedPipe(handle, olap.getPointer());
    if (immediate) {
      openHandles.remove(handle);
      connectedHandles.add(handle);
      return new NGWin32NamedPipeSocket(handle, closeCallback);
    }
    int connectError = API.GetLastError();
    if (connectError == WinError.ERROR_PIPE_CONNECTED) {
      // Client connected between CreateNamedPipe() and ConnectNamedPipe(); success.
      openHandles.remove(handle);
      connectedHandles.add(handle);
      return new NGWin32NamedPipeSocket(handle, closeCallback);
    } else if (connectError == WinError.ERROR_NO_DATA) {
      // Client has connected and disconnected between CreateNamedPipe() and
      // ConnectNamedPipe(). The connection is broken, but a socket is returned
      // anyway to avoid looping here; the actual error will surface when the
      // caller tries to read from / write to the pipe.
      // NOTE(review): unlike the other success paths, the handle stays in
      // openHandles here, so close() will treat it as an unconnected pipe.
      return new NGWin32NamedPipeSocket(handle, closeCallback);
    } else if (connectError == WinError.ERROR_IO_PENDING) {
      // Overlapped connect in progress: block until it completes.
      if (!API.GetOverlappedResult(handle, olap.getPointer(), new IntByReference(), true)) {
        openHandles.remove(handle);
        closeOpenPipe(handle);
        throw new IOException("GetOverlappedResult() failed for connect operation: " + API.GetLastError());
      }
      openHandles.remove(handle);
      connectedHandles.add(handle);
      return new NGWin32NamedPipeSocket(handle, closeCallback);
    } else {
      throw new IOException("ConnectNamedPipe() failed with: " + connectError);
    }
  }
  /**
   * Closes all open and connected pipe instances, then releases the lock pipe.
   */
  public void close() throws IOException {
    try {
      List<HANDLE> handlesToClose = new ArrayList<>();
      openHandles.drainTo(handlesToClose);
      for (HANDLE handle : handlesToClose) {
        closeOpenPipe(handle);
      }
      List<HANDLE> handlesToDisconnect = new ArrayList<>();
      connectedHandles.drainTo(handlesToDisconnect);
      for (HANDLE handle : handlesToDisconnect) {
        closeConnectedPipe(handle, true);
      }
    } finally {
      API.CloseHandle(lockHandle);
    }
  }
  // Cancels any pending (unconnected) I/O on the instance and closes it.
  private void closeOpenPipe(HANDLE handle) throws IOException {
    API.CancelIoEx(handle, null);
    API.CloseHandle(handle);
  }
  // Disconnects and closes a connected instance; outside of shutdown, first
  // gives the client up to 10 seconds to finish by waiting on the handle.
  private void closeConnectedPipe(HANDLE handle, boolean shutdown) throws IOException {
    if (!shutdown) {
      API.WaitForSingleObject(handle, 10000);
    }
    API.DisconnectNamedPipe(handle);
    API.CloseHandle(handle);
  }
}

View File

@ -1,172 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeSocket.java
// Made change in `read` to read just the amount of bytes available.
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.Memory;
import com.sun.jna.platform.win32.WinBase;
import com.sun.jna.platform.win32.WinError;
import com.sun.jna.platform.win32.WinNT.HANDLE;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.ByteBuffer;
/**
 * A {@link Socket} backed by one end of a Win32 named pipe.
 *
 * <p>All I/O goes through overlapped ReadFile/WriteFile calls that block via
 * GetOverlappedResult, using the two event handles below as completion signals.
 * Closing the socket delegates to the owning server via {@link CloseCallback}.
 */
public class NGWin32NamedPipeSocket extends Socket {
  private static final NGWin32NamedPipeLibrary API = NGWin32NamedPipeLibrary.INSTANCE;
  private final HANDLE handle;
  private final CloseCallback closeCallback;
  private final InputStream is;
  private final OutputStream os;
  // Manual-reset events used as OVERLAPPED.hEvent for reads and writes respectively.
  private final HANDLE readerWaitable;
  private final HANDLE writerWaitable;
  /** Invoked on close() so the owner can disconnect and release the pipe handle. */
  interface CloseCallback {
    void onNamedPipeSocketClose(HANDLE handle) throws IOException;
  }
  /**
   * Wraps an already-connected pipe {@code handle}.
   *
   * @throws IOException if either completion event cannot be created
   */
  public NGWin32NamedPipeSocket(
      HANDLE handle,
      NGWin32NamedPipeSocket.CloseCallback closeCallback) throws IOException {
    this.handle = handle;
    this.closeCallback = closeCallback;
    this.readerWaitable = API.CreateEvent(null, true, false, null);
    if (readerWaitable == null) {
      throw new IOException("CreateEvent() failed ");
    }
    writerWaitable = API.CreateEvent(null, true, false, null);
    if (writerWaitable == null) {
      throw new IOException("CreateEvent() failed ");
    }
    this.is = new NGWin32NamedPipeSocketInputStream(handle);
    this.os = new NGWin32NamedPipeSocketOutputStream(handle);
  }
  @Override
  public InputStream getInputStream() {
    return is;
  }
  @Override
  public OutputStream getOutputStream() {
    return os;
  }
  @Override
  public void close() throws IOException {
    closeCallback.onNamedPipeSocketClose(handle);
  }
  // No-op: half-close is not implemented for this pipe-backed socket.
  @Override
  public void shutdownInput() throws IOException {
  }
  // No-op: half-close is not implemented for this pipe-backed socket.
  @Override
  public void shutdownOutput() throws IOException {
  }
  /** InputStream reading from the pipe via overlapped ReadFile. */
  private class NGWin32NamedPipeSocketInputStream extends InputStream {
    private final HANDLE handle;
    NGWin32NamedPipeSocketInputStream(HANDLE handle) {
      this.handle = handle;
    }
    @Override
    public int read() throws IOException {
      int result;
      byte[] b = new byte[1];
      // A zero-byte read is treated as end-of-stream here.
      if (read(b) == 0) {
        result = -1;
      } else {
        result = 0xFF & b[0];
      }
      return result;
    }
    /**
     * Reads up to {@code len} bytes; returns the number actually delivered by
     * the pipe (may be fewer than requested), copied into {@code b} at {@code off}.
     */
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      Memory readBuffer = new Memory(len);
      WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
      olap.hEvent = readerWaitable;
      // Flush the Java-side OVERLAPPED fields into native memory before the call.
      olap.write();
      boolean immediate = API.ReadFile(handle, readBuffer, len, null, olap.getPointer());
      if (!immediate) {
        int lastError = API.GetLastError();
        // ERROR_IO_PENDING just means the overlapped read is in flight.
        if (lastError != WinError.ERROR_IO_PENDING) {
          throw new IOException("ReadFile() failed: " + lastError);
        }
      }
      // Block until the read completes and fetch the actual byte count.
      IntByReference read = new IntByReference();
      if (!API.GetOverlappedResult(handle, olap.getPointer(), read, true)) {
        int lastError = API.GetLastError();
        throw new IOException("GetOverlappedResult() failed for read operation: " + lastError);
      }
      int actualLen = read.getValue();
      byte[] byteArray = readBuffer.getByteArray(0, actualLen);
      System.arraycopy(byteArray, 0, b, off, actualLen);
      return actualLen;
    }
  }
  /** OutputStream writing to the pipe via overlapped WriteFile. */
  private class NGWin32NamedPipeSocketOutputStream extends OutputStream {
    private final HANDLE handle;
    NGWin32NamedPipeSocketOutputStream(HANDLE handle) {
      this.handle = handle;
    }
    @Override
    public void write(int b) throws IOException {
      write(new byte[]{(byte) (0xFF & b)});
    }
    /**
     * Writes {@code len} bytes from {@code b} starting at {@code off}; fails if
     * the pipe accepts fewer bytes than requested.
     */
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      ByteBuffer data = ByteBuffer.wrap(b, off, len);
      WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
      olap.hEvent = writerWaitable;
      // Flush the Java-side OVERLAPPED fields into native memory before the call.
      olap.write();
      boolean immediate = API.WriteFile(handle, data, len, null, olap.getPointer());
      if (!immediate) {
        int lastError = API.GetLastError();
        // ERROR_IO_PENDING just means the overlapped write is in flight.
        if (lastError != WinError.ERROR_IO_PENDING) {
          throw new IOException("WriteFile() failed: " + lastError);
        }
      }
      // Block until the write completes and verify the full payload went out.
      IntByReference written = new IntByReference();
      if (!API.GetOverlappedResult(handle, olap.getPointer(), written, true)) {
        int lastError = API.GetLastError();
        throw new IOException("GetOverlappedResult() failed for write operation: " + lastError);
      }
      if (written.getValue() != len) {
        throw new IOException("WriteFile() wrote less bytes than requested");
      }
    }
  }
}

View File

@ -1,82 +0,0 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/ReferenceCountedFileDescriptor.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import java.io.IOException;
/**
 * Encapsulates a file descriptor plus a reference count to ensure close requests
 * only close the file descriptor once the last reference to the file descriptor
 * is released.
 *
 * If not explicitly closed, the file descriptor will be closed when
 * this object is finalized.
 */
public class ReferenceCountedFileDescriptor {
  // Native fd, or -1 once it has actually been closed.
  private int fd;
  // Number of outstanding acquire() calls not yet matched by release().
  private int refCount;
  // Set when close() was requested while references were still outstanding.
  private boolean deferredClose;

  public ReferenceCountedFileDescriptor(int fd) {
    this.fd = fd;
    this.refCount = 0;
    this.deferredClose = false;
  }

  // Safety net: close the descriptor if the owner never did.
  protected void finalize() throws IOException {
    close();
  }

  /**
   * Takes a reference and returns the current fd (-1 if already closed).
   * Every acquire() must be paired with a release().
   */
  public synchronized int acquire() {
    refCount += 1;
    return fd;
  }

  /**
   * Drops a reference; performs a deferred close if this was the last
   * reference and close() was requested in the meantime.
   */
  public synchronized void release() throws IOException {
    refCount -= 1;
    if (refCount == 0 && deferredClose && fd != -1) {
      doClose();
    }
  }

  /**
   * Closes the fd now if unreferenced, otherwise schedules the close for
   * when the last reference is released. Idempotent.
   */
  public synchronized void close() throws IOException {
    if (fd == -1 || deferredClose) {
      return; // already closed, or a close is already scheduled
    }
    if (refCount == 0) {
      doClose();
    } else {
      // Another thread has the FD. We'll close it when they release the reference.
      deferredClose = true;
    }
  }

  // Performs the native close and marks the fd invalid.
  private void doClose() throws IOException {
    try {
      NGUnixDomainSocketLibrary.close(fd);
      fd = -1;
    } catch (LastErrorException e) {
      throw new IOException(e);
    }
  }
}

View File

@ -21,8 +21,10 @@ object BasicCommandStrings {
val TerminateAction: String = Exit
def helpBrief =
(HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>').")
(
HelpCommand,
s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>')."
)
def helpDetailed = s"""$HelpCommand
Prints a help summary.
@ -73,7 +75,7 @@ $HelpCommand <regular expression>
This will be used as the default level for logging from commands, settings, and tasks.
Any explicit `logLevel` configuration in a project overrides this setting.
-$level
-$level OR --$level
Sets the global logging level as described above, but does so before any other commands are executed on startup, including project loading.
This is useful as a startup option:
@ -83,9 +85,12 @@ $HelpCommand <regular expression>
def runEarly(command: String) = s"$EarlyCommand($command)"
private[sbt] def isEarlyCommand(s: String): Boolean = {
val levelOptions = Level.values.toSeq map { "-" + _ }
val levelOptions = Level.values.toSeq flatMap { elem =>
List("-" + elem, "--" + elem)
}
(s.startsWith(EarlyCommand + "(") && s.endsWith(")")) ||
(levelOptions contains s)
(levelOptions contains s) ||
(s.startsWith("-" + AddPluginSbtFileCommand) || s.startsWith("--" + AddPluginSbtFileCommand))
}
val EarlyCommand = "early"
@ -98,6 +103,14 @@ $HelpCommand <regular expression>
The order is preserved between all early commands, so `sbt "early(a)" "early(b)"` executes `a` and `b` in order.
"""
def addPluginSbtFileHelp = {
val brief =
(s"--$AddPluginSbtFileCommand=<file>", "Adds the given *.sbt file to the plugin build.")
Help(brief)
}
val AddPluginSbtFileCommand = "addPluginSbtFile"
def ReadCommand = "<"
def ReadFiles = " file1 file2 ..."
def ReadDetailed =
@ -137,8 +150,10 @@ $HelpCommand <regular expression>
def Multi = ";"
def MultiBrief =
(Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands.")
(
Multi + " <command> (" + Multi + " <command>)*",
"Runs the provided semicolon-separated commands."
)
def MultiDetailed =
Multi + " command1 " + Multi + """ command2 ...
@ -185,24 +200,12 @@ $AliasCommand name=
def Client = "client"
def ClientDetailed = "Provides an interactive prompt from which commands can be run on a server."
def DashClient = "-client"
def DashDashClient = "--client"
def StashOnFailure = "sbtStashOnFailure"
def PopOnFailure = "sbtPopOnFailure"
// commands with poor choices for names since they clash with the usual conventions for command line options
// these are not documented and are mainly internal commands and can be removed without a full deprecation cycle
object Compat {
def OnFailure = "-"
def ClearOnFailure = "--"
def FailureWall = "---"
def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure)
def ClearOnFailureDeprecated =
deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure)
def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall)
private[this] def deprecatedAlias(oldName: String, newName: String): String =
s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in a later version"
}
def FailureWall = "resumeFromFailure"
def ClearOnFailure = "sbtClearOnFailure"

View File

@ -56,7 +56,7 @@ object BasicCommands {
client,
read,
alias
) ++ compatCommands
)
def nop: Command = Command.custom(s => success(() => s))
def ignore: Command = Command.command(FailureWall)(idFun)
@ -66,21 +66,50 @@ object BasicCommands {
private[this] def levelParser: Parser[String] =
Iterator(Level.Debug, Level.Info, Level.Warn, Level.Error) map (l => token(l.toString)) reduce (_ | _)
private[this] def addPluginSbtFileParser: Parser[File] = {
token(AddPluginSbtFileCommand) ~> (":" | "=" | Space) ~> (StringBasic).examples(
"/some/extra.sbt"
) map {
new File(_)
}
}
private[this] def addPluginSbtFileStringParser: Parser[String] = {
token(
token(AddPluginSbtFileCommand) ~ (":" | "=" | Space) ~ (StringBasic)
.examples("/some/extra.sbt") map {
case s1 ~ s2 ~ s3 => s1 + s2 + s3
}
)
}
private[this] def earlyParser: State => Parser[String] = (s: State) => {
val p1 = token(EarlyCommand + "(") flatMap (_ => otherCommandParser(s) <~ token(")"))
val p2 = token("-") flatMap (_ => levelParser)
p1 | p2
val p2 = (token("-") | token("--")) flatMap (_ => levelParser)
val p3 = (token("-") | token("--")) flatMap (_ => addPluginSbtFileStringParser)
p1 | p2 | p3
}
private[this] def earlyHelp = Help(EarlyCommand, EarlyCommandBrief, EarlyCommandDetailed)
/**
* Adds additional *.sbt to the plugin build.
* This must be combined with early command as: --addPluginSbtFile=/tmp/extra.sbt
*/
def addPluginSbtFile: Command = Command.arb(_ => addPluginSbtFileParser, addPluginSbtFileHelp) {
(s, extraSbtFile) =>
val extraFiles = s.get(BasicKeys.extraMetaSbtFiles).toList.flatten
s.put(BasicKeys.extraMetaSbtFiles, extraFiles :+ extraSbtFile)
}
def help: Command = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser)
def helpParser(s: State): Parser[() => State] = {
val h = (Help.empty /: s.definedCommands)(
(a, b) =>
a ++ (try b.help(s)
catch { case NonFatal(_) => Help.empty }))
catch { case NonFatal(_) => Help.empty })
)
val helpCommands = h.detail.keySet
val spacedArg = singleArgument(helpCommands).?
applyEffect(spacedArg)(runHelp(s, h))
@ -104,10 +133,14 @@ object BasicCommands {
}
def completionsCommand: Command =
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)(
runCompletions(_)(_))
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(_ => completionsParser)(
runCompletions(_)(_)
)
def completionsParser(state: State): Parser[String] = {
@deprecated("No longer public", "1.1.1")
def completionsParser(state: State): Parser[String] = completionsParser
private[this] def completionsParser: Parser[String] = {
val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => nq ++ s }
val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted)
token(quotedOrUnquotedSingleArgument ?? "" examples ("", " "))
@ -123,10 +156,11 @@ object BasicCommands {
}
def multiParser(s: State): Parser[List[String]] = {
val nonSemi = token(charClass(_ != ';').+, hide = const(true))
val nonSemi = token(charClass(_ != ';', "not ';'").+, hide = const(true))
val semi = token(';' ~> OptSpace)
val part = semi flatMap (_ =>
matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace))
val part = semi flatMap (
_ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace)
)
(part map (_.trim)).+ map (_.toList)
}
@ -142,40 +176,26 @@ object BasicCommands {
matched(s.combinedParser | token(any, hide = const(true)))
def ifLast: Command =
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) =>
if (s.remainingCommands.isEmpty) arg :: s else s)
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)(
(s, arg) => if (s.remainingCommands.isEmpty) arg :: s else s
)
def append: Command =
Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)(
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)))
(s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source))
)
def setOnFailure: Command =
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)((s, arg) =>
s.copy(onFailure = Some(Exec(arg, s.source))))
private[sbt] def compatCommands = Seq(
Command.command(Compat.ClearOnFailure) { s =>
s.log.warn(Compat.ClearOnFailureDeprecated)
s.copy(onFailure = None)
},
Command.arb(
s =>
token(Compat.OnFailure, hide = const(true))
.flatMap(_ => otherCommandParser(s))) { (s, arg) =>
s.log.warn(Compat.OnFailureDeprecated)
s.copy(onFailure = Some(Exec(arg, s.source)))
},
Command.command(Compat.FailureWall) { s =>
s.log.warn(Compat.FailureWallDeprecated)
s
}
)
Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)(
(s, arg) => s.copy(onFailure = Some(Exec(arg, s.source)))
)
def clearOnFailure: Command = Command.command(ClearOnFailure)(s => s.copy(onFailure = None))
def stashOnFailure: Command =
Command.command(StashOnFailure)(s =>
s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten))
Command.command(StashOnFailure)(
s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)
)
def popOnFailure: Command = Command.command(PopOnFailure) { s =>
val stack = s.get(OnFailureStack).getOrElse(Nil)
@ -185,19 +205,19 @@ object BasicCommands {
}
def reboot: Command =
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootOptionParser) {
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(_ => rebootOptionParser) {
case (s, (full, currentOnly)) =>
s.reboot(full, currentOnly)
}
@deprecated("Use rebootOptionParser", "1.1.0")
def rebootParser(s: State): Parser[Boolean] =
rebootOptionParser(s) map { case (full, currentOnly) => full }
def rebootParser(s: State): Parser[Boolean] = rebootOptionParser map { case (full, _) => full }
private[sbt] def rebootOptionParser(s: State): Parser[(Boolean, Boolean)] =
token(
Space ~> (("full" ^^^ ((true, false))) |
("dev" ^^^ ((false, true))))) ?? ((false, false))
private[sbt] def rebootOptionParser: Parser[(Boolean, Boolean)] = {
val fullOption = "full" ^^^ ((true, false))
val devOption = "dev" ^^^ ((false, true))
token(Space ~> (fullOption | devOption)) ?? ((false, false))
}
def call: Command =
Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) {
@ -220,8 +240,9 @@ object BasicCommands {
private[this] def className: Parser[String] = {
val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.")
def single(s: String) = Completions.single(Completion.displayOnly(s))
val compl = TokenCompletions.fixed((seen, _) =>
if (seen.startsWith("-")) Completions.nil else single("<class name>"))
val compl = TokenCompletions.fixed(
(seen, _) => if (seen.startsWith("-")) Completions.nil else single("<class name>")
)
token(base, compl)
}
@ -246,10 +267,9 @@ object BasicCommands {
def historyParser(s: State): Parser[() => State] =
Command.applyEffect(HistoryCommands.actionParser) { histFun =>
val logError = (msg: String) => s.log.error(msg)
val hp = s get historyPath getOrElse None
val hp = (s get historyPath).flatten
val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq
histFun(CHistory(lines, hp, logError)) match {
histFun(CHistory(lines, hp)) match {
case Some(commands) =>
commands foreach println //printing is more appropriate than logging
(commands ::: s).continue
@ -287,7 +307,7 @@ object BasicCommands {
case e :: Nil if e.commandLine == "shell" => Nil
case xs => xs map (_.commandLine)
})
NetworkClient.run(arguments)
NetworkClient.run(s0.configuration, arguments)
"exit" :: s0.copy(remainingCommands = Nil)
}
@ -410,7 +430,8 @@ object BasicCommands {
}
def delegateToAlias(name: String, orElse: Parser[() => State])(
state: State): Parser[() => State] =
state: State
): Parser[() => State] =
aliases(state, (nme, _) => nme == name).headOption match {
case None => orElse
case Some((n, v)) => aliasBody(n, v)(state)

View File

@ -10,17 +10,28 @@ package sbt
import java.io.File
import sbt.internal.util.AttributeKey
import sbt.internal.inc.classpath.ClassLoaderCache
import sbt.internal.server.ServerHandler
import sbt.librarymanagement.ModuleID
import sbt.util.Level
object BasicKeys {
val historyPath = AttributeKey[Option[File]](
"history",
"The location where command line history is persisted.",
40)
40
)
val extraMetaSbtFiles = AttributeKey[Seq[File]](
"extraMetaSbtFile",
"Additional plugin.sbt files.",
10000
)
val shellPrompt = AttributeKey[State => String](
"shell-prompt",
"The function that constructs the command prompt from the current build state.",
10000)
10000
)
val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000)
val serverPort =
AttributeKey[Int]("server-port", "The port number used by server command.", 10000)
@ -29,35 +40,66 @@ object BasicKeys {
AttributeKey[String]("serverHost", "The host used by server command.", 10000)
val serverAuthentication =
AttributeKey[Set[ServerAuthentication]]("serverAuthentication",
"Method of authenticating server command.",
10000)
AttributeKey[Set[ServerAuthentication]](
"serverAuthentication",
"Method of authenticating server command.",
10000
)
val serverConnectionType =
AttributeKey[ConnectionType]("serverConnectionType",
"The wire protocol for the server command.",
10000)
AttributeKey[ConnectionType](
"serverConnectionType",
"The wire protocol for the server command.",
10000
)
val fullServerHandlers =
AttributeKey[Seq[ServerHandler]](
"fullServerHandlers",
"Combines default server handlers and user-defined handlers.",
10000
)
val autoStartServer =
AttributeKey[Boolean](
"autoStartServer",
"If true, the sbt server will startup automatically during interactive sessions.",
10000
)
// Unlike other BasicKeys, this is not used directly as a setting key,
// and severLog / logLevel is used instead.
private[sbt] val serverLogLevel =
AttributeKey[Level.Value]("serverLogLevel", "The log level for the server.", 10000)
private[sbt] val logLevel =
AttributeKey[Level.Value]("logLevel", "The amount of logging sent to the screen.", 10)
private[sbt] val interactive = AttributeKey[Boolean](
"interactive",
"True if commands are currently being entered from an interactive environment.",
10)
10
)
private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache](
"class-loader-cache",
"Caches class loaders based on the classpath entries and last modified times.",
10)
10
)
private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]](
"on-failure-stack",
"Stack that remembers on-failure handlers.",
10)
10
)
private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean](
"explicit-global-log-levels",
"True if the global logging levels were explicitly set by the user.",
10)
10
)
private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]](
"templateResolverInfos",
"List of template resolver infos.",
1000)
1000
)
}
case class TemplateResolverInfo(module: ModuleID, implementationClass: String)

View File

@ -67,18 +67,21 @@ object Command {
new SimpleCommand(name, help, parser, AttributeMap.empty)
def make(name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[() => State]): Command =
parser: State => Parser[() => State]
): Command =
make(name, Help(name, briefHelp, detail))(parser)
// General command construction
/** Construct a command with the given name, parser and effect. */
def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])(
effect: (State, T) => State): Command =
def apply[T](name: String, help: Help = Help.empty)(
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
make(name, help)(applyEffect(parser)(effect))
def apply[T](name: String, briefHelp: (String, String), detail: String)(
parser: State => Parser[T])(effect: (State, T) => State): Command =
parser: State => Parser[T]
)(effect: (State, T) => State): Command =
apply(name, Help(name, briefHelp, detail))(parser)(effect)
// No-argument command construction
@ -97,18 +100,21 @@ object Command {
make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state)))
def single(name: String, briefHelp: (String, String), detail: String)(
f: (State, String) => State): Command =
f: (State, String) => State
): Command =
single(name, Help(name, briefHelp, detail))(f)
// Multi-argument command construction
/** Construct a multi-argument command with the given name, tab completion display and effect. */
def args(name: String, display: String, help: Help = Help.empty)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
make(name, help)(state => spaceDelimited(display) map apply1(f, state))
def args(name: String, briefHelp: (String, String), detail: String, display: String)(
f: (State, Seq[String]) => State): Command =
f: (State, Seq[String]) => State
): Command =
args(name, display, Help(name, briefHelp, detail))(f)
// create ArbitraryCommand
@ -120,7 +126,8 @@ object Command {
customHelp(parser, const(help))
def arb[T](parser: State => Parser[T], help: Help = Help.empty)(
effect: (State, T) => State): Command =
effect: (State, T) => State
): Command =
custom(applyEffect(parser)(effect), help)
// misc Command object utilities
@ -129,8 +136,9 @@ object Command {
def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p map (t => () => f(t))
def applyEffect[T](parser: State => Parser[T])(
effect: (State, T) => State): State => Parser[() => State] =
def applyEffect[T](
parser: State => Parser[T]
)(effect: (State, T) => State): State => Parser[() => State] =
s => applyEffect(parser(s))(t => effect(s, t))
def combine(cmds: Seq[Command]): State => Parser[() => State] = {
@ -140,7 +148,8 @@ object Command {
}
private[this] def separateCommands(
cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
cmds: Seq[Command]
): (Seq[SimpleCommand], Seq[ArbitraryCommand]) =
Util.separate(cmds) { case s: SimpleCommand => Left(s); case a: ArbitraryCommand => Right(a) }
private[this] def apply1[A, B, C](f: (A, B) => C, a: A): B => () => C = b => () => f(a, b)
@ -155,13 +164,26 @@ object Command {
}
def simpleParser(
commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] =
commandMap: Map[String, State => Parser[() => State]]
): State => Parser[() => State] =
state =>
token(OpOrID examples commandMap.keys.toSet) flatMap (id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
})
token(OpOrID examples commandMap.keys.toSet) flatMap (
id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
}
)
def process(command: String, state: State): State = {
val parser = combine(state.definedCommands)
parse(command, parser(state)) match {
case Right(s) => s() // apply command. command side effects happen here
case Left(errMsg) =>
state.log error errMsg
state.fail
}
}
def invalidValue(label: String, allowed: Iterable[String])(value: String): String =
s"Not a valid $label: $value" + similar(value, allowed)
@ -171,22 +193,25 @@ object Command {
if (suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")")
}
def suggestions(a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3): Seq[String] =
def suggestions(
a: String,
bs: Seq[String],
maxDistance: Int = 3,
maxSuggestions: Int = 3
): Seq[String] =
bs map (b => (b, distance(a, b))) filter (_._2 <= maxDistance) sortBy (_._2) take (maxSuggestions) map (_._1)
def distance(a: String, b: String): Int =
EditDistance.levenshtein(a,
b,
insertCost = 1,
deleteCost = 1,
subCost = 2,
transposeCost = 1,
matchCost = -1,
caseCost = 1,
transpositions = true)
EditDistance.levenshtein(
a,
b,
insertCost = 1,
deleteCost = 1,
subCost = 2,
matchCost = -1,
caseCost = 1,
transpositions = true
)
def spacedAny(name: String): Parser[String] = spacedC(name, any)
@ -222,9 +247,11 @@ object Help {
def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String]): Help =
apply(briefHelp, detailedHelp, Set.empty[String])
def apply(briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]): Help =
def apply(
briefHelp: Seq[(String, String)],
detailedHelp: Map[String, String],
more: Set[String]
): Help =
new Help0(briefHelp, detailedHelp, more)
def more(name: String, detailedHelp: String): Help =

View File

@ -15,6 +15,7 @@ import sbt.internal.util.complete.Parser
import sbt.internal.util.complete.DefaultParsers._
import sbt.io.IO
import sbt.io.syntax._
object CommandUtil {
def readLines(files: Seq[File]): Seq[String] =
@ -89,4 +90,7 @@ object CommandUtil {
details.map { case (k, v) => k + "\n\n " + v } mkString ("\n", "\n\n", "\n")
final val HelpPatternFlags = Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE
private[sbt] def isSbtBuild(baseDir: File) =
(baseDir / "project").exists() || (baseDir * "*.sbt").get.nonEmpty
}

View File

@ -12,21 +12,23 @@ import java.io.File
final case class Exit(code: Int) extends xsbti.Exit {
require(code >= 0)
}
final case class Reboot(scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File)
extends xsbti.Reboot {
final case class Reboot(
scalaVersion: String,
argsList: Seq[String],
app: xsbti.ApplicationID,
baseDirectory: File
) extends xsbti.Reboot {
def arguments = argsList.toArray
}
final case class ApplicationID(groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File])
extends xsbti.ApplicationID {
final case class ApplicationID(
groupID: String,
name: String,
version: String,
mainClass: String,
components: Seq[String],
crossVersionedValue: xsbti.CrossValue,
extra: Seq[File]
) extends xsbti.ApplicationID {
def mainComponents = components.toArray
def classpathExtra = extra.toArray
def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled
@ -35,11 +37,13 @@ object ApplicationID {
def apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID =
apply(delegate).copy(version = newVersion)
def apply(delegate: xsbti.ApplicationID): ApplicationID =
ApplicationID(delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra)
ApplicationID(
delegate.groupID,
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.crossVersionedValue,
delegate.classpathExtra
)
}

View File

@ -238,14 +238,16 @@ object State {
def process(f: (Exec, State) => State): State = {
def runCmd(cmd: Exec, remainingCommands: List[Exec]) = {
log.debug(s"> $cmd")
f(cmd,
s.copy(remainingCommands = remainingCommands,
currentCommand = Some(cmd),
history = cmd :: s.history))
val s1 = s.copy(
remainingCommands = remainingCommands,
currentCommand = Some(cmd),
history = cmd :: s.history,
)
f(cmd, s1)
}
s.remainingCommands match {
case List() => exit(true)
case List(x, xs @ _*) => runCmd(x, xs.toList)
case Nil => exit(true)
case x :: xs => runCmd(x, xs)
}
}
def :::(newCommands: List[String]): State = ++:(newCommands map { Exec(_, s.source) })
@ -283,10 +285,7 @@ object State {
def log = s.globalLogging.full
def handleError(t: Throwable): State = handleException(t, s, log)
def fail = {
import BasicCommandStrings.Compat.{ FailureWall => CompatFailureWall }
val remaining =
s.remainingCommands.dropWhile(c =>
c.commandLine != FailureWall && c.commandLine != CompatFailureWall)
val remaining = s.remainingCommands.dropWhile(c => c.commandLine != FailureWall)
if (remaining.isEmpty)
applyOnFailure(s, Nil, exit(ok = false))
else
@ -321,7 +320,7 @@ object State {
import ExceptionCategory._
private[sbt] def handleException(t: Throwable, s: State, log: Logger): State = {
private[this] def handleException(t: Throwable, s: State, log: Logger): State = {
ExceptionCategory(t) match {
case AlreadyHandled => ()
case m: MessageOnly => log.error(m.message)

View File

@ -12,7 +12,7 @@ import java.nio.file.FileSystems
import sbt.BasicCommandStrings.ClearOnFailure
import sbt.State.FailureWall
import sbt.internal.io.{ Source, SourceModificationWatch, WatchState }
import sbt.internal.io.{ EventMonitor, Source, WatchState }
import sbt.internal.util.AttributeKey
import sbt.internal.util.Types.const
import sbt.io._
@ -23,8 +23,8 @@ import scala.util.Properties
trait Watched {
/** The files watched when an action is run with a preceeding ~ */
def watchSources(s: State): Seq[Watched.WatchSource] = Nil
/** The files watched when an action is run with a proceeding ~ */
def watchSources(@deprecated("unused", "") s: State): Seq[Watched.WatchSource] = Nil
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
/**
@ -33,6 +33,12 @@ trait Watched {
*/
def pollInterval: FiniteDuration = Watched.PollDelay
/**
* The duration for which the EventMonitor while ignore file events after a file triggers
* a new build.
*/
def antiEntropy: FiniteDuration = Watched.AntiEntropy
/** The message to show when triggered execution waits for sources to change.*/
private[sbt] def watchingMessage(s: WatchState): String = Watched.defaultWatchingMessage(s)
@ -44,8 +50,13 @@ trait Watched {
}
object Watched {
val defaultWatchingMessage
: WatchState => String = _.count + ". Waiting for source changes... (press enter to interrupt)"
val defaultWatchingMessage: WatchState => String = ws =>
s"${ws.count}. Waiting for source changes... (press enter to interrupt)"
def projectWatchingMessage(projectId: String): WatchState => String =
ws =>
s"${ws.count}. Waiting for source changes in project $projectId... (press enter to interrupt)"
val defaultTriggeredMessage: WatchState => String = const("")
val clearWhenTriggered: WatchState => String = const(clearScreen)
def clearScreen: String = "\u001b[2J\u001b[0;0H"
@ -70,8 +81,8 @@ object Watched {
* @param base The base directory from which to include files.
* @return An instance of `Source`.
*/
def apply(base: File): Source =
apply(base, AllPassFilter, NothingFilter)
def apply(base: File): Source = apply(base, AllPassFilter, NothingFilter)
}
private[this] class AWatched extends Watched
@ -81,61 +92,87 @@ object Watched {
override def watchSources(s: State) = (base.watchSources(s) /: paths)(_ ++ _.watchSources(s))
override def terminateWatch(key: Int): Boolean = base.terminateWatch(key)
override val pollInterval = (base +: paths).map(_.pollInterval).min
override val antiEntropy = (base +: paths).map(_.antiEntropy).min
override def watchingMessage(s: WatchState) = base.watchingMessage(s)
override def triggeredMessage(s: WatchState) = base.triggeredMessage(s)
}
def empty: Watched = new AWatched
val PollDelay: FiniteDuration = 500.milliseconds
val AntiEntropy: FiniteDuration = 40.milliseconds
def isEnter(key: Int): Boolean = key == 10 || key == 13
def printIfDefined(msg: String) = if (!msg.isEmpty) System.out.println(msg)
def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State = {
@tailrec def shouldTerminate: Boolean =
(System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
val sources = watched.watchSources(s)
val service = watched.watchService()
val watchState = s get ContinuousState getOrElse WatchState.empty(service, sources)
if (watchState.count > 0)
printIfDefined(watched watchingMessage watchState)
val (triggered, newWatchState) =
try {
val (triggered, newWatchState) =
SourceModificationWatch.watch(watched.pollInterval, watchState)(shouldTerminate)
(triggered, newWatchState)
} catch {
case e: Exception =>
val log = s.log
log.error("Error occurred obtaining files to watch. Terminating continuous execution...")
State.handleException(e, s, log)
(false, watchState)
}
if (triggered) {
printIfDefined(watched triggeredMessage newWatchState)
(ClearOnFailure :: next :: FailureWall :: repeat :: s).put(ContinuousState, newWatchState)
} else {
while (System.in.available() > 0) System.in.read()
service.close()
s.remove(ContinuousState)
val log = s.log
val logger = new EventMonitor.Logger {
override def debug(msg: => Any): Unit = log.debug(msg.toString)
}
s get ContinuousEventMonitor match {
case None =>
// This is the first iteration, so run the task and create a new EventMonitor
(ClearOnFailure :: next :: FailureWall :: repeat :: s)
.put(
ContinuousEventMonitor,
EventMonitor(
WatchState.empty(watched.watchService(), watched.watchSources(s)),
watched.pollInterval,
watched.antiEntropy,
shouldTerminate,
logger
)
)
case Some(eventMonitor) =>
printIfDefined(watched watchingMessage eventMonitor.state)
val triggered = try eventMonitor.awaitEvent()
catch {
case e: Exception =>
log.error(
"Error occurred obtaining files to watch. Terminating continuous execution..."
)
s.handleError(e)
false
}
if (triggered) {
printIfDefined(watched triggeredMessage eventMonitor.state)
ClearOnFailure :: next :: FailureWall :: repeat :: s
} else {
while (System.in.available() > 0) System.in.read()
eventMonitor.close()
s.remove(ContinuousEventMonitor)
}
}
}
val ContinuousEventMonitor =
AttributeKey[EventMonitor](
"watch event monitor",
"Internal: maintains watch state and monitor threads."
)
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
val ContinuousState =
AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
val ContinuousWatchService =
AttributeKey[WatchService](
"watch service",
"Internal: tracks watch service for continuous execution."
)
val Configuration =
AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")
def createWatchService(): WatchService = {
def closeWatch = new MacOSXWatchService()
sys.props.get("sbt.watch.mode") match {
case Some("polling") =>
new PollingWatchService(PollDelay)
case Some("nio") =>
FileSystems.getDefault.newWatchService()
case _ if Properties.isMac =>
// WatchService is slow on macOS - use old polling mode
new PollingWatchService(PollDelay)
case Some("closewatch") => closeWatch
case _ if Properties.isMac => closeWatch
case _ =>
FileSystems.getDefault.newWatchService()
}

View File

@ -19,12 +19,11 @@ import sjsonnew.JsonFormat
*/
abstract class CommandChannel {
private val commandQueue: ConcurrentLinkedQueue[Exec] = new ConcurrentLinkedQueue()
def append(exec: Exec): Boolean =
commandQueue.add(exec)
def append(exec: Exec): Boolean = commandQueue.add(exec)
def poll: Option[Exec] = Option(commandQueue.poll)
def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit
def publishEvent[A: JsonFormat](event: A): Unit
final def publishEvent[A: JsonFormat](event: A): Unit = publishEvent(event, None)
def publishEventMessage(event: EventMessage): Unit
def publishBytes(bytes: Array[Byte]): Unit
def shutdown(): Unit

View File

@ -40,8 +40,6 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit = ()
def publishEvent[A: JsonFormat](event: A): Unit = ()
def publishEventMessage(event: EventMessage): Unit =
event match {
case e: ConsolePromptEvent =>
@ -50,7 +48,7 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
case _ =>
val x = makeAskUserThread(e.state)
askUserThread = Some(x)
x.start
x.start()
}
case e: ConsoleUnpromptEvent =>
e.lastSource match {
@ -70,7 +68,7 @@ private[sbt] final class ConsoleChannel(val name: String) extends CommandChannel
def shutdown(): Unit =
askUserThread match {
case Some(x) if x.isAlive =>
x.interrupt
x.interrupt()
askUserThread = None
case _ => ()
}

View File

@ -9,17 +9,23 @@ package sbt
package internal
package client
import java.io.IOException
import java.net.{ URI, Socket, InetAddress }
import java.io.{ File, IOException }
import java.util.UUID
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal
import scala.util.{ Success, Failure }
import scala.sys.process.{ BasicIO, Process, ProcessLogger }
import sbt.protocol._
import sbt.internal.util.{ JLine, StringEvent, ConsoleAppender }
import sbt.internal.protocol._
import sbt.internal.langserver.{ LogMessageParams, MessageType, PublishDiagnosticsParams }
import sbt.internal.util.{ JLine, ConsoleAppender }
import sbt.util.Level
import sbt.io.syntax._
import sbt.io.IO
import sjsonnew.support.scalajson.unsafe.Converter
class NetworkClient(arguments: List[String]) { self =>
class NetworkClient(configuration: xsbti.AppConfiguration, arguments: List[String]) { self =>
private val channelName = new AtomicReference("_")
private val status = new AtomicReference("Ready")
private val lock: AnyRef = new AnyRef {}
@ -27,78 +33,198 @@ class NetworkClient(arguments: List[String]) { self =>
private val pendingExecIds = ListBuffer.empty[String]
private val console = ConsoleAppender("thin1")
private def baseDirectory: File = configuration.baseDirectory
lazy val connection = init()
def usageError = sys.error("Expecting: sbt client 127.0.0.1:port")
val connection = init()
start()
// Open server connection based on the portfile
def init(): ServerConnection = {
val u = arguments match {
case List(x) =>
if (x contains "://") new URI(x)
else new URI("tcp://" + x)
case _ => usageError
val portfile = baseDirectory / "project" / "target" / "active.json"
if (!portfile.exists) {
forkServer(portfile)
}
val host = Option(u.getHost) match {
case None => usageError
case Some(x) => x
}
val port = Option(u.getPort) match {
case None => usageError
case Some(x) if x == -1 => usageError
case Some(x) => x
}
println(s"client on port $port")
val socket = new Socket(InetAddress.getByName(host), port)
new ServerConnection(socket) {
override def onEvent(event: EventMessage): Unit = self.onEvent(event)
override def onLogEntry(event: StringEvent): Unit = self.onLogEntry(event)
val (sk, tkn) = ClientSocket.socket(portfile)
val conn = new ServerConnection(sk) {
override def onNotification(msg: JsonRpcNotificationMessage): Unit = self.onNotification(msg)
override def onRequest(msg: JsonRpcRequestMessage): Unit = self.onRequest(msg)
override def onResponse(msg: JsonRpcResponseMessage): Unit = self.onResponse(msg)
override def onShutdown(): Unit = {
running.set(false)
}
}
// initiate handshake
val execId = UUID.randomUUID.toString
val initCommand = InitCommand(tkn, Option(execId))
conn.sendString(Serialization.serializeCommandAsJsonMessage(initCommand))
conn
}
def onLogEntry(event: StringEvent): Unit = {
val level = event.level match {
case "debug" => Level.Debug
case "info" => Level.Info
case "warn" => Level.Warn
case "error" => Level.Error
/**
* Forks another instance of sbt in the background.
* This instance must be shutdown explicitly via `sbt -client shutdown`
*/
def forkServer(portfile: File): Unit = {
console.appendLog(Level.Info, "server was not detected. starting an instance")
val args = List[String]()
val launchOpts = List("-Xms2048M", "-Xmx2048M", "-Xss2M")
val launcherJarString = sys.props.get("java.class.path") match {
case Some(cp) =>
cp.split(File.pathSeparator)
.toList
.headOption
.getOrElse(sys.error("launcher JAR classpath not found"))
case _ => sys.error("property java.class.path expected")
}
console.appendLog(level, event.message)
}
def onEvent(event: EventMessage): Unit =
event match {
case e: ChannelAcceptedEvent =>
channelName.set(e.channelName)
println(event)
case e: ExecStatusEvent =>
status.set(e.status)
// println(event)
e.execId foreach { execId =>
if (e.status == "Done" && (pendingExecIds contains execId)) {
lock.synchronized {
pendingExecIds -= execId
}
val cmd = "java" :: launchOpts ::: "-jar" :: launcherJarString :: args
// val cmd = "sbt"
val io = BasicIO(false, ProcessLogger(_ => ()))
val _ = Process(cmd, baseDirectory).run(io)
def waitForPortfile(n: Int): Unit =
if (portfile.exists) {
console.appendLog(Level.Info, "server found")
} else {
if (n <= 0) sys.error(s"timeout. $portfile is not found.")
else {
Thread.sleep(1000)
if ((n - 1) % 10 == 0) {
console.appendLog(Level.Info, "waiting for the server...")
}
waitForPortfile(n - 1)
}
case e => println(e.toString)
}
waitForPortfile(90)
}
/** Called on the response for a returning message. */
def onReturningReponse(msg: JsonRpcResponseMessage): Unit = {
def printResponse(): Unit = {
msg.result match {
case Some(result) =>
// ignore result JSON
console.success("completed")
case _ =>
msg.error match {
case Some(err) =>
// ignore err details
console.appendLog(Level.Error, "completed")
case _ => // ignore
}
}
}
printResponse()
}
def onResponse(msg: JsonRpcResponseMessage): Unit = {
msg.id foreach {
case execId if pendingExecIds contains execId =>
onReturningReponse(msg)
lock.synchronized {
pendingExecIds -= execId
}
case _ =>
}
}
def onNotification(msg: JsonRpcNotificationMessage): Unit = {
def splitToMessage: Vector[(Level.Value, String)] =
(msg.method, msg.params) match {
case ("window/logMessage", Some(json)) =>
import sbt.internal.langserver.codec.JsonProtocol._
Converter.fromJson[LogMessageParams](json) match {
case Success(params) => splitLogMessage(params)
case Failure(e) => Vector()
}
case ("textDocument/publishDiagnostics", Some(json)) =>
import sbt.internal.langserver.codec.JsonProtocol._
Converter.fromJson[PublishDiagnosticsParams](json) match {
case Success(params) => splitDiagnostics(params)
case Failure(e) => Vector()
}
case _ =>
Vector(
(
Level.Warn,
s"unknown event: ${msg.method} " + Serialization.compactPrintJsonOpt(msg.params)
)
)
}
splitToMessage foreach {
case (level, msg) => console.appendLog(level, msg)
}
}
def splitLogMessage(params: LogMessageParams): Vector[(Level.Value, String)] = {
val level = messageTypeToLevel(params.`type`)
if (level == Level.Debug) Vector()
else Vector((level, params.message))
}
def messageTypeToLevel(severity: Long): Level.Value = {
severity match {
case MessageType.Error => Level.Error
case MessageType.Warning => Level.Warn
case MessageType.Info => Level.Info
case MessageType.Log => Level.Debug
}
}
def splitDiagnostics(params: PublishDiagnosticsParams): Vector[(Level.Value, String)] = {
val uri = new URI(params.uri)
val f = IO.toFile(uri)
params.diagnostics map { d =>
val level = d.severity match {
case Some(severity) => messageTypeToLevel(severity)
case _ => Level.Error
}
val line = d.range.start.line + 1
val offset = d.range.start.character + 1
val msg = s"$f:$line:$offset: ${d.message}"
(level, msg)
}
}
def onRequest(msg: JsonRpcRequestMessage): Unit = {
// ignore
}
def start(): Unit = {
console.appendLog(Level.Info, "entering *experimental* thin client - BEEP WHIRR")
val _ = connection
val userCommands = arguments filterNot { cmd =>
cmd.startsWith("-")
}
if (userCommands.isEmpty) shell()
else batchExecute(userCommands)
}
def batchExecute(userCommands: List[String]): Unit = {
userCommands foreach { cmd =>
println("> " + cmd)
val execId =
if (cmd == "shutdown") sendExecCommand("exit")
else sendExecCommand(cmd)
while (pendingExecIds contains execId) {
Thread.sleep(100)
}
}
}
def shell(): Unit = {
val reader = JLine.simple(None, JLine.HandleCONT, injectThreadSleep = true)
while (running.get) {
reader.readLine("> ", None) match {
case Some("shutdown") =>
// `sbt -client shutdown` shuts down the server
sendExecCommand("exit")
Thread.sleep(100)
running.set(false)
case Some("exit") =>
running.set(false)
case Some(s) =>
val execId = UUID.randomUUID.toString
publishCommand(ExecCommand(s, execId))
lock.synchronized {
pendingExecIds += execId
}
case Some(s) if s.trim.nonEmpty =>
val execId = sendExecCommand(s)
while (pendingExecIds contains execId) {
Thread.sleep(100)
}
@ -107,10 +233,19 @@ class NetworkClient(arguments: List[String]) { self =>
}
}
def publishCommand(command: CommandMessage): Unit = {
val bytes = Serialization.serializeCommand(command)
def sendExecCommand(commandLine: String): String = {
val execId = UUID.randomUUID.toString
sendCommand(ExecCommand(commandLine, execId))
lock.synchronized {
pendingExecIds += execId
}
execId
}
def sendCommand(command: CommandMessage): Unit = {
try {
connection.publish(bytes)
val s = Serialization.serializeCommandAsJsonMessage(command)
connection.sendString(s)
} catch {
case _: IOException =>
// log.debug(e.getMessage)
@ -123,9 +258,10 @@ class NetworkClient(arguments: List[String]) { self =>
}
object NetworkClient {
def run(arguments: List[String]): Unit =
def run(configuration: xsbti.AppConfiguration, arguments: List[String]): Unit =
try {
new NetworkClient(arguments)
new NetworkClient(configuration, arguments)
()
} catch {
case NonFatal(e) => println(e.getMessage)
}

View File

@ -12,11 +12,12 @@ package client
import java.net.{ SocketTimeoutException, Socket }
import java.util.concurrent.atomic.AtomicBoolean
import sbt.protocol._
import sbt.internal.util.StringEvent
import sbt.internal.protocol._
abstract class ServerConnection(connection: Socket) {
private val running = new AtomicBoolean(true)
private val retByte: Byte = '\r'.toByte
private val delimiter: Byte = '\n'.toByte
private val out = connection.getOutputStream
@ -28,32 +29,63 @@ abstract class ServerConnection(connection: Socket) {
val in = connection.getInputStream
connection.setSoTimeout(5000)
var buffer: Vector[Byte] = Vector.empty
var bytesRead = 0
while (bytesRead != -1 && running.get) {
try {
bytesRead = in.read(readBuffer)
buffer = buffer ++ readBuffer.toVector.take(bytesRead)
// handle un-framing
var delimPos = buffer.indexOf(delimiter)
while (delimPos > -1) {
val chunk = buffer.take(delimPos)
buffer = buffer.drop(delimPos + 1)
def readFrame: Array[Byte] = {
def getContentLength: Int = {
readLine.drop(16).toInt
}
val l = getContentLength
readLine
readLine
readContentLength(l)
}
Serialization
.deserializeEvent(chunk)
.fold(
{ errorDesc =>
val s = new String(chunk.toArray, "UTF-8")
println(s"Got invalid chunk from server: $s \n" + errorDesc)
},
_ match {
case event: EventMessage => onEvent(event)
case event: StringEvent => onLogEntry(event)
}
)
delimPos = buffer.indexOf(delimiter)
def readLine: String = {
if (buffer.isEmpty) {
val bytesRead = in.read(readBuffer)
if (bytesRead > 0) {
buffer = buffer ++ readBuffer.toVector.take(bytesRead)
}
}
val delimPos = buffer.indexOf(delimiter)
if (delimPos > 0) {
val chunk0 = buffer.take(delimPos)
buffer = buffer.drop(delimPos + 1)
// remove \r at the end of line.
val chunk1 = if (chunk0.lastOption contains retByte) chunk0.dropRight(1) else chunk0
new String(chunk1.toArray, "utf-8")
} else readLine
}
def readContentLength(length: Int): Array[Byte] = {
if (buffer.size < length) {
val bytesRead = in.read(readBuffer)
if (bytesRead > 0) {
buffer = buffer ++ readBuffer.toVector.take(bytesRead)
}
}
if (length <= buffer.size) {
val chunk = buffer.take(length)
buffer = buffer.drop(length)
chunk.toArray
} else readContentLength(length)
}
while (running.get) {
try {
val frame = readFrame
Serialization
.deserializeJsonMessage(frame)
.fold(
{ errorDesc =>
val s = new String(frame.toArray, "UTF-8")
println(s"Got invalid chunk from server: $s \n" + errorDesc)
},
_ match {
case msg: JsonRpcRequestMessage => onRequest(msg)
case msg: JsonRpcResponseMessage => onResponse(msg)
case msg: JsonRpcNotificationMessage => onNotification(msg)
}
)
} catch {
case _: SocketTimeoutException => // its ok
}
@ -65,14 +97,28 @@ abstract class ServerConnection(connection: Socket) {
}
thread.start()
def publish(command: Array[Byte]): Unit = {
out.write(command)
out.write(delimiter.toInt)
out.flush()
def sendString(message: String): Unit = {
val a = message.getBytes("UTF-8")
writeLine(s"""Content-Length: ${a.length + 2}""".getBytes("UTF-8"))
writeLine(Array())
writeLine(a)
}
def onEvent(event: EventMessage): Unit
def onLogEntry(event: StringEvent): Unit
def writeLine(a: Array[Byte]): Unit = {
def writeEndLine(): Unit = {
out.write(retByte.toInt)
out.write(delimiter.toInt)
out.flush
}
if (a.nonEmpty) {
out.write(a)
}
writeEndLine
}
def onRequest(msg: JsonRpcRequestMessage): Unit
def onResponse(msg: JsonRpcResponseMessage): Unit
def onNotification(msg: JsonRpcNotificationMessage): Unit
def onShutdown(): Unit

View File

@ -9,7 +9,7 @@ package sbt
package internal
package server
import java.io.File
import java.io.{ File, IOException }
import java.net.{ SocketTimeoutException, InetAddress, ServerSocket, Socket }
import java.util.concurrent.atomic.AtomicBoolean
import java.nio.file.attribute.{ UserPrincipal, AclEntry, AclEntryPermission, AclEntryType }
@ -25,6 +25,7 @@ import sjsonnew.support.scalajson.unsafe.{ Converter, CompactPrinter }
import sbt.internal.protocol.codec._
import sbt.internal.util.ErrorHandling
import sbt.internal.util.Util.isWindows
import org.scalasbt.ipcsocket._
private[sbt] sealed trait ServerInstance {
def shutdown(): Unit
@ -39,9 +40,11 @@ private[sbt] object Server {
with TokenFileFormats
object JsonProtocol extends JsonProtocol
def start(connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger): ServerInstance =
def start(
connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
log: Logger
): ServerInstance =
new ServerInstance { self =>
import connection._
val running = new AtomicBoolean(false)
@ -54,15 +57,26 @@ private[sbt] object Server {
val serverThread = new Thread("sbt-socket-server") {
override def run(): Unit = {
Try {
ErrorHandling.translate(s"server failed to start on ${connection.shortName}. ") {
connection.connectionType match {
case ConnectionType.Local if isWindows =>
new NGWin32NamedPipeServerSocket(pipeName)
case ConnectionType.Local =>
prepareSocketfile()
new NGUnixDomainServerSocket(socketfile.getAbsolutePath)
case ConnectionType.Tcp => new ServerSocket(port, 50, InetAddress.getByName(host))
}
connection.connectionType match {
case ConnectionType.Local if isWindows =>
// Named pipe already has an exclusive lock.
addServerError(new Win32NamedPipeServerSocket(pipeName))
case ConnectionType.Local =>
val maxSocketLength = new UnixDomainSocketLibrary.SockaddrUn().sunPath.length - 1
val path = socketfile.getAbsolutePath
if (path.length > maxSocketLength)
sys.error(
"socket file absolute path too long; " +
"either switch to another connection type " +
"or define a short \"SBT_GLOBAL_SERVER_DIR\" value. " +
s"Current path: ${path}"
)
tryClient(new UnixDomainSocket(path))
prepareSocketfile()
addServerError(new UnixDomainServerSocket(path))
case ConnectionType.Tcp =>
tryClient(new Socket(InetAddress.getByName(host), port))
addServerError(new ServerSocket(port, 50, InetAddress.getByName(host)))
}
} match {
case Failure(e) => p.failure(e)
@ -87,6 +101,24 @@ private[sbt] object Server {
}
serverThread.start()
// Try the socket as a client to make sure that the server is not already up.
// f tries to connect to the server, and flip the result.
def tryClient(f: => Socket): Unit = {
if (portfile.exists) {
Try { f } match {
case Failure(_) => ()
case Success(socket) =>
socket.close()
throw new AlreadyRunningException()
}
} else ()
}
def addServerError(f: => ServerSocket): ServerSocket =
ErrorHandling.translate(s"server failed to start on ${connection.shortName}. ") {
f
}
override def authenticate(challenge: String): Boolean = synchronized {
if (token == challenge) {
token = nextToken
@ -154,7 +186,7 @@ private[sbt] object Server {
auth match {
case _ if auth(ServerAuthentication.Token) =>
writeTokenfile()
PortFile(uri, Option(tokenfile.toString), Option(tokenfile.toURI.toString))
PortFile(uri, Option(tokenfile.toString), Option(IO.toURI(tokenfile).toString))
case _ =>
PortFile(uri, None, None)
}
@ -190,3 +222,5 @@ private[sbt] case class ServerConnection(
}
}
}
private[sbt] class AlreadyRunningException extends IOException("sbt server is already running.")

View File

@ -0,0 +1,73 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
package internal
package server
import sjsonnew.JsonFormat
import sbt.internal.protocol._
import sbt.util.Logger
import sbt.protocol.{ SettingQuery => Q }
/**
* ServerHandler allows plugins to extend sbt server.
* It's a wrapper around curried function ServerCallback => JsonRpcRequestMessage => Unit.
*/
final class ServerHandler(val handler: ServerCallback => ServerIntent) {
override def toString: String = s"Serverhandler(...)"
}
object ServerHandler {
def apply(handler: ServerCallback => ServerIntent): ServerHandler =
new ServerHandler(handler)
lazy val fallback: ServerHandler = ServerHandler({ handler =>
ServerIntent(
{ case x => handler.log.debug(s"Unhandled notification received: ${x.method}: $x") },
{ case x => handler.log.debug(s"Unhandled request received: ${x.method}: $x") }
)
})
}
final class ServerIntent(
val onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
val onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
) {
override def toString: String = s"ServerIntent(...)"
}
object ServerIntent {
def apply(
onRequest: PartialFunction[JsonRpcRequestMessage, Unit],
onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]
): ServerIntent =
new ServerIntent(onRequest, onNotification)
def request(onRequest: PartialFunction[JsonRpcRequestMessage, Unit]): ServerIntent =
new ServerIntent(onRequest, PartialFunction.empty)
def notify(onNotification: PartialFunction[JsonRpcNotificationMessage, Unit]): ServerIntent =
new ServerIntent(PartialFunction.empty, onNotification)
}
/**
* Interface to invoke JSON-RPC response.
*/
trait ServerCallback {
def jsonRpcRespond[A: JsonFormat](event: A, execId: Option[String]): Unit
def jsonRpcRespondError(execId: Option[String], code: Long, message: String): Unit
def jsonRpcNotify[A: JsonFormat](method: String, params: A): Unit
def appendExec(exec: Exec): Boolean
def log: Logger
def name: String
private[sbt] def authOptions: Set[ServerAuthentication]
private[sbt] def authenticate(token: String): Boolean
private[sbt] def setInitialized(value: Boolean): Unit
private[sbt] def onSettingQuery(execId: Option[String], req: Q): Unit
}

View File

@ -10,47 +10,57 @@ package xsbt
import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter }
import java.net.{ InetAddress, ServerSocket, Socket }
import scala.annotation.tailrec
import scala.util.control.NonFatal
object IPC {
private val portMin = 1025
private val portMax = 65536
private val loopback = InetAddress.getByName(null) // loopback
private val loopback = InetAddress.getByName(null)
def client[T](port: Int)(f: IPC => T): T =
ipc(new Socket(loopback, port))(f)
def client[T](port: Int)(f: IPC => T): T = ipc(new Socket(loopback, port))(f)
def pullServer[T](f: Server => T): T = {
val server = makeServer
try { f(new Server(server)) } finally { server.close() }
try f(new Server(server))
finally server.close()
}
def unmanagedServer: Server = new Server(makeServer)
def makeServer: ServerSocket = {
val random = new java.util.Random
def nextPort = random.nextInt(portMax - portMin + 1) + portMin
def createServer(attempts: Int): ServerSocket =
if (attempts > 0)
try { new ServerSocket(nextPort, 1, loopback) } catch {
case NonFatal(_) => createServer(attempts - 1)
} else
sys.error("Could not connect to socket: maximum attempts exceeded")
if (attempts > 0) {
try new ServerSocket(nextPort, 1, loopback)
catch { case NonFatal(_) => createServer(attempts - 1) }
} else sys.error("Could not connect to socket: maximum attempts exceeded")
createServer(10)
}
def server[T](f: IPC => Option[T]): T = serverImpl(makeServer, f)
def server[T](port: Int)(f: IPC => Option[T]): T =
serverImpl(new ServerSocket(port, 1, loopback), f)
private def serverImpl[T](server: ServerSocket, f: IPC => Option[T]): T = {
def listen(): T = {
@tailrec def listen(): T = {
ipc(server.accept())(f) match {
case Some(done) => done
case None => listen()
}
}
try { listen() } finally { server.close() }
try listen()
finally server.close()
}
private def ipc[T](s: Socket)(f: IPC => T): T =
try { f(new IPC(s)) } finally { s.close() }
try f(new IPC(s))
finally s.close()
final class Server private[IPC] (s: ServerSocket) {
def port = s.getLocalPort
@ -59,6 +69,7 @@ object IPC {
def connection[T](f: IPC => T): T = IPC.ipc(s.accept())(f)
}
}
final class IPC private (s: Socket) {
def port = s.getLocalPort
private val in = new BufferedReader(new InputStreamReader(s.getInputStream))

View File

@ -18,12 +18,14 @@ import sbt.io.{ AllPassFilter, NothingFilter }
object Append {
@implicitNotFound(
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Value[A, B] {
def appendValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}")
msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}"
)
trait Values[A, -B] {
def appendValues(a: A, b: B): A
}

View File

@ -27,42 +27,60 @@ object Def extends Init[Scope] with TaskMacroExtra {
val resolvedScoped = SettingKey[ScopedKey[_]](
"resolved-scoped",
"The ScopedKey for the referencing setting or task.",
KeyRanks.DSetting)
KeyRanks.DSetting
)
private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]](
"task-definition-key",
"Internal: used to map a task back to its ScopedKey.",
Invisible)
Invisible
)
lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None)
def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] =
Show[ScopedKey[_]]((key: ScopedKey[_]) => displayFull(key, keyNameColor))
@deprecated("Use showRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showRelativeKey(
current: ProjectRef,
multi: Boolean,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayRelative(current, multi, ref)
))
showRelativeKey2(current, keyNameColor)
def showBuildRelativeKey(
currentBuild: URI,
multi: Boolean,
keyNameColor: Option[String] = None
def showRelativeKey2(
current: ProjectRef,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayBuildRelative(currentBuild, multi, ref)
))
ref => displayRelative2(current, ref)
)
)
@deprecated("Use showBuildRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showBuildRelativeKey(
currentBuild: URI,
multi: Boolean,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
showBuildRelativeKey2(currentBuild, keyNameColor)
def showBuildRelativeKey2(
currentBuild: URI,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](
key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
ref => displayBuildRelative(currentBuild, ref)
)
)
/**
* Returns a String expression for the given [[Reference]] (BuildRef, [[ProjectRef]], etc)
@ -71,17 +89,22 @@ object Def extends Init[Scope] with TaskMacroExtra {
def displayRelativeReference(current: ProjectRef, project: Reference): String =
displayRelative(current, project, false)
@deprecated("Use displayRelativeReference", "1.1.0")
@deprecated("Use displayRelative2 which doesn't take the unused multi param", "1.1.1")
def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String =
displayRelative2(current, project)
def displayRelative2(current: ProjectRef, project: Reference): String =
displayRelative(current, project, true)
/**
* Constructs the String of a given [[Reference]] relative to current.
* Note that this no longer takes "multi" parameter, and omits the subproject id at all times.
*/
private[sbt] def displayRelative(current: ProjectRef,
project: Reference,
trailingSlash: Boolean): String = {
private[sbt] def displayRelative(
current: ProjectRef,
project: Reference,
trailingSlash: Boolean
): String = {
val trailing = if (trailingSlash) " /" else ""
project match {
case BuildRef(current.build) => "ThisBuild" + trailing
@ -91,7 +114,11 @@ object Def extends Init[Scope] with TaskMacroExtra {
}
}
@deprecated("Use variant without multi", "1.1.1")
def displayBuildRelative(currentBuild: URI, multi: Boolean, project: Reference): String =
displayBuildRelative(currentBuild, project)
def displayBuildRelative(currentBuild: URI, project: Reference): String =
project match {
case BuildRef(`currentBuild`) => "ThisBuild /"
case ProjectRef(`currentBuild`, x) => x + " /"
@ -124,11 +151,14 @@ object Def extends Init[Scope] with TaskMacroExtra {
else None) orElse
s.dependencies
.find(k => k.scope != ThisScope)
.map(k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}")
.map(
k =>
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}"
)
override def intersect(s1: Scope, s2: Scope)(
implicit delegates: Scope => Seq[Scope]): Option[Scope] =
implicit delegates: Scope => Seq[Scope]
): Option[Scope] =
if (s2 == GlobalScope) Some(s1) // s1 is more specific
else if (s1 == GlobalScope) Some(s2) // s2 is more specific
else super.intersect(s1, s2)
@ -173,16 +203,31 @@ object Def extends Init[Scope] with TaskMacroExtra {
// The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to
// be used in task and setting macros as inputs with an ultimate result of type T
implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ???
implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ???
implicit def macroValueIInT[T](in: Initialize[InputTask[T]]): InputEvaluated[T] = ???
implicit def taskMacroValueIT[T](in: Initialize[Task[T]]): MacroTaskValue[T] = ???
implicit def macroPrevious[T](in: TaskKey[T]): MacroPrevious[T] = ???
implicit def macroValueI[T](@deprecated("unused", "") in: Initialize[T]): MacroValue[T] = ???
// The following conversions enable the types Parser[T], Initialize[Parser[T]], and Initialize[State => Parser[T]] to
// be used in the inputTask macro as an input with an ultimate result of type T
implicit def parserInitToInput[T](p: Initialize[Parser[T]]): ParserInput[T] = ???
implicit def parserInitStateToInput[T](p: Initialize[State => Parser[T]]): ParserInput[T] = ???
implicit def macroValueIT[T](@deprecated("unused", "") in: Initialize[Task[T]]): MacroValue[T] =
???
implicit def macroValueIInT[T](
@deprecated("unused", "") in: Initialize[InputTask[T]]
): InputEvaluated[T] = ???
implicit def taskMacroValueIT[T](
@deprecated("unused", "") in: Initialize[Task[T]]
): MacroTaskValue[T] = ???
implicit def macroPrevious[T](@deprecated("unused", "") in: TaskKey[T]): MacroPrevious[T] = ???
// The following conversions enable the types Parser[T], Initialize[Parser[T]], and
// Initialize[State => Parser[T]] to be used in the inputTask macro as an input with an ultimate
// result of type T
implicit def parserInitToInput[T](
@deprecated("unused", "") p: Initialize[Parser[T]]
): ParserInput[T] = ???
implicit def parserInitStateToInput[T](
@deprecated("unused", "") p: Initialize[State => Parser[T]]
): ParserInput[T] = ???
def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T]
def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T]
@ -190,27 +235,43 @@ object Def extends Init[Scope] with TaskMacroExtra {
private[sbt] def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) =
(TaskKey[T](name, description, DTask), dummyTask(name))
private[sbt] def dummyTask[T](name: String): Task[T] = {
import std.TaskExtra.{ task => newTask, _ }
val base: Task[T] = newTask(
sys.error("Dummy task '" + name + "' did not get converted to a full task.")) named name
sys.error("Dummy task '" + name + "' did not get converted to a full task.")
) named name
base.copy(info = base.info.set(isDummyTask, true))
}
private[sbt] def isDummy(t: Task[_]): Boolean =
t.info.attributes.get(isDummyTask) getOrElse false
private[sbt] val isDummyTask = AttributeKey[Boolean](
"is-dummy-task",
"Internal: used to identify dummy tasks. sbt injects values for these tasks at the start of task execution.",
Invisible)
Invisible
)
private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.")
private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]](
"streams-manager",
"Streams manager, which provides streams for different contexts.")
"Streams manager, which provides streams for different contexts."
)
}
// these need to be mixed into the sbt package object because the target doesn't involve Initialize or anything in Def
// these need to be mixed into the sbt package object
// because the target doesn't involve Initialize or anything in Def
trait TaskMacroExtra {
implicit def macroValueT[T](in: Task[T]): std.MacroValue[T] = ???
implicit def macroValueIn[T](in: InputTask[T]): std.InputEvaluated[T] = ???
implicit def parserToInput[T](in: Parser[T]): std.ParserInput[T] = ???
implicit def stateParserToInput[T](in: State => Parser[T]): std.ParserInput[T] = ???
implicit def macroValueT[T](@deprecated("unused", "") in: Task[T]): std.MacroValue[T] = ???
implicit def macroValueIn[T](@deprecated("unused", "") in: InputTask[T]): std.InputEvaluated[T] =
???
implicit def parserToInput[T](@deprecated("unused", "") in: Parser[T]): std.ParserInput[T] = ???
implicit def stateParserToInput[T](
@deprecated("unused", "") in: State => Parser[T]
): std.ParserInput[T] = ???
}

View File

@ -26,6 +26,8 @@ private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) exte
case None => Select(conf) :: Zero :: Nil
}
}
private final class ProjectDelegates(val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]])
private final class ProjectDelegates(
val ref: ProjectRef,
val refs: Seq[ScopeAxis[ResolvedReference]],
val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]]
)

View File

@ -22,13 +22,15 @@ final class InputTask[T] private (val parser: State => Parser[Task[T]]) {
new InputTask[T](s => Parser(parser(s))(in))
def fullInput(in: String): InputTask[T] =
new InputTask[T](s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
})
new InputTask[T](
s =>
Parser.parse(in, parser(s)) match {
case Right(v) => Parser.success(v)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
Parser.failure(s"Invalid programmatic input:\n$indented")
}
)
}
object InputTask {
@ -38,19 +40,28 @@ object InputTask {
import std.FullInstance._
def toTask(in: String): Initialize[Task[T]] = flatten(
(Def.stateKey zipWith i)((sTask, it) =>
sTask map (s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}))
(Def.stateKey zipWith i)(
(sTask, it) =>
sTask map (
s =>
Parser.parse(in, it.parser(s)) match {
case Right(t) => Def.value(t)
case Left(msg) =>
val indented = msg.lines.map(" " + _).mkString("\n")
sys.error(s"Invalid programmatic input:\n$indented")
}
)
)
)
}
implicit def inputTaskParsed[T](in: InputTask[T]): std.ParserInputTask[T] = ???
implicit def inputTaskInitParsed[T](in: Initialize[InputTask[T]]): std.ParserInputTask[T] = ???
implicit def inputTaskParsed[T](
@deprecated("unused", "") in: InputTask[T]
): std.ParserInputTask[T] = ???
implicit def inputTaskInitParsed[T](
@deprecated("unused", "") in: Initialize[InputTask[T]]
): std.ParserInputTask[T] = ???
def make[T](p: State => Parser[Task[T]]): InputTask[T] = new InputTask[T](p)
@ -62,12 +73,14 @@ object InputTask {
def free[I, T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c)
def separate[I, T](p: State => Parser[I])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: State => Parser[I]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
separate(Def value p)(action)
def separate[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
def separate[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] =
p.zipWith(action)((parser, act) => free(parser)(act))
/** Constructs an InputTask that accepts no user input. */
@ -81,8 +94,9 @@ object InputTask {
* a) a Parser constructed using other Settings, but not Tasks
* b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing.
*/
def createDyn[I, T](p: Initialize[State => Parser[I]])(
action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
def createDyn[I, T](
p: Initialize[State => Parser[I]]
)(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] =
separate(p)(std.FullInstance.flattenFun[I, T](action))
/** A dummy parser that consumes no input and produces nothing useful (unit).*/
@ -98,8 +112,9 @@ object InputTask {
i(Types.const)
@deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0")
def apply[I, T](p: Initialize[State => Parser[I]])(
action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
def apply[I, T](
p: Initialize[State => Parser[I]]
)(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = {
val dummyKey = localKey[Task[I]]
val (marker, dummy) = dummyTask[I]
val it = action(TaskKey(dummyKey)) mapConstant subResultForDummy(dummyKey, dummy)
@ -136,9 +151,11 @@ object InputTask {
(key, t)
}
private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]],
value: I,
task: Task[T]): Task[T] = {
private[this] def subForDummy[I, T](
marker: AttributeKey[Option[I]],
value: I,
task: Task[T]
): Task[T] = {
val seen = new java.util.IdentityHashMap[Task[_], Task[_]]
lazy val f: Task ~> Task = new (Task ~> Task) {
def apply[A](t: Task[A]): Task[A] = {

View File

@ -53,11 +53,13 @@ object Previous {
private[sbt] val references = SettingKey[References](
"previous-references",
"Collects all static references to previous values of tasks.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
private[sbt] val cache = TaskKey[Previous](
"previous-cache",
"Caches previous values of tasks read from disk for the duration of a task execution.",
KeyRanks.Invisible)
KeyRanks.Invisible
)
/** Records references to previous task value. This should be completely populated after settings finish loading. */
private[sbt] final class References {
@ -72,9 +74,11 @@ object Previous {
}
/** Persists values of tasks t where there is some task referencing it via t.previous. */
private[sbt] def complete(referenced: References,
results: RMap[Task, Result],
streams: Streams): Unit = {
private[sbt] def complete(
referenced: References,
results: RMap[Task, Result],
streams: Streams
): Unit = {
val map = referenced.getReferences
def impl[T](key: ScopedKey[_], result: T): Unit =
for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) {

View File

@ -11,12 +11,14 @@ import scala.annotation.implicitNotFound
object Remove {
@implicitNotFound(
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Value[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Value[A, B] extends Any {
def removeValue(a: A, b: B): A
}
@implicitNotFound(
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}")
msg = "No implicit for Remove.Values[${A}, ${B}] found,\n so ${B} cannot be removed from ${A}"
)
trait Values[A, -B] extends Any {
def removeValues(a: A, b: B): A
}

View File

@ -13,10 +13,12 @@ import sbt.internal.util.{ AttributeKey, AttributeMap, Dag }
import sbt.io.IO
final case class Scope(project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]) {
final case class Scope(
project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
extra: ScopeAxis[AttributeMap]
) {
def in(project: Reference, config: ConfigKey): Scope =
copy(project = Select(project), config = Select(config))
def in(config: ConfigKey, task: AttributeKey[_]): Scope =
@ -106,17 +108,21 @@ object Scope {
else
IO.directoryURI(current resolve uri)
def resolveReference(current: URI,
rootProject: URI => String,
ref: Reference): ResolvedReference =
def resolveReference(
current: URI,
rootProject: URI => String,
ref: Reference
): ResolvedReference =
ref match {
case br: BuildReference => resolveBuildRef(current, br)
case pr: ProjectReference => resolveProjectRef(current, rootProject, pr)
}
def resolveProjectRef(current: URI,
rootProject: URI => String,
ref: ProjectReference): ProjectRef =
def resolveProjectRef(
current: URI,
rootProject: URI => String,
ref: ProjectReference
): ProjectRef =
ref match {
case LocalRootProject => ProjectRef(current, rootProject(current))
case LocalProject(id) => ProjectRef(current, id)
@ -164,10 +170,12 @@ object Scope {
def displayMasked(scope: Scope, sep: String, mask: ScopeMask, showZeroConfig: Boolean): String =
displayMasked(scope, sep, showProject, mask, showZeroConfig)
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask): String =
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask
): String =
displayMasked(scope, sep, showProject, mask, false)
/**
@ -177,11 +185,13 @@ object Scope {
* Technically speaking an unspecified configuration axis defaults to
* the scope delegation (first configuration defining the key, then Zero).
*/
def displayMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean): String = {
def displayMasked(
scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask,
showZeroConfig: Boolean
): String = {
import scope.{ project, config, task, extra }
val zeroConfig = if (showZeroConfig) "Zero /" else ""
val configPrefix = config.foldStrict(display, zeroConfig, "./")
@ -190,57 +200,68 @@ object Scope {
val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")")
if (scope == GlobalScope) "Global / " + sep + postfix
else
mask.concatShow(appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix)
mask.concatShow(
appendSpace(projectPrefix(project, showProject)),
appendSpace(configPrefix),
appendSpace(taskPrefix),
sep,
postfix
)
}
private[sbt] def appendSpace(s: String): String =
if (s == "") ""
else s + " "
// sbt 0.12 style
def display012StyleMasked(scope: Scope,
sep: String,
showProject: Reference => String,
mask: ScopeMask): String = {
import scope.{ project, config, task, extra }
val configPrefix = config.foldStrict(displayConfigKey012Style, "*:", ".:")
val taskPrefix = task.foldStrict(_.label + "::", "", ".::")
val extras = extra.foldStrict(_.entries.map(_.toString).toList, Nil, Nil)
val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")")
mask.concatShow(projectPrefix012Style(project, showProject012Style),
configPrefix,
taskPrefix,
sep,
postfix)
}
def equal(a: Scope, b: Scope, mask: ScopeMask): Boolean =
(!mask.project || a.project == b.project) &&
(!mask.config || a.config == b.config) &&
(!mask.task || a.task == b.task) &&
(!mask.extra || a.extra == b.extra)
def projectPrefix(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "Zero /", "./")
def projectPrefix012Style(project: ScopeAxis[Reference],
show: Reference => String = showProject): String =
def projectPrefix012Style(
project: ScopeAxis[Reference],
show: Reference => String = showProject
): String =
project.foldStrict(show, "*/", "./")
def showProject = (ref: Reference) => Reference.display(ref) + " /"
def showProject012Style = (ref: Reference) => Reference.display(ref) + "/"
@deprecated("No longer used", "1.1.3")
def transformTaskName(s: String) = {
val parts = s.split("-+")
(parts.take(1) ++ parts.drop(1).map(_.capitalize)).mkString
}
@deprecated("Use variant without extraInherit", "1.1.1")
def delegates[Proj](
refs: Seq[(ProjectRef, Proj)],
configurations: Proj => Seq[ConfigKey],
resolve: Reference => ResolvedReference,
rootProject: URI => String,
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
): Scope => Seq[Scope] =
delegates(
refs,
configurations,
resolve,
rootProject,
projectInherit,
configInherit,
taskInherit,
)
// *Inherit functions should be immediate delegates and not include argument itself. Transitivity will be provided by this method
def delegates[Proj](
refs: Seq[(ProjectRef, Proj)],
@ -250,36 +271,72 @@ object Scope {
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
): Scope => Seq[Scope] = {
val index = delegates(refs, configurations, projectInherit, configInherit)
scope =>
indexedDelegates(resolve, index, rootProject, taskInherit, extraInherit)(scope)
indexedDelegates(resolve, index, rootProject, taskInherit)(scope)
}
@deprecated("Use variant without extraInherit", "1.1.1")
def indexedDelegates(
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
)(rawScope: Scope): Seq[Scope] =
indexedDelegates(resolve, index, rootProject, taskInherit)(rawScope)
def indexedDelegates(
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
)(rawScope: Scope): Seq[Scope] = {
val scope = Scope.replaceThis(GlobalScope)(rawScope)
def nonProjectScopes(resolvedProj: ResolvedReference)(px: ScopeAxis[ResolvedReference]) = {
val p = px.toOption getOrElse resolvedProj
val configProj = p match {
case pr: ProjectRef => pr; case br: BuildRef => ProjectRef(br.build, rootProject(br.build))
}
val cLin = scope.config match {
case Select(conf) => index.config(configProj, conf); case _ => withZeroAxis(scope.config)
}
// This is a hot method that gets called many times
def expandDelegateScopes(
resolvedProj: ResolvedReference
)(pLin: Seq[ScopeAxis[ResolvedReference]]): Vector[Scope] = {
val tLin = scope.task match {
case t @ Select(_) => linearize(t)(taskInherit); case _ => withZeroAxis(scope.task)
case t @ Select(_) => linearize(t)(taskInherit)
case _ => withZeroAxis(scope.task)
}
val eLin = withZeroAxis(scope.extra)
for (c <- cLin; t <- tLin; e <- eLin) yield Scope(px, c, t, e)
// val eLin = withZeroAxis(scope.extra)
// The following while loops handroll the nested for-expression + flatMap
// projAxes flatMap nonProjectScopes(resolvedProj)
// ...
// for (c <- cLin; t <- tLin; e <- eLin) yield Scope(px, c, t, e)
val res = Vector.newBuilder[Scope]
val pIt = pLin.iterator
while (pIt.hasNext) {
val px = pIt.next()
val p = px.toOption getOrElse resolvedProj
val configProj = p match {
case pr: ProjectRef => pr
case br: BuildRef => ProjectRef(br.build, rootProject(br.build))
}
val cLin = scope.config match {
case Select(conf) => index.config(configProj, conf)
case _ => withZeroAxis(scope.config)
}
val cLinIt = cLin.iterator
while (cLinIt.hasNext) {
val c = cLinIt.next()
val tLinIt = tLin.iterator
while (tLinIt.hasNext) {
val t = tLinIt.next()
if (scope.extra.isSelect) {
res += Scope(px, c, t, scope.extra)
}
res += Scope(px, c, t, Zero)
}
}
}
res.result()
}
scope.project match {
case Zero | This => globalProjectDelegates(scope)
case Select(proj) =>
@ -287,15 +344,17 @@ object Scope {
val projAxes: Seq[ScopeAxis[ResolvedReference]] =
resolvedProj match {
case pr: ProjectRef => index.project(pr)
case br: BuildRef => Select(br) :: Zero :: Nil
case br: BuildRef => List(Select(br), Zero)
}
projAxes flatMap nonProjectScopes(resolvedProj)
expandDelegateScopes(resolvedProj)(projAxes)
}
}
private val zeroL = List(Zero)
def withZeroAxis[T](base: ScopeAxis[T]): Seq[ScopeAxis[T]] =
if (base.isSelect) base :: Zero :: Nil
else Zero :: Nil
if (base.isSelect) List(base, Zero)
else zeroL
def withGlobalScope(base: Scope): Seq[Scope] =
if (base == GlobalScope) GlobalScope :: Nil else base :: GlobalScope :: Nil
def withRawBuilds(ps: Seq[ScopeAxis[ProjectRef]]): Seq[ScopeAxis[ResolvedReference]] =
@ -319,27 +378,32 @@ object Scope {
}
private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])(
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = {
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]
): ProjectDelegates = {
val refDelegates = withRawBuilds(linearize(Select(ref), false)(projectInherit))
val configs = confs map { c =>
axisDelegates(configInherit, ref, c)
}
new ProjectDelegates(ref, refDelegates, configs.toMap)
}
def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T): (T, Seq[ScopeAxis[T]]) =
def axisDelegates[T](
direct: (ResolvedReference, T) => Seq[T],
ref: ResolvedReference,
init: T
): (T, Seq[ScopeAxis[T]]) =
(init, linearize(Select(init))(direct(ref, _)))
def linearize[T](axis: ScopeAxis[T], appendZero: Boolean = true)(
inherit: T => Seq[T]): Seq[ScopeAxis[T]] =
inherit: T => Seq[T]
): Seq[ScopeAxis[T]] =
axis match {
case Select(x) => topologicalSort[T](x, appendZero)(inherit)
case Zero | This => if (appendZero) Zero :: Nil else Nil
}
def topologicalSort[T](node: T, appendZero: Boolean)(
dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = {
dependencies: T => Seq[T]
): Seq[ScopeAxis[T]] = {
val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply)
if (appendZero) o ::: Zero :: Nil
else o

View File

@ -17,7 +17,18 @@ import sbt.Def.{ Initialize, KeyedInitialize, ScopedKey, Setting, setting }
import std.TaskExtra.{ task => mktask, _ }
/** An abstraction on top of Settings for build configuration and task definition. */
sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] }
sealed trait Scoped extends Equals {
def scope: Scope
val key: AttributeKey[_]
override def equals(that: Any) =
(this eq that.asInstanceOf[AnyRef]) || (that match {
case that: Scoped => scope == that.scope && key == that.key && canEqual(that)
case _ => false
})
override def hashCode() = (scope, key).##
}
/** A common type for SettingKey and TaskKey so that both can be used as inputs to tasks.*/
sealed trait ScopedTaskable[T] extends Scoped {
@ -95,6 +106,8 @@ sealed abstract class SettingKey[T]
final def withRank(rank: Int): SettingKey[T] =
SettingKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[SettingKey[_]]
}
/**
@ -163,6 +176,8 @@ sealed abstract class TaskKey[T]
final def withRank(rank: Int): TaskKey[T] =
TaskKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[TaskKey[_]]
}
/**
@ -195,6 +210,8 @@ sealed trait InputKey[T]
final def withRank(rank: Int): InputKey[T] =
InputKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[InputKey[_]]
}
/** Methods and types related to constructing settings, including keys, scopes, and initializations. */
@ -320,10 +337,14 @@ object Scoped {
def transform(f: S => S, source: SourcePosition): Setting[Task[S]] =
set(scopedKey(_ map f), source)
@deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2")
@deprecated(
"No longer needed with new task syntax and SettingKey inheriting from Initialize.",
"0.13.2"
)
def task: SettingKey[Task[S]] = scopedSetting(scope, key)
def toSettingKey: SettingKey[Task[S]] = scopedSetting(scope, key)
def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key)
def ? : Initialize[Task[Option[S]]] = Def.optional(scopedKey) {
@ -336,6 +357,11 @@ object Scoped {
(this.? zipWith i)((x, y) => (x, y) map { case (a, b) => a getOrElse b })
}
/** Enriches `Initialize[Task[S]]` types.
*
* @param i the original `Initialize[Task[S]]` value to enrich
* @tparam S the type of the underlying value
*/
final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f
@ -365,22 +391,36 @@ object Scoped {
}
}
/** Enriches `Initialize[InputTask[S]]` types.
*
* @param i the original `Initialize[InputTask[S]]` value to enrich
* @tparam S the type of the underlying value
*/
final class RichInitializeInputTask[S](i: Initialize[InputTask[S]])
extends RichInitTaskBase[S, InputTask] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f)
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = {
import TupleSyntax._
(i, Initialize.joinAny[Task](tasks))((thisTask, deps) =>
thisTask.mapTask(_.dependsOn(deps: _*)))
(i, Initialize.joinAny[Task](tasks))(
(thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*))
)
}
}
/** Enriches `Initialize[R[S]]` types. Abstracts over the specific task-like type constructor.
*
* @tparam S the type of the underlying vault
* @tparam R the task-like type constructor (either Task or InputTask)
*/
sealed abstract class RichInitTaskBase[S, R[_]] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[R[T]]
def flatMap[T](f: S => Task[T]): Initialize[R[T]] = flatMapR(f compose successM)
def map[T](f: S => T): Initialize[R[T]] = mapR(f compose successM)
def flatMap[T](f: S => Task[T]): Initialize[R[T]] =
onTask(_.result flatMap (f compose successM))
def map[T](f: S => T): Initialize[R[T]] = onTask(_.result map (f compose successM))
def andFinally(fin: => Unit): Initialize[R[S]] = onTask(_ andFinally fin)
def doFinally(t: Task[Unit]): Initialize[R[S]] = onTask(_ doFinally t)
@ -392,23 +432,28 @@ object Scoped {
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.",
"0.13.0")
def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_ flatMapR f)
"0.13.0"
)
def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_.result flatMap f)
@deprecated(
"Use the `result` method to create a task that returns the full Result of this task. Then, call `map` on the new task.",
"0.13.0")
def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_ mapR f)
"0.13.0"
)
def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_.result map f)
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.",
"0.13.0")
def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = flatMapR(f compose failM)
"0.13.0"
)
def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] =
onTask(_.result flatMap (f compose failM))
@deprecated(
"Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.",
"0.13.0")
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = mapR(f compose failM)
"0.13.0"
)
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = onTask(_.result map (f compose failM))
}
type AnyInitTask = Initialize[Task[T]] forSome { type T }
@ -565,7 +610,7 @@ object Scoped {
/** The sbt 0.10 style DSL was deprecated in 0.13.13, favouring the use of the '.value' macro.
*
* See http://www.scala-sbt.org/0.13/docs/Migrating-from-sbt-012x.html for how to migrate.
* See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html#Migrating+from+sbt+0.12+style for how to migrate.
*/
trait TupleSyntax {
import Scoped._
@ -628,7 +673,7 @@ object InputKey {
apply(AttributeKey[InputTask[T]](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[InputTask[T]]): InputKey[T] =
new InputKey[T] { val key = akey; def scope = Scope.ThisScope }
Scoped.scopedInput(Scope.ThisScope, akey)
}
/** Constructs TaskKeys, which are associated with tasks to define a setting.*/
@ -657,8 +702,7 @@ object TaskKey {
): TaskKey[T] =
apply(AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] =
new TaskKey[T] { val key = akey; def scope = Scope.ThisScope }
def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = Scoped.scopedTask(Scope.ThisScope, akey)
def local[T: Manifest]: TaskKey[T] = apply[T](AttributeKey.local[Task[T]])
}
@ -689,8 +733,7 @@ object SettingKey {
): SettingKey[T] =
apply(AttributeKey[T](label, description, extendScoped(extend1, extendN), rank))
def apply[T](akey: AttributeKey[T]): SettingKey[T] =
new SettingKey[T] { val key = akey; def scope = Scope.ThisScope }
def apply[T](akey: AttributeKey[T]): SettingKey[T] = Scoped.scopedSetting(Scope.ThisScope, akey)
def local[T: Manifest: OptJsonWriter]: SettingKey[T] = apply[T](AttributeKey.local[T])
}

View File

@ -8,11 +8,11 @@
package sbt
package std
import reflect.macros._
import scala.reflect.macros._
import Def.Initialize
import sbt.internal.util.complete.Parser
import sbt.internal.util.appmacro.{ Convert, Converted }
import Def.Initialize
object InputInitConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
@ -46,14 +46,13 @@ object TaskConvert extends Convert {
/** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/
object FullConvert extends Convert {
import InputWrapper._
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case WrapInitTaskName => Converted.Success[c.type](in)
case WrapPreviousName => Converted.Success[c.type](in)
case WrapInitName => wrapInit[T](c)(in)
case WrapTaskName => wrapTask[T](c)(in)
case _ => Converted.NotApplicable[c.type]
case InputWrapper.WrapInitTaskName => Converted.Success[c.type](in)
case InputWrapper.WrapPreviousName => Converted.Success[c.type](in)
case InputWrapper.WrapInitName => wrapInit[T](c)(in)
case InputWrapper.WrapTaskName => wrapTask[T](c)(in)
case _ => Converted.NotApplicable[c.type]
}
private def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree): Converted[c.type] = {

View File

@ -8,9 +8,10 @@
package sbt
package std
import language.experimental.macros
import reflect.macros._
import reflect.internal.annotations.compileTimeOnly
import scala.language.experimental.macros
import scala.annotation.compileTimeOnly
import scala.reflect.macros._
import Def.Initialize
import sbt.internal.util.appmacro.ContextUtil
@ -30,28 +31,34 @@ object InputWrapper {
private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603"
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
def wrapTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
def wrapInit_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def wrapInit_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.")
def wrapInitTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
)
def wrapInitTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
def wrapInputTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.")
def wrapInitInputTask_\u2603\u2603[T](in: Any): T = implDetailError
"`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask."
)
def wrapInitInputTask_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
@compileTimeOnly(
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.")
def wrapPrevious_\u2603\u2603[T](in: Any): T = implDetailError
"`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask."
)
def wrapPrevious_\u2603\u2603[T](@deprecated("unused", "") in: Any): T = implDetailError
private[this] def implDetailError =
sys.error("This method is an implementation detail and should not be referenced.")
@ -160,11 +167,12 @@ object InputWrapper {
}
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value*/
def previousMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
def previousMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
import c.universe._
c.macroApplication match {
case a @ Apply(Select(Apply(_, t :: Nil), tp), fmt) =>
case a @ Apply(Select(Apply(_, t :: Nil), _), _) =>
if (t.tpe <:< c.weakTypeOf[TaskKey[T]]) {
val tsTyped = c.Expr[TaskKey[T]](t)
val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) }
@ -181,35 +189,42 @@ object InputWrapper {
sealed abstract class MacroTaskValue[T] {
@compileTimeOnly(
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.")
"`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting."
)
def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T]
}
sealed abstract class MacroValue[T] {
@compileTimeOnly(
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.")
"`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting."
)
def value: T = macro InputWrapper.valueMacroImpl[T]
}
sealed abstract class ParserInput[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: T = macro ParserInput.parsedMacroImpl[T]
}
sealed abstract class InputEvaluated[T] {
@compileTimeOnly(
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask.")
"`evaluated` can only be used within an input task macro, such as := or Def.inputTask."
)
def evaluated: T = macro InputWrapper.valueMacroImpl[T]
@compileTimeOnly(
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask.")
"`inputTaskValue` can only be used within an input task macro, such as := or Def.inputTask."
)
def inputTaskValue: InputTask[T] = macro InputWrapper.inputTaskValueMacroImpl[T]
}
sealed abstract class ParserInputTask[T] {
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parsed: Task[T] = macro ParserInput.parsedInputMacroImpl[T]
}
sealed abstract class MacroPrevious[T] {
@compileTimeOnly(
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.")
"`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task."
)
def previous(implicit format: sjsonnew.JsonFormat[T]): Option[T] =
macro InputWrapper.previousMacroImpl[T]
}
@ -223,24 +238,29 @@ object ParserInput {
private[std] val WrapInitName = "initParser_\u2603\u2603"
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
def parser_\u2603\u2603[T](i: Any): T =
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def parser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
@compileTimeOnly(
"`parsed` can only be used within an input task macro, such as := or Def.inputTask.")
def initParser_\u2603\u2603[T](i: Any): T =
"`parsed` can only be used within an input task macro, such as := or Def.inputTask."
)
def initParser_\u2603\u2603[T](@deprecated("unused", "") i: Any): T =
sys.error("This method is an implementation detail and should not be referenced.")
private[std] def wrap[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrap[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapName)(ts, pos)
private[std] def wrapInit[T: c.WeakTypeTag](c: blackbox.Context)(ts: c.Expr[Any],
pos: c.Position): c.Expr[T] =
private[std] def wrapInit[T: c.WeakTypeTag](
c: blackbox.Context
)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] =
InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapInitName)(ts, pos)
private[std] def inputParser[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
private[std] def inputParser[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] =
c.universe.reify(t.splice.parser)
def parsedInputMacroImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Task[T]] =
@ -260,8 +280,9 @@ object ParserInput {
wrap[Task[T]](c)(inputParser(c)(e), pos)
}
private def wrapInitInputTask[T: c.WeakTypeTag](c: blackbox.Context)(tree: c.Tree,
pos: c.Position) = {
private def wrapInitInputTask[T: c.WeakTypeTag](
c: blackbox.Context
)(tree: c.Tree, pos: c.Position) = {
val e = c.Expr[Initialize[InputTask[T]]](tree)
wrapInit[Task[T]](c)(c.universe.reify { Def.toIParser(e.splice) }, pos)
}

View File

@ -14,18 +14,21 @@ import scala.reflect.macros._
import sbt.util.OptJsonWriter
private[sbt] object KeyMacro {
def settingKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[SettingKey[T]] =
def settingKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[SettingKey[T]] =
keyImpl2[T, SettingKey[T]](c) { (name, mf, ojw) =>
c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice, ojw.splice) }
}
def taskKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[TaskKey[T]] =
def taskKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[TaskKey[T]] =
keyImpl[T, TaskKey[T]](c) { (name, mf) =>
c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) }
}
def inputKeyImpl[T: c.WeakTypeTag](c: blackbox.Context)(
description: c.Expr[String]): c.Expr[InputKey[T]] =
def inputKeyImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(description: c.Expr[String]): c.Expr[InputKey[T]] =
keyImpl[T, InputKey[T]](c) { (name, mf) =>
c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) }
}
@ -45,7 +48,8 @@ private[sbt] object KeyMacro {
val enclosingValName = definingValName(
c,
methodName =>
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""")
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`."""
)
c.Expr[String](Literal(Constant(enclosingValName)))
}
@ -61,10 +65,10 @@ private[sbt] object KeyMacro {
n.decodedName.toString.trim // trim is not strictly correct, but macros don't expose the API necessary
@tailrec def enclosingVal(trees: List[c.Tree]): String = {
trees match {
case vd @ ValDef(_, name, _, _) :: ts => processName(name)
case ValDef(_, name, _, _) :: _ => processName(name)
case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs)
// lazy val x: X = <methodName> has this form for some reason (only when the explicit type is present, though)
case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: xs if mods.hasFlag(Flag.LAZY) =>
case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: _ if mods.hasFlag(Flag.LAZY) =>
processName(name)
case _ =>
c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decodedName.toString))

View File

@ -46,11 +46,13 @@ object InitializeConvert extends Convert {
Converted.Success(t)
}
private def failTask[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failTask[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task")
private def failPrevious[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
private def failPrevious[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task's previous value.")
}
@ -59,11 +61,14 @@ object SettingMacro {
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
def settingDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}

View File

@ -24,9 +24,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
val isTask = convert.asPredicate(ctx)
class traverser extends Traverser {
private val unchecked = symbolOf[sbt.sbtUnchecked].asClass
private val taskKeyType = typeOf[sbt.TaskKey[_]]
private val settingKeyType = typeOf[sbt.SettingKey[_]]
private val inputKeyType = typeOf[sbt.InputKey[_]]
private val initializeType = typeOf[sbt.Def.Initialize[_]]
private val uncheckedWrappers = MutableSet.empty[Tree]
var insideIf: Boolean = false
var insideAnon: Boolean = false
@ -48,6 +46,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
case _ => exprAtUseSite
}
uncheckedWrappers.add(removedSbtWrapper)
()
}
case _ =>
}
@ -55,8 +54,8 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
}
}
@inline def isKey(tpe: Type): Boolean =
tpe <:< taskKeyType || tpe <:< settingKeyType || tpe <:< inputKeyType
@inline def isKey(tpe: Type): Boolean = isInitialize(tpe)
@inline def isInitialize(tpe: Type): Boolean = tpe <:< initializeType
def detectAndErrorOnKeyMissingValue(i: Ident): Unit = {
if (isKey(i.tpe)) {
@ -65,6 +64,20 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
} else ()
}
def detectAndErrorOnKeyMissingValue(s: Select): Unit = {
if (isKey(s.tpe)) {
val keyName = s.name.decodedName.toString
ctx.error(s.pos, TaskLinterDSLFeedback.missingValueForKey(keyName))
} else ()
}
def detectAndErrorOnKeyMissingValue(a: Apply): Unit = {
if (isInitialize(a.tpe)) {
val expr = "X / y"
ctx.error(a.pos, TaskLinterDSLFeedback.missingValueForInitialize(expr))
} else ()
}
override def traverse(tree: ctx.universe.Tree): Unit = {
tree match {
case ap @ Apply(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
@ -73,7 +86,7 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
val (qualName, isSettingKey) =
Option(qual.symbol)
.map(sym => (sym.name.decodedName.toString, qual.tpe <:< typeOf[SettingKey[_]]))
.getOrElse((ap.pos.lineContent, false))
.getOrElse((ap.pos.source.lineToString(ap.pos.line - 1), false))
if (!isSettingKey && !shouldIgnore && isTask(wrapperName, tpe.tpe, qual)) {
if (insideIf && !isDynamicTask) {
@ -117,11 +130,15 @@ abstract class BaseTaskLinterDSL extends LinterDSL {
// TODO: Consider using unused names analysis to be able to report on more cases
case ValDef(_, valName, _, rhs) if valName == termNames.WILDCARD =>
rhs match {
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case _ => ()
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case s: Select => detectAndErrorOnKeyMissingValue(s)
case a: Apply => detectAndErrorOnKeyMissingValue(a)
case _ => ()
}
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case _ => ()
case i: Ident => detectAndErrorOnKeyMissingValue(i)
case s: Select => detectAndErrorOnKeyMissingValue(s)
case a: Apply => detectAndErrorOnKeyMissingValue(a)
case _ => ()
}
}
traverseTrees(stmts)
@ -160,14 +177,13 @@ object TaskLinterDSLFeedback {
private final val startGreen = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.GREEN else ""
private final val reset = if (ConsoleAppender.formatEnabledInEnv) AnsiColor.RESET else ""
private final val ProblemHeader = s"${startRed}Problem${reset}"
private final val SolutionHeader = s"${startGreen}Solution${reset}"
private final val ProblemHeader = s"${startRed}problem${reset}"
private final val SolutionHeader = s"${startGreen}solution${reset}"
def useOfValueInsideAnon(task: String) =
s"""${startBold}The evaluation of `$task` inside an anonymous function is prohibited.$reset
|
|${ProblemHeader}: Task invocations inside anonymous functions are evaluated independently of whether the anonymous function is invoked or not.
|
|${SolutionHeader}:
| 1. Make `$task` evaluation explicit outside of the function body if you don't care about its evaluation.
| 2. Use a dynamic task to evaluate `$task` and pass that value as a parameter to an anonymous function.
@ -178,7 +194,6 @@ object TaskLinterDSLFeedback {
|
|${ProblemHeader}: `$task` is inside the if expression of a regular task.
| Regular tasks always evaluate task inside the bodies of if expressions.
|
|${SolutionHeader}:
| 1. If you only want to evaluate it when the if predicate is true or false, use a dynamic task.
| 2. Otherwise, make the static evaluation explicit by evaluating `$task` outside the if expression.
@ -187,8 +202,14 @@ object TaskLinterDSLFeedback {
def missingValueForKey(key: String) =
s"""${startBold}The key `$key` is not being invoked inside the task definition.$reset
|
|${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered.
|
|${ProblemHeader}: Keys missing `.value` are not initialized and their dependency is not registered.
|${SolutionHeader}: Replace `$key` by `$key.value` or remove it if unused.
""".stripMargin
def missingValueForInitialize(expr: String) =
s"""${startBold}The setting/task `$expr` is not being invoked inside the task definition.$reset
|
|${ProblemHeader}: Settings/tasks missing `.value` are not initialized and their dependency is not registered.
|${SolutionHeader}: Replace `$expr` by `($expr).value` or remove it if unused.
""".stripMargin
}

View File

@ -56,9 +56,11 @@ object FullInstance
extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance)
with MonadInstance {
type SS = sbt.internal.util.Settings[Scope]
val settingsData = TaskKey[SS]("settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask)
val settingsData = TaskKey[SS](
"settings-data",
"Provides access to the project data for the build.",
KeyRanks.DTask
)
def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = {
import TupleSyntax._
@ -89,38 +91,44 @@ object TaskMacro {
final val InputTaskCreateDynName = "createDyn"
final val InputTaskCreateFreeName = "createFree"
final val append1Migration =
"`<+=` operator is removed. Try `lhs += { x.value }`\n or see http://www.scala-sbt.org/1.0/docs/Migrating-from-sbt-012x.html."
"`<+=` operator is removed. Try `lhs += { x.value }`\n or see http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html."
final val appendNMigration =
"`<++=` operator is removed. Try `lhs ++= { x.value }`\n or see http://www.scala-sbt.org/1.0/docs/Migrating-from-sbt-012x.html."
"`<++=` operator is removed. Try `lhs ++= { x.value }`\n or see http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html."
final val assignMigration =
"""`<<=` operator is removed. Use `key := { x.value }` or `key ~= (old => { newValue })`.
|See http://www.scala-sbt.org/1.0/docs/Migrating-from-sbt-012x.html""".stripMargin
|See http://www.scala-sbt.org/1.x/docs/Migrating-from-sbt-013x.html""".stripMargin
import LinterDSL.{ Empty => EmptyLinter }
def taskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
def taskMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
def taskDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] =
Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder, TaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
/** Implementation of := macro for settings. */
def settingAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[T]] = {
def settingAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[T]] = {
val init = SettingMacro.settingMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[T]](assign)
}
/** Implementation of := macro for tasks. */
def taskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
def taskAssignMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
val init = taskMacroImpl[T](c)(v)
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[Task[T]]](assign)
@ -130,88 +138,106 @@ object TaskMacro {
// These macros are there just so we can fail old operators like `<<=` and provide useful migration information.
def fakeSettingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
ContextUtil.selectMacroImpl[Setting[T]](c) { (ts, pos) =>
c.abort(pos, assignMigration)
}
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[Initialize[V]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) =>
c.abort(pos, append1Migration)
}
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
vs: c.Expr[Initialize[V]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c) { (ts, pos) =>
c.abort(pos, appendNMigration)
}
@deprecated("unused", "") app: c.Expr[Initialize[T]]
): c.Expr[Setting[T]] =
ContextUtil.selectMacroImpl[Setting[T]](c)((_, pos) => c.abort(pos, assignMigration))
def fakeSettingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, append1Migration))
def fakeSettingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[V]])(
@deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[S]] =
ContextUtil.selectMacroImpl[Setting[S]](c)((_, pos) => c.abort(pos, appendNMigration))
def fakeItaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
ContextUtil.selectMacroImpl[Setting[Task[T]]](c) { (ts, pos) =>
c.abort(pos, assignMigration)
}
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) =>
c.abort(pos, append1Migration)
}
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: blackbox.Context)(
vs: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (ts, pos) =>
c.abort(pos, appendNMigration)
}
@deprecated("unused", "") app: c.Expr[Initialize[Task[T]]]
): c.Expr[Setting[Task[T]]] =
ContextUtil.selectMacroImpl[Setting[Task[T]]](c)((_, pos) => c.abort(pos, assignMigration))
/* Implementations of <<= macro variations for tasks and settings. These just get the source position of the call site.*/
def fakeTaskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](
c: blackbox.Context
)(@deprecated("unused", "") v: c.Expr[Initialize[Task[V]]])(
@deprecated("unused", "") a: c.Expr[Append.Value[S, V]]
): c.Expr[Setting[Task[S]]] =
ContextUtil.selectMacroImpl[Setting[Task[S]]](c)((_, pos) => c.abort(pos, append1Migration))
def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
/** Stub for the retired `<<=`-style `++=` task macro: unconditionally aborts with a migration message. */
def fakeTaskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](
    c: blackbox.Context
)(@deprecated("unused", "") vs: c.Expr[Initialize[Task[V]]])(
    @deprecated("unused", "") a: c.Expr[Append.Values[S, V]]
): c.Expr[Setting[Task[S]]] = {
  // Only the call-site position matters; both macro arguments are deliberately unused.
  ContextUtil.selectMacroImpl[Setting[Task[S]]](c) { (_, pos) =>
    c.abort(pos, appendNMigration)
  }
}
// Implementations of <<= macro variations for tasks and settings.
// These just get the source position of the call site.
/** Position capture for task assignment; identical to the plain-setting case. */
def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
    app: c.Expr[Initialize[Task[T]]]
): c.Expr[Setting[Task[T]]] =
  settingAssignPosition(c)(app)
def taskAssignPositionT[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
/** Position capture for assigning a raw `Task[T]`: lift it into an `Initialize` first. */
def taskAssignPositionT[T: c.WeakTypeTag](
    c: blackbox.Context
)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] = {
  // Wrap the task with Def.valueStrict so the generic Initialize path applies.
  val lifted = c.universe.reify(Def.valueStrict(app.splice))
  itaskAssignPosition(c)(lifted)
}
def taskAssignPositionPure[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
/** Position capture for assigning a pure value to a task key: wrap it in a constant task. */
def taskAssignPositionPure[T: c.WeakTypeTag](
    c: blackbox.Context
)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] = {
  // Lift the pure value into a Task via TaskExtra.constant, then reuse the Task path.
  val asTask = c.universe.reify(TaskExtra.constant(app.splice))
  taskAssignPositionT(c)(asTask)
}
def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
/** Transform (`~=`) macro for task keys: rewrites the call with the call-site position attached. */
def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
    f: c.Expr[S => S]
): c.Expr[Setting[Task[S]]] = {
  val rewritten = transformMacroImpl(c)(f.tree)(TransformInitName)
  c.Expr[Setting[Task[S]]](rewritten)
}
def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
/** Transform (`~=`) macro for setting keys: rewrites the call with the call-site position attached. */
def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
    f: c.Expr[S => S]
): c.Expr[Setting[S]] = {
  val rewritten = transformMacroImpl(c)(f.tree)(TransformInitName)
  c.Expr[Setting[S]](rewritten)
}
def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
f: c.Expr[S => S]): c.Expr[Setting[S]] =
/** Transform (`~=`) macro for initialized tasks; same rewrite as the setting variant. */
def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(
    f: c.Expr[S => S]
): c.Expr[Setting[S]] = {
  val rewritten = transformMacroImpl(c)(f.tree)(TransformInitName)
  c.Expr[Setting[S]](rewritten)
}
/** `:=` macro for a pure setting value: lift it into an `Initialize` and delegate. */
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] = {
  val lifted = c.universe.reify(Def.valueStrict(app.splice))
  settingAssignPosition(c)(lifted)
}
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
/** `:=` macro for an `Initialize[T]`: rewrites the call into an assignment carrying the source position. */
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(
    app: c.Expr[Initialize[T]]
): c.Expr[Setting[T]] = {
  val rewritten = transformMacroImpl(c)(app.tree)(AssignInitName)
  c.Expr[Setting[T]](rewritten)
}
/** Implementation of := macro for tasks. */
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
/** Implementation of the `:=` macro for input tasks. */
def inputTaskAssignMacroImpl[T: c.WeakTypeTag](
    c: blackbox.Context
)(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = {
  // Expand the InputTask initializer first, then wrap it in a position-recording assignment.
  val initializer = inputTaskMacroImpl[T](c)(v)
  c.Expr[Setting[InputTask[T]]](transformMacroImpl(c)(initializer.tree)(AssignInitName))
}
/** Implementation of += macro for tasks. */
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
/** Implementation of the `+=` macro for tasks. */
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
  // Expand the appended task body, then rewrite the call into an append-one setting.
  val taskInit = taskMacroImpl[U](c)(v)
  c.Expr[Setting[Task[T]]](appendMacroImpl(c)(taskInit.tree, a.tree)(Append1InitName))
}
/** Implementation of += macro for settings. */
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
c: blackbox.Context
)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = {
import c.universe._
val ttpe = c.weakTypeOf[T]
val typeArgs = ttpe.typeArgs
@ -221,10 +247,11 @@ object TaskMacro {
if typeArgs.nonEmpty && (typeArgs.head weak_<:< c.weakTypeOf[Task[_]])
&& (tpe weak_<:< c.weakTypeOf[Initialize[_]]) =>
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) =>
case Apply(Apply(TypeApply(Select(preT, _), _), _), _) =>
val tree = Apply(
TypeApply(Select(preT, TermName("+=").encodedName), TypeTree(typeArgs.head) :: Nil),
Select(v.tree, TermName("taskValue").encodedName) :: Nil)
Select(v.tree, TermName("taskValue").encodedName) :: Nil
)
c.Expr[Setting[T]](tree)
case x => ContextUtil.unexpectedTree(x)
}
@ -236,73 +263,89 @@ object TaskMacro {
}
/** Implementation of ++= macro for tasks. */
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
/** Implementation of the `++=` macro for tasks. */
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
  // Expand the appended task body, then rewrite the call into an append-many setting.
  val taskInit = taskMacroImpl[U](c)(vs)
  c.Expr[Setting[Task[T]]](appendMacroImpl(c)(taskInit.tree, a.tree)(AppendNInitName))
}
/** Implementation of ++= macro for settings. */
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
/** Implementation of the `++=` macro for settings. */
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = {
  // Expand the appended setting body, then rewrite the call into an append-many setting.
  val settingInit = SettingMacro.settingMacroImpl[U](c)(vs)
  c.Expr[Setting[T]](appendMacroImpl(c)(settingInit.tree, a.tree)(AppendNInitName))
}
/** Implementation of -= macro for tasks. */
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
/** Implementation of the `-=` macro for tasks. */
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] = {
  // Expand the removed task body, then rewrite the call into a remove-one setting.
  val taskInit = taskMacroImpl[U](c)(v)
  c.Expr[Setting[Task[T]]](removeMacroImpl(c)(taskInit.tree, r.tree)(Remove1InitName))
}
/** Implementation of -= macro for settings. */
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(
r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
/** Implementation of the `-=` macro for settings. */
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] = {
  // Expand the removed setting body, then rewrite the call into a remove-one setting.
  val settingInit = SettingMacro.settingMacroImpl[U](c)(v)
  c.Expr[Setting[T]](removeMacroImpl(c)(settingInit.tree, r.tree)(Remove1InitName))
}
/** Implementation of --= macro for tasks. */
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
/** Implementation of the `--=` macro for tasks. */
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] = {
  // Expand the removed task body, then rewrite the call into a remove-many setting.
  val taskInit = taskMacroImpl[U](c)(vs)
  c.Expr[Setting[Task[T]]](removeMacroImpl(c)(taskInit.tree, r.tree)(RemoveNInitName))
}
/** Implementation of --= macro for settings. */
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(
r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
/** Implementation of the `--=` macro for settings. */
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](
    c: blackbox.Context
)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] = {
  // Expand the removed setting body, then rewrite the call into a remove-many setting.
  val settingInit = SettingMacro.settingMacroImpl[U](c)(vs)
  c.Expr[Setting[T]](removeMacroImpl(c)(settingInit.tree, r.tree)(RemoveNInitName))
}
private[this] def appendMacroImpl(c: blackbox.Context)(init: c.Tree, append: c.Tree)(
newName: String): c.Tree = {
private[this] def appendMacroImpl(
c: blackbox.Context
)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), _) =>
Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil),
append :: Nil)
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
Apply(
Apply(
TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil
),
append :: Nil
)
case x => ContextUtil.unexpectedTree(x)
}
}
private[this] def removeMacroImpl(c: blackbox.Context)(init: c.Tree, remove: c.Tree)(
newName: String): c.Tree = {
private[this] def removeMacroImpl(
c: blackbox.Context
)(init: c.Tree, remove: c.Tree)(newName: String): c.Tree = {
import c.universe._
c.macroApplication match {
case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), r) =>
Apply(Apply(TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil),
r)
case Apply(Apply(TypeApply(Select(preT, _), targs), _), _) =>
Apply(
Apply(
TypeApply(Select(preT, TermName(newName).encodedName), targs),
init :: sourcePosition(c).tree :: Nil
),
remove :: Nil
)
case x => ContextUtil.unexpectedTree(x)
}
}
@ -316,8 +359,10 @@ object TaskMacro {
case Apply(Select(prefix, _), _) => prefix
case x => ContextUtil.unexpectedTree(x)
}
Apply.apply(Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil)
Apply.apply(
Select(target, TermName(newName).encodedName),
init :: sourcePosition(c).tree :: Nil
)
}
private[this] def sourcePosition(c: blackbox.Context): c.Expr[SourcePosition] = {
@ -335,7 +380,8 @@ object TaskMacro {
private[this] def settingSource(c: blackbox.Context, path: String, name: String): String = {
@tailrec def inEmptyPackage(s: c.Symbol): Boolean = s != c.universe.NoSymbol && (
s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage(
s.owner)
s.owner
)
)
c.internal.enclosingOwner match {
case ec if !ec.isStatic => name
@ -349,16 +395,19 @@ object TaskMacro {
c.Expr[T](Literal(Constant(t)))
}
def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
/** Public entry point of the input-task macro; defers to the shared implementation. */
def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
    t: c.Expr[T]
): c.Expr[Initialize[InputTask[T]]] =
  inputTaskMacro0[T](c)(t)
def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
/** Public entry point of the dynamic input-task macro; defers to the shared implementation. */
def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
    t: c.Expr[Initialize[Task[T]]]
): c.Expr[Initialize[InputTask[T]]] =
  inputTaskDynMacro0[T](c)(t)
private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
private[this] def inputTaskMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
iInitializeMacro(c)(t) { et =>
val pt = iParserMacro(c)(et) { pt =>
iTaskMacro(c)(pt)
@ -367,8 +416,8 @@ object TaskMacro {
}
private[this] def iInitializeMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
@ -376,7 +425,8 @@ object TaskMacro {
Instance
.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder, EmptyLinter)(
Left(cond),
inner)
inner
)
}
private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree = {
@ -412,25 +462,29 @@ object TaskMacro {
}
private[this] def iParserMacro[M[_], T](c: blackbox.Context)(t: c.Expr[T])(
f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T],
mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
f: c.Expr[T] => c.Expr[M[T]]
)(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = {
val inner: Transform[c.type, M] = new Transform[c.type, M] {
def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree
}
Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder, LinterDSL.Empty)(
Left(t),
inner)
inner
)
}
private[this] def iTaskMacro[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Task[T]] =
private[this] def iTaskMacro[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance
.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform)
Instance.idTransform
)
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = {
import c.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
@ -455,7 +509,8 @@ object TaskMacro {
if (result.isDefined) {
c.error(
qual.pos,
"Implementation restriction: a dynamic InputTask can only have a single input parser.")
"Implementation restriction: a dynamic InputTask can only have a single input parser."
)
EmptyTree
} else {
qual.foreach(checkQual)
@ -514,11 +569,13 @@ object PlainTaskMacro {
def taskImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskLinterDSL)(
Left(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T]
def taskDynImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] =
Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder, OnlyTaskDynLinterDSL)(
Right(t),
Instance.idTransform[c.type])
Instance.idTransform[c.type]
)
}

View File

@ -0,0 +1,135 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.test
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._
import java.io.File
import sbt.io.IO
import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }
import sbt.{
BuildRef,
LocalProject,
LocalRootProject,
ProjectRef,
Reference,
RootProject,
ThisBuild,
ThisProject
}
import sbt.ConfigKey
import sbt.librarymanagement.syntax._
import sbt.{ InputKey, SettingKey, TaskKey }
import sbt.internal.util.{ AttributeKey, AttributeMap }
/**
 * ScalaCheck generators and `Arbitrary` instances for sbt's build-settings model:
 * references, config keys, attribute keys/maps, scopes, and scoped keys.
 */
object BuildSettingsInstances {
// Deliberately tiny file generator; only two fixed paths are exercised. // for now..
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now..
implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f)))
implicit val arbProjectRef: Arbitrary[ProjectRef] =
Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id))
implicit val arbLocalProject: Arbitrary[LocalProject] =
Arbitrary(arbitrary[String] map LocalProject)
implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_)))
// Weighted Reference generator; the raw integer weights presumably come from
// observed frequencies in real builds — TODO confirm against the sibling
// BuildDSLInstances, which carries the same counts as comments.
implicit val arbReference: Arbitrary[Reference] = Arbitrary {
Gen.frequency(
96 -> arbitrary[BuildRef],
10271 -> ThisBuild,
325 -> LocalRootProject,
2283 -> arbitrary[ProjectRef],
299 -> ThisProject,
436 -> arbitrary[LocalProject],
1133 -> arbitrary[RootProject],
)
}
// Draws one of the standard configurations, biased towards Compile and Test.
implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary {
Gen.frequency(
2 -> const[ConfigKey](Compile),
2 -> const[ConfigKey](Test),
1 -> const[ConfigKey](Runtime),
1 -> const[ConfigKey](IntegrationTest),
1 -> const[ConfigKey](Provided),
)
}
implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] =
Arbitrary(Gen.identifier map (AttributeKey[A](_)))
// Mostly empty maps; occasionally a map with a String "name" and Boolean "isModule" entry.
implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary {
Gen.frequency(
20 -> AttributeMap.empty,
1 -> {
for (name <- Gen.identifier; isModule <- arbitrary[Boolean])
yield
AttributeMap.empty
.put(AttributeKey[String]("name"), name)
.put(AttributeKey[Boolean]("isModule"), isModule)
}
)
}
// An axis is This, Zero, or a Select of an arbitrary value of the axis type.
implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] =
Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_))))
// Full Scope: project, config, task, and extra axes drawn independently.
implicit def arbScope: Arbitrary[Scope] = Arbitrary(
for {
r <- arbitrary[ScopeAxis[Reference]]
c <- arbitrary[ScopeAxis[ConfigKey]]
t <- arbitrary[ScopeAxis[AttributeKey[_]]]
e <- arbitrary[ScopeAxis[AttributeMap]]
} yield Scope(r, c, t, e)
)
// Existential alias covering InputKey / SettingKey / TaskKey uniformly.
type Key = K forSome { type K <: Scoped.ScopingSetting[K] with Scoped }
// Wrapper so key labels get their own Arbitrary (valid identifiers only).
final case class Label(value: String)
val genLabel: Gen[Label] = Gen.identifier map Label
implicit def arbLabel: Arbitrary[Label] = Arbitrary(genLabel)
def genInputKey[A: Manifest]: Gen[InputKey[A]] = genLabel map (x => InputKey[A](x.value))
def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = genLabel map (x => SettingKey[A](x.value))
def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = genLabel map (x => TaskKey[A](x.value))
// Wraps a key generator so that 1 in 6 keys is additionally scoped via `in`.
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] = Arbitrary {
Gen.frequency(
5 -> keyGen,
1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope)
)
}
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A])
// Mixes the three key kinds into the existential Key type; the cast is safe
// because each concrete key type satisfies Key's bound.
implicit def arbKey[A: Manifest](
implicit
arbInputKey: Arbitrary[InputKey[A]],
arbSettingKey: Arbitrary[SettingKey[A]],
arbTaskKey: Arbitrary[TaskKey[A]],
): Arbitrary[Key] = Arbitrary {
def convert[T](g: Gen[T]) = g.asInstanceOf[Gen[Key]]
Gen.frequency(
15431 -> convert(arbitrary[InputKey[A]]),
19645 -> convert(arbitrary[SettingKey[A]]),
22867 -> convert(arbitrary[TaskKey[A]]),
)
}
// Import these instead when tests need keys that are never pre-scoped.
object WithoutScope {
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A])
}
implicit def arbScoped[A: Manifest]: Arbitrary[Scoped] = Arbitrary(arbitrary[Key])
}

View File

@ -0,0 +1,145 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.test
import org.scalacheck._, Prop._, util.Pretty
import sbt.internal.util.AttributeKey
import sbt.util.NoJsonWriter
import sbt.{ InputTask, Scope, Task }
import sbt.{ InputKey, Scoped, SettingKey, TaskKey }
import BuildSettingsInstances._
/**
 * Properties for the equality contract of sbt's scoped keys: keys of the same
 * kind built from the same label/manifest/scope are equal, and keys of
 * different kinds are never equal — even when they share a manifest.
 */
object ScopedSpec extends Properties("Scoped") {
// Only two underlying manifests are exercised; enough to vary key identity.
val intManifest = manifest[Int]
val stringManifest = manifest[String]
implicit val arbManifest: Arbitrary[Manifest[_]] =
Arbitrary(Gen.oneOf(intManifest, stringManifest))
property("setting keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = settingKey(label, manifest, scope)
val k2 = settingKey(label, manifest, scope)
expectEq(k1, k2)
}
}
property("task keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = taskKey(label, manifest, scope)
val k2 = taskKey(label, manifest, scope)
expectEq(k1, k2)
}
}
property("input keys are structurally equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val k1 = inputKey(label, manifest, scope)
val k2 = inputKey(label, manifest, scope)
expectEq(k1, k2)
}
}
// All three pairwise combinations of key kinds must be distinct.
property("different key types are not equal") = {
forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
val settingKey1 = settingKey(label, manifest, scope)
val taskKey1 = taskKey(label, manifest, scope)
val inputKey1 = inputKey(label, manifest, scope)
all(
expectNe(settingKey1, taskKey1),
expectNe(settingKey1, inputKey1),
expectNe(taskKey1, inputKey1),
)
}
}
// Builds two key kinds over the *same* AttributeKey so the manifests match exactly;
// the kinds must still not compare equal.
property("different key types, with the same manifest, are not equal") = {
forAll { (label: Label, scope: Scope) =>
val prop1 = {
val manifest1 = manifest[Task[String]]
val attrKey = attributeKey(label, manifest1)
val k1 = SettingKey(attrKey) in scope
val k2 = TaskKey(attrKey) in scope
expectNeSameManifest(k1, k2)
}
val prop2 = {
val manifest1 = manifest[InputTask[String]]
val attrKey = attributeKey(label, manifest1)
val k1 = SettingKey(attrKey) in scope
val k2 = InputKey(attrKey) in scope
expectNeSameManifest(k1, k2)
}
all(prop1, prop2)
}
}
/// Key construction helpers (explicit manifest + NoJsonWriter, then scoped with `in`).
def settingKey[A](label: Label, manifest: Manifest[A], scope: Scope): SettingKey[A] = {
val noJsonWriter = NoJsonWriter[A]()
SettingKey[A](label.value)(manifest, noJsonWriter) in scope
}
def taskKey[A](label: Label, manifest: Manifest[A], s: Scope): TaskKey[A] =
TaskKey[A](label.value)(manifest) in s
def inputKey[A](label: Label, manifest: Manifest[A], scope: Scope): InputKey[A] =
InputKey[A](label.value)(manifest) in scope
def attributeKey[A](label: Label, manifest: Manifest[A]): AttributeKey[A] = {
val jsonWriter = NoJsonWriter[A]()
AttributeKey[A](label.value)(manifest, jsonWriter)
}
/// Assertion helpers: equality is checked symmetrically and labelled for diagnostics.
def expectEq(k1: Scoped, k2: Scoped): Prop =
?=(k1, k2) && ?=(k2, k1) map eqLabels(k1, k2)
def expectNe(k1: Scoped, k2: Scoped): Prop =
!=(k1, k2) && !=(k2, k1) map eqLabels(k1, k2)
def expectNeSameManifest(k1: Scoped, k2: Scoped) = {
all(
?=(k1.key.manifest, k2.key.manifest), // sanity check the manifests are the same
expectNe(k1, k2),
)
}
// Attaches label/manifest/scope comparison labels to a result for readable failures.
def eqLabels(k1: Scoped, k2: Scoped): Prop.Result => Prop.Result = r => {
val eqLabel = k1.key.label == k2.key.label
val eqManifest = k1.key.manifest == k2.key.manifest
val eqScope = k1.scope == k2.scope
r.label(s"label equality: ${k1.key.label} == ${k2.key.label} : $eqLabel")
.label(s"manifest equality: ${k1.key.manifest} == ${k2.key.manifest} : $eqManifest")
.label(s"scope equality: ${k1.scope} == ${k2.scope} : $eqScope")
}
// Prop combinator: proved iff x == y, with a pretty-printed failure message.
def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if (x == y) proved
else
falsified :| {
val act = Pretty.pretty[T](x, Pretty.Params(0))
val exp = Pretty.pretty[T](y, Pretty.Params(0))
s"Expected $act to be equal to $exp"
}
// Prop combinator: proved iff x != y, with a pretty-printed failure message.
def !=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if (x == y) falsified
else
proved :| {
val act = Pretty.pretty[T](x, Pretty.Params(0))
val exp = Pretty.pretty[T](y, Pretty.Params(0))
s"Expected $act to NOT be equal to $exp"
}
}

View File

@ -7,290 +7,104 @@
package sbt.test
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._, Prop._
import org.scalacheck.{ Test => _, _ }, Prop._
import java.io.File
import sbt.io.IO
import sbt.SlashSyntax
import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }, Scope.{ Global, ThisScope }
import sbt.{ BuildRef, LocalProject, LocalRootProject, ProjectRef, Reference, RootProject, ThisBuild, ThisProject }
import sbt.{ Scope, ScopeAxis, Scoped }, Scope.{ Global, ThisScope }
import sbt.Reference
import sbt.ConfigKey
import sbt.librarymanagement.syntax._
import sbt.{ InputKey, SettingKey, TaskKey }
import sbt.internal.util.{ AttributeKey, AttributeMap }
import sbt.internal.util.AttributeKey
/**
 * ScalaCheck generators and `Arbitrary` instances for the build DSL specs:
 * references, config keys, keys (optionally scoped), attribute maps, and scopes.
 */
object BuildDSLInstances {
// Deliberately tiny file generator; only two fixed paths are exercised. // for now..
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now..
implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f)))
implicit val arbProjectRef: Arbitrary[ProjectRef] =
Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id))
implicit val arbLocalProject: Arbitrary[LocalProject] =
Arbitrary(arbitrary[String] map LocalProject)
implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_)))
// Weighted Reference generator; the trailing comments record the raw counts the
// rounded weights were derived from.
implicit val arbReference: Arbitrary[Reference] = Arbitrary {
Gen.frequency(
1 -> arbitrary[BuildRef], // 96
100 -> ThisBuild, // 10,271
3 -> LocalRootProject, // 325
23 -> arbitrary[ProjectRef], // 2,283
3 -> ThisProject, // 299
4 -> arbitrary[LocalProject], // 436
11 -> arbitrary[RootProject], // 1,133
)
}
// Draws one of the standard configurations, biased towards Compile and Test.
implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary {
Gen.frequency(
2 -> const[ConfigKey](Compile),
2 -> const[ConfigKey](Test),
1 -> const[ConfigKey](Runtime),
1 -> const[ConfigKey](IntegrationTest),
1 -> const[ConfigKey](Provided),
)
}
implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] =
Arbitrary(Gen.identifier map (AttributeKey[A](_)))
// Wraps a key generator so that 1 in 6 keys is additionally scoped via `in`.
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] =
Arbitrary(Gen.frequency(
5 -> keyGen,
1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope)
))
def genInputKey[A: Manifest]: Gen[InputKey[A]] = Gen.identifier map (InputKey[A](_))
def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = Gen.identifier map (SettingKey[A](_))
def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = Gen.identifier map (TaskKey[A](_))
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A])
// Mixes the three key kinds into Scoped; trailing comments record the raw counts
// behind the rounded weights.
implicit def arbScoped[A: Manifest](implicit
arbInputKey: Arbitrary[InputKey[A]],
arbSettingKey: Arbitrary[SettingKey[A]],
arbTaskKey: Arbitrary[TaskKey[A]],
): Arbitrary[Scoped] = {
Arbitrary(Gen.frequency(
15 -> arbitrary[InputKey[A]], // 15,431
20 -> arbitrary[SettingKey[A]], // 19,645
23 -> arbitrary[TaskKey[A]], // 22,867
))
}
// Import these instead when tests need keys that are never pre-scoped.
object WithoutScope {
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A])
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A])
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A])
}
// An axis is This, Zero, or a Select of an arbitrary value of the axis type.
implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] =
Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_))))
// Mostly empty maps; occasionally a map with a String "name" and Boolean "isModule" entry.
implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary {
Gen.frequency(
20 -> AttributeMap.empty,
1 -> (for (name <- Gen.identifier; isModule <- arbitrary[Boolean])
yield AttributeMap.empty
.put(AttributeKey[String]("name"), name)
.put(AttributeKey[Boolean]("isModule"), isModule)
)
)
}
// Full Scope: project, config, task, and extra axes drawn independently.
implicit def arbScope: Arbitrary[Scope] = Arbitrary(
for {
r <- arbitrary[ScopeAxis[Reference]]
c <- arbitrary[ScopeAxis[ConfigKey]]
t <- arbitrary[ScopeAxis[AttributeKey[_]]]
e <- arbitrary[ScopeAxis[AttributeMap]]
} yield Scope(r, c, t, e)
)
}
import BuildDSLInstances._
/** Minimal equality type class plus `===` / `=?` syntax used by the DSL specs. */
object CustomEquality {
  /** Type class witnessing how two `A`s are compared. */
  trait Eq[A] {
    def equal(x: A, y: A): Boolean
  }
  // Avoid reimplementing equality for other standard classes.
  trait EqualLowPriority {
    // The explicit `Eq[A]` result type is essential: without it the lambda is
    // inferred as `(A, A) => Boolean` (SAM conversion needs an expected type),
    // so this implicit could never satisfy an `Eq[A]` search and the universal
    // fallback was effectively dead.
    implicit def universal[A]: Eq[A] = (x: A, y: A) => x == y
  }
  object Eq extends EqualLowPriority {
    def apply[A: Eq]: Eq[A] = implicitly
    // Scoped keys compare by scope plus underlying attribute key, taking priority
    // over the universal fallback.
    implicit def eqScoped[A <: Scoped]: Eq[A] = (x, y) => x.scope == y.scope && x.key == y.key
  }
  /** Syntax: `x === y` yields a Boolean; `x =? y` yields a labelled ScalaCheck `Prop`. */
  implicit class AnyWith_===[A](private val x: A) extends AnyVal {
    def ===(y: A)(implicit z: Eq[A]): Boolean = z.equal(x, y)
    def =?(y: A)(implicit z: Eq[A]): Prop = {
      if (x === y) proved else falsified :| s"Expected $x but got $y"
    }
  }
  /** Property asserting that `x` equals `expected` under the `Eq` instance in scope. */
  def expectValue[A: Eq](expected: A)(x: A): Prop = expected =? x
}
import CustomEquality._
import BuildSettingsInstances._
object SlashSyntaxSpec extends Properties("SlashSyntax") with SlashSyntax {
type Key[K] = Scoped.ScopingSetting[K] with Scoped
property("Global / key == key in Global") = {
def check[K <: Key[K]: Arbitrary] = forAll((k: K) => expectValue(k in Global)(Global / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((k: Key) => expectValue(k in Global)(Global / k))
}
property("Reference / key == key in Reference") = {
def check[K <: Key[K]: Arbitrary] = forAll((r: Reference, k: K) => expectValue(k in r)(r / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((r: Reference, k: Key) => expectValue(k in r)(r / k))
}
property("Reference / Config / key == key in Reference in Config") = {
def check[K <: Key[K]: Arbitrary] =
forAll((r: Reference, c: ConfigKey, k: K) => expectValue(k in r in c)(r / c / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((r: Reference, c: ConfigKey, k: Key) => expectValue(k in r in c)(r / c / k))
}
property("Reference / task.key / key == key in Reference in task") = {
def check[K <: Key[K]: Arbitrary] =
forAll((r: Reference, t: Scoped, k: K) => expectValue(k in (r, t))(r / t.key / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((r: Reference, t: Scoped, k: Key) => expectValue(k in (r, t))(r / t.key / k))
}
property("Reference / task / key ~= key in Reference in task") = {
import WithoutScope._
def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] =
forAll((r: Reference, t: T, k: K) => expectValue(k in (r, t))(r / t / k))
(true
&& check[InputKey[String], InputKey[String]]
&& check[InputKey[String], SettingKey[String]]
&& check[InputKey[String], TaskKey[String]]
&& check[SettingKey[String], InputKey[String]]
&& check[SettingKey[String], SettingKey[String]]
&& check[SettingKey[String], TaskKey[String]]
&& check[TaskKey[String], InputKey[String]]
&& check[TaskKey[String], SettingKey[String]]
&& check[TaskKey[String], TaskKey[String]]
)
forAll((r: Reference, t: Key, k: Key) => expectValue(k in (r, t))(r / t / k))
}
property("Reference / Config / task.key / key == key in Reference in Config in task") = {
def check[K <: Key[K]: Arbitrary] =
forAll((r: Reference, c: ConfigKey, t: Scoped, k: K) =>
expectValue(k in (r, c, t))(r / c / t.key / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll { (r: Reference, c: ConfigKey, t: Scoped, k: Key) =>
expectValue(k in (r, c, t))(r / c / t.key / k)
}
}
property("Reference / Config / task / key ~= key in Reference in Config in task") = {
import WithoutScope._
def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] =
forAll((r: Reference, c: ConfigKey, t: T, k: K) => expectValue(k in (r, c, t))(r / c / t / k))
(true
&& check[InputKey[String], InputKey[String]]
&& check[InputKey[String], SettingKey[String]]
&& check[InputKey[String], TaskKey[String]]
&& check[SettingKey[String], InputKey[String]]
&& check[SettingKey[String], SettingKey[String]]
&& check[SettingKey[String], TaskKey[String]]
&& check[TaskKey[String], InputKey[String]]
&& check[TaskKey[String], SettingKey[String]]
&& check[TaskKey[String], TaskKey[String]]
)
forAll { (r: Reference, c: ConfigKey, t: Key, k: Key) =>
expectValue(k in (r, c, t))(r / c / t / k)
}
}
property("Config / key == key in Config") = {
def check[K <: Key[K]: Arbitrary] =
forAll((c: ConfigKey, k: K) => expectValue(k in c)(c / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((c: ConfigKey, k: Key) => expectValue(k in c)(c / k))
}
property("Config / task.key / key == key in Config in task") = {
def check[K <: Key[K]: Arbitrary] =
forAll((c: ConfigKey, t: Scoped, k: K) => expectValue(k in c in t)(c / t.key / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((c: ConfigKey, t: Scoped, k: Key) => expectValue(k in c in t)(c / t.key / k))
}
property("Config / task / key ~= key in Config in task") = {
import WithoutScope._
def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] =
forAll((c: ConfigKey, t: T, k: K) => expectValue(k in c in t)(c / t / k))
(true
&& check[InputKey[String], InputKey[String]]
&& check[InputKey[String], SettingKey[String]]
&& check[InputKey[String], TaskKey[String]]
&& check[SettingKey[String], InputKey[String]]
&& check[SettingKey[String], SettingKey[String]]
&& check[SettingKey[String], TaskKey[String]]
&& check[TaskKey[String], InputKey[String]]
&& check[TaskKey[String], SettingKey[String]]
&& check[TaskKey[String], TaskKey[String]]
)
forAll((c: ConfigKey, t: Key, k: Key) => expectValue(k in c in t)(c / t / k))
}
property("task.key / key == key in task") = {
def check[K <: Key[K]: Arbitrary] =
forAll((t: Scoped, k: K) => expectValue(k in t)(t.key / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((t: Scoped, k: Key) => expectValue(k in t)(t.key / k))
}
property("task / key ~= key in task") = {
import WithoutScope._
def check[T <: Key[T]: Arbitrary, K <: Key[K]: Arbitrary] =
forAll((t: T, k: K) => expectValue(k in t)(t / k))
(true
&& check[InputKey[String], InputKey[String]]
&& check[InputKey[String], SettingKey[String]]
&& check[InputKey[String], TaskKey[String]]
&& check[SettingKey[String], InputKey[String]]
&& check[SettingKey[String], SettingKey[String]]
&& check[SettingKey[String], TaskKey[String]]
&& check[TaskKey[String], InputKey[String]]
&& check[TaskKey[String], SettingKey[String]]
&& check[TaskKey[String], TaskKey[String]]
)
forAll((t: Key, k: Key) => expectValue(k in t)(t / k))
}
// Verifies that scoping a key with a full `Scope` via `/` matches `k in s`.
// NOTE(review): interleaved diff revisions; `check[...] && ...` result is
// discarded, the trailing `forAll` over `Key` is the newer form.
property("Scope / key == key in Scope") = {
def check[K <: Key[K]: Arbitrary] = forAll((s: Scope, k: K) => expectValue(k in s)(s / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll((s: Scope, k: Key) => expectValue(k in s)(s / k))
}
// Verifies that a project-axis value (`ScopeAxis[Reference]`) followed by `/`
// matches `k in ThisScope.copy(project = r)`.
// NOTE(review): interleaved diff revisions; `check[...] && ...` result is
// discarded, the `forAll { ... }` over `Key` is the newer form.
property("Reference? / key == key in ThisScope.copy(..)") = {
def check[K <: Key[K]: Arbitrary] =
forAll((r: ScopeAxis[Reference], k: K) =>
expectValue(k in ThisScope.copy(project = r))(r / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll { (r: ScopeAxis[Reference], k: Key) =>
expectValue(k in ThisScope.copy(project = r))(r / k)
}
}
// Verifies project + config axes chained with `/` against
// `k in ThisScope.copy(project = r, config = c)`.
// NOTE(review): interleaved diff revisions; `check[...] && ...` result is
// discarded, the trailing `forAll` over `Key` is the newer form.
property("Reference? / ConfigKey? / key == key in ThisScope.copy(..)") = {
def check[K <: Key[K]: Arbitrary] =
forAll((r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: K) =>
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll(
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k)
)
}
// property("Reference? / AttributeKey? / key == key in ThisScope.copy(..)") = {
// def check[K <: Key[K]: Arbitrary] =
// forAll(
// (r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: K) =>
// expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k))
// check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
// forAll((r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: AnyKey) =>
// expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k))
// }
// Verifies all three axes (project, config, task-attribute) chained with `/`
// against `k in ThisScope.copy(project = r, config = c, task = t)`.
// NOTE(review): interleaved diff revisions; `check[...] && ...` result is
// discarded, the `forAll { ... }` over `Key` is the newer form.
property("Reference? / ConfigKey? / AttributeKey? / key == key in ThisScope.copy(..)") = {
def check[K <: Key[K]: Arbitrary] =
forAll(
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: K) =>
expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k))
check[InputKey[String]] && check[SettingKey[String]] && check[TaskKey[String]]
forAll {
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: Key) =>
expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k)
}
}
// Prop helper: proves the property when `x` denotes the same scoped key as
// `expected` (identical scope AND identical underlying key); otherwise fails
// the property with a message showing both values.
def expectValue(expected: Scoped)(x: Scoped) = {
val sameScope = x.scope == expected.scope
val sameKey = x.key == expected.key
if (sameScope && sameKey) proved else falsified :| s"Expected $expected but got $x"
}
}

View File

@ -10,12 +10,11 @@ package sbt.std
class TaskPosSpec {
// Dynamic tasks can have task invocations inside if branches
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val bar = taskKey[String]("")
var condition = true
val baz = Def.taskDyn[String] {
val condition = true
Def.taskDyn[String] {
if (condition) foo
else bar
}
@ -23,23 +22,21 @@ class TaskPosSpec {
// Dynamic settings can have setting invocations inside if branches
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val bar = settingKey[String]("")
var condition = true
val baz = Def.settingDyn[String] {
val condition = true
Def.settingDyn[String] {
if (condition) foo
else bar
}
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooAnon = () => foo.value: @sbtUnchecked
if (condition) fooAnon()
else fooAnon()
@ -47,11 +44,10 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooAnon = () => (foo.value: @sbtUnchecked) + ""
if (condition) fooAnon()
else fooAnon()
@ -59,12 +55,11 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val bar = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
if (condition) foo.value: @sbtUnchecked
else bar.value: @sbtUnchecked
}
@ -72,11 +67,10 @@ class TaskPosSpec {
locally {
// This is fix 1 for appearance of tasks inside anons
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.task[String] {
val condition = true
Def.task[String] {
val fooResult = foo.value
val anon = () => fooResult + " "
if (condition) anon()
@ -86,11 +80,10 @@ class TaskPosSpec {
locally {
// This is fix 2 for appearance of tasks inside anons
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
var condition = true
val baz = Def.taskDyn[String] {
val condition = true
Def.taskDyn[String] {
val anon1 = (value: String) => value + " "
if (condition) {
Def.task(anon1(foo.value))
@ -100,31 +93,27 @@ class TaskPosSpec {
locally {
// missing .value error should not happen inside task dyn
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val baz = Def.taskDyn[String] {
Def.taskDyn[String] {
foo
}
}
locally {
// missing .value error should not happen inside task dyn
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val avoidDCE = ""
val baz = Def.task[String] {
foo: @sbtUnchecked
Def.task[String] {
val _ = foo: @sbtUnchecked
avoidDCE
}
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
val baz = Def.task[String] {
Def.task[String] {
def inner(s: KeyedInitialize[_]) = println(s)
inner(foo)
""
@ -133,11 +122,10 @@ class TaskPosSpec {
locally {
// In theory, this should be reported, but missing .value analysis is dumb at the cost of speed
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
def avoidDCE = { println(""); "" }
val baz = Def.task[String] {
Def.task[String] {
val (_, _) = "" match {
case _ => (foo, 1 + 2)
}
@ -146,15 +134,14 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = taskKey[String]("")
def avoidDCE = { println(""); "" }
val baz = Def.task[String] {
def avoidDCE(x: TaskKey[String]) = x.toString
Def.task[String] {
val hehe = foo
// We do not detect `hehe` because guessing that the user did the wrong thing would require
// us to run the unused name traverser defined in Typer (and hence proxy it from context util)
avoidDCE
avoidDCE(hehe)
}
}
@ -168,11 +155,10 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val condition = true
val baz = Def.task[String] {
Def.task[String] {
// settings can be evaluated in a condition
if (condition) foo.value
else "..."
@ -180,10 +166,9 @@ class TaskPosSpec {
}
locally {
import sbt._
import sbt.Def._
import sbt._, Def._
val foo = settingKey[String]("")
val baz = Def.task[Seq[String]] {
Def.task[Seq[String]] {
(1 to 10).map(_ => foo.value)
}
}

View File

@ -7,11 +7,9 @@
package sbt.std
import scala.reflect._
import scala.tools.reflect.ToolBox
object TestUtil {
import tools.reflect.ToolBox
def eval(code: String, compileOptions: String = ""): Any = {
val tb = mkToolbox(compileOptions)
tb.eval(tb.parse(code))
@ -24,9 +22,9 @@ object TestUtil {
}
lazy val toolboxClasspath: String = {
val resource = getClass.getClassLoader.getResource("toolbox.classpath")
val classpathFile = scala.io.Source.fromFile(resource.toURI)
val completeSporesCoreClasspath = classpathFile.getLines.mkString
completeSporesCoreClasspath
val mainClassesDir = buildinfo.TestBuildInfo.classDirectory
val testClassesDir = buildinfo.TestBuildInfo.test_classDirectory
val depsClasspath = buildinfo.TestBuildInfo.dependencyClasspath
mainClassesDir +: testClassesDir +: depsClasspath mkString java.io.File.pathSeparator
}
}

View File

@ -7,15 +7,19 @@
package sbt.std.neg
import scala.tools.reflect.ToolBoxError
import org.scalatest.FunSuite
import sbt.std.TaskLinterDSLFeedback
import sbt.std.TestUtil._
class TaskNegSpec extends FunSuite {
import tools.reflect.ToolBoxError
def expectError(errorSnippet: String,
compileOptions: String = "",
baseCompileOptions: String = s"-cp $toolboxClasspath")(code: String) = {
def expectError(
errorSnippet: String,
compileOptions: String = "",
baseCompileOptions: String = s"-cp $toolboxClasspath",
)(code: String) = {
val errorMessage = intercept[ToolBoxError] {
eval(code, s"$compileOptions $baseCompileOptions")
println(s"Test failed -- compilation was successful! Expected:\n$errorSnippet")

View File

@ -0,0 +1,40 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt
/**
 * Immutable value class for a Java version: its numeric components plus an
 * optional vendor prefix (rendered as `vendor@1.2.3` by `toString`).
 *
 * NOTE(review): this file is generated by sbt-contraband (see the header) —
 * change the contraband schema and regenerate instead of editing by hand.
 */
final class JavaVersion private (
val numbers: Vector[Long],
val vendor: Option[String]) extends Serializable {
// Dotted rendering of the numeric components, e.g. "1.8.0".
def numberStr: String = numbers.mkString(".")
// Structural equality over both fields.
override def equals(o: Any): Boolean = o match {
case x: JavaVersion => (this.numbers == x.numbers) && (this.vendor == x.vendor)
case _ => false
}
// Hash consistent with equals, seeded with the fully-qualified class name.
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.JavaVersion".##) + numbers.##) + vendor.##)
}
// "vendor@numbers" when a vendor is present, otherwise just the numbers.
override def toString: String = {
vendor.map(_ + "@").getOrElse("") + numberStr
}
// Private copy; external callers evolve instances via the with* methods below.
private[this] def copy(numbers: Vector[Long] = numbers, vendor: Option[String] = vendor): JavaVersion = {
new JavaVersion(numbers, vendor)
}
def withNumbers(numbers: Vector[Long]): JavaVersion = {
copy(numbers = numbers)
}
def withVendor(vendor: Option[String]): JavaVersion = {
copy(vendor = vendor)
}
// Convenience overload: wraps the (possibly null) vendor string in Option.
def withVendor(vendor: String): JavaVersion = {
copy(vendor = Option(vendor))
}
}
// Companion constructors (contraband-generated; regenerate, don't hand-edit).
object JavaVersion {
// Parses a version string; delegates to CrossJava's parser — presumably
// accepts forms like "openjdk@1.8.0" (TODO confirm against CrossJava).
def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version)
def apply(numbers: Vector[Long], vendor: Option[String]): JavaVersion = new JavaVersion(numbers, vendor)
// Overload wrapping a (possibly null) vendor string in Option.
def apply(numbers: Vector[Long], vendor: String): JavaVersion = new JavaVersion(numbers, Option(vendor))
}

View File

@ -17,3 +17,13 @@ enum PluginTrigger {
AllRequirements
NoTrigger
}
type JavaVersion {
numbers: [Long]
vendor: String
#x def numberStr: String = numbers.mkString(".")
#xtostring vendor.map(_ + "@").getOrElse("") + numberStr
#xcompanion def apply(version: String): JavaVersion = sbt.internal.CrossJava.parseJavaVersion(version)
}

View File

@ -23,7 +23,8 @@ abstract class BackgroundJobService extends Closeable {
* then you could process.destroy() for example.
*/
def runInBackground(spawningTask: ScopedKey[_], state: State)(
start: (Logger, File) => Unit): JobHandle
start: (Logger, File) => Unit
): JobHandle
/** Same as shutdown. */
def close(): Unit
@ -51,7 +52,8 @@ object BackgroundJobService {
{
val stringIdParser: Parser[Seq[String]] = Space ~> token(
NotSpace examples handles.map(_.id.toString).toSet,
description = "<job id>").+
description = "<job id>"
).+
stringIdParser.map { strings =>
strings.map(Integer.parseInt(_)).flatMap(id => handles.find(_.id == id))
}

Some files were not shown because too many files have changed in this diff Show More