From 0e6387303246c6f074a34d2b8bd44676cc5af57c Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Fri, 23 Oct 2020 08:23:33 -0700
Subject: [PATCH 01/24] Set color by default only if log format enabled

I noticed that if you run `sbt -Dsbt.log.noformat=true` there are colors
printed when using the latest sbt code. When running with that property set,
the expectation is there are no colors.
---
 .../src/main/scala/sbt/internal/util/Terminal.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
index 0c336fa2d..35ce45363 100644
--- a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
+++ b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
@@ -314,7 +314,10 @@ object Terminal {
   private[this] def useColorDefault: Boolean = {
     // This approximates that both stdin and stdio are connected,
     // so by default color will be turned off for pipes and redirects.
-    props.map(_.color).orElse(isColorEnabledProp).getOrElse((hasConsole && !isDumbTerminal) || isCI)
+    props
+      .map(_.color)
+      .orElse(isColorEnabledProp)
+      .getOrElse((hasConsole && !isDumbTerminal && logFormatEnabled.getOrElse(true)) || isCI)
   }
   private[this] lazy val isColorEnabledProp: Option[Boolean] =
     sys.props.get("sbt.color").orElse(sys.props.get("sbt.colour")).flatMap(parseLogOption)

From 69510b126bf5a05d139aa6b129b51eeb9d565d8c Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Sat, 24 Oct 2020 12:49:57 -0700
Subject: [PATCH 02/24] Parse network client arguments early

With sbtn, the system properties are passed in as regular command arguments.
We need to parse them before we call Terminal.withStreams or else system
properties like -Dsbt.color=false are ignored.
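As a rough sketch of the idea (illustrative only; the object and method names
below are made up and are not the NetworkClient API), pulling -D arguments out
of the raw argument list before any terminal or color detection runs could
look like:

    // Hypothetical helper: apply -Dkey=value arguments to system properties
    // before Terminal.withStreams runs, returning the remaining arguments
    // for normal command parsing.
    object EarlyProps {
      private val SysProp = "-D([^=]+)=(.*)".r
      def applyAndFilter(args: Array[String]): Array[String] =
        args.filter {
          case SysProp(key, value) =>
            System.setProperty(key, value) // must happen before color detection
            false
          case _ => true
        }
    }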
---
 .../sbt/internal/client/NetworkClient.scala | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala
index 2b76b1205..8678a401a 100644
--- a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala
+++ b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala
@@ -1072,7 +1072,7 @@ object NetworkClient {
   }
   def client(
       baseDirectory: File,
-      args: Array[String],
+      args: Arguments,
       inputStream: InputStream,
       errorStream: PrintStream,
       terminal: Terminal,
@@ -1080,7 +1080,7 @@
   ): Int = {
     val client =
       simpleClient(
-        NetworkClient.parseArgs(args).withBaseDirectory(baseDirectory),
+        args.withBaseDirectory(baseDirectory),
         inputStream,
         errorStream,
         useJNI,
@@ -1091,6 +1091,15 @@
       else 1
     } catch { case _: Exception => 1 } finally client.close()
   }
+  def client(
+      baseDirectory: File,
+      args: Array[String],
+      inputStream: InputStream,
+      errorStream: PrintStream,
+      terminal: Terminal,
+      useJNI: Boolean
+  ): Int = client(baseDirectory, parseArgs(args), inputStream, errorStream, terminal, useJNI)
+
   private def simpleClient(
       arguments: Arguments,
       inputStream: InputStream,
@@ -1129,9 +1138,10 @@
       })
     Runtime.getRuntime.addShutdownHook(hook)
     if (Util.isNonCygwinWindows) sbt.internal.util.JLine3.forceWindowsJansi()
+    val parsed = parseArgs(restOfArgs)
     System.exit(Terminal.withStreams(false) {
       val term = Terminal.console
-      try client(base, restOfArgs, term.inputStream, System.err, term, useJNI)
+      try client(base, parsed, term.inputStream, System.err, term, useJNI)
       catch { case _: AccessDeniedException => 1 } finally {
         Runtime.getRuntime.removeShutdownHook(hook)
         hook.run()

From 7eafcaf544e90e19303519859ab7b407ec0d48d3 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Sat, 24 Oct 2020 12:57:22 -0700
Subject: [PATCH 03/24] Strip ansi and color codes from terminal output

It is possible for downstream dependencies to print or log messages containing
ansi escape sequences and/or color codes. In older versions of sbt, these would
be printed even if the user had disabled ansi codes or color via the
sbt.log.noformat or sbt.color parameters. This commit adds a general api to
EscHelpers that strips general ansi codes and color codes independently via
flags. We can then use that api to ensure that all bytes written to System.out
are stripped of ansi escape and color codes if the terminal properties demand
this.

The motivation was that JLine 3 will prepend the prompt string with \E[?2004h,
which turns on bracketed paste mode
(https://en.wikipedia.org/wiki/ANSI_escape_code). If the sbt shell is started
with a terminal that doesn't support general ansi escape codes, such as the
jEdit shell, ?2004h gets printed to the shell. To fix this, we can strip ansi
codes from all output if the terminal doesn't support general ansi codes. This
has the additional side effect that any ansi codes added to log messages or
printlns by non-sbt code will be stripped. It's unlikely that this is all that
common.

In addition to the JLine use case, I've noticed that utest prints colored
output during test runs. Prior to this change, the colored output was present
even when sbt was run with `-Dsbt.color=false` and after this change, the
colors are correctly stripped.
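The core idea can be sketched with regular expressions (an approximation only;
the actual EscHelpers.strip added in this patch works on raw bytes and handles
more escape forms than this):

    // Approximate sketch: remove color (SGR) sequences and other CSI sequences
    // independently, mirroring the stripColor/stripAnsi flags described above.
    object StripSketch {
      private val colorCsi = "\u001b\\[[0-9;]*m".r              // e.g. ESC[36m
      private val otherCsi = "\u001b\\[[0-9;?]*[A-Za-z&&[^m]]".r // e.g. ESC[?2004h, cursor moves
      def strip(s: String, stripAnsi: Boolean, stripColor: Boolean): String = {
        val noColor = if (stripColor) colorCsi.replaceAllIn(s, "") else s
        if (stripAnsi) otherCsi.replaceAllIn(noColor, "") else noColor
      }
    }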
--- .../sbt/internal/util/ConsoleAppender.scala | 6 +- .../scala/sbt/internal/util/EscHelpers.scala | 76 +++++++++++++++---- .../main/scala/sbt/internal/util/JLine3.scala | 7 +- .../scala/sbt/internal/util/Terminal.scala | 9 ++- .../sbt/internal/util/CleanStringSpec.scala | 42 ++++++++-- 5 files changed, 113 insertions(+), 27 deletions(-) diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/ConsoleAppender.scala b/internal/util-logging/src/main/scala/sbt/internal/util/ConsoleAppender.scala index c248eb484..55b0fe5d8 100644 --- a/internal/util-logging/src/main/scala/sbt/internal/util/ConsoleAppender.scala +++ b/internal/util-logging/src/main/scala/sbt/internal/util/ConsoleAppender.scala @@ -495,8 +495,10 @@ trait Appender extends AutoCloseable { // the output may have unwanted colors but it would still be legible. This should // only be relevant if the log message string itself contains ansi escape sequences // other than color codes which is very unlikely. - val toWrite = if (!ansiCodesSupported) { - if (useFormat) EscHelpers.stripMoves(msg) else EscHelpers.removeEscapeSequences(msg) + val toWrite = if (!ansiCodesSupported || !useFormat && msg.getBytes.contains(27.toByte)) { + val (bytes, len) = + EscHelpers.strip(msg.getBytes, stripAnsi = !ansiCodesSupported, stripColor = !useFormat) + new String(bytes, 0, len) } else msg out.println(toWrite) } diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/EscHelpers.scala b/internal/util-logging/src/main/scala/sbt/internal/util/EscHelpers.scala index 19af0024a..ee84680c6 100644 --- a/internal/util-logging/src/main/scala/sbt/internal/util/EscHelpers.scala +++ b/internal/util-logging/src/main/scala/sbt/internal/util/EscHelpers.scala @@ -126,32 +126,76 @@ object EscHelpers { } index } - def stripMoves(s: String): String = { - val bytes = s.getBytes + + /** + * Strips ansi escape and color codes from an input string. 
+ * + * @param bytes the input bytes + * @param stripAnsi toggles whether or not to remove general ansi escape codes + * @param stripColor toggles whether or not to remove ansi color codes + * @return a string with the escape and color codes removed depending on the input + * parameter along with the length of the output string (which may be smaller than + * the returned array) + */ + def strip(bytes: Array[Byte], stripAnsi: Boolean, stripColor: Boolean): (Array[Byte], Int) = { val res = Array.fill[Byte](bytes.length)(0) + var i = 0 var index = 0 - var lastEscapeIndex = -1 var state = 0 - def set(b: Byte) = { - res(index) = b - index += 1 - } + var limit = 0 + val digit = new ArrayBuffer[Byte] + var leftDigit = -1 + var escIndex = -1 bytes.foreach { b => - set(b) + if (index < res.length) res(index) = b + index += 1 + limit = math.max(limit, index) + if (state == 0) escIndex = -1 b match { case 27 => + escIndex = index - 1 state = esc - lastEscapeIndex = math.max(0, index) - case b if b == '[' && state == esc => state = csi - case 'm' => state = 0 - case b if state == csi && (b < 48 || b >= 58) && b != ';' => + case b if (state == esc || state == csi) && b >= 48 && b < 58 => + state = csi + digit += b + case '[' if state == esc => state = csi + case 8 => state = 0 - index = math.max(0, lastEscapeIndex - 1) - case b => + index = math.max(index - 1, 0) + case b if state == csi => + leftDigit = Try(new String(digit.toArray).toInt).getOrElse(0) + state = 0 + b.toChar match { + case 'h' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'J' | 'K' => + if (stripAnsi) index = math.max(escIndex, 0) + case 'm' => if (stripColor) index = escIndex + case ';' | 's' | 'u' | '?' => state = csi + case b => + } + digit.clear() + case b if state == esc => state = 0 + case b => } } - new String(res, 0, index) + (res, index) } + @deprecated("use EscHelpers.strip", "1.4.2") + def stripMoves(s: String): String = { + val (bytes, len) = strip(s.getBytes, stripAnsi = true, stripColor = false) + new String(bytes, 0, len) + } + + /** + * Removes the ansi escape sequences from a string and makes a best attempt at + * calculating any ansi moves by hand. For example, if the string contains + * a backspace character followed by a character, the output string would + * replace the character preceding the backspaces with the character proceding it. + * This is in contrast to `strip` which just removes all ansi codes entirely. + * + * @param s the input string + * @return a string containing the original characters of the input stream with + * the ansi escape codes removed. 
+ */ def stripColorsAndMoves(s: String): String = { val bytes = s.getBytes val res = Array.fill[Byte](bytes.length)(0) @@ -174,6 +218,7 @@ object EscHelpers { leftDigit = Try(new String(digit.toArray).toInt).getOrElse(0) state = 0 b.toChar match { + case 'h' => index = math.max(index - 1, 0) case 'D' => index = math.max(index - leftDigit, 0) case 'C' => index = math.min(limit, math.min(index + leftDigit, res.length - 1)) case 'K' | 'J' => @@ -190,6 +235,7 @@ object EscHelpers { index += 1 limit = math.max(limit, index) } + (res, limit) new String(res, 0, limit) } diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/JLine3.scala b/internal/util-logging/src/main/scala/sbt/internal/util/JLine3.scala index a80d81410..9de086fd3 100644 --- a/internal/util-logging/src/main/scala/sbt/internal/util/JLine3.scala +++ b/internal/util-logging/src/main/scala/sbt/internal/util/JLine3.scala @@ -124,7 +124,12 @@ private[sbt] object JLine3 { override val output: OutputStream = new OutputStream { override def write(b: Int): Unit = write(Array[Byte](b.toByte)) override def write(b: Array[Byte]): Unit = if (!closed.get) term.withPrintStream { ps => - ps.write(b) + val (toWrite, len) = if (b.contains(27.toByte)) { + if (!term.isAnsiSupported || !term.isColorEnabled) { + EscHelpers.strip(b, !term.isAnsiSupported, !term.isColorEnabled) + } else (b, b.length) + } else (b, b.length) + if (len == toWrite.length) ps.write(toWrite) else ps.write(toWrite, 0, len) term.prompt match { case a: Prompt.AskUser => a.write(b) case _ => diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala index 6e1ddf277..7ff0c228a 100644 --- a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala +++ b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala @@ -930,7 +930,14 @@ object Terminal { } override def flush(): Unit = combinedOutputStream.flush() } - private def doWrite(bytes: Array[Byte]): Unit = withPrintStream { ps => + private def doWrite(rawBytes: Array[Byte]): Unit = withPrintStream { ps => + val (toWrite, len) = + if (rawBytes.contains(27.toByte)) { + if (!isAnsiSupported || !isColorEnabled) + EscHelpers.strip(rawBytes, stripAnsi = !isAnsiSupported, stripColor = !isColorEnabled) + else (rawBytes, rawBytes.length) + } else (rawBytes, rawBytes.length) + val bytes = if (len < toWrite.length) toWrite.take(len) else toWrite progressState.write(TerminalImpl.this, bytes, ps, hasProgress.get && !rawMode.get) } override private[sbt] val printStream: PrintStream = new LinePrintStream(outputStream) diff --git a/internal/util-logging/src/test/scala/sbt/internal/util/CleanStringSpec.scala b/internal/util-logging/src/test/scala/sbt/internal/util/CleanStringSpec.scala index 9939d668f..f734923a6 100644 --- a/internal/util-logging/src/test/scala/sbt/internal/util/CleanStringSpec.scala +++ b/internal/util-logging/src/test/scala/sbt/internal/util/CleanStringSpec.scala @@ -45,23 +45,25 @@ class CleanStringSpec extends FlatSpec { } it should "remove moves in string with only moves" in { val original = - new String(Array[Byte](27, 91, 50, 75, 27, 91, 51, 65, 27, 91, 49, 48, 48, 48, 68)) - assert(EscHelpers.stripMoves(original) == "") + Array[Byte](27, 91, 50, 75, 27, 91, 51, 65, 27, 91, 49, 48, 48, 48, 68) + val (bytes, len) = EscHelpers.strip(original, stripAnsi = true, stripColor = true) + assert(len == 0) } it should "remove moves in string with moves and letters" in { - val original = new String( + 
val original = Array[Byte](27, 91, 50, 75, 27, 91, 51, 65) ++ "foo".getBytes ++ Array[Byte](27, 91, 49, 48, 48, 48, 68) - ) - assert(EscHelpers.stripMoves(original) == "foo") + val (bytes, len) = EscHelpers.strip(original, stripAnsi = true, stripColor = true) + assert(new String(bytes, 0, len) == "foo") } it should "preserve colors" in { - val original = new String( + val original = Array[Byte](27, 91, 49, 48, 48, 48, 68, 27, 91, 48, 74, 102, 111, 111, 27, 91, 51, 54, 109, 62, 32, 27, 91, 48, 109) - ) // this is taken from an sbt prompt that looks like "foo> " with the > rendered blue + // this is taken from an sbt prompt that looks like "foo> " with the > rendered blue val colorArrow = new String(Array[Byte](27, 91, 51, 54, 109, 62)) - assert(EscHelpers.stripMoves(original) == "foo" + colorArrow + " " + scala.Console.RESET) + val (bytes, len) = EscHelpers.strip(original, stripAnsi = true, stripColor = false) + assert(new String(bytes, 0, len) == "foo" + colorArrow + " " + scala.Console.RESET) } it should "remove unusual escape characters" in { val original = new String( @@ -70,4 +72,28 @@ class CleanStringSpec extends FlatSpec { ) assert(EscHelpers.stripColorsAndMoves(original).isEmpty) } + it should "remove bracketed paste csi" in { + // taken from a test project prompt + val original = + Array[Byte](27, 91, 63, 50, 48, 48, 52, 104, 115, 98, 116, 58, 114, 101, 112, 114, 111, 62, + 32) + val (bytes, len) = EscHelpers.strip(original, stripAnsi = true, stripColor = false) + assert(new String(bytes, 0, len) == "sbt:repro> ") + } + it should "strip colors" in { + // taken from utest output + val original = + Array[Byte](91, 105, 110, 102, 111, 93, 32, 27, 91, 51, 50, 109, 43, 27, 91, 51, 57, 109, 32, + 99, 111, 109, 46, 97, 99, 109, 101, 46, 67, 111, 121, 111, 116, 101, 84, 101, 115, 116, 46, + 109, 97, 107, 101, 84, 114, 97, 112, 32, 27, 91, 50, 109, 57, 109, 115, 27, 91, 48, 109, 32, + 32, 27, 91, 48, 74, 10) + val (bytes, len) = EscHelpers.strip(original, stripAnsi = false, stripColor = true) + val expected = "[info] + com.acme.CoyoteTest.makeTrap 9ms " + + new String(Array[Byte](27, 91, 48, 74, 10)) + assert(new String(bytes, 0, len) == expected) + + val (bytes2, len2) = EscHelpers.strip(original, stripAnsi = true, stripColor = true) + val expected2 = "[info] + com.acme.CoyoteTest.makeTrap 9ms \n" + assert(new String(bytes2, 0, len2) == expected2) + } } From 3aeede3774b4cf6d41ef04f52e9e4e9573bf6e9e Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Sat, 24 Oct 2020 15:37:39 -0700 Subject: [PATCH 04/24] Flush terminal output stream after readline With the thin client, when running the command `exit`, it is often the case that the log message `[info] disconnected` is printed on the same line as the prompt. This is because there is a small flush delay on the network client's output stream channel that causes the disconnected info message to be logged before the the newline that jline 3 echoes to the client has been printed. To fix this we can manually flush the terminal output stream before exiting. 
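The fix amounts to the ordering sketched below (names are illustrative, not the
actual NetworkChannel members): drain whatever the terminal has buffered before
tearing the channel down, so a later log line cannot be written ahead of the
echoed newline.

    // Illustrative ordering only: flush buffered terminal output first, then
    // close, so the "[info] disconnected" message starts on a fresh line.
    def shutdownTerminal(out: java.io.OutputStream, close: () => Unit): Unit = {
      out.flush() // push the newline echoed by JLine before anything else is logged
      close()
    }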
---
 main/src/main/scala/sbt/internal/server/NetworkChannel.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala
index 5168b55bf..7df83c092 100644
--- a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala
+++ b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala
@@ -565,6 +565,7 @@ final class NetworkChannel(
       logShutdown: Boolean,
       remainingCommands: Option[(String, String)]
   ): Unit = {
+    doFlush()
     terminal.close()
     StandardMain.exchange.removeChannel(this)
     super.shutdown(logShutdown)

From c66f31d8a1404eb99ea7038aeffc2922a0812725 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Sun, 25 Oct 2020 15:07:24 -0700
Subject: [PATCH 05/24] Avoid throwing interrupted exception in JoinThread

I saw a stacktrace when exiting sbtn on Windows due to an interrupted exception
being thrown during thread joining. We only want to throw this exception if we
didn't successfully join the thread. I also noticed that we would try to join
the thread forever. There was supposed to be a time limit so that we would
eventually stop blocking even if we were unable to join the thread. The limit
was set but not respected.
---
 .../src/main/scala/sbt/internal/util/JoinThread.scala | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/main-command/src/main/scala/sbt/internal/util/JoinThread.scala b/main-command/src/main/scala/sbt/internal/util/JoinThread.scala
index e2bee2f07..0f70a902e 100644
--- a/main-command/src/main/scala/sbt/internal/util/JoinThread.scala
+++ b/main-command/src/main/scala/sbt/internal/util/JoinThread.scala
@@ -20,11 +20,13 @@ object JoinThread {
         t.interrupt()
         t.join(10)
       } catch { case e: InterruptedException => exception = Some(e) }
-      if (t.isAlive) impl()
+      if (t.isAlive && !deadline.isOverdue) impl()
     }
     impl()
-    if (t.isAlive) System.err.println(s"Unable to join thread $t after $duration")
-    exception.foreach(throw _)
+    if (t.isAlive) {
+      System.err.println(s"Unable to join thread $t after $duration")
+      exception.foreach(throw _)
+    }
     }
   }
 }

From d255481adeec51e4e5c32675df7ac5f5c6f7ad22 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Sun, 25 Oct 2020 19:02:49 -0700
Subject: [PATCH 06/24] Replace %20 with space in sbt script name

Spaces in the sbt script name are replaced with %20 when the path is passed as
an argument, so we need to convert the %20s back to spaces when parsing it.
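Concretely, the decoding is just a literal replacement; for example (the path
below is made up for illustration):

    // Illustrative only: a space-containing script path arrives %20-encoded
    // and is decoded back before use.
    val rawArg    = "--sbt-script=C:/Program%20Files/sbt/bin/sbt.bat" // hypothetical input
    val sbtScript = rawArg.stripPrefix("--sbt-script=").replaceAllLiterally("%20", " ")
    // sbtScript == "C:/Program Files/sbt/bin/sbt.bat"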
--- .../main/scala/sbt/internal/client/NetworkClient.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala index 8678a401a..17d5e87fb 100644 --- a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala +++ b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala @@ -1032,10 +1032,14 @@ object NetworkClient { case a if a == noStdErr || a == noTab || a.startsWith(completions) => completionArguments += a case a if a.startsWith("--sbt-script=") => - sbtScript = a.split("--sbt-script=").lastOption.getOrElse(sbtScript) + sbtScript = a + .split("--sbt-script=") + .lastOption + .map(_.replaceAllLiterally("%20", " ")) + .getOrElse(sbtScript) case "--sbt-script" if i + 1 < sanitized.length => i += 1 - sbtScript = sanitized(i) + sbtScript = sanitized(i).replaceAllLiterally("%20", " ") case a if !a.startsWith("-") => commandArgs += a case a @ SysProp(key, value) => System.setProperty(key, value) From 2425ca4950f766df8db443fe7a9f64d74d354113 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 21 Oct 2020 15:41:23 +0200 Subject: [PATCH 07/24] handle fake positions in absoluteSourceMapper --- main/src/main/scala/sbt/Defaults.scala | 31 +++++++++++++++++--------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 591169545..97306ae5e 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -21,6 +21,7 @@ import org.apache.logging.log4j.core.{ Appender => XAppender } import org.scalasbt.ipcsocket.Win32SecurityLevel import sbt.Def.{ Initialize, ScopedKey, Setting, SettingsDefinition } import sbt.Keys._ +import sbt.OptionSyntax._ import sbt.Project.{ inConfig, inScope, @@ -91,6 +92,7 @@ import xsbti.{ FileConverter, Position } import scala.collection.immutable.ListMap import scala.concurrent.duration._ +import scala.util.Try import scala.util.control.NonFatal import scala.xml.NodeSeq @@ -415,7 +417,7 @@ object Defaults extends BuildCommon { sourcePositionMappers ++= { val fc = fileConverter.value if (reportAbsolutePath.value) { - List(toAbsoluteSourceMapper(fc)) + List(toAbsoluteSourceMapper(fc) _) } else Nil }, // The virtual file value cache needs to be global or sbt will run out of direct byte buffer memory. 
@@ -464,13 +466,21 @@ object Defaults extends BuildCommon { }, ) - private[sbt] def toAbsoluteSourceMapper(fc: FileConverter): Position => Option[Position] = { - pos => - val newPath: Optional[String] = pos.sourcePath - .map { id => - fc.toPath(VirtualFileRef.of(id)).toAbsolutePath.toString - } - Some( + private[sbt] def toAbsoluteSourceMapper(fc: FileConverter)(pos: Position): Option[Position] = { + def isValid(path: String): Boolean = { + Try(Paths.get(path)).map(_ => true).getOrElse(false) + } + + val newPath: Option[String] = pos + .sourcePath() + .asScala + .filter(isValid) + .map { path => + fc.toPath(VirtualFileRef.of(path)).toAbsolutePath.toString + } + + newPath + .map { path => new Position { override def line(): Optional[Integer] = pos.line() @@ -482,11 +492,12 @@ object Defaults extends BuildCommon { override def pointerSpace(): Optional[String] = pos.pointerSpace() - override def sourcePath(): Optional[String] = newPath + override def sourcePath(): Optional[String] = Optional.of(path) override def sourceFile(): Optional[File] = pos.sourceFile() } - ) + } + .orElse(Some(pos)) } // csrCacheDirectory is scoped to ThisBuild to allow customization. From d51057db4f8ecc1a0c272801e2bfe8aa5fb685a5 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 22 Oct 2020 12:22:01 +0200 Subject: [PATCH 08/24] Add tests on sourcePositionMappers --- .../sbt-test/reporter/source-mapper/build.sbt | 46 +++++++++++++++++++ .../source-mapper/src/main/scala/Foo.scala | 0 sbt/src/sbt-test/reporter/source-mapper/test | 2 + 3 files changed, 48 insertions(+) create mode 100644 sbt/src/sbt-test/reporter/source-mapper/build.sbt create mode 100644 sbt/src/sbt-test/reporter/source-mapper/src/main/scala/Foo.scala create mode 100644 sbt/src/sbt-test/reporter/source-mapper/test diff --git a/sbt/src/sbt-test/reporter/source-mapper/build.sbt b/sbt/src/sbt-test/reporter/source-mapper/build.sbt new file mode 100644 index 000000000..3ca4753ed --- /dev/null +++ b/sbt/src/sbt-test/reporter/source-mapper/build.sbt @@ -0,0 +1,46 @@ +import java.util.Optional +import xsbti.Position + +val assertAbsolutePathConversion = taskKey[Unit]("checks source mappers convert to absolute path") + +val assertHandleFakePos = taskKey[Unit]("checks source mappers handle fake position") + +assertAbsolutePathConversion := { + val converter = fileConverter.value + val source = (Compile/sources).value.head + val position = newPosition(converter.toVirtualFile(source.toPath).id, source) + val mappedPos = sourcePositionMappers.value + .foldLeft(Option(position)) { + case (pos, mapper) => pos.flatMap(mapper) + } + assert { + mappedPos.get.sourcePath.asScala.contains(source.getAbsolutePath) + } +} + +assertHandleFakePos := { + val position = newPosition("", new File("")) + val mappedPos = sourcePositionMappers.value + .foldLeft(Option(position)) { + case (pos, mapper) => pos.flatMap(mapper) + } + assert { + mappedPos.get.sourcePath.asScala.get.contains("") + } +} + +def newPosition(path: String, file: File): Position = new Position { + override def line(): Optional[Integer] = Optional.empty() + + override def lineContent() = "" + + override def offset(): Optional[Integer] = Optional.empty() + + override def pointer(): Optional[Integer] = Optional.empty() + + override def pointerSpace(): Optional[String] = Optional.empty() + + override def sourcePath(): Optional[String] = Optional.of(path) + + override def sourceFile(): Optional[File] = Optional.of(file) +} \ No newline at end of file diff --git 
a/sbt/src/sbt-test/reporter/source-mapper/src/main/scala/Foo.scala b/sbt/src/sbt-test/reporter/source-mapper/src/main/scala/Foo.scala new file mode 100644 index 000000000..e69de29bb diff --git a/sbt/src/sbt-test/reporter/source-mapper/test b/sbt/src/sbt-test/reporter/source-mapper/test new file mode 100644 index 000000000..ced01bedc --- /dev/null +++ b/sbt/src/sbt-test/reporter/source-mapper/test @@ -0,0 +1,2 @@ +> assertAbsolutePathConversion +> assertHandleFakePos \ No newline at end of file From e4b93182baaf1cb296cbcf38817742c8691d53ff Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 22 Oct 2020 13:28:53 +0200 Subject: [PATCH 09/24] Test Source Mapper on Windows --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index c7fbdcf8b..b068a53f7 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -157,4 +157,4 @@ for: test_script: # The server tests often fail in CI when run together so just run a single test to ensure # that the thin client works on windows - - sbt "-Dsbt.io.virtual=false" "scripted actions/* classloader-cache/* nio/* watch/*" "serverTestProj/testOnly testpkg.ClientTest" + - sbt "-Dsbt.io.virtual=false" "scripted actions/* reporter/source-mapper classloader-cache/* nio/* watch/*" "serverTestProj/testOnly testpkg.ClientTest" From 66f4032699d69f6b854d43fe0de92a190ede78da Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 22 Oct 2020 11:09:00 +0200 Subject: [PATCH 10/24] Fix BuildServerReporter and add tests --- .../internal/server/BuildServerReporter.scala | 16 +++--- .../src/server-test/buildserver/build.sbt | 12 +++-- .../foo/src/test/scala/foo/FooTest.scala | 9 ---- .../src/main/scala/reporterror/Error.scala | 5 ++ .../main/scala/reportwarning/Warning.scala | 7 +++ .../src/main/scala/main/Main.scala} | 4 +- .../src/test/scala/tests}/FailingTest.scala | 2 +- .../src/test/scala/tests/PassingTest.scala | 9 ++++ .../test/scala/testpkg/BuildServerTest.scala | 49 +++++++++++++++---- 9 files changed, 80 insertions(+), 33 deletions(-) delete mode 100644 server-test/src/server-test/buildserver/foo/src/test/scala/foo/FooTest.scala create mode 100644 server-test/src/server-test/buildserver/report-error/src/main/scala/reporterror/Error.scala create mode 100644 server-test/src/server-test/buildserver/report-warning/src/main/scala/reportwarning/Warning.scala rename server-test/src/server-test/buildserver/{foo/src/main/scala/foo/FooMain.scala => run-and-test/src/main/scala/main/Main.scala} (59%) rename server-test/src/server-test/buildserver/{foo/src/test/scala/foo => run-and-test/src/test/scala/tests}/FailingTest.scala (90%) create mode 100644 server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/PassingTest.scala diff --git a/main/src/main/scala/sbt/internal/server/BuildServerReporter.scala b/main/src/main/scala/sbt/internal/server/BuildServerReporter.scala index c6c95106c..164f03832 100644 --- a/main/src/main/scala/sbt/internal/server/BuildServerReporter.scala +++ b/main/src/main/scala/sbt/internal/server/BuildServerReporter.scala @@ -7,6 +7,8 @@ package sbt.internal.server +import java.nio.file.Path + import sbt.StandardMain import sbt.internal.bsp._ import sbt.internal.util.ManagedLogger @@ -69,7 +71,7 @@ final class BuildServerReporterImpl( import sbt.internal.inc.JavaInterfaceUtil._ private lazy val exchange = StandardMain.exchange - private val problemsByFile = mutable.Map[VirtualFileRef, Vector[Diagnostic]]() + private val problemsByFile = mutable.Map[Path, Vector[Diagnostic]]() 
override def sendSuccessReport(analysis: CompileAnalysis): Unit = { for { @@ -90,9 +92,8 @@ final class BuildServerReporterImpl( override def sendFailureReport(sources: Array[VirtualFile]): Unit = { for (source <- sources) { - val ref = VirtualFileRef.of(source.id()) - val diagnostics = problemsByFile.getOrElse(ref, Vector()) val filePath = converter.toPath(source) + val diagnostics = problemsByFile.getOrElse(filePath, Vector()) val params = PublishDiagnosticsParams( textDocument = TextDocumentIdentifier(filePath.toUri), buildTarget, @@ -106,14 +107,13 @@ final class BuildServerReporterImpl( protected override def publishDiagnostic(problem: Problem): Unit = { for { - path <- problem.position().sourcePath.toOption - source <- problem.position.sourceFile.toOption + id <- problem.position.sourcePath.toOption diagnostic <- toDiagnostic(problem) } { - val fileId = VirtualFileRef.of(path) - problemsByFile(fileId) = problemsByFile.getOrElse(fileId, Vector()) :+ diagnostic + val filePath = converter.toPath(VirtualFileRef.of(id)) + problemsByFile(filePath) = problemsByFile.getOrElse(filePath, Vector()) :+ diagnostic val params = PublishDiagnosticsParams( - TextDocumentIdentifier(source.toURI), + TextDocumentIdentifier(filePath.toUri), buildTarget, originId = None, Vector(diagnostic), diff --git a/server-test/src/server-test/buildserver/build.sbt b/server-test/src/server-test/buildserver/build.sbt index fbbfc556b..e1a151256 100644 --- a/server-test/src/server-test/buildserver/build.sbt +++ b/server-test/src/server-test/buildserver/build.sbt @@ -2,13 +2,17 @@ ThisBuild / scalaVersion := "2.13.1" Global / serverLog / logLevel := Level.Debug -lazy val root = (project in file(".")) - .aggregate(foo, util) - -lazy val foo = project.in(file("foo")) +lazy val runAndTest = project.in(file("run-and-test")) .settings( libraryDependencies += "org.scalatest" %% "scalatest" % "3.0.8" % "test", ) .dependsOn(util) +lazy val reportError = project.in(file("report-error")) + +lazy val reportWarning = project.in(file("report-warning")) + .settings( + scalacOptions += "-deprecation" + ) + lazy val util = project diff --git a/server-test/src/server-test/buildserver/foo/src/test/scala/foo/FooTest.scala b/server-test/src/server-test/buildserver/foo/src/test/scala/foo/FooTest.scala deleted file mode 100644 index 1874da8c1..000000000 --- a/server-test/src/server-test/buildserver/foo/src/test/scala/foo/FooTest.scala +++ /dev/null @@ -1,9 +0,0 @@ -package foo - -import org.scalatest.FreeSpec - -class FooTest extends FreeSpec { - "test message" in { - assert(FooMain.message == "Hello World!") - } -} \ No newline at end of file diff --git a/server-test/src/server-test/buildserver/report-error/src/main/scala/reporterror/Error.scala b/server-test/src/server-test/buildserver/report-error/src/main/scala/reporterror/Error.scala new file mode 100644 index 000000000..496e48b8e --- /dev/null +++ b/server-test/src/server-test/buildserver/report-error/src/main/scala/reporterror/Error.scala @@ -0,0 +1,5 @@ +package reportertests + +object Error { + val version: String = 5 +} diff --git a/server-test/src/server-test/buildserver/report-warning/src/main/scala/reportwarning/Warning.scala b/server-test/src/server-test/buildserver/report-warning/src/main/scala/reportwarning/Warning.scala new file mode 100644 index 000000000..6a9554a70 --- /dev/null +++ b/server-test/src/server-test/buildserver/report-warning/src/main/scala/reportwarning/Warning.scala @@ -0,0 +1,7 @@ +package reportertests + +object Warning { + def print() { + prtinln("bar") + } 
+} diff --git a/server-test/src/server-test/buildserver/foo/src/main/scala/foo/FooMain.scala b/server-test/src/server-test/buildserver/run-and-test/src/main/scala/main/Main.scala similarity index 59% rename from server-test/src/server-test/buildserver/foo/src/main/scala/foo/FooMain.scala rename to server-test/src/server-test/buildserver/run-and-test/src/main/scala/main/Main.scala index 348be2497..8ccc2eb6c 100644 --- a/server-test/src/server-test/buildserver/foo/src/main/scala/foo/FooMain.scala +++ b/server-test/src/server-test/buildserver/run-and-test/src/main/scala/main/Main.scala @@ -1,6 +1,6 @@ -package foo +package main -object FooMain extends App { +object Main extends App { lazy val message = "Hello World!" println(message) diff --git a/server-test/src/server-test/buildserver/foo/src/test/scala/foo/FailingTest.scala b/server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/FailingTest.scala similarity index 90% rename from server-test/src/server-test/buildserver/foo/src/test/scala/foo/FailingTest.scala rename to server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/FailingTest.scala index 886d3d6aa..ee03bfbb4 100644 --- a/server-test/src/server-test/buildserver/foo/src/test/scala/foo/FailingTest.scala +++ b/server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/FailingTest.scala @@ -1,4 +1,4 @@ -package foo +package tests import org.scalatest.FreeSpec diff --git a/server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/PassingTest.scala b/server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/PassingTest.scala new file mode 100644 index 000000000..9dab63ca9 --- /dev/null +++ b/server-test/src/server-test/buildserver/run-and-test/src/test/scala/tests/PassingTest.scala @@ -0,0 +1,9 @@ +package tests + +import org.scalatest.FreeSpec + +class PassingTest extends FreeSpec { + "test message" in { + assert(main.Main.message == "Hello World!") + } +} \ No newline at end of file diff --git a/server-test/src/test/scala/testpkg/BuildServerTest.scala b/server-test/src/test/scala/testpkg/BuildServerTest.scala index c6d9c75ec..3f0a7acea 100644 --- a/server-test/src/test/scala/testpkg/BuildServerTest.scala +++ b/server-test/src/test/scala/testpkg/BuildServerTest.scala @@ -86,7 +86,7 @@ object BuildServerTest extends AbstractServerTest { } test("buildTarget/scalaMainClasses") { _ => - val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#foo/Compile" + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#runAndTest/Compile" svr.sendJsonRpc( s"""{ "jsonrpc": "2.0", "id": "16", "method": "buildTarget/scalaMainClasses", "params": { | "targets": [{ "uri": "$x" }] @@ -95,17 +95,17 @@ object BuildServerTest extends AbstractServerTest { assert(svr.waitForString(30.seconds) { s => println(s) (s contains """"id":"16"""") && - (s contains """"class":"foo.FooMain"""") + (s contains """"class":"main.Main"""") }) } test("buildTarget/run") { _ => - val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#foo/Compile" + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#runAndTest/Compile" svr.sendJsonRpc( s"""{ "jsonrpc": "2.0", "id": "17", "method": "buildTarget/run", "params": { | "target": { "uri": "$x" }, | "dataKind": "scala-main-class", - | "data": { "class": "foo.FooMain" } + | "data": { "class": "main.Main" } |} }""".stripMargin ) assert(svr.waitForString(10.seconds) { s => @@ -121,7 +121,7 @@ object BuildServerTest extends AbstractServerTest { } test("buildTarget/scalaTestClasses") { _ => - val x = 
s"${svr.baseDirectory.getAbsoluteFile.toURI}#foo/Test" + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#runAndTest/Test" svr.sendJsonRpc( s"""{ "jsonrpc": "2.0", "id": "18", "method": "buildTarget/scalaTestClasses", "params": { | "targets": [{ "uri": "$x" }] @@ -130,12 +130,13 @@ object BuildServerTest extends AbstractServerTest { assert(svr.waitForString(10.seconds) { s => println(s) (s contains """"id":"18"""") && - (s contains """"classes":["foo.FailingTest","foo.FooTest"]""") + (s contains """"tests.FailingTest"""") && + (s contains """"tests.PassingTest"""") }) } test("buildTarget/test: run all tests") { _ => - val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#foo/Test" + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#runAndTest/Test" svr.sendJsonRpc( s"""{ "jsonrpc": "2.0", "id": "19", "method": "buildTarget/test", "params": { | "targets": [{ "uri": "$x" }] @@ -149,7 +150,7 @@ object BuildServerTest extends AbstractServerTest { } test("buildTarget/test: run one test class") { _ => - val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#foo/Test" + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#runAndTest/Test" svr.sendJsonRpc( s"""{ "jsonrpc": "2.0", "id": "20", "method": "buildTarget/test", "params": { | "targets": [{ "uri": "$x" }], @@ -158,7 +159,7 @@ object BuildServerTest extends AbstractServerTest { | "testClasses": [ | { | "target": { "uri": "$x" }, - | "classes": ["foo.FooTest"] + | "classes": ["tests.PassingTest"] | } | ] | } @@ -171,6 +172,36 @@ object BuildServerTest extends AbstractServerTest { }) } + test("buildTarget/compile: report error") { _ => + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#reportError/Compile" + svr.sendJsonRpc( + s"""{ "jsonrpc": "2.0", "id": "21", "method": "buildTarget/compile", "params": { + | "targets": [{ "uri": "$x" }] + |} }""".stripMargin + ) + assert(svr.waitForString(10.seconds) { s => + println(s) + (s contains s""""buildTarget":{"uri":"$x"}""") && + (s contains """"severity":1""") && + (s contains """"reset":true""") + }) + } + + test("buildTarget/compile: report warning") { _ => + val x = s"${svr.baseDirectory.getAbsoluteFile.toURI}#reportWarning/Compile" + svr.sendJsonRpc( + s"""{ "jsonrpc": "2.0", "id": "22", "method": "buildTarget/compile", "params": { + | "targets": [{ "uri": "$x" }] + |} }""".stripMargin + ) + assert(svr.waitForString(10.seconds) { s => + println(s) + (s contains s""""buildTarget":{"uri":"$x"}""") && + (s contains """"severity":2""") && + (s contains """"reset":true""") + }) + } + def initializeRequest(): Unit = { svr.sendJsonRpc( """{ "jsonrpc": "2.0", "id": "10", "method": "build/initialize", From 3ca7951d13f95b44b5c744e09aec90e3d7584a1f Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Mon, 26 Oct 2020 15:42:13 +0100 Subject: [PATCH 11/24] Add serverIdleTimeout to the list of excluded lint keys --- main/src/main/scala/sbt/internal/LintUnused.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/main/src/main/scala/sbt/internal/LintUnused.scala b/main/src/main/scala/sbt/internal/LintUnused.scala index 3eb0a7398..d29993b18 100644 --- a/main/src/main/scala/sbt/internal/LintUnused.scala +++ b/main/src/main/scala/sbt/internal/LintUnused.scala @@ -38,6 +38,7 @@ object LintUnused { onUnload, sbt.nio.Keys.watchTriggers, serverConnectionType, + serverIdleTimeout, shellPrompt, ), includeLintKeys := Set( From beab10fc64bc120257f237ce59767899837d1e27 Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Sun, 25 Oct 2020 11:53:21 -0700 Subject: [PATCH 12/24] Add wizard for installing sbtn and 
completions This commit adds a wizard for installing sbtn along with tab completions for bash, fish, powershell and zsh. It introduces the `installSbtn` command which installs sbtn into ~/.sbt/1.0/bin/sbtn(.exe) depending on the platform. It also can optionally install completions. The completions are installed into ~/.sbt/1.0/completions. The sbtn native executable is installed by downloading the sbt universal zip for the version (which can be provided as an input argument with a fallback to the running sbt version) and extracting the platform specific binary into ~/.sbt/1.0/bin. After installing the executable, it offers to setup the path and completions for the four shells. With the user's consent, it adds a line to the shell config that updates the path to include ~/.sbt/1.0/bin and another line to source the appropriate completion file for the shell from ~/.sbt/1.0/completions. --- main/src/main/scala/sbt/Defaults.scala | 2 + .../main/scala/sbt/internal/InstallSbtn.scala | 226 ++++++++++++++++++ .../scala/sbt/internal/TaskProgress.scala | 1 + .../scala/sbt/internal/InstallSbtnSpec.scala | 66 +++++ 4 files changed, 295 insertions(+) create mode 100644 main/src/main/scala/sbt/internal/InstallSbtn.scala create mode 100644 main/src/test/scala/sbt/internal/InstallSbtnSpec.scala diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 46b81844c..2f4b7ebaa 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -391,6 +391,8 @@ object Defaults extends BuildCommon { canonicalInput :== true, echoInput :== true, terminal := state.value.get(terminalKey).getOrElse(Terminal(ITerminal.get)), + InstallSbtn.installSbtn := InstallSbtn.installSbtnImpl.evaluated, + InstallSbtn.installSbtn / aggregate := false, ) ++ LintUnused.lintSettings ++ DefaultBackgroundJobService.backgroundJobServiceSettings ++ RemoteCache.globalSettings diff --git a/main/src/main/scala/sbt/internal/InstallSbtn.scala b/main/src/main/scala/sbt/internal/InstallSbtn.scala new file mode 100644 index 000000000..271a9a0cf --- /dev/null +++ b/main/src/main/scala/sbt/internal/InstallSbtn.scala @@ -0,0 +1,226 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal + +import Def._ +import Keys.{ sbtVersion, state, terminal } + +import java.io.{ File, FileInputStream, FileOutputStream, InputStream, IOException } +import java.net.URL +import java.nio.file.{ Files, Path } +import java.util.zip.ZipInputStream +import sbt.io.IO +import sbt.io.Path.userHome +import sbt.io.syntax._ +import scala.util.{ Properties, Try } + +private[sbt] object InstallSbtn { + private[sbt] val installSbtn = + Def.inputKey[Unit]("install sbtn and tab completions").withRank(KeyRanks.BTask) + private[sbt] def installSbtnImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask { + val inputVersion = Def.spaceDelimited("version").parsed.headOption + val version = inputVersion.getOrElse(sbtVersion.value.replaceAllLiterally("-SNAPSHOT", "")) + val term = terminal.value + term.setMode(canonical = false, echo = false) + val baseDirectory = BuildPaths.getGlobalBase(state.value).toPath + val tmp = Files.createTempFile(s"sbt-$version", "zip") + val sbtn = if (Properties.isWin) "sbtn.exe" else "sbtn" + try extractSbtn(term, version, tmp, baseDirectory.resolve("bin").resolve(sbtn)) + finally { + Files.deleteIfExists(tmp) + () + } + val shell = if (System.console != null) getShell(term) else "none" + shell match { + case "none" => + case s => + val completion = shellCompletions(s) + val completionLocation = baseDirectory.resolve("completions").resolve(completion) + downloadCompletion(completion, version, completionLocation) + s match { + case "bash" => setupBash(baseDirectory, term) + case "fish" => setupFish(baseDirectory, term) + case "zsh" => setupZsh(baseDirectory, term) + case "powershell" => setupPowershell(baseDirectory, term) + case _ => // should be unreachable + } + val msg = s"Successfully installed sbtn for $s. You may need to restart $s for the " + + "changes to take effect." 
+ term.printStream.println(msg) + } + () + } + + private[sbt] def extractSbtn(term: Terminal, version: String, sbtZip: Path, sbtn: Path): Unit = { + downloadRelease(term, version, sbtZip) + Files.createDirectories(sbtn.getParent) + val bin = + if (Properties.isWin) "pc-win32.exe" + else if (Properties.isLinux) "pc-linux" + else "apple-darwin" + val sbtnName = s"sbt/bin/sbtn-x86_64-$bin" + val fis = new FileInputStream(sbtZip.toFile) + val zipInputStream = new ZipInputStream(fis) + var foundBinary = false + try { + var entry = zipInputStream.getNextEntry + while (entry != null) { + if (entry.getName == sbtnName) { + foundBinary = true + term.printStream.println(s"extracting $sbtZip!$sbtnName to $sbtn") + transfer(zipInputStream, sbtn) + sbtn.toFile.setExecutable(true) + entry = null + } else { + entry = zipInputStream.getNextEntry + } + } + if (!foundBinary) throw new IllegalStateException(s"couldn't find $sbtnName in $sbtZip") + } finally { + fis.close() + zipInputStream.close() + } + () + } + private[this] def downloadRelease(term: Terminal, version: String, location: Path): Unit = { + val zip = s"https://github.com/sbt/sbt/releases/download/v$version/sbt-$version.zip" + val url = new URL(zip) + term.printStream.println(s"downloading $zip to $location") + transfer(url.openStream(), location) + } + private[this] def transfer(inputStream: InputStream, path: Path): Unit = + try { + val os = new FileOutputStream(path.toFile) + try { + val result = new Array[Byte](1024 * 1024) + var bytesRead = -1 + do { + bytesRead = inputStream.read(result) + if (bytesRead > 0) os.write(result, 0, bytesRead) + } while (bytesRead > 0) + } finally os.close() + } finally inputStream.close() + private[this] def getShell(term: Terminal): String = { + term.printStream.print(s"""Setup sbtn for shell: + | [1] bash + | [2] fish + | [3] powershell + | [4] zsh + | [5] none + |Enter option: """.stripMargin) + term.printStream.flush() + val key = term.inputStream.read + term.printStream.println(key.toChar) + key match { + case 49 => "bash" + case 50 => "fish" + case 51 => "powershell" + case 52 => "zsh" + case _ => "none" + } + } + private[this] def downloadCompletion(completion: String, version: String, target: Path): Unit = { + Files.createDirectories(target.getParent) + val comp = s"https://raw.githubusercontent.com/sbt/sbt/v$version/client/completions/$completion" + transfer(new URL(comp).openStream, target) + } + private[this] def setupShell( + shell: String, + baseDirectory: Path, + term: Terminal, + configFile: File, + setPath: Path => String, + setCompletions: Path => String, + ): Unit = { + val bin = baseDirectory.resolve("bin") + val export = setPath(bin) + val completions = baseDirectory.resolve("completions") + val sourceCompletions = setCompletions(completions) + val contents = try IO.read(configFile) + catch { case _: IOException => "" } + if (!contents.contains(export)) { + term.printStream.print(s"Add $bin to PATH in $configFile? y/n (y default): ") + term.printStream.flush() + term.inputStream.read() match { + case 110 => term.printStream.println() + case c => + term.printStream.println(c.toChar) + // put the export at the bottom so that the ~/.sbt/1.0/bin/sbtn is least preferred + // but still on the path + IO.write(configFile, s"$contents\n$export") + } + } + val newContents = try IO.read(configFile) + catch { case _: IOException => "" } + if (!newContents.contains(sourceCompletions)) { + term.printStream.print(s"Add tab completions to $configFile? 
y/n (y default): ") + term.printStream.flush() + term.inputStream.read() match { + case 110 => + case c => + term.printStream.println(c.toChar) + if (shell == "zsh") { + // delete the .zcompdump file because it can prevent the new completions from + // being recognized + Files.deleteIfExists((userHome / ".zcompdump").toPath) + // put the completions at the top because it is effectively just a source + // so the order in the file doesn't really matter but we want to make sure + // that we set fpath before any autoload command in zsh + IO.write(configFile, s"$sourceCompletions\n$newContents") + } else { + IO.write(configFile, s"$newContents\n$sourceCompletions") + } + } + term.printStream.println() + } + } + private[this] def setupBash(baseDirectory: Path, term: Terminal): Unit = + setupShell( + "bash", + baseDirectory, + term, + userHome / ".bashrc", + bin => s"export PATH=$$PATH:$bin", + completions => s"source $completions/sbtn.bash" + ) + private[this] def setupZsh(baseDirectory: Path, term: Terminal): Unit = { + val comp = (completions: Path) => { + "# The following two lines were added by the sbt installSbtn task:\n" + + s"fpath=($$fpath $completions)\nautoload -Uz compinit; compinit" + } + setupShell("zsh", baseDirectory, term, userHome / ".zshrc", bin => s"path=($$path $bin)", comp) + } + private[this] def setupFish(baseDirectory: Path, term: Terminal): Unit = { + val comp = (completions: Path) => s"source $completions/sbtn.fish" + val path = (bin: Path) => s"set PATH $$PATH $bin" + val config = userHome / ".config" / "fish" / "config.fish" + setupShell("fish", baseDirectory, term, config, path, comp) + } + private[this] def setupPowershell(baseDirectory: Path, term: Terminal): Unit = { + val comp = (completions: Path) => s""". "$completions\\sbtn.ps1"""" + val path = (bin: Path) => s"""$$env:Path += ";$bin"""" + import scala.sys.process._ + Try(Seq("pwsh", "-Command", "echo $PROFILE").!!).foreach { output => + output.linesIterator.toSeq.headOption.foreach { l => + setupShell("pwsh", baseDirectory, term, new File(l), path, comp) + } + } + Try(Seq("powershell", "-Command", "echo $PROFILE").!!).foreach { output => + output.linesIterator.toSeq.headOption.foreach { l => + setupShell("pwsh", baseDirectory, term, new File(l), path, comp) + } + } + } + private[this] val shellCompletions = Map( + "bash" -> "sbtn.bash", + "fish" -> "sbtn.fish", + "powershell" -> "sbtn.ps1", + "zsh" -> "_sbtn", + ) +} diff --git a/main/src/main/scala/sbt/internal/TaskProgress.scala b/main/src/main/scala/sbt/internal/TaskProgress.scala index f8308c5ef..cf9ce2e88 100644 --- a/main/src/main/scala/sbt/internal/TaskProgress.scala +++ b/main/src/main/scala/sbt/internal/TaskProgress.scala @@ -139,6 +139,7 @@ private[sbt] class TaskProgress( } private[this] val skipReportTasks = Set( + "installSbtn", "run", "runMain", "bgRun", diff --git a/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala b/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala new file mode 100644 index 000000000..74c78858c --- /dev/null +++ b/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala @@ -0,0 +1,66 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. 
+ * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal + +import java.io.{ InputStream, OutputStream, PrintStream } +import java.lang.ProcessBuilder +import java.lang.ProcessBuilder.Redirect +import java.nio.file.{ Files, Path } +import java.util.concurrent.TimeUnit +import org.scalatest.FlatSpec +import sbt.io.IO + +class InstallSbtnSpec extends FlatSpec { + private def withTemp[R](ext: String)(f: Path => R): R = { + val tmp = Files.createTempFile("sbt-1.4.1-", ext) + try f(tmp) + finally { + Files.deleteIfExists(tmp) + () + } + } + private[this] val term = new Terminal { + def getHeight: Int = 0 + def getWidth: Int = 0 + def inputStream: InputStream = () => -1 + def printStream: PrintStream = new PrintStream((_ => {}): OutputStream) + def setMode(canonical: Boolean, echo: Boolean): Unit = {} + + } + "InstallSbtn" should "extract native sbtn" in + withTemp(".zip") { tmp => + withTemp(".exe") { sbtn => + InstallSbtn.extractSbtn(term, "1.4.1", tmp, sbtn) + val tmpDir = Files.createTempDirectory("sbtn-test").toRealPath() + Files.createDirectories(tmpDir.resolve("project")) + val foo = tmpDir.resolve("foo") + val fooPath = foo.toString.replaceAllLiterally("\\", "\\\\") + val build = s"""TaskKey[Unit]("foo") := IO.write(file("$fooPath"), "foo")""" + IO.write(tmpDir.resolve("build.sbt").toFile, build) + IO.write( + tmpDir.resolve("project").resolve("build.properties").toFile, + "sbt.version=1.4.1" + ) + try { + val proc = + new ProcessBuilder(sbtn.toString, "foo;shutdown") + .redirectInput(Redirect.INHERIT) + .redirectOutput(Redirect.INHERIT) + .redirectError(Redirect.INHERIT) + .directory(tmpDir.toFile) + .start() + proc.waitFor(1, TimeUnit.MINUTES) + assert(proc.exitValue == 0) + assert(IO.read(foo.toFile) == "foo") + } finally { + sbt.io.IO.delete(tmpDir.toFile) + } + } + } +} From 37e4dc5318d5dc9adf1de1efe8a874630e63791f Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Sun, 25 Oct 2020 20:05:09 -0700 Subject: [PATCH 13/24] Disable InstallSbtnSpec This test works fine locally on all platforms but there are issues in CI. I think that it might work ok with 1.4.2 without a lot of extra effort so I'm going to disable it for now. --- main/src/test/scala/sbt/internal/InstallSbtnSpec.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala b/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala index 74c78858c..3580007ed 100644 --- a/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala +++ b/main/src/test/scala/sbt/internal/InstallSbtnSpec.scala @@ -33,7 +33,8 @@ class InstallSbtnSpec extends FlatSpec { def setMode(canonical: Boolean, echo: Boolean): Unit = {} } - "InstallSbtn" should "extract native sbtn" in + // This test has issues in ci but runs ok locally on all platforms + "InstallSbtn" should "extract native sbtn" ignore withTemp(".zip") { tmp => withTemp(".exe") { sbtn => InstallSbtn.extractSbtn(term, "1.4.1", tmp, sbtn) From 0d69705e7338de25d53ef3bfdf2f0dbe9bb8575f Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Mon, 26 Oct 2020 13:56:23 -0700 Subject: [PATCH 14/24] Refactor BspClient This is a refactoring so that the thin client can invoke the bsp client. 
--- .../scala/sbt/internal/client/BspClient.scala | 54 ++++++++++--------- 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/main-command/src/main/scala/sbt/internal/client/BspClient.scala b/main-command/src/main/scala/sbt/internal/client/BspClient.scala index d9568304c..4ab8d99e5 100644 --- a/main-command/src/main/scala/sbt/internal/client/BspClient.scala +++ b/main-command/src/main/scala/sbt/internal/client/BspClient.scala @@ -9,6 +9,7 @@ package sbt.internal.client import java.io.{ File, InputStream, OutputStream } import java.net.Socket +import java.util.concurrent.atomic.AtomicBoolean import sbt.Exit import sbt.io.syntax._ @@ -18,18 +19,38 @@ import scala.sys.process.Process import scala.util.control.NonFatal class BspClient private (sbtServer: Socket) { - private val lock = new AnyRef - private var terminated = false - private def transferTo(input: InputStream, output: OutputStream): Thread = { + private def run(): Exit = Exit(BspClient.bspRun(sbtServer)) +} + +object BspClient { + private[sbt] def bspRun(sbtServer: Socket): Int = { + val lock = new AnyRef + val terminated = new AtomicBoolean(false) + transferTo(terminated, lock, sbtServer.getInputStream, System.out).start() + transferTo(terminated, lock, System.in, sbtServer.getOutputStream).start() + try { + lock.synchronized { + while (!terminated.get) lock.wait() + } + 0 + } catch { case _: Throwable => 1 } finally sbtServer.close() + } + + private[sbt] def transferTo( + terminated: AtomicBoolean, + lock: AnyRef, + input: InputStream, + output: OutputStream + ): Thread = { val thread = new Thread { override def run(): Unit = { val buffer = Array.ofDim[Byte](1024) try { - while (!terminated) { + while (!terminated.get) { val size = input.read(buffer) if (size == -1) { - terminated = true + terminated.set(true) } else { output.write(buffer, 0, size) output.flush() @@ -38,10 +59,11 @@ class BspClient private (sbtServer: Socket) { input.close() output.close() } catch { - case NonFatal(_) => () + case _: InterruptedException => terminated.set(true) + case NonFatal(_) => () } finally { lock.synchronized { - terminated = true + terminated.set(true) lock.notify() } } @@ -50,24 +72,6 @@ class BspClient private (sbtServer: Socket) { thread.setDaemon(true) thread } - - private def run(): Exit = { - try { - transferTo(sbtServer.getInputStream, System.out).start() - transferTo(System.in, sbtServer.getOutputStream).start() - - lock.synchronized { - while (!terminated) lock.wait() - } - - Exit(0) - } catch { - case NonFatal(_) => Exit(1) - } - } -} - -object BspClient { def run(configuration: xsbti.AppConfiguration): Exit = { val baseDirectory = configuration.baseDirectory val portFile = baseDirectory / "project" / "target" / "active.json" From 65ab7c94d038955d48eefbf7951fb9cd399ab7ef Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Mon, 26 Oct 2020 14:24:43 -0700 Subject: [PATCH 15/24] Support -bsp in thin client This refactors the thin client so that it can run the BspClient if invoked with -bsp. 
--- .../scala/sbt/internal/client/BspClient.scala | 1 - .../sbt/internal/client/NetworkClient.scala | 183 ++++++++++-------- 2 files changed, 100 insertions(+), 84 deletions(-) diff --git a/main-command/src/main/scala/sbt/internal/client/BspClient.scala b/main-command/src/main/scala/sbt/internal/client/BspClient.scala index 4ab8d99e5..613e5de96 100644 --- a/main-command/src/main/scala/sbt/internal/client/BspClient.scala +++ b/main-command/src/main/scala/sbt/internal/client/BspClient.scala @@ -19,7 +19,6 @@ import scala.sys.process.Process import scala.util.control.NonFatal class BspClient private (sbtServer: Socket) { - private def run(): Exit = Exit(BspClient.bspRun(sbtServer)) } diff --git a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala index 17d5e87fb..5774f9e3a 100644 --- a/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala +++ b/main-command/src/main/scala/sbt/internal/client/NetworkClient.scala @@ -164,8 +164,10 @@ class NetworkClient( case _ => } - // Open server connection based on the portfile - def init(promptCompleteUsers: Boolean, retry: Boolean): ServerConnection = + private[sbt] def connectOrStartServerAndConnect( + promptCompleteUsers: Boolean, + retry: Boolean + ): (Socket, Option[String]) = try { if (!portfile.exists) { if (promptCompleteUsers) { @@ -208,88 +210,94 @@ class NetworkClient( connect(attempt + 1) } } - val (sk, tkn) = connect(0) - val conn = new ServerConnection(sk) { - override def onNotification(msg: JsonRpcNotificationMessage): Unit = { - msg.method match { - case `Shutdown` => - val (log, rebootCommands) = msg.params match { - case Some(jvalue) => - Converter - .fromJson[(Boolean, Option[(String, String)])](jvalue) - .getOrElse((true, None)) - case _ => (false, None) - } - if (rebootCommands.nonEmpty) { - rebooting.set(true) - attached.set(false) - connectionHolder.getAndSet(null) match { - case null => - case c => c.shutdown() - } - waitForServer(portfile, true, false) - init(promptCompleteUsers = false, retry = false) - attachUUID.set(sendJson(attach, s"""{"interactive": ${!batchMode.get}}""")) - rebooting.set(false) - rebootCommands match { - case Some((execId, cmd)) if execId.nonEmpty => - if (batchMode.get && !pendingResults.containsKey(execId) && cmd.nonEmpty) { - console.appendLog( - Level.Error, - s"received request to re-run unknown command '$cmd' after reboot" - ) - } else if (cmd.nonEmpty) { - if (batchMode.get) sendCommand(ExecCommand(cmd, execId)) - else - inLock.synchronized { - val toSend = cmd.getBytes :+ '\r'.toByte - toSend.foreach(b => sendNotification(systemIn, b.toString)) - } - } else completeExec(execId, 0) - case _ => - } - } else { - if (!rebooting.get() && running.compareAndSet(true, false) && log) { - if (!arguments.commandArguments.contains(Shutdown)) { - console.appendLog(Level.Error, "sbt server disconnected") - exitClean.set(false) - } - } else { - console.appendLog(Level.Info, s"${if (log) "sbt server " else ""}disconnected") - } - stdinBytes.offer(-1) - Option(inputThread.get).foreach(_.close()) - Option(interactiveThread.get).foreach(_.interrupt) - } - case `readSystemIn` => startInputThread() - case `cancelReadSystemIn` => - inputThread.get match { - case null => - case t => t.close() - } - case _ => self.onNotification(msg) - } - } - override def onRequest(msg: JsonRpcRequestMessage): Unit = self.onRequest(msg) - override def onResponse(msg: JsonRpcResponseMessage): Unit = self.onResponse(msg) - override def 
onShutdown(): Unit = if (!rebooting.get) { - if (exitClean.get != false) exitClean.set(!running.get) - running.set(false) - Option(interactiveThread.get).foreach(_.interrupt()) - } - } - // initiate handshake - val execId = UUID.randomUUID.toString - val initCommand = InitCommand(tkn, Option(execId), Some(true)) - conn.sendString(Serialization.serializeCommandAsJsonMessage(initCommand)) - connectionHolder.set(conn) - conn + connect(0) } catch { case e: ConnectionRefusedException if retry => - if (Files.deleteIfExists(portfile.toPath)) init(promptCompleteUsers, retry = false) + if (Files.deleteIfExists(portfile.toPath)) + connectOrStartServerAndConnect(promptCompleteUsers, retry = false) else throw e } + // Open server connection based on the portfile + def init(promptCompleteUsers: Boolean, retry: Boolean): ServerConnection = { + val (sk, tkn) = connectOrStartServerAndConnect(promptCompleteUsers, retry) + val conn = new ServerConnection(sk) { + override def onNotification(msg: JsonRpcNotificationMessage): Unit = { + msg.method match { + case `Shutdown` => + val (log, rebootCommands) = msg.params match { + case Some(jvalue) => + Converter + .fromJson[(Boolean, Option[(String, String)])](jvalue) + .getOrElse((true, None)) + case _ => (false, None) + } + if (rebootCommands.nonEmpty) { + rebooting.set(true) + attached.set(false) + connectionHolder.getAndSet(null) match { + case null => + case c => c.shutdown() + } + waitForServer(portfile, true, false) + init(promptCompleteUsers = false, retry = false) + attachUUID.set(sendJson(attach, s"""{"interactive": ${!batchMode.get}}""")) + rebooting.set(false) + rebootCommands match { + case Some((execId, cmd)) if execId.nonEmpty => + if (batchMode.get && !pendingResults.containsKey(execId) && cmd.nonEmpty) { + console.appendLog( + Level.Error, + s"received request to re-run unknown command '$cmd' after reboot" + ) + } else if (cmd.nonEmpty) { + if (batchMode.get) sendCommand(ExecCommand(cmd, execId)) + else + inLock.synchronized { + val toSend = cmd.getBytes :+ '\r'.toByte + toSend.foreach(b => sendNotification(systemIn, b.toString)) + } + } else completeExec(execId, 0) + case _ => + } + } else { + if (!rebooting.get() && running.compareAndSet(true, false) && log) { + if (!arguments.commandArguments.contains(Shutdown)) { + console.appendLog(Level.Error, "sbt server disconnected") + exitClean.set(false) + } + } else { + console.appendLog(Level.Info, s"${if (log) "sbt server " else ""}disconnected") + } + stdinBytes.offer(-1) + Option(inputThread.get).foreach(_.close()) + Option(interactiveThread.get).foreach(_.interrupt) + } + case `readSystemIn` => startInputThread() + case `cancelReadSystemIn` => + inputThread.get match { + case null => + case t => t.close() + } + case _ => self.onNotification(msg) + } + } + override def onRequest(msg: JsonRpcRequestMessage): Unit = self.onRequest(msg) + override def onResponse(msg: JsonRpcResponseMessage): Unit = self.onResponse(msg) + override def onShutdown(): Unit = if (!rebooting.get) { + if (exitClean.get != false) exitClean.set(!running.get) + running.set(false) + Option(interactiveThread.get).foreach(_.interrupt()) + } + } + // initiate handshake + val execId = UUID.randomUUID.toString + val initCommand = InitCommand(tkn, Option(execId), Some(true)) + conn.sendString(Serialization.serializeCommandAsJsonMessage(initCommand)) + connectionHolder.set(conn) + conn + } + /** * Forks another instance of sbt in the background. 
* This instance must be shutdown explicitly via `sbt -client shutdown` @@ -1006,9 +1014,10 @@ object NetworkClient { val commandArguments: Seq[String], val completionArguments: Seq[String], val sbtScript: String, + val bsp: Boolean, ) { def withBaseDirectory(file: File): Arguments = - new Arguments(file, sbtArguments, commandArguments, completionArguments, sbtScript) + new Arguments(file, sbtArguments, commandArguments, completionArguments, sbtScript, bsp) } private[client] val completions = "--completions" private[client] val noTab = "--no-tab" @@ -1016,6 +1025,7 @@ object NetworkClient { private[client] val sbtBase = "--sbt-base-directory" private[client] def parseArgs(args: Array[String]): Arguments = { var sbtScript = if (Properties.isWin) "sbt.bat" else "sbt" + var bsp = false val commandArgs = new mutable.ArrayBuffer[String] val sbtArguments = new mutable.ArrayBuffer[String] val completionArguments = new mutable.ArrayBuffer[String] @@ -1037,6 +1047,7 @@ object NetworkClient { .lastOption .map(_.replaceAllLiterally("%20", " ")) .getOrElse(sbtScript) + case "-bsp" | "--bsp" => bsp = true case "--sbt-script" if i + 1 < sanitized.length => i += 1 sbtScript = sanitized(i).replaceAllLiterally("%20", " ") @@ -1050,7 +1061,7 @@ object NetworkClient { } val base = new File("").getCanonicalFile if (!sbtArguments.contains("-Dsbt.io.virtual=true")) sbtArguments += "-Dsbt.io.virtual=true" - new Arguments(base, sbtArguments, commandArgs, completionArguments, sbtScript) + new Arguments(base, sbtArguments, commandArgs, completionArguments, sbtScript, bsp) } def client( @@ -1091,8 +1102,14 @@ object NetworkClient { terminal ) try { - if (client.connect(log = true, promptCompleteUsers = false)) client.run() - else 1 + if (args.bsp) { + val (socket, _) = + client.connectOrStartServerAndConnect(promptCompleteUsers = false, retry = true) + BspClient.bspRun(socket) + } else { + if (client.connect(log = true, promptCompleteUsers = false)) client.run() + else 1 + } } catch { case _: Exception => 1 } finally client.close() } def client( From 98ec0fd8d7069ae4545d945ecc6dbd649d36a1c8 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 26 Oct 2020 00:08:18 -0400 Subject: [PATCH 16/24] 1.4.2-SNAPSHOT --- .travis.yml | 2 +- build.sbt | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index e7247c4c0..88c2bed03 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ matrix: include: - env: - SBT_LOCAL=true - - SBT_VERSION_PROP=-Dsbt.version=1.4.1-SNAPSHOT + - SBT_VERSION_PROP=-Dsbt.version=1.4.2-SNAPSHOT - TRAVIS_JDK=adopt@1.8.0-222 - SBT_CMD="++$SCALA_213; $UTIL_TESTS; ++$SCALA_212; $UTIL_TESTS; scripted actions/* source-dependencies/*1of3 dependency-management/*1of4 java/*" diff --git a/build.sbt b/build.sbt index 9a3265239..fe70d1d5d 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,8 @@ import java.nio.file.{ Files, Path => JPath } import scala.util.Try ThisBuild / version := { - val v = "1.4.1-SNAPSHOT" + // update .travis.yml too for dog fooding + val v = "1.4.2-SNAPSHOT" nightlyVersion.getOrElse(v) } ThisBuild / versionScheme := Some("early-semver") From 86e793cd743c42ef46937f36eac16f0ad2d94140 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 26 Oct 2020 00:11:23 -0400 Subject: [PATCH 17/24] Make remoteCacheId content-based Fixes https://github.com/sbt/sbt/issues/5842 --- main/src/main/scala/sbt/Defaults.scala | 8 ++++ .../main/scala/sbt/internal/RemoteCache.scala | 38 +++++++++++++++++-- 2 files changed, 43 insertions(+), 3 deletions(-) diff 
--git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 00dec4d37..be2fe3bb2 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -2519,6 +2519,14 @@ object Classpaths { excludeFilter in unmanagedJars value ) ).map(exportClasspath) ++ Seq( + externalDependencyClasspath / outputFileStamps := { + val stamper = timeWrappedStamper.value + val converter = fileConverter.value + externalDependencyClasspath.value flatMap { file0 => + val p = file0.data.toPath + FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _) + } + }, dependencyClasspathFiles := data(dependencyClasspath.value).map(_.toPath), dependencyClasspathFiles / outputFileStamps := { val stamper = timeWrappedStamper.value diff --git a/main/src/main/scala/sbt/internal/RemoteCache.scala b/main/src/main/scala/sbt/internal/RemoteCache.scala index a6f24cb81..78977ff61 100644 --- a/main/src/main/scala/sbt/internal/RemoteCache.scala +++ b/main/src/main/scala/sbt/internal/RemoteCache.scala @@ -9,6 +9,7 @@ package sbt package internal import java.io.File +import java.nio.file.Path import Keys._ import SlashSyntax0._ import Project._ // for tag and inTask() @@ -17,11 +18,14 @@ import sbt.coursierint.LMCoursier import sbt.librarymanagement._ import sbt.librarymanagement.ivy.Credentials import sbt.librarymanagement.syntax._ +import sbt.nio.FileStamp +import sbt.nio.Keys.{ inputFileStamps, outputFileStamps } import sbt.internal.librarymanagement._ import sbt.io.IO import sbt.io.syntax._ import sbt.internal.remotecache._ -import sbt.internal.inc.JarUtils +import sbt.internal.inc.{ HashUtil, JarUtils } +import sbt.util.InterfaceUtil.toOption import sbt.util.Logger object RemoteCache { @@ -41,12 +45,30 @@ object RemoteCache { .map(_.take(commitLength)) lazy val globalSettings: Seq[Def.Setting[_]] = Seq( - remoteCacheId := gitCommitId, - remoteCacheIdCandidates := gitCommitIds(5), + remoteCacheId := "", + remoteCacheIdCandidates := Nil, pushRemoteCacheTo :== None ) lazy val projectSettings: Seq[Def.Setting[_]] = (Seq( + remoteCacheId := { + val compileExtraInc = (Compile / extraIncOptions).value + val compileInputs = (Compile / unmanagedSources / inputFileStamps).value + val compileCp = (Compile / externalDependencyClasspath / outputFileStamps).value + val testExtraInc = (Compile / extraIncOptions).value + val testInputs = (Test / unmanagedSources / inputFileStamps).value + val testCp = (Test / externalDependencyClasspath / outputFileStamps).value + val extraInc = (compileExtraInc.toVector ++ testExtraInc.toVector) flatMap { + case (k, v) => + Vector(k, v) + } + combineHash( + extractHash(compileInputs) ++ extractHash(compileCp) ++ extractHash(testInputs) ++ extractHash( + testCp + ) ++ extraInc + ) + }, + remoteCacheIdCandidates := List(remoteCacheId.value), remoteCacheProjectId := { val o = organization.value val m = moduleName.value @@ -332,4 +354,14 @@ object RemoteCache { Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) => a }) + + private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] = + inputs.toVector map { + case (_, stamp0) => toOption(stamp0.stamp.getHash).getOrElse("cafe") + } + + private def combineHash(vs: Vector[String]): String = { + val hashValue = HashUtil.farmHash(vs.sorted.mkString("").getBytes("UTF-8")) + java.lang.Long.toHexString(hashValue) + } } From 6a356c61e06731fa43037dcf477d8a82bf193ae9 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 29 Oct 2020 09:47:27 -0400 
Subject: [PATCH 18/24] Refactor remoteCacheId --- .../main/scala/sbt/internal/RemoteCache.scala | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/main/src/main/scala/sbt/internal/RemoteCache.scala b/main/src/main/scala/sbt/internal/RemoteCache.scala index 78977ff61..cea7abecd 100644 --- a/main/src/main/scala/sbt/internal/RemoteCache.scala +++ b/main/src/main/scala/sbt/internal/RemoteCache.scala @@ -12,6 +12,7 @@ import java.io.File import java.nio.file.Path import Keys._ import SlashSyntax0._ +import ScopeFilter.Make._ import Project._ // for tag and inTask() import std.TaskExtra._ // for join import sbt.coursierint.LMCoursier @@ -51,23 +52,14 @@ object RemoteCache { ) lazy val projectSettings: Seq[Def.Setting[_]] = (Seq( - remoteCacheId := { - val compileExtraInc = (Compile / extraIncOptions).value - val compileInputs = (Compile / unmanagedSources / inputFileStamps).value - val compileCp = (Compile / externalDependencyClasspath / outputFileStamps).value - val testExtraInc = (Compile / extraIncOptions).value - val testInputs = (Test / unmanagedSources / inputFileStamps).value - val testCp = (Test / externalDependencyClasspath / outputFileStamps).value - val extraInc = (compileExtraInc.toVector ++ testExtraInc.toVector) flatMap { - case (k, v) => - Vector(k, v) + remoteCacheId := (Def.taskDyn { + val filter = + ScopeFilter(configurations = inConfigurations(Compile, Test), tasks = inTasks(packageCache)) + Def.task { + val allHashes = remoteCacheId.all(filter).value + combineHash(allHashes.toVector) } - combineHash( - extractHash(compileInputs) ++ extractHash(compileCp) ++ extractHash(testInputs) ++ extractHash( - testCp - ) ++ extraInc - ) - }, + }).value, remoteCacheIdCandidates := List(remoteCacheId.value), remoteCacheProjectId := { val o = organization.value @@ -206,11 +198,20 @@ object RemoteCache { ) ++ inConfig(Compile)(packageCacheSettings(compileArtifact(Compile, cachedCompileClassifier))) ++ inConfig(Test)(packageCacheSettings(testArtifact(Test, cachedTestClassifier)))) - private def packageCacheSettings[A <: RemoteCacheArtifact]( + def packageCacheSettings[A <: RemoteCacheArtifact]( cacheArtifact: Def.Initialize[Task[A]] ): Seq[Def.Setting[_]] = inTask(packageCache)( Seq( + remoteCacheId := { + val inputs = (unmanagedSources / inputFileStamps).value + val cp = (externalDependencyClasspath / outputFileStamps).value + val extraInc = (extraIncOptions.value) flatMap { + case (k, v) => + Vector(k, v) + } + combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) + }, packageCache.in(Defaults.TaskZero) := { val original = packageBin.in(Defaults.TaskZero).value val artp = artifactPath.value From 512494d11cab62785d35c990230315dcf78df55d Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Thu, 29 Oct 2020 23:54:09 +0900 Subject: [PATCH 19/24] Update travis-ci badge. 
s/.org/.com/ https://github.com/sbt/sbt/issues/6031 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2f9ef8a6e..4224db130 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![Build Status](https://travis-ci.org/sbt/sbt.svg?branch=develop)](https://travis-ci.org/sbt/sbt) +[![Build Status](https://travis-ci.com/sbt/sbt.svg?branch=develop)](https://travis-ci.com/github/sbt/sbt) [![Latest version](https://img.shields.io/github/tag/sbt/sbt.svg)](https://index.scala-lang.org/sbt/sbt) [![Gitter Chat](https://badges.gitter.im/sbt/sbt.svg)](https://gitter.im/sbt/sbt) From 078280ac47cad409150aa9fe95d692d8e3f565a1 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sat, 31 Oct 2020 07:26:33 -0400 Subject: [PATCH 20/24] Make sure we test the caching effect --- .../sbt-test/actions/remote-cache/build.sbt | 44 ++++++++++++++----- .../remote-cache/project/CompileState.scala | 4 ++ sbt/src/sbt-test/actions/remote-cache/test | 27 ++++++------ 3 files changed, 50 insertions(+), 25 deletions(-) create mode 100644 sbt/src/sbt-test/actions/remote-cache/project/CompileState.scala diff --git a/sbt/src/sbt-test/actions/remote-cache/build.sbt b/sbt/src/sbt-test/actions/remote-cache/build.sbt index e14afca15..8682caad0 100644 --- a/sbt/src/sbt-test/actions/remote-cache/build.sbt +++ b/sbt/src/sbt-test/actions/remote-cache/build.sbt @@ -1,21 +1,27 @@ import sbt.internal.remotecache.CustomRemoteCacheArtifact +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +lazy val CustomArtifact = config("custom-artifact") + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +ThisBuild / scalaVersion := "2.12.12" +ThisBuild / pushRemoteCacheTo := Some( + MavenCache("local-cache", (ThisBuild / baseDirectory).value / "remote-cache") +) lazy val root = (project in file(".")) .configs(CustomArtifact) .settings( name := "my-project", - scalaVersion := "2.12.12", - pushRemoteCacheTo := Some( - MavenCache("local-cache", (ThisBuild / baseDirectory).value / "remote-cache") - ), - remoteCacheId := "fixed-id", - remoteCacheIdCandidates := Seq("fixed-id"), pushRemoteCacheConfiguration := pushRemoteCacheConfiguration.value.withOverwrite(true), pushRemoteCacheConfiguration / remoteCacheArtifacts += { val art = (CustomArtifact / artifact).value val packaged = CustomArtifact / packageCache val extractDirectory = (CustomArtifact / sourceManaged).value - CustomRemoteCacheArtifact(art, packaged, extractDirectory, preserveLastModified = false) }, Compile / sourceGenerators += Def.task { @@ -23,11 +29,27 @@ lazy val root = (project in file(".")) val output = extractDirectory / "HelloWorld.scala" IO.write(output, "class HelloWorld") Seq(output) - }.taskValue + }.taskValue, + customArtifactSettings, + // test tasks + recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } + }, + checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - 
CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual") + } ) - .settings(customArtifactSettings) - -lazy val CustomArtifact = config("custom-artifact") def customArtifactSettings: Seq[Def.Setting[_]] = { val classifier = "custom-artifact" diff --git a/sbt/src/sbt-test/actions/remote-cache/project/CompileState.scala b/sbt/src/sbt-test/actions/remote-cache/project/CompileState.scala new file mode 100644 index 000000000..078db9c7b --- /dev/null +++ b/sbt/src/sbt-test/actions/remote-cache/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt/src/sbt-test/actions/remote-cache/test b/sbt/src/sbt-test/actions/remote-cache/test index e90fade3e..ff1289d45 100644 --- a/sbt/src/sbt-test/actions/remote-cache/test +++ b/sbt/src/sbt-test/actions/remote-cache/test @@ -1,3 +1,4 @@ +> recordPreviousIterations > compile > pushRemoteCache @@ -11,25 +12,20 @@ $ exists target/scala-2.12/test-classes/MyTest.class $ exists target/scala-2.12/test-classes/MyTest$.class $ exists target/scala-2.12/test-zinc/inc_compile_2.12.zip -# Pom file -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom.sha1 - # Compile -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar.sha1 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar.md5 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar.sha1 # Test -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar.sha1 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar.md5 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar.sha1 # Custom artifact -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar.sha1 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar +$ 
exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar.md5 +$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar.sha1 > clean @@ -54,5 +50,8 @@ $ exists target/scala-2.12/test-classes/MyTest.class $ exists target/scala-2.12/test-classes/MyTest$.class $ exists target/scala-2.12/test-zinc/inc_compile_2.12.zip +> debug +> checkIterations 1 + # Artifacts can be pushed twice (enabled overriding) > pushRemoteCache \ No newline at end of file From 744cfefa6c118a309d44b3b2ffd638c89083fd36 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 1 Nov 2020 15:08:46 -0500 Subject: [PATCH 21/24] Refactor remote cache to per-config Refactor remote caching to be scoped to configuration. In addition, this avoid the use of dependency resolver (since I'm not resolving anything) and directly invoke the Ivy resolver for the artifact, somewhat analogus to publishing process. This should speed up the `pullRemoteCache` since it avoids the POM download as well. For sbt-binrary-remote-cache this created a bit of complication since the (publishing) resolver doesn't act correctly as (downloading) resolver in terms of the credentials, so I had to create a new key `remoteCacheResolvers` to have asymmetric resolver. --- build.sbt | 2 +- main/src/main/scala/sbt/Keys.scala | 1 + .../main/scala/sbt/internal/RemoteCache.scala | 320 +++++++++++------- .../actions/remote-cache-semanticdb/build.sbt | 11 +- .../actions/remote-cache-semanticdb/test | 5 - .../sbt-test/actions/remote-cache/build.sbt | 25 +- sbt/src/sbt-test/actions/remote-cache/test | 18 +- 7 files changed, 224 insertions(+), 158 deletions(-) diff --git a/build.sbt b/build.sbt index fe70d1d5d..0f24a0767 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ ThisBuild / versionScheme := Some("early-semver") ThisBuild / scalafmtOnCompile := !(Global / insideCI).value ThisBuild / Test / scalafmtOnCompile := !(Global / insideCI).value ThisBuild / turbo := true -ThisBuild / usePipelining := !(Global / insideCI).value +ThisBuild / usePipelining := false // !(Global / insideCI).value val excludeLint = SettingKey[Set[Def.KeyedInitialize[_]]]("excludeLintKeys") Global / excludeLint := (Global / excludeLint).?.value.getOrElse(Set.empty) diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index b00fed740..14b3d54dc 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -386,6 +386,7 @@ object Keys { val pushRemoteCacheArtifact = settingKey[Boolean]("Enables publishing an artifact to remote cache.") val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("") val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.") + val remoteCacheResolvers = settingKey[Seq[Resolver]]("Resolvers for remote cache.") val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.") val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting) val exportPipelining = settingKey[Boolean]("Product early output so downstream subprojects can do pipelining.").withRank(BSetting) diff --git a/main/src/main/scala/sbt/internal/RemoteCache.scala b/main/src/main/scala/sbt/internal/RemoteCache.scala index cea7abecd..be3b29b82 100644 --- a/main/src/main/scala/sbt/internal/RemoteCache.scala +++ b/main/src/main/scala/sbt/internal/RemoteCache.scala @@ -14,10 +14,15 @@ 
import Keys._ import SlashSyntax0._ import ScopeFilter.Make._ import Project._ // for tag and inTask() + +import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact, DefaultArtifact } +import org.apache.ivy.core.resolve.DownloadOptions +import org.apache.ivy.core.report.DownloadStatus +import org.apache.ivy.plugins.resolver.DependencyResolver import std.TaskExtra._ // for join import sbt.coursierint.LMCoursier import sbt.librarymanagement._ -import sbt.librarymanagement.ivy.Credentials +import sbt.librarymanagement.ivy.{ Credentials, IvyPaths, UpdateOptions } import sbt.librarymanagement.syntax._ import sbt.nio.FileStamp import sbt.nio.Keys.{ inputFileStamps, outputFileStamps } @@ -52,94 +57,43 @@ object RemoteCache { ) lazy val projectSettings: Seq[Def.Setting[_]] = (Seq( - remoteCacheId := (Def.taskDyn { - val filter = - ScopeFilter(configurations = inConfigurations(Compile, Test), tasks = inTasks(packageCache)) + pushRemoteCache := (Def.taskDyn { + val arts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value + val configs = arts flatMap { art => + art.packaged.scopedKey.scope match { + case Scope(_, Select(c), _, _) => Some(c) + case _ => None + } + } + val filter = ScopeFilter(configurations = inConfigurationsByKeys(configs: _*)) Def.task { - val allHashes = remoteCacheId.all(filter).value - combineHash(allHashes.toVector) + val _ = pushRemoteCache.all(filter).value + () + } + }).value, + pullRemoteCache := (Def.taskDyn { + val arts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value + val configs = arts flatMap { art => + art.packaged.scopedKey.scope match { + case Scope(_, Select(c), _, _) => Some(c) + case _ => None + } + } + val filter = ScopeFilter(configurations = inConfigurationsByKeys(configs: _*)) + Def.task { + val _ = pullRemoteCache.all(filter).value + () } }).value, - remoteCacheIdCandidates := List(remoteCacheId.value), - remoteCacheProjectId := { - val o = organization.value - val m = moduleName.value - val id = remoteCacheId.value - val c = (projectID / crossVersion).value - val v = toVersion(id) - ModuleID(o, m, v).cross(c) - }, - pushRemoteCacheConfiguration / publishMavenStyle := true, - pushRemoteCacheConfiguration / packagedArtifacts := Def.taskDyn { - val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value - - artifacts - .map(a => a.packaged.map(file => (a.artifact, file))) - .join - .apply(_.join.map(_.toMap)) - }.value, pushRemoteCacheConfiguration / remoteCacheArtifacts := { enabledOnly(remoteCacheArtifact.toSettingKey, defaultArtifactTasks).apply(_.join).value }, + pushRemoteCacheConfiguration / publishMavenStyle := true, Compile / packageCache / pushRemoteCacheArtifact := true, Test / packageCache / pushRemoteCacheArtifact := true, Compile / packageCache / artifact := Artifact(moduleName.value, cachedCompileClassifier), Test / packageCache / artifact := Artifact(moduleName.value, cachedTestClassifier), remoteCachePom / pushRemoteCacheArtifact := true, - pushRemoteCacheConfiguration := { - Classpaths.publishConfig( - (pushRemoteCacheConfiguration / publishMavenStyle).value, - Classpaths.deliverPattern(crossTarget.value), - if (isSnapshot.value) "integration" else "release", - ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector, - (pushRemoteCacheConfiguration / checksums).value.toVector, - Classpaths.getPublishTo(pushRemoteCacheTo.value).name, - ivyLoggingLevel.value, - isSnapshot.value - ) - }, - pullRemoteCache := { - val log = 
streams.value.log - val smi = scalaModuleInfo.value - val dr = (pullRemoteCache / dependencyResolution).value - val is = (pushRemoteCache / ivySbt).value - val t = crossTarget.value / "cache-download" - val p = remoteCacheProjectId.value - val ids = remoteCacheIdCandidates.value - val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value - val applicable = artifacts.filterNot(isPomArtifact) - val classifiers = applicable.flatMap(_.artifact.classifier).toVector - - var found = false - ids foreach { - id: String => - val v = toVersion(id) - val modId = p.withRevision(v) - if (found) () - else - pullFromMavenRepo0(modId, classifiers, smi, is, dr, t, log) match { - case Right(xs0) => - val jars = xs0.distinct - - applicable.foreach { art => - val classifier = art.artifact.classifier - - findJar(classifier, v, jars) match { - case Some(jar) => - extractJar(art, jar) - log.info(s"remote cache artifact extracted for $p $classifier") - - case None => - log.info(s"remote cache artifact not found for $p $classifier") - } - } - found = true - case Left(unresolvedWarning) => - log.info(s"remote cache not found for ${v}") - } - } - }, remoteCachePom := { val s = streams.value val config = (remoteCachePom / makePomConfiguration).value @@ -156,62 +110,43 @@ object RemoteCache { }, remoteCachePom / remoteCacheArtifact := { PomRemoteCacheArtifact((makePom / artifact).value, remoteCachePom) - } + }, + remoteCacheResolvers := pushRemoteCacheTo.value.toVector, ) ++ inTask(pushRemoteCache)( Seq( + ivyPaths := IvyPaths(baseDirectory.value, crossTarget.value / "remote-cache"), ivyConfiguration := { - val other = pushRemoteCacheTo.value.toVector val config0 = Classpaths.mkIvyConfiguration.value config0 - .withOtherResolvers(other) + .withResolvers(remoteCacheResolvers.value.toVector) + .withOtherResolvers(pushRemoteCacheTo.value.toVector) .withResolutionCacheDir(crossTarget.value / "alt-resolution") + .withPaths(ivyPaths.value) + .withUpdateOptions(UpdateOptions().withGigahorse(true)) }, ivySbt := { - val config0 = ivyConfiguration.value Credentials.register(credentials.value, streams.value.log) + val config0 = ivyConfiguration.value new IvySbt(config0, CustomHttp.okhttpClient.value) }, - ivyModule := { - val is = ivySbt.value - new is.Module(moduleSettings.value) - }, - moduleSettings := { - val smi = scalaModuleInfo.value - ModuleDescriptorConfiguration(remoteCacheProjectId.value, projectInfo.value) - .withScalaModuleInfo(smi) - }, - pushRemoteCache.in(Defaults.TaskZero) := (Def.task { - val s = streams.value - val config = pushRemoteCacheConfiguration.value - IvyActions.publish(ivyModule.value, config, s.log) - } tag (Tags.Publish, Tags.Network)).value ) ) ++ inTask(pullRemoteCache)( Seq( dependencyResolution := Defaults.dependencyResolutionTask.value, csrConfiguration := { - val rs = pushRemoteCacheTo.value.toVector + val rs = pushRemoteCacheTo.value.toVector ++ remoteCacheResolvers.value.toVector LMCoursier.scalaCompilerBridgeConfigurationTask.value .withResolvers(rs) } ) - ) ++ inConfig(Compile)(packageCacheSettings(compileArtifact(Compile, cachedCompileClassifier))) - ++ inConfig(Test)(packageCacheSettings(testArtifact(Test, cachedTestClassifier)))) + ) ++ inConfig(Compile)(configCacheSettings(compileArtifact(Compile, cachedCompileClassifier))) + ++ inConfig(Test)(configCacheSettings(testArtifact(Test, cachedTestClassifier)))) - def packageCacheSettings[A <: RemoteCacheArtifact]( - cacheArtifact: Def.Initialize[Task[A]] + def configCacheSettings[A <: RemoteCacheArtifact]( + 
cacheArtifactTask: Def.Initialize[Task[A]] ): Seq[Def.Setting[_]] = inTask(packageCache)( Seq( - remoteCacheId := { - val inputs = (unmanagedSources / inputFileStamps).value - val cp = (externalDependencyClasspath / outputFileStamps).value - val extraInc = (extraIncOptions.value) flatMap { - case (k, v) => - Vector(k, v) - } - combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) - }, packageCache.in(Defaults.TaskZero) := { val original = packageBin.in(Defaults.TaskZero).value val artp = artifactPath.value @@ -229,10 +164,128 @@ object RemoteCache { // } artp }, - remoteCacheArtifact := cacheArtifact.value, + pushRemoteCacheArtifact := true, + remoteCacheArtifact := cacheArtifactTask.value, packagedArtifact := (artifact.value -> packageCache.value), artifactPath := Defaults.artifactPathSetting(artifact).value ) + ) ++ inTask(pushRemoteCache)( + Seq( + moduleSettings := { + val smi = scalaModuleInfo.value + ModuleDescriptorConfiguration(remoteCacheProjectId.value, projectInfo.value) + .withScalaModuleInfo(smi) + }, + pushRemoteCache.in(Defaults.TaskZero) := (Def.task { + val s = streams.value + val config = pushRemoteCacheConfiguration.value + val is = (pushRemoteCache / ivySbt).value + val m = new is.Module(moduleSettings.value) + IvyActions.publish(m, config, s.log) + } tag (Tags.Publish, Tags.Network)).value, + ) + ) ++ Seq( + remoteCacheIdCandidates := List(remoteCacheId.value), + remoteCacheProjectId := { + val o = organization.value + val m = moduleName.value + val id = remoteCacheId.value + val c = (projectID / crossVersion).value + val v = toVersion(id) + ModuleID(o, m, v).cross(c) + }, + remoteCacheId := { + val inputs = (unmanagedSources / inputFileStamps).value + val cp = (externalDependencyClasspath / outputFileStamps).?.value.getOrElse(Nil) + val extraInc = (extraIncOptions.value) flatMap { + case (k, v) => + Vector(k, v) + } + combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) + }, + pushRemoteCacheConfiguration := { + Classpaths.publishConfig( + (pushRemoteCacheConfiguration / publishMavenStyle).value, + Classpaths.deliverPattern(crossTarget.value), + if (isSnapshot.value) "integration" else "release", + ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, + (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector, + (pushRemoteCacheConfiguration / checksums).value.toVector, + Classpaths.getPublishTo(pushRemoteCacheTo.value).name, + ivyLoggingLevel.value, + isSnapshot.value + ) + }, + pushRemoteCacheConfiguration / packagedArtifacts := Def.taskDyn { + val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value + artifacts + .map(a => a.packaged.map(file => (a.artifact, file))) + .join + .apply(_.join.map(_.toMap)) + }.value, + pushRemoteCacheConfiguration / remoteCacheArtifacts := { + List((packageCache / remoteCacheArtifact).value) + }, + pullRemoteCache := { + import scala.collection.JavaConverters._ + val log = streams.value.log + val r = remoteCacheResolvers.value.head + val p = remoteCacheProjectId.value + val ids = remoteCacheIdCandidates.value + val is = (pushRemoteCache / ivySbt).value + val m = new is.Module((pushRemoteCache / moduleSettings).value) + val smi = scalaModuleInfo.value + val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value + val nonPom = artifacts.filterNot(isPomArtifact).toVector + m.withModule(log) { + case (ivy, md, _) => + val resolver = ivy.getSettings.getResolver(r.name) + if (resolver eq null) sys.error(s"undefined resolver '${r.name}'") + val cross = CrossVersion(p, 
smi) + val crossf: String => String = cross.getOrElse(identity _) + var found = false + ids foreach { + id: String => + val v = toVersion(id) + val modId = p.withRevision(v).withName(crossf(p.name)) + val ivyId = IvySbt.toID(modId) + if (found) () + else { + val rawa = nonPom map { _.artifact } + val seqa = CrossVersion.substituteCross(rawa, cross) + val as = seqa map { a => + val extra = a.classifier match { + case Some(c) => Map("e:classifier" -> c) + case None => Map.empty + } + new DefaultArtifact(ivyId, null, a.name, a.`type`, a.extension, extra.asJava) + } + pullFromMavenRepo0(as, resolver, log) match { + case Right(xs0) => + val jars = xs0.distinct + + nonPom.foreach { art => + val classifier = art.artifact.classifier + + findJar(classifier, v, jars) match { + case Some(jar) => + extractJar(art, jar) + log.info(s"remote cache artifact extracted for $p $classifier") + + case None => + log.info(s"remote cache artifact not found for $p $classifier") + } + } + found = true + case Left(e) => + log.info(s"remote cache not found for ${v}") + log.debug(e.getMessage) + } + } + } + () + } + }, ) def isPomArtifact(artifact: RemoteCacheArtifact): Boolean = @@ -268,26 +321,35 @@ object RemoteCache { private def toVersion(v: String): String = s"0.0.0-$v" + private lazy val doption = new DownloadOptions private def pullFromMavenRepo0( - modId: ModuleID, - classifiers: Vector[String], - smi: Option[ScalaModuleInfo], - is: IvySbt, - dr: DependencyResolution, - cacheDir: File, + artifacts: Vector[IArtifact], + r: DependencyResolver, log: Logger - ): Either[UnresolvedWarning, Vector[File]] = { - def dummyModule(deps: Vector[ModuleID]): ModuleDescriptorConfiguration = { - val module = ModuleID("com.example.temp", "fake", "0.1.0-SNAPSHOT") - val info = ModuleInfo("fake", "", None, None, Vector(), "", None, None, Vector()) - ModuleDescriptorConfiguration(module, info) - .withScalaModuleInfo(smi) - .withDependencies(deps) + ): Either[Throwable, Vector[File]] = { + try { + val files = r.download(artifacts.toArray, doption).getArtifactsReports.toVector map { + report => + if (report == null) sys.error(s"failed to download $artifacts: " + r.toString) + else + report.getDownloadStatus match { + case DownloadStatus.NO => + val o = report.getArtifactOrigin + if (o.isLocal) { + val localFile = new File(o.getLocation) + if (!localFile.exists) sys.error(s"$localFile doesn't exist") + else localFile + } else report.getLocalFile + case DownloadStatus.SUCCESSFUL => + report.getLocalFile + case DownloadStatus.FAILED => + sys.error(s"failed to download $artifacts: " + r.toString) + } + } + Right(files) + } catch { + case e: Throwable => Left(e) } - val deps = classifiers.map(modId.classifier) - val mconfig = dummyModule(deps) - val m = new is.Module(mconfig) - dr.retrieve(m, cacheDir, log) } private def findJar(classifier: Option[String], ver: String, jars: Vector[File]): Option[File] = { @@ -345,7 +407,7 @@ object RemoteCache { } private def defaultArtifactTasks: Seq[TaskKey[File]] = - Seq(remoteCachePom, Compile / packageCache, Test / packageCache) + Seq(Compile / packageCache, Test / packageCache) private def enabledOnly[A]( key: SettingKey[A], diff --git a/sbt/src/sbt-test/actions/remote-cache-semanticdb/build.sbt b/sbt/src/sbt-test/actions/remote-cache-semanticdb/build.sbt index d35dd9ea0..374410229 100644 --- a/sbt/src/sbt-test/actions/remote-cache-semanticdb/build.sbt +++ b/sbt/src/sbt-test/actions/remote-cache-semanticdb/build.sbt @@ -10,8 +10,9 @@ pushRemoteCacheTo := Some( MavenCache("local-cache", (ThisBuild 
/ baseDirectory).value / "remote-cache-semanticdb") ) -remoteCacheId := "fixed-id" - -remoteCacheIdCandidates := Seq(remoteCacheId.value) - -pushRemoteCacheConfiguration := pushRemoteCacheConfiguration.value.withOverwrite(true) +Compile / remoteCacheId := "fixed-id" +Compile / remoteCacheIdCandidates := Seq((Compile / remoteCacheId).value) +Test / remoteCacheId := "fixed-id" +Test / remoteCacheIdCandidates := Seq((Test / remoteCacheId).value) +Compile / pushRemoteCacheConfiguration := (Compile / pushRemoteCacheConfiguration).value.withOverwrite(true) +Test / pushRemoteCacheConfiguration := (Test / pushRemoteCacheConfiguration).value.withOverwrite(true) diff --git a/sbt/src/sbt-test/actions/remote-cache-semanticdb/test b/sbt/src/sbt-test/actions/remote-cache-semanticdb/test index bb3bff980..3875dd2ea 100644 --- a/sbt/src/sbt-test/actions/remote-cache-semanticdb/test +++ b/sbt/src/sbt-test/actions/remote-cache-semanticdb/test @@ -10,11 +10,6 @@ $ exists target/scala-2.12/test-classes/MyTest$.class $ exists target/scala-2.12/test-classes/META-INF/semanticdb/src/test/scala/MyTest.scala.semanticdb $ exists target/scala-2.12/test-zinc/inc_compile_2.12.zip -# Pom file -$ exists remote-cache-semanticdb/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom -$ exists remote-cache-semanticdb/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom.md5 -$ exists remote-cache-semanticdb/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id.pom.sha1 - # Compile $ exists remote-cache-semanticdb/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar $ exists remote-cache-semanticdb/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar.md5 diff --git a/sbt/src/sbt-test/actions/remote-cache/build.sbt b/sbt/src/sbt-test/actions/remote-cache/build.sbt index 8682caad0..6ea9d9f49 100644 --- a/sbt/src/sbt-test/actions/remote-cache/build.sbt +++ b/sbt/src/sbt-test/actions/remote-cache/build.sbt @@ -10,27 +10,27 @@ val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterati ThisBuild / scalaVersion := "2.12.12" ThisBuild / pushRemoteCacheTo := Some( - MavenCache("local-cache", (ThisBuild / baseDirectory).value / "remote-cache") + MavenCache("local-cache", (ThisBuild / baseDirectory).value / "r") ) lazy val root = (project in file(".")) .configs(CustomArtifact) .settings( name := "my-project", - pushRemoteCacheConfiguration := pushRemoteCacheConfiguration.value.withOverwrite(true), - pushRemoteCacheConfiguration / remoteCacheArtifacts += { - val art = (CustomArtifact / artifact).value - val packaged = CustomArtifact / packageCache - val extractDirectory = (CustomArtifact / sourceManaged).value - CustomRemoteCacheArtifact(art, packaged, extractDirectory, preserveLastModified = false) - }, + + customArtifactSettings, + pushRemoteCacheConfiguration / remoteCacheArtifacts += (CustomArtifact / packageCache / remoteCacheArtifact).value, + + Compile / pushRemoteCacheConfiguration := (Compile / pushRemoteCacheConfiguration).value.withOverwrite(true), + Test / pushRemoteCacheConfiguration := (Test / pushRemoteCacheConfiguration).value.withOverwrite(true), + Compile / sourceGenerators += Def.task { val extractDirectory = (CustomArtifact / sourceManaged).value val output = extractDirectory / "HelloWorld.scala" IO.write(output, "class HelloWorld") Seq(output) }.taskValue, - customArtifactSettings, + // test tasks recordPreviousIterations := { val log = streams.value.log 
@@ -54,7 +54,14 @@ lazy val root = (project in file(".")) def customArtifactSettings: Seq[Def.Setting[_]] = { val classifier = "custom-artifact" + def cachedArtifactTask = Def.task { + val art = (CustomArtifact / artifact).value + val packaged = CustomArtifact / packageCache + val extractDirectory = (CustomArtifact / sourceManaged).value + CustomRemoteCacheArtifact(art, packaged, extractDirectory, preserveLastModified = false) + } inConfig(CustomArtifact)( + sbt.internal.RemoteCache.configCacheSettings(cachedArtifactTask) ++ Seq( packageOptions := { val n = name.value + "-" + classifier diff --git a/sbt/src/sbt-test/actions/remote-cache/test b/sbt/src/sbt-test/actions/remote-cache/test index ff1289d45..feab2a060 100644 --- a/sbt/src/sbt-test/actions/remote-cache/test +++ b/sbt/src/sbt-test/actions/remote-cache/test @@ -13,19 +13,19 @@ $ exists target/scala-2.12/test-classes/MyTest$.class $ exists target/scala-2.12/test-zinc/inc_compile_2.12.zip # Compile -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-compile.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar +$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar.sha1 # Test -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-cached-test.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar +$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar.sha1 # Custom artifact -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar.md5 -$ exists remote-cache/my-project/my-project_2.12/0.0.0-b63e5c6efa2d0fd2/my-project_2.12-0.0.0-b63e5c6efa2d0fd2-custom-artifact.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar +$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar.sha1 > clean From 6c344830df204a69dae3ff51669a72ad6dae6f20 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 1 Nov 2020 19:51:57 -0500 Subject: [PATCH 22/24] Bring back fixed-id for testing --- .../sbt-test/actions/remote-cache/build.sbt | 7 +++++++ 
sbt/src/sbt-test/actions/remote-cache/test | 18 +++++++++--------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/sbt/src/sbt-test/actions/remote-cache/build.sbt b/sbt/src/sbt-test/actions/remote-cache/build.sbt index 6ea9d9f49..920bc2e0c 100644 --- a/sbt/src/sbt-test/actions/remote-cache/build.sbt +++ b/sbt/src/sbt-test/actions/remote-cache/build.sbt @@ -30,6 +30,13 @@ lazy val root = (project in file(".")) IO.write(output, "class HelloWorld") Seq(output) }.taskValue, + // bring back fixed-id because JDK 8/11 would be different intentionally + Compile / remoteCacheId := "fixed-id", + Compile / remoteCacheIdCandidates := Seq((Compile / remoteCacheId).value), + Test / remoteCacheId := "fixed-id", + Test / remoteCacheIdCandidates := Seq((Test / remoteCacheId).value), + CustomArtifact / remoteCacheId := "fixed-id", + CustomArtifact / remoteCacheIdCandidates := Seq((CustomArtifact / remoteCacheId).value), // test tasks recordPreviousIterations := { diff --git a/sbt/src/sbt-test/actions/remote-cache/test b/sbt/src/sbt-test/actions/remote-cache/test index feab2a060..5c81f25a5 100644 --- a/sbt/src/sbt-test/actions/remote-cache/test +++ b/sbt/src/sbt-test/actions/remote-cache/test @@ -13,19 +13,19 @@ $ exists target/scala-2.12/test-classes/MyTest$.class $ exists target/scala-2.12/test-zinc/inc_compile_2.12.zip # Compile -$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar -$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar.md5 -$ exists r/my-project/my-project_2.12/0.0.0-789740d77fd44aa9/my-project_2.12-0.0.0-789740d77fd44aa9-cached-compile.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-compile.jar.sha1 # Test -$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar -$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar.md5 -$ exists r/my-project/my-project_2.12/0.0.0-9cd077da3ad28ae5/my-project_2.12-0.0.0-9cd077da3ad28ae5-cached-test.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-cached-test.jar.sha1 # Custom artifact -$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar -$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar.md5 -$ exists r/my-project/my-project_2.12/0.0.0-1497188b634e2cd0/my-project_2.12-0.0.0-1497188b634e2cd0-custom-artifact.jar.sha1 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar.md5 +$ exists r/my-project/my-project_2.12/0.0.0-fixed-id/my-project_2.12-0.0.0-fixed-id-custom-artifact.jar.sha1 > clean From d2252cc89d8c3e37288df0047c1864a23f5618c4 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 1 Nov 2020 
21:24:35 -0500 Subject: [PATCH 23/24] Zinc 1.4.2 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3cc44eb29..ec68e9455 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -14,7 +14,7 @@ object Dependencies { private val ioVersion = nightlyVersion.getOrElse("1.4.0") private val lmVersion = sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("1.4.0") - val zincVersion = nightlyVersion.getOrElse("1.4.1") + val zincVersion = nightlyVersion.getOrElse("1.4.2") private val sbtIO = "org.scala-sbt" %% "io" % ioVersion From c5fd92959fd0bab8e1d7a20358c3e3b16d51f534 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 1 Nov 2020 21:32:01 -0500 Subject: [PATCH 24/24] AdoptOpenJDK 8 on Windows --- .appveyor.yml | 4 ++-- sbt/project/build.properties | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 sbt/project/build.properties diff --git a/.appveyor.yml b/.appveyor.yml index b068a53f7..af4ec165c 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -84,7 +84,7 @@ for: - path: client\target\bin\sbtn.exe name: sbtn.exe install: - - cinst jdk8 -params 'installdir=C:\\jdk8' + - cinst adoptopenjdk8 -params 'installdir=C:\\jdk8' - SET CI=true #- choco install windows-sdk-7.1 kb2519277 - call "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" @@ -132,7 +132,7 @@ for: except: - build-graal install: - - cinst jdk8 -params 'installdir=C:\\jdk8' + - cinst adoptopenjdk8 -params 'installdir=C:\\jdk8' - SET JAVA_HOME=C:\jdk8 - SET PATH=C:\jdk8\bin;%PATH% - SET CI=true diff --git a/sbt/project/build.properties b/sbt/project/build.properties new file mode 100644 index 000000000..6db984250 --- /dev/null +++ b/sbt/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.4.0