* spelling: 1.x

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: a

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: aether

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: anymore

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: artifact

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: available

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: be

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: bridge

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: cannot

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: case-insensitive

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: checksum

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: class loads

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: contra

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: dependencies

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: dependency

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: dependent

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: deriveds

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: describes

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: early

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: enclosed

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: evaluation

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: excluding

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: execution

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: for

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: frequently

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: github

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: green

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: https://www

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: https

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: in-sourcing

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: include

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: incompatible

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: indefinitely

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: information

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: inputted

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: just

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: lastmodifiedtimes

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: latest

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: manifest

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: miscellaneous

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: more

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: neither

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: never

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: nonexistent

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: opted

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: outputting

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: params

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: performance

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: preceding

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: presentation

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: project

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: projects

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: protocol

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: related

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: representation

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: res

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: resolverlist

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: resolverset

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: response

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: returned

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: sbt_version

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: scalacheck

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: sentinels

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: separates

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: serves

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: should

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: significant

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: specifically

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: substitute

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: suppress

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: terminal

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: the

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: title

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: transitive

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: version

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: versions

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: want

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: wanting

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* spelling: whether

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* link: sbt Cached Resolution

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

* link: Testing sbt plugins

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

---------

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
This commit is contained in:
Josh Soref 2025-02-04 01:11:28 -05:00 committed by GitHub
parent c60142e061
commit 613eb86447
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
84 changed files with 136 additions and 138 deletions

View File

@ -8,7 +8,7 @@ Create a [fork](https://docs.github.com/en/github/getting-started-with-github/fo
### Branch to work against
sbt uses two or three branches for development:
Use the **default** branch set on Github for bug fixes.
Use the **default** branch set on GitHub for bug fixes.
- Next minor branch: `1.$MINOR.x`, where `$MINOR` is next minor version (e.g. `1.10.x` during 1.9.x series)
- Development branch: `develop`
@ -162,8 +162,7 @@ suite with `sbt testOnly`
Scripted integration tests reside in `sbt-app/src/sbt-test` and are
written using the same testing infrastructure sbt plugin authors can
use to test their own plugins with sbt. You can read more about this
style of tests [here](https://www.scala-sbt.org/1.x/docs/Testing-sbt-plugins).
use to test their own plugins with sbt. You can read more about [Testing sbt plugins](https://www.scala-sbt.org/1.x/docs/Testing-sbt-plugins).
You can run the integration tests with the `sbt scripted` sbt
command. To run a single test, such as the test in

View File

@ -75,7 +75,7 @@ See https://gist.github.com/eed3si9n/82d43acc95a002876d357bd8ad5f40d5
### running sbt with standby
One of the tricky things you come across while profiling is figuring out the process ID,
while wnating to profile the beginning of the application.
while wanting to profile the beginning of the application.
For this purpose, we've added `sbt.launcher.standby` JVM flag.
In the next version of sbt, you should be able to run:

View File

@ -417,7 +417,7 @@ lazy val utilCache = project
.in(file("util-cache"))
.enablePlugins(
ContrabandPlugin,
// we generate JsonCodec only for actionresult.conta
// we generate JsonCodec only for actionresult.contra
JsonCodecPlugin,
)
.dependsOn(utilLogging)

View File

@ -144,7 +144,7 @@ private[sbt] object EvaluateConfigurations {
offset: Int
): LazyClassLoaded[LoadedSbtFile] = {
// TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately do
// detection for which project project manipulations should be applied.
// detection for which project manipulations should be applied.
val name = file match
case file: PathBasedFile => file.toPath.toString
case file => file.id

View File

@ -116,7 +116,7 @@ trait Cont:
/**
* Implementation of a macro that provides a direct syntax for applicative functors and monads.
* It is intended to bcke used in conjunction with another macro that conditions the inputs.
* It is intended to be used in conjunction with another macro that conditions the inputs.
*
* This method processes the Term `t` to find inputs of the form `wrap[A]( input )` This form is
* typically constructed by another macro that pretends to be able to get a value of type `A`

View File

@ -102,7 +102,7 @@ object LineReader {
parser: Parser[?],
terminal: Terminal,
): LineReader = {
// We may want to consider insourcing LineReader.java from jline. We don't otherwise
// We may want to consider in-sourcing LineReader.java from jline. We don't otherwise
// directly need jline3 for sbt.
new LineReader {
override def readLine(prompt: String, mask: Option[Char]): Option[String] = {
@ -243,7 +243,7 @@ private[sbt] object JLine {
// When calling this, ensure that enableEcho has been or will be called.
// TerminalFactory.get will initialize the terminal to disable echo.
@deprecated("Don't use jline.Terminal directly", "1.4.0")
private[sbt] def terminal: jline.Terminal = Terminal.deprecatedTeminal
private[sbt] def terminal: jline.Terminal = Terminal.deprecatedTerminal
/**
* For accessing the JLine Terminal object. This ensures synchronized access as well as

View File

@ -274,7 +274,7 @@ trait Parsers {
* @param close
* the closing character, e.g. '}'
* @return
* a parser for the brace encloosed string.
* a parser for the brace enclosed string.
*/
private[sbt] def braces(open: Char, close: Char): Parser[String] = {
val notDelim = charClass(c => c != open && c != close).*.string

View File

@ -79,7 +79,7 @@ class ParserWithExamplesTest extends UnitSpec {
class parserWithAllExamples extends ParserExample(removeInvalidExamples = false)
case class ParserExample(
examples: Iterable[String] = Set("blue", "yellow", "greeen", "block", "red"),
examples: Iterable[String] = Set("blue", "yellow", "green_", "block", "red"),
maxNumberOfExamples: Int = 25,
removeInvalidExamples: Boolean
) {

View File

@ -49,7 +49,7 @@ public interface Problem {
}
/**
* The possible releated information for the diagnostic being reported.
* The possible related information for the diagnostic being reported.
*
* <p>NOTE: To avoid breaking compatibility we provide a default to account for older Scala
* versions that do not have the concept of "related information".

View File

@ -11,7 +11,7 @@ package xsbti;
import java.util.List;
/**
* A minimal representatin of the `WorkspaceEdit` found in the LSP protocol.
* A minimal representation of the `WorkspaceEdit` found in the LSP protocol.
*
* <p>However it only supports the minimal `changes` to ensure the fixes will work with all clients.
*

View File

@ -33,7 +33,7 @@ object EscHelpers {
* The CSI (control sequence instruction) codes start with ESC + '['. This is for testing the
* second character.
*
* There is an additional CSI (one character) that we could test for, but is not frequnetly used,
* There is an additional CSI (one character) that we could test for, but is not frequently used,
* and we don't check for it.
*
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
@ -193,7 +193,7 @@ object EscHelpers {
* Removes the ansi escape sequences from a string and makes a best attempt at calculating any
* ansi moves by hand. For example, if the string contains a backspace character followed by a
* character, the output string would replace the character preceding the backspaces with the
* character proceding it. This is in contrast to `strip` which just removes all ansi codes
* character preceding it. This is in contrast to `strip` which just removes all ansi codes
* entirely.
*
* @param s

View File

@ -890,7 +890,7 @@ object Terminal {
consoleProgressState.set(progressState)
@deprecated("For compatibility only", "1.4.0")
private[sbt] def deprecatedTeminal: jline.Terminal = console.toJLine
private[sbt] def deprecatedTerminal: jline.Terminal = console.toJLine
private[util] class ConsoleTerminal(
in: WriteableInputStream,
out: OutputStream,

View File

@ -111,7 +111,7 @@ private[util] class WindowsInputStream(term: org.jline.terminal.Terminal, in: In
case 0x7a /* VK_F11 */ => getCapability(Capability.key_f11)
case 0x7b /* VK_F12 */ => getCapability(Capability.key_f12)
// VK_END, VK_INSERT and VK_DELETE are not in the ansi key bindings so we
// have to manually apply the the sequences here and in JLine3.wrap
// have to manually apply the sequences here and in JLine3.wrap
case 0x23 /* VK_END */ =>
Option(getCapability(Capability.key_end)).getOrElse("\u001B[4~")
case 0x2d /* VK_INSERT */ =>

View File

@ -149,7 +149,7 @@ object InterfaceUtil {
sev: Severity,
rendered: Option[String],
diagnosticCode: Option[DiagnosticCode],
diagnosticRelatedInforamation: List[DiagnosticRelatedInformation]
diagnosticRelatedInformation: List[DiagnosticRelatedInformation]
): Problem =
problem(
cat,
@ -158,7 +158,7 @@ object InterfaceUtil {
sev,
rendered,
diagnosticCode,
diagnosticRelatedInforamation,
diagnosticRelatedInformation,
List.empty[Action],
)

View File

@ -43,7 +43,7 @@ class ManagedLoggerSpec extends AnyFlatSpec with Matchers {
}
val after = System.currentTimeMillis()
log.info(s"Peformance test took: ${after - before}ms")
log.info(s"Performance test took: ${after - before}ms")
}
it should "support logging Throwable out of the box" in {

View File

@ -336,7 +336,7 @@ val root = (project in file(".")).
else Seq[(File, String)](base.getParentFile / "LICENSE" -> "LICENSE", base / "NOTICE" -> "NOTICE")
},
// Misccelaneous publishing stuff...
// Miscellaneous publishing stuff...
projectID in Debian := {
val m = moduleID.value
m.copy(revision = (version in Debian).value)

View File

@ -2,7 +2,7 @@
#
# This script will upload an sbt distribution (tar/tgz/msi and
# checksump files) to IBM Cloud Object Storage with the correct
# checksum files) to IBM Cloud Object Storage with the correct
# permissions, and prepare the shortened URLs on the "piccolo.link"
# Polr server.
#

View File

@ -5,7 +5,7 @@
// DO NOT EDIT MANUALLY
package sbt.librarymanagement
/**
* An instance of maven CACHE directory. You cannot treat a cache directory the same as a a remote repository because
* An instance of maven CACHE directory. You cannot treat a cache directory the same as a remote repository because
* the metadata is different (see Aether ML discussion).
*/
final class MavenCache private (

View File

@ -35,7 +35,7 @@ package sbt.librarymanagement
*
* The hyphen range like `1.2.3 - 4.5.6` matches inclusive set of versions.
* So `1.2.3 - 4.5.6` is equivalent to `>=1.2.3 <=4.5.6`.
* Both sides of comparators around - are required and they can not have any operators.
* Both sides of comparators around - are required and they cannot have any operators.
* For example, `>=1.2.3 - 4.5.6` is invalid.
*
* The order of versions basically follows the rule specified in https://semver.org/#spec-item-11
@ -52,7 +52,7 @@ package sbt.librarymanagement
* > Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0.
*
* The differences from the original specification are following
* - `SemanticVersionSelector` separetes the pre-release fields by hyphen instead of dot
* - `SemanticVersionSelector` separates the pre-release fields by hyphen instead of dot
* - hyphen cannot be used in pre-release identifiers because it is used as separator for pre-release fields
*
* Therefore, in order to match pre-release versions like `1.0.0-beta`

View File

@ -9,7 +9,7 @@ package sbt.librarymanagement
Otherwise, dependencies are used directly from the cache.
* @param missingOk If set to true, it ignores when artifacts are missing.
This setting could be uses when retrieving source/javadocs jars opportunistically.
* @param logging Logging setting used specifially for library management.
* @param logging Logging setting used specifically for library management.
* @param logicalClock The clock that may be used for caching.
* @param metadataDirectory The base directory that may be used to store metadata.
*/

View File

@ -31,7 +31,7 @@
{
"name": "logging",
"doc": [
"Logging setting used specifially for library management."
"Logging setting used specifically for library management."
],
"type": "sbt.librarymanagement.UpdateLogging",
"default": "sbt.librarymanagement.UpdateLogging.Default",
@ -602,7 +602,7 @@
"target": "Scala",
"type": "record",
"doc": [
"An instance of maven CACHE directory. You cannot treat a cache directory the same as a a remote repository because",
"An instance of maven CACHE directory. You cannot treat a cache directory the same as a remote repository because",
"the metadata is different (see Aether ML discussion)."
],
"fields": [

View File

@ -54,7 +54,7 @@
"",
"The hyphen range like `1.2.3 - 4.5.6` matches inclusive set of versions.",
"So `1.2.3 - 4.5.6` is equivalent to `>=1.2.3 <=4.5.6`.",
"Both sides of comparators around - are required and they can not have any operators.",
"Both sides of comparators around - are required and they cannot have any operators.",
"For example, `>=1.2.3 - 4.5.6` is invalid.",
"",
"The order of versions basically follows the rule specified in https://semver.org/#spec-item-11",
@ -71,7 +71,7 @@
"> Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0.",
"",
"The differences from the original specification are following",
"- `SemanticVersionSelector` separetes the pre-release fields by hyphen instead of dot",
"- `SemanticVersionSelector` separates the pre-release fields by hyphen instead of dot",
"- hyphen cannot be used in pre-release identifiers because it is used as separator for pre-release fields",
"",
"Therefore, in order to match pre-release versions like `1.0.0-beta`",

View File

@ -18,10 +18,10 @@ private[librarymanagement] abstract class SemSelAndChunkFunctions {
val (before, after) = comparatorTokens.splitAt(hyphenIndex)
(before.lastOption, after.drop(1).headOption) match {
case (Some(fromStr), Some(toStr)) =>
// from and to can not have an operator.
// from and to cannot have an operator.
if (hasOperator(fromStr) || hasOperator(toStr)) {
throw new IllegalArgumentException(
s"Invalid ' - ' range, both side of comparators can not have an operator: $fromStr - $toStr"
s"Invalid ' - ' range, both side of comparators cannot have an operator: $fromStr - $toStr"
)
}
val from = SemComparator(fromStr)
@ -124,7 +124,7 @@ private[librarymanagement] abstract class SemComparatorExtra {
val cmp = (ts1head.matches("\\d+"), ts2head.matches("\\d+")) match {
// Identifiers consisting of only digits are compared numerically.
// Numeric identifiers always have lower precedence than non-numeric identifiers.
// Identifiers with letters are compared case insensitive lexical order.
// Identifiers with letters are compared case-insensitive lexical order.
case (true, true) => implicitly[Ordering[Long]].compare(ts1head.toLong, ts2head.toLong)
case (false, true) => 1
case (true, false) => -1

View File

@ -54,7 +54,7 @@ object VersionRange {
case "+" => "[0,)"
case DotPlusPattern(base) => plusRange(base)
// This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so
// we assume version ranges never go beyond 5 siginificant digits.
// we assume version ranges never go beyond 5 significant digits.
case NumPlusPattern(tail) => (0 until maxDigit).map(plusRange(tail, _)).mkString(",")
case DotNumPlusPattern(base, tail) =>
(0 until maxDigit).map(plusRange(base + "." + tail, _)).mkString(",")

View File

@ -71,7 +71,7 @@ class DependencyResolution private[sbt] (lmEngine: DependencyResolutionInterface
* Returns a `ModuleDescriptor` that depends on `dependencyId`.
*
* @param dependencyId The module to depend on.
* @param scalaModuleInfo The information about the Scala verson used, if any.
* @param scalaModuleInfo The information about the Scala version used, if any.
* @return A `ModuleDescriptor` that depends on `dependencyId`.
*/
def wrapDependencyInModule(

View File

@ -59,7 +59,8 @@ object EvictionError {
)
}
val incompatibleEvictions: mutable.ListBuffer[(EvictionPair, String)] = mutable.ListBuffer()
val assumedIncompatEvictions: mutable.ListBuffer[(EvictionPair, String)] = mutable.ListBuffer()
val assumedIncompatibleEvictions: mutable.ListBuffer[(EvictionPair, String)] =
mutable.ListBuffer()
val sbvOpt = module.scalaModuleInfo.map(_.scalaBinaryVersion)
val userDefinedSchemes: Map[(String, String), String] = Map(schemes flatMap { s =>
val organization = s.organization
@ -123,7 +124,7 @@ object EvictionError {
else assumedVersionSchemeJava
if (hasIncompatibleVersionForScheme(assumedScheme))
assumedIncompatEvictions += (p -> assumedScheme)
assumedIncompatibleEvictions += (p -> assumedScheme)
}
case _ => ()
@ -131,7 +132,7 @@ object EvictionError {
new EvictionError(
incompatibleEvictions.toList,
assumedIncompatEvictions.toList,
assumedIncompatibleEvictions.toList,
)
}

View File

@ -68,7 +68,7 @@ trait PublisherInterface {
}
/**
* Decribes the representation of a module, including its dependencies
* Describes the representation of a module, including its dependencies
* and the version of Scala it uses, if any.
*/
trait ModuleDescriptor {

View File

@ -14,7 +14,7 @@ class SbtCoursierCache {
private val resolutionsCache =
new ConcurrentHashMap[ResolutionKey, Map[Configuration, Resolution]]
// these may actually not need to be cached any more, now that the resolutions
// these may actually not need to be cached anymore, now that the resolutions
// are cached
private val reportsCache = new ConcurrentHashMap[ReportKey, UpdateReport]

View File

@ -41,11 +41,11 @@ object ReplaceMavenConfigurationMappings {
def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = {
if (isOptional) {
dd.addDependencyConfiguration("optional", "compile(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("optional", "master(compile)")
} else {
dd.addDependencyConfiguration("compile", "compile(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("compile", "master(compile)")
dd.addDependencyConfiguration("runtime", "runtime(*)")
}
@ -60,13 +60,13 @@ object ReplaceMavenConfigurationMappings {
dd.addDependencyConfiguration("optional", "compile(*)")
dd.addDependencyConfiguration("optional", "provided(*)")
dd.addDependencyConfiguration("optional", "runtime(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("optional", "master(compile)")
} else {
dd.addDependencyConfiguration("provided", "compile(*)")
dd.addDependencyConfiguration("provided", "provided(*)")
dd.addDependencyConfiguration("provided", "runtime(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("provided", "master(compile)")
}
}
@ -80,12 +80,12 @@ object ReplaceMavenConfigurationMappings {
if (isOptional) {
dd.addDependencyConfiguration("optional", "compile(*)")
dd.addDependencyConfiguration("optional", "provided(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("optional", "master(compile)")
} else {
dd.addDependencyConfiguration("runtime", "compile(*)")
dd.addDependencyConfiguration("runtime", "runtime(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("runtime", "master(compile)")
}
}
@ -97,7 +97,7 @@ object ReplaceMavenConfigurationMappings {
new PomModuleDescriptorBuilder.ConfMapper {
def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = {
dd.addDependencyConfiguration("test", "runtime(*)")
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("test", "master(compile)")
}
}
@ -107,7 +107,7 @@ object ReplaceMavenConfigurationMappings {
"system",
new PomModuleDescriptorBuilder.ConfMapper {
def addMappingConfs(dd: DefaultDependencyDescriptor, isOptional: Boolean): Unit = {
// FIX - Here we take a mroe conservative approach of depending on the compile configuration if master isn't there.
// FIX - Here we take a more conservative approach of depending on the compile configuration if master isn't there.
dd.addDependencyConfiguration("system", "master(compile)")
}
}

View File

@ -77,7 +77,7 @@ final class IvySbt(
Message.setDefaultLogger(originalLogger)
}
}
// Ivy is not thread-safe nor can the cache be used concurrently.
// Ivy is neither thread-safe nor can the cache be used concurrently.
// If provided a GlobalLock, we can use that to ensure safe access to the cache.
// Otherwise, we can at least synchronize within the JVM.
// For thread-safety in particular, Ivy uses a static DocumentBuilder, which is not thread-safe.

View File

@ -341,8 +341,8 @@ object IvyActions {
}
/**
* Resolves and retrieves a module with a cache mechanism defined
* <a href="http://www.scala-sbt.org/0.13/docs/Cached-Resolution.html">here</a>.
* Resolves and retrieves a module with a cache mechanism defined in
* <a href="https://www.scala-sbt.org/1.x/docs/Cached-Resolution.html">sbt Cached Resolution</a>.
*
* It's the cached version of [[resolveAndRetrieve]].
*

View File

@ -56,7 +56,7 @@ class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]
r
}
// this resolver nevers locates artifacts, only resolves dependencies
// this resolver never locates artifacts, only resolves dependencies
def exists(artifact: IArtifact) = false
def locate(artifact: IArtifact) = null
def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport = {

View File

@ -135,7 +135,7 @@ private[sbt] final case class MergedDescriptors(a: DependencyDescriptor, b: Depe
)
addConfigurations(dd, ArraySeq.unsafeWrapArray(a.getModuleConfigurations))
// If the dependency descriptor is empty, then it means that it has been created from a POM file. In this case,
// it is correct to create a seemingly non-existent dependency artifact.
// it is correct to create a seemingly nonexistent dependency artifact.
if (a.getAllDependencyArtifacts.isEmpty) Array(dd)
else a.getAllDependencyArtifacts filter (_ == dd)
}

View File

@ -406,7 +406,7 @@ private[sbt] case class SbtChainResolver(
}
}
/** Ported from BasicResolver#findFirstAirfactRef. */
/** Ported from BasicResolver#findFirstArtifactRef. */
private def findFirstArtifactRef(
md: ModuleDescriptor,
data: ResolveData,

View File

@ -51,7 +51,7 @@ object PomExtraDependencyAttributes {
* Mutates the to collection with the extra dependency attributes from the incoming pom properties list.
*
* @param from The properties directly off a maven POM file
* @param to The aaether properties where we can write whatever we want.
* @param to The aether properties where we can write whatever we want.
*
* TODO - maybe we can just parse this directly here. Note the `readFromAether` method uses
* whatever we set here.

View File

@ -45,7 +45,7 @@ object ManagedChecksumsSpec extends BaseIvySpecification {
assert(shaFile.exists(), s"The checksum $Checksum for $file does not exist")
}
test("Managed checksums should should download the checksum files") {
test("Managed checksums should download the checksum files") {
cleanAll()
val updateOptions = UpdateOptions()
val toResolve = module(defaultModuleId, dependencies, None, updateOptions)

View File

@ -44,7 +44,7 @@ object PlatformResolutionSpec extends BaseIvySpecification {
)
}
test("None platform can specify .platform(sjs1) depenency") {
test("None platform can specify .platform(sjs1) dependency") {
cleanCache()
val m = module(
exampleModuleId("0.6.0"),
@ -60,7 +60,7 @@ object PlatformResolutionSpec extends BaseIvySpecification {
)
}
test("sjs1 platform can specify .platform(jvm) depenency") {
test("sjs1 platform can specify .platform(jvm) dependency") {
cleanCache()
val m = module(
exampleModuleId("0.6.0"),

View File

@ -25,7 +25,7 @@ abstract class ResolutionSpec extends AbstractEngineSpec {
assert(report.configurations.size == 3)
}
test("Resolving the unsolvable module should should not work") {
test("Resolving the unsolvable module should not work") {
// log.setLevel(Level.Debug)
val m = module(
exampleModuleId("0.2.0"),

View File

@ -41,10 +41,10 @@ object DotGraph {
val mappings =
for {
(dependsOn, dependants) <- mappedGraph.toSeq
dependant <- dependants
if dependant != dependsOn && !dependsOn.isEmpty && !dependant.isEmpty
} yield "\"" + dependant + "\" -> \"" + dependsOn + "\""
(dependsOn, dependents) <- mappedGraph.toSeq
dependent <- dependents
if dependent != dependsOn && !dependsOn.isEmpty && !dependent.isEmpty
} yield "\"" + dependent + "\" -> \"" + dependsOn + "\""
val lines =
("digraph " + graphName + " {") +:

View File

@ -77,7 +77,7 @@ object Mapper:
* }}}
*
* @param baseDirectory The directory that should be turned into a mappings sequence.
* @return mappings - The `basicDirectory`'s contents exlcuding `basicDirectory` itself
* @return mappings - The `basicDirectory`'s contents excluding `basicDirectory` itself
*/
def contentOf(baseDirectory: File)(using conv: FileConverter): Seq[(VirtualFile, String)] =
(PathFinder(baseDirectory).allPaths --- PathFinder(baseDirectory))

View File

@ -40,8 +40,6 @@ import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef
object Pkg:
def JarManifest(m: Manifest) = PackageOption.JarManifest(m)
def MainClass(mainClassName: String) = PackageOption.MainClass(mainClassName)
def MainfestAttributes(attributes: (Attributes.Name, String)*) =
PackageOption.ManifestAttributes(attributes*)
def ManifestAttributes(attributes: (String, String)*) = {
val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value)
PackageOption.ManifestAttributes(converted*)

View File

@ -542,8 +542,8 @@ object Tests {
sequence(tl, out :: acc)
}
}
sequence(results.toList, List()) map { ress =>
val (rs, ms) = ress.unzip { e =>
sequence(results.toList, List()) map { res =>
val (rs, ms) = res.unzip { e =>
(e.overall, e.events)
}
val m = ms reduce { (m1: Map[String, SuiteResult], m2: Map[String, SuiteResult]) =>

View File

@ -195,7 +195,7 @@ object BasicCommands {
val multiCmdParser: Parser[String] = token(';') ~> OptSpace ~> cmdParser
/*
* We accept empty commands at the end of the the list as an implementation detail that allows
* We accept empty commands at the end of the list as an implementation detail that allows
* for a trailing semi-colon without an extra parser since the cmdParser accepts an empty string
* and the multi parser is `token(';') ~ cmdParser`. We do not want to accept empty commands
* that occur in the middle of the sequence so if we find one, we return a failed parser. If

View File

@ -502,7 +502,7 @@ class NetworkClient(
}
/** Called on the response for a returning message. */
def onReturningReponse(msg: JsonRpcResponseMessage): Unit = {
def onReturningResponse(msg: JsonRpcResponseMessage): Unit = {
def printResponse(): Unit = {
msg.result match {
case Some(result) =>

View File

@ -31,7 +31,7 @@ object ServerHandler {
lazy val fallback: ServerHandler = ServerHandler({ handler =>
ServerIntent(
onRequest = { case x => handler.log.debug(s"Unhandled request received: ${x.method}: $x") },
onResponse = { case x => handler.log.debug(s"Unhandled responce received") },
onResponse = { case x => handler.log.debug(s"Unhandled response received") },
onNotification = { case x =>
handler.log.debug(s"Unhandled notification received: ${x.method}: $x")
},

View File

@ -118,7 +118,7 @@ object InputWrapper:
// untyped trees under typed trees, as the type checker doesn't descend if `tree.tpe == null`.
//
// #1031 The previous attempt to fix this just set the type on `tree`, which worked in cases when the
// call to `.value` was inside a the task macro and eliminated before the end of the typer phase.
// call to `.value` was inside a task macro and eliminated before the end of the typer phase.
// But, if a "naked" call to `.value` left the typer, the superaccessors phase would freak out when
 // it hit the untyped trees, before we could get to refchecks and the desired @compileTimeOnly warning.
val typedTree = c.typecheck(tree)
@ -133,7 +133,7 @@ object InputWrapper:
c.abort(
pos,
"""`value` is removed from input tasks. Use `evaluated` or `inputTaskValue`.
|See https://www.scala-sbt.org/1.0/docs/Input-Tasks.html for more details.""".stripMargin
|See https://www.scala-sbt.org/1.x/docs/Input-Tasks.html for more details.""".stripMargin
)
}
InputWrapper.wrapInit[A1](c)(ts, pos)

View File

@ -2239,7 +2239,7 @@ object Defaults extends BuildCommon {
val store = analysisStore(compileAnalysisFile)
val contents = store.unsafeGet()
if (exportP) {
// this stores the eary analysis (again) in case the subproject contains a macro
// this stores the early analysis (again) in case the subproject contains a macro
setup.earlyAnalysisStore.toOption map { earlyStore =>
earlyStore.set(contents)
}
@ -2822,7 +2822,7 @@ object Classpaths {
val vf = converter.toVirtualFile(p)
FileStamp(stamper.library(vf)).map(p -> _)
},
// Note: invoking this task from shell would block indefinately because it will
// Note: invoking this task from shell would block indefinitely because it will
// wait for the upstream compilation to start.
dependencyPicklePath := {
// This is a conditional task. Do not refactor.
@ -3498,17 +3498,17 @@ object Classpaths {
}
)
def warnResolversConflict(ress: Seq[Resolver], log: Logger): Unit = {
val resset = ress.toSet
for ((name, r) <- resset groupBy (_.name) if r.size > 1) {
def warnResolversConflict(resolverList: Seq[Resolver], log: Logger): Unit = {
val resolverSet = resolverList.toSet
for ((name, r) <- resolverSet groupBy (_.name) if r.size > 1) {
log.warn(
"Multiple resolvers having different access mechanism configured with same name '" + name + "'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`)."
)
}
}
private[sbt] def errorInsecureProtocol(ress: Seq[Resolver], log: Logger): Unit = {
val bad = !ress.forall(!_.validateProtocol(log))
private[sbt] def errorInsecureProtocol(resolverList: Seq[Resolver], log: Logger): Unit = {
val bad = !resolverList.forall(!_.validateProtocol(log))
if (bad) {
sys.error("insecure protocol is unsupported")
}
@ -3717,7 +3717,7 @@ object Classpaths {
val pluginClasspath = unit.plugins.pluginData.dependencyClasspath.toVector
// Exclude directories: an approximation to whether they've been published
// Note: it might be a redundant legacy from sbt 0.13/1.x times where the classpath contained directories
// but it's left jsut in case
// but it's left just in case
val pluginJars = pluginClasspath.filter: x =>
!Files.isDirectory(converter.toPath(x.data))
val pluginIDs: Vector[ModuleID] = pluginJars.flatMap(_.get(moduleIDStr).map: str =>

View File

@ -140,7 +140,8 @@ private[sbt] object TemplateCommandUtil {
private def getInterfaceClass(name: String, loader: ClassLoader) =
Class.forName(name, true, loader)
// Cache files under ~/.sbt/0.13/templates/org_name_version
// sbt_version is typically 0.13 or 1.0
// Cache files under ~/.sbt/sbt_version/templates/org_name_version
private def classpathForInfo(
info: TemplateResolverInfo,
ivyConf: IvyConfiguration,

View File

@ -52,7 +52,7 @@ trait Terminal {
/**
* Sets the mode of the terminal. By default,the terminal will be in canonical mode
* with echo enabled. This means that the terminal's inputStream will not return any
* bytes until a newline is received and that all of the characters inputed by the
* bytes until a newline is received and that all of the characters inputted by the
* user will be echoed to the terminal's output stream.
*
* @param canonical toggles whether or not the terminal input stream is line buffered

View File

@ -382,7 +382,7 @@ $SwitchCommand [<scala-version>=]<scala-home>[!] [-v] [<command>]
<scala-version> may be an actual Scala version such as 3.1.3, or a Semantic Version selector
pattern such as 2.13.x. Only subprojects that are listed to match the version pattern
have their Scala version switched. If ! is supplied, then all projects projects have
have their Scala version switched. If ! is supplied, then all projects have
their Scala version switched.
If -v is supplied, verbose logging of the Scala version switching is done.

View File

@ -197,7 +197,7 @@ class ClassStamper(
relations
.products(sourceFile)
.map(stampVf)
// TODO: substitue the above with
// TODO: substitute the above with
// val classDigests = relations.productClassName
// .reverse(className)
// .flatMap: prodClassName =>

View File

@ -1462,7 +1462,7 @@ private[sbt] object Load {
/**
* Creates a classloader with a hierarchical structure, where the parent
* classloads the dependency classpath and the return classloader classloads
* class loads the dependency classpath and the returned classloader class loads
* the definition classpath.
*
* @param config The configuration for the whole sbt build.

View File

@ -57,7 +57,7 @@ object DOT:
.map(m => Edge(m.id, m.id.copy(version = m.evictedByVersion.get)))
// remove edges to new evicted-by module which is now replaced by a chain
// dependend -> [evicted] -> dependee
// dependent -> [evicted] -> dependee
val evictionTargetEdges =
graph.edges.collect {
case edge @ (from, evicted) if targetWasEvicted(edge) =>

View File

@ -173,10 +173,10 @@ object FileStamp {
case Some(js) =>
unbuilder.beginObject(js)
val hashes = unbuilder.readField("hashes")(using seqPathHashJsonFormatter)
val lastModifieds =
val lastModifiedTimes =
unbuilder.readField("lastModifiedTimes")(using seqPathLastModifiedJsonFormatter)
unbuilder.endObject()
hashes ++ lastModifieds
hashes ++ lastModifiedTimes
case None =>
deserializationError("Expected JsObject but found None")
}

View File

@ -160,7 +160,7 @@
### Configurable Scala compiler bridge
sbt 0.13.11 adds `scalaCompilerBridgeSource` setting to specify the compiler brigde source. This allows different implementation of the bridge for Scala versions, and also allows future versions of Scala compiler implementation to diverge. The source module will be retrieved using library management configured by `bootIvyConfiguration` task.
sbt 0.13.11 adds `scalaCompilerBridgeSource` setting to specify the compiler bridge source. This allows different implementation of the bridge for Scala versions, and also allows future versions of Scala compiler implementation to diverge. The source module will be retrieved using library management configured by `bootIvyConfiguration` task.
[#2106][2106]/[#2197][2197]/[#2336][2336] by [@Duhemm][@Duhemm]

View File

@ -173,7 +173,7 @@ To display all eviction warnings with caller information, run `evicted` task.
### Latest SNAPSHOTs
sbt 0.13.6 adds a new setting key called `updateOptions` for customizing the details of managed dependency resolution with `update` task. One of its flags is called `lastestSnapshots`, which controls the behavior of the chained resolver. Up until 0.13.6, sbt was picking the first `-SNAPSHOT` revision it found along the chain. When `latestSnapshots` is enabled (default: `true`), it will look into all resolvers on the chain, and compare them using the publish date.
sbt 0.13.6 adds a new setting key called `updateOptions` for customizing the details of managed dependency resolution with `update` task. One of its flags is called `latestSnapshots`, which controls the behavior of the chained resolver. Up until 0.13.6, sbt was picking the first `-SNAPSHOT` revision it found along the chain. When `latestSnapshots` is enabled (default: `true`), it will look into all resolvers on the chain, and compare them using the publish date.
The tradeoff is probably a longer resolution time if you have many remote repositories on the build or you live away from the servers. So here's how to disable it:

View File

@ -31,7 +31,7 @@
[1602]: https://github.com/sbt/sbt/pull/1602
[1606]: https://github.com/sbt/sbt/issues/1606
[1607]: https://github.com/sbt/sbt/pull/1607
[1611]: Https://github.com/sbt/sbt/issues/1611
[1611]: https://github.com/sbt/sbt/issues/1611
[1618]: https://github.com/sbt/sbt/pull/1618
[1621]: https://github.com/sbt/sbt/pull/1621
[1631]: https://github.com/sbt/sbt/pull/1631

View File

@ -69,7 +69,7 @@ See [Migrating from sbt 0.13.x][Migrating-from-sbt-013x] also.
- Add logging of the name of the different `build.sbt` (matching `*.sbt`) files used. [#1911][1911] by [@valydia][@valydia]
- Add the ability to call `aggregate` for the current project inside a build sbt file. By [@xuwei-k][@xuwei-k]
- Add new global setting `asciiGraphWidth` that controls the maximum width of the ASCII graphs printed by commands like `inspect tree`. Default value corresponds to the previously hardcoded value of 40 characters. By [@RomanIakovlev][@RomanIakovlev].
- Revamped documentation for [Scopes](www.scala-sbt.org/0.13/docs/Scopes.html), and added [Scope Delegation](www.scala-sbt.org/0.13/docs/Scope-Delegation.html). [@eed3si9n][@eed3si9n]
- Revamped documentation for [Scopes](https://www.scala-sbt.org/0.13/docs/Scopes.html), and added [Scope Delegation](https://www.scala-sbt.org/0.13/docs/Scope-Delegation.html). [@eed3si9n][@eed3si9n]
- Adds support for cross-versioned exclusions. [#1518][1518]/[lm#88][lm88] by [@jvican][@jvican]
- Adds new offline mode to the Ivy-based library management. [lm#92][lm92] by [@jvican][@jvican]
- A number of features related to dependency locking. See below.
@ -96,7 +96,7 @@ See [Migrating from sbt 0.13.x][Migrating-from-sbt-013x] also.
A major improvement brought into Zinc 1.0 by Grzegorz Kossakowski (commissioned by Lightbend) is class-based name hashing, which will speed up the incremental compilation of Scala in large projects.
Zinc 1.0's name hashing tracks your code dependendencies at the class level, instead of at the source file level. The GitHub issue [sbt/sbt#1104](https://github.com/sbt/sbt/issues/1104) lists some comparisons of adding a method to an existing class in some projects:
Zinc 1.0's name hashing tracks your code dependencies at the class level, instead of at the source file level. The GitHub issue [sbt/sbt#1104](https://github.com/sbt/sbt/issues/1104) lists some comparisons of adding a method to an existing class in some projects:
```
ScalaTest AndHaveWord class: Before 49s, After 4s (12x)
@ -153,7 +153,7 @@ The static validation also catches if you forget to call `.value` in a body of a
#### Eviction warning presentation
sbt 1.0 improves the eviction warning presetation.
sbt 1.0 improves the eviction warning presentation.
Before:
@ -240,7 +240,7 @@ and [lm#104][lm104] by [@eed3si9n][@eed3si9n].
#### Binary format for Zinc's internal storage
Jorge ([@jvican][@jvican]) from Scala Center contributed a binary format for Zinc's internal storage using Google Procol Buffer.
Jorge ([@jvican][@jvican]) from Scala Center contributed a binary format for Zinc's internal storage using Google Protocol Buffer.
The new format provides us with three main advantages:
1. Backwards and forwards binary compatibility at the analysis format level.

View File

@ -5,7 +5,7 @@ This is a hotfix release for sbt 1.0.x series.
- Fixes undercompilation of value classes when the underlying type changes. [zinc#444][zinc444] by [@smarter][@smarter]
- Fixes `ArrayIndexOutOfBoundsException` on Ivy when running on Java 9. [ivy#27][ivy27] by [@xuwei-k][@xuwei-k]
- Fixes Java 9 warning by upgrading to launcher 1.0.2. [ivy#26][ivy26]/[launcher#45][launcher45] by [@dwijnand][@dwijnand]
- Fixes `run` outputing debug level logs. [#3655][3655]/[#3717][3717] by [@cunei][@cunei]
- Fixes `run` outputting debug level logs. [#3655][3655]/[#3717][3717] by [@cunei][@cunei]
- Fixes performance regression caused by classpath hashing. [zinc#452][zinc452] by [@jvican][@jvican]
- Fixes performance regression of `testQuick`. [#3680][3680]/[#3720][3720] by [@OlegYch][@OlegYch]
- Disables Ivy log4j caller location calculation for performance regression reported in [#3711][3711]. [util#132][util132] by [@leonardehrenfried][@leonardehrenfried]

View File

@ -32,7 +32,7 @@
- Preserves JAR order in `ScalaInstance.otherJars`. [zinc#411][zinc411] by [@dwijnand][@dwijnand]
- Fixes used name when it contains NL. [zinc#449][zinc449] by [@jilen][@jilen]
- Fixes handling of `ThisProject`. [#3609][3609] by [@dwijnand][@dwijnand]
- Escapes imports from sbt files, so if user creates a backquoted definition then task evalution will not fail. [#3635][3635] by [@panaeon][@panaeon]
- Escapes imports from sbt files, so if user creates a backquoted definition then task evaluation will not fail. [#3635][3635] by [@panaeon][@panaeon]
- Removes reference to version 0.14.0 from a warning message. [#3693][3693] by [@saniyatech][@saniyatech]
- Fixes screpl throwing "Not a valid key: console-quick". [#3762][3762] by [@xuwei-k][@xuwei-k]

2
sbt
View File

@ -132,7 +132,7 @@ download_url () {
elif command -v wget > /dev/null; then
wget --quiet -O "$jar" "$url"
else
echoerr "failed to download $url: Neither curl nor wget is avaialble"
echoerr "failed to download $url: Neither curl nor wget is available"
exit 2
fi
} && [[ -f "$jar" ]]

View File

@ -17,7 +17,7 @@
## test + with command or alias
> clean
## for command cross building you do need crossScalaVerions on root
## for command cross building you do need crossScalaVersions on root
> set root/crossScalaVersions := Seq("2.12.20", "2.13.12")
> + build
$ exists target/out/jvm/scala-2.12.20/foo

View File

@ -1,6 +1,6 @@
import complete.Parser
// https://www.scala-sbt.org/0.13/docs/Input-Tasks.html
// https://www.scala-sbt.org/1.x/docs/Input-Tasks.html
val runFoo = inputKey[Unit]("Runs Foo with passed arguments")
val check = taskKey[Unit]("")

View File

@ -1,6 +1,6 @@
import complete.Parser
// https://www.scala-sbt.org/0.13/docs/Input-Tasks.html
// https://www.scala-sbt.org/1.x/docs/Input-Tasks.html
val run2 = inputKey[Unit](
"Runs the main class twice with different argument lists separated by --")

View File

@ -6,7 +6,7 @@ Global / localCacheDirectory := baseDirectory.value / "diskcache"
aa := A()
// This tests that aa is opt'ed out from caching
// This tests that aa is opted out from caching
map1 := (Def.cachedTask {
aa.value
val output1 = StringVirtualFile1("target/out/b1.txt", "foo")

View File

@ -3,10 +3,10 @@ import scala.collection.mutable.ListBuffer
ThisBuild / scalaVersion := "2.9.2"
ThisBuild / version := "0.1-SNAPSHOT"
lazy val justATransiviteDependencyEndpointProject = project
lazy val justATransitiveDependencyEndpointProject = project
lazy val justATransitiveDependencyProject = project
.dependsOn(justATransiviteDependencyEndpointProject)
.dependsOn(justATransitiveDependencyEndpointProject)
lazy val justADependencyProject = project
@ -23,9 +23,9 @@ lazy val test_project = project
| ]
| "justadependencyproject:justadependencyproject_2.9.2:0.1-SNAPSHOT"[shape=box label=<justadependencyproject<BR/><B>justadependencyproject_2.9.2</B><BR/>0.1-SNAPSHOT> style="" penwidth="5" color="#B6E316"]
| "justatransitivedependencyproject:justatransitivedependencyproject_2.9.2:0.1-SNAPSHOT"[shape=box label=<justatransitivedependencyproject<BR/><B>justatransitivedependencyproject_2.9.2</B><BR/>0.1-SNAPSHOT> style="" penwidth="5" color="#0E92BE"]
| "justatransivitedependencyendpointproject:justatransivitedependencyendpointproject_2.9.2:0.1-SNAPSHOT"[shape=box label=<justatransivitedependencyendpointproject<BR/><B>justatransivitedependencyendpointproject_2.9.2</B><BR/>0.1-SNAPSHOT> style="" penwidth="5" color="#9EAD1B"]
| "justatransitivedependencyendpointproject:justatransitivedependencyendpointproject_2.9.2:0.1-SNAPSHOT"[shape=box label=<justatransitivedependencyendpointproject<BR/><B>justatransitivedependencyendpointproject_2.9.2</B><BR/>0.1-SNAPSHOT> style="" penwidth="5" color="#9EAD1B"]
| "test_project:test_project_2.9.2:0.1-SNAPSHOT"[shape=box label=<test_project<BR/><B>test_project_2.9.2</B><BR/>0.1-SNAPSHOT> style="" penwidth="5" color="#C37661"]
| "justatransitivedependencyproject:justatransitivedependencyproject_2.9.2:0.1-SNAPSHOT" -> "justatransivitedependencyendpointproject:justatransivitedependencyendpointproject_2.9.2:0.1-SNAPSHOT"
| "justatransitivedependencyproject:justatransitivedependencyproject_2.9.2:0.1-SNAPSHOT" -> "justatransitivedependencyendpointproject:justatransitivedependencyendpointproject_2.9.2:0.1-SNAPSHOT"
| "test_project:test_project_2.9.2:0.1-SNAPSHOT" -> "justadependencyproject:justadependencyproject_2.9.2:0.1-SNAPSHOT"
| "test_project:test_project_2.9.2:0.1-SNAPSHOT" -> "justatransitivedependencyproject:justatransitivedependencyproject_2.9.2:0.1-SNAPSHOT"
|}

View File

@ -3,10 +3,10 @@ import scala.collection.mutable.ListBuffer
ThisBuild / scalaVersion := "2.9.2"
ThisBuild / version := "0.1-SNAPSHOT"
lazy val justATransiviteDependencyEndpointProject = project
lazy val justATransitiveDependencyEndpointProject = project
lazy val justATransitiveDependencyProject = project
.dependsOn(justATransiviteDependencyEndpointProject)
.dependsOn(justATransitiveDependencyEndpointProject)
lazy val justADependencyProject = project

View File

@ -4,7 +4,7 @@ scalaVersion := "2.13.10"
platform := Platform.sjs1
// By default platformOpt field is set to None
// Given %% lm engines will sustitute it with the subproject's platform suffix on `update`
// Given %% lm engines will substitute it with the subproject's platform suffix on `update`
libraryDependencies ++= Seq(
"com.github.scopt" %% "scopt" % "4.1.0",
"junit" % "junit" % "4.13.1",

View File

@ -7,7 +7,7 @@
// Depending on the version of sbt-plugin-example-diamond, we test different patterns
// of dependencies:
// * Some dependencies were published using the deprecated Maven paths, some with the new
// * Wheter the dependency on sbt-plugin-example-bottom needs conflict resolution or not
// * Whether the dependency on sbt-plugin-example-bottom needs conflict resolution or not
inThisBuild(
Seq(

View File

@ -1,4 +1,4 @@
// The library surves two purposes:
// The library serves two purposes:
// 1. add some non-standard library to the meta-build classpath to later check that it's included into updateSbtClassifiers
// 2. use assertions from junit in custom assertion in `build.sbt` of current scripted test
libraryDependencies += "junit" % "junit" % "4.13.2"

View File

@ -9,13 +9,13 @@ case object MyPlugin extends AutoPlugin {
}
import autoImport._
override def projectSettings: Seq[Def.Setting[_]] = Seq(
// should not produce a "@nowarn annotation does not suppres any warnings" warning
// should not produce a "@nowarn annotation does not suppress any warnings" warning
helloWorld := {
streams.value.log("Hello world")
"Hello world"
},
Compile / compile := {
helloWorld.value // shoult not produce "a pure expression does nothing" warning
helloWorld.value // should not produce "a pure expression does nothing" warning
(Compile / compile).value
}
)

View File

@ -23,6 +23,6 @@ def check(expectation: Boolean) = Def.task[Unit] {
else if (!expectation && contains)
sys.error(s"compiler output still contains warning")
else {
IO.write(lastLog, "") // clear the backing log for for 'last'.
IO.write(lastLog, "") // clear the backing log for 'last'.
}
}

View File

@ -1,4 +1,4 @@
val scalcheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
ThisBuild / scalaVersion := "2.12.20"
@ -6,5 +6,5 @@ lazy val root = (project in file("."))
.settings(
name := "forked-test",
organization := "org.example",
libraryDependencies += scalcheck % Test
libraryDependencies += scalacheck % Test
)

View File

@ -8,9 +8,9 @@ object HelloMacro {
annottees match {
case (classDecl: ClassDef) :: Nil =>
val q"$mods class $name[..$tparams] $ctorMods(...$paramss) extends { ..$earlydefns } with ..$bases { $self => ..$body }" = classDecl
val q"$mods class $name[..$tparams] $ctorMods(...$params) extends { ..$earlydefns } with ..$bases { $self => ..$body }" = classDecl
q"""
case class $name(...$paramss) extends ..$bases {
case class $name(...$params) extends ..$bases {
..$body
def hello = "Hello"
}

View File

@ -1,8 +1,8 @@
val scalcheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
ThisBuild / scalaVersion := "2.12.20"
ThisBuild / version := "0.0.1"
ThisBuild / organization := "org.catastrophe"
libraryDependencies += scalcheck % Test
libraryDependencies += scalacheck % Test
name := "broken"

View File

@ -1,5 +1,5 @@
val scalcheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.14.0"
ThisBuild / scalaVersion := "2.12.20"
Test / parallelExecution := false
libraryDependencies += scalcheck % Test
libraryDependencies += scalacheck % Test

View File

@ -13,10 +13,10 @@ class BadTest {
// * Try to load that same something from the THREAD CONTEXT classloader.
// * Ensure we can do both, i.e. the second used to be filtered and broken.
val system = ActorSystem()
def evilGetThreadExectionContextName =
def evilGetThreadExecutionContextName =
system.asInstanceOf[ActorSystemImpl].internalCallingThreadExecutionContext.getClass.getName
Await.result(system.terminate(), 5.seconds)
val expected = "scala.concurrent.Future$InternalCallbackExecutor$"
Assert.assertEquals("Failed to grab appropriate Akka name", expected, evilGetThreadExectionContextName)
Assert.assertEquals("Failed to grab appropriate Akka name", expected, evilGetThreadExecutionContextName)
}
}

View File

@ -56,7 +56,7 @@ object Build {
Test / checkTriggers := {
val testTriggers = triggers((Test / test / transitiveDynamicInputs).value).toSet
// This validates that since the "test.txt" trigger is only added to the Test / test task,
// that the Test / compile does not pick it up. Both of them pick up the the triggers that
// that the Test / compile does not pick it up. Both of them pick up the triggers that
// are found in the test above for the compile configuration because of the transitive
// classpath dependency that is added in Defaults.internalDependencies.
val compileTriggers = triggers((Test / compile / transitiveDynamicInputs).value).toSet

View File

@ -6,7 +6,7 @@
<title>@title</title>
</head>
<body>
<h1>@tilte</h1>
<h1>@title_</h1>
@for(paragraph <- paragraphs) {
<p>@paragraph</p>
}

View File

@ -641,7 +641,7 @@ class BuildServerTest extends AbstractServerTest {
"build/publishDiagnostics",
"main.scala.html",
""""severity":1""",
"not found: value tilte"
"not found: value title_"
)(message = "should report diagnostic in Twirl file")
IO.write(
testFile,

View File

@ -44,7 +44,7 @@ trait ConcurrentRestrictions {
def valid(g: G): Boolean
}
private[sbt] sealed trait CancelSentiels {
private[sbt] sealed trait CancelSentinels {
def cancelSentinels(): Unit
}
@ -60,8 +60,8 @@ object ConcurrentRestrictions {
}
private[sbt] def cancelAllSentinels() = completionServices.keySet.asScala.toVector.foreach {
case a: CancelSentiels => a.cancelSentinels()
case _ =>
case a: CancelSentinels => a.cancelSentinels()
case _ =>
}
/**
@ -213,12 +213,12 @@ object ConcurrentRestrictions {
tags: ConcurrentRestrictions,
warn: String => Unit,
isSentinel: TaskId[?] => Boolean,
): CompletionService & CancelSentiels & AutoCloseable = {
): CompletionService & CancelSentinels & AutoCloseable = {
// Represents submitted work for a task.
final class Enqueue(val node: TaskId[?], val work: () => Completed)
new CompletionService with CancelSentiels with AutoCloseable {
new CompletionService with CancelSentinels with AutoCloseable {
completionServices.put(this, true)
private val closed = new AtomicBoolean(false)
override def close(): Unit = if (closed.compareAndSet(false, true)) {

View File

@ -534,10 +534,10 @@ trait Init:
val defs = addLocal(rawDefs)(using scopeLocal)
// group derived settings by the key they define
val derivsByDef = new mutable.HashMap[AttributeKey[?], Deriveds]
val derivedsByDef = new mutable.HashMap[AttributeKey[?], Deriveds]
for (s <- derived) {
val key = s.setting.key.key
derivsByDef.getOrElseUpdate(key, new Deriveds(key, new mutable.ListBuffer)).settings += s
derivedsByDef.getOrElseUpdate(key, new Deriveds(key, new mutable.ListBuffer)).settings += s
}
// index derived settings by triggering key. This maps a key to the list of settings potentially derived from it.

View File

@ -49,8 +49,7 @@ object SettingsTest extends Properties("settings") {
forAllNoShrink(Gen.choose(1, 100).label("numSettings")) { derivedSettings }
final def derivedSettings(nr: Int): Prop = {
val genScopedKeys = {
// We wan
// t to generate lists of keys that DO NOT inclue the "ch" key we use to check things.
// We want to generate lists of keys that DO NOT include the "ch" key we use to check things.
val attrKeys = mkAttrKeys[Int](nr).filter(_.forall(_.label != "ch"))
attrKeys map (_ map (ak => ScopedKey(Scope(0), ak)))
}.label("scopedKeys").filter(_.nonEmpty)

View File

@ -18,7 +18,7 @@ final class MissingScalaJar(msg: String, cause: Throwable) extends RuntimeExcept
object MissingScalaJar {
def missingTemplate(missing: String): String =
s"The $missing could not be found in your cache nor downloaded from the Internet."
s"The $missing could neither be found in your cache nor downloaded from the Internet."
def compiler: MissingScalaJar = new MissingScalaJar(missingTemplate("Scala compiler"))
def library: MissingScalaJar = new MissingScalaJar(missingTemplate("Scala library"))
}