mirror of https://github.com/sbt/sbt.git
Merge pull request #442 from adpi2/wip/sbt-2.x
[2.x] Merge develop into wip/sbt-2.x
This commit is contained in:
commit
f8021499c8
|
|
@ -14,6 +14,9 @@ jobs:
|
|||
- os: ubuntu-latest
|
||||
java: 11
|
||||
jobtype: 1
|
||||
- os: ubuntu-latest
|
||||
java: 17
|
||||
jobtype: 1
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
# define Java options for both official sbt and sbt-extras
|
||||
|
|
@ -21,31 +24,15 @@ jobs:
|
|||
JVM_OPTS: -Xms2048M -Xmx2048M -Xss6M -XX:ReservedCodeCacheSize=256M
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v1
|
||||
- name: Setup
|
||||
uses: olafurpg/setup-scala@v10
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup JDK
|
||||
uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: "adopt@1.${{ matrix.java }}"
|
||||
- name: Coursier cache
|
||||
uses: coursier/cache-action@v5
|
||||
- name: Cache sbt
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: $HOME/.sbt
|
||||
key: ${{ runner.os }}-sbt-cache-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
|
||||
- name: Build and test
|
||||
distribution: temurin
|
||||
java-version: "${{ matrix.java }}"
|
||||
cache: sbt
|
||||
- name: Build and test (1)
|
||||
if: ${{ matrix.jobtype == 1 }}
|
||||
shell: bash
|
||||
run: |
|
||||
case ${{ matrix.jobtype }} in
|
||||
1)
|
||||
sbt -v -Dfile.encoding=UTF8 scalafmtCheckAll whitesourceCheckPolicies +test +packagedArtifacts
|
||||
;;
|
||||
*)
|
||||
echo unknown jobtype
|
||||
exit 1
|
||||
esac
|
||||
rm -rf "$HOME/.ivy2/local"
|
||||
find $HOME/Library/Caches/Coursier/v1 -name "ivydata-*.properties" -delete || true
|
||||
find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
|
||||
find $HOME/.ivy2/cache -name "*-LM-SNAPSHOT*" -delete || true
|
||||
find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
|
||||
find $HOME/.sbt -name "*.lock" -delete || true
|
||||
sbt -v -Dfile.encoding=UTF8 scalafmtCheckAll +test +packagedArtifacts
|
||||
|
|
|
|||
|
|
@ -0,0 +1,24 @@
|
|||
name: Scala CLA
|
||||
on: [pull_request]
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Check CLA
|
||||
env:
|
||||
AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
run: |
|
||||
echo "Pull request submitted by $AUTHOR";
|
||||
signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed");
|
||||
if [ "$signed" = "true" ] ; then
|
||||
echo "CLA check for $AUTHOR successful";
|
||||
else
|
||||
echo "CLA check for $AUTHOR failed";
|
||||
echo "Please sign the Scala CLA to contribute to the Scala compiler.";
|
||||
echo "Go to https://www.lightbend.com/contribute/cla/scala and then";
|
||||
echo "comment on the pull request to ask for a new check.";
|
||||
echo "";
|
||||
echo "Check if CLA is signed: https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR";
|
||||
exit 1;
|
||||
fi;
|
||||
|
|
@ -4,3 +4,9 @@ __pycache__
|
|||
|
||||
scripted-test/src/sbt-test/*/*/project/build.properties
|
||||
scripted-test/src/sbt-test/*/*/project/plugins.sbt
|
||||
|
||||
.idea
|
||||
.bloop
|
||||
.metals
|
||||
.bsp/
|
||||
metals.sbt
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
version = 3.2.1
|
||||
version = 3.7.4
|
||||
runner.dialect = scala3
|
||||
|
||||
maxColumn = 100
|
||||
|
|
|
|||
|
|
@ -0,0 +1,204 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright (c) 2023, Scala Center
|
||||
Copyright (c) 2011 - 2022, Lightbend, Inc.
|
||||
Copyright (c) 2008 - 2010, Mark Harrah
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
57
build.sbt
57
build.sbt
|
|
@ -3,10 +3,11 @@ import Path._
|
|||
import com.typesafe.tools.mima.core._, ProblemFilters._
|
||||
|
||||
val _ = {
|
||||
//https://github.com/sbt/contraband/issues/122
|
||||
// https://github.com/sbt/contraband/issues/122
|
||||
sys.props += ("line.separator" -> "\n")
|
||||
}
|
||||
|
||||
Global / semanticdbEnabled := !(Global / insideCI).value
|
||||
Global / semanticdbVersion := "4.7.8"
|
||||
ThisBuild / version := {
|
||||
val old = (ThisBuild / version).value
|
||||
nightlyVersion match {
|
||||
|
|
@ -43,22 +44,23 @@ def commonSettings: Seq[Setting[_]] = Def.settings(
|
|||
scalaVersion := scala3,
|
||||
// publishArtifact in packageDoc := false,
|
||||
resolvers += Resolver.typesafeIvyRepo("releases"),
|
||||
resolvers += Resolver.sonatypeRepo("snapshots"),
|
||||
resolvers ++= Resolver.sonatypeOssRepos("snapshots"),
|
||||
resolvers += Resolver.sbtPluginRepo("releases"),
|
||||
testFrameworks += new TestFramework("verify.runner.Framework"),
|
||||
// concurrentRestrictions in Global += Util.testExclusiveRestriction,
|
||||
testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
|
||||
compile / javacOptions ++= Seq("-Xlint", "-Xlint:-serial"),
|
||||
crossScalaVersions := Seq(scala3),
|
||||
resolvers += Resolver.sonatypeRepo("public"),
|
||||
resolvers ++= Resolver.sonatypeOssRepos("public"),
|
||||
scalacOptions := {
|
||||
val old = scalacOptions.value
|
||||
scalaVersion.value match {
|
||||
case sv if sv.startsWith("2.10") =>
|
||||
old diff List("-Xfuture", "-Ywarn-unused", "-Ywarn-unused-import")
|
||||
case sv if sv.startsWith("2.11") => old ++ List("-Ywarn-unused", "-Ywarn-unused-import")
|
||||
case sv if sv.startsWith("2.12") =>
|
||||
old ++ List("-Ywarn-unused", "-Ywarn-unused-import", "-YdisableFlatCpCaching")
|
||||
old ++ List(
|
||||
"-Ywarn-unused",
|
||||
"-Ywarn-unused-import",
|
||||
"-Ywarn-unused:-nowarn",
|
||||
)
|
||||
case _ => old
|
||||
}
|
||||
},
|
||||
|
|
@ -88,11 +90,11 @@ val mimaSettings = Def settings (
|
|||
"1.3.0",
|
||||
"1.4.0",
|
||||
"1.5.0",
|
||||
) map (
|
||||
version =>
|
||||
organization.value %% moduleName.value % version
|
||||
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
|
||||
),
|
||||
"1.6.0",
|
||||
) map (version =>
|
||||
organization.value %% moduleName.value % version
|
||||
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
|
||||
),
|
||||
)
|
||||
|
||||
lazy val lmRoot = (project in file("."))
|
||||
|
|
@ -119,8 +121,7 @@ lazy val lmCore = (project in file("core"))
|
|||
// scalaReflect.value,
|
||||
// scalaCompiler.value,
|
||||
launcherInterface,
|
||||
gigahorseOkhttp,
|
||||
okhttpUrlconnection,
|
||||
gigahorseApacheHttp,
|
||||
scalaXml,
|
||||
sjsonnewScalaJson.value % Optional,
|
||||
sjsonnew.value % Optional,
|
||||
|
|
@ -262,7 +263,9 @@ lazy val lmCore = (project in file("core"))
|
|||
"sbt.librarymanagement.ResolverFunctions.validateArtifact"
|
||||
),
|
||||
exclude[IncompatibleResultTypeProblem]("sbt.librarymanagement.*.validateProtocol"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.internal.librarymanagement.cross.CrossVersionUtil.TransitionDottyVersion"),
|
||||
exclude[DirectMissingMethodProblem](
|
||||
"sbt.internal.librarymanagement.cross.CrossVersionUtil.TransitionDottyVersion"
|
||||
),
|
||||
exclude[DirectMissingMethodProblem]("sbt.librarymanagement.ScalaArtifacts.dottyID"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.librarymanagement.ScalaArtifacts.DottyIDPrefix"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.librarymanagement.ScalaArtifacts.toolDependencies*"),
|
||||
|
|
@ -279,6 +282,8 @@ lazy val lmIvy = (project in file("ivy"))
|
|||
contrabandSjsonNewVersion := sjsonNewVersion,
|
||||
libraryDependencies ++= Seq(
|
||||
ivy,
|
||||
sjsonnewScalaJson.value,
|
||||
sjsonnew.value,
|
||||
scalaTest % Test,
|
||||
scalaCheck % Test,
|
||||
scalaVerify % Test,
|
||||
|
|
@ -354,6 +359,15 @@ lazy val lmIvy = (project in file("ivy"))
|
|||
"sbt.internal.librarymanagement.CustomPomParser.versionRangeFlag"
|
||||
),
|
||||
exclude[MissingClassProblem]("sbt.internal.librarymanagement.FixedParser*"),
|
||||
exclude[MissingClassProblem]("sbt.internal.librarymanagement.ivyint.GigahorseUrlHandler*"),
|
||||
exclude[MissingClassProblem]("sbt.internal.librarymanagement.JavaNetAuthenticator"),
|
||||
exclude[MissingClassProblem]("sbt.internal.librarymanagement.CustomHttp*"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.internal.librarymanagement.IvySbt.http"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.internal.librarymanagement.IvySbt.this"),
|
||||
exclude[DirectMissingMethodProblem]("sbt.librarymanagement.ivy.IvyPublisher.apply"),
|
||||
exclude[DirectMissingMethodProblem](
|
||||
"sbt.librarymanagement.ivy.IvyDependencyResolution.apply"
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
|
@ -390,16 +404,5 @@ def customCommands: Seq[Setting[_]] = Seq(
|
|||
}
|
||||
)
|
||||
|
||||
inThisBuild(
|
||||
Seq(
|
||||
whitesourceProduct := "Lightbend Reactive Platform",
|
||||
whitesourceAggregateProjectName := "sbt-lm-master",
|
||||
whitesourceAggregateProjectToken := "9bde4ccbaab7401a91f8cda337af84365d379e13abaf473b85cb16e3f5c65cb6",
|
||||
whitesourceIgnoredScopes += "scalafmt",
|
||||
whitesourceFailOnError := sys.env.contains("WHITESOURCE_PASSWORD"), // fail if pwd is present
|
||||
whitesourceForceCheckAllDependencies := true,
|
||||
)
|
||||
)
|
||||
|
||||
def inCompileAndTest(ss: SettingsDefinition*): Seq[Setting[_]] =
|
||||
Seq(Compile, Test) flatMap (inConfig(_)(Def.settings(ss: _*)))
|
||||
|
|
|
|||
|
|
@ -5,20 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait ChainedResolverFormats { self: sbt.librarymanagement.ResolverFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.MavenRepoFormats with
|
||||
sbt.librarymanagement.MavenCacheFormats with
|
||||
sbt.librarymanagement.PatternsFormats with
|
||||
sbt.librarymanagement.FileConfigurationFormats with
|
||||
sbt.librarymanagement.FileRepositoryFormats with
|
||||
sbt.librarymanagement.URLRepositoryFormats with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.SshRepositoryFormats with
|
||||
sbt.librarymanagement.SftpRepositoryFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait ChainedResolverFormats { self: sbt.librarymanagement.ResolverFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val ChainedResolverFormat: JsonFormat[sbt.librarymanagement.ChainedResolver] = new JsonFormat[sbt.librarymanagement.ChainedResolver] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ChainedResolver = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -5,23 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait ConfigurationReportFormats { self: sbt.librarymanagement.ConfigRefFormats with
|
||||
sbt.librarymanagement.ModuleReportFormats with
|
||||
sbt.librarymanagement.ModuleIDFormats with
|
||||
sbt.librarymanagement.ArtifactFormats with
|
||||
sbt.librarymanagement.ChecksumFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.InclExclRuleFormats with
|
||||
sbt.librarymanagement.CrossVersionFormats with
|
||||
sbt.librarymanagement.DisabledFormats with
|
||||
sbt.librarymanagement.BinaryFormats with
|
||||
sbt.librarymanagement.ConstantFormats with
|
||||
sbt.librarymanagement.PatchFormats with
|
||||
sbt.librarymanagement.FullFormats with
|
||||
sbt.librarymanagement.For3Use2_13Formats with
|
||||
sbt.librarymanagement.For2_13Use3Formats with
|
||||
sbt.librarymanagement.CallerFormats with
|
||||
sbt.librarymanagement.OrganizationArtifactReportFormats =>
|
||||
trait ConfigurationReportFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ModuleReportFormats with sbt.librarymanagement.ModuleIDFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.ChecksumFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.InclExclRuleFormats with sbt.librarymanagement.CrossVersionFormats with sbt.librarymanagement.DisabledFormats with sbt.librarymanagement.BinaryFormats with sbt.librarymanagement.ConstantFormats with sbt.librarymanagement.PatchFormats with sbt.librarymanagement.FullFormats with sbt.librarymanagement.For3Use2_13Formats with sbt.librarymanagement.For2_13Use3Formats with sbt.librarymanagement.CallerFormats with sbt.librarymanagement.OrganizationArtifactReportFormats =>
|
||||
implicit lazy val ConfigurationReportFormat: JsonFormat[sbt.librarymanagement.ConfigurationReport] = new JsonFormat[sbt.librarymanagement.ConfigurationReport] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ConfigurationReport = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -5,21 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait ModuleConfigurationFormats { self: sbt.librarymanagement.ResolverFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.ChainedResolverFormats with
|
||||
sbt.librarymanagement.MavenRepoFormats with
|
||||
sbt.librarymanagement.MavenCacheFormats with
|
||||
sbt.librarymanagement.PatternsFormats with
|
||||
sbt.librarymanagement.FileConfigurationFormats with
|
||||
sbt.librarymanagement.FileRepositoryFormats with
|
||||
sbt.librarymanagement.URLRepositoryFormats with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.SshRepositoryFormats with
|
||||
sbt.librarymanagement.SftpRepositoryFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait ModuleConfigurationFormats { self: sbt.librarymanagement.ResolverFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val ModuleConfigurationFormat: JsonFormat[sbt.librarymanagement.ModuleConfiguration] = new JsonFormat[sbt.librarymanagement.ModuleConfiguration] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.ModuleConfiguration = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -6,16 +6,6 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import _root_.sjsonnew.JsonFormat
|
||||
trait PatternsBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.FileConfigurationFormats with
|
||||
sbt.librarymanagement.FileRepositoryFormats with
|
||||
sbt.librarymanagement.URLRepositoryFormats with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.SshRepositoryFormats with
|
||||
sbt.librarymanagement.SftpRepositoryFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait PatternsBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.FileConfigurationFormats with sbt.librarymanagement.FileRepositoryFormats with sbt.librarymanagement.URLRepositoryFormats with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats =>
|
||||
implicit lazy val PatternsBasedRepositoryFormat: JsonFormat[sbt.librarymanagement.PatternsBasedRepository] = flatUnionFormat4[sbt.librarymanagement.PatternsBasedRepository, sbt.librarymanagement.FileRepository, sbt.librarymanagement.URLRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,13 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait PublishConfigurationFormats { self: sbt.librarymanagement.ConfigRefFormats with
|
||||
sbt.librarymanagement.ArtifactFormats with
|
||||
sbt.librarymanagement.UpdateLoggingFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats with
|
||||
sbt.librarymanagement.ChecksumFormats =>
|
||||
trait PublishConfigurationFormats { self: sbt.librarymanagement.ConfigRefFormats with sbt.librarymanagement.ArtifactFormats with sbt.librarymanagement.UpdateLoggingFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val PublishConfigurationFormat: JsonFormat[sbt.librarymanagement.PublishConfiguration] = new JsonFormat[sbt.librarymanagement.PublishConfiguration] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.PublishConfiguration = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -6,19 +6,6 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import _root_.sjsonnew.JsonFormat
|
||||
trait ResolverFormats { self: sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.ChainedResolverFormats with
|
||||
sbt.librarymanagement.MavenRepoFormats with
|
||||
sbt.librarymanagement.MavenCacheFormats with
|
||||
sbt.librarymanagement.PatternsFormats with
|
||||
sbt.librarymanagement.FileConfigurationFormats with
|
||||
sbt.librarymanagement.FileRepositoryFormats with
|
||||
sbt.librarymanagement.URLRepositoryFormats with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.SshRepositoryFormats with
|
||||
sbt.librarymanagement.SftpRepositoryFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait ResolverFormats { self: sjsonnew.BasicJsonProtocol with sbt.librarymanagement.ChainedResolverFormats with sbt.librarymanagement.MavenRepoFormats with sbt.librarymanagement.MavenCacheFormats with sbt.librarymanagement.PatternsFormats with sbt.librarymanagement.FileConfigurationFormats with sbt.librarymanagement.FileRepositoryFormats with sbt.librarymanagement.URLRepositoryFormats with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats =>
|
||||
implicit lazy val ResolverFormat: JsonFormat[sbt.librarymanagement.Resolver] = flatUnionFormat7[sbt.librarymanagement.Resolver, sbt.librarymanagement.ChainedResolver, sbt.librarymanagement.MavenRepo, sbt.librarymanagement.MavenCache, sbt.librarymanagement.FileRepository, sbt.librarymanagement.URLRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,12 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait SftpRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait SftpRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats =>
|
||||
implicit lazy val SftpRepositoryFormat: JsonFormat[sbt.librarymanagement.SftpRepository] = new JsonFormat[sbt.librarymanagement.SftpRepository] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SftpRepository = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -6,13 +6,6 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import _root_.sjsonnew.JsonFormat
|
||||
trait SshBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.SshRepositoryFormats with
|
||||
sbt.librarymanagement.SftpRepositoryFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait SshBasedRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats with sbt.librarymanagement.SshRepositoryFormats with sbt.librarymanagement.SftpRepositoryFormats =>
|
||||
implicit lazy val SshBasedRepositoryFormat: JsonFormat[sbt.librarymanagement.SshBasedRepository] = flatUnionFormat2[sbt.librarymanagement.SshBasedRepository, sbt.librarymanagement.SshRepository, sbt.librarymanagement.SftpRepository]("type")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,10 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait SshConnectionFormats { self: sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait SshConnectionFormats { self: sbt.librarymanagement.SshAuthenticationFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val SshConnectionFormat: JsonFormat[sbt.librarymanagement.SshConnection] = new JsonFormat[sbt.librarymanagement.SshConnection] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SshConnection = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -5,12 +5,7 @@
|
|||
// DO NOT EDIT MANUALLY
|
||||
package sbt.librarymanagement
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait SshRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with
|
||||
sjsonnew.BasicJsonProtocol with
|
||||
sbt.librarymanagement.SshConnectionFormats with
|
||||
sbt.librarymanagement.SshAuthenticationFormats with
|
||||
sbt.librarymanagement.PasswordAuthenticationFormats with
|
||||
sbt.librarymanagement.KeyFileAuthenticationFormats =>
|
||||
trait SshRepositoryFormats { self: sbt.librarymanagement.PatternsFormats with sjsonnew.BasicJsonProtocol with sbt.librarymanagement.SshConnectionFormats with sbt.librarymanagement.SshAuthenticationFormats =>
|
||||
implicit lazy val SshRepositoryFormat: JsonFormat[sbt.librarymanagement.SshRepository] = new JsonFormat[sbt.librarymanagement.SshRepository] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.librarymanagement.SshRepository = {
|
||||
__jsOpt match {
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ final class UpdateReport private (
|
|||
val cachedDescriptor: java.io.File,
|
||||
val configurations: Vector[sbt.librarymanagement.ConfigurationReport],
|
||||
val stats: sbt.librarymanagement.UpdateStats,
|
||||
val stamps: Map[java.io.File, Long]) extends sbt.librarymanagement.UpdateReportExtra with Serializable {
|
||||
val stamps: Map[String, Long]) extends sbt.librarymanagement.UpdateReportExtra with Serializable {
|
||||
|
||||
|
||||
|
||||
|
|
@ -30,7 +30,7 @@ final class UpdateReport private (
|
|||
override def toString: String = {
|
||||
"Update report:\n\t" + stats + "\n" + configurations.mkString
|
||||
}
|
||||
private[this] def copy(cachedDescriptor: java.io.File = cachedDescriptor, configurations: Vector[sbt.librarymanagement.ConfigurationReport] = configurations, stats: sbt.librarymanagement.UpdateStats = stats, stamps: Map[java.io.File, Long] = stamps): UpdateReport = {
|
||||
private[this] def copy(cachedDescriptor: java.io.File = cachedDescriptor, configurations: Vector[sbt.librarymanagement.ConfigurationReport] = configurations, stats: sbt.librarymanagement.UpdateStats = stats, stamps: Map[String, Long] = stamps): UpdateReport = {
|
||||
new UpdateReport(cachedDescriptor, configurations, stats, stamps)
|
||||
}
|
||||
def withCachedDescriptor(cachedDescriptor: java.io.File): UpdateReport = {
|
||||
|
|
@ -42,11 +42,11 @@ final class UpdateReport private (
|
|||
def withStats(stats: sbt.librarymanagement.UpdateStats): UpdateReport = {
|
||||
copy(stats = stats)
|
||||
}
|
||||
def withStamps(stamps: Map[java.io.File, Long]): UpdateReport = {
|
||||
def withStamps(stamps: Map[String, Long]): UpdateReport = {
|
||||
copy(stamps = stamps)
|
||||
}
|
||||
}
|
||||
object UpdateReport {
|
||||
|
||||
def apply(cachedDescriptor: java.io.File, configurations: Vector[sbt.librarymanagement.ConfigurationReport], stats: sbt.librarymanagement.UpdateStats, stamps: Map[java.io.File, Long]): UpdateReport = new UpdateReport(cachedDescriptor, configurations, stats, stamps)
|
||||
def apply(cachedDescriptor: java.io.File, configurations: Vector[sbt.librarymanagement.ConfigurationReport], stats: sbt.librarymanagement.UpdateStats, stamps: Map[String, Long]): UpdateReport = new UpdateReport(cachedDescriptor, configurations, stats, stamps)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ implicit lazy val UpdateReportFormat: JsonFormat[sbt.librarymanagement.UpdateRep
|
|||
val cachedDescriptor = unbuilder.readField[java.io.File]("cachedDescriptor")
|
||||
val configurations = unbuilder.readField[Vector[sbt.librarymanagement.ConfigurationReport]]("configurations")
|
||||
val stats = unbuilder.readField[sbt.librarymanagement.UpdateStats]("stats")
|
||||
val stamps = unbuilder.readField[Map[java.io.File, Long]]("stamps")
|
||||
val stamps = unbuilder.readField[Map[String, Long]]("stamps")
|
||||
unbuilder.endObject()
|
||||
sbt.librarymanagement.UpdateReport(cachedDescriptor, configurations, stats, stamps)
|
||||
case None =>
|
||||
|
|
|
|||
|
|
@ -814,7 +814,7 @@
|
|||
"type": "sbt.librarymanagement.UpdateStats",
|
||||
"doc": [ "stats information about the update that produced this report" ]
|
||||
},
|
||||
{ "name": "stamps", "type": "Map[java.io.File, Long]" }
|
||||
{ "name": "stamps", "type": "Map[String, Long]" }
|
||||
],
|
||||
"toString": "\"Update report:\\n\\t\" + stats + \"\\n\" + configurations.mkString"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ public class SbtPomExtraProperties {
|
|||
public static final String POM_SBT_VERSION = "sbtVersion";
|
||||
public static final String POM_API_KEY = "info.apiURL";
|
||||
public static final String VERSION_SCHEME_KEY = "info.versionScheme";
|
||||
public static final String POM_RELEASE_NOTES_KEY = "info.releaseNotesUrl";
|
||||
|
||||
public static final String LICENSE_COUNT_KEY = "license.count";
|
||||
|
||||
|
|
|
|||
|
|
@ -64,10 +64,10 @@ object CrossVersionUtil {
|
|||
* Compatible versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1.
|
||||
*/
|
||||
private[sbt] def scalaApiVersion(v: String): Option[(Long, Long)] = v match {
|
||||
case ReleaseV(x, y, _, _) => Some((x.toLong, y.toLong))
|
||||
case BinCompatV(x, y, _, _, _) => Some((x.toLong, y.toLong))
|
||||
case NonReleaseV_1(x, y, z, _) if z.toInt > 0 => Some((x.toLong, y.toLong))
|
||||
case _ => None
|
||||
case ReleaseV(x, y, _, _) => Some((x.toLong, y.toLong))
|
||||
case BinCompatV(x, y, _, _, _) => Some((x.toLong, y.toLong))
|
||||
case NonReleaseV_1(x, y, z, _) if z.toLong > 0 => Some((x.toLong, y.toLong))
|
||||
case _ => None
|
||||
}
|
||||
|
||||
private[sbt] def partialVersion(s: String): Option[(Long, Long)] =
|
||||
|
|
@ -85,6 +85,31 @@ object CrossVersionUtil {
|
|||
case _ => full
|
||||
}
|
||||
|
||||
// Uses the following rules:
|
||||
//
|
||||
// - Forwards and backwards compatibility is guaranteed for Scala 2.N.x (https://docs.scala-lang.org/overviews/core/binary-compatibility-of-scala-releases.html)
|
||||
//
|
||||
// - A Scala compiler in version 3.x1.y1 is able to read TASTy files produced by another compiler in version 3.x2.y2 if x1 >= x2 (https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html)
|
||||
//
|
||||
// - For non-stable Scala 3 versions, compiler versions can read TASTy in an older stable format but their TASTY versions are not compatible between each other even if the compilers have the same minor version (https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html)
|
||||
//
|
||||
private[sbt] def isScalaBinaryCompatibleWith(newVersion: String, origVersion: String): Boolean = {
|
||||
(newVersion, origVersion) match {
|
||||
case (NonReleaseV_n("2", _, _, _), NonReleaseV_n("2", _, _, _)) =>
|
||||
val api1 = scalaApiVersion(newVersion)
|
||||
val api2 = scalaApiVersion(origVersion)
|
||||
(api1.isDefined && api1 == api2) || (newVersion == origVersion)
|
||||
case (ReleaseV(nMaj, nMin, _, _), ReleaseV(oMaj, oMin, _, _))
|
||||
if nMaj == oMaj && nMaj.toLong >= 3 =>
|
||||
nMin.toInt >= oMin.toInt
|
||||
case (NonReleaseV_1(nMaj, nMin, _, _), ReleaseV(oMaj, oMin, _, _))
|
||||
if nMaj == oMaj && nMaj.toLong >= 3 =>
|
||||
nMin.toInt > oMin.toInt
|
||||
case _ =>
|
||||
newVersion == origVersion
|
||||
}
|
||||
}
|
||||
|
||||
def binaryScalaVersion(full: String): String = {
|
||||
if (ScalaArtifacts.isScala3(full)) binaryScala3Version(full)
|
||||
else
|
||||
|
|
@ -92,7 +117,13 @@ object CrossVersionUtil {
|
|||
}
|
||||
|
||||
def binarySbtVersion(full: String): String =
|
||||
binaryVersionWithApi(full, TransitionSbtVersion)(sbtApiVersion)
|
||||
sbtApiVersion(full) match {
|
||||
case Some((0, minor)) if minor < 12 => full
|
||||
case Some((0, minor)) => s"0.$minor"
|
||||
case Some((1, minor)) => s"1.$minor"
|
||||
case Some((major, _)) => major.toString
|
||||
case _ => full
|
||||
}
|
||||
|
||||
private[this] def isNewer(major: Long, minor: Long, minMajor: Long, minMinor: Long): Boolean =
|
||||
major > minMajor || (major == minMajor && minor >= minMinor)
|
||||
|
|
|
|||
|
|
@ -21,11 +21,11 @@ final class Configuration private[sbt] (
|
|||
override def equals(o: Any): Boolean = o match {
|
||||
case x: Configuration =>
|
||||
(this.id == x.id) &&
|
||||
(this.name == x.name) &&
|
||||
(this.description == x.description) &&
|
||||
(this.isPublic == x.isPublic) &&
|
||||
(this.extendsConfigs == x.extendsConfigs) &&
|
||||
(this.transitive == x.transitive)
|
||||
(this.name == x.name) &&
|
||||
(this.description == x.description) &&
|
||||
(this.isPublic == x.isPublic) &&
|
||||
(this.extendsConfigs == x.extendsConfigs) &&
|
||||
(this.transitive == x.transitive)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
*/
|
||||
package sbt.librarymanagement
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.annotation.{ nowarn, tailrec }
|
||||
import scala.quoted.*
|
||||
|
||||
object Configurations {
|
||||
|
|
@ -21,9 +21,11 @@ object Configurations {
|
|||
|
||||
lazy val RuntimeInternal = optionalInternal(Runtime)
|
||||
lazy val TestInternal = fullInternal(Test)
|
||||
@nowarn
|
||||
lazy val IntegrationTestInternal = fullInternal(IntegrationTest)
|
||||
lazy val CompileInternal = fullInternal(Compile)
|
||||
|
||||
@nowarn
|
||||
def internalMap(c: Configuration) = c match {
|
||||
case Compile => CompileInternal
|
||||
case Test => TestInternal
|
||||
|
|
@ -41,6 +43,7 @@ object Configurations {
|
|||
|
||||
lazy val Default = Configuration.of("Default", "default")
|
||||
lazy val Compile = Configuration.of("Compile", "compile")
|
||||
@deprecated("Create a separate subproject for testing instead", "1.9.0")
|
||||
lazy val IntegrationTest = Configuration.of("IntegrationTest", "it") extend (Runtime)
|
||||
lazy val Provided = Configuration.of("Provided", "provided")
|
||||
lazy val Runtime = Configuration.of("Runtime", "runtime") extend (Compile)
|
||||
|
|
@ -69,6 +72,7 @@ object Configurations {
|
|||
)
|
||||
|
||||
/** Returns true if the configuration should be under the influence of scalaVersion. */
|
||||
@nowarn
|
||||
private[sbt] def underScalaVersion(c: Configuration): Boolean =
|
||||
c match {
|
||||
case Default | Compile | IntegrationTest | Provided | Runtime | Test | Optional |
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ private[librarymanagement] abstract class CrossVersionFunctions {
|
|||
|
||||
/** Compatibility with 0.13 */
|
||||
@deprecated(
|
||||
"use CrossVersion.disabled instead. prior to sbt 1.3.0, Diabled did not work without apply(). sbt/sbt#4977",
|
||||
"use CrossVersion.disabled instead. prior to sbt 1.3.0, Disabled did not work without apply(). sbt/sbt#4977",
|
||||
"1.3.0"
|
||||
)
|
||||
final val Disabled = sbt.librarymanagement.Disabled
|
||||
|
|
@ -235,4 +235,10 @@ private[librarymanagement] abstract class CrossVersionFunctions {
|
|||
* Full sbt versions earlier than [[sbt.librarymanagement.CrossVersion.TransitionSbtVersion]] are returned as is.
|
||||
*/
|
||||
def binarySbtVersion(full: String): String = CrossVersionUtil.binarySbtVersion(full)
|
||||
|
||||
/**
|
||||
* Returns `true` if a project targeting version `origVersion` can run with version `newVersion`.
|
||||
*/
|
||||
def isScalaBinaryCompatibleWith(newVersion: String, origVersion: String): Boolean =
|
||||
CrossVersionUtil.isScalaBinaryCompatibleWith(newVersion, origVersion)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -83,52 +83,52 @@ object EvictionError {
|
|||
}
|
||||
}: _*)
|
||||
|
||||
def calculateCompatible(p: EvictionPair): (Boolean, String, Boolean, String) = {
|
||||
val winnerOpt = p.winner map { _.module }
|
||||
val extraAttributes = ((p.winner match {
|
||||
case Some(r) => r.extraAttributes.toMap
|
||||
case _ => Map.empty
|
||||
}): collection.immutable.Map[String, String]) ++ (winnerOpt match {
|
||||
case Some(w) => w.extraAttributes.toMap
|
||||
case _ => Map.empty
|
||||
})
|
||||
// prioritize user-defined version scheme to allow overriding the real scheme
|
||||
val schemeOpt = userDefinedSchemes
|
||||
.get((p.organization, p.name))
|
||||
.orElse(userDefinedSchemes.get((p.organization, "*")))
|
||||
.orElse(VersionSchemes.extractFromExtraAttributes(extraAttributes))
|
||||
.orElse(userDefinedSchemes.get(("*", "*")))
|
||||
val f = (winnerOpt, schemeOpt) match {
|
||||
case (Some(_), Some(scheme)) => VersionSchemes.evalFunc(scheme)
|
||||
case _ => EvictionWarningOptions.guessTrue
|
||||
}
|
||||
val scheme =
|
||||
if (isNameScalaSuffixed(p.name)) assumedVersionScheme
|
||||
else assumedVersionSchemeJava
|
||||
val guess = VersionSchemes.evalFunc(scheme)
|
||||
(
|
||||
p.evicteds forall { r =>
|
||||
f((r.module, winnerOpt, module.scalaModuleInfo))
|
||||
},
|
||||
schemeOpt.getOrElse("?"),
|
||||
p.evicteds forall { r =>
|
||||
guess((r.module, winnerOpt, module.scalaModuleInfo))
|
||||
},
|
||||
scheme
|
||||
)
|
||||
}
|
||||
pairs foreach {
|
||||
// don't report on a transitive eviction that does not have a winner
|
||||
// https://github.com/sbt/sbt/issues/4946
|
||||
case p if p.winner.isDefined =>
|
||||
val r = calculateCompatible(p)
|
||||
if (!r._1) {
|
||||
incompatibleEvictions += (p -> r._2)
|
||||
} else if (!r._3) {
|
||||
assumedIncompatEvictions += (p -> r._4)
|
||||
val winner = p.winner.get
|
||||
|
||||
def hasIncompatibleVersionForScheme(scheme: String) = {
|
||||
val isCompat = VersionSchemes.evalFunc(scheme)
|
||||
p.evicteds.exists { r =>
|
||||
!isCompat((r.module, Some(winner.module), module.scalaModuleInfo))
|
||||
}
|
||||
}
|
||||
|
||||
// from libraryDependencyScheme or defined in the pom using the `info.versionScheme` attribute
|
||||
val userDefinedSchemeOrFromPom = {
|
||||
def fromLibraryDependencySchemes(org: String = "*", mod: String = "*") =
|
||||
userDefinedSchemes.get((org, mod))
|
||||
def fromWinnerPom = VersionSchemes.extractFromExtraAttributes(
|
||||
winner.extraAttributes.toMap ++ winner.module.extraAttributes
|
||||
)
|
||||
|
||||
fromLibraryDependencySchemes(p.organization, p.name) // by org and name
|
||||
.orElse(fromLibraryDependencySchemes(p.organization)) // for whole org
|
||||
.orElse(fromWinnerPom) // from pom
|
||||
.orElse(fromLibraryDependencySchemes()) // global
|
||||
}
|
||||
|
||||
// We want the user to be able to suppress eviction errors for a specific library,
|
||||
// which would result in an incompatible eviction based on the assumed version scheme.
|
||||
// So, only fall back to the assumed scheme if there is no given scheme by the user or the pom.
|
||||
userDefinedSchemeOrFromPom match {
|
||||
case Some(givenScheme) =>
|
||||
if (hasIncompatibleVersionForScheme(givenScheme))
|
||||
incompatibleEvictions += (p -> givenScheme)
|
||||
case None =>
|
||||
val assumedScheme =
|
||||
if (isNameScalaSuffixed(p.name)) assumedVersionScheme
|
||||
else assumedVersionSchemeJava
|
||||
|
||||
if (hasIncompatibleVersionForScheme(assumedScheme))
|
||||
assumedIncompatEvictions += (p -> assumedScheme)
|
||||
}
|
||||
|
||||
case _ => ()
|
||||
}
|
||||
|
||||
new EvictionError(
|
||||
incompatibleEvictions.toList,
|
||||
assumedIncompatEvictions.toList,
|
||||
|
|
@ -158,9 +158,6 @@ final class EvictionError private[sbt] (
|
|||
out += "found version conflict(s) in library dependencies; some are suspected to be binary incompatible:"
|
||||
out += ""
|
||||
evictions.foreach({ case (a, scheme) =>
|
||||
val revs = a.evicteds map { _.module.revision }
|
||||
val revsStr =
|
||||
if (revs.size <= 1) revs.mkString else "{" + revs.distinct.mkString(", ") + "}"
|
||||
val seen: mutable.Set[ModuleID] = mutable.Set()
|
||||
val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r =>
|
||||
val rev = r.module.revision
|
||||
|
|
@ -174,7 +171,7 @@ final class EvictionError private[sbt] (
|
|||
}
|
||||
val que = if (assumed) "?" else ""
|
||||
val winnerRev = a.winner match {
|
||||
case Some(r) => s":${r.module.revision} ($scheme$que) is selected over ${revsStr}"
|
||||
case Some(r) => s":${r.module.revision} ($scheme$que) is selected over ${a.evictedRevs}"
|
||||
case _ => " is evicted for all versions"
|
||||
}
|
||||
val title = s"\t* ${a.organization}:${a.name}$winnerRev"
|
||||
|
|
|
|||
|
|
@ -191,12 +191,17 @@ final class EvictionPair private[sbt] (
|
|||
val includesDirect: Boolean,
|
||||
val showCallers: Boolean
|
||||
) {
|
||||
val evictedRevs: String = {
|
||||
val revs = evicteds map { _.module.revision }
|
||||
if (revs.size <= 1) revs.mkString else revs.distinct.mkString("{", ", ", "}")
|
||||
}
|
||||
|
||||
override def toString: String =
|
||||
EvictionPair.evictionPairLines.showLines(this).mkString
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case o: EvictionPair =>
|
||||
(this.organization == o.organization) &&
|
||||
(this.name == o.name)
|
||||
(this.name == o.name)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
|
|
@ -209,8 +214,6 @@ final class EvictionPair private[sbt] (
|
|||
|
||||
object EvictionPair {
|
||||
implicit val evictionPairLines: ShowLines[EvictionPair] = ShowLines { (a: EvictionPair) =>
|
||||
val revs = a.evicteds map { _.module.revision }
|
||||
val revsStr = if (revs.size <= 1) revs.mkString else "{" + revs.mkString(", ") + "}"
|
||||
val seen: mutable.Set[ModuleID] = mutable.Set()
|
||||
val callers: List[String] = (a.evicteds.toList ::: a.winner.toList) flatMap { r =>
|
||||
val rev = r.module.revision
|
||||
|
|
@ -223,7 +226,7 @@ object EvictionPair {
|
|||
}
|
||||
}
|
||||
val winnerRev = a.winner match {
|
||||
case Some(r) => s":${r.module.revision} is selected over ${revsStr}"
|
||||
case Some(r) => s":${r.module.revision} is selected over ${a.evictedRevs}"
|
||||
case _ => " is evicted for all versions"
|
||||
}
|
||||
val title = s"\t* ${a.organization}:${a.name}$winnerRev"
|
||||
|
|
@ -300,7 +303,7 @@ object EvictionWarning {
|
|||
module.scalaModuleInfo match {
|
||||
case Some(s) =>
|
||||
organization == s.scalaOrganization &&
|
||||
(name == LibraryID) || (name == CompilerID)
|
||||
(name == LibraryID) || (name == CompilerID)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import gigahorse._, support.okhttp.Gigahorse
|
||||
import gigahorse._, support.apachehttp.Gigahorse
|
||||
import scala.concurrent.duration.DurationInt
|
||||
|
||||
object Http {
|
||||
|
|
|
|||
|
|
@ -28,6 +28,7 @@ trait LibraryManagementSyntax
|
|||
final val Compile = C.Compile
|
||||
final val Test = C.Test
|
||||
final val Runtime = C.Runtime
|
||||
@deprecated("Create a separate subproject for testing instead", "1.9.0")
|
||||
final val IntegrationTest = C.IntegrationTest
|
||||
final val Default = C.Default
|
||||
final val Provided = C.Provided
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
|
||||
/**
|
||||
* Commonly used software licenses
|
||||
* Names are SPDX ids:
|
||||
* https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json
|
||||
*/
|
||||
object License {
|
||||
lazy val Apache2: (String, URL) =
|
||||
("Apache-2.0", new URI("https://www.apache.org/licenses/LICENSE-2.0.txt").toURL)
|
||||
|
||||
lazy val MIT: (String, URL) =
|
||||
("MIT", new URI("https://opensource.org/licenses/MIT").toURL)
|
||||
|
||||
lazy val CC0: (String, URL) =
|
||||
("CC0-1.0", new URI("https://creativecommons.org/publicdomain/zero/1.0/legalcode").toURL)
|
||||
|
||||
def PublicDomain: (String, URL) = CC0
|
||||
|
||||
lazy val GPL3_or_later: (String, URL) =
|
||||
("GPL-3.0-or-later", new URI("https://spdx.org/licenses/GPL-3.0-or-later.html").toURL)
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
*/
|
||||
package sbt.librarymanagement
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
|
||||
import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties
|
||||
import scala.collection.mutable.ListBuffer
|
||||
|
|
@ -133,7 +133,7 @@ private[librarymanagement] abstract class ModuleIDExtra {
|
|||
* It is not included in published metadata.
|
||||
*/
|
||||
def from(url: String, allowInsecureProtocol: Boolean): ModuleID =
|
||||
artifacts(Artifact(name, new URL(url), allowInsecureProtocol))
|
||||
artifacts(Artifact(name, new URI(url).toURL, allowInsecureProtocol))
|
||||
|
||||
/** Adds a dependency on the artifact for this module with classifier `c`. */
|
||||
def classifier(c: String): ModuleID = artifacts(Artifact(name, c))
|
||||
|
|
|
|||
|
|
@ -5,9 +5,11 @@ package sbt.librarymanagement
|
|||
|
||||
import java.io.{ IOException, File }
|
||||
import java.net.{ URI, URL }
|
||||
import scala.annotation.nowarn
|
||||
import scala.xml.XML
|
||||
import org.xml.sax.SAXParseException
|
||||
import sbt.util.Logger
|
||||
import java.net.URI
|
||||
|
||||
final class RawRepository(val resolver: AnyRef, name: String) extends Resolver(name) {
|
||||
override def toString = "Raw(" + resolver.toString + ")"
|
||||
|
|
@ -102,6 +104,7 @@ private[librarymanagement] abstract class ResolverFunctions {
|
|||
@deprecated("Renamed to SbtRepositoryRoot.", "1.0.0")
|
||||
val SbtPluginRepositoryRoot = SbtRepositoryRoot
|
||||
val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories"
|
||||
val SonatypeS01RepositoryRoot = "https://s01.oss.sonatype.org/content/repositories"
|
||||
val SonatypeReleasesRepository =
|
||||
"https://oss.sonatype.org/service/local/repositories/releases/content/"
|
||||
val JavaNet2RepositoryName = "java.net Maven2 Repository"
|
||||
|
|
@ -144,29 +147,50 @@ private[librarymanagement] abstract class ResolverFunctions {
|
|||
def typesafeRepo(status: String) =
|
||||
MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status)
|
||||
def typesafeIvyRepo(status: String) =
|
||||
url("typesafe-ivy-" + status, new URL(TypesafeRepositoryRoot + "/ivy-" + status + "/"))(
|
||||
url("typesafe-ivy-" + status, new URI(TypesafeRepositoryRoot + "/ivy-" + status + "/").toURL)(
|
||||
ivyStylePatterns
|
||||
)
|
||||
def sbtIvyRepo(status: String) =
|
||||
url(s"sbt-ivy-$status", new URL(s"$SbtRepositoryRoot/ivy-$status/"))(ivyStylePatterns)
|
||||
url(s"sbt-ivy-$status", new URI(s"$SbtRepositoryRoot/ivy-$status/").toURL)(ivyStylePatterns)
|
||||
def sbtPluginRepo(status: String) =
|
||||
url("sbt-plugin-" + status, new URL(SbtRepositoryRoot + "/sbt-plugin-" + status + "/"))(
|
||||
url("sbt-plugin-" + status, new URI(SbtRepositoryRoot + "/sbt-plugin-" + status + "/").toURL)(
|
||||
ivyStylePatterns
|
||||
)
|
||||
@deprecated(
|
||||
"""Use sonatypeOssRepos instead e.g. `resolvers ++= Resolver.sonatypeOssRepos("snapshots")`""",
|
||||
"1.7.0"
|
||||
)
|
||||
def sonatypeRepo(status: String) =
|
||||
MavenRepository(
|
||||
"sonatype-" + status,
|
||||
if (status == "releases") SonatypeReleasesRepository
|
||||
else SonatypeRepositoryRoot + "/" + status
|
||||
)
|
||||
private def sonatypeS01Repo(status: String) =
|
||||
MavenRepository(
|
||||
"sonatype-s01-" + status,
|
||||
SonatypeS01RepositoryRoot + "/" + status
|
||||
)
|
||||
def sonatypeOssRepos(status: String) =
|
||||
Vector(sonatypeRepo(status): @nowarn("cat=deprecation"), sonatypeS01Repo(status))
|
||||
def bintrayRepo(owner: String, repo: String) =
|
||||
MavenRepository(s"bintray-$owner-$repo", s"https://dl.bintray.com/$owner/$repo/")
|
||||
def bintrayIvyRepo(owner: String, repo: String) =
|
||||
url(s"bintray-$owner-$repo", new URL(s"https://dl.bintray.com/$owner/$repo/"))(
|
||||
url(s"bintray-$owner-$repo", new URI(s"https://dl.bintray.com/$owner/$repo/").toURL)(
|
||||
Resolver.ivyStylePatterns
|
||||
)
|
||||
def jcenterRepo = JCenterRepository
|
||||
|
||||
val ApacheMavenSnapshotsRepo = MavenRepository(
|
||||
"apache-snapshots",
|
||||
"https://repository.apache.org/content/repositories/snapshots/"
|
||||
)
|
||||
|
||||
val ApacheMavenStagingRepo = MavenRepository(
|
||||
"apache-staging",
|
||||
"https://repository.apache.org/content/groups/staging/"
|
||||
)
|
||||
|
||||
/** Add the local and Maven Central repositories to the user repositories. */
|
||||
def combineDefaultResolvers(userResolvers: Vector[Resolver]): Vector[Resolver] =
|
||||
combineDefaultResolvers(userResolvers, mavenCentral = true)
|
||||
|
|
@@ -376,6 +400,20 @@ private[librarymanagement] abstract class ResolverFunctions {
  def defaultRetrievePattern =
    "[type]s/[organisation]/[module]/" + PluginPattern + "[artifact](-[revision])(-[classifier]).[ext]"
  final val PluginPattern = "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)"
  private[librarymanagement] def expandMavenSettings(str: String): String = {
    // Aren't regular expressions beautifully clear and concise.
    // This means "find all ${...}" blocks, with the first group of each being the text between curly brackets.
    val findQuoted = "\\$\\{([^\\}]*)\\}".r
    val env = "env\\.(.*)".r

    findQuoted.replaceAllIn(
      str,
      _.group(1) match {
        case env(variable) => sys.env.getOrElse(variable, "")
        case property => sys.props.getOrElse(property, "")
      }
    )
  }
  private[this] def mavenLocalDir: File = {
    def loadHomeFromSettings(f: () => File): Option[File] =
      try {
@@ -384,7 +422,7 @@ private[librarymanagement] abstract class ResolverFunctions {
        else
          ((XML.loadFile(file) \ "localRepository").text match {
            case "" => None
            case e @ _ => Some(new File(e))
            case e @ _ => Some(new File(expandMavenSettings(e)))
          })
      } catch {
        // Occurs inside File constructor when property or environment variable does not exist
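The new expandMavenSettings helper substitutes ${env.NAME} placeholders with environment variables and ${some.property} placeholders with JVM system properties before the <localRepository> value read from Maven's settings.xml is turned into a File. A standalone sketch of the same substitution, kept outside sbt's internal API (the name expandLikeMavenSettings is made up for the example):

    // Standalone re-implementation for illustration; the real helper is
    // Resolver.expandMavenSettings in the hunk above.
    def expandLikeMavenSettings(str: String): String = {
      val findQuoted = "\\$\\{([^\\}]*)\\}".r // matches ${...}
      val env = "env\\.(.*)".r                // matches env.NAME inside the braces
      findQuoted.replaceAllIn(
        str,
        _.group(1) match {
          case env(variable) => sys.env.getOrElse(variable, "")
          case property => sys.props.getOrElse(property, "")
        }
      )
    }

    // expandLikeMavenSettings("${env.HOME}/.m2/repository") -> "/home/alice/.m2/repository" (say)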
@ -14,7 +14,7 @@ final class RichUpdateReport(report: UpdateReport) {
|
|||
val stamps = files
|
||||
.map(f =>
|
||||
(
|
||||
f,
|
||||
f.toString,
|
||||
// TODO: The list of files may also contain some odd files that do not actually exist like:
|
||||
// "./target/ivyhome/resolution-cache/com.example/foo/0.4.0/resolved.xml.xml".
|
||||
// IO.getModifiedTimeOrZero() will just return zero, but the list of files should not contain such
|
||||
|
|
|
|||
|
|
@ -36,8 +36,8 @@ object UnresolvedWarning {
|
|||
def modulePosition(m0: ModuleID): Option[SourcePosition] =
|
||||
config.modulePositions.find { case (m, _) =>
|
||||
(m.organization == m0.organization) &&
|
||||
(m0.name startsWith m.name) &&
|
||||
(m.revision == m0.revision)
|
||||
(m0.name startsWith m.name) &&
|
||||
(m.revision == m0.revision)
|
||||
} map { case (_, p) =>
|
||||
p
|
||||
}
|
||||
|
|
|
@@ -123,12 +123,13 @@ private[librarymanagement] abstract class UpdateReportExtra {
  def cachedDescriptor: File
  def configurations: Vector[ConfigurationReport]
  def stats: UpdateStats
  private[sbt] def stamps: Map[File, Long]
  private[sbt] def stamps: Map[String, Long]

  private[sbt] def moduleKey(m: ModuleID) = (m.organization, m.name, m.revision)

  /** All resolved modules in all configurations. */
  def allModules: Vector[ModuleID] = {
    val key = (m: ModuleID) => (m.organization, m.name, m.revision)
    configurations.flatMap(_.allModules).groupBy(key).toVector map { case (_, v) =>
    configurations.flatMap(_.allModules).groupBy(moduleKey).toVector map { case (_, v) =>
      v reduceLeft { (agg, x) =>
        agg.withConfigurations(
          (agg.configurations, x.configurations) match {

@@ -141,6 +142,21 @@ private[librarymanagement] abstract class UpdateReportExtra {
    }
  }

  def allModuleReports: Vector[ModuleReport] = {
    configurations.flatMap(_.modules).groupBy(mR => moduleKey(mR.module)).toVector map {
      case (_, v) =>
        v reduceLeft { (agg, x) =>
          agg.withConfigurations(
            (agg.configurations, x.configurations) match {
              case (v, _) if v.isEmpty => x.configurations
              case (ac, v) if v.isEmpty => ac
              case (ac, xc) => ac ++ xc
            }
          )
        }
    }
  }

  def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): UpdateReport =
    UpdateReport(cachedDescriptor, configurations map { _ retrieve f }, stats, stamps)
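allModuleReports complements allModules: both collapse duplicates across configurations by (organization, name, revision), but the new method keeps the full ModuleReport (artifacts, callers, and so on) rather than just the ModuleID. A hedged usage sketch, assuming report is an UpdateReport obtained from a previous resolution:

    // Hypothetical helper: summarise what was resolved, one line per distinct module.
    def summarise(report: sbt.librarymanagement.UpdateReport): Vector[String] =
      report.allModuleReports.map { mr =>
        val m = mr.module
        s"${m.organization}:${m.name}:${m.revision} -> ${mr.artifacts.size} artifact(s)"
      }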
@ -96,23 +96,23 @@ class CrossVersionTest extends UnitSpec {
|
|||
"binarySbtVersion" should "for 0.11.3 return 0.11.3" in {
|
||||
binarySbtVersion("0.11.3") shouldBe "0.11.3"
|
||||
}
|
||||
it should "for 0.12.0-M1 return 0.12.0-M1" in {
|
||||
binarySbtVersion("0.12.0-M1") shouldBe "0.12.0-M1"
|
||||
it should "for 2.0.0 return 2" in {
|
||||
binarySbtVersion("2.0.0") shouldBe "2"
|
||||
}
|
||||
it should "for 0.12.0-RC1 return 0.12" in {
|
||||
binarySbtVersion("0.12.0-RC1") shouldBe "0.12"
|
||||
it should "for 2.0.0-M1 return 2.0.0-M1" in {
|
||||
binarySbtVersion("2.0.0-M1") shouldBe "2.0.0-M1"
|
||||
}
|
||||
it should "for 0.12.0 return 0.12" in {
|
||||
binarySbtVersion("0.12.0") shouldBe "0.12"
|
||||
it should "for 2.0.0-RC1 return 2" in {
|
||||
binarySbtVersion("2.0.0-RC1") shouldBe "2"
|
||||
}
|
||||
it should "for 0.12.1-SNAPSHOT return 0.12" in {
|
||||
binarySbtVersion("0.12.1-SNAPSHOT") shouldBe "0.12"
|
||||
it should "for 2.1.0-M1 return 2" in {
|
||||
binarySbtVersion("2.1.0-M1") shouldBe "2"
|
||||
}
|
||||
it should "for 0.12.1-RC1 return 0.12" in {
|
||||
binarySbtVersion("0.12.1-RC1") shouldBe "0.12"
|
||||
it should "for 2.1.0 return 2" in {
|
||||
binarySbtVersion("2.1.0") shouldBe "2"
|
||||
}
|
||||
it should "for 0.12.1 return 0.12" in {
|
||||
binarySbtVersion("0.12.1") shouldBe "0.12"
|
||||
it should "for 0.13.1 return 0.13" in {
|
||||
binarySbtVersion("0.13.1") shouldBe "0.13"
|
||||
}
|
||||
it should "for 1.0.0-M6 return 1.0.0-M6" in {
|
||||
binarySbtVersion("1.0.0-M6") shouldBe "1.0.0-M6"
|
||||
|
|
@@ -144,9 +144,6 @@ class CrossVersionTest extends UnitSpec {
  it should "for 1.10.0 return 1.0" in {
    binarySbtVersion("1.10.0") shouldBe "1.0"
  }
  it should "for 2.0.0 return 2.0" in {
    binarySbtVersion("2.0.0") shouldBe "2.0"
  }

  "scalaApiVersion" should "for xyz return None" in {
    scalaApiVersion("xyz") shouldBe None
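The updated expectations encode the sbt 2.x convention: stable 2.x releases and release candidates collapse to the single-segment binary version "2", while pre-releases of 2.0.0 itself keep their full version, just as 1.0.0 milestones did for sbt 1. A small self-check restating that mapping, assuming binarySbtVersion is the CrossVersion helper these tests exercise:

    import sbt.librarymanagement.CrossVersion.binarySbtVersion

    // Input -> expected binary sbt version, as pinned down by the tests above.
    val expectations = Seq(
      "0.13.1"    -> "0.13",
      "1.0.0-M6"  -> "1.0.0-M6",
      "1.10.0"    -> "1.0",
      "2.0.0-M1"  -> "2.0.0-M1",
      "2.0.0-RC1" -> "2",
      "2.0.0"     -> "2",
      "2.1.0-M1"  -> "2",
      "2.1.0"     -> "2"
    )
    expectations.foreach { case (full, binary) => assert(binarySbtVersion(full) == binary) }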
@ -284,6 +281,55 @@ class CrossVersionTest extends UnitSpec {
|
|||
patchVersion("2.11.8-X1.5-bin-extra") shouldBe Some("artefact_2.11.8-X1.5")
|
||||
}
|
||||
|
||||
"isScalaBinaryCompatibleWith" should "for (2.10.4, 2.10.5) return true" in {
|
||||
isScalaBinaryCompatibleWith("2.10.4", "2.10.5") shouldBe true
|
||||
}
|
||||
it should "for (2.10.6, 2.10.5) return true" in {
|
||||
isScalaBinaryCompatibleWith("2.10.6", "2.10.5") shouldBe true
|
||||
}
|
||||
it should "for (2.11.0, 2.10.5) return false" in {
|
||||
isScalaBinaryCompatibleWith("2.11.0", "2.10.5") shouldBe false
|
||||
}
|
||||
it should "for (3.0.0, 2.10.5) return false" in {
|
||||
isScalaBinaryCompatibleWith("3.0.0", "2.10.5") shouldBe false
|
||||
}
|
||||
it should "for (3.0.0, 3.1.0) return false" in {
|
||||
isScalaBinaryCompatibleWith("3.0.0", "3.1.0") shouldBe false
|
||||
}
|
||||
it should "for (3.1.0, 3.0.0) return true" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0", "3.0.0") shouldBe true
|
||||
}
|
||||
it should "for (3.1.0, 3.1.1) return true" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0", "3.1.1") shouldBe true
|
||||
}
|
||||
it should "for (3.1.1, 3.1.0) return true" in {
|
||||
isScalaBinaryCompatibleWith("3.1.1", "3.1.0") shouldBe true
|
||||
}
|
||||
it should "for (2.10.0-M1, 2.10.5) return false" in {
|
||||
isScalaBinaryCompatibleWith("2.10.0-M1", "2.10.5") shouldBe false
|
||||
}
|
||||
it should "for (2.10.5, 2.10.0-M1) return false" in {
|
||||
isScalaBinaryCompatibleWith("2.10.5", "2.10.0-M1") shouldBe false
|
||||
}
|
||||
it should "for (2.10.0-M1, 2.10.0-M2) return false" in {
|
||||
isScalaBinaryCompatibleWith("2.10.0-M1", "2.10.0-M2") shouldBe false
|
||||
}
|
||||
it should "for (2.10.0-M1, 2.11.0-M1) return false" in {
|
||||
isScalaBinaryCompatibleWith("2.10.0-M1", "2.11.0-M1") shouldBe false
|
||||
}
|
||||
it should "for (3.1.0-M1, 3.0.0) return true" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0-M1", "3.0.0") shouldBe true
|
||||
}
|
||||
it should "for (3.1.0-M1, 3.1.0) return false" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0-M1", "3.1.0") shouldBe false
|
||||
}
|
||||
it should "for (3.1.0-M1, 3.1.0-M2) return false" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0-M1", "3.1.0-M2") shouldBe false
|
||||
}
|
||||
it should "for (3.1.0-M2, 3.1.0-M1) return false" in {
|
||||
isScalaBinaryCompatibleWith("3.1.0-M2", "3.1.0-M1") shouldBe false
|
||||
}
|
||||
|
||||
private def constantVersion(value: String) =
|
||||
CrossVersion(CrossVersion.constant(value), "dummy1", "dummy2") map (fn => fn("artefact"))
|
||||
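Read together, the new cases pin down an asymmetric compatibility relation for isScalaBinaryCompatibleWith: within Scala 2 both versions must share the same binary version, within Scala 3 a newer or equal minor can consume an older one but not the reverse, and pre-releases get special treatment (a Scala 3 milestone can consume older stable minors but nothing at or beyond its own release, while Scala 2 pre-releases are not binary compatible with anything else). A few representative triples from the tests, as data (illustrative only):

    // (version, reference, expected) triples asserted above.
    val cases = Seq(
      ("2.10.6", "2.10.5", true),     // same Scala 2 binary version, either direction
      ("2.11.0", "2.10.5", false),    // different Scala 2 binary versions
      ("3.1.0", "3.0.0", true),       // Scala 3: newer minor may depend on older
      ("3.0.0", "3.1.0", false),      // ...but not the reverse
      ("3.1.0-M1", "3.0.0", true),    // a milestone can still consume an older stable minor
      ("3.1.0-M1", "3.1.0", false),   // but is not compatible with its own final release
      ("2.10.0-M1", "2.10.0-M2", false)
    )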
@ -0,0 +1,43 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import verify.BasicTestSuite
|
||||
import scala.annotation.nowarn
|
||||
|
||||
@nowarn // Necessary because our test cases look like interpolated strings.
|
||||
object ResolverExtraTest extends BasicTestSuite {
|
||||
test("expandMavenSettings should expand existing environment variables") {
|
||||
assertExpansion(
|
||||
input = "User home: ${env.HOME}",
|
||||
expected = s"User home: ${env("HOME")}"
|
||||
)
|
||||
}
|
||||
|
||||
test("expandMavenSettings should expand existing system properties") {
|
||||
assertExpansion(
|
||||
input = "User dir: ${user.dir}",
|
||||
expected = s"User dir: ${prop("user.dir")}"
|
||||
)
|
||||
}
|
||||
|
||||
test("expandMavenSettings should expand unknown system properties to the empty string") {
|
||||
assertExpansion(
|
||||
input = "Unknown system property: ${IF_THIS_EXISTS_WE_NEED_TO_HAVE_A_CHAT}",
|
||||
expected = s"Unknown system property: "
|
||||
)
|
||||
}
|
||||
|
||||
test("expandMavenSettings should expand unknown environment variables to the empty string") {
|
||||
assertExpansion(
|
||||
input = "Unknown environment variable: ${IF_THIS_EXISTS_I_WORRY_ABOUT_YOU}",
|
||||
expected = s"Unknown environment variable: "
|
||||
)
|
||||
}
|
||||
|
||||
// - Helper functions ----------------------------------------------------------------------------
|
||||
// -----------------------------------------------------------------------------------------------
|
||||
def assertExpansion(input: String, expected: String) =
|
||||
assert(Resolver.expandMavenSettings(input) == expected)
|
||||
|
||||
def env(name: String) = sys.env.getOrElse(name, "")
|
||||
def prop(name: String) = sys.props.getOrElse(name, "")
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
package sbt.librarymanagement
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
|
||||
import sbt.internal.librarymanagement.UnitSpec
|
||||
|
||||
|
|
@ -10,7 +10,7 @@ object ResolverTest extends UnitSpec {
|
|||
val pats = Vector("[orgPath]")
|
||||
val patsExpected = Vector("http://foo.com/test/[orgPath]")
|
||||
val patterns = Resolver
|
||||
.url("test", new URL("http://foo.com/test"))(
|
||||
.url("test", new URI("http://foo.com/test").toURL)(
|
||||
Patterns(
|
||||
pats,
|
||||
pats,
|
||||
|
|
|
|||
|
|
@ -1,82 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2013 Square, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package sbt.internal.librarymanagement;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.Authenticator.RequestorType;
|
||||
import java.net.InetAddress;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.net.PasswordAuthentication;
|
||||
import java.net.Proxy;
|
||||
import java.util.List;
|
||||
import okhttp3.Authenticator;
|
||||
import okhttp3.Route;
|
||||
import okhttp3.Request;
|
||||
import okhttp3.Response;
|
||||
import okhttp3.HttpUrl;
|
||||
import okhttp3.Challenge;
|
||||
import okhttp3.Credentials;
|
||||
|
||||
/**
|
||||
* Adapts java.net.Authenticator to Authenticator. Configure OkHttp to use
|
||||
* java.net.Authenticator with OkHttpClient.Builder#authenticator or
|
||||
* OkHttpClient.Builder#proxyAuthenticator(Authenticator).
|
||||
*/
|
||||
public final class JavaNetAuthenticator implements Authenticator {
|
||||
@Override public Request authenticate(Route route, Response response) throws IOException {
|
||||
List<Challenge> challenges = response.challenges();
|
||||
Request request = response.request();
|
||||
HttpUrl url = request.url();
|
||||
boolean proxyAuthorization = response.code() == 407;
|
||||
Proxy proxy = null;
|
||||
if (route != null) {
|
||||
proxy = route.proxy();
|
||||
}
|
||||
|
||||
for (int i = 0, size = challenges.size(); i < size; i++) {
|
||||
Challenge challenge = challenges.get(i);
|
||||
if (!"Basic".equalsIgnoreCase(challenge.scheme())) continue;
|
||||
|
||||
PasswordAuthentication auth;
|
||||
if (proxyAuthorization) {
|
||||
InetSocketAddress proxyAddress = (InetSocketAddress) proxy.address();
|
||||
auth = java.net.Authenticator.requestPasswordAuthentication(
|
||||
proxyAddress.getHostName(), getConnectToInetAddress(proxy, url), proxyAddress.getPort(),
|
||||
url.scheme(), challenge.realm(), challenge.scheme(), url.url(),
|
||||
RequestorType.PROXY);
|
||||
} else {
|
||||
auth = java.net.Authenticator.requestPasswordAuthentication(
|
||||
url.host(), getConnectToInetAddress(proxy, url), url.port(), url.scheme(),
|
||||
challenge.realm(), challenge.scheme(), url.url(), RequestorType.SERVER);
|
||||
}
|
||||
|
||||
if (auth != null) {
|
||||
String credential = Credentials.basic(auth.getUserName(), new String(auth.getPassword()));
|
||||
return request.newBuilder()
|
||||
.header(proxyAuthorization ? "Proxy-Authorization" : "Authorization", credential)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
return null; // No challenges were satisfied!
|
||||
}
|
||||
|
||||
private InetAddress getConnectToInetAddress(Proxy proxy, HttpUrl url) throws IOException {
|
||||
return (proxy != null && proxy.type() != Proxy.Type.DIRECT)
|
||||
? ((InetSocketAddress) proxy.address()).getAddress()
|
||||
: InetAddress.getByName(url.host());
|
||||
}
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
*/
|
||||
package sbt.internal.librarymanagement
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
import java.util.Collections
|
||||
|
||||
import org.apache.ivy.core.module.descriptor.DependencyDescriptor
|
||||
|
|
@ -27,10 +27,16 @@ import org.apache.ivy.plugins.resolver.{
|
|||
import org.apache.ivy.plugins.repository.url.{ URLRepository => URLRepo }
|
||||
import org.apache.ivy.plugins.repository.file.{ FileResource, FileRepository => FileRepo }
|
||||
import java.io.{ File, IOException }
|
||||
import java.util.Date
|
||||
|
||||
import org.apache.ivy.util.{ ChecksumHelper, FileUtil, Message }
|
||||
import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact }
|
||||
import org.apache.ivy.core.module.id.ModuleRevisionId
|
||||
import org.apache.ivy.core.module.descriptor.DefaultArtifact
|
||||
import org.apache.ivy.core.report.DownloadReport
|
||||
import org.apache.ivy.plugins.resolver.util.{ ResolvedResource, ResourceMDParser }
|
||||
import org.apache.ivy.util.{ ChecksumHelper, FileUtil, Message }
|
||||
import scala.collection.JavaConverters._
|
||||
import sbt.internal.librarymanagement.mavenint.PomExtraDependencyAttributes
|
||||
import sbt.io.IO
|
||||
import sbt.util.Logger
|
||||
import sbt.librarymanagement._
|
||||
|
|
@@ -172,6 +178,32 @@ private[sbt] object ConvertResolver {
        setArtifactPatterns(pattern)
        setIvyPatterns(pattern)
      }
      override protected def findResourceUsingPattern(
          mrid: ModuleRevisionId,
          pattern: String,
          artifact: IArtifact,
          rmdparser: ResourceMDParser,
          date: Date
      ): ResolvedResource = {
        val extraAttributes =
          mrid.getExtraAttributes.asScala.toMap.asInstanceOf[Map[String, String]]
        getSbtPluginCrossVersion(extraAttributes) match {
          case Some(sbtCrossVersion) =>
            // if the module is an sbt plugin
            // we first try to resolve the artifact with the sbt cross version suffix
            // and we fallback to the one without the suffix
            val newArtifact = DefaultArtifact.cloneWithAnotherName(
              artifact,
              artifact.getName + sbtCrossVersion
            )
            val resolved =
              super.findResourceUsingPattern(mrid, pattern, newArtifact, rmdparser, date)
            if (resolved != null) resolved
            else super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
          case None =>
            super.findResourceUsingPattern(mrid, pattern, artifact, rmdparser, date)
        }
      }
    }
    val resolver = new PluginCapableResolver
    if (repo.localIfFile) resolver.setRepository(new LocalIfFileRepo)
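For sbt plugins published in the new POM format, the overridden findResourceUsingPattern first looks up the artifact name with the _<scalaBinaryVersion>_<sbtBinaryVersion> suffix and only falls back to the plain name if that lookup returns nothing. A sketch of just that decision, with the Ivy machinery abstracted behind a hypothetical lookup function:

    // Illustration of the lookup order only; the real code goes through Ivy's
    // ResolvedResource / ResourceMDParser machinery rather than Option values.
    def resolveWithCrossVersionFallback[A](
        artifactName: String,
        sbtCrossVersion: Option[String], // e.g. Some("_2.12_1.0") for an sbt 1.x plugin
        lookup: String => Option[A]      // hypothetical resolver lookup by artifact name
    ): Option[A] =
      sbtCrossVersion match {
        case Some(suffix) => lookup(artifactName + suffix).orElse(lookup(artifactName))
        case None         => lookup(artifactName)
      }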
@ -229,6 +261,13 @@ private[sbt] object ConvertResolver {
|
|||
}
|
||||
}
|
||||
|
||||
private def getSbtPluginCrossVersion(extraAttributes: Map[String, String]): Option[String] = {
|
||||
for {
|
||||
sbtVersion <- extraAttributes.get(PomExtraDependencyAttributes.SbtVersionKey)
|
||||
scalaVersion <- extraAttributes.get(PomExtraDependencyAttributes.ScalaVersionKey)
|
||||
} yield s"_${scalaVersion}_$sbtVersion"
|
||||
}
|
||||
|
||||
private sealed trait DescriptorRequired extends BasicResolver {
|
||||
// Works around implementation restriction to access protected method `get`
|
||||
def getResource(resource: Resource, dest: File): Long
|
||||
|
|
@ -355,7 +394,7 @@ private[sbt] object ConvertResolver {
|
|||
private[this] val repo = new WarnOnOverwriteFileRepo()
|
||||
private[this] val progress = new RepositoryCopyProgressListener(this);
|
||||
override def getResource(source: String) = {
|
||||
val url = new URL(source)
|
||||
val url = new URI(source).toURL
|
||||
if (url.getProtocol == IO.FileScheme)
|
||||
new FileResource(repo, IO.toFile(url))
|
||||
else
|
||||
|
|
@ -363,7 +402,7 @@ private[sbt] object ConvertResolver {
|
|||
}
|
||||
|
||||
override def put(source: File, destination: String, overwrite: Boolean): Unit = {
|
||||
val url = new URL(destination)
|
||||
val url = new URI(destination).toURL
|
||||
try {
|
||||
if (url.getProtocol != IO.FileScheme) super.put(source, destination, overwrite)
|
||||
else {
|
||||
|
|
|
|||
|
|
@ -1,21 +0,0 @@
|
|||
package sbt.internal.librarymanagement
|
||||
|
||||
import gigahorse.HttpClient
|
||||
import okhttp3.{ JavaNetAuthenticator => _, _ }
|
||||
import sbt.librarymanagement.Http
|
||||
|
||||
object CustomHttp {
|
||||
private[this] def http0: HttpClient = Http.http
|
||||
|
||||
private[sbt] def defaultHttpClientBuilder: OkHttpClient.Builder = {
|
||||
http0
|
||||
.underlying[OkHttpClient]
|
||||
.newBuilder()
|
||||
.authenticator(new sbt.internal.librarymanagement.JavaNetAuthenticator)
|
||||
.followRedirects(true)
|
||||
.followSslRedirects(true)
|
||||
}
|
||||
|
||||
private[sbt] lazy val defaultHttpClient: OkHttpClient =
|
||||
defaultHttpClientBuilder.build
|
||||
}
|
||||
|
|
@@ -75,6 +75,32 @@ object CustomPomParser {
  private[this] val unqualifiedKeys =
    Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey, VersionSchemeKey)

  /**
   * In the new POM format of sbt plugins, the dependency to an sbt plugin
   * contains the sbt cross-version _2.12_1.0. The reason is we want Maven to be able
   * to resolve the dependency using the pattern:
   * <org>/<artifact-name>_2.12_1.0/<version>/<artifact-name>_2.12_1.0-<version>.pom
   * In sbt 1.x we use extra-attributes to resolve sbt plugins, so here we must remove
   * the sbt cross-version and keep the extra-attributes.
   * Parsing a dependency found in the new POM format produces the same module as
   * if it is found in the old POM format. It used not to contain the sbt cross-version
   * suffix, but that was invalid.
   * Hence we can resolve conflicts between new and old POM formats.
   *
   * To compare the two formats you can look at the POMs in:
   * https://repo1.maven.org/maven2/ch/epfl/scala/sbt-plugin-example-diamond_2.12_1.0/0.5.0/
   */
  private def removeSbtCrossVersion(
      properties: Map[String, String],
      moduleName: String
  ): String = {
    val sbtCrossVersion = for {
      sbtVersion <- properties.get(s"e:$SbtVersionKey")
      scalaVersion <- properties.get(s"e:$ScalaVersionKey")
    } yield s"_${scalaVersion}_$sbtVersion"
    sbtCrossVersion.map(moduleName.stripSuffix).getOrElse(moduleName)
  }

  // packagings that should be jars, but that Ivy doesn't handle as jars
  // TODO - move this elsewhere.
  val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit", "scala-jar")
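removeSbtCrossVersion is the mirror image of the resolution change above: when a module parsed from the new POM format carries the sbtVersion/scalaVersion extra attributes, the _<scalaVersion>_<sbtVersion> suffix is stripped from its name so that it matches what sbt 1.x resolution expects. A minimal standalone sketch of the same string manipulation, using literal "e:..." keys in place of the parser's constants:

    // Illustrative only; mirrors the private helper in the hunk above.
    def stripSbtCrossVersion(properties: Map[String, String], moduleName: String): String = {
      val suffix = for {
        sbtVersion <- properties.get("e:sbtVersion")
        scalaVersion <- properties.get("e:scalaVersion")
      } yield s"_${scalaVersion}_$sbtVersion"
      suffix.map(moduleName.stripSuffix).getOrElse(moduleName)
    }

    // stripSbtCrossVersion(
    //   Map("e:scalaVersion" -> "2.12", "e:sbtVersion" -> "1.0"),
    //   "sbt-plugin-example-diamond_2.12_1.0"
    // ) == "sbt-plugin-example-diamond"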
@ -165,9 +191,12 @@ object CustomPomParser {
|
|||
import collection.JavaConverters._
|
||||
val oldExtra = qualifiedExtra(id)
|
||||
val newExtra = (oldExtra ++ properties).asJava
|
||||
// remove the sbt plugin cross version from the resolved ModuleRevisionId
|
||||
// sbt-plugin-example_2.12_1.0 => sbt-plugin-example
|
||||
val nameWithoutCrossVersion = removeSbtCrossVersion(properties, id.getName)
|
||||
ModuleRevisionId.newInstance(
|
||||
id.getOrganisation,
|
||||
id.getName,
|
||||
nameWithoutCrossVersion,
|
||||
id.getBranch,
|
||||
id.getRevision,
|
||||
newExtra
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
|
||||
import org.apache.ivy.core.cache.ArtifactOrigin
|
||||
import org.apache.ivy.core.cache.{ DefaultRepositoryCacheManager, RepositoryCacheManager }
|
||||
|
|
@ -69,7 +69,7 @@ private[sbt] class FakeResolver(private var name: String, cacheDir: File, module
|
|||
): ArtifactDownloadReport = {
|
||||
|
||||
val report = new ArtifactDownloadReport(artifact.getArtifact)
|
||||
val path = new URL(artifact.getLocation).toURI.getPath
|
||||
val path = new URI(artifact.getLocation).toURL.toURI.getPath
|
||||
val localFile = new File(path)
|
||||
|
||||
if (path.nonEmpty && localFile.exists) {
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ import java.io.File
|
|||
import java.net.URI
|
||||
import java.util.concurrent.Callable
|
||||
|
||||
import okhttp3.OkHttpClient
|
||||
import org.apache.ivy.Ivy
|
||||
import org.apache.ivy.core.IvyPatternHelper
|
||||
import org.apache.ivy.core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager }
|
||||
|
|
@ -51,17 +50,13 @@ import ivyint.{
|
|||
CachedResolutionResolveEngine,
|
||||
ParallelResolveEngine,
|
||||
SbtDefaultDependencyDescriptor,
|
||||
GigahorseUrlHandler
|
||||
}
|
||||
import sjsonnew.JsonFormat
|
||||
import sjsonnew.support.murmurhash.Hasher
|
||||
|
||||
final class IvySbt(
|
||||
val configuration: IvyConfiguration,
|
||||
val http: OkHttpClient
|
||||
) { self =>
|
||||
def this(configuration: IvyConfiguration) = this(configuration, CustomHttp.defaultHttpClient)
|
||||
|
||||
/*
|
||||
* ========== Configuration/Setup ============
|
||||
* This part configures the Ivy instance by first creating the logger interface to ivy, then IvySettings, and then the Ivy instance.
|
||||
|
|
@ -91,7 +86,6 @@ final class IvySbt(
|
|||
}
|
||||
|
||||
private lazy val basicUrlHandler: URLHandler = new BasicURLHandler
|
||||
private lazy val gigahorseUrlHandler: URLHandler = new GigahorseUrlHandler(http)
|
||||
|
||||
private lazy val settings: IvySettings = {
|
||||
val dispatcher: URLHandlerDispatcher = URLHandlerRegistry.getDefault match {
|
||||
|
|
@ -107,8 +101,8 @@ final class IvySbt(
|
|||
disp
|
||||
}
|
||||
|
||||
val urlHandler: URLHandler =
|
||||
if (configuration.updateOptions.gigahorse) gigahorseUrlHandler else basicUrlHandler
|
||||
// Ignore configuration.updateOptions.gigahorse due to sbt/sbt#6912
|
||||
val urlHandler: URLHandler = basicUrlHandler
|
||||
|
||||
// Only set the urlHandler for the http/https protocols so we do not conflict with any other plugins
|
||||
// that might register other protocol handlers.
|
||||
|
|
@ -229,9 +223,28 @@ final class IvySbt(
|
|||
else IvySbt.cachedResolutionResolveCache.clean()
|
||||
}
|
||||
|
||||
final class Module(rawModuleSettings: ModuleSettings)
|
||||
/**
|
||||
* In the new POM format of sbt plugins, we append the sbt-cross version _2.12_1.0 to
|
||||
* the module artifactId, and the artifactIds of its dependencies that are sbt plugins.
|
||||
*
|
||||
* The goal is to produce a valid Maven POM, a POM that Maven can resolve:
|
||||
* Maven will try and succeed to resolve the POM of pattern:
|
||||
* <org>/<artifact-name>_2.12_1.0/<version>/<artifact-name>_2.12_1.0-<version>.pom
|
||||
*/
|
||||
final class Module(rawModuleSettings: ModuleSettings, appendSbtCrossVersion: Boolean)
|
||||
extends sbt.librarymanagement.ModuleDescriptor { self =>
|
||||
val moduleSettings: ModuleSettings = IvySbt.substituteCross(rawModuleSettings)
|
||||
|
||||
def this(rawModuleSettings: ModuleSettings) =
|
||||
this(rawModuleSettings, appendSbtCrossVersion = false)
|
||||
|
||||
val moduleSettings: ModuleSettings =
|
||||
rawModuleSettings match {
|
||||
case ic: InlineConfiguration =>
|
||||
val icWithCross = IvySbt.substituteCross(ic)
|
||||
if (appendSbtCrossVersion) IvySbt.appendSbtCrossVersion(icWithCross)
|
||||
else icWithCross
|
||||
case m => m
|
||||
}
|
||||
|
||||
def directDependencies: Vector[ModuleID] =
|
||||
moduleSettings match {
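The extra Module constructor parameter is the publishing-side counterpart: with appendSbtCrossVersion = true the sbt cross-version suffix is appended to the module and to its sbt-plugin dependencies so that the generated POM is resolvable by Maven. A hedged construction sketch; the types are used unqualified, assuming the same package and imports as the surrounding IvySbt code:

    // Sketch only: build a plugin module in the new POM format vs. the old behaviour.
    def pluginModule(ivySbt: IvySbt, settings: ModuleSettings): IvySbt#Module =
      new ivySbt.Module(settings, appendSbtCrossVersion = true)

    def plainModule(ivySbt: IvySbt, settings: ModuleSettings): IvySbt#Module =
      new ivySbt.Module(settings) // auxiliary constructor; no suffix is appended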
@ -704,37 +717,32 @@ private[sbt] object IvySbt {
|
|||
)
|
||||
}
|
||||
|
||||
private def substituteCross(m: ModuleSettings): ModuleSettings = {
|
||||
m.scalaModuleInfo match {
|
||||
case None => m
|
||||
case Some(is) => substituteCross(m, is.scalaFullVersion, is.scalaBinaryVersion, is.platform)
|
||||
private def substituteCross(ic: InlineConfiguration): InlineConfiguration = {
|
||||
ic.scalaModuleInfo match {
|
||||
case None => ic
|
||||
case Some(is) => substituteCross(ic, is.scalaFullVersion, is.scalaBinaryVersion, is.platform)
|
||||
}
|
||||
}
|
||||
|
||||
private def substituteCross(
|
||||
m: ModuleSettings,
|
||||
ic: InlineConfiguration,
|
||||
scalaFullVersion: String,
|
||||
scalaBinaryVersion: String,
|
||||
platform: Option[String],
|
||||
): ModuleSettings =
|
||||
m match {
|
||||
case ic: InlineConfiguration =>
|
||||
val applyPlatform: ModuleID => ModuleID = substitutePlatform(platform)
|
||||
val transform: ModuleID => ModuleID = (m: ModuleID) =>
|
||||
val applyCross = CrossVersion(scalaFullVersion, scalaBinaryVersion)
|
||||
applyCross(applyPlatform(m))
|
||||
def propagateCrossVersion(moduleID: ModuleID): ModuleID = {
|
||||
val crossExclusions: Vector[ExclusionRule] =
|
||||
moduleID.exclusions
|
||||
.map(CrossVersion.substituteCross(_, ic.scalaModuleInfo))
|
||||
transform(moduleID)
|
||||
.withExclusions(crossExclusions)
|
||||
}
|
||||
ic.withModule(transform(ic.module))
|
||||
.withDependencies(ic.dependencies.map(propagateCrossVersion))
|
||||
.withOverrides(ic.overrides map transform)
|
||||
case _ => m
|
||||
platform: Option[String]
|
||||
): InlineConfiguration = {
|
||||
val applyPlatform: ModuleID => ModuleID = substitutePlatform(platform)
|
||||
val applyCross = CrossVersion(scalaFullVersion, scalaBinaryVersion)
|
||||
val transform: ModuleID => ModuleID = (m: ModuleID) => applyCross(applyPlatform(m))
|
||||
def propagateCrossVersion(moduleID: ModuleID): ModuleID = {
|
||||
val crossExclusions: Vector[ExclusionRule] =
|
||||
moduleID.exclusions.map(CrossVersion.substituteCross(_, ic.scalaModuleInfo))
|
||||
transform(moduleID)
|
||||
.withExclusions(crossExclusions)
|
||||
}
|
||||
ic.withModule(transform(ic.module))
|
||||
.withDependencies(ic.dependencies.map(propagateCrossVersion))
|
||||
.withOverrides(ic.overrides map transform)
|
||||
}
|
||||
|
||||
private def substitutePlatform(platform: Option[String]): ModuleID => ModuleID = {
|
||||
def addSuffix(m: ModuleID, platformName: String): ModuleID =
|
||||
|
|
@ -748,6 +756,22 @@ private[sbt] object IvySbt {
|
|||
case _ => m
|
||||
}
|
||||
|
||||
private def appendSbtCrossVersion(ic: InlineConfiguration): InlineConfiguration =
|
||||
ic.withModule(appendSbtCrossVersion(ic.module))
|
||||
.withDependencies(ic.dependencies.map(appendSbtCrossVersion))
|
||||
.withOverrides(ic.overrides.map(appendSbtCrossVersion))
|
||||
|
||||
private def appendSbtCrossVersion(mid: ModuleID): ModuleID = {
|
||||
val crossVersion = for {
|
||||
scalaVersion <- mid.extraAttributes.get("e:scalaVersion")
|
||||
sbtVersion <- mid.extraAttributes.get("e:sbtVersion")
|
||||
} yield s"_${scalaVersion}_$sbtVersion"
|
||||
crossVersion
|
||||
.filter(!mid.name.endsWith(_))
|
||||
.map(cv => mid.withName(mid.name + cv))
|
||||
.getOrElse(mid)
|
||||
}
|
||||
|
||||
private def toIvyArtifact(
|
||||
moduleID: ModuleDescriptor,
|
||||
a: Artifact,
|
||||
|
|
|
|||
|
|
@ -112,7 +112,7 @@ class IvyCache(val ivyHome: Option[File]) {
|
|||
.withResolvers(Vector(local))
|
||||
.withLock(lock)
|
||||
.withLog(log)
|
||||
(new IvySbt(conf, CustomHttp.defaultHttpClient), local)
|
||||
(new IvySbt(conf), local)
|
||||
}
|
||||
|
||||
/** Creates a default jar artifact based on the given ID. */
|
||||
|
|
|
|||
|
|
@ -105,7 +105,7 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
s"""Include(${rule.getId},${rule.getConfigurations.mkString(",")},${rule.getMatcher})"""
|
||||
def artifactString(dad: DependencyArtifactDescriptor): String =
|
||||
s"""Artifact(${dad.getName},${dad.getType},${dad.getExt},${dad.getUrl},${dad.getConfigurations
|
||||
.mkString(",")},${dad.getExtraAttributes})"""
|
||||
.mkString(",")},${dad.getExtraAttributes})"""
|
||||
val mrid = dd.getDependencyRevisionId
|
||||
val confMap = (dd.getModuleConfigurations map { conf =>
|
||||
conf + "->(" + dd.getDependencyConfigurations(conf).mkString(",") + ")"
|
||||
|
|
@ -128,12 +128,12 @@ private[sbt] class CachedResolutionResolveCache {
|
|||
val os = extractOverrides(parent)
|
||||
val moduleLevel = s"""dependencyOverrides=${os.mkString(",")};moduleExclusions=$mesStr"""
|
||||
val depsString = s"""$mrid;${confMap.mkString(
|
||||
","
|
||||
)};isForce=${dd.isForce};isChanging=${dd.isChanging};isTransitive=${dd.isTransitive};""" +
|
||||
s"""exclusions=${exclusions.mkString(",")};inclusions=${inclusions.mkString(
|
||||
","
|
||||
)};explicitArtifacts=${explicitArtifacts
|
||||
.mkString(",")};$moduleLevel;"""
|
||||
)};isForce=${dd.isForce};isChanging=${dd.isChanging};isTransitive=${dd.isTransitive};""" +
|
||||
s"""exclusions=${exclusions.mkString(",")};inclusions=${inclusions.mkString(
|
||||
","
|
||||
)};explicitArtifacts=${explicitArtifacts
|
||||
.mkString(",")};$moduleLevel;"""
|
||||
val sha1 = Hash.toHex(
|
||||
Hash(s"""graphVersion=${CachedResolutionResolveCache.graphVersion};$depsString""")
|
||||
)
|
||||
|
|
|
|||
|
|
@ -64,12 +64,14 @@ object ErrorMessageAuthenticator {
|
|||
ivyOriginalField.set(ivy, newOriginal)
|
||||
}
|
||||
|
||||
try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
|
||||
case Some(
|
||||
_: ErrorMessageAuthenticator
|
||||
) => // We're already installed, no need to do the work again.
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
} catch {
|
||||
try
|
||||
Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
|
||||
case Some(
|
||||
_: ErrorMessageAuthenticator
|
||||
) => // We're already installed, no need to do the work again.
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
}
|
||||
catch {
|
||||
case t: Throwable =>
|
||||
Message.debug(
|
||||
"Error occurred while trying to install debug messages into Ivy Authentication" + t.getMessage
|
||||
|
|
@ -137,16 +139,17 @@ private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticato
|
|||
// Grabs the authentication that would have been provided had we not been installed...
|
||||
def originalAuthentication: Option[PasswordAuthentication] = {
|
||||
Authenticator.setDefault(original.orNull)
|
||||
try Option(
|
||||
Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme
|
||||
try
|
||||
Option(
|
||||
Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme
|
||||
)
|
||||
)
|
||||
)
|
||||
finally Authenticator.setDefault(this)
|
||||
}
|
||||
originalAuthentication.orNull
|
||||
|
|
|
|||
|
|
@ -1,341 +0,0 @@
|
|||
package sbt.internal.librarymanagement
|
||||
package ivyint
|
||||
|
||||
import java.net.{ URL, UnknownHostException }
|
||||
import java.io._
|
||||
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
import okhttp3.{ MediaType, Request, RequestBody }
|
||||
import okhttp3.internal.http.HttpDate
|
||||
|
||||
import okhttp3.{ JavaNetAuthenticator => _, _ }
|
||||
import okio._
|
||||
|
||||
import org.apache.ivy.util.{ CopyProgressEvent, CopyProgressListener, Message }
|
||||
import org.apache.ivy.util.url.{ AbstractURLHandler, BasicURLHandler, IvyAuthenticator, URLHandler }
|
||||
import org.apache.ivy.util.url.URLHandler._
|
||||
import sbt.io.IO
|
||||
|
||||
// Copied from Ivy's BasicURLHandler.
|
||||
class GigahorseUrlHandler(http: OkHttpClient) extends AbstractURLHandler {
|
||||
|
||||
import GigahorseUrlHandler._
|
||||
|
||||
/**
|
||||
* Returns the URLInfo of the given url or a #UNAVAILABLE instance,
|
||||
* if the url is not reachable.
|
||||
*/
|
||||
def getURLInfo(url: URL): URLInfo = getURLInfo(url, 0)
|
||||
|
||||
/**
|
||||
* Returns the URLInfo of the given url or a #UNAVAILABLE instance,
|
||||
* if the url is not reachable.
|
||||
*/
|
||||
def getURLInfo(url0: URL, timeout: Int): URLInfo = {
|
||||
// Install the ErrorMessageAuthenticator
|
||||
if ("http" == url0.getProtocol || "https" == url0.getProtocol) {
|
||||
IvyAuthenticator.install()
|
||||
ErrorMessageAuthenticator.install()
|
||||
}
|
||||
|
||||
val url = normalizeToURL(url0)
|
||||
val request = new Request.Builder()
|
||||
.url(url)
|
||||
|
||||
if (getRequestMethod == URLHandler.REQUEST_METHOD_HEAD) request.head() else request.get()
|
||||
|
||||
val response = http.newCall(request.build()).execute()
|
||||
try {
|
||||
val infoOption =
|
||||
try {
|
||||
|
||||
if (checkStatusCode(url, response)) {
|
||||
val bodyCharset =
|
||||
BasicURLHandler.getCharSetFromContentType(
|
||||
Option(response.body().contentType()).map(_.toString).orNull
|
||||
)
|
||||
Some(
|
||||
new SbtUrlInfo(
|
||||
true,
|
||||
response.body().contentLength(),
|
||||
lastModifiedTimestamp(response),
|
||||
bodyCharset
|
||||
)
|
||||
)
|
||||
} else None
|
||||
//
|
||||
// Commented out for now - can potentially be used for non HTTP urls
|
||||
//
|
||||
// val contentLength: Long = con.getContentLengthLong
|
||||
// if (contentLength <= 0) None
|
||||
// else {
|
||||
// // TODO: not HTTP... maybe we *don't* want to default to ISO-8559-1 here?
|
||||
// val bodyCharset = BasicURLHandler.getCharSetFromContentType(con.getContentType)
|
||||
// Some(new SbtUrlInfo(true, contentLength, con.getLastModified(), bodyCharset))
|
||||
// }
|
||||
|
||||
} catch {
|
||||
case e: UnknownHostException =>
|
||||
Message.warn("Host " + e.getMessage + " not found. url=" + url)
|
||||
Message.info(
|
||||
"You probably access the destination server through "
|
||||
+ "a proxy server that is not well configured."
|
||||
)
|
||||
None
|
||||
case e: IOException =>
|
||||
Message.error("Server access Error: " + e.getMessage + " url=" + url)
|
||||
None
|
||||
}
|
||||
infoOption.getOrElse(UNAVAILABLE)
|
||||
} finally {
|
||||
response.close()
|
||||
}
|
||||
}
|
||||
|
||||
// The caller of this *MUST* call Response.close()
|
||||
private def getUrl(url0: URL): okhttp3.Response = {
|
||||
// Install the ErrorMessageAuthenticator
|
||||
if ("http" == url0.getProtocol || "https" == url0.getProtocol) {
|
||||
IvyAuthenticator.install()
|
||||
ErrorMessageAuthenticator.install()
|
||||
}
|
||||
|
||||
val url = normalizeToURL(url0)
|
||||
val request = new Request.Builder()
|
||||
.url(url)
|
||||
.get()
|
||||
.build()
|
||||
|
||||
val response = http.newCall(request).execute()
|
||||
try {
|
||||
if (!checkStatusCode(url, response)) {
|
||||
throw new IOException(
|
||||
"The HTTP response code for " + url + " did not indicate a success."
|
||||
+ " See log for more detail."
|
||||
)
|
||||
}
|
||||
response
|
||||
} catch {
|
||||
case NonFatal(e) =>
|
||||
// ensure the response gets closed if there's an error
|
||||
response.close()
|
||||
throw e
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
def openStream(url: URL): InputStream = {
|
||||
// It's assumed that the caller of this will call close() on the supplied inputstream,
|
||||
// thus closing the OkHTTP request
|
||||
getUrl(url).body().byteStream()
|
||||
}
|
||||
|
||||
def download(url: URL, dest: File, l: CopyProgressListener): Unit = {
|
||||
|
||||
val response = getUrl(url)
|
||||
try {
|
||||
|
||||
if (l != null) {
|
||||
l.start(new CopyProgressEvent())
|
||||
}
|
||||
val sink = Okio.buffer(Okio.sink(dest))
|
||||
try {
|
||||
sink.writeAll(response.body().source())
|
||||
sink.flush()
|
||||
} finally {
|
||||
sink.close()
|
||||
}
|
||||
|
||||
val contentLength = response.body().contentLength()
|
||||
if (contentLength != -1 && dest.length != contentLength) {
|
||||
IO.delete(dest)
|
||||
throw new IOException(
|
||||
"Downloaded file size doesn't match expected Content Length for " + url
|
||||
+ ". Please retry."
|
||||
)
|
||||
}
|
||||
|
||||
val lastModified = lastModifiedTimestamp(response)
|
||||
if (lastModified > 0) {
|
||||
IO.setModifiedTimeOrFalse(dest, lastModified)
|
||||
}
|
||||
|
||||
if (l != null) {
|
||||
l.end(new CopyProgressEvent(EmptyBuffer, contentLength))
|
||||
}
|
||||
|
||||
} finally {
|
||||
response.close()
|
||||
}
|
||||
}
|
||||
|
||||
def upload(source: File, dest0: URL, l: CopyProgressListener): Unit = {
|
||||
|
||||
if (("http" != dest0.getProtocol) && ("https" != dest0.getProtocol)) {
|
||||
throw new UnsupportedOperationException("URL repository only support HTTP PUT at the moment")
|
||||
}
|
||||
|
||||
IvyAuthenticator.install()
|
||||
ErrorMessageAuthenticator.install()
|
||||
|
||||
val dest = normalizeToURL(dest0)
|
||||
|
||||
val body = RequestBody.create(MediaType.parse("application/octet-stream"), source)
|
||||
|
||||
val request = new Request.Builder()
|
||||
.url(dest)
|
||||
.put(body)
|
||||
.build()
|
||||
|
||||
if (l != null) {
|
||||
l.start(new CopyProgressEvent())
|
||||
}
|
||||
val response = http.newCall(request).execute()
|
||||
try {
|
||||
if (l != null) {
|
||||
l.end(new CopyProgressEvent(EmptyBuffer, source.length()))
|
||||
}
|
||||
validatePutStatusCode(dest, response)
|
||||
} finally {
|
||||
response.close()
|
||||
}
|
||||
}
|
||||
|
||||
private val ErrorBodyTruncateLen =
|
||||
512 // in case some bad service returns files rather than messages in error bodies
|
||||
private val DefaultErrorCharset = java.nio.charset.StandardCharsets.UTF_8
|
||||
|
||||
// neurotic resource managemement...
|
||||
// we could use this elsewhere in the class too
|
||||
private def borrow[S <: AutoCloseable, T](rsrc: => S)(op: S => T): T = {
|
||||
val r = rsrc
|
||||
val out = {
|
||||
try {
|
||||
op(r)
|
||||
} catch {
|
||||
case NonFatal(t) => {
|
||||
try {
|
||||
r.close()
|
||||
} catch {
|
||||
case NonFatal(ct) => t.addSuppressed(ct)
|
||||
}
|
||||
throw t
|
||||
}
|
||||
}
|
||||
}
|
||||
r.close()
|
||||
out
|
||||
}
|
||||
|
||||
// this is perhaps overly cautious, but oh well
|
||||
private def readTruncated(byteStream: InputStream): Option[(Array[Byte], Boolean)] = {
|
||||
borrow(byteStream) { is =>
|
||||
borrow(new ByteArrayOutputStream(ErrorBodyTruncateLen)) { os =>
|
||||
var count = 0
|
||||
var b = is.read()
|
||||
var truncated = false
|
||||
while (!truncated && b >= 0) {
|
||||
if (count >= ErrorBodyTruncateLen) {
|
||||
truncated = true
|
||||
} else {
|
||||
os.write(b)
|
||||
count += 1
|
||||
b = is.read()
|
||||
}
|
||||
}
|
||||
if (count > 0) {
|
||||
Some((os.toByteArray, truncated))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Supplements the IOException emitted on a bad status code by our inherited validatePutStatusCode(...)
|
||||
* method with any message that might be present in an error response body.
|
||||
*
|
||||
* after calling this method, the object given as the response parameter must be reliably closed.
|
||||
*/
|
||||
private def validatePutStatusCode(dest: URL, response: Response): Unit = {
|
||||
try {
|
||||
validatePutStatusCode(dest, response.code(), response.message())
|
||||
} catch {
|
||||
case ioe: IOException => {
|
||||
val mbBodyMessage = {
|
||||
for {
|
||||
body <- Option(response.body())
|
||||
is <- Option(body.byteStream)
|
||||
(bytes, truncated) <- readTruncated(is)
|
||||
charset <- Option(body.contentType()).map(_.charset(DefaultErrorCharset)) orElse Some(
|
||||
DefaultErrorCharset
|
||||
)
|
||||
} yield {
|
||||
val raw = new String(bytes, charset)
|
||||
if (truncated) raw + "..." else raw
|
||||
}
|
||||
}
|
||||
|
||||
mbBodyMessage match {
|
||||
case Some(bodyMessage) => { // reconstruct the IOException
|
||||
val newMessage = ioe.getMessage() + s"; Response Body: ${bodyMessage}"
|
||||
val reconstructed = new IOException(newMessage, ioe.getCause())
|
||||
reconstructed.setStackTrace(ioe.getStackTrace())
|
||||
throw reconstructed
|
||||
}
|
||||
case None => {
|
||||
throw ioe
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
object GigahorseUrlHandler {
|
||||
// This is requires to access the constructor of URLInfo.
|
||||
private[sbt] class SbtUrlInfo(
|
||||
available: Boolean,
|
||||
contentLength: Long,
|
||||
lastModified: Long,
|
||||
bodyCharset: String
|
||||
) extends URLInfo(available, contentLength, lastModified, bodyCharset) {
|
||||
def this(available: Boolean, contentLength: Long, lastModified: Long) = {
|
||||
this(available, contentLength, lastModified, null)
|
||||
}
|
||||
}
|
||||
|
||||
private val EmptyBuffer: Array[Byte] = new Array[Byte](0)
|
||||
|
||||
private def checkStatusCode(url: URL, response: Response): Boolean =
|
||||
response.code() match {
|
||||
case 200 => true
|
||||
case 204 if "HEAD" == response.request().method() => true
|
||||
case status =>
|
||||
Message.debug("HTTP response status: " + status + " url=" + url)
|
||||
if (status == 407 /* PROXY_AUTHENTICATION_REQUIRED */ ) {
|
||||
Message.warn("Your proxy requires authentication.")
|
||||
} else if (status == 401) {
|
||||
Message.warn(
|
||||
"CLIENT ERROR: 401 Unauthorized. Check your resolvers username and password."
|
||||
)
|
||||
} else if (String.valueOf(status).startsWith("4")) {
|
||||
Message.verbose("CLIENT ERROR: " + response.message() + " url=" + url)
|
||||
} else if (String.valueOf(status).startsWith("5")) {
|
||||
Message.error("SERVER ERROR: " + response.message() + " url=" + url)
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
private def lastModifiedTimestamp(response: Response): Long = {
|
||||
val lastModifiedDate =
|
||||
Option(response.headers().get("Last-Modified")).flatMap { headerValue =>
|
||||
Option(HttpDate.parse(headerValue))
|
||||
}
|
||||
|
||||
lastModifiedDate.map(_.getTime).getOrElse(0)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -37,9 +37,9 @@ private[sbt] case class SbtChainResolver(
|
|||
override def equals(o: Any): Boolean = o match {
|
||||
case o: SbtChainResolver =>
|
||||
this.name == o.name &&
|
||||
this.resolvers == o.resolvers &&
|
||||
this.settings == o.settings &&
|
||||
this.updateOptions == o.updateOptions
|
||||
this.resolvers == o.resolvers &&
|
||||
this.settings == o.settings &&
|
||||
this.updateOptions == o.updateOptions
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
@ -234,7 +234,7 @@ private[sbt] case class SbtChainResolver(
|
|||
val (module, resolver) = h
|
||||
Message.info(
|
||||
s"Out of ${sortedRevisions.size} candidates we found for ${module.getId} in ${resolvers
|
||||
.mkString(" and ")}, we are choosing ${resolver}."
|
||||
.mkString(" and ")}, we are choosing ${resolver}."
|
||||
)
|
||||
})
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ package sbt
|
|||
package librarymanagement
|
||||
package ivy
|
||||
|
||||
import okhttp3.OkHttpClient
|
||||
import sbt.internal.librarymanagement._
|
||||
import sbt.util.Logger
|
||||
|
||||
|
|
@ -28,8 +27,5 @@ class IvyDependencyResolution private[sbt] (val ivySbt: IvySbt)
|
|||
|
||||
object IvyDependencyResolution {
|
||||
def apply(ivyConfiguration: IvyConfiguration): DependencyResolution =
|
||||
apply(ivyConfiguration, CustomHttp.defaultHttpClient)
|
||||
|
||||
def apply(ivyConfiguration: IvyConfiguration, http: OkHttpClient): DependencyResolution =
|
||||
DependencyResolution(new IvyDependencyResolution(new IvySbt(ivyConfiguration, http)))
|
||||
DependencyResolution(new IvyDependencyResolution(new IvySbt(ivyConfiguration)))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ package sbt
|
|||
package librarymanagement
|
||||
package ivy
|
||||
|
||||
import okhttp3.OkHttpClient
|
||||
import sbt.internal.librarymanagement._
|
||||
import sbt.util.Logger
|
||||
import java.io.File
|
||||
|
|
@ -34,8 +33,5 @@ class IvyPublisher private[sbt] (val ivySbt: IvySbt) extends PublisherInterface
|
|||
|
||||
object IvyPublisher {
|
||||
def apply(ivyConfiguration: IvyConfiguration): Publisher =
|
||||
apply(ivyConfiguration, CustomHttp.defaultHttpClient)
|
||||
|
||||
def apply(ivyConfiguration: IvyConfiguration, http: OkHttpClient): Publisher =
|
||||
Publisher(new IvyPublisher(new IvySbt(ivyConfiguration, http)))
|
||||
Publisher(new IvyPublisher(new IvySbt(ivyConfiguration)))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -79,12 +79,12 @@ final class UpdateOptions private[sbt] (
|
|||
override def equals(o: Any): Boolean = o match {
|
||||
case o: UpdateOptions =>
|
||||
this.circularDependencyLevel == o.circularDependencyLevel &&
|
||||
this.interProjectFirst == o.interProjectFirst &&
|
||||
this.latestSnapshots == o.latestSnapshots &&
|
||||
this.cachedResolution == o.cachedResolution &&
|
||||
this.gigahorse == o.gigahorse &&
|
||||
this.resolverConverter == o.resolverConverter &&
|
||||
this.moduleResolvers == o.moduleResolvers
|
||||
this.interProjectFirst == o.interProjectFirst &&
|
||||
this.latestSnapshots == o.latestSnapshots &&
|
||||
this.cachedResolution == o.cachedResolution &&
|
||||
this.gigahorse == o.gigahorse &&
|
||||
this.resolverConverter == o.resolverConverter &&
|
||||
this.moduleResolvers == o.moduleResolvers
|
||||
case _ => false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ trait BaseIvySpecification extends AbstractEngineSpec {
|
|||
scalaFullVersion: Option[String],
|
||||
uo: UpdateOptions = UpdateOptions(),
|
||||
overrideScalaVersion: Boolean = true,
|
||||
appendSbtCrossVersion: Boolean = false,
|
||||
platform: Option[String] = None,
|
||||
): IvySbt#Module = {
|
||||
val scalaModuleInfo = scalaFullVersion map { fv =>
|
||||
|
|
@ -59,7 +60,7 @@ trait BaseIvySpecification extends AbstractEngineSpec {
|
|||
.withConfigurations(configurations)
|
||||
.withScalaModuleInfo(scalaModuleInfo)
|
||||
val ivySbt = new IvySbt(mkIvyConfiguration(uo))
|
||||
new ivySbt.Module(moduleSetting)
|
||||
new ivySbt.Module(moduleSetting, appendSbtCrossVersion)
|
||||
}
|
||||
|
||||
def resolvers: Vector[Resolver] = Vector(Resolver.mavenCentral)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
package sbt.internal.librarymanagement
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URI
|
||||
import java.io.File
|
||||
|
||||
import sbt.librarymanagement._
|
||||
|
|
@ -43,7 +43,7 @@ object DMSerializationSpec extends BasicTestSuite {
|
|||
}
|
||||
|
||||
test("""Artifact("foo", url("http://example.com/")) should roundtrip""") {
|
||||
roundtrip(Artifact("foo", new URL("http://example.com/")))
|
||||
roundtrip(Artifact("foo", new URI("http://example.com/").toURL))
|
||||
}
|
||||
|
||||
test("""Artifact("foo").extra(("key", "value")) should roundtrip""") {
|
||||
|
|
@ -75,7 +75,7 @@ object DMSerializationSpec extends BasicTestSuite {
|
|||
new File("./foo"),
|
||||
Vector(configurationReportExample),
|
||||
UpdateStats(0, 0, 0, false),
|
||||
Map(new File("./foo") -> 0)
|
||||
Map("./foo" -> 0)
|
||||
)
|
||||
lazy val configurationReportExample =
|
||||
ConfigurationReport(
|
||||
|
|
|
|||
|
|
@ -98,6 +98,23 @@ object EvictionErrorSpec extends BaseIvySpecification {
|
|||
assert(EvictionError(report, m, overrideRules).incompatibleEvictions.isEmpty)
|
||||
}
|
||||
|
||||
test("it should selectively allow opt-out from the error despite assumed scheme") {
|
||||
val deps = Vector(`scala2.12.17`, `akkaActor2.6.0`, `swagger-akka-http1.4.0`)
|
||||
val m = module(defaultModuleId, deps, Some("2.12.17"))
|
||||
val report = ivyUpdate(m)
|
||||
val overrideRules = List("org.scala-lang.modules" %% "scala-java8-compat" % "always")
|
||||
assert(
|
||||
EvictionError(
|
||||
report = report,
|
||||
module = m,
|
||||
schemes = overrideRules,
|
||||
assumedVersionScheme = "early-semver",
|
||||
assumedVersionSchemeJava = "always",
|
||||
assumedEvictionErrorLevel = Level.Error,
|
||||
).assumedIncompatibleEvictions.isEmpty
|
||||
)
|
||||
}
|
||||
|
||||
// older Akka was on pvp
|
||||
def oldAkkaPvp = List("com.typesafe.akka" % "*" % "pvp")
|
||||
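The new test exercises the scheme override list: each entry is an ordinary ModuleID-shaped rule whose version slot names the version scheme to assume ("pvp", "early-semver", "always", ...), and "always" effectively opts a module out of eviction errors. A hedged sketch of such a list, assuming the usual %/%% string syntax is in scope:

    import sbt.librarymanagement._
    import sbt.librarymanagement.syntax._

    // The "version" position carries a scheme name, not a version.
    val schemes = List(
      "com.typesafe.akka" % "*" % "pvp",                          // wildcard module name
      "org.scala-lang.modules" %% "scala-java8-compat" % "always" // opt this module out
    )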
@ -109,8 +126,14 @@ object EvictionErrorSpec extends BaseIvySpecification {
|
|||
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
lazy val `akkaActor2.6.0` =
|
||||
ModuleID("com.typesafe.akka", "akka-actor", "2.6.0").withConfigurations(
|
||||
Some("compile")
|
||||
) cross CrossVersion.binary
|
||||
lazy val `scala2.10.4` =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.10.4").withConfigurations(Some("compile"))
|
||||
lazy val `scala2.12.17` =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.12.17").withConfigurations(Some("compile"))
|
||||
lazy val `scala2.13.3` =
|
||||
ModuleID("org.scala-lang", "scala-library", "2.13.3").withConfigurations(Some("compile"))
|
||||
lazy val `bananaSesame0.4` =
|
||||
|
|
@ -131,6 +154,9 @@ object EvictionErrorSpec extends BaseIvySpecification {
|
|||
("org.typelevel" %% "cats-parse" % "0.1.0").withConfigurations(Some("compile"))
|
||||
lazy val `cats-parse0.2.0` =
|
||||
("org.typelevel" %% "cats-parse" % "0.2.0").withConfigurations(Some("compile"))
|
||||
lazy val `swagger-akka-http1.4.0` =
|
||||
("com.github.swagger-akka-http" %% "swagger-akka-http" % "1.4.0")
|
||||
.withConfigurations(Some("compile"))
|
||||
|
||||
def dummyScalaModuleInfo(v: String): ScalaModuleInfo =
|
||||
ScalaModuleInfo(
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ object FakeResolverSpecification extends BaseIvySpecification {
|
|||
val allFiles = getAllFiles(report)
|
||||
|
||||
assert(report.allModules.length == 1)
|
||||
assert(report.allModuleReports.length == 1)
|
||||
assert(report.configurations.length == 3)
|
||||
assert(allFiles.toSet.size == 1)
|
||||
assert(allFiles(1).getName == "artifact1-0.0.1-SNAPSHOT.jar")
|
||||
|
|
@ -34,6 +35,7 @@ object FakeResolverSpecification extends BaseIvySpecification {
|
|||
val allFiles = getAllFiles(report).toSet
|
||||
|
||||
assert(report.allModules.length == 1)
|
||||
assert(report.allModuleReports.length == 1)
|
||||
assert(report.configurations.length == 3)
|
||||
assert(allFiles.toSet.size == 2)
|
||||
assert(allFiles.map(_.getName) == Set("artifact1-1.0.0.jar", "artifact2-1.0.0.txt"))
|
||||
|
|
|
|||
|
|
@ -35,6 +35,7 @@ object FrozenModeSpec extends BaseIvySpecification {
|
|||
val onlineResolution = update(toResolve, onlineConf)
|
||||
assert(onlineResolution.isRight)
|
||||
val numberResolved = onlineResolution.right.get.allModules.size
|
||||
val numberReportsResolved = onlineResolution.right.get.allModuleReports.size
|
||||
|
||||
cleanIvyCache()
|
||||
val singleFrozenResolution = update(toResolve, frozenConf)
|
||||
|
|
@ -43,6 +44,10 @@ object FrozenModeSpec extends BaseIvySpecification {
|
|||
singleFrozenResolution.right.get.allModules.size == 1,
|
||||
s"The number of explicit modules in frozen mode should 1"
|
||||
)
|
||||
assert(
|
||||
singleFrozenResolution.right.get.allModuleReports.size == 1,
|
||||
s"The number of explicit module reports in frozen mode should 1"
|
||||
)
|
||||
|
||||
cleanIvyCache()
|
||||
// This relies on the fact that stoml has 5 transitive dependencies
|
||||
|
|
@ -53,5 +58,9 @@ object FrozenModeSpec extends BaseIvySpecification {
|
|||
frozenResolution.right.get.allModules.size == numberResolved,
|
||||
s"The number of explicit modules in frozen mode should be equal than $numberResolved"
|
||||
)
|
||||
assert(
|
||||
frozenResolution.right.get.allModuleReports.size == numberReportsResolved,
|
||||
s"The number of explicit module reports in frozen mode should be equal than $numberReportsResolved"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -68,6 +68,10 @@ object InclExclSpec extends BaseIvySpecification {
|
|||
!report.allModules.exists(_.name.contains("lift-json")),
|
||||
"lift-json has not been excluded."
|
||||
)
|
||||
assert(
|
||||
!report.allModuleReports.exists(_.module.name.contains("lift-json")),
|
||||
"lift-json has not been excluded."
|
||||
)
|
||||
}
|
||||
|
||||
def testScalaLibraryIsMissing(report: UpdateReport): Unit = {
|
||||
|
|
@ -75,6 +79,10 @@ object InclExclSpec extends BaseIvySpecification {
|
|||
!report.allModules.exists(_.name.contains("scala-library")),
|
||||
"scala-library has not been excluded."
|
||||
)
|
||||
assert(
|
||||
!report.allModuleReports.exists(_.module.name.contains("scala-library")),
|
||||
"scala-library has not been excluded."
|
||||
)
|
||||
}
|
||||
|
||||
def testScalahostIsMissing(report: UpdateReport): Unit = {
|
||||
|
|
@ -82,5 +90,9 @@ object InclExclSpec extends BaseIvySpecification {
|
|||
!report.allModules.exists(_.name.contains("scalahost")),
|
||||
"scalahost has not been excluded."
|
||||
)
|
||||
assert(
|
||||
!report.allModuleReports.exists(_.module.name.contains("scalahost")),
|
||||
"scalahost has not been excluded."
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,38 @@
|
|||
package sbt.internal.librarymanagement
|
||||
|
||||
import sbt.internal.librarymanagement.mavenint.PomExtraDependencyAttributes.{
|
||||
SbtVersionKey,
|
||||
ScalaVersionKey
|
||||
}
|
||||
import sbt.librarymanagement.{ CrossVersion, ModuleDescriptorConfiguration }
|
||||
|
||||
object IvyModuleSpec extends BaseIvySpecification {
|
||||
|
||||
test("The Scala binary version of a Scala module should be appended to its name") {
|
||||
val m = module(
|
||||
defaultModuleId.withCrossVersion(CrossVersion.Binary()),
|
||||
Vector.empty,
|
||||
Some("2.13.10")
|
||||
)
|
||||
m.moduleSettings match {
|
||||
case configuration: ModuleDescriptorConfiguration =>
|
||||
assert(configuration.module.name == "foo_2.13")
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
|
||||
test("The sbt cross-version should be appended to the name of an sbt plugin") {
|
||||
val m = module(
|
||||
defaultModuleId.extra(SbtVersionKey -> "1.0", ScalaVersionKey -> "2.12"),
|
||||
Vector.empty,
|
||||
Some("2.12.17"),
|
||||
appendSbtCrossVersion = true
|
||||
)
|
||||
m.moduleSettings match {
|
||||
case configuration: ModuleDescriptorConfiguration =>
|
||||
assert(configuration.module.name == "foo_2.12_1.0")
|
||||
case _ => fail()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@@ -1,13 +1,13 @@
 package sbttest
 
-import java.net.URL
+import java.net.URI
 import sbt.librarymanagement._
 import sbt.librarymanagement.syntax._
 import verify.BasicTestSuite
 
 class ResolverSpec extends BasicTestSuite {
   test("Resolver.url") {
-    Resolver.url("Test Repo", new URL("http://example.com/"))(Resolver.ivyStylePatterns)
+    Resolver.url("Test Repo", new URI("http://example.com/").toURL)(Resolver.ivyStylePatterns)
     ()
   }
 
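The ResolverSpec change replaces the java.net.URL(String) constructor, which is deprecated in recent JDKs, with construction through java.net.URI. A minimal sketch of the same pattern outside the test (repoUrl is an illustrative name):

import java.net.URI

// Build a java.net.URL without the deprecated URL(String) constructor;
// going through URI also validates the syntax before the URL is created.
val repoUrl = new URI("http://example.com/").toURL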
@@ -3,14 +3,14 @@ import Keys._
 import sbt.contraband.ContrabandPlugin.autoImport._
 
 object Dependencies {
-  val scala212 = "2.12.15"
-  val scala213 = "2.13.6"
-  val scala3 = "3.2.1"
+  val scala212 = "2.12.18"
+  val scala213 = "2.13.12"
+  val scala3 = "3.3.3"
 
   def nightlyVersion: Option[String] =
     sys.env.get("BUILD_VERSION") orElse sys.props.get("sbt.build.version")
 
-  private val ioVersion = nightlyVersion.getOrElse("1.7.0")
+  private val ioVersion = nightlyVersion.getOrElse("1.10.0")
   private val utilVersion = nightlyVersion.getOrElse("2.0.0-alpha5")
 
   private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
@@ -44,25 +44,24 @@ object Dependencies {
   def addSbtUtilCache(p: Project): Project = addSbtModule(p, sbtUtilPath, "utilCache", utilCache)
 
   val launcherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.0"
-  val ivy = "org.scala-sbt.ivy" % "ivy" % "2.3.0-sbt-fbc4f586aeeb1591710b14eb4f41b94880dcd745"
+  val ivy = "org.scala-sbt.ivy" % "ivy" % "2.3.0-sbt-396a783bba347016e7fe30dacc60d355be607fe2"
 
   val sbtV = "1.0"
   val scalaV = "2.12"
 
-  val jsch = "com.jcraft" % "jsch" % "0.1.54" intransitive ()
+  val jsch = "com.github.mwiede" % "jsch" % "0.2.17" intransitive ()
   val scalaReflect = Def.setting { "org.scala-lang" % "scala-reflect" % scalaVersion.value }
   val scalaCompiler = Def.setting { "org.scala-lang" % "scala-compiler" % scalaVersion.value }
   val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "2.1.0"
   val scalaTest = "org.scalatest" %% "scalatest" % "3.2.10"
   val scalaVerify = "com.eed3si9n.verify" %% "verify" % "1.0.0"
   val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.15.3"
-  val sjsonNewVersion = "0.13.0"
+  val sjsonNewVersion = "0.14.0-M1"
   val sjsonnew = Def.setting {
     "com.eed3si9n" %% "sjson-new-core" % sjsonNewVersion
   }
   val sjsonnewScalaJson = Def.setting {
     "com.eed3si9n" %% "sjson-new-scalajson" % sjsonNewVersion
   }
-  val gigahorseOkhttp = "com.eed3si9n" %% "gigahorse-okhttp" % "0.6.0"
-  val okhttpUrlconnection = "com.squareup.okhttp3" % "okhttp-urlconnection" % "3.7.0"
+  val gigahorseApacheHttp = "com.eed3si9n" %% "gigahorse-apache-http" % "0.7.0"
 }
@@ -31,8 +31,8 @@ object HouseRulesPlugin extends AutoPlugin {
     scalacOptions += "-Ywarn-numeric-widen",
     scalacOptions += "-Ywarn-value-discard",
     scalacOptions ++= "-Ywarn-unused-import".ifScala(v => 11 <= v && v <= 12).value.toList
-  ) ++ Seq(Compile, Test).flatMap(
-    c => (c / console / scalacOptions) --= Seq("-Ywarn-unused-import", "-Xlint")
+  ) ++ Seq(Compile, Test).flatMap(c =>
+    (c / console / scalacOptions) --= Seq("-Ywarn-unused-import", "-Xlint")
   )
 
   private def scalaPartV = Def setting (CrossVersion partialVersion scalaVersion.value)
@@ -30,7 +30,9 @@ object Util {
     val f = dir / "xsbt.version.properties"
     // TODO: replace lastModified() with sbt.io.IO.getModifiedTimeOrZero(), once the build
     // has been upgraded to a version of sbt that includes that call.
-    if (!f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f, version)) {
+    if (
+      !f.exists || f.lastModified < lastCompilationTime(analysis) || !containsVersion(f, version)
+    ) {
       s.log.info("Writing version information to " + f + " :\n" + content)
       IO.write(f, content)
     }
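The TODO kept above still points at sbt.io.IO.getModifiedTimeOrZero as the eventual replacement for File.lastModified. A minimal sketch of how the staleness check could look after that switch, assuming getModifiedTimeOrZero(File): Long as provided by recent sbt/io releases:

import java.io.File
import sbt.io.IO

// Possible shape of the check once the build's sbt ships IO.getModifiedTimeOrZero;
// it returns 0L for a missing file, so the explicit exists guard is kept for clarity.
def needsRewrite(f: File, lastCompilation: Long, versionPresent: Boolean): Boolean =
  !f.exists || IO.getModifiedTimeOrZero(f) < lastCompilation || !versionPresent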
@@ -1 +1 @@
-sbt.version=1.8.0
+sbt.version=1.9.3
@@ -3,6 +3,5 @@ addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2")
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
 addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.0.2")
 addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.5.3")
-addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.14")
 
 scalacOptions += "-language:postfixOps"