mirror of https://github.com/sbt/sbt.git
fix array conversions warnings
method copyArrayToImmutableIndexedSeq in class LowPriorityImplicits2 is deprecated since 2.13.0: implicit conversions from Array to immutable.IndexedSeq are implemented by copying; use `toIndexedSeq` explicitly if you want to copy, or use the more efficient non-copying ArraySeq.unsafeWrapArray
This commit is contained in:
parent 099087f486
commit e8b0cd08e6
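For context, here is a minimal standalone sketch (not part of the diff; the object and method names are made up) of the deprecated implicit conversion and the two explicit alternatives the warning suggests:

    import scala.collection.immutable.ArraySeq

    object ArrayConversionSketch {
      def needsSeq(xs: Seq[String]): Int = xs.length

      val tokens: Array[String] = "a || b".split("\\s+\\|\\|\\s+")

      // Deprecated in 2.13: passing the Array relies on the implicit
      // Array => immutable.IndexedSeq conversion, which copies.
      //   needsSeq(tokens)

      // Explicit copy:
      val copied = needsSeq(tokens.toIndexedSeq)

      // Non-copying wrapper, the approach used throughout this commit:
      val wrapped = needsSeq(ArraySeq.unsafeWrapArray(tokens))
    }

The commit applies the non-copying wrapper where the resulting sequence is only read, and switches to scala.jdk.CollectionConverters where the value is a java.util collection rather than an array.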
@@ -84,7 +84,7 @@ object SemanticSelector {
   def apply(selector: String): SemanticSelector = {
     val orChunkTokens = selector.split("\\s+\\|\\|\\s+").map(_.trim)
     val orChunks = orChunkTokens.map { chunk => sbt.internal.librarymanagement.SemSelAndChunk(chunk) }
-    SemanticSelector(orChunks)
+    SemanticSelector(scala.collection.immutable.ArraySeq.unsafeWrapArray(orChunks))
   }
   def apply(selectors: Seq[sbt.internal.librarymanagement.SemSelAndChunk]): SemanticSelector = new SemanticSelector(selectors)
 }
@@ -87,7 +87,7 @@
   "def apply(selector: String): SemanticSelector = {",
   " val orChunkTokens = selector.split(\"\\\\s+\\\\|\\\\|\\\\s+\").map(_.trim)",
   " val orChunks = orChunkTokens.map { chunk => sbt.internal.librarymanagement.SemSelAndChunk(chunk) }",
-  " SemanticSelector(orChunks)",
+  " SemanticSelector(scala.collection.immutable.ArraySeq.unsafeWrapArray(orChunks))",
   "}"
 ]
 },
@@ -8,7 +8,8 @@ import java.util.Locale
 private[librarymanagement] abstract class SemSelAndChunkFunctions {
   protected def parse(andClauseToken: String): SemSelAndChunk = {
-    val comparatorTokens = andClauseToken.split("\\s+")
+    val comparatorTokens =
+      scala.collection.immutable.ArraySeq.unsafeWrapArray(andClauseToken.split("\\s+"))
     val hyphenIndex = comparatorTokens.indexWhere(_ == "-")
     val comparators = if (hyphenIndex == -1) {
       comparatorTokens.map(SemComparator.apply)
@@ -29,6 +29,7 @@ import sbt.internal.librarymanagement.mavenint.{
   SbtPomExtraProperties
 }
 import sbt.io.Hash
+import scala.collection.immutable.ArraySeq
 
 // @deprecated("We now use an Aether-based pom parser.", "0.13.8")
 final class CustomPomParser(
@@ -311,7 +312,7 @@ object CustomPomParser {
     ) dmd.addExtraAttributeNamespace(key, value)
     IvySbt.addExtraNamespace(dmd)
 
-    val withExtra = md.getDependencies map { dd =>
+    val withExtra = ArraySeq.unsafeWrapArray(md.getDependencies) map { dd =>
       addExtra(dd, dependencyExtra)
     }
     val withVersionRangeMod: Seq[DependencyDescriptor] =
@@ -36,6 +36,7 @@ import org.apache.ivy.util.extendable.ExtendableItem
 import org.apache.ivy.util.url._
 import scala.xml.NodeSeq
 import scala.collection.mutable
+import scala.collection.immutable.ArraySeq
 import scala.util.{ Success, Failure }
 import sbt.util._
 import sbt.librarymanagement.{ ModuleDescriptorConfiguration => InlineConfiguration, _ }
@@ -891,7 +892,7 @@ private[sbt] object IvySbt {
 
   def inconsistentDuplicateWarning(moduleID: DefaultModuleDescriptor): List[String] = {
     import IvyRetrieve.toModuleID
-    val dds = moduleID.getDependencies
+    val dds = ArraySeq.unsafeWrapArray(moduleID.getDependencies)
     val deps = dds flatMap { dd =>
       val module = toModuleID(dd.getDependencyRevisionId)
       dd.getModuleConfigurations map (c => module.withConfigurations(Some(c)))
@@ -320,8 +320,9 @@ object IvyActions {
 
     val resolveReport = ivyInstance.resolve(moduleDescriptor, resolveOptions)
     if (resolveReport.hasError && !missingOk) {
+      import scala.jdk.CollectionConverters._
       // If strict error, collect report information and generated UnresolvedWarning
-      val messages = resolveReport.getAllProblemMessages.toArray.map(_.toString).distinct
+      val messages = resolveReport.getAllProblemMessages.asScala.toSeq.map(_.toString).distinct
       val failedPaths = resolveReport.getUnresolvedDependencies.map { node =>
         val moduleID = IvyRetrieve.toModuleID(node.getId)
         val path = IvyRetrieve
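The getAllProblemMessages change above sidesteps the Array conversion entirely: Ivy hands back a raw java.util.List, so it can be adapted with scala.jdk.CollectionConverters instead of going through .toArray. Roughly, the pattern looks like this (a standalone sketch with a made-up list, not Ivy's API):

    import scala.jdk.CollectionConverters._

    object JavaListSketch {
      val problems: java.util.List[String] =
        java.util.Arrays.asList("unresolved dependency: a#b;1.0", "unresolved dependency: a#b;1.0")

      // asScala wraps the Java list in a mutable Buffer view; toSeq then
      // produces an immutable Seq, matching the code in the hunk above.
      val messages: Seq[String] = problems.asScala.toSeq.map(_.toString).distinct
    }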
@@ -6,6 +6,7 @@ package sbt.internal.librarymanagement
 import java.io.File
 import java.{ util => ju }
 import collection.mutable
+import collection.immutable.ArraySeq
 import org.apache.ivy.core.{ module, report, resolve }
 import module.descriptor.{ Artifact => IvyArtifact, License => IvyLicense }
 import module.id.{ ModuleRevisionId, ModuleId => IvyModuleId }
@@ -187,7 +188,9 @@ object IvyRetrieve {
       case _ => Vector.empty
     }
     val callers = dep.getCallers(confReport.getConfiguration).toVector map { toCaller }
-    val (resolved, missing) = artifacts(confReport getDownloadReports revId)
+    val (resolved, missing) = artifacts(
+      ArraySeq.unsafeWrapArray(confReport.getDownloadReports(revId))
+    )
 
     ModuleReport(
       moduleId,
@@ -212,7 +215,7 @@ object IvyRetrieve {
   }
 
   def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] =
-    confReport.getEvictedNodes.map(node => toModuleID(node.getId))
+    ArraySeq.unsafeWrapArray(confReport.getEvictedNodes).map(node => toModuleID(node.getId))
 
   def toModuleID(revID: ModuleRevisionId): ModuleID =
     ModuleID(revID.getOrganisation, revID.getName, revID.getRevision)
@@ -13,6 +13,7 @@ import sbt.librarymanagement._
 import Resolver._
 import mavenint.PomExtraDependencyAttributes
 
+import scala.collection.immutable.ArraySeq
 // Node needs to be renamed to XNode because the task subproject contains a Node type that will shadow
 // scala.xml.Node when generating aggregated API documentation
 import scala.xml.{ Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute }
@@ -147,7 +148,7 @@ class MakePom(val log: Logger) {
       {
         val deps = depsInConfs(module, configurations)
         makeProperties(module, deps) ++
-        makeDependencies(deps, includeTypes, module.getAllExcludeRules)
+        makeDependencies(deps, includeTypes, ArraySeq.unsafeWrapArray(module.getAllExcludeRules))
       }
       {makeRepositories(ivy.getSettings, allRepositories, filterRepositories)}
     </project>)
@@ -442,7 +443,9 @@ class MakePom(val log: Logger) {
   def exclusions(dependency: DependencyDescriptor): NodeSeq = exclusions(dependency, Nil)
 
   def exclusions(dependency: DependencyDescriptor, excludes: Seq[ExcludeRule]): NodeSeq = {
-    val excl = dependency.getExcludeRules(dependency.getModuleConfigurations) ++ excludes
+    val excl = ArraySeq.unsafeWrapArray(
+      dependency.getExcludeRules(dependency.getModuleConfigurations)
+    ) ++ excludes
     val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion))
     if (warns.nonEmpty) log.warn(warns.mkString(IO.Newline))
     if (excls.nonEmpty) <exclusions>{
@@ -500,8 +503,10 @@ class MakePom(val log: Logger) {
     r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil }
 
   // cast the contents of a pre-generics collection
-  private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] =
-    s.toArray.map(_.asInstanceOf[DependencyResolver])
+  private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] = {
+    import scala.jdk.CollectionConverters._
+    s.asScala.toSeq.map(_.asInstanceOf[DependencyResolver])
+  }
 
   def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name)
   def isValidIDCharacter(c: Char) = !"""\/:"<>|?*""".contains(c)
@@ -535,6 +540,6 @@ class MakePom(val log: Logger) {
       else // TODO: translate the dependency to contain only configurations to keep
         Some(dependency)
     }
-    module.getDependencies flatMap translate
+    ArraySeq.unsafeWrapArray(module.getDependencies) flatMap translate
   }
 }
@@ -159,9 +159,8 @@ private[sbt] class CachedResolutionResolveCache {
   }
   def extractOverrides(md0: ModuleDescriptor): Vector[IvyOverride] = {
     import scala.jdk.CollectionConverters._
-    md0.getAllDependencyDescriptorMediators.getAllRules.asScala.toSeq.toVector sortBy {
-      case (k, _) =>
-        k.toString
+    md0.getAllDependencyDescriptorMediators.getAllRules.asScala.toVector sortBy { case (k, _) =>
+      k.toString
     } collect { case (k: MapMatcher, v: OverrideDependencyDescriptorMediator) =>
       val attr: Map[Any, Any] = k.getAttributes.asScala.toMap
       val module = IvyModuleId.newInstance(
@@ -398,13 +397,14 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
       doWorkUsingIvy(md)
     }
     def doWorkUsingIvy(md: ModuleDescriptor): Either[ResolveException, UpdateReport] = {
+      import scala.jdk.CollectionConverters._
       val options1 = new ResolveOptions(options0)
       val rr = withIvy(log) { ivy =>
         ivy.resolve(md, options1)
       }
       if (!rr.hasError || missingOk) Right(IvyRetrieve.updateReport(rr, cachedDescriptor))
       else {
-        val messages = rr.getAllProblemMessages.toArray.map(_.toString).distinct
+        val messages = rr.getAllProblemMessages.asScala.toSeq.map(_.toString).distinct
         val failedPaths = ListMap(rr.getUnresolvedDependencies map { node =>
           val m = IvyRetrieve.toModuleID(node.getId)
           val path = IvyRetrieve.findPath(node, md.getModuleRevisionId) map { x =>
@@ -781,7 +781,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
     }
     val merged = (modules groupBy { m =>
       (m.module.organization, m.module.name, m.module.revision)
-    }).toSeq.toVector flatMap { case (_, xs) =>
+    }).toVector flatMap { case (_, xs) =>
       if (xs.size < 2) xs
       else Vector(mergeModuleReports(xs))
     }
@@ -1,6 +1,7 @@
 package sbt.internal.librarymanagement
 package ivyint
 
+import scala.collection.immutable.ArraySeq
 import org.apache.ivy.core
 import core.module.descriptor.{ DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor }
 import core.module.descriptor.DependencyDescriptor
@@ -117,7 +118,7 @@ private[sbt] final case class MergedDescriptors(a: DependencyDescriptor, b: Depe
       // See gh-1500, gh-2002
       aConfs match {
         case None | Some(Nil) | Some(List("*")) =>
-          copyWithConfigurations(art, base.getModuleConfigurations)
+          copyWithConfigurations(art, ArraySeq.unsafeWrapArray(base.getModuleConfigurations))
         case _ => art
       }
     }
@@ -132,7 +133,7 @@ private[sbt] final case class MergedDescriptors(a: DependencyDescriptor, b: Depe
       null,
       null
     )
-    addConfigurations(dd, a.getModuleConfigurations)
+    addConfigurations(dd, ArraySeq.unsafeWrapArray(a.getModuleConfigurations))
     // If the dependency descriptor is empty, then it means that it has been created from a POM file. In this case,
     // it is correct to create a seemingly non-existent dependency artifact.
     if (a.getAllDependencyArtifacts.isEmpty) Array(dd)
@@ -1,6 +1,7 @@
 package sbt.internal.librarymanagement
 package mavenint
 
+import scala.collection.immutable.ArraySeq
 import java.util.Properties
 import java.util.regex.Pattern
 
@@ -100,8 +101,9 @@ object PomExtraDependencyAttributes {
   }
 
   /** parses the sequence of dependencies with extra attribute information, with one dependency per line */
-  def readDependencyExtra(s: String): Seq[ModuleRevisionId] =
+  def readDependencyExtra(s: String): Seq[ModuleRevisionId] = ArraySeq.unsafeWrapArray(
     LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode)
+  )
 
   private[this] val LinesP = Pattern.compile("(?m)^")
 
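One caveat about the non-copying wrapper used throughout these hunks: ArraySeq.unsafeWrapArray aliases the original array rather than copying it, so later mutation of that array is visible through the "immutable" sequence. A small illustrative sketch (not from the diff):

    import scala.collection.immutable.ArraySeq

    object UnsafeWrapCaveat {
      val arr = Array(1, 2, 3)
      val wrapped = ArraySeq.unsafeWrapArray(arr) // no copy: wraps arr directly
      arr(0) = 99                                 // the mutation shows through wrapped
      assert(wrapped(0) == 99)
    }

This is why the conversion should only be applied where the wrapped array is not mutated afterwards.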