mirror of https://github.com/sbt/sbt.git
Merge remote-tracking branch 'upstream/0.13' into 0.13
commit d27c7f40c2

@@ -10,12 +10,14 @@ sbt
===

sbt is a build tool for Scala, Java, and more.

For general documentation, see http://www.scala-sbt.org/.

Issues and Pull Requests
------------------------

Please read [CONTRIBUTING] carefully before opening a GitHub Issue.

The short version: try [StackOverflow] and [sbt-dev]. Don't open an Issue.

sbt 0.13

@@ -51,7 +51,15 @@ object CustomPomParser {
private[this] val TransformedHashKey = "e:sbtTransformHash"
// A hash of the parameters transformation is based on.
// If a descriptor has a different hash, we need to retransform it.
private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted)
private[this] def makeCoords(mrid: ModuleRevisionId): String = s"${mrid.getOrganisation}:${mrid.getName}:${mrid.getRevision}"

// We now include the ModuleID in a hash, to ensure that parent-pom transformations don't corrupt child poms.
private[this] def MakeTransformHash(md: ModuleDescriptor): String = {
val coords: String = makeCoords(md.getModuleRevisionId)

hash((unqualifiedKeys ++ JarPackagings ++ Set(coords)).toSeq.sorted)
}

private[this] def hash(ss: Seq[String]): String = Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray))

// Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance.

@@ -65,11 +73,12 @@ object CustomPomParser {
{
val oldTransformedHashKey = "sbtTransformHash"
val extraInfo = md.getExtraInfo
val MyHash = MakeTransformHash(md)
// sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both
Option(extraInfo).isDefined &&
((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match {
case Some(TransformHash) => true
case _ => false
case Some(MyHash) => true
case _ => false
})
}

@@ -95,10 +104,10 @@ object CustomPomParser {
val mergeDuplicates = IvySbt.hasDuplicateDependencies(md.getDependencies)

val unqualify = toUnqualify(filtered)
if (unqualify.isEmpty && extraDepAttributes.isEmpty && !convertArtifacts && !mergeDuplicates)
md
else
addExtra(unqualify, extraDepAttributes, parser, md)

// Here we always add extra attributes. There's a scenario where parent-pom information corrupts child-poms with "e:" namespaced xml elements
// and we have to force every generated xml file to have the appropriate xml namespace
addExtra(unqualify, extraDepAttributes, parser, md)
}
// The <properties> element of the pom is used to store additional metadata, such as for sbt plugins or for the base URL for API docs.
// This is done because the pom XSD does not appear to allow extra metadata anywhere else.

@@ -185,7 +194,7 @@ object CustomPomParser {

for (l <- md.getLicenses) dmd.addLicense(l)
for ((key, value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraInfo(key, value)
dmd.addExtraInfo(TransformedHashKey, TransformHash) // mark as transformed by this version, so we don't need to do it again
dmd.addExtraInfo(TransformedHashKey, MakeTransformHash(md)) // mark as transformed by this version, so we don't need to do it again
for ((key, value) <- md.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraAttributeNamespace(key, value)
IvySbt.addExtraNamespace(dmd)

@@ -25,8 +25,34 @@ private[sbt] object JsonUtil {
}
def toLite(ur: UpdateReport): UpdateReportLite =
UpdateReportLite(ur.configurations map { cr =>
ConfigurationReportLite(cr.configuration, cr.details)
ConfigurationReportLite(cr.configuration, cr.details map { oar =>
new OrganizationArtifactReport(oar.organization, oar.name, oar.modules map { mr =>
new ModuleReport(
mr.module, mr.artifacts, mr.missingArtifacts, mr.status,
mr.publicationDate, mr.resolver, mr.artifactResolver,
mr.evicted, mr.evictedData, mr.evictedReason,
mr.problem, mr.homepage, mr.extraAttributes,
mr.isDefault, mr.branch, mr.configurations, mr.licenses,
summarizeCallers(mr.callers))
})
})
})
// #1763/#2030. Caller takes up 97% of space, so we need to shrink it down,
// but there are semantics associated with some of them.
def summarizeCallers(callers: Seq[Caller]): Seq[Caller] =
if (callers.isEmpty) callers
else {
// Use the first element to represent all callers
val head = callers.head
val caller = new Caller(
head.caller, head.callerConfigurations, head.callerExtraAttributes,
callers exists { _.isForceDependency },
callers exists { _.isChangingDependency },
callers exists { _.isTransitiveDependency },
callers exists { _.isDirectlyForceDependency })
Seq(caller)
}

def fromLite(lite: UpdateReportLite, cachedDescriptor: File): UpdateReport =
{
val stats = new UpdateStats(0L, 0L, 0L, false)

@@ -104,7 +104,7 @@ class MakePom(val log: Logger) {
{
val deps = depsInConfs(module, configurations)
makeProperties(module, deps) ++
makeDependencies(deps, includeTypes)
makeDependencies(deps, includeTypes, module.getAllExcludeRules)
}
{ makeRepositories(ivy.getSettings, allRepositories, filterRepositories) }
</project>)

@@ -220,43 +220,65 @@ class MakePom(val log: Logger) {
}
val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType)

@deprecated("Use `makeDependencies` variant which takes excludes", "0.13.9")
def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String]): NodeSeq =
makeDependencies(dependencies, includeTypes, Nil)

def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String], excludes: Seq[ExcludeRule]): NodeSeq =
if (dependencies.isEmpty)
NodeSeq.Empty
else
<dependencies>
{ dependencies.map(makeDependency(_, includeTypes)) }
{ dependencies.map(makeDependency(_, includeTypes, excludes)) }
</dependencies>

@deprecated("Use `makeDependency` variant which takes excludes", "0.13.9")
def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq =
makeDependency(dependency, includeTypes, Nil)

def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String], excludes: Seq[ExcludeRule]): NodeSeq =
{
val artifacts = dependency.getAllDependencyArtifacts
val includeArtifacts = artifacts.filter(d => includeTypes(d.getType))
if (artifacts.isEmpty) {
val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations)
makeDependencyElem(dependency, scope, optional, None, None)
val configs = dependency.getModuleConfigurations
if (configs.filterNot(Set("sources", "docs")).nonEmpty) {
val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations)
makeDependencyElem(dependency, scope, optional, None, None, excludes)
} else NodeSeq.Empty
} else if (includeArtifacts.isEmpty)
NodeSeq.Empty
else
NodeSeq.fromSeq(artifacts.map(a => makeDependencyElem(dependency, a)))
NodeSeq.fromSeq(artifacts.flatMap(a => makeDependencyElem(dependency, a, excludes)))
}

def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Elem =
@deprecated("Use `makeDependencyElem` variant which takes excludes", "0.13.9")
def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Option[Elem] =
makeDependencyElem(dependency, artifact, Nil)

def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor, excludes: Seq[ExcludeRule]): Option[Elem] =
{
val configs = artifact.getConfigurations.toList match {
case Nil | "*" :: Nil => dependency.getModuleConfigurations
case x => x.toArray
}
val (scope, optional) = getScopeAndOptional(configs)
val classifier = artifactClassifier(artifact)
val baseType = artifactType(artifact)
val tpe = (classifier, baseType) match {
case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None
case _ => baseType
}
makeDependencyElem(dependency, scope, optional, classifier, tpe)
if (configs.filterNot(Set("sources", "docs")).nonEmpty) {
val (scope, optional) = getScopeAndOptional(configs)
val classifier = artifactClassifier(artifact)
val baseType = artifactType(artifact)
val tpe = (classifier, baseType) match {
case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None
case _ => baseType
}
Some(makeDependencyElem(dependency, scope, optional, classifier, tpe, excludes))
} else None
}

@deprecated("Use `makeDependencyElem` variant which takes excludes", "0.13.9")
def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String]): Elem =
makeDependencyElem(dependency, scope, optional, classifier, tpe, Nil)

def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String], excludes: Seq[ExcludeRule]): Elem =
{
val mrid = dependency.getDependencyRevisionId
<dependency>

@@ -267,7 +289,7 @@ class MakePom(val log: Logger) {
{ optionalElem(optional) }
{ classifierElem(classifier) }
{ typeElem(tpe) }
{ exclusions(dependency) }
{ exclusions(dependency, excludes) }
</dependency>
}

@@ -317,9 +339,12 @@ class MakePom(val log: Logger) {
(scope, opt.nonEmpty)
}

def exclusions(dependency: DependencyDescriptor): NodeSeq =
@deprecated("Use `exclusions` variant which takes excludes", "0.13.9")
def exclusions(dependency: DependencyDescriptor): NodeSeq = exclusions(dependency, Nil)

def exclusions(dependency: DependencyDescriptor, excludes: Seq[ExcludeRule]): NodeSeq =
{
val excl = dependency.getExcludeRules(dependency.getModuleConfigurations)
val excl = dependency.getExcludeRules(dependency.getModuleConfigurations) ++ excludes
val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion))
if (warns.nonEmpty) log.warn(warns.mkString(IO.Newline))
if (excls.nonEmpty) <exclusions>{ excls }</exclusions>

@@ -4,6 +4,7 @@ package ivyint
import java.util.Date
import java.net.URL
import java.io.File
import java.text.SimpleDateFormat
import collection.concurrent
import collection.mutable
import collection.immutable.ListMap

@@ -20,12 +21,19 @@ import org.apache.ivy.plugins.latest.{ ArtifactInfo => IvyArtifactInfo }
import org.apache.ivy.plugins.matcher.{ MapMatcher, PatternMatcher }
import Configurations.{ System => _, _ }
import annotation.tailrec
import scala.concurrent.duration._

private[sbt] object CachedResolutionResolveCache {
def createID(organization: String, name: String, revision: String) =
ModuleRevisionId.newInstance(organization, name, revision)
def sbtOrgTemp = "org.scala-sbt.temp"
def graphVersion = "0.13.8"
def graphVersion = "0.13.9"
val buildStartup: Long = System.currentTimeMillis
lazy val todayStr: String = toYyyymmdd(buildStartup)
lazy val tomorrowStr: String = toYyyymmdd(buildStartup + (1 day).toMillis)
lazy val yesterdayStr: String = toYyyymmdd(buildStartup - (1 day).toMillis)
def toYyyymmdd(timeSinceEpoch: Long): String = yyyymmdd.format(new Date(timeSinceEpoch))
lazy val yyyymmdd: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd")
}

private[sbt] class CachedResolutionResolveCache() {

@@ -137,7 +145,17 @@ private[sbt] class CachedResolutionResolveCache() {
val staticGraphDirectory = miniGraphPath / "static"
val dynamicGraphDirectory = miniGraphPath / "dynamic"
val staticGraphPath = staticGraphDirectory / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
val dynamicGraphPath = dynamicGraphDirectory / logicalClock.toString / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
val dynamicGraphPath = dynamicGraphDirectory / todayStr / logicalClock.toString / pathOrg / pathName / pathRevision / "graphs" / "graph.json"
def cleanDynamicGraph(): Unit =
{
val list = IO.listFiles(dynamicGraphDirectory, DirectoryFilter).toList
list filterNot { d =>
(d.getName == todayStr) || (d.getName == tomorrowStr) || (d.getName == yesterdayStr)
} foreach { d =>
log.debug(s"deleting old graphs $d...")
IO.delete(d)
}
}
def loadMiniGraphFromFile: Option[Either[ResolveException, UpdateReport]] =
(if (staticGraphPath.exists) Some(staticGraphPath)
else if (dynamicGraphPath.exists) Some(dynamicGraphPath)

@@ -175,6 +193,9 @@ private[sbt] class CachedResolutionResolveCache() {
val gp = if (changing) dynamicGraphPath
else staticGraphPath
log.debug(s"saving minigraph to $gp")
if (changing) {
cleanDynamicGraph()
}
JsonUtil.writeUpdateReport(ur, gp)
// limit the update cache size
if (updateReportCache.size > maxUpdateReportCacheSize) {

@@ -3,7 +3,7 @@
*/
package sbt

import scala.concurrent.duration.Duration
import scala.concurrent.duration.{ FiniteDuration, Duration }
import Attributed.data
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sbt.Compiler.InputsWithPrevious

@@ -27,7 +27,7 @@ import org.apache.ivy.core.module.{ descriptor, id }
import descriptor.ModuleDescriptor, id.ModuleRevisionId
import java.io.{ File, PrintWriter }
import java.net.{ URI, URL, MalformedURLException }
import java.util.concurrent.Callable
import java.util.concurrent.{ TimeUnit, Callable }
import sbinary.DefaultProtocol.StringFormat
import Cache.seqFormat
import CommandStrings.ExportStream

@@ -115,7 +115,8 @@ object Defaults extends BuildCommon {
pomPostProcess :== idFun,
pomAllRepositories :== false,
pomIncludeRepository :== Classpaths.defaultRepositoryFilter,
updateOptions := UpdateOptions()
updateOptions := UpdateOptions(),
forceUpdatePeriod :== None
)

/** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */

@@ -570,11 +571,20 @@ object Defaults extends BuildCommon {

def selectedFilter(args: Seq[String]): Seq[String => Boolean] =
{
val filters = args map GlobFilter.apply
if (filters.isEmpty)
def matches(nfs: Seq[NameFilter], s: String) = nfs.exists(_.accept(s))

val (excludeArgs, includeArgs) = args.partition(_.startsWith("-"))

val includeFilters = includeArgs map GlobFilter.apply
val excludeFilters = excludeArgs.map(_.substring(1)).map(GlobFilter.apply)

if (includeFilters.isEmpty && excludeArgs.isEmpty) {
Seq(const(true))
else
filters.map { f => (s: String) => f accept s }
} else if (includeFilters.isEmpty) {
Seq({ (s: String) => !matches(excludeFilters, s) })
} else {
includeFilters.map { f => (s: String) => (f.accept(s) && !matches(excludeFilters, s)) }
}
}
def detectTests: Initialize[Task[Seq[TestDefinition]]] = (loadedTestFrameworks, compile, streams) map { (frameworkMap, analysis, s) =>
Tests.discover(frameworkMap.values.toList, analysis, s.log)._1

@@ -896,7 +906,7 @@ object Defaults extends BuildCommon {
selectTests ~ options
}

def distinctParser(exs: Set[String], raw: Boolean): Parser[Seq[String]] =
private def distinctParser(exs: Set[String], raw: Boolean): Parser[Seq[String]] =
{
import DefaultParsers._
val base = token(Space) ~> token(NotSpace - "--" examples exs)

@@ -1315,7 +1325,15 @@ object Classpaths {
def updateTask: Initialize[Task[UpdateReport]] = Def.task {
val depsUpdated = transitiveUpdate.value.exists(!_.stats.cached)
val isRoot = executionRoots.value contains resolvedScoped.value
val forceUpdate = forceUpdatePeriod.value
val s = streams.value
val fullUpdateOutput = s.cacheDirectory / "out"
val forceUpdateByTime = forceUpdate match {
case None => false
case Some(period) =>
val elapsedDuration = new FiniteDuration(System.currentTimeMillis() - fullUpdateOutput.lastModified(), TimeUnit.MILLISECONDS)
fullUpdateOutput.exists() && elapsedDuration > period
}
val scalaProvider = appConfiguration.value.provider.scalaProvider

// Only substitute unmanaged jars for managed jars when the major.minor parts of the versions the same for:

@@ -1351,7 +1369,7 @@ object Classpaths {
if (executionRoots.value exists { _.key == evicted.key }) EvictionWarningOptions.empty
else (evictionWarningOptions in update).value
cachedUpdate(s.cacheDirectory / updateCacheName.value, show, ivyModule.value, uc, transform,
skip = (skip in update).value, force = isRoot, depsUpdated = depsUpdated,
skip = (skip in update).value, force = isRoot || forceUpdateByTime, depsUpdated = depsUpdated,
uwConfig = uwConfig, logicalClock = logicalClock, depDir = Some(depDir),
ewo = ewo, log = s.log)
}

@@ -5,7 +5,7 @@ package sbt

import java.io.File
import java.net.URL
import scala.concurrent.duration.Duration
import scala.concurrent.duration.{ FiniteDuration, Duration }
import Def.ScopedKey
import complete._
import inc.Analysis

@@ -324,6 +324,7 @@ object Keys {
val publishArtifact = SettingKey[Boolean]("publish-artifact", "Enables (true) or disables (false) publishing an artifact.", AMinusSetting)
val packagedArtifact = TaskKey[(Artifact, File)]("packaged-artifact", "Generates a packaged artifact, returning the Artifact and the produced File.", CTask)
val checksums = SettingKey[Seq[String]]("checksums", "The list of checksums to generate and to verify for dependencies.", BSetting)
val forceUpdatePeriod = SettingKey[Option[FiniteDuration]]("force-update-period", "Duration after which to force a full update to occur", CSetting)

val classifiersModule = TaskKey[GetClassifiersModule]("classifiers-module", rank = CTask)
val conflictWarning = SettingKey[ConflictWarning]("conflict-warning", "Configures warnings for conflicts in dependency management.", CSetting)

@@ -0,0 +1,53 @@
package sbt

import java.io._

import org.specs2.mutable.Specification

object DefaultsTest extends Specification {
private def assertFiltered(filter: List[String], expected: Map[String, Boolean]) = {
val actual = expected.map(t => (t._1, Defaults.selectedFilter(filter).exists(fn => fn(t._1))))

actual must be equalTo (expected)
}

"`selectedFilter`" should {
"return all tests for an empty list" in {
assertFiltered(List(), Map("Test1" -> true, "Test2" -> true))
}

"work correctly with exact matches" in {
assertFiltered(List("Test1", "foo"), Map("Test1" -> true, "Test2" -> false, "Foo" -> false))
}

"work correctly with glob" in {
assertFiltered(List("Test*"), Map("Test1" -> true, "Test2" -> true, "Foo" -> false))
}

"work correctly with excludes" in {
assertFiltered(List("Test*", "-Test2"), Map("Test1" -> true, "Test2" -> false, "Foo" -> false))
}

"work correctly without includes" in {
assertFiltered(List("-Test2"), Map("Test1" -> true, "Test2" -> false, "Foo" -> true))
}

"work correctly with excluded globs" in {
assertFiltered(List("Test*", "-F*"), Map("Test1" -> true, "Test2" -> true, "Foo" -> false))
}

"cope with multiple filters" in {
assertFiltered(List("T*1", "T*2", "-F*"), Map("Test1" -> true, "Test2" -> true, "Foo" -> false))
}

"cope with multiple exclusion filters, no includes" in {
assertFiltered(List("-A*", "-F*"), Map("Test1" -> true, "Test2" -> true, "AAA" -> false, "Foo" -> false))
}

"cope with multiple exclusion filters with includes" in {
assertFiltered(List("T*", "-T*1", "-T*2"), Map("Test1" -> false, "Test2" -> false, "Test3" -> true))
}
}

}

@@ -0,0 +1,31 @@
[@cunei]: https://github.com/cunei
[@eed3si9n]: https://github.com/eed3si9n
[@gkossakowski]: https://github.com/gkossakowski
[@jsuereth]: https://github.com/jsuereth

[1721]: https://github.com/sbt/sbt/issues/1721
[2014]: https://github.com/sbt/sbt/issues/2014
[2030]: https://github.com/sbt/sbt/pull/2030

### Fixes with compatibility implications

### Improvements

### Bug fixes

- Fixes memory/performance issue with cached resolution. See below.

### Cached resolution fixes

On a larger dependency graph, the JSON file was growing to 100MB+,
with 97% of it taken up by *caller* information.
The caller information is not useful once the graph is successfully resolved.
To make matters worse, these large JSON files were never cleaned up.

sbt 0.13.9 creates a single caller to represent all callers,
which fixes the `OutOfMemoryException` seen on some builds.
This generally shrinks the size of the JSON files, which should make the IO operations faster.
Dynamic graphs will be rotated into directories named after `yyyy-mm-dd`,
and stale JSON files will be cleaned up after a few days.
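
For reference, cached resolution is the opt-in feature these fixes apply to; a minimal `build.sbt` sketch enabling it (the standard opt-in, not part of this change) looks like:

```scala
// build.sbt -- opt in to cached resolution, which produces the minigraph JSON files discussed above
updateOptions := updateOptions.value.withCachedResolution(true)
```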

[#2030][2030]/[#1721][1721]/[#2014][2014] by [@eed3si9n][@eed3si9n]

@@ -0,0 +1,9 @@
[@ajsquared]: https://github.com/ajsquared


### Changes with compatibility implications

### Improvements

- Adds the `forceUpdatePeriod` key, which takes values of `Option[FiniteDuration]`. If set, a full `update` will occur after that amount of time without needing to explicitly run the `update` task. By [@ajsquared][@ajsquared]
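
  As an illustration, a minimal `build.sbt` sketch (the one-day period is an arbitrary example value):

```scala
// build.sbt -- force a full update whenever the previous full update is more than a day old
import scala.concurrent.duration._

forceUpdatePeriod := Some(1.day)
```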

### Fixes

@@ -0,0 +1,10 @@
[@kamilkloch]: https://github.com/kamilkloch
[2028]: https://github.com/sbt/sbt/issues/2028

### Changes with compatibility implications

### Improvements

- Update `ForkError.getMessage()` to include the exception's original name. [#2028][2028] by [@kamilkloch][@kamilkloch]
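  For example, a forked test failure previously reported with only its message, such as `boom`, would now be reported as `java.lang.IllegalStateException: boom` (illustrative values).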

### Fixes

@@ -0,0 +1,3 @@
### Bug fixes

- Prevent history command(s) from going into an infinite loop [#1562][1562] by [@PanAeon][@PanAeon]

@@ -0,0 +1,14 @@
[@jsuereth]: https://github.com/jsuereth

[1856]: https://github.com/sbt/sbt/issues/1856

### Fixes with compatibility implications

### Improvements

### Bug fixes

- Fixes a certain class of pom corruption that can occur in the presence of parent-poms.


[#1856][1856] by [@jsuereth][@jsuereth]

@@ -0,0 +1,12 @@

[@dwijnand]: http://github.com/dwijnand
[#1877]: https://github.com/sbt/sbt/issues/1877
[#2035]: https://github.com/sbt/sbt/pull/2035

### Fixes with compatibility implications

### Improvements

### Bug fixes

- Add dependency-level exclusions in the POM for project-level exclusions. [#1877][]/[#2035][] by [@dwijnand][]
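
  A minimal `build.sbt` sketch of the kind of project-level exclusion this affects (assuming it is declared via `excludeDependencies`; the organization and module name are borrowed from the scripted test below):

```scala
// build.sbt -- a project-level exclusion; makePom now also emits it as an
// <exclusions> entry on the <dependency> elements in the generated POM
excludeDependencies += ExclusionRule("net.databinder.dispatch", "dispatch-core_2.11")
```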

@@ -0,0 +1,21 @@

[@cunei]: http://github.com/cunei
[2001]: https://github.com/sbt/sbt/issues/2001
[2027]: https://github.com/sbt/sbt/pull/2027

### Fixes with compatibility implications

- Starting with 0.13.9, the generated POM files no longer include dependencies on source or javadoc jars
  obtained via `withSources()` or `withJavadoc()`.

### Improvements

### Bug fixes

### POM files no longer include certain source and javadoc jars

When declaring library dependencies using the `withSources()` or `withJavadoc()` options, sbt was also including
in the pom file, as dependencies, the source or javadoc jars using the default Maven scope. Such dependencies
might be erroneously processed as if they were regular jars by automated tools.
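
To illustrate the kind of declaration affected, a minimal sketch (the library coordinates are placeholder values):

```scala
// build.sbt -- the sources/javadoc artifacts fetched for local use are no longer
// emitted as <dependency> entries in the generated POM
libraryDependencies += ("net.databinder.dispatch" %% "dispatch-core" % "0.11.2").withSources().withJavadoc()
```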

[#2001][2001]/[#2027][2027] by [@cunei][@cunei]

@@ -0,0 +1,12 @@

[@matthewfarwell]: http://github.com/matthewfarwell
[1970]: https://github.com/sbt/sbt/pull/1970

### Fixes with compatibility implications

### Improvements

- Support excluding tests in `testOnly`/`testQuick` with `-`, for example `-MySpec`.
  [#1970][1970] by [@matthewfarwell][@matthewfarwell]
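
  As exercised by the scripted test further below, exclusion patterns can be combined with include patterns from the sbt shell, for example:

```
> test-only Test* -Test1
> test-only -Test2
```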

### Bug fixes

@@ -1,3 +1,6 @@
import scala.xml.{ Node, _ }
import scala.xml.Utility.trim

lazy val check = taskKey[Unit]("check")

val dispatch = "net.databinder.dispatch" %% "dispatch-core" % "0.11.2"

@@ -31,5 +34,30 @@ lazy val root = (project in file(".")).
if (bcp exists { _.data.getName contains "dispatch-core_2.11-0.11.1.jar" }) {
sys.error("dispatch-core_2.11-0.11.1.jar found when it should NOT be included: " + bcp.toString)
}

val bPomXml = makePomXml(streams.value.log, (makePomConfiguration in b).value, (ivyModule in b).value)

val repatchTwitterXml = bPomXml \ "dependencies" \ "dependency" find { d =>
(d \ "groupId").text == "com.eed3si9n" && (d \ "artifactId").text == "repatch-twitter-core_2.11"
} getOrElse (sys error s"Missing repatch-twitter-core dependency: $bPomXml")

val excludeDispatchCoreXml =
<exclusion>
<groupId>net.databinder.dispatch</groupId>
<artifactId>dispatch-core_2.11</artifactId>
</exclusion>

if (trim((repatchTwitterXml \ "exclusions" \ "exclusion").head) != trim(excludeDispatchCoreXml))
sys error s"Missing dispatch-core exclusion: $repatchTwitterXml"

()
}
)
)

def makePomXml(log: Logger, makePomConfig: MakePomConfiguration, ivyModule: IvySbt#Module): Node = {
ivyModule.withModule[Node](log) { (ivy, md, default) =>
import makePomConfig._
new MakePom(log).toPom(
ivy, md, moduleInfo, configurations, includeTypes, extra, filterRepositories, allRepositories)
}
}

@@ -0,0 +1,13 @@
libraryDependencies += "log4j" % "log4j" % "1.2.16" % "compile"

autoScalaLibrary := false

TaskKey[Unit]("check-last-update-time") <<= streams map { (s) =>
val fullUpdateOutput = s.cacheDirectory / "out"
val timeDiff = System.currentTimeMillis() - fullUpdateOutput.lastModified()
val exists = fullUpdateOutput.exists()
s.log.info(s"Amount of time since last full update: $timeDiff")
if (exists && timeDiff > 5000) {
sys.error("Full update not performed")
}
}

@@ -0,0 +1,11 @@
$ absent target/resolution-cache
> compile
$ exists target/resolution-cache
> checkLastUpdateTime
$ sleep 10000
> compile
# This is expected to fail
-> checkLastUpdateTime
> set forceUpdatePeriod := Some(new scala.concurrent.duration.FiniteDuration(5000, java.util.concurrent.TimeUnit.MILLISECONDS))
> compile
> checkLastUpdateTime

@@ -10,6 +10,8 @@ resolvers +=
libraryDependencies +=
"com.example" % "example-child" % "1.0-SNAPSHOT"

libraryDependencies += "org.apache.geronimo.specs" % "geronimo-jta_1.1_spec" % "1.1.1"

version := "1.0-SNAPSHOT"

@@ -20,3 +22,18 @@ cleanExampleCache := {
IO.delete(cacheDir / "com.example")
}
}

val checkIvyXml = taskKey[Unit]("Checks the ivy.xml transform was correct")

checkIvyXml := {
ivySbt.value.withIvy(streams.value.log) { ivy =>
val cacheDir = ivy.getSettings.getDefaultRepositoryCacheBasedir
// TODO - Is this actually ok?
val xmlFile =
cacheDir / "org.apache.geronimo.specs" / "geronimo-jta_1.1_spec" / "ivy-1.1.1.xml"
//cacheDir / "com.example" / "example-child" / "ivy-1.0-SNAPSHOT.xml"
val lines = IO.read(xmlFile)
if (lines.isEmpty) sys.error(s"Unable to read $xmlFile, could not resolve geronimo...")
assert(lines contains "xmlns:e", s"Failed to appropriately modify ivy.xml file for sbt extra attributes!\n$lines")
}
}

@@ -1,2 +1,3 @@
> cleanExampleCache
> update
> update
> checkIvyXml

@@ -5,7 +5,7 @@ object build extends Build {
val defaultSettings = Seq(
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ),
incOptions := incOptions.value.withNameHashing(true),
scalaVersion := "2.11.0-RC3"
scalaVersion := "2.11.6"
)

lazy val root = Project(

@@ -5,7 +5,7 @@ object build extends Build {
val defaultSettings = Seq(
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ),
incOptions := incOptions.value.withNameHashing(true),
scalaVersion := "2.11.0-RC3"
scalaVersion := "2.11.6"
)

lazy val root = Project(

@@ -0,0 +1,10 @@
import sbt._
import Keys._
import Defaults._

object B extends Build {
lazy val root = Project("root", file("."), settings = defaultSettings ++ Seq(
libraryDependencies += "org.scalatest" %% "scalatest" % "1.9.1" % "test",
parallelExecution in test := false
))
}

@@ -0,0 +1,15 @@
import java.io.File
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers

class Test1 extends FlatSpec with ShouldMatchers {
"a test" should "pass" in {
new File("target/Test1.run").createNewFile()
}
}

class Test2 extends FlatSpec with ShouldMatchers {
"a test" should "pass" in {
new File("target/Test2.run").createNewFile()
}
}

@@ -0,0 +1,61 @@
# Test1 & Test2 create files Test1.run & Test2.run respectively

# no parameters
> test-only
$ exists target/Test1.run
$ exists target/Test2.run

$ delete target/Test1.run
$ delete target/Test2.run

# with explicit match
> test-only Test1*
$ exists target/Test1.run
-$ exists target/Test2.run

$ delete target/Test1.run

# with explicit match and exclusion
> test-only Test* -Test1
-$ exists target/Test1.run
$ exists target/Test2.run

$ delete target/Test2.run

# with explicit match and exclusion
> test-only Test* -Test2
$ exists target/Test1.run
-$ exists target/Test2.run

$ delete target/Test1.run

# with only exclusion
> test-only -Test2
$ exists target/Test1.run
-$ exists target/Test2.run

$ delete target/Test1.run

# with only exclusion
> test-only -Test1
-$ exists target/Test1.run
$ exists target/Test2.run

$ delete target/Test2.run

# with only glob exclusion
> test-only -Test*
-$ exists target/Test1.run
-$ exists target/Test2.run

# with only glob exclusion
> test-only -T*1 -T*2
-$ exists target/Test1.run
-$ exists target/Test2.run

@@ -95,13 +95,15 @@ public class ForkMain {

static class ForkError extends Exception {
private String originalMessage;
private String originalName;
private ForkError cause;
ForkError(Throwable t) {
originalMessage = t.getMessage();
originalName = t.getClass().getName();
setStackTrace(t.getStackTrace());
if (t.getCause() != null) cause = new ForkError(t.getCause());
}
public String getMessage() { return originalMessage; }
public String getMessage() { return originalName + ": " + originalMessage; }
public Exception getCause() { return cause; }
}

@@ -61,7 +61,7 @@ object HistoryCommands {

def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) =>
{
val command = f(h)
val command = f(h).filterNot(_.startsWith(Start))
val lines = h.lines.toArray
command.foreach(lines(lines.length - 1) = _)
h.path foreach { h => IO.writeLines(h, lines) }