mirror of https://github.com/sbt/sbt.git
Merge pull request #2137 from pdalpra/clean-ops-on-collections
Clean ops on collections
commit 303b0681da
@@ -293,7 +293,7 @@ object ClassToAPI {
   def referenceP(t: ParameterizedType): api.Parameterized =
     {
       val targs = t.getActualTypeArguments
-      val args = if (targs.length == 0) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type)
+      val args = if (targs.isEmpty) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type)
       val base = reference(t.getRawType)
       new api.Parameterized(base, args.toArray[api.Type])
     }
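Throughout this commit the pattern is the same: a size or length comparison against zero becomes isEmpty or nonEmpty. Besides reading as a question rather than arithmetic, isEmpty is O(1) even on collections such as List where computing length is O(n). A minimal standalone sketch of the idiom (values and the object name are illustrative, not from the sbt sources):

    object IsEmptyIdiom extends App {
      val targs: Array[Int] = Array()
      val before = targs.length == 0 // compares a computed length to a literal
      val after = targs.isEmpty      // asks the collection directly
      assert(before == after)
      // The same cleanup turns `size > 0` and `!xs.isEmpty` into `xs.nonEmpty`.
      assert(List(1, 2, 3).nonEmpty == (List(1, 2, 3).size > 0))
    }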
@@ -44,7 +44,7 @@ object TopLevel {
 /** Checks the API of two source files for equality.*/
 object SameAPI {
   def apply(a: Source, b: Source): Boolean =
-    a.apiHash == b.apiHash && (a.hash.length > 0 && b.hash.length > 0) && apply(a.api, b.api)
+    a.apiHash == b.apiHash && (a.hash.nonEmpty && b.hash.nonEmpty) && apply(a.api, b.api)
 
   def apply(a: Def, b: Def): Boolean =
     (new SameAPI(false, true)).sameDefinitions(List(a), List(b), true)
@@ -62,7 +62,7 @@ object ClassfileManager {
       logger.debug("Rolling back changes to class files.")
       logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}")
       IO.deleteFilesEmptyDirs(generatedClasses)
-      logger.debug(s"Restoring class files: \n${showFiles(movedClasses.map(_._1))}")
+      logger.debug(s"Restoring class files: \n${showFiles(movedClasses.keys)}")
       for ((orig, tmp) <- movedClasses) IO.move(tmp, orig)
     }
     logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir")
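movedClasses holds (original, backup) pairs, so mapping each pair to its first element just re-derives the keys; on a Map, keys returns them without building a new collection. A standalone sketch (file names and the object name are hypothetical):

    object MapKeysIdiom extends App {
      val movedClasses = Map("A.class" -> "tmp/A.class", "B.class" -> "tmp/B.class")
      val before = movedClasses.map(_._1) // builds a fresh Iterable of first elements
      val after = movedClasses.keys       // reuses the key set the Map already tracks
      assert(before.toSet == after.toSet)
    }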
@@ -84,7 +84,7 @@ private final class AnalysisCallback(internalMap: File => Option[File], external
     val outputSettings = output match {
       case single: SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath))
       case multi: MultipleOutput =>
-        multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)).toArray
+        multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath))
     }
     new Compilation(System.currentTimeMillis, outputSettings)
   }
@@ -185,7 +185,7 @@ class AggressiveCompile(cacheFile: File) {
   def javaOnly(f: File) = f.getName.endsWith(".java")
 
   private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
-    options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
+    options.dropWhile(_ != CompilerArguments.BootClasspathOption).slice(1, 2).headOption.toList.flatMap(IO.parseClasspath)
 
   val store = MixedAnalyzingCompiler.staticCachedStore(cacheFile)
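drop(1).take(1) and slice(1, 2) select the same one-element window, but slice says so in a single call. Both appear here to pick the argument that follows a flag. A sketch with a stand-in option list (the flag and values are illustrative):

    object SliceIdiom extends App {
      val options = Seq("-g", "-bootclasspath", "lib/rt.jar", "-deprecation")
      val flag = "-bootclasspath"
      // Both select the single element following the flag, if any.
      val before = options.dropWhile(_ != flag).drop(1).take(1).headOption
      val after = options.dropWhile(_ != flag).slice(1, 2).headOption
      assert(before == after && after == Some("lib/rt.jar"))
    }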
@@ -43,7 +43,7 @@ final class MixedAnalyzingCompiler(
     logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
     /** compiles the scala code necessary using the analyzing compiler. */
     def compileScala(): Unit =
-      if (!scalaSrcs.isEmpty) {
+      if (scalaSrcs.nonEmpty) {
         val sources = if (order == Mixed) incSrc else scalaSrcs
         val arguments = cArgs(Nil, absClasspath, None, options.options)
         timed("Scala compilation", log) {
@@ -54,7 +54,7 @@ final class MixedAnalyzingCompiler(
      * Compiles the Java code necessary. All analysis code is included in this method.
      */
     def compileJava(): Unit =
-      if (!javaSrcs.isEmpty) {
+      if (javaSrcs.nonEmpty) {
         // Runs the analysis portion of Javac.
         timed("Java compile + analysis", log) {
           javac.compile(javaSrcs, options.javacOptions.toArray[String], output, callback, reporter, log, progress)
@@ -81,7 +81,7 @@ final class MixedAnalyzingCompiler(
     val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
     val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
     val combined = scalaMsg ++ javaMsg
-    if (!combined.isEmpty)
+    if (combined.nonEmpty)
       log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
   }
@@ -187,7 +187,7 @@ object MixedAnalyzingCompiler {
   def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
     args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)
   private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
-    options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
+    options.dropWhile(_ != CompilerArguments.BootClasspathOption).slice(1, 2).headOption.toList.flatMap(IO.parseClasspath)
 
   private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
   private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
@@ -36,7 +36,7 @@ final class AnalyzingJavaCompiler private[sbt] (
    * @param progressOpt An optional compilation progress reporter. Where we can report back what files we're currently compiling.
    */
   def compile(sources: Seq[File], options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, log: Logger, progressOpt: Option[CompileProgress]): Unit = {
-    if (!sources.isEmpty) {
+    if (sources.nonEmpty) {
       val absClasspath = classpath.map(_.getAbsoluteFile)
       @annotation.tailrec def ancestor(f1: File, f2: File): Boolean =
         if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)
@@ -33,7 +33,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {
 
   def extractUsedNamesFromSrc(src: String): Set[String] = {
     val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
-    analysisCallback.usedNames(tempSrcFile).toSet
+    analysisCallback.usedNames(tempSrcFile)
   }
 
   /**
@@ -46,7 +46,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {
   def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = {
     // we drop temp src file corresponding to the definition src file
     val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc)
-    analysisCallback.usedNames(tempSrcFile).toSet
+    analysisCallback.usedNames(tempSrcFile)
   }
 
   /**
@@ -62,8 +62,8 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {
    * file system-independent way of testing dependencies between source code "files".
    */
   def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = {
-    val rawGroupedSrcs = srcs.map(_.values.toList).toList
-    val symbols = srcs.map(_.keys).flatten
+    val rawGroupedSrcs = srcs.map(_.values.toList)
+    val symbols = srcs.flatMap(_.keys)
     val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs)
     val fileToSymbol = (tempSrcFiles zip symbols).toMap
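This hunk carries two cleanups: mapping a List already yields a List, so the trailing .toList was a no-op, and map followed by flatten fuses into flatMap, which skips the intermediate nested collection. A sketch with symbolic stand-ins for the test sources (names are illustrative):

    object FlatMapIdiom extends App {
      val srcs = List(Map('a -> "class A"), Map('b -> "class B", 'c -> "class C"))
      val before = srcs.map(_.keys).flatten // builds a List[Iterable[Symbol]] first
      val after = srcs.flatMap(_.keys)      // produces the flat List[Symbol] directly
      assert(before == after)
    }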
@@ -8,7 +8,7 @@ private class ProcessLoggerWriter(delegate: ProcessLogger, level: Level.Value, n
   override def close() = flush()
   override def flush(): Unit =
     synchronized {
-      if (buffer.length > 0) {
+      if (buffer.nonEmpty) {
        log(buffer.toString)
        buffer.clear()
      }
@@ -74,7 +74,7 @@ object IvyRetrieve {
     }: _*)
   def toCaller(caller: IvyCaller): Caller = {
     val m = toModuleID(caller.getModuleRevisionId)
-    val callerConfigurations = caller.getCallerConfigurations.toArray.toVector collect {
+    val callerConfigurations = caller.getCallerConfigurations.toVector collect {
       case x if nonEmptyString(x).isDefined => x
     }
     val ddOpt = Option(caller.getDependencyDescriptor)
@@ -128,16 +128,16 @@ object IvyRetrieve {
       case _ => dep.getResolvedId.getExtraAttributes
     })
     val isDefault = Option(dep.getDescriptor) map { _.isDefault }
-    val configurations = dep.getConfigurations(confReport.getConfiguration).toArray.toList
+    val configurations = dep.getConfigurations(confReport.getConfiguration).toList
     val licenses: Seq[(String, Option[String])] = mdOpt match {
-      case Some(md) => md.getLicenses.toArray.toVector collect {
+      case Some(md) => md.getLicenses.toVector collect {
        case lic: IvyLicense if Option(lic.getName).isDefined =>
          val temporaryURL = "http://localhost"
          (lic.getName, nonEmptyString(lic.getUrl) orElse { Some(temporaryURL) })
      }
      case _ => Nil
    }
-    val callers = dep.getCallers(confReport.getConfiguration).toArray.toVector map { toCaller }
+    val callers = dep.getCallers(confReport.getConfiguration).toVector map { toCaller }
     val (resolved, missing) = artifacts(moduleId, confReport getDownloadReports revId)
 
     new ModuleReport(moduleId, resolved, missing, status, publicationDate, resolver, artifactResolver,
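The Ivy getters in this file already return Java arrays, which Scala converts to a Vector or List directly, so the interposed .toArray was a copy with no effect on the result. A sketch over a plain array (values and the object name are illustrative):

    object ToVectorIdiom extends App {
      val configurations: Array[String] = Array("compile", "test")
      val before = configurations.toArray.toVector // Array -> Array copy -> Vector
      val after = configurations.toVector          // Array -> Vector in one step
      assert(before == after)
    }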
@@ -50,9 +50,9 @@ private[sbt] object JsonUtil {
       (c.caller.organization != sbtOrgTemp) &&
         (c.caller.organization != fakeCallerOrganization)
     }
-    val interProj = (callers filter { c =>
-      (c.caller.organization == sbtOrgTemp)
-    }).headOption.toList
+    val interProj = (callers find { c =>
+      c.caller.organization == sbtOrgTemp
+    }).toList
     interProj ::: nonArtificial.toList
   }
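filter(p).headOption evaluates the predicate across the whole collection (and, on a strict collection, materializes every match) only to keep the first element; find(p) stops at the first hit. A sketch with a cut-down caller model (the case class is a stand-in, not the sbt type):

    object FindIdiom extends App {
      case class Caller(organization: String)
      val sbtOrgTemp = "org.scala-sbt.temp"
      val callers = Seq(Caller(sbtOrgTemp), Caller("com.example"))
      val before = callers.filter(_.organization == sbtOrgTemp).headOption
      val after = callers.find(_.organization == sbtOrgTemp)
      assert(before == after)
    }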
@@ -248,7 +248,7 @@ class MakePom(val log: Logger) {
     val includeArtifacts = artifacts.filter(d => includeTypes(d.getType))
     if (artifacts.isEmpty) {
       val configs = dependency.getModuleConfigurations
-      if (configs.filterNot(Set("sources", "docs")).nonEmpty) {
+      if (!configs.forall(Set("sources", "docs"))) {
        warnIntransitve()
        val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations)
        makeDependencyElem(dependency, scope, optional, None, None, excludes)
@@ -271,7 +271,7 @@ class MakePom(val log: Logger) {
       case Nil | "*" :: Nil => dependency.getModuleConfigurations
       case x => x.toArray
     }
-    if (configs.filterNot(Set("sources", "docs")).nonEmpty) {
+    if (!configs.forall(Set("sources", "docs"))) {
      val (scope, optional) = getScopeAndOptional(configs)
      val classifier = artifactClassifier(artifact)
      val baseType = artifactType(artifact)
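Both MakePom sites ask "is any configuration outside this set?". filterNot(set).nonEmpty answers that by constructing the complement first; !configs.forall(set) short-circuits on the first outsider, exploiting that a Set[String] is already usable as a String => Boolean predicate. A sketch (values are illustrative):

    object ForallIdiom extends App {
      val ignored = Set("sources", "docs")
      val configs = Seq("sources", "docs", "compile")
      val before = configs.filterNot(ignored).nonEmpty
      val after = !configs.forall(ignored)
      assert(before && after) // "compile" falls outside the ignored set
    }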
@@ -57,7 +57,7 @@ private[sbt] class CachedResolutionResolveCache() {
     {
       log.debug(s":: building artificial module descriptors from ${md0.getModuleRevisionId}")
       // val expanded = expandInternalDependencies(md0, data, prOpt, log)
-      val rootModuleConfigs = md0.getConfigurations.toArray.toVector
+      val rootModuleConfigs = md0.getConfigurations.toVector
       directDependencies(md0) map { dd =>
         val arts = dd.getAllDependencyArtifacts.toVector map { x => s"""${x.getName}:${x.getType}:${x.getExt}:${x.getExtraAttributes}""" }
         log.debug(s"::: dd: $dd (artifacts: ${arts.mkString(",")})")
@@ -430,7 +430,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
       cs match {
         case Nil => Nil
         case (k, Vector()) :: rest => resolveConflicts(rest)
-        case (k, Vector(oa)) :: rest if (oa.modules.size == 0) => resolveConflicts(rest)
+        case (k, Vector(oa)) :: rest if (oa.modules.isEmpty) => resolveConflicts(rest)
         case (k, Vector(oa)) :: rest if (oa.modules.size == 1 && !oa.modules.head.evicted) =>
           log.debug(s":: no conflict $rootModuleConf: ${oa.organization}:${oa.name}")
           oa :: resolveConflicts(rest)
@@ -599,7 +599,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
       case None => Vector()
     }
     // These are the configurations from the original project we want to resolve.
-    val rootModuleConfs = md0.getConfigurations.toArray.toVector
+    val rootModuleConfs = md0.getConfigurations.toVector
     val configurations0 = ur.configurations.toVector
     // This is how md looks from md0 via dd's mapping.
     val remappedConfigs0: Map[String, Vector[String]] = Map(rootModuleConfs map { conf0 =>
@@ -109,7 +109,7 @@ private final class MergedDescriptors(a: DependencyDescriptor, b: DependencyDesc
   private[this] def addConfigurations(dd: DefaultDependencyArtifactDescriptor, confs: Seq[String]): Unit =
     confs foreach dd.addConfiguration
 
-  private[this] def concat[T: reflect.ClassTag](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct.toArray
+  private[this] def concat[T: reflect.ClassTag](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct
 
   def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules)
@@ -68,7 +68,7 @@ object Package {
     }
 
     val map = conf.sources.toMap
-    val inputs = map :+: lastModified(map.keySet.toSet) :+: manifest :+: HNil
+    val inputs = map :+: lastModified(map.keySet) :+: manifest :+: HNil
     cachedMakeJar(inputs)(() => exists(conf.jar))
   }
   def setVersion(main: Attributes) {
@@ -112,7 +112,7 @@ object TestResultLogger {
   val printStandard_? : Output => Boolean =
     results =>
       // Print the standard one-liner statistic if no framework summary is defined, or when > 1 framework is in used.
-      results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.size == 0)
+      results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty)
 
   val printStandard = TestResultLogger((log, results, _) => {
     val (skippedCount, errorsCount, passedCount, failuresCount, ignoredCount, canceledCount, pendingCount) =
@@ -142,9 +142,9 @@ object Tests {
       }
     }
 
-    if (excludeTestsSet.size > 0)
+    if (excludeTestsSet.nonEmpty)
       log.debug(excludeTestsSet.mkString("Excluding tests: \n\t", "\n\t", ""))
-    if (undefinedFrameworks.size > 0)
+    if (undefinedFrameworks.nonEmpty)
       log.warn("Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks.mkString("\n\t"))
 
     def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
@@ -138,8 +138,8 @@ final object Aggregation {
   // tasks, and input tasks in the same call. The code below allows settings and tasks to be mixed, but not input tasks.
   // One problem with input tasks in `all` is that many input tasks consume all input and would need syntactic delimiters.
   // Once that is addressed, the tasks constructed by the input tasks would need to be combined with the explicit tasks.
-  if (inputTasks.size > 0) {
-    if (other.size > 0) {
+  if (inputTasks.nonEmpty) {
+    if (other.nonEmpty) {
       val inputStrings = inputTasks.map(_.key).mkString("Input task(s):\n\t", "\n\t", "\n")
       val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n")
       failure(s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings")
@@ -635,12 +635,12 @@ object Defaults extends BuildCommon {
 
   // drop base directories, since there are no valid mappings for these
   def sourceMappings = (unmanagedSources, unmanagedSourceDirectories, baseDirectory) map { (srcs, sdirs, base) =>
-    ((srcs --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat)) toSeq
+    (srcs --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat)
   }
   def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories)
   def relativeMappings(files: ScopedTaskable[Seq[File]], dirs: ScopedTaskable[Seq[File]]): Initialize[Task[Seq[(File, String)]]] =
     (files, dirs) map { (rs, rdirs) =>
-      (rs --- rdirs) pair (relativeTo(rdirs) | flat) toSeq
+      (rs --- rdirs) pair (relativeTo(rdirs) | flat)
     }
 
   def collectFiles(dirs: ScopedTaskable[Seq[File]], filter: ScopedTaskable[FileFilter], excludes: ScopedTaskable[FileFilter]): Initialize[Task[Seq[File]]] =
@@ -1181,7 +1181,7 @@ object Classpaths {
       (confs ++ confs.map(internalConfigurationMap.value) ++ (if (autoCompilerPlugins.value) CompilerPlugin :: Nil else Nil)).distinct
     },
     ivyConfigurations ++= Configurations.auxiliary,
-    ivyConfigurations ++= { if (managedScalaInstance.value && !scalaHome.value.isDefined) Configurations.ScalaTool :: Nil else Nil },
+    ivyConfigurations ++= { if (managedScalaInstance.value && scalaHome.value.isEmpty) Configurations.ScalaTool :: Nil else Nil },
     moduleSettings <<= moduleSettings0,
     makePomConfiguration := new MakePomConfiguration(artifactPath in makePom value, projectInfo.value, None, pomExtra.value, pomPostProcess.value, pomIncludeRepository.value, pomAllRepositories.value),
     deliverLocalConfiguration := deliverConfig(crossTarget.value, status = if (isSnapshot.value) "integration" else "release", logging = ivyLoggingLevel.value),
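On Option, !opt.isDefined and opt.isEmpty are interchangeable; the positive form removes a negation from the reader's path, which matters in conditions that already chain several clauses, as both Classpaths sites do. A sketch (scalaHome here is a plain Option standing in for the setting's value):

    object OptionIsEmptyIdiom extends App {
      val scalaHome: Option[String] = None
      val before = !scalaHome.isDefined
      val after = scalaHome.isEmpty
      assert(before && after)
    }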
@@ -1239,7 +1239,7 @@ object Classpaths {
   )
 
   val jvmBaseSettings: Seq[Setting[_]] = Seq(
-    libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && !scalaHome.value.isDefined && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value),
+    libraryDependencies ++= autoLibraryDependency(autoScalaLibrary.value && scalaHome.value.isEmpty && managedScalaInstance.value, sbtPlugin.value, scalaOrganization.value, scalaVersion.value),
     // Override the default to handle mixing in the sbtPlugin + scala dependencies.
     allDependencies := {
       val base = projectDependencies.value ++ libraryDependencies.value
@@ -415,7 +415,7 @@ object EvaluateTask {
 
   def liftAnonymous: Incomplete => Incomplete = {
     case i @ Incomplete(node, tpe, None, causes, None) =>
-      causes.find(inc => !inc.node.isDefined && (inc.message.isDefined || inc.directCause.isDefined)) match {
+      causes.find(inc => inc.node.isEmpty && (inc.message.isDefined || inc.directCause.isDefined)) match {
        case Some(lift) => i.copy(directCause = lift.directCause, message = lift.message)
        case None => i
      }
@@ -174,7 +174,7 @@ object Load {
     val keys = Index.allKeys(settings)
     val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key)
     val scopedKeys = keys ++ data.allKeys((s, k) => ScopedKey(s, k))
-    val projectsMap = projects.mapValues(_.defined.keySet).toMap
+    val projectsMap = projects.mapValues(_.defined.keySet)
     val keyIndex = KeyIndex(scopedKeys, projectsMap)
     val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap)
     new sbt.StructureIndex(Index.stringToKeyMap(attributeKeys), Index.taskToKeyMap(data), Index.triggers(data), keyIndex, aggIndex)
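projects is already a Map, and in the Scala 2.x versions sbt targeted at the time mapValues also returns a Map, so the trailing .toMap only forced an extra copy. (mapValues was a lazy view over the original map; dropping .toMap keeps that laziness, which is harmless when the result is consumed once, as here. Scala 2.13 later moved mapValues to MapView entirely.) A sketch under that 2.x assumption, with illustrative values:

    object MapValuesIdiom extends App {
      val projects = Map("root" -> Set("compile", "test"), "core" -> Set("compile"))
      // In Scala 2.x this is already a Map[String, Int]; .toMap added nothing but a copy.
      val projectsMap: Map[String, Int] = projects.mapValues(_.size)
      assert(projectsMap("root") == 2)
    }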
@@ -358,7 +358,7 @@ object Load {
       builds map {
         case (uri, unit) =>
           (uri, unit.resolveRefs(ref => Scope.resolveProjectRef(uri, rootProject, ref)))
-      } toMap;
+      }
     }
   def checkAll(referenced: Map[URI, List[ProjectReference]], builds: Map[URI, sbt.PartBuildUnit]) {
     val rootProject = getRootProject(builds)
@@ -396,7 +396,7 @@ object Load {
     {
       IO.assertAbsolute(uri)
       val resolve = (_: Project).resolve(ref => Scope.resolveProjectRef(uri, rootProject, ref))
-      new sbt.LoadedBuildUnit(unit.unit, unit.defined mapValues resolve toMap, unit.rootProjects, unit.buildSettings)
+      new sbt.LoadedBuildUnit(unit.unit, unit.defined mapValues resolve, unit.rootProjects, unit.buildSettings)
     }
   def projects(unit: sbt.BuildUnit): Seq[Project] =
     {
@@ -510,7 +510,7 @@ object Project extends ProjectExtra {
 
   val ProjectReturn = AttributeKey[List[File]]("project-return", "Maintains a stack of builds visited using reload.")
   def projectReturn(s: State): List[File] = getOrNil(s, ProjectReturn)
-  def inPluginProject(s: State): Boolean = projectReturn(s).toList.length > 1
+  def inPluginProject(s: State): Boolean = projectReturn(s).length > 1
   def setProjectReturn(s: State, pr: List[File]): State = s.copy(attributes = s.attributes.put(ProjectReturn, pr))
   def loadAction(s: State, action: LoadAction.Value) = action match {
     case Return =>
@@ -27,7 +27,7 @@ private[sbt] object SettingCompletions {
     {
       import extracted._
       val r = relation(extracted.structure, true)
-      val allDefs = Def.flattenLocals(Def.compiled(extracted.structure.settings, true)(structure.delegates, structure.scopeLocal, implicitly[Show[ScopedKey[_]]])).map(_._1)
+      val allDefs = Def.flattenLocals(Def.compiled(extracted.structure.settings, true)(structure.delegates, structure.scopeLocal, implicitly[Show[ScopedKey[_]]])).keys
      val projectScope = Load.projectScope(currentRef)
      def resolve(s: Setting[_]): Seq[Setting[_]] = Load.transformSettings(projectScope, currentRef.build, rootProject, s :: Nil)
      def rescope[T](setting: Setting[T]): Seq[Setting[_]] =
@@ -478,7 +478,7 @@ private final class TrapExit(delegateManager: SecurityManager) extends SecurityM
 
   private def disposeAllFrames(log: Logger) {
     val allFrames = java.awt.Frame.getFrames
-    if (allFrames.length > 0) {
+    if (allFrames.nonEmpty) {
       log.debug(s"Disposing ${allFrames.length} top-level windows...")
       allFrames.foreach(_.dispose) // dispose all top-level windows, which will cause the AWT-EventQueue-* threads to exit
       val waitSeconds = 2
@@ -3,7 +3,7 @@ import org.specs2.mutable._
 class B extends Specification
 {
   "'hello world' has 11 characters" in {
-    "hello world".size must be equalTo(122)
+    "hello world".length must be equalTo(122)
   }
 }
@@ -60,12 +60,12 @@ class FileCommands(baseDirectory: File) extends BasicStatementHandler {
   }
   def exists(paths: List[String]) {
     val notPresent = fromStrings(paths).filter(!_.exists)
-    if (notPresent.length > 0)
+    if (notPresent.nonEmpty)
       scriptError("File(s) did not exist: " + notPresent.mkString("[ ", " , ", " ]"))
   }
   def absent(paths: List[String]) {
     val present = fromStrings(paths).filter(_.exists)
-    if (present.length > 0)
+    if (present.nonEmpty)
       scriptError("File(s) existed: " + present.mkString("[ ", " , ", " ]"))
   }
   def execute(command: List[String]): Unit = execute0(command.head, command.tail)
@@ -48,7 +48,7 @@ object ScriptedPlugin extends Plugin {
   val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet)
 
   val id = charClass(c => !c.isWhitespace && c != '/').+.string
-  val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/')
+  val groupP = token(id.examples(pairMap.keySet)) <~ token('/')
   def nameP(group: String) = token("*".id | id.examples(pairMap(group)))
   val testID = for (group <- groupP; name <- nameP(group)) yield (group, name)
   (token(Space) ~> matched(testID)).*
@@ -28,7 +28,7 @@ object TaskRunnerForkTest extends Properties("TaskRunner Fork") {
   }
   property("fork and reduce") = forAll(TaskListGen, MaxWorkersGen) { (m: List[Int], workers: Int) =>
     m.nonEmpty ==> {
-      val expected = m.reduceLeft(_ + _)
+      val expected = m.sum
      checkResult(tryRun(m.tasks.reduced(_ + _), false, workers), expected)
    }
  }
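For a non-empty list of numbers (which the ==> guard guarantees here), reduceLeft(_ + _) and sum agree; sum also names the operation and handles the empty case by returning zero. A sketch (values are illustrative):

    object SumIdiom extends App {
      val m = List(3, 5, 7)
      assert(m.reduceLeft(_ + _) == m.sum && m.sum == 15)
      assert(List.empty[Int].sum == 0) // reduceLeft would throw on an empty list
    }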
@@ -56,10 +56,10 @@ private[sbt] final case class Constant(tag: Byte, nameIndex: Int, typeIndex: Int
 private[sbt] final case class FieldOrMethodInfo(accessFlags: Int, name: Option[String], descriptor: Option[String], attributes: IndexedSeq[AttributeInfo]) extends NotNull {
   def isStatic = (accessFlags & ACC_STATIC) == ACC_STATIC
   def isPublic = (accessFlags & ACC_PUBLIC) == ACC_PUBLIC
-  def isMain = isPublic && isStatic && descriptor.filter(_ == "([Ljava/lang/String;)V").isDefined
+  def isMain = isPublic && isStatic && descriptor.exists(_ == "([Ljava/lang/String;)V")
 }
 private[sbt] final case class AttributeInfo(name: Option[String], value: Array[Byte]) extends NotNull {
-  def isNamed(s: String) = name.filter(s == _).isDefined
+  def isNamed(s: String) = name.exists(s == _)
   def isSignature = isNamed("Signature")
   def isSourceFile = isNamed("SourceFile")
 }
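option.filter(p).isDefined builds an intermediate Option just to test it for presence; option.exists(p) asks the question directly. A sketch reusing the main-method descriptor string from this hunk (the object name is hypothetical):

    object OptionExistsIdiom extends App {
      val mainDescriptor = "([Ljava/lang/String;)V"
      val descriptor: Option[String] = Some(mainDescriptor)
      val before = descriptor.filter(_ == mainDescriptor).isDefined
      val after = descriptor.exists(_ == mainDescriptor)
      assert(before && after)
    }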
@@ -40,7 +40,7 @@ object ReflectUtilities {
     var mappings = new immutable.TreeMap[String, T]
     val correspondingFields = fields(self.getClass)
     for (method <- self.getClass.getMethods) {
-      if (method.getParameterTypes.length == 0 && clazz.isAssignableFrom(method.getReturnType)) {
+      if (method.getParameterTypes.isEmpty && clazz.isAssignableFrom(method.getReturnType)) {
        for (field <- correspondingFields.get(method.getName) if field.getType == method.getReturnType) {
          val value = method.invoke(self).asInstanceOf[T]
          if (value == null) throw new UninitializedVal(method.getName, method.getDeclaringClass.getName)
@@ -52,7 +52,7 @@ object IMap {
     put(k, f(this get k getOrElse init))
 
   def mapValues[V2[_]](f: V ~> V2) =
-    new IMap0[K, V2](backing.mapValues(x => f(x)).toMap)
+    new IMap0[K, V2](backing.mapValues(x => f(x)))
 
   def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) =
     {
@@ -17,9 +17,9 @@ sealed trait Settings[Scope] {
 }
 
 private final class Settings0[Scope](val data: Map[Scope, AttributeMap], val delegates: Scope => Seq[Scope]) extends Settings[Scope] {
-  def scopes: Set[Scope] = data.keySet.toSet
+  def scopes: Set[Scope] = data.keySet
   def keys(scope: Scope) = data(scope).keys.toSet
-  def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] = data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) } toSeq;
+  def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] = data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) } toSeq
 
   def get[T](scope: Scope, key: AttributeKey[T]): Option[T] =
     delegates(scope).toStream.flatMap(sc => getDirect(sc, key)).headOption
@@ -296,7 +296,7 @@ trait Init[Scope] {
   def definedAtString(settings: Seq[Setting[_]]): String =
     {
       val posDefined = settings.flatMap(_.positionString.toList)
-      if (posDefined.size > 0) {
+      if (posDefined.nonEmpty) {
        val header = if (posDefined.size == settings.size) "defined at:" else
          "some of the defining occurrences:"
        header + (posDefined.distinct mkString ("\n\t", "\n\t", "\n"))
@@ -45,7 +45,7 @@ abstract class JLine extends LineReader {
 
   private[this] def handleMultilinePrompt(prompt: String): String = {
     val lines = """\r?\n""".r.split(prompt)
-    lines.size match {
+    lines.length match {
       case 0 | 1 => prompt
       case _ => reader.print(lines.init.mkString("\n") + "\n"); lines.last;
     }
@@ -17,7 +17,7 @@ class LoggerWriter(delegate: Logger, unbufferedLevel: Option[Level.Value], nl: S
   override def close() = flush()
   override def flush(): Unit =
     synchronized {
-      if (buffer.length > 0) {
+      if (buffer.nonEmpty) {
        log(buffer.toString)
        buffer.clear()
      }
@@ -123,7 +123,7 @@ private final class MRelation[A, B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) ex
   def _1s = fwd.keySet
   def _2s = rev.keySet
 
-  def size = (fwd.valuesIterator map { _.size }).foldLeft(0)(_ + _)
+  def size = (fwd.valuesIterator map (_.size)).sum
 
   def all: Traversable[(A, B)] = fwd.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }.toTraversable