Merge pull request #2672 from dwijnand/unusued

Remove unused imports, vals and defs
eugene yokota 2016-07-14 22:32:51 -04:00 committed by GitHub
commit 8a85d851cf
68 changed files with 100 additions and 233 deletions
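Alongside the removals, the build enables the compiler warnings that flag this kind of dead code (see the scalacOptions change near the end of the diff). A minimal, illustrative sketch of the equivalent setting in a generic sbt build, using only the flag names that appear in that hunk (Scala 2.11-era names):

// Illustrative build.sbt fragment; the actual change lives in the project's Util settings below.
scalacOptions ++= Seq(
  "-Ywarn-dead-code",
  "-Ywarn-unused",         // unused private/local vals and defs
  "-Ywarn-unused-import"   // unused imports (2.11.x flag name)
)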

View File

@ -11,10 +11,7 @@ import sbt.internal.util.Types.{ :+:, idFun }
import java.io.File
import java.{ util => ju }
import java.net.URL
import scala.xml.NodeSeq
import sbinary.{ DefaultProtocol, Format }
// import sbt.internal.librarymanagement.{ ExternalIvyConfiguration, IvyConfiguration, IvyPaths, IvyScala, ModuleSettings, RetrieveConfiguration, SbtExclusionRule, UpdateConfiguration, UpdateReport }
// import sbt.librarymanagement.{ Configuration, ExclusionRule, CrossVersion, ModuleID, Patterns }
import sbt.internal.librarymanagement._
import sbt.librarymanagement._
import sbt.librarymanagement.RepositoryHelpers._

View File

@ -4,14 +4,13 @@
package sbt
import sbt.internal.inc.javac.JavaTools
import sbt.internal.inc.{ Analysis, AnalyzingCompiler, CompileOutput, ComponentCompiler, IncrementalCompilerImpl, Locate, LoggerReporter, ScalaInstance }
import sbt.internal.inc.{ AnalyzingCompiler, ComponentCompiler, ScalaInstance }
import xsbti.{ Logger => _, _ }
import xsbti.compile.{ ClasspathOptions, CompileOrder, Compilers, CompileResult, GlobalsCache, IncOptions, Inputs, MiniSetup }
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }
import xsbti.compile.{ ClasspathOptions, Compilers, CompileResult, Inputs }
import java.io.File
import sbt.internal.librarymanagement.{ ComponentManager, IvyConfiguration }
import sbt.librarymanagement.{ ModuleID, CrossVersion, VersionNumber }
import sbt.librarymanagement.{ ModuleID, VersionNumber }
import sbt.util.Logger
object Compiler {

View File

@ -5,7 +5,6 @@ package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import sbt.internal.util.JLine
import sbt.util.Logger
import xsbti.compile.{ Inputs, Compilers }

View File

@ -3,19 +3,13 @@
*/
package sbt
import java.io.{ File, PrintWriter }
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import Predef.{ conforms => _, _ }
import sbt.internal.util.Types.:+:
import sbt.io.syntax._
import sbt.io.IO
import sbinary.DefaultProtocol.FileFormat
import sbt.internal.util.Cache.{ defaultEquiv, hConsCache, hNilCache, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn }
import sbt.internal.util.Tracked.{ inputChanged, outputChanged }
import sbt.internal.util.{ FilesInfo, HashFileInfo, HNil, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.FilesInfo.{ exists, hash, lastModified }
import xsbti.Reporter
import xsbti.compile.JavaTools

View File

@ -3,7 +3,7 @@
*/
package sbt
import java.io.{ File, Writer }
import java.io.File
import sbt.internal.inc.Relations
import sbt.internal.util.Relation

View File

@ -8,7 +8,6 @@ import testing.{ Logger => _, _ }
import java.net.ServerSocket
import java.io._
import Tests.{ Output => TestOutput, _ }
import ForkMain._
import sbt.io.IO
import sbt.util.Logger

View File

@ -12,9 +12,9 @@ import sbt.io.syntax._
import sbt.io.IO
import sbinary.{ DefaultProtocol, Format }
import DefaultProtocol.{ FileFormat, immutableMapFormat, StringFormat, UnitFormat }
import DefaultProtocol.{ FileFormat, immutableMapFormat, StringFormat }
import sbt.internal.util.{ Cache, FileInfo, FilesInfo, HNil, ModifiedFileInfo, PlainFileInfo, Tracked }
import Cache.{ defaultEquiv, hConsCache, hNilCache, streamFormat, wrapIn }
import Cache.{ defaultEquiv, hConsCache, hNilCache, streamFormat }
import Tracked.{ inputChanged, outputChanged }
import FileInfo.exists
import FilesInfo.lastModified

View File

@ -4,7 +4,7 @@
package sbt
import java.io.File
import sbt.internal.inc.{ AnalyzingCompiler, RawCompiler, ScalaInstance }
import sbt.internal.inc.{ RawCompiler, ScalaInstance }
import Predef.{ conforms => _, _ }
import sbt.io.syntax._
@ -12,7 +12,7 @@ import sbt.io.IO
import sbinary.DefaultProtocol.FileFormat
import sbt.internal.util.Types.:+:
import sbt.internal.util.Cache.{ defaultEquiv, hConsCache, hNilCache, IntFormat, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn }
import sbt.internal.util.Cache.{ defaultEquiv, hConsCache, hNilCache, IntFormat, seqCache, StringFormat }
import sbt.internal.util.Tracked.{ inputChanged, outputChanged }
import sbt.internal.util.{ FilesInfo, HashFileInfo, HNil, ModifiedFileInfo, PlainFileInfo }
import sbt.internal.util.FilesInfo.{ exists, hash, lastModified }

View File

@ -8,7 +8,6 @@ import xsbt.api.{ Discovered, Discovery }
import sbt.internal.inc.Analysis
import TaskExtra._
import sbt.internal.util.FeedbackProvidedException
import sbt.internal.util.Types._
import xsbti.api.Definition
import xsbti.compile.CompileAnalysis
import ConcurrentRestrictions.Tag
@ -16,8 +15,6 @@ import ConcurrentRestrictions.Tag
import testing.{ AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask }
import scala.annotation.tailrec
import java.io.File
import sbt.util.Logger
sealed trait TestOption
@ -126,7 +123,7 @@ object Tests {
val testListeners: Seq[TestReportListener])
private[sbt] def processOptions(config: Execution, discovered: Seq[TestDefinition], log: Logger): ProcessedOptions =
{
import collection.mutable.{ HashSet, ListBuffer, Map, Set }
import collection.mutable.{ HashSet, ListBuffer }
val testFilters = new ListBuffer[String => Boolean]
var orderedFilters = Seq[String => Boolean]()
val excludeTestsSet = new HashSet[String]

View File

@ -2,8 +2,7 @@ package sbt
package compiler
import scala.collection.mutable.ListBuffer
import scala.reflect.Manifest
import scala.tools.nsc.{ ast, interpreter, io, reporters, util, CompilerCommand, Global, Phase, Settings }
import scala.tools.nsc.{ ast, io, reporters, CompilerCommand, Global, Phase, Settings }
import io.{ AbstractFile, PlainFile, VirtualDirectory }
import ast.parser.Tokens
import reporters.{ ConsoleReporter, Reporter }
@ -56,7 +55,7 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
lazy val settings =
{
val s = new Settings(println)
val command = new CompilerCommand(options.toList, s)
new CompilerCommand(options.toList, s) // this side-effects on Settings..
s
}
lazy val reporter = mkReporter(settings)
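The "side-effects on Settings" comment added above hinges on a non-obvious property of scala.tools.nsc: constructing a CompilerCommand parses the option list into the Settings instance it is given, so the command value itself can be discarded. A minimal standalone sketch of that behaviour (flag and field names assumed from the standard 2.11 compiler API):

import scala.tools.nsc.{ CompilerCommand, Settings }

object CompilerCommandSideEffect extends App {
  val settings = new Settings(println)
  // The constructor processes the options and mutates `settings` as it goes;
  // the resulting command value is never needed afterwards.
  new CompilerCommand(List("-deprecation"), settings)
  assert(settings.deprecation.value) // the side effect is now visible on `settings`
}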
@ -70,7 +69,6 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
}
lazy val global: EvalGlobal = new EvalGlobal(settings, reporter)
import global._
import definitions._
private[sbt] def unlinkDeferred(): Unit = {
toUnlinkLater foreach unlink
@ -261,11 +259,6 @@ final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Se
case None => Nil
case Some(dir) => dir listFiles moduleFileFilter(moduleName)
}
private[this] def getClassFiles(backing: Option[File], moduleName: String): Seq[File] =
backing match {
case None => Nil
case Some(dir) => dir listFiles moduleClassFilter(moduleName)
}
private[this] def moduleFileFilter(moduleName: String) = new java.io.FilenameFilter {
def accept(dir: File, s: String) =
(s contains moduleName)

View File

@ -5,10 +5,6 @@ package sbt
import sbt.util.Level
import sbt.internal.util.complete.HistoryCommands
import scala.annotation.tailrec
import java.io.File
import sbt.io.syntax._
object BasicCommandStrings {
val HelpCommand = "help"

View File

@ -8,7 +8,6 @@ import sbt.internal.inc.ModuleUtilities
import DefaultParsers._
import Function.tupled
import Command.applyEffect
import HistoryCommands.{ Start => HistoryPrefix }
import BasicCommandStrings._
import CommandUtil._
import BasicKeys._

View File

@ -3,7 +3,6 @@
*/
package sbt
import java.io.File
import sbt.internal.inc.ReflectUtilities
import sbt.internal.util.complete.{ DefaultParsers, EditDistance, Parser }
import sbt.internal.util.Types.const

View File

@ -5,7 +5,6 @@ package sbt
import scala.annotation.tailrec
import scala.util.control.NonFatal
import java.io.{ File, PrintWriter }
import jline.TerminalFactory
import sbt.io.Using

View File

@ -52,23 +52,24 @@ object Watched {
def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State =
{
@tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
@tailrec def shouldTerminate: Boolean =
(System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
val sourcesFinder = PathFinder { watched watchPaths s }
val watchState = s get ContinuousState getOrElse WatchState.empty
if (watchState.count > 0)
printIfDefined(watched watchingMessage watchState)
val (triggered, newWatchState, newState) =
val (triggered, newWatchState) =
try {
val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate)
(triggered, newWatchState, s)
(triggered, newWatchState)
} catch {
case e: Exception =>
val log = s.log
log.error("Error occurred obtaining files to watch. Terminating continuous execution...")
State.handleException(e, s, log)
(false, watchState, s.fail)
(false, watchState)
}
if (triggered) {

View File

@ -3,7 +3,7 @@
*/
package xsbt
import java.io.{ BufferedReader, BufferedWriter, InputStream, InputStreamReader, OutputStreamWriter, OutputStream }
import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter }
import java.net.{ InetAddress, ServerSocket, Socket }
import scala.util.control.NonFatal

View File

@ -2,8 +2,7 @@ package sbt
import sbt.internal.util.complete.Parser
import Def.{ Initialize, ScopedKey }
import std.TaskExtra.{ task => mktask, _ }
import Task._
import std.TaskExtra._
import sbt.internal.util.{ ~>, AttributeKey, Types }
import sbt.internal.util.Types._

View File

@ -1,9 +1,8 @@
package sbt
import Def.{ Initialize, resolvedScoped, ScopedKey, Setting, streamsManagerKey }
import Def.{ Initialize, ScopedKey, streamsManagerKey }
import Previous._
import sbt.internal.util.{ ~>, AttributeKey, IMap, RMap }
import sbt.internal.util.Types._
import sbt.internal.util.{ ~>, IMap, RMap }
import java.io.{ InputStream, OutputStream }
@ -47,7 +46,6 @@ object Previous {
private[sbt] val references = SettingKey[References]("previous-references", "Collects all static references to previous values of tasks.", KeyRanks.Invisible)
private[sbt] val cache = TaskKey[Previous]("previous-cache", "Caches previous values of tasks read from disk for the duration of a task execution.", KeyRanks.Invisible)
private[this] val previousReferenced = AttributeKey[Referenced[_]]("previous-referenced")
/** Records references to previous task value. This should be completely populated after settings finish loading. */
private[sbt] final class References {

View File

@ -3,7 +3,6 @@
*/
package sbt
import java.io.File
import java.net.URI
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag }

View File

@ -6,19 +6,16 @@ package sbt
/** An abstraction on top of Settings for build configuration and task definition. */
import java.io.File
import java.net.URI
import ConcurrentRestrictions.Tag
import Def.{ Initialize, KeyedInitialize, ScopedKey, Setting, setting }
import sbt.io.{ FileFilter, Path, PathFinder }
import sbt.io.{ FileFilter, PathFinder }
import sbt.io.syntax._
import std.TaskExtra.{ task => mktask, _ }
import Task._
import sbt.internal.util.Types._
import sbt.internal.util.{ ~>, AList, AttributeKey, Settings, SourcePosition }
import language.experimental.macros
import reflect.internal.annotations.compileTimeOnly
sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] }

View File

@ -2,7 +2,6 @@ package sbt
package std
import language.experimental.macros
import scala.reflect._
import reflect.macros._
import Def.Initialize

View File

@ -2,11 +2,10 @@ package sbt
package std
import language.experimental.macros
import scala.reflect._
import reflect.macros._
import reflect.internal.annotations.compileTimeOnly
import Def.{ Initialize, ScopedKey }
import Def.Initialize
import sbt.internal.util.appmacro.ContextUtil
import sbt.internal.util.complete.Parser

View File

@ -1,8 +1,6 @@
package sbt
package std
import language.experimental.macros
import scala.reflect._
import reflect.macros._
private[sbt] object KeyMacro {
@ -21,7 +19,7 @@ private[sbt] object KeyMacro {
def keyImpl[T: c.WeakTypeTag, S: c.WeakTypeTag](c: Context)(f: (c.Expr[String], c.Expr[Manifest[T]]) => c.Expr[S]): c.Expr[S] =
{
import c.universe.{ Apply => ApplyTree, _ }
import c.universe._
val enclosingValName = definingValName(c, methodName => s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""")
val name = c.Expr[String](Literal(Constant(enclosingValName)))
val mf = c.Expr[Manifest[T]](c.inferImplicitValue(weakTypeOf[Manifest[T]]))

View File

@ -1,7 +1,7 @@
package sbt
package std
import Def.{ Initialize, Setting }
import Def.Initialize
import sbt.internal.util.Types.{ idFun, Id }
import sbt.internal.util.AList
import sbt.internal.util.appmacro.{ Convert, Converted, Instance, MixedBuilder, MonadInstance }
@ -14,8 +14,6 @@ object InitializeInstance extends MonadInstance {
def pure[T](t: () => T): Initialize[T] = Def.pure(t)
}
import language.experimental.macros
import scala.reflect._
import reflect.macros._
object InitializeConvert extends Convert {

View File

@ -3,17 +3,14 @@ package std
import Def.{ Initialize, Setting }
import sbt.internal.util.Types.{ const, idFun, Id }
import TaskExtra.allM
import sbt.internal.util.appmacro.{ ContextUtil, Convert, Converted, Instance, MixedBuilder, MonadInstance }
import sbt.internal.util.appmacro.{ ContextUtil, Converted, Instance, MixedBuilder, MonadInstance }
import Instance.Transform
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.internal.util.{ AList, LinePosition, NoPosition, SourcePosition }
import language.experimental.macros
import scala.annotation.tailrec
import scala.reflect._
import reflect.macros._
import reflect.internal.annotations.compileTimeOnly
import scala.reflect.internal.util.UndefinedPosition
/** Instance for the monad/applicative functor for plain Tasks. */

View File

@ -4,7 +4,6 @@
package sbt
import java.io.File
import java.net.URI
import KeyRanks.DSetting
import sbt.io.{ GlobFilter, Path }

View File

@ -3,7 +3,7 @@
*/
package sbt
import scala.concurrent.duration.{ FiniteDuration, Duration }
import scala.concurrent.duration.FiniteDuration
import sbt.internal._
import sbt.internal.util.Attributed.data
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
@ -29,7 +29,7 @@ import scala.util.control.NonFatal
import org.apache.ivy.core.module.{ descriptor, id }
import descriptor.ModuleDescriptor, id.ModuleRevisionId
import java.io.{ File, PrintWriter }
import java.net.{ URI, URL, MalformedURLException }
import java.net.{ URI, URL }
import java.util.concurrent.{ TimeUnit, Callable }
import sbinary.DefaultProtocol.StringFormat
import sbt.internal.util.Cache.seqFormat
@ -49,11 +49,11 @@ import Keys._
// incremental compiler
import xsbt.api.Discovery
import xsbti.compile.{ Compilers, ClasspathOptions, CompileAnalysis, CompileOptions, CompileOrder,
CompileResult, DefinesClass, IncOptions, IncOptionsUtil, Inputs, MiniSetup, PerClasspathEntryLookup,
import xsbti.compile.{ Compilers, CompileAnalysis, CompileOptions, CompileOrder,
CompileResult, DefinesClass, IncOptionsUtil, Inputs, MiniSetup, PerClasspathEntryLookup,
PreviousResult, Setup, TransactionalManagerType }
import sbt.internal.inc.{ AnalyzingCompiler, Analysis, ClassfileManager, CompilerCache, FileValueCache,
IncrementalCompilerImpl, Locate, LoggerReporter, MixedAnalyzingCompiler, ScalaInstance, ClasspathOptionsUtil }
import sbt.internal.inc.{ AnalyzingCompiler, Analysis, CompilerCache, FileValueCache,
Locate, LoggerReporter, MixedAnalyzingCompiler, ScalaInstance, ClasspathOptionsUtil }
object Defaults extends BuildCommon {
final val CacheDirectoryName = "cache"
@ -755,7 +755,6 @@ object Defaults extends BuildCommon {
}
def runMainTask(classpath: Initialize[Task[Classpath]], scalaRun: Initialize[Task[ScalaRun]]): Initialize[InputTask[Unit]] =
{
import DefaultParsers._
val parser = loadForParser(discoveredMainClasses)((s, names) => runMainParser(s, names getOrElse Nil))
Def.inputTask {
val (mainClass, args) = parser.parsed
@ -777,7 +776,6 @@ object Defaults extends BuildCommon {
def runnerInit: Initialize[Task[ScalaRun]] = Def.task {
val tmp = taskTemporaryDirectory.value
val resolvedScope = resolvedScoped.value.scope
val structure = buildStructure.value
val si = scalaInstance.value
val s = streams.value
val options = javaOptions.value
@ -812,9 +810,6 @@ object Defaults extends BuildCommon {
val cp = data(dependencyClasspath.value).toList
val label = nameForSrc(configuration.value.name)
val fiOpts = fileInputOptions.value
val logger: Logger = s.log
val maxer = maxErrors.value
val spms = sourcePositionMappers.value
val reporter = (compilerReporter in compile).value
(hasScala, hasJava) match {
case (true, _) =>
@ -1060,9 +1055,7 @@ object Defaults extends BuildCommon {
}
object Classpaths {
import Keys._
import Scope.ThisScope
import Defaults._
import Attributed.{ blank, blankSeq }
def concatDistinct[T](a: ScopedTaskable[Seq[T]], b: ScopedTaskable[Seq[T]]): Initialize[Task[Seq[T]]] = (a, b) map { (x, y) => (x ++ y).distinct }
def concat[T](a: ScopedTaskable[Seq[T]], b: ScopedTaskable[Seq[T]]): Initialize[Task[Seq[T]]] = (a, b) map (_ ++ _)
@ -1286,7 +1279,6 @@ object Classpaths {
ew
},
classifiersModule in updateClassifiers := {
import language.implicitConversions
implicit val key = (m: ModuleID) => (m.organization, m.name, m.revision)
val projectDeps = projectDependencies.value.iterator.map(key).toSet
val externalModules = update.value.allModules.filterNot(m => projectDeps contains key(m))
@ -1304,9 +1296,6 @@ object Classpaths {
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
val uwConfig = (unresolvedWarningConfiguration in update).value
val logicalClock = LogicalClock(state.value.hashCode)
val depDir = dependencyCacheDirectory.value
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c.copy(artifactFilter = c.artifactFilter.invert), ivyScala.value, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), Vector.empty, s.log)
}
} tag (Tags.Update, Tags.Network)
@ -1399,14 +1388,7 @@ object Classpaths {
IvyActions.publish(module, config, s.log)
} tag (Tags.Publish, Tags.Network)
import Cache._
import CacheIvy.{
classpathFormat, /*publishIC,*/ updateIC,
updateReportFormat,
excludeMap,
moduleIDSeqIC,
modulePositionMapFormat
}
import CacheIvy.{ updateIC, updateReportFormat, excludeMap, moduleIDSeqIC, modulePositionMapFormat }
def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)(f: Map[ModuleID, Set[String]] => UpdateReport): UpdateReport =
{
@ -1504,7 +1486,7 @@ object Classpaths {
val ew = EvictionWarning(module, ewo, result, log)
ew.lines foreach { log.warn(_) }
ew.infoAllTheThings foreach { log.info(_) }
val cw = CompatibilityWarning.run(compatWarning, module, mavenStyle, log)
CompatibilityWarning.run(compatWarning, module, mavenStyle, log)
result
}
def uptodate(inChanged: Boolean, out: UpdateReport): Boolean =
@ -1633,8 +1615,8 @@ object Classpaths {
def analyzed[T](data: T, analysis: CompileAnalysis) = Attributed.blank(data).put(Keys.analysis, analysis)
def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
val x1 = compile.value
val x2 = copyResources.value
compile.value
copyResources.value
classDirectory.value :: Nil
}
// This is a variant of exportProductsTask with tracking

View File

@ -10,14 +10,13 @@ import sbt.librarymanagement.{ Resolver, UpdateReport }
import scala.concurrent.duration.Duration
import java.io.File
import Def.{ displayFull, dummyState, ScopedKey, Setting }
import Keys.{ Streams, TaskStreams, dummyRoots, dummyStreamsManager, executionRoots, pluginData, streams,
streamsManager, taskDefinitionKey, transformState }
import Def.{ dummyState, ScopedKey, Setting }
import Keys.{ Streams, TaskStreams, dummyRoots, executionRoots, pluginData, streams,
streamsManager, transformState }
import Project.richInitializeTask
import Scope.{ GlobalScope, ThisScope }
import sbt.internal.util.Types.const
import Scope.GlobalScope
import scala.Console.RED
import std.Transform.{ DummyTaskMap, TaskAndValue }
import std.Transform.DummyTaskMap
import TaskName._
@deprecated("Use EvaluateTaskConfig instead.", "0.13.5")
@ -26,10 +25,8 @@ final case class EvaluateConfig(cancelable: Boolean, restrictions: Seq[Tags.Rule
/**
* An API that allows you to cancel executing tasks upon some signal.
*
* For example, this is implemented by the TaskEngine; invoking `cancel()` allows you
* to cancel the current task exeuction. A `TaskCancel` is passed to the
* [[TaskEvalautionCancelHandler]] which is responsible for calling `cancel()` when
* appropriate.
* For example, this is implemented by the TaskEngine;
* invoking `cancelAndShutdown()` allows you to cancel the current task execution.
*/
trait RunningTaskEngine {
/** Attempts to kill and shutdown the running task engine.*/
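The reworded scaladoc above describes the contract loosely; a minimal hypothetical implementation (method name taken from the comment, everything else assumed) would look like:

// Hypothetical stand-in showing the shape of the contract described above.
val noopEngine: RunningTaskEngine = new RunningTaskEngine {
  def cancelAndShutdown(): Unit =
    println("cancelling the currently executing tasks and shutting down the engine")
}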
@ -98,7 +95,7 @@ sealed trait EvaluateTaskConfig {
*/
def minForcegcInterval: Duration
}
final object EvaluateTaskConfig {
object EvaluateTaskConfig {
@deprecated("Use the alternative that specifies minForcegcInterval", "0.13.9")
def apply(restrictions: Seq[Tags.Rule],
checkCycles: Boolean,
@ -138,8 +135,7 @@ final case class PluginData(dependencyClasspath: Seq[Attributed[File]], definiti
}
object EvaluateTask {
import std.{ TaskExtra, Transform }
import TaskExtra._
import std.Transform
import Keys.state
private[sbt] def defaultProgress: ExecuteProgress[Task] =
@ -258,7 +254,6 @@ object EvaluateTask {
def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = {
val all = Incomplete linearize result
val keyed = for (Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) <- all) yield (key, msg, ex)
val un = all.filter { i => i.node.isEmpty || i.message.isEmpty }
import ExceptionCategory._
for ((key, msg, Some(ex)) <- keyed) {
@ -300,7 +295,7 @@ object EvaluateTask {
def runTask[T](root: Task[T], state: State, streams: Streams, triggers: Triggers[Task], config: EvaluateTaskConfig)(implicit taskToNode: NodeView[Task]): (State, Result[T]) =
{
import ConcurrentRestrictions.{ completionService, TagMap, Tag, tagged, tagsKey }
import ConcurrentRestrictions.{ completionService, tagged, tagsKey }
val log = state.log
log.debug(s"Running task... Cancel: ${config.cancelStrategy}, check cycles: ${config.checkCycles}, forcegc: ${config.forceGarbageCollection}")

View File

@ -7,16 +7,13 @@ import java.io.File
import java.net.URL
import scala.concurrent.duration.{ FiniteDuration, Duration }
import Def.ScopedKey
import sbt.internal.util.complete._
import sbt.internal.inc.{ MixedAnalyzingCompiler, ScalaInstance }
import std.TaskExtra._
import sbt.internal.inc.ScalaInstance
import xsbti.compile.{ DefinesClass, ClasspathOptions, CompileAnalysis, CompileOptions, CompileOrder,
Compilers, CompileResult, GlobalsCache, IncOptions, Inputs, PreviousResult, Setup }
import scala.xml.{ Node => XNode, NodeSeq }
import org.apache.ivy.core.module.{ descriptor, id }
import descriptor.ModuleDescriptor, id.ModuleRevisionId
import testing.Framework
import sbt.internal.util.Types.Id
import KeyRanks._
import sbt.internal.{ BuildStructure, LoadedBuild, PluginDiscovery, BuildDependencies, SessionSettings }
@ -424,7 +421,7 @@ object Keys {
private[sbt] val executeProgress = SettingKey[State => TaskProgress]("executeProgress", "Experimental task execution listener.", DTask)
private[sbt] val taskCancelStrategy = SettingKey[State => TaskCancellationStrategy]("taskCancelStrategy", "Experimental task cancellation handler.", DTask)
// Experimental in sbt 0.13.2 to enable grabing semantic compile failures.
// Experimental in sbt 0.13.2 to enable grabbing semantic compile failures.
private[sbt] val compilerReporter = TaskKey[xsbti.Reporter]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask)
val triggeredBy = Def.triggeredBy

View File

@ -3,13 +3,10 @@
*/
package sbt
import java.io.{ File, PrintWriter }
import LogManager._
import std.Transform
import java.io.PrintWriter
import Def.ScopedKey
import Scope.GlobalScope
import BasicKeys.explicitGlobalLogLevels
import Keys.{ logLevel, logManager, persistLogLevel, persistTraceLevel, sLog, state, traceLevel }
import Keys.{ logLevel, logManager, persistLogLevel, persistTraceLevel, sLog, traceLevel }
import scala.Console.{ BLUE, RESET }
import sbt.internal.util.{ AttributeKey, ConsoleOut, MultiLoggerConfig, Settings, SuppressedTraceContext }
import sbt.internal.util.MainLogging._
@ -51,7 +48,7 @@ object LogManager {
val screenTrace = getOr(traceLevel.key, defaultTraceLevel(state))
val backingTrace = getOr(persistTraceLevel.key, Int.MaxValue)
val extraBacked = state.globalLogging.backed :: Nil
multiLogger(new MultiLoggerConfig(console, backed, extraBacked ::: extra, screenLevel, backingLevel, screenTrace, backingTrace))
multiLogger(MultiLoggerConfig(console, backed, extraBacked ::: extra, screenLevel, backingLevel, screenTrace, backingTrace))
}
def defaultTraceLevel(state: State): Int =
if (state.interactive) -1 else Int.MaxValue
@ -94,8 +91,6 @@ object LogManager {
s.put(BasicKeys.explicitGlobalLogLevels, true).put(Keys.logLevel.key, level)
}
private[this] def setExplicitGlobalLogLevels(s: State, flag: Boolean): State =
s.put(BasicKeys.explicitGlobalLogLevels, flag)
private[this] def hasExplicitGlobalLogLevels(s: State): Boolean =
State.getBoolean(s, BasicKeys.explicitGlobalLogLevels, default = false)

View File

@ -32,7 +32,7 @@ final class xMain extends xsbti.AppMain {
{
import BasicCommands.early
import BasicCommandStrings.runEarly
import BuiltinCommands.{ initialize, defaults }
import BuiltinCommands.defaults
import sbt.internal.CommandStrings.{ BootCommand, DefaultsCommand, InitCommand }
runManaged(initialState(configuration,
Seq(defaults, early),
@ -286,7 +286,7 @@ object BuiltinCommands {
def lastGrep = Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) {
case (s, (pattern, Some(sks))) =>
val (str, ref, display) = extractLast(s)
val (str, _, display) = extractLast(s)
Output.lastGrep(sks, str.streams(s), pattern, printLast(s))(display)
keepLastLog(s)
case (s, (pattern, None)) =>
@ -353,7 +353,7 @@ object BuiltinCommands {
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State =
{
val (str, ref, display) = extractLast(s)
val (str, _, display) = extractLast(s)
Output.last(sks, str.streams(s), printLast(s), sid)(display)
keepLastLog(s)
}

View File

@ -7,7 +7,8 @@ import java.io.File
import java.net.URI
import java.util.Locale
import Project._
import Keys.{ appConfiguration, stateBuildStructure, commands, configuration, historyPath, projectCommand, sessionSettings, shellPrompt, thisProject, thisProjectRef, watch }
import Keys.{ stateBuildStructure, commands, configuration, historyPath, projectCommand, sessionSettings,
shellPrompt, watch }
import Scope.{ GlobalScope, ThisScope }
import Def.{ Flattened, Initialize, ScopedKey, Setting }
import sbt.internal.{ Load, BuildStructure, LoadedBuild, LoadedBuildUnit, SettingGraph, SettingCompletions, AddSettings, SessionSettings }
@ -384,11 +385,11 @@ object Project extends ProjectExtra {
{
val structure = Project.structure(s)
val ref = Project.current(s)
val project = Load.getProject(structure.units, ref.build, ref.project)
Load.getProject(structure.units, ref.build, ref.project)
val msg = Keys.onLoadMessage in ref get structure.data getOrElse ""
if (!msg.isEmpty) s.log.info(msg)
def get[T](k: SettingKey[T]): Option[T] = k in ref get structure.data
def commandsIn(axis: ResolvedReference) = commands in axis get structure.data toList;
def commandsIn(axis: ResolvedReference) = commands in axis get structure.data toList
val allCommands = commandsIn(ref) ++ commandsIn(BuildRef(ref.build)) ++ (commands in Global get structure.data toList)
val history = get(historyPath) flatMap idFun
@ -611,7 +612,6 @@ object Project extends ProjectExtra {
(i, Keys.resolvedScoped)((t, scoped) => tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)))
}
import scala.reflect._
import reflect.macros._
def projectMacroImpl(c: Context): c.Expr[Project] =

View File

@ -5,7 +5,6 @@
package sbt
import sbt.internal.BuildLoader
import sbt.internal.librarymanagement.StringUtilities
import sbt.io.{ Hash, IO }

View File

@ -5,10 +5,9 @@ import sbt.internal.util.{ AttributeKey, Dag, Types }
import sbt.librarymanagement.Configuration
import Types.{ const, idFun }
import Types.const
import Def.Initialize
import java.net.URI
import ScopeFilter.Data
object ScopeFilter {
type ScopeFilter = Base[Scope]

View File

@ -5,8 +5,7 @@ package sbt
package internal
import Def.{ showRelativeKey, ScopedKey }
import Project.showContextKey
import Keys.{ sessionSettings, thisProject }
import Keys.sessionSettings
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import Aggregation.{ KeyValue, Values }
import DefaultParsers._

View File

@ -5,17 +5,16 @@ package sbt
package internal
import Def.ScopedKey
import Keys.{ aggregate, showSuccess, showTiming, timingFormat }
import Keys.{ showSuccess, showTiming, timingFormat }
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ Dag, HList, Relation, Settings, Show, Util }
import sbt.util.Logger
import java.net.URI
import Parser.{ seq, failure, success }
import collection.mutable
import std.Transform.{ DummyTaskMap, TaskAndValue }
import std.Transform.DummyTaskMap
sealed trait Aggregation
final object Aggregation {
object Aggregation {
final case class ShowConfig(settingValues: Boolean, taskValues: Boolean, print: String => Unit, success: Boolean)
final case class Complete[T](start: Long, stop: Long, results: sbt.Result[Seq[KeyValue[T]]], state: State)
final case class KeyValue[+T](key: ScopedKey[_], value: T)
@ -111,7 +110,6 @@ final object Aggregation {
{
val parsers = for (KeyValue(k, it) <- inputs) yield it.parser(s).map(v => KeyValue(k, v))
Command.applyEffect(seq(parsers)) { roots =>
import EvaluateTask._
runTasks(s, structure, roots, DummyTaskMap(Nil), show)
}
}

View File

@ -5,8 +5,8 @@ package sbt
package internal
import java.io.File
import Keys.{ name, organization, thisProject, autoGeneratedProject }
import Def.{ ScopedKey, Setting }
import Keys.{ organization, thisProject, autoGeneratedProject }
import Def.Setting
import sbt.io.Hash
import sbt.internal.util.{ Attributed, Eval }
import sbt.internal.inc.ReflectUtilities
@ -15,7 +15,7 @@ import sbt.Project._
trait BuildDef {
def projectDefinitions(baseDirectory: File): Seq[Project] = projects
def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq
// TODO: Should we grab the build core setting shere or in a plugin?
// TODO: Should we grab the build core settings here or in a plugin?
def settings: Seq[Setting[_]] = Defaults.buildCore
def buildLoaders: Seq[BuildLoader.Components] = Nil
/**

View File

@ -84,7 +84,7 @@ object BuildLoader {
}
def componentLoader: Loader = (info: LoadInfo) => {
import info.{ components, config, staging, state, uri }
import info.{ config, staging, state, uri }
val cs = info.components
for {
resolve <- cs.resolver(new ResolveInfo(uri, staging, config, state))

View File

@ -5,10 +5,7 @@ package sbt
package internal
import sbt.util.Logger
import java.io.File
import sbt.librarymanagement.Resolver
import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }
import sbt.internal.inc.{ AnalyzingCompiler, ClasspathOptionsUtil, IncrementalCompilerImpl, ScalaInstance }
import sbt.internal.inc.{ ClasspathOptionsUtil, ScalaInstance }
object ConsoleProject {
def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(implicit log: Logger): Unit = {
@ -38,7 +35,4 @@ object ConsoleProject {
implicit def settingKeyEvaluate[T](s: SettingKey[T]): Evaluate[T] = new Evaluate(get(s))
}
final class Evaluate[T] private[sbt] (val eval: T)
private def bootIvyHome(app: xsbti.AppConfiguration): Option[File] =
try { Option(app.provider.scalaProvider.launcher.ivyHome) }
catch { case _: NoSuchMethodError => None }
}

View File

@ -13,8 +13,6 @@ import Def.{ ScopedKey, Setting }
import Scope.GlobalScope
import sbt.internal.parser.SbtParser
import scala.annotation.tailrec
import sbt.io.IO
/**
@ -113,7 +111,6 @@ private[sbt] object EvaluateConfigurations {
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) else {
val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
val projs = (loader: ClassLoader) => definitions.values(loader).map(p => resolveBase(file.getParentFile, p.asInstanceOf[Project]))
(imp, DefinedSbtValues(definitions))
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports

View File

@ -4,7 +4,6 @@ package internal
import sbt.librarymanagement.{ Configuration, Configurations, ModuleID, Resolver, SbtArtifacts, UpdateReport }
import sbt.internal.util.Attributed
import Def.{ ScopedKey, Setting }
import Scoped._
import Keys._
import Configurations.{ Compile, Runtime }
import java.io.File

View File

@ -10,7 +10,6 @@ import sbt.util.{ Level, Logger }
import sbt.librarymanagement.{ Configurations, CrossVersion, MavenRepository, ModuleID, Resolver }
import java.io.File
import Attributed.blankSeq
import Configurations.Compile
import Def.Setting
import Keys._

View File

@ -7,7 +7,7 @@ package internal
import java.net.URI
import Def.ScopedKey
import sbt.internal.util.complete.DefaultParsers.validID
import sbt.internal.util.Types.{ idFun, some }
import sbt.internal.util.Types.some
import sbt.internal.util.{ AttributeKey, Relation }
object KeyIndex {

View File

@ -9,25 +9,24 @@ import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }
import java.io.File
import java.net.{ URI, URL }
import compiler.{ Eval, EvalImports }
import compiler.Eval
import scala.annotation.tailrec
import collection.mutable
import sbt.internal.inc.{ Analysis, ClasspathOptionsUtil, FileValueCache, Locate, ModuleUtilities }
import sbt.internal.inc.{ Analysis, ClasspathOptionsUtil, ModuleUtilities }
import sbt.internal.inc.classpath.ClasspathUtilities
import Project.inScope
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
import Keys.{ appConfiguration, baseDirectory, configuration, exportedProducts, fullClasspath, fullResolvers,
loadedBuild, onLoadMessage, pluginData, resolvedScoped, sbtPlugin, scalacOptions, streams, taskDefinitionKey,
loadedBuild, onLoadMessage, pluginData, resolvedScoped, sbtPlugin, scalacOptions, streams,
thisProject, thisProjectRef, update }
import tools.nsc.reporters.ConsoleReporter
import sbt.internal.util.{ Attributed, Eval => Ev, Settings, Show, ~> }
import sbt.internal.util.Attributed.data
import Scope.{ GlobalScope, ThisScope }
import Scope.GlobalScope
import sbt.internal.util.Types.const
import BuildPaths._
import BuildStreams._
import sbt.io.{ GlobFilter, IO, Path }
import sbt.internal.io.Alternatives
import sbt.util.Logger
import xsbti.compile.Compilers
@ -59,10 +58,9 @@ private[sbt] object Load {
val compilers = Compiler.compilers(ClasspathOptionsUtil.boot, ivyConfiguration)(state.configuration, log)
val evalPluginDef = EvaluateTask.evalPluginDef(log) _
val delegates = defaultDelegates
val initialID = baseDirectory.getName
val pluginMgmt = PluginManagement(loader)
val inject = InjectSettings(injectGlobal(state), Nil, const(Nil))
new LoadBuildConfiguration(stagingDirectory, classpath, loader, compilers, evalPluginDef, delegates,
LoadBuildConfiguration(stagingDirectory, classpath, loader, compilers, evalPluginDef, delegates,
EvaluateTask.injectStreams, pluginMgmt, inject, None, Nil, log)
}
private def bootIvyHome(app: xsbti.AppConfiguration): Option[File] =
@ -304,7 +302,6 @@ private[sbt] object Load {
unit.definitions.builds.flatMap(_.buildLoaders).toList match {
case Nil => loaders
case x :: xs =>
import Alternatives._
val resolver = (x /: xs) { _ | _ }
if (isRoot) loaders.setRoot(resolver) else loaders.addNonRoot(unit.uri, resolver)
}
@ -731,7 +728,7 @@ private[sbt] object Load {
merge(fs.sortBy(_.getName).map(memoLoadSettingsFile))
// Finds all the build files associated with this project
import AddSettings.{ User, SbtFiles, DefaultSbtFiles, AutoPlugins, Sequence, BuildScalaFiles }
import AddSettings.{ SbtFiles, DefaultSbtFiles, Sequence }
def associatedFiles(auto: AddSettings): Seq[File] = auto match {
case sf: SbtFiles => sf.files.map(f => IO.resolve(projectBase, f)).filterNot(_.isHidden)
case sf: DefaultSbtFiles => defaultSbtFiles.filter(sf.include).filterNot(_.isHidden)

View File

@ -8,7 +8,7 @@ import sbt.internal.util.{ Show, Types }
import java.util.regex.Pattern
import java.io.File
import Keys.{ Streams, TaskStreams }
import Keys.Streams
import Def.ScopedKey
import Aggregation.{ KeyValue, Values }
import Types.idFun

View File

@ -2,10 +2,8 @@ package sbt
package internal
import sbt.internal.util.Attributed
// import sbt.internal.{ BuildDef, IncompatiblePluginsException, OldPlugin }
import java.io.File
import java.net.URL
import scala.util.control.NonFatal
import Attributed.data
import sbt.internal.BuildDef.analyzed
import xsbt.api.{ Discovered, Discovery }

View File

@ -215,15 +215,15 @@ private[sbt] object PluginsDebug {
val minRequiredPlugins = plugins(minModel)
// The presence of any one of these plugins would deactivate `plugin`
val minAbsentPlugins = excludes(minModel).toSet
val minAbsentPlugins = excludes(minModel)
// Plugins that must be both activated and deactivated for `plugin` to activate.
// A non-empty list here cannot be satisfied and is an error.
val contradictions = minAbsentPlugins & minRequiredPlugins
if(contradictions.nonEmpty) PluginImpossible(plugin, context, contradictions)
if (contradictions.nonEmpty) PluginImpossible(plugin, context, contradictions)
else {
// Plguins that the user has to add to the currently selected plugins in order to enable `plugin`.
// Plugins that the user has to add to the currently selected plugins in order to enable `plugin`.
val addToExistingPlugins = minRequiredPlugins -- initialPlugins
// Plugins that are currently excluded that need to be allowed.
@ -232,9 +232,7 @@ private[sbt] object PluginsDebug {
// The model that results when the minimal plugins are enabled and the minimal plugins are excluded.
// This can include more plugins than just `minRequiredPlugins` because the plguins required for `plugin`
// might activate other plugins as well.
val modelForMin = context.deducePlugin(and(includeAll(minRequiredPlugins), excludeAll(minAbsentPlugins)), context.log)
val incrementalInputs = and( includeAll(minRequiredPlugins ++ initialPlugins), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins))
val incrementalInputs = and(includeAll(minRequiredPlugins ++ initialPlugins), excludeAll(minAbsentPlugins ++ initialExcludes -- minRequiredPlugins))
val incrementalModel = context.deducePlugin(incrementalInputs, context.log).toSet
// Plugins that are newly enabled as a result of selecting the plugins needed for `plugin`, but aren't strictly required for `plugin`.

View File

@ -6,7 +6,6 @@ package internal
import sbt.internal.util.complete
import ProjectNavigation._
import Project.updateCurrent
import Keys.sessionSettings
import complete.{ DefaultParsers, Parser }

View File

@ -3,8 +3,6 @@ package internal
import sbt.internal.util.AttributeKey
import java.net.URI
object Resolve {
def apply(index: BuildUtil[_], current: ScopeAxis[Reference], key: AttributeKey[_], mask: ScopeMask): Scope => Scope =
{

View File

@ -7,7 +7,6 @@ package internal
import java.io.File
import java.net.URI
import sbt.internal.BuildLoader.ResolveInfo
import Def.{ ScopedKey, Setting }
object RetrieveUnit {
def apply(info: ResolveInfo): Option[() => File] =

View File

@ -5,12 +5,10 @@ import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Show, Typ
import sbt.librarymanagement.Configuration
import java.io.File
import java.net.URI
import Project._
import Def.{ ScopedKey, Setting }
import Scope.{ GlobalScope, ThisScope }
import Types.{ const, idFun, Id }
import Scope.GlobalScope
import Types.{ const, idFun }
import complete._
import DefaultParsers._
@ -58,7 +56,6 @@ private[sbt] object SettingCompletions {
import extracted._
val append = Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))
val struct = extracted.structure
val r = relation(newSession.mergeSettings, true)(structure.delegates, structure.scopeLocal, implicitly)
setResult(newSession, r, append)
}
@ -113,8 +110,7 @@ private[sbt] object SettingCompletions {
*/
def settingParser(settings: Settings[Scope], rawKeyMap: Map[String, AttributeKey[_]], context: ResolvedProject): Parser[String] =
{
val cutoff = KeyRanks.MainCutoff
val keyMap: Map[String, AttributeKey[_]] = rawKeyMap.map { case (k, v) => (keyScalaID(k), v) } toMap;
val keyMap: Map[String, AttributeKey[_]] = rawKeyMap.map { case (k, v) => (keyScalaID(k), v) }.toMap
def inputScopedKey(pred: AttributeKey[_] => Boolean): Parser[ScopedKey[_]] =
scopedKeyParser(keyMap.filter { case (_, k) => pred(k) }, settings, context)
val full = for {

View File

@ -6,7 +6,6 @@ package internal
import sbt.internal.util.Show
import java.net.URI
import java.io.File
import Def.{ compiled, flattenLocals, ScopedKey }
import Predef.{ any2stringadd => _, _ }

View File

@ -1,12 +1,8 @@
package sbt
package plugins
import sbt.librarymanagement.Configurations
import Def.Setting
import Keys._
import Project.inConfig
import Configurations.Test
/** An experimental plugin that adds the ability for junit-xml to be generated.
*

View File

@ -35,7 +35,9 @@ object Util {
"-Yinline-warnings",
"-Yno-adapted-args",
"-Ywarn-dead-code",
"-Ywarn-numeric-widen"
"-Ywarn-numeric-widen",
"-Ywarn-unused",
"-Ywarn-unused-import"
)
},
scalacOptions <++= scalaVersion map CrossVersion.partialVersion map {

View File

@ -7,7 +7,7 @@ import java.io.{ File, OutputStream }
import java.util.Locale
import sbt.util.Logger
import scala.sys.process.{ Process, ProcessBuilder }
import scala.sys.process.Process
/**
* Configures forking.

View File

@ -4,7 +4,6 @@
package sbt
import java.io.File
import java.net.{ URL, URLClassLoader }
import java.lang.reflect.{ Method, Modifier }
import Modifier.{ isPublic, isStatic }
import sbt.internal.inc.classpath.ClasspathUtilities

View File

@ -7,7 +7,6 @@
*/
package sbt
import scala.collection.Set
import scala.reflect.Manifest
import scala.collection.concurrent.TrieMap
@ -16,7 +15,6 @@ import Thread.currentThread
import java.security.Permission
import java.util.concurrent.{ ConcurrentHashMap => CMap }
import java.lang.Integer.{ toHexString => hex }
import java.lang.Long.{ toHexString => hexL }
import sbt.util.Logger
@ -94,7 +92,6 @@ object TrapExit {
// interrupts the given thread, but first replaces the exception handler so that the InterruptedException is not printed
private def safeInterrupt(thread: Thread, log: Logger): Unit = {
val name = thread.getName
log.debug("Interrupting thread " + thread.getName)
thread.setUncaughtExceptionHandler(new TrapInterrupt(thread.getUncaughtExceptionHandler))
thread.interrupt

View File

@ -1,6 +1,6 @@
package sbt
import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder }
import java.lang.{ ProcessBuilder => JProcessBuilder }
trait ProcessExtra {
import scala.sys.process._
@ -8,8 +8,6 @@ trait ProcessExtra {
implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder)
implicit def fileToProcess(file: File): ProcessBuilder.FileBuilder = apply(file)
implicit def urlToProcess(url: URL): ProcessBuilder.URLBuilder = apply(url)
// @deprecated("Use string interpolation", "0.13.0")
// implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command)
implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => ProcessBuilder.Source): Seq[ProcessBuilder.Source] = applySeq(builders)
implicit def stringToProcess(command: String): ProcessBuilder = apply(command)

View File

@ -8,8 +8,7 @@ import Keys._
import sbt.internal.util.complete.{ Parser, DefaultParsers }
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.ModuleUtilities
import java.lang.reflect.{ InvocationTargetException, Method }
import java.util.Properties
import java.lang.reflect.Method
import sbt.syntax._
@ -78,7 +77,7 @@ object ScriptedPlugin extends AutoPlugin {
def scriptedTask: Initialize[InputTask[Unit]] = Def.inputTask {
val args = scriptedParser(sbtTestDirectory.value).parsed
val prereq: Unit = scriptedDependencies.value
scriptedDependencies.value
try {
scriptedRun.value.invoke(
scriptedTests.value, sbtTestDirectory.value, scriptedBufferLog.value: java.lang.Boolean,

View File

@ -6,13 +6,11 @@ package sbt
package test
import java.io.File
import java.nio.charset.Charset
import scala.util.control.NonFatal
import xsbt.IPC
import sbt.internal.scripted.{ CommentHandler, FileCommands, ScriptRunner, TestScriptParser, TestException }
import sbt.io.{ DirectoryFilter, GlobFilter, HiddenFileFilter, Path }
import sbt.io.{ DirectoryFilter, HiddenFileFilter }
import sbt.io.IO.wrapNull
import sbt.internal.io.Resources
@ -32,7 +30,6 @@ final class ScriptedTests(resourceBaseDirectory: File, bufferLog: Boolean, launc
scriptedTest(group, name, emptyCallback, log)
def scriptedTest(group: String, name: String, prescripted: File => Unit, log: Logger): Seq[() => Option[String]] = {
import sbt.io.syntax._
import GlobFilter._
for (groupDir <- (resourceBaseDirectory * group).get; nme <- (groupDir * name).get) yield {
val g = groupDir.getName
val n = nme.getName
@ -112,9 +109,9 @@ object ScriptedTests extends ScriptedRunner {
def main(args: Array[String]): Unit = {
val directory = new File(args(0))
val buffer = args(1).toBoolean
val sbtVersion = args(2)
val defScalaVersion = args(3)
val buildScalaVersions = args(4)
// val sbtVersion = args(2)
// val defScalaVersion = args(3)
// val buildScalaVersions = args(4)
val bootProperties = new File(args(5))
val tests = args.drop(6)
val logger = ConsoleLogger()

View File

@ -5,7 +5,6 @@ package sbt
import sbt.internal.util.Types._
import sbt.internal.util.{ ~>, AList, AttributeKey, AttributeMap }
import Task._
import ConcurrentRestrictions.{ Tag, TagMap, tagsKey }
// Action, Task, and Info are intentionally invariant in their type parameter.

View File

@ -5,8 +5,7 @@ package sbt
package std
import java.io.{ BufferedInputStream, BufferedOutputStream, BufferedReader, BufferedWriter, Closeable, File,
FileInputStream, FileOutputStream, IOException, InputStream, InputStreamReader, OutputStream,
OutputStreamWriter, PrintWriter, Reader, Writer }
FileInputStream, FileOutputStream, IOException, InputStreamReader, OutputStreamWriter, PrintWriter }
import sbt.internal.io.DeferredWriter
import sbt.io.IO

View File

@ -6,9 +6,7 @@ package std
import sbt.internal.util.Types._
import sbt.internal.util.{ ~>, AList, DelegatingPMap, RMap }
import Task._
import TaskExtra.{ all, existToAny }
import Execute._
object Transform {
def fromDummy[T](original: Task[T])(action: => T): Task[T] = Task(original.info, Pure(action _, false))
@ -34,11 +32,7 @@ object Transform {
def apply[T](in: Task[T]): Task[T] = map(in).getOrElse(in)
}
def apply(dummies: DummyTaskMap) =
{
import System._
taskToNode(getOrId(dummyMap(dummies)))
}
def apply(dummies: DummyTaskMap) = taskToNode(getOrId(dummyMap(dummies)))
def taskToNode(pre: Task ~> Task): NodeView[Task] = new NodeView[Task] {
def apply[T](t: Task[T]): Node[Task, T] = pre(t).work match {

View File

@ -8,7 +8,6 @@ import scala.sys.process.{ BasicIO, ProcessIO, ProcessBuilder }
import sbt.internal.util.AList
import sbt.internal.util.Types._
import Task._
import java.io.{ BufferedInputStream, BufferedReader, File, InputStream }
import sbt.io.IO

View File

@ -4,7 +4,6 @@
package sbt
import sbt.internal.util.AList
import sbt.internal.util.Types._
/**
* Represents a task node in a format understood by the task evaluation engine Execute.

View File

@ -4,11 +4,10 @@
package sbt
import java.io.File
import java.net.URLClassLoader
import scala.util.control.NonFatal
import testing.{ Logger => TLogger, Task => TestTask, _ }
import testing.{ Task => TestTask, _ }
import org.scalatools.testing.{ Framework => OldFramework }
import sbt.internal.inc.classpath.{ ClasspathUtilities, DualLoader, FilteredLoader }
import sbt.internal.inc.classpath.{ ClasspathUtilities, DualLoader }
import sbt.internal.inc.ScalaInstance
import scala.annotation.tailrec
import sbt.util.Logger
@ -165,8 +164,6 @@ object TestFramework {
def foreachListenerSafe(f: TestsListener => Unit): () => Unit = () => safeForeach(testsListeners, log)(f)
import TestResult.{ Error, Passed, Failed }
val startTask = foreachListenerSafe(_.doInit)
val testTasks =
tests flatMap {

View File

@ -6,7 +6,7 @@ package sbt
import testing.{ Logger => TLogger, Event => TEvent, Status => TStatus }
import sbt.internal.util.{ BufferedLogger, FullLogger }
import sbt.util.{ Level, Logger }
import sbt.util.Level
trait TestReportListener {
/** called for each class or equivalent grouping */

View File

@ -29,7 +29,7 @@ private[sbt] object TestStatus {
import java.util.Properties
def read(f: File): Map[String, Long] =
{
import scala.collection.JavaConversions.{ enumerationAsScalaIterator, propertiesAsScalaMap }
import scala.collection.JavaConversions.{ propertiesAsScalaMap }
val properties = new Properties
IO.load(properties, f)
properties map { case (k, v) => (k, v.toLong) }