diff --git a/compile/api/ClassToAPI.scala b/compile/api/ClassToAPI.scala index ccb6640b0..81f18b761 100644 --- a/compile/api/ClassToAPI.scala +++ b/compile/api/ClassToAPI.scala @@ -7,6 +7,7 @@ import xsbti.api import xsbti.SafeLazy import SafeLazy.strict import collection.mutable +import scala.reflect.ClassTag object ClassToAPI { @@ -18,7 +19,7 @@ object ClassToAPI } // Avoiding implicit allocation. - private def arrayMap[T <: AnyRef, U <: AnyRef : ClassManifest](xs: Array[T])(f: T => U): Array[U] = { + private def arrayMap[T <: AnyRef, U <: AnyRef : ClassTag](xs: Array[T])(f: T => U): Array[U] = { val len = xs.length var i = 0 val res = new Array[U](len) diff --git a/main/Defaults.scala b/main/Defaults.scala index 117f8da1d..3f4900ae8 100755 --- a/main/Defaults.scala +++ b/main/Defaults.scala @@ -55,17 +55,17 @@ object Defaults extends BuildCommon managedDirectory <<= baseDirectory(_ / "lib_managed") )) def globalCore: Seq[Setting[_]] = inScope(GlobalScope)(defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq( - compilerCache <<= state map { _ get Keys.stateCompilerCache getOrElse compiler.CompilerCache.fresh }, + compilerCache := state.value get Keys.stateCompilerCache getOrElse compiler.CompilerCache.fresh, crossVersion :== CrossVersion.Disabled, scalaOrganization :== ScalaArtifacts.Organization, buildDependencies <<= buildDependencies or Classpaths.constructBuildDependencies, taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir }, - onComplete <<= taskTemporaryDirectory { dir => () => IO.delete(dir); IO.createDirectory(dir) }, + onComplete := { val dir = taskTemporaryDirectory.value; () => IO.delete(dir); IO.createDirectory(dir) }, concurrentRestrictions <<= concurrentRestrictions or defaultRestrictions, parallelExecution :== true, - sbtVersion <<= appConfiguration { _.provider.id.version }, - sbtBinaryVersion <<= sbtVersion apply binarySbtVersion, - sbtResolver <<= sbtVersion { sbtV => 
if(sbtV endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, + sbtVersion := appConfiguration.value.provider.id.version, + sbtBinaryVersion := binarySbtVersion(sbtVersion.value), + sbtResolver := { if(sbtVersion.value endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, pollInterval :== 500, logBuffered :== false, connectInput :== false, @@ -74,7 +74,7 @@ object Defaults extends BuildCommon autoScalaLibrary :== true, onLoad <<= onLoad ?? idFun[State], onUnload <<= (onUnload ?? idFun[State]), - onUnload <<= (onUnload, taskTemporaryDirectory) { (f, dir) => s => { try f(s) finally IO.delete(dir) } }, + onUnload := { s => try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, watchingMessage <<= watchingMessage ?? Watched.defaultWatchingMessage, triggeredMessage <<= triggeredMessage ?? Watched.defaultTriggeredMessage, definesClass :== FileValueCache(Locate.definesClass _ ).get, @@ -109,12 +109,12 @@ object Defaults extends BuildCommon showSuccess :== true, commands :== Nil, retrieveManaged :== false, - buildStructure <<= state map Project.structure, - settings <<= buildStructure map ( _.data ), + buildStructure := Project.structure(state.value), + settings := buildStructure.value.data, artifactClassifier :== None, artifactClassifier in packageSrc :== Some(SourceClassifier), artifactClassifier in packageDoc :== Some(DocClassifier), - checksums <<= appConfiguration(Classpaths.bootChecksums), + checksums := Classpaths.bootChecksums(appConfiguration.value), pomExtra :== NodeSeq.Empty, pomPostProcess :== idFun, pomAllRepositories :== false, @@ -130,44 +130,44 @@ object Defaults extends BuildCommon logBuffered := true )) def projectCore: Seq[Setting[_]] = Seq( - name <<= thisProject(_.id), - logManager <<= extraLoggers(extra => LogManager.defaults(extra, StandardMain.console)), + name := thisProject.value.id, + logManager := LogManager.defaults(extraLoggers.value, StandardMain.console), 
onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")"), runnerTask ) def paths = Seq( - baseDirectory <<= thisProject(_.base), - target <<= baseDirectory / "target", + baseDirectory := thisProject.value.base, + target := baseDirectory.value / "target", historyPath <<= historyPath or target(t => Some(t / ".history")), - sourceDirectory <<= baseDirectory / "src", - sourceManaged <<= crossTarget / "src_managed", - resourceManaged <<= crossTarget / "resource_managed", - cacheDirectory <<= (crossTarget, thisProject)(_ / CacheDirectoryName / _.id / "global") + sourceDirectory := baseDirectory.value / "src", + sourceManaged := crossTarget.value / "src_managed", + resourceManaged := crossTarget.value / "resource_managed", + cacheDirectory := crossTarget.value / CacheDirectoryName / thisProject.value.id / "global" ) lazy val configPaths = sourceConfigPaths ++ resourceConfigPaths ++ outputConfigPaths lazy val sourceConfigPaths = Seq( sourceDirectory <<= configSrcSub(sourceDirectory), sourceManaged <<= configSrcSub(sourceManaged), - scalaSource <<= sourceDirectory / "scala", - javaSource <<= sourceDirectory / "java", - unmanagedSourceDirectories <<= Seq(scalaSource, javaSource).join, + scalaSource := sourceDirectory.value / "scala", + javaSource := sourceDirectory.value / "java", + unmanagedSourceDirectories := Seq(scalaSource.value, javaSource.value), // remove when sourceFilter, defaultExcludes are removed includeFilter in unmanagedSources <<= (sourceFilter in unmanagedSources) or (includeFilter in unmanagedSources), excludeFilter in unmanagedSources <<= (defaultExcludes in unmanagedSources) or (excludeFilter in unmanagedSources), unmanagedSources <<= collectFiles(unmanagedSourceDirectories, includeFilter in unmanagedSources, excludeFilter in unmanagedSources), watchSources in ConfigGlobal <++= unmanagedSources, - managedSourceDirectories <<= Seq(sourceManaged).join, + managedSourceDirectories := 
Seq(sourceManaged.value), managedSources <<= generate(sourceGenerators), sourceGenerators :== Nil, sourceDirectories <<= Classpaths.concatSettings(unmanagedSourceDirectories, managedSourceDirectories), sources <<= Classpaths.concat(unmanagedSources, managedSources) ) lazy val resourceConfigPaths = Seq( - resourceDirectory <<= sourceDirectory / "resources", + resourceDirectory := sourceDirectory.value / "resources", resourceManaged <<= configSrcSub(resourceManaged), - unmanagedResourceDirectories <<= Seq(resourceDirectory).join, - managedResourceDirectories <<= Seq(resourceManaged).join, + unmanagedResourceDirectories := Seq(resourceDirectory.value), + managedResourceDirectories := Seq(resourceManaged.value), resourceDirectories <<= Classpaths.concatSettings(unmanagedResourceDirectories, managedResourceDirectories), // remove when defaultExcludes are removed excludeFilter in unmanagedResources <<= (defaultExcludes in unmanagedResources) or (excludeFilter in unmanagedResources), @@ -179,13 +179,16 @@ object Defaults extends BuildCommon resources <<= Classpaths.concat(managedResources, unmanagedResources) ) lazy val outputConfigPaths = Seq( - cacheDirectory <<= (crossTarget, thisProject, configuration) { _ / CacheDirectoryName / _.id / _.name }, - classDirectory <<= (crossTarget, configuration) { (outDir, conf) => outDir / (prefix(conf.name) + "classes") }, - docDirectory <<= (crossTarget, configuration) { (outDir, conf) => outDir / (prefix(conf.name) + "api") } + cacheDirectory := crossTarget.value / CacheDirectoryName / thisProject.value.id / configuration.value.name, + classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"), + docDirectory := crossTarget.value / (prefix(configuration.value.name) + "api") ) def addBaseSources = Seq( - unmanagedSources <<= (unmanagedSources, baseDirectory, includeFilter in unmanagedSources, excludeFilter in unmanagedSources, sourcesInBase) map { - (srcs,b,f,excl,enable) => if(enable) (srcs +++ b * (f -- 
excl)).get else srcs + unmanagedSources := { + val srcs = unmanagedSources.value + val f = (includeFilter in unmanagedSources).value + val excl = (excludeFilter in unmanagedSources).value + if(sourcesInBase.value) (srcs +++ baseDirectory.value * (f -- excl)).get else srcs } ) @@ -197,18 +200,18 @@ object Defaults extends BuildCommon javacOptions in GlobalScope :== Nil, scalacOptions in GlobalScope :== Nil, scalaInstance <<= scalaInstanceSetting, - scalaVersion in GlobalScope <<= appConfiguration( _.provider.scalaProvider.version), - scalaBinaryVersion in GlobalScope <<= scalaVersion apply binaryScalaVersion, - crossVersion <<= (crossPaths) { enabled => if(enabled) CrossVersion.binary else CrossVersion.Disabled }, - crossScalaVersions in GlobalScope <<= Seq(scalaVersion).join, - crossTarget <<= (target, scalaBinaryVersion, sbtBinaryVersion, sbtPlugin, crossPaths)(makeCrossTarget) + scalaVersion in GlobalScope := appConfiguration.value.provider.scalaProvider.version, + scalaBinaryVersion in GlobalScope := binaryScalaVersion(scalaVersion.value), + crossVersion := (if(crossPaths.value) CrossVersion.binary else CrossVersion.Disabled), + crossScalaVersions in GlobalScope := Seq(scalaVersion.value), + crossTarget := makeCrossTarget(target.value, scalaBinaryVersion.value, sbtBinaryVersion.value, sbtPlugin.value, crossPaths.value) ) def makeCrossTarget(t: File, sv: String, sbtv: String, plugin: Boolean, cross: Boolean): File = { val scalaBase = if(cross) t / ("scala-" + sv) else t if(plugin) scalaBase / ("sbt-" + sbtv) else scalaBase } - def compilersSetting = compilers <<= (scalaInstance, appConfiguration, streams, classpathOptions, javaHome) map { (si, app, s, co, jh) => Compiler.compilers(si, co, jh)(app, s.log) } + def compilersSetting = compilers := Compiler.compilers(scalaInstance.value, classpathOptions.value, javaHome.value)(appConfiguration.value, streams.value.log) lazy val configTasks = docTaskSettings(doc) ++ compileTaskSettings ++ compileInputsSettings ++ Seq( 
initialCommands in GlobalScope :== "", @@ -221,18 +224,18 @@ object Defaults extends BuildCommon discoveredMainClasses <<= compile map discoverMainClasses storeAs discoveredMainClasses triggeredBy compile, definedSbtPlugins <<= discoverPlugins, inTask(run)(runnerTask :: Nil).head, - selectMainClass <<= (discoveredMainClasses, mainClass) map { (classes, explicit) => explicit orElse selectRunMain(classes) }, - mainClass in run <<= selectMainClass in run, - mainClass <<= discoveredMainClasses map selectPackageMain, + selectMainClass := mainClass.value orElse selectRunMain(discoveredMainClasses.value), + mainClass in run := (selectMainClass in run).value, + mainClass := selectPackageMain(discoveredMainClasses.value), run <<= runTask(fullClasspath, mainClass in run, runner in run), runMain <<= runMainTask(fullClasspath, runner in run), copyResources <<= copyResourcesTask ) lazy val projectTasks: Seq[Setting[_]] = Seq( - cleanFiles <<= Seq(managedDirectory, target).join, - cleanKeepFiles <<= historyPath(_.toList), - clean <<= (cleanFiles, cleanKeepFiles) map doClean, + cleanFiles := Seq(managedDirectory.value, target.value), + cleanKeepFiles := historyPath.value.toList, + clean := doClean(cleanFiles.value, cleanKeepFiles.value), consoleProject <<= consoleProjectTask, watchTransitiveSources <<= watchTransitiveSourcesTask, watch <<= watchSetting @@ -276,14 +279,12 @@ object Defaults extends BuildCommon } lazy val testTasks: Seq[Setting[_]] = testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions(testQuick) ++ Seq( - testLoader <<= (fullClasspath, scalaInstance, taskTemporaryDirectory) map { (cp, si, temp) => TestFramework.createTestLoader(data(cp), si, IO.createUniqueDirectory(temp)) }, + testLoader := TestFramework.createTestLoader(data(fullClasspath.value), scalaInstance.value, IO.createUniqueDirectory(taskTemporaryDirectory.value)), testFrameworks in GlobalScope :== { import sbt.TestFrameworks._ Seq(ScalaCheck, Specs2, Specs, ScalaTest, JUnit) }, - 
loadedTestFrameworks <<= (testFrameworks, streams, testLoader) map { (frameworks, s, loader) => - frameworks.flatMap(f => f.create(loader, s.log).map( x => (f,x)).toIterable).toMap - }, + loadedTestFrameworks := testFrameworks.value.flatMap(f => f.create(testLoader.value, streams.value.log).map( x => (f,x)).toIterable).toMap, definedTests <<= detectTests, definedTestNames <<= definedTests map ( _.map(_.name).distinct) storeAs definedTestNames triggeredBy compile, testListeners in GlobalScope :== Nil, @@ -308,7 +309,7 @@ object Defaults extends BuildCommon testListeners <<= (streams, resolvedScoped, streamsManager, logBuffered, cacheDirectory in test, testListeners in TaskGlobal) map { (s, sco, sm, buff, dir, ls) => TestLogger(s.log, testLogger(sm, test in sco.scope), buff) +: new TestStatusReporter(succeededFile(dir)) +: ls }, - testOptions <<= (testOptions in TaskGlobal, testListeners) map { (options, ls) => Tests.Listeners(ls) +: options }, + testOptions := Tests.Listeners(testListeners.value) +: (testOptions in TaskGlobal).value, testExecution <<= testExecutionTask(key), testGrouping <<= testGrouping or singleTestGroup(key) ) ) @@ -402,7 +403,7 @@ object Defaults extends BuildCommon } lazy val packageBase: Seq[Setting[_]] = Seq( - artifact <<= moduleName(n => Artifact(n)), + artifact := Artifact(moduleName.value), packageOptions in GlobalScope :== Nil, artifactName in GlobalScope :== ( Artifact.artifactName _ ) ) @@ -410,11 +411,11 @@ object Defaults extends BuildCommon inTask(packageBin)(Seq( packageOptions <<= (name, version, homepage, organization, organizationName, mainClass, packageOptions) map { (name, ver, h, org, orgName, main, p) => Package.addSpecManifestAttributes(name, ver, orgName) +: Package.addImplManifestAttributes(name, ver, h, org, orgName) +: main.map(Package.MainClass.apply) ++: p })) ++ inTask(packageSrc)(Seq( - packageOptions <<= (name, version, organizationName, packageOptions) map { Package.addSpecManifestAttributes(_, _, _) +: _ })) ++ + 
packageOptions := Package.addSpecManifestAttributes(name.value, version.value, organizationName.value) +: packageOptions.value )) ++ packageTaskSettings(packageBin, packageBinMappings) ++ packageTaskSettings(packageSrc, packageSrcMappings) ++ packageTaskSettings(packageDoc, packageDocMappings) ++ - Seq(`package` <<= packageBin) + Seq(`package` := packageBin.value) def packageBinMappings = products map { _ flatMap Path.allSubpaths } def packageDocMappings = doc map { Path.allSubpaths(_).toSeq } @@ -580,14 +581,12 @@ object Defaults extends BuildCommon (dependencyClasspath, cacheDirectory, skip in compile, definesClass, compilerCache) map { (cp, cacheDir, skip, definesC, cache) => Compiler.IncSetup(analysisMap(cp), definesC, skip, cacheDir / "inc_compile", cache) } - def compileInputsSettings: Seq[Setting[_]] = { - val optionsPair = TaskKey.local[(Seq[String], Seq[String])] - Seq(optionsPair <<= (scalacOptions, javacOptions) map Util.pairID, - compileInputs <<= (dependencyClasspath, sources, compilers, optionsPair, classDirectory, compileOrder, compileIncSetup, maxErrors, streams, sourcePositionMappers) map { - (cp, srcs, cs, optsPair, classes, order, incSetup, maxErr, s, spms) => - Compiler.inputs(classes +: data(cp), srcs, classes, optsPair._1, optsPair._2, maxErr, spms, order)(cs, incSetup, s.log) - }) - } + def compileInputsSettings: Seq[Setting[_]] = + Seq(compileInputs := { + val cp = classDirectory.value +: data(dependencyClasspath.value) + Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value, maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log) + }) + def printWarningsTask: Initialize[Task[Unit]] = (streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) => val problems = analysis.infos.allInfos.values.flatMap(i => i.reportedProblems++ i.unreportedProblems) @@ -871,7 +870,8 @@ object Classpaths 
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c, ivyScala), s.log) } } tag(Tags.Update, Tags.Network), - sbtDependency in GlobalScope <<= appConfiguration { app => + sbtDependency in GlobalScope := { + val app = appConfiguration.value val id = app.provider.id val scalaVersion = app.provider.scalaProvider.version val binVersion = binaryScalaVersion(scalaVersion) @@ -902,14 +902,11 @@ object Classpaths def sbtClassifiersTasks = inTask(updateSbtClassifiers)(Seq( transitiveClassifiers in GlobalScope in updateSbtClassifiers ~= ( _.filter(_ != DocClassifier) ), - externalResolvers <<= (externalResolvers, appConfiguration, buildStructure, thisProjectRef) map { (defaultRs, ac, struct, ref) => - val explicit = struct.units(ref.build).unit.plugins.pluginData.resolvers - explicit orElse bootRepositories(ac) getOrElse defaultRs - }, - ivyConfiguration <<= (externalResolvers, ivyPaths, offline, checksums, appConfiguration, target, streams) map { (rs, paths, off, check, app, t, s) => - val resCacheDir = t / "resolution-cache" - new InlineIvyConfiguration(paths, rs, Nil, Nil, off, Option(lock(app)), check, Some(resCacheDir), s.log) + externalResolvers := { + val explicit = buildStructure.value.units(thisProjectRef.value.build).unit.plugins.pluginData.resolvers + explicit orElse bootRepositories(appConfiguration.value) getOrElse externalResolvers.value }, + ivyConfiguration := new InlineIvyConfiguration(ivyPaths.value, externalResolvers.value, Nil, Nil, offline.value, Option(lock(appConfiguration.value)), checksums.value, Some(target.value / "resolution-cache"), streams.value.log), ivySbt <<= ivySbt0, classifiersModule <<= (projectID, sbtDependency, transitiveClassifiers, loadedBuild, thisProjectRef) map { ( pid, sbtDep, classifiers, lb, ref) => val pluginIDs: Seq[ModuleID] = lb.units(ref.build).unit.plugins.fullClasspath.flatMap(_ get moduleID.key) @@ -1006,10 +1003,10 @@ object Classpaths def projectDependenciesTask: Initialize[Task[Seq[ModuleID]]] 
= (thisProjectRef, settings, buildDependencies) map { (ref, data, deps) => - deps.classpath(ref) flatMap { dep => (projectID in dep.project) get data map { + deps.classpath(ref) flatMap { dep => (projectID in dep.project) get data map { _.copy(configurations = dep.configuration, explicitArtifacts = Nil) } } - } + } def depMap: Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] = (thisProjectRef, settings, buildDependencies, streams) flatMap { (root, data, deps, s) => @@ -1179,8 +1176,9 @@ object Classpaths } lazy val compilerPluginConfig = Seq( - scalacOptions <<= (scalacOptions, autoCompilerPlugins, update) map { (options, auto, report) => - if(auto) options ++ autoPlugins(report) else options + scalacOptions := { + val options = scalacOptions.value + if(autoCompilerPlugins.value) options ++ autoPlugins(update.value) else options } ) def substituteScalaFiles(scalaInstance: ScalaInstance, report: UpdateReport): UpdateReport = @@ -1264,16 +1262,16 @@ trait BuildExtra extends BuildCommon def addArtifact(a: Artifact, taskDef: TaskKey[File]): SettingsDefinition = { - val pkgd = packagedArtifacts <<= (packagedArtifacts, taskDef) map ( (pas,file) => pas updated (a, file) ) + val pkgd = packagedArtifacts := packagedArtifacts.value updated (a, taskDef.value) seq( artifacts += a, pkgd ) } def addArtifact(artifact: Initialize[Artifact], taskDef: Initialize[Task[File]]): SettingsDefinition = { val artLocal = SettingKey.local[Artifact] val taskLocal = TaskKey.local[File] - val art = artifacts <<= (artLocal, artifacts)( _ +: _ ) - val pkgd = packagedArtifacts <<= (packagedArtifacts, artLocal, taskLocal) map ( (pas,a,file) => pas updated (a, file)) - seq( artLocal <<= artifact, taskLocal <<= taskDef, art, pkgd ) + val art = artifacts := artLocal.value +: artifacts.value + val pkgd = packagedArtifacts := packagedArtifacts.value updated (artLocal.value, taskLocal.value) + seq( artLocal := artifact.value, taskLocal := taskDef.value, art, pkgd ) } def seq(settings: 
Setting[_]*): SettingsDefinition = new Def.SettingList(settings) diff --git a/main/EvaluateConfigurations.scala b/main/EvaluateConfigurations.scala index 8160f0a7c..87ba3570b 100644 --- a/main/EvaluateConfigurations.scala +++ b/main/EvaluateConfigurations.scala @@ -52,7 +52,10 @@ object EvaluateConfigurations def addOffsetToRange(offset: Int, ranges: Seq[(String,LineRange)]): Seq[(String,LineRange)] = ranges.map { case (s, r) => (s, r shift offset) } - val SettingsDefinitionName = classOf[SettingsDefinition].getName + val SettingsDefinitionName = { + val _ = classOf[SettingsDefinition] // this line exists to try to provide a compile-time error when the following line needs to be changed + "sbt.Def.SettingsDefinition" + } def evaluateSetting(eval: Eval, name: String, imports: Seq[(String,Int)], expression: String, range: LineRange): ClassLoader => Seq[Setting[_]] = { val result = try { diff --git a/main/Keys.scala b/main/Keys.scala index fb26359d0..2dc9861f5 100644 --- a/main/Keys.scala +++ b/main/Keys.scala @@ -320,7 +320,7 @@ object Keys val tags = SettingKey[Seq[(Tags.Tag,Int)]]("tags", ConcurrentRestrictions.tagsKey.label, BSetting) val concurrentRestrictions = SettingKey[Seq[Tags.Rule]]("concurrent-restrictions", "Rules describing restrictions on concurrent task execution.", BSetting) val cancelable = SettingKey[Boolean]("cancelable", "Enables (true) or disables (false) the ability to interrupt task execution with CTRL+C.", BMinusSetting) - val settingsData = TaskKey[Settings[Scope]]("settings-data", "Provides access to the project data for the build.", DTask) + val settingsData = std.FullInstance.settingsData @deprecated("Use Keys.settingsData.", "0.12.0") val settings = settingsData val streams = TaskKey[TaskStreams]("streams", "Provides streams for logging and persisting data.", DTask) diff --git a/main/Project.scala b/main/Project.scala index 8ff6c0796..523aacb26 100755 --- a/main/Project.scala +++ b/main/Project.scala @@ -72,6 +72,12 @@ final case class 
ClasspathDependency(project: ProjectReference, configuration: O object Project extends ProjectExtra { + @deprecated("Use Def.Setting", "0.13.0") + type Setting[T] = Def.Setting[T] + + @deprecated("Use Def.Initialize", "0.13.0") + type Initialize[T] = Def.Initialize[T] + def showContextKey(state: State): Show[ScopedKey[_]] = showContextKey(state, None) diff --git a/main/settings/Def.scala b/main/settings/Def.scala index cd4d5a50e..7895e5017 100644 --- a/main/settings/Def.scala +++ b/main/settings/Def.scala @@ -2,6 +2,7 @@ package sbt import java.io.File +/** A concrete settings system that uses `sbt.Scope` for the scope type. */ object Def extends Init[Scope] { type Classpath = Seq[Attributed[File]] @@ -9,7 +10,6 @@ object Def extends Init[Scope] val triggeredBy = AttributeKey[Seq[Task[_]]]("triggered-by") val runBefore = AttributeKey[Seq[Task[_]]]("run-before") private[sbt] val parseResult: TaskKey[Any] = TaskKey("$parse-result", "Internal: used to implement input tasks.", KeyRanks.Invisible) - // TODO: move back to Keys val resolvedScoped = SettingKey[ScopedKey[_]]("resolved-scoped", "The ScopedKey for the referencing setting or task.", KeyRanks.DSetting) lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None) @@ -34,4 +34,16 @@ object Def extends Init[Scope] case Some(c) => c + s + scala.Console.RESET case None => s } + + /** Lifts the result of a setting initialization into a Task. */ + def toITask[T](i: Initialize[T]): Initialize[Task[T]] = map(i)(std.TaskExtra.inlineTask) + + // The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to + // be used in task and setting macros as inputs with an ultimate result of type T + + import language.experimental.macros + import std.TaskMacro.MacroValue + implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ??? + implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ??? + implicit def macroValueT[T](in: Task[T]): MacroValue[T] = ??? 
} \ No newline at end of file diff --git a/main/settings/SettingMacro.scala b/main/settings/SettingMacro.scala new file mode 100644 index 000000000..875c3e4c9 --- /dev/null +++ b/main/settings/SettingMacro.scala @@ -0,0 +1,43 @@ +package sbt +package std + + import Def.{Initialize,Setting} + import Types.{idFun,Id} + import appmacro.{Convert, Instance, MixedBuilder, MonadInstance} + +object InitializeInstance extends MonadInstance +{ + type M[x] = Initialize[x] + def app[K[L[x]], Z](in: K[Initialize], f: K[Id] => Z)(implicit a: AList[K]): Initialize[Z] = Def.app[K,Z](in)(f)(a) + def map[S,T](in: Initialize[S], f: S => T): Initialize[T] = Def.map(in)(f) + def flatten[T](in: Initialize[Initialize[T]]): Initialize[T] = Def.bind(in)(idFun[Initialize[T]]) + def pure[T](t: () => T): Initialize[T] = Def.pure(t) +} +object InitializeConvert extends Convert +{ + def apply[T: c.TypeTag](c: reflect.makro.Context)(in: c.Tree): c.Tree = + if(in.tpe <:< c.typeOf[Initialize[Task[T]]] || in.tpe <:< c.typeOf[Task[T]]) + c.abort(in.pos, "A setting cannot depend on a task") + else if(in.tpe <:< c.typeOf[Initialize[T]]) + { + val i = c.Expr[Initialize[T]](in) + c.universe.reify( i.splice ).tree + } + else + c.abort(in.pos, "Unknown input type: " + in.tpe) +} + + import language.experimental.macros + import scala.reflect._ + import makro._ + +object SettingMacro +{ + def setting[T](t: T): Initialize[T] = macro settingMacroImpl[T] + def settingMacroImpl[T: c.TypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[T]] = + Instance.contImpl[T](c, InitializeInstance, InitializeConvert, MixedBuilder)(Left(t)) + + def settingDyn[T](t: Initialize[T]): Initialize[T] = macro settingDynMacroImpl[T] + def settingDynMacroImpl[T: c.TypeTag](c: Context)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] = + Instance.contImpl[T](c, InitializeInstance, InitializeConvert, MixedBuilder)(Right(t)) +} diff --git a/main/settings/Structure.scala b/main/settings/Structure.scala index 6ff75c035..66aa07fd5 100644 
--- a/main/settings/Structure.scala +++ b/main/settings/Structure.scala @@ -15,6 +15,8 @@ package sbt import Task._ import Types._ + import language.experimental.macros + sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] } /** A common type for SettingKey and TaskKey so that both can be used as inputs to tasks.*/ @@ -26,13 +28,18 @@ sealed trait ScopedTaskable[T] extends Scoped { * The scope is represented by a value of type Scope. * The name and the type are represented by a value of type AttributeKey[T]. * Instances are constructed using the companion object. */ -sealed trait SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] with Scoped.ListSetting[T, Id] +sealed trait SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] { val key: AttributeKey[T] def toTask: Initialize[Task[T]] = this apply inlineTask def scopedKey: ScopedKey[T] = ScopedKey(scope, key) def in(scope: Scope): SettingKey[T] = Scoped.scopedSetting(Scope.replaceThis(this.scope)(scope), this.key) + def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[T] = macro std.TaskMacro.settingAppend1Impl[T,U] + def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[T] = macro std.TaskMacro.settingAppendNImpl[T,U] + def <+= [V](value: Initialize[V])(implicit a: Append.Value[T, V]): Setting[T] = make(value)(a.appendValue) + def <++= [V](values: Initialize[V])(implicit a: Append.Values[T, V]): Setting[T] = make(values)(a.appendValues) + protected[this] def make[S](other: Initialize[S])(f: (T, S) => T): Setting[T] = this <<= (this, other)(f) } @@ -40,14 +47,19 @@ sealed trait SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] wit * The scope is represented by a value of type Scope. * The name and the type are represented by a value of type AttributeKey[Task[T]]. 
* Instances are constructed using the companion object. */ -sealed trait TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.ListSetting[T, Task] with Scoped.DefinableTask[T] +sealed trait TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.DefinableTask[T] { val key: AttributeKey[Task[T]] def toTask: Initialize[Task[T]] = this def scopedKey: ScopedKey[Task[T]] = ScopedKey(scope, key) def in(scope: Scope): TaskKey[T] = Scoped.scopedTask(Scope.replaceThis(this.scope)(scope), this.key) - protected[this] def make[S](other: Initialize[Task[S]])(f: (T, S) => T): Setting[Task[T]] = this <<= (this, other) { (a,b) => (a,b) map f.tupled } + def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppend1Impl[T,U] + def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppendNImpl[T,U] + def <+= [V](v: Initialize[Task[V]])(implicit a: Append.Value[T, V]): Setting[Task[T]] = make(v)(a.appendValue) + def <++= [V](vs: Initialize[Task[V]])(implicit a: Append.Values[T, V]): Setting[Task[T]] = make(vs)(a.appendValues) + + private[this] def make[S](other: Initialize[Task[S]])(f: (T, S) => T): Setting[Task[T]] = this <<= (this, other) { (a,b) => (a,b) map f.tupled } } /** Identifies an input task. An input task parses input and produces a task to run. 
@@ -86,24 +98,15 @@ object Scoped def scopedInput[T](s: Scope, k: AttributeKey[InputTask[T]]): InputKey[T] = new InputKey[T] { val scope = s; val key = k } def scopedTask[T](s: Scope, k: AttributeKey[Task[T]]): TaskKey[T] = new TaskKey[T] { val scope = s; val key = k } - sealed trait ListSetting[S, M[_]] - { - protected[this] def make[T](other: Initialize[M[T]])(f: (S, T) => S): Setting[M[S]] - protected[this] def ~=(f: S => S): Setting[M[S]] - - def <+= [V](value: Initialize[M[V]])(implicit a: Append.Value[S, V]): Setting[M[S]] = make(value)(a.appendValue) - def <++= [V](values: Initialize[M[V]])(implicit a: Append.Values[S, V]): Setting[M[S]] = make(values)(a.appendValues) - def += [U](value: => U)(implicit a: Append.Value[S, U]): Setting[M[S]] = this ~= ( v => a.appendValue(v, value) ) - def ++=[U](values: => U)(implicit a: Append.Values[S, U]): Setting[M[S]] = this ~= ( v => a.appendValues(v, values) ) - } sealed trait DefinableSetting[S] { def scopedKey: ScopedKey[S] - private[sbt] final def :==(value: S): Setting[S] = :=(value) - final def := (value: => S): Setting[S] = setting(scopedKey, Def.value(value)) + private[sbt] final def :==(value: S): Setting[S] = setting(scopedKey, Def.valueStrict(value)) + final def := (v: S): Setting[S] = macro std.TaskMacro.settingAssignMacroImpl[S] final def ~= (f: S => S): Setting[S] = Def.update(scopedKey)(f) - final def <<= (app: Initialize[S]): Setting[S] = setting(scopedKey, app) + final def <<= (app: Initialize[S]): Setting[S] = set(app) + final def set (app: Initialize[S]): Setting[S] = setting(scopedKey, app) final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) final def ? 
: Initialize[Option[S]] = Def.optional(scopedKey)(idFun) final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _ ) @@ -111,28 +114,24 @@ object Scoped } final class RichInitialize[S](init: Initialize[S]) { - @deprecated("A call to 'identity' is no longer necessary and can be removed.", "0.11.0") - final def identity: Initialize[S] = init def map[T](f: S => T): Initialize[Task[T]] = init(s => mktask(f(s)) ) def flatMap[T](f: S => Task[T]): Initialize[Task[T]] = init(f) } sealed trait DefinableTask[S] { self: TaskKey[S] => - private[sbt] def :==(value: S): Setting[Task[S]] = :=(value) - private[sbt] def ::=(value: Task[S]): Setting[Task[S]] = Def.setting(scopedKey, Def.value( value )) - def := (value: => S): Setting[Task[S]] = ::=(mktask(value)) + private[sbt] def :==(v: S): Setting[Task[S]] = ::=(constant(v)) + private[sbt] def ::=(value: Task[S]): Setting[Task[S]] = Def.setting(scopedKey, Def.valueStrict( value )) + def := (v: S): Setting[Task[S]] = macro std.TaskMacro.taskAssignMacroImpl[S] //::=(mktask(value)) private[sbt] def :== (v: SettingKey[S]): Setting[Task[S]] = <<=( v(constant)) def ~= (f: S => S): Setting[Task[S]] = Def.update(scopedKey)( _ map f ) - def <<= (app: Initialize[Task[S]]): Setting[Task[S]] = Def.setting(scopedKey, app) + def <<= (app: Initialize[Task[S]]): Setting[Task[S]] = set(app) + def set(app: Initialize[Task[S]]): Setting[Task[S]] = Def.setting(scopedKey, app) def task: SettingKey[Task[S]] = scopedSetting(scope, key) def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key) - @deprecated("A call to 'identity' is no longer necessary and can be removed.", "0.11.0") - def identity: Initialize[Task[S]] = this - def ? 
: Initialize[Task[Option[S]]] = Def.optional(scopedKey) { case None => mktask { None }; case Some(t) => t map some.fn } def ??[T >: S](or: => T): Initialize[Task[T]] = Def.optional(scopedKey)( _ getOrElse mktask(or) ) def or[T >: S](i: Initialize[Task[T]]): Initialize[Task[T]] = (this.? zipWith i)( (x,y) => (x, y) map { case (a,b) => a getOrElse b}) @@ -246,7 +245,6 @@ object Scoped def flatFailure[T](f: Seq[Incomplete] => Task[T]): App[T] = onTasks(_ flatFailure f) def mapFailure[T](f: Seq[Incomplete] => T): App[T] = onTasks(_ mapFailure f) } - type :@:[H, T <: KList[ScopedTaskable]] = KCons[H, T, ScopedTaskable] type ST[X] = ScopedTaskable[X] final class RichTaskable2[A,B](t2: (ST[A], ST[B])) extends RichTaskables[ AList.T2K[A,B]#l ](t2)(AList.tuple2[A,B]) { diff --git a/main/settings/TaskMacro.scala b/main/settings/TaskMacro.scala new file mode 100644 index 000000000..52539ea91 --- /dev/null +++ b/main/settings/TaskMacro.scala @@ -0,0 +1,185 @@ +package sbt +package std + + import Def.{Initialize,Setting} + import Types.{idFun,Id} + import TaskExtra.allM + import appmacro.{Convert, InputWrapper, Instance, MixedBuilder, MonadInstance} + + import language.experimental.macros + import scala.reflect._ + import makro._ + +/** Instance for the monad/applicative functor for plain Tasks. 
*/ +object TaskInstance extends MonadInstance +{ + import TaskExtra._ + + final type M[x] = Task[x] + def app[K[L[x]], Z](in: K[Task], f: K[Id] => Z)(implicit a: AList[K]): Task[Z] = new Mapped[Z,K](in, f compose allM, a) + def map[S,T](in: Task[S], f: S => T): Task[T] = in map f + def flatten[T](in: Task[Task[T]]): Task[T] = in flatMap idFun[Task[T]] + def pure[T](t: () => T): Task[T] = toTask(t) +} + +/** Composes the Task and Initialize Instances to provide an Instance for [T] Initialize[Task[T]].*/ +object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance) with MonadInstance +{ + type SS = sbt.Settings[Scope] + val settingsData = TaskKey[SS]("settings-data", "Provides access to the project data for the build.", KeyRanks.DTask) + + def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = + { + import Scoped._ + (in,settingsData) apply{ + (a: Task[Initialize[Task[T]]], data: Task[SS]) => + import TaskExtra.multT2Task + (a, data) flatMap { case (a,d) => a evaluate d } + } + } +} +/** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/ +object FullConvert extends Convert +{ + def apply[T: c.TypeTag](c: Context)(in: c.Tree): c.Tree = + if(in.tpe <:< c.typeOf[Initialize[Task[T]]]) + in + else if(in.tpe <:< c.typeOf[Initialize[T]]) + { + val i = c.Expr[Initialize[T]](in) + c.universe.reify( Def.toITask(i.splice) ).tree + } + else if(in.tpe <:< c.typeOf[Task[T]]) + { + val i = c.Expr[Task[T]](in) + c.universe.reify( Def.valueStrict[Task[T]](i.splice) ).tree + } + else + c.abort(in.pos, "Unknown input type: " + in.tpe) +} + +object TaskMacro +{ + final val AssignInitName = "<<=" + final val Append1InitName = "<+=" + final val AppendNInitName = "<++=" + + def task[T](t: T): Initialize[Task[T]] = macro taskMacroImpl[T] + def taskMacroImpl[T: c.TypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] = + 
Instance.contImpl[T](c, FullInstance, FullConvert, MixedBuilder)(Left(t)) + + def taskDyn[T](t: Initialize[Task[T]]): Initialize[Task[T]] = macro taskDynMacroImpl[T] + def taskDynMacroImpl[T: c.TypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] = + Instance.contImpl[T](c, FullInstance, FullConvert, MixedBuilder)(Right(t)) + + /** Implementation of := macro for settings. */ + def settingAssignMacroImpl[T: c.TypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[T](c)(v) + val assign = transformMacroImpl(c)( init.tree )( AssignInitName ) + c.Expr[Setting[T]]( assign ) + } + /** Implementation of := macro for tasks. */ + def taskAssignMacroImpl[T: c.TypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[T](c)(v) + val assign = transformMacroImpl(c)( init.tree )( AssignInitName ) + c.Expr[Setting[Task[T]]]( assign ) + } + /** Implementation of += macro for tasks. */ + def taskAppend1Impl[T: c.TypeTag, U: c.TypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[U](c)(v) + val assign = appendMacroImpl(c)( init.tree, a.tree )( Append1InitName ) + c.Expr[Setting[Task[T]]]( assign ) + } + /** Implementation of += macro for settings. */ + def settingAppend1Impl[T: c.TypeTag, U: c.TypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[U](c)(v) + val assign = appendMacroImpl(c)( init.tree, a.tree )( Append1InitName ) + c.Expr[Setting[T]]( assign ) + } + /** Implementation of ++= macro for tasks. 
*/ + def taskAppendNImpl[T: c.TypeTag, U: c.TypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[U](c)(vs) + val assign = appendMacroImpl(c)( init.tree, a.tree )( AppendNInitName ) + c.Expr[Setting[Task[T]]]( assign ) + } + /** Implementation of ++= macro for settings. */ + def settingAppendNImpl[T: c.TypeTag, U: c.TypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[U](c)(vs) + val assign = appendMacroImpl(c)( init.tree, a.tree )( AppendNInitName ) + c.Expr[Setting[T]]( assign ) + } + + private[this] def appendMacroImpl(c: Context)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = + { + import c.universe.{Apply,newTermName,Select,TypeApply} + c.macroApplication match { + case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), a) => + Apply(Apply(TypeApply(Select(preT, newTermName(newName).encodedName), targs), init :: Nil), a) + case x => unexpectedTree(x) + } + } + private[this] def transformMacroImpl(c: Context)(init: c.Tree)(newName: String): c.Tree = + { + import c.universe.{Apply,newTermName,Select} + val target = + c.macroApplication match { + case Apply(Select(prefix, _), _) => prefix + case x => unexpectedTree(x) + } + Apply.apply(Select(target, newTermName(newName).encodedName), init :: Nil) + } + private[this] def unexpectedTree[C <: Context](tree: C#Tree): Nothing = error("Unexpected macro application tree (" + tree.getClass + "): " + tree) + + sealed abstract class MacroValue[T] { + def value: T = macro std.TaskMacro.valueMacroImpl[T] + } + + def valueMacroImpl[T: c.TypeTag](c: Context): c.Expr[T] = + { + import c.universe._ + c.macroApplication match { + case Select(Apply(_, t :: Nil), _) => wrap[T](c)(t, implicitly[c.TypeTag[T]]) + case x => unexpectedTree(x) + } + } + private[this] def wrap[T](c: Context)(t: c.Tree, tag: c.TypeTag[T]): c.Expr[T] = + { + val rc = 
c.asInstanceOf[scala.reflect.makro.runtime.Context] + val tree = rc.universe.TypeTree().setType(tag.tpe.asInstanceOf[rc.universe.Type]) + val cleaned = rc.callsiteTyper.packedType(tree, rc.universe.NoSymbol).asInstanceOf[c.universe.Type] + implicit val t1 = c.TypeTag[T](cleaned) + val ts = c.Expr[Any](t) + c.universe.reify { InputWrapper.wrap[T](ts.splice) } + } +} + +/** Convert instance for plain `Task`s not within the settings system. +* This is not used for the main task/setting macros, but could be used when manipulating plain Tasks.*/ +object TaskConvert extends Convert +{ + def apply[T: c.TypeTag](c: Context)(in: c.Tree): c.Tree = + if(in.tpe <:< c.typeOf[Task[T]]) + { + val i = c.Expr[Task[T]](in) + c.universe.reify( i.splice ).tree + } + else + c.abort(in.pos, "Unknown input type: " + in.tpe) +} + +object PlainTaskMacro +{ + def task[T](t: T): Task[T] = macro taskImpl[T] + def taskImpl[T: c.TypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = + Instance.contImpl[T](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t)) + + def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T] + def taskDynImpl[T: c.TypeTag](c: Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] = + Instance.contImpl[T](c, TaskInstance, TaskConvert, MixedBuilder)(Right(t)) +} diff --git a/main/settings/src/test/scala/UsageTest.scala b/main/settings/src/test/scala/UsageTest.scala new file mode 100644 index 000000000..ed05480d3 --- /dev/null +++ b/main/settings/src/test/scala/UsageTest.scala @@ -0,0 +1,38 @@ +package sbt +package std + +object UseTask +{ + import Def._ + import TaskMacro.{task, taskDyn} + + val set = SettingMacro setting { 23 } + val plain = PlainTaskMacro task { 19 } + + val x = task { set.value } + val y = task { true } + val z = task { if(y.value) x.value else plain.value } + val a = taskDyn { + if(y.value) z else x + } +} +object Assign +{ + import java.io.File + import Def.macroValueT + import UseTask.{x,y,z,a,set,plain} + + val ak = TaskKey[Int]("a") + val bk = 
TaskKey[Seq[Int]]("b") + val ck = SettingKey[File]("c") + val sk = TaskKey[Set[_]]("s") + + def azy = sk.value + + val settings = Seq( + ak += z.value + (if(y.value) set.value else plain.value), + bk ++= Seq(z.value), + ck := new File(ck.value, "asdf"), + ak := sk.value.size + sk.value.size + ) +} \ No newline at end of file diff --git a/project/Sbt.scala b/project/Sbt.scala index 65481f3ef..37ff6b541 100644 --- a/project/Sbt.scala +++ b/project/Sbt.scala @@ -16,7 +16,7 @@ object Sbt extends Build organization := "org.scala-sbt", version := "0.13.0-SNAPSHOT", publishArtifact in packageDoc := false, - scalaVersion := "2.10.0-M5", + scalaVersion := "2.10.0-M6", publishMavenStyle := false, componentID := None, crossPaths := false, @@ -53,6 +53,7 @@ object Sbt extends Build lazy val controlSub = baseProject(utilPath / "control", "Control") lazy val collectionSub = testedBaseProject(utilPath / "collection", "Collections") settings( Util.keywordsSettings: _* ) + lazy val applyMacroSub = testedBaseProject(utilPath / "appmacro", "Apply Macro") dependsOn(collectionSub) settings(scalaCompiler) // The API for forking, combining, and doing I/O with system processes lazy val processSub = baseProject(utilPath / "process", "Process") dependsOn(ioSub % "test->test") // Path, IO (formerly FileUtilities), NameFilter and other I/O utility classes @@ -121,13 +122,13 @@ object Sbt extends Build interfaceSub, ioSub, ivySub, logSub, processSub, runSub, relationSub, stdTaskSub, taskSub, trackingSub, testingSub) // General command support and core commands not specific to a build system - lazy val commandSub = testedBaseProject(commandPath, "Command") dependsOn(interfaceSub, ioSub, launchInterfaceSub, logSub, completeSub, classpathSub) + lazy val commandSub = testedBaseProject(mainPath / "command", "Command") dependsOn(interfaceSub, ioSub, launchInterfaceSub, logSub, completeSub, classpathSub) // Fixes scope=Scope for Setting (core defined in collectionSub) to define the settings system 
used in build definitions - lazy val settingsSub = testedBaseProject(settingsPath, "Settings") dependsOn(interfaceSub, ivySub, relationSub, logSub, ioSub, commandSub, completeSub, - classpathSub, stdTaskSub, processSub) settings( sbinary ) + lazy val mainSettingsSub = testedBaseProject(mainPath / "settings", "Main Settings") dependsOn(applyMacroSub, interfaceSub, ivySub, relationSub, logSub, ioSub, commandSub, + completeSub, classpathSub, stdTaskSub, processSub) settings( sbinary ) // The main integration project for sbt. It brings all of the subsystems together, configures them, and provides for overriding conventions. - lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, settingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, processSub, runSub, commandSub) + lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn(actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, processSub, runSub, commandSub) // Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object // technically, we need a dependency on all of mainSub's dependencies, but we don't do that since this is strictly an integration project @@ -142,8 +143,6 @@ object Sbt extends Build def utilPath = file("util") def compilePath = file("compile") def mainPath = file("main") - def commandPath = mainPath / "command" - def settingsPath = mainPath / "settings" def scriptedPath = file("scripted") def sbtSettings = Seq( diff --git a/sbt/src/sbt-test/project/flatten/project/Flat.scala b/sbt/src/sbt-test/project/flatten/project/Flat.scala index 7d46ee4e4..032624155 100644 --- a/sbt/src/sbt-test/project/flatten/project/Flat.scala +++ b/sbt/src/sbt-test/project/flatten/project/Flat.scala @@ -17,12 +17,10 @@ object Flat extends Build def forConfig(conf: Configuration, name: String) = Project.inConfig(conf)( unpackageSettings(name) ) def unpackageSettings(name: String) = Seq( - 
unmanagedSourceDirectories <<= baseDirectory( base => (base / name) :: Nil ), - defaultExcludes in unmanagedResources <<= sourceFilter.identity, - unmanagedResourceDirectories <<= unmanagedSourceDirectories.identity, - unpackage <<= (artifactPath in packageSrc, baseDirectory) map { (jar, base) => - IO.unzip(jar, base / name) - } + unmanagedSourceDirectories := (baseDirectory.value / name) :: Nil, + defaultExcludes in unmanagedResources := sourceFilter.value, + unmanagedResourceDirectories := unmanagedSourceDirectories.value, + unpackage := IO.unzip(artifactPath in packageSrc value, baseDirectory.value / name) ) val unpackage = TaskKey[Unit]("unpackage") diff --git a/util/appmacro/ContextUtil.scala b/util/appmacro/ContextUtil.scala new file mode 100644 index 000000000..31f61c356 --- /dev/null +++ b/util/appmacro/ContextUtil.scala @@ -0,0 +1,104 @@ +package sbt +package appmacro + + import scala.reflect._ + import makro._ + import scala.tools.nsc.Global + +object ContextUtil { + /** Constructs an object with utility methods for operating in the provided macro context `c`. + * Callers should explicitly specify the type parameter as `c.type` in order to preserve the path dependent types. */ + def apply[C <: Context with Singleton](c: C): ContextUtil[C] = new ContextUtil(c) +} + +/** Utility methods for macros. Several methods assume that the context's universe is a full compiler (`scala.tools.nsc.Global`). +* This is not thread safe due to the underlying Context and related data structures not being thread safe. +* Use `ContextUtil[c.type](c)` to construct. 
*/ +final class ContextUtil[C <: Context with Singleton](val ctx: C) +{ + import ctx.universe.{Apply=>ApplyTree,_} + + val alistType = ctx.typeOf[AList[KList]] + val alist: Symbol = alistType.typeSymbol.companionSymbol + val alistTC: Type = alistType.typeConstructor + + /** Modifiers for a local val.*/ + val localModifiers = Modifiers(NoFlags) + + def getPos(sym: Symbol) = if(sym eq null) NoPosition else sym.pos + + /** Constructs a unique term name with the given prefix within this Context. + * (The current implementation uses Context.fresh, which increments*/ + def freshTermName(prefix: String) = newTermName(ctx.fresh("$" + prefix)) + + def typeTree(tpe: Type) = TypeTree().setType(tpe) + + /** Constructs a new, local ValDef with the given Type, a unique name, + * the same position as `sym`, and an empty implementation (no rhs). */ + def freshValDef(tpe: Type, sym: Symbol): ValDef = + { + val vd = localValDef(typeTree(tpe), EmptyTree) + vd setPos getPos(sym) + vd + } + + /** Constructs a ValDef with local modifiers and a unique name. */ + def localValDef(tpt: Tree, rhs: Tree): ValDef = + ValDef(localModifiers, freshTermName("q"), tpt, rhs) + + /** Constructs a tuple value of the right TupleN type from the provided inputs.*/ + def mkTuple(args: List[Tree]): Tree = + { + val global: Global = ctx.universe.asInstanceOf[Global] + global.gen.mkTuple(args.asInstanceOf[List[global.Tree]]).asInstanceOf[ctx.universe.Tree] + } + + /** Creates a new, synthetic type variable with the specified `owner`. */ + def newTypeVariable(owner: Symbol): Symbol = + { + val global: Global = ctx.universe.asInstanceOf[Global] + owner.asInstanceOf[global.Symbol].newSyntheticTypeParam().asInstanceOf[ctx.universe.Symbol] + } + /** The type representing the type constructor `[X] X` */ + val idTC: Type = + { + val tvar = newTypeVariable(NoSymbol) + polyType(tvar :: Nil, refVar(tvar)) + } + /** Constructs a new, synthetic type variable that is a type constructor. 
For example, in type Y[L[x]], L is such a type variable. */ + def newTCVariable(owner: Symbol): Symbol = + { + val global: Global = ctx.universe.asInstanceOf[Global] + val tc = owner.asInstanceOf[global.Symbol].newSyntheticTypeParam() + val arg = tc.newSyntheticTypeParam("x", 0L) + tc.setInfo(global.PolyType(arg :: Nil, global.TypeBounds.empty)).asInstanceOf[ctx.universe.Symbol] + } + /** Returns the Symbol that references the statically accessible singleton `i`. */ + def singleton[T <: AnyRef with Singleton](i: T)(implicit it: ctx.TypeTag[i.type]): Symbol = + it.tpe match { + case SingleType(_, sym) if !sym.isFreeTerm && sym.isStatic => sym + case x => error("Instance must be static (was " + x + ").") + } + /** Constructs a Type that references the given type variable. */ + def refVar(variable: Symbol): Type = typeRef(NoPrefix, variable, Nil) + + /** Returns the symbol for the non-private method named `name` for the class/module `obj`. */ + def method(obj: Symbol, name: String): Symbol = { + val global: Global = ctx.universe.asInstanceOf[Global] + obj.asInstanceOf[global.Symbol].info.nonPrivateMember(global.newTermName(name)).asInstanceOf[ctx.universe.Symbol] + } + + /** Returns a Type representing the type constructor tcp.. For example, given + * `object Demo { type M[x] = List[x] }`, the call `extractTC(Demo, "M")` will return a type representing + * the type constructor `[x] List[x]`. 
+ **/ + def extractTC(tcp: AnyRef with Singleton, name: String)(implicit it: ctx.TypeTag[tcp.type]): ctx.Type = + { + val global: Global = ctx.universe.asInstanceOf[Global] + val itTpe = it.tpe.asInstanceOf[global.Type] + val m = itTpe.nonPrivateMember(global.newTypeName(name)) + val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type] + assert(tc != NoType && tc.isHigherKinded, "Invalid type constructor: " + tc) + tc + } +} \ No newline at end of file diff --git a/util/appmacro/Instance.scala b/util/appmacro/Instance.scala new file mode 100644 index 000000000..05a80b4e8 --- /dev/null +++ b/util/appmacro/Instance.scala @@ -0,0 +1,260 @@ +package sbt +package appmacro + + import Classes.Applicative + import Types.Id + +/** The separate hierarchy from Applicative/Monad is for two reasons. +* +* 1. The type constructor is represented as an abstract type because a TypeTag cannot represent a type constructor directly. +* 2. The applicative interface is uncurried. +*/ +trait Instance +{ + type M[x] + def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z] + def map[S,T](in: M[S], f: S => T): M[T] + def pure[T](t: () => T): M[T] +} +trait Convert +{ + def apply[T: c.TypeTag](c: scala.reflect.makro.Context)(in: c.Tree): c.Tree +} +trait MonadInstance extends Instance +{ + def flatten[T](in: M[M[T]]): M[T] +} +object InputWrapper +{ + def wrap[T](in: Any): T = error("This method is an implementation detail and should not be referenced.") +} + + import scala.reflect._ + import makro._ + +object Instance +{ + final val DynamicDependencyError = "Illegal dynamic dependency." + final val DynamicReferenceError = "Illegal dynamic reference." 
+ final val ApplyName = "app" + final val FlattenName = "flatten" + final val PureName = "pure" + final val MapName = "map" + final val InstanceTCName = "M" + final val WrapName = "wrap" + + final class Input[U <: Universe with Singleton](val tpe: U#Type, val expr: U#Tree, val local: U#ValDef) + + /** Implementation of a macro that provides a direct syntax for applicative functors and monads. + * It is intended to be used in conjunction with another macro that conditions the inputs. + * + * This method processes the Tree `t` to find inputs of the form `InputWrapper.wrap[T]( input )` + * This form is typically constructed by another macro that pretends to be able to get a value of type `T` + * from a value convertible to `M[T]`. This `wrap(input)` form has two main purposes. + * First, it identifies the inputs that should be transformed. + * Second, it allows the input trees to be wrapped for later conversion into the appropriate `M[T]` type by `convert`. + * This wrapping is necessary because applying the first macro must preserve the original type, + * but it is useful to delay conversion until the outer, second macro is called. The `wrap` method accomplishes this by + * allowing the original `Tree` and `Type` to be hidden behind the raw `T` type. This method will remove the call to `wrap` + * so that it is not actually called at runtime. + * + * Each `input` in each expression of the form `InputWrapper.wrap[T]( input )` is transformed by `convert`. + * This transformation converts the input Tree to a Tree of type `M[T]`. + * The original wrapped expression `wrap(input)` is replaced by a reference to a new local `val $x: T`, where `$x` is a fresh name. + * These converted inputs are passed to `builder` as well as the list of these synthetic `ValDef`s. + * The `TupleBuilder` instance constructs a tuple (Tree) from the inputs and defines the right hand side of the vals + * that unpacks the tuple containing the results of the inputs. 
+ * 
+ * The constructed tuple of inputs and the code that unpacks the results of the inputs are then passed to the `i`, 
+ * which is an implementation of `Instance` that is statically accessible. 
+ * An Instance defines an applicative functor associated with a specific type constructor and, if it implements MonadInstance as well, a monad. 
+ * Typically, it will be either a top-level module or a stable member of a top-level module (such as a val or a nested module). 
+ * The `with Singleton` part of the type verifies some cases at macro compilation time, 
+ * while the full check for static accessibility is done at macro expansion time. 
+ * Note: Ideally, the types would verify that `i: MonadInstance` when `t.isRight`. 
+ * With the various dependent types involved, this is not worth it. 
+ * 
+ * The `t` argument is the argument of the macro that will be transformed as described above. 
+ * If the macro that calls this method is for a multi-input map (app followed by map), 
+ * `t` should be the argument wrapped in Left. 
+ * If this is for multi-input flatMap (app followed by flatMap), 
+ * this should be the argument wrapped in Right. 
+ */ + def contImpl[T: c.TypeTag](c: Context, i: Instance with Singleton, convert: Convert, builder: TupleBuilder)(t: Either[c.Expr[T], c.Expr[i.M[T]]])( + implicit tt: c.TypeTag[T], mt: c.TypeTag[i.M[T]], it: c.TypeTag[i.type]): c.Expr[i.M[T]] = + { + import c.universe.{Apply=>ApplyTree,_} + + import scala.tools.nsc.Global + // Used to access compiler methods not yet exposed via the reflection/macro APIs + val global: Global = c.universe.asInstanceOf[Global] + + val util = ContextUtil[c.type](c) + val mTC: Type = util.extractTC(i, InstanceTCName) + + // the tree for the macro argument + val (tree, treeType) = t match { + case Left(l) => (l.tree, tt.tpe.normalize) + case Right(r) => (r.tree, mt.tpe.normalize) + } + + val instanceSym = util.singleton(i) + // A Tree that references the statically accessible Instance that provides the actual implementations of map, flatMap, ... + val instance = Ident(instanceSym) + + val parameterModifiers = Modifiers(Flag.PARAM) + + val wrapperSym = util.singleton(InputWrapper) + val wrapMethodSymbol = util.method(wrapperSym, WrapName) + def isWrapper(fun: Tree) = fun.symbol == wrapMethodSymbol + + type In = Input[c.universe.type] + var inputs = List[In]() + + // constructs a ValDef with a parameter modifier, a unique name, with the provided Type and with an empty rhs + def freshMethodParameter(tpe: Type): ValDef = + ValDef(parameterModifiers, freshTermName("p"), typeTree(tpe), EmptyTree) + + def freshTermName(prefix: String) = newTermName(c.fresh("$" + prefix)) + def typeTree(tpe: Type) = TypeTree().setType(tpe) + + // constructs a function that applies f to each subtree of the input tree + def visitor(f: Tree => Unit): Tree => Unit = + { + val v: Transformer = new Transformer { + override def transform(tree: Tree): Tree = { f(tree); super.transform(tree) } + } + (tree: Tree) => v.transform(tree) + } + + /* Local definitions in the macro. 
This is used to ensure + * references are to M instances defined outside of the macro call.*/ + val defs = new collection.mutable.HashSet[Symbol] + + // a reference is illegal if it is to an M instance defined within the scope of the macro call + def illegalReference(sym: Symbol): Boolean = + sym != null && sym != NoSymbol && defs.contains(sym) + + // a function that checks the provided tree for illegal references to M instances defined in the + // expression passed to the macro and for illegal dereferencing of M instances. + val checkQual = visitor { + case s @ ApplyTree(fun, qual :: Nil) => if(isWrapper(fun)) c.error(s.pos, DynamicDependencyError) + case id @ Ident(name) if illegalReference(id.symbol) => c.error(id.pos, DynamicReferenceError) + case _ => () + } + // adds the symbols for all non-Ident subtrees to `defs`. + val defSearch = visitor { + case _: Ident => () + case tree => if(tree.symbol ne null) defs += tree.symbol; + } + + // transforms the original tree into calls to the Instance functions pure, map, ..., + // resulting in a value of type M[T] + def makeApp(body: Tree): Tree = + inputs match { + case Nil => pure(body) + case x :: Nil => single(body, x) + case xs => arbArity(body, xs) + } + + // no inputs, so construct M[T] via Instance.pure or pure+flatten + def pure(body: Tree): Tree = + { + val typeApplied = TypeApply(Select(instance, PureName), typeTree(treeType) :: Nil) + val p = ApplyTree(typeApplied, Function(Nil, body) :: Nil) + if(t.isLeft) p else flatten(p) + } + // m should have type M[M[T]] + // the returned Tree will have type M[T] + def flatten(m: Tree): Tree = + { + val typedFlatten = TypeApply(Select(instance, FlattenName), typeTree(tt.tpe) :: Nil) + ApplyTree(typedFlatten, m :: Nil) + } + + // calls Instance.map or flatmap directly, skipping the intermediate Instance.app that is unnecessary for a single input + def single(body: Tree, input: In): Tree = + { + val variable = input.local + val param = ValDef(parameterModifiers, 
variable.name, variable.tpt, EmptyTree) + val typeApplied = TypeApply(Select(instance, MapName), variable.tpt :: typeTree(treeType) :: Nil) + val mapped = ApplyTree(typeApplied, input.expr :: Function(param :: Nil, body) :: Nil) + if(t.isLeft) mapped else flatten(mapped) + } + + // calls Instance.app to get the values for all inputs and then calls Instance.map or flatMap to evaluate the body + def arbArity(body: Tree, inputs: List[In]): Tree = + { + val result = builder.make(c)(mTC, inputs) + val param = freshMethodParameter( appliedType(result.representationC, util.idTC :: Nil) ) + val bindings = result.extract(param) + val f = Function(param :: Nil, Block(bindings, body)) + val ttt = typeTree(treeType) + val typedApp = TypeApply(Select(instance, ApplyName), typeTree(result.representationC) :: ttt :: Nil) + val app = ApplyTree(ApplyTree(typedApp, result.input :: f :: Nil), result.alistInstance :: Nil) + if(t.isLeft) app else flatten(app) + } + + // called when transforming the tree to add an input + // for `qual` of type M[A], and a selection qual.value, + // the call is addType(Type A, Tree qual) + // the result is a Tree representing a reference to + // the bound value of the input + def addType(tpe: Type, qual: Tree): Tree = + { + checkQual(qual) + val vd = util.freshValDef(tpe, qual.symbol) + inputs ::= new Input(tpe, qual, vd) + Ident(vd.name) + } + + // the main tree transformer that replaces calls to InputWrapper.wrap(x) with + // plain Idents that reference the actual input value + object appTransformer extends Transformer + { + override def transform(tree: Tree): Tree = + tree match + { + case ApplyTree(TypeApply(fun, t :: Nil), qual :: Nil) if isWrapper(fun) => + val tag = c.TypeTag(t.tpe) + addType(t.tpe, convert(c)(qual)(tag) ) + case _ => super.transform(tree) + } + } + + // collects all definitions in the tree. 
used for finding illegal references + defSearch(tree) + + // applies the transformation + // resetting attributes: a) must be local b) must be done + // on the transformed tree and not the wrapped tree or else there are obscure errors + val tr = makeApp( c.resetLocalAttrs(appTransformer.transform(tree)) ) + c.Expr[i.M[T]](tr) + } + + import Types._ + + implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance { type M[x] = A[x] } = new Instance + { + type M[x] = A[x] + def app[ K[L[x]], Z ](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A,Z](in, f) + def map[S,T](in: A[S], f: S => T) = ap.map(f, in) + def pure[S](s: () => S): M[S] = ap.pure(s()) + } + + type AI[A[_]] = Instance { type M[x] = A[x] } + def compose[A[_], B[_]](implicit a: AI[A], b: AI[B]): Instance { type M[x] = A[B[x]] } = new Composed[A,B](a,b) + // made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object + class Composed[A[_], B[_]](a: AI[A], b: AI[B]) extends Instance + { + type M[x] = A[B[x]] + def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s)) + def map[S,T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f)) + def app[ K[L[x]], Z ](in: K[M], f: K[Id] => Z)(implicit alist: AList[K]): A[B[Z]] = + { + val g: K[B] => B[Z] = in => b.app[K, Z](in, f) + type Split[ L[x] ] = K[ (L ∙ B)#l ] + a.app[Split, B[Z]](in, g)(AList.asplit(alist)) + } + } +} diff --git a/util/appmacro/KListBuilder.scala b/util/appmacro/KListBuilder.scala new file mode 100644 index 000000000..5b658ea69 --- /dev/null +++ b/util/appmacro/KListBuilder.scala @@ -0,0 +1,58 @@ +package sbt +package appmacro + + import Types.Id + import scala.tools.nsc.Global + import scala.reflect._ + import makro._ + +/** A `TupleBuilder` that uses a KList as the tuple representation.*/ +object KListBuilder extends TupleBuilder +{ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new 
BuilderResult[c.type] + { + val ctx: c.type = c + val util = ContextUtil[c.type](c) + import c.universe.{Apply=>ApplyTree,_} + import util._ + + val knilType = c.typeOf[KNil] + val knil = Ident(knilType.typeSymbol.companionSymbol) + val kconsTpe = c.typeOf[KCons[Int,KNil,List]] + val kcons = kconsTpe.typeSymbol.companionSymbol + val mTC: Type = mt.asInstanceOf[c.universe.Type] + val kconsTC: Type = kconsTpe.typeConstructor + + /** This is the L in the type function [L[x]] ... */ + val tcVariable: Symbol = newTCVariable(NoSymbol) + + /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */ + def kconsType(h: Type, t: Type): Type = + appliedType(kconsTC, h :: t :: refVar(tcVariable) :: Nil) + + def bindKList(prev: ValDef, revBindings: List[ValDef], params: List[ValDef]): List[ValDef] = + params match + { + case ValDef(mods, name, tpt, _) :: xs => + val head = ValDef(mods, name, tpt, Select(Ident(prev.name), "head")) + val tail = localValDef(TypeTree(), Select(Ident(prev.name), "tail")) + val base = head :: revBindings + bindKList(tail, if(xs.isEmpty) base else tail :: base, xs) + case Nil => revBindings.reverse + } + + /** The input trees combined in a KList */ + val klist = (inputs :\ (knil: Tree))( (in, klist) => ApplyTree(kcons, in.expr, klist) ) + + /** The input types combined in a KList type. The main concern is tracking the heterogeneous types. + * The type constructor is tcVariable, so that it can be applied to [X] X or M later. + * When applied to `M`, this type gives the type of the `input` KList. 
*/ + val klistType: Type = (inputs :\ knilType)( (in, klist) => kconsType(in.tpe, klist) ) + + val representationC = PolyType(tcVariable :: Nil, klistType) + val resultType = appliedType(representationC, idTC :: Nil) + val input = klist + val alistInstance = TypeApply(Select(Ident(alist), "klist"), typeTree(representationC) :: Nil) + def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local)) + } +} \ No newline at end of file diff --git a/util/appmacro/MixedBuilder.scala b/util/appmacro/MixedBuilder.scala new file mode 100644 index 000000000..593f60382 --- /dev/null +++ b/util/appmacro/MixedBuilder.scala @@ -0,0 +1,16 @@ +package sbt +package appmacro + + import scala.reflect._ + import makro._ + +/** A builder that uses `TupleN` as the representation for small numbers of inputs (up to `TupleNBuilder.MaxInputs`) +* and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.*/ +object MixedBuilder extends TupleBuilder +{ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = + { + val delegate = if(inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder + delegate.make(c)(mt, inputs) + } +} \ No newline at end of file diff --git a/util/appmacro/TupleBuilder.scala b/util/appmacro/TupleBuilder.scala new file mode 100644 index 000000000..f91d3c91c --- /dev/null +++ b/util/appmacro/TupleBuilder.scala @@ -0,0 +1,56 @@ +package sbt +package appmacro + + import Types.Id + import scala.tools.nsc.Global + import scala.reflect._ + import makro._ + +/** +* A `TupleBuilder` abstracts the work of constructing a tuple data structure such as a `TupleN` or `KList` +* and extracting values from it. The `Instance` macro implementation will (roughly) traverse the tree of its argument +* and ultimately obtain a list of expressions with type `M[T]` for different types `T`. 
+* The macro constructs an `Input` value for each of these expressions that contains the `Type` for `T`, +* the `Tree` for the expression, and a `ValDef` that will hold the value for the input. +* +* `TupleBuilder.make` is provided with the list of `Input`s and is expected to provide three values in the returned BuilderResult. +* First, it returns the constructed tuple data structure Tree in `input`. +* Next, it provides the type constructor `representationC` that, when applied to M, gives the type of tuple data structure. +* For example, a builder that constructs a `Tuple3` for inputs `M[Int]`, `M[Boolean]`, and `M[String]` +* would provide a Type representing `[L[x]] (L[Int], L[Boolean], L[String])`. The `input` method +* would return a value whose type is that type constructor applied to M, or `(M[Int], M[Boolean], M[String])`. +* +* Finally, the `extract` method provides a list of vals that extract information from the applied input. +* The type of the applied input is the type constructor applied to `Id` (`[X] X`). +* The returned list of ValDefs should be the ValDefs from `inputs`, but with non-empty right-hand sides. +*/ +trait TupleBuilder { + /** A convenience alias for a list of inputs (associated with a Universe of type U). */ + type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]] + + /** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */ + def make(c: Context)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] +} + +trait BuilderResult[C <: Context with Singleton] +{ + val ctx: C + import ctx.universe._ + + /** Represents the higher-order type constructor `[L[x]] ...` where `...` is the + * type of the data structure containing the added expressions, + * except that it is abstracted over the type constructor applied to each heterogeneous part of the type. */ + def representationC: PolyType + + /** The instance of AList for the input.
For a `representationC` of `[L[x]]`, this `Tree` should have a `Type` of `AList[L]`*/ + def alistInstance: Tree + + /** Returns the completed value containing all expressions added to the builder. */ + def input: Tree + + /* The list of definitions that extract values from a value of type `$representationC[Id]`. + * The returned value should be identical to the `ValDef`s provided to the `TupleBuilder.make` method but with + * non-empty right hand sides. Each `ValDef` may refer to `param` and previous `ValDef`s in the list.*/ + def extract(param: ValDef): List[ValDef] +} + diff --git a/util/appmacro/TupleNBuilder.scala b/util/appmacro/TupleNBuilder.scala new file mode 100644 index 000000000..ddf312f1b --- /dev/null +++ b/util/appmacro/TupleNBuilder.scala @@ -0,0 +1,51 @@ +package sbt +package appmacro + + import Types.Id + import scala.tools.nsc.Global + import scala.reflect._ + import makro._ + +/** A builder that uses a TupleN as the tuple representation. +* It is limited to tuples of size 2 to `MaxInputs`. */ +object TupleNBuilder extends TupleBuilder +{ + /** The largest number of inputs that this builder can handle. 
*/ + final val MaxInputs = 11 + final val TupleMethodName = "tuple" + + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] + { + val util = ContextUtil[c.type](c) + import c.universe.{Apply=>ApplyTree,_} + import util._ + + val global: Global = c.universe.asInstanceOf[Global] + val mTC: Type = mt.asInstanceOf[c.universe.Type] + + val ctx: c.type = c + val representationC: PolyType = { + val tcVariable: Symbol = newTCVariable(NoSymbol) + val tupleTypeArgs = inputs.map(in => typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) + val tuple = global.definitions.tupleType(tupleTypeArgs) + PolyType(tcVariable :: Nil, tuple.asInstanceOf[Type] ) + } + val resultType = appliedType(representationC, idTC :: Nil) + + val input: Tree = mkTuple(inputs.map(_.expr)) + val alistInstance: Tree = { + val select = Select(Ident(alist), TupleMethodName + inputs.size.toString) + TypeApply(select, inputs.map(in => typeTree(in.tpe))) + } + def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1) + + def bindTuple(param: ValDef, revBindings: List[ValDef], params: List[ValDef], i: Int): List[ValDef] = + params match + { + case ValDef(mods, name, tpt, _) :: xs => + val x = ValDef(mods, name, tpt, Select(Ident(param.name), "_" + i.toString)) + bindTuple(param, x :: revBindings, xs, i+1) + case Nil => revBindings.reverse + } + } +} diff --git a/util/collection/AList.scala b/util/collection/AList.scala index 212a58411..6e5946318 100644 --- a/util/collection/AList.scala +++ b/util/collection/AList.scala @@ -3,8 +3,9 @@ package sbt import Classes.Applicative import Types._ -/** An abstraction over (* -> *) -> * with the purpose of abstracting over arity abstractions like KList and TupleN -* as well as homogeneous sequences Seq[M[T]]. 
*/ +/** An abstraction over a higher-order type constructor `K[x[y]]` with the purpose of abstracting +* over heterogeneous sequences like `KList` and `TupleN` with elements with a common type +* constructor as well as homogeneous sequences `Seq[M[T]]`. */ trait AList[K[L[x]] ] { def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N] @@ -18,6 +19,7 @@ trait AList[K[L[x]] ] object AList { type Empty = AList[({ type l[L[x]] = Unit})#l] + /** AList for Unit, which represents a sequence that is always empty.*/ val empty: Empty = new Empty { def transform[M[_], N[_]](in: Unit, f: M ~> N) = () def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init @@ -26,6 +28,7 @@ object AList } type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l] + /** AList for a homogeneous sequence. */ def seq[T]: SeqList[T] = new SeqList[T] { def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T]) @@ -44,6 +47,7 @@ object AList def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ??? } + /** AList for the arbitrary arity data structure KList. */ def klist[KL[M[_]] <: KList[M] { type Transform[N[_]] = KL[N] }]: AList[KL] = new AList[KL] { def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f) def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init) @@ -51,6 +55,7 @@ object AList def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N,P](f)(np) } + /** AList for a single value.
*/ type Single[A] = AList[({ type l[L[x]] = L[A]})#l] def single[A]: Single[A] = new Single[A] { def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a) @@ -58,8 +63,8 @@ object AList def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a) } - type ASplit[K[L[x]], B[x]] = AList[ ({ type l[L[x]] = K[ (L ∙ B)#l] })#l ] + /** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/ def asplit[ K[L[x]], B[x] ](base: AList[K]): ASplit[K,B] = new ASplit[K, B] { type Split[ L[x] ] = K[ (L ∙ B)#l ] diff --git a/util/collection/KList.scala b/util/collection/KList.scala index 70e3852f9..7ecc6ba6a 100644 --- a/util/collection/KList.scala +++ b/util/collection/KList.scala @@ -11,9 +11,16 @@ sealed trait KList[+M[_]] /** Apply the natural transformation `f` to each element. */ def transform[N[_]](f: M ~> N): Transform[N] + /** Folds this list using a function that operates on the homogeneous type of the elements of this list. */ def foldr[T](f: (M[_], T) => T, init: T): T = init // had trouble defining it in KNil + + /** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */ def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] + + /** Equivalent to `transform(f) . apply(x => x)`, this is the essence of the iterator at the level of natural transformations.*/ def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] + + /** Discards the heterogeneous type information and constructs a plain List from this KList's elements. 
*/ def toList: List[M[_]] } final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] diff --git a/util/collection/Settings.scala b/util/collection/Settings.scala index 981096c0b..783956f2a 100644 --- a/util/collection/Settings.scala +++ b/util/collection/Settings.scala @@ -59,7 +59,9 @@ trait Init[Scope] type MapConstant = ScopedKey ~> Option def setting[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition = NoPosition): Setting[T] = new Setting[T](key, init, pos) - def value[T](value: => T): Initialize[T] = new Value(value _) + def valueStrict[T](value: T): Initialize[T] = pure(() => value) + def value[T](value: => T): Initialize[T] = pure(value _) + def pure[T](value: () => T): Initialize[T] = new Value(value) def optional[T,U](i: Initialize[T])(f: Option[T] => U): Initialize[U] = new Optional(Some(i), f) def update[T](key: ScopedKey[T])(f: T => T): Setting[T] = new Setting[T](key, map(key)(f), NoPosition) def bind[S,T](in: Initialize[S])(f: S => Initialize[T]): Initialize[T] = new Bind(f, in)