mirror of https://github.com/sbt/sbt.git
Merge pull request #7522 from adpi2/sbt2-fix-scripted
[2.x] fix scripted
Commit a5295ce80e

@@ -398,27 +398,18 @@ object Index {
   def triggers(ss: Settings[Scope]): Triggers = {
     val runBefore = new TriggerMap
     val triggeredBy = new TriggerMap
-    ss.data.values foreach (
-      _.entries foreach {
-        case AttributeEntry(_, value: Task[?]) =>
-          val as = value.info.attributes
-          update(runBefore, value, as.get(Def.runBefore.asInstanceOf))
-          update(triggeredBy, value, as.get(Def.triggeredBy.asInstanceOf))
-        case _ => ()
-      }
-    )
-    val onComplete = (GlobalScope / Def.onComplete) get ss getOrElse (() => ())
+    for
+      a <- ss.data.values
+      case AttributeEntry(_, base: Task[?]) <- a.entries
+    do
+      def update(map: TriggerMap, key: AttributeKey[Seq[Task[?]]]): Unit =
+        base.info.attributes.get(key).getOrElse(Seq.empty).foreach { task =>
+          map(task) = base +: map.getOrElse(task, Nil)
+        }
+      update(runBefore, Def.runBefore)
+      update(triggeredBy, Def.triggeredBy)
+    val onComplete = (GlobalScope / Def.onComplete).get(ss).getOrElse(() => ())
     new Triggers(runBefore, triggeredBy, map => { onComplete(); map })
   }

-  private[this] def update(
-      map: TriggerMap,
-      base: Task[?],
-      tasksOpt: Option[Seq[Task[?]]]
-  ): Unit =
-    for {
-      tasks <- tasksOpt
-      task <- tasks
-    }
-    map(task) = base +: map.getOrElse(task, Nil)
 }
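
The rewrite above leans on a Scala 3 feature: a `case` generator in a for comprehension both destructures and filters, which replaces the old nested foreach over a partial function. A minimal self-contained sketch of the pattern (names are illustrative, not sbt's):

    sealed trait Entry
    final case class AttributeEntry(name: String, value: Any) extends Entry
    final case class OtherEntry() extends Entry

    @main def forCaseDemo(): Unit =
      val entries: List[Entry] = List(AttributeEntry("a", 1), OtherEntry(), AttributeEntry("b", 2))
      for case AttributeEntry(name, value) <- entries
      do println(s"$name -> $value") // non-matching entries are silently skipped, like `case _ => ()`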

@@ -229,7 +229,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
   import language.experimental.macros

-  // These are here, as opposed to RemoteCahe, since we need them from TaskMacro etc
+  // These are here, as opposed to RemoteCache, since we need them from TaskMacro etc
   private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore()
   def cacheStore: ActionCacheStore = _cacheStore
   private[sbt] var _outputDirectory: Option[Path] = None

@@ -319,7 +319,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
   inline def value: A1 = InputWrapper.`wrapInitTask_\u2603\u2603`[A1](in)

   /**
-   * This treats the `Initailize[Task[A]]` as a setting that returns the Task value,
+   * This treats the `Initialize[Task[A]]` as a setting that returns the Task value,
    * instead of evaluating the task.
    */
   inline def taskValue: Task[A1] = InputWrapper.`wrapInit_\u2603\u2603`[Task[A1]](in)
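
The `taskValue` doc comment fixed above marks a distinction worth an example: `.value` inside a task body evaluates the task, while `.taskValue` hands you the Task itself without running it. A hedged build.sbt-style sketch (key names are hypothetical):

    lazy val hello = taskKey[String]("produces a greeting")
    lazy val shouting = taskKey[String]("depends on hello's result")
    lazy val helloHolder = settingKey[Task[String]]("stores the hello task, unevaluated")

    hello := "hello"
    shouting := hello.value.toUpperCase // evaluates hello first
    helloHolder := hello.taskValue      // captures Task[String] as a plain value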

@@ -82,7 +82,7 @@ sealed abstract class SettingKey[A1]
   final inline def :=(inline v: A1): Setting[A1] =
     ${ TaskMacro.settingAssignMacroImpl('this, 'v) }

-  final inline def +=[A2](inline v: A2)(using Append.Value[A1, A2]): Setting[A1] =
+  final inline def +=[A2](inline v: A2)(using inline ev: Append.Value[A1, A2]): Setting[A1] =
     ${ TaskMacro.settingAppend1Impl[A1, A2]('this, 'v) }

   final inline def append1[A2](v: Initialize[A2])(using
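
For context, `Append.Value[A1, A2]` is the evidence that an A2 can be appended to a setting of type A1, which is what makes the usual build.sbt `+=` forms typecheck. Illustrative usage:

    scalacOptions += "-deprecation"   // Append.Value[Seq[String], String]
    Test / javaOptions += "-Xmx2G"    // same evidence, scoped key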

@@ -25,13 +25,6 @@ object InitializeInstance:
     Def.flatMap[A1, A2](fa)(f)
 end InitializeInstance

-private[std] object ComposeInstance:
-  import InitializeInstance.initializeMonad
-  val InitInstance = summon[Applicative[Initialize]]
-  val F1F2: Applicative[[a] =>> Initialize[Task[a]]] =
-    summon[Applicative[[a] =>> Initialize[Task[a]]]]
-end ComposeInstance
-
 object ParserInstance:
   type F1[x] = State => Parser[x]
   // import sbt.internal.util.Classes.Applicative

@@ -59,11 +52,17 @@ object FullInstance:
     KeyRanks.DTask
   )

   given Monad[Initialize] = InitializeInstance.initializeMonad
-  val F1F2: Applicative[[a] =>> Initialize[Task[a]]] = ComposeInstance.F1F2
+  val F1F2: Applicative[[a] =>> Initialize[Task[a]]] = Applicative.given_Applicative_F1(using
+    InitializeInstance.initializeMonad,
+    Task.taskMonad
+  )
   given initializeTaskMonad: Monad[[a] =>> Initialize[Task[a]]] with
     type F[x] = Initialize[Task[x]]
     override def pure[A1](x: () => A1): Initialize[Task[A1]] = F1F2.pure(x)

     override def map[A1, A2](fa: Initialize[Task[A1]])(f: A1 => A2): Initialize[Task[A2]] =
       F1F2.map(fa)(f)

     override def ap[A1, A2](ff: Initialize[Task[A1 => A2]])(
         fa: Initialize[Task[A1]]
     ): Initialize[Task[A2]] =
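
Replacing ComposeInstance with an explicit `Applicative.given_Applicative_F1(using ...)` call is an instance of a general fact: applicative functors compose, so an Applicative for the nested shape F[G[_]] can be derived from Applicative[F] and Applicative[G]. A self-contained sketch of that derivation (simplified signatures, not sbt's exact Applicative, whose pure takes a thunk):

    trait Applicative[F[_]]:
      def pure[A](a: A): F[A]
      def ap[A, B](ff: F[A => B])(fa: F[A]): F[B]
      def map[A, B](fa: F[A])(f: A => B): F[B] = ap(pure(f))(fa)

    def compose[F[_], G[_]](using F: Applicative[F], G: Applicative[G]): Applicative[[a] =>> F[G[a]]] =
      new Applicative[[a] =>> F[G[a]]]:
        def pure[A](a: A): F[G[A]] = F.pure(G.pure(a))
        def ap[A, B](ff: F[G[A => B]])(fa: F[G[A]]): F[G[B]] =
          F.ap(F.map(ff)(gf => (ga: G[A]) => G.ap(gf)(ga)))(fa)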

@@ -12,70 +12,54 @@ import java.io.File
 import scala.quoted.*
 import scala.reflect.ClassTag

-import sbt.util.OptJsonWriter
+import sbt.util.{ NoJsonWriter, OptJsonWriter }
+import sbt.internal.util.{ AttributeKey, KeyTag }

 private[sbt] object KeyMacro:
-  def settingKeyImpl[A1: Type](
-      description: Expr[String]
-  )(using qctx: Quotes): Expr[SettingKey[A1]] =
-    keyImpl2[A1, SettingKey[A1]]("settingKey") { (name, mf, ojw) =>
-      val n = Expr(name)
-      '{
-        SettingKey[A1]($n, $description)($mf, $ojw)
-      }
-    }
+  def settingKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[SettingKey[A1]] =
+    val name = definingValName(errorMsg("settingKey"))
+    val tag = '{ KeyTag.Setting[A1](${ summonRuntimeClass[A1] }) }
+    val ojw = Expr
+      .summon[OptJsonWriter[A1]]
+      .getOrElse(errorAndAbort(s"OptJsonWriter[A] not found for ${Type.show[A1]}"))
+    '{ SettingKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, $ojw)) }

-  def taskKeyImpl[A1: Type](description: Expr[String])(using qctx: Quotes): Expr[TaskKey[A1]] =
-    keyImpl[A1, TaskKey[A1]]("taskKey") { (name, mf) =>
-      val n = Expr(name)
-      '{
-        TaskKey[A1]($n, $description)($mf)
-      }
-    }
+  def taskKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[TaskKey[A1]] =
+    val name = definingValName(errorMsg("taskKey"))
+    val tag: Expr[KeyTag[Task[A1]]] = Type.of[A1] match
+      case '[Seq[a]] =>
+        '{ KeyTag.SeqTask(${ summonRuntimeClass[a] }) }
+      case _ => '{ KeyTag.Task(${ summonRuntimeClass[A1] }) }
+    '{ TaskKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, NoJsonWriter())) }

-  def inputKeyImpl[A1: Type](description: Expr[String])(using qctx: Quotes): Expr[InputKey[A1]] =
-    keyImpl[A1, InputKey[A1]]("inputKey") { (name, mf) =>
-      val n = Expr(name)
-      '{
-        InputKey[A1]($n, $description)($mf)
-      }
-    }
+  def inputKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[InputKey[A1]] =
+    val name = definingValName(errorMsg("inputTaskKey"))
+    val tag: Expr[KeyTag[InputTask[A1]]] = '{ KeyTag.InputTask(${ summonRuntimeClass[A1] }) }
+    '{ InputKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, NoJsonWriter())) }

-  private def keyImpl[A1: Type, A2: Type](methodName: String)(
-      f: (String, Expr[ClassTag[A1]]) => Expr[A2]
-  )(using qctx: Quotes): Expr[A2] =
-    val tpe = summon[Type[A1]]
-    f(
-      definingValName(errorMsg(methodName)),
-      Expr.summon[ClassTag[A1]].getOrElse(sys.error("ClassTag[A] not found for $tpe"))
-    )
+  def projectImpl(using Quotes): Expr[Project] =
+    val name = definingValName(errorMsg2)
+    '{ Project($name, new File($name)) }

-  private def keyImpl2[A1: Type, A2: Type](methodName: String)(
-      f: (String, Expr[ClassTag[A1]], Expr[OptJsonWriter[A1]]) => Expr[A2]
-  )(using qctx: Quotes): Expr[A2] =
-    val tpe = summon[Type[A1]]
-    f(
-      definingValName(errorMsg(methodName)),
-      Expr.summon[ClassTag[A1]].getOrElse(sys.error("ClassTag[A] not found for $tpe")),
-      Expr.summon[OptJsonWriter[A1]].getOrElse(sys.error("OptJsonWriter[A] not found for $tpe")),
-    )
+  private def summonRuntimeClass[A: Type](using Quotes): Expr[Class[?]] =
+    val classTag = Expr
+      .summon[ClassTag[A]]
+      .getOrElse(errorAndAbort(s"ClassTag[${Type.show[A]}] not found"))
+    '{ $classTag.runtimeClass }

-  def projectImpl(using qctx: Quotes): Expr[Project] =
-    val name = Expr(definingValName(errorMsg2("project")))
-    '{
-      Project($name, new File($name))
-    }
+  private def errorAndAbort(msg: String)(using q: Quotes): Nothing =
+    q.reflect.report.errorAndAbort(msg)

   private def errorMsg(methodName: String): String =
     s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`."""

-  private def errorMsg2(methodName: String): String =
-    s"""$methodName must be directly assigned to a val, such as `val x = ($methodName in file("core"))`."""
+  private def errorMsg2: String =
+    """project must be directly assigned to a val, such as `val x = project.in(file("core"))`."""

-  private def definingValName(errorMsg: String)(using qctx: Quotes): String =
+  private def definingValName(errorMsg: String)(using qctx: Quotes): Expr[String] =
     val term = enclosingTerm
-    if term.isValDef then term.name
-    else sys.error(errorMsg)
+    if term.isValDef then Expr(term.name)
+    else errorAndAbort(errorMsg)

   def enclosingTerm(using qctx: Quotes) =
     import qctx.reflect._
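
Each of these macros derives the key's name from the val it is assigned to (via definingValName/enclosingTerm), which is why the error messages insist on direct assignment. Illustrative usage in a build definition:

    val answer = settingKey[Int]("the answer")        // key name becomes "answer"
    val greet  = taskKey[Unit]("prints a greeting")   // tagged KeyTag.Task
    val items  = taskKey[Seq[String]]("some items")   // hits the '[Seq[a]] case: KeyTag.SeqTask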

@@ -93,7 +93,7 @@ object TaskMacro:
       '{
         InputWrapper.`wrapInitTask_\u2603\u2603`[Option[A1]](Previous.runtime[A1]($t)($ev))
       }
-    case _ => report.errorAndAbort(s"JsonFormat[${Type.of[A1]}] missing")
+    case _ => report.errorAndAbort(s"JsonFormat[${Type.show[A1]}] missing")

   /** Implementation of := macro for settings. */
   def settingAssignMacroImpl[A1: Type](rec: Expr[Scoped.DefinableSetting[A1]], v: Expr[A1])(using

@@ -147,7 +147,7 @@ object TaskMacro:
           $rec.+=($v2.taskValue)(using $ev)
         }
       case _ =>
-        report.errorAndAbort(s"Append.Value[${Type.of[A1]}, ${Type.of[Task[a]]}] missing")
+        report.errorAndAbort(s"Append.Value[${Type.show[A1]}, ${Type.show[Task[a]]}] missing")
   case _ =>
     Expr.summon[Append.Value[A1, A2]] match
       case Some(ev) =>

@@ -155,7 +155,8 @@ object TaskMacro:
         '{
           $rec.append1[A2]($init)(using $ev)
         }
-      case _ => report.errorAndAbort(s"Append.Value[${Type.of[A1]}, ${Type.of[A2]}] missing")
+      case _ =>
+        report.errorAndAbort(s"Append.Value[${Type.show[A1]}, ${Type.show[A2]}] missing")

 /*
 private[this] def transformMacroImpl[A](using qctx: Quotes)(init: Expr[A])(
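
The repeated Type.of -> Type.show change is about error-message quality: Type.of[A] yields a quoted Type value whose toString is not meant for display, while Type.show renders the type as readable source text. A minimal macro demonstrating the difference (hypothetical, for illustration):

    import scala.quoted.*

    inline def typeName[A]: String = ${ typeNameImpl[A] }

    def typeNameImpl[A: Type](using Quotes): Expr[String] =
      Expr(Type.show[A]) // e.g. "scala.collection.immutable.List[scala.Int]"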

@@ -81,41 +81,27 @@ public final class MetaBuildLoader extends URLClassLoader {
             jnaJars);
     final Pattern pattern = Pattern.compile(fullPattern);
     final File[] cp = appProvider.mainClasspath();
-    final URL[] interfaceURLs = new URL[3];
-    final URL[] jlineURLs = new URL[7];
+    final Set<File> interfaceFiles = new LinkedHashSet<>();
+    final Set<File> jlineFiles = new LinkedHashSet<>();
     final File[] extra =
         appProvider.id().classpathExtra() == null ? new File[0] : appProvider.id().classpathExtra();
     final Set<File> bottomClasspath = new LinkedHashSet<>();

-    {
-      int interfaceIndex = 0;
-      int jlineIndex = 0;
-      for (final File file : cp) {
-        final String name = file.getName();
-        if ((name.contains("test-interface")
-                || name.contains("compiler-interface")
-                || name.contains("util-interface"))
-            && pattern.matcher(name).find()) {
-          interfaceURLs[interfaceIndex] = file.toURI().toURL();
-          interfaceIndex += 1;
-        } else if (pattern.matcher(name).find()) {
-          jlineURLs[jlineIndex] = file.toURI().toURL();
-          jlineIndex += 1;
-        } else {
-          bottomClasspath.add(file);
-        }
-      }
-      for (final File file : extra) {
-        bottomClasspath.add(file);
-      }
-      final URL[] rest = new URL[bottomClasspath.size()];
-      {
-        int i = 0;
-        for (final File file : bottomClasspath) {
-          rest[i] = file.toURI().toURL();
-          i += 1;
-        }
-      }
+    for (final File file : cp) {
+      final String name = file.getName();
+      if ((name.contains("test-interface")
+              || name.contains("compiler-interface")
+              || name.contains("util-interface"))
+          && pattern.matcher(name).find()) {
+        interfaceFiles.add(file);
+      } else if (pattern.matcher(name).find()) {
+        jlineFiles.add(file);
+      } else {
+        bottomClasspath.add(file);
+      }
+    }
+    for (final File file : extra) {
+      bottomClasspath.add(file);
+    }
     final ScalaProvider scalaProvider = appProvider.scalaProvider();
     ClassLoader topLoader = scalaProvider.launcher().topLoader();

@@ -148,8 +134,9 @@ public final class MetaBuildLoader extends URLClassLoader {
       }
     };

-    final SbtInterfaceLoader interfaceLoader = new SbtInterfaceLoader(interfaceURLs, topLoader);
-    final JLineLoader jlineLoader = new JLineLoader(jlineURLs, interfaceLoader);
+    final SbtInterfaceLoader interfaceLoader =
+        new SbtInterfaceLoader(toURLArray(interfaceFiles), topLoader);
+    final JLineLoader jlineLoader = new JLineLoader(toURLArray(jlineFiles), interfaceLoader);
     final File[] siJars = scalaProvider.jars();
     final URL[] lib = new URL[1];
     int scalaRestCount = siJars.length - 1;

@@ -175,6 +162,17 @@ public final class MetaBuildLoader extends URLClassLoader {
     assert lib[0] != null : "no scala-library.jar";
     final ScalaLibraryClassLoader libraryLoader = new ScalaLibraryClassLoader(lib, jlineLoader);
     final FullScalaLoader fullScalaLoader = new FullScalaLoader(scalaRest, libraryLoader);
-    return new MetaBuildLoader(rest, fullScalaLoader, libraryLoader, interfaceLoader, jlineLoader);
+    return new MetaBuildLoader(
+        toURLArray(bottomClasspath), fullScalaLoader, libraryLoader, interfaceLoader, jlineLoader);
   }

+  private static URL[] toURLArray(Set<File> files) throws java.net.MalformedURLException {
+    URL[] urls = new URL[files.size()];
+    int i = 0;
+    for (final File file : files) {
+      urls[i] = file.toURI().toURL();
+      i += 1;
+    }
+    return urls;
+  }
 }
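
The net effect of this refactoring: the fixed-size URL arrays (new URL[3], new URL[7]) give way to insertion-ordered LinkedHashSets plus one shared toURLArray helper, so the loader no longer hard-codes how many interface or jline jars exist. A Scala sketch of the equivalent helper (illustrative; the real one is the Java method above):

    import java.io.File
    import java.net.URL

    def toURLArray(files: Iterable[File]): Array[URL] =
      files.iterator.map(_.toURI.toURL).toArray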

@@ -8,10 +8,14 @@
 package sbt.internal;

 import java.io.File;
+import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.net.MalformedURLException;
 import java.net.URL;
+import java.net.URLConnection;
+import java.nio.file.Paths;
 import xsbti.*;

 /**

@@ -25,6 +29,10 @@ public class XMainConfiguration {
   public xsbti.MainResult run(String moduleName, xsbti.AppConfiguration configuration)
       throws Throwable {
     try {
+      boolean isScripted = Boolean.parseBoolean(System.getProperty("sbt.scripted"));
+      // in batch scripted tests, we disable caching of JAR URL connections to avoid interference
+      // between tests
+      if (isScripted) disableCachingOfURLConnections();
       ClassLoader topLoader = configuration.provider().scalaProvider().launcher().topLoader();
       xsbti.AppConfiguration updatedConfiguration = null;
       try {

@@ -56,7 +64,7 @@ public class XMainConfiguration {
         clw.getMethod("warmup").invoke(clw.getField("MODULE$").get(null));
         return (xsbti.MainResult) runMethod.invoke(instance, updatedConfiguration);
       } catch (InvocationTargetException e) {
-        // This propogates xsbti.FullReload to the launcher
+        // This propagates xsbti.FullReload to the launcher
         throw e.getCause();
       }
     } catch (ReflectiveOperationException e) {

@@ -104,6 +112,22 @@ public class XMainConfiguration {
     }
   }

+  private class FakeURLConnection extends URLConnection {
+    public FakeURLConnection(URL url) {
+      super(url);
+    }
+
+    public void connect() throws IOException {}
+  }
+
+  private void disableCachingOfURLConnections() {
+    try {
+      URLConnection conn = new FakeURLConnection(Paths.get(".").toUri().toURL());
+      conn.setDefaultUseCaches(false);
+    } catch (MalformedURLException e) {
+    }
+  }
+
   /*
    * Replaces the AppProvider.loader method with a new loader that puts the sbt test interface
    * jar ahead of the rest of the sbt classpath in the classloading hierarchy.
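
The fix that gives this PR its name hinges on a JVM quirk: URLConnection.setDefaultUseCaches is an instance method that mutates a process-wide default, so the code constructs a throwaway connection purely to flip the flag, preventing stale cached JAR handles from leaking between batch scripted tests. Scala equivalent of the same trick (a sketch, not sbt's code):

    import java.net.URLConnection
    import java.nio.file.Paths

    def disableUrlConnectionCaching(): Unit =
      val conn = new URLConnection(Paths.get(".").toUri.toURL):
        override def connect(): Unit = () // never actually connects
      conn.setDefaultUseCaches(false) // a global default, despite being an instance method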

@@ -88,8 +88,6 @@ import sbt.util.CacheImplicits.given
 import sbt.util.InterfaceUtil.{ t2, toJavaFunction => f1 }
 import sbt.util._
 import sjsonnew._
-import xsbti.compile.TastyFiles
-import xsbti.{ FileConverter, Position }

 import scala.annotation.nowarn
 import scala.collection.immutable.ListMap

@@ -102,12 +100,22 @@ import sbt.SlashSyntax0._
 import sbt.internal.inc.{
   Analysis,
   AnalyzingCompiler,
+  FileAnalysisStore,
   ManagedLoggedReporter,
   MixedAnalyzingCompiler,
   ScalaInstance
 }
 import sbt.internal.io.Retry
-import xsbti.{ CompileFailed, CrossValue, HashedVirtualFileRef, VirtualFile, VirtualFileRef }
+import xsbti.{
+  AppConfiguration,
+  CompileFailed,
+  CrossValue,
+  FileConverter,
+  HashedVirtualFileRef,
+  Position,
+  VirtualFile,
+  VirtualFileRef
+}
 import xsbti.compile.{
   AnalysisContents,
   AnalysisStore,

@@ -128,6 +136,7 @@ import xsbti.compile.{
   PerClasspathEntryLookup,
   PreviousResult,
   Setup,
+  TastyFiles,
   TransactionalManagerType
 }

@@ -143,22 +152,17 @@ object Defaults extends BuildCommon {
   def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app)

-  def extractAnalysis[A1](a: Attributed[A1]): (A1, CompileAnalysis) =
-    (
-      a.data,
-      a.metadata.get(Keys.analysis) match
-        case Some(ref) => RemoteCache.getCachedAnalysis(ref)
-        case None      => Analysis.Empty
-    )
-
-  def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[CompileAnalysis] = {
-    val m = (for {
-      a <- cp
-      ref <- a.metadata.get(Keys.analysis)
-      an = RemoteCache.getCachedAnalysis(ref)
-    } yield (a.data, an)).toMap
-    m.get(_)
-  }
+  private[sbt] def extractAnalysis(
+      metadata: StringAttributeMap,
+      converter: FileConverter
+  ): Option[CompileAnalysis] =
+    def asBinary(file: File) = FileAnalysisStore.binary(file).get.asScala
+    def asText(file: File) = FileAnalysisStore.text(file).get.asScala
+    for
+      ref <- metadata.get(Keys.analysis)
+      file = converter.toPath(VirtualFileRef.of(ref)).toFile
+      content <- asBinary(file).orElse(asText(file))
+    yield content.getAnalysis

   private[sbt] def globalDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] =
     Def.defaultSettings(inScope(GlobalScope)(ss))

@@ -236,8 +240,28 @@ object Defaults extends BuildCommon {
       closeClassLoaders :== SysProp.closeClassLoaders,
       allowZombieClassLoaders :== true,
       packageTimestamp :== Pkg.defaultTimestamp,
+      rootPaths := {
+        val app = appConfiguration.value
+        val coursierCache = csrCacheDirectory.value.toPath
+        val out = rootOutputDirectory.value
+        getRootPaths(out, app) + ("CSR_CACHE" -> coursierCache)
+      },
+      fileConverter := MappedFileConverter(rootPaths.value, allowMachinePath.value)
     ) ++ BuildServerProtocol.globalSettings

+  private[sbt] def getRootPaths(out: NioPath, app: AppConfiguration): ListMap[String, NioPath] =
+    val base = app.baseDirectory.getCanonicalFile.toPath
+    val boot = app.provider.scalaProvider.launcher.bootDirectory.toPath
+    val ih = app.provider.scalaProvider.launcher.ivyHome.toPath
+    val javaHome = Paths.get(sys.props("java.home"))
+    ListMap(
+      "OUT" -> out,
+      "BASE" -> base,
+      "SBT_BOOT" -> boot,
+      "IVY_HOME" -> ih,
+      "JAVA_HOME" -> javaHome
+    )
+
   private[sbt] lazy val globalIvyCore: Seq[Setting[_]] =
     Seq(
       internalConfigurationMap :== Configurations.internalMap _,

@@ -286,6 +310,10 @@ object Defaults extends BuildCommon {
       csrLogger := LMCoursier.coursierLoggerTask.value,
       csrMavenProfiles :== Set.empty,
       csrReconciliations :== LMCoursier.relaxedForAllModules,
+      csrCacheDirectory := {
+        if (useCoursier.value) LMCoursier.defaultCacheLocation
+        else Classpaths.dummyCoursierDirectory(appConfiguration.value)
+      }
     )

   /** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */
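
These root paths exist so that MappedFileConverter can rewrite absolute machine paths into portable virtual ids, which is what makes cached task outputs shareable across machines. A reduced sketch of the idea (illustrative, not the real converter):

    import java.nio.file.{ Path, Paths }

    def toVirtualId(roots: Map[String, Path], p: Path): String =
      roots.collectFirst {
        case (label, root) if p.startsWith(root) => s"$${$label}/${root.relativize(p)}"
      }.getOrElse(p.toString)

    // toVirtualId(Map("CSR_CACHE" -> Paths.get("/home/u/.cache/coursier")),
    //             Paths.get("/home/u/.cache/coursier/v1/foo.jar"))
    //   == "${CSR_CACHE}/v1/foo.jar"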

@@ -414,24 +442,6 @@ object Defaults extends BuildCommon {

   private[sbt] lazy val buildLevelJvmSettings: Seq[Setting[_]] = Seq(
     exportPipelining := usePipelining.value,
-    rootPaths := {
-      val app = appConfiguration.value
-      val base = app.baseDirectory.getCanonicalFile
-      val boot = app.provider.scalaProvider.launcher.bootDirectory
-      val ih = app.provider.scalaProvider.launcher.ivyHome
-      val coursierCache = csrCacheDirectory.value
-      val javaHome = Paths.get(sys.props("java.home"))
-      val out = rootOutputDirectory.value
-      ListMap(
-        "OUT" -> out,
-        "BASE" -> base.toPath,
-        "SBT_BOOT" -> boot.toPath,
-        "CSR_CACHE" -> coursierCache.toPath,
-        "IVY_HOME" -> ih.toPath,
-        "JAVA_HOME" -> javaHome,
-      )
-    },
-    fileConverter := MappedFileConverter(rootPaths.value, allowMachinePath.value),
     sourcePositionMappers := Nil, // Never set a default sourcePositionMapper, see #6352! Whatever you are trying to solve, do it in the foldMappers method.
     // The virtual file value cache needs to be global or sbt will run out of direct byte buffer memory.
     classpathDefinesClassCache := VirtualFileValueCache.definesClassCache(fileConverter.value),

@@ -530,14 +540,6 @@ object Defaults extends BuildCommon {
         .getOrElse(pos)
     }

-  // csrCacheDirectory is scoped to ThisBuild to allow customization.
-  private[sbt] lazy val buildLevelIvySettings: Seq[Setting[_]] = Seq(
-    csrCacheDirectory := {
-      if (useCoursier.value) LMCoursier.defaultCacheLocation
-      else Classpaths.dummyCoursierDirectory(appConfiguration.value)
-    },
-  )
-
   def defaultTestTasks(key: Scoped): Seq[Setting[_]] =
     inTask(key)(
       Seq(

@@ -661,7 +663,7 @@ object Defaults extends BuildCommon {
     classDirectory := target.value / (prefix(configuration.value.name) + "classes"),
     backendOutput := {
       val converter = fileConverter.value
-      val dir = classDirectory.value
+      val dir = target.value / (prefix(configuration.value.name) + "backend")
       converter.toVirtualFile(dir.toPath)
     },
     earlyOutput / artifactPath := configArtifactPathSetting(artifact, "early").value,

@@ -724,7 +726,8 @@ object Defaults extends BuildCommon {
     clean := clean.dependsOnTask(cleanIvy).value,
     scalaCompilerBridgeBinaryJar := Def.settingDyn {
       val sv = scalaVersion.value
-      if (ScalaArtifacts.isScala3(sv)) fetchBridgeBinaryJarTask(sv)
+      val managed = managedScalaInstance.value
+      if (ScalaArtifacts.isScala3(sv) && managed) fetchBridgeBinaryJarTask(sv)
       else Def.task[Option[File]](None)
     }.value,
     scalaCompilerBridgeSource := ZincLmUtil.getDefaultBridgeSourceModule(scalaVersion.value),

@@ -902,21 +905,24 @@ object Defaults extends BuildCommon {
     compileOutputs := {
       import scala.jdk.CollectionConverters.*
       val c = fileConverter.value
-      val classFiles =
-        manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala
-      (classFiles.toSeq map { x =>
-        c.toPath(x)
-      }) :+ compileAnalysisFile.value.toPath
+      val (_, jarFile) = compileIncremental.value
+      val classFiles = compile.value.readStamps.getAllProductStamps.keySet.asScala
+      classFiles.toSeq.map(c.toPath) :+ compileAnalysisFile.value.toPath :+ c.toPath(jarFile)
     },
     compileOutputs := compileOutputs.triggeredBy(compile).value,
     tastyFiles := Def.taskIf {
       if (ScalaArtifacts.isScala3(scalaVersion.value)) {
-        val _ = compile.value
-        val tastyFiles = classDirectory.value.**("*.tasty").get()
+        val c = fileConverter.value
+        val dir = c.toPath(backendOutput.value).toFile
+        val tastyFiles = dir.**("*.tasty").get()
         tastyFiles.map(_.getAbsoluteFile)
       } else Nil
     }.value,
-    clean := (compileOutputs / clean).value,
+    clean := {
+      (compileOutputs / clean).value
+      (products / clean).value
+    },
     earlyOutputPing := Def.promise[Boolean],
     compileProgress := {
       val s = streams.value

@@ -1426,40 +1432,35 @@ object Defaults extends BuildCommon {
     Def.task {
       val cp = (test / fullClasspath).value
       val s = (test / streams).value
-      val ans: Seq[Analysis] = cp
-        .flatMap(_.metadata.get(Keys.analysis))
-        .map: str =>
-          RemoteCache.getCachedAnalysis(str).asInstanceOf[Analysis]
+      val converter = fileConverter.value
+      val analyses = cp
+        .flatMap(a => extractAnalysis(a.metadata, converter))
+        .collect { case analysis: Analysis => analysis }
       val succeeded = TestStatus.read(succeededFile(s.cacheDirectory))
       val stamps = collection.mutable.Map.empty[String, Long]
-      def stamp(dep: String): Long = {
-        val stamps = for (a <- ans) yield intlStamp(dep, a, Set.empty)
-        if (stamps.isEmpty) Long.MinValue
-        else stamps.max
-      }
-      def intlStamp(c: String, analysis: Analysis, s: Set[String]): Long = {
-        if (s contains c) Long.MinValue
+      def stamp(dep: String): Option[Long] =
+        analyses.flatMap(internalStamp(dep, _, Set.empty)).maxOption
+      def internalStamp(c: String, analysis: Analysis, alreadySeen: Set[String]): Option[Long] = {
+        if (alreadySeen.contains(c)) None
         else
-          stamps.getOrElse(
-            c, {
-              val x = {
-                import analysis.{ apis, relations }
-                relations.internalClassDeps(c).map(intlStamp(_, analysis, s + c)) ++
-                  relations.externalDeps(c).map(stamp) ++
-                  relations.productClassName.reverse(c).flatMap { pc =>
-                    apis.internal.get(pc).map(_.compilationTimestamp)
-                  } + Long.MinValue
-              }.max
-              if (x != Long.MinValue) {
-                stamps(c) = x
-              }
-              x
-            }
-          )
+          def computeAndStoreStamp: Option[Long] = {
+            import analysis.{ apis, relations }
+            val internalDeps = relations
+              .internalClassDeps(c)
+              .flatMap(internalStamp(_, analysis, alreadySeen + c))
+            val externalDeps = relations.externalDeps(c).flatMap(stamp)
+            val classStamps = relations.productClassName.reverse(c).flatMap { pc =>
+              apis.internal.get(pc).map(_.compilationTimestamp)
+            }
+            val maxStamp = (internalDeps ++ externalDeps ++ classStamps).maxOption
+            maxStamp.foreach(maxStamp => stamps(c) = maxStamp)
+            maxStamp
+          }
+          stamps.get(c).orElse(computeAndStoreStamp)
       }
       def noSuccessYet(test: String) = succeeded.get(test) match {
         case None => true
-        case Some(ts) => stamps.synchronized(stamp(test)) > ts
+        case Some(ts) => stamps.synchronized(stamp(test)).exists(_ > ts)
       }
       args =>
         for (filter <- selectedFilter(args))
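
The heart of this rewrite is replacing the Long.MinValue sentinel with Option[Long]: "no stamp known" becomes None, and maxOption collapses an empty collection to None instead of a fabricated minimum. Self-contained illustration of why that composes more safely:

    def newest(stamps: Seq[Option[Long]]): Option[Long] =
      stamps.flatten.maxOption

    assert(newest(Seq(None, Some(3L), Some(7L))) == Some(7L))
    assert(newest(Seq(None, None)).isEmpty) // the old encoding returned Long.MinValue here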

@@ -2400,7 +2401,7 @@ object Defaults extends BuildCommon {
       val _ = compileIncremental.value
       val exportP = exportPipelining.value
       // Save analysis midway if pipelining is enabled
-      val store = analysisStore
+      val store = analysisStore(compileAnalysisFile)
       val contents = store.unsafeGet()
       if (exportP) {
         // this stores the early analysis (again) in case the subproject contains a macro

@@ -2425,7 +2426,7 @@ object Defaults extends BuildCommon {
         .debug(s"${name.value}: compileEarly: blocking on earlyOutputPing")
       earlyOutputPing.await.value
     }) {
-      val store = earlyAnalysisStore
+      val store = analysisStore(earlyCompileAnalysisFile)
       store.get.toOption match {
         case Some(contents) => contents.getAnalysis
         case _ => Analysis.empty

@@ -2437,7 +2438,7 @@ object Defaults extends BuildCommon {

   def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
     val setup: Setup = compileIncSetup.value
-    val store = analysisStore
+    val store = analysisStore(compileAnalysisFile)
     val c = fileConverter.value
     // TODO - expose bytecode manipulation phase.
     val analysisResult: CompileResult = manipulateBytecode.value

@@ -2460,10 +2461,16 @@ object Defaults extends BuildCommon {
     val bspTask = (compile / bspCompileTask).value
     val result = cachedCompileIncrementalTask.result.value
     val reporter = (compile / bspReporter).value
-    val store = analysisStore
+    val store = analysisStore(compileAnalysisFile)
     val ci = (compile / compileInputs).value
     val c = fileConverter.value
+    val dir = c.toPath(backendOutput.value).toFile
     result match
       case Result.Value(res) =>
+        val rawJarPath = c.toPath(res._2)
+        IO.delete(dir)
+        IO.unzip(rawJarPath.toFile, dir)
+        IO.delete(dir / "META-INF" / "MANIFEST.MF")
         val analysis = store.unsafeGet().getAnalysis()
         reporter.sendSuccessReport(analysis)
         bspTask.notifySuccess(analysis)

@@ -2493,7 +2500,7 @@ object Defaults extends BuildCommon {
     val ci2 = (compile / compileInputs2).value
     val ping = (TaskZero / earlyOutputPing).value
     val setup: Setup = (TaskZero / compileIncSetup).value
-    val store = analysisStore
+    val store = analysisStore(compileAnalysisFile)
     val c = fileConverter.value
     // TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
     val analysisResult = Retry(compileIncrementalTaskImpl(bspTask, s, ci, ping))

@@ -2501,9 +2508,7 @@ object Defaults extends BuildCommon {
     val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
     store.set(contents)
     Def.declareOutput(analysisOut)
-    val dir = classDirectory.value
-    if (dir / "META-INF" / "MANIFEST.MF").exists then IO.delete(dir / "META-INF" / "MANIFEST.MF")
     // inline mappings
+    val dir = ci.options.classesDirectory.toFile()
     val mappings = Path
       .allSubpaths(dir)
       .filter(_._1.isFile())

@@ -2574,17 +2579,22 @@ object Defaults extends BuildCommon {

   def compileIncSetupTask = Def.task {
     val cp = dependencyPicklePath.value
+    val converter = fileConverter.value
+    val cachedAnalysisMap: Map[VirtualFile, CompileAnalysis] = (
+      for
+        attributed <- cp
+        analysis <- extractAnalysis(attributed.metadata, converter)
+      yield (converter.toVirtualFile(attributed.data), analysis)
+    ).toMap
+    val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass =
+      Keys.classpathEntryDefinesClassVF.value
     val lookup = new PerClasspathEntryLookup:
-      private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] =
-        analysisMap(cp)
-      private val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass =
-        Keys.classpathEntryDefinesClassVF.value
       override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] =
-        cachedAnalysisMap(classpathEntry).toOptional
+        cachedAnalysisMap.get(classpathEntry).toOptional
       override def definesClass(classpathEntry: VirtualFile): DefinesClass =
         cachedPerEntryDefinesClassLookup(classpathEntry)
     val extra = extraIncOptions.value.map(t2)
-    val store = earlyAnalysisStore
+    val store = analysisStore(earlyCompileAnalysisFile)
     val eaOpt = if exportPipelining.value then Some(store) else None
     Setup.of(
       lookup,

@@ -2689,7 +2699,7 @@ object Defaults extends BuildCommon {
   def compileAnalysisSettings: Seq[Setting[_]] = Seq(
     previousCompile := {
       val setup = compileIncSetup.value
-      val store = analysisStore
+      val store = analysisStore(compileAnalysisFile)
       val prev = store.get().toOption match {
         case Some(contents) =>
           val analysis = Option(contents.getAnalysis).toOptional

@@ -2701,17 +2711,11 @@ object Defaults extends BuildCommon {
     }
   )

-  private inline def analysisStore: AnalysisStore = {
-    val setup = compileIncSetup.value
-    val useBinary = enableBinaryCompileAnalysis.value
-    MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
-  }
-
-  private inline def earlyAnalysisStore: AnalysisStore = {
-    val earlyAnalysisPath = earlyCompileAnalysisFile.value.toPath
-    val useBinary = enableBinaryCompileAnalysis.value
-    MixedAnalyzingCompiler.staticCachedStore(earlyAnalysisPath, !useBinary)
-  }
+  private inline def analysisStore(inline analysisFile: TaskKey[File]): AnalysisStore =
+    MixedAnalyzingCompiler.staticCachedStore(
+      analysisFile.value.toPath,
+      !enableBinaryCompileAnalysis.value
+    )

   def printWarningsTask: Initialize[Task[Unit]] =
     Def.task {
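
The two near-duplicate helpers collapse into one parameterized `inline def`. The inlining is load-bearing: `analysisFile.value` is itself a macro that only expands inside a task or setting body, so the helper must be inlined into each caller before that macro runs. Reduced shape of the same idiom (hypothetical helper, for illustration):

    inline def storePath(inline key: TaskKey[java.io.File]): java.nio.file.Path =
      key.value.toPath // expands at each call site, inside the enclosing Def.task { ... }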

@@ -4236,17 +4240,20 @@ object Classpaths {
     new RawRepository(resolver, resolver.getName)
   }

-  def analyzed[T](data: T, analysis: CompileAnalysis) = ClasspathImpl.analyzed[T](data, analysis)
   def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
     val c = fileConverter.value
-    val dir = classDirectory.value
+    Def.unit(copyResources.value)
+    Def.unit(compile.value)
+    val dir = c.toPath(backendOutput.value)
+    val resources = copyResources.value.map(_._2).toSet
     val rawJar = compileIncremental.value._2
     val rawJarPath = c.toPath(rawJar)
-    IO.unzip(rawJarPath.toFile, dir.toFile)
-    IO.delete(dir.toFile / "META-INF" / "MANIFEST.MF")
-    dir.toFile :: Nil
+    // delete outdated files
+    Path
+      .allSubpaths(dir)
+      .collect { case (f, _) if f.isFile() && !resources.contains(f) => f }
+      .foreach(IO.delete)
+    IO.unzip(rawJarPath.toFile, dir)
+    IO.delete(dir / "META-INF" / "MANIFEST.MF")
+    dir :: Nil
   }

   private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task {
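
makeProducts now reconstitutes the backend output directory from the raw jar produced by compileIncremental: stale files that are neither current resources nor in the fresh jar are deleted before unzipping. A reduced sketch of that "delete outdated files" step (assuming sbt.io's Path.allSubpaths, with hypothetical names):

    import java.io.File
    import sbt.io.{ IO, Path }

    def cleanStale(dir: File, keep: Set[File]): Unit =
      Path
        .allSubpaths(dir)
        .collect { case (f, _) if f.isFile && !keep(f) => f }
        .foreach(IO.delete)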

@@ -4815,8 +4822,8 @@ trait BuildExtra extends BuildCommon with DefExtra {
       baseArguments: String*
   ): Vector[Setting[_]] = {
     Vector(
-      scoped := (Def
-        .input((s: State) => Def.spaceDelimited())
+      scoped := Def
+        .input(_ => Def.spaceDelimited())
         .flatMapTask { result =>
           initScoped(
             scoped.scopedKey,

@@ -4829,7 +4836,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
               r.run(mainClass, cp.files, baseArguments ++ args, s.log).get
             }
           }
-        })
+        }
         .evaluated
     ) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value)
   }

@@ -337,10 +337,9 @@ object EvaluateTask {

   def evalPluginDef(pluginDef: BuildStructure, state: State): PluginData = {
     val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root))
-    val pluginKey = pluginData
     val config = extractedTaskConfig(Project.extract(state), pluginDef, state)
     val evaluated =
-      apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config)
+      apply(pluginDef, ScopedKey(pluginData.scope, pluginData.key), state, root, config)
     val (newS, result) = evaluated getOrElse sys.error(
       "Plugin data does not exist for plugin definition at " + pluginDef.root
     )

@@ -174,7 +174,7 @@ object Keys {
   @cacheLevel(include = Array.empty)
   val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
   val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining")
-  val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources")
+  val backendOutput = settingKey[VirtualFile]("Output directory of the compiler backend")
   val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
   val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
   val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)

@@ -254,6 +254,7 @@ object Keys {
   val compileAnalysisFilename = taskKey[String]("Defines the filename used for compileAnalysisFile.").withRank(DTask)
   val compileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
+  val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
   @cacheLevel(include = Array.empty)
   val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting)
   val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting)

@@ -56,6 +56,7 @@ import sjsonnew.JsonFormat
 import scala.annotation.targetName
 import scala.concurrent.{ Await, TimeoutException }
 import scala.concurrent.duration.*
+import ClasspathDep.*

 /*
 sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject {

@@ -637,13 +638,12 @@ trait ProjectExtra extends Scoped.Syntax:
   given classpathDependency[A](using
       Conversion[A, ProjectReference]
   ): Conversion[A, ClasspathDep[ProjectReference]] =
-    (a: A) => ClasspathDep.ClasspathDependency(a, None)
+    (a: A) => ClasspathDependency(a, None)

   extension (p: ProjectReference)
-    def %(conf: Configuration): ClasspathDep.ClasspathDependency = %(conf.name)
+    def %(conf: Configuration): ClasspathDependency = %(conf.name)
     @targetName("percentString")
-    def %(conf: String): ClasspathDep.ClasspathDependency =
-      ClasspathDep.ClasspathDependency(p, Some(conf))
+    def %(conf: String): ClasspathDependency = ClasspathDependency(p, Some(conf))

   extension [A1](in: Def.Initialize[Task[A1]])
     def updateState(f: (State, A1) => State): Def.Initialize[Task[A1]] =

@@ -712,12 +712,13 @@ trait ProjectExtra extends Scoped.Syntax:
       p: T
   )(implicit ev: T => ProjectReference): Constructor =
     new Constructor(p)

-  implicit def classpathDependency[T](
-      p: T
-  )(implicit ev: T => ProjectReference): ClasspathDependency = ClasspathDependency(p, None)
   */
+  implicit def classpathDependency[T](p: T)(implicit
+      ev: T => ProjectReference
+  ): ClasspathDependency =
+    ClasspathDependency(ev(p), None)

   // Duplicated with Structure

   // These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project).

@@ -22,16 +22,9 @@ import sbt.ProjectExtra.*
 import sbt.ScopeFilter.Make._
 import sbt.SlashSyntax0._
 import sbt.coursierint.LMCoursier
-import sbt.internal.inc.{
-  CompileOutput,
-  FileAnalysisStore,
-  HashUtil,
-  JarUtils,
-  MappedFileConverter
-}
+import sbt.internal.inc.{ MappedFileConverter, HashUtil, JarUtils }
 import sbt.internal.librarymanagement._
 import sbt.internal.remotecache._
-import sbt.internal.inc.Analysis
 import sbt.io.IO
 import sbt.io.syntax._
 import sbt.librarymanagement._

@@ -41,16 +34,10 @@ import sbt.nio.FileStamp
 import sbt.nio.Keys.{ inputFileStamps, outputFileStamps }
 import sbt.std.TaskExtra._
 import sbt.util.InterfaceUtil.toOption
-import sbt.util.{
-  ActionCacheStore,
-  AggregateActionCacheStore,
-  CacheImplicits,
-  DiskActionCacheStore,
-  Logger
-}
+import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, DiskActionCacheStore, Logger }
 import sjsonnew.JsonFormat
-import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
-import xsbti.compile.{ AnalysisContents, CompileAnalysis, MiniSetup, MiniOptions }
+import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFileRef }
+import xsbti.compile.CompileAnalysis

 import scala.collection.mutable

@@ -59,8 +46,6 @@ object RemoteCache {
   final val cachedTestClassifier = "cached-test"
   final val commitLength = 10

-  def cacheStore: ActionCacheStore = Def.cacheStore
-
   // TODO: cap with caffeine
   private[sbt] val analysisStore: mutable.Map[HashedVirtualFileRef, CompileAnalysis] =
     mutable.Map.empty

@@ -69,51 +54,22 @@ object RemoteCache {
   // currently this is called twice so metabuild can call compile with a minimal setting
   private[sbt] def initializeRemoteCache(s: State): Unit =
     val outDir =
-      s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath())
+      s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath)
     Def._outputDirectory = Some(outDir)
-    val caches = s.get(BasicKeys.cacheStores)
-    caches match
-      case Some(xs) if xs.nonEmpty => Def._cacheStore = AggregateActionCacheStore(xs)
-      case _ =>
-        val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath()
-        Def._cacheStore = DiskActionCacheStore(tempDiskCache)
-
-  private[sbt] def getCachedAnalysis(ref: String): CompileAnalysis =
-    getCachedAnalysis(CacheImplicits.strToHashedVirtualFileRef(ref))
-  private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis =
-    analysisStore.getOrElseUpdate(
-      ref, {
-        val vfs = cacheStore.getBlobs(ref :: Nil)
-        if vfs.nonEmpty then
-          val outputDirectory = Def.cacheConfiguration.outputDirectory
-          cacheStore.syncBlobs(vfs, outputDirectory).headOption match
-            case Some(file) => FileAnalysisStore.binary(file.toFile()).get.get.getAnalysis
-            case None => Analysis.empty
-        else Analysis.empty
-      }
-    )
-
-  private[sbt] val tempConverter: MappedFileConverter = MappedFileConverter.empty
-  private[sbt] def postAnalysis(analysis: CompileAnalysis): Option[HashedVirtualFileRef] =
-    IO.withTemporaryFile("analysis", ".tmp", true): file =>
-      val output = CompileOutput.empty
-      val option = MiniOptions.of(Array(), Array(), Array())
-      val setup = MiniSetup.of(
-        output,
-        option,
-        "",
-        xsbti.compile.CompileOrder.Mixed,
-        false,
-        Array()
-      )
-      FileAnalysisStore.binary(file).set(AnalysisContents.create(analysis, setup))
-      val vf = tempConverter.toVirtualFile(file.toPath)
-      val refs = cacheStore.putBlobs(vf :: Nil)
-      refs.headOption match
-        case Some(ref) =>
-          analysisStore(ref) = analysis
-          Some(ref)
-        case None => None
+    def defaultCache =
+      val fileConverter = s
+        .get(Keys.fileConverter.key)
+        .getOrElse {
+          MappedFileConverter(
+            Defaults.getRootPaths(outDir, s.configuration),
+            allowMachinePath = true
+          )
+        }
+      DiskActionCacheStore((s.baseDir / "target" / "bootcache").toPath, fileConverter)
+    Def._cacheStore = s
+      .get(BasicKeys.cacheStores)
+      .collect { case xs if xs.nonEmpty => AggregateActionCacheStore(xs) }
+      .getOrElse(defaultCache)

   private[sbt] def artifactToStr(art: Artifact): String = {
     import LibraryManagementCodec._

@@ -154,7 +110,7 @@ object RemoteCache {
     },
     cacheStores := {
       List(
-        DiskActionCacheStore(localCacheDirectory.value.toPath())
+        DiskActionCacheStore(localCacheDirectory.value.toPath(), fileConverter.value)
      )
     },
   )

@@ -562,10 +518,10 @@ object RemoteCache {
       key: SettingKey[A],
       pkgTasks: Seq[TaskKey[HashedVirtualFileRef]]
   ): Def.Initialize[Seq[A]] =
-    (Classpaths.forallIn(key, pkgTasks) zipWith
-      Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) =>
-      a
-    })
+    Classpaths
+      .forallIn(key, pkgTasks)
+      .zipWith(Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))
+      .apply(_.zip(_).collect { case (a, true) => a })

   private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] =
     inputs.toVector map { case (_, stamp0) =>
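
This hunk is a pure style change from symbolic infix application to explicit method calls; the underlying collect-by-flag pattern is unchanged. A plain-collections illustration of that pattern:

    val keys = Seq("a", "b", "c")
    val enabled = Seq(true, false, true)
    val kept = keys.zip(enabled).collect { case (k, true) => k } // Seq("a", "c")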

@@ -21,6 +21,7 @@ final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) {
   def description: String =
     key.tag match
       case KeyTag.Task(typeArg) => s"Task: $typeArg"
+      case KeyTag.SeqTask(typeArg) => s"Task: Seq[$typeArg]"
       case KeyTag.InputTask(typeArg) => s"Input task: $typeArg"
       case KeyTag.Setting(typeArg) => s"Setting: $typeArg = $value"
 }

@@ -15,6 +15,7 @@ import Def.Setting
 import sbt.io.Hash
 import sbt.internal.util.Attributed
 import sbt.internal.inc.ReflectUtilities
+import xsbti.FileConverter

 trait BuildDef {
   def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects

@@ -72,10 +73,9 @@ private[sbt] object BuildDef {
     autoGeneratedProject := true
   )

-  def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] =
-    in.flatMap: a =>
-      a.metadata
-        .get(Keys.analysis)
-        .map: str =>
-          RemoteCache.getCachedAnalysis(str)
+  def analyzed(
+      in: Seq[Attributed[_]],
+      converter: FileConverter
+  ): Seq[xsbti.compile.CompileAnalysis] =
+    in.flatMap(a => Defaults.extractAnalysis(a.metadata, converter))
 }

@@ -15,15 +15,13 @@ import sbt.Keys._
 import sbt.nio.Keys._
 import sbt.nio.file.{ Glob, RecursiveGlob }
 import sbt.Def.Initialize
-import sbt.internal.inc.Analysis
 import sbt.internal.inc.JavaInterfaceUtil._
 import sbt.internal.util.{ Attributed, Dag, Settings }
 import sbt.librarymanagement.{ Configuration, TrackLevel }
 import sbt.librarymanagement.Configurations.names
 import sbt.std.TaskExtra._
 import sbt.util._
 import scala.jdk.CollectionConverters.*
-import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
+import xsbti.{ HashedVirtualFileRef, VirtualFile, VirtualFileRef }
 import xsbti.compile.CompileAnalysis

 private[sbt] object ClasspathImpl {

@@ -38,10 +36,13 @@ private[sbt] object ClasspathImpl {
       val config = configuration.value
       val products = pickleProducts.value
       val analysis = compileEarly.value
-      val xs = products map { _ -> analysis }
+      val converter = fileConverter.value
+      val analysisFile = converter.toVirtualFile(earlyCompileAnalysisFile.value.toPath)
+
+      val xs = products.map(_ -> analysis)
       for (f, analysis) <- xs
       yield APIMappings
-        .store(analyzed(f, analysis), apiURL.value)
+        .store(analyzed(f, analysisFile), apiURL.value)
         .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
         .put(Keys.configurationStr, config.name)
     else exportedProducts.value

@@ -55,7 +56,7 @@ private[sbt] object ClasspathImpl {
       val config = configuration.value
       for (f, analysis) <- trackedExportedProductsImplTask(track).value
       yield APIMappings
-        .store(analyzed[HashedVirtualFileRef](f, analysis), apiURL.value)
+        .store(analyzed(f, analysis), apiURL.value)
         .put(Keys.artifactStr, RemoteCache.artifactToStr(art))
         .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
         .put(Keys.configurationStr, config.name)

@@ -67,7 +68,6 @@ private[sbt] object ClasspathImpl {
       val art = (packageBin / artifact).value
       val module = projectID.value
       val config = configuration.value
-      val converter = fileConverter.value
       for (f, analysis) <- trackedJarProductsImplTask(track).value
       yield APIMappings
         .store(analyzed(f, analysis), apiURL.value)

@@ -78,7 +78,7 @@ private[sbt] object ClasspathImpl {

   private[this] def trackedExportedProductsImplTask(
       track: TrackLevel
-  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
+  ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
     Def.taskIf {
       if {
        val _ = (packageBin / dynamicDependency).value

@@ -89,44 +89,38 @@ private[sbt] object ClasspathImpl {

   private[this] def trackedNonJarProductsImplTask(
       track: TrackLevel
-  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
-    (Def
+  ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
+    Def
       .task {
         val dirs = productDirectories.value
         val view = fileTreeView.value
         (TrackLevel.intersection(track, exportToInternal.value), dirs, view)
-      })
+      }
       .flatMapTask {
         case (TrackLevel.TrackAlways, _, _) =>
           Def.task {
             val converter = fileConverter.value
-            val a = compile.value
-            products.value
-              .map { x => converter.toVirtualFile(x.toPath()) }
-              .map { (_, a) }
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile))
           }
         case (TrackLevel.TrackIfMissing, dirs, view)
             if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty =>
           Def.task {
             val converter = fileConverter.value
-            val a = compile.value
-            products.value
-              .map { x => converter.toVirtualFile(x.toPath()) }
-              .map { (_, a) }
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile))
          }
         case (_, dirs, _) =>
           Def.task {
             val converter = fileConverter.value
-            val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
-            dirs
-              .map { x => converter.toVirtualFile(x.toPath()) }
-              .map(_ -> analysis)
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            dirs.map { x => (converter.toVirtualFile(x.toPath()), analysisFile) }
           }
       }

   private[this] def trackedJarProductsImplTask(
       track: TrackLevel
-  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
+  ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
     (Def
       .task {
         val converter = fileConverter.value

@@ -137,23 +131,21 @@ private[sbt] object ClasspathImpl {
       .flatMapTask {
         case (TrackLevel.TrackAlways, _, _) =>
           Def.task {
-            Seq((packageBin.value, compile.value))
+            val converter = fileConverter.value
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            Seq((packageBin.value, analysisFile))
           }
         case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists =>
           Def.task {
-            Seq((packageBin.value, compile.value))
+            val converter = fileConverter.value
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            Seq((packageBin.value, analysisFile))
           }
         case (_, vf, _) =>
           Def.task {
             val converter = fileConverter.value
-            val analysisOpt = previousCompile.value.analysis.toOption
-            Seq(vf).map(converter.toPath).map(converter.toVirtualFile).map { x =>
-              (
-                x,
-                if (analysisOpt.isDefined) analysisOpt.get
-                else Analysis.empty
-              )
-            }
+            val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
+            Seq(vf).map { x => (converter.toVirtualFile(x), analysisFile) }
           }
       }

@@ -351,13 +343,8 @@ private[sbt] object ClasspathImpl {
     (tasks.toSeq.join).map(_.flatten.distinct)
   }

-  def analyzed[A](data: A, analysis: CompileAnalysis) =
-    RemoteCache.postAnalysis(analysis) match
-      case Some(ref) =>
-        Attributed
-          .blank(data)
-          .put(Keys.analysis, CacheImplicits.hashedVirtualFileRefToStr(ref))
-      case None => Attributed.blank(data)
+  def analyzed[A](data: A, analysisFile: VirtualFile): Attributed[A] =
+    Attributed.blank(data).put(Keys.analysis, analysisFile.id)

   def interSort(
       projectRef: ProjectRef,

@@ -24,6 +24,7 @@ import sbt.nio.file.Glob.{ GlobOps }
 import sbt.util.Level
 import sjsonnew.JsonFormat
 import scala.annotation.nowarn
+import xsbti.{ PathBasedFile, VirtualFileRef }

 private[sbt] object Clean {

@@ -142,8 +143,13 @@ private[sbt] object Clean {
   private[sbt] object ToSeqPath:
     given identitySeqPath: ToSeqPath[Seq[Path]] = identity[Seq[Path]](_)
     given seqFile: ToSeqPath[Seq[File]] = _.map(_.toPath)
+    given virtualFileRefSeq: ToSeqPath[Seq[VirtualFileRef]] =
+      _.collect { case f: PathBasedFile => f.toPath }
     given path: ToSeqPath[Path] = _ :: Nil
     given file: ToSeqPath[File] = _.toPath :: Nil
+    given virtualFileRef: ToSeqPath[VirtualFileRef] =
+      case f: PathBasedFile => Seq(f.toPath)
+      case _ => Nil
   end ToSeqPath

   private[this] implicit class ToSeqPathOps[T](val t: T) extends AnyVal {
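
The new givens let clean-related tasks accept VirtualFileRef outputs: only refs backed by a real path (PathBasedFile) yield something to delete, everything else cleans to nothing. ToSeqPath behaves like a SAM typeclass from T to Seq[Path]; a minimal sketch of that shape (illustrative, not the real declaration):

    import java.nio.file.Path

    trait ToSeqPath[T]:
      def apply(t: T): Seq[Path]

    def pathsOf[T](t: T)(using ev: ToSeqPath[T]): Seq[Path] = ev(t)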

@@ -1217,6 +1217,7 @@ private[sbt] object Load {

     // Default sbt files to read, if needed
     lazy val defaultSbtFiles = configurationSources(projectBase)
       .filterNot(_.isHidden)
+      .map(_.getAbsoluteFile().toPath)
       .map(converter.toVirtualFile)
     lazy val sbtFiles = defaultSbtFiles ++ extraSbtFiles

@@ -1264,11 +1265,9 @@ private[sbt] object Load {
       // case sf: SbtFiles =>
       //   sf.files
       //     .map(f => IO.resolve(projectBase, f))
      //     .filterNot(_.isHidden)
      //     .map(_.toPath)
       case sf: DefaultSbtFiles =>
         sbtFiles.filter(sf.include)
-        // .filterNot(_.isHidden)
-        // .map(_.toPath)
       case q: Sequence =>
         q.sequence.foldLeft(Seq.empty[VirtualFile]) { (b, add) =>

@@ -105,7 +105,7 @@ object PluginDiscovery:
   ): Seq[String] =
     (
       binaryModuleNames(classpath, converter, loader, resourceName) ++
-        (analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*)))
+        analyzed(classpath, converter).flatMap(a => sourceModuleNames(a, subclasses: _*))
     ).distinct

   /** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. */

@@ -136,9 +136,10 @@ object PluginDiscovery:
       .getResources(resourceName)
       .asScala
       .toSeq
-      .filter(onClasspath(classpath, converter)) flatMap { u =>
-      IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty)
-    }
+      .filter(onClasspath(classpath, converter))
+      .flatMap { u =>
+        IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty)
+      }

   /** Returns `true` if `url` is an entry in `classpath`. */
   def onClasspath(classpath: Def.Classpath, converter: FileConverter)(url: URL): Boolean =

@@ -199,7 +199,7 @@ private[sbt] object Definition {
   }

   def collectAnalysesTask = Def.task {
-    val cacheFile: String = compileIncSetup.value.cacheFile.getAbsolutePath
+    val cacheFile: String = compileAnalysisFile.value.getAbsolutePath
     val useBinary = enableBinaryCompileAnalysis.value
     val s = state.value
     s.log.debug(s"analysis location ${cacheFile -> useBinary}")
@@ -16,6 +16,7 @@ import sbt.io.IO
 import sbt.nio.file.FileAttributes
 import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
 import xsbti.compile.analysis.{ Stamp => XStamp }
+import xsbti.VirtualFileRef

 /**
  * A trait that indicates what file stamping implementation should be used to track the state of
@@ -102,65 +103,49 @@ object FileStamp {
   private[sbt] final case class Error(exception: IOException) extends FileStamp

   object Formats {
-    implicit val seqPathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] {
-      override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = {
-        builder.beginArray()
-        obj.foreach { path =>
-          builder.writeString(path.toString)
-        }
-        builder.endArray()
-      }
-
-      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[Path] =
-        jsOpt match {
-          case Some(js) =>
-            val size = unbuilder.beginArray(js)
-            val res = (1 to size) map { _ =>
-              Paths.get(unbuilder.readString(unbuilder.nextElement))
-            }
-            unbuilder.endArray()
-            res
-          case None =>
-            deserializationError("Expected JsArray but found None")
-        }
-    }
-    implicit val seqFileJsonFormatter: JsonFormat[Seq[File]] = new JsonFormat[Seq[File]] {
-      override def write[J](obj: Seq[File], builder: Builder[J]): Unit = {
-        builder.beginArray()
-        obj.foreach { file =>
-          builder.writeString(file.toString)
-        }
-        builder.endArray()
-      }
-
-      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[File] =
-        jsOpt match {
-          case Some(js) =>
-            val size = unbuilder.beginArray(js)
-            val res = (1 to size) map { _ =>
-              new File(unbuilder.readString(unbuilder.nextElement))
-            }
-            unbuilder.endArray()
-            res
-          case None =>
-            deserializationError("Expected JsArray but found None")
-        }
-    }
-    implicit val fileJsonFormatter: JsonFormat[File] = new JsonFormat[File] {
-      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): File =
-        seqFileJsonFormatter.read(jsOpt, unbuilder).head
-
-      override def write[J](obj: File, builder: Builder[J]): Unit =
-        seqFileJsonFormatter.write(obj :: Nil, builder)
-    }
-    implicit val pathJsonFormatter: JsonFormat[Path] = new JsonFormat[Path] {
-      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Path =
-        seqPathJsonFormatter.read(jsOpt, unbuilder).head
-
-      override def write[J](obj: Path, builder: Builder[J]): Unit =
-        seqPathJsonFormatter.write(obj :: Nil, builder)
-    }
+    implicit val seqPathJsonFormatter: JsonFormat[Seq[Path]] =
+      asStringArray(_.toString, Paths.get(_))
+    implicit val seqFileJsonFormatter: JsonFormat[Seq[File]] =
+      asStringArray(_.toString, new File(_))
+    implicit val seqVirtualFileRefJsonFormatter: JsonFormat[Seq[VirtualFileRef]] =
+      asStringArray(_.id, VirtualFileRef.of)
+
+    implicit val fileJsonFormatter: JsonFormat[File] = fromSeqJsonFormat[File]
+    implicit val pathJsonFormatter: JsonFormat[Path] = fromSeqJsonFormat[Path]
+    implicit val virtualFileRefJsonFormatter: JsonFormat[VirtualFileRef] =
+      fromSeqJsonFormat[VirtualFileRef]
+
+    private def asStringArray[T](toStr: T => String, fromStr: String => T): JsonFormat[Seq[T]] =
+      new JsonFormat[Seq[T]] {
+        override def write[J](obj: Seq[T], builder: Builder[J]): Unit = {
+          builder.beginArray()
+          obj.foreach { x => builder.writeString(toStr(x)) }
+          builder.endArray()
+        }
+
+        override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[T] =
+          jsOpt match {
+            case Some(js) =>
+              val size = unbuilder.beginArray(js)
+              val res = (1 to size) map { _ =>
+                fromStr(unbuilder.readString(unbuilder.nextElement))
+              }
+              unbuilder.endArray()
+              res
+            case None =>
+              deserializationError("Expected JsArray but found None")
+          }
+      }
+    private def fromSeqJsonFormat[T](using seqJsonFormat: JsonFormat[Seq[T]]): JsonFormat[T] =
+      new JsonFormat[T] {
+        override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): T =
+          seqJsonFormat.read(jsOpt, unbuilder).head
+
+        override def write[J](obj: T, builder: Builder[J]): Unit =
+          seqJsonFormat.write(obj :: Nil, builder)
+      }
     implicit val seqPathFileStampJsonFormatter: JsonFormat[Seq[(Path, FileStamp)]] =
       new JsonFormat[Seq[(Path, FileStamp)]] {
         override def write[J](obj: Seq[(Path, FileStamp)], builder: Builder[J]): Unit = {
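Note: the refactor collapses four hand-written formatters into two generic helpers — `asStringArray` serializes any `Seq[T]` as a JSON array of strings, and `fromSeqJsonFormat` derives a single-value format by wrapping in a one-element list. A rough sketch of the same string round-trip with sjson-new's built-in string support (independent of the diff's private helpers; path values are placeholders):

import sjsonnew.BasicJsonProtocol.*
import sjsonnew.support.scalajson.unsafe.Converter
import java.nio.file.{ Path, Paths }

// Seq[Path] as a JSON array of strings, mirroring asStringArray's shape.
val paths: Seq[String] = Seq(Paths.get("a.txt"), Paths.get("b/c.txt")).map(_.toString)
val json = Converter.toJsonUnsafe(paths)                                  // ["a.txt","b/c.txt"]
val back: Seq[Path] = Converter.fromJsonUnsafe[Seq[String]](json).map(Paths.get(_))
assert(back.map(_.toString) == paths)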
@@ -25,6 +25,8 @@ import sjsonnew.JsonFormat

 import scala.annotation.nowarn
 import scala.collection.immutable.VectorBuilder
+import java.io.File
+import xsbti.VirtualFileRef

 private[sbt] object Settings {
   private[sbt] def inject(transformed: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
@@ -68,45 +70,55 @@ private[sbt] object Settings {
       setting: Def.Setting[_],
       fileOutputScopes: Set[Scope]
   ): List[Def.Setting[_]] = {
-    val attributeKey = setting.key.key
-    attributeKey.tag match {
+    setting.key.key.tag match {
       case tag: KeyTag.Task[?] =>
-        def default: List[Def.Setting[_]] = {
-          val scope = setting.key.scope.copy(task = Select(attributeKey))
-          if (fileOutputScopes.contains(scope)) {
-            val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key
-            val scopedKey = Keys.dynamicFileOutputs in (sk.scope in sk.key)
-            addTaskDefinition {
-              val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil))
-              Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
-            } :: allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(scope) :: Nil
-          } else Nil
-        }
-        def mkSetting[T: JsonFormat: ToSeqPath]: List[Def.Setting[_]] = {
-          val sk = setting.asInstanceOf[Def.Setting[Task[T]]].key
-          val taskKey = TaskKey(sk.key) in sk.scope
-          // We create a previous reference so that clean automatically works without the
-          // user having to explicitly call previous anywhere.
-          val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) =>
-            t.map(implicitly[ToSeqPath[T]].apply)
-          }
-          val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key)
-          addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) ::
-            outputsAndStamps(taskKey)
-        }
-        if seqClass.isAssignableFrom(tag.typeArg) then
-          // TODO fix this: maybe using the taskKey macro to convey the information
-          // t.typeArguments match {
-          //   case p :: Nil if pathClass.isAssignableFrom(p.runtimeClass) => mkSetting[Seq[Path]]
-          //   case _ => default
-          // }
-          default
-        else if pathClass.isAssignableFrom(tag.typeArg) then mkSetting[Path]
-        else default
+        if pathClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[Path](setting)
+        else if fileClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[File](setting)
+        else if virtualFileRefClass.isAssignableFrom(tag.typeArg) then
+          addOutputAndStampTasks[VirtualFileRef](setting)
+        else addDefaultTasks(setting, fileOutputScopes)
+      case tag: KeyTag.SeqTask[?] =>
+        if pathClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[Seq[Path]](setting)
+        else if fileClass.isAssignableFrom(tag.typeArg) then
+          addOutputAndStampTasks[Seq[File]](setting)
+        else if virtualFileRefClass.isAssignableFrom(tag.typeArg) then
+          addOutputAndStampTasks[Seq[VirtualFileRef]](setting)
+        else addDefaultTasks(setting, fileOutputScopes)
       case _ => Nil
     }
   }

+  @nowarn
+  private def addDefaultTasks(
+      setting: Def.Setting[_],
+      fileOutputScopes: Set[Scope]
+  ): List[Def.Setting[_]] = {
+    val scope = setting.key.scope.copy(task = Select(setting.key.key))
+    if (fileOutputScopes.contains(scope)) {
+      val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key
+      val scopedKey = Keys.dynamicFileOutputs in (sk.scope in sk.key)
+      val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil))
+      addTaskDefinition(Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)) ::
+        allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(scope) :: Nil
+    } else Nil
+  }
+
+  @nowarn
+  private def addOutputAndStampTasks[T: JsonFormat: ToSeqPath](
+      setting: Def.Setting[_]
+  ): List[Def.Setting[_]] = {
+    val sk = setting.asInstanceOf[Def.Setting[Task[T]]].key
+    val taskKey = TaskKey(sk.key) in sk.scope
+    // We create a previous reference so that clean automatically works without the
+    // user having to explicitly call previous anywhere.
+    val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) =>
+      t.map(implicitly[ToSeqPath[T]].apply)
+    }
+    val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key)
+    addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) ::
+      outputsAndStamps(taskKey)
+  }
+
   private[sbt] val inject: Def.ScopedKey[_] => Seq[Def.Setting[_]] = scopedKey =>
     scopedKey.key match {
       case transitiveDynamicInputs.key =>
@@ -161,7 +173,9 @@ private[sbt] object Settings {
   }

   private[this] val seqClass = classOf[Seq[_]]
-  private[this] val pathClass = classOf[java.nio.file.Path]
+  private[this] val pathClass = classOf[Path]
+  private val fileClass = classOf[File]
+  private val virtualFileRefClass = classOf[VirtualFileRef]

   /**
    * Returns all of the paths for the regular files described by a glob. Directories and hidden
@@ -28,8 +28,6 @@ object IvyPlugin extends AutoPlugin {

   override lazy val globalSettings: Seq[Setting[_]] =
     Defaults.globalIvyCore
   override lazy val buildSettings: Seq[Setting[_]] =
     Defaults.buildLevelIvySettings
   override lazy val projectSettings: Seq[Setting[_]] =
     Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings
@@ -32,7 +32,7 @@ object HouseRulesPlugin extends AutoPlugin {
     scalacOptions ++= "-Ywarn-unused-import".ifScala2x(v => 11 <= v && v <= 12).value.toList,
     scalacOptions ++= {
       scalaPartV.value match {
-        case Some((3, _)) => Seq("-Wunused:imports,implicits,nowarn")
+        case Some((3, _)) => Seq("-Wunused:imports,implicits") // ,nowarn
         case Some((2, _)) => Seq("-Ywarn-unused:-privates,-locals,-explicits")
         case _ => Seq.empty
       }
@@ -295,6 +295,10 @@ trait Import {
   type IvyScala = sbt.librarymanagement.ScalaModuleInfo
   val JCenterRepository = sbt.librarymanagement.Resolver.JCenterRepository
   val JavaNet2Repository = sbt.librarymanagement.Resolver.JavaNet2Repository
+  import sbt.librarymanagement.{ InclExclRule, DependencyBuilders }
+  given Conversion[String, InclExclRule] = InclExclRule.stringToExclusionRule
+  given Conversion[DependencyBuilders.OrganizationArtifactName, InclExclRule] =
+    InclExclRule.organizationArtifactNameToExclusionRule

   // todo: fix
   // val License = sbt.librarymanagement.License
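Note: these `given Conversion`s let exclusion rules be written from a bare organization string or an `organization % name` pair in Scala 3 builds. A hedged usage sketch (the coordinates are placeholders, and this assumes the conversions are applied at `excludeAll` call sites as in sbt 1 builds):

// build.sbt sketch; coordinates invented for illustration.
libraryDependencies += ("com.example" %% "client" % "1.0.0")
  .excludeAll("org.slf4j", "commons-logging" % "commons-logging")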
@@ -3,7 +3,8 @@ sbtPlugin := true
 val copyOutputDir = taskKey[Unit]("Copies the compiled classes to a root-level directory")

 copyOutputDir := {
-  val cd = (Compile / classDirectory).value
-  val to = baseDirectory.value / "out spaced"
-  IO.copyDirectory(cd, to)
+  val _ = (Compile / products).value
+  val cd = (Compile / classDirectory).value
+  val to = baseDirectory.value / "out spaced"
+  IO.copyDirectory(cd, to)
 }
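Note: reading `(Compile / products).value` (and discarding it) forces compilation before the copy runs; `classDirectory` alone is just a path setting and carries no task dependency, which is why the scripted test below can drop its explicit `> compile` step. The same idiom in any custom task, as a sketch (key name invented):

// Sketch: depend on compile output before touching classDirectory.
val stageClasses = taskKey[Unit]("Copy classes after forcing compilation")
stageClasses := {
  val _ = (Compile / products).value   // forces Compile / compile to run first
  IO.copyDirectory((Compile / classDirectory).value, target.value / "staged")
}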
@@ -1,5 +1,3 @@
-# compiles a new State => State instance
-> compile
 # puts the classes in a stable location (out spaced/ to test escaping)
 > copyOutputDir
@@ -1,5 +1,7 @@
 import sbt.nio.file.Glob

 name := "clean-managed"
+scalaVersion := "3.3.1"
 Compile / sourceGenerators += {
   Def.task {
     val files = Seq(sourceManaged.value / "foo.txt", sourceManaged.value / "bar.txt")
@@ -1,6 +0,0 @@
-> compile
-$ exists target/scala-2.12/src_managed/foo.txt target/scala-2.12/src_managed/bar.txt
-
-> clean
-$ absent target/scala-2.12/src_managed/foo.txt
-$ exists target/scala-2.12/src_managed/bar.txt
@@ -0,0 +1,6 @@
+> compile
+$ exists target/out/jvm/scala-3.3.1/clean-managed/src_managed/foo.txt target/out/jvm/scala-3.3.1/clean-managed/src_managed/bar.txt
+
+> clean
+$ absent target/out/jvm/scala-3.3.1/clean-managed/src_managed/foo.txt
+$ exists target/out/jvm/scala-3.3.1/clean-managed/src_managed/bar.txt
@@ -1,5 +1,7 @@
 import sbt.nio.file.Glob

+ThisBuild / scalaVersion := "2.12.17"
+Global / cacheStores := Seq.empty
 name := "compile-clean"
-scalaVersion := "2.12.17"
 Compile / cleanKeepGlobs +=
-  Glob((Compile / compile / classDirectory).value, "X.class")
+  Glob(target.value) / RecursiveGlob / "X.class"
@@ -1,22 +0,0 @@
-$ touch target/cant-touch-this
-
-> Test/compile
-$ exists target/scala-2.12/classes/A.class
-$ exists target/scala-2.12/test-classes/B.class
-
-> Test/clean
-$ exists target/cant-touch-this
-# it should clean only compile classes
-$ exists target/scala-2.12/classes/A.class
-$ exists target/scala-2.12/classes/X.class
-$ absent target/scala-2.12/test-classes/B.class
-
-# compiling everything again, but now cleaning only compile classes
-> Test/compile
-> Compile/clean
-$ exists target/cant-touch-this
-# it should clean only compile classes
-$ absent target/scala-2.12/classes/A.class
-$ exists target/scala-2.12/test-classes/B.class
-# and X has to be kept, because of the cleanKeepFiles override
-$ exists target/scala-2.12/classes/X.class
@@ -0,0 +1,17 @@
+$ touch target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this
+
+> Test/compile
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/test-backend/B.class
+
+> Test/clean
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
+$ absent target/out/jvm/scala-2.12.17/compile-clean/test-backend/B.class
+
+> Compile/clean
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this
+$ absent target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
@@ -1,8 +1,6 @@
 ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always

 ThisBuild / evictionErrorLevel := Level.Info
 libraryDependencies += "org.scala-sbt" % "sbt" % sbtVersion.value

 lazy val expectErrorNotCrash = taskKey[Unit]("Ensures that sbt properly set types on Trees so that the compiler doesn't crash on a bad reference to .value, but gives a proper error instead.")

 expectErrorNotCrash := {
@@ -19,9 +19,9 @@
 ## for command cross building you do need crossScalaVerions on root
 > set root/crossScalaVersions := Seq("2.12.17", "2.13.1")
 > + build
-# $ exists foo/target/scala-2.12
-# $ exists foo/target/scala-2.13
-# $ exists bar/target/scala-2.12
-# $ exists bar/target/scala-2.13
-# $ exists client/target/scala-2.12
-# $ exists client/target/scala-2.13
+$ exists target/out/jvm/scala-2.12.17/foo
+$ exists target/out/jvm/scala-2.13.1/foo
+$ exists target/out/jvm/scala-2.12.17/bar
+$ exists target/out/jvm/scala-2.13.1/bar
+$ exists target/out/jvm/scala-2.12.17/client
+$ exists target/out/jvm/scala-2.13.1/client
@@ -10,5 +10,5 @@ lazy val root = (project in file("."))
       file :: Nil
     },
     Compile / sourceGenerators += buildInfo,
-    Compile / sourceGenerators += Def.task { Nil },
+    Compile / sourceGenerators += Def.task { Seq.empty[File] },
   )
@@ -1,2 +0,0 @@
-> compile
-$ exists target/scala-2.12/src_managed/BuildInfo.scala

@@ -0,0 +1,2 @@
+> compile
+$ exists target/out/jvm/scala-2.12.12/root/src_managed/BuildInfo.scala
@@ -4,6 +4,8 @@ val taskB = taskKey[File]("")
 val taskE = taskKey[File]("")
 val taskF = taskKey[File]("")

+scalaVersion := "3.3.1"
+name := "task-map"
 taskA := touch(target.value / "a")
 taskB := touch(target.value / "b")
@@ -14,13 +16,13 @@ taskF := touch(target.value / "f")
 // means "a" will be triggered by "b"
 // said differently, invoking "b" will run "b" and then run "a"

-taskA := (taskA triggeredBy taskB).value
+taskA := taskA.triggeredBy(taskB).value

 // e <<= e runBefore f
 // means "e" will be run before running "f"
 // said differently, invoking "f" will run "e" and then run "f"

-taskE := (taskE runBefore taskF).value
+taskE := taskE.runBefore(taskF).value

 // test utils
 def touch(f: File): File = { IO.touch(f); f }
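Note: as the comments in the hunk spell out, `triggeredBy` and `runBefore` only attach trigger metadata; assigning the result back to the same key is what registers it. A self-contained build.sbt sketch of the same pattern, with invented key names:

// Sketch: `ping` runs automatically whenever `pong` is invoked.
val ping = taskKey[Unit]("")
val pong = taskKey[Unit]("")
ping := streams.value.log.info("ping")
pong := streams.value.log.info("pong")
ping := ping.triggeredBy(pong).value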
@@ -1,7 +0,0 @@
-> taskB
-$ exists target/b
-$ exists target/a
-
-> taskF
-$ exists target/e
-$ exists target/f

@@ -0,0 +1,7 @@
+> taskB
+$ exists target/out/jvm/scala-3.3.1/task-map/b
+$ exists target/out/jvm/scala-3.3.1/task-map/a
+
+> taskF
+$ exists target/out/jvm/scala-3.3.1/task-map/e
+$ exists target/out/jvm/scala-3.3.1/task-map/f
@@ -12,4 +12,4 @@ $ copy-file changes/updated-test.txt src/test/resources/bar.txt

 $ copy-file changes/UpdatedResourceTest.scala src/test/scala/scripted/ResourceTest.scala

 > test
 > test
@@ -1,5 +1,6 @@
-import Configurations.{CompilerPlugin => CPlugin}
-
-lazy val use = project.dependsOn(RootProject(file("def")) % CPlugin).settings(
-  autoCompilerPlugins := true
-)
+lazy val use = project
+  .dependsOn(RootProject(file("def")) % Configurations.CompilerPlugin)
+  .settings(
+    scalaVersion := "2.12.17",
+    autoCompilerPlugins := true
+  )
@@ -4,6 +4,7 @@ name := "demo-compiler-plugin"

 version := "0.1"

+scalaVersion := "2.12.17"
 libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided"

 exportJars := true
@@ -6,15 +6,17 @@ lazy val root = (project in file(".")).
   settings(
     resolvers ++= Seq(local, Resolver.sonatypeRepo("releases"), Resolver.sonatypeRepo("snapshots")),
     InputKey[Unit]("checkPom") := {
+      val converter = fileConverter.value
       val result = spaceDelimited("<args>").parsed
-      checkPomRepositories(makePom.value, result, streams.value)
+      val pomFile = converter.toPath(makePom.value)
+      checkPomRepositories(pomFile.toFile, result, streams.value)
     },
     makePomConfiguration := {
       val conf = makePomConfiguration.value
       conf
         .withFilterRepositories(pomIncludeRepository(baseDirectory.value, conf.filterRepositories))
     },
-    ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value
+    ivyPaths := baseDirectory(dir => IvyPaths(dir.toString, Some((dir / "ivy-home").toString))).value
   )

val local = "local-maven-repo" at "file://" + (Path.userHome / ".m2" /"repository").absolutePath
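Note: two sbt 2 migrations show up in this hunk — `makePom` now yields a file reference that must be resolved through `fileConverter`, and `IvyPaths` takes plain strings instead of `File`s. A sketch of the converter idiom inside any task (the key name is invented):

// Sketch: resolving a virtual file reference produced by another task.
val printPom = taskKey[Unit]("Print the generated POM location")
printPom := {
  val converter = fileConverter.value
  val pom = converter.toPath(makePom.value) // a reference, not a java.io.File
  streams.value.log.info(s"POM written to ${pom.toFile.getAbsolutePath}")
}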
@@ -1,10 +0,0 @@
-# write the default pom. The only repositories should be Scala Tools Releases and Snapshots
-> checkPom https://scala-ci.typesafe.com/artifactory/scala-integration/ https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/ https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/
-
-# include file:// repositories. The generated repositories section should include the local Maven repository as well
-$ touch repo.all
-> checkPom https://scala-ci.typesafe.com/artifactory/scala-integration/ https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/ https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ file://*.m2/repository/
-
-$ delete repo.all
-$ touch repo.none
-> checkPom

@@ -0,0 +1,10 @@
+# write the default pom. The only repositories should be Scala Tools Releases and Snapshots
+> checkPom https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/
+
+# include file:// repositories. The generated repositories section should include the local Maven repository as well
+$ touch repo.all
+> checkPom https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ file://*.m2/repository/
+
+$ delete repo.all
+$ touch repo.none
+> checkPom
@@ -1,23 +1,18 @@
 libraryDependencies += "org.json4s" %% "json4s-native" % "[3.3.0,3.5.0)"

+scalaVersion := "2.12.17"

 lazy val actualVersionCheck = taskKey[Unit]("")

 actualVersionCheck := {
   val log = streams.value.log

   val configReport = update.value
     .configuration(Compile)
     .getOrElse {
       sys.error("compile configuration not found in update report")
     }

   val modules = configReport
     .modules
     .map(_.module)

   assert(modules.nonEmpty)
   assert(modules.exists(_.name.startsWith("json4s-native")))

@@ -25,7 +20,6 @@ actualVersionCheck := {
     val v = m.revision
     v.contains("[") || v.contains("]") || v.contains("(") || v.contains(")")
   }

   if (wrongModules.nonEmpty) {
     log.error("Found unexpected intervals in revisions")
     for (m <- wrongModules)
@@ -3,6 +3,7 @@
 val parser = token(Space ~> ( ("exists" ^^^ true) | ("absent" ^^^ false) ) )
 InputKey[Unit]("checkOutput") := {
   val shouldExist = parser.parsed
+  val _ = (Compile / products).value
   val dir = (Compile / classDirectory).value
   if((dir / "Anon.class").exists != shouldExist)
     sys.error("Top level class incorrect" )
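Note: `parser` above is a standard sbt input-task parser — `token(Space ~> ...)` consumes the separating space and `^^^` maps each literal word to a constant. A minimal input task built the same way (the task name is invented):

import sbt.complete.DefaultParsers.*

// Sketch: an input task accepting exactly `on` or `off` after the task name.
val toggle = inputKey[Unit]("Parse an on/off flag")
toggle := {
  val flag: Boolean = token(Space ~> (("on" ^^^ true) | ("off" ^^^ false))).parsed
  streams.value.log.info(s"flag = $flag")
}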
@@ -1,6 +1,4 @@
 $ copy-file changes/Anon.java src/main/java/Anon.java
-> compile
 > checkOutput exists
 $ delete src/main/java/Anon.java
-> compile
 > checkOutput absent
@@ -1,7 +1,7 @@
 val test123 = project in file(".") enablePlugins TestP settings(
   Compile / resourceGenerators += Def.task {
-    streams.value.log info "resource generated in settings"
-    Nil
+    streams.value.log.info("resource generated in settings")
+    Seq.empty[File]
   }
 )
@@ -3,8 +3,8 @@ import sbt._, Keys._
 object TestP extends AutoPlugin {
   override def projectSettings: Seq[Setting[_]] = Seq(
     Compile / resourceGenerators += Def.task {
-      streams.value.log info "resource generated in plugin"
-      Nil
+      streams.value.log.info("resource generated in plugin")
+      Seq.empty[File]
     }
   )
 }
@@ -1,3 +1,6 @@
 sbtPlugin := true

 name := "demo-plugin"
+
+// TODO fix doc task
+Compile / doc / sources := Seq.empty
@@ -24,22 +24,19 @@ def checkExtra =
     s
   }

-def addExtra1(s: State, extra: Seq[File]): State =
-  {
-    val cs = s.configuration.provider.components()
-    val copied = cs.addToComponent("extra", extra.toArray)
-    if(copied) s.reload else s
-  }
+def addExtra1(s: State, extra: Seq[File]): State = {
+  val cs = s.configuration.provider.components()
+  val copied = cs.addToComponent("extra", extra.toArray)
+  if(copied) s.reload else s
+}

 def addExtra2(s: State, extra: Seq[File]): State = {
   val reload = State.defaultReload(s)
   val currentID = reload.app
   val currentExtra = currentID.classpathExtra
   val newExtra = (currentExtra ++ extra).distinct
-  if(newExtra.length == currentExtra.length)
-    s
-  else
-  {
+  if(newExtra.length == currentExtra.length) s
+  else {
     val newID = ApplicationID(currentID).copy(extra = extra)
     s.setNext(new State.Return(reload.copy(app = newID)))
   }
@@ -1,8 +1,8 @@
 object CheckLoader {
-    def main(args: Array[String]): Unit = apply()
-    def apply(): Unit = {
-        val loader = getClass.getClassLoader
-        val appLoader = ClassLoader.getSystemClassLoader
-        assert(loader eq appLoader, "Application classes not loaded in the system class loader")
-    }
+  def main(args: Array[String]): Unit = apply()
+  def apply(): Unit = {
+    val loader = getClass.getClassLoader
+    val appLoader = ClassLoader.getSystemClassLoader
+    assert(loader eq appLoader, "Application classes not loaded in the system class loader")
+  }
 }
@@ -1,8 +1,8 @@
 import org.scalacheck._

 object TestForked extends Properties("Forked loader") {
-    property("Loaded from application loader") = Prop.secure {
-        CheckLoader()
-        true
-    }
+  property("Loaded from application loader") = Prop.secure {
+    CheckLoader()
+    true
+  }
 }
@@ -1,3 +1,5 @@
+ThisBuild / scalaVersion := "2.12.17"
+
 lazy val main = project.settings(
   organization := "org.scala-sbt.testsuite.example",
   name := "has-main",

@@ -1 +0,0 @@
-> run

@@ -0,0 +1 @@
+> run
@@ -1,7 +1,9 @@
-TaskKey[Unit]("outputEmpty") := ((Configurations.Compile / classDirectory) map { outputDirectory =>
-  def classes = (outputDirectory ** "*.class").get()
-  if (!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else ()
-}).value
+TaskKey[Unit]("outputEmpty") := {
+  val c = fileConverter.value
+  val dir = c.toPath((Compile / backendOutput).value).toFile()
+  def classes = dir.**("*.class").get()
+  if (!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t"))
+}

 // apparently Travis CI stopped allowing long file names
 // it fails with the default setting of 255 characters so
@@ -11,19 +11,20 @@ lazy val root = (project in file("."))
     libraryDependencies += {
       "org.scala-lang" % "scala-compiler" % scalaVersion.value % OtherScala.name
     },
-    OtherScala / managedClasspath := Classpaths.managedJars(OtherScala, classpathTypes.value, update.value),
+    OtherScala / managedClasspath :=
+      Classpaths.managedJars(OtherScala, classpathTypes.value, update.value, fileConverter.value),

     // Hack in the scala instance
     scalaInstance := {
-      val rawJars = (OtherScala / managedClasspath).value.map(_.data)
+      val converter = fileConverter.value
+      val rawJars = (OtherScala / managedClasspath).value.map(c => converter.toPath(c.data).toFile)
       val scalaHome = (target.value / "scala-home")
-      def removeVersion(name: String): String =
-        name.replaceAll("\\-2.12.11", "")
+      val sv = scalaVersion.value
+      def removeVersion(name: String): String = name.replaceAll(s"\\-$sv", "")
       for(jar <- rawJars) {
-        val tjar = scalaHome / s"lib/${removeVersion(jar.getName)}"
-        IO.copyFile(jar, tjar)
+        IO.copyFile(jar, scalaHome / s"lib" / removeVersion(jar.getName))
       }
-      IO.listFiles(scalaHome).foreach(f => System.err.println(s" * $f}"))
+      IO.listFiles(scalaHome / "lib").foreach(f => System.err.println(s" * $f"))
       ScalaInstance(scalaHome, appConfiguration.value.provider.scalaProvider.launcher)
     },

@@ -1 +0,0 @@
-> test

@@ -0,0 +1 @@
+> test
@@ -10,20 +10,14 @@ val commonSettings = Seq(

 lazy val root = (project in file("."))
   .aggregate(sub1, sub2)
-  .settings(
-    commonSettings
-  )
+  .settings(commonSettings)

 lazy val rootRef = LocalProject("root")

 lazy val sub1 = project
   .dependsOn(rootRef)
-  .settings(
-    commonSettings
-  )
+  .settings(commonSettings)

 lazy val sub2 = project
   .dependsOn(rootRef)
-  .settings(
-    commonSettings
-  )
+  .settings(commonSettings)
@@ -1,3 +1,5 @@
+Global / cacheStores := Seq.empty
+
 val scalatest = "org.scalatest" %% "scalatest" % "3.0.5"
 ThisBuild / scalaVersion := "2.12.12"
@@ -9,7 +9,7 @@ $ copy-file changed/A.scala src/main/scala/A.scala
 > compile
 $ sleep 2000
 # Create is run. Delete is not since it doesn't have src/main dependency.
-> testQuick
+-> testQuick
 > testOnly Delete
 # Previous run of Create failed, re-run.
 > testQuick Create
@@ -254,7 +254,7 @@ class BuildServerTest extends AbstractServerTest {

   test("buildTarget/cleanCache") {
     def classFile = svr.baseDirectory.toPath.resolve(
-      "target/out/jvm/scala-2.13.8/runandtest/classes/main/Main.class"
+      "target/out/jvm/scala-2.13.8/runandtest/backend/main/Main.class"
     )
     val buildTarget = buildTargetUri("runAndTest", "Compile")
     compile(buildTarget, id = 43)
@@ -305,12 +305,12 @@ class BuildServerTest extends AbstractServerTest {
   test("workspace/reload: send diagnostic and respond with error") {
     // write an other-build.sbt file that does not compile
     val otherBuildFile = svr.baseDirectory.toPath.resolve("other-build.sbt")
-    Files.writeString(
+    Files.write(
       otherBuildFile,
       """|val someSettings = Seq(
          |  scalacOptions ++= "-deprecation"
          |)
-         |""".stripMargin
+         |""".stripMargin.getBytes
     )
     // reload
     reloadWorkspace(id = 52)

@@ -331,12 +331,12 @@ class BuildServerTest extends AbstractServerTest {
       }
     )
     // fix the other-build.sbt file and reload again
-    Files.writeString(
+    Files.write(
       otherBuildFile,
       """|val someSettings = Seq(
          |  scalacOptions += "-deprecation"
          |)
-         |""".stripMargin
+         |""".stripMargin.getBytes
     )
     reloadWorkspace(id = 52)
     // assert received an empty diagnostic
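Note: `Files.writeString` only exists on JDK 11+; switching to `Files.write` with explicit bytes keeps these test sources building on JDK 8. The equivalent standalone pattern (note that bare `getBytes` uses the platform charset, so passing UTF-8 explicitly, as below, is stricter than the diff):

import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Paths }

// JDK 8-compatible replacement for Files.writeString(path, text).
val path = Paths.get("/tmp/example.txt") // placeholder path
Files.write(path, "hello\n".getBytes(StandardCharsets.UTF_8))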
@@ -10,10 +10,10 @@ import scala.reflect.ClassTag
 import scala.util.control.NonFatal
 import sbt.io.IO
 import sbt.io.syntax.*
-import xsbti.{ HashedVirtualFileRef, PathBasedFile, VirtualFile }
+import xsbti.{ FileConverter, HashedVirtualFileRef, PathBasedFile, VirtualFile }

 /**
- * An abstration of a remote or local cache store.
+ * An abstraction of a remote or local cache store.
  */
 trait ActionCacheStore:
   /**
@@ -129,7 +129,7 @@ class InMemoryActionCacheStore extends ActionCacheStore:
     underlying.toString()
 end InMemoryActionCacheStore

-class DiskActionCacheStore(base: Path) extends ActionCacheStore:
+class DiskActionCacheStore(base: Path, fileConverter: FileConverter) extends ActionCacheStore:
   lazy val casBase: Path = {
     val dir = base.resolve("cas")
     IO.createDirectory(dir.toFile)
@@ -181,13 +181,10 @@ class DiskActionCacheStore(base: Path) extends ActionCacheStore:
     else None

   override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
-    refs.flatMap: r =>
-      val casFile = casBase.toFile / Digest(r.contentHashStr).toString
+    refs.flatMap: ref =>
+      val casFile = casBase.toFile / Digest(ref.contentHashStr).toString
       if casFile.exists then
-        val shortPath =
-          if r.id.startsWith("${OUT}/") then r.id.drop(7)
-          else r.id
-        val outPath = outputDirectory.resolve(shortPath)
+        val outPath = fileConverter.toPath(ref)
         Files.createDirectories(outPath.getParent())
         if outPath.toFile().exists() then IO.delete(outPath.toFile())
        Some(Files.createSymbolicLink(outPath, casFile.toPath))
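Note: the store keeps blobs under `cas/<digest>` and now asks the injected `FileConverter` where each reference belongs on disk, instead of string-munging the `${OUT}/` prefix. A rough sketch of the same sync shape, with a plain function standing in for the converter (all names below are hypothetical):

import java.nio.file.{ Files, Path }

// Sketch: link one cached blob into the workspace at its converter-resolved path.
def syncOne(casFile: Path, resolve: String => Path, id: String): Option[Path] =
  if Files.exists(casFile) then
    val outPath = resolve(id)          // the converter decides the on-disk location
    Files.createDirectories(outPath.getParent)
    Files.deleteIfExists(outPath)      // replace any stale output
    Some(Files.createSymbolicLink(outPath, casFile))
  else None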
@@ -4,7 +4,13 @@ import sbt.internal.util.StringVirtualFile1
 import sbt.io.IO
 import sbt.io.syntax.*
 import verify.BasicTestSuite
+import xsbti.FileConverter
 import xsbti.VirtualFile
+import xsbti.VirtualFileRef
+
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.Paths

 object ActionCacheTest extends BasicTestSuite:
   val tags = CacheLevelTag.all.toList
@@ -13,10 +19,10 @@ object ActionCacheTest extends BasicTestSuite:
     withDiskCache(testHoldBlob)

   def testHoldBlob(cache: ActionCacheStore): Unit =
-    val in = StringVirtualFile1("a.txt", "foo")
-    val hashRefs = cache.putBlobs(in :: Nil)
-    assert(hashRefs.size == 1)
+    IO.withTemporaryDirectory: tempDir =>
+      val in = StringVirtualFile1(s"$tempDir/a.txt", "foo")
+      val hashRefs = cache.putBlobs(in :: Nil)
+      assert(hashRefs.size == 1)
+      val actual = cache.syncBlobs(hashRefs, tempDir.toPath()).head
+      assert(actual.getFileName().toString() == "a.txt")
@@ -48,14 +54,14 @@ object ActionCacheTest extends BasicTestSuite:
     withDiskCache(testActionCacheWithBlob)

   def testActionCacheWithBlob(cache: ActionCacheStore): Unit =
-    import sjsonnew.BasicJsonProtocol.*
-    var called = 0
-    val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) =>
-      called += 1
-      val out = StringVirtualFile1("a.txt", (a + b).toString)
-      (a + b, Seq(out))
-    }
-    IO.withTemporaryDirectory: (tempDir) =>
+    IO.withTemporaryDirectory: (tempDir) =>
+      import sjsonnew.BasicJsonProtocol.*
+      var called = 0
+      val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) =>
+        called += 1
+        val out = StringVirtualFile1(s"$tempDir/a.txt", (a + b).toString)
+        (a + b, Seq(out))
+      }
       val config = BuildWideCacheConfiguration(cache, tempDir.toPath())
       val v1 =
         ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config)
@@ -81,9 +87,15 @@ object ActionCacheTest extends BasicTestSuite:
     IO.withTemporaryDirectory(
       { tempDir0 =>
         val tempDir = tempDir0.toPath
-        val cache = DiskActionCacheStore(tempDir)
+        val cache = DiskActionCacheStore(tempDir, fileConverter)
         f(cache)
       },
       keepDirectory = false
     )
+
+  def fileConverter = new FileConverter:
+    override def toPath(ref: VirtualFileRef): Path = Paths.get(ref.id)
+    override def toVirtualFile(path: Path): VirtualFile =
+      val content = if Files.isRegularFile(path) then new String(Files.readAllBytes(path)) else ""
+      StringVirtualFile1(path.toString, content)
 end ActionCacheTest
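Note: the test's stub `FileConverter` simply treats a reference's id as a filesystem path, which is enough for a disk-cache round-trip. A hedged sketch of exercising the store with it, using the method names shown in the diff (setup abbreviated; not the actual test):

// Sketch: put a blob, then sync it back through the stub converter.
IO.withTemporaryDirectory { tmp =>
  val store = DiskActionCacheStore(tmp.toPath, fileConverter)
  val refs = store.putBlobs(StringVirtualFile1(s"$tmp/hello.txt", "hi") :: Nil)
  val synced = store.syncBlobs(refs, tmp.toPath) // symlinks blobs to converter paths
  assert(synced.nonEmpty)
}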
@@ -15,11 +15,13 @@ import sjsonnew.*
 enum KeyTag[A]:
   case Setting[A](typeArg: Class[?]) extends KeyTag[A]
   case Task[A](typeArg: Class[?]) extends KeyTag[A]
+  case SeqTask[A](typeArg: Class[?]) extends KeyTag[A]
   case InputTask[A](typeArg: Class[?]) extends KeyTag[A]

   override def toString: String = this match
     case Setting(typeArg) => typeArg.toString
     case Task(typeArg) => s"Task[$typeArg]"
+    case SeqTask(typeArg) => s"Task[Seq[$typeArg]]"
     case InputTask(typeArg) => s"InputTask[$typeArg]"

   def typeArg: Class[?]
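Note: `KeyTag` reifies what kind of value a key holds; the new `SeqTask` variant is what lets the Settings.scala hunk above dispatch on the element type of a `Task[Seq[A]]`. A small illustrative match (the function name is invented):

// Sketch: describing a key's shape from its tag.
def describe(tag: KeyTag[?]): String = tag match
  case KeyTag.SeqTask(t)   => s"task returning a sequence of $t"
  case KeyTag.Task(t)      => s"task returning $t"
  case KeyTag.InputTask(t) => s"input task returning $t"
  case KeyTag.Setting(t)   => s"setting of $t"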
@@ -40,7 +40,7 @@ abstract class EvaluateSettings[ScopeType]:
       case k: Keyed[s, A] => single(getStatic(k.scopedKey), k.transform)
       case u: Uniform[s, A] => UniformNode(u.inputs.map(transform[s]), u.f)
       case a: Apply[k, A] =>
-        MixedNode[k, A](TupleMapExtension.transform(a.inputs) { transform }, a.f)
+        MixedNode[k, A](TupleMapExtension.transform(a.inputs)(transform), a.f)
       case b: Bind[s, A] => BindNode[s, A](transform(b.in), x => transform(b.f(x)))
       case v: Value[A] => constant(v.value)
       case v: ValidationCapture[a] => strictConstant(v.key: A)

@@ -89,8 +89,7 @@ abstract class EvaluateSettings[ScopeType]:

   private[this] def submit(work: => Unit): Unit =
     startWork()
-    // new Runnable { def run = if (!cancel.get()) run0(work) }
-    executor.execute(() => if !cancel.get() then run0(work) else ())
+    executor.execute(() => if !cancel.get() then run0(work))

   private[this] def run0(work: => Unit): Unit =
     try {
@@ -102,7 +101,6 @@ abstract class EvaluateSettings[ScopeType]:

   private[this] def workComplete(): Unit =
     if running.decrementAndGet() == 0 then complete.put(None)
-    else ()

   private[this] sealed abstract class INode[A1]:
     private[this] var state: EvaluationState = New

@@ -116,9 +114,12 @@ abstract class EvaluateSettings[ScopeType]:
     keyString

   private[this] def keyString =
-    (static.toSeq.flatMap { case (key, value) =>
-      if (value eq this) init.showFullKey.show(key) :: Nil else List.empty[String]
-    }).headOption getOrElse "non-static"
+    static.toSeq
+      .flatMap { case (key, value) =>
+        if (value eq this) init.showFullKey.show(key) :: Nil else Nil
+      }
+      .headOption
+      .getOrElse("non-static")

   final def get: A1 = synchronized {
     assert(value != null, toString + " not evaluated")
@@ -23,6 +23,8 @@ object Applicative:
     val F1 = summon[Applicative[F1]]
     val F2 = summon[Applicative[F2]]
     override def pure[A1](x: () => A1): F1[F2[A1]] = F1.pure(() => F2.pure(x))
+    override def map[A1, A2](fa: F1[F2[A1]])(f: A1 => A2): F1[F2[A2]] =
+      F1.map(fa)(f2 => F2.map(f2)(f))
     override def ap[A1, A2](f1f2f: F1[F2[A1 => A2]])(f1f2a: F1[F2[A1]]): F1[F2[A2]] =
       F1.ap(F1.map(f1f2f) { (f2f: F2[A1 => A2]) => (f2a: F2[A1]) => F2.ap(f2f)(f2a) })(f1f2a)
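Note: applicative functors compose — nesting one in another (here `F1[F2[_]]`) is again applicative, with `map` delegating layer by layer exactly as the new override does. A standalone sketch of the same composition for the simplest instances, independent of sbt's Applicative trait:

// Composing Option and List "by hand", the way the instance above does.
val nested: Option[List[Int]] = Some(List(1, 2, 3))
val mapped: Option[List[Int]] = nested.map(_.map(_ + 1)) // F1.map(fa)(F2.map(_)(f))
assert(mapped == Some(List(2, 3, 4)))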