mirror of https://github.com/sbt/sbt.git
Don't use filtered classpath
The classpath filter for test and run was added in #661 to ensure that the classloaders were correctly isolated. That is no longer necessary with the new layering strategies, which are more precise about which jars are exposed at each level. Using the filtered classloader was causing the reflection used by Spark to fail when using Java 11.
This commit is contained in:
parent
ce6484f4b4
commit
1b8f0ed20f
|
|
@ -126,7 +126,7 @@ private[sbt] object ClassLoaders {
|
|||
scope: Scope
|
||||
): ClassLoader = {
|
||||
val cpFiles = fullCP.map(_._1)
|
||||
val raw = strategy match {
|
||||
strategy match {
|
||||
case Flat => flatLoader(cpFiles, interfaceLoader)
|
||||
case _ =>
|
||||
val layerDependencies = strategy match {
|
||||
|
|
@ -169,7 +169,6 @@ private[sbt] object ClassLoaders {
|
|||
val dynamicClasspath = cpFiles.filterNot(f => filteredSet(f) || scalaJarNames(f.getName))
|
||||
new LayeredClassLoader(dynamicClasspath, dependencyLayer, resources, tmp)
|
||||
}
|
||||
ClasspathUtilities.filterByClasspath(cpFiles, raw)
|
||||
}
|
||||
|
||||
private def dependencyJars(
|
||||
|
|
|
|||
Loading…
Reference in New Issue