Don't use filtered classpath

The classpath filter for test and run was added in #661 to ensure that
the classloaders were correctly isolated. That is no longer necessary
with the new layering strategies that are more precise about which jars
are exposed at each level. Using the filtered classloader was causing
the reflection used by Spark to fail when using Java 11.
This commit is contained in:
Ethan Atkins 2019-06-03 15:07:51 -07:00
parent ce6484f4b4
commit 1b8f0ed20f
1 changed file with 1 addition and 2 deletions

View File

@ -126,7 +126,7 @@ private[sbt] object ClassLoaders {
scope: Scope
): ClassLoader = {
val cpFiles = fullCP.map(_._1)
val raw = strategy match {
strategy match {
case Flat => flatLoader(cpFiles, interfaceLoader)
case _ =>
val layerDependencies = strategy match {
@ -169,7 +169,6 @@ private[sbt] object ClassLoaders {
val dynamicClasspath = cpFiles.filterNot(f => filteredSet(f) || scalaJarNames(f.getName))
new LayeredClassLoader(dynamicClasspath, dependencyLayer, resources, tmp)
}
ClasspathUtilities.filterByClasspath(cpFiles, raw)
}
private def dependencyJars(