From 1b8f0ed20f1d322162b3a0f616ab9d7d45cf013a Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Mon, 3 Jun 2019 15:07:51 -0700 Subject: [PATCH] Don't use filtered classpath The classpath filter for test and run was added in #661 to ensure that the classloaders were correctly isolated. That is no longer necessary with the new layering strategies that are more precise about which jars are exposed at each level. Using the filtered classloader was causing the reflection used by Spark to fail when using Java 11. --- main/src/main/scala/sbt/internal/ClassLoaders.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/main/src/main/scala/sbt/internal/ClassLoaders.scala b/main/src/main/scala/sbt/internal/ClassLoaders.scala index 16c46e5a9..f4f57275c 100644 --- a/main/src/main/scala/sbt/internal/ClassLoaders.scala +++ b/main/src/main/scala/sbt/internal/ClassLoaders.scala @@ -126,7 +126,7 @@ private[sbt] object ClassLoaders { scope: Scope ): ClassLoader = { val cpFiles = fullCP.map(_._1) - val raw = strategy match { + strategy match { case Flat => flatLoader(cpFiles, interfaceLoader) case _ => val layerDependencies = strategy match { @@ -169,7 +169,6 @@ private[sbt] object ClassLoaders { val dynamicClasspath = cpFiles.filterNot(f => filteredSet(f) || scalaJarNames(f.getName)) new LayeredClassLoader(dynamicClasspath, dependencyLayer, resources, tmp) } - ClasspathUtilities.filterByClasspath(cpFiles, raw) } private def dependencyJars(