From 233307b696923f9013c0d94d4eb37582cfa6df67 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Mon, 3 Jun 2019 13:38:40 -0700
Subject: [PATCH] Fix classpath filter in run

We were incorrectly building the dependency layer in the run task using
the raw jars from dependencyClasspath rather than the actual classpath
jars (which may differ when bgCopyClasspath is true, as it is by
default). This prevented Spark from working with AllLibraryJars: Spark
would load its classes and resources from the Coursier cache, but the
classpath filter rejected those resources because they came from the
Coursier cache rather than the classpath.
---
 main/src/main/scala/sbt/internal/ClassLoaders.scala | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/main/src/main/scala/sbt/internal/ClassLoaders.scala b/main/src/main/scala/sbt/internal/ClassLoaders.scala
index df02a7f07..16c46e5a9 100644
--- a/main/src/main/scala/sbt/internal/ClassLoaders.scala
+++ b/main/src/main/scala/sbt/internal/ClassLoaders.scala
@@ -79,12 +79,14 @@ private[sbt] object ClassLoaders {
     val allDeps = dependencyJars(dependencyClasspath).value.filterNot(exclude)
     val newLoader = (classpath: Seq[File]) => {
+      val mappings = classpath.map(f => f.getName -> f).toMap
+      val transformedDependencies = allDeps.map(f => mappings.get(f.getName).getOrElse(f))
       buildLayers(
         strategy = classLoaderLayeringStrategy.value: @sbtUnchecked,
         si = instance,
         fullCP = classpath.map(f => f -> IO.getModifiedTimeOrZero(f)),
         resourceCP = resourceCP,
-        allDependencies = allDeps,
+        allDependencies = transformedDependencies,
         cache = extendedClassLoaderCache.value: @sbtUnchecked,
         resources = ClasspathUtilities.createClasspathResources(classpath, instance),
         tmp = taskTemporaryDirectory.value: @sbtUnchecked,
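
Note (not part of the patch): a minimal, self-contained sketch of the name-based remapping the diff above introduces. The RemapExample object, the jar names, and the file paths are hypothetical; only the idea of indexing the real classpath by file name and substituting matching dependency jars is taken from the patch.

    import java.io.File

    // Hypothetical illustration of the remapping added above: dependency jars
    // resolved from the raw dependencyClasspath are swapped for the equally
    // named jars on the actual run classpath, which may live in a copied
    // location when bgCopyClasspath is true (the default).
    object RemapExample {
      def remap(allDeps: Seq[File], classpath: Seq[File]): Seq[File] = {
        // Index the real classpath entries by simple file name.
        val mappings: Map[String, File] = classpath.map(f => f.getName -> f).toMap
        // Prefer the classpath copy; fall back to the original jar if no match.
        allDeps.map(f => mappings.getOrElse(f.getName, f))
      }

      def main(args: Array[String]): Unit = {
        // Illustrative paths only.
        val allDeps   = Seq(new File("/home/user/.cache/coursier/spark-core_2.12-2.4.3.jar"))
        val classpath = Seq(new File("/tmp/bg-jobs/job-1/spark-core_2.12-2.4.3.jar"))
        // Prints the copied classpath jar, so the layer filter and the loader agree.
        println(remap(allDeps, classpath))
      }
    }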