Adjust to upstream changes

This commit is contained in:
Eugene Yokota 2017-07-16 00:51:49 -04:00
parent 347914191a
commit f0ac6ae11c
15 changed files with 83 additions and 56 deletions

View File

@@ -120,7 +120,6 @@ object Package {
def sourcesDebugString(sources: Seq[(File, String)]): String =
"Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t"))
implicit def manifestEquiv: Equiv[Manifest] = defaultEquiv
implicit def manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]](
m => {
val bos = new java.io.ByteArrayOutputStream()
@@ -129,6 +128,4 @@ object Package {
},
bs => new Manifest(new java.io.ByteArrayInputStream(bs))
)
implicit def stringMapEquiv: Equiv[Map[File, String]] = defaultEquiv
}

View File

@@ -52,9 +52,6 @@ object RawCompileLike {
val inputs
: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(
classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
implicit val stringEquiv: Equiv[String] = defaultEquiv
implicit val fileEquiv: Equiv[File] = defaultEquiv
implicit val intEquiv: Equiv[Int] = defaultEquiv
val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") {
(outChanged, outputs: FilesInfo[PlainFileInfo]) =>

View File

@@ -4,6 +4,7 @@ import org.scalacheck._
import org.scalacheck.Arbitrary._
import Prop._
import sbt.librarymanagement._
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
class CacheIvyTest extends Properties("CacheIvy") {
import sbt.util.{ CacheStore, SingletonCache }
@@ -12,8 +13,6 @@ class CacheIvyTest extends Properties("CacheIvy") {
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter
import scalajson.ast.unsafe.JValue
private class InMemoryStore(converter: SupportConverter[JValue]) extends CacheStore {
private var content: JValue = _
override def delete(): Unit = ()

View File

@@ -72,9 +72,10 @@ import scala.util.control.NonFatal
import scala.xml.NodeSeq
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sjsonnew.{ IsoLList, JsonFormat, LList, LNil }, LList.:*:
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import std.TaskExtra._
import testing.{ Framework, Runner, AnnotatedFingerprint, SubclassFingerprint }
import xsbti.compile.{ IncToolOptionsUtil, AnalysisContents }
import xsbti.compile.{ IncToolOptionsUtil, AnalysisContents, IncOptions }
import xsbti.CrossValue
// incremental compiler
@@ -376,8 +377,8 @@ object Defaults extends BuildCommon {
def compileBase = inTask(console)(compilersSetting :: Nil) ++ compileBaseGlobal ++ Seq(
incOptions := incOptions.value
.withClassfileManagerType(
Option(new TransactionalManagerType(crossTarget.value / "classes.bak",
sbt.util.Logger.Null): ClassFileManagerType).toOptional
Option(TransactionalManagerType
.of(crossTarget.value / "classes.bak", sbt.util.Logger.Null): ClassFileManagerType).toOptional
),
scalaInstance := scalaInstanceTask.value,
crossVersion := (if (crossPaths.value) CrossVersion.binary else Disabled()),
@@ -413,7 +414,7 @@ object Defaults extends BuildCommon {
// must be a val: duplication detected by object identity
private[this] lazy val compileBaseGlobal: Seq[Setting[_]] = globalDefaults(
Seq(
incOptions := IncOptionsUtil.defaultIncOptions,
incOptions := IncOptions.of(),
classpathOptions :== ClasspathOptionsUtil.boot,
classpathOptions in console :== ClasspathOptionsUtil.repl,
compileOrder :== CompileOrder.Mixed,
@@ -470,7 +471,8 @@ object Defaults extends BuildCommon {
globalLock = launcher.globalLock,
componentProvider = app.provider.components,
secondaryCacheDir = Option(zincDir),
dependencyResolution = dependencyResolution.value,
// TODO: "depencencyResolution" is the upstream Zinc API's (misspelled) parameter
// name — it must stay misspelled here until the typo is fixed upstream.
depencencyResolution = dependencyResolution.value,
compilerBridgeSource = scalaCompilerBridgeSource.value,
scalaJarsTarget = zincDir,
log = streams.value.log
@@ -1417,7 +1419,7 @@ object Defaults extends BuildCommon {
override def definesClass(classpathEntry: File): DefinesClass =
cachedPerEntryDefinesClassLookup(classpathEntry)
}
new Setup(
Setup.of(
lookup,
(skip in compile).value,
// TODO - this is kind of a bad way to grab the cache directory for streams...
@@ -1432,7 +1434,7 @@ object Defaults extends BuildCommon {
}
def compileInputsSettings: Seq[Setting[_]] = {
Seq(
compileOptions := new CompileOptions(
compileOptions := CompileOptions.of(
(classDirectory.value +: data(dependencyClasspath.value)).toArray,
sources.value.toArray,
classDirectory.value,
@@ -1449,7 +1451,7 @@ object Defaults extends BuildCommon {
foldMappers(sourcePositionMappers.value)
)
},
compileInputs := new Inputs(
compileInputs := Inputs.of(
compilers.value,
compileOptions.value,
compileIncSetup.value,
@@ -1477,8 +1479,8 @@ object Defaults extends BuildCommon {
case Some(contents) =>
val analysis = Option(contents.getAnalysis).toOptional
val setup = Option(contents.getMiniSetup).toOptional
new PreviousResult(analysis, setup)
case None => new PreviousResult(jnone[CompileAnalysis], jnone[MiniSetup])
PreviousResult.of(analysis, setup)
case None => PreviousResult.of(jnone[CompileAnalysis], jnone[MiniSetup])
}
}
)
@@ -1709,7 +1711,7 @@ object Classpaths {
val config = makePomConfiguration.value
val publisher = Keys.publisher.value
publisher.makePomFile(ivyModule.value, config, streams.value.log)
config.file
config.file.get
},
packagedArtifact in makePom := ((artifact in makePom).value -> makePom.value),
deliver := deliverTask(publishConfiguration).value,
@@ -1887,13 +1889,13 @@ object Classpaths {
else Nil
},
moduleSettings := moduleSettings0.value,
makePomConfiguration := new MakePomConfiguration(artifactPath in makePom value,
projectInfo.value,
None,
pomExtra.value,
pomPostProcess.value,
pomIncludeRepository.value,
pomAllRepositories.value),
makePomConfiguration := MakePomConfiguration()
.withFile((artifactPath in makePom).value)
.withModuleInfo(projectInfo.value)
.withExtra(pomExtra.value)
.withProcess(pomPostProcess.value)
.withFilterRepositories(pomIncludeRepository.value)
.withAllRepositories(pomAllRepositories.value),
publishConfiguration := {
publishConfig(
publishMavenStyle.value,
@@ -2136,22 +2138,24 @@ object Classpaths {
val depDir = dependencyCacheDirectory.value
val ivy = scalaModuleInfo.value
val st = state.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
// val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
lm.transitiveScratch(
"sbt",
GetClassifiersConfiguration(
mod,
excludes.toVector,
c.withArtifactFilter(c.artifactFilter.map(af => af.withInverted(!af.inverted))),
srcTypes,
docTypes),
uwConfig,
log
) match {
case Left(uw) => ???
case Right(ur) => ur
}
withExcludes(out, mod.classifiers, lock(app)) {
excludes =>
// val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
LibraryManagement.transitiveScratch(
lm,
"sbt",
GetClassifiersConfiguration(mod,
excludes.toVector,
c.withArtifactFilter(c.artifactFilter.map(af =>
af.withInverted(!af.inverted))),
srcTypes,
docTypes),
uwConfig,
log
) match {
case Left(uw) => ???
case Right(ur) => ur
}
}
} tag (Tags.Update, Tags.Network)).value
)) ++ Seq(bootIvyConfiguration := (ivyConfiguration in updateSbtClassifiers).value)
@@ -2197,7 +2201,7 @@ object Classpaths {
f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport): UpdateReport = {
import sbt.librarymanagement.LibraryManagementCodec._
import sbt.util.FileBasedStore
implicit val isoString: sjsonnew.IsoString[scalajson.ast.unsafe.JValue] =
implicit val isoString: sjsonnew.IsoString[JValue] =
sjsonnew.IsoString.iso(
sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe

View File

@@ -14,6 +14,7 @@ import sbt.io.syntax._
import sbt.internal.util.{ Attributed, AttributeEntry, AttributeKey, AttributeMap, Settings }
import sbt.internal.util.Attributed.data
import sbt.util.Logger
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
final class BuildStructure(val units: Map[URI, LoadedBuildUnit],
val root: URI,
@@ -250,7 +251,7 @@ object BuildStreams {
def mkStreams(units: Map[URI, LoadedBuildUnit],
root: URI,
data: Settings[Scope]): State => Streams = s => {
implicit val isoString: sjsonnew.IsoString[scalajson.ast.unsafe.JValue] =
implicit val isoString: sjsonnew.IsoString[JValue] =
sjsonnew.IsoString.iso(sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe)
(s get Keys.stateStreams) getOrElse {

View File

@@ -30,7 +30,8 @@ object ConsoleProject {
globalLock = launcher.globalLock,
componentProvider = app.provider.components,
secondaryCacheDir = Option(zincDir),
dependencyResolution = dependencyResolution,
// TODO: "depencencyResolution" is the upstream Zinc API's (misspelled) parameter
// name — it must stay misspelled here until the typo is fixed upstream.
depencencyResolution = dependencyResolution,
compilerBridgeSource = extracted.get(Keys.scalaCompilerBridgeSource),
scalaJarsTarget = zincDir,
log = log

View File

@@ -124,4 +124,31 @@ object LibraryManagement {
private[this] def fileUptodate(file: File, stamps: Map[File, Long]): Boolean =
stamps.get(file).forall(_ == file.lastModified)
// Two-phase "scratch" resolution, used when fetching classifier artifacts
// (e.g. sources/docs): phase 1 resolves a synthetic module built from the
// dependencies of `config.module`; phase 2 re-runs updateClassifiers over the
// full transitive module set that phase 1 reported.
// NOTE(review): pasted from a stripped git diff — original indentation lost.
private[sbt] def transitiveScratch(
lm: DependencyResolution,
label: String,
config: GetClassifiersConfiguration,
uwconfig: UnresolvedWarningConfiguration,
log: Logger
): Either[UnresolvedWarning, UpdateReport] = {
import config.{ updateConfiguration => c, module => mod }
import mod.{ id, dependencies => deps, scalaModuleInfo }
// Synthetic module id: same coordinates as `id` (via restrictedCopy, keeping
// configurations) but renamed "<name>$<label>" so it cannot clash with a real module.
val base = restrictedCopy(id, true).withName(id.name + "$" + label)
val module = lm.moduleDescriptor(base, deps, scalaModuleInfo)
// Phase 1: plain transitive update. An unresolved warning here is NOT returned
// as Left — it is rethrown as its resolveException, so only phase 2 failures
// surface through the Either return type.
val report = lm.update(module, c, uwconfig, log) match {
case Right(r) => r
case Left(w) =>
throw w.resolveException
}
// Phase 2: swap the original dependencies for every module seen in the phase-1
// report, then resolve classifiers for that widened set.
val newConfig = config
.withModule(mod.withDependencies(report.allModules))
lm.updateClassifiers(newConfig, uwconfig, Vector(), log)
}
// Copies only the bare coordinates of a ModuleID (organization/name/revision)
// plus crossVersion, extra attributes and branch; configurations are preserved
// only when `confs` is true. Everything else carried by the original ModuleID
// is deliberately dropped.
private[sbt] def restrictedCopy(m: ModuleID, confs: Boolean) =
ModuleID(m.organization, m.name, m.revision)
.withCrossVersion(m.crossVersion)
.withExtraAttributes(m.extraAttributes)
.withConfigurations(if (confs) m.configurations else None)
.branch(m.branchName)
}

View File

@@ -94,7 +94,8 @@ private[sbt] object Load {
globalLock = launcher.globalLock,
componentProvider = app.provider.components,
secondaryCacheDir = Option(zincDir),
dependencyResolution = dependencyResolution,
// TODO: "depencencyResolution" is the upstream Zinc API's (misspelled) parameter
// name — it must stay misspelled here until the typo is fixed upstream.
depencencyResolution = dependencyResolution,
compilerBridgeSource = ZincUtil.getDefaultBridgeModule(scalaProvider.version),
scalaJarsTarget = zincDir,
log = log

View File

@@ -10,7 +10,7 @@ import sbt.util.Level
import sbt.internal.util._
import sbt.protocol.LogEvent
import sbt.internal.util.codec._
import scalajson.ast.unsafe._
import sjsonnew.shaded.scalajson.ast.unsafe._
class RelayAppender(name: String)
extends AbstractAppender(name, null, PatternLayout.createDefaultLayout(), true) {

View File

@@ -6,7 +6,7 @@ package internal
package server
import java.net.URI
import scalajson.ast.unsafe.JValue
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import scala.util.{ Left, Right }
import sbt.util.{ SomeJsonWriter, NoJsonWriter }
import sbt.librarymanagement.LibraryManagementCodec._

View File

@@ -32,7 +32,7 @@ object ContrabandConfig {
case "sbt.testing.Status" => { _ =>
"sbt.internal.testing.StatusFormats" :: Nil
}
case "scalajson.ast.unsafe.JValue" => { _ =>
case "scalajson.ast.unsafe.JValue" | "sjsonnew.shaded.scalajson.ast.unsafe.JValue" => { _ =>
"sbt.internal.util.codec.JValueFormats" :: Nil
}
}

View File

@@ -5,7 +5,7 @@
// DO NOT EDIT MANUALLY
package sbt.protocol
final class SettingQuerySuccess private (
val value: scalajson.ast.unsafe.JValue,
val value: sjsonnew.shaded.scalajson.ast.unsafe.JValue,
val contentType: String) extends sbt.protocol.SettingQueryResponse() with Serializable {
@@ -20,10 +20,10 @@ final class SettingQuerySuccess private (
override def toString: String = {
"SettingQuerySuccess(" + value + ", " + contentType + ")"
}
protected[this] def copy(value: scalajson.ast.unsafe.JValue = value, contentType: String = contentType): SettingQuerySuccess = {
protected[this] def copy(value: sjsonnew.shaded.scalajson.ast.unsafe.JValue = value, contentType: String = contentType): SettingQuerySuccess = {
new SettingQuerySuccess(value, contentType)
}
def withValue(value: scalajson.ast.unsafe.JValue): SettingQuerySuccess = {
def withValue(value: sjsonnew.shaded.scalajson.ast.unsafe.JValue): SettingQuerySuccess = {
copy(value = value)
}
def withContentType(contentType: String): SettingQuerySuccess = {
@@ -32,5 +32,5 @@ final class SettingQuerySuccess private (
}
object SettingQuerySuccess {
def apply(value: scalajson.ast.unsafe.JValue, contentType: String): SettingQuerySuccess = new SettingQuerySuccess(value, contentType)
def apply(value: sjsonnew.shaded.scalajson.ast.unsafe.JValue, contentType: String): SettingQuerySuccess = new SettingQuerySuccess(value, contentType)
}

View File

@@ -11,7 +11,7 @@ implicit lazy val SettingQuerySuccessFormat: JsonFormat[sbt.protocol.SettingQuer
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val value = unbuilder.readField[scalajson.ast.unsafe.JValue]("value")
val value = unbuilder.readField[sjsonnew.shaded.scalajson.ast.unsafe.JValue]("value")
val contentType = unbuilder.readField[String]("contentType")
unbuilder.endObject()
sbt.protocol.SettingQuerySuccess(value, contentType)

View File

@@ -43,7 +43,7 @@ type ExecStatusEvent implements EventMessage {
interface SettingQueryResponse implements EventMessage {}
type SettingQuerySuccess implements SettingQueryResponse {
value: scalajson.ast.unsafe.JValue!
value: sjsonnew.shaded.scalajson.ast.unsafe.JValue!
contentType: String!
}

View File

@@ -6,7 +6,7 @@ package protocol
import sjsonnew.JsonFormat
import sjsonnew.support.scalajson.unsafe.{ Parser, Converter, CompactPrinter }
import scalajson.ast.unsafe.{ JValue, JObject, JString }
import sjsonnew.shaded.scalajson.ast.unsafe.{ JValue, JObject, JString }
import java.nio.ByteBuffer
import scala.util.{ Success, Failure }
import sbt.internal.util.StringEvent