mirror of https://github.com/sbt/sbt.git
Remove source that went into the modules
parent 7132491f6c
commit 4053ea92ba
@@ -1,3 +0,0 @@
Simple Build Tool: Cache Component
Copyright 2009 Mark Harrah
Licensed under BSD-style license (see LICENSE)

@@ -1,248 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt

import sbinary.{ CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out }
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream }
import java.net.{ URI, URL }
import Types.:+:
import DefaultProtocol.{ asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap }
import scala.xml.NodeSeq

trait Cache[I, O] {
  def apply(file: File)(i: I): Either[O, O => Unit]
}
trait SBinaryFormats extends CollectionTypes with JavaFormats {
  implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat
  implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat
}
object Cache extends CacheImplicits {
  def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c

  def cached[I, O](file: File)(f: I => O)(implicit cache: Cache[I, O]): I => O =
    in =>
      cache(file)(in) match {
        case Left(value) => value
        case Right(store) =>
          val out = f(in)
          store(out)
          out
      }

  def debug[I](label: String, c: InputCache[I]): InputCache[I] =
    new InputCache[I] {
      type Internal = c.Internal
      def convert(i: I) = c.convert(i)
      def read(from: Input) =
        {
          val v = c.read(from)
          println(label + ".read: " + v)
          v
        }
      def write(to: Out, v: Internal): Unit = {
        println(label + ".write: " + v)
        c.write(to, v)
      }
      def equiv: Equiv[Internal] = new Equiv[Internal] {
        def equiv(a: Internal, b: Internal) =
          {
            val equ = c.equiv.equiv(a, b)
            println(label + ".equiv(" + a + ", " + b + "): " + equ)
            equ
          }
      }
    }
}
trait CacheImplicits extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits with UnionImplicits
trait BasicCacheImplicits {
  implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I, O] =
    new BasicCache()(in, outFormat)
  def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq)

  def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b }

  implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] =
    new InputCache[Option[T]] {
      type Internal = Option[t.Internal]
      def convert(v: Option[T]): Internal = v.map(x => t.convert(x))
      def read(from: Input) =
        {
          val isDefined = BooleanFormat.reads(from)
          if (isDefined) Some(t.read(from)) else None
        }
      def write(to: Out, j: Internal): Unit =
        {
          BooleanFormat.writes(to, j.isDefined)
          j foreach { x => t.write(to, x) }
        }
      def equiv = optEquiv(t.equiv)
    }

  def wrapEquiv[S, T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] =
    new Equiv[S] {
      def equiv(a: S, b: S) =
        eqT.equiv(f(a), f(b))
    }

  implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] =
    new Equiv[Option[T]] {
      def equiv(a: Option[T], b: Option[T]) =
        (a, b) match {
          case (None, None)         => true
          case (Some(va), Some(vb)) => t.equiv(va, vb)
          case _                    => false
        }
    }
  implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq)
  implicit def uriEquiv: Equiv[URI] = defaultEquiv
  implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv
  implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv

  def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] =
    {
      val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray }
      val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs))
      wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat)
    }

  implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq)

  implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] =
    new InputCache[Seq[T]] {
      type Internal = Seq[t.Internal]
      def convert(v: Seq[T]) = v.map(x => t.convert(x))
      def read(from: Input) =
        {
          val size = IntFormat.reads(from)
          def next(left: Int, acc: List[t.Internal]): Internal =
            if (left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc)
          next(size, Nil)
        }
      def write(to: Out, vs: Internal): Unit = {
        val size = vs.length
        IntFormat.writes(to, size)
        for (v <- vs) t.write(to, v)
      }
      def equiv: Equiv[Internal] = seqEquiv(t.equiv)
    }

  implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] =
    wrapEquiv((x: Array[T]) => x: Seq[T])(seqEquiv[T](t))

  implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] =
    new Equiv[Seq[T]] {
      def equiv(a: Seq[T], b: Seq[T]) =
        a.length == b.length &&
          ((a, b).zipped forall t.equiv)
    }
  implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] =
    wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat)

  def wrapIn[I, J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] =
    new InputCache[I] {
      type Internal = jCache.Internal
      def convert(i: I) = jCache.convert(f(i))
      def read(from: Input) = jCache.read(from)
      def write(to: Out, j: Internal) = jCache.write(to, j)
      def equiv = jCache.equiv
    }

  def singleton[T](t: T): InputCache[T] =
    basicInput(trueEquiv, asSingleton(t))

  def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true }
}

trait HListCacheImplicits {
  implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] =
    new InputCache[H :+: T] {
      type Internal = (head.Internal, tail.Internal)
      def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail))
      def read(from: Input) =
        {
          val h = head.read(from)
          val t = tail.read(from)
          (h, t)
        }
      def write(to: Out, j: Internal): Unit = {
        head.write(to, j._1)
        tail.write(to, j._2)
      }
      def equiv = new Equiv[Internal] {
        def equiv(a: Internal, b: Internal) =
          head.equiv.equiv(a._1, b._1) &&
            tail.equiv.equiv(a._2, b._2)
      }
    }

  implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil: HNil)

  implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] {
    def reads(from: Input) =
      {
        val h = head.reads(from)
        val t = tail.reads(from)
        HCons(h, t)
      }
    def writes(to: Out, hc: H :+: T): Unit = {
      head.writes(to, hc.head)
      tail.writes(to, hc.tail)
    }
  }

  implicit def hNilFormat: Format[HNil] = asSingleton(HNil)
}
trait UnionImplicits {
  def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] =
    new InputCache[UB] {
      type Internal = Found[_]
      def convert(in: UB) = uc.find(in)
      def read(in: Input) =
        {
          val index = ByteFormat.reads(in)
          val (cache, clazz) = uc.at(index)
          val value = cache.read(in)
          new Found[cache.Internal](cache, clazz, value, index)
        }
      def write(to: Out, i: Internal): Unit = {
        def write0[I](f: Found[I]): Unit = {
          ByteFormat.writes(to, f.index.toByte)
          f.cache.write(to, f.value)
        }
        write0(i)
      }
      def equiv: Equiv[Internal] = new Equiv[Internal] {
        def equiv(a: Internal, b: Internal) =
          {
            if (a.clazz == b.clazz)
              force(a.cache.equiv, a.value, b.value)
            else
              false
          }
        def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T])
      }
    }

  implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] =
    new UnionCache[H :+: T, UB] {
      val size = 1 + t.size
      def c = mf.runtimeClass
      def find(value: UB): Found[_] =
        if (c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value)
      def at(i: Int): (InputCache[_ <: UB], Class[_]) = if (size == i + 1) (head, c) else t.at(i)
    }

  implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] {
    def size = 0
    def find(value: UB) = sys.error("No valid sum type for " + value)
    def at(i: Int) = sys.error("Invalid union index " + i)
  }

  final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int)
  sealed trait UnionCache[HL <: HList, UB] {
    def size: Int
    def at(i: Int): (InputCache[_ <: UB], Class[_])
    def find(forValue: UB): Found[_]
  }
}
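The CacheTest file later in this commit exercises this API; a smaller sketch of the same pattern follows, with a hypothetical cache path. The implicit resolution assumed here mirrors that test.

// Sketch: memoize a File => Long function through a cache file. The
// implicits from Cache._ and FileInfo.hash._ are assumed to supply a
// Cache[File, Long] keyed on the file's content hash, as in CacheTest.
import java.io.File
import sbt.Cache._
import sbt.FileInfo.hash._

val sizeCache = new File("/tmp/size-cache") // hypothetical location
val size: File => Long = cached(sizeCache) { (f: File) => f.length }
// The first call computes and stores f.length; repeat calls with an
// unchanged file read the stored value back instead of recomputing.
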
@@ -1,44 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt

import java.io.{ File, FileNotFoundException }
import sbinary.{ DefaultProtocol, Format, Operations }
import scala.reflect.Manifest

object CacheIO {
  def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] =
    toBytes[T](value)(format, mf)
  def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] =
    Operations.toByteArray(value)(stampedFormat(format))
  def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T =
    fromBytes(default)(bytes)(format, mf)
  def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T =
    if (bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format))

  def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T =
    fromFile(file, default)(format, mf)
  def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T =
    fromFile[T](file) getOrElse default
  def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] =
    try { Some(Operations.fromFile(file)(stampedFormat(format))) }
    catch { case e: Exception => None }

  def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit =
    toFile(value)(file)(format, mf)
  def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit =
    {
      IO.createDirectory(file.getParentFile)
      Operations.toFile(value)(file)(stampedFormat(format))
    }
  def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] =
    {
      import DefaultProtocol._
      withStamp(stamp(format))(format)
    }
  def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf)
  def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode
  def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf
  def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf
}
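CacheIO prefixes every serialized value with a stamp derived from the Format's type, so stale bytes written for a different type are rejected rather than misread. A minimal round-trip sketch:

// Sketch: a value round-trips through the stamped byte format; reading
// with a Format of a different type is meant to fail on the stamp check
// instead of returning garbage. IntFormat is sbinary's standard format.
import sbinary.DefaultProtocol.IntFormat

val bytes = sbt.CacheIO.toBytes(42)
val back = sbt.CacheIO.fromBytes(0)(bytes) // 42; an empty array yields the default 0
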
@@ -1,106 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt

import java.io.{ File, IOException }
import sbinary.{ DefaultProtocol, Format }
import DefaultProtocol._
import scala.reflect.Manifest

sealed trait FileInfo extends NotNull {
  val file: File
}
sealed trait HashFileInfo extends FileInfo {
  val hash: List[Byte]
}
sealed trait ModifiedFileInfo extends FileInfo {
  val lastModified: Long
}
sealed trait PlainFileInfo extends FileInfo {
  def exists: Boolean
}
sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo

private final case class PlainFile(file: File, exists: Boolean) extends PlainFileInfo
private final case class FileHash(file: File, hash: List[Byte]) extends HashFileInfo
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo

object FileInfo {
  implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache
  implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache
  implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache
  implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache

  sealed trait Style {
    type F <: FileInfo
    implicit def apply(file: File): F
    implicit def unapply(info: F): File = info.file
    implicit val format: Format[F]
    import Cache._
    implicit def fileInfoEquiv: Equiv[F] = defaultEquiv
    def infoInputCache: InputCache[F] = basicInput
    implicit def fileInputCache: InputCache[File] = wrapIn[File, F]
  }
  object full extends Style {
    type F = HashModifiedFileInfo
    implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified)
    def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified)
    implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled)
  }
  object hash extends Style {
    type F = HashFileInfo
    implicit def apply(file: File): HashFileInfo = make(file, computeHash(file))
    def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash)
    implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled)
    private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil }
  }
  object lastModified extends Style {
    type F = ModifiedFileInfo
    implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified)
    def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified)
    implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled)
  }
  object exists extends Style {
    type F = PlainFileInfo
    implicit def apply(file: File): PlainFileInfo = make(file)
    def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) }
    implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists))
  }
}

final case class FilesInfo[F <: FileInfo] private (files: Set[F])
object FilesInfo {
  sealed abstract class Style {
    type F <: FileInfo
    val fileStyle: FileInfo.Style { type F = Style.this.F }

    //def manifest: Manifest[F] = fileStyle.manifest
    implicit def apply(files: Set[File]): FilesInfo[F]
    implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file)
    implicit val formats: Format[FilesInfo[F]]
    val manifest: Manifest[Format[FilesInfo[F]]]
    def empty: FilesInfo[F] = new FilesInfo[F](Set.empty)
    import Cache._
    def infosInputCache: InputCache[FilesInfo[F]] = basicInput
    implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File], FilesInfo[F]]
    implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv
  }
  private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI })(implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style {
    type F = FI
    val fileStyle: FileInfo.Style { type F = FI } = style
    private implicit val infoFormat: Format[FI] = fileStyle.format
    implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files.map(_.getAbsoluteFile).map(fileStyle.apply))
    implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs))
  }
  lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full)
  lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash)
  lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified)
  lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists)

  implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache
  implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache
  implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache
  implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache
}
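Each style above trades precision for cost when snapshotting a file; a short sketch of the three common ones (the file path is hypothetical):

// Sketch: content hash vs. timestamp vs. existence snapshots.
import java.io.File

val f = new File("build.sbt") // hypothetical file
val byHash = sbt.FileInfo.hash(f) // HashFileInfo: content hash, most precise
val byTime = sbt.FileInfo.lastModified(f) // ModifiedFileInfo: mtime only
val byExistence = sbt.FileInfo.exists(f) // PlainFileInfo: presence only
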
@@ -1,62 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt

import Types.:+:
import sbinary.{ DefaultProtocol, Format, Input, Output => Out }
import DefaultProtocol.ByteFormat
import java.io.{ File, InputStream, OutputStream }

trait InputCache[I] {
  type Internal
  def convert(i: I): Internal
  def read(from: Input): Internal
  def write(to: Out, j: Internal): Unit
  def equiv: Equiv[Internal]
}
object InputCache {
  implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] =
    new InputCache[I] {
      type Internal = I
      def convert(i: I) = i
      def read(from: Input): I = fmt.reads(from)
      def write(to: Out, i: I) = fmt.writes(to, i)
      def equiv = eqv
    }
  def lzy[I](mkIn: => InputCache[I]): InputCache[I] =
    new InputCache[I] {
      lazy val ic = mkIn
      type Internal = ic.Internal
      def convert(i: I) = ic convert i
      def read(from: Input): ic.Internal = ic.read(from)
      def write(to: Out, i: ic.Internal) = ic.write(to, i)
      def equiv = ic.equiv
    }
}

class BasicCache[I, O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I, O] {
  def apply(file: File)(in: I) =
    {
      val j = input.convert(in)
      try { applyImpl(file, j) }
      catch { case e: Exception => Right(update(file)(j)) }
    }
  protected def applyImpl(file: File, in: input.Internal) =
    {
      Using.fileInputStream(file) { stream =>
        val previousIn = input.read(stream)
        if (input.equiv.equiv(in, previousIn))
          Left(outFormat.reads(stream))
        else
          Right(update(file)(in))
      }
    }
  protected def update(file: File)(in: input.Internal) = (out: O) =>
    {
      Using.fileOutputStream(false)(file) { stream =>
        input.write(stream, in)
        outFormat.writes(stream, out)
      }
    }
}
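BasicCache stores the converted input followed by the output in a single file, and its Either protocol is easy to misread: Left is a cache hit, Right hands back the function that persists a newly computed output. Restated as a sketch against the Cache interface (this is the same logic Cache.cached implements above):

// Sketch: driving a Cache directly rather than through Cache.cached.
import java.io.File

def getOrCompute[I, O](file: File, in: I)(compute: I => O)(implicit c: sbt.Cache[I, O]): O =
  c(file)(in) match {
    case Left(cached) => cached // stored input was equivalent; reuse stored output
    case Right(store) => val out = compute(in); store(out); out
  }
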
@@ -1,31 +0,0 @@
package sbt

import java.io.File
import Types.:+:

object CacheTest // extends Properties("Cache test")
{
  val lengthCache = new File("/tmp/length-cache")
  val cCache = new File("/tmp/c-cache")

  import Cache._
  import FileInfo.hash._
  import Ordering._
  import sbinary.DefaultProtocol.FileFormat
  def test(): Unit = {
    lazy val create = new File("test")

    val length = cached(lengthCache) {
      (f: File) => { println("File length: " + f.length); f.length }
    }

    lazy val fileLength = length(create)

    val c = cached(cCache) { (in: (File :+: Long :+: HNil)) =>
      val file :+: len :+: HNil = in
      println("File: " + file + " (" + file.exists + "), length: " + len)
      (len + 1) :+: file :+: HNil
    }
    c(create :+: fileLength :+: HNil)
  }
}
@@ -1,3 +0,0 @@
Simple Build Tool: Tracking Component
Copyright 2009, 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)

@@ -1,70 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt

object ChangeReport {
  def modified[T](files: Set[T]) =
    new EmptyChangeReport[T] {
      override def checked = files
      override def modified = files
      override def markAllModified = this
    }
  def unmodified[T](files: Set[T]) =
    new EmptyChangeReport[T] {
      override def checked = files
      override def unmodified = files
    }
}
/** The result of comparing some current set of objects against a previous set of objects.*/
trait ChangeReport[T] extends NotNull {
  /** The set of all of the objects in the current set.*/
  def checked: Set[T]
  /** All of the objects that are in the same state in the current and reference sets.*/
  def unmodified: Set[T]
  /**
   * All checked objects that are not in the same state as the reference. This includes objects that are in both
   * sets but have changed and files that are only in one set.
   */
  def modified: Set[T] // all changes, including added
  /** All objects that are only in the current set.*/
  def added: Set[T]
  /** All objects only in the previous set.*/
  def removed: Set[T]
  def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other)
  /**
   * Generate a new report with this report's unmodified set included in the new report's modified set. The new report's
   * unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report.
   */
  def markAllModified: ChangeReport[T] =
    new ChangeReport[T] {
      def checked = ChangeReport.this.checked
      def unmodified = Set.empty[T]
      def modified = ChangeReport.this.checked
      def added = ChangeReport.this.added
      def removed = ChangeReport.this.removed
      override def markAllModified = this
    }
  override def toString =
    {
      val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed")
      val sets = List(checked, modified, unmodified, added, removed)
      val keyValues = labels.zip(sets).map { case (label, set) => label + ": " + set.mkString(", ") }
      keyValues.mkString("Change report:\n\t", "\n\t", "")
    }
}
class EmptyChangeReport[T] extends ChangeReport[T] {
  def checked = Set.empty[T]
  def unmodified = Set.empty[T]
  def modified = Set.empty[T]
  def added = Set.empty[T]
  def removed = Set.empty[T]
  override def toString = "No changes"
}
private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] {
  lazy val checked = a.checked ++ b.checked
  lazy val unmodified = a.unmodified ++ b.unmodified
  lazy val modified = a.modified ++ b.modified
  lazy val added = a.added ++ b.added
  lazy val removed = a.removed ++ b.removed
}
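A small sketch of the combinators above: +++ unions each category pointwise, and markAllModified folds the unmodified set into the modified one.

// Sketch: combining and force-invalidating change reports.
val a = sbt.ChangeReport.modified(Set("A.scala"))
val b = sbt.ChangeReport.unmodified(Set("B.scala"))
val both = a +++ b
// both.checked  == Set("A.scala", "B.scala")
// both.modified == Set("A.scala")
// both.markAllModified.modified == both.checked
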
@@ -1,254 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt

import java.io.{ File, IOException }
import CacheIO.{ fromFile, toFile }
import sbinary.Format
import scala.pickling.PicklingException
import scala.reflect.Manifest
import scala.collection.mutable
import IO.{ delete, read, write }
import sbt.serialization._

object Tracked {
  /**
   * Creates a tracker that provides the last time it was evaluated.
   * If 'useStartTime' is true, the recorded time is the start of the evaluated function.
   * If 'useStartTime' is false, the recorded time is when the evaluated function completes.
   * In both cases, the timestamp is not updated if the function throws an exception.
   */
  def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime)
  /** Creates a tracker that only evaluates a function when the input has changed.*/
  //def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] =
  //  new Changed[O](cacheFile)

  /** Creates a tracker that provides the difference between a set of input files for successive invocations.*/
  def diffInputs(cache: File, style: FilesInfo.Style): Difference =
    Difference.inputs(cache, style)
  /** Creates a tracker that provides the difference between a set of output files for successive invocations.*/
  def diffOutputs(cache: File, style: FilesInfo.Style): Difference =
    Difference.outputs(cache, style)

  def lastOutput[I, O](cacheFile: File)(f: (I, Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in =>
    {
      val previous: Option[O] = fromFile[O](cacheFile)
      val next = f(in, previous)
      toFile(next)(cacheFile)
      next
    }
  // Todo: This function needs more testing.
  private[sbt] def lastOutputWithJson[I, O: Pickler: Unpickler](cacheFile: File)(f: (I, Option[O]) => O): I => O = in =>
    {
      val previous: Option[O] = try {
        fromJsonFile[O](cacheFile).toOption
      } catch {
        case e: PicklingException => None
        case e: IOException       => None
      }
      val next = f(in, previous)
      IO.createDirectory(cacheFile.getParentFile)
      toJsonFile(next, cacheFile)
      next
    }
  def inputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in =>
    {
      val help = new CacheHelp(ic)
      val conv = help.convert(in)
      val changed = help.changed(cacheFile, conv)
      val result = f(changed, in)

      if (changed)
        help.save(cacheFile, conv)

      result
    }
  private[sbt] def inputChangedWithJson[I: Pickler: Unpickler, O](cacheFile: File)(f: (Boolean, I) => O): I => O = in =>
    {
      val help = new JsonCacheHelp[I]
      val conv = help.convert(in)
      val changed = help.changed(cacheFile, conv)
      val result = f(changed, in)

      if (changed)
        help.save(cacheFile, conv)

      result
    }
  def outputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in =>
    {
      val initial = in()
      val help = new CacheHelp(ic)
      val changed = help.changed(cacheFile, help.convert(initial))
      val result = f(changed, initial)

      if (changed)
        help.save(cacheFile, help.convert(in()))

      result
    }
  private[sbt] def outputChangedWithJson[I: Pickler, O](cacheFile: File)(f: (Boolean, I) => O): (() => I) => O = in =>
    {
      val initial = in()
      val help = new JsonCacheHelp[I]
      val changed = help.changed(cacheFile, help.convert(initial))
      val result = f(changed, initial)

      if (changed)
        help.save(cacheFile, help.convert(in()))

      result
    }
  final class CacheHelp[I](val ic: InputCache[I]) {
    def convert(i: I): ic.Internal = ic.convert(i)
    def save(cacheFile: File, value: ic.Internal): Unit =
      Using.fileOutputStream()(cacheFile)(out => ic.write(out, value))
    def changed(cacheFile: File, converted: ic.Internal): Boolean =
      try {
        val prev = Using.fileInputStream(cacheFile)(x => ic.read(x))
        !ic.equiv.equiv(converted, prev)
      } catch { case e: Exception => true }
  }
  private[sbt] final class JsonCacheHelp[I: Pickler] {
    def convert(i: I): String = toJsonString(i)
    def save(cacheFile: File, value: String): Unit =
      IO.write(cacheFile, value, IO.utf8)
    def changed(cacheFile: File, converted: String): Boolean =
      try {
        val prev = IO.read(cacheFile, IO.utf8)
        converted != prev
      } catch { case e: Exception => true }
  }
}

trait Tracked {
  /** Cleans outputs and clears the cache.*/
  def clean(): Unit
}
class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked {
  def clean() = delete(cacheFile)
  /**
   * Reads the previous timestamp, evaluates the provided function,
   * and then updates the timestamp if the function completes normally.
   */
  def apply[T](f: Long => T): T =
    {
      val start = now()
      val result = f(readTimestamp)
      write(cacheFile, (if (useStartTime) start else now()).toString)
      result
    }
  private def now() = System.currentTimeMillis
  def readTimestamp: Long =
    try { read(cacheFile).toLong }
    catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 }
}

class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked {
  def clean() = delete(cacheFile)
  def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value =>
    {
      if (uptodate(value))
        ifUnchanged(value)
      else {
        update(value)
        ifChanged(value)
      }
    }

  def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value))
  def uptodate(value: O): Boolean =
    try {
      Using.fileInputStream(cacheFile) {
        stream => equiv.equiv(value, format.reads(stream))
      }
    } catch {
      case _: Exception => false
    }
}
object Difference {
  def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference =
    (cache, style) => new Difference(cache, style, defineClean, filesAreOutputs)

  /**
   * Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the
   * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice:
   * before and after running the function.
   */
  val outputs = constructor(true, true)
  /**
   * Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the
   * hash/last modified time of the files as they were prior to running the function.
   */
  val inputs = constructor(false, false)
}
class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked {
  def clean() =
    {
      if (defineClean) delete(raw(cachedFilesInfo)) else ()
      clearCache()
    }
  private def clearCache() = delete(cache)

  private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files
  private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file)

  def apply[T](files: Set[File])(f: ChangeReport[File] => T): T =
    {
      val lastFilesInfo = cachedFilesInfo
      apply(files, lastFilesInfo)(f)(_ => files)
    }

  def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T =
    {
      val lastFilesInfo = cachedFilesInfo
      apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles)
    }

  private def abs(files: Set[File]) = files.map(_.getAbsoluteFile)
  private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T =
    {
      val lastFiles = raw(lastFilesInfo)
      val currentFiles = abs(files)
      val currentFilesInfo = style(currentFiles)

      val report = new ChangeReport[File] {
        lazy val checked = currentFiles
        lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist.
        lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist.
        lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added
        lazy val unmodified = checked -- modified
      }

      val result = f(report)
      val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo
      toFile(style.formats)(info)(cache)(style.manifest)
      result
    }
}

object FileFunction {
  type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File]

  def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] =
    cached(cacheBaseDirectory)(inStyle, outStyle)((in, out) => action(in.checked))

  def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] =
    {
      import Path._
      lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle)
      lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle)
      inputs =>
        {
          inCache(inputs) { inReport =>
            outCache { outReport =>
              if (inReport.modified.isEmpty && outReport.modified.isEmpty)
                outReport.checked
              else
                action(inReport, outReport)
            }
          }
        }
    }
}
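FileFunction.cached is the piece of this file that build definitions use directly; a sketch of the usual pattern, where cacheDir and outDir are hypothetical locations. The wrapped action runs only when an input or output changed since the last invocation.

// Sketch: an incremental copy task built on FileFunction.cached.
import java.io.File
import sbt.{ FileFunction, FilesInfo, IO }

def cachedCopy(cacheDir: File, outDir: File): Set[File] => Set[File] =
  FileFunction.cached(cacheDir, FilesInfo.lastModified, FilesInfo.exists) { in =>
    in.map { src =>
      val dest = new File(outDir, src.getName)
      IO.copyFile(src, dest) // only reached when something changed
      dest
    }
  }
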
@@ -1,3 +0,0 @@
Simple Build Tool: Compile Component
Copyright 2009, 2010 Mark Harrah, Seth Tisue, Jason Zaugg
Licensed under BSD-style license (see LICENSE)

@@ -1,3 +0,0 @@
Simple Build Tool: Source API Component
Copyright 2009, 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)

@@ -1,387 +0,0 @@
package sbt

import java.lang.reflect.{ Array => _, _ }
import java.lang.annotation.Annotation
import annotation.tailrec
import sbt.classfile.ClassFile
import xsbti.api
import xsbti.SafeLazy
import SafeLazy.strict
import collection.mutable

object ClassToAPI {
  def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1

  // (api, public inherited classes)
  def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) =
    {
      val pkgs = packages(c).map(p => new api.Package(p))
      val cmap = emptyClassMap
      val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap))
      val source = new api.SourceAPI(pkgs.toArray, defs.toArray)
      cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded
      val inDeps = cmap.inherited.toSet
      cmap.clear()
      (source, inDeps)
    }

  // Avoiding implicit allocation.
  private def arrayMap[T <: AnyRef, U <: AnyRef: reflect.ClassTag](xs: Array[T])(f: T => U): Array[U] = {
    val len = xs.length
    var i = 0
    val res = new Array[U](len)
    while (i < len) {
      res(i) = f(xs(i))
      i += 1
    }
    res
  }

  def packages(c: Seq[Class[_]]): Set[String] =
    c.flatMap(packageName).toSet

  def isTopLevel(c: Class[_]): Boolean =
    c.getEnclosingClass eq null

  final class ClassMap private[sbt] (private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) {
    def clear(): Unit = {
      memo.clear()
      inherited.clear()
      lz.clear()
    }
  }
  def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer)

  def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] =
    cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap))
  def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] =
    {
      import api.DefinitionType.{ ClassDef, Module, Trait }
      val enclPkg = packageName(c)
      val mods = modifiers(c.getModifiers)
      val acc = access(c.getModifiers, enclPkg)
      val annots = annotations(c.getAnnotations)
      val name = c.getName
      val tpe = if (Modifier.isInterface(c.getModifiers)) Trait else ClassDef
      lazy val (static, instance) = structure(c, enclPkg, cmap)
      val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots)
      val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots)
      val defs = cls :: stat :: Nil
      cmap.memo(c.getName) = defs
      defs
    }

  /** Returns the (static structure, instance structure, inherited classes) for `c`. */
  def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) = {
    lazy val cf = classFileForClass(c)
    val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg))
    val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(c, cf, enclPkg))
    val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg))
    val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c)
    val all = methods ++ fields ++ constructors ++ classes
    val parentJavaTypes = allSuperTypes(c)
    if (!Modifier.isPrivate(c.getModifiers))
      cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c }
    val parentTypes = types(parentJavaTypes)
    val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray))
    val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray))
    (staticStructure, instanceStructure)
  }

  /** TODO: over time, ClassToAPI should switch the majority of access to the classfile parser */
  private[this] def classFileForClass(c: Class[_]): ClassFile = {
    val file = new java.io.File(IO.classLocationFile(c), s"${c.getName.replace('.', '/')}.class")
    classfile.Parser.apply(file)
  }

  private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t)
  def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = xsbti.SafeLazy(t)
  private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = {
    val s = lzy(t)
    cmap.lz += s
    s
  }

  private val emptyStringArray = new Array[String](0)
  private val emptyTypeArray = new Array[xsbti.api.Type](0)
  private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0)
  private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0)
  private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0)
  private val lzyEmptyTpeArray = lzyS(emptyTypeArray)
  private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0))

  private def allSuperTypes(t: Type): Seq[Type] =
    {
      @tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match {
        case c: Class[_] =>
          val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces)
          accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces))
        case p: ParameterizedType =>
          accumulate(p.getRawType, accum)
        case _ =>
          accum
      }
      @tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] =
        {
          if (interfaces.nonEmpty) {
            val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i }
            val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty }
            flattenAll(children, accum ++ interfaces ++ children)
          } else
            accum
        }
      accumulate(t).filterNot(_ == null).distinct
    }

  @deprecated("No longer used", "0.13.0")
  def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c))
  def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray;
  def upperBounds(ts: Array[Type]): api.Type =
    new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray)

  @deprecated("Use fieldToDef[4] instead", "0.13.9")
  def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike = {
    val c = f.getDeclaringClass()
    fieldToDef(c, classFileForClass(c), enclPkg)(f)
  }

  def fieldToDef(c: Class[_], cf: => ClassFile, enclPkg: Option[String])(f: Field): api.FieldLike =
    {
      val name = f.getName
      val accs = access(f.getModifiers, enclPkg)
      val mods = modifiers(f.getModifiers)
      val annots = annotations(f.getDeclaredAnnotations)
      val fieldTpe = reference(returnType(f))
      // generate a more specific type for constant fields
      val specificTpe: Option[api.Type] =
        if (mods.isFinal) {
          try {
            cf.constantValue(name).map(singletonForConstantField(c, f, _))
          } catch {
            case e: Throwable =>
              throw new IllegalStateException(
                s"Failed to parse class $c: this may mean your classfiles are corrupted. Please clean and try again.",
                e
              )
          }
        } else {
          None
        }
      val tpe = specificTpe.getOrElse(fieldTpe)
      if (mods.isFinal) {
        new api.Val(tpe, name, accs, mods, annots)
      } else {
        new api.Var(tpe, name, accs, mods, annots)
      }
    }

  /**
   * Creates a Singleton type that includes both the type and ConstantValue for the given Field.
   *
   * Since java compilers are allowed to inline constant (static final primitive) fields in
   * downstream classfiles, we generate a type that will cause APIs to match only when both
   * the type and value of the field match. We include the classname mostly for readability.
   *
   * Because this type is purely synthetic, it's fine that the name might contain filename-
   * banned characters.
   */
  private def singletonForConstantField(c: Class[_], field: Field, constantValue: AnyRef) =
    new api.Singleton(
      pathFromStrings(
        c.getName.split("\\.").toSeq :+ (field.getName + "$" + returnType(field) + "$" + constantValue)
      )
    )

  def methodToDef(enclPkg: Option[String])(m: Method): api.Def =
    defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Option(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg)

  def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def =
    defLike("<init>", c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg)

  def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def =
    {
      val varArgPosition = if (varArgs) paramTypes.length - 1 else -1
      val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition)
      val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a, p, v) => parameter(a, p, v) }
      val params = new api.ParameterList(pa, false)
      val ret = retType match { case Some(rt) => reference(rt); case None => Empty }
      new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ exceptionAnnotations(exceptions))
    }

  def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] =
    if (exceptions.length == 0) emptyAnnotationArray
    else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString))))

  def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter =
    new api.MethodParameter("", annotated(reference(parameter), annots), false, if (varArgs) api.ParameterModifier.Repeated else api.ParameterModifier.Plain)

  def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = (
    if (annots.length == 0) t
    else new api.Annotated(t, annotations(annots))
  )

  case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: Seq[api.Definition]) {
    def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited)
  }
  def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs =
    merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of)

  def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T], Seq[T]), isInherited: T => Boolean): Defs =
    {
      val (selfStatic, selfInstance) = splitStatic(self)
      val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited)
      Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f)
    }

  def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) =
    defs partition isStatic

  def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers)
  def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers)

  def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] =
    if (tps.length == 0) emptyTypeParameterArray
    else arrayMap(tps)(typeParameter)

  def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter =
    new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds))

  // needs to be stable across compilations
  def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String =
    name(tv.getGenericDeclaration) + " " + tv.getName

  def reduceHash(in: Array[Byte]): Int =
    (0 /: in)((acc, b) => (acc * 43) ^ b)

  def name(gd: GenericDeclaration): String =
    gd match {
      case c: Class[_]       => c.getName
      case m: Method         => m.getName
      case c: Constructor[_] => c.getName
    }

  def modifiers(i: Int): api.Modifiers =
    {
      import Modifier.{ isAbstract, isFinal }
      new api.Modifiers(isAbstract(i), false, isFinal(i), false, false, false, false)
    }
  def access(i: Int, pkg: Option[String]): api.Access =
    {
      import Modifier.{ isPublic, isPrivate, isProtected }
      if (isPublic(i)) Public else if (isPrivate(i)) Private else if (isProtected(i)) Protected else packagePrivate(pkg)
    }

  def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation)
  def annotation(a: Annotation): api.Annotation =
    new api.Annotation(reference(a.annotationType), Array(javaAnnotation(a.toString)))

  // full information not available from reflection
  def javaAnnotation(s: String): api.AnnotationArgument =
    new api.AnnotationArgument("toString", s)

  def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe))
  def reference(c: Class[_]): api.SimpleType =
    if (c.isArray) array(reference(c.getComponentType)) else if (c.isPrimitive) primitive(c.getName) else reference(c.getName)

  // does not handle primitives
  def reference(s: String): api.SimpleType =
    {
      val (pkg, cls) = packageAndName(s)
      pkg match {
        // translate all primitives?
        case None => new api.Projection(Empty, cls)
        case Some(p) =>
          new api.Projection(new api.Singleton(pathFromString(p)), cls)
      }
    }
  def referenceP(t: ParameterizedType): api.Parameterized =
    {
      val targs = t.getActualTypeArguments
      val args = if (targs.isEmpty) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type)
      val base = reference(t.getRawType)
      new api.Parameterized(base, args.toArray[api.Type])
    }
  def reference(t: Type): api.SimpleType =
    t match {
      case w: WildcardType       => reference("_")
      case tv: TypeVariable[_]   => new api.ParameterRef(typeVariable(tv))
      case pt: ParameterizedType => referenceP(pt)
      case gat: GenericArrayType => array(reference(gat.getGenericComponentType))
      case c: Class[_]           => reference(c)
    }

  def pathFromString(s: String): api.Path =
    pathFromStrings(s.split("\\."))
  def pathFromStrings(ss: Seq[String]): api.Path =
    new api.Path((ss.map(new api.Id(_)) :+ ThisRef).toArray)
  def packageName(c: Class[_]) = packageAndName(c)._1
  def packageAndName(c: Class[_]): (Option[String], String) =
    packageAndName(c.getName)
  def packageAndName(name: String): (Option[String], String) =
    {
      val lastDot = name.lastIndexOf('.')
      if (lastDot >= 0)
        (Some(name.substring(0, lastDot)), name.substring(lastDot + 1))
      else
        (None, name)
    }

  val Empty = new api.EmptyType
  val ThisRef = new api.This

  val Public = new api.Public
  val Unqualified = new api.Unqualified
  val Private = new api.Private(Unqualified)
  val Protected = new api.Protected(Unqualified)
  def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse ""))

  val ArrayRef = reference("scala.Array")
  val Throws = reference("scala.throws")
  val NothingRef = reference("scala.Nothing")

  private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double")
  private[this] def PrimitiveMap = PrimitiveNames.map(j => (j, j.capitalize)) :+ ("void" -> "Unit")
  private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." + sn)) }.toMap
  def primitive(name: String): api.SimpleType = PrimitiveRefs(name)

  // Workarounds for https://github.com/sbt/sbt/issues/1035
  // these catch the GenericSignatureFormatError and return the erased type

  private[this] def returnType(f: Field): Type = try f.getGenericType catch {
    case _: GenericSignatureFormatError => f.getType
  }
  private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch {
    case _: GenericSignatureFormatError => convert(c.getParameterTypes)
  }
  private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch {
    case _: GenericSignatureFormatError => convert(c.getExceptionTypes)
  }
  private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch {
    case _: GenericSignatureFormatError => convert(m.getParameterTypes)
  }
  private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch {
    case _: GenericSignatureFormatError => m.getReturnType
  }
  private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch {
    case _: GenericSignatureFormatError => convert(m.getExceptionTypes)
  }

  private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch {
    case _: GenericSignatureFormatError => new Array(0)
  }
  private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch {
    case _: GenericSignatureFormatError => new Array(0)
  }
  private[this] def typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch {
    case _: GenericSignatureFormatError => new Array(0)
  }
  private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch {
    case _: GenericSignatureFormatError => c.getSuperclass
  }
  private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch {
    case _: GenericSignatureFormatError => convert(c.getInterfaces)
  }

  private[this] def convert(classes: Array[Class[_]]): Array[Type] =
    classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only
}
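A usage sketch for the entry points above. Example is a hypothetical class; it should be one whose .class file is reachable on the filesystem classpath, since constant-field handling parses the classfile.

// Sketch: extracting an API from an already-loaded class via reflection.
class Example { def greet(name: String): String = "hi " + name }

val srcApi: xsbti.api.SourceAPI = sbt.ClassToAPI(Seq(classOf[Example]))
// process additionally reports the public supertypes encountered, which
// the incremental compiler records as inherited dependencies.
val (_, inheritedClasses) = sbt.ClassToAPI.process(Seq(classOf[Example]))
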
@ -1,66 +0,0 @@
|
|||
package xsbt.api

import xsbti.SafeLazy
import xsbti.api._
import scala.collection.mutable.HashSet

object APIUtil {
  val modifiersToByte = (m: Modifiers) => {
    import m._
    def x(b: Boolean, bit: Int) = if (b) 1 << bit else 0
    (x(isAbstract, 0) | x(isOverride, 1) | x(isFinal, 2) | x(isSealed, 3) | x(isImplicit, 4) | x(isLazy, 5) | x(isMacro, 6)).toByte
  }
  val byteToModifiers = (b: Byte) => {
    def x(bit: Int) = (b & (1 << bit)) != 0
    new Modifiers(x(0), x(1), x(2), x(3), x(4), x(5), x(6))
  }

  def isScalaSourceName(name: String): Boolean = name.endsWith(".scala")

  def hasMacro(s: SourceAPI): Boolean =
    {
      val check = new HasMacro
      check.visitAPI(s)
      check.hasMacro
    }

  private[this] class HasMacro extends Visit {
    var hasMacro = false

    // Don't visit inherited definitions since we consider that a class
    // that inherits a macro does not have a macro.
    override def visitStructure0(structure: Structure): Unit = {
      visitTypes(structure.parents)
      visitDefinitions(structure.declared)
    }

    override def visitModifiers(m: Modifiers): Unit = {
      hasMacro ||= m.isMacro
      super.visitModifiers(m)
    }
  }

  def minimize(api: SourceAPI): SourceAPI =
    new SourceAPI(api.packages, minimizeDefinitions(api.definitions))
  def minimizeDefinitions(ds: Array[Definition]): Array[Definition] =
    ds flatMap minimizeDefinition
  def minimizeDefinition(d: Definition): Array[Definition] =
    d match {
      case c: ClassLike => Array(minimizeClass(c))
      case _            => Array()
    }
  def minimizeClass(c: ClassLike): ClassLike =
    {
      val savedAnnotations = Discovery.defAnnotations(c.structure, (_: Any) => true).toArray[String]
      val struct = minimizeStructure(c.structure, c.definitionType == DefinitionType.Module)
      new ClassLike(c.definitionType, lzy(emptyType), lzy(struct), savedAnnotations, c.typeParameters, c.name, c.access, c.modifiers, c.annotations)
    }

  def minimizeStructure(s: Structure, isModule: Boolean): Structure =
    new Structure(lzy(s.parents), filterDefinitions(s.declared, isModule), filterDefinitions(s.inherited, isModule))
  def filterDefinitions(ds: Array[Definition], isModule: Boolean): Lazy[Array[Definition]] =
    lzy(if (isModule) ds filter Discovery.isMainMethod else Array())
  private[this] def lzy[T <: AnyRef](t: T): Lazy[T] = SafeLazy.strict(t)

  private[this] val emptyType = new EmptyType
}
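
A quick round-trip check of the bit packing above (a sketch, not part of the original source): modifiersToByte and byteToModifiers are mutual inverses, one flag per bit in the order abstract, override, final, sealed, implicit, lazy, macro.

    // sketch: byte 6 has bits 1 and 2 set, i.e. override and final
    val m = APIUtil.byteToModifiers(6.toByte)
    assert(m.isOverride && m.isFinal && !m.isAbstract)
    assert(APIUtil.modifiersToByte(m) == 6.toByte)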

@@ -1,11 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package xsbt.api

final case class Discovered(baseClasses: Set[String], annotations: Set[String], hasMain: Boolean, isModule: Boolean) {
  def isEmpty = baseClasses.isEmpty && annotations.isEmpty
}
object Discovered {
  def empty = new Discovered(Set.empty, Set.empty, false, false)
}

@@ -1,103 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package xsbt.api

import xsbti.api.{ Path => APath, _ }

import Discovery._

class Discovery(baseClasses: Set[String], annotations: Set[String]) {
  def apply(s: Seq[Definition]): Seq[(Definition, Discovered)] =
    s.map { d => (d, apply(d)) }
  def apply(d: Definition): Discovered =
    d match {
      case c: ClassLike if isConcrete(c.modifiers) =>
        if (isPublic(c))
          discover(c)
        else if (isModule(c) && hasMainMethod(c)) // jvm does not require a main class to be public
          new Discovered(Set.empty, Set.empty, true, true)
        else
          Discovered.empty
      case _ => Discovered.empty
    }
  def discover(c: ClassLike): Discovered =
    {
      val onClass = Discovery.findAnnotations(c.annotations, annotations)
      val onDefs = Discovery.defAnnotations(c.structure, annotations) ++ c.savedAnnotations.filter(annotations)
      val module = isModule(c)
      new Discovered(bases(c.name, c.structure.parents), onClass ++ onDefs, module && hasMainMethod(c), module)
    }

  def bases(own: String, c: Seq[Type]): Set[String] =
    (own +: c.flatMap(simpleName)).filter(baseClasses).toSet
}
object Discovery {
  def apply(subclasses: Set[String], annotations: Set[String])(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
    {
      val d = new Discovery(subclasses, annotations)
      d(definitions)
    }
  def applications(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
    apply(Set.empty, Set.empty)(definitions)

  def findAnnotations(as: Seq[Annotation], pred: String => Boolean): Set[String] =
    as.flatMap { a => simpleName(a.base).filter(pred) }.toSet
  def defAnnotations(s: Structure, pred: String => Boolean): Set[String] =
    defAnnotations(s.declared, pred) ++ defAnnotations(s.inherited, pred)
  def defAnnotations(defs: Seq[Definition], pred: String => Boolean): Set[String] =
    findAnnotations(defs.flatMap { case d: Def if isPublic(d) => d.annotations.toSeq; case _ => Nil }, pred)

  def isConcrete(a: Definition): Boolean = isConcrete(a.modifiers)
  def isConcrete(m: Modifiers) = !m.isAbstract
  def isPublic(a: Definition): Boolean = isPublic(a.access)
  def isPublic(a: Access): Boolean = a.isInstanceOf[Public]
  def isModule(c: ClassLike) = c.definitionType == DefinitionType.Module

  def hasMainMethod(c: ClassLike): Boolean =
    hasMainMethod(c.structure.declared) || hasMainMethod(c.structure.inherited)
  def hasMainMethod(defs: Seq[Definition]): Boolean =
    defs.exists(isMainMethod)
  def isMainMethod(d: Definition): Boolean =
    d match {
      case d: Def => d.name == "main" && isPublic(d) && isConcrete(d) && isUnit(d.returnType) && isStringArray(d.valueParameters)
      case _      => false
    }
  def isStringArray(vp: IndexedSeq[ParameterList]): Boolean = vp.length == 1 && isStringArray(vp(0).parameters)
  def isStringArray(params: Seq[MethodParameter]): Boolean = params.length == 1 && isStringArray(params(0))
  def isStringArray(p: MethodParameter): Boolean = (p.modifier == ParameterModifier.Plain || p.modifier == ParameterModifier.Repeated) && isStringArray(p.tpe)
  def isStringArray(t: Type): Boolean = isParameterized(t, "scala.Array", "java.lang.String") // doesn't handle scala.this#Predef#String, should API phase dealias?

  def isParameterized(t: Type, base: String, args: String*): Boolean = t match {
    case p: Parameterized =>
      named(p.baseType, base) && p.typeArguments.length == args.length && p.typeArguments.flatMap(simpleName).sameElements(args)
    case _ => false
  }
  def named(t: Type, nme: String) = simpleName(t) == Some(nme)

  def simpleName(t: Type): Option[String] = t match {
    case a: Annotated    => simpleName(a.baseType)
    case sing: Singleton => None
    case p: Projection =>
      p.prefix match {
        case s: Singleton => pathName(s.path, p.id)
        case e: EmptyType => Some(p.id)
        case _            => None
      }
    case _ => None
  }

  def pathName(p: APath, id: String): Option[String] =
    {
      val cs = p.components
      cs.last match {
        case _: This =>
          val ids = cs.init.collect { case i: Id => i.id }
          if (ids.length == cs.length - 1) Some((ids ++ Seq(id)).mkString(".")) else None
        case _ => None
      }
    }

  def isUnit(t: Type): Boolean = named(t, "scala.Unit")
}
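
For orientation (a sketch, not part of the diff): isMainMethod above accepts exactly the JVM entry-point shape, i.e. a public, concrete def named main that returns scala.Unit and takes a single Array[String] parameter list. In source form, the definitions it discovers look like:

    // sketch: this object would be discovered as having a main method
    object HelloApp {
      def main(args: Array[String]): Unit = println("hello")
    }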

@@ -1,369 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010, 2011 Mark Harrah
 */
package xsbt.api

import scala.util
import xsbti.api._
import util.MurmurHash
import HashAPI.Hash

object HashAPI {
  type Hash = Int
  def apply(a: SourceAPI): Hash =
    (new HashAPI(false, true, true)).hashAPI(a)

  def apply(x: Def): Hash = {
    val hashApi = new HashAPI(false, true, true)
    hashApi.hashDefinition(x)
    hashApi.finalizeHash
  }

  def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Hash = {
    val hashAPI = new HashAPI(false, true, false)
    hashAPI.hashDefinitionsWithExtraHashes(ds)
    hashAPI.finalizeHash
  }
}

/**
 * Implements hashing of the public API.
 *
 * @param includePrivate should private definitions be included in the hash sum
 * @param includeParamNames should parameter names for methods be included in the hash sum
 * @param includeDefinitions when hashing a structure (e.g. of a class), should hashes of definitions (members)
 *   be included in the hash sum. A Structure can also appear as a type (in a structural type), and in that case
 *   definitions are always included in the hash sum.
 */
final class HashAPI(includePrivate: Boolean, includeParamNames: Boolean, includeDefinitions: Boolean) {
  // this constructor variant is for source and binary backwards compatibility with sbt 0.13.0
  def this(includePrivate: Boolean, includeParamNames: Boolean) {
    // in the old logic we used to always include definitions, hence
    // includeDefinitions = true
    this(includePrivate, includeParamNames, includeDefinitions = true)
  }

  import scala.collection.mutable
  import MurmurHash.{ extendHash, nextMagicA, nextMagicB, startHash, startMagicA, startMagicB, stringHash, symmetricHash }

  private[this] val visitedStructures = visitedMap[Structure]
  private[this] val visitedClassLike = visitedMap[ClassLike]
  private[this] def visitedMap[T] = new mutable.HashMap[T, List[Hash]]
  private[this] def visit[T](map: mutable.Map[T, List[Hash]], t: T)(hashF: T => Unit): Unit = {
    map.put(t, hash :: map.getOrElse(t, Nil)) match {
      case Some(x :: _) => extend(x)
      case _ =>
        hashF(t)
        for (hs <- map(t))
          extend(hs)
        map.put(t, hash :: Nil)
    }
  }
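
  // Note (editorial sketch, not in the original source): `visit` is what makes
  // hashing terminate on cyclic Structure/ClassLike graphs. The first visit of
  // a node records the current running hash, computes hashF(t), then mixes in
  // every hash recorded for that node; a re-entrant visit of the same node
  // mixes the previously recorded hash instead of recursing.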

  private[this] def isTrait(cl: ClassLike) = cl.definitionType == DefinitionType.Trait

  private[this] final val ValHash = 1
  private[this] final val VarHash = 2
  private[this] final val DefHash = 3
  private[this] final val ClassHash = 4
  private[this] final val TypeDeclHash = 5
  private[this] final val TypeAliasHash = 6

  private[this] final val PublicHash = 30
  private[this] final val ProtectedHash = 31
  private[this] final val PrivateHash = 32
  private[this] final val UnqualifiedHash = 33
  private[this] final val ThisQualifierHash = 34
  private[this] final val IdQualifierHash = 35

  private[this] final val IdPathHash = 20
  private[this] final val SuperHash = 21
  private[this] final val ThisPathHash = 22

  private[this] final val ValueParamsHash = 40
  private[this] final val ClassPendingHash = 41
  private[this] final val StructurePendingHash = 42

  private[this] final val EmptyTypeHash = 51
  private[this] final val ParameterRefHash = 52
  private[this] final val SingletonHash = 53
  private[this] final val ProjectionHash = 54
  private[this] final val ParameterizedHash = 55
  private[this] final val AnnotatedHash = 56
  private[this] final val PolymorphicHash = 57
  private[this] final val ConstantHash = 58
  private[this] final val ExistentialHash = 59
  private[this] final val StructureHash = 60

  private[this] final val TrueHash = 97
  private[this] final val FalseHash = 98

  private[this] var hash: Hash = startHash(0)
  private[this] var magicA: Hash = startMagicA
  private[this] var magicB: Hash = startMagicB

  @inline final def hashString(s: String): Unit = extend(stringHash(s))
  @inline final def hashBoolean(b: Boolean): Unit = extend(if (b) TrueHash else FalseHash)
  @inline final def hashSeq[T](s: Seq[T], hashF: T => Unit): Unit = {
    extend(s.length)
    s foreach hashF
  }
  final def hashSymmetric[T](ts: TraversableOnce[T], hashF: T => Unit): Unit = {
    val current = hash
    val mA = magicA
    val mB = magicB
    val (hashes, mAs, mBs) = ts.toList.map { t =>
      hash = startHash(1)
      magicA = startMagicA
      magicB = startMagicB
      hashF(t)
      (finalizeHash, magicA, magicB)
    }.unzip3
    hash = current
    magicA = mA
    magicB = mB
    extend(symmetricHash(hashes, 0xb592f7ae)) // constant from MurmurHash3
  }

  @inline final def extend(a: Hash): Unit = {
    hash = extendHash(hash, a, magicA, magicB)
    magicA = nextMagicA(magicA)
    magicB = nextMagicB(magicB)
  }

  def finalizeHash: Hash = MurmurHash.finalizeHash(hash)

  def hashModifiers(m: Modifiers) = extend(m.raw)

  def hashAPI(s: SourceAPI): Hash =
    {
      hash = startHash(0)
      hashSymmetric(s.packages, hashPackage)
      hashDefinitions(s.definitions, true)
      finalizeHash
    }

  def hashPackage(p: Package) = hashString(p.name)

  def hashDefinitions(ds: Seq[Definition], topLevel: Boolean): Unit =
    {
      val defs = SameAPI.filterDefinitions(ds, topLevel, includePrivate)
      hashSymmetric(defs, hashDefinition)
    }

  /**
   * Hashes a sequence of definitions by combining each definition's own
   * hash with the extra one supplied as the second element of its pair.
   *
   * This is useful when one wants to influence the hash of a definition by
   * some factor external to the definition itself (e.g. its location).
   *
   * NOTE: This method doesn't perform any filtering of the passed definitions.
   */
  def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Unit =
    {
      def hashDefinitionCombined(d: Definition, extraHash: Hash): Unit = {
        hashDefinition(d)
        extend(extraHash)
      }
      hashSymmetric(ds, (hashDefinitionCombined _).tupled)
    }
  def hashDefinition(d: Definition): Unit = {
    hashString(d.name)
    hashAnnotations(d.annotations)
    hashModifiers(d.modifiers)
    hashAccess(d.access)
    d match {
      case c: ClassLike       => hashClass(c)
      case f: FieldLike       => hashField(f)
      case d: Def             => hashDef(d)
      case t: TypeDeclaration => hashTypeDeclaration(t)
      case t: TypeAlias       => hashTypeAlias(t)
    }
  }
  final def hashClass(c: ClassLike): Unit = visit(visitedClassLike, c)(hashClass0)
  def hashClass0(c: ClassLike): Unit = {
    extend(ClassHash)
    hashParameterizedDefinition(c)
    hashType(c.selfType)
    hashStructure(c.structure, includeDefinitions, isTrait(c))
  }
  def hashField(f: FieldLike): Unit = {
    f match {
      case v: Var => extend(VarHash)
      case v: Val => extend(ValHash)
    }
    hashType(f.tpe)
  }
  def hashDef(d: Def): Unit = {
    extend(DefHash)
    hashParameterizedDefinition(d)
    hashValueParameters(d.valueParameters)
    hashType(d.returnType)
  }
  def hashAccess(a: Access): Unit =
    a match {
      case pub: Public     => extend(PublicHash)
      case qual: Qualified => hashQualified(qual)
    }
  def hashQualified(qual: Qualified): Unit =
    {
      qual match {
        case p: Protected => extend(ProtectedHash)
        case p: Private   => extend(PrivateHash)
      }
      hashQualifier(qual.qualifier)
    }
  def hashQualifier(qual: Qualifier): Unit =
    qual match {
      case _: Unqualified   => extend(UnqualifiedHash)
      case _: ThisQualifier => extend(ThisQualifierHash)
      case id: IdQualifier =>
        extend(IdQualifierHash)
        hashString(id.value)
    }

  def hashValueParameters(valueParameters: Seq[ParameterList]) = hashSeq(valueParameters, hashValueParameterList)
  def hashValueParameterList(list: ParameterList) =
    {
      extend(ValueParamsHash)
      hashBoolean(list.isImplicit)
      hashSeq(list.parameters, hashValueParameter)
    }
  def hashValueParameter(parameter: MethodParameter) =
    {
      hashString(parameter.name)
      hashType(parameter.tpe)
      extend(parameter.modifier.ordinal)
      hashBoolean(parameter.hasDefault)
    }

  def hashParameterizedDefinition[T <: ParameterizedDefinition](d: T): Unit = {
    hashTypeParameters(d.typeParameters)
  }
  def hashTypeDeclaration(d: TypeDeclaration): Unit = {
    extend(TypeDeclHash)
    hashParameterizedDefinition(d)
    hashType(d.lowerBound)
    hashType(d.upperBound)
  }
  def hashTypeAlias(d: TypeAlias): Unit = {
    extend(TypeAliasHash)
    hashParameterizedDefinition(d)
    hashType(d.tpe)
  }

  def hashTypeParameters(parameters: Seq[TypeParameter]) = hashSeq(parameters, hashTypeParameter)
  def hashTypeParameter(parameter: TypeParameter): Unit = {
    hashString(parameter.id)
    extend(parameter.variance.ordinal)
    hashTypeParameters(parameter.typeParameters)
    hashType(parameter.lowerBound)
    hashType(parameter.upperBound)
    hashAnnotations(parameter.annotations)
  }
  def hashAnnotations(annotations: Seq[Annotation]) = hashSeq(annotations, hashAnnotation)
  def hashAnnotation(annotation: Annotation) =
    {
      hashType(annotation.base)
      hashAnnotationArguments(annotation.arguments)
    }
  def hashAnnotationArguments(args: Seq[AnnotationArgument]) = hashSeq(args, hashAnnotationArgument)
  def hashAnnotationArgument(arg: AnnotationArgument): Unit = {
    hashString(arg.name)
    hashString(arg.value)
  }

  def hashTypes(ts: Seq[Type], includeDefinitions: Boolean = true) =
    hashSeq(ts, (t: Type) => hashType(t, includeDefinitions))
  def hashType(t: Type, includeDefinitions: Boolean = true): Unit =
    t match {
      case s: Structure     => hashStructure(s, includeDefinitions, isTrait = false)
      case e: Existential   => hashExistential(e)
      case c: Constant      => hashConstant(c)
      case p: Polymorphic   => hashPolymorphic(p)
      case a: Annotated     => hashAnnotated(a)
      case p: Parameterized => hashParameterized(p)
      case p: Projection    => hashProjection(p)
      case _: EmptyType     => extend(EmptyTypeHash)
      case s: Singleton     => hashSingleton(s)
      case pr: ParameterRef => hashParameterRef(pr)
    }

  def hashParameterRef(p: ParameterRef): Unit = {
    extend(ParameterRefHash)
    hashString(p.id)
  }
  def hashSingleton(s: Singleton): Unit = {
    extend(SingletonHash)
    hashPath(s.path)
  }
  def hashPath(path: Path) = hashSeq(path.components, hashPathComponent)
  def hashPathComponent(pc: PathComponent) = pc match {
    case _: This  => extend(ThisPathHash)
    case s: Super => hashSuperPath(s)
    case id: Id   => hashIdPath(id)
  }
  def hashSuperPath(s: Super): Unit = {
    extend(SuperHash)
    hashPath(s.qualifier)
  }
  def hashIdPath(id: Id): Unit = {
    extend(IdPathHash)
    hashString(id.id)
  }

  def hashConstant(c: Constant) =
    {
      extend(ConstantHash)
      hashString(c.value)
      hashType(c.baseType)
    }
  def hashExistential(e: Existential) =
    {
      extend(ExistentialHash)
      hashParameters(e.clause, e.baseType)
    }
  def hashPolymorphic(p: Polymorphic) =
    {
      extend(PolymorphicHash)
      hashParameters(p.parameters, p.baseType)
    }
  def hashProjection(p: Projection) =
    {
      extend(ProjectionHash)
      hashString(p.id)
      hashType(p.prefix)
    }
  def hashParameterized(p: Parameterized): Unit = {
    extend(ParameterizedHash)
    hashType(p.baseType)
    hashTypes(p.typeArguments)
  }
  def hashAnnotated(a: Annotated): Unit = {
    extend(AnnotatedHash)
    hashType(a.baseType)
    hashAnnotations(a.annotations)
  }
  @deprecated("Use the overload that indicates if the definition is a trait.", "0.14")
  final def hashStructure(structure: Structure, includeDefinitions: Boolean): Unit =
    hashStructure(structure, includeDefinitions, isTrait = false)
  final def hashStructure(structure: Structure, includeDefinitions: Boolean, isTrait: Boolean = false): Unit =
    visit(visitedStructures, structure)(structure => hashStructure0(structure, includeDefinitions, isTrait))
  @deprecated("Use the overload that indicates if the definition is a trait.", "0.14")
  def hashStructure0(structure: Structure, includeDefinitions: Boolean): Unit =
    hashStructure0(structure, includeDefinitions, isTrait = false)
  def hashStructure0(structure: Structure, includeDefinitions: Boolean, isTrait: Boolean = false): Unit = {
    extend(StructureHash)
    hashTypes(structure.parents, includeDefinitions)
    if (includeDefinitions || isTrait) {
      hashDefinitions(structure.declared, isTrait)
      hashDefinitions(structure.inherited, isTrait)
    }
  }
  def hashParameters(parameters: Seq[TypeParameter], base: Type): Unit =
    {
      hashTypeParameters(parameters)
      hashType(base)
    }
}
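
Usage sketch (editorial, with a hypothetical d: xsbti.api.Def): the class is a mutable MurmurHash stream, so a caller feeds it imperatively and reads the digest once at the end; streaming by hand matches the convenience HashAPI(d) because both use (false, true, true).

    // sketch: a hand-driven stream equals the one-shot convenience method
    val h = new HashAPI(false, true, true)
    h.hashDefinition(d)
    assert(h.finalizeHash == HashAPI(d))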

@@ -1,147 +0,0 @@
package xsbt.api

import xsbti.api.SourceAPI
import xsbti.api.Definition
import xsbti.api.DefinitionType
import xsbti.api.ClassLike
import xsbti.api._internalOnly_NameHash
import xsbti.api._internalOnly_NameHashes
import xsbti.api.DefinitionType.ClassDef
import xsbti.api.DefinitionType.Module
import xsbti.api.DefinitionType.PackageModule
import xsbti.api.DefinitionType.Trait

/**
 * A class that computes hashes for each group of definitions grouped by their simple name.
 *
 * See the `nameHashes` method for details.
 */
class NameHashing {

  import NameHashing._

  /**
   * This method takes an API representation and extracts a flat collection of all
   * definitions contained in that API representation. Then it groups definitions
   * by simple name. Lastly, it computes a hash sum of all definitions in a single
   * group.
   *
   * NOTE: The hash sum used for hashing a group of definitions is insensitive
   * to the order of definitions.
   */
  def nameHashes(source: SourceAPI): _internalOnly_NameHashes = {
    val apiPublicDefs = publicDefs(source)
    val (regularDefs, implicitDefs) = apiPublicDefs.partition(locDef => !locDef.definition.modifiers.isImplicit)
    val regularNameHashes = nameHashesForLocatedDefinitions(regularDefs)
    val implicitNameHashes = nameHashesForLocatedDefinitions(implicitDefs)
    new _internalOnly_NameHashes(regularNameHashes.toArray, implicitNameHashes.toArray)
  }

  private def nameHashesForLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Iterable[_internalOnly_NameHash] = {
    val groupedBySimpleName = locatedDefs.groupBy(locatedDef => localName(locatedDef.definition.name))
    val hashes = groupedBySimpleName.mapValues(hashLocatedDefinitions)
    hashes.toIterable.map({ case (name: String, hash: Int) => new _internalOnly_NameHash(name, hash) })
  }

  private def hashLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Int = {
    val defsWithExtraHashes = locatedDefs.toSeq.map(ld => ld.definition -> ld.location.hashCode)
    xsbt.api.HashAPI.hashDefinitionsWithExtraHashes(defsWithExtraHashes)
  }

  /**
   * A visitor that visits a given API object and extracts all nested public
   * definitions it finds. The extracted definitions have a Location attached
   * to them which identifies the API object's location.
   *
   * The returned location is essentially a path to the definition that contains
   * the located definition. For example, given:
   *
   *   object Foo {
   *     class Bar { def abc: Int }
   *   }
   *
   * the location of `abc` is Seq((TermName, Foo), (TypeName, Bar)).
   */
  private class ExtractPublicDefinitions extends Visit {
    val locatedDefs = scala.collection.mutable.Buffer[LocatedDefinition]()
    private var currentLocation: Location = Location()
    override def visitAPI(s: SourceAPI): Unit = {
      s.packages foreach visitPackage
      s.definitions foreach {
        case topLevelDef: ClassLike =>
          val packageName = {
            val fullName = topLevelDef.name()
            val lastDotIndex = fullName.lastIndexOf('.')
            if (lastDotIndex <= 0) "" else fullName.substring(0, lastDotIndex - 1)
          }
          currentLocation = packageAsLocation(packageName)
          visitDefinition(topLevelDef)
      }
    }
    override def visitDefinition(d: Definition): Unit = {
      val locatedDef = LocatedDefinition(currentLocation, d)
      locatedDefs += locatedDef
      d match {
        case cl: xsbti.api.ClassLike =>
          val savedLocation = currentLocation
          currentLocation = classLikeAsLocation(currentLocation, cl)
          super.visitDefinition(d)
          currentLocation = savedLocation
        case _ =>
          super.visitDefinition(d)
      }
    }
  }

  private def publicDefs(source: SourceAPI): Iterable[LocatedDefinition] = {
    val visitor = new ExtractPublicDefinitions
    visitor.visitAPI(source)
    visitor.locatedDefs
  }

  private def localName(name: String): String = {
    // when there's no dot in the name, `lastIndexOf` returns -1, so this
    // handles that case properly
    val index = name.lastIndexOf('.') + 1
    name.substring(index)
  }

  private def packageAsLocation(pkg: String): Location = if (pkg != "") {
    val selectors = pkg.split('.').map(name => Selector(name, TermName)).toSeq
    Location(selectors: _*)
  } else Location.Empty

  private def classLikeAsLocation(prefix: Location, cl: ClassLike): Location = {
    val selector = {
      val clNameType = NameType(cl.definitionType)
      Selector(localName(cl.name), clNameType)
    }
    Location((prefix.selectors :+ selector): _*)
  }
}

object NameHashing {
  private case class LocatedDefinition(location: Location, definition: Definition)
  /**
   * A location is expressed as a sequence of annotated names. The annotation denotes
   * the type of a name, i.e. whether it's a term name or a type name.
   *
   * Using Scala compiler terminology, a location is defined as a sequence of member
   * selections that uniquely identify a given Symbol.
   */
  private case class Location(selectors: Selector*)
  private object Location {
    val Empty = Location(Seq.empty: _*)
  }
  private case class Selector(name: String, nameType: NameType)
  private sealed trait NameType
  private object NameType {
    import DefinitionType._
    def apply(dt: DefinitionType): NameType = dt match {
      case Trait | ClassDef       => TypeName
      case Module | PackageModule => TermName
    }
  }
  private case object TermName extends NameType
  private case object TypeName extends NameType
}
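
The grouping key above is the simple name: localName keeps everything after the last dot, so definitions that share a simple name land in the same hash bucket regardless of owner. A standalone sketch of the private helper (simpleNameOf is an illustrative name, not from the file):

    def simpleNameOf(fullName: String): String =
      fullName.substring(fullName.lastIndexOf('.') + 1) // no dot: -1 + 1 == 0, keeps the whole name
    assert(simpleNameOf("xsbt.api.NameHashing") == "NameHashing")
    assert(simpleNameOf("NameHashing") == "NameHashing")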

@@ -1,391 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010, 2011 Mark Harrah
 */
package xsbt.api

import xsbti.api._

import Function.tupled
import scala.collection.{ immutable, mutable }

@deprecated("This class is not used in incremental compiler and will be removed in next major version.", "0.13.2")
class NameChanges(val newTypes: Set[String], val removedTypes: Set[String], val newTerms: Set[String], val removedTerms: Set[String]) {
  override def toString =
    (("New types", newTypes) :: ("Removed types", removedTypes) :: ("New terms", newTerms) :: ("Removed terms", removedTerms) :: Nil).map {
      case (label, set) => label + ":\n\t" + set.mkString("\n\t")
    }.mkString("Name changes:\n ", "\n ", "\n")
}

object TopLevel {
  @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
  def nameChanges(a: Iterable[Source], b: Iterable[Source]): NameChanges = {
    val api = (_: Source).api
    apiNameChanges(a map api, b map api)
  }
  /** Identifies removed and new top-level definitions by name. */
  @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
  def apiNameChanges(a: Iterable[SourceAPI], b: Iterable[SourceAPI]): NameChanges =
    {
      def changes(s: Set[String], t: Set[String]) = (s -- t, t -- s)

      val (avalues, atypes) = definitions(a)
      val (bvalues, btypes) = definitions(b)

      val (newTypes, removedTypes) = changes(names(atypes), names(btypes))
      val (newTerms, removedTerms) = changes(names(avalues), names(bvalues))

      new NameChanges(newTypes, removedTypes, newTerms, removedTerms)
    }
  def definitions(i: Iterable[SourceAPI]) = SameAPI.separateDefinitions(i.toSeq.flatMap(_.definitions))
  def names(s: Iterable[Definition]): Set[String] = Set() ++ s.map(_.name)
}

/** Checks the API of two source files for equality. */
object SameAPI {
  def apply(a: Source, b: Source): Boolean =
    a.apiHash == b.apiHash && (a.hash.nonEmpty && b.hash.nonEmpty) && apply(a.api, b.api)

  def apply(a: Def, b: Def): Boolean =
    (new SameAPI(false, true)).sameDefinitions(List(a), List(b), true)

  def apply(a: SourceAPI, b: SourceAPI): Boolean =
    {
      val start = System.currentTimeMillis

      /*println("\n=========== API #1 ================")
      import DefaultShowAPI._
      println(ShowAPI.show(a))
      println("\n=========== API #2 ================")
      println(ShowAPI.show(b))*/

      val result = (new SameAPI(false, true)).check(a, b)
      val end = System.currentTimeMillis
      //println(" API comparison took: " + (end - start) / 1000.0 + " s")
      result
    }

  def separateDefinitions(s: Seq[Definition]): (Seq[Definition], Seq[Definition]) =
    s.partition(isValueDefinition)
  def isValueDefinition(d: Definition): Boolean =
    d match {
      case _: FieldLike | _: Def => true
      case c: ClassLike          => isValue(c.definitionType)
      case _                     => false
    }
  def isValue(d: DefinitionType): Boolean =
    d == DefinitionType.Module || d == DefinitionType.PackageModule
  /** Puts the given definitions in a map according to their names. */
  def byName(s: Seq[Definition]): Map[String, List[Definition]] =
    {
      var map = Map[String, List[Definition]]()
      for (d <- s; name = d.name)
        map = map.updated(name, d :: map.getOrElse(name, Nil))
      map
    }

  /**
   * Removes definitions that should not be considered for API equality.
   * All top-level definitions are always considered: 'private' only means package-private.
   * Other definitions are considered if they are not qualified with 'private[this]' or 'private'.
   */
  def filterDefinitions(d: Seq[Definition], topLevel: Boolean, includePrivate: Boolean) = if (topLevel || includePrivate) d else d.filter(isNonPrivate)
  def isNonPrivate(d: Definition): Boolean = isNonPrivate(d.access)
  /** Returns false if `access` is `Private` and not qualified with an identifier, true otherwise. */
  def isNonPrivate(access: Access): Boolean =
    access match {
      case p: Private if !p.qualifier.isInstanceOf[IdQualifier] => false
      case _ => true
    }
}
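
// Editorial illustration (not part of the original file): given
//   class C { private def a = 1; private[this] def b = 2; private[C] def c = 3 }
// isNonPrivate is false for `a` (Unqualified) and `b` (ThisQualifier) but true
// for `c`, whose Private access carries an IdQualifier, so only `c` takes part
// in non-top-level comparisons.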

/**
 * Used to implement API equality.
 *
 * If `includePrivate` is true, `private` and `private[this]` members are included in the comparison. Otherwise, those members are excluded.
 */
class SameAPI(includePrivate: Boolean, includeParamNames: Boolean) {
  import SameAPI._

  private val pending = new mutable.HashSet[AnyRef]
  private[this] val debugEnabled = java.lang.Boolean.getBoolean("xsbt.api.debug")
  def debug(flag: Boolean, msg: => String): Boolean =
    {
      if (debugEnabled && !flag) println(msg)
      flag
    }

  /** Returns true if source `a` has the same API as source `b`. */
  def check(a: SourceAPI, b: SourceAPI): Boolean =
    {
      samePackages(a, b) &&
        debug(sameDefinitions(a, b), "Definitions differed")
    }

  def samePackages(a: SourceAPI, b: SourceAPI): Boolean =
    sameStrings(packages(a), packages(b))
  def packages(s: SourceAPI): Set[String] =
    Set() ++ s.packages.map(_.name)

  def sameDefinitions(a: SourceAPI, b: SourceAPI): Boolean =
    sameDefinitions(a.definitions, b.definitions, true)
  def sameDefinitions(a: Seq[Definition], b: Seq[Definition], topLevel: Boolean): Boolean =
    {
      val (avalues, atypes) = separateDefinitions(filterDefinitions(a, topLevel, includePrivate))
      val (bvalues, btypes) = separateDefinitions(filterDefinitions(b, topLevel, includePrivate))
      debug(sameDefinitions(byName(avalues), byName(bvalues)), "Value definitions differed") &&
        debug(sameDefinitions(byName(atypes), byName(btypes)), "Type definitions differed")
    }
  def sameDefinitions(a: scala.collection.Map[String, List[Definition]], b: scala.collection.Map[String, List[Definition]]): Boolean =
    debug(sameStrings(a.keySet, b.keySet), "\tDefinition strings differed (a: " + (a.keySet -- b.keySet) + ", b: " + (b.keySet -- a.keySet) + ")") &&
      zippedEntries(a, b).forall(tupled(sameNamedDefinitions))

  /**
   * Checks that the definitions in `a` are the same as those in `b`, ignoring order.
   * Each list is assumed to have already been checked to have the same names (by `sameDefinitions`, for example).
   */
  def sameNamedDefinitions(a: List[Definition], b: List[Definition]): Boolean =
    {
      def sameDefs(a: List[Definition], b: List[Definition]): Boolean =
        {
          a match {
            case adef :: atail =>
              def sameDef(seen: List[Definition], remaining: List[Definition]): Boolean =
                remaining match {
                  case Nil => debug(false, "Definition different in new API: \n" + adef.name)
                  case bdef :: btail =>
                    val eq = sameDefinitionContent(adef, bdef)
                    if (eq) sameDefs(atail, seen ::: btail) else sameDef(bdef :: seen, btail)
                }
              sameDef(Nil, b)
            case Nil => true
          }
        }
      debug((a.length == b.length), "\t\tLength differed for " + a.headOption.map(_.name).getOrElse("empty")) && sameDefs(a, b)
    }

  /** Checks that the two definitions are the same, other than their name. */
  def sameDefinitionContent(a: Definition, b: Definition): Boolean =
    samePending(a, b)(sameDefinitionContentDirect)
  def sameDefinitionContentDirect(a: Definition, b: Definition): Boolean =
    {
      //a.name == b.name &&
      debug(sameAccess(a.access, b.access), "Access differed") &&
        debug(sameModifiers(a.modifiers, b.modifiers), "Modifiers differed") &&
        debug(sameAnnotations(a.annotations, b.annotations), "Annotations differed") &&
        debug(sameDefinitionSpecificAPI(a, b), "Definition-specific differed")
    }

  def sameAccess(a: Access, b: Access): Boolean =
    (a, b) match {
      case (_: Public, _: Public)         => true
      case (qa: Protected, qb: Protected) => sameQualifier(qa, qb)
      case (qa: Private, qb: Private)     => sameQualifier(qa, qb)
      case _                              => debug(false, "Different access categories")
    }
  def sameQualifier(a: Qualified, b: Qualified): Boolean =
    sameQualifier(a.qualifier, b.qualifier)
  def sameQualifier(a: Qualifier, b: Qualifier): Boolean =
    (a, b) match {
      case (_: Unqualified, _: Unqualified)     => true
      case (_: ThisQualifier, _: ThisQualifier) => true
      case (ia: IdQualifier, ib: IdQualifier)   => debug(ia.value == ib.value, "Different qualifiers")
      case _ => debug(false, "Different qualifier categories: " + a.getClass.getName + " -- " + b.getClass.getName)
    }

  def sameModifiers(a: Modifiers, b: Modifiers): Boolean =
    bitSet(a) == bitSet(b)

  def bitSet(m: Modifiers): immutable.BitSet =
    {
      import m._
      val bs = new mutable.BitSet
      setIf(bs, isAbstract, 0)
      setIf(bs, isOverride, 1)
      setIf(bs, isFinal, 2)
      setIf(bs, isSealed, 3)
      setIf(bs, isImplicit, 4)
      setIf(bs, isLazy, 5)
      setIf(bs, isMacro, 6)
      bs.toImmutable
    }
  def setIf(bs: mutable.BitSet, flag: Boolean, i: Int): Unit =
    if (flag) bs += i

  def sameAnnotations(a: Seq[Annotation], b: Seq[Annotation]): Boolean =
    sameSeq(a, b)(sameAnnotation)
  def sameAnnotation(a: Annotation, b: Annotation): Boolean =
    debug(sameType(a.base, b.base), "Annotation base type differed") &&
      debug(sameAnnotationArguments(a.arguments, b.arguments), "Annotation arguments differed (" + a + ") and (" + b + ")")
  def sameAnnotationArguments(a: Seq[AnnotationArgument], b: Seq[AnnotationArgument]): Boolean =
    argumentMap(a) == argumentMap(b)
  def argumentMap(a: Seq[AnnotationArgument]): Map[String, String] =
    Map() ++ a.map(arg => (arg.name, arg.value))

  def sameDefinitionSpecificAPI(a: Definition, b: Definition): Boolean =
    (a, b) match {
      case (fa: FieldLike, fb: FieldLike) => sameFieldSpecificAPI(fa, fb)
      case (pa: ParameterizedDefinition, pb: ParameterizedDefinition) => sameParameterizedDefinition(pa, pb)
      case _ => false
    }

  def sameParameterizedDefinition(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
    debug(sameTypeParameters(a.typeParameters, b.typeParameters), "Different type parameters for " + a.name) &&
      sameParameterizedSpecificAPI(a, b)

  def sameParameterizedSpecificAPI(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
    (a, b) match {
      case (da: Def, db: Def)                         => sameDefSpecificAPI(da, db)
      case (ca: ClassLike, cb: ClassLike)             => sameClassLikeSpecificAPI(ca, cb)
      case (ta: TypeAlias, tb: TypeAlias)             => sameAliasSpecificAPI(ta, tb)
      case (ta: TypeDeclaration, tb: TypeDeclaration) => sameDeclarationSpecificAPI(ta, tb)
      case _ => false
    }

  def sameDefSpecificAPI(a: Def, b: Def): Boolean =
    debug(sameValueParameters(a.valueParameters, b.valueParameters), "Different def value parameters for " + a.name) &&
      debug(sameType(a.returnType, b.returnType), "Different def return type for " + a.name)
  def sameAliasSpecificAPI(a: TypeAlias, b: TypeAlias): Boolean =
    debug(sameType(a.tpe, b.tpe), "Different alias type for " + a.name)
  def sameDeclarationSpecificAPI(a: TypeDeclaration, b: TypeDeclaration): Boolean =
    debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound for declaration " + a.name) &&
      debug(sameType(a.upperBound, b.upperBound), "Different upper bound for declaration " + a.name)
  def sameFieldSpecificAPI(a: FieldLike, b: FieldLike): Boolean =
    debug(sameFieldCategory(a, b), "Different field categories (" + a.name + "=" + a.getClass.getName + " -- " + b.name + "=" + b.getClass.getName + ")") &&
      debug(sameType(a.tpe, b.tpe), "Different field type for " + a.name)

  def sameFieldCategory(a: FieldLike, b: FieldLike): Boolean =
    (a, b) match {
      case (_: Val, _: Val) => true
      case (_: Var, _: Var) => true
      case _                => false
    }

  def sameClassLikeSpecificAPI(a: ClassLike, b: ClassLike): Boolean =
    sameDefinitionType(a.definitionType, b.definitionType) &&
      sameType(a.selfType, b.selfType) &&
      sameStructure(a.structure, b.structure)

  def sameValueParameters(a: Seq[ParameterList], b: Seq[ParameterList]): Boolean =
    sameSeq(a, b)(sameParameterList)

  def sameParameterList(a: ParameterList, b: ParameterList): Boolean =
    (a.isImplicit == b.isImplicit) &&
      sameParameters(a.parameters, b.parameters)
  def sameParameters(a: Seq[MethodParameter], b: Seq[MethodParameter]): Boolean =
    sameSeq(a, b)(sameMethodParameter)
  def sameMethodParameter(a: MethodParameter, b: MethodParameter): Boolean =
    (!includeParamNames || a.name == b.name) &&
      sameType(a.tpe, b.tpe) &&
      (a.hasDefault == b.hasDefault) &&
      sameParameterModifier(a.modifier, b.modifier)
  def sameParameterModifier(a: ParameterModifier, b: ParameterModifier) =
    a == b
  def sameDefinitionType(a: DefinitionType, b: DefinitionType): Boolean =
    a == b
  def sameVariance(a: Variance, b: Variance): Boolean =
    a == b

  def sameTypeParameters(a: Seq[TypeParameter], b: Seq[TypeParameter]): Boolean =
    debug(sameSeq(a, b)(sameTypeParameter), "Different type parameters")
  def sameTypeParameter(a: TypeParameter, b: TypeParameter): Boolean =
    {
      sameTypeParameters(a.typeParameters, b.typeParameters) &&
        debug(sameAnnotations(a.annotations, b.annotations), "Different type parameter annotations") &&
        debug(sameVariance(a.variance, b.variance), "Different variance") &&
        debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound") &&
        debug(sameType(a.upperBound, b.upperBound), "Different upper bound") &&
        sameTags(a.id, b.id)
    }
  def sameTags(a: String, b: String): Boolean =
    debug(a == b, "Different type parameter bindings: " + a + ", " + b)

  def sameType(a: Type, b: Type): Boolean =
    samePending(a, b)(sameTypeDirect)
  def sameTypeDirect(a: Type, b: Type): Boolean =
    (a, b) match {
      case (sa: SimpleType, sb: SimpleType)     => debug(sameSimpleTypeDirect(sa, sb), "Different simple types: " + DefaultShowAPI(sa) + " and " + DefaultShowAPI(sb))
      case (ca: Constant, cb: Constant)         => debug(sameConstantType(ca, cb), "Different constant types: " + DefaultShowAPI(ca) + " and " + DefaultShowAPI(cb))
      case (aa: Annotated, ab: Annotated)       => debug(sameAnnotatedType(aa, ab), "Different annotated types")
      case (sa: Structure, sb: Structure)       => debug(sameStructureDirect(sa, sb), "Different structure type")
      case (ea: Existential, eb: Existential)   => debug(sameExistentialType(ea, eb), "Different existential type")
      case (pa: Polymorphic, pb: Polymorphic)   => debug(samePolymorphicType(pa, pb), "Different polymorphic type")
      case _ => differentCategory("type", a, b)
    }

  def sameConstantType(ca: Constant, cb: Constant): Boolean =
    sameType(ca.baseType, cb.baseType) &&
      ca.value == cb.value
  def sameExistentialType(a: Existential, b: Existential): Boolean =
    sameTypeParameters(a.clause, b.clause) &&
      sameType(a.baseType, b.baseType)
  def samePolymorphicType(a: Polymorphic, b: Polymorphic): Boolean =
    sameTypeParameters(a.parameters, b.parameters) &&
      sameType(a.baseType, b.baseType)
  def sameAnnotatedType(a: Annotated, b: Annotated): Boolean =
    sameType(a.baseType, b.baseType) &&
      sameAnnotations(a.annotations, b.annotations)
  def sameStructure(a: Structure, b: Structure): Boolean =
    samePending(a, b)(sameStructureDirect)

  private[this] def samePending[T](a: T, b: T)(f: (T, T) => Boolean): Boolean =
    if (pending add ((a, b))) f(a, b) else true

  def sameStructureDirect(a: Structure, b: Structure): Boolean =
    {
      sameSeq(a.parents, b.parents)(sameType) &&
        sameMembers(a.declared, b.declared) &&
        sameMembers(a.inherited, b.inherited)
    }

  def sameMembers(a: Seq[Definition], b: Seq[Definition]): Boolean =
    sameDefinitions(a, b, false)

  def sameSimpleType(a: SimpleType, b: SimpleType): Boolean =
    samePending(a, b)(sameSimpleTypeDirect)
  def sameSimpleTypeDirect(a: SimpleType, b: SimpleType): Boolean =
    (a, b) match {
      case (pa: Projection, pb: Projection)       => debug(sameProjection(pa, pb), "Different projection")
      case (pa: ParameterRef, pb: ParameterRef)   => debug(sameParameterRef(pa, pb), "Different parameter ref")
      case (sa: Singleton, sb: Singleton)         => debug(sameSingleton(sa, sb), "Different singleton")
      case (_: EmptyType, _: EmptyType)           => true
      case (pa: Parameterized, pb: Parameterized) => debug(sameParameterized(pa, pb), "Different parameterized")
      case _ => differentCategory("simple type", a, b)
    }
  def differentCategory(label: String, a: AnyRef, b: AnyRef): Boolean =
    debug(false, "Different category of " + label + " (" + a.getClass.getName + " and " + b.getClass.getName + ") for (" + a + " and " + b + ")")

  def sameParameterized(a: Parameterized, b: Parameterized): Boolean =
    sameSimpleType(a.baseType, b.baseType) &&
      sameSeq(a.typeArguments, b.typeArguments)(sameType)
  def sameParameterRef(a: ParameterRef, b: ParameterRef): Boolean = sameTags(a.id, b.id)
  def sameSingleton(a: Singleton, b: Singleton): Boolean =
    samePath(a.path, b.path)
  def sameProjection(a: Projection, b: Projection): Boolean =
    sameSimpleType(a.prefix, b.prefix) &&
      (a.id == b.id)

  def samePath(a: Path, b: Path): Boolean =
    samePathComponents(a.components, b.components)
  def samePathComponents(a: Seq[PathComponent], b: Seq[PathComponent]): Boolean =
    sameSeq(a, b)(samePathComponent)
  def samePathComponent(a: PathComponent, b: PathComponent): Boolean =
    (a, b) match {
      case (_: This, _: This)     => true
      case (sa: Super, sb: Super) => samePathSuper(sa, sb)
      case (ia: Id, ib: Id)       => samePathId(ia, ib)
      case _                      => false
    }
  def samePathSuper(a: Super, b: Super): Boolean =
    samePath(a.qualifier, b.qualifier)
  def samePathId(a: Id, b: Id): Boolean =
    a.id == b.id

  // precondition: a.keySet == b.keySet
  protected def zippedEntries[A, B](a: scala.collection.Map[A, B], b: scala.collection.Map[A, B]): Iterable[(B, B)] =
    for ((key, avalue) <- a) yield (avalue, b(key))

  def sameStrings(a: scala.collection.Set[String], b: scala.collection.Set[String]): Boolean =
    a == b
  final def sameSeq[T](a: Seq[T], b: Seq[T])(eq: (T, T) => Boolean): Boolean =
    (a.length == b.length) && (a zip b).forall(tupled(eq))
}
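
Usage sketch (editorial; before and after are hypothetical SourceAPI values): the incremental compiler asks a single question of this class, and comparison of cyclic types terminates because samePending assumes equality for any pair already being compared higher up the recursion.

    val unchanged: Boolean = SameAPI(before, after) // true iff the public APIs match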

@@ -1,288 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package xsbt.api

import xsbti.api._

trait Show[A] {
  def show(a: A): String
}

final class ShowLazy[A](delegate: => Show[A]) extends Show[A] {
  private lazy val s = delegate
  def show(a: A) = s.show(a)
}

import ShowAPI._

object ShowAPI {
  def Show[T](implicit s: Show[T]): Show[T] = s
  def show[T](t: T)(implicit s: Show[T]): String = s.show(t)

  def bounds(lower: Type, upper: Type)(implicit t: Show[Type]): String =
    ">: " + t.show(lower) + " <: " + t.show(upper)

  import ParameterModifier._
  def parameterModifier(base: String, pm: ParameterModifier): String =
    pm match {
      case Plain    => base
      case Repeated => base + "*"
      case ByName   => "=> " + base
    }

  def concat[A](list: Seq[A], as: Show[A], sep: String): String = mapSeq(list, as).mkString(sep)
  def commas[A](list: Seq[A], as: Show[A]): String = concat(list, as, ", ")
  def spaced[A](list: Seq[A], as: Show[A]): String = concat(list, as, " ")
  def lines[A](list: Seq[A], as: Show[A]): String = mapSeq(list, as).mkString("\n")
  def mapSeq[A](list: Seq[A], as: Show[A]): Seq[String] = list.map(as.show)
}

trait ShowBase {
  implicit def showAnnotation(implicit as: Show[AnnotationArgument], t: Show[Type]): Show[Annotation] =
    new Show[Annotation] { def show(a: Annotation) = "@" + t.show(a.base) + (if (a.arguments.isEmpty) "" else "(" + commas(a.arguments, as) + ")") }

  implicit def showAnnotationArgument: Show[AnnotationArgument] =
    new Show[AnnotationArgument] { def show(a: AnnotationArgument) = a.name + " = " + a.value }

  import Variance._
  implicit def showVariance: Show[Variance] =
    new Show[Variance] { def show(v: Variance) = v match { case Invariant => ""; case Covariant => "+"; case Contravariant => "-" } }

  implicit def showSource(implicit ps: Show[Package], ds: Show[Definition]): Show[SourceAPI] =
    new Show[SourceAPI] { def show(a: SourceAPI) = lines(a.packages, ps) + "\n" + lines(a.definitions, ds) }

  implicit def showPackage: Show[Package] =
    new Show[Package] { def show(pkg: Package) = "package " + pkg.name }

  implicit def showAccess(implicit sq: Show[Qualified]): Show[Access] =
    new Show[Access] {
      def show(a: Access) =
        a match {
          case p: Public    => ""
          case q: Qualified => sq.show(q)
        }
    }
  implicit def showQualified(implicit sq: Show[Qualifier]): Show[Qualified] =
    new Show[Qualified] {
      def show(q: Qualified) =
        ((q match {
          case p: Protected => "protected"
          case p: Private   => "private"
        })
          + sq.show(q.qualifier))
    }
  implicit def showQualifier: Show[Qualifier] =
    new Show[Qualifier] {
      def show(q: Qualifier) =
        q match {
          case _: Unqualified   => ""
          case _: ThisQualifier => "[this]"
          case i: IdQualifier   => "[" + i.value + "]"
        }
    }
  implicit def showModifiers: Show[Modifiers] =
    new Show[Modifiers] {
      def show(m: Modifiers) =
        {
          val mods =
            (m.isOverride, "override") ::
              (m.isFinal, "final") ::
              (m.isSealed, "sealed") ::
              (m.isImplicit, "implicit") ::
              (m.isAbstract, "abstract") ::
              (m.isLazy, "lazy") ::
              Nil
          mods.filter(_._1).map(_._2).mkString(" ")
        }
    }

  implicit def showDefinitionType: Show[DefinitionType] =
    new Show[DefinitionType] {
      import DefinitionType._
      def show(dt: DefinitionType) =
        dt match {
          case Trait         => "trait"
          case ClassDef      => "class"
          case Module        => "object"
          case PackageModule => "package object"
        }
    }
}
trait ShowDefinitions {
  implicit def showVal(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Val] =
    new Show[Val] { def show(v: Val) = definitionBase(v, "val")(acs, ms, ans) + ": " + t.show(v.tpe) }

  implicit def showVar(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Var] =
    new Show[Var] { def show(v: Var) = definitionBase(v, "var")(acs, ms, ans) + ": " + t.show(v.tpe) }

  implicit def showDef(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], vp: Show[Seq[ParameterList]], t: Show[Type]): Show[Def] =
    new Show[Def] { def show(d: Def) = parameterizedDef(d, "def")(acs, ms, ans, tp) + vp.show(d.valueParameters) + ": " + t.show(d.returnType) }

  implicit def showClassLike(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType], s: Show[Structure], t: Show[Type]): Show[ClassLike] =
    new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) + " requires " + t.show(cl.selfType) + " extends " + s.show(cl.structure) }

  implicit def showTypeAlias(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeAlias] =
    new Show[TypeAlias] { def show(ta: TypeAlias) = parameterizedDef(ta, "type")(acs, ms, ans, tp) + " = " + t.show(ta.tpe) }

  implicit def showTypeDeclaration(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeDeclaration] =
    new Show[TypeDeclaration] { def show(td: TypeDeclaration) = parameterizedDef(td, "type")(acs, ms, ans, tp) + bounds(td.lowerBound, td.upperBound) }
  def showClassLikeSimple(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType]): Show[ClassLike] =
    new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) }

  def parameterizedDef(d: ParameterizedDefinition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]]): String =
    definitionBase(d, label)(acs, ms, ans) + tp.show(d.typeParameters)
  def definitionBase(d: Definition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation]): String =
    space(spaced(d.annotations, ans)) + space(acs.show(d.access)) + space(ms.show(d.modifiers)) + space(label) + d.name
  def space(s: String) = if (s.isEmpty) s else s + " "
}
trait ShowDefinition {
  implicit def showDefinition(implicit vl: Show[Val], vr: Show[Var], ds: Show[Def], cl: Show[ClassLike], ta: Show[TypeAlias], td: Show[TypeDeclaration]): Show[Definition] =
    new Show[Definition] {
      def show(d: Definition) =
        d match {
          case v: Val             => vl.show(v)
          case v: Var             => vr.show(v)
          case d: Def             => ds.show(d)
          case c: ClassLike       => cl.show(c)
          case t: TypeAlias       => ta.show(t)
          case t: TypeDeclaration => td.show(t)
        }
    }
}
trait ShowType {
  implicit def showType(implicit s: Show[SimpleType], a: Show[Annotated], st: Show[Structure], c: Show[Constant], e: Show[Existential], po: Show[Polymorphic]): Show[Type] =
    new Show[Type] {
      def show(t: Type) =
        t match {
          case q: SimpleType  => s.show(q)
          case q: Constant    => c.show(q)
          case q: Annotated   => a.show(q)
          case q: Structure   => st.show(q)
          case q: Existential => e.show(q)
          case q: Polymorphic => po.show(q)
        }
    }

  implicit def showSimpleType(implicit pr: Show[Projection], pa: Show[ParameterRef], si: Show[Singleton], et: Show[EmptyType], p: Show[Parameterized]): Show[SimpleType] =
    new Show[SimpleType] {
      def show(t: SimpleType) =
        t match {
          case q: Projection    => pr.show(q)
          case q: ParameterRef  => pa.show(q)
          case q: Singleton     => si.show(q)
          case q: EmptyType     => et.show(q)
          case q: Parameterized => p.show(q)
        }
    }
}
trait ShowBasicTypes {
  implicit def showSingleton(implicit p: Show[Path]): Show[Singleton] =
    new Show[Singleton] { def show(s: Singleton) = p.show(s.path) }
  implicit def showEmptyType: Show[EmptyType] =
    new Show[EmptyType] { def show(e: EmptyType) = "<empty>" }
  implicit def showParameterRef: Show[ParameterRef] =
    new Show[ParameterRef] { def show(p: ParameterRef) = "<" + p.id + ">" }
}
trait ShowTypes {
  implicit def showStructure(implicit t: Show[Type], d: Show[Definition]): Show[Structure] =
    new Show[Structure] {
      def show(s: Structure) = {
        // don't show inherited class-like definitions to avoid dealing with cycles
        val safeInherited = s.inherited.filterNot(_.isInstanceOf[ClassLike])
        val showInherited: Show[Definition] = new Show[Definition] {
          def show(deff: Definition): String = "^inherited^ " + d.show(deff)
        }
        concat(s.parents, t, " with ") + "\n{\n" + lines(safeInherited, showInherited) + "\n" + lines(s.declared, d) + "\n}"
      }
    }
  implicit def showAnnotated(implicit as: Show[Annotation], t: Show[Type]): Show[Annotated] =
    new Show[Annotated] { def show(a: Annotated) = spaced(a.annotations, as) + " " + t.show(a.baseType) }
  implicit def showProjection(implicit t: Show[SimpleType]): Show[Projection] =
    new Show[Projection] { def show(p: Projection) = t.show(p.prefix) + "#" + p.id }
  implicit def showParameterized(implicit t: Show[Type]): Show[Parameterized] =
    new Show[Parameterized] { def show(p: Parameterized) = t.show(p.baseType) + mapSeq(p.typeArguments, t).mkString("[", ", ", "]") }
  implicit def showConstant(implicit t: Show[Type]): Show[Constant] =
    new Show[Constant] { def show(c: Constant) = t.show(c.baseType) + "(" + c.value + ")" }
  implicit def showExistential(implicit t: Show[Type], tp: Show[TypeParameter]): Show[Existential] =
    new Show[Existential] {
      def show(e: Existential) =
        t.show(e.baseType) + e.clause.map(t => "type " + tp.show(t)).mkString(" forSome { ", "; ", "}")
    }
  implicit def showPolymorphic(implicit t: Show[Type], tps: Show[Seq[TypeParameter]]): Show[Polymorphic] =
    new Show[Polymorphic] { def show(p: Polymorphic) = t.show(p.baseType) + tps.show(p.parameters) }
}

trait ShowPath {
  implicit def showPath(implicit pc: Show[PathComponent]): Show[Path] =
    new Show[Path] { def show(p: Path) = mapSeq(p.components, pc).mkString(".") }

  implicit def showPathComponent(implicit sp: Show[Path]): Show[PathComponent] =
    new Show[PathComponent] {
      def show(p: PathComponent) =
        p match {
          case s: Super => "super[" + sp.show(s.qualifier) + "]"
          case _: This  => "this"
          case i: Id    => i.id
        }
    }
}

trait ShowValueParameters {
  implicit def showParameterLists(implicit pl: Show[ParameterList]): Show[Seq[ParameterList]] =
    new Show[Seq[ParameterList]] { def show(p: Seq[ParameterList]) = concat(p, pl, "") }
  implicit def showParameterList(implicit mp: Show[MethodParameter]): Show[ParameterList] =
    new Show[ParameterList] { def show(pl: ParameterList) = "(" + (if (pl.isImplicit) "implicit " else "") + commas(pl.parameters, mp) + ")" }

  implicit def showMethodParameter(implicit t: Show[Type]): Show[MethodParameter] =
    new Show[MethodParameter] {
      def show(mp: MethodParameter) =
        mp.name + ": " + parameterModifier(t.show(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "")
    }
}
trait ShowTypeParameters {
  implicit def showTypeParameters(implicit as: Show[TypeParameter]): Show[Seq[TypeParameter]] =
    new Show[Seq[TypeParameter]] { def show(tps: Seq[TypeParameter]) = if (tps.isEmpty) "" else mapSeq(tps, as).mkString("[", ",", "]") }
  implicit def showTypeParameter(implicit as: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type], v: Show[Variance]): Show[TypeParameter] =
    new Show[TypeParameter] {
      def show(tps: TypeParameter) =
        spaced(tps.annotations, as) + " " + v.show(tps.variance) + tps.id + tp.show(tps.typeParameters) + " " + bounds(tps.lowerBound, tps.upperBound)
    }
}

// this object is a hack to resolve some diverging implicit errors.
// I'm pretty sure the cause is the Show[Seq[T]] dominating Show[X] issue.
// It could probably be reduced a bit if that is the case (below was trial and error)
object DefaultShowAPI extends ShowBase with ShowBasicTypes with ShowValueParameters {
  def apply(d: Definition) = ShowAPI.show(d)
  def apply(d: Type) = ShowAPI.show(d)

  implicit lazy val showVal: Show[Val] = Cyclic.showVal
  implicit lazy val showVar: Show[Var] = Cyclic.showVar
  implicit lazy val showClassLike: Show[ClassLike] = Cyclic.showClassLike
  implicit lazy val showTypeDeclaration: Show[TypeDeclaration] = Cyclic.showTypeDeclaration
  implicit lazy val showTypeAlias: Show[TypeAlias] = Cyclic.showTypeAlias
  implicit lazy val showDef: Show[Def] = Cyclic.showDef

  implicit lazy val showProj: Show[Projection] = Cyclic.showProjection
  implicit lazy val showPoly: Show[Polymorphic] = Cyclic.showPolymorphic

  implicit lazy val showSimple: Show[SimpleType] = new ShowLazy(Cyclic.showSimpleType)
  implicit lazy val showAnnotated: Show[Annotated] = Cyclic.showAnnotated
  implicit lazy val showExistential: Show[Existential] = Cyclic.showExistential
  implicit lazy val showConstant: Show[Constant] = Cyclic.showConstant
  implicit lazy val showParameterized: Show[Parameterized] = Cyclic.showParameterized

  implicit lazy val showTypeParameters: Show[Seq[TypeParameter]] = new ShowLazy(Cyclic.showTypeParameters)
  implicit lazy val showTypeParameter: Show[TypeParameter] = Cyclic.showTypeParameter

  implicit lazy val showDefinition: Show[Definition] = new ShowLazy(Cyclic.showDefinition)
  implicit lazy val showType: Show[Type] = new ShowLazy(Cyclic.showType)
  implicit lazy val showStructure: Show[Structure] = new ShowLazy(Cyclic.showStructure)

  implicit lazy val showPath: Show[Path] = new ShowLazy(Cyclic.showPath)
  implicit lazy val showPathComponent: Show[PathComponent] = Cyclic.showPathComponent

  private object Cyclic extends ShowTypes with ShowType with ShowPath with ShowDefinition with ShowDefinitions with ShowTypeParameters
}
@@ -1,184 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package xsbt.api

import xsbti.api._
import scala.collection.mutable

class Visit {
  private[this] val visitedStructures = new mutable.HashSet[Structure]
  private[this] val visitedClassLike = new mutable.HashSet[ClassLike]

  def visit(s: Source): Unit = visitAPI(s.api)
  def visitAPI(s: SourceAPI): Unit = {
    s.packages foreach visitPackage
    s.definitions foreach visitDefinition
  }

  def visitPackage(p: Package): Unit = {
    visitString(p.name)
  }

  def visitDefinitions(ds: Seq[Definition]) = ds foreach visitDefinition
  def visitDefinition(d: Definition): Unit = {
    visitString(d.name)
    visitAnnotations(d.annotations)
    visitModifiers(d.modifiers)
    visitAccess(d.access)
    d match {
      case c: ClassLike       => visitClass(c)
      case f: FieldLike       => visitField(f)
      case d: Def             => visitDef(d)
      case t: TypeDeclaration => visitTypeDeclaration(t)
      case t: TypeAlias       => visitTypeAlias(t)
    }
  }
  final def visitClass(c: ClassLike): Unit = if (visitedClassLike add c) visitClass0(c)
  def visitClass0(c: ClassLike): Unit = {
    visitParameterizedDefinition(c)
    visitType(c.selfType)
    visitStructure(c.structure)
  }
  def visitField(f: FieldLike): Unit = {
    visitType(f.tpe)
    f match {
      case v: Var => visitVar(v)
      case v: Val => visitVal(v)
    }
  }
  def visitVar(v: Var): Unit = ()
  def visitVal(v: Val): Unit = ()
  def visitDef(d: Def): Unit = {
    visitParameterizedDefinition(d)
    visitValueParameters(d.valueParameters)
    visitType(d.returnType)
  }
  def visitAccess(a: Access): Unit =
    a match {
      case pub: Public     => visitPublic(pub)
      case qual: Qualified => visitQualified(qual)
    }
  def visitQualified(qual: Qualified): Unit =
    qual match {
      case p: Protected => visitProtected(p)
      case p: Private   => visitPrivate(p)
    }
  def visitQualifier(qual: Qualifier): Unit =
    qual match {
      case unq: Unqualified     => visitUnqualified(unq)
      case thisq: ThisQualifier => visitThisQualifier(thisq)
      case id: IdQualifier      => visitIdQualifier(id)
    }
  def visitIdQualifier(id: IdQualifier): Unit = {
    visitString(id.value)
  }
  def visitUnqualified(unq: Unqualified): Unit = ()
  def visitThisQualifier(thisq: ThisQualifier): Unit = ()
  def visitPublic(pub: Public): Unit = ()
  def visitPrivate(p: Private): Unit = visitQualifier(p.qualifier)
  def visitProtected(p: Protected): Unit = visitQualifier(p.qualifier)
  def visitModifiers(m: Modifiers): Unit = ()

  def visitValueParameters(valueParameters: Seq[ParameterList]) = valueParameters foreach visitValueParameterList
  def visitValueParameterList(list: ParameterList) = list.parameters foreach visitValueParameter
  def visitValueParameter(parameter: MethodParameter) = {
    visitString(parameter.name)
    visitType(parameter.tpe)
  }

  def visitParameterizedDefinition[T <: ParameterizedDefinition](d: T): Unit = visitTypeParameters(d.typeParameters)

  def visitTypeDeclaration(d: TypeDeclaration): Unit = {
    visitParameterizedDefinition(d)
    visitType(d.lowerBound)
    visitType(d.upperBound)
  }
  def visitTypeAlias(d: TypeAlias): Unit = {
    visitParameterizedDefinition(d)
    visitType(d.tpe)
  }

  def visitTypeParameters(parameters: Seq[TypeParameter]) = parameters foreach visitTypeParameter
  def visitTypeParameter(parameter: TypeParameter): Unit = {
    visitTypeParameters(parameter.typeParameters)
    visitType(parameter.lowerBound)
    visitType(parameter.upperBound)
    visitAnnotations(parameter.annotations)
  }
  def visitAnnotations(annotations: Seq[Annotation]) = annotations foreach visitAnnotation
  def visitAnnotation(annotation: Annotation) = {
    visitType(annotation.base)
    visitAnnotationArguments(annotation.arguments)
  }
  def visitAnnotationArguments(args: Seq[AnnotationArgument]) = args foreach visitAnnotationArgument
  def visitAnnotationArgument(arg: AnnotationArgument): Unit = {
    visitString(arg.name)
    visitString(arg.value)
  }

  def visitTypes(ts: Seq[Type]) = ts.foreach(visitType)
  def visitType(t: Type): Unit = {
    t match {
      case s: Structure     => visitStructure(s)
      case e: Existential   => visitExistential(e)
      case c: Constant      => visitConstant(c)
      case p: Polymorphic   => visitPolymorphic(p)
      case a: Annotated     => visitAnnotated(a)
      case p: Parameterized => visitParameterized(p)
      case p: Projection    => visitProjection(p)
      case _: EmptyType     => visitEmptyType()
      case s: Singleton     => visitSingleton(s)
      case pr: ParameterRef => visitParameterRef(pr)
    }
  }

  def visitEmptyType(): Unit = ()
  def visitParameterRef(p: ParameterRef): Unit = ()
  def visitSingleton(s: Singleton): Unit = visitPath(s.path)
  def visitPath(path: Path) = path.components foreach visitPathComponent
  def visitPathComponent(pc: PathComponent) = pc match {
    case t: This  => visitThisPath(t)
    case s: Super => visitSuperPath(s)
    case id: Id   => visitIdPath(id)
  }
  def visitThisPath(t: This): Unit = ()
  def visitSuperPath(s: Super): Unit = visitPath(s.qualifier)
  def visitIdPath(id: Id): Unit = visitString(id.id)

  def visitConstant(c: Constant) = {
    visitString(c.value)
    visitType(c.baseType)
  }
  def visitExistential(e: Existential) = visitParameters(e.clause, e.baseType)
  def visitPolymorphic(p: Polymorphic) = visitParameters(p.parameters, p.baseType)
  def visitProjection(p: Projection) = {
    visitString(p.id)
    visitType(p.prefix)
  }
  def visitParameterized(p: Parameterized): Unit = {
    visitType(p.baseType)
    visitTypes(p.typeArguments)
  }
  def visitAnnotated(a: Annotated): Unit = {
    visitType(a.baseType)
    visitAnnotations(a.annotations)
  }
  final def visitStructure(structure: Structure) = if (visitedStructures add structure) visitStructure0(structure)
  def visitStructure0(structure: Structure): Unit = {
    visitTypes(structure.parents)
    visitDefinitions(structure.declared)
    visitDefinitions(structure.inherited)
  }
  def visitParameters(parameters: Seq[TypeParameter], base: Type): Unit = {
    visitTypeParameters(parameters)
    visitType(base)
  }
  def visitString(s: String): Unit = ()
}
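// Illustrative note (not part of the original sources): Visit is meant to be subclassed,
// overriding only the callbacks of interest. A hypothetical visitor that collects every
// identifier encountered in an API could look like:
//
//   class CollectNames extends Visit {
//     val names = new scala.collection.mutable.HashSet[String]
//     override def visitString(s: String): Unit = names += s
//   }
//   // new CollectNames().visitAPI(sourceAPI) populates `names`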
@@ -1,24 +0,0 @@
// needs to be in xsbti package (or a subpackage) to pass through the filter in DualLoader
// and be accessible to the compiler-side interface
package xsbti

object SafeLazy {
  def apply[T <: AnyRef](eval: xsbti.F0[T]): xsbti.api.Lazy[T] =
    apply(eval())
  def apply[T <: AnyRef](eval: => T): xsbti.api.Lazy[T] =
    fromFunction0(eval _)
  def fromFunction0[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] =
    new Impl(eval)

  def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = apply(value)

  private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] {
    private[this] lazy val _t = {
      val t = eval()
      eval = null // clear the reference, ensuring the only memory we hold onto is the result
      t
    }
    def get: T = _t
  }
}
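// Illustrative note (not part of the original sources): Impl above frees the thunk once it is
// forced, so anything the closure captured becomes collectable. The same pattern in isolation:
//
//   final class ClearingLazy[T <: AnyRef](private[this] var thunk: () => T) {
//     private[this] lazy val value: T = { val t = thunk(); thunk = null; t }
//     def get: T = value
//   }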
@@ -1,327 +0,0 @@
package xsbt.api

import org.junit.runner.RunWith
import xsbti.api._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

@RunWith(classOf[JUnitRunner])
class NameHashingSpecification extends Specification {

  /**
   * Very basic test which checks whether adding a new member to a class
   * is reflected in the name hashes of that compilation unit.
   */
  "new member" in {
    val nameHashing = new NameHashing
    val def1 = new Def(Array.empty, strTpe, Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    val def2 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val classBar1 = simpleClass("Bar", def1)
    val classBar2 = simpleClass("Bar", def1, def2)
    val api1 = new SourceAPI(Array.empty, Array(classBar1))
    val api2 = new SourceAPI(Array.empty, Array(classBar2))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    assertNameHashEqualForRegularName("Bar", nameHashes1, nameHashes2)
    assertNameHashEqualForRegularName("foo", nameHashes1, nameHashes2)
    nameHashes1.regularMembers.map(_.name).toSeq must not contain ("bar")
    nameHashes2.regularMembers.map(_.name).toSeq must contain("bar")
  }

  /**
   * Very basic test which checks whether a name hash is insensitive to
   * definition order (across the whole compilation unit).
   */
  "definition order" in {
    val nameHashing = new NameHashing
    val def1 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val def2 = new Def(Array.empty, strTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val nestedBar1 = simpleClass("Bar1", def1)
    val nestedBar2 = simpleClass("Bar2", def2)
    val classA = simpleClass("Foo", nestedBar1, nestedBar2)
    val classB = simpleClass("Foo", nestedBar2, nestedBar1)
    val api1 = new SourceAPI(Array.empty, Array(classA))
    val api2 = new SourceAPI(Array.empty, Array(classB))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    val def1Hash = HashAPI(def1)
    val def2Hash = HashAPI(def2)
    def1Hash !=== def2Hash
    nameHashes1 === nameHashes2
  }

  /**
   * Very basic test which asserts that a name hash is sensitive to definition location.
   *
   * For example, if we have:
   * // Foo1.scala
   * class Foo { def xyz: Int = ... }
   * object Foo
   *
   * and:
   * // Foo2.scala
   * class Foo
   * object Foo { def xyz: Int = ... }
   *
   * then the hash for the `xyz` name should differ in those two cases
   * because the method `xyz` was moved from a class to an object.
   */
  "definition location" in {
    val nameHashing = new NameHashing
    val deff = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val classA = {
      val nestedBar1 = simpleClass("Bar1", deff)
      val nestedBar2 = simpleClass("Bar2")
      simpleClass("Foo", nestedBar1, nestedBar2)
    }
    val classB = {
      val nestedBar1 = simpleClass("Bar1")
      val nestedBar2 = simpleClass("Bar2", deff)
      simpleClass("Foo", nestedBar1, nestedBar2)
    }
    val api1 = new SourceAPI(Array.empty, Array(classA))
    val api2 = new SourceAPI(Array.empty, Array(classB))
    val nameHashes1 = nameHashing.nameHashes(api1)
    val nameHashes2 = nameHashing.nameHashes(api2)
    nameHashes1 !=== nameHashes2
  }

  /**
   * Tests whether members introduced in a parent class affect the hash of the name
   * of a child class.
   *
   * For example, if we have:
   * // Test1.scala
   * class Parent
   * class Child extends Parent
   *
   * and:
   * // Test2.scala
   * class Parent { def bar: Int = ... }
   * class Child extends Parent
   *
   * then the hash for the `Child` name should be the same in both
   * cases.
   */
  "definition in parent class" in {
    val parentA = simpleClass("Parent")
    val barMethod = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
    val parentB = simpleClass("Parent", barMethod)
    val childA = {
      val structure = new Structure(lzy(Array[Type](parentA.structure)), lzy(Array.empty[Definition]), lzy(Array.empty[Definition]))
      simpleClass("Child", structure)
    }
    val childB = {
      val structure = new Structure(lzy(Array[Type](parentB.structure)), lzy(Array.empty[Definition]), lzy(Array[Definition](barMethod)))
      simpleClass("Child", structure)
    }
    val parentANameHashes = nameHashesForClass(parentA)
    val parentBNameHashes = nameHashesForClass(parentB)
    Seq("Parent") === parentANameHashes.regularMembers.map(_.name).toSeq
    Seq("Parent", "bar") === parentBNameHashes.regularMembers.map(_.name).toSeq
    parentANameHashes !=== parentBNameHashes
    val childANameHashes = nameHashesForClass(childA)
    val childBNameHashes = nameHashesForClass(childB)
    assertNameHashEqualForRegularName("Child", childANameHashes, childBNameHashes)
  }

  /**
   * Checks whether changes to structural types that appear in a method signature
   * affect the name hash of the method. For example, if we have:
   *
   * // Test1.scala
   * class A {
   *   def foo: { bar: Int }
   * }
   *
   * // Test2.scala
   * class A {
   *   def foo: { bar: String }
   * }
   *
   * then the name hash for "foo" should be different in those two cases.
   */
  "structural type in definition" in {
    /* def foo: { bar: Int } */
    val fooMethod1 = {
      val barMethod1 = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
      new Def(Array.empty, simpleStructure(barMethod1), Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    }
    /* def foo: { bar: String } */
    val fooMethod2 = {
      val barMethod2 = new Def(Array.empty, strTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
      new Def(Array.empty, simpleStructure(barMethod2), Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
    }
    val aClass1 = simpleClass("A", fooMethod1)
    val aClass2 = simpleClass("A", fooMethod2)
    val nameHashes1 = nameHashesForClass(aClass1)
    val nameHashes2 = nameHashesForClass(aClass2)
    // note that `bar` does appear here
    Seq("A", "foo", "bar") === nameHashes1.regularMembers.map(_.name).toSeq
    Seq("A", "foo", "bar") === nameHashes2.regularMembers.map(_.name).toSeq
    assertNameHashEqualForRegularName("A", nameHashes1, nameHashes2)
    assertNameHashNotEqualForRegularName("foo", nameHashes1, nameHashes2)
    assertNameHashNotEqualForRegularName("bar", nameHashes1, nameHashes2)
  }

  /**
   * Checks that private members are included in the hash of the public API of traits.
   * Including the private members of traits is required because classes that implement a trait
   * have to define the private members of the trait. Therefore, if a private member of a trait is added,
   * modified or removed, we need to recompile the classes that implement this trait.
   * For instance, if trait Foo is initially defined as:
   *   trait Foo { private val x = new A }
   * changing it to
   *   trait Foo { private val x = new B }
   * requires us to recompile all implementors of trait Foo, because scalac generates setters and getters
   * for the private fields of trait Foo in its implementors. If the clients of trait Foo are not recompiled,
   * we get abstract method errors at runtime, because the types expected by the setter (for instance) do not
   * match.
   */
  "private members in traits" in {
    /* trait Foo { private val x } */
    val fooTrait1 =
      simpleTrait("Foo",
        simpleStructure(new Val(emptyType, "x", privateAccess, defaultModifiers, Array.empty)),
        publicAccess)

    /* trait Foo */
    val fooTrait2 =
      simpleTrait("Foo",
        simpleStructure(),
        publicAccess)

    val api1 = new SourceAPI(Array.empty, Array(fooTrait1))
    val api2 = new SourceAPI(Array.empty, Array(fooTrait2))

    HashAPI(api1) !== HashAPI(api2)
  }

  /**
   * Checks that private members in non-top-level traits are included as well.
   */
  "private members in nested traits" in {
    /* class A { trait Foo { private val x } } */
    val classA1 =
      simpleClass("A",
        simpleTrait("Foo",
          simpleStructure(new Val(emptyType, "x", privateAccess, defaultModifiers, Array.empty)),
          publicAccess))

    /* class A { trait Foo } */
    val classA2 =
      simpleClass("A",
        simpleTrait("Foo",
          simpleStructure(),
          publicAccess))

    val api1 = new SourceAPI(Array.empty, Array(classA1))
    val api2 = new SourceAPI(Array.empty, Array(classA2))

    HashAPI(api1) !== HashAPI(api2)
  }

  /**
   * Checks that private traits are NOT included in the hash.
   */
  "private traits" in {
    /* class Foo { private trait T { private val x } } */
    val classFoo1 =
      simpleClass("Foo",
        simpleTrait("T",
          simpleStructure(new Val(emptyType, "x", privateAccess, defaultModifiers, Array.empty)),
          privateAccess))

    /* class Foo { private trait T } */
    val classFoo2 =
      simpleClass("Foo",
        simpleTrait("T",
          simpleStructure(),
          privateAccess))

    /* class Foo */
    val classFoo3 =
      simpleClass("Foo")

    val api1 = new SourceAPI(Array.empty, Array(classFoo1))
    val api2 = new SourceAPI(Array.empty, Array(classFoo2))
    val api3 = new SourceAPI(Array.empty, Array(classFoo3))

    HashAPI(api1) === HashAPI(api2) && HashAPI(api2) === HashAPI(api3)
  }

  /**
   * Checks that private members are NOT included in the hash of the public API of classes.
   */
  "private members in classes" in {
    /* class Foo { private val x } */
    val classFoo1 =
      simpleClass("Foo",
        simpleStructure(new Val(emptyType, "x", privateAccess, defaultModifiers, Array.empty)))

    /* class Foo */
    val classFoo2 =
      simpleClass("Foo",
        simpleStructure())

    val api1 = new SourceAPI(Array.empty, Array(classFoo1))
    val api2 = new SourceAPI(Array.empty, Array(classFoo2))

    HashAPI(api1) === HashAPI(api2)
  }

  private def assertNameHashEqualForRegularName(name: String, nameHashes1: _internalOnly_NameHashes,
    nameHashes2: _internalOnly_NameHashes) = {
    val nameHash1 = nameHashForRegularName(nameHashes1, name)
    val nameHash2 = nameHashForRegularName(nameHashes2, name)
    nameHash1 === nameHash2
  }

  private def assertNameHashNotEqualForRegularName(name: String, nameHashes1: _internalOnly_NameHashes,
    nameHashes2: _internalOnly_NameHashes) = {
    val nameHash1 = nameHashForRegularName(nameHashes1, name)
    val nameHash2 = nameHashForRegularName(nameHashes2, name)
    nameHash1 !=== nameHash2
  }

  private def nameHashForRegularName(nameHashes: _internalOnly_NameHashes, name: String): _internalOnly_NameHash =
    try {
      nameHashes.regularMembers.find(_.name == name).get
    } catch {
      case e: NoSuchElementException => throw new RuntimeException(s"Couldn't find $name in $nameHashes", e)
    }

  private def nameHashesForClass(cl: ClassLike): _internalOnly_NameHashes = {
    val sourceAPI = new SourceAPI(Array.empty, Array(cl))
    val nameHashing = new NameHashing
    nameHashing.nameHashes(sourceAPI)
  }

  private def lzy[T](x: T): Lazy[T] = new Lazy[T] { def get: T = x }

  private def simpleStructure(defs: Definition*) = new Structure(lzy(Array.empty[Type]), lzy(defs.toArray), lzy(Array.empty[Definition]))

  private def simpleClass(name: String, defs: Definition*): ClassLike = {
    val structure = simpleStructure(defs: _*)
    simpleClass(name, structure)
  }

  private def simpleClass(name: String, structure: Structure): ClassLike = {
    new ClassLike(DefinitionType.ClassDef, lzy(emptyType), lzy(structure), Array.empty, Array.empty, name, publicAccess, defaultModifiers, Array.empty)
  }

  private def simpleTrait(name: String, structure: Structure, access: Access): ClassLike = {
    new ClassLike(DefinitionType.Trait, lzy(emptyType), lzy(structure), Array.empty, Array.empty, name, access, defaultModifiers, Array.empty)
  }

  private val emptyType = new EmptyType
  private val intTpe = new Projection(emptyType, "Int")
  private val strTpe = new Projection(emptyType, "String")
  private val publicAccess = new Public
  private val privateAccess = new Private(new Unqualified)
  private val defaultModifiers = new Modifiers(false, false, false, false, false, false, false)

}
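// Illustrative note (not part of the original spec): the private helpers above make new
// cases cheap to write. A hypothetical extra test that renaming a member changes the hashes:
//
//   "member rename" in {
//     val fooDef = new Def(Array.empty, intTpe, Array.empty, "foo", publicAccess, defaultModifiers, Array.empty)
//     val barDef = new Def(Array.empty, intTpe, Array.empty, "bar", publicAccess, defaultModifiers, Array.empty)
//     nameHashesForClass(simpleClass("A", fooDef)) !=== nameHashesForClass(simpleClass("A", barDef))
//   }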
@@ -1,3 +0,0 @@
Simple Build Tool: Incremental Logic Component
Copyright 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)
@@ -1,21 +0,0 @@
each compilation group gets an Analysis
an sbt-style project could have multiple compilation groups, or there could be multiple projects per compilation group.
Traditionally, there has been a main group and a test group.

Each Analysis is associated with one or more classpath entries. Typically, it will be associated with the output directory and/or any artifacts produced from that output directory.

For Java sources, we need to write a (File, Set[File]) => Source function that reads an API from a class file. The compile function passed to IncrementalCompile needs to handle compiling Java sources in the proper order.

Need to handle entries removed from the classpath. Could be done similarly to how Locate is used for getting the API for a dependency. In this case, we'd get the Stamp for a binary dependency.

Post-analysis
- need to handle inherited definitions

Need builder component (a Scala sketch follows):
Processor[D]
  def process(command: String, arg: String, current: State[D]): Result[State[D]]
Initial[D]
  def init: State[D]
State[D]
  value: D
  processors: Map[String, Processor[D]]
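A minimal Scala sketch of the builder component above (illustrative only; Result is assumed
here to be Either-like with a String error channel, which the notes leave unspecified):

  trait Processor[D] {
    def process(command: String, arg: String, current: State[D]): Either[String, State[D]]
  }
  trait Initial[D] {
    def init: State[D]
  }
  final case class State[D](value: D, processors: Map[String, Processor[D]])

  // dispatch: look up the processor registered for `command` and run it
  def run[D](command: String, arg: String, s: State[D]): Either[String, State[D]] =
    s.processors.get(command)
      .toRight(s"no processor registered for '$command'")
      .flatMap(_.process(command, arg, s))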
@@ -1,80 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt

import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput }
import java.io.File

// this class exists because of Scala's restriction on implicit parameter search.
// We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup]
// because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup])
// (6 > 4)
final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) {
  override def toString = s"CompileOptions(scalac=$options, javac=$javacOptions)"
}
final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String,
    val order: CompileOrder, val nameHashing: Boolean) {
  @deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2")
  def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = {
    this(output, options, compilerVersion, order, false)
  }
  override def toString = s"""CompileSetup(
                             | options = $options
                             | compilerVersion = $compilerVersion
                             | order = $order
                             | nameHashing = $nameHashing
                             | output = $output
                             |)""".stripMargin
}

object CompileSetup {
  // Equiv[CompileOrder.Value] dominates Equiv[CompileSetup]
  implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String] /*, equivOrder: Equiv[CompileOrder]*/ ): Equiv[CompileSetup] = new Equiv[CompileSetup] {
    def equiv(a: CompileSetup, b: CompileSetup) = {
      // For some reason, an Equiv[Nothing] or some such is getting injected into here now, and borking all our results.
      // We hardcode these to use the Equiv defined in this class.
      def sameOutput = CompileSetup.equivOutput.equiv(a.output, b.output)
      def sameOptions = CompileSetup.equivOpts.equiv(a.options, b.options)
      def sameCompiler = equivComp.equiv(a.compilerVersion, b.compilerVersion)
      def sameOrder = a.order == b.order
      def sameNameHasher = a.nameHashing == b.nameHashing
      sameOutput &&
        sameOptions &&
        sameCompiler &&
        sameOrder && // equivOrder.equiv(a.order, b.order)
        sameNameHasher
    }
  }
  implicit val equivFile: Equiv[File] = new Equiv[File] {
    def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
  }
  implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] {
    implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory)
    def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match {
      case (m1: MultipleOutput, m2: MultipleOutput) =>
        (m1.outputGroups.length == m2.outputGroups.length) &&
          (m1.outputGroups.sorted zip m2.outputGroups.sorted forall {
            case (a, b) =>
              equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory)
          })
      case (s1: SingleOutput, s2: SingleOutput) =>
        equivFile.equiv(s1.outputDirectory, s2.outputDirectory)
      case _ =>
        false
    }
  }
  implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
    def equiv(a: CompileOptions, b: CompileOptions) = {
      (a.options sameElements b.options) &&
        (a.javacOptions sameElements b.javacOptions)
    }
  }
  implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
    def equiv(a: String, b: String) = a == b
  }

  implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] {
    def equiv(a: CompileOrder, b: CompileOrder) = a == b
  }
}
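// Illustrative note (not part of the original sources): with the instances above in scope,
// two setups can be compared explicitly; because the members are hardcoded, the outcome does
// not depend on whatever other Equiv instances happen to be importable. `setupA`/`setupB`
// are assumed CompileSetup values:
//
//   import sbt.CompileSetup._
//   val unchanged = implicitly[Equiv[CompileSetup]].equiv(setupA, setupB)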
@@ -1,67 +0,0 @@
package sbt.inc

import xsbti.api.SourceAPI
import xsbt.api.ShowAPI
import xsbt.api.DefaultShowAPI._
import java.lang.reflect.Method
import java.util.{ List => JList }

/**
 * A class which computes diffs (unified diffs) between two textual representations of an API.
 *
 * Internally, it uses the java-diff-utils library, but it calls it through reflection so there's
 * no hard dependency on java-diff-utils.
 *
 * The reflective lookup of the java-diff-utils library is performed in the constructor. Exceptions
 * thrown by reflection are passed as-is to the caller of the constructor.
 *
 * @throws ClassNotFoundException if the difflib.DiffUtils class cannot be located
 * @throws LinkageError
 * @throws ExceptionInInitializerError
 */
private[inc] class APIDiff {

  import APIDiff._

  private val diffUtilsClass = Class.forName(diffUtilsClassName)
  // method signature: diff(List<?>, List<?>)
  private val diffMethod: Method =
    diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]])

  private val generateUnifiedDiffMethod: Method = {
    val patchClass = Class.forName(patchClassName)
    // method signature: generateUnifiedDiff(String, String, List<String>, Patch, int)
    diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String],
      classOf[String], classOf[JList[String]], patchClass, classOf[Int])
  }

  /**
   * Generates a unified diff between textual representations of `api1` and `api2`.
   */
  def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = {
    val api1Str = ShowAPI.show(api1)
    val api2Str = ShowAPI.show(api2)
    generateApiDiff(fileName, api1Str, api2Str, contextSize)
  }

  private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = {
    assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.")
    import scala.collection.JavaConverters._
    def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava
    val f1Lines = asJavaList(f1.lines)
    val f2Lines = asJavaList(f2.lines)
    //val diff = DiffUtils.diff(f1Lines, f2Lines)
    val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines)
    val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff,
      (contextSize: java.lang.Integer)).asInstanceOf[JList[String]]
    unifiedPatch.asScala.mkString("\n")
  }

}

private[inc] object APIDiff {
  private val diffUtilsClassName = "difflib.DiffUtils"
  private val patchClassName = "difflib.Patch"
  private val diffMethodName = "diff"
  private val generateUnifiedDiffMethodName = "generateUnifiedDiff"
}
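// Illustrative note (not part of the original sources): the optional-dependency pattern used
// above, reduced to its essentials. The class and method are resolved reflectively, so the
// jar is only needed at runtime when the feature is actually used:
//
//   val diffUtils = Class.forName("difflib.DiffUtils") // throws ClassNotFoundException if absent
//   val diff = diffUtils.getMethod("diff", classOf[java.util.List[_]], classOf[java.util.List[_]])
//   val patch = diff.invoke(null, linesA, linesB) // null receiver because the method is static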
@@ -1,92 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbti.api.Source
import java.io.File
import APIs.getAPI
import xsbti.api._internalOnly_NameHashes
import scala.util.Sorting
import xsbt.api.SameAPI

trait APIs {
  /**
   * The API for the source file `src` at the time represented by this instance.
   * This method returns an empty API if the file had no API or is not known to this instance.
   */
  def internalAPI(src: File): Source
  /**
   * The API for the external class `ext` at the time represented by this instance.
   * This method returns an empty API if the class had no API or is not known to this instance.
   */
  def externalAPI(ext: String): Source

  def allExternals: collection.Set[String]
  def allInternalSources: collection.Set[File]

  def ++(o: APIs): APIs

  def markInternalSource(src: File, api: Source): APIs
  def markExternalAPI(ext: String, api: Source): APIs

  def removeInternal(remove: Iterable[File]): APIs
  def filterExt(keep: String => Boolean): APIs
  @deprecated("OK to remove in 0.14", "0.13.1")
  def groupBy[K](internal: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs]

  def internal: Map[File, Source]
  def external: Map[String, Source]
}
object APIs {
  def apply(internal: Map[File, Source], external: Map[String, Source]): APIs = new MAPIs(internal, external)
  def empty: APIs = apply(Map.empty, Map.empty)

  val emptyAPI = new xsbti.api.SourceAPI(Array(), Array())
  val emptyCompilation = new xsbti.api.Compilation(-1, Array())
  val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)
  val emptySource = new xsbti.api.Source(emptyCompilation, Array(), emptyAPI, 0, emptyNameHashes, false)
  def getAPI[T](map: Map[T, Source], src: T): Source = map.getOrElse(src, emptySource)
}

private class MAPIs(val internal: Map[File, Source], val external: Map[String, Source]) extends APIs {
  def allInternalSources: collection.Set[File] = internal.keySet
  def allExternals: collection.Set[String] = external.keySet

  def ++(o: APIs): APIs = new MAPIs(internal ++ o.internal, external ++ o.external)

  def markInternalSource(src: File, api: Source): APIs =
    new MAPIs(internal.updated(src, api), external)

  def markExternalAPI(ext: String, api: Source): APIs =
    new MAPIs(internal, external.updated(ext, api))

  def removeInternal(remove: Iterable[File]): APIs = new MAPIs(internal -- remove, external)
  def filterExt(keep: String => Boolean): APIs = new MAPIs(internal, external.filterKeys(keep))
  @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
  def groupBy[K](f: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] =
    internal.groupBy(item => f(item._1)) map { group => (group._1, new MAPIs(group._2, external).filterExt(keepExternal.getOrElse(group._1, _ => false))) }

  def internalAPI(src: File) = getAPI(internal, src)
  def externalAPI(ext: String) = getAPI(external, ext)

  override def equals(other: Any): Boolean = other match {
    case o: MAPIs => {
      def areEqual[T](x: Map[T, Source], y: Map[T, Source])(implicit ord: math.Ordering[T]) = {
        x.size == y.size && (sorted(x) zip sorted(y) forall { z => z._1._1 == z._2._1 && SameAPI(z._1._2, z._2._2) })
      }
      areEqual(internal, o.internal) && areEqual(external, o.external)
    }
    case _ => false
  }

  override lazy val hashCode: Int = {
    def hash[T](m: Map[T, Source])(implicit ord: math.Ordering[T]) = sorted(m).map(x => (x._1, x._2.apiHash).hashCode).hashCode
    (hash(internal), hash(external)).hashCode
  }

  override def toString: String = "API(internal: %d, external: %d)".format(internal.size, external.size)

  private[this] def sorted[T](m: Map[T, Source])(implicit ord: math.Ordering[T]): Seq[(T, Source)] = m.toSeq.sortBy(_._1)
}
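// Illustrative note (not part of the original sources): APIs is immutable, so updates chain,
// and lookups of unknown keys fall back to APIs.emptySource rather than failing:
//
//   val as = APIs.empty.markInternalSource(srcFile, src).markExternalAPI("pkg.Cls", ext) // srcFile/src/ext assumed
//   as.internalAPI(unknownFile) // == APIs.emptySource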
@@ -1,307 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbti.api.Source
import xsbti.DependencyContext._
import java.io.File

/**
 * The merge/groupBy functionality requires understanding of the concepts of internalizing/externalizing dependencies:
 *
 * Say we have source files X, Y. And say we have some analysis A_X containing X as a source, and likewise for A_Y and Y.
 * If X depends on Y then A_X contains an external dependency X -> Y.
 *
 * However, if we merge A_X and A_Y into a combined analysis A_XY, then A_XY contains X and Y as sources, and therefore
 * X -> Y must be converted to an internal dependency in A_XY. We refer to this as "internalizing" the dependency.
 *
 * The reverse transformation must occur if we group an analysis A_XY into A_X and A_Y, so that the dependency X -> Y
 * crosses the boundary. We refer to this as "externalizing" the dependency.
 *
 * These transformations are complicated by the fact that internal dependencies are expressed as source file -> source file,
 * but external dependencies are expressed as source file -> fully-qualified class name.
 */
trait Analysis {
  val stamps: Stamps
  val apis: APIs
  /** Mappings between sources, classes, and binaries. */
  val relations: Relations
  val infos: SourceInfos
  /**
   * Information about compiler runs accumulated since the `clean` command was last run.
   *
   * The main use case for the `compilations` field is to determine how
   * many iterations it took to compile the given code. The `Compilation` objects
   * are also stored in `Source` objects, so there's an indirect way to recover
   * information about files being recompiled in every iteration.
   *
   * The incremental compilation algorithm doesn't use information stored in
   * `compilations`. It's safe to prune the contents of that field without breaking
   * the internal consistency of the entire Analysis object.
   */
  val compilations: Compilations

  /** Concatenates Analysis objects naively, i.e., doesn't internalize external deps on added files. See `Analysis.merge`. */
  def ++(other: Analysis): Analysis

  /** Drops all analysis information for `sources` naively, i.e., doesn't externalize internal deps on removed files. */
  def --(sources: Iterable[File]): Analysis

  def copy(stamps: Stamps = stamps, apis: APIs = apis, relations: Relations = relations, infos: SourceInfos = infos,
    compilations: Compilations = compilations): Analysis

  def addSource(src: File, api: Source, stamp: Stamp, info: SourceInfo,
    products: Iterable[(File, String, Stamp)],
    internalDeps: Iterable[InternalDependency],
    externalDeps: Iterable[ExternalDependency],
    binaryDeps: Iterable[(File, String, Stamp)]): Analysis

  @deprecated("Register all products and dependencies in addSource.", "0.13.8")
  def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis
  @deprecated("Register all products and dependencies in addSource.", "0.13.8")
  def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis
  @deprecated("Register all products and dependencies in addSource.", "0.13.8")
  def addExternalDep(src: File, dep: String, api: Source, inherited: Boolean): Analysis
  @deprecated("Register all products and dependencies in addSource.", "0.13.8")
  def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis

  /** Partitions this Analysis using the discriminator function. Externalizes internal deps that cross partitions. */
  def groupBy[K](discriminator: (File => K)): Map[K, Analysis]

  override lazy val toString = Analysis.summary(this)
}

object Analysis {
  lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty)
  private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty,
    Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty)

  /** Merge multiple analysis objects into one. Deps will be internalized as needed. */
  def merge(analyses: Traversable[Analysis]): Analysis = {
    if (analyses.exists(_.relations.nameHashing))
      throw new IllegalArgumentException("Merging of Analyses that have " +
        "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")

    // Merge the Relations, internalizing deps as needed.
    val mergedSrcProd = Relation.merge(analyses map { _.relations.srcProd })
    val mergedBinaryDep = Relation.merge(analyses map { _.relations.binaryDep })
    val mergedClasses = Relation.merge(analyses map { _.relations.classes })

    val stillInternal = Relation.merge(analyses map { _.relations.direct.internal })
    val (internalized, stillExternal) = Relation.merge(analyses map { _.relations.direct.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
    val internalizedFiles = Relation.reconstruct(internalized.forwardMap mapValues { _ flatMap mergedClasses.reverse })
    val mergedInternal = stillInternal ++ internalizedFiles

    val stillInternalPI = Relation.merge(analyses map { _.relations.publicInherited.internal })
    val (internalizedPI, stillExternalPI) = Relation.merge(analyses map { _.relations.publicInherited.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
    val internalizedFilesPI = Relation.reconstruct(internalizedPI.forwardMap mapValues { _ flatMap mergedClasses.reverse })
    val mergedInternalPI = stillInternalPI ++ internalizedFilesPI

    val mergedRelations = Relations.make(
      mergedSrcProd,
      mergedBinaryDep,
      Relations.makeSource(mergedInternal, stillExternal),
      Relations.makeSource(mergedInternalPI, stillExternalPI),
      mergedClasses
    )

    // Merge the APIs, internalizing APIs for targets of dependencies we internalized above.
    val concatenatedAPIs = (APIs.empty /: (analyses map { _.apis }))(_ ++ _)
    val stillInternalAPIs = concatenatedAPIs.internal
    val (internalizedAPIs, stillExternalAPIs) = concatenatedAPIs.external partition { x: (String, Source) => internalized._2s.contains(x._1) }
    val internalizedFilesAPIs = internalizedAPIs flatMap {
      case (cls: String, source: Source) => mergedRelations.definesClass(cls) map { file: File => (file, concatenatedAPIs.internalAPI(file)) }
    }
    val mergedAPIs = APIs(stillInternalAPIs ++ internalizedFilesAPIs, stillExternalAPIs)

    val mergedStamps = Stamps.merge(analyses map { _.stamps })
    val mergedInfos = SourceInfos.merge(analyses map { _.infos })
    val mergedCompilations = Compilations.merge(analyses map { _.compilations })

    new MAnalysis(mergedStamps, mergedAPIs, mergedRelations, mergedInfos, mergedCompilations)
  }

  def summary(a: Analysis): String = {
    val (j, s) = a.apis.allInternalSources.partition(_.getName.endsWith(".java"))
    val c = a.stamps.allProducts
    val ext = a.apis.allExternals
    val jars = a.relations.allBinaryDeps.filter(_.getName.endsWith(".jar"))
    val unreportedCount = a.infos.allInfos.values.map(_.unreportedProblems.size).sum
    val sections =
      counted("Scala source", "", "s", s.size) ++
        counted("Java source", "", "s", j.size) ++
        counted("class", "", "es", c.size) ++
        counted("external source dependenc", "y", "ies", ext.size) ++
        counted("binary dependenc", "y", "ies", jars.size) ++
        counted("unreported warning", "", "s", unreportedCount)
    sections.mkString("Analysis: ", ", ", "")
  }

  def counted(prefix: String, single: String, plural: String, count: Int): Option[String] =
    count match {
      case 0 => None
      case 1 => Some("1 " + prefix + single)
      case x => Some(x.toString + " " + prefix + plural)
    }

}
private class MAnalysis(val stamps: Stamps, val apis: APIs, val relations: Relations, val infos: SourceInfos, val compilations: Compilations) extends Analysis {
  def ++(o: Analysis): Analysis = new MAnalysis(stamps ++ o.stamps, apis ++ o.apis, relations ++ o.relations,
    infos ++ o.infos, compilations ++ o.compilations)

  def --(sources: Iterable[File]): Analysis = {
    val newRelations = relations -- sources
    def keep[T](f: (Relations, T) => Set[_]): T => Boolean = f(newRelations, _).nonEmpty

    val newAPIs = apis.removeInternal(sources).filterExt(keep(_ usesExternal _))
    val newStamps = stamps.filter(keep(_ produced _), sources, keep(_ usesBinary _))
    val newInfos = infos -- sources
    new MAnalysis(newStamps, newAPIs, newRelations, newInfos, compilations)
  }

  def copy(stamps: Stamps, apis: APIs, relations: Relations, infos: SourceInfos, compilations: Compilations = compilations): Analysis =
    new MAnalysis(stamps, apis, relations, infos, compilations)

  def addSource(src: File, api: Source, stamp: Stamp, info: SourceInfo,
    products: Iterable[(File, String, Stamp)],
    internalDeps: Iterable[InternalDependency],
    externalDeps: Iterable[ExternalDependency],
    binaryDeps: Iterable[(File, String, Stamp)]): Analysis = {

    val newStamps = {
      val productStamps = products.foldLeft(stamps.markInternalSource(src, stamp)) {
        case (tmpStamps, (toProduct, _, prodStamp)) => tmpStamps.markProduct(toProduct, prodStamp)
      }

      binaryDeps.foldLeft(productStamps) {
        case (tmpStamps, (toBinary, className, binStamp)) => tmpStamps.markBinary(toBinary, className, binStamp)
      }
    }

    val newAPIs = externalDeps.foldLeft(apis.markInternalSource(src, api)) {
      case (tmpApis, ExternalDependency(_, toClassName, classApi, _)) => tmpApis.markExternalAPI(toClassName, classApi)
    }

    val newRelations = relations.addSource(src, products map (p => (p._1, p._2)), internalDeps, externalDeps, binaryDeps)

    copy(newStamps, newAPIs, newRelations, infos.add(src, info))
  }

  def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis = {
    val directDeps = directInternal.map(InternalDependency(src, _, DependencyByMemberRef))
    val inheritedDeps = inheritedInternal.map(InternalDependency(src, _, DependencyByInheritance))

    addSource(src, api, stamp, info, products = Nil, directDeps ++ inheritedDeps, Nil, Nil)
  }

  def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis =
    copy(stamps.markBinary(dep, className, stamp), apis, relations.addBinaryDeps(src, (dep, className, stamp) :: Nil), infos)

  def addExternalDep(src: File, dep: String, depAPI: Source, inherited: Boolean): Analysis = {
    val context = if (inherited) DependencyByInheritance else DependencyByMemberRef
    copy(stamps, apis.markExternalAPI(dep, depAPI), relations.addExternalDeps(src, ExternalDependency(src, dep, depAPI, context) :: Nil), infos)
  }

  def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis =
    copy(stamps.markProduct(product, stamp), apis, relations.addProducts(src, (product, name) :: Nil), infos)

  def groupBy[K](discriminator: File => K): Map[K, Analysis] = {
    if (relations.nameHashing)
      throw new UnsupportedOperationException("Grouping of Analyses that have " +
        "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")

    def discriminator1(x: (File, _)) = discriminator(x._1) // Apply the discriminator to the first coordinate.

    val kSrcProd = relations.srcProd.groupBy(discriminator1)
    val kBinaryDep = relations.binaryDep.groupBy(discriminator1)
    val kClasses = relations.classes.groupBy(discriminator1)
    val kSourceInfos = infos.allInfos.groupBy(discriminator1)

    val (kStillInternal, kExternalized) = relations.direct.internal partition { case (a, b) => discriminator(a) == discriminator(b) } match {
      case (i, e) => (i.groupBy(discriminator1), e.groupBy(discriminator1))
    }
    val kStillExternal = relations.direct.external.groupBy(discriminator1)

    // Find all possible groups.
    val allMaps = kSrcProd :: kBinaryDep :: kStillInternal :: kExternalized :: kStillExternal :: kClasses :: kSourceInfos :: Nil
    val allKeys: Set[K] = (Set.empty[K] /: (allMaps map { _.keySet }))(_ ++ _)

    // Map from file to a single representative class defined in that file.
    // This is correct (for now): currently all classes in an external dep share the same Source object,
    // and a change to any of them will act like a change to all of them.
    // We don't use all the top-level classes in source.api.definitions, even though that's more intuitively
    // correct, because this can cause huge bloat of the analysis file.
    def getRepresentativeClass(file: File): Option[String] = apis.internalAPI(file).api.definitions.headOption map { _.name }

    // Create an Analysis for each group.
    (for (k <- allKeys) yield {
      def getFrom[A, B](m: Map[K, Relation[A, B]]): Relation[A, B] = m.getOrElse(k, Relation.empty)

      // Products and binary deps.
      val srcProd = getFrom(kSrcProd)
      val binaryDep = getFrom(kBinaryDep)

      // Direct sources.
      val stillInternal = getFrom(kStillInternal)
      val stillExternal = getFrom(kStillExternal)
      val externalized = getFrom(kExternalized)
      val externalizedClasses = Relation.reconstruct(externalized.forwardMap mapValues { _ flatMap getRepresentativeClass })
      val newExternal = stillExternal ++ externalizedClasses

      // Public inherited sources.
      val stillInternalPI = stillInternal filter relations.publicInherited.internal.contains
      val stillExternalPI = stillExternal filter relations.publicInherited.external.contains
      val externalizedPI = externalized filter relations.publicInherited.internal.contains
      val externalizedClassesPI = Relation.reconstruct(externalizedPI.forwardMap mapValues { _ flatMap getRepresentativeClass })
      val newExternalPI = stillExternalPI ++ externalizedClassesPI

      // Class names.
      val classes = getFrom(kClasses)

      // Create new relations for this group.
      val newRelations = Relations.make(
        srcProd,
        binaryDep,
        Relations.makeSource(stillInternal, newExternal),
        Relations.makeSource(stillInternalPI, newExternalPI),
        classes
      )

      // Compute new API mappings.
      def apisFor[T](m: Map[T, Source], x: Traversable[T]): Map[T, Source] =
        (x map { e: T => (e, m.get(e)) } collect { case (t, Some(source)) => (t, source) }).toMap
      val stillInternalAPIs = apisFor(apis.internal, srcProd._1s)
      val stillExternalAPIs = apisFor(apis.external, stillExternal._2s)
      val externalizedAPIs = apisFor(apis.internal, externalized._2s)
      val externalizedClassesAPIs = externalizedAPIs flatMap {
        case (file: File, source: Source) => getRepresentativeClass(file) map { cls: String => (cls, source) }
      }
      val newAPIs = APIs(stillInternalAPIs, stillExternalAPIs ++ externalizedClassesAPIs)

      // New stamps.
      val newStamps = Stamps(
        stamps.products.filterKeys(srcProd._2s.contains),
        stamps.sources.filterKeys({ discriminator(_) == k }),
        stamps.binaries.filterKeys(binaryDep._2s.contains),
        stamps.classNames.filterKeys(binaryDep._2s.contains))

      // New infos.
      val newSourceInfos = SourceInfos.make(kSourceInfos.getOrElse(k, Map.empty))

      (k, new MAnalysis(newStamps, newAPIs, newRelations, newSourceInfos, compilations))
    }).toMap
  }

  // Note: Equality doesn't consider source infos or compilations.
  override def equals(other: Any) = other match {
    case o: MAnalysis => stamps == o.stamps && apis == o.apis && relations == o.relations
    case _            => false
  }

  override lazy val hashCode = (stamps :: apis :: relations :: Nil).hashCode
}
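// Illustrative sketch (not part of the original sources) of the internalizing step described
// in the doc comment above: when merging, an external dependency whose target class is defined
// by one of the merged sources becomes an internal file -> file dependency.
//
//   val classes  = Map(new java.io.File("Y.scala") -> Set("pkg.Y"))  // source -> classes it defines
//   val external = Map(new java.io.File("X.scala") -> Set("pkg.Y"))  // source -> external class deps
//   val definedBy = classes.toSeq.flatMap { case (f, cs) => cs.map(_ -> f) }.toMap
//   val (internalized, stillExternal) =
//     external.partition { case (_, cs) => cs.exists(definedBy.contains) }
//   // internalized corresponds to X.scala -> Y.scala in the merged Relations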
@@ -1,30 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

trait AnalysisStore {
  def set(analysis: Analysis, setup: CompileSetup): Unit
  def get(): Option[(Analysis, CompileSetup)]
}

object AnalysisStore {
  def cached(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
    private var last: Option[(Analysis, CompileSetup)] = None
    def set(analysis: Analysis, setup: CompileSetup): Unit = {
      backing.set(analysis, setup)
      last = Some((analysis, setup))
    }
    def get(): Option[(Analysis, CompileSetup)] = {
      if (last.isEmpty)
        last = backing.get()
      last
    }
  }
  def sync(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
    def set(analysis: Analysis, setup: CompileSetup): Unit = synchronized { backing.set(analysis, setup) }
    def get(): Option[(Analysis, CompileSetup)] = synchronized { backing.get() }
  }
}
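// Illustrative note (not part of the original sources): the two decorators compose. A typical
// stack caches the last value in memory and serializes access to the backing store
// (`fileBackedStore` is an assumed AnalysisStore implementation):
//
//   val store = AnalysisStore.cached(AnalysisStore.sync(fileBackedStore))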
@@ -1,67 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbt.api.NameChanges
import java.io.File
import xsbti.api.{ _internalOnly_NameHashes => NameHashes }
import xsbti.api.{ _internalOnly_NameHash => NameHash }

final case class InitialChanges(internalSrc: Changes[File], removedProducts: Set[File], binaryDeps: Set[File], external: APIChanges[String])
final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) {
  override def toString = "API Changes: " + apiChanges
  def allModified: Iterable[T] = apiChanges.map(_.modified)
}

sealed abstract class APIChange[T](val modified: T)
/**
 * If we recompile a source file that contains a macro definition, then we always assume that its
 * API has changed. The reason is that there's no way to determine whether changes to a macro's
 * implementation affect its users or not. Therefore, we err on the side of caution.
 */
final case class APIChangeDueToMacroDefinition[T](modified0: T) extends APIChange(modified0)
final case class SourceAPIChange[T](modified0: T) extends APIChange(modified0)
/**
 * An APIChange that carries information about modified names.
 *
 * This class is used only when the name hashing algorithm is enabled.
 */
final case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) extends APIChange(modified0)

/**
 * ModifiedNames are determined by comparing name hashes in two versions of an API representation.
 *
 * Note that we distinguish between sets of regular (non-implicit) and implicit modified names.
 * This distinction is needed because the name hashing algorithm makes different decisions based
 * on whether a modified name is implicit or not. Implicit names are much more difficult to handle
 * due to the difficulty of reasoning about the implicit scope.
 */
final case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) {
  override def toString: String =
    s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})"
}
object ModifiedNames {
  def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = {
    val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet)
    val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet)
    ModifiedNames(modifiedRegularNames, modifiedImplicitNames)
  }
  private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = {
    val differentNameHashes = (xs union ys) diff (xs intersect ys)
    differentNameHashes.map(_.name)
  }
}

trait Changes[A] {
  def added: Set[A]
  def removed: Set[A]
  def changed: Set[A]
  def unmodified: Set[A]
}

sealed abstract class Change(val file: File)
final class Removed(f: File) extends Change(f)
final class Added(f: File, newStamp: Stamp) extends Change(f)
final class Modified(f: File, oldStamp: Stamp, newStamp: Stamp) extends Change(f)
|
||||
|
|
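
`calculateModifiedNames` above is a symmetric difference: a name counts as modified exactly when its (name, hash) pair occurs in only one of the two versions. A self-contained illustration with invented hash values (the case class below is a stand-in for xsbti's NameHash):

// Stand-in for xsbti.api._internalOnly_NameHash, for illustration only.
case class NameHash(name: String, hash: Int)

def modifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] =
  ((xs union ys) diff (xs intersect ys)).map(_.name) // symmetric difference

val before = Set(NameHash("foo", 1), NameHash("bar", 7))
val after = Set(NameHash("foo", 2), NameHash("bar", 7), NameHash("baz", 3))
// "foo" changed its hash, "baz" is new, "bar" is untouched:
assert(modifiedNames(before, after) == Set("foo", "baz"))
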
@ -1,79 +0,0 @@
package sbt.inc

import sbt.IO
import java.io.File
import collection.mutable

/**
 * During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files.
 * A ClassfileManager can be used only once.
 */
trait ClassfileManager {
  /**
   * Called once per compilation step with the class files to delete prior to that step's compilation.
   * The files in `classes` must not exist if this method returns normally.
   * Any empty ancestor directories of deleted files must not exist either.
   */
  def delete(classes: Iterable[File]): Unit

  /** Called once per compilation step with the class files generated during that step.*/
  def generated(classes: Iterable[File]): Unit

  /** Called once at the end of the whole compilation run, with `success` indicating whether compilation succeeded (true) or not (false).*/
  def complete(success: Boolean): Unit
}

object ClassfileManager {
  /** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. */
  val deleteImmediately: () => ClassfileManager = () => new ClassfileManager {
    def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes)
    def generated(classes: Iterable[File]): Unit = ()
    def complete(success: Boolean): Unit = ()
  }
  @deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5")
  def transactional(tempDir0: File): () => ClassfileManager =
    transactional(tempDir0, sbt.Logger.Null)
  /** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/
  def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager {
    val tempDir = tempDir0.getCanonicalFile
    IO.delete(tempDir)
    IO.createDirectory(tempDir)
    logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir")

    private[this] val generatedClasses = new mutable.HashSet[File]
    private[this] val movedClasses = new mutable.HashMap[File, File]

    private def showFiles(files: Iterable[File]): String = files.map(f => s"\t$f").mkString("\n")
    def delete(classes: Iterable[File]): Unit = {
      logger.debug(s"About to delete class files:\n${showFiles(classes)}")
      val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c))
      logger.debug(s"We back up class files:\n${showFiles(toBeBackedUp)}")
      for (c <- toBeBackedUp) {
        movedClasses.put(c, move(c))
      }
      IO.deleteFilesEmptyDirs(classes)
    }
    def generated(classes: Iterable[File]): Unit = {
      logger.debug(s"Registering generated classes:\n${showFiles(classes)}")
      generatedClasses ++= classes
    }
    def complete(success: Boolean): Unit = {
      if (!success) {
        logger.debug("Rolling back changes to class files.")
        logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}")
        IO.deleteFilesEmptyDirs(generatedClasses)
        logger.debug(s"Restoring class files: \n${showFiles(movedClasses.keys)}")
        for ((orig, tmp) <- movedClasses) IO.move(tmp, orig)
      }
      logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir")
      IO.delete(tempDir)
    }

    def move(c: File): File =
      {
        val target = File.createTempFile("sbt", ".class", tempDir)
        IO.move(c, target)
        target
      }
  }
}
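
The transactional manager only keeps its rollback guarantee if the caller drives it through the full lifecycle: `delete` before each step, `generated` after it, and exactly one `complete` at the end. A sketch of the caller's side of that protocol (mirroring what `Incremental.manageClassfiles` does later in this commit); `tempDir`, `staleClasses`, and `freshClasses` are hypothetical:

val manager = ClassfileManager.transactional(tempDir, sbt.Logger.Null)()
try {
  manager.delete(staleClasses)      // backs the files up into tempDir, then deletes them
  // ... run one compilation step ...
  manager.generated(freshClasses)   // records what this step produced
  manager.complete(success = true)  // commit: discard the backups
} catch {
  case e: Exception =>
    manager.complete(success = false) // roll back: delete fresh output, restore backups
    throw e
}
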
@ -1,21 +0,0 @@
package sbt.inc

import xsbti.api.Compilation

/** Information about compiler runs accumulated since the `clean` command has been run. */
trait Compilations {
  def allCompilations: Seq[Compilation]
  def ++(o: Compilations): Compilations
  def add(c: Compilation): Compilations
}

object Compilations {
  val empty: Compilations = new MCompilations(Seq.empty)
  def make(s: Seq[Compilation]): Compilations = new MCompilations(s)
  def merge(s: Traversable[Compilations]): Compilations = make((s flatMap { _.allCompilations }).toSeq.distinct)
}

private final class MCompilations(val allCompilations: Seq[Compilation]) extends Compilations {
  def ++(o: Compilations): Compilations = new MCompilations(allCompilations ++ o.allCompilations)
  def add(c: Compilation): Compilations = new MCompilations(allCompilations :+ c)
}
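
Note that `merge` flattens all histories and deduplicates with `distinct`, so merging overlapping `Compilations` values is safe. A small sketch, where `compilationA` and `compilationB` stand for real xsbti.api.Compilation instances:

val c1 = Compilations.empty.add(compilationA)
val c2 = Compilations.make(Seq(compilationA, compilationB))
// compilationA appears in both inputs but only once in the result:
assert(Compilations.merge(Seq(c1, c2)).allCompilations == Seq(compilationA, compilationB))
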
@ -1,234 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbti.api.{ Source, SourceAPI, Compilation, OutputSetting, _internalOnly_NameHashes }
import xsbti.compile.{ DependencyChanges, Output, SingleOutput, MultipleOutput }
import xsbti.{ Position, Problem, Severity }
import Logger.{ m2o, problem }
import java.io.File
import xsbti.api.Definition
import xsbti.DependencyContext
import xsbti.DependencyContext.{ DependencyByInheritance, DependencyByMemberRef }

/**
 * Helper methods for running incremental compilation. All this is responsible for is
 * adapting any xsbti.AnalysisCallback into one compatible with the [[sbt.inc.Incremental]] class.
 */
object IncrementalCompile {
  /**
   * Runs the incremental compilation algorithm.
   * @param sources
   *              The full set of input sources
   * @param entry
   *              A className -> classpath entry lookup function.
   * @param compile
   *              The mechanism to run a single 'step' of compile, for ALL source files involved.
   * @param previous
   *              The previous dependency Analysis (or an empty one).
   * @param forEntry
   *              The dependency Analysis associated with a given classpath entry.
   * @param output
   *              The configured output directory/directory mapping for source files.
   * @param log
   *              Where all log messages should go
   * @param options
   *              Incremental compiler options (like name hashing vs. not).
   * @return
   *              A flag of whether or not compilation completed successfully, and the resulting dependency analysis object.
   */
  def apply(sources: Set[File], entry: String => Option[File],
    compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit,
    previous: Analysis,
    forEntry: File => Option[Analysis],
    output: Output, log: Logger,
    options: IncOptions): (Boolean, Analysis) =
    {
      val current = Stamps.initial(Stamp.lastModified, Stamp.hash, Stamp.lastModified)
      val internalMap = (f: File) => previous.relations.produced(f).headOption
      val externalAPI = getExternalAPI(entry, forEntry)
      try {
        Incremental.compile(sources, entry, previous, current, forEntry, doCompile(compile, internalMap, externalAPI, current, output, options), log, options)
      } catch {
        case e: xsbti.CompileCancelled =>
          log.info("Compilation has been cancelled")
          // If compilation was cancelled, any partial results (e.g. produced class files) have been rolled back,
          // so we can report that nothing changed (false) and return the previous Analysis, which is still up-to-date.
          (false, previous)
      }
    }
  def doCompile(compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) =
    (srcs: Set[File], changes: DependencyChanges) => {
      val callback = new AnalysisCallback(internalMap, externalAPI, current, output, options)
      compile(srcs, changes, callback)
      callback.get
    }
  def getExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): (File, String) => Option[Source] =
    (file: File, className: String) =>
      entry(className) flatMap { defines =>
        if (file != Locate.resolve(defines, className))
          None
        else
          forEntry(defines) flatMap { analysis =>
            analysis.relations.definesClass(className).headOption flatMap { src =>
              analysis.apis.internal get src
            }
          }
      }
}
private final class AnalysisCallback(internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) extends xsbti.AnalysisCallback {
  val compilation = {
    val outputSettings = output match {
      case single: SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath))
      case multi: MultipleOutput =>
        multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath))
    }
    new Compilation(System.currentTimeMillis, outputSettings)
  }

  override def toString = (List("APIs", "Binary deps", "Products", "Source deps") zip List(apis, binaryDeps, classes, intSrcDeps)).map { case (label, map) => label + "\n\t" + map.mkString("\n\t") }.mkString("\n")

  import collection.mutable.{ HashMap, HashSet, ListBuffer, Map, Set }

  private[this] val apis = new HashMap[File, (Int, SourceAPI)]
  private[this] val usedNames = new HashMap[File, Set[String]]
  private[this] val publicNameHashes = new HashMap[File, _internalOnly_NameHashes]
  private[this] val unreporteds = new HashMap[File, ListBuffer[Problem]]
  private[this] val reporteds = new HashMap[File, ListBuffer[Problem]]
  private[this] val binaryDeps = new HashMap[File, Set[File]]
  // source file to set of generated (class file, class name)
  private[this] val classes = new HashMap[File, Set[(File, String)]]
  // generated class file to its source file
  private[this] val classToSource = new HashMap[File, File]
  // internal source dependencies
  private[this] val intSrcDeps = new HashMap[File, Set[InternalDependency]]
  // external source dependencies
  private[this] val extSrcDeps = new HashMap[File, Set[ExternalDependency]]
  private[this] val binaryClassName = new HashMap[File, String]
  // source files containing a macro def.
  private[this] val macroSources = Set[File]()

  private def add[A, B](map: Map[A, Set[B]], a: A, b: B): Unit =
    map.getOrElseUpdate(a, new HashSet[B]) += b

  def problem(category: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit =
    {
      for (source <- m2o(pos.sourceFile)) {
        val map = if (reported) reporteds else unreporteds
        map.getOrElseUpdate(source, ListBuffer.empty) += Logger.problem(category, pos, msg, severity)
      }
    }

  def sourceDependency(dependsOn: File, source: File, context: DependencyContext) = {
    add(intSrcDeps, source, InternalDependency(source, dependsOn, context))
  }

  @deprecated("Use `sourceDependency(File, File, DependencyContext)`.", "0.13.8")
  def sourceDependency(dependsOn: File, source: File, inherited: Boolean) =
    {
      val context = if (inherited) DependencyByInheritance else DependencyByMemberRef
      sourceDependency(dependsOn, source, context)
    }

  private[this] def externalBinaryDependency(binary: File, className: String, source: File, context: DependencyContext) = {
    binaryClassName.put(binary, className)
    add(binaryDeps, source, binary)
  }

  private[this] def externalSourceDependency(sourceFile: File, dependsOn: String, source: Source, context: DependencyContext) = {
    val dependency = ExternalDependency(sourceFile, dependsOn, source, context)
    add(extSrcDeps, sourceFile, dependency)
  }

  def binaryDependency(classFile: File, name: String, source: File, context: DependencyContext) =
    internalMap(classFile) match {
      case Some(dependsOn) =>
        // dependency is a product of a source not included in this compilation
        sourceDependency(dependsOn, source, context)
      case None =>
        classToSource.get(classFile) match {
          case Some(dependsOn) =>
            // dependency is a product of a source in this compilation step,
            // but not in the same compiler run (as in javac v. scalac)
            sourceDependency(dependsOn, source, context)
          case None =>
            externalDependency(classFile, name, source, context)
        }
    }

  @deprecated("Use `binaryDependency(File, String, File, DependencyContext)`.", "0.13.8")
  def binaryDependency(classFile: File, name: String, source: File, inherited: Boolean) = {
    val context = if (inherited) DependencyByInheritance else DependencyByMemberRef
    binaryDependency(classFile, name, source, context)
  }

  private[this] def externalDependency(classFile: File, name: String, source: File, context: DependencyContext): Unit =
    externalAPI(classFile, name) match {
      case Some(api) =>
        // dependency is a product of a source in another project
        externalSourceDependency(source, name, api, context)
      case None =>
        // dependency is some other binary on the classpath
        externalBinaryDependency(classFile, name, source, context)
    }

  def generatedClass(source: File, module: File, name: String) =
    {
      add(classes, source, (module, name))
      classToSource.put(module, source)
    }

  // empty value used when the name hashing algorithm is disabled
  private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)

  def api(sourceFile: File, source: SourceAPI): Unit = {
    import xsbt.api.{ APIUtil, HashAPI }
    if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile
    publicNameHashes(sourceFile) = {
      if (nameHashing)
        (new xsbt.api.NameHashing).nameHashes(source)
      else
        emptyNameHashes
    }
    val shouldMinimize = !Incremental.apiDebug(options)
    val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source
    apis(sourceFile) = (HashAPI(source), savedSource)
  }

  def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name)

  def nameHashing: Boolean = options.nameHashing

  def get: Analysis = addUsedNames(addCompilation(addProductsAndDeps(Analysis.empty(nameHashing = nameHashing))))

  def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten
  def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation))
  def addUsedNames(base: Analysis): Analysis = (base /: usedNames) {
    case (a, (src, names)) =>
      (a /: names) { case (a, name) => a.copy(relations = a.relations.addUsedName(src, name)) }
  }

  def addProductsAndDeps(base: Analysis): Analysis =
    (base /: apis) {
      case (a, (src, api)) =>
        val stamp = current.internalSource(src)
        val hash = stamp match { case h: Hash => h.value; case _ => new Array[Byte](0) }
        // TODO store this in Relations, rather than Source.
        val hasMacro: Boolean = macroSources.contains(src)
        val s = new xsbti.api.Source(compilation, hash, api._2, api._1, publicNameHashes(src), hasMacro)
        val info = SourceInfos.makeInfo(getOrNil(reporteds, src), getOrNil(unreporteds, src))
        val binaries = binaryDeps.getOrElse(src, Nil: Iterable[File])
        val prods = classes.getOrElse(src, Nil: Iterable[(File, String)])

        val products = prods.map { case (prod, name) => (prod, name, current product prod) }
        val internalDeps = intSrcDeps.getOrElse(src, Set.empty)
        val externalDeps = extSrcDeps.getOrElse(src, Set.empty)
        val binDeps = binaries.map(d => (d, binaryClassName(d), current binary d))

        a.addSource(src, s, stamp, info, products, internalDeps, externalDeps, binDeps)
    }
}
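
The entry point takes its collaborators as plain functions, so wiring it up is mostly a matter of supplying lookups. A sketch of a call site, where `mySources`, `classpathEntryFor`, `runCompiler`, `analysisFor`, `myOutput`, and `myLogger` are all hypothetical stand-ins for the caller's environment:

// Hypothetical wiring; only IncrementalCompile, Analysis and IncOptions are real names here.
val (changed, analysis) = IncrementalCompile(
  sources = mySources,          // the full Set[File] to consider
  entry = classpathEntryFor,    // class name -> defining classpath entry
  compile = runCompiler,        // (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit
  previous = Analysis.empty(nameHashing = true),
  forEntry = analysisFor,       // classpath entry -> that project's Analysis
  output = myOutput,
  log = myLogger,
  options = IncOptions.Default.withNameHashing(true))
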
@ -1,15 +0,0 @@
package sbt.inc

import java.io.File
import xsbti.api.Source
import xsbti.DependencyContext

/**
 * Represents the kind of dependency that exists between `sourceFile` and either `targetFile`
 * or `targetClassName`.
 *
 * `InternalDependency` represents dependencies between files of the same project,
 * while `ExternalDependency` represents cross-project dependencies.
 */
private[inc] final case class InternalDependency(sourceFile: File, targetFile: File, context: DependencyContext)
private[inc] final case class ExternalDependency(sourceFile: File, targetClassName: String, targetSource: Source, context: DependencyContext)
@ -1,43 +0,0 @@
package sbt
package inc

import java.io.File
import java.util.concurrent.ConcurrentHashMap

sealed trait FileValueCache[T] {
  def clear(): Unit
  def get: File => T
}

private[this] final class FileValueCache0[T](getStamp: File => Stamp, make: File => T)(implicit equiv: Equiv[Stamp]) extends FileValueCache[T] {
  private[this] val backing = new ConcurrentHashMap[File, FileCache]

  def clear(): Unit = backing.clear()
  def get = file => {
    val ifAbsent = new FileCache(file)
    val cache = backing.putIfAbsent(file, ifAbsent)
    (if (cache eq null) ifAbsent else cache).get()
  }

  private[this] final class FileCache(file: File) {
    private[this] var stampedValue: Option[(Stamp, T)] = None
    def get(): T = synchronized {
      val latest = getStamp(file)
      stampedValue match {
        case Some((stamp, value)) if (equiv.equiv(latest, stamp)) => value
        case _ => update(latest)
      }
    }

    private[this] def update(stamp: Stamp): T =
      {
        val value = make(file)
        stampedValue = Some((stamp, value))
        value
      }
  }
}
object FileValueCache {
  def apply[T](f: File => T): FileValueCache[T] = make(Stamp.lastModified)(f)
  def make[T](stamp: File => Stamp)(f: File => T): FileValueCache[T] = new FileValueCache0[T](stamp, f)
}
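
A cached value is recomputed only when the file's stamp changes (by default, its last-modified time); concurrent lookups share a single FileCache per file via `putIfAbsent`, and the per-file `synchronized` block makes the recomputation race-free. A usage sketch, where `file` is a hypothetical existing file:

// Hypothetical expensive derivation: count the lines of a file.
def lineCount(f: java.io.File): Int = sbt.IO.readLines(f).size

val counts = FileValueCache(lineCount) // keyed on Stamp.lastModified by default
val n1 = counts.get(file) // computes and memoizes
val n2 = counts.get(file) // cache hit while the timestamp is unchanged
// Modifying `file` changes its stamp, so the next lookup recomputes.
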
@ -1,338 +0,0 @@
package sbt.inc

import java.io.File

/**
 * Represents all configuration options for the incremental compiler itself and
 * not the underlying Java/Scala compiler.
 *
 * NOTE: This class used to be a case class but due to problems with retaining
 * binary compatibility while new fields are added it has been expanded to a
 * regular class. All compiler-generated methods for a case class have been
 * defined explicitly.
 */
final class IncOptions(
  /** After which step to include the whole transitive closure of invalidated source files. */
  val transitiveStep: Int,
  /**
   * The fraction of invalidated source files at which we switch to recompiling
   * all files and give up on incremental compilation altogether. That's useful in
   * cases when the probability is high that we would end up recompiling most of the
   * source files anyway, just in multiple steps. Multi-step incremental recompilation
   * is slower than recompiling everything in one step.
   */
  val recompileAllFraction: Double,
  /** Print very detailed information about relations, such as dependencies between source files. */
  val relationsDebug: Boolean,
  /**
   * Enable tools for debugging API changes. At the moment this option is unused but in the
   * future it will enable for example:
   *   - disabling API hashing and API minimization (potentially very memory consuming)
   *   - diffing textual API representation which helps understanding what kind of changes
   *     to APIs are visible to the incremental compiler
   */
  val apiDebug: Boolean,
  /**
   * Controls context size (in lines) displayed when diffs are produced for textual API
   * representation.
   *
   * This option is used only when `apiDebug == true`.
   */
  val apiDiffContextSize: Int,
  /**
   * The directory where we dump textual representation of APIs. It is consulted only
   * if `apiDebug` is true. The option is unused at the moment, as the needed
   * functionality is not implemented yet.
   */
  val apiDumpDirectory: Option[java.io.File],
  /** Creates a new ClassfileManager that will handle class file deletion and addition during a single incremental compilation run. */
  val newClassfileManager: () => ClassfileManager,
  /**
   * Determines whether the incremental compiler should recompile all dependencies of a file
   * that contains a macro definition.
   */
  val recompileOnMacroDef: Boolean,
  /**
   * Determines whether the incremental compiler uses the new algorithm known as name hashing.
   *
   * This flag is disabled by default so the incremental compiler's behavior is the same as in sbt 0.13.0.
   *
   * IMPLEMENTATION NOTE:
   * Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm:
   *
   *   1. New dependency source tracking is used. See `sbt.inc.Relations` for details.
   *   2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well.
   *   3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details.
   */
  val nameHashing: Boolean,
  /**
   * THE `antStyle` OPTION IS UNSUPPORTED, MAY GO AWAY AT ANY POINT.
   *
   * Enables "ant-style" mode of incremental compilation. This mode emulates what Ant's scalac command does.
   * The idea is to recompile just changed source files and not perform any invalidation of dependencies. This
   * is a very naive mode of incremental compilation that very often leads to broken binaries.
   *
   * The Ant-style mode has been introduced because the Scala team needs it for migration of the Scala compiler to sbt.
   * The name hashing algorithm doesn't work well with the Scala compiler sources due to deep inheritance chains.
   * There's a plan to refactor the compiler's code to use more composition instead of inheritance.
   *
   * Once the Scala compiler sources are refactored to work well with the name hashing algorithm this option will be
   * deleted immediately.
   */
  val antStyle: Boolean) extends Product with Serializable {

  /**
   * Secondary constructor introduced to keep IncOptions binary compatible with versions that didn't have
   * the `recompileOnMacroDef` and `nameHashing` fields defined.
   */
  def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
    apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = {
    this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault,
      IncOptions.antStyleDefault)
  }

  assert(!(antStyle && nameHashing), "Name hashing and Ant-style cannot be enabled at the same time.")

  def withTransitiveStep(transitiveStep: Int): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withRelationsDebug(relationsDebug: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withApiDebug(apiDebug: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withNameHashing(nameHashing: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  def withAntStyle(antStyle: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  //- EXPANDED CASE CLASS METHOD BEGIN -//
  @deprecated("Use `with$nameOfTheField` copying methods instead.", "0.13.2")
  def copy(transitiveStep: Int = this.transitiveStep, recompileAllFraction: Double = this.recompileAllFraction,
    relationsDebug: Boolean = this.relationsDebug, apiDebug: Boolean = this.apiDebug,
    apiDiffContextSize: Int = this.apiDiffContextSize,
    apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory,
    newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyle)
  }

  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  override def productPrefix: String = "IncOptions"

  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  def productArity: Int = 10

  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  def productElement(x$1: Int): Any = x$1 match {
    case 0 => IncOptions.this.transitiveStep
    case 1 => IncOptions.this.recompileAllFraction
    case 2 => IncOptions.this.relationsDebug
    case 3 => IncOptions.this.apiDebug
    case 4 => IncOptions.this.apiDiffContextSize
    case 5 => IncOptions.this.apiDumpDirectory
    case 6 => IncOptions.this.newClassfileManager
    case 7 => IncOptions.this.recompileOnMacroDef
    case 8 => IncOptions.this.nameHashing
    case 9 => IncOptions.this.antStyle
    case _ => throw new IndexOutOfBoundsException(x$1.toString())
  }

  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](IncOptions.this)

  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  def canEqual(x$1: Any): Boolean = x$1.isInstanceOf[IncOptions]

  override def hashCode(): Int = {
    import scala.runtime.Statics
    var acc: Int = -889275714
    acc = Statics.mix(acc, transitiveStep)
    acc = Statics.mix(acc, Statics.doubleHash(recompileAllFraction))
    acc = Statics.mix(acc, if (relationsDebug) 1231 else 1237)
    acc = Statics.mix(acc, if (apiDebug) 1231 else 1237)
    acc = Statics.mix(acc, apiDiffContextSize)
    acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory))
    acc = Statics.mix(acc, Statics.anyHash(newClassfileManager))
    acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237)
    acc = Statics.mix(acc, if (nameHashing) 1231 else 1237)
    acc = Statics.mix(acc, if (antStyle) 1231 else 1237)
    Statics.finalizeHash(acc, 9)
  }

  override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this)

  override def equals(x$1: Any): Boolean = {
    this.eq(x$1.asInstanceOf[Object]) || (x$1.isInstanceOf[IncOptions] && ({
      val IncOptions$1: IncOptions = x$1.asInstanceOf[IncOptions]
      transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction &&
        relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug &&
        apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory &&
        newClassfileManager == IncOptions$1.newClassfileManager &&
        recompileOnMacroDef == IncOptions$1.recompileOnMacroDef && nameHashing == IncOptions$1.nameHashing &&
        antStyle == IncOptions$1.antStyle
    }))
  }
  //- EXPANDED CASE CLASS METHOD END -//
}

object IncOptions extends Serializable {
  private val recompileOnMacroDefDefault: Boolean = true
  private[sbt] val nameHashingDefault: Boolean = true
  private val antStyleDefault: Boolean = false
  val Default = IncOptions(
    // 1. recompile changed sources
    // 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2).
    // 4. further changes invalidate all dependencies transitively to avoid too many steps
    transitiveStep = 3,
    recompileAllFraction = 0.5,
    relationsDebug = false,
    apiDebug = false,
    apiDiffContextSize = 5,
    apiDumpDirectory = None,
    newClassfileManager = ClassfileManager.deleteImmediately,
    recompileOnMacroDef = recompileOnMacroDefDefault,
    nameHashing = nameHashingDefault
  )
  //- EXPANDED CASE CLASS METHOD BEGIN -//
  final override def toString(): String = "IncOptions"
  @deprecated("Use overloaded variant of `apply` with complete list of arguments instead.", "0.13.2")
  def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
    apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
    newClassfileManager: () => ClassfileManager): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager)
  }
  def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
    apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
    newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean,
    nameHashing: Boolean): IncOptions = {
    new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
      apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing, antStyleDefault)
  }
  @deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
  def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = {
    if (x$0 == null) None
    else Some.apply[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)](
      Tuple7.apply[Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef](
        x$0.transitiveStep, x$0.recompileAllFraction, x$0.relationsDebug, x$0.apiDebug, x$0.apiDiffContextSize,
        x$0.apiDumpDirectory, x$0.newClassfileManager))
  }
  private def readResolve(): Object = IncOptions
  //- EXPANDED CASE CLASS METHOD END -//

  @deprecated("Use IncOptions.Default.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
  def defaultTransactional(tempDir: File): IncOptions =
    setTransactional(Default, tempDir)
  @deprecated("Use opts.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
  def setTransactional(opts: IncOptions, tempDir: File): IncOptions =
    opts.withNewClassfileManager(ClassfileManager.transactional(tempDir, sbt.Logger.Null))

  private val transitiveStepKey = "transitiveStep"
  private val recompileAllFractionKey = "recompileAllFraction"
  private val relationsDebugKey = "relationsDebug"
  private val apiDebugKey = "apiDebug"
  private val apiDumpDirectoryKey = "apiDumpDirectory"
  private val apiDiffContextSizeKey = "apiDiffContextSize"
  private val recompileOnMacroDefKey = "recompileOnMacroDef"
  private val nameHashingKey = "nameHashing"
  private val antStyleKey = "antStyle"

  def fromStringMap(m: java.util.Map[String, String]): IncOptions = {
    // All the code below doesn't look like idiomatic Scala for a good reason: we are working with a Java API.
    def getTransitiveStep: Int = {
      val k = transitiveStepKey
      if (m.containsKey(k)) m.get(k).toInt else Default.transitiveStep
    }
    def getRecompileAllFraction: Double = {
      val k = recompileAllFractionKey
      if (m.containsKey(k)) m.get(k).toDouble else Default.recompileAllFraction
    }
    def getRelationsDebug: Boolean = {
      val k = relationsDebugKey
      if (m.containsKey(k)) m.get(k).toBoolean else Default.relationsDebug
    }
    def getApiDebug: Boolean = {
      val k = apiDebugKey
      if (m.containsKey(k)) m.get(k).toBoolean else Default.apiDebug
    }
    def getApiDiffContextSize: Int = {
      val k = apiDiffContextSizeKey
      if (m.containsKey(k)) m.get(k).toInt else Default.apiDiffContextSize
    }
    def getApiDumpDirectory: Option[java.io.File] = {
      val k = apiDumpDirectoryKey
      if (m.containsKey(k))
        Some(new java.io.File(m.get(k)))
      else None
    }
    def getRecompileOnMacroDef: Boolean = {
      val k = recompileOnMacroDefKey
      if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef
    }
    def getNameHashing: Boolean = {
      val k = nameHashingKey
      if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing
    }

    def getAntStyle: Boolean = {
      val k = antStyleKey
      if (m.containsKey(k)) m.get(k).toBoolean else Default.antStyle
    }

    new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize,
      getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing, getAntStyle)
  }

  def toStringMap(o: IncOptions): java.util.Map[String, String] = {
    val m = new java.util.HashMap[String, String]
    m.put(transitiveStepKey, o.transitiveStep.toString)
    m.put(recompileAllFractionKey, o.recompileAllFraction.toString)
    m.put(relationsDebugKey, o.relationsDebug.toString)
    m.put(apiDebugKey, o.apiDebug.toString)
    o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString))
    m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString)
    m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString)
    m.put(nameHashingKey, o.nameHashing.toString)
    m
  }
}
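
The string-map round trip above is intentionally lossy: `newClassfileManager` is a function and cannot be serialized, so `fromStringMap` always substitutes ClassfileManager.deleteImmediately, and `toStringMap` never writes the unsupported `antStyle` key even though `fromStringMap` would read it. A sketch of the round trip:

import scala.collection.JavaConverters._

val opts = IncOptions.Default.withApiDebug(true).withApiDiffContextSize(10)
val m = IncOptions.toStringMap(opts) // java.util.Map[String, String]
println(m.asScala)                   // e.g. Map(apiDebug -> true, apiDiffContextSize -> 10, ...)

val restored = IncOptions.fromStringMap(m)
assert(restored.apiDebug && restored.apiDiffContextSize == 10)
// Not round-tripped: restored.newClassfileManager is ClassfileManager.deleteImmediately.
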
@ -1,98 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbt.api.{ NameChanges, SameAPI, TopLevel }
import annotation.tailrec
import xsbti.api.{ Compilation, Source }
import xsbti.compile.DependencyChanges
import java.io.File

/**
 * Helper class to run the incremental compilation algorithm.
 *
 * This class delegates down to
 * - IncrementalNameHashing
 * - IncrementalDefaultImpl
 * - IncrementalAntStyle
 */
object Incremental {
  /**
   * Runs the incremental compiler algorithm.
   *
   * @param sources The sources to compile
   * @param entry The means of looking up a class on the classpath.
   * @param previous The previously detected source dependencies.
   * @param current A mechanism for generating stamps (timestamps, hashes, etc).
   * @param doCompile The function which can run one level of compile.
   * @param log The log where we write debugging information
   * @param options Incremental compilation options
   * @param equivS The means of testing whether two "Stamps" are the same.
   * @return
   *         A flag of whether or not compilation completed successfully, and the resulting dependency analysis object.
   */
  def compile(sources: Set[File],
    entry: String => Option[File],
    previous: Analysis,
    current: ReadStamps,
    forEntry: File => Option[Analysis],
    doCompile: (Set[File], DependencyChanges) => Analysis,
    log: Logger,
    options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) =
    {
      val incremental: IncrementalCommon =
        if (options.nameHashing)
          new IncrementalNameHashing(log, options)
        else if (options.antStyle)
          new IncrementalAntStyle(log, options)
        else
          new IncrementalDefaultImpl(log, options)
      val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry)
      val binaryChanges = new DependencyChanges {
        val modifiedBinaries = initialChanges.binaryDeps.toArray
        val modifiedClasses = initialChanges.external.allModified.toArray
        def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty
      }
      val initialInv = incremental.invalidateInitial(previous.relations, initialChanges)
      log.debug("All initially invalidated sources: " + initialInv + "\n")
      val analysis = manageClassfiles(options) { classfileManager =>
        incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1)
      }
      (initialInv.nonEmpty, analysis)
    }

  // The name of the system property that was meant to enable debugging mode of the incremental
  // compiler, but ended up being used just to enable debugging of relations. Hence, if you migrate
  // to the new API for configuring the incremental compiler (IncOptions), it's enough to control
  // the `relationsDebug` flag to achieve the same effect as `incDebugProp`.
  @deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2")
  val incDebugProp = "xsbt.inc.debug"

  private[inc] val apiDebugProp = "xsbt.api.debug"
  private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp)

  private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis =
    prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately())

  private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis =
    {
      classfileManager.delete(invalidatedSrcs.flatMap(previous.relations.products))
      previous -- invalidatedSrcs
    }

  private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T =
    {
      val classfileManager = options.newClassfileManager()
      val result = try run(classfileManager) catch {
        case e: Exception =>
          classfileManager.complete(success = false)
          throw e
      }
      classfileManager.complete(success = true)
      result
    }

}
@ -1,24 +0,0 @@
package sbt
package inc

import java.io.File
import xsbti.api.Source

private final class IncrementalAntStyle(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) {

  /** Ant-style mode doesn't do anything special with package objects */
  override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = Set.empty

  /** In Ant-style mode we don't need to compare APIs because we don't perform any invalidation */
  override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = None

  /** In Ant-style mode we don't perform any invalidation */
  override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = Set.empty

  /** In Ant-style mode we don't perform any invalidation */
  override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = Set.empty

  /** In Ant-style mode we don't need to perform any dependency analysis, hence we can always return an empty set. */
  override protected def allDeps(relations: Relations): File => Set[File] = _ => Set.empty

}
@ -1,338 +0,0 @@
package sbt
package inc

import scala.annotation.tailrec
import xsbti.compile.DependencyChanges
import xsbti.api.{ Compilation, Source }
import java.io.File

private[inc] abstract class IncrementalCommon(log: Logger, options: IncOptions) {

  private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp)

  // Setting the related system property to true will skip checking that the class name
  // still comes from the same classpath entry. This can work around bugs in classpath construction,
  // such as the currently problematic -javabootclasspath. This is subject to removal at any time.
  private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")

  // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success
  // TODO: full external name changes, scopeInvalidations
  @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis,
    doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis =
    if (invalidatedRaw.isEmpty)
      previous
    else {
      def debug(s: => String) = if (incDebug(options)) log.debug(s) else ()
      val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations)
      val invalidated = expand(withPackageObjects, allSources)
      val pruned = Incremental.prune(invalidated, previous, classfileManager)
      debug("********* Pruned: \n" + pruned.relations + "\n*********")

      val fresh = doCompile(invalidated, binaryChanges)
      classfileManager.generated(fresh.relations.allProducts)
      debug("********* Fresh: \n" + fresh.relations + "\n*********")
      val merged = pruned ++ fresh //.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis)
      debug("********* Merged: \n" + merged.relations + "\n*********")

      val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _)
      debug("\nChanges:\n" + incChanges)
      val transitiveStep = options.transitiveStep
      val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep)
      cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum + 1)
    }
  private[this] def emptyChanges: DependencyChanges = new DependencyChanges {
    val modifiedBinaries = new Array[File](0)
    val modifiedClasses = new Array[String](0)
    def isEmpty = true
  }
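
Stripped of compiler details, `cycle` above is a fixpoint loop: compile the invalidated set, diff old and new APIs, derive the next invalidated set, and stop when it is empty. A simplified, runnable sketch of that shape over strings (the real loop can also re-invalidate already-compiled sources when they sit in a dependency cycle, which this sketch omits):

// Toy reverse-dependency map standing in for Relations: deps(x) = sources that use x.
val deps = Map("A" -> Set("B"), "B" -> Set("C"), "C" -> Set.empty[String])

@annotation.tailrec
def fixpoint(invalidated: Set[String], compiled: Set[String]): Set[String] =
  if (invalidated.isEmpty) compiled
  else {
    val recompiled = compiled ++ invalidated           // "compile" this round
    val next = invalidated.flatMap(deps) -- recompiled // dependents invalidated by API changes
    fixpoint(next, recompiled)
  }

assert(fixpoint(Set("A"), Set.empty) == Set("A", "B", "C"))
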
  private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = {
    val recompileAllFraction = options.recompileAllFraction
    if (invalidated.size > all.size * recompileAllFraction) {
      log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction * 100.0) + "% of all sources")
      all ++ invalidated // need the union because all doesn't contain removed sources
    } else invalidated
  }

  protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File]

  /**
   * Logs API changes using debug-level logging. The diffs are produced using the APIDiff class.
   *
   * NOTE: This method creates a new APIDiff instance on every invocation.
   */
  private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source,
    newAPIMapping: T => Source): Unit = {
    val contextSize = options.apiDiffContextSize
    try {
      val apiDiff = new APIDiff
      apiChanges foreach {
        case APIChangeDueToMacroDefinition(src) =>
          log.debug(s"Public API is considered to be changed because $src contains a macro definition.")
        case apiChange @ (_: SourceAPIChange[T] | _: NamesChange[T]) =>
          val src = apiChange.modified
          val oldApi = oldAPIMapping(src)
          val newApi = newAPIMapping(src)
          val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize)
          log.debug(s"Detected a change in a public API (${src.toString}):\n" + apiUnifiedPatch)
      }
    } catch {
      case e: ClassNotFoundException =>
        log.error("You have API debugging enabled but the DiffUtils library cannot be found on sbt's classpath")
      case e: LinkageError =>
        log.error("Encountered a linkage error while trying to load the DiffUtils library.")
        log.trace(e)
      case e: Exception =>
        log.error("An exception has been thrown while trying to dump an api diff.")
        log.trace(e)
    }
  }

  /**
   * Accepts the sources that were recompiled during the last step and functions
   * providing the API before and after the last step. The functions should return
   * an empty API if the file did not/does not exist.
   */
  def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] =
    {
      val oldApis = lastSources.toSeq map oldAPI
      val newApis = lastSources.toSeq map newAPI
      val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) }

      if (Incremental.apiDebug(options) && apiChanges.nonEmpty) {
        logApiChanges(apiChanges, oldAPI, newAPI)
      }

      new APIChanges(apiChanges)
    }
  def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
    // Clients of a modified source file (i.e., one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled.
    val hasMacro = a.hasMacro || b.hasMacro
    if (shortcutSameSource(a, b)) {
      None
    } else {
      if (hasMacro && options.recompileOnMacroDef) {
        Some(APIChangeDueToMacroDefinition(src))
      } else sameAPI(src, a, b)
    }
  }

  protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]]

  def shortcutSameSource(a: Source, b: Source): Boolean = a.hash.nonEmpty && b.hash.nonEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep)
  def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs) {
    case (co1, co2) => co1.sourceDirectory == co2.sourceDirectory && co1.outputDirectory == co2.outputDirectory
  }

  def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps,
    forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges =
    {
      val previous = previousAnalysis.stamps
      val previousAPIs = previousAnalysis.apis

      val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv(previous.internalSource(f), current.internalSource(f)))
      val removedProducts = previous.allProducts.filter(p => !equivS.equiv(previous.product(p), current.product(p))).toSet
      val binaryDepChanges = previous.allBinaries.filter(externalBinaryModified(entry, forEntry, previous, current)).toSet
      val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry))

      InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges)
    }

  def changes(previous: Set[File], current: Set[File], existingModified: File => Boolean): Changes[File] =
    new Changes[File] {
      private val inBoth = previous & current
      val removed = previous -- inBoth
      val added = current -- inBoth
      val (changed, unmodified) = inBoth.partition(existingModified)
    }

  def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] =
    {
      val dependsOnSrc = previous.usesInternalSrc _
      val propagated =
        if (transitive)
          transitiveDependencies(dependsOnSrc, changes.allModified.toSet)
        else
          invalidateIntermediate(previous, changes)

      val dups = invalidateDuplicates(previous)
      if (dups.nonEmpty)
        log.debug("Invalidated due to generated class file collision: " + dups)

      val inv = propagated ++ dups // ++ scopeInvalidations(previous.extAPI _, changes.modified, changes.names)
      val newlyInvalidated = inv -- recompiledSources
      log.debug("All newly invalidated sources after taking into account (previously) recompiled sources: " + newlyInvalidated)
      if (newlyInvalidated.isEmpty) Set.empty else inv
    }

  /** Invalidate all sources that claim to produce the same class file as another source file. */
  def invalidateDuplicates(merged: Relations): Set[File] =
    merged.srcProd.reverseMap.flatMap {
      case (classFile, sources) =>
        if (sources.size > 1) sources else Nil
    }.toSet

  /**
   * Returns the transitive source dependencies of `initial`.
   * Because the intermediate steps do not pull in cycles, this result includes the initial files
   * if they are part of a cycle containing newly invalidated files.
   */
  def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] =
    {
      val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc)
      val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc)
      log.debug("Final step, transitive dependencies:\n\t" + transitivePartial)
      transitivePartial
    }
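
`transitiveDeps` itself is defined further down in this file, beyond this excerpt; what the code above relies on is plain reachability over a dependency function, with the initial set included in the result. A runnable sketch of such a helper:

def transitiveClosure[T](initial: Set[T])(deps: T => Set[T]): Set[T] = {
  @annotation.tailrec
  def loop(frontier: Set[T], acc: Set[T]): Set[T] =
    if (frontier.isEmpty) acc
    else {
      val next = frontier.flatMap(deps) -- acc // only expand newly reached nodes
      loop(next, acc ++ next)
    }
  loop(initial, initial)
}

// Example: "a" is used by "b", which is used by "c".
val usedBy = Map("a" -> Set("b"), "b" -> Set("c")).withDefaultValue(Set.empty[String])
assert(transitiveClosure(Set("a"))(usedBy) == Set("a", "b", "c"))
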
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] =
|
||||
{
|
||||
val srcChanges = changes.internalSrc
|
||||
val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed
|
||||
val byProduct = changes.removedProducts.flatMap(previous.produced)
|
||||
val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary)
|
||||
val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations
|
||||
checkAbsolute(srcChanges.added.toList)
|
||||
log.debug(
|
||||
"\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed +
|
||||
"\nInvalidated products: " + changes.removedProducts +
|
||||
"\nExternal API changes: " + changes.external +
|
||||
"\nModified binary dependencies: " + changes.binaryDeps +
|
||||
"\nInitial directly invalidated sources: " + srcDirect +
|
||||
"\n\nSources indirectly invalidated by:" +
|
||||
"\n\tproduct: " + byProduct +
|
||||
"\n\tbinary dep: " + byBinaryDep +
|
||||
"\n\texternal source: " + byExtSrcDep
|
||||
)
|
||||
|
||||
srcDirect ++ byProduct ++ byBinaryDep ++ byExtSrcDep
|
||||
}
|
||||
private[this] def checkAbsolute(addedSources: List[File]): Unit =
|
||||
if (addedSources.nonEmpty) {
|
||||
addedSources.filterNot(_.isAbsolute) match {
|
||||
case first :: more =>
|
||||
val fileStrings = more match {
|
||||
case Nil => first.toString
|
||||
case x :: Nil => s"$first and $x"
|
||||
case _ => s"$first and ${more.size} others"
|
||||
}
|
||||
sys.error(s"The incremental compiler requires absolute sources, but some were relative: $fileStrings")
|
||||
case Nil =>
|
||||
}
|
||||
}
|
||||
|
||||
def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = {
|
||||
(externalAPIChanges.apiChanges.flatMap { externalAPIChange =>
|
||||
invalidateByExternal(relations, externalAPIChange)
|
||||
}).toSet
|
||||
}
|
||||
|
||||
/** Sources invalidated by `external` sources in other projects according to the previous `relations`. */
|
||||
protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File]
|
||||
|
||||
/** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */
|
||||
def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
invalidateSources(relations, changes)
|
||||
}
|
||||
/**
|
||||
* Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not
|
||||
* included in a cycle with newly invalidated sources.
|
||||
*/
|
||||
private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
val initial = changes.allModified.toSet
|
||||
val all = (changes.apiChanges flatMap { change =>
|
||||
invalidateSource(relations, change)
|
||||
}).toSet
|
||||
includeInitialCond(initial, all, allDeps(relations))
|
||||
}
|
||||
|
||||
protected def allDeps(relations: Relations): File => Set[File]
|
||||
|
||||
protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File]
|
||||
|
||||
/**
|
||||
* Conditionally include initial sources that are dependencies of newly invalidated sources.
|
||||
* * Initial sources included in this step can be because of a cycle, but not always.
|
||||
*/
|
||||
private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] =
|
||||
{
|
||||
val newInv = currentInvalidations -- initial
|
||||
log.debug("New invalidations:\n\t" + newInv)
|
||||
val transitiveOfNew = transitiveDeps(newInv)(allDeps)
|
||||
val initialDependsOnNew = transitiveOfNew & initial
|
||||
log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew)
|
||||
newInv ++ initialDependsOnNew
|
||||
}
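  // Illustrative sketch (not part of the original source): with hypothetical sources where
  // initial = {A.scala}, currentInvalidations = {A.scala, B.scala}, and A.scala and B.scala
  // form a dependency cycle, newInv = {B.scala} and the transitive dependencies of B.scala
  // reach A.scala, so A.scala is re-included alongside B.scala.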

  def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean =
    dependsOn =>
      {
        def inv(reason: String): Boolean = {
          log.debug("Invalidating " + dependsOn + ": " + reason)
          true
        }
        def entryModified(className: String, classpathEntry: File): Boolean =
          {
            val resolved = Locate.resolve(classpathEntry, className)
            if (resolved.getCanonicalPath != dependsOn.getCanonicalPath)
              inv("class " + className + " now provided by " + resolved.getCanonicalPath)
            else
              fileModified(dependsOn, resolved)
          }
        def fileModified(previousFile: File, currentFile: File): Boolean =
          {
            val previousStamp = previous.binary(previousFile)
            val currentStamp = current.binary(currentFile)
            if (equivS.equiv(previousStamp, currentStamp))
              false
            else
              inv("stamp changed from " + previousStamp + " to " + currentStamp)
          }
        def dependencyModified(file: File): Boolean =
          previous.className(file) match {
            case None => inv("no class name was mapped for it.")
            case Some(name) => entry(name) match {
              case None => inv("could not find class " + name + " on the classpath.")
              case Some(e) => entryModified(name, e)
            }
          }

        analysis(dependsOn).isEmpty &&
          (if (skipClasspathLookup) fileModified(dependsOn, dependsOn) else dependencyModified(dependsOn))

      }

  def currentExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): String => Source =
    className =>
      orEmpty(
        for {
          e <- entry(className)
          analysis <- forEntry(e)
          src <- analysis.relations.definesClass(className).headOption
        } yield analysis.apis.internalAPI(src)
      )

  def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource
  def orTrue(o: Option[Boolean]): Boolean = o getOrElse true

  protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] =
    {
      val xs = new collection.mutable.HashSet[T]
      def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to))
      def visit(from: T, to: T): Unit =
        if (!xs.contains(to)) {
          log.debug(s"Including $to by $from")
          xs += to
          all(to, dependencies(to))
        }
      log.debug("Initial set of included nodes: " + nodes)
      nodes foreach { start =>
        xs += start
        all(start, dependencies(start))
      }
      xs.toSet
    }
}
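// Illustrative sketch (not part of the original source): `transitiveDeps` computes the
// closure of the start nodes under a dependency function. A Map is a valid
// `T => Iterable[T]`, so with hypothetical nodes:
//
//   val deps = Map("A" -> Set("B"), "B" -> Set("C"), "C" -> Set.empty[String])
//   transitiveDeps(Set("A"))(deps) // == Set("A", "B", "C")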
@ -1,56 +0,0 @@
package sbt
package inc

import xsbti.api.Source
import xsbt.api.SameAPI
import java.io.File

private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) {

  // Package objects are fragile: if they inherit from an invalidated source, we get a "class file needed by package is missing" error.
  // This might be too conservative: we probably only need package objects for packages of invalidated sources.
  override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
    invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" }

  override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = {
    if (SameAPI(a, b))
      None
    else {
      val sourceApiChange = SourceAPIChange(src)
      Some(sourceApiChange)
    }
  }

  /** Invalidates sources that depend, directly or via public inheritance, on the externally modified source described by `externalAPIChange`. */
  override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
    val modified = externalAPIChange.modified
    // Propagate public inheritance dependencies transitively.
    // This differs from normal because we need the initial crossing from externals to sources in this project.
    val externalInheritedR = relations.publicInherited.external
    val byExternalInherited = externalInheritedR.reverse(modified)
    val internalInheritedR = relations.publicInherited.internal
    val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _)

    // Get the direct dependencies of all sources transitively invalidated by inheritance
    val directA = transitiveInherited flatMap relations.direct.internal.reverse
    // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive.
    val directB = relations.direct.external.reverse(modified)
    transitiveInherited ++ directA ++ directB
  }

  override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
    def reverse(r: Relations.Source) = r.internal.reverse _
    val directDeps: File => Set[File] = reverse(relations.direct)
    val publicInherited: File => Set[File] = reverse(relations.publicInherited)
    log.debug("Invalidating by inheritance (transitively)...")
    val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited)
    log.debug("Invalidated by transitive public inheritance: " + transitiveInherited)
    val direct = transitiveInherited flatMap directDeps
    log.debug("Invalidated by direct dependency: " + direct)
    transitiveInherited ++ direct
  }

  override protected def allDeps(relations: Relations): File => Set[File] =
    f => relations.direct.internal.reverse(f)

}
@ -1,86 +0,0 @@
package sbt
package inc

import xsbti.api.Source
import xsbt.api.SameAPI
import java.io.File

/**
 * Implementation of the incremental algorithm known as "name hashing". It differs from the default
 * implementation by pruning (filtering) member reference dependencies based on used and modified simple names.
 *
 * See MemberReferenceInvalidationStrategy for some more information.
 */
private final class IncrementalNameHashing(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) {

  private val memberRefInvalidator = new MemberRefInvalidator(log)

  // Package objects are fragile: if they inherit from an invalidated source, we get a "class file needed by package is missing" error.
  // This might be too conservative: we probably only need package objects for packages of invalidated sources.
  override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
    invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" }

  override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
    if (SameAPI(a, b))
      None
    else {
      val aNameHashes = a._internalOnly_nameHashes
      val bNameHashes = b._internalOnly_nameHashes
      val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes)
      val apiChange = NamesChange(src, modifiedNames)
      Some(apiChange)
    }
  }

  /** Invalidates sources that depend, by member reference or inheritance, on the externally modified source described by `externalAPIChange`. */
  override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
    val modified = externalAPIChange.modified
    val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange)
    log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.")
    // Propagate inheritance dependencies transitively.
    // This differs from normal because we need the initial crossing from externals to sources in this project.
    val externalInheritanceR = relations.inheritance.external
    val byExternalInheritance = externalInheritanceR.reverse(modified)
    log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).")
    val transitiveInheritance = byExternalInheritance flatMap { file =>
      invalidateByInheritance(relations, file)
    }
    val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal,
      relations.names, externalAPIChange)
    val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external,
      relations.names, externalAPIChange)

    // Get the member reference dependencies of all sources transitively invalidated by inheritance
    log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.")
    val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal
    // Get the sources that depend on externals by member reference.
    // This includes non-inheritance dependencies and is not transitive.
    log.debug(s"Getting sources that directly depend on (external) $modified.")
    val memberRefB = memberRefInvalidationExternal(modified)
    transitiveInheritance ++ memberRefA ++ memberRefB
  }

  private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = {
    val inheritanceDeps = relations.inheritance.internal.reverse _
    log.debug(s"Invalidating (transitively) by inheritance from $modified...")
    val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps)
    log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance)
    transitiveInheritance
  }

  override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
    log.debug(s"Invalidating ${change.modified}...")
    val transitiveInheritance = invalidateByInheritance(relations, change.modified)
    val reasonForInvalidation = memberRefInvalidator.invalidationReason(change)
    log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.")
    val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal,
      relations.names, change)
    val memberRef = transitiveInheritance flatMap memberRefInvalidation
    val all = transitiveInheritance ++ memberRef
    all
  }

  override protected def allDeps(relations: Relations): File => Set[File] =
    f => relations.memberRef.internal.reverse(f)

}
@ -1,95 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import java.io.File
import java.util.zip.{ ZipException, ZipFile }
import Function.const

object Locate {
  type DefinesClass = File => String => Boolean

  /**
   * Right(src) provides the value for the found class.
   * Left(true) means that the class was found, but it had no associated value.
   * Left(false) means that the class was not found.
   */
  def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] =
    {
      val gets = classpath.toStream.map(getValue(get))
      className => find(className, gets)
    }

  def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] =
    if (gets.isEmpty)
      Left(false)
    else
      gets.head(name) match {
        case Left(false) => find(name, gets.tail)
        case x => x
      }

  /**
   * Returns a function that searches the provided class path for
   * a class name and returns the entry that defines that class.
   */
  def entry(classpath: Seq[File], f: DefinesClass): String => Option[File] =
    {
      val entries = classpath.toStream.map { entry => (entry, f(entry)) }
      className => entries.collect { case (entry, defines) if defines(className) => entry }.headOption
    }
  def resolve(f: File, className: String): File = if (f.isDirectory) classFile(f, className) else f

  def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] =
    {
      val defClass = definesClass(entry)
      val getF = get(entry)
      className => if (defClass(className)) getF(className).toRight(true) else Left(false)
    }

  def definesClass(entry: File): String => Boolean =
    if (entry.isDirectory)
      directoryDefinesClass(entry)
    else if (entry.exists && classpath.ClasspathUtilities.isArchive(entry, contentFallback = true))
      jarDefinesClass(entry)
    else
      const(false)

  def jarDefinesClass(entry: File): String => Boolean =
    {
      import collection.JavaConversions._
      val jar = try { new ZipFile(entry, ZipFile.OPEN_READ) } catch {
        // ZipException doesn't include the file name :(
        case e: ZipException => throw new RuntimeException("Error opening zip file: " + entry.getName, e)
      }
      val entries = try { jar.entries.map(e => toClassName(e.getName)).toSet } finally { jar.close() }
      entries.contains _
    }

  def toClassName(entry: String): String =
    entry.stripSuffix(ClassExt).replace('/', '.')

  val ClassExt = ".class"

  def directoryDefinesClass(entry: File): String => Boolean =
    className => classFile(entry, className).isFile

  def classFile(baseDir: File, className: String): File =
    {
      val (pkg, name) = components(className)
      val dir = subDirectory(baseDir, pkg)
      new File(dir, name + ClassExt)
    }

  def subDirectory(base: File, parts: Seq[String]): File =
    (base /: parts)((b, p) => new File(b, p))

  def components(className: String): (Seq[String], String) =
    {
      assume(!className.isEmpty)
      val parts = className.split("\\.")
      if (parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last)
    }
}
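// Illustrative sketch (not part of the original source): how fully qualified class names
// map to class files for a hypothetical directory classpath entry.
//
//   Locate.components("a.b.C")                 // == (Seq("a", "b"), "C")
//   Locate.classFile(new File("out"), "a.b.C") // == new File("out/a/b/C.class")
//   Locate.resolve(new File("out"), "a.b.C")   // directory entry => the class file above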
@ -1,123 +0,0 @@
package sbt.inc

import sbt.Relation
import java.io.File
import sbt.Logger
import xsbt.api.APIUtil

/**
 * Implements various strategies for invalidating dependencies introduced by member reference.
 *
 * The strategy is represented as a function T => Set[File], where T is a source file that other
 * source files depend on. When you apply that function to a given element `src`, you get the set of
 * files that depend on `src` by member reference and should be invalidated due to the api change
 * that was passed to the method constructing that function. There are two questions that arise:
 *
 * 1. Why is the signature T => Set[File] and not T => Set[T] or File => Set[File]?
 * 2. Why would we apply that function to any `src` other than the one that got modified,
 *    where the modification is described by the APIChange?
 *
 * Let's address the second question with the following example of source code structure:
 *
 * // A.scala
 * class A
 *
 * // B.scala
 * class B extends A
 *
 * // C.scala
 * class C { def foo(a: A) = ??? }
 *
 * // D.scala
 * class D { def bar(b: B) = ??? }
 *
 * The member reference dependencies on A.scala are B.scala and C.scala. When the api of A changes,
 * we would consider B and C for invalidation. However, B is also a dependency by inheritance,
 * so we always invalidate it. The api change to A is relevant when B is considered (because
 * of how inheritance works), so we invalidate B by inheritance and then we would like to
 * invalidate the member reference dependencies of B as well. In other words, we have a function
 * because we want to apply it (with the same api change in mind) to all source files invalidated
 * by inheritance from the originally modified file.
 *
 * The first question is a bit more straightforward to answer. We always invalidate internal
 * source files (in a given project), which are represented as Files, but they might depend either on
 * internal source files (then T = File) or on an external class name (then T = String).
 *
 * The specific invalidation strategy is determined based on the APIChange that describes a change
 * to the api of a single source file.
 *
 * For example, if we get APIChangeDueToMacroDefinition then we invalidate all member reference
 * dependencies unconditionally. On the other hand, if the api change is due to modified name hashes
 * of regular members then we'll invalidate only the sources that use those names.
 */
private[inc] class MemberRefInvalidator(log: Logger) {
  def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): T => Set[File] = apiChange match {
    case _: APIChangeDueToMacroDefinition[_] =>
      new InvalidateUnconditionally(memberRef)
    case NamesChange(_, modifiedNames) if modifiedNames.implicitNames.nonEmpty =>
      new InvalidateUnconditionally(memberRef)
    case NamesChange(modifiedSrcFile, modifiedNames) =>
      new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames)
    case _: SourceAPIChange[_] =>
      sys.error(wrongAPIChangeMsg)
  }

  def invalidationReason(apiChange: APIChange[_]): String = apiChange match {
    case APIChangeDueToMacroDefinition(modifiedSrcFile) =>
      s"The $modifiedSrcFile source file declares a macro."
    case NamesChange(modifiedSrcFile, modifiedNames) if modifiedNames.implicitNames.nonEmpty =>
      s"""|The $modifiedSrcFile source file has the following implicit definitions changed:
          |\t${modifiedNames.implicitNames.mkString(", ")}.""".stripMargin
    case NamesChange(modifiedSrcFile, modifiedNames) =>
      s"""|The $modifiedSrcFile source file has the following regular definitions changed:
          |\t${modifiedNames.regularNames.mkString(", ")}.""".stripMargin
    case _: SourceAPIChange[_] =>
      sys.error(wrongAPIChangeMsg)
  }

  private val wrongAPIChangeMsg =
    "MemberReferenceInvalidator.get should be called when name hashing is enabled " +
      "and in that case we shouldn't have SourceAPIChange as an api change."

  private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) {
    def apply(from: T): Set[File] = {
      val invalidated = memberRef.reverse(from)
      if (invalidated.nonEmpty)
        log.debug(s"The following member ref dependencies of $from are invalidated:\n" +
          formatInvalidated(invalidated))
      invalidated
    }
    private def formatInvalidated(invalidated: Set[File]): String = {
      val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath)
      sortedFiles.map(file => "\t" + file).mkString("\n")
    }
  }

  private class NameHashFilteredInvalidator[T](
      usedNames: Relation[File, String],
      memberRef: Relation[File, T],
      modifiedNames: Set[String]) extends (T => Set[File]) {

    def apply(to: T): Set[File] = {
      val dependent = memberRef.reverse(to)
      filteredDependencies(dependent)
    }
    private def filteredDependencies(dependent: Set[File]): Set[File] = {
      dependent.filter {
        case from if APIUtil.isScalaSourceName(from.getName) =>
          val usedNamesInDependent = usedNames.forward(from)
          val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent
          if (modifiedAndUsedNames.isEmpty) {
            log.debug(s"None of the modified names appears in $from. This dependency is not being considered for invalidation.")
            false
          } else {
            log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames")
            true
          }
        case from =>
          log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from")
          true
      }
    }
  }
}
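// Illustrative sketch (not part of the original source): using the A/B/C/D example from
// the scaladoc above, suppose the api change to A.scala modified only the regular name "foo",
// and that C.scala and D.scala are among the member reference dependents being filtered:
//
//   modifiedNames = Set("foo")
//   usedNames.forward(C.scala) contains "foo" => C.scala is invalidated
//   usedNames.forward(D.scala) == Set("bar")  => D.scala is spared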
@ -1,752 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import java.io.File
import Relations.Source
import Relations.SourceDependencies
import xsbti.api.{ Source => APISource }
import xsbti.DependencyContext
import xsbti.DependencyContext._

/**
 * Provides mappings between source files, generated classes (products), and binaries.
 * Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project),
 * external: a dependency on a source in another compilation group (tracked as the name of the class),
 * binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group,
 * inherited: a dependency that resulted from a public template inheriting,
 * direct: any type of dependency, including inheritance.
 */
trait Relations {
  /** All sources _with at least one product_. */
  def allSources: collection.Set[File]

  /** All products associated with sources. */
  def allProducts: collection.Set[File]

  /** All files that are recorded as a binary dependency of a source file.*/
  def allBinaryDeps: collection.Set[File]

  /** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/
  def allInternalSrcDeps: collection.Set[File]

  /** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/
  def allExternalDeps: collection.Set[String]

  /** Fully qualified names of classes generated from source file `src`. */
  def classNames(src: File): Set[String]

  /** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */
  def definesClass(name: String): Set[File]

  /** The classes that were generated for source file `src`. */
  def products(src: File): Set[File]
  /** The source files that generated class file `prod`. This is typically a set containing a single file. */
  def produced(prod: File): Set[File]

  /** The binary dependencies for the source file `src`. */
  def binaryDeps(src: File): Set[File]
  /** The source files that depend on binary file `dep`. */
  def usesBinary(dep: File): Set[File]

  /** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */
  def internalSrcDeps(src: File): Set[File]
  /** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */
  def usesInternalSrc(dep: File): Set[File]

  /** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */
  def externalDeps(src: File): Set[String]
  /** Internal source files that depend on external source `dep`. This includes both direct and inherited dependencies. */
  def usesExternal(dep: String): Set[File]

  private[inc] def usedNames(src: File): Set[String]

  /** Records internal source file `src` as generating class file `prod` with top-level class `name`. */
  @deprecated("Record all products using `addProducts`.", "0.13.8")
  def addProduct(src: File, prod: File, name: String): Relations

  /**
   * Records internal source file `src` as depending on `dependsOn`. If this dependency is introduced
   * by an inheritance relation, `inherited` is set to true. Note that in this case, the dependency is
   * also registered as a direct dependency.
   */
  @deprecated("Record all external dependencies using `addExternalDeps`.", "0.13.8")
  def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations

  /** Records internal source file `src` as depending on binary dependency `dependsOn`.*/
  @deprecated("Record all binary dependencies using `addBinaryDeps`.", "0.13.8")
  def addBinaryDep(src: File, dependsOn: File): Relations

  /**
   * Records internal source file `src` as having direct dependencies on internal source files `directDependsOn`
   * and inheritance dependencies on `inheritedDependsOn`. Everything in `inheritedDependsOn` must be included in `directDependsOn`;
   * this method does not automatically record direct dependencies like `addExternalDep` does.
   */
  @deprecated("Record all internal dependencies using `addInternalSrcDeps(File, Iterable[InternalDependencies])`.", "0.13.8")
  def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations

  /**
   * Records that the file `src` generates products `products`, has internal dependencies `internalDeps`,
   * has external dependencies `externalDeps` and binary dependencies `binaryDeps`.
   */
  def addSource(src: File,
    products: Iterable[(File, String)],
    internalDeps: Iterable[InternalDependency],
    externalDeps: Iterable[ExternalDependency],
    binaryDeps: Iterable[(File, String, Stamp)]): Relations =
    addProducts(src, products).addInternalSrcDeps(src, internalDeps).addExternalDeps(src, externalDeps).addBinaryDeps(src, binaryDeps)

  /**
   * Records all the products `prods` generated by `src`.
   */
  private[inc] def addProducts(src: File, prods: Iterable[(File, String)]): Relations

  /**
   * Records all the internal source dependencies `deps` of `src`.
   */
  private[inc] def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]): Relations

  /**
   * Records all the external dependencies `deps` of `src`.
   */
  private[inc] def addExternalDeps(src: File, deps: Iterable[ExternalDependency]): Relations

  /**
   * Records all the binary dependencies `deps` of `src`.
   */
  private[inc] def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]): Relations

  private[inc] def addUsedName(src: File, name: String): Relations

  /** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */
  def ++(o: Relations): Relations

  /** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */
  def --(sources: Iterable[File]): Relations

  @deprecated("OK to remove in 0.14", "0.13.1")
  def groupBy[K](f: (File => K)): Map[K, Relations]

  /** The relation between internal sources and generated class files. */
  def srcProd: Relation[File, File]

  /** The dependency relation between internal sources and binaries. */
  def binaryDep: Relation[File, File]

  /** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/
  def internalSrcDep: Relation[File, File]

  /** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/
  def externalDep: Relation[File, String]

  /** All the internal dependencies */
  private[inc] def internalDependencies: InternalDependencies

  /** All the external dependencies */
  private[inc] def externalDependencies: ExternalDependencies

  /**
   * The source dependency relation between source files introduced by member reference.
   *
   * NOTE: All inheritance dependencies are included in this relation because in order to
   * inherit from a member you have to refer to it. If you check the documentation of `inheritance`
   * you'll see that there's a small oddity related to traits being the first parent of a
   * class/trait that results in additional parents being introduced due to normalization.
   * This relation properly accounts for that so the invariant that `memberRef` is a superset
   * of `inheritance` is preserved.
   */
  private[inc] def memberRef: SourceDependencies

  /**
   * The source dependency relation between source files introduced by inheritance.
   * The dependency by inheritance is introduced when a template (class or trait) mentions
   * a given type in a parent position.
   *
   * NOTE: Due to an oddity in how Scala's type checker works, there's one unexpected dependency
   * on a class being introduced. An example best illustrates the problem. Let's consider
   * the following structure:
   *
   * trait A extends B
   * trait B extends C
   * trait C extends D
   * class D
   *
   * We are interested in dependencies by inheritance of `A`. One would expect it to be just `B`,
   * but the answer is `B` and `D`. The reason is that Scala's type checker performs a certain
   * normalization so the first parent of a type is a class. Therefore the example above is normalized
   * to the following form:
   *
   * trait A extends D with B
   * trait B extends D with C
   * trait C extends D
   * class D
   *
   * Therefore if you inherit from a trait you'll get an additional dependency on a class that is
   * resolved transitively. You should not rely on this behavior, though.
   *
   */
  private[inc] def inheritance: SourceDependencies

  /** The dependency relations between sources. These include both direct and inherited dependencies.*/
  def direct: Source

  /** The inheritance dependency relations between sources.*/
  def publicInherited: Source

  /** The relation between a source file and the fully qualified names of classes generated from it.*/
  def classes: Relation[File, String]

  /**
   * Flag which indicates whether the given Relations object supports the operations needed by the name hashing algorithm.
   *
   * At the moment the list includes the following operations:
   *
   * - memberRef: SourceDependencies
   * - inheritance: SourceDependencies
   *
   * The `memberRef` and `inheritance` relations implement the new style of source dependency tracking. When this flag is
   * enabled, access to the `direct` and `publicInherited` relations is illegal and will cause a runtime
   * exception to be thrown. That is done as an optimization that prevents storing two overlapping sets of
   * dependencies.
   *
   * Conversely, when the `nameHashing` flag is disabled, access to the `memberRef` and `inheritance`
   * relations is illegal and will cause a runtime exception to be thrown.
   */
  private[inc] def nameHashing: Boolean
  /**
   * Relation between source files and the _unqualified_ term and type names used in a given source file.
   */
  private[inc] def names: Relation[File, String]

  /**
   * List of all the pairs (header, relation) that sbt knows of.
   * Used by TextAnalysisFormat to persist relations.
   * This cannot be stored as a Map because the order is important.
   */
  private[inc] def allRelations: List[(String, Relation[File, _])]
}
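// Illustrative sketch (not part of the original source): the `nameHashing` flag makes the
// two dependency representations mutually exclusive.
//
//   val rs = Relations.empty(nameHashing = true)
//   rs.memberRef // returns the (empty) member reference relation
//   rs.direct    // throws UnsupportedOperationException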

object Relations {

  /**
   * Represents all the relations that sbt knows of, along with a way to recreate each
   * of their elements from their string representation.
   */
  private[inc] val existingRelations = {
    val string2File: String => File = new File(_)
    List(
      ("products", string2File),
      ("binary dependencies", string2File),
      ("direct source dependencies", string2File),
      ("direct external dependencies", identity[String] _),
      ("public inherited source dependencies", string2File),
      ("public inherited external dependencies", identity[String] _),
      ("member reference internal dependencies", string2File),
      ("member reference external dependencies", identity[String] _),
      ("inheritance internal dependencies", string2File),
      ("inheritance external dependencies", identity[String] _),
      ("class names", identity[String] _),
      ("used names", identity[String] _))
  }
  /**
   * Reconstructs a Relations from a list of Relation.
   * The order in which the relations are read matters and is defined by `existingRelations`.
   */
  def construct(nameHashing: Boolean, relations: List[Relation[_, _]]) =
    relations match {
      case p :: bin :: di :: de :: pii :: pie :: mri :: mre :: ii :: ie :: cn :: un :: Nil =>
        val srcProd = p.asInstanceOf[Relation[File, File]]
        val binaryDep = bin.asInstanceOf[Relation[File, File]]
        val directSrcDeps = makeSource(di.asInstanceOf[Relation[File, File]], de.asInstanceOf[Relation[File, String]])
        val publicInheritedSrcDeps = makeSource(pii.asInstanceOf[Relation[File, File]], pie.asInstanceOf[Relation[File, String]])
        val memberRefSrcDeps = makeSourceDependencies(mri.asInstanceOf[Relation[File, File]], mre.asInstanceOf[Relation[File, String]])
        val inheritanceSrcDeps = makeSourceDependencies(ii.asInstanceOf[Relation[File, File]], ie.asInstanceOf[Relation[File, String]])
        val classes = cn.asInstanceOf[Relation[File, String]]
        val names = un.asInstanceOf[Relation[File, String]]

        // we don't check for emptiness of the publicInherited/inheritance relations because
        // we assume the invariant that they are subsets of direct/memberRef holds
        assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), "When name hashing is disabled the `memberRef` relation should be empty.")
        assert(!nameHashing || (directSrcDeps == emptySource), "When name hashing is enabled the `direct` relation should be empty.")

        if (nameHashing) {
          val internal = InternalDependencies(Map(DependencyByMemberRef -> mri.asInstanceOf[Relation[File, File]], DependencyByInheritance -> ii.asInstanceOf[Relation[File, File]]))
          val external = ExternalDependencies(Map(DependencyByMemberRef -> mre.asInstanceOf[Relation[File, String]], DependencyByInheritance -> ie.asInstanceOf[Relation[File, String]]))
          Relations.make(srcProd, binaryDep, internal, external, classes, names)
        } else {
          assert(names.all.isEmpty, s"When `nameHashing` is disabled the `names` relation should be empty: $names")
          Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes)
        }
      case _ => throw new java.io.IOException(s"Expected to read ${existingRelations.length} relations but read ${relations.length}.")
    }

  /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
  final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) {
    def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external)
    @deprecated("Use addExternal(File, Iterable[String])", "0.13.8")
    def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn))
    def addExternal(source: File, dependsOn: Iterable[String]): Source = new Source(internal, external + (source, dependsOn))
    /** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
    def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources)
    def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external)

    @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
    def groupBySource[K](f: File => K): Map[K, Source] = {

      val i = internal.groupBy { case (a, b) => f(a) }
      val e = external.groupBy { case (a, b) => f(a) }
      val pairs = for (k <- i.keySet ++ e.keySet) yield (k, new Source(getOrEmpty(i, k), getOrEmpty(e, k)))
      pairs.toMap
    }

    override def equals(other: Any) = other match {
      case o: Source => internal == o.internal && external == o.external
      case _ => false
    }

    override def hashCode = (internal, external).hashCode
  }

  /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
  private[inc] final class SourceDependencies(val internal: Relation[File, File], val external: Relation[File, String]) {
    def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external)
    @deprecated("Use addExternal(File, Iterable[String])", "0.13.8")
    def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn))
    def addExternal(source: File, dependsOn: Iterable[String]): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn))
    /** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
    def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources)
    def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external)

    override def equals(other: Any) = other match {
      case o: SourceDependencies => internal == o.internal && external == o.external
      case _ => false
    }

    override def hashCode = (internal, external).hashCode
  }

  private[sbt] def getOrEmpty[A, B, K](m: Map[K, Relation[A, B]], k: K): Relation[A, B] = m.getOrElse(k, Relation.empty)

  private[this] lazy val e = Relation.empty[File, File]
  private[this] lazy val estr = Relation.empty[File, String]
  private[this] lazy val es = new Source(e, estr)

  def emptySource: Source = es
  private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr)
  def empty: Relations = empty(nameHashing = IncOptions.nameHashingDefault)
  private[inc] def empty(nameHashing: Boolean): Relations =
    if (nameHashing)
      new MRelationsNameHashing(e, e, InternalDependencies.empty, ExternalDependencies.empty, estr, estr)
    else
      new MRelationsDefaultImpl(e, e, es, es, estr)

  def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations =
    new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes)

  private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File],
    internalDependencies: InternalDependencies, externalDependencies: ExternalDependencies,
    classes: Relation[File, String], names: Relation[File, String]): Relations =
    new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes, names)
  def makeSource(internal: Relation[File, File], external: Relation[File, String]): Source = new Source(internal, external)
  private[inc] def makeSourceDependencies(internal: Relation[File, File], external: Relation[File, String]): SourceDependencies = new SourceDependencies(internal, external)
}

private object DependencyCollection {
  /**
   * Combine `m1` and `m2` such that the result contains all the dependencies they represent.
   * `m1` is expected to be smaller than `m2`.
   */
  def joinMaps[T](m1: Map[DependencyContext, Relation[File, T]], m2: Map[DependencyContext, Relation[File, T]]) =
    m1.foldLeft(m2) { case (tmp, (key, values)) => tmp.updated(key, tmp.getOrElse(key, Relation.empty) ++ values) }
}
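// Illustrative sketch (not part of the original source): `joinMaps` unions the relations
// per dependency context. With hypothetical files a, b and c:
//
//   val m1 = Map(DependencyByMemberRef -> (Relation.empty[File, File] + (a, b)))
//   val m2 = Map(DependencyByMemberRef -> (Relation.empty[File, File] + (a, c)))
//   DependencyCollection.joinMaps(m1, m2)
//   // == Map(DependencyByMemberRef -> relation containing both a -> b and a -> c)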

private object InternalDependencies {
  /**
   * Constructs an empty `InternalDependencies`.
   */
  def empty = InternalDependencies(Map.empty)
}

private case class InternalDependencies(dependencies: Map[DependencyContext, Relation[File, File]]) {
  /**
   * Adds `dep` to the dependencies.
   */
  def +(dep: InternalDependency): InternalDependencies =
    InternalDependencies(dependencies.updated(dep.context, dependencies.getOrElse(dep.context, Relation.empty) + (dep.sourceFile, dep.targetFile)))

  /**
   * Adds all `deps` to the dependencies.
   */
  def ++(deps: Iterable[InternalDependency]): InternalDependencies = deps.foldLeft(this)(_ + _)
  def ++(deps: InternalDependencies): InternalDependencies = InternalDependencies(DependencyCollection.joinMaps(dependencies, deps.dependencies))

  /**
   * Removes from the dependencies all dependencies that originate from `sources`.
   */
  def --(sources: Iterable[File]): InternalDependencies = InternalDependencies(dependencies.mapValues(_ -- sources).filter(_._2.size > 0))
}

private object ExternalDependencies {
  /**
   * Constructs an empty `ExternalDependencies`.
   */
  def empty = ExternalDependencies(Map.empty)
}

private case class ExternalDependencies(dependencies: Map[DependencyContext, Relation[File, String]]) {
  /**
   * Adds `dep` to the dependencies.
   */
  def +(dep: ExternalDependency): ExternalDependencies = ExternalDependencies(dependencies.updated(dep.context, dependencies.getOrElse(dep.context, Relation.empty) + (dep.sourceFile, dep.targetClassName)))

  /**
   * Adds all `deps` to the dependencies.
   */
  def ++(deps: Iterable[ExternalDependency]): ExternalDependencies = deps.foldLeft(this)(_ + _)
  def ++(deps: ExternalDependencies): ExternalDependencies = ExternalDependencies(DependencyCollection.joinMaps(dependencies, deps.dependencies))

  /**
   * Removes from the dependencies all dependencies that originate from `sources`.
   */
  def --(sources: Iterable[File]): ExternalDependencies = ExternalDependencies(dependencies.mapValues(_ -- sources).filter(_._2.size > 0))
}

/**
 * An abstract class that contains the common functionality inherited by the two implementations of the Relations trait.
 *
 * A note on why we have two different implementations of the Relations trait: this is needed for the time
 * being while we are slowly migrating to the new invalidation algorithm called "name hashing", which requires
 * some subtle changes to dependency tracking. For some time we plan to keep both algorithms side-by-side
 * and have a runtime switch which allows picking one. So we need the logic for both the old and the new dependency
 * tracking to be available. That's exactly what the two subclasses of MRelationsCommon implement. Once name
 * hashing is proven to be stable and reliable we'll phase out the old algorithm and the old dependency tracking
 * logic.
 *
 * `srcProd` is a relation between a source file and a product: (source, product).
 * Note that some source files may not have a product and will not be included in this relation.
 *
 * `binaryDeps` is a relation between a source file and a binary dependency: (source, binary dependency).
 * This only includes dependencies on classes and jars that do not have a corresponding source/API to track instead.
 * A class or jar with a corresponding source should only be tracked in one of the source dependency relations.
 *
 * `classes` is a relation between a source file and its generated fully-qualified class names.
 */
private abstract class MRelationsCommon(val srcProd: Relation[File, File], val binaryDep: Relation[File, File],
    val classes: Relation[File, String]) extends Relations {
  def allSources: collection.Set[File] = srcProd._1s

  def allProducts: collection.Set[File] = srcProd._2s
  def allBinaryDeps: collection.Set[File] = binaryDep._2s
  def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s
  def allExternalDeps: collection.Set[String] = externalDep._2s

  def classNames(src: File): Set[String] = classes.forward(src)
  def definesClass(name: String): Set[File] = classes.reverse(name)

  def products(src: File): Set[File] = srcProd.forward(src)
  def produced(prod: File): Set[File] = srcProd.reverse(prod)

  def binaryDeps(src: File): Set[File] = binaryDep.forward(src)
  def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep)

  def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src)
  def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep)

  def externalDeps(src: File): Set[String] = externalDep.forward(src)
  def usesExternal(dep: String): Set[File] = externalDep.reverse(dep)

  def usedNames(src: File): Set[String] = names.forward(src)

  /** Makes large Relations a little more readable. */
  private val userDir = sys.props("user.dir").stripSuffix("/") + "/"
  private def nocwd(s: String) = s stripPrefix userDir
  private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n"
  protected def relation_s(r: Relation[_, _]) = (
    if (r.forwardMap.isEmpty) "Relation [ ]"
    else (r.all.toSeq map line_s sorted) mkString ("Relation [\n", "", "]")
  )
}

/**
 * This class implements the Relations trait with support for tracking the `direct` and `publicInherited` source
 * dependencies. It therefore preserves the "old" (from sbt 0.13.0) dependency tracking logic and is
 * the default implementation.
 *
 * `direct` defines relations for dependencies between internal and external source dependencies. It includes all types of
 * dependencies, including inheritance.
 *
 * `publicInherited` defines relations for internal and external source dependencies, only including dependencies
 * introduced by inheritance.
 *
 */
private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Relation[File, File],
    // direct should include everything in inherited
    val direct: Source, val publicInherited: Source,
    classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
  def internalSrcDep: Relation[File, File] = direct.internal
  def externalDep: Relation[File, String] = direct.external

  def nameHashing: Boolean = false

  def memberRef: SourceDependencies =
    throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
      "when `nameHashing` flag is disabled.")
  def inheritance: SourceDependencies =
    throw new UnsupportedOperationException("The `inheritance` source dependencies relation is not supported " +
      "when `nameHashing` flag is disabled.")

  def addProduct(src: File, prod: File, name: String): Relations =
    new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct,
      publicInherited = publicInherited, classes + (src, name))

  def addProducts(src: File, products: Iterable[(File, String)]): Relations =
    new MRelationsDefaultImpl(srcProd ++ products.map(p => (src, p._1)), binaryDep, direct = direct,
      publicInherited = publicInherited, classes ++ products.map(p => (src, p._2)))

  def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]) = {
    val depsByInheritance = deps.collect { case InternalDependency(_, targetFile, DependencyByInheritance) => targetFile }

    val newD = direct.addInternal(src, deps.map(_.targetFile))
    val newI = publicInherited.addInternal(src, depsByInheritance)

    new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
  }

  def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations = {
    val directDeps = directDependsOn.map(d => InternalDependency(src, d, DependencyByMemberRef))
    val inheritedDeps = inheritedDependsOn.map(d => InternalDependency(src, d, DependencyByInheritance))
    addInternalSrcDeps(src, directDeps ++ inheritedDeps)
  }

  def addExternalDeps(src: File, deps: Iterable[ExternalDependency]) = {
    val depsByInheritance = deps.collect { case ExternalDependency(_, targetClassName, _, DependencyByInheritance) => targetClassName }

    val newD = direct.addExternal(src, deps.map(_.targetClassName))
    val newI = publicInherited.addExternal(src, depsByInheritance)

    new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
  }

  def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = {
    val newI = if (inherited) publicInherited.addExternal(src, dependsOn :: Nil) else publicInherited
    val newD = direct.addExternal(src, dependsOn :: Nil)
    new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
  }

  def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]) =
    new MRelationsDefaultImpl(srcProd, binaryDep + (src, deps.map(_._1)), direct, publicInherited, classes)

  def addBinaryDep(src: File, dependsOn: File): Relations =
    new MRelationsDefaultImpl(srcProd, binaryDep + (src, dependsOn), direct = direct,
      publicInherited = publicInherited, classes)

  def names: Relation[File, String] =
    throw new UnsupportedOperationException("Tracking of used names is not supported " +
      "when `nameHashing` is disabled.")

  def addUsedName(src: File, name: String): Relations =
    throw new UnsupportedOperationException("Tracking of used names is not supported " +
      "when `nameHashing` is disabled.")

  override def externalDependencies: ExternalDependencies = ExternalDependencies(Map(DependencyByMemberRef -> direct.external, DependencyByInheritance -> publicInherited.external))
  override def internalDependencies: InternalDependencies = InternalDependencies(Map(DependencyByMemberRef -> direct.internal, DependencyByInheritance -> publicInherited.internal))

  def ++(o: Relations): Relations = {
    if (nameHashing != o.nameHashing)
      throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
        "with different values of `nameHashing` flag.")
    new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct,
      publicInherited ++ o.publicInherited, classes ++ o.classes)
  }
  def --(sources: Iterable[File]) =
    new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources,
      publicInherited = publicInherited -- sources, classes -- sources)

  @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
  def groupBy[K](f: File => K): Map[K, Relations] =
    {
      type MapRel[T] = Map[K, Relation[File, T]]
      def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source],
        inherited: Map[K, Source], classesMap: MapRel[String],
        namesMap: MapRel[String]): Map[K, Relations] =
        {
          def kRelations(k: K): Relations = {
            def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k)
            def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource)
            def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies =
              m.getOrElse(k, Relations.emptySourceDependencies)
            new MRelationsDefaultImpl(get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited),
              get(classesMap))
          }
          val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList
          Map(keys.map((k: K) => (k, kRelations(k))): _*)
        }

      def f1[B](item: (File, B)): K = f(item._1)

      outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f),
        publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1))
    }

  override def equals(other: Any) = other match {
    case o: MRelationsDefaultImpl =>
      srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct &&
        publicInherited == o.publicInherited && classes == o.classes
    case _ => false
  }

  def allRelations = {
    val rels = List(
      srcProd,
      binaryDep,
      direct.internal,
      direct.external,
      publicInherited.internal,
      publicInherited.external,
      Relations.emptySourceDependencies.internal, // Default implementation doesn't provide memberRef source deps
      Relations.emptySourceDependencies.external, // Default implementation doesn't provide memberRef source deps
      Relations.emptySourceDependencies.internal, // Default implementation doesn't provide inheritance source deps
      Relations.emptySourceDependencies.external, // Default implementation doesn't provide inheritance source deps
      classes,
      Relation.empty[File, String]) // Default implementation doesn't provide used names relation
    Relations.existingRelations map (_._1) zip rels
  }

  override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode

  override def toString = (
    """
    |Relations:
    | products: %s
    | bin deps: %s
    | src deps: %s
    | ext deps: %s
    | class names: %s
    """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s: _*)
  )
}

/**
 * This class implements the Relations trait with support for tracking the `memberRef` and `inheritance` source
 * dependencies. It therefore implements the new (compared to sbt 0.13.0) dependency tracking logic
 * needed by the name hashing invalidation algorithm.
 */
private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Relation[File, File],
    val internalDependencies: InternalDependencies,
    val externalDependencies: ExternalDependencies,
    classes: Relation[File, String],
    val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
  def direct: Source =
    throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " +
      "when `nameHashing` flag is enabled.")
  def publicInherited: Source =
    throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " +
      "when `nameHashing` flag is enabled.")

  val nameHashing: Boolean = true

  def internalSrcDep: Relation[File, File] = memberRef.internal
  def externalDep: Relation[File, String] = memberRef.external

  def addProduct(src: File, prod: File, name: String): Relations =
    new MRelationsNameHashing(srcProd + (src, prod), binaryDep, internalDependencies = internalDependencies,
      externalDependencies = externalDependencies, classes + (src, name), names = names)

  def addProducts(src: File, products: Iterable[(File, String)]): Relations =
    new MRelationsNameHashing(srcProd ++ products.map(p => (src, p._1)), binaryDep,
      internalDependencies = internalDependencies, externalDependencies = externalDependencies,
      classes ++ products.map(p => (src, p._2)), names = names)

  def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]) =
    new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies ++ deps,
      externalDependencies = externalDependencies, classes, names)

  def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = {
    val memberRefDeps = dependsOn.map(InternalDependency(src, _, DependencyByMemberRef))
    val inheritedDeps = inherited.map(InternalDependency(src, _, DependencyByInheritance))
    addInternalSrcDeps(src, memberRefDeps ++ inheritedDeps)
  }

  def addExternalDeps(src: File, deps: Iterable[ExternalDependency]) =
    new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies,
      externalDependencies = externalDependencies ++ deps, classes, names)

  def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations =
    throw new UnsupportedOperationException("This method is not supported when `nameHashing` flag is enabled.")

  def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]) =
    new MRelationsNameHashing(srcProd, binaryDep + (src, deps.map(_._1)), internalDependencies = internalDependencies,
      externalDependencies = externalDependencies, classes, names)
|
||||
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), internalDependencies = internalDependencies,
|
||||
externalDependencies = externalDependencies, classes, names = names)
|
||||
|
||||
def addUsedName(src: File, name: String): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies,
|
||||
externalDependencies = externalDependencies, classes, names = names + (src, name))
|
||||
|
||||
override def inheritance: SourceDependencies =
|
||||
new SourceDependencies(internalDependencies.dependencies.getOrElse(DependencyByInheritance, Relation.empty), externalDependencies.dependencies.getOrElse(DependencyByInheritance, Relation.empty))
|
||||
override def memberRef: SourceDependencies =
|
||||
new SourceDependencies(internalDependencies.dependencies.getOrElse(DependencyByMemberRef, Relation.empty), externalDependencies.dependencies.getOrElse(DependencyByMemberRef, Relation.empty))
|
||||
|
||||
def ++(o: Relations): Relations = {
|
||||
if (!o.nameHashing)
|
||||
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
|
||||
"with different values of `nameHashing` flag.")
|
||||
new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep,
|
||||
internalDependencies = internalDependencies ++ o.internalDependencies, externalDependencies = externalDependencies ++ o.externalDependencies,
|
||||
classes ++ o.classes, names = names ++ o.names)
|
||||
}
|
||||
def --(sources: Iterable[File]) =
|
||||
new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources,
|
||||
internalDependencies = internalDependencies -- sources, externalDependencies = externalDependencies -- sources, classes -- sources,
|
||||
names = names -- sources)
|
||||
|
||||
def groupBy[K](f: File => K): Map[K, Relations] = {
|
||||
throw new UnsupportedOperationException("Merging of Analyses that have" +
|
||||
"`relations.nameHashing` set to `true` is not supported.")
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: MRelationsNameHashing =>
|
||||
srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef &&
|
||||
inheritance == o.inheritance && classes == o.classes
|
||||
case _ => false
|
||||
}
|
||||
|
||||
def allRelations = {
|
||||
val rels = List(
|
||||
srcProd,
|
||||
binaryDep,
|
||||
Relations.emptySource.internal, // NameHashing doesn't provide direct dependencies
|
||||
Relations.emptySource.external, // NameHashing doesn't provide direct dependencies
|
||||
Relations.emptySource.internal, // NameHashing doesn't provide public inherited dependencies
|
||||
Relations.emptySource.external, // NameHashing doesn't provide public inherited dependencies
|
||||
memberRef.internal,
|
||||
memberRef.external,
|
||||
inheritance.internal,
|
||||
inheritance.external,
|
||||
classes,
|
||||
names)
|
||||
Relations.existingRelations map (_._1) zip rels
|
||||
}
|
||||
|
||||
override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode
|
||||
|
||||
override def toString = (
|
||||
"""
|
||||
|Relations (with name hashing enabled):
|
||||
| products: %s
|
||||
| bin deps: %s
|
||||
| src deps: %s
|
||||
| ext deps: %s
|
||||
| class names: %s
|
||||
| used names: %s
|
||||
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s: _*)
|
||||
)
|
||||
|
||||
}
|
||||
|
|
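// Editor's note: a minimal, hypothetical usage sketch (not part of the original file)
// showing how callers are expected to branch on `nameHashing`: the name-hashing
// implementation answers `memberRef`/`inheritance` queries, while the default
// implementation only answers `direct`/`publicInherited`. `rels` and `src` are
// assumed values; `Relation.forward` returns the set of targets for a source.
def internalDepsOf(rels: Relations, src: java.io.File): Set[java.io.File] =
  if (rels.nameHashing)
    rels.memberRef.internal.forward(src) ++ rels.inheritance.internal.forward(src)
  else
    rels.direct.internal.forward(src)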
@ -1,36 +0,0 @@
package sbt
package inc

import xsbti.Problem

import java.io.File

trait SourceInfo {
  def reportedProblems: Seq[Problem]
  def unreportedProblems: Seq[Problem]
}
trait SourceInfos {
  def ++(o: SourceInfos): SourceInfos
  def add(file: File, info: SourceInfo): SourceInfos
  def --(files: Iterable[File]): SourceInfos
  def groupBy[K](f: (File) => K): Map[K, SourceInfos]
  def get(file: File): SourceInfo
  def allInfos: Map[File, SourceInfo]
}
object SourceInfos {
  def empty: SourceInfos = make(Map.empty)
  def make(m: Map[File, SourceInfo]): SourceInfos = new MSourceInfos(m)

  val emptyInfo: SourceInfo = makeInfo(Nil, Nil)
  def makeInfo(reported: Seq[Problem], unreported: Seq[Problem]): SourceInfo =
    new MSourceInfo(reported, unreported)
  def merge(infos: Traversable[SourceInfos]): SourceInfos = (SourceInfos.empty /: infos)(_ ++ _)
}
private final class MSourceInfos(val allInfos: Map[File, SourceInfo]) extends SourceInfos {
  def ++(o: SourceInfos) = new MSourceInfos(allInfos ++ o.allInfos)
  def --(sources: Iterable[File]) = new MSourceInfos(allInfos -- sources)
  def groupBy[K](f: File => K): Map[K, SourceInfos] = allInfos groupBy (x => f(x._1)) map { x => (x._1, new MSourceInfos(x._2)) }
  def add(file: File, info: SourceInfo) = new MSourceInfos(allInfos + ((file, info)))
  def get(file: File) = allInfos.getOrElse(file, SourceInfos.emptyInfo)
}
private final class MSourceInfo(val reportedProblems: Seq[Problem], val unreportedProblems: Seq[Problem]) extends SourceInfo
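// Editor's note: a brief, hypothetical usage sketch of the SourceInfos API above.
// Lookups are total: `get` falls back to `SourceInfos.emptyInfo` for unknown files.
val infos = SourceInfos.empty.add(new java.io.File("A.scala"), SourceInfos.makeInfo(Nil, Nil))
val combined = SourceInfos.merge(infos :: SourceInfos.empty :: Nil)
assert(combined.get(new java.io.File("Missing.scala")) == SourceInfos.emptyInfo)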
@ -1,189 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import java.io.{ File, IOException }
import Stamp.getStamp
import scala.util.matching.Regex

trait ReadStamps {
  /** The Stamp for the given product at the time represented by this Stamps instance.*/
  def product(prod: File): Stamp
  /** The Stamp for the given source file at the time represented by this Stamps instance.*/
  def internalSource(src: File): Stamp
  /** The Stamp for the given binary dependency at the time represented by this Stamps instance.*/
  def binary(bin: File): Stamp
}

/** Provides information about files as they were at a specific time.*/
trait Stamps extends ReadStamps {
  def allInternalSources: collection.Set[File]
  def allBinaries: collection.Set[File]
  def allProducts: collection.Set[File]

  def sources: Map[File, Stamp]
  def binaries: Map[File, Stamp]
  def products: Map[File, Stamp]
  def classNames: Map[File, String]

  def className(bin: File): Option[String]

  def markInternalSource(src: File, s: Stamp): Stamps
  def markBinary(bin: File, className: String, s: Stamp): Stamps
  def markProduct(prod: File, s: Stamp): Stamps

  def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps

  def ++(o: Stamps): Stamps
  def groupBy[K](prod: Map[K, File => Boolean], sourcesGrouping: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps]
}

sealed trait Stamp {
  override def equals(other: Any): Boolean = other match {
    case o: Stamp => Stamp.equivStamp.equiv(this, o)
    case _ => false
  }

  override def toString: String = Stamp.toString(this)
}

final class Hash(val value: Array[Byte]) extends Stamp {
  override def hashCode: Int = java.util.Arrays.hashCode(value)
}
final class LastModified(val value: Long) extends Stamp {
  override def hashCode: Int = (value ^ (value >>> 32)).toInt
}
final class Exists(val value: Boolean) extends Stamp {
  override def hashCode: Int = if (value) 0 else 1
}

object Stamp {
  implicit val equivStamp: Equiv[Stamp] = new Equiv[Stamp] {
    def equiv(a: Stamp, b: Stamp) = (a, b) match {
      case (h1: Hash, h2: Hash) => h1.value sameElements h2.value
      case (e1: Exists, e2: Exists) => e1.value == e2.value
      case (lm1: LastModified, lm2: LastModified) => lm1.value == lm2.value
      case _ => false
    }
  }

  // NOTE: toString/fromString used for serialization, not just for debug prints.

  def toString(s: Stamp): String = s match {
    case e: Exists => if (e.value) "exists" else "absent"
    case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
    case lm: LastModified => "lastModified(" + lm.value + ")"
  }

  private val hashPattern = """hash\((\w+)\)""".r
  private val lastModifiedPattern = """lastModified\((\d+)\)""".r

  def fromString(s: String): Stamp = s match {
    case "exists" => new Exists(true)
    case "absent" => new Exists(false)
    case hashPattern(value) => new Hash(Hash.fromHex(value))
    case lastModifiedPattern(value) => new LastModified(java.lang.Long.parseLong(value))
    case _ => throw new IllegalArgumentException("Unrecognized Stamp string representation: " + s)
  }

  def show(s: Stamp): String = s match {
    case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
    case e: Exists => if (e.value) "exists" else "does not exist"
    case lm: LastModified => "last modified(" + lm.value + ")"
  }

  val hash = (f: File) => tryStamp(new Hash(Hash(f)))
  val lastModified = (f: File) => tryStamp(new LastModified(f.lastModified))
  val exists = (f: File) => tryStamp(if (f.exists) present else notPresent)

  def tryStamp(g: => Stamp): Stamp = try { g } catch { case i: IOException => notPresent }

  val notPresent = new Exists(false)
  val present = new Exists(true)

  def getStamp(map: Map[File, Stamp], src: File): Stamp = map.getOrElse(src, notPresent)
}

object Stamps {
  /**
   * Creates a ReadStamps instance that will calculate and cache the stamp for sources and binaries
   * on the first request according to the provided `srcStamp` and `binStamp` functions. Each
   * stamp is calculated separately on demand.
   * The stamp for a product is always recalculated.
   */
  def initial(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp): ReadStamps = new InitialStamps(prodStamp, srcStamp, binStamp)

  def empty: Stamps =
    {
      val eSt = Map.empty[File, Stamp]
      apply(eSt, eSt, eSt, Map.empty[File, String])
    }
  def apply(products: Map[File, Stamp], sources: Map[File, Stamp], binaries: Map[File, Stamp], binaryClassNames: Map[File, String]): Stamps =
    new MStamps(products, sources, binaries, binaryClassNames)

  def merge(stamps: Traversable[Stamps]): Stamps = (Stamps.empty /: stamps)(_ ++ _)
}

private class MStamps(val products: Map[File, Stamp], val sources: Map[File, Stamp], val binaries: Map[File, Stamp], val classNames: Map[File, String]) extends Stamps {
  def allInternalSources: collection.Set[File] = sources.keySet
  def allBinaries: collection.Set[File] = binaries.keySet
  def allProducts: collection.Set[File] = products.keySet

  def ++(o: Stamps): Stamps =
    new MStamps(products ++ o.products, sources ++ o.sources, binaries ++ o.binaries, classNames ++ o.classNames)

  def markInternalSource(src: File, s: Stamp): Stamps =
    new MStamps(products, sources.updated(src, s), binaries, classNames)

  def markBinary(bin: File, className: String, s: Stamp): Stamps =
    new MStamps(products, sources, binaries.updated(bin, s), classNames.updated(bin, className))

  def markProduct(prod: File, s: Stamp): Stamps =
    new MStamps(products.updated(prod, s), sources, binaries, classNames)

  def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps =
    new MStamps(products.filterKeys(prod), sources -- removeSources, binaries.filterKeys(bin), classNames.filterKeys(bin))

  def groupBy[K](prod: Map[K, File => Boolean], f: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] =
    {
      val sourcesMap: Map[K, Map[File, Stamp]] = sources.groupBy(x => f(x._1))

      val constFalse = (f: File) => false
      def kStamps(k: K): Stamps = new MStamps(
        products.filterKeys(prod.getOrElse(k, constFalse)),
        sourcesMap.getOrElse(k, Map.empty[File, Stamp]),
        binaries.filterKeys(bin.getOrElse(k, constFalse)),
        classNames.filterKeys(bin.getOrElse(k, constFalse))
      )

      (for (k <- prod.keySet ++ sourcesMap.keySet ++ bin.keySet) yield (k, kStamps(k))).toMap
    }

  def product(prod: File) = getStamp(products, prod)
  def internalSource(src: File) = getStamp(sources, src)
  def binary(bin: File) = getStamp(binaries, bin)
  def className(bin: File) = classNames get bin

  override def equals(other: Any): Boolean = other match {
    case o: MStamps => products == o.products && sources == o.sources && binaries == o.binaries && classNames == o.classNames
    case _ => false
  }

  override lazy val hashCode: Int = (products :: sources :: binaries :: classNames :: Nil).hashCode

  override def toString: String =
    "Stamps for: %d products, %d sources, %d binaries, %d classNames".format(products.size, sources.size, binaries.size, classNames.size)
}

private class InitialStamps(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp) extends ReadStamps {
  import collection.mutable.{ HashMap, Map }
  // cached stamps for files that do not change during compilation
  private val sources: Map[File, Stamp] = new HashMap
  private val binaries: Map[File, Stamp] = new HashMap

  def product(prod: File): Stamp = prodStamp(prod)
  def internalSource(src: File): Stamp = synchronized { sources.getOrElseUpdate(src, srcStamp(src)) }
  def binary(bin: File): Stamp = synchronized { binaries.getOrElseUpdate(bin, binStamp(bin)) }
}
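// Editor's note: a round-trip sketch for the Stamp string format. As the NOTE in
// object Stamp says, toString/fromString are a serialization format, so
// fromString(toString(s)) should reproduce an equivalent stamp for every variant.
// The sample values are illustrative only.
val samples = List(Stamp.present, Stamp.notPresent, new Hash(Array[Byte](0x12, 0x34)), new LastModified(1400000000000L))
assert(samples.forall(s => Stamp.fromString(Stamp.toString(s)) == s)) // Stamp.equals delegates to equivStamp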
@ -1,90 +0,0 @@
package sbt
package inc

import java.io.File
import scala.math.abs
import sbt.inc.TestCaseGenerators._
import org.scalacheck._
import Gen._
import Prop._
import xsbti.DependencyContext._

object AnalysisTest extends Properties("Analysis") {
  // Merge and split a hard-coded trivial example.
  property("Simple Merge and Split") = {
    def f(s: String) = new File(s)
    val aScala = f("A.scala")
    val bScala = f("B.scala")
    val aSource = genSource("A" :: "A$" :: Nil).sample.get
    val bSource = genSource("B" :: "B$" :: Nil).sample.get
    val cSource = genSource("C" :: Nil).sample.get
    val exists = new Exists(true)
    val sourceInfos = SourceInfos.makeInfo(Nil, Nil)

    // a
    val aProducts = (f("A.class"), "A", exists) :: (f("A$.class"), "A$", exists) :: Nil
    val aInternal = Nil
    val aExternal = ExternalDependency(aScala, "C", cSource, DependencyByMemberRef) :: Nil
    val aBinary = (f("x.jar"), "x", exists) :: Nil

    val a = Analysis.empty(false).addSource(aScala, aSource, exists, sourceInfos, aProducts, aInternal, aExternal, aBinary)

    // b
    val bProducts = (f("B.class"), "B", exists) :: (f("B$.class"), "B$", exists) :: Nil
    val bInternal = Nil
    val bExternal = ExternalDependency(bScala, "A", aSource, DependencyByInheritance) :: Nil
    val bBinary = (f("x.jar"), "x", exists) :: (f("y.jar"), "y", exists) :: Nil

    val b = Analysis.empty(false).addSource(bScala, bSource, exists, sourceInfos, bProducts, bInternal, bExternal, bBinary)

    // ab
    // `b` has an external dependency on `a` that will be internalized
    val abAProducts = (f("A.class"), "A", exists) :: (f("A$.class"), "A$", exists) :: Nil
    val abAInternal = Nil
    val abAExternal = ExternalDependency(aScala, "C", cSource, DependencyByMemberRef) :: Nil
    val abABinary = (f("x.jar"), "x", exists) :: Nil

    val abBProducts = (f("B.class"), "B", exists) :: (f("B$.class"), "B$", exists) :: Nil
    val abBInternal = InternalDependency(bScala, aScala, DependencyByMemberRef) :: InternalDependency(bScala, aScala, DependencyByInheritance) :: Nil
    val abBExternal = Nil
    val abBBinary = (f("x.jar"), "x", exists) :: (f("y.jar"), "y", exists) :: Nil

    val ab = Analysis.empty(false).addSource(aScala, aSource, exists, sourceInfos, abAProducts, abAInternal, abAExternal, abABinary)
      .addSource(bScala, bSource, exists, sourceInfos, abBProducts, abBInternal, abBExternal, abBBinary)

    val split: Map[String, Analysis] = ab.groupBy({ f: File => f.getName.substring(0, 1) })

    val aSplit = split.getOrElse("A", Analysis.empty(false))
    val bSplit = split.getOrElse("B", Analysis.empty(false))

    val merged = Analysis.merge(a :: b :: Nil)

    ("split(AB)(A) == A" |: compare(a, aSplit)) &&
      ("split(AB)(B) == B" |: compare(b, bSplit)) &&
      ("merge(A, B) == AB" |: compare(merged, ab))
  }

  // Merge and split large, generated examples.
  // Mustn't shrink, as the default Shrink[Int] doesn't respect the lower bound of choose(), which will cause
  // a divide-by-zero error masking the original error.
  // Note that the generated Analyses have nameHashing = false (Grouping of Analyses with name hashing enabled
  // is not supported right now)
  property("Complex Merge and Split") = forAllNoShrink(genAnalysis(nameHashing = false), choose(1, 10)) { (analysis: Analysis, numSplits: Int) =>
    val grouped: Map[Int, Analysis] = analysis.groupBy({ f: File => abs(f.hashCode()) % numSplits })
    def getGroup(i: Int): Analysis = grouped.getOrElse(i, Analysis.empty(false))
    val splits = (Range(0, numSplits) map getGroup).toList

    val merged: Analysis = Analysis.merge(splits)
    "Merge all" |: compare(analysis, merged)
  }

  // Compare two analyses with useful labelling when they aren't equal.
  private[this] def compare(left: Analysis, right: Analysis): Prop =
    s" LEFT: $left" |:
      s"RIGHT: $right" |:
      s"STAMPS EQUAL: ${left.stamps == right.stamps}" |:
      s"APIS EQUAL: ${left.apis == right.apis}" |:
      s"RELATIONS EQUAL: ${left.relations == right.relations}" |:
      "UNEQUAL" |:
      (left == right)
}
@ -1,188 +0,0 @@
package sbt
package inc

import java.io.File

import org.scalacheck._
import Arbitrary._
import Gen._

import sbt.Relation
import xsbti.api._
import xsbti.SafeLazy
import xsbti.DependencyContext._

/**
 * Scalacheck generators for Analysis objects and their substructures.
 * Fairly complex, as Analysis has interconnected state that can't be
 * independently generated.
 */
object TestCaseGenerators {
  // We restrict sizes, otherwise the generated Analysis objects get huge and the tests take a long time.
  val maxSources = 10 // Max number of source files.
  val maxRelatives = 10 // Max number of things that a source x can relate to in a single Relation.
  val maxPathSegmentLen = 10 // Max number of characters in a path segment.
  val maxPathLen = 6 // Max number of path segments in a path.

  // Ensure that we generate unique class names and file paths every time.
  // Using repeated strings may lead to all sorts of undesirable interactions.
  val used1 = scala.collection.mutable.Set.empty[String]
  val used2 = scala.collection.mutable.Set.empty[String]

  // When using `retryUntil`, the condition is actually tested twice (see implementation in ScalaCheck),
  // which is why we need to insert the element twice.
  // If the element is present in both sets, then it has already been used.
  def unique[T](g: Gen[T]) = g retryUntil { o: T =>
    if (used1.add(o.toString))
      true
    else
      used2.add(o.toString)
  }

  def identifier: Gen[String] = sized { size =>
    resize(Math.max(size, 3), Gen.identifier)
  }

  def genFilePathSegment: Gen[String] = for {
    n <- choose(3, maxPathSegmentLen) // Segments have at least 3 characters.
    c <- alphaChar
    cs <- listOfN(n - 1, alphaNumChar)
  } yield (c :: cs).mkString

  def genFile: Gen[File] = for {
    n <- choose(2, maxPathLen) // Paths have at least 2 segments.
    path <- listOfN(n, genFilePathSegment)
  } yield new File(path.mkString("/"))

  def genStamp: Gen[Stamp] = for {
    b <- oneOf(true, false)
  } yield new Exists(b)

  def zipMap[A, B](a: Seq[A], b: Seq[B]): Map[A, B] = (a zip b).toMap

  def genStamps(rel: Relations): Gen[Stamps] = {
    val prod = rel.allProducts.toList
    val src = rel.allSources.toList
    val bin = rel.allBinaryDeps.toList
    for {
      prodStamps <- listOfN(prod.length, genStamp)
      srcStamps <- listOfN(src.length, genStamp)
      binStamps <- listOfN(bin.length, genStamp)
      binClassNames <- listOfN(bin.length, unique(identifier))
    } yield Stamps(zipMap(prod, prodStamps), zipMap(src, srcStamps), zipMap(bin, binStamps), zipMap(bin, binClassNames))
  }

  // We need "proper" definitions with specific class names, as groupBy uses these to pick a representative top-level class when splitting.
  private[this] def makeDefinition(name: String): Definition =
    new ClassLike(DefinitionType.ClassDef, lzy(new EmptyType()),
      lzy(new Structure(lzy(Array()), lzy(Array()), lzy(Array()))), Array(), Array(),
      name, new Public(), new Modifiers(false, false, false, false, false, false, false), Array())

  private[this] def lzy[T <: AnyRef](x: T) = SafeLazy.strict(x)

  def genNameHash(defn: String): Gen[xsbti.api._internalOnly_NameHash] =
    const(new xsbti.api._internalOnly_NameHash(defn, defn.hashCode()))

  def genNameHashes(defns: Seq[String]): Gen[xsbti.api._internalOnly_NameHashes] = {
    def partitionAccordingToMask[T](mask: List[Boolean], xs: List[T]): (List[T], List[T]) = {
      val (p1, p2) = (mask zip xs).partition(_._1)
      (p1.map(_._2), p2.map(_._2))
    }
    val pairsOfGenerators = for (defn <- defns) yield {
      for {
        isRegularMember <- arbitrary[Boolean]
        nameHash <- genNameHash(defn)
      } yield (isRegularMember, nameHash)
    }
    val genNameHashesList = Gen.sequence[List, xsbti.api._internalOnly_NameHash](defns.map(genNameHash))
    val genTwoListOfNameHashes = for {
      nameHashesList <- genNameHashesList
      isRegularMemberList <- listOfN(nameHashesList.length, arbitrary[Boolean])
    } yield partitionAccordingToMask(isRegularMemberList, nameHashesList)
    for {
      (regularMemberNameHashes, implicitMemberNameHashes) <- genTwoListOfNameHashes
    } yield new xsbti.api._internalOnly_NameHashes(regularMemberNameHashes.toArray, implicitMemberNameHashes.toArray)
  }

  def genSource(defns: Seq[String]): Gen[Source] = for {
    startTime <- arbitrary[Long]
    hashLen <- choose(10, 20) // Required by SameAPI to be > 0.
    hash <- Gen.containerOfN[Array, Byte](hashLen, arbitrary[Byte])
    apiHash <- arbitrary[Int]
    hasMacro <- arbitrary[Boolean]
    nameHashes <- genNameHashes(defns)
  } yield new Source(new Compilation(startTime, Array()), hash, new SourceAPI(Array(), Array(defns map makeDefinition: _*)), apiHash, nameHashes, hasMacro)

  def genSources(all_defns: Seq[Seq[String]]): Gen[Seq[Source]] = Gen.sequence[List, Source](all_defns.map(genSource))

  def genAPIs(rel: Relations): Gen[APIs] = {
    val internal = rel.allInternalSrcDeps.toList.sorted
    val external = rel.allExternalDeps.toList.sorted
    for {
      internalSources <- genSources(internal map { f: File => rel.classNames(f).toList.sorted })
      externalSources <- genSources(external map { s: String => s :: Nil })
    } yield APIs(zipMap(internal, internalSources), zipMap(external, externalSources))
  }

  def genRelation[T](g: Gen[T])(srcs: List[File]): Gen[Relation[File, T]] = for {
    n <- choose(1, maxRelatives)
    entries <- listOfN(srcs.length, containerOfN[Set, T](n, g))
  } yield Relation.reconstruct(zipMap(srcs, entries))

  val genFileRelation = genRelation[File](unique(genFile)) _
  val genStringRelation = genRelation[String](unique(identifier)) _

  def genRSource(srcs: List[File]): Gen[Relations.Source] = for {
    internal <- listOfN(srcs.length, someOf(srcs)) // Internal dep targets must come from list of sources.
    external <- genStringRelation(srcs)
  } yield Relations.makeSource( // Ensure that we don't generate a dep of some file on itself.
    Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap),
    external)

  def genSubRSource(src: Relations.Source): Gen[Relations.Source] = for {
    internal <- someOf(src.internal.all.toList)
    external <- someOf(src.external.all.toList)
  } yield Relations.makeSource(Relation.empty ++ internal, Relation.empty ++ external)

  def genRSourceDependencies(srcs: List[File]): Gen[Relations.SourceDependencies] = for {
    internal <- listOfN(srcs.length, someOf(srcs))
    external <- genStringRelation(srcs)
  } yield Relations.makeSourceDependencies(
    Relation.reconstruct((srcs zip (internal map { _.toSet }) map { case (a, b) => (a, b - a) }).toMap),
    external)

  def genSubRSourceDependencies(src: Relations.SourceDependencies): Gen[Relations.SourceDependencies] = for {
    internal <- someOf(src.internal.all.toList)
    external <- someOf(src.external.all.toList)
  } yield Relations.makeSourceDependencies(Relation.empty ++ internal, Relation.empty ++ external)

  def genRelations: Gen[Relations] = for {
    numSrcs <- choose(0, maxSources)
    srcs <- listOfN(numSrcs, genFile)
    srcProd <- genFileRelation(srcs)
    binaryDep <- genFileRelation(srcs)
    direct <- genRSource(srcs)
    publicInherited <- genSubRSource(direct)
    classes <- genStringRelation(srcs)
  } yield Relations.make(srcProd, binaryDep, direct, publicInherited, classes)

  def genRelationsNameHashing: Gen[Relations] = for {
    numSrcs <- choose(0, maxSources)
    srcs <- listOfN(numSrcs, genFile)
    srcProd <- genFileRelation(srcs)
    binaryDep <- genFileRelation(srcs)
    memberRef <- genRSourceDependencies(srcs)
    inheritance <- genSubRSourceDependencies(memberRef)
    classes <- genStringRelation(srcs)
    names <- genStringRelation(srcs)
    // `=` (not `<-`): these are plain values, not generators.
    internal = InternalDependencies(Map(DependencyByMemberRef -> memberRef.internal, DependencyByInheritance -> inheritance.internal))
    external = ExternalDependencies(Map(DependencyByMemberRef -> memberRef.external, DependencyByInheritance -> inheritance.external))
  } yield Relations.make(srcProd, binaryDep, internal, external, classes, names)

  def genAnalysis(nameHashing: Boolean): Gen[Analysis] = for {
    rels <- if (nameHashing) genRelationsNameHashing else genRelations
    stamps <- genStamps(rels)
    apis <- genAPIs(rels)
  } yield new MAnalysis(stamps, apis, rels, SourceInfos.empty, Compilations.empty)
}
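// Editor's note: a hypothetical usage sketch of the generators above. `unique`
// wraps any Gen so repeated sampling never reuses a value (modulo the retryUntil
// double-evaluation handled via the two `used` sets).
import org.scalacheck.Gen
val tenDistinctFiles: Gen[List[java.io.File]] =
  Gen.listOfN(10, TestCaseGenerators.unique(TestCaseGenerators.genFile))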
@ -1,249 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package compiler

import inc._

import scala.annotation.tailrec
import java.io.File
import classpath.ClasspathUtilities
import classfile.Analyze
import inc.Locate.DefinesClass
import inc.IncOptions
import CompileSetup._
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat }

import xsbti.{ Reporter, AnalysisCallback }
import xsbti.api.Source
import xsbti.compile.{ CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress }
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }

@deprecated("Use MixedAnalyzingCompiler or IC instead.", "0.13.8")
class AggressiveCompile(cacheFile: File) {
  @deprecated("Use IC.compile instead.", "0.13.8")
  def apply(compiler: AnalyzingCompiler,
    javac: xsbti.compile.JavaCompiler,
    sources: Seq[File], classpath: Seq[File],
    output: Output,
    cache: GlobalsCache,
    progress: Option[CompileProgress] = None,
    options: Seq[String] = Nil,
    javacOptions: Seq[String] = Nil,
    analysisMap: File => Option[Analysis] = { _ => None },
    definesClass: DefinesClass = Locate.definesClass _,
    reporter: Reporter,
    compileOrder: CompileOrder = Mixed,
    skip: Boolean = false,
    incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
    {
      val setup = new CompileSetup(output, new CompileOptions(options, javacOptions),
        compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing)
      compile1(sources, classpath, setup, progress, store, analysisMap, definesClass,
        compiler, javac, reporter, skip, cache, incrementalCompilerOptions)
    }

  def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
    args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)

  def compile1(sources: Seq[File],
    classpath: Seq[File],
    setup: CompileSetup, progress: Option[CompileProgress],
    store: AnalysisStore,
    analysis: File => Option[Analysis],
    definesClass: DefinesClass,
    compiler: AnalyzingCompiler,
    javac: xsbti.compile.JavaCompiler,
    reporter: Reporter, skip: Boolean,
    cache: GlobalsCache,
    incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
    {
      val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions)
      if (skip)
        previousAnalysis
      else {
        val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup,
          progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions)
        val (modified, result) = compile2(config)
        if (modified)
          store.set(result, setup)
        result
      }
    }
  def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Boolean, Analysis) =
    {
      import config._
      import currentSetup._
      val absClasspath = classpath.map(_.getAbsoluteFile)
      val apiOption = (api: Either[Boolean, Source]) => api.right.toOption
      val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
      val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath)
      val entry = Locate.entry(searchClasspath, definesClass)

      val compile0 = (include: Set[File], changes: DependencyChanges, callback: AnalysisCallback) => {
        val outputDirs = outputDirectories(output)
        outputDirs foreach (IO.createDirectory)
        val incSrc = sources.filter(include)
        val (javaSrcs, scalaSrcs) = incSrc partition javaOnly
        logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
        def compileScala() =
          if (scalaSrcs.nonEmpty) {
            val sources = if (order == Mixed) incSrc else scalaSrcs
            val arguments = cArgs(Nil, absClasspath, None, options.options)
            timed("Scala compilation", log) {
              compiler.compile(sources, changes, arguments, output, callback, reporter, cache, log, progress)
            }
          }
        def compileJava() =
          if (javaSrcs.nonEmpty) {
            import Path._
            @tailrec def ancestor(f1: File, f2: File): Boolean =
              if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)

            val chunks: Map[Option[File], Seq[File]] = output match {
              case single: SingleOutput => Map(Some(single.outputDirectory) -> javaSrcs)
              case multi: MultipleOutput =>
                javaSrcs groupBy { src =>
                  multi.outputGroups find { out => ancestor(out.sourceDirectory, src) } map (_.outputDirectory)
                }
            }
            chunks.get(None) foreach { srcs =>
              log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(","))
            }
            val memo = for ((Some(outputDirectory), srcs) <- chunks) yield {
              val classesFinder = PathFinder(outputDirectory) ** "*.class"
              (classesFinder, classesFinder.get, srcs)
            }

            val loader = ClasspathUtilities.toLoader(searchClasspath)
            timed("Java compilation", log) {
              try javac.compileWithReporter(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, reporter, log)
              catch {
                // Handle older APIs
                case _: NoSuchMethodError =>
                  javac.compile(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, log)
              }
            }

            def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = {
              val (api, inherits) = ClassToAPI.process(classes)
              callback.api(source, api)
              inherits.map(_.getName)
            }

            timed("Java analysis", log) {
              for ((classesFinder, oldClasses, srcs) <- memo) {
                val newClasses = Set(classesFinder.get: _*) -- oldClasses
                Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI)
              }
            }
          }
        if (order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() }
      }

      val sourcesSet = sources.toSet
      val analysis = previousSetup match {
        case Some(previous) if previous.nameHashing != currentSetup.nameHashing =>
          // If the value of the `nameHashing` flag has changed, we have to throw away
          // the previous Analysis completely and start with an empty Analysis object
          // that supports the particular value of the `nameHashing` flag.
          // Otherwise we'll be getting UnsupportedOperationExceptions.
          log.warn("Ignoring previous analysis due to incompatible nameHashing setting.")
          Analysis.empty(currentSetup.nameHashing)
        case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
        case _ =>
          log.warn("Pruning sources from previous analysis, due to incompatible CompileSetup.")
          Incremental.prune(sourcesSet, previousAnalysis)
      }
      IncrementalCompile(sourcesSet, entry, compile0, analysis, getAnalysis, output, log, incOptions)
    }
  private[this] def outputDirectories(output: Output): Seq[File] = output match {
    case single: SingleOutput => List(single.outputDirectory)
    case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory)
  }
  private[this] def timed[T](label: String, log: Logger)(t: => T): T =
    {
      val start = System.nanoTime
      val result = t
      val elapsed = System.nanoTime - start
      log.debug(label + " took " + (elapsed / 1e9) + " s")
      result
    }
  private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]): Unit = {
    val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
    val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
    val combined = scalaMsg ++ javaMsg
    if (combined.nonEmpty)
      log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
  }
  private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) =
    previous match {
      case Some((an, setup)) => (an, Some(setup))
      case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None)
    }
  def javaOnly(f: File) = f.getName.endsWith(".java")

  private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
    options.dropWhile(_ != CompilerArguments.BootClasspathOption).slice(1, 2).headOption.toList.flatMap(IO.parseClasspath)

  val store = MixedAnalyzingCompiler.staticCachedStore(cacheFile)

}
@deprecated("Use MixedAnalyzingCompiler instead.", "0.13.8")
object AggressiveCompile {
  @deprecated("Use MixedAnalyzingCompiler.staticCachedStore instead.", "0.13.8")
  def staticCachedStore(cacheFile: File) = MixedAnalyzingCompiler.staticCachedStore(cacheFile)

  @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
  def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool =
    if (javaHome.isDefined)
      JavaCompiler.fork(cpOptions, instance)(forkJavac(javaHome))
    else
      JavaCompiler.directOrFork(cpOptions, instance)(forkJavac(None))

  @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
  def forkJavac(javaHome: Option[File]): JavaCompiler.Fork =
    {
      import Path._
      def exec(jc: JavacContract) = javaHome match { case None => jc.name; case Some(jh) => (jh / "bin" / jc.name).absolutePath }
      (contract: JavacContract, args: Seq[String], log: Logger) => {
        log.debug("Forking " + contract.name + ": " + exec(contract) + " " + args.mkString(" "))
        val javacLogger = new JavacLogger(log)
        var exitCode = -1
        try {
          exitCode = Process(exec(contract), args) ! javacLogger
        } finally {
          javacLogger.flush(exitCode)
        }
        exitCode
      }
    }
}

@deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
private[sbt] class JavacLogger(log: Logger) extends ProcessLogger {
  import scala.collection.mutable.ListBuffer
  import Level.{ Info, Warn, Error, Value => LogLevel }

  private val msgs: ListBuffer[(LogLevel, String)] = new ListBuffer()

  def info(s: => String): Unit =
    synchronized { msgs += ((Info, s)) }

  def error(s: => String): Unit =
    synchronized { msgs += ((Error, s)) }

  def buffer[T](f: => T): T = f

  private def print(desiredLevel: LogLevel)(t: (LogLevel, String)) = t match {
    case (Info, msg) => log.info(msg)
    case (Error, msg) => log.log(desiredLevel, msg)
  }

  def flush(exitCode: Int): Unit = {
    val level = if (exitCode == 0) Warn else Error
    msgs foreach print(level)
    msgs.clear()
  }
}
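// Editor's note: a worked example (values illustrative) of the option scanning in
// explicitBootClasspath above: dropWhile keeps everything from the boot-classpath
// flag onward, slice(1, 2) takes only its argument, and IO.parseClasspath splits it
// into files.
val opts = Seq("-deprecation", CompilerArguments.BootClasspathOption, "a.jar:b.jar", "-verbose")
val bootArg = opts.dropWhile(_ != CompilerArguments.BootClasspathOption).slice(1, 2).headOption
assert(bootArg == Some("a.jar:b.jar"))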
@ -1,41 +0,0 @@
package sbt.compiler

import java.io.File

import sbt.CompileSetup
import sbt.inc.{ IncOptions, Analysis }
import sbt.inc.Locate._
import xsbti.Reporter
import xsbti.compile.{ GlobalsCache, CompileProgress }

/**
 * Configuration used for running an analyzing compiler (a compiler which can extract dependencies between source files and JARs).
 *
 * @param sources
 * @param classpath
 * @param previousAnalysis
 * @param previousSetup
 * @param currentSetup
 * @param progress
 * @param getAnalysis
 * @param definesClass
 * @param reporter
 * @param compiler
 * @param javac
 * @param cache
 * @param incOptions
 */
final class CompileConfiguration(
  val sources: Seq[File],
  val classpath: Seq[File],
  val previousAnalysis: Analysis,
  val previousSetup: Option[CompileSetup],
  val currentSetup: CompileSetup,
  val progress: Option[CompileProgress],
  val getAnalysis: File => Option[Analysis],
  val definesClass: DefinesClass,
  val reporter: Reporter,
  val compiler: AnalyzingCompiler,
  val javac: xsbti.compile.JavaCompiler,
  val cache: GlobalsCache,
  val incOptions: IncOptions)
@ -1,162 +0,0 @@
package sbt.compiler

import java.io.File
import sbt.compiler.javac.AnalyzingJavaCompiler
import sbt.inc.Locate._
import sbt._
import sbt.inc._
import xsbti.Logger
import xsbti.api.Source
import xsbti.compile.ClasspathOptions
import xsbti.compile.CompileOrder._
import xsbti.compile.DefinesClass
import xsbti.compile.ScalaInstance
import xsbti.{ Reporter, Logger, Maybe }
import xsbti.compile._

// TODO -
// 1. Move analyzingCompile from MixedAnalyzingCompiler into here
// 2. Create AnalyzingJavaCompiler class
// 3. MixedAnalyzingCompiler should just provide the raw 'compile' method used in incremental compiler (and
//    by this class).

/**
 * An implementation of the incremental compiler that can compile inputs and dump out source dependency analysis.
 */
object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] {

  override def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis =
    {
      val setup = in.setup; import setup._
      val options = in.options; import options.{ options => scalacOptions, _ }
      val compilers = in.compilers; import compilers._
      val aMap = (f: File) => m2o(analysisMap(f))
      val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) }
      val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions)
      val (previousAnalysis, previousSetup) = {
        MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile()).get().map {
          case (a, s) => (a, Some(s))
        } getOrElse {
          (Analysis.empty(nameHashing = incOptions.nameHashing), None)
        }
      }
      incrementalCompile(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, previousAnalysis,
        previousSetup, aMap, defClass, reporter, order, skip, incOptions)(log).analysis
    }

  private[this] def m2o[S](opt: Maybe[S]): Option[S] = if (opt.isEmpty) None else Some(opt.get)

  @deprecated("A logger is no longer needed.", "0.13.8")
  override def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler =
    new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options)

  override def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions): AnalyzingCompiler =
    new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options)

  def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger) {
    val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log)
    AnalyzingCompiler.compileSources(sourceJar :: Nil, targetJar, interfaceJar :: Nil, label, raw, log)
  }

  def readCache(file: File): Maybe[(Analysis, CompileSetup)] =
    try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() }

  @deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2")
  def readAnalysis(file: File): Analysis =
    try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty }

  def readAnalysis(file: File, incOptions: IncOptions): Analysis =
    try { readCacheUncaught(file)._1 } catch {
      case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing)
    }

  def readCacheUncaught(file: File): (Analysis, CompileSetup) =
    Using.fileReader(IO.utf8)(file) { reader =>
      try {
        TextAnalysisFormat.read(reader)
      } catch {
        case ex: sbt.inc.ReadException =>
          throw new java.io.IOException(s"Error while reading $file", ex)
      }
    }

  /** The result of running the compilation. */
  final case class Result(analysis: Analysis, setup: CompileSetup, hasModified: Boolean)

  /**
   * This will run a mixed-compilation of Java/Scala sources.
   *
   * TODO - this is the interface sbt uses. Somehow this needs to be exposed further.
   *
   * @param scalac An instance of the Scala compiler which can also extract "Analysis" (dependencies).
   * @param javac An instance of the Java compiler.
   * @param sources The set of sources to compile.
   * @param classpath The classpath to use when compiling.
   * @param output Configuration for where to output .class files.
   * @param cache The caching mechanism to use instead of instantiating new compiler instances.
   * @param progress Progress listening for the compilation process. TODO - Feed this through the Javac Compiler!
   * @param options Options for the Scala compiler.
   * @param javacOptions Options for the Java compiler.
   * @param previousAnalysis The previous dependency Analysis object.
   * @param previousSetup The previous compilation setup (if any).
   * @param analysisMap A map of file to the dependency analysis of that file.
   * @param definesClass A mechanism for looking up whether or not a JAR defines a particular class.
   * @param reporter Where we send all compilation error/warning events.
   * @param compileOrder The order we'd like to mix compilation. JavaThenScala, ScalaThenJava or Mixed.
   * @param skip If true, we skip compilation and just return the previous analysis file.
   * @param incrementalCompilerOptions Options specific to incremental compilation.
   * @param log The location where we write log messages.
   * @return The full configuration used to instantiate this mixed-analyzing compiler, the set of extracted dependencies and
   *         whether or not any files were modified.
   */
  def incrementalCompile(scalac: AnalyzingCompiler,
    javac: xsbti.compile.JavaCompiler,
    sources: Seq[File],
    classpath: Seq[File],
    output: Output,
    cache: GlobalsCache,
    progress: Option[CompileProgress] = None,
    options: Seq[String] = Nil,
    javacOptions: Seq[String] = Nil,
    previousAnalysis: Analysis,
    previousSetup: Option[CompileSetup],
    analysisMap: File => Option[Analysis] = { _ => None },
    definesClass: Locate.DefinesClass = Locate.definesClass _,
    reporter: Reporter,
    compileOrder: CompileOrder = Mixed,
    skip: Boolean = false,
    incrementalCompilerOptions: IncOptions)(implicit log: Logger): Result = {
    val config = MixedAnalyzingCompiler.makeConfig(scalac, javac, sources, classpath, output, cache,
      progress, options, javacOptions, previousAnalysis, previousSetup, analysisMap, definesClass, reporter,
      compileOrder, skip, incrementalCompilerOptions
    )
    import config.{ currentSetup => setup }

    if (skip) Result(previousAnalysis, setup, false)
    else {
      val (analysis, changed) = compileInternal(MixedAnalyzingCompiler(config)(log))
      Result(analysis, setup, changed)
    }
  }

  /** Actually runs the incremental compiler using the given mixed compiler. This will prune the inputs based on the CompileSetup. */
  private def compileInternal(mixedCompiler: MixedAnalyzingCompiler)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Analysis, Boolean) = {
    val entry = MixedAnalyzingCompiler.classPathLookup(mixedCompiler.config)
    import mixedCompiler.config._
    import mixedCompiler.config.currentSetup.output
    val sourcesSet = sources.toSet
    val analysis = previousSetup match {
      case Some(previous) if previous.nameHashing != currentSetup.nameHashing =>
        // If the value of the `nameHashing` flag has changed, we have to throw away
        // the previous Analysis completely and start with an empty Analysis object
        // that supports the particular value of the `nameHashing` flag.
        // Otherwise we'll be getting UnsupportedOperationExceptions.
        Analysis.empty(currentSetup.nameHashing)
      case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
      case _ => Incremental.prune(sourcesSet, previousAnalysis)
    }
    // Run the incremental compiler using the mixed compiler we've defined.
    IncrementalCompile(sourcesSet, entry, mixedCompiler.compile, analysis, getAnalysis, output, log, incOptions).swap
  }
}
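// Editor's note: a hedged sketch of driving IC.incrementalCompile with mostly default
// arguments. `scalac`, `javacCompiler`, `out`, `globalsCache`, `reporter`, `incOpts`
// and `log` are assumed to be constructed elsewhere; only the shape of the call and
// of the returned Result is shown here.
val result: IC.Result = IC.incrementalCompile(
  scalac = scalac,
  javac = javacCompiler,
  sources = Seq(new java.io.File("src/main/scala/A.scala")),
  classpath = Seq(new java.io.File("lib/dep.jar")),
  output = out,
  cache = globalsCache,
  previousAnalysis = sbt.inc.Analysis.empty(nameHashing = incOpts.nameHashing),
  previousSetup = None,
  reporter = reporter,
  incrementalCompilerOptions = incOpts)(log)
if (result.hasModified) {
  // Persist result.analysis and result.setup, e.g. via an AnalysisStore.
}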
@ -1,205 +0,0 @@
package sbt.compiler

import java.io.File
import java.lang.ref.{ SoftReference, Reference }

import sbt.classfile.Analyze
import sbt.classpath.ClasspathUtilities
import sbt.compiler.javac.AnalyzingJavaCompiler
import sbt.inc.Locate.DefinesClass
import sbt._
import sbt.inc._
import sbt.inc.Locate
import xsbti.{ AnalysisCallback, Reporter }
import xsbti.api.Source
import xsbti.compile.CompileOrder._
import xsbti.compile._

/** An instance of an analyzing compiler that can run both javac + scalac. */
final class MixedAnalyzingCompiler(
    val scalac: AnalyzingCompiler,
    val javac: AnalyzingJavaCompiler,
    val config: CompileConfiguration,
    val log: Logger) {
  import config._
  import currentSetup._

  private[this] val absClasspath = classpath.map(_.getAbsoluteFile)
  /** Mechanism to work with compiler arguments. */
  private[this] val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)

  /**
   * Compiles the given Java/Scala files.
   *
   * @param include The files to compile right now
   * @param changes A list of dependency changes.
   * @param callback The callback where we report dependency issues.
   */
  def compile(include: Set[File], changes: DependencyChanges, callback: AnalysisCallback): Unit = {
    val outputDirs = outputDirectories(output)
    outputDirs foreach (IO.createDirectory)
    val incSrc = sources.filter(include)
    val (javaSrcs, scalaSrcs) = incSrc partition javaOnly
    logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
    /** compiles the scala code necessary using the analyzing compiler. */
    def compileScala(): Unit =
      if (scalaSrcs.nonEmpty) {
        val sources = if (order == Mixed) incSrc else scalaSrcs
        val arguments = cArgs(Nil, absClasspath, None, options.options)
        timed("Scala compilation", log) {
          compiler.compile(sources, changes, arguments, output, callback, reporter, config.cache, log, progress)
        }
      }
    /**
     * Compiles the Java code necessary. All analysis code is included in this method.
     */
    def compileJava(): Unit =
      if (javaSrcs.nonEmpty) {
        // Runs the analysis portion of Javac.
        timed("Java compile + analysis", log) {
          javac.compile(javaSrcs, options.javacOptions.toArray[String], output, callback, reporter, log, progress)
        }
      }
    // TODO - Maybe on "Mixed" we should try to compile both Scala + Java.
    if (order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() }
  }

  private[this] def outputDirectories(output: Output): Seq[File] = output match {
    case single: SingleOutput => List(single.outputDirectory)
    case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory)
  }
  /** Debugging method to time how long it takes to run various compilation tasks. */
  private[this] def timed[T](label: String, log: Logger)(t: => T): T = {
    val start = System.nanoTime
    val result = t
    val elapsed = System.nanoTime - start
    log.debug(label + " took " + (elapsed / 1e9) + " s")
    result
  }

  private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]): Unit = {
    val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
    val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
    val combined = scalaMsg ++ javaMsg
    if (combined.nonEmpty)
      log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
  }

  /** Returns true if the file is java. */
  private[this] def javaOnly(f: File) = f.getName.endsWith(".java")
}

/**
 * This is a compiler that mixes the `sbt.compiler.AnalyzingCompiler` for Scala incremental compilation
 * with a `xsbti.JavaCompiler`, allowing cross-compilation of mixed Java/Scala projects with analysis output.
 *
 * NOTE: this class *defines* how to run one step of cross-Java-Scala compilation and then delegates
 * down to the incremental compiler for the rest.
 */
object MixedAnalyzingCompiler {

  def makeConfig(scalac: AnalyzingCompiler,
    javac: xsbti.compile.JavaCompiler,
    sources: Seq[File],
    classpath: Seq[File],
    output: Output,
    cache: GlobalsCache,
    progress: Option[CompileProgress] = None,
    options: Seq[String] = Nil,
    javacOptions: Seq[String] = Nil,
    previousAnalysis: Analysis,
    previousSetup: Option[CompileSetup],
    analysisMap: File => Option[Analysis] = { _ => None },
    definesClass: DefinesClass = Locate.definesClass _,
    reporter: Reporter,
    compileOrder: CompileOrder = Mixed,
    skip: Boolean = false,
    incrementalCompilerOptions: IncOptions): CompileConfiguration =
    {
      val compileSetup = new CompileSetup(output, new CompileOptions(options, javacOptions),
        scalac.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing)
      config(
        sources,
        classpath,
        compileSetup,
        progress,
        previousAnalysis,
        previousSetup,
        analysisMap,
        definesClass,
        scalac,
        javac,
        reporter,
        skip,
        cache,
        incrementalCompilerOptions)
    }

  def config(
    sources: Seq[File],
    classpath: Seq[File],
    setup: CompileSetup,
    progress: Option[CompileProgress],
    previousAnalysis: Analysis,
    previousSetup: Option[CompileSetup],
    analysis: File => Option[Analysis],
    definesClass: DefinesClass,
    compiler: AnalyzingCompiler,
    javac: xsbti.compile.JavaCompiler,
    reporter: Reporter,
    skip: Boolean,
    cache: GlobalsCache,
    incrementalCompilerOptions: IncOptions): CompileConfiguration = {
    import CompileSetup._
    new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup,
      progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions)
  }

  /** Returns the search classpath (for dependencies) and a function which can also do so. */
  def searchClasspathAndLookup(config: CompileConfiguration): (Seq[File], String => Option[File]) = {
    import config._
    import currentSetup._
    val absClasspath = classpath.map(_.getAbsoluteFile)
    val apiOption = (api: Either[Boolean, Source]) => api.right.toOption
    val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
    val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath)
    (searchClasspath, Locate.entry(searchClasspath, definesClass))
  }

  /** Returns a "lookup file for a given class name" function. */
  def classPathLookup(config: CompileConfiguration): String => Option[File] =
    searchClasspathAndLookup(config)._2

  def apply(config: CompileConfiguration)(implicit log: Logger): MixedAnalyzingCompiler = {
    import config._
    val (searchClasspath, entry) = searchClasspathAndLookup(config)
    // Construct a compiler which can handle both java and scala sources.
    new MixedAnalyzingCompiler(
      compiler,
      // TODO - Construction of analyzing Java compiler MAYBE should be earlier...
      new AnalyzingJavaCompiler(javac, classpath, compiler.scalaInstance, entry, searchClasspath),
|
||||
config,
|
||||
log
|
||||
)
|
||||
}
|
||||
|
||||
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
|
||||
args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)
|
||||
private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
|
||||
options.dropWhile(_ != CompilerArguments.BootClasspathOption).slice(1, 2).headOption.toList.flatMap(IO.parseClasspath)
|
||||
|
||||
private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
|
||||
private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
|
||||
synchronized {
|
||||
cache get file flatMap { ref => Option(ref.get) } getOrElse {
|
||||
val b = backing
|
||||
cache.put(file, new SoftReference(b))
|
||||
b
|
||||
}
|
||||
}
|
||||
|
||||
/** Create a an analysis store cache at the desired location. */
|
||||
def staticCachedStore(cacheFile: File) = staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile))))
|
||||
|
||||
}
|
||||
|
|
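// Editorial sketch (not part of the original sources): `staticCache` above memoizes
// AnalysisStore instances per file behind SoftReferences, so entries can be reclaimed
// under memory pressure and rebuilt on demand. A minimal standalone version of the same
// pattern, with hypothetical names, assuming only java.lang.ref and the standard library:
object SoftMemoSketch {
  import java.lang.ref.SoftReference
  import scala.collection.mutable

  final class SoftMemo[K, V <: AnyRef](build: K => V) {
    private[this] val refs = new mutable.HashMap[K, SoftReference[V]]
    def apply(key: K): V = synchronized {
      // reuse the cached value unless it was never built or has been collected
      refs.get(key).flatMap(r => Option(r.get)) getOrElse {
        val built = build(key)
        refs.put(key, new SoftReference(built))
        built
      }
    }
  }
}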
@@ -1,98 +0,0 @@
package sbt.compiler.javac

import java.io.File

import sbt._
import sbt.classfile.Analyze
import sbt.classpath.ClasspathUtilities
import sbt.compiler.CompilerArguments
import sbt.inc.Locate
import xsbti.api.Source
import xsbti.compile._
import xsbti.{ AnalysisCallback, Reporter }

/**
 * This is a Java compiler which will also report any discovered source dependencies/APIs out via
 * an analysis callback.
 *
 * @param searchClasspath Differs from classpath in that we look up binary dependencies via this classpath.
 * @param classLookup A mechanism by which we can figure out if a JAR contains a classfile.
 */
final class AnalyzingJavaCompiler private[sbt] (
    val javac: xsbti.compile.JavaCompiler,
    val classpath: Seq[File],
    val scalaInstance: xsbti.compile.ScalaInstance,
    val classLookup: (String => Option[File]),
    val searchClasspath: Seq[File]) {
  /**
   * Compile some Java code using the currently configured compiler.
   *
   * @param sources The sources to compile
   * @param options The options for the Java compiler
   * @param output The output configuration for this compiler
   * @param callback A callback to report discovered source/binary dependencies on.
   * @param reporter A reporter where semantic compiler failures can be reported.
   * @param log A place where we can log debugging/error messages.
   * @param progressOpt An optional compilation progress reporter, through which we report which files are currently being compiled.
   */
  def compile(sources: Seq[File], options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, log: Logger, progressOpt: Option[CompileProgress]): Unit = {
    if (sources.nonEmpty) {
      val absClasspath = classpath.map(_.getAbsoluteFile)
      @annotation.tailrec def ancestor(f1: File, f2: File): Boolean =
        if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)
      // Here we outline "chunks" of compiles we need to run so that the .class files end up in the right
      // location for Java.
      val chunks: Map[Option[File], Seq[File]] = output match {
        case single: SingleOutput => Map(Some(single.outputDirectory) -> sources)
        case multi: MultipleOutput =>
          sources groupBy { src =>
            multi.outputGroups find { out => ancestor(out.sourceDirectory, src) } map (_.outputDirectory)
          }
      }
      // Report an error for any source files that have no mapped output directory.
      chunks.get(None) foreach { srcs =>
        log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(","))
      }
      // Here we try to memoize (cache) the known class files in the output directory.
      val memo = for ((Some(outputDirectory), srcs) <- chunks) yield {
        val classesFinder = PathFinder(outputDirectory) ** "*.class"
        (classesFinder, classesFinder.get, srcs)
      }
      // Here we construct a class-loader we'll use to load + analyze the compiled class files.
      val loader = ClasspathUtilities.toLoader(searchClasspath)
      // TODO - Perhaps we just record task 0/2 here
      timed("Java compilation", log) {
        try javac.compileWithReporter(sources.toArray, absClasspath.toArray, output, options.toArray, reporter, log)
        catch {
          // Handle older APIs
          case _: NoSuchMethodError =>
            javac.compile(sources.toArray, absClasspath.toArray, output, options.toArray, log)
        }
      }
      // TODO - Perhaps we just record task 1/2 here

      /** Reads the API information directly from the Class[_] object. Used when analyzing dependencies. */
      def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = {
        val (api, inherits) = ClassToAPI.process(classes)
        callback.api(source, api)
        inherits.map(_.getName)
      }
      // Runs the analysis portion of Javac.
      timed("Java analysis", log) {
        for ((classesFinder, oldClasses, srcs) <- memo) {
          val newClasses = Set(classesFinder.get: _*) -- oldClasses
          Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI)
        }
      }
      // TODO - Perhaps we just record task 2/2 here
    }
  }
  /** Debugging method to time how long it takes to run various compilation tasks. */
  private[this] def timed[T](label: String, log: Logger)(t: => T): T = {
    val start = System.nanoTime
    val result = t
    val elapsed = System.nanoTime - start
    log.debug(label + " took " + (elapsed / 1e9) + " s")
    result
  }
}
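// Editorial sketch (not part of the original sources): the "chunking" above assigns each
// source to the output group whose source directory is one of its ancestors. The same
// grouping in isolation, using plain (sourceDirectory, outputDirectory) pairs as a
// hypothetical stand-in for xsbti.compile.OutputGroup:
object ChunkSketch {
  import java.io.File

  @annotation.tailrec
  private def ancestor(dir: File, f: File): Boolean =
    if (f eq null) false else if (dir == f) true else ancestor(dir, f.getParentFile)

  // sources with no matching group end up under the key None, like chunks.get(None) above
  def chunk(groups: Seq[(File, File)], sources: Seq[File]): Map[Option[File], Seq[File]] =
    sources groupBy { src =>
      groups collectFirst { case (srcDir, outDir) if ancestor(srcDir, src) => outDir }
    }
}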
@@ -1,7 +0,0 @@
Simple Build Tool: Compiler Interface Component
Copyright 2008, 2009, 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)

Portions based on code from the Scala compiler.
Copyright 2002-2008 EPFL, Lausanne
Licensed under BSD-style license (see licenses/LICENSE_Scala)
@@ -1,89 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009, 2010, 2011 Mark Harrah
 */
package xsbt

import java.io.File
import java.util.{ Arrays, Comparator }
import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
import io.{ AbstractFile, PlainFile, ZipArchive }
import plugins.{ Plugin, PluginComponent }
import symtab.Flags
import scala.collection.mutable.{ HashMap, HashSet, ListBuffer }
import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType }

object API {
  val name = "xsbt-api"
}

final class API(val global: CallbackGlobal) extends Compat {
  import global._

  @inline def debug(msg: => String) = if (settings.verbose.value) inform(msg)

  def newPhase(prev: Phase) = new ApiPhase(prev)
  class ApiPhase(prev: Phase) extends Phase(prev) {
    override def description = "Extracts the public API from source files."
    def name = API.name
    def run: Unit =
      {
        val start = System.currentTimeMillis
        currentRun.units.foreach(processUnit)
        val stop = System.currentTimeMillis
        debug("API phase took: " + ((stop - start) / 1000.0) + " s")
      }
    def processUnit(unit: CompilationUnit) = if (!unit.isJava) processScalaUnit(unit)
    def processScalaUnit(unit: CompilationUnit): Unit = {
      val sourceFile = unit.source.file.file
      debug("Traversing " + sourceFile)
      val extractApi = new ExtractAPI[global.type](global, sourceFile)
      val traverser = new TopLevelHandler(extractApi)
      traverser.apply(unit.body)
      if (global.callback.nameHashing) {
        val extractUsedNames = new ExtractUsedNames[global.type](global)
        val names = extractUsedNames.extract(unit)
        debug(sourceFile + " contains the following used names: " + names)
        names foreach { (name: String) => callback.usedName(sourceFile, name) }
      }
      val packages = traverser.packages.toArray[String].map(p => new xsbti.api.Package(p))
      val source = new xsbti.api.SourceAPI(packages, traverser.definitions.toArray[xsbti.api.Definition])
      extractApi.forceStructures()
      callback.api(sourceFile, source)
    }
  }

  private final class TopLevelHandler(extractApi: ExtractAPI[global.type]) extends TopLevelTraverser {
    val packages = new HashSet[String]
    val definitions = new ListBuffer[xsbti.api.Definition]
    def `class`(c: Symbol): Unit = {
      definitions += extractApi.classLike(c.owner, c)
    }
    /** Record packages declared in the source file. */
    def `package`(p: Symbol): Unit = {
      if ((p eq null) || p == NoSymbol || p.isRoot || p.isRootPackage || p.isEmptyPackageClass || p.isEmptyPackage)
        ()
      else {
        packages += p.fullName
        `package`(p.enclosingPackage)
      }
    }
  }

  private abstract class TopLevelTraverser extends Traverser {
    def `class`(s: Symbol)
    def `package`(s: Symbol)
    override def traverse(tree: Tree): Unit = {
      tree match {
        case (_: ClassDef | _: ModuleDef) if isTopLevel(tree.symbol) => `class`(tree.symbol)
        case p: PackageDef =>
          `package`(p.symbol)
          super.traverse(tree)
        case _ =>
      }
    }
    def isTopLevel(sym: Symbol): Boolean =
      (sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic &&
        !sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA)
  }

}
@@ -1,45 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
import io.{ AbstractFile, PlainFile, ZipArchive }
import plugins.{ Plugin, PluginComponent }
import scala.collection.mutable.{ HashMap, HashSet, Map, Set }

import java.io.File
import java.util.zip.ZipFile
import xsbti.AnalysisCallback

object Analyzer {
  def name = "xsbt-analyzer"
}
final class Analyzer(val global: CallbackGlobal) extends LocateClassFile {
  import global._

  def newPhase(prev: Phase): Phase = new AnalyzerPhase(prev)
  private class AnalyzerPhase(prev: Phase) extends Phase(prev) {
    override def description = "Finds concrete instances of provided superclasses, and application entry points."
    def name = Analyzer.name
    def run {
      for (unit <- currentRun.units if !unit.isJava) {
        val sourceFile = unit.source.file.file
        // build list of generated classes
        for (iclass <- unit.icode) {
          val sym = iclass.symbol
          def addGenerated(separatorRequired: Boolean): Unit = {
            for (classFile <- outputDirs map (fileForClass(_, sym, separatorRequired)) find (_.exists))
              callback.generatedClass(sourceFile, classFile, className(sym, '.', separatorRequired))
          }
          if (sym.isModuleClass && !sym.isImplClass) {
            if (isTopLevelModule(sym) && sym.companionClass == NoSymbol)
              addGenerated(false)
            addGenerated(true)
          } else
            addGenerated(false)
        }
      }
    }
  }
}
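// Editorial sketch (not part of the original sources): addGenerated above records both a
// "Foo.class" and a "Foo$.class" for a top-level object without a companion class, because
// scalac emits a forwarder class plus the module class with a '$' suffix. A hypothetical
// name computation illustrating the two shapes:
object ClassFileNameSketch {
  def fileNames(fullName: String, isStandaloneObject: Boolean): List[String] = {
    val base = fullName.replace('.', '/')
    if (isStandaloneObject) List(base + ".class", base + "$.class") // forwarder + module class
    else List(base + ".class")
  }
}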
@@ -1,28 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Jason Zaugg
 */
package xsbt

import scala.tools.nsc.{ CompilerCommand, Settings }

object Command {
  /**
   * Construct a CompilerCommand using reflection, to be compatible with Scalac before and after
   * <a href="https://lampsvn.epfl.ch/trac/scala/changeset/21274">r21274</a>
   */
  def apply(arguments: List[String], settings: Settings): CompilerCommand = {
    def constr(params: Class[_]*) = classOf[CompilerCommand].getConstructor(params: _*)
    try {
      constr(classOf[List[_]], classOf[Settings]).newInstance(arguments, settings)
    } catch {
      case e: NoSuchMethodException =>
        constr(classOf[List[_]], classOf[Settings], classOf[Function1[_, _]], classOf[Boolean]).newInstance(arguments, settings, (s: String) => throw new RuntimeException(s), false.asInstanceOf[AnyRef])
    }
  }

  def getWarnFatal(settings: Settings): Boolean =
    settings.Xwarnfatal.value

  def getNoWarn(settings: Settings): Boolean =
    settings.nowarn.value
}
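// Editorial sketch (not part of the original sources): Command.apply above probes
// CompilerCommand constructors reflectively so one source tree links against several
// scalac versions. The same "try the new signature, fall back on NoSuchMethodException"
// move against a hypothetical Greeter class that gained a parameter in a later version:
object ReflectiveFallbackSketch {
  class Greeter(name: String) { def greet = "hi " + name } // the "old" constructor shape

  def makeGreeter(name: String): Greeter = {
    val cls = classOf[Greeter]
    try // assume the newer (String, boolean) constructor first
      cls.getConstructor(classOf[String], classOf[Boolean]).newInstance(name, java.lang.Boolean.TRUE)
    catch {
      case _: NoSuchMethodException => // older version: only (String) exists
        cls.getConstructor(classOf[String]).newInstance(name)
    }
  }
}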
@@ -1,129 +0,0 @@
package xsbt

import scala.tools.nsc.Global
import scala.tools.nsc.symtab.Flags

/**
 * Collection of hacks that make it possible for the compiler interface
 * to stay source compatible with Scala compilers 2.9, 2.10 and 2.11.
 *
 * One common technique used in the `Compat` class is the use of implicit conversions to deal
 * with methods that were renamed or moved between Scala compiler versions.
 *
 * Let's pick a specific example. In Scala 2.9 and 2.10 there was a method called `toplevelClass`
 * defined on `Symbol`. In 2.10 that method was deprecated and the `enclosingTopLevelClass`
 * method was introduced as a replacement. In Scala 2.11 the old `toplevelClass` method was
 * removed. How can we pick the right version based on the availability of those two methods?
 *
 * We define an implicit conversion from Symbol to a class that contains both method definitions:
 *
 *   implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym)
 *   class SymbolCompat(sym: Symbol) {
 *     def enclosingTopLevelClass: Symbol = sym.toplevelClass
 *     def toplevelClass: Symbol =
 *       throw new RuntimeException("For source compatibility only: should not get here.")
 *   }
 *
 * We assume that client code (code in the compiler interface) always calls the `enclosingTopLevelClass`
 * method. If we compile that code against 2.11, it will link directly against the method provided by
 * Symbol. However, if we compile against 2.9 or 2.10, `enclosingTopLevelClass` won't be found, so the
 * implicit conversion defined above kicks in. That conversion provides an `enclosingTopLevelClass`
 * that simply forwards to the old `toplevelClass` method available in 2.9 and 2.10, so that
 * method is called in the end. There's one twist: `enclosingTopLevelClass` forwards to
 * `toplevelClass`, which doesn't exist in 2.11. Therefore, we also need to define a `toplevelClass`
 * that is itself provided by an implicit conversion. However, we should never reach that method
 * at runtime if either `enclosingTopLevelClass` or `toplevelClass` is available on Symbol, so it
 * is purely a source-compatibility stub.
 *
 * The technique described above is used in several places below.
 */
abstract class Compat {
  val global: Global
  import global._
  val LocalChild = global.tpnme.LOCAL_CHILD
  val Nullary = global.NullaryMethodType
  val ScalaObjectClass = definitions.ScalaObjectClass

  private[this] final class MiscCompat {
    // in 2.9, nme.LOCALCHILD was renamed to tpnme.LOCAL_CHILD
    def tpnme = nme
    def LOCAL_CHILD = nme.LOCALCHILD
    def LOCALCHILD = sourceCompatibilityOnly

    // in 2.10, ScalaObject was removed
    def ScalaObjectClass = definitions.ObjectClass

    def NullaryMethodType = NullaryMethodTpe

    def MACRO = DummyValue

    // in 2.10, sym.moduleSuffix exists, but genJVM.moduleSuffix(Symbol) does not
    def moduleSuffix(sym: Symbol): String = sourceCompatibilityOnly
    // in 2.11 genJVM does not exist
    def genJVM = this
  }
  // in 2.9, NullaryMethodType was added to Type
  object NullaryMethodTpe {
    def unapply(t: Type): Option[Type] = None
  }

  protected implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym)
  protected final class SymbolCompat(sym: Symbol) {
    // before 2.10, sym.moduleSuffix doesn't exist, but genJVM.moduleSuffix does
    def moduleSuffix = global.genJVM.moduleSuffix(sym)

    def enclosingTopLevelClass: Symbol = sym.toplevelClass
    def toplevelClass: Symbol = sourceCompatibilityOnly
  }

  val DummyValue = 0
  def hasMacro(s: Symbol): Boolean =
    {
      val MACRO = Flags.MACRO // will be DummyValue for versions before 2.10
      MACRO != DummyValue && s.hasFlag(MACRO)
    }
  def moduleSuffix(s: Symbol): String = s.moduleSuffix

  private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.")

  private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat

  object MacroExpansionOf {
    def unapply(tree: Tree): Option[Tree] = {

      // MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x
      object Compat {
        class MacroExpansionAttachment(val original: Tree)

        // Trees have no attachments in 2.8.x and 2.9.x
        implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree)
        class WithAttachments(val tree: Tree) {
          object EmptyAttachments {
            def all = Set.empty[Any]
          }
          val attachments = EmptyAttachments
        }
      }
      import Compat._

      locally {
        // Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all
        import global._ // this is where MEA lives in 2.10.x

        // `original` has been renamed to `expandee` in 2.11.x
        implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att)
        class WithExpandee(att: MacroExpansionAttachment) {
          def expandee: Tree = att.original
        }

        locally {
          import analyzer._ // this is where MEA lives in 2.11.x
          tree.attachments.all.collect {
            case att: MacroExpansionAttachment => att.expandee
          } headOption
        }
      }
    }
  }
}
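// Editorial sketch (not part of the original sources): the renaming trick from the
// scaladoc above in miniature, with a hypothetical Thing that only has the old method
// name. Client code always writes `newName`; when the receiver lacks it, the implicit
// wrapper supplies it by forwarding to the old API:
object CompatShimSketch {
  import scala.language.implicitConversions

  class Thing { def oldName: String = "value" } // stands in for a pre-rename Symbol

  implicit def thingCompat(t: Thing): ThingCompat = new ThingCompat(t)
  final class ThingCompat(t: Thing) {
    def newName: String = t.oldName // forward the new name to the old API
    // stub so this code would still compile against a Thing that dropped oldName;
    // never reached when either name exists on Thing itself
    def oldName: String = sys.error("source compatibility only")
  }

  def use(t: Thing): String = t.newName // resolves through the implicit wrapper
}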
@@ -1,254 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity }
import xsbti.compile._
import scala.tools.nsc.{ backend, io, reporters, symtab, util, Phase, Global, Settings, SubComponent }
import scala.tools.nsc.interactive.RangePositions
import backend.JavaPlatform
import scala.tools.util.PathResolver
import symtab.SymbolLoaders
import util.{ ClassPath, DirectoryClassPath, MergedClassPath, JavaClassPath }
import ClassPath.{ ClassPathContext, JavaContext }
import io.AbstractFile
import scala.annotation.tailrec
import scala.collection.mutable
import Log.debug
import java.io.File

final class CompilerInterface {
  def newCompiler(options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter, resident: Boolean): CachedCompiler =
    new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate), resident)

  def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, cached: CachedCompiler): Unit =
    cached.run(sources, changes, callback, log, delegate, progress)
}
// for compatibility with Scala versions without Global.registerTopLevelSym (2.8.1 and earlier)
sealed trait GlobalCompat { self: Global =>
  def registerTopLevelSym(sym: Symbol): Unit
  sealed trait RunCompat {
    def informUnitStarting(phase: Phase, unit: CompilationUnit): Unit = ()
  }
}
sealed abstract class CallbackGlobal(settings: Settings, reporter: reporters.Reporter, output: Output) extends Global(settings, reporter) with GlobalCompat {
  def callback: AnalysisCallback
  def findClass(name: String): Option[(AbstractFile, Boolean)]
  lazy val outputDirs: Iterable[File] = {
    output match {
      case single: SingleOutput => List(single.outputDirectory)
      case multi: MultipleOutput => multi.outputGroups.toStream map (_.outputDirectory)
    }
  }
  // Map source files to public inherited dependencies. These dependencies are tracked as the symbol for the dealiased base class.
  val inheritedDependencies = new mutable.HashMap[File, mutable.Set[Symbol]]
  def addInheritedDependencies(file: File, deps: Iterable[Symbol]): Unit = {
    inheritedDependencies.getOrElseUpdate(file, new mutable.HashSet) ++= deps
  }
}
class InterfaceCompileFailed(val arguments: Array[String], val problems: Array[Problem], override val toString: String) extends xsbti.CompileFailed

class InterfaceCompileCancelled(val arguments: Array[String], override val toString: String) extends xsbti.CompileCancelled

private final class WeakLog(private[this] var log: Logger, private[this] var delegate: Reporter) {
  def apply(message: String): Unit = {
    assert(log ne null, "Stale reference to logger")
    log.error(Message(message))
  }
  def logger: Logger = log
  def reporter: Reporter = delegate
  def clear(): Unit = {
    log = null
    delegate = null
  }
}

private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog, resident: Boolean) extends CachedCompiler {
  val settings = new Settings(s => initialLog(s))
  output match {
    case multi: MultipleOutput =>
      for (out <- multi.outputGroups)
        settings.outputDirs.add(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)
    case single: SingleOutput =>
      settings.outputDirs.setSingleOutput(single.outputDirectory.getAbsolutePath)
  }

  val command = Command(args.toList, settings)
  private[this] val dreporter = DelegatingReporter(settings, initialLog.reporter)
  try {
    if (!noErrors(dreporter)) {
      dreporter.printSummary()
      handleErrors(dreporter, initialLog.logger)
    }
  } finally
    initialLog.clear()

  def noErrors(dreporter: DelegatingReporter) = !dreporter.hasErrors && command.ok

  def commandArguments(sources: Array[File]): Array[String] =
    (command.settings.recreateArgs ++ sources.map(_.getAbsolutePath)).toArray[String]

  def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress): Unit = synchronized {
    debug(log, "Running cached compiler " + hashCode.toHexString + ", interfacing (CompilerInterface) with Scala compiler " + scala.tools.nsc.Properties.versionString)
    val dreporter = DelegatingReporter(settings, delegate)
    try { run(sources.toList, changes, callback, log, dreporter, progress) }
    finally { dreporter.dropDelegate() }
  }
  private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, dreporter: DelegatingReporter, compileProgress: CompileProgress): Unit = {
    if (command.shouldStopWithInfo) {
      dreporter.info(null, command.getInfoMessage(compiler), true)
      throw new InterfaceCompileFailed(args, Array(), "Compiler option supplied that disabled actual compilation.")
    }
    if (noErrors(dreporter)) {
      debug(log, args.mkString("Calling Scala compiler with arguments (CompilerInterface):\n\t", "\n\t", ""))
      compiler.set(callback, dreporter)
      val run = new compiler.Run with compiler.RunCompat {
        override def informUnitStarting(phase: Phase, unit: compiler.CompilationUnit): Unit = {
          compileProgress.startUnit(phase.name, unit.source.path)
        }
        override def progress(current: Int, total: Int): Unit = {
          if (!compileProgress.advance(current, total))
            cancel
        }
      }
      val sortedSourceFiles = sources.map(_.getAbsolutePath).sortWith(_ < _)
      run compile sortedSourceFiles
      processUnreportedWarnings(run)
      dreporter.problems foreach { p => callback.problem(p.category, p.position, p.message, p.severity, true) }
    }
    dreporter.printSummary()
    if (!noErrors(dreporter)) handleErrors(dreporter, log)
    // The case where compilation was cancelled _after_ some errors were reported is handled
    // by the line above, so errors are still reported properly, just potentially not all of
    // them (because we cancelled the compilation).
    if (dreporter.cancelled) handleCompilationCancellation(dreporter, log)
  }
  def handleErrors(dreporter: DelegatingReporter, log: Logger): Nothing =
    {
      debug(log, "Compilation failed (CompilerInterface)")
      throw new InterfaceCompileFailed(args, dreporter.problems, "Compilation failed")
    }
  def handleCompilationCancellation(dreporter: DelegatingReporter, log: Logger): Nothing = {
    assert(dreporter.cancelled, "We should get here only when compilation got cancelled")
    debug(log, "Compilation cancelled (CompilerInterface)")
    throw new InterfaceCompileCancelled(args, "Compilation has been cancelled")
  }
  def processUnreportedWarnings(run: compiler.Run): Unit = {
    // allConditionalWarnings and the ConditionalWarning class are only in 2.10+
    final class CondWarnCompat(val what: String, val warnings: mutable.ListBuffer[(compiler.Position, String)])
    implicit def compat(run: AnyRef): Compat = new Compat
    final class Compat { def allConditionalWarnings = List[CondWarnCompat]() }

    val warnings = run.allConditionalWarnings
    if (warnings.nonEmpty)
      compiler.logUnreportedWarnings(warnings.map(cw => ("" /*cw.what*/ , cw.warnings.toList)))
  }

  val compiler: Compiler = {
    if (command.settings.Yrangepos.value)
      new Compiler() with RangePositions // unnecessary in 2.11
    else
      new Compiler()
  }
  class Compiler extends CallbackGlobal(command.settings, dreporter, output) {
    object dummy // temporary fix for #4426
    object sbtAnalyzer extends {
      val global: Compiler.this.type = Compiler.this
      val phaseName = Analyzer.name
      val runsAfter = List("jvm")
      override val runsBefore = List("terminal")
      val runsRightAfter = None
    } with SubComponent {
      val analyzer = new Analyzer(global)
      def newPhase(prev: Phase) = analyzer.newPhase(prev)
      def name = phaseName
    }

    /** Phase that extracts dependency information */
    object sbtDependency extends {
      val global: Compiler.this.type = Compiler.this
      val phaseName = Dependency.name
      val runsAfter = List(API.name)
      override val runsBefore = List("refchecks")
      // keep API and dependency close to each other
      // we might want to merge them in the future, and even if we don't
      // do that, it makes sense to run those phases next to each other
      val runsRightAfter = Some(API.name)
    } with SubComponent {
      val dependency = new Dependency(global)
      def newPhase(prev: Phase) = dependency.newPhase(prev)
      def name = phaseName
    }

    /**
     * This phase walks trees and constructs a representation of the public API, which is used for incremental recompilation.
     *
     * We extract the api after picklers, since that way we see the same symbol information/structure
     * irrespective of whether we were typechecking from source / unpickling previously compiled classes.
     */
    object apiExtractor extends {
      val global: Compiler.this.type = Compiler.this
      val phaseName = API.name
      val runsAfter = List("typer")
      override val runsBefore = List("erasure")
      // allow apiExtractor's phase to be overridden using the sbt.api.phase property
      // (in case someone would like the old timing, which was right after typer)
      // TODO: consider migrating to simply specifying "pickler" for `runsAfter` and "uncurry" for `runsBefore`
      val runsRightAfter = Option(System.getProperty("sbt.api.phase")) orElse Some("pickler")
    } with SubComponent {
      val api = new API(global)
      def newPhase(prev: Phase) = api.newPhase(prev)
      def name = phaseName
    }

    override lazy val phaseDescriptors =
      {
        phasesSet += sbtAnalyzer
        phasesSet += sbtDependency
        phasesSet += apiExtractor
        superComputePhaseDescriptors
      }
    // Required because computePhaseDescriptors is private in 2.8 (changed to protected sometime later).
    private[this] def superComputePhaseDescriptors() = superCall("computePhaseDescriptors").asInstanceOf[List[SubComponent]]
    private[this] def superDropRun(): Unit =
      try { superCall("dropRun") } catch { case e: NoSuchMethodException => () } // dropRun not in 2.8.1
    private[this] def superCall(methodName: String): AnyRef =
      {
        val meth = classOf[Global].getDeclaredMethod(methodName)
        meth.setAccessible(true)
        meth.invoke(this)
      }
    def logUnreportedWarnings(seq: Seq[(String, List[(Position, String)])]): Unit = // Scala 2.10.x and later
      {
        val drep = reporter.asInstanceOf[DelegatingReporter]
        for ((what, warnings) <- seq; (pos, msg) <- warnings) yield callback.problem(what, drep.convert(pos), msg, Severity.Warn, false)
      }

    def set(callback: AnalysisCallback, dreporter: DelegatingReporter): Unit = {
      this.callback0 = callback
      reporter = dreporter
    }
    def clear(): Unit = {
      callback0 = null
      superDropRun()
      reporter = null
    }

    def findClass(name: String): Option[(AbstractFile, Boolean)] =
      getOutputClass(name).map(f => (f, true)) orElse findOnClassPath(name).map(f => (f, false))

    def getOutputClass(name: String): Option[AbstractFile] =
      {
        // This could be improved if a hint where to look is given.
        val className = name.replace('.', '/') + ".class"
        outputDirs map (new File(_, className)) find (_.exists) map (AbstractFile.getFile(_))
      }

    def findOnClassPath(name: String): Option[AbstractFile] =
      classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]])

    private[this] var callback0: AnalysisCallback = null
    def callback: AnalysisCallback = callback0
  }
}
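// Editorial sketch (not part of the original sources): `superCall` above invokes Global
// methods whose visibility changed across scalac versions by lifting the access check.
// The same move against a hypothetical class with a private zero-argument method:
object PrivateCallSketch {
  class Hidden { private def secret(): String = "42" }

  def callPrivate(target: AnyRef, methodName: String): AnyRef = {
    val meth = target.getClass.getDeclaredMethod(methodName)
    meth.setAccessible(true) // bypass the private-access check
    meth.invoke(target)
  }

  def demo: AnyRef = callPrivate(new Hidden, "secret") // "42"
}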
@@ -1,97 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import xsbti.Logger
import scala.tools.nsc.{ GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings }
import scala.tools.nsc.interpreter.InteractiveReader
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.util.ClassPath

class ConsoleInterface {
  def commandArguments(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Array[String] =
    MakeSettings.sync(args, bootClasspathString, classpathString, log).recreateArgs.toArray[String]

  def run(args: Array[String], bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, bindNames: Array[String], bindValues: Array[Any], log: Logger): Unit = {
    lazy val interpreterSettings = MakeSettings.sync(args.toList, log)
    val compilerSettings = MakeSettings.sync(args, bootClasspathString, classpathString, log)

    if (!bootClasspathString.isEmpty)
      compilerSettings.bootclasspath.value = bootClasspathString
    compilerSettings.classpath.value = classpathString
    log.info(Message("Starting scala interpreter..."))
    log.info(Message(""))
    val loop = new InterpreterLoop {

      override def createInterpreter() = {

        if (loader ne null) {
          in = InteractiveReader.createDefault()
          interpreter = new Interpreter(settings) {
            override protected def parentClassLoader = if (loader eq null) super.parentClassLoader else loader
            override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
          }
          interpreter.setContextClassLoader()
        } else
          super.createInterpreter()

        def bind(values: Seq[(String, Any)]): Unit = {
          // for 2.8 compatibility
          final class Compat {
            def bindValue(id: String, value: Any) =
              interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value)
          }
          implicit def compat(a: AnyRef): Compat = new Compat

          for ((id, value) <- values)
            interpreter.beQuietDuring(interpreter.bindValue(id, value))
        }

        bind(bindNames zip bindValues)

        if (!initialCommands.isEmpty)
          interpreter.interpret(initialCommands)
      }
      override def closeInterpreter(): Unit = {
        if (!cleanupCommands.isEmpty)
          interpreter.interpret(cleanupCommands)
        super.closeInterpreter()
      }
    }
    loop.main(if (loader eq null) compilerSettings else interpreterSettings)
  }
}
object MakeSettings {
  def apply(args: List[String], log: Logger) =
    {
      val command = new GenericRunnerCommand(args, message => log.error(Message(message)))
      if (command.ok)
        command.settings
      else
        throw new InterfaceCompileFailed(Array(), Array(), command.usageMsg)
    }

  def sync(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Settings =
    {
      val compilerSettings = sync(args.toList, log)
      if (!bootClasspathString.isEmpty)
        compilerSettings.bootclasspath.value = bootClasspathString
      compilerSettings.classpath.value = classpathString
      compilerSettings
    }

  def sync(options: List[String], log: Logger) =
    {
      val settings = apply(options, log)

      // -Yrepl-sync is only in 2.9.1+
      final class Compat {
        def Yreplsync = settings.BooleanSetting("-Yrepl-sync", "For compatibility only.")
      }
      implicit def compat(s: Settings): Compat = new Compat

      settings.Yreplsync.value = true
      settings
    }
}
@@ -1,102 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009, 2010 Mark Harrah
 */
package xsbt

import xsbti.{ F0, Logger, Maybe }
import java.io.File

private object DelegatingReporter {
  def apply(settings: scala.tools.nsc.Settings, delegate: xsbti.Reporter): DelegatingReporter =
    new DelegatingReporter(Command.getWarnFatal(settings), Command.getNoWarn(settings), delegate)
}

// The following code is based on scala.tools.nsc.reporters.{AbstractReporter, ConsoleReporter}
// Copyright 2002-2009 LAMP/EPFL
// Original author: Martin Odersky
private final class DelegatingReporter(warnFatal: Boolean, noWarn: Boolean, private[this] var delegate: xsbti.Reporter) extends scala.tools.nsc.reporters.Reporter {
  import scala.tools.nsc.util.{ FakePos, NoPosition, Position }

  def dropDelegate(): Unit = { delegate = null }
  def error(msg: String): Unit = error(FakePos("scalac"), msg)

  def printSummary(): Unit = delegate.printSummary()

  override def hasErrors = delegate.hasErrors
  override def hasWarnings = delegate.hasWarnings
  def problems = delegate.problems
  override def comment(pos: Position, msg: String): Unit = delegate.comment(convert(pos), msg)

  override def reset(): Unit = {
    super.reset
    delegate.reset()
  }
  protected def info0(pos: Position, msg: String, rawSeverity: Severity, force: Boolean): Unit = {
    val skip = rawSeverity == WARNING && noWarn
    if (!skip) {
      val severity = if (warnFatal && rawSeverity == WARNING) ERROR else rawSeverity
      delegate.log(convert(pos), msg, convert(severity))
    }
  }
  def convert(posIn: Position): xsbti.Position =
    {
      val pos =
        posIn match {
          case null | NoPosition => NoPosition
          case x: FakePos => x
          case x =>
            posIn.inUltimateSource(posIn.source)
        }
      pos match {
        case NoPosition | FakePos(_) => position(None, None, None, "", None, None, None)
        case _ => makePosition(pos)
      }
    }
  private[this] def makePosition(pos: Position): xsbti.Position =
    {
      val src = pos.source
      val sourcePath = src.file.path
      val sourceFile = src.file.file
      val line = pos.line
      val lineContent = pos.lineContent.stripLineEnd
      val offset = getOffset(pos)
      val pointer = offset - src.lineToOffset(src.offsetToLine(offset))
      val pointerSpace = ((lineContent: Seq[Char]).take(pointer).map { case '\t' => '\t'; case x => ' ' }).mkString
      position(Some(sourcePath), Some(sourceFile), Some(line), lineContent, Some(offset), Some(pointer), Some(pointerSpace))
    }
  private[this] def getOffset(pos: Position): Int =
    {
      // for compatibility with 2.8
      implicit def withPoint(p: Position): WithPoint = new WithPoint(pos)
      final class WithPoint(val p: Position) { def point = p.offset.get }
      pos.point
    }
  private[this] def position(sourcePath0: Option[String], sourceFile0: Option[File], line0: Option[Int], lineContent0: String, offset0: Option[Int], pointer0: Option[Int], pointerSpace0: Option[String]) =
    new xsbti.Position {
      val line = o2mi(line0)
      val lineContent = lineContent0
      val offset = o2mi(offset0)
      val sourcePath = o2m(sourcePath0)
      val sourceFile = o2m(sourceFile0)
      val pointer = o2mi(pointer0)
      val pointerSpace = o2m(pointerSpace0)
      override def toString =
        (sourcePath0, line0) match {
          case (Some(s), Some(l)) => s + ":" + l
          case (Some(s), _) => s + ":"
          case _ => ""
        }
    }

  import xsbti.Severity.{ Info, Warn, Error }
  private[this] def convert(sev: Severity): xsbti.Severity =
    sev match {
      case INFO => Info
      case WARNING => Warn
      case ERROR => Error
    }

  import java.lang.{ Integer => I }
  private[this] def o2mi(opt: Option[Int]): Maybe[I] = opt match { case None => Maybe.nothing[I]; case Some(s) => Maybe.just[I](s) }
  private[this] def o2m[S](opt: Option[S]): Maybe[S] = opt match { case None => Maybe.nothing[S]; case Some(s) => Maybe.just(s) }
}
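// Editorial sketch (not part of the original sources): makePosition above computes the
// caret column as `offset - lineStartOffset` and preserves tabs in the "pointer space"
// so a caret printed under the offending line stays visually aligned. The same
// arithmetic on a plain string, with hypothetical inputs:
object PointerSpaceSketch {
  // returns the whitespace to print before '^' under `lineContent`, caret at `pointer`
  def pointerSpace(lineContent: String, pointer: Int): String =
    lineContent.take(pointer).map { case '\t' => '\t'; case _ => ' ' }

  def demo: String = pointerSpace("\tval x = 1", 5) + "^" // tab kept, caret aligned
}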
@@ -1,202 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import scala.tools.nsc.{ io, symtab, Phase }
import io.{ AbstractFile, PlainFile, ZipArchive }
import symtab.Flags
import xsbti.DependencyContext
import xsbti.DependencyContext._

import java.io.File

object Dependency {
  def name = "xsbt-dependency"
}
/**
 * Extracts dependency information from each compilation unit.
 *
 * This phase uses CompilationUnit.depends and CallbackGlobal.inheritedDependencies
 * to collect all symbols that a given compilation unit depends on. Those symbols are
 * guaranteed to represent Class-like structures.
 *
 * CallbackGlobal.inheritedDependencies is populated by the API phase; see the
 * ExtractAPI class.
 *
 * When a dependency symbol is processed, it is mapped back to either the source file
 * where it is defined (if available in the current compilation run) or the classpath
 * entry it originates from. The Symbol-to-classfile mapping is implemented by
 * LocateClassFile, which we inherit from.
 */
final class Dependency(val global: CallbackGlobal) extends LocateClassFile {
  import global._

  def newPhase(prev: Phase): Phase = new DependencyPhase(prev)
  private class DependencyPhase(prev: Phase) extends Phase(prev) {
    override def description = "Extracts dependency information"
    def name = Dependency.name
    def run {
      for (unit <- currentRun.units if !unit.isJava) {
        // build dependencies structure
        val sourceFile = unit.source.file.file
        if (global.callback.nameHashing) {
          val dependenciesByMemberRef = extractDependenciesByMemberRef(unit)
          for (on <- dependenciesByMemberRef)
            processDependency(on, context = DependencyByMemberRef)

          val dependenciesByInheritance = extractDependenciesByInheritance(unit)
          for (on <- dependenciesByInheritance)
            processDependency(on, context = DependencyByInheritance)
        } else {
          for (on <- unit.depends) processDependency(on, context = DependencyByMemberRef)
          for (on <- inheritedDependencies.getOrElse(sourceFile, Nil: Iterable[Symbol])) processDependency(on, context = DependencyByInheritance)
        }
        /**
         * Handles a dependency on the given symbol by determining whether it represents a term
         * coming from source code (not necessarily compiled in this compilation run) or from
         * a class file, and calls the respective callback method.
         */
        def processDependency(on: Symbol, context: DependencyContext): Unit = {
          def binaryDependency(file: File, className: String) = callback.binaryDependency(file, className, sourceFile, context)
          val onSource = on.sourceFile
          if (onSource == null) {
            classFile(on) match {
              case Some((f, className, inOutDir)) =>
                if (inOutDir && on.isJavaDefined) registerTopLevelSym(on)
                f match {
                  case ze: ZipArchive#Entry => for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) binaryDependency(zipFile, className)
                  case pf: PlainFile => binaryDependency(pf.file, className)
                  case _ => ()
                }
              case None => ()
            }
          } else if (onSource.file != sourceFile)
            callback.sourceDependency(onSource.file, sourceFile, context)
        }
      }
    }
  }

  /**
   * Traverses a given type and collects the results of applying a partial function `pf`.
   *
   * NOTE: This class exists in Scala 2.10 as CollectTypeCollector, but not in earlier
   * versions (like 2.9) of the Scala compiler that the incremental compiler supports, so
   * we had to reimplement it here.
   */
  private final class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser {
    var collected: List[T] = Nil
    def traverse(tpe: Type): Unit = {
      if (pf.isDefinedAt(tpe))
        collected = pf(tpe) :: collected
      mapOver(tpe)
    }
  }

  private abstract class ExtractDependenciesTraverser extends Traverser {
    protected val depBuf = collection.mutable.ArrayBuffer.empty[Symbol]
    protected def addDependency(dep: Symbol): Unit = depBuf += dep
    def dependencies: collection.immutable.Set[Symbol] = {
      // convert to immutable set and remove NoSymbol if we have one
      depBuf.toSet - NoSymbol
    }
  }

  private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser {

    /*
     * Some macros appear to contain themselves as their original tree.
     * We must check that we don't inspect the same tree over and over.
     * See https://issues.scala-lang.org/browse/SI-8486
     *     https://github.com/sbt/sbt/issues/1237
     *     https://github.com/sbt/sbt/issues/1544
     */
    private val inspectedOriginalTrees = collection.mutable.Set.empty[Tree]

    override def traverse(tree: Tree): Unit = {
      tree match {
        case Import(expr, selectors) =>
          selectors.foreach {
            case ImportSelector(nme.WILDCARD, _, null, _) =>
            // in case of a wildcard import we do not rely on any particular name being defined
            // on `expr`; all symbols that are being used will get caught through selections
            case ImportSelector(name: Name, _, _, _) =>
              def lookupImported(name: Name) = expr.symbol.info.member(name)
              // importing a name means importing both a term and a type (if they exist)
              addDependency(lookupImported(name.toTermName))
              addDependency(lookupImported(name.toTypeName))
          }
        case select: Select =>
          addDependency(select.symbol)
        /*
         * Idents are used in a number of situations:
         *  - to refer to a local variable
         *  - to refer to a top-level package (other packages are nested selections)
         *  - to refer to a term defined in the same package as an enclosing class;
         *    this looks fishy, see this thread:
         *    https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion
         */
        case ident: Ident =>
          addDependency(ident.symbol)
        // In some cases (eg. macro annotations), `typeTree.tpe` may be null.
        // See sbt/sbt#1593 and sbt/sbt#1655.
        case typeTree: TypeTree if typeTree.tpe != null =>
          val typeSymbolCollector = new CollectTypeTraverser({
            case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol
          })
          typeSymbolCollector.traverse(typeTree.tpe)
          val deps = typeSymbolCollector.collected.toSet
          deps.foreach(addDependency)
        case Template(parents, self, body) =>
          traverseTrees(body)
        case MacroExpansionOf(original) if inspectedOriginalTrees.add(original) =>
          this.traverse(original)
        case other => ()
      }
      super.traverse(tree)
    }
  }

  private def extractDependenciesByMemberRef(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
    val traverser = new ExtractDependenciesByMemberRefTraverser
    traverser.traverse(unit.body)
    val dependencies = traverser.dependencies
    dependencies.map(enclosingTopLevelClass)
  }

  /** Copied straight from Scala 2.10 as it does not exist in the Scala 2.9 compiler */
  private final def debuglog(msg: => String): Unit = {
    if (settings.debug.value)
      log(msg)
  }

  private final class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser {
    override def traverse(tree: Tree): Unit = tree match {
      case Template(parents, self, body) =>
        // we are using typeSymbol and not typeSymbolDirect because we want
        // type aliases to be expanded
        val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet
        debuglog("Parent type symbols for " + tree.pos + ": " + parentTypeSymbols.map(_.fullName))
        parentTypeSymbols.foreach(addDependency)
        traverseTrees(body)
      case tree => super.traverse(tree)
    }
  }

  private def extractDependenciesByInheritance(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
    val traverser = new ExtractDependenciesByInheritanceTraverser
    traverser.traverse(unit.body)
    val dependencies = traverser.dependencies
    dependencies.map(enclosingTopLevelClass)
  }

  /**
   * We capture enclosing classes only because that's what CompilationUnit.depends does and we don't want
   * to deviate from the old behaviour too much for now.
   */
  private def enclosingTopLevelClass(sym: Symbol): Symbol =
    // for Scala 2.8 and 2.9 this method is provided through SymbolCompat
    sym.enclosingTopLevelClass

}
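// Editorial sketch (not part of the original sources): CollectTypeTraverser above
// reimplements 2.10's CollectTypeCollector: walk a recursive structure and keep whatever
// a partial function matches. The same shape over a hypothetical tree of ints:
object CollectSketch {
  sealed trait T
  final case class Leaf(n: Int) extends T
  final case class Node(children: List[T]) extends T

  def collect[A](t: T)(pf: PartialFunction[T, A]): List[A] = {
    val here = if (pf.isDefinedAt(t)) List(pf(t)) else Nil
    here ::: (t match {
      case Node(cs) => cs.flatMap(collect(_)(pf))
      case _ => Nil
    })
  }

  def demo: List[Int] =
    collect(Node(List(Leaf(1), Node(List(Leaf(2))))))({ case Leaf(n) => n }) // List(1, 2)
}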
@@ -1,533 +0,0 @@
package xsbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.{ Arrays, Comparator }
|
||||
import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
|
||||
import io.{ AbstractFile, PlainFile, ZipArchive }
|
||||
import plugins.{ Plugin, PluginComponent }
|
||||
import symtab.Flags
|
||||
import scala.collection.mutable.{ HashMap, HashSet, ListBuffer }
|
||||
import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType }
|
||||
|
||||
/**
|
||||
* Extracts API representation out of Symbols and Types.
|
||||
*
|
||||
* Each compilation unit should be processed by a fresh instance of this class.
|
||||
*
|
||||
* This class depends on instance of CallbackGlobal instead of regular Global because
|
||||
* it has a call to `addInheritedDependencies` method defined in CallbackGlobal. In the future
|
||||
* we should refactor this code so inherited dependencies are just accumulated in a buffer and
|
||||
* exposed to a client that can pass them to an instance of CallbackGlobal it holds.
|
||||
*/
|
||||
class ExtractAPI[GlobalType <: CallbackGlobal](val global: GlobalType,
|
||||
// Tracks the source file associated with the CompilationUnit currently being processed by the API phase.
|
||||
// This is used when recording inheritance dependencies.
|
||||
sourceFile: File) extends Compat {
|
  import global._

  private def error(msg: String) = throw new RuntimeException(msg)

  // this cache reduces duplicate work both here and when persisting
  // caches on other structures had minimal effect on time and cache size
  // (tried: Definition, Modifier, Path, Id, String)
  private[this] val typeCache = new HashMap[(Symbol, Type), xsbti.api.Type]
  // these caches are necessary for correctness
  private[this] val structureCache = new HashMap[Symbol, xsbti.api.Structure]
  private[this] val classLikeCache = new HashMap[(Symbol, Symbol), xsbti.api.ClassLike]
  private[this] val pending = new HashSet[xsbti.api.Lazy[_]]

  private[this] val emptyStringArray = new Array[String](0)

  /**
   * Implements a work-around for https://github.com/sbt/sbt/issues/823
   *
   * The strategy is to rename all type variables bound by an existential type to stable
   * names by assigning to each type variable a De Bruijn-like index. As a result, each
   * type variable gets a name of this shape:
   *
   *   "existential_${nestingLevel}_${i}"
   *
   * where `nestingLevel` indicates the nesting level of existential types and `i`
   * indicates the position of the type variable in the given existential type.
   *
   * For example, let's assume we have the following classes declared:
   *
   *   class A[T]; class B[T,U]
   *
   * and we have the type A[_], which is expanded by the Scala compiler into
   *
   *   A[_$1] forSome { type _$1 }
   *
   * After applying our renaming strategy we get
   *
   *   A[existential_0_0] forSome { type existential_0_0 }
   *
   * Let's consider a slightly more complicated example, which shows how our strategy
   * deals with nested existential types:
   *
   *   A[_ <: B[_, _]]
   *
   * which gets expanded into:
   *
   *   A[_$1] forSome {
   *     type _$1 <: B[_$2, _$3] forSome { type _$2; type _$3 }
   *   }
   *
   * After applying our renaming strategy we get
   *
   *   A[existential_0_0] forSome {
   *     type existential_0_0 <: B[existential_1_0, existential_1_1] forSome {
   *       type existential_1_0; type existential_1_1
   *     }
   *   }
   *
   * Note how the first index (the nesting level) is bumped for the inner existential type.
   *
   * This way, all names of existential type variables depend only on the structure of
   * the existential types and are kept stable.
   *
   * Both examples presented above used placeholder syntax for existential types, but our
   * strategy is applied uniformly to all existential types, whether they are written
   * using placeholder syntax or explicitly.
   */
  private[this] object existentialRenamings {
    private var nestingLevel: Int = 0
    import scala.collection.mutable.Map
    private var renameTo: Map[Symbol, String] = Map.empty

    def leaveExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
      nestingLevel -= 1
      assert(nestingLevel >= 0)
      typeVariables.foreach(renameTo.remove)
    }
    def enterExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
      nestingLevel += 1
      typeVariables.zipWithIndex foreach {
        case (tv, i) =>
          val newName = "existential_" + nestingLevel + "_" + i
          renameTo(tv) = newName
      }
    }
    def renaming(symbol: Symbol): Option[String] = renameTo.get(symbol)
  }
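
  // Illustrative sketch (not part of the original file): the same bookkeeping as
  // `existentialRenamings`, written against a stand-in `Sym` type so it can be
  // read and run independently of the compiler cake.
  private[this] object renamingSketch {
    final case class Sym(id: Int)
    private var level = 0
    private val names = scala.collection.mutable.Map.empty[Sym, String]
    def enter(tvs: Seq[Sym]): Unit = {
      level += 1
      for ((tv, i) <- tvs.zipWithIndex) names(tv) = "existential_" + level + "_" + i
    }
    def leave(tvs: Seq[Sym]): Unit = { level -= 1; tvs.foreach(names.remove) }
    def renaming(tv: Sym): Option[String] = names.get(tv)
  }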

  // call back to the xsbti.SafeLazy class in the main sbt code to construct a SafeLazy instance
  // we pass a thunk, whose class is loaded by the interface class loader (this class's loader)
  // SafeLazy ensures that once the value is forced, the thunk is nulled out and so
  // references to the thunk's classes are not retained. Specifically, it allows the interface classes
  // (those in this subproject) to be garbage collected after compilation.
  private[this] val safeLazy = Class.forName("xsbti.SafeLazy").getMethod("apply", classOf[xsbti.F0[_]])
  private def lzy[S <: AnyRef](s: => S): xsbti.api.Lazy[S] =
    {
      val z = safeLazy.invoke(null, Message(s)).asInstanceOf[xsbti.api.Lazy[S]]
      pending += z
      z
    }

  /**
   * Force all lazy structures. This is necessary so that we see the symbols/types at this phase and
   * so that we don't hold on to compiler objects and classes.
   */
  def forceStructures(): Unit =
    if (pending.isEmpty)
      structureCache.clear()
    else {
      val toProcess = pending.toList
      pending.clear()
      toProcess foreach { _.get() }
      forceStructures()
    }
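
  // Illustrative sketch (assumption, not from the original file): forcing one lazy
  // value may register further pending lazies via `lzy`, which is why the method
  // above drains in batches and recurses until nothing is left. The same pattern
  // with plain thunks:
  private[this] def drainSketch(pendingThunks: scala.collection.mutable.Set[() => Unit]): Unit =
    while (pendingThunks.nonEmpty) {
      val batch = pendingThunks.toList
      pendingThunks.clear()
      batch.foreach(_.apply()) // forcing a thunk may add new entries to pendingThunks
    }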

  private def thisPath(sym: Symbol) = path(pathComponents(sym, Constants.thisPath :: Nil))
  private def path(components: List[PathComponent]) = new xsbti.api.Path(components.toArray[PathComponent])
  private def pathComponents(sym: Symbol, postfix: List[PathComponent]): List[PathComponent] =
    {
      if (sym == NoSymbol || sym.isRoot || sym.isEmptyPackageClass || sym.isRootPackage) postfix
      else pathComponents(sym.owner, new xsbti.api.Id(simpleName(sym)) :: postfix)
    }
  private def simpleType(in: Symbol, t: Type): SimpleType =
    processType(in, t) match {
      case s: SimpleType => s
      case x => log("Not a simple type:\n\tType: " + t + " (" + t.getClass + ")\n\tTransformed: " + x.getClass); Constants.emptyType
    }
  private def types(in: Symbol, t: List[Type]): Array[xsbti.api.Type] = t.toArray[Type].map(processType(in, _))
  private def projectionType(in: Symbol, pre: Type, sym: Symbol) =
    {
      if (pre == NoPrefix) {
        if (sym.isLocalClass || sym.isRoot || sym.isRootPackage) Constants.emptyType
        else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound) reference(sym)
        else {
          // this appears to come from an existential type in an inherited member - not sure why isExistential is false here
          /*println("Warning: Unknown prefixless type: " + sym + " in " + sym.owner + " in " + sym.enclClass)
          println("\tFlags: " + sym.flags + ", istype: " + sym.isType + ", absT: " + sym.isAbstractType + ", alias: " + sym.isAliasType + ", nonclass: " + isNonClassType(sym))*/
          reference(sym)
        }
      } else if (sym.isRoot || sym.isRootPackage) Constants.emptyType
      else new xsbti.api.Projection(simpleType(in, pre), simpleName(sym))
    }
  private def reference(sym: Symbol): xsbti.api.ParameterRef = new xsbti.api.ParameterRef(tparamID(sym))

  private def annotations(in: Symbol, as: List[AnnotationInfo]): Array[xsbti.api.Annotation] = as.toArray[AnnotationInfo].map(annotation(in, _))
  private def annotation(in: Symbol, a: AnnotationInfo) =
    new xsbti.api.Annotation(
      processType(in, a.atp),
      if (a.assocs.isEmpty) Array(new xsbti.api.AnnotationArgument("", a.args.mkString("(", ",", ")"))) // what else to do with a Tree?
      else a.assocs.map { case (name, value) => new xsbti.api.AnnotationArgument(name.toString, value.toString) }.toArray[xsbti.api.AnnotationArgument]
    )
  private def annotated(in: Symbol, as: List[AnnotationInfo], tpe: Type) = new xsbti.api.Annotated(processType(in, tpe), annotations(in, as))

  private def viewer(s: Symbol) = (if (s.isModule) s.moduleClass else s).thisType
  private def printMember(label: String, in: Symbol, t: Type) = println(label + " in " + in + " : " + t + " (debug: " + debugString(t) + " )")
  private def defDef(in: Symbol, s: Symbol) =
    {
      def build(t: Type, typeParams: Array[xsbti.api.TypeParameter], valueParameters: List[xsbti.api.ParameterList]): xsbti.api.Def =
        {
          def parameterList(syms: List[Symbol]): xsbti.api.ParameterList =
            {
              val isImplicitList = syms match { case head :: _ => isImplicit(head); case _ => false }
              new xsbti.api.ParameterList(syms.map(parameterS).toArray, isImplicitList)
            }
          t match {
            case PolyType(typeParams0, base) =>
              assert(typeParams.isEmpty)
              assert(valueParameters.isEmpty)
              build(base, typeParameters(in, typeParams0), Nil)
            case MethodType(params, resultType) =>
              build(resultType, typeParams, parameterList(params) :: valueParameters)
            case Nullary(resultType) => // 2.9 and later
              build(resultType, typeParams, valueParameters)
            case returnType =>
              val t2 = processType(in, dropConst(returnType))
              new xsbti.api.Def(valueParameters.reverse.toArray, t2, typeParams, simpleName(s), getAccess(s), getModifiers(s), annotations(in, s))
          }
        }
      def parameterS(s: Symbol): xsbti.api.MethodParameter =
        makeParameter(simpleName(s), s.info, s.info.typeSymbol, s)

      // paramSym is only for 2.8 and is used to determine whether the parameter has a default
      def makeParameter(name: String, tpe: Type, ts: Symbol, paramSym: Symbol): xsbti.api.MethodParameter =
        {
          import xsbti.api.ParameterModifier._
          val (t, special) =
            if (ts == definitions.RepeatedParamClass) // || s == definitions.JavaRepeatedParamClass)
              (tpe.typeArgs(0), Repeated)
            else if (ts == definitions.ByNameParamClass)
              (tpe.typeArgs(0), ByName)
            else
              (tpe, Plain)
          new xsbti.api.MethodParameter(name, processType(in, t), hasDefault(paramSym), special)
        }
      val t = viewer(in).memberInfo(s)
      build(t, Array(), Nil)
    }
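
  // Worked example (hypothetical, for illustration): for a member such as
  //   def pick[A](xs: List[A])(implicit ord: Ordering[A]): A
  // `viewer(in).memberInfo(s)` is roughly
  //   PolyType(List(A), MethodType(List(xs), MethodType(List(ord), A)))
  // and `build` peels one layer per recursive call, first capturing the type
  // parameter, then accumulating the two parameter lists (the second one marked
  // implicit) before constructing the final xsbti.api.Def from the result type.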
  private def hasDefault(s: Symbol) = s != NoSymbol && s.hasFlag(Flags.DEFAULTPARAM)
  private def fieldDef[T](in: Symbol, s: Symbol, keepConst: Boolean, create: (xsbti.api.Type, String, xsbti.api.Access, xsbti.api.Modifiers, Array[xsbti.api.Annotation]) => T): T =
    {
      val t = dropNullary(viewer(in).memberType(s))
      val t2 = if (keepConst) t else dropConst(t)
      create(processType(in, t2), simpleName(s), getAccess(s), getModifiers(s), annotations(in, s))
    }
  private def dropConst(t: Type): Type = t match {
    case ConstantType(constant) => constant.tpe
    case _ => t
  }
  private def dropNullary(t: Type): Type = t match {
    case Nullary(un) => un
    case _ => t
  }

  private def typeDef(in: Symbol, s: Symbol): xsbti.api.TypeMember =
    {
      val (typeParams, tpe) =
        viewer(in).memberInfo(s) match {
          case PolyType(typeParams0, base) => (typeParameters(in, typeParams0), base)
          case t => (Array[xsbti.api.TypeParameter](), t)
        }
      val name = simpleName(s)
      val access = getAccess(s)
      val modifiers = getModifiers(s)
      val as = annotations(in, s)

      if (s.isAliasType)
        new xsbti.api.TypeAlias(processType(in, tpe), typeParams, name, access, modifiers, as)
      else if (s.isAbstractType) {
        val bounds = tpe.bounds
        new xsbti.api.TypeDeclaration(processType(in, bounds.lo), processType(in, bounds.hi), typeParams, name, access, modifiers, as)
      } else
        error("Unknown type member " + s)
    }

  private def structure(in: Symbol, s: Symbol): xsbti.api.Structure = structure(viewer(in).memberInfo(s), s, true)
  private def structure(info: Type): xsbti.api.Structure = structure(info, info.typeSymbol, false)
  private def structure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
    structureCache.getOrElseUpdate(s, mkStructure(info, s, inherit))

  private def removeConstructors(ds: List[Symbol]): List[Symbol] = ds filter { !_.isConstructor }

  private def mkStructure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
    {
      val (declared, inherited) = info.members.toList.reverse.partition(_.owner == s)
      val baseTypes = info.baseClasses.tail.map(info.baseType)
      val ds = if (s.isModuleClass) removeConstructors(declared) else declared
      val is = if (inherit) removeConstructors(inherited) else Nil
      mkStructure(s, baseTypes, ds, is)
    }

  // If true, this template is publicly visible and should be processed as a public inheritance dependency.
  // Local classes and local refinements will never be traversed by the api phase, so we don't need to check for that.
  private[this] def isPublicStructure(s: Symbol): Boolean =
    s.isStructuralRefinement ||
      // do not consider templates that are private[this] or private
      !(s.isPrivate && (s.privateWithin == NoSymbol || s.isLocal))

  private def mkStructure(s: Symbol, bases: List[Type], declared: List[Symbol], inherited: List[Symbol]): xsbti.api.Structure = {
    if (isPublicStructure(s))
      addInheritedDependencies(sourceFile, bases.map(_.dealias.typeSymbol))
    new xsbti.api.Structure(lzy(types(s, bases)), lzy(processDefinitions(s, declared)), lzy(processDefinitions(s, inherited)))
  }
  private def processDefinitions(in: Symbol, defs: List[Symbol]): Array[xsbti.api.Definition] =
    sort(defs.toArray).flatMap((d: Symbol) => definition(in, d))
  private[this] def sort(defs: Array[Symbol]): Array[Symbol] = {
    Arrays.sort(defs, sortClasses)
    defs
  }

  private def definition(in: Symbol, sym: Symbol): Option[xsbti.api.Definition] =
    {
      def mkVar = Some(fieldDef(in, sym, false, new xsbti.api.Var(_, _, _, _, _)))
      def mkVal = Some(fieldDef(in, sym, true, new xsbti.api.Val(_, _, _, _, _)))
      if (isClass(sym))
        if (ignoreClass(sym)) None else Some(classLike(in, sym))
      else if (sym.isNonClassType)
        Some(typeDef(in, sym))
      else if (sym.isVariable)
        if (isSourceField(sym)) mkVar else None
      else if (sym.isStable)
        if (isSourceField(sym)) mkVal else None
      else if (sym.isSourceMethod && !sym.isSetter)
        if (sym.isGetter) mkVar else Some(defDef(in, sym))
      else
        None
    }
  private def ignoreClass(sym: Symbol): Boolean =
    sym.isLocalClass || sym.isAnonymousClass || sym.fullName.endsWith(LocalChild.toString)

  // This filters private[this] vals/vars that were not in the original source.
  // The getter will be used for processing instead.
  private def isSourceField(sym: Symbol): Boolean =
    {
      val getter = sym.getter(sym.enclClass)
      // the check `getter eq sym` is a precaution against infinite recursion
      // `isParamAccessor` does not exist in all supported versions of Scala, so the flag check is done directly
      (getter == NoSymbol && !sym.hasFlag(Flags.PARAMACCESSOR)) || (getter eq sym)
    }
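
  // Illustration (assumed behaviour, not from the original file): for
  //   class C(val x: Int) { var y = 0 }
  // the synthetic private[this] fields backing `x` and `y` have getters, so they
  // are filtered out here and the getters are processed as the API members; a
  // field with no getter at all (a genuine source-level field) would be kept.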
  private def getModifiers(s: Symbol): xsbti.api.Modifiers =
    {
      import Flags._
      val absOver = s.hasFlag(ABSOVERRIDE)
      val abs = s.hasFlag(ABSTRACT) || s.hasFlag(DEFERRED) || absOver
      val over = s.hasFlag(OVERRIDE) || absOver
      new xsbti.api.Modifiers(abs, over, s.isFinal, s.hasFlag(SEALED), isImplicit(s), s.hasFlag(LAZY), hasMacro(s))
    }

  private def isImplicit(s: Symbol) = s.hasFlag(Flags.IMPLICIT)
  private def getAccess(c: Symbol): xsbti.api.Access =
    {
      if (c.isPublic) Constants.public
      else if (c.isPrivateLocal) Constants.privateLocal
      else if (c.isProtectedLocal) Constants.protectedLocal
      else {
        val within = c.privateWithin
        val qualifier = if (within == NoSymbol) Constants.unqualified else new xsbti.api.IdQualifier(within.fullName)
        if (c.hasFlag(Flags.PROTECTED)) new xsbti.api.Protected(qualifier)
        else new xsbti.api.Private(qualifier)
      }
    }

  /**
   * Replace all types that directly refer to the `forbidden` symbol by `NoType`.
   * (a specialized version of substThisAndSym)
   */
  class SuppressSymbolRef(forbidden: Symbol) extends TypeMap {
    def apply(tp: Type) =
      if (tp.typeSymbolDirect == forbidden) NoType
      else mapOver(tp)
  }

  private def processType(in: Symbol, t: Type): xsbti.api.Type = typeCache.getOrElseUpdate((in, t), makeType(in, t))
  private def makeType(in: Symbol, t: Type): xsbti.api.Type =
    {
      val dealiased = t match {
        case TypeRef(_, sym, _) if sym.isAliasType => t.dealias
        case _ => t
      }

      dealiased match {
        case NoPrefix => Constants.emptyType
        case ThisType(sym) => new xsbti.api.Singleton(thisPath(sym))
        case SingleType(pre, sym) => projectionType(in, pre, sym)
        case ConstantType(constant) => new xsbti.api.Constant(processType(in, constant.tpe), constant.stringValue)

        /* explaining the special-casing of references to refinement classes (https://support.typesafe.com/tickets/1882)
         *
         * goal: a representation of type references to refinement classes that's stable across compilation runs
         * (and thus insensitive to typing from source or unpickling from bytecode)
         *
         * problem: the current representation, which corresponds to the owner chain of the refinement:
         *   1. is affected by pickling, so typing from source or using unpickled symbols give different results (because the unpickler "localizes" owners -- this could be fixed in the compiler)
         *   2. can't distinguish multiple refinements in the same owner (this is a limitation of SBT's internal representation and cannot be fixed in the compiler)
         *
         * potential solutions:
         *   - simply drop the reference: won't work, as collapsing all refinement types will cause recompilation to be skipped when a refinement is changed to another refinement
         *   - represent the symbol in the api: can't think of a stable way of referring to an anonymous symbol whose owner changes when pickled
         *   + expand the reference to the corresponding refinement type: doing that recursively may not terminate, but we can deal with that by approximating recursive references
         *     (all we care about is being sound for recompilation: recompile iff a dependency changes, and this will happen as long as we have one unrolling of the reference to the refinement)
         */
        case TypeRef(pre, sym, Nil) if sym.isRefinementClass =>
          // Since we only care about detecting changes reliably, we unroll a reference to a refinement class once.
          // Recursive references are simply replaced by NoType -- changes to the type will be seen in the first unrolling.
          // The API need not be type correct, so this truncation is acceptable. Most of all, the API should be compact.
          val unrolling = pre.memberInfo(sym) // this is a refinement type

          // in case there are recursive references, suppress them -- does this ever happen?
          // we don't have a test case for this, so warn and hope we'll get a contribution for it :-)
          val withoutRecursiveRefs = new SuppressSymbolRef(sym).mapOver(unrolling)
          if (unrolling ne withoutRecursiveRefs)
            reporter.warning(sym.pos, "sbt-api: approximated refinement ref " + t + " (== " + unrolling + ") to " + withoutRecursiveRefs + "\nThis is currently untested, please report the code you were compiling.")

          structure(withoutRecursiveRefs)
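
        // Illustrative example (assumption, not from the original file): in
        //   trait T { def a: AnyRef { def m: Int } }
        // the member type of `a` refers to an anonymous refinement class; the case
        // above expands that reference into its structure once, so changing the
        // refinement to, say, { def m: Long } is visible to the incremental compiler.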

        case tr @ TypeRef(pre, sym, args) =>
          val base = projectionType(in, pre, sym)
          if (args.isEmpty)
            if (isRawType(tr))
              processType(in, rawToExistential(tr))
            else
              base
          else
            new xsbti.api.Parameterized(base, types(in, args))
        case SuperType(thistpe: Type, supertpe: Type) =>
          warning("sbt-api: Super type (not implemented): this=" + thistpe + ", super=" + supertpe); Constants.emptyType
        case at: AnnotatedType => annotatedType(in, at)
        case rt: CompoundType => structure(rt)
        case t: ExistentialType => makeExistentialType(in, t)
        case NoType => Constants.emptyType // this can happen when there is an error that will be reported by a later phase
        case PolyType(typeParams, resultType) => new xsbti.api.Polymorphic(processType(in, resultType), typeParameters(in, typeParams))
        case Nullary(resultType) =>
          warning("sbt-api: Unexpected nullary method type " + in + " in " + in.owner); Constants.emptyType
        case _ => warning("sbt-api: Unhandled type " + t.getClass + " : " + t); Constants.emptyType
      }
    }
  private def makeExistentialType(in: Symbol, t: ExistentialType): xsbti.api.Existential = {
    val ExistentialType(typeVariables, qualified) = t
    existentialRenamings.enterExistentialTypeVariables(typeVariables)
    try {
      val typeVariablesConverted = typeParameters(in, typeVariables)
      val qualifiedConverted = processType(in, qualified)
      new xsbti.api.Existential(qualifiedConverted, typeVariablesConverted)
    } finally {
      existentialRenamings.leaveExistentialTypeVariables(typeVariables)
    }
  }
  private def typeParameters(in: Symbol, s: Symbol): Array[xsbti.api.TypeParameter] = typeParameters(in, s.typeParams)
  private def typeParameters(in: Symbol, s: List[Symbol]): Array[xsbti.api.TypeParameter] = s.map(typeParameter(in, _)).toArray[xsbti.api.TypeParameter]
  private def typeParameter(in: Symbol, s: Symbol): xsbti.api.TypeParameter =
    {
      val varianceInt = s.variance
      import xsbti.api.Variance._
      val annots = annotations(in, s)
      val variance = if (varianceInt < 0) Contravariant else if (varianceInt > 0) Covariant else Invariant
      viewer(in).memberInfo(s) match {
        case TypeBounds(low, high) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, s), variance, processType(in, low), processType(in, high))
        case PolyType(typeParams, base) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, typeParams), variance, processType(in, base.bounds.lo), processType(in, base.bounds.hi))
        case x => error("Unknown type parameter info: " + x.getClass)
      }
    }
  private def tparamID(s: Symbol): String = {
    val renameTo = existentialRenamings.renaming(s)
    renameTo match {
      case Some(rename) =>
        // can't use debuglog because it doesn't exist in Scala 2.9.x
        if (settings.debug.value)
          log("Renaming existential type variable " + s.fullName + " to " + rename)
        rename
      case None =>
        s.fullName
    }
  }
  private def selfType(in: Symbol, s: Symbol): xsbti.api.Type = processType(in, s.thisSym.typeOfThis)

  def classLike(in: Symbol, c: Symbol): ClassLike = classLikeCache.getOrElseUpdate((in, c), mkClassLike(in, c))
  private def mkClassLike(in: Symbol, c: Symbol): ClassLike =
    {
      val name = c.fullName
      val isModule = c.isModuleClass || c.isModule
      val struct = if (isModule) c.moduleClass else c
      val defType =
        if (c.isTrait) DefinitionType.Trait
        else if (isModule) {
          if (c.isPackage) DefinitionType.PackageModule
          else DefinitionType.Module
        } else DefinitionType.ClassDef
      new xsbti.api.ClassLike(defType, lzy(selfType(in, c)), lzy(structure(in, struct)), emptyStringArray, typeParameters(in, c), name, getAccess(c), getModifiers(c), annotations(in, c))
    }

  private[this] def isClass(s: Symbol) = s.isClass || s.isModule
  // necessary to ensure a stable ordering of classes in the definitions list:
  //   modules and classes come first and are sorted by name
  //   all other definitions come later and are not sorted
  private[this] val sortClasses = new Comparator[Symbol] {
    def compare(a: Symbol, b: Symbol) = {
      val aIsClass = isClass(a)
      val bIsClass = isClass(b)
      if (aIsClass == bIsClass)
        if (aIsClass)
          if (a.isModule == b.isModule)
            a.fullName.compareTo(b.fullName)
          else if (a.isModule)
            -1
          else
            1
        else
          0 // substantial performance hit if fullNames are compared here
      else if (aIsClass)
        -1
      else
        1
    }
  }
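
  // Example ordering (illustrative): given definitions {def m, class A, object B},
  // the comparator above yields [object B, class A, def m]: among classes and
  // modules, modules sort first and ties are broken by fullName; everything else
  // keeps its original relative order (java.util.Arrays.sort is stable).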
  private object Constants {
    val local = new xsbti.api.ThisQualifier
    val public = new xsbti.api.Public
    val privateLocal = new xsbti.api.Private(local)
    val protectedLocal = new xsbti.api.Protected(local)
    val unqualified = new xsbti.api.Unqualified
    val emptyPath = new xsbti.api.Path(Array())
    val thisPath = new xsbti.api.This
    val emptyType = new xsbti.api.EmptyType
  }

  private def simpleName(s: Symbol): String =
    {
      val n = s.originalName
      val n2 = if (n.toString == "<init>") n else n.decode
      n2.toString.trim
    }

  private def annotations(in: Symbol, s: Symbol): Array[xsbti.api.Annotation] =
    atPhase(currentRun.typerPhase) {
      val base = if (s.hasFlag(Flags.ACCESSOR)) s.accessed else NoSymbol
      val b = if (base == NoSymbol) s else base
      // annotations from bean methods are not handled because:
      //   a) they are recorded as normal source methods anyway
      //   b) there is no way to distinguish them from user-defined methods
      val associated = List(b, b.getter(b.enclClass), b.setter(b.enclClass)).filter(_ != NoSymbol)
      associated.flatMap(ss => annotations(in, ss.annotations)).distinct.toArray
    }
  private def annotatedType(in: Symbol, at: AnnotatedType): xsbti.api.Type =
    {
      val annots = at.annotations
      if (annots.isEmpty) processType(in, at.underlying) else annotated(in, annots, at.underlying)
    }
}

@ -1,128 +0,0 @@
package xsbt

import scala.tools.nsc._

/**
 * Extracts simple names used in the given compilation unit.
 *
 * Extracts the simple (unqualified) names mentioned in the given compilation unit in
 * non-definition position by collecting all symbols associated with non-definition trees
 * and extracting names from all collected symbols.
 *
 * If a given symbol is mentioned both in definition and in non-definition position (e.g. in member
 * selection), then that symbol is collected. This means that names of symbols defined and used in the
 * same compilation unit are extracted. We've considered not extracting names of those symbols
 * as an optimization strategy. It turned out that this is not correct. Check
 * https://github.com/gkossakowski/sbt/issues/3 for an example of a scenario where it matters.
 *
 * All extracted names are returned in _decoded_ form. This way we stay consistent with the rest
 * of the incremental compiler, which works with names in decoded form.
 *
 * Names mentioned in Import nodes are handled properly but require some special logic for two
 * reasons:
 *
 *   1. The import node itself has a term symbol associated with it, with the name `<import>`.
 *      I (gkossakowski) tried to track down what role this symbol serves but I couldn't.
 *      It doesn't look like there are many places in the Scala compiler that refer to
 *      that kind of symbol explicitly.
 *   2. ImportSelector is not a subtype of Tree and is therefore not processed by `Tree.foreach`.
 *
 * Another type of tree node that requires special handling is TypeTree. TypeTree nodes
 * have a slightly odd representation:
 *
 *   1. TypeTree.hasSymbol always returns false, even when TypeTree.symbol
 *      returns a symbol.
 *   2. The original tree from which a given TypeTree was derived is stored
 *      in TypeTree.original, but Tree.foreach doesn't walk into the original
 *      tree, so we would miss it.
 *
 * The tree walking algorithm walks into TypeTree.original explicitly.
 */
class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) extends Compat {
  import global._

  def extract(unit: CompilationUnit): Set[String] = {
    val tree = unit.body
    val extractedByTreeWalk = extractByTreeWalk(tree)
    extractedByTreeWalk
  }

  private def extractByTreeWalk(tree: Tree): Set[String] = {
    val namesBuffer = collection.mutable.ListBuffer.empty[String]

    /*
     * Some macros appear to contain themselves as their original tree.
     * We must check that we don't inspect the same tree over and over.
     * See https://issues.scala-lang.org/browse/SI-8486
     *     https://github.com/sbt/sbt/issues/1237
     *     https://github.com/sbt/sbt/issues/1544
     */
    val inspectedOriginalTrees = collection.mutable.Set.empty[Tree]

    def addSymbol(symbol: Symbol): Unit = {
      val symbolNameAsString = symbol.name.decode.trim
      namesBuffer += symbolNameAsString
    }

    def handleTreeNode(node: Tree): Unit = {
      def handleMacroExpansion(original: Tree): Unit = {
        original.foreach(handleTreeNode)
      }

      def handleClassicTreeNode(node: Tree): Unit = node match {
        case _: DefTree | _: Template => ()
        // It turns out that the Import node has a TermSymbol associated with it.
        // I (Grzegorz) tried to understand why it's there and what it represents, but
        // that logic was introduced in 2005 without any justification. I'll just ignore the
        // import node altogether and just process the selectors in the import node.
        case Import(_, selectors: List[ImportSelector]) =>
          def usedNameInImportSelector(name: Name): Unit =
            if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString
          selectors foreach { selector =>
            usedNameInImportSelector(selector.name)
            usedNameInImportSelector(selector.rename)
          }
        // TODO: figure out whether we should process the original tree or walk the type.
        // The argument for processing the original tree: we process what the user wrote.
        // The argument for processing the type: we catch all transformations the typer applies
        // to types, but that might be a bad thing because it might expand aliases eagerly, which
        // is not what we need.
        case t: TypeTree if t.original != null =>
          t.original.foreach(handleTreeNode)
        case t if t.hasSymbol && eligibleAsUsedName(t.symbol) =>
          addSymbol(t.symbol)
        case _ => ()
      }

      node match {
        case MacroExpansionOf(original) if inspectedOriginalTrees.add(original) =>
          handleClassicTreeNode(node)
          handleMacroExpansion(original)
        case _ =>
          handleClassicTreeNode(node)
      }
    }

    tree.foreach(handleTreeNode)
    namesBuffer.toSet
  }

  /**
   * Needed for compatibility with Scala 2.8, which doesn't define `tpnme`.
   */
  private object tpnme {
    val EMPTY = nme.EMPTY.toTypeName
    val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName
  }

  private def eligibleAsUsedName(symbol: Symbol): Boolean = {
    def emptyName(name: Name): Boolean = name match {
      case nme.EMPTY | nme.EMPTY_PACKAGE_NAME | tpnme.EMPTY | tpnme.EMPTY_PACKAGE_NAME => true
      case _ => false
    }

    (symbol != NoSymbol) &&
      !symbol.isSynthetic &&
      !emptyName(symbol.name)
  }
}
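
// Hypothetical usage sketch (assumes `global` and `unit` are in scope inside a
// compiler phase; not part of the original file):
//   val extractor = new ExtractUsedNames(global)
//   val usedNames: Set[String] = extractor.extract(unit)
// For `class B { def foo(a: A) = a.bla }` the walk collects the decoded simple
// names of the symbols attached to non-definition trees, e.g. "A" and "bla".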

@ -1,47 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import scala.tools.nsc.symtab.Flags
import scala.tools.nsc.io.AbstractFile

import java.io.File

/**
 * Contains utility methods for looking up class files corresponding to Symbols.
 */
abstract class LocateClassFile extends Compat {
  val global: CallbackGlobal
  import global._

  private[this] final val classSeparator = '.'
  protected def classFile(sym: Symbol): Option[(AbstractFile, String, Boolean)] =
    // a package can never have a corresponding class file; this test does not
    // catch package objects (which do not have this flag set)
    if (sym hasFlag Flags.PACKAGE) None else {
      val name = flatname(sym, classSeparator) + moduleSuffix(sym)
      findClass(name).map { case (file, inOut) => (file, name, inOut) } orElse {
        if (isTopLevelModule(sym)) {
          val linked = sym.companionClass
          if (linked == NoSymbol)
            None
          else
            classFile(linked)
        } else
          None
      }
    }
  private def flatname(s: Symbol, separator: Char) =
    atPhase(currentRun.flattenPhase.next) { s fullName separator }

  protected def isTopLevelModule(sym: Symbol): Boolean =
    atPhase(currentRun.picklerPhase.next) {
      sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
    }
  protected def className(s: Symbol, sep: Char, dollarRequired: Boolean): String =
    flatname(s, sep) + (if (dollarRequired) "$" else "")
  protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File =
    new File(outputDirectory, className(s, File.separatorChar, separatorRequired) + ".class")
}
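
// Illustration (assumed behaviour, consistent with standard scalac name
// flattening; not part of the original file):
//   - a class C nested in class a.B flattens to "a.B$C", so fileForClass
//     resolves to <out>/a/B$C.class ('.' mapped to the platform separator)
//   - a top-level object a.M gets the "$" module suffix, giving <out>/a/M$.class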

@ -1,10 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

object Log {
  def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg))
  def settingsError(log: xsbti.Logger): String => Unit =
    s => log.error(Message(s))
}

@ -1,8 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

object Message {
  def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s }
}
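
// Usage note (illustrative): Message wraps a by-name value in an xsbti.F0 thunk,
// so construction of the message is deferred until the logger actually calls
// apply(). E.g. Log.debug(log, expensiveDump()) pays for expensiveDump() only
// when debug logging is enabled ("expensiveDump" is a hypothetical helper).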

@ -1,68 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

import xsbti.Logger
import Log.debug

class ScaladocInterface {
  def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(args, log, delegate)).run
}
private class Runner(args: Array[String], log: Logger, delegate: xsbti.Reporter) {
  import scala.tools.nsc.{ doc, Global, reporters }
  import reporters.Reporter
  val docSettings: doc.Settings = new doc.Settings(Log.settingsError(log))
  val command = Command(args.toList, docSettings)
  val reporter = DelegatingReporter(docSettings, delegate)
  def noErrors = !reporter.hasErrors && command.ok

  import forScope._
  def run(): Unit = {
    debug(log, "Calling Scaladoc with arguments:\n\t" + args.mkString("\n\t"))
    if (noErrors) {
      import doc._ // 2.8 trunk and Beta1-RC4 have doc.DocFactory. For other Scala versions, the next line creates forScope.DocFactory
      val processor = new DocFactory(reporter, docSettings)
      processor.document(command.files)
    }
    reporter.printSummary()
    if (!noErrors) throw new InterfaceCompileFailed(args, reporter.problems, "Scaladoc generation failed")
  }

  object forScope {
    class DocFactory(reporter: Reporter, docSettings: doc.Settings) // 2.7 compatibility
    {
      // see https://github.com/paulp/scala-full/commit/649823703a574641407d75d5c073be325ea31307
      trait GlobalCompat {
        def onlyPresentation = false

        def forScaladoc = false
      }

      object compiler extends Global(command.settings, reporter) with GlobalCompat {
        override def onlyPresentation = true
        override def forScaladoc = true
        class DefaultDocDriver // 2.8 source compatibility
        {
          assert(false)
          def process(units: Iterator[CompilationUnit]) = error("for 2.8 compatibility only")
        }
      }
      def document(ignore: Seq[String]): Unit = {
        import compiler._
        val run = new Run
        run compile command.files

        val generator =
          {
            import doc._
            new DefaultDocDriver {
              lazy val global: compiler.type = compiler
              lazy val settings = docSettings
            }
          }
        generator.process(run.units)
      }
    }
  }
}

@ -1,146 +0,0 @@
package xsbt

import org.junit.runner.RunWith
import xsbti.api.ClassLike
import xsbti.api.Def
import xsbt.api.SameAPI
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

import ScalaCompilerForUnitTesting.ExtractedSourceDependencies

@RunWith(classOf[JUnitRunner])
class DependencySpecification extends Specification {

  "Extracted source dependencies from public members" in {
    val sourceDependencies = extractSourceDependenciesPublic
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set('A, 'D)
    inheritance('B) === Set('D)
    memberRef('C) === Set('A)
    inheritance('C) === Set.empty
    memberRef('D) === Set.empty
    inheritance('D) === Set.empty
    memberRef('E) === Set.empty
    inheritance('E) === Set.empty
    memberRef('F) === Set('A, 'B, 'C, 'D, 'E)
    inheritance('F) === Set('A, 'E)
    memberRef('H) === Set('B, 'E, 'G)
    // aliases and applied type constructors are expanded, so we have an inheritance dependency on B
    inheritance('H) === Set('B, 'E)
  }

  "Extracted source dependencies from private members" in {
    val sourceDependencies = extractSourceDependenciesPrivate
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set.empty
    inheritance('B) === Set.empty
    memberRef('C) === Set('A)
    inheritance('C) === Set('A)
    memberRef('D) === Set('B)
    inheritance('D) === Set('B)
  }

  "Extracted source dependencies with trait as first parent" in {
    val sourceDependencies = extractSourceDependenciesTraitAsFirstParent
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance
    memberRef('A) === Set.empty
    inheritance('A) === Set.empty
    memberRef('B) === Set('A)
    inheritance('B) === Set('A)
    // verify that memberRef captures the oddity described in the documentation of `Relations.inheritance`;
    // we are mainly interested in whether the dependency on A is captured in the `memberRef` relation, so
    // the invariant that memberRef is a superset of the inheritance relation is preserved
    memberRef('C) === Set('A, 'B)
    inheritance('C) === Set('A, 'B)
    // same as above but indirect (C -> B -> A); note that only A is visible here
    memberRef('D) === Set('A, 'C)
    inheritance('D) === Set('A, 'C)
  }

  "Extracted source dependencies from macro arguments" in {
    val sourceDependencies = extractSourceDependenciesFromMacroArgument
    val memberRef = sourceDependencies.memberRef
    val inheritance = sourceDependencies.inheritance

    memberRef('A) === Set('B, 'C)
    inheritance('A) === Set.empty
    memberRef('B) === Set.empty
    inheritance('B) === Set.empty
    memberRef('C) === Set.empty
    inheritance('C) === Set.empty
  }

  private def extractSourceDependenciesPublic: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "class B extends D[A]"
    val srcC = """|class C {
                  |  def a: A = null
                  |}""".stripMargin
    val srcD = "class D[T]"
    val srcE = "trait E[T]"
    val srcF = "trait F extends A with E[D[B]] { self: C => }"
    val srcG = "object G { type T[x] = B }"
    // T is a type constructor [x]B
    // B extends D
    // E verifies the core type gets pulled out
    val srcH = "trait H extends G.T[Int] with (E[Int] @unchecked)"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies = compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC,
      'D -> srcD, 'E -> srcE, 'F -> srcF, 'G -> srcG, 'H -> srcH)
    sourceDependencies
  }

  private def extractSourceDependenciesPrivate: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "class B"
    val srcC = "class C { private class Inner1 extends A }"
    val srcD = "class D { def foo: Unit = { class Inner2 extends B } }"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD)
    sourceDependencies
  }

  private def extractSourceDependenciesTraitAsFirstParent: ExtractedSourceDependencies = {
    val srcA = "class A"
    val srcB = "trait B extends A"
    val srcC = "trait C extends B"
    val srcD = "class D extends C"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs('A -> srcA, 'B -> srcB, 'C -> srcC, 'D -> srcD)
    sourceDependencies
  }

  private def extractSourceDependenciesFromMacroArgument: ExtractedSourceDependencies = {
    val srcA = "class A { println(B.printTree(C.foo)) }"
    val srcB = """
      |import scala.language.experimental.macros
      |import scala.reflect.macros._
      |object B {
      |  def printTree(arg: Any) = macro printTreeImpl
      |  def printTreeImpl(c: Context)(arg: c.Expr[Any]): c.Expr[String] = {
      |    val argStr = arg.tree.toString
      |    val literalStr = c.universe.Literal(c.universe.Constant(argStr))
      |    c.Expr[String](literalStr)
      |  }
      |}""".stripMargin
    val srcC = "object C { val foo = 1 }"

    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val sourceDependencies =
      compilerForTesting.extractDependenciesFromSrcs(List(Map('B -> srcB, 'C -> srcC), Map('A -> srcA)))
    sourceDependencies
  }
}

@ -1,42 +0,0 @@
package xsbt

import org.junit.runner.RunWith
import xsbti.api.ClassLike
import xsbti.api.Def
import xsbt.api.SameAPI
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

@RunWith(classOf[JUnitRunner])
class ExtractAPISpecification extends Specification {

  "Existential types in method signatures" should {
    "have stable names" in { stableExistentialNames }
  }

  def stableExistentialNames: Boolean = {
    def compileAndGetFooMethodApi(src: String): Def = {
      val compilerForTesting = new ScalaCompilerForUnitTesting
      val sourceApi = compilerForTesting.extractApiFromSrc(src)
      val FooApi = sourceApi.definitions().find(_.name() == "Foo").get.asInstanceOf[ClassLike]
      val fooMethodApi = FooApi.structure().declared().find(_.name == "foo").get
      fooMethodApi.asInstanceOf[Def]
    }
    val src1 = """
      |class Box[T]
      |class Foo {
      |  def foo: Box[_] = null
      |
      |}""".stripMargin
    val fooMethodApi1 = compileAndGetFooMethodApi(src1)
    val src2 = """
      |class Box[T]
      |class Foo {
      |  def bar: Box[_] = null
      |  def foo: Box[_] = null
      |
      |}""".stripMargin
    val fooMethodApi2 = compileAndGetFooMethodApi(src2)
    SameAPI.apply(fooMethodApi1, fooMethodApi2)
  }
}

@ -1,108 +0,0 @@
package xsbt

import org.junit.runner.RunWith
import xsbti.api.ClassLike
import xsbti.api.Def
import xsbti.api.Package
import xsbt.api.SameAPI
import org.junit.runners.JUnit4

import org.specs2.mutable.Specification

@RunWith(classOf[JUnit4])
class ExtractUsedNamesSpecification extends Specification {

  /**
   * Standard names that appear in every compilation unit that has any class
   * definition.
   */
  private val standardNames = Set(
    // AnyRef is added as the default parent of a class
    "scala", "AnyRef",
    // a class receives a default constructor, which is internally called "<init>"
    "<init>")

  "imported name" in {
    val src = """
      |package a { class A }
      |package b {
      |  import a.{A => A2}
      |}""".stripMargin
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
    val expectedNames = standardNames ++ Set("a", "A", "A2", "b")
    usedNames === expectedNames
  }

  // test covers https://github.com/gkossakowski/sbt/issues/6
  "names in type tree" in {
    val srcA = """|
      |package a {
      |  class A {
      |    class C { class D }
      |  }
      |  class B[T]
      |  class BB
      |}""".stripMargin
    val srcB = """|
      |package b {
      |  abstract class X {
      |    def foo: a.A#C#D
      |    def bar: a.B[a.BB]
      |  }
      |}""".stripMargin
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
    val expectedNames = standardNames ++ Set("a", "A", "B", "C", "D", "b", "X", "BB")
    usedNames === expectedNames
  }

  // test for https://github.com/gkossakowski/sbt/issues/5
  "symbolic names" in {
    val srcA = """|
      |class A {
      |  def `=`: Int = 3
      |}""".stripMargin
    val srcB = """|
      |class B {
      |  def foo(a: A) = a.`=`
      |}""".stripMargin
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
    val expectedNames = standardNames ++ Set("A", "a", "B", "=")
    usedNames === expectedNames
  }

  // test for https://github.com/gkossakowski/sbt/issues/3
  "used names from the same compilation unit" in {
    val src = "class A { def foo: Int = 0; def bar: Int = foo }"
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
    val expectedNames = standardNames ++ Set("A", "foo", "Int")
    usedNames === expectedNames
  }

  // pending test for https://issues.scala-lang.org/browse/SI-7173
  "names of constants" in {
    val src = "class A { final val foo = 12; def bar: Int = foo }"
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(src)
    val expectedNames = standardNames ++ Set("A", "foo", "Int")
    usedNames === expectedNames
  }.pendingUntilFixed("Scala's type checker inlines constants, so we can't see the original name.")

  // pending test for https://github.com/gkossakowski/sbt/issues/4
  // TODO: we should fix it by giving special treatment to `selectDynamic` and `applyDynamic` calls
  "names from method calls on Dynamic" in {
    val srcA = """|import scala.language.dynamics
      |class A extends Dynamic {
      |  def selectDynamic(name: String): Int = name.length
      |}""".stripMargin
    val srcB = "class B { def foo(a: A): Int = a.bla }"
    val compilerForTesting = new ScalaCompilerForUnitTesting(nameHashing = true)
    val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB)
    val expectedNames = standardNames ++ Set("B", "A", "a", "Int", "selectDynamic", "bla")
    usedNames === expectedNames
  }.pendingUntilFixed("Calls to Dynamic are desugared in the type checker, so the Select node is turned into a string literal.")

}

@ -1,182 +0,0 @@
package xsbt

import xsbti.compile.SingleOutput
import java.io.File
import _root_.scala.tools.nsc.reporters.ConsoleReporter
import _root_.scala.tools.nsc.Settings
import xsbti._
import xsbti.api.SourceAPI
import sbt.IO.withTemporaryDirectory
import xsbti.api.ClassLike
import xsbti.api.Definition
import xsbti.api.Def
import xsbt.api.SameAPI
import sbt.ConsoleLogger
import xsbti.DependencyContext._

import ScalaCompilerForUnitTesting.ExtractedSourceDependencies

/**
 * Provides common functionality needed for unit tests that require compiling
 * source code using the Scala compiler.
 */
class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {

  /**
   * Compiles given source code using the Scala compiler and returns the API representation
   * extracted by the ExtractAPI class.
   */
  def extractApiFromSrc(src: String): SourceAPI = {
    val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
    analysisCallback.apis(tempSrcFile)
  }

  def extractUsedNamesFromSrc(src: String): Set[String] = {
    val (Seq(tempSrcFile), analysisCallback) = compileSrcs(src)
    analysisCallback.usedNames(tempSrcFile)
  }

  /**
   * Extracts used names from the src provided as the second argument.
   *
   * The purpose of the first argument is to define names that the second
   * source is going to refer to. Both files are compiled in the same compiler
   * run, but only the names used in the second src file are returned.
   */
  def extractUsedNamesFromSrc(definitionSrc: String, actualSrc: String): Set[String] = {
    // we drop the temp src file corresponding to the definition src file
    val (Seq(_, tempSrcFile), analysisCallback) = compileSrcs(definitionSrc, actualSrc)
    analysisCallback.usedNames(tempSrcFile)
  }

  /**
   * Compiles given source code snippets (passed as Strings) using the Scala compiler and returns
   * the extracted dependencies between the snippets. Source code snippets are identified by symbols.
   * Each symbol should be associated with one snippet only.
   *
   * Snippets can be grouped to be compiled together in the same compiler run. This is
   * useful to compile macros, which cannot be used in the same compilation run that
   * defines them.
   *
   * Symbols are used to express the extracted dependencies between source code snippets. This way
   * we have a file system-independent way of testing dependencies between source code "files".
   */
  def extractDependenciesFromSrcs(srcs: List[Map[Symbol, String]]): ExtractedSourceDependencies = {
    val rawGroupedSrcs = srcs.map(_.values.toList)
    val symbols = srcs.flatMap(_.keys)
    val (tempSrcFiles, testCallback) = compileSrcs(rawGroupedSrcs)
    val fileToSymbol = (tempSrcFiles zip symbols).toMap

    val memberRefFileDeps = testCallback.sourceDependencies collect {
      // DependencyByMemberRef marks dependencies that are not introduced by inheritance
      case (target, src, DependencyByMemberRef) => (src, target)
    }
    val inheritanceFileDeps = testCallback.sourceDependencies collect {
      // DependencyByInheritance marks dependencies that are introduced by inheritance
      case (target, src, DependencyByInheritance) => (src, target)
    }
    def toSymbols(src: File, target: File): (Symbol, Symbol) = (fileToSymbol(src), fileToSymbol(target))
    val memberRefDeps = memberRefFileDeps map { case (src, target) => toSymbols(src, target) }
    val inheritanceDeps = inheritanceFileDeps map { case (src, target) => toSymbols(src, target) }
    def pairsToMultiMap[A, B](pairs: Seq[(A, B)]): Map[A, Set[B]] = {
      import scala.collection.mutable.{ HashMap, MultiMap }
      val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B]
      val multiMap = pairs.foldLeft(emptyMultiMap) {
        case (acc, (key, value)) =>
          acc.addBinding(key, value)
      }
      // convert all collections to immutable variants
      multiMap.toMap.mapValues(_.toSet).withDefaultValue(Set.empty)
    }

    ExtractedSourceDependencies(pairsToMultiMap(memberRefDeps), pairsToMultiMap(inheritanceDeps))
  }

  def extractDependenciesFromSrcs(srcs: (Symbol, String)*): ExtractedSourceDependencies = {
    val symbols = srcs.map(_._1)
    assert(symbols.distinct.size == symbols.size,
      s"Duplicate symbols for srcs detected: $symbols")
    extractDependenciesFromSrcs(List(srcs.toMap))
  }
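
  // Usage sketch (illustrative, mirroring the specs above):
  //   val deps = new ScalaCompilerForUnitTesting(nameHashing = true)
  //     .extractDependenciesFromSrcs('A -> "class A", 'B -> "class B extends A")
  //   deps.memberRef('B)    // == Set('A)
  //   deps.inheritance('B)  // == Set('A)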

  /**
   * Compiles given source code snippets written to temporary files. Each snippet is
   * written to a separate temporary file.
   *
   * Snippets can be grouped to be compiled together in the same compiler run. This is
   * useful to compile macros, which cannot be used in the same compilation run that
   * defines them.
   *
   * The sequence of temporary files corresponding to the passed snippets and the analysis
   * callback are returned as a result.
   */
  private def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = {
    withTemporaryDirectory { temp =>
      val analysisCallback = new TestCallback(nameHashing)
      val classesDir = new File(temp, "classes")
      classesDir.mkdir()

      val compiler = prepareCompiler(classesDir, analysisCallback, classesDir.toString)

      val files = for ((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield {
        val run = new compiler.Run
        val srcFiles = compilationUnit.toSeq.zipWithIndex map {
          case (src, i) =>
            val fileName = s"Test-$unitId-$i.scala"
            prepareSrcFile(temp, fileName, src)
        }
        val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList

        run.compile(srcFilePaths)

        srcFilePaths.foreach(f => new File(f).delete)
        srcFiles
      }
      (files.flatten.toSeq, analysisCallback)
    }
  }

  private def compileSrcs(srcs: String*): (Seq[File], TestCallback) = {
    compileSrcs(List(srcs.toList))
  }

  private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = {
    val srcFile = new File(baseDir, fileName)
    sbt.IO.write(srcFile, src)
    srcFile
  }

  private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = "."): CachedCompiler0#Compiler = {
    val args = Array.empty[String]
    object output extends SingleOutput {
      def outputDirectory: File = outputDir
      override def toString = s"SingleOutput($outputDirectory)"
    }
    val weakLog = new WeakLog(ConsoleLogger(), ConsoleReporter)
    val cachedCompiler = new CachedCompiler0(args, output, weakLog, false)
    val settings = cachedCompiler.settings
    settings.classpath.value = classpath
    settings.usejavacp.value = true
    val scalaReporter = new ConsoleReporter(settings)
    val delegatingReporter = DelegatingReporter(settings, ConsoleReporter)
    val compiler = cachedCompiler.compiler
    compiler.set(analysisCallback, delegatingReporter)
    compiler
  }

  private object ConsoleReporter extends Reporter {
    def reset(): Unit = ()
    def hasErrors: Boolean = false
    def hasWarnings: Boolean = false
    def printWarnings(): Unit = ()
    def problems: Array[Problem] = Array.empty
    def log(pos: Position, msg: String, sev: Severity): Unit = println(msg)
    def comment(pos: Position, msg: String): Unit = ()
    def printSummary(): Unit = ()
  }

}

object ScalaCompilerForUnitTesting {
  case class ExtractedSourceDependencies(memberRef: Map[Symbol, Set[Symbol]], inheritance: Map[Symbol, Set[Symbol]])
}

@ -1,211 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt
package compiler

import java.io.File
import scala.util.Try

object ComponentCompiler {
  val xsbtiID = "xsbti"
  val srcExtension = "-src"
  val binSeparator = "-bin_"
  val compilerInterfaceID = "compiler-interface"
  val compilerInterfaceSrcID = compilerInterfaceID + srcExtension
  val javaVersion = System.getProperty("java.class.version")

  @deprecated("Use `interfaceProvider(ComponentManager, IvyConfiguration, File)`.", "0.13.10")
  def interfaceProvider(manager: ComponentManager): CompilerInterfaceProvider = new CompilerInterfaceProvider {
    def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File =
      {
        // this is the instance used to compile the interface component
        val componentCompiler = new ComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), manager)
        log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version)
        componentCompiler(compilerInterfaceID)
      }
  }

  def interfaceProvider(manager: ComponentManager, ivyConfiguration: IvyConfiguration, bootDirectory: File): CompilerInterfaceProvider = new CompilerInterfaceProvider {
    def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File =
      {
        // this is the instance used to compile the interface component
        val componentCompiler = new IvyComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), manager, ivyConfiguration, bootDirectory, log)
        log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version)
        componentCompiler(compilerInterfaceID)
      }
  }
}
/**
 * This class provides source components compiled with the provided RawCompiler.
 * The compiled classes are cached using the provided component manager according
 * to the actualVersion field of the RawCompiler.
 */
@deprecated("Replaced by IvyComponentCompiler.", "0.13.10")
class ComponentCompiler(compiler: RawCompiler, manager: ComponentManager) {
  import ComponentCompiler._
  def apply(id: String): File =
    try { getPrecompiled(id) }
    catch { case _: InvalidComponent => getLocallyCompiled(id) }

  /**
   * Gets the precompiled (distributed with sbt) component with the given 'id'.
   * If the component has not been precompiled, this throws InvalidComponent.
   */
  def getPrecompiled(id: String): File = manager.file(binaryID(id, false))(IfMissing.Fail)
  /**
   * Gets the locally compiled component with the given 'id' or compiles it if it has not been compiled yet.
   * If the component does not exist, this throws InvalidComponent.
   */
  def getLocallyCompiled(id: String): File =
    {
      val binID = binaryID(id, true)
      manager.file(binID)(new IfMissing.Define(true, compileAndInstall(id, binID)))
    }
  def clearCache(id: String): Unit = manager.clearCache(binaryID(id, true))
  protected def binaryID(id: String, withJavaVersion: Boolean) =
    {
      val base = id + binSeparator + compiler.scalaInstance.actualVersion
      if (withJavaVersion) base + "__" + javaVersion else base
    }
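
  // Example (values assumed for illustration): with id = "compiler-interface",
  // a Scala instance whose actualVersion is "2.10.4", and java.class.version
  // "50.0", binaryID(id, withJavaVersion = true) yields
  //   "compiler-interface-bin_2.10.4__50.0"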
|
||||
protected def compileAndInstall(id: String, binID: String): Unit = {
|
||||
val srcID = id + srcExtension
|
||||
IO.withTemporaryDirectory { binaryDirectory =>
|
||||
val targetJar = new File(binaryDirectory, id + ".jar")
|
||||
val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail)
|
||||
AnalyzingCompiler.compileSources(manager.files(srcID)(IfMissing.Fail), targetJar, xsbtiJars, id, compiler, manager.log)
|
||||
manager.define(binID, Seq(targetJar))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Component compiler which is able to find the most specific version available of
|
||||
* the compiler interface sources using Ivy.
|
||||
* The compiled classes are cached using the provided component manager according
|
||||
* to the actualVersion field of the RawCompiler.
|
||||
*/
|
||||
private[compiler] class IvyComponentCompiler(compiler: RawCompiler, manager: ComponentManager, ivyConfiguration: IvyConfiguration, bootDirectory: File, log: Logger) {
|
||||
import ComponentCompiler._
|
||||
|
||||
private val sbtOrg = xsbti.ArtifactInfo.SbtOrganization
|
||||
private val sbtOrgTemp = JsonUtil.sbtOrgTemp
|
||||
private val modulePrefixTemp = "temp-module-"
|
||||
private val ivySbt: IvySbt = new IvySbt(ivyConfiguration)
|
||||
private val sbtVersion = ComponentManager.version
|
||||
private val buffered = new BufferedLogger(FullLogger(log))
|
||||
private val retrieveDirectory = new File(s"$bootDirectory/scala-${compiler.scalaInstance.version}/$sbtOrg/sbt/$sbtVersion/compiler-interface-srcs")
|
||||
|
||||
def apply(id: String): File = {
|
||||
val binID = binaryID(id)
|
||||
manager.file(binID)(new IfMissing.Define(true, compileAndInstall(id, binID)))
|
||||
}
|
||||
|
||||
private def binaryID(id: String): String = {
|
||||
val base = id + binSeparator + compiler.scalaInstance.actualVersion
|
||||
base + "__" + javaVersion
|
||||
}
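
  // For illustration (a hypothetical value, assuming binSeparator is the separator string
  // defined on ComponentCompiler): for Scala 2.10.4 on a Java 7 VM, where
  // "java.class.version" is "51.0", binaryID("compiler-interface") produces something
  // like "compiler-interface" + binSeparator + "2.10.4__51.0".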

  private def compileAndInstall(id: String, binID: String): Unit = {
    def interfaceSources(moduleVersions: Vector[VersionNumber]): Iterable[File] =
      moduleVersions match {
        case Vector() =>
          def getAndDefineDefaultSources() =
            update(getModule(id))(_.getName endsWith "-sources.jar") map { sourcesJar =>
              manager.define(id, sourcesJar)
              sourcesJar
            } getOrElse (throw new InvalidComponent(s"Couldn't retrieve default sources: module '$id'"))

          buffered.debug(s"Fetching default sources: module '$id'")
          manager.files(id)(new IfMissing.Fallback(getAndDefineDefaultSources()))

        case version +: rest =>
          val moduleName = s"${id}_$version"
          def getAndDefineVersionSpecificSources() =
            update(getModule(moduleName))(_.getName endsWith "-sources.jar") map { sourcesJar =>
              manager.define(moduleName, sourcesJar)
              sourcesJar
            } getOrElse interfaceSources(rest)

          buffered.debug(s"Fetching version-specific sources: module '$moduleName'")
          manager.files(moduleName)(new IfMissing.Fallback(getAndDefineVersionSpecificSources()))
      }

    IO.withTemporaryDirectory { binaryDirectory =>
      val targetJar = new File(binaryDirectory, s"$binID.jar")
      val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail)

      val sourceModuleVersions = VersionNumber(compiler.scalaInstance.actualVersion).cascadingVersions
      val sources = buffered bufferQuietly interfaceSources(sourceModuleVersions)
      AnalyzingCompiler.compileSources(sources, targetJar, xsbtiJars, id, compiler, log)

      manager.define(binID, Seq(targetJar))
    }
  }

  /**
   * Returns a dummy module that depends on "org.scala-sbt" % `id` % `sbtVersion`.
   * Note: Sbt's implementation of Ivy requires us to do this, because only the dependencies
   * of the specified module will be downloaded.
   */
  private def getModule(id: String): ivySbt.Module = {
    val sha1 = Hash.toHex(Hash(id))
    val dummyID = ModuleID(sbtOrgTemp, modulePrefixTemp + sha1, sbtVersion, Some("component"))
    val moduleID = ModuleID(sbtOrg, id, sbtVersion, Some("component")).sources()
    getModule(dummyID, Seq(moduleID))
  }
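
  // For illustration: the dummy module name is derived deterministically from the requested
  // component id, e.g. for id "compiler-interface" it is "temp-module-" followed by the hex
  // SHA-1 of that string, so repeated requests for the same component reuse the same dummy module.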

  private def getModule(moduleID: ModuleID, deps: Seq[ModuleID], uo: UpdateOptions = UpdateOptions()): ivySbt.Module = {
    val moduleSetting = InlineConfiguration(
      module = moduleID,
      moduleInfo = ModuleInfo(moduleID.name),
      dependencies = deps,
      configurations = Seq(Configurations.Component),
      ivyScala = None)

    new ivySbt.Module(moduleSetting)
  }

  private def dependenciesNames(module: ivySbt.Module): String = module.moduleSettings match {
    // `module` is a dummy module; we will only fetch its dependencies.
    case ic: InlineConfiguration =>
      ic.dependencies map {
        case mID: ModuleID =>
          import mID._
          s"$organization % $name % $revision"
      } mkString ", "
    case _ =>
      "unknown"
  }

  private def update(module: ivySbt.Module)(predicate: File => Boolean): Option[Seq[File]] = {
    val retrieveConfiguration = new RetrieveConfiguration(retrieveDirectory, Resolver.defaultRetrievePattern, false)
    val updateConfiguration = new UpdateConfiguration(Some(retrieveConfiguration), true, UpdateLogging.DownloadOnly)

    buffered.info(s"Attempting to fetch ${dependenciesNames(module)}. This operation may fail.")
    IvyActions.updateEither(module, updateConfiguration, UnresolvedWarningConfiguration(), LogicalClock.unknown, None, buffered) match {
      case Left(unresolvedWarning) =>
        buffered.debug(s"Couldn't retrieve module ${dependenciesNames(module)}.")
        None

      case Right(updateReport) =>
        val allFiles =
          for {
            conf <- updateReport.configurations
            m <- conf.modules
            (_, f) <- m.artifacts
          } yield f

        buffered.debug(s"Files retrieved for ${dependenciesNames(module)}:")
        buffered.debug(allFiles mkString ", ")

        allFiles filter predicate match {
          case Seq() => None
          case files => Some(files)
        }
    }
  }
}

@ -1,3 +0,0 @@
Simple Build Tool: Analysis Store Component
Copyright 2010 Mark Harrah
Licensed under BSD-style license (see LICENSE)

@ -1,154 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import xsbti.api.{ Source, Compilation }
import xsbti.{ Position, Problem, Severity }
import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput }
import xsbti.DependencyContext._
import MultipleOutput.OutputGroup
import java.io.File
import sbinary._
import DefaultProtocol._
import DefaultProtocol.tuple2Format
import Logger.{ m2o, position, problem }
import Relations.{ Source => RSource, SourceDependencies }

@deprecated("Replaced by TextAnalysisFormat. OK to remove in 0.14.", since = "0.13.1")
object AnalysisFormats {
  type RFF = Relation[File, File]
  type RFS = Relation[File, String]

  import System.{ currentTimeMillis => now }
  val start = now
  def time(label: String) =
    {
      val end = now
      println(label + ": " + (end - start) + " ms")
    }

  def debug[T](label: String, f: Format[T]): Format[T] = new Format[T] {
    def reads(in: Input): T =
      {
        time(label + ".read.start")
        val r = f.reads(in)
        time(label + ".read.end")
        r
      }
    def writes(out: Output, t: T): Unit = {
      time(label + ".write.start")
      f.writes(out, t)
      time(label + ".write.end")
    }
  }

  implicit def analysisFormat(implicit stampsF: Format[Stamps], apisF: Format[APIs], relationsF: Format[Relations],
    infosF: Format[SourceInfos], compilationsF: Format[Compilations]): Format[Analysis] =
    asProduct5(Analysis.Empty.copy _)(a => (a.stamps, a.apis, a.relations, a.infos, a.compilations))(stampsF, apisF, relationsF, infosF, compilationsF)

  implicit def infosFormat(implicit infoF: Format[Map[File, SourceInfo]]): Format[SourceInfos] =
    wrap[SourceInfos, Map[File, SourceInfo]](_.allInfos, SourceInfos.make _)

  implicit def infoFormat: Format[SourceInfo] =
    wrap[SourceInfo, (Seq[Problem], Seq[Problem])](si => (si.reportedProblems, si.unreportedProblems), { case (a, b) => SourceInfos.makeInfo(a, b) })

  implicit def problemFormat: Format[Problem] = asProduct4(problem _)(p => (p.category, p.position, p.message, p.severity))

  implicit def compilationsFormat: Format[Compilations] = {
    implicit val compilationSeqF = seqFormat(xsbt.api.CompilationFormat)
    wrap[Compilations, Seq[Compilation]](_.allCompilations, Compilations.make _)
  }

  implicit def positionFormat: Format[Position] =
    asProduct7(position _)(p => (m2o(p.line), p.lineContent, m2o(p.offset), m2o(p.pointer), m2o(p.pointerSpace), m2o(p.sourcePath), m2o(p.sourceFile)))

  implicit val fileOptionFormat: Format[Option[File]] = optionsAreFormat[File](fileFormat)
  implicit val integerFormat: Format[Integer] = wrap[Integer, Int](_.toInt, Integer.valueOf)
  implicit val severityFormat: Format[Severity] =
    wrap[Severity, Byte](_.ordinal.toByte, b => Severity.values.apply(b.toInt))

  implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] =
    asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]((a, b, c, d, e) => new CompileSetup(a, b, c, d, e))(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF)

  implicit val outputGroupFormat: Format[OutputGroup] =
    asProduct2((a: File, b: File) => new OutputGroup { def sourceDirectory = a; def outputDirectory = b }) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat)
  implicit val multipleOutputFormat: Format[MultipleOutput] =
    wrap[MultipleOutput, Array[OutputGroup]](
      (_.outputGroups),
      {
        groups =>
          new MultipleOutput {
            def outputGroups = groups
            override def toString = s"MultipleOutput($outputGroups)"
          }
      }
    )
  implicit val singleOutputFormat: Format[SingleOutput] =
    wrap[SingleOutput, File](
      (_.outputDirectory),
      { out => new SingleOutput { def outputDirectory = out } }
    )(fileFormat)
  implicit val outputFormat: Format[APIOutput] = asUnion(singleOutputFormat, multipleOutputFormat)

  implicit def stampsFormat(implicit prodF: Format[Map[File, Stamp]], srcF: Format[Map[File, Stamp]], binF: Format[Map[File, Stamp]], nameF: Format[Map[File, String]]): Format[Stamps] =
    asProduct4(Stamps.apply _)(s => (s.products, s.sources, s.binaries, s.classNames))(prodF, srcF, binF, nameF)

  implicit def stampFormat(implicit hashF: Format[Hash], modF: Format[LastModified], existsF: Format[Exists]): Format[Stamp] =
    asUnion(hashF, modF, existsF)

  implicit def apisFormat(implicit internalF: Format[Map[File, Source]], externalF: Format[Map[String, Source]]): Format[APIs] =
    asProduct2(APIs.apply _)(as => (as.internal, as.external))(internalF, externalF)

  implicit def relationsFormat(implicit prodF: Format[RFF], binF: Format[RFF], directF: Format[RSource], inheritedF: Format[RSource], memberRefF: Format[SourceDependencies], inheritanceF: Format[SourceDependencies], csF: Format[RFS], namesF: Format[RFS]): Format[Relations] =
    {
      def makeRelation(srcProd: RFF, binaryDep: RFF, direct: RSource, publicInherited: RSource,
        memberRef: SourceDependencies, inheritance: SourceDependencies, classes: RFS,
        nameHashing: Boolean, names: RFS): Relations = if (nameHashing) {
        def isEmpty(sourceDependencies: RSource): Boolean =
          sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
        // we check direct dependencies only because publicInherited dependencies are a subset of direct
        assert(isEmpty(direct), "Direct dependencies are not empty but `nameHashing` flag is enabled.")
        val internalDependencies = InternalDependencies(Map(DependencyByMemberRef -> memberRef.internal, DependencyByInheritance -> inheritance.internal))
        val externalDependencies = ExternalDependencies(Map(DependencyByMemberRef -> memberRef.external, DependencyByInheritance -> inheritance.external))
        Relations.make(srcProd, binaryDep, internalDependencies, externalDependencies, classes, names)
      } else {
        def isEmpty(sourceDependencies: SourceDependencies): Boolean =
          sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
        // we check memberRef dependencies only because inheritance dependencies are a subset of memberRef
        assert(isEmpty(memberRef), "MemberRef dependencies are not empty but `nameHashing` flag is disabled.")
        Relations.make(srcProd, binaryDep, direct, publicInherited, classes)
      }
      asProduct9[Relations, RFF, RFF, RSource, RSource, SourceDependencies, SourceDependencies, RFS, Boolean, RFS]((a, b, c, d, e, f, g, h, i) => makeRelation(a, b, c, d, e, f, g, h, i))(
        rs => (rs.srcProd, rs.binaryDep, rs.direct, rs.publicInherited, rs.memberRef, rs.inheritance, rs.classes, rs.nameHashing, rs.names))(
        prodF, binF, directF, inheritedF, memberRefF, inheritanceF, csF, implicitly[Format[Boolean]], namesF)
    }

  implicit def relationsSourceFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[RSource] =
    asProduct2[RSource, RFF, RFS]((a, b) => Relations.makeSource(a, b))(rs => (rs.internal, rs.external))

  implicit def relationsSourceDependenciesFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[SourceDependencies] =
    asProduct2[SourceDependencies, RFF, RFS]((a, b) => Relations.makeSourceDependencies(a, b))(rs => (rs.internal, rs.external))

  implicit def relationFormat[A, B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A, B]] =
    asProduct2[Relation[A, B], Map[A, Set[B]], Map[B, Set[A]]](Relation.make _)(r => (r.forwardMap, r.reverseMap))(af, bf)

  implicit val sourceFormat: Format[Source] = xsbt.api.SourceFormat

  implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
  // can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions]
  implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] =
    wrap[CompileOptions, (Seq[String], Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2))

  implicit val orderFormat: Format[CompileOrder] =
    {
      val values = CompileOrder.values
      wrap[CompileOrder, Int](_.ordinal, values)
    }
  implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x)

  implicit def hashStampFormat: Format[Hash] = wrap[Hash, Array[Byte]](_.value, new Hash(_))
  implicit def lastModFormat: Format[LastModified] = wrap[LastModified, Long](_.value, new LastModified(_))
  implicit def existsFormat: Format[Exists] = wrap[Exists, Boolean](_.value, new Exists(_))
}

@ -1,20 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt
package inc

import java.io.File

object FileBasedStore {
  def apply(file: File): AnalysisStore = new AnalysisStore {
    def set(analysis: Analysis, setup: CompileSetup): Unit = {
      Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) }
    }

    def get(): Option[(Analysis, CompileSetup)] =
      try { Some(getUncaught()) } catch { case _: Exception => None }
    def getUncaught(): (Analysis, CompileSetup) =
      Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) }
  }
}
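
// A minimal usage sketch (hypothetical path; assumes an Analysis and CompileSetup are in scope):
//   val store = FileBasedStore(new File("target/analysis.txt"))
//   store.set(analysis, setup)   // persist as text
//   store.get()                  // Some((analysis, setup)), or None if the file is missing or unreadable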

@ -1,403 +0,0 @@
package sbt
package inc

import java.io._
import sbt.{ CompileSetup, Relation }
import xsbti.api.{ Compilation, Source }
import xsbti.compile.{ MultipleOutput, SingleOutput }
import javax.xml.bind.DatatypeConverter

// Very simple timer for timing repeated code sections.
// TODO: Temporary. Remove once we've milked all available performance gains.
private[inc] object FormatTimer {
  private val timers = scala.collection.mutable.Map[String, Long]()
  private val printTimings = "true" == System.getProperty("sbt.analysis.debug.timing")

  def aggregate[T](key: String)(f: => T) = {
    val start = System.nanoTime()
    val ret = f
    val elapsed = System.nanoTime() - start
    timers.update(key, timers.getOrElseUpdate(key, 0) + elapsed)
    ret
  }

  def time[T](key: String)(f: => T) = {
    val ret = aggregate(key)(f)
    close(key)
    ret
  }

  def close(key: String): Unit = {
    if (printTimings) {
      println("[%s] %dms".format(key, timers.getOrElse(key, 0L) / 1000000))
    }
    timers.remove(key)
  }
}

class ReadException(s: String) extends Exception(s) {
  def this(expected: String, found: String) = this("Expected: %s. Found: %s.".format(expected, found))
}

class EOFException extends ReadException("Unexpected EOF.")

// A text-based serialization format for Analysis objects.
// This code has been tuned for high performance, and therefore has non-idiomatic areas.
// Please refrain from making changes that significantly degrade read/write performance on large analysis files.
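//
// For orientation, a serialized file begins like the header expected by TextAnalysisFormatTest
// below (counts and values vary by project):
//   format version: 5
//   output mode:
//   1 items
//   0 -> single
//   output directories:
//   1 items
//   ...
// i.e. a version line followed by repeated "<header>:" / "<n> items" / "key -> value" groups.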
object TextAnalysisFormat {
  // Some types are not required for external inspection/manipulation of the analysis file,
  // and are complex to serialize as text. So we serialize them as base64-encoded sbinary-serialized blobs.
  // TODO: This is a big performance hit. Figure out a more efficient way to serialize API objects?
  import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat, tuple2Format }
  import AnalysisFormats._
  implicit val compilationF = xsbt.api.CompilationFormat

  def write(out: Writer, analysis: Analysis, setup: CompileSetup): Unit = {
    VersionF.write(out)
    // We start by writing the compile setup, which contains the value of the `nameHashing`
    // flag that is needed to properly deserialize relations.
    FormatTimer.time("write setup") { CompileSetupF.write(out, setup) }
    // Next we write relations because that's the part of greatest interest to external readers,
    // who can abort reading early once they've read them.
    FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) }
    FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) }
    FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) }
    FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) }
    FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) }
    out.flush()
  }

  def read(in: BufferedReader): (Analysis, CompileSetup) = {
    VersionF.read(in)
    val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) }
    val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) }
    val stamps = FormatTimer.time("read stamps") { StampsF.read(in) }
    val apis = FormatTimer.time("read apis") { APIsF.read(in) }
    val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) }
    val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) }

    (Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup)
  }

  private[this] object VersionF {
    val currentVersion = "5"

    def write(out: Writer): Unit = {
      out.write("format version: %s\n".format(currentVersion))
    }

    private val versionPattern = """format version: (\w+)""".r
    def read(in: BufferedReader): Unit = {
      in.readLine() match {
        case versionPattern(version) => validateVersion(version)
        case s: String => throw new ReadException("\"format version: <version>\"", s)
        case null => throw new EOFException
      }
    }

    def validateVersion(version: String): Unit = {
      // TODO: Support backwards compatibility?
      if (version != currentVersion) {
        throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion))
      }
    }
  }

  private[this] object RelationsF {
    object Headers {
      val srcProd = "products"
      val binaryDep = "binary dependencies"
      val directSrcDep = "direct source dependencies"
      val directExternalDep = "direct external dependencies"
      val internalSrcDepPI = "public inherited source dependencies"
      val externalDepPI = "public inherited external dependencies"
      val classes = "class names"

      val memberRefInternalDep = "member reference internal dependencies"
      val memberRefExternalDep = "member reference external dependencies"
      val inheritanceInternalDep = "inheritance internal dependencies"
      val inheritanceExternalDep = "inheritance external dependencies"

      val usedNames = "used names"
    }

    def write(out: Writer, relations: Relations): Unit = {
      // This ordering is used to persist all values in order. Since all values will be
      // persisted using their string representation, it makes sense to sort them using
      // their string representation.
      val toStringOrd = new Ordering[Any] {
        def compare(a: Any, b: Any) = a.toString compare b.toString
      }
      def writeRelation[T](header: String, rel: Relation[File, T]): Unit = {
        writeHeader(out, header)
        writeSize(out, rel.size)
        // We sort for ease of debugging and for more efficient reconstruction when reading.
        // Note that we don't share code with writeMap. Each is implemented more efficiently
        // than the shared code would be, and the difference is measurable on large analyses.
        rel.forwardMap.toSeq.sortBy(_._1).foreach {
          case (k, vs) =>
            val kStr = k.toString
            vs.toSeq.sorted(toStringOrd) foreach { v =>
              out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n")
            }
        }
      }

      relations.allRelations.foreach {
        case (header, rel) => writeRelation(header, rel)
      }
    }

    def read(in: BufferedReader, nameHashing: Boolean): Relations = {
      def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = {
        val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator
        // Reconstruct the forward map. This is more efficient than Relation.empty ++ items.
        var forward: List[(File, Set[T])] = Nil
        var currentItem: (File, T) = null
        var currentFile: File = null
        var currentVals: List[T] = Nil
        def closeEntry(): Unit = {
          if (currentFile != null) forward = (currentFile, currentVals.toSet) :: forward
          currentFile = currentItem._1
          currentVals = currentItem._2 :: Nil
        }
        while (items.hasNext) {
          currentItem = items.next()
          if (currentItem._1 == currentFile) currentVals = currentItem._2 :: currentVals else closeEntry()
        }
        if (currentItem != null) closeEntry()
        Relation.reconstruct(forward.toMap)
      }

      val relations = Relations.existingRelations map { case (header, fun) => readRelation(header, fun) }

      Relations.construct(nameHashing, relations)
    }
  }

  private[this] object StampsF {
    object Headers {
      val products = "product stamps"
      val sources = "source stamps"
      val binaries = "binary stamps"
      val classNames = "class names"
    }

    def write(out: Writer, stamps: Stamps): Unit = {
      def doWriteMap[V](header: String, m: Map[File, V]) = writeMap(out)(header, m, { v: V => v.toString })

      doWriteMap(Headers.products, stamps.products)
      doWriteMap(Headers.sources, stamps.sources)
      doWriteMap(Headers.binaries, stamps.binaries)
      doWriteMap(Headers.classNames, stamps.classNames)
    }

    def read(in: BufferedReader): Stamps = {
      def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v)
      val products = doReadMap(Headers.products, Stamp.fromString)
      val sources = doReadMap(Headers.sources, Stamp.fromString)
      val binaries = doReadMap(Headers.binaries, Stamp.fromString)
      val classNames = doReadMap(Headers.classNames, identity[String])

      Stamps(products, sources, binaries, classNames)
    }
  }

  private[this] object APIsF {
    object Headers {
      val internal = "internal apis"
      val external = "external apis"
    }

    val stringToSource = ObjectStringifier.stringToObj[Source] _
    val sourceToString = ObjectStringifier.objToString[Source] _

    def write(out: Writer, apis: APIs): Unit = {
      writeMap(out)(Headers.internal, apis.internal, sourceToString, inlineVals = false)
      writeMap(out)(Headers.external, apis.external, sourceToString, inlineVals = false)
      FormatTimer.close("bytes -> base64")
      FormatTimer.close("byte copy")
      FormatTimer.close("sbinary write")
    }

    def read(in: BufferedReader): APIs = {
      val internal = readMap(in)(Headers.internal, new File(_), stringToSource)
      val external = readMap(in)(Headers.external, identity[String], stringToSource)
      FormatTimer.close("base64 -> bytes")
      FormatTimer.close("sbinary read")
      APIs(internal, external)
    }
  }

  private[this] object SourceInfosF {
    object Headers {
      val infos = "source infos"
    }

    val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _
    val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _

    def write(out: Writer, infos: SourceInfos): Unit = writeMap(out)(Headers.infos, infos.allInfos, sourceInfoToString, inlineVals = false)
    def read(in: BufferedReader): SourceInfos = SourceInfos.make(readMap(in)(Headers.infos, new File(_), stringToSourceInfo))
  }

  private[this] object CompilationsF {
    object Headers {
      val compilations = "compilations"
    }

    val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _
    val compilationToString = ObjectStringifier.objToString[Compilation] _

    def write(out: Writer, compilations: Compilations): Unit =
      writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString)

    def read(in: BufferedReader): Compilations = Compilations.make(
      readSeq[Compilation](in)(Headers.compilations, stringToCompilation))
  }

  private[this] object CompileSetupF {
    object Headers {
      val outputMode = "output mode"
      val outputDir = "output directories"
      val compileOptions = "compile options"
      val javacOptions = "javac options"
      val compilerVersion = "compiler version"
      val compileOrder = "compile order"
      val nameHashing = "name hashing"
    }

    private[this] val singleOutputMode = "single"
    private[this] val multipleOutputMode = "multiple"
    private[this] val singleOutputKey = new File("output dir")

    def write(out: Writer, setup: CompileSetup): Unit = {
      val (mode, outputAsMap) = setup.output match {
        case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory))
        case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap)
      }

      writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String])
      writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath })
      writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String])
      writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String])
      writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String])
      writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String])
      writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString)
    }

    def read(in: BufferedReader): CompileSetup = {
      def s2f(s: String) = new File(s)
      def s2b(s: String): Boolean = s.toBoolean
      val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption
      val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f)
      val compileOptions = readSeq(in)(Headers.compileOptions, identity[String])
      val javacOptions = readSeq(in)(Headers.javacOptions, identity[String])
      val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head
      val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head
      val nameHashing = readSeq(in)(Headers.nameHashing, s2b).head

      val output = outputDirMode match {
        case Some(s) => s match {
          case `singleOutputMode` => new SingleOutput {
            val outputDirectory = outputAsMap(singleOutputKey)
          }
          case `multipleOutputMode` => new MultipleOutput {
            val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map {
              case (src: File, out: File) => new MultipleOutput.OutputGroup {
                val sourceDirectory = src
                val outputDirectory = out
                override def toString = s"OutputGroup($src -> $out)"
              }
            }
            override def toString = s"MultipleOutput($outputGroups)"
          }
          case str: String => throw new ReadException("Unrecognized output mode: " + str)
        }
        case None => throw new ReadException("No output mode specified")
      }

      new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion,
        xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing)
    }
  }

  private[this] object ObjectStringifier {
    def objToString[T](o: T)(implicit fmt: sbinary.Format[T]) = {
      val baos = new ByteArrayOutputStream()
      val out = new sbinary.JavaOutput(baos)
      FormatTimer.aggregate("sbinary write") { try { fmt.writes(out, o) } finally { baos.close() } }
      val bytes = FormatTimer.aggregate("byte copy") { baos.toByteArray }
      FormatTimer.aggregate("bytes -> base64") { DatatypeConverter.printBase64Binary(bytes) }
    }

    def stringToObj[T](s: String)(implicit fmt: sbinary.Format[T]) = {
      val bytes = FormatTimer.aggregate("base64 -> bytes") { DatatypeConverter.parseBase64Binary(s) }
      val in = new sbinary.JavaInput(new ByteArrayInputStream(bytes))
      FormatTimer.aggregate("sbinary read") { fmt.reads(in) }
    }
  }
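
  // Round-trip sketch (assumes an sbinary Format[T] is in scope for the value's type):
  //   val s = ObjectStringifier.objToString(someSource)   // one-line base64 text blob
  //   val back = ObjectStringifier.stringToObj[Source](s) // deserializes back to an equal Source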

  // Various helper functions.

  private[this] def writeHeader(out: Writer, header: String): Unit = out.write(header + ":\n")

  private[this] def expectHeader(in: BufferedReader, expectedHeader: String): Unit = {
    val header = in.readLine()
    if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header)
  }

  private[this] def writeSize(out: Writer, n: Int): Unit = out.write("%d items\n".format(n))

  private val itemsPattern = """(\d+) items""".r
  private[this] def readSize(in: BufferedReader): Int = {
    in.readLine() match {
      case itemsPattern(nStr) => Integer.parseInt(nStr)
      case s: String => throw new ReadException("\"<n> items\"", s)
      case null => throw new EOFException
    }
  }

  private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String): Unit = {
    // We write sequences as idx -> element maps, for uniformity with maps/relations.
    def n = s.length
    val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1
    val fmtStr = "%%0%dd".format(numDigits)
    // We only use this for relatively short seqs, so creating this extra map won't be a performance hit.
    val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap
    writeMap(out)(header, m, t2s)
  }
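  // For example, an 11-element Seq is keyed "00".."10": numDigits is log10(10).toInt + 1 = 2,
  // so indices are zero-padded to a fixed width and the sorted string keys restore element order on read.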

  private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] =
    (readPairs(in)(expectedHeader, identity[String], s2t).toSeq.sortBy(_._1) map (_._2))

  private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean = true)(implicit ord: Ordering[K]): Unit = {
    writeHeader(out, header)
    writeSize(out, m.size)
    m.keys.toSeq.sorted foreach { k =>
      out.write(k.toString)
      out.write(" -> ")
      if (!inlineVals) out.write("\n") // Put large vals on their own line, to save string munging on read.
      out.write(v2s(m(k)))
      out.write("\n")
    }
  }

  private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = {
    def toPair(s: String): (K, V) = {
      if (s == null) throw new EOFException
      val p = s.indexOf(" -> ")
      val k = s2k(s.substring(0, p))
      // Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob.
      val v = s2v(if (p == s.length - 4) in.readLine() else s.substring(p + 4))
      (k, v)
    }
    expectHeader(in, expectedHeader)
    val n = readSize(in)
    for (i <- 0 until n) yield toPair(in.readLine())
  }

  private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = {
    readPairs(in)(expectedHeader, s2k, s2v).toMap
  }
}

@ -1,16 +0,0 @@
package xsbt.api

import xsbti.api._
import sbinary._

object CompilationFormat extends Format[Compilation] {
  import java.io._
  def reads(in: Input): Compilation = {
    val oin = new ObjectInputStream(new InputWrapperStream(in))
    try { oin.readObject.asInstanceOf[Compilation] } finally { oin.close() }
  }
  def writes(out: Output, src: Compilation): Unit = {
    val oout = new ObjectOutputStream(new OutputWrapperStream(out))
    try { oout.writeObject(src) } finally { oout.close() }
  }
}

@ -1,35 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package xsbt.api

import xsbti.SafeLazy
import xsbti.api._
import sbt.Using
import sbinary._
import DefaultProtocol._
import Operations.{ read, write }
import java.io.File
import scala.collection.mutable

object SourceFormat extends Format[Source] {
  import java.io._
  def reads(in: Input): Source =
    {
      val oin = new ObjectInputStream(new InputWrapperStream(in))
      try { oin.readObject.asInstanceOf[Source] } finally { oin.close() }
    }
  def writes(out: Output, src: Source): Unit = {
    val oout = new ObjectOutputStream(new OutputWrapperStream(out))
    try { oout.writeObject(src) } finally { oout.close() }
  }
}
final class InputWrapperStream(in: Input) extends java.io.InputStream {
  def toInt(b: Byte) = if (b < 0) b + 256 else b.toInt
  def read() = try { toInt(in.readByte) } catch { case e: sbinary.EOF => -1 }
  override def read(b: Array[Byte], off: Int, len: Int) = in.readTo(b, off, len)
}
final class OutputWrapperStream(out: Output) extends java.io.OutputStream {
  override def write(bs: Array[Byte], off: Int, len: Int) = out.writeAll(bs, off, len)
  def write(b: Int) = out.writeByte(b.toByte)
}

@ -1,112 +0,0 @@
package sbt
package inc

import java.io.{ BufferedReader, File, StringReader, StringWriter }
import scala.math.abs
import org.scalacheck._
import Gen._
import Prop._

object TextAnalysisFormatTest extends Properties("TextAnalysisFormat") {

  val nameHashing = true
  val dummyOutput = new xsbti.compile.SingleOutput { def outputDirectory: java.io.File = new java.io.File("dummy") }
  val commonSetup = new CompileSetup(dummyOutput, new CompileOptions(Nil, Nil), "2.10.4", xsbti.compile.CompileOrder.Mixed, nameHashing)
  val commonHeader = """format version: 5
    |output mode:
    |1 items
    |0 -> single
    |output directories:
    |1 items
    |output dir -> dummy
    |compile options:
    |0 items
    |javac options:
    |0 items
    |compiler version:
    |1 items
    |0 -> 2.10.4
    |compile order:
    |1 items
    |0 -> Mixed
    |name hashing:
    |1 items
    |0 -> true""".stripMargin

  property("Write and read empty Analysis") = {
    val writer = new StringWriter
    val analysis = Analysis.empty(nameHashing)
    TextAnalysisFormat.write(writer, analysis, commonSetup)

    val result = writer.toString
    val reader = new BufferedReader(new StringReader(result))

    val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader)

    // Check both the serialized header layout and the round trip.
    result.startsWith(commonHeader) && analysis == readAnalysis
  }

  property("Write and read simple Analysis") = {

    import TestCaseGenerators._

    def f(s: String) = new File(s)
    val aScala = f("A.scala")
    val bScala = f("B.scala")
    val aSource = genSource("A" :: "A$" :: Nil).sample.get
    val bSource = genSource("B" :: "B$" :: Nil).sample.get
    val cSource = genSource("C" :: Nil).sample.get
    val exists = new Exists(true)
    val sourceInfos = SourceInfos.makeInfo(Nil, Nil)

    var analysis = Analysis.empty(nameHashing)
    analysis = analysis.addProduct(aScala, f("A.class"), exists, "A")
    analysis = analysis.addProduct(aScala, f("A$.class"), exists, "A$")
    analysis = analysis.addSource(aScala, aSource, exists, Nil, Nil, sourceInfos)
    analysis = analysis.addBinaryDep(aScala, f("x.jar"), "x", exists)
    analysis = analysis.addExternalDep(aScala, "C", cSource, inherited = false)

    val writer = new StringWriter
    TextAnalysisFormat.write(writer, analysis, commonSetup)

    val result = writer.toString
    val reader = new BufferedReader(new StringReader(result))

    val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader)

    // Check both the serialized header layout and the round trip.
    compare(analysis, readAnalysis) && result.startsWith(commonHeader)
  }

  property("Write and read complex Analysis") = forAllNoShrink(TestCaseGenerators.genAnalysis(nameHashing)) { analysis: Analysis =>
    val writer = new StringWriter
    TextAnalysisFormat.write(writer, analysis, commonSetup)

    val result = writer.toString
    val reader = new BufferedReader(new StringReader(result))

    val (readAnalysis, readSetup) = TextAnalysisFormat.read(reader)

    compare(analysis, readAnalysis) && result.startsWith(commonHeader)
  }

  // Compare two analyses with useful labelling when they aren't equal.
  private[this] def compare(left: Analysis, right: Analysis): Prop =
    s" LEFT: $left" |:
      s"RIGHT: $right" |:
      s"STAMPS EQUAL: ${left.stamps == right.stamps}" |:
      s"APIS EQUAL: ${left.apis == right.apis}" |:
      s"RELATIONS EQUAL: ${left.relations == right.relations}" |:
      "UNEQUAL" |:
      (left == right)
}

@ -1,13 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt

final case class ClasspathOptions(bootLibrary: Boolean, compiler: Boolean, extra: Boolean, autoBoot: Boolean, filterLibrary: Boolean) extends xsbti.compile.ClasspathOptions
object ClasspathOptions {
  def manual = ClasspathOptions(false, false, false, true, false)
  def boot = ClasspathOptions(true, false, false, true, true)
  def repl = auto
  def javac(compiler: Boolean) = new ClasspathOptions(false, compiler, false, false, false)
  def auto = ClasspathOptions(true, true, true, true, true)
}
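
// For reference, the preset flag combinations are (bootLibrary, compiler, extra, autoBoot, filterLibrary):
//   manual -> (false, false, false, true,  false)
//   boot   -> (true,  false, false, true,  true)
//   auto   -> (true,  true,  true,  true,  true)   // repl is an alias for auto
// e.g. ClasspathOptions.auto puts the Scala library on the boot classpath and adds the
// compiler and extra jars to the user classpath.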

@ -1,135 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009, 2010 Mark Harrah
 */
package sbt

// The following code is based on scala.tools.nsc.reporters.{AbstractReporter, ConsoleReporter, Reporter}
// Copyright 2002-2009 LAMP/EPFL
// see licenses/LICENSE_Scala
// Original author: Martin Odersky

import xsbti.{ Maybe, Position, Problem, Reporter, Severity }
import java.io.File
import java.util.EnumMap
import scala.collection.mutable
import LoggerReporter._
import Logger.{ m2o, o2m, position, problem }
import Severity.{ Error, Info => SInfo, Warn }

object LoggerReporter {
  final class PositionKey(pos: Position) {
    def offset = pos.offset
    def sourceFile = pos.sourceFile

    override def equals(o: Any) =
      o match { case pk: PositionKey => equalsKey(pk); case _ => false }

    def equalsKey(o: PositionKey) =
      m2o(pos.offset) == m2o(o.offset) &&
        m2o(pos.sourceFile) == m2o(o.sourceFile)
    override def hashCode =
      m2o(pos.offset).hashCode * 31 + m2o(pos.sourceFile).hashCode
  }

  def countElementsAsString(n: Int, elements: String): String =
    n match {
      case 0 => "no " + elements + "s"
      case 1 => "one " + elements
      case 2 => "two " + elements + "s"
      case 3 => "three " + elements + "s"
      case 4 => "four " + elements + "s"
      case _ => "" + n + " " + elements + "s"
    }
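
  // e.g. countElementsAsString(0, "error") == "no errors",
  //      countElementsAsString(1, "error") == "one error",
  //      countElementsAsString(7, "warning") == "7 warnings"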
}

class LoggerReporter(maximumErrors: Int, log: Logger, sourcePositionMapper: Position => Position = { p => p }) extends xsbti.Reporter {
  val positions = new mutable.HashMap[PositionKey, Severity]
  val count = new EnumMap[Severity, Int](classOf[Severity])
  private[this] val allProblems = new mutable.ListBuffer[Problem]

  reset()

  def reset(): Unit = {
    count.put(Warn, 0)
    count.put(SInfo, 0)
    count.put(Error, 0)
    positions.clear()
    allProblems.clear()
  }
  def hasWarnings = count.get(Warn) > 0
  def hasErrors = count.get(Error) > 0
  def problems: Array[Problem] = allProblems.toArray
  def comment(pos: Position, msg: String): Unit = ()

  def printSummary(): Unit = {
    val warnings = count.get(Severity.Warn)
    if (warnings > 0)
      log.warn(countElementsAsString(warnings, "warning") + " found")
    val errors = count.get(Severity.Error)
    if (errors > 0)
      log.error(countElementsAsString(errors, "error") + " found")
  }

  def inc(sev: Severity) = count.put(sev, count.get(sev) + 1)

  def display(pos: Position, msg: String, severity: Severity): Unit = {
    inc(severity)
    if (severity != Error || maximumErrors <= 0 || count.get(severity) <= maximumErrors)
      print(severityLogger(severity), pos, msg)
  }
  def severityLogger(severity: Severity): (=> String) => Unit =
    m =>
      {
        (severity match {
          case Error => log.error(m)
          case Warn => log.warn(m)
          case SInfo => log.info(m)
        })
      }

  def print(log: (=> String) => Unit, pos: Position, msg: String): Unit = {
    if (pos.sourcePath.isEmpty && pos.line.isEmpty)
      log(msg)
    else {
      val sourcePrefix = m2o(pos.sourcePath).getOrElse("")
      val lineNumberString = m2o(pos.line).map(":" + _ + ":").getOrElse(":") + " "
      log(sourcePrefix + lineNumberString + msg)
      val lineContent = pos.lineContent
      if (!lineContent.isEmpty) {
        log(lineContent)
        for (space <- m2o(pos.pointerSpace))
          log(space + "^") // pointer to the column position of the error/warning
      }
    }
  }
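
  // A diagnostic is rendered roughly as (illustrative values):
  //   /src/A.scala:12: value x is not a member of B
  //     val y = b.x
  //               ^
  // i.e. "<sourcePath>:<line>: <message>", then the offending line, then a caret at the column.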

  def log(pos: Position, msg: String, severity: Severity): Unit =
    {
      val mappedPos = sourcePositionMapper(pos)
      allProblems += problem("", mappedPos, msg, severity)
      severity match {
        case Warn | Error =>
          {
            if (!testAndLog(mappedPos, severity))
              display(mappedPos, msg, severity)
          }
        case _ => display(mappedPos, msg, severity)
      }
    }

  def testAndLog(pos: Position, severity: Severity): Boolean =
    {
      if (pos.offset.isEmpty || pos.sourceFile.isEmpty)
        false
      else {
        val key = new PositionKey(pos)
        if (positions.get(key).exists(_.ordinal >= severity.ordinal))
          true
        else {
          positions(key) = severity
          false
        }
      }
    }
}

@ -1,161 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt
package compiler

import xsbti.{ AnalysisCallback, Logger => xLogger, Reporter }
import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, DependencyChanges, GlobalsCache, CompileProgress, Output }
import java.io.File
import java.net.{ URL, URLClassLoader }

/**
 * Interface to the Scala compiler that uses the dependency analysis plugin. This class uses the Scala library and compiler
 * provided by scalaInstance. This class requires a ComponentManager in order to obtain the interface code to scalac and
 * the analysis plugin. Because these call Scala code for a different Scala version than the one used for this class, they must
 * be compiled for the version of Scala being used.
 */
final class AnalyzingCompiler private (val scalaInstance: xsbti.compile.ScalaInstance, val provider: CompilerInterfaceProvider, val cp: xsbti.compile.ClasspathOptions, onArgsF: Seq[String] => Unit) extends CachedCompilerProvider {
  def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions) =
    this(scalaInstance, provider, cp, _ => ())
  def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider) = this(scalaInstance, provider, ClasspathOptions.auto)

  @deprecated("A Logger is no longer needed.", "0.13.0")
  def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider, log: Logger) = this(scalaInstance, provider)

  @deprecated("A Logger is no longer needed.", "0.13.0")
  def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions, log: Logger) = this(scalaInstance, provider, cp)

  def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, cp, f)

  def apply(sources: Seq[File], changes: DependencyChanges, classpath: Seq[File], singleOutput: File, options: Seq[String], callback: AnalysisCallback, maximumErrors: Int, cache: GlobalsCache, log: Logger) {
    val arguments = (new CompilerArguments(scalaInstance, cp))(Nil, classpath, None, options)
    val output = CompileOutput(singleOutput)
    compile(sources, changes, arguments, output, callback, new LoggerReporter(maximumErrors, log, p => p), cache, log, None)
  }

  def compile(sources: Seq[File], changes: DependencyChanges, options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, log: Logger, progressOpt: Option[CompileProgress]): Unit =
    {
      val cached = cache(options.toArray, output, !changes.isEmpty, this, log, reporter)
      val progress = progressOpt getOrElse IgnoreProgress
      compile(sources, changes, callback, log, reporter, progress, cached)
    }

  def compile(sources: Seq[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, reporter: Reporter, progress: CompileProgress, compiler: CachedCompiler) {
    onArgsF(compiler.commandArguments(sources.toArray))
    call("xsbt.CompilerInterface", "run", log)(
      classOf[Array[File]], classOf[DependencyChanges], classOf[AnalysisCallback], classOf[xLogger], classOf[Reporter], classOf[CompileProgress], classOf[CachedCompiler])(
      sources.toArray, changes, callback, log, reporter, progress, compiler)
  }
  def newCachedCompiler(arguments: Array[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
    newCachedCompiler(arguments: Seq[String], output, log, reporter, resident)

  def newCachedCompiler(arguments: Seq[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
    {
      call("xsbt.CompilerInterface", "newCompiler", log)(
        classOf[Array[String]], classOf[Output], classOf[xLogger], classOf[Reporter], classOf[Boolean])(
        arguments.toArray[String]: Array[String], output, log, reporter, resident: java.lang.Boolean).
        asInstanceOf[CachedCompiler]
    }

  def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit =
    doc(sources, classpath, outputDirectory, options, log, new LoggerReporter(maximumErrors, log))
  def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger, reporter: Reporter): Unit =
    {
      val arguments = (new CompilerArguments(scalaInstance, cp))(sources, classpath, Some(outputDirectory), options)
      onArgsF(arguments)
      call("xsbt.ScaladocInterface", "run", log)(classOf[Array[String]], classOf[xLogger], classOf[Reporter])(
        arguments.toArray[String]: Array[String], log, reporter)
    }
  def console(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit =
    {
      onArgsF(consoleCommandArguments(classpath, options, log))
      val (classpathString, bootClasspath) = consoleClasspaths(classpath)
      val (names, values) = bindings.unzip
      call("xsbt.ConsoleInterface", "run", log)(
        classOf[Array[String]], classOf[String], classOf[String], classOf[String], classOf[String], classOf[ClassLoader], classOf[Array[String]], classOf[Array[Any]], classOf[xLogger])(
        options.toArray[String]: Array[String], bootClasspath, classpathString, initialCommands, cleanupCommands, loader.orNull, names.toArray[String], values.toArray[Any], log)
    }

  private[this] def consoleClasspaths(classpath: Seq[File]): (String, String) =
    {
      val arguments = new CompilerArguments(scalaInstance, cp)
      val classpathString = CompilerArguments.absString(arguments.finishClasspath(classpath))
      val bootClasspath = if (cp.autoBoot) arguments.createBootClasspathFor(classpath) else ""
      (classpathString, bootClasspath)
    }
  def consoleCommandArguments(classpath: Seq[File], options: Seq[String], log: Logger): Seq[String] =
    {
      val (classpathString, bootClasspath) = consoleClasspaths(classpath)
      val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)(
        classOf[Array[String]], classOf[String], classOf[String], classOf[xLogger])(
        options.toArray[String]: Array[String], bootClasspath, classpathString, log)
      argsObj.asInstanceOf[Array[String]].toSeq
    }
  def force(log: Logger): Unit = provider(scalaInstance, log)
  private def call(interfaceClassName: String, methodName: String, log: Logger)(argTypes: Class[_]*)(args: AnyRef*): AnyRef =
    {
      val interfaceClass = getInterfaceClass(interfaceClassName, log)
      val interface = interfaceClass.newInstance.asInstanceOf[AnyRef]
      val method = interfaceClass.getMethod(methodName, argTypes: _*)
      try { method.invoke(interface, args: _*) }
      catch {
        case e: java.lang.reflect.InvocationTargetException =>
          e.getCause match {
            case c: xsbti.CompileFailed => throw new CompileFailed(c.arguments, c.toString, c.problems)
            case t => throw t
          }
      }
    }
  private[this] def loader(log: Logger) =
    {
      val interfaceJar = provider(scalaInstance, log)
      // this goes to scalaInstance.loader for scala classes and the loader of this class for xsbti classes
      val dual = createDualLoader(scalaInstance.loader, getClass.getClassLoader)
      new URLClassLoader(Array(interfaceJar.toURI.toURL), dual)
    }
  private[this] def getInterfaceClass(name: String, log: Logger) = Class.forName(name, true, loader(log))
  protected def createDualLoader(scalaLoader: ClassLoader, sbtLoader: ClassLoader): ClassLoader =
    {
      val xsbtiFilter = (name: String) => name.startsWith("xsbti.")
      val notXsbtiFilter = (name: String) => !xsbtiFilter(name)
      new classpath.DualLoader(scalaLoader, notXsbtiFilter, x => true, sbtLoader, xsbtiFilter, x => false)
    }
  override def toString = "Analyzing compiler (Scala " + scalaInstance.actualVersion + ")"
}
object AnalyzingCompiler {
  import sbt.IO.{ copy, createDirectory, zip, jars, unzip, withTemporaryDirectory }

  // Note: The Scala build now depends on some details of this method:
  // https://github.com/jsuereth/scala/commit/3431860048df8d2a381fb85a526097e00154eae0
  /**
   * Extract sources from source jars, compile them with the xsbti interfaces on the classpath, and package the compiled classes and
   * any resources from the source jars into a final jar.
   */
  def compileSources(sourceJars: Iterable[File], targetJar: File, xsbtiJars: Iterable[File], id: String, compiler: RawCompiler, log: Logger) {
    val isSource = (f: File) => isSourceName(f.getName)
    def keepIfSource(files: Set[File]): Set[File] = if (files.exists(isSource)) files else Set()

    withTemporaryDirectory { dir =>
      val extractedSources = (Set[File]() /: sourceJars) { (extracted, sourceJar) => extracted ++ keepIfSource(unzip(sourceJar, dir)) }
      val (sourceFiles, resources) = extractedSources.partition(isSource)
      withTemporaryDirectory { outputDirectory =>
        log.info("'" + id + "' not yet compiled for Scala " + compiler.scalaInstance.actualVersion + ". Compiling...")
        val start = System.currentTimeMillis
        try {
          compiler(sourceFiles.toSeq, compiler.scalaInstance.libraryJar +: (xsbtiJars.toSeq ++ sourceJars), outputDirectory, "-nowarn" :: Nil)
          log.info(" Compilation completed in " + (System.currentTimeMillis - start) / 1000.0 + " s")
        } catch { case e: xsbti.CompileFailed => throw new CompileFailed(e.arguments, "Error compiling sbt component '" + id + "'", e.problems) }
        import sbt.Path._
        copy(resources pair rebase(dir, outputDirectory))
        zip((outputDirectory ***) pair (relativeTo(outputDirectory), false), targetJar)
      }
    }
  }
  private def isSourceName(name: String): Boolean = name.endsWith(".scala") || name.endsWith(".java")
}

private[this] object IgnoreProgress extends CompileProgress {
  def startUnit(phase: String, unitPath: String) {}
  def advance(current: Int, total: Int) = true
}

@ -1,70 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt
package compiler

import xsbti.ArtifactInfo
import scala.util
import java.io.File
import CompilerArguments.{ abs, absString, BootClasspathOption }

/**
 * Forms the list of options that is passed to the compiler from the required inputs and other options.
 * The directory containing scala-library.jar and scala-compiler.jar (scalaLibDirectory) is required in
 * order to add these jars to the boot classpath. The 'scala.home' property must be unset because Scala
 * puts jars in that directory on the bootclasspath. Because we use multiple Scala versions,
 * this would lead to compiling against the wrong library jar.
 */
final class CompilerArguments(scalaInstance: xsbti.compile.ScalaInstance, cp: xsbti.compile.ClasspathOptions) {
  def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: Option[File], options: Seq[String]): Seq[String] =
    {
      checkScalaHomeUnset()
      val cpWithCompiler = finishClasspath(classpath)
      // The Scala compiler's treatment of an empty classpath is troublesome (as of 2.9.1).
      // We append a random dummy element as a workaround.
      val dummy = "dummy_" + Integer.toHexString(util.Random.nextInt)
      val classpathOption = Seq("-classpath", if (cpWithCompiler.isEmpty) dummy else absString(cpWithCompiler))
      val outputOption = outputDirectory map { out => Seq("-d", out.getAbsolutePath) } getOrElse Seq()
      options ++ outputOption ++ bootClasspathOption(hasLibrary(classpath)) ++ classpathOption ++ abs(sources)
    }
|
||||
def finishClasspath(classpath: Seq[File]): Seq[File] =
|
||||
filterLibrary(classpath) ++ include(cp.compiler, scalaInstance.compilerJar) ++ include(cp.extra, scalaInstance.otherJars: _*)
|
||||
private[this] def include(flag: Boolean, jars: File*) = if (flag) jars else Nil
|
||||
private[this] def abs(files: Seq[File]) = files.map(_.getAbsolutePath).sortWith(_ < _)
|
||||
private[this] def checkScalaHomeUnset(): Unit = {
|
||||
val scalaHome = System.getProperty("scala.home")
|
||||
assert((scalaHome eq null) || scalaHome.isEmpty, "'scala.home' should not be set (was " + scalaHome + ")")
|
||||
}
|
||||
def createBootClasspathFor(classpath: Seq[File]) = createBootClasspath(hasLibrary(classpath) || cp.compiler || cp.extra)
|
||||
|
||||
/** Add the correct Scala library jar to the boot classpath if `addLibrary` is true.*/
|
||||
def createBootClasspath(addLibrary: Boolean) =
|
||||
{
|
||||
val originalBoot = System.getProperty("sun.boot.class.path", "")
|
||||
if (addLibrary) {
|
||||
val newBootPrefix = if (originalBoot.isEmpty) "" else originalBoot + File.pathSeparator
|
||||
newBootPrefix + scalaInstance.libraryJar.getAbsolutePath
|
||||
} else
|
||||
originalBoot
|
||||
}
|
||||
def filterLibrary(classpath: Seq[File]) = if (cp.filterLibrary) classpath filterNot isScalaLibrary else classpath
|
||||
def hasLibrary(classpath: Seq[File]) = classpath exists isScalaLibrary
|
||||
private[this] val isScalaLibrary: File => Boolean = file => {
|
||||
val name = file.getName
|
||||
(name contains ArtifactInfo.ScalaLibraryID) || file.getName == scalaInstance.libraryJar.getName
|
||||
}
|
||||
def bootClasspathOption(addLibrary: Boolean) = if (cp.autoBoot) Seq(BootClasspathOption, createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspath(addLibrary: Boolean) = if (cp.autoBoot) IO.parseClasspath(createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspathFor(classpath: Seq[File]) = bootClasspath(hasLibrary(classpath))
|
||||
|
||||
import Path._
|
||||
def extClasspath: Seq[File] = (IO.parseClasspath(System.getProperty("java.ext.dirs")) * "*.jar").get
|
||||
}
|
||||
object CompilerArguments {
|
||||
val BootClasspathOption = "-bootclasspath"
|
||||
def abs(files: Seq[File]): Seq[String] = files.map(_.getAbsolutePath)
|
||||
def abs(files: Set[File]): Seq[String] = abs(files.toSeq)
|
||||
def absString(files: Seq[File]): String = abs(files).mkString(File.pathSeparator)
|
||||
def absString(files: Set[File]): String = absString(files.toSeq)
|
||||
}
|
||||
|
|
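// Usage sketch (hypothetical paths): assembling plain compiler arguments with
// CompilerArguments. `instance` and `opts` stand for whatever ScalaInstance and
// ClasspathOptions the caller already has.
object CompilerArgumentsSketch {
  import java.io.File
  def argsFor(instance: xsbti.compile.ScalaInstance, opts: xsbti.compile.ClasspathOptions): Seq[String] = {
    val arguments = new sbt.compiler.CompilerArguments(instance, opts)
    // With opts.autoBoot set, the result also carries "-bootclasspath" pointing
    // at the instance's scala-library.jar (see createBootClasspath above).
    arguments(
      sources = Seq(new File("src/A.scala")),
      classpath = Seq(new File("lib/dep.jar")),
      outputDirectory = Some(new File("target/classes")),
      options = Seq("-deprecation"))
  }
}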
@ -1,49 +0,0 @@
package sbt
package compiler

import xsbti.{ Logger => xLogger, Reporter }
import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, GlobalsCache, Output }
import Logger.f0
import java.io.File
import java.util.{ LinkedHashMap, Map }

private final class CompilerCache(val maxInstances: Int) extends GlobalsCache {
  private[this] val cache = lru[CompilerKey, CachedCompiler](maxInstances)
  private[this] def lru[A, B](max: Int) = new LinkedHashMap[A, B](8, 0.75f, true) {
    override def removeEldestEntry(eldest: Map.Entry[A, B]): Boolean = size > max
  }
  def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = synchronized {
    val key = CompilerKey(dropSources(args.toList), c.scalaInstance.actualVersion)
    if (forceNew) cache.remove(key)
    cache.get(key) match {
      case null =>
        log.debug(f0("Compiler cache miss. " + key.toString))
        put(key, c.newCachedCompiler(args, output, log, reporter, /* resident = */ !forceNew))
      case cc =>
        log.debug(f0("Compiler cache hit (" + cc.hashCode.toHexString + "). " + key.toString))
        cc
    }
  }
  def clear(): Unit = synchronized { cache.clear() }

  private[this] def dropSources(args: Seq[String]): Seq[String] =
    args.filterNot(arg => arg.endsWith(".scala") || arg.endsWith(".java"))

  private[this] def put(key: CompilerKey, cc: CachedCompiler): CachedCompiler =
    {
      cache.put(key, cc)
      cc
    }
  private[this] final case class CompilerKey(args: Seq[String], scalaVersion: String) {
    override def toString = "scala " + scalaVersion + ", args: " + args.mkString(" ")
  }
}
object CompilerCache {
  def apply(maxInstances: Int): GlobalsCache = new CompilerCache(maxInstances)

  val fresh: GlobalsCache = new GlobalsCache {
    def clear(): Unit = ()
    def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler =
      c.newCachedCompiler(args, output, log, reporter, /* resident = */ false)
  }
}
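// The `lru` helper above relies on java.util.LinkedHashMap's access-order mode
// (third constructor argument `true`) plus removeEldestEntry to get an LRU map
// for free. A self-contained sketch of the same trick:
object LruSketch {
  import java.util.{ LinkedHashMap, Map => JMap }
  def lru[A, B](max: Int) = new LinkedHashMap[A, B](8, 0.75f, true) {
    override def removeEldestEntry(eldest: JMap.Entry[A, B]): Boolean = size > max
  }
  def main(args: Array[String]): Unit = {
    val m = lru[String, Int](2)
    m.put("a", 1); m.put("b", 2)
    m.get("a")        // touch "a" so "b" becomes the eldest entry
    m.put("c", 3)     // evicts "b", the least recently used entry
    println(m.keySet) // prints [a, c]
  }
}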
@ -1,13 +0,0 @@
package sbt
package compiler

import java.io.File

trait CompilerInterfaceProvider {
  def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File
}
object CompilerInterfaceProvider {
  def constant(file: File): CompilerInterfaceProvider = new CompilerInterfaceProvider {
    def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = file
  }
}
@ -1,28 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2012 Eugene Vigdorchik
 */

package sbt
package compiler

import xsbti.compile.{ Output, SingleOutput, MultipleOutput }
import java.io.File

/** Constructors for the `Output` ADT used by the incremental compiler. Either takes groups (src -> out) or a single output directory. */
object CompileOutput {
  def apply(dir: File): Output = new SingleOutput {
    def outputDirectory = dir
    override def toString = s"SingleOutput($outputDirectory)"
  }

  def apply(groups: (File, File)*): Output = new MultipleOutput {
    def outputGroups = groups.toArray map {
      case (src, out) => new MultipleOutput.OutputGroup {
        def sourceDirectory = src
        def outputDirectory = out
        override def toString = s"OutputGroup($src -> $out)"
      }
    }
    override def toString = s"MultiOutput($outputGroups)"
  }
}
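// Usage sketch (hypothetical paths): the two overloads build the two arms of
// the Output ADT, a single output directory or per-source-directory groups.
object CompileOutputSketch {
  import java.io.File
  val single = sbt.compiler.CompileOutput(new File("target/classes"))
  val multi = sbt.compiler.CompileOutput(
    new File("src/main/scala") -> new File("target/main"),
    new File("src/test/scala") -> new File("target/test"))
}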
@ -1,158 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009, 2010 Mark Harrah, Seth Tisue
 */
package sbt
package compiler

import java.io.{ File, PrintWriter }

import xsbti.{ Severity, Reporter }
import xsbti.compile.Output

@deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
abstract class JavacContract(val name: String, val clazz: String) {
  def exec(args: Array[String], writer: PrintWriter): Int
}
/** An interface we use to call the Java compiler. */
@deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
trait JavaCompiler extends xsbti.compile.JavaCompiler {
  /**
   * Runs the java compiler.
   *
   * @param sources The source files to compile
   * @param classpath The classpath for the compiler
   * @param outputDirectory The output directory for class files
   * @param options The arguments to pass to javac
   * @param log A log to which all the output from javac is written.
   */
  def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit

  def compile(sources: Array[File], classpath: Array[File], output: xsbti.compile.Output, options: Array[String], log: xsbti.Logger): Unit = {
    val outputDirectory = output match {
      case single: xsbti.compile.SingleOutput => single.outputDirectory
      case _ => throw new RuntimeException("Javac doesn't support multiple output directories")
    }
    apply(sources, classpath, outputDirectory, options)(log)
  }

  // TODO - Fix this so that the reporter is actually used.
  def compileWithReporter(sources: Array[File], classpath: Array[File], output: Output, options: Array[String], reporter: Reporter, log: xsbti.Logger): Unit = {
    compile(sources, classpath, output, options, log)
  }

  def onArgs(f: Seq[String] => Unit): JavaCompiler
}
@deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
trait Javadoc {
  def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit

  def onArgs(f: Seq[String] => Unit): Javadoc
}
@deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
trait JavaTool extends Javadoc with JavaCompiler {
  def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) =
    compile(JavaCompiler.javac, sources, classpath, outputDirectory, options)(log)

  def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) =
    compile(JavaCompiler.javadoc, sources, classpath, outputDirectory, options)(log)

  def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit

  def onArgs(f: Seq[String] => Unit): JavaTool
}
@deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
object JavaCompiler {
  @deprecated("Please use the new set of compilers in sbt.compilers.javac", "0.13.8")
  type Fork = (JavacContract, Seq[String], Logger) => Int

  val javac = new JavacContract("javac", "com.sun.tools.javac.Main") {
    def exec(args: Array[String], writer: PrintWriter) = {
      val m = Class.forName(clazz).getDeclaredMethod("compile", classOf[Array[String]], classOf[PrintWriter])
      m.invoke(null, args, writer).asInstanceOf[java.lang.Integer].intValue
    }
  }
  val javadoc = new JavacContract("javadoc", "com.sun.tools.javadoc.Main") {
    def exec(args: Array[String], writer: PrintWriter) = {
      val m = Class.forName(clazz).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]])
      m.invoke(null, name, writer, writer, writer, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue
    }
  }

  def construct(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = new JavaTool0(f, cp, scalaInstance, _ => ())

  /** The actual implementation of a JavaTool (javadoc + javac). */
  private[this] class JavaTool0(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance, onArgsF: Seq[String] => Unit) extends JavaTool {
    def onArgs(g: Seq[String] => Unit): JavaTool = new JavaTool0(f, cp, scalaInstance, g)
    def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] =
      {
        val augmentedClasspath = if (cp.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath
        val javaCp = ClasspathOptions.javac(cp.compiler)
        (new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options)
      }
    def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit = {
      val arguments = commandArguments(sources, classpath, outputDirectory, options, log)
      onArgsF(arguments)
      val code: Int = f(contract, arguments, log)
      log.debug(contract.name + " returned exit code: " + code)
      if (code != 0) throw new CompileFailed(arguments.toArray, contract.name + " returned nonzero exit code", Array())
    }
  }
  def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
    construct(directOrForkJavac, cp, scalaInstance)

  def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool =
    construct(directJavac, cp, scalaInstance)

  def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
    construct(forkJavac, cp, scalaInstance)

  def directOrForkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
    try { directJavac(contract, arguments, log) }
    catch {
      case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
        log.debug(contract.clazz + " not found with appropriate method signature; forking " + contract.name + " instead")
        forkJavac(doFork)(contract, arguments, log)
    }

  /** `doFork` should be a function that forks javac with the provided arguments and sends output to the given Logger. */
  def forkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
    {
      val (jArgs, nonJArgs) = arguments.partition(_.startsWith("-J"))
      def externalJavac(argFile: File) =
        doFork(contract, jArgs :+ ("@" + normalizeSlash(argFile.getAbsolutePath)), log)
      withArgumentFile(nonJArgs)(externalJavac)
    }
  val directJavac = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
    {
      val logger = new LoggerWriter(log)
      val writer = new PrintWriter(logger)
      val argsArray = arguments.toArray
      log.debug("Attempting to call " + contract.name + " directly...")

      var exitCode = -1
      try { exitCode = contract.exec(argsArray, writer) }
      finally { logger.flushLines(if (exitCode == 0) Level.Warn else Level.Error) }
      exitCode
    }

  /**
   * Helper method to create an argument file to pass to javac. This works around the Windows
   * command-line length limitation.
   * @param args The string arguments to pass to javac.
   * @param f A function which is passed the argument file.
   * @tparam T The return type.
   * @return The result of using the argument file.
   */
  def withArgumentFile[T](args: Seq[String])(f: File => T): T =
    {
      import IO.{ Newline, withTemporaryDirectory, write }
      withTemporaryDirectory { tmp =>
        val argFile = new File(tmp, "argfile")
        write(argFile, args.map(escapeSpaces).mkString(Newline))
        f(argFile)
      }
    }
  // javac's argument file seems to allow naive space escaping with quotes; escaping a quote with a backslash does not work.
  def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
  def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
}
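// A self-contained sketch of the @argfile mechanism used by forkJavac above:
// javac and javadoc expand "@file" arguments by reading arguments from the
// file, which sidesteps the Windows command-line length limit. File names and
// arguments are illustrative only.
object ArgFileSketch {
  import java.io.{ File, PrintWriter }
  def main(args: Array[String]): Unit = {
    val argFile = File.createTempFile("argfile", null)
    argFile.deleteOnExit()
    val out = new PrintWriter(argFile)
    try Seq("-nowarn", "\"src/A.java\"").foreach(out.println) // one argument per line, space-escaped with quotes
    finally out.close()
    // The forked command line stays short no matter how many options there are:
    println(Seq("javac", "@" + argFile.getAbsolutePath).mkString(" "))
  }
}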
@ -1,36 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2009, 2010 Mark Harrah
 */
package sbt
package compiler

import java.io.File

/**
 * A basic interface to the compiler. It is called in the same virtual machine, but no dependency analysis is done. This
 * is used, for example, to compile the interface/plugin code.
 * If `explicitClasspath` is true, the bootclasspath and classpath are not augmented. If it is false,
 * the scala-library.jar from `scalaInstance` is put on the bootclasspath and the scala-compiler jar goes on the classpath.
 */
class RawCompiler(val scalaInstance: xsbti.compile.ScalaInstance, cp: ClasspathOptions, log: Logger) {
  def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String]): Unit = {
    // reflection is required for binary compatibility
    // The following import ensures there is a compile error if the identifiers change,
    // but it should not be otherwise directly referenced.
    import scala.tools.nsc.Main.{ process => _ }

    val arguments = compilerArguments(sources, classpath, Some(outputDirectory), options)
    log.debug("Plain interface to Scala compiler " + scalaInstance.actualVersion + " with arguments: " + arguments.mkString("\n\t", "\n\t", ""))
    val mainClass = Class.forName("scala.tools.nsc.Main", true, scalaInstance.loader)
    val process = mainClass.getMethod("process", classOf[Array[String]])
    process.invoke(null, arguments.toArray)
    checkForFailure(mainClass, arguments.toArray)
  }
  def compilerArguments = new CompilerArguments(scalaInstance, cp)
  protected def checkForFailure(mainClass: Class[_], args: Array[String]): Unit = {
    val reporter = mainClass.getMethod("reporter").invoke(null)
    val failed = reporter.getClass.getMethod("hasErrors").invoke(reporter).asInstanceOf[Boolean]
    if (failed) throw new CompileFailed(args, "Plain compile failed", Array())
  }
}
class CompileFailed(val arguments: Array[String], override val toString: String, val problems: Array[xsbti.Problem]) extends xsbti.CompileFailed with FeedbackProvidedException
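// Usage sketch (hypothetical values): compiling a handful of sources with a
// RawCompiler, as AnalyzingCompiler.compileSources does above. `instance` and
// `log` are assumed to be in scope at the call site, and sbt.ClasspathOptions.auto
// is assumed available as in sbt 0.13.
object RawCompilerSketch {
  import java.io.File
  def compilePlain(instance: xsbti.compile.ScalaInstance, log: sbt.Logger): Unit = {
    val raw = new sbt.compiler.RawCompiler(instance, sbt.ClasspathOptions.auto, log)
    raw(
      sources = Seq(new File("src/API.scala")),
      classpath = Seq(instance.libraryJar),
      outputDirectory = new File("target/plain-classes"),
      options = Seq("-nowarn"))
  }
}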
@ -1,114 +0,0 @@
package sbt.compiler.javac

import java.io.File
import javax.tools.{ Diagnostic, JavaFileObject, DiagnosticListener }

import sbt.Logger
import xsbti.{ Severity, Reporter, Maybe }
import javax.tools.Diagnostic.NOPOS

/**
 * A diagnostics listener that feeds all messages into the given reporter.
 * @param reporter The reporter to which all diagnostics are forwarded.
 */
final class DiagnosticsReporter(reporter: Reporter) extends DiagnosticListener[JavaFileObject] {
  val END_OF_LINE_MATCHER = "(\r\n)|[\r]|[\n]"
  val EOL = System.getProperty("line.separator")
  private def fixedDiagnosticMessage(d: Diagnostic[_ <: JavaFileObject]): String = {
    def getRawMessage = d.getMessage(null)
    def fixWarnOrErrorMessage = {
      val tmp = getRawMessage
      // we fragment off the line/source/type report from the message.
      // NOTE - End of line handling may be off.
      val lines: Seq[String] =
        tmp.split(END_OF_LINE_MATCHER) match {
          case Array(head, tail @ _*) =>
            val newHead = head.split(":").last
            newHead +: tail
          case Array(head) =>
            head.split(":").last :: Nil
          case Array() => Seq.empty[String]
        }
      lines.mkString(EOL)
    }
    d.getKind match {
      case Diagnostic.Kind.ERROR | Diagnostic.Kind.WARNING | Diagnostic.Kind.MANDATORY_WARNING => fixWarnOrErrorMessage
      case _ => getRawMessage
    }
  }
  private def fixSource[T <: JavaFileObject](source: T): Option[String] = {
    try Option(source).map(_.toUri.normalize).map(new File(_)).map(_.getAbsolutePath)
    catch {
      case t: IllegalArgumentException =>
        // Oracle JDK6 has a super dumb notion of what a URI is. In fact, it's not even a legitimate URL, but a dump
        // of the filename in a "I hope this works to toString it" kind of way. This appears to work in practice
        // but we may need to re-evaluate.
        Option(source).map(_.toUri.toString)
    }
  }
  override def report(d: Diagnostic[_ <: JavaFileObject]): Unit = {
    val severity =
      d.getKind match {
        case Diagnostic.Kind.ERROR => Severity.Error
        case Diagnostic.Kind.WARNING | Diagnostic.Kind.MANDATORY_WARNING => Severity.Warn
        case _ => Severity.Info
      }
    val msg = fixedDiagnosticMessage(d)
    val pos: xsbti.Position =
      new xsbti.Position {
        // https://docs.oracle.com/javase/7/docs/api/javax/tools/Diagnostic.html
        // Negative values (except NOPOS) and 0 are not valid line or column numbers.
        private[this] def checkNoPos(n: Long): Option[Long] =
          n match {
            case NOPOS => None
            case x if x <= 0 => sys.error(s"Invalid position: $x")
            case x => Option(x)
          }

        override val line: Maybe[Integer] = Logger.o2m(checkNoPos(d.getLineNumber) map { x => new Integer(x.toInt) })
        def startPosition: Option[Long] = checkNoPos(d.getStartPosition)
        def endPosition: Option[Long] = checkNoPos(d.getEndPosition)
        override val offset: Maybe[Integer] = Logger.o2m(checkNoPos(d.getPosition) map { x => new Integer(x.toInt) })
        override def lineContent: String = {
          def getDiagnosticLine: Option[String] =
            try {
              // See com.sun.tools.javac.api.ClientCodeWrapper.DiagnosticSourceUnwrapper
              val diagnostic = d.getClass.getField("d").get(d)
              // See com.sun.tools.javac.util.JCDiagnostic#getDiagnosticSource
              val getDiagnosticSourceMethod = diagnostic.getClass.getDeclaredMethod("getDiagnosticSource")
              Option(getDiagnosticSourceMethod.invoke(diagnostic)) match {
                case Some(diagnosticSource) =>
                  // See com.sun.tools.javac.util.DiagnosticSource
                  val getLineMethod = diagnosticSource.getClass.getMethod("getLine", Integer.TYPE)
                  Option(getLineMethod.invoke(diagnosticSource, line.get())).map(_.toString)
                case _ => None
              }
            } catch {
              // TODO - catch ReflectiveOperationException once sbt is migrated to JDK7
              case ignored: Throwable => None
            }

          def getExpression: String =
            Option(d.getSource) match {
              case Some(source: JavaFileObject) =>
                (Option(source.getCharContent(true)), startPosition, endPosition) match {
                  case (Some(cc), Some(start), Some(end)) => cc.subSequence(start.toInt, end.toInt).toString
                  case _ => ""
                }
              case _ => ""
            }

          getDiagnosticLine.getOrElse(getExpression)
        }
        private val sourceUri = fixSource(d.getSource)
        override val sourcePath = Logger.o2m(sourceUri)
        override val sourceFile = Logger.o2m(sourceUri.map(new File(_)))
        override val pointer = Logger.o2m(Option.empty[Integer])
        override val pointerSpace = Logger.o2m(Option.empty[String])
        override def toString =
          if (sourceUri.isDefined) s"${sourceUri.get}:${if (line.isDefined) line.get else -1}"
          else ""
      }
    reporter.log(pos, msg, severity)
  }
}
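// A self-contained sketch of wiring a DiagnosticListener into the JDK compiler
// API, which is how DiagnosticsReporter above gets invoked. The file name is
// illustrative, and getSystemJavaCompiler may return null on a plain JRE.
object DiagnosticListenerSketch {
  import javax.tools.{ Diagnostic, DiagnosticListener, JavaFileObject, ToolProvider }
  import scala.collection.JavaConverters._
  def main(args: Array[String]): Unit = {
    val compiler = ToolProvider.getSystemJavaCompiler
    val listener = new DiagnosticListener[JavaFileObject] {
      def report(d: Diagnostic[_ <: JavaFileObject]): Unit =
        println(s"${d.getKind}: ${d.getMessage(null)} at line ${d.getLineNumber}")
    }
    val fileManager = compiler.getStandardFileManager(listener, null, null)
    val units = fileManager.getJavaFileObjectsFromFiles(Seq(new java.io.File("Test.java")).asJava)
    compiler.getTask(null, fileManager, listener, null, null, units).call()
  }
}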
@ -1,73 +0,0 @@
package sbt.compiler.javac

import java.io.File

import sbt.IO._
import sbt.{ IO, Process, Logger }
import xsbti.Reporter
import xsbti.compile.{ ClasspathOptions, ScalaInstance }

/** Helper methods for running the java toolchain by forking. */
object ForkedJava {
  /** Helper method to launch programs. */
  private[javac] def launch(javaHome: Option[File], program: String, sources: Seq[File], options: Seq[String], log: Logger, reporter: Reporter): Boolean = {
    val (jArgs, nonJArgs) = options.partition(_.startsWith("-J"))
    val allArguments = nonJArgs ++ sources.map(_.getAbsolutePath)

    withArgumentFile(allArguments) { argsFile =>
      val forkArgs = jArgs :+ s"@${normalizeSlash(argsFile.getAbsolutePath)}"
      val exe = getJavaExecutable(javaHome, program)
      val cwd = new File(new File(".").getAbsolutePath).getCanonicalFile
      val javacLogger = new JavacLogger(log, reporter, cwd)
      var exitCode = -1
      try {
        exitCode = Process(exe +: forkArgs, cwd) ! javacLogger
      } finally {
        javacLogger.flush(exitCode)
      }
      // We return true or false, depending on success.
      exitCode == 0
    }
  }

  /**
   * Helper method to create an argument file to pass to javac. This works around the Windows
   * command-line length limitation.
   * @param args The string arguments to pass to javac.
   * @param f A function which is passed the argument file.
   * @tparam T The return type.
   * @return The result of using the argument file.
   */
  def withArgumentFile[T](args: Seq[String])(f: File => T): T =
    {
      import IO.{ Newline, withTemporaryDirectory, write }
      withTemporaryDirectory { tmp =>
        val argFile = new File(tmp, "argfile")
        write(argFile, args.map(escapeSpaces).mkString(Newline))
        f(argFile)
      }
    }
  // javac's argument file seems to allow naive space escaping with quotes; escaping a quote with a backslash does not work.
  private def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
  private def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')

  import sbt.Path._
  /** Create the executable name for java. */
  private[javac] def getJavaExecutable(javaHome: Option[File], name: String): String =
    javaHome match {
      case None => name
      case Some(jh) =>
        // TODO - Was there any hackery for windows before?
        (jh / "bin" / name).getAbsolutePath
    }
}

/** An implementation of compiling java which forks a Javac instance. */
final class ForkedJavaCompiler(javaHome: Option[File]) extends JavaCompiler {
  def run(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
    ForkedJava.launch(javaHome, "javac", sources, options, log, reporter)
}
final class ForkedJavadoc(javaHome: Option[File]) extends Javadoc {
  def run(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
    ForkedJava.launch(javaHome, "javadoc", sources, options, log, reporter)
}
@ -1,151 +0,0 @@
package sbt.compiler.javac

import sbt.ClasspathOptions
import sbt.{ ClasspathOptions => _, _ }
import sbt.compiler._
import java.io.{ PrintWriter, File }

import javax.tools.{ DiagnosticListener, Diagnostic, JavaFileObject, DiagnosticCollector }
import xsbti.compile.ScalaInstance
import xsbti.compile._
import xsbti.{ Severity, Reporter }

/**
 * An interface to the Java toolchain.
 *
 * Specifically, access to running javadoc + javac.
 */
sealed trait JavaTools {
  /** The raw interface of the java compiler for direct access. */
  def compiler: JavaTool
  /**
   * This will run the java compiler.
   *
   * @param sources The list of java source files to compile.
   * @param options The set of options to pass to the java compiler (includes the classpath).
   * @param log The logger to dump output into.
   * @param reporter The reporter for semantic error messages.
   * @return true if no errors, false otherwise.
   */
  def compile(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean
  /**
   * This will run javadoc.
   *
   * @param sources The list of java source files to document.
   * @param options The set of options to pass to javadoc (includes the classpath).
   * @param log The logger to dump output into.
   * @param reporter The reporter for semantic error messages.
   * @return true if no errors, false otherwise.
   */
  def doc(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean
}

/**
 * An extension of the JavaTools trait that also includes interfaces specific to running
 * the java compiler inside of the incremental compiler.
 */
sealed trait IncrementalCompilerJavaTools extends JavaTools {
  /** An instance of the java compiler for use with incremental compilation. */
  def xsbtiCompiler: xsbti.compile.JavaCompiler
}
/** Factory methods for getting a java toolchain. */
object JavaTools {
  /** Create a new aggregate tool from existing tools. */
  def apply(c: JavaCompiler, docgen: Javadoc): JavaTools =
    new JavaTools {
      override def compiler = c
      def compile(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
        c.run(sources, options)
      def doc(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
        docgen.run(sources, options)
    }

  /**
   * Constructs a new Java toolchain for incremental compilation.
   *
   * @param instance
   *          The ScalaInstance being used in this incremental compile. Used if we need to append
   *          scala to the classpath (yeah.... the classpath doesn't already have it).
   * @param cpOptions
   *          Classpath options configured for this incremental compiler. Basically, should we append scala or not.
   * @param javaHome
   *          If this is defined, the location where we should look for javac when we run.
   * @return
   *          A new Java toolchain that also includes an instance of xsbti.compile.JavaCompiler.
   */
  def directOrFork(instance: xsbti.compile.ScalaInstance, cpOptions: xsbti.compile.ClasspathOptions, javaHome: Option[File]): IncrementalCompilerJavaTools = {
    val (compiler, doc) = javaHome match {
      case Some(_) => (JavaCompiler.fork(javaHome), Javadoc.fork(javaHome))
      case _ =>
        val c = JavaCompiler.local.getOrElse(JavaCompiler.fork(None))
        val d = Javadoc.local.getOrElse(Javadoc.fork())
        (c, d)
    }
    val delegate = apply(compiler, doc)
    new IncrementalCompilerJavaTools {
      val xsbtiCompiler = new JavaCompilerAdapter(delegate.compiler, instance, cpOptions)
      def compiler = delegate.compiler
      def compile(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
        delegate.compile(sources, options)
      def doc(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean =
        delegate.doc(sources, options)
    }
  }
}

/**
 * An interface for one of the tools in the java toolchain.
 *
 * We assume the following is true of tools:
 * - They all take sources and options and log error messages
 * - They return success or failure.
 */
sealed trait JavaTool {
  /**
   * This will run a java compiler or other similar tool (e.g. javadoc).
   *
   * @param sources The list of java source files to compile.
   * @param options The set of options to pass to the java compiler (includes the classpath).
   * @param log The logger to dump output into.
   * @param reporter The reporter for semantic error messages.
   * @return true if no errors, false otherwise.
   */
  def run(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean
}

/** Interface we use to compile java code. This is mostly a tag over the raw JavaTool interface. */
trait JavaCompiler extends JavaTool {}
/** Factory methods for constructing a java compiler. */
object JavaCompiler {
  /** Returns a local compiler, if the current runtime supports it. */
  def local: Option[JavaCompiler] =
    for {
      compiler <- Option(javax.tools.ToolProvider.getSystemJavaCompiler)
    } yield new LocalJavaCompiler(compiler)

  /** Returns a compiler that will fork javac when needed. */
  def fork(javaHome: Option[File] = None): JavaCompiler =
    new ForkedJavaCompiler(javaHome)
}

/** Interface we use to document java code. This is a tag over the raw JavaTool interface. */
trait Javadoc extends JavaTool {}
/** Factory methods for constructing a javadoc. */
object Javadoc {
  /** Returns a local javadoc, if the current runtime supports it. */
  def local: Option[Javadoc] =
    // TODO - javax doc tool not supported in JDK6
    //Option(javax.tools.ToolProvider.getSystemDocumentationTool)
    if (LocalJava.hasLocalJavadoc) Some(new LocalJavadoc)
    else None

  /** Returns a javadoc that will fork when needed. */
  def fork(javaHome: Option[File] = None): Javadoc =
    new ForkedJavadoc(javaHome)
}
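// Usage sketch (hypothetical values): obtain a toolchain and compile/document
// a set of sources. `instance`, `cpOptions`, `log`, and `reporter` stand for
// whatever the caller already has in scope; paths and options are illustrative.
object JavaToolsSketch {
  import java.io.File
  def build(instance: xsbti.compile.ScalaInstance, cpOptions: xsbti.compile.ClasspathOptions)(implicit log: sbt.Logger, reporter: xsbti.Reporter): Boolean = {
    val tools = sbt.compiler.javac.JavaTools.directOrFork(instance, cpOptions, javaHome = None)
    val sources = Seq(new File("src/Main.java"))
    val options = Seq("-classpath", "lib/dep.jar", "-deprecation")
    tools.compile(sources, options) && tools.doc(sources, Seq("-quiet"))
  }
}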
@ -1,44 +0,0 @@
package sbt.compiler.javac

import java.io.File

import sbt.compiler.{ CompileFailed, CompilerArguments }
import sbt.{ ClasspathOptions, Logger, LoggerReporter }
import xsbti.Reporter
import xsbti.compile.{ MultipleOutput, SingleOutput, Output }

/**
 * This class adapts the new java compiler with the classpath/argument option hackery needed to handle scala.
 *
 * The xsbti.Compiler interface is used by the IncrementalCompiler classes, so this lets us adapt a more generic
 * wrapper around running Javac (forked or direct) into the interfaces used by the incremental compiler.
 */
class JavaCompilerAdapter(delegate: JavaTool, scalaInstance: xsbti.compile.ScalaInstance, cpOptions: xsbti.compile.ClasspathOptions) extends xsbti.compile.JavaCompiler {
  override final def compile(sources: Array[File], classpath: Array[File], output: Output, options: Array[String], log: xsbti.Logger): Unit = {
    // TODO - 5 max errors ok? We're not expecting this code path to be called, ever. This is only for clients who try to use
    // the xsbti.compile.JavaCompiler interface outside of the incremental compiler, for some reason.
    val reporter = new LoggerReporter(5, log)
    compileWithReporter(sources, classpath, output, options, reporter, log)
  }
  override final def compileWithReporter(sources: Array[File], classpath: Array[File], output: Output, options: Array[String], reporter: Reporter, log: xsbti.Logger): Unit = {
    val target = output match {
      case so: SingleOutput => so.outputDirectory
      case mo: MultipleOutput => throw new RuntimeException("Javac doesn't support multiple output directories")
    }
    val args = commandArguments(Seq(), classpath, target, options, log)
    // We sort the sources for deterministic results.
    val success = delegate.run(sources.sortBy(_.getAbsolutePath), args)(log, reporter)
    if (!success) {
      // TODO - Will the reporter have problems from Scalac? It appears it does not, only from the most recent run.
      // This is because the incremental compiler will not run javac if scalac fails.
      throw new CompileFailed(args.toArray, "javac returned nonzero exit code", reporter.problems())
    }
  }
  private[this] def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] =
    {
      val augmentedClasspath = if (cpOptions.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath
      val javaCp = ClasspathOptions.javac(cpOptions.compiler)
      (new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options)
    }
}
@ -1,199 +0,0 @@
package sbt.compiler.javac

import java.io.File

import sbt.Logger.o2m
import xsbti.{ Problem, Severity, Maybe, Position }

/** A wrapper around xsbti.Position so we can pass in Java input. */
final case class JavaPosition(_sourceFilePath: String, _line: Int, _contents: String) extends Position {
  def line: Maybe[Integer] = o2m(Option(Integer.valueOf(_line)))
  def lineContent: String = _contents
  def offset: Maybe[Integer] = o2m(None)
  def pointer: Maybe[Integer] = o2m(None)
  def pointerSpace: Maybe[String] = o2m(None)
  def sourcePath: Maybe[String] = o2m(Option(_sourceFilePath))
  def sourceFile: Maybe[File] = o2m(Option(new File(_sourceFilePath)))
  override def toString = s"${_sourceFilePath}:${_line}"
}

/** A position which has no information, because there is none. */
object JavaNoPosition extends Position {
  def line: Maybe[Integer] = o2m(None)
  def lineContent: String = ""
  def offset: Maybe[Integer] = o2m(None)
  def pointer: Maybe[Integer] = o2m(None)
  def pointerSpace: Maybe[String] = o2m(None)
  def sourcePath: Maybe[String] = o2m(None)
  def sourceFile: Maybe[File] = o2m(None)
  override def toString = "NoPosition"
}

/** A wrapper around xsbti.Problem with java-specific options. */
final case class JavaProblem(position: Position, severity: Severity, message: String) extends xsbti.Problem {
  override def category: String = "javac" // TODO - what is this even supposed to be? For now it appears unused.
  override def toString = s"$severity @ $position - $message"
}

/** A parser that is able to parse javac's error output successfully. */
class JavaErrorParser(relativeDir: File = new File(new File(".").getAbsolutePath).getCanonicalFile) extends util.parsing.combinator.RegexParsers {
  // Here we track special handlers to catch "Note:" and "Warning:" lines.
  private val NOTE_LINE_PREFIXES = Array("Note: ", "\u6ce8: ", "\u6ce8\u610f\uff1a ")
  private val WARNING_PREFIXES = Array("warning", "\u8b66\u544a", "\u8b66\u544a\uff1a")
  private val END_OF_LINE = System.getProperty("line.separator")

  override val skipWhitespace = false

  val CHARAT: Parser[String] = literal("^")
  val SEMICOLON: Parser[String] = literal(":") | literal("\uff1a")
  val SYMBOL: Parser[String] = allUntilChar(':') // We ignore whether it actually says "symbol" for i18n
  val LOCATION: Parser[String] = allUntilChar(':') // We ignore whether it actually says "location" for i18n.
  val WARNING: Parser[String] = allUntilChar(':') ^? {
    case x if WARNING_PREFIXES.exists(x.trim.startsWith) => x
  }
  // Parses the rest of an input line.
  val restOfLine: Parser[String] =
    // TODO - Can we use END_OF_LINE here without issues?
    allUntilChars(Array('\n', '\r')) ~ "[\r]?[\n]?".r ^^ {
      case msg ~ _ => msg
    }
  val NOTE: Parser[String] = restOfLine ^? {
    case x if NOTE_LINE_PREFIXES exists x.startsWith => x
  }

  // Parses ALL characters until an expected character is met.
  def allUntilChar(c: Char): Parser[String] = allUntilChars(Array(c))
  def allUntilChars(chars: Array[Char]): Parser[String] = new Parser[String] {
    def isStopChar(c: Char): Boolean = {
      var i = 0
      while (i < chars.length) {
        if (c == chars(i)) return true
        i += 1
      }
      false
    }

    def apply(in: Input) = {
      val source = in.source
      val offset = in.offset
      val start = handleWhiteSpace(source, offset)
      var i = start
      while (i < source.length && !isStopChar(source.charAt(i))) {
        i += 1
      }
      Success(source.subSequence(start, i).toString, in.drop(i - offset))
    }
  }

  // Helper to extract an integer from a string.
  private object ParsedInteger {
    def unapply(s: String): Option[Int] = try Some(Integer.parseInt(s)) catch { case e: NumberFormatException => None }
  }
  // Parses a line number.
  val line: Parser[Int] = allUntilChar(':') ^? {
    case ParsedInteger(x) => x
  }

  // Parses the file + line number output of javac.
  val fileAndLineNo: Parser[(String, Int)] = {
    val linuxFile = allUntilChar(':') ^^ { _.trim() }
    val windowsRootFile = linuxFile ~ SEMICOLON ~ linuxFile ^^ { case root ~ _ ~ path => s"$root:$path" }
    val linuxOption = linuxFile ~ SEMICOLON ~ line ^^ { case f ~ _ ~ l => (f, l) }
    val windowsOption = windowsRootFile ~ SEMICOLON ~ line ^^ { case f ~ _ ~ l => (f, l) }
    (linuxOption | windowsOption)
  }

  val allUntilCharat: Parser[String] = allUntilChar('^')

  // Helper method to try to handle relative vs. absolute file paths....
  // NOTE - this is probably wrong...
  private def findFileSource(f: String): String = {
    // If a file looks like an absolute path, leave it as is.
    def isAbsolute(f: String) =
      (f startsWith "/") || (f matches """[^\\]+:\\.*""")
    // TODO - we used to use existence checks, that may be the right way to go
    if (isAbsolute(f)) f
    else (new File(relativeDir, f)).getAbsolutePath
  }

  /** Parses an error message (note: this WILL parse warning messages as error messages if used incorrectly). */
  val errorMessage: Parser[Problem] = {
    val fileLineMessage = fileAndLineNo ~ SEMICOLON ~ restOfLine ^^ {
      case (file, line) ~ _ ~ msg => (file, line, msg)
    }
    fileLineMessage ~ allUntilCharat ~ restOfLine ^^ {
      case (file, line, msg) ~ contents ~ _ =>
        new JavaProblem(
          new JavaPosition(
            findFileSource(file),
            line,
            contents + '^' // TODO - Actually parse the caret position out of here.
          ),
          Severity.Error,
          msg
        )
    }
  }

  /** Parses javac warning messages. */
  val warningMessage: Parser[Problem] = {
    val fileLineMessage = fileAndLineNo ~ SEMICOLON ~ WARNING ~ SEMICOLON ~ restOfLine ^^ {
      case (file, line) ~ _ ~ _ ~ _ ~ msg => (file, line, msg)
    }
    fileLineMessage ~ allUntilCharat ~ restOfLine ^^ {
      case (file, line, msg) ~ contents ~ _ =>
        new JavaProblem(
          new JavaPosition(
            findFileSource(file),
            line,
            contents + "^"
          ),
          Severity.Warn,
          msg
        )
    }
  }
  val noteMessage: Parser[Problem] =
    NOTE ^^ { msg =>
      new JavaProblem(
        JavaNoPosition,
        Severity.Info,
        msg
      )
    }

  val potentialProblem: Parser[Problem] = warningMessage | errorMessage | noteMessage

  val javacOutput: Parser[Seq[Problem]] = rep(potentialProblem)
  /**
   * Example:
   *
   *     Test.java:4: cannot find symbol
   *     symbol  : method baz()
   *     location: class Foo
   *             return baz();
   *                    ^
   *
   *     Test.java:8: warning: [deprecation] RMISecurityException(java.lang.String) in java.rmi.RMISecurityException has been deprecated
   *             throw new java.rmi.RMISecurityException("O NOES");
   *             ^
   */
  final def parseProblems(in: String, logger: sbt.Logger): Seq[Problem] =
    parse(javacOutput, in) match {
      case Success(result, _) => result
      case Failure(msg, n) =>
        logger.warn(s"Unexpected javac output at: ${n.pos.longString}. Please report to sbt-dev@googlegroups.com.")
        Seq.empty
      case Error(msg, n) =>
        logger.warn(s"Unexpected javac output at: ${n.pos.longString}. Please report to sbt-dev@googlegroups.com.")
        Seq.empty
    }
}

object JavaErrorParser {
  def main(args: Array[String]): Unit = {
  }
}
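// Usage sketch: feeding the javac output quoted in the scaladoc example above
// through the parser. Expect one error Problem and one warning Problem back;
// ConsoleLogger() stands in for any sbt.Logger.
object JavaErrorParserSketch {
  def main(args: Array[String]): Unit = {
    val output =
      """Test.java:4: cannot find symbol
        |symbol  : method baz()
        |location: class Foo
        |        return baz();
        |               ^
        |Test.java:8: warning: [deprecation] RMISecurityException(java.lang.String) in java.rmi.RMISecurityException has been deprecated
        |        throw new java.rmi.RMISecurityException("O NOES");
        |        ^
        |""".stripMargin
    val parser = new sbt.compiler.javac.JavaErrorParser()
    parser.parseProblems(output, sbt.ConsoleLogger()) foreach println
  }
}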
@ -1,62 +0,0 @@
package sbt
package compiler
package javac

import java.util.StringTokenizer

import xsbti._
import java.io.File

/**
 * An adapted process logger which can feed semantic error events from Javac as well as just
 * dump logs.
 *
 * @param log The logger where all input will go.
 * @param reporter A reporter for semantic Javac error messages.
 * @param cwd The current working directory of the Javac process, used when parsing filenames.
 */
final class JavacLogger(log: sbt.Logger, reporter: Reporter, cwd: File) extends ProcessLogger {
  import scala.collection.mutable.ListBuffer
  import Level.{ Info, Warn, Error, Value => LogLevel }

  private val msgs: ListBuffer[(LogLevel, String)] = new ListBuffer()

  def info(s: => String): Unit =
    synchronized { msgs += ((Info, s)) }

  def error(s: => String): Unit =
    synchronized { msgs += ((Error, s)) }

  def buffer[T](f: => T): T = f

  private def print(desiredLevel: LogLevel)(t: (LogLevel, String)) = t match {
    case (Info, msg) => log.info(msg)
    case (Error, msg) => log.log(desiredLevel, msg)
  }

  // Helper method to dump all semantic errors.
  private def parseAndDumpSemanticErrors(): Unit = {
    val input =
      msgs collect {
        case (Error, msg) => msg
      } mkString "\n"
    val parser = new JavaErrorParser(cwd)
    parser.parseProblems(input, log) foreach { e =>
      reporter.log(e.position, e.message, e.severity)
    }
  }

  def flush(exitCode: Int): Unit = {
    parseAndDumpSemanticErrors()
    val level = if (exitCode == 0) Warn else Error
    // Here we only display things that wouldn't otherwise be output by the error reporter.
    // TODO - NOTES may not be displayed correctly!
    msgs collect {
      case (Info, msg) => msg
    } foreach { msg =>
      log.info(msg)
    }
    msgs.clear()
  }
}
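// Usage sketch (hypothetical command line): JavacLogger buffers everything the
// forked process prints, then flush(exitCode) splits it into parsed Problems
// for the reporter and plain info lines for the log, mirroring ForkedJava.launch above.
object JavacLoggerSketch {
  import java.io.File
  def run(log: sbt.Logger, reporter: xsbti.Reporter): Boolean = {
    val cwd = new File(".").getCanonicalFile
    val javacLogger = new sbt.compiler.javac.JavacLogger(log, reporter, cwd)
    var exitCode = -1
    try exitCode = sbt.Process(Seq("javac", "@argfile"), cwd) ! javacLogger
    finally javacLogger.flush(exitCode)
    exitCode == 0
  }
}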
@ -1,71 +0,0 @@
package sbt.compiler.javac

import java.io.{ File, PrintWriter }

import sbt.{ LoggerWriter, Level, Logger }
import xsbti.Reporter
import xsbti.compile.{ ScalaInstance, ClasspathOptions }

/**
 * Helper methods for trying to run the java toolchain out of our own classloaders.
 */
object LocalJava {
  private[this] val javadocClass = "com.sun.tools.javadoc.Main"

  private[this] def javadocMethod =
    try {
      Option(Class.forName(javadocClass).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]]))
    } catch {
      case e @ (_: ClassNotFoundException | _: NoSuchMethodException) => None
    }

  /** True if we can call a local Javadoc. */
  def hasLocalJavadoc: Boolean = javadocMethod.isDefined

  /** A mechanism to call the javadoc tool via reflection. */
  private[javac] def unsafeJavadoc(args: Array[String], err: PrintWriter, warn: PrintWriter, notice: PrintWriter): Int = {
    javadocMethod match {
      case Some(m) =>
        System.err.println("Running javadoc tool!")
        m.invoke(null, "javadoc", err, warn, notice, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue
      case _ =>
        System.err.println("Unable to reflectively invoke javadoc; cannot find it on the current classloader!")
        -1
    }
  }
}
/** Implementation of the javadoc tool which attempts to run it locally (in-process). */
final class LocalJavadoc() extends Javadoc {
  override def run(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean = {
    val cwd = new File(new File(".").getAbsolutePath).getCanonicalFile
    val (jArgs, nonJArgs) = options.partition(_.startsWith("-J"))
    val allArguments = nonJArgs ++ sources.map(_.getAbsolutePath)
    val javacLogger = new JavacLogger(log, reporter, cwd)
    val warnOrError = new PrintWriter(new ProcessLoggerWriter(javacLogger, Level.Error))
    val infoWriter = new PrintWriter(new ProcessLoggerWriter(javacLogger, Level.Info))
    var exitCode = -1
    try {
      exitCode = LocalJava.unsafeJavadoc(allArguments.toArray, warnOrError, warnOrError, infoWriter)
    } finally {
      warnOrError.close()
      infoWriter.close()
      javacLogger.flush(exitCode)
    }
    // We return true or false, depending on success.
    exitCode == 0
  }
}

/** An implementation of compiling java which delegates to the JVM-resident java compiler. */
final class LocalJavaCompiler(compiler: javax.tools.JavaCompiler) extends JavaCompiler {
  override def run(sources: Seq[File], options: Seq[String])(implicit log: Logger, reporter: Reporter): Boolean = {
    import collection.JavaConverters._
    val logger = new LoggerWriter(log)
    val logWriter = new PrintWriter(logger)
    log.debug("Attempting to call " + compiler + " directly...")
    val diagnostics = new DiagnosticsReporter(reporter)
    val fileManager = compiler.getStandardFileManager(diagnostics, null, null)
    val jfiles = fileManager.getJavaFileObjectsFromFiles(sources.asJava)
    compiler.getTask(logWriter, fileManager, diagnostics, options.asJava, null, jfiles).call()
  }
}
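// The probe pattern used by javadocMethod above, in isolation: look up a class
// and method reflectively and treat "not found" as None rather than letting
// the exception escape. The class/method names are the real pre-JDK9 javadoc
// entry point, but whether they resolve depends on the running JDK.
object ReflectiveProbeSketch {
  def probe(className: String, method: String, paramTypes: Class[_]*): Option[java.lang.reflect.Method] =
    try Option(Class.forName(className).getDeclaredMethod(method, paramTypes: _*))
    catch { case _: ClassNotFoundException | _: NoSuchMethodException => None }

  def main(args: Array[String]): Unit =
    println(probe("com.sun.tools.javadoc.Main", "execute", classOf[Array[String]]).isDefined)
}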
@ -1,34 +0,0 @@
package sbt.compiler.javac

import sbt.{ Level, ProcessLogger }

/** Delegates a stream into a process logger. Mimics LoggerWriter, but for the ProcessLogger interface, which differs. */
private class ProcessLoggerWriter(delegate: ProcessLogger, level: Level.Value, nl: String = System.getProperty("line.separator")) extends java.io.Writer {
  private[this] val buffer = new StringBuilder
  override def close() = flush()
  override def flush(): Unit =
    synchronized {
      if (buffer.nonEmpty) {
        log(buffer.toString)
        buffer.clear()
      }
    }
  override def write(content: Array[Char], offset: Int, length: Int): Unit =
    synchronized {
      buffer.appendAll(content, offset, length)
      process()
    }

  private[this] def process(): Unit = {
    val i = buffer.indexOf(nl)
    if (i >= 0) {
      log(buffer.substring(0, i))
      buffer.delete(0, i + nl.length)
      process()
    }
  }
  private[this] def log(s: String): Unit = level match {
    case Level.Warn | Level.Error => delegate.error(s)
    case Level.Info => delegate.info(s)
  }
}
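// A self-contained sketch of the pattern above: a Writer that buffers
// characters and emits one callback per completed line, which is what lets a
// PrintWriter-based API (javadoc) feed a line-oriented logger.
object LineWriterSketch {
  class LineWriter(emit: String => Unit, nl: String = System.getProperty("line.separator")) extends java.io.Writer {
    private[this] val buffer = new StringBuilder
    override def write(content: Array[Char], offset: Int, length: Int): Unit = {
      buffer.appendAll(content, offset, length)
      var i = buffer.indexOf(nl)
      while (i >= 0) { // emit every complete line currently buffered
        emit(buffer.substring(0, i))
        buffer.delete(0, i + nl.length)
        i = buffer.indexOf(nl)
      }
    }
    override def flush(): Unit = if (buffer.nonEmpty) { emit(buffer.toString); buffer.clear() }
    override def close(): Unit = flush()
  }
  def main(args: Array[String]): Unit = {
    val out = new java.io.PrintWriter(new LineWriter(line => println(s"logged: $line")))
    out.println("first line") // emitted as soon as the newline arrives
    out.print("trailing")     // stays buffered until flush/close
    out.close()
  }
}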
@ -1,7 +0,0 @@

public class good {
    public static String test() {
        return "Hello";
    }
}
@ -1,4 +0,0 @@
public class hasstaticfinal {
    // the `TYPE` and `VALUE` strings are replaced with various values during tests
    public static final TYPE HELLO = VALUE;
}
@ -1,9 +0,0 @@
import java.rmi.RMISecurityException;

public class Test {
    public NotFound foo() { return 5; }

    public String warning() {
        throw new RMISecurityException("O NOES");
    }
}