mirror of https://github.com/sbt/sbt.git
commit
68e7c9a2b6
|
|
@ -3,271 +3,246 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import sbinary.{CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out}
|
||||
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream}
|
||||
import java.net.{URI, URL}
|
||||
import sbinary.{ CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out }
|
||||
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream }
|
||||
import java.net.{ URI, URL }
|
||||
import Types.:+:
|
||||
import DefaultProtocol.{asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap}
|
||||
import DefaultProtocol.{ asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap }
|
||||
import scala.xml.NodeSeq
|
||||
|
||||
trait Cache[I,O]
|
||||
{
|
||||
def apply(file: File)(i: I): Either[O, O => Unit]
|
||||
trait Cache[I, O] {
|
||||
def apply(file: File)(i: I): Either[O, O => Unit]
|
||||
}
|
||||
trait SBinaryFormats extends CollectionTypes with JavaFormats
|
||||
{
|
||||
implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat
|
||||
implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat
|
||||
trait SBinaryFormats extends CollectionTypes with JavaFormats {
|
||||
implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat
|
||||
implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat
|
||||
}
|
||||
object Cache extends CacheImplicits
|
||||
{
|
||||
def cache[I,O](implicit c: Cache[I,O]): Cache[I,O] = c
|
||||
object Cache extends CacheImplicits {
|
||||
def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c
|
||||
|
||||
def cached[I,O](file: File)(f: I => O)(implicit cache: Cache[I,O]): I => O =
|
||||
in =>
|
||||
cache(file)(in) match
|
||||
{
|
||||
case Left(value) => value
|
||||
case Right(store) =>
|
||||
val out = f(in)
|
||||
store(out)
|
||||
out
|
||||
}
|
||||
def cached[I, O](file: File)(f: I => O)(implicit cache: Cache[I, O]): I => O =
|
||||
in =>
|
||||
cache(file)(in) match {
|
||||
case Left(value) => value
|
||||
case Right(store) =>
|
||||
val out = f(in)
|
||||
store(out)
|
||||
out
|
||||
}
|
||||
|
||||
def debug[I](label: String, c: InputCache[I]): InputCache[I] =
|
||||
new InputCache[I]
|
||||
{
|
||||
type Internal = c.Internal
|
||||
def convert(i: I) = c.convert(i)
|
||||
def read(from: Input) =
|
||||
{
|
||||
val v = c.read(from)
|
||||
println(label + ".read: " + v)
|
||||
v
|
||||
}
|
||||
def write(to: Out, v: Internal)
|
||||
{
|
||||
println(label + ".write: " + v)
|
||||
c.write(to, v)
|
||||
}
|
||||
def equiv: Equiv[Internal] = new Equiv[Internal] {
|
||||
def equiv(a: Internal, b: Internal)=
|
||||
{
|
||||
val equ = c.equiv.equiv(a,b)
|
||||
println(label + ".equiv(" + a + ", " + b +"): " + equ)
|
||||
equ
|
||||
}
|
||||
}
|
||||
}
|
||||
def debug[I](label: String, c: InputCache[I]): InputCache[I] =
|
||||
new InputCache[I] {
|
||||
type Internal = c.Internal
|
||||
def convert(i: I) = c.convert(i)
|
||||
def read(from: Input) =
|
||||
{
|
||||
val v = c.read(from)
|
||||
println(label + ".read: " + v)
|
||||
v
|
||||
}
|
||||
def write(to: Out, v: Internal) {
|
||||
println(label + ".write: " + v)
|
||||
c.write(to, v)
|
||||
}
|
||||
def equiv: Equiv[Internal] = new Equiv[Internal] {
|
||||
def equiv(a: Internal, b: Internal) =
|
||||
{
|
||||
val equ = c.equiv.equiv(a, b)
|
||||
println(label + ".equiv(" + a + ", " + b + "): " + equ)
|
||||
equ
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
trait CacheImplicits extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits with UnionImplicits
|
||||
trait BasicCacheImplicits
|
||||
{
|
||||
implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I,O] =
|
||||
new BasicCache()(in, outFormat)
|
||||
def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq)
|
||||
trait BasicCacheImplicits {
|
||||
implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I, O] =
|
||||
new BasicCache()(in, outFormat)
|
||||
def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq)
|
||||
|
||||
def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b }
|
||||
|
||||
implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] =
|
||||
new InputCache[Option[T]]
|
||||
{
|
||||
type Internal = Option[t.Internal]
|
||||
def convert(v: Option[T]): Internal = v.map(x => t.convert(x))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val isDefined = BooleanFormat.reads(from)
|
||||
if(isDefined) Some(t.read(from)) else None
|
||||
}
|
||||
def write(to: Out, j: Internal): Unit =
|
||||
{
|
||||
BooleanFormat.writes(to, j.isDefined)
|
||||
j foreach { x => t.write(to, x) }
|
||||
}
|
||||
def equiv = optEquiv(t.equiv)
|
||||
}
|
||||
|
||||
def wrapEquiv[S,T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] =
|
||||
new Equiv[S] {
|
||||
def equiv(a: S, b: S) =
|
||||
eqT.equiv( f(a), f(b) )
|
||||
}
|
||||
def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b }
|
||||
|
||||
implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] =
|
||||
new Equiv[Option[T]] {
|
||||
def equiv(a: Option[T], b: Option[T]) =
|
||||
(a,b) match
|
||||
{
|
||||
case (None, None) => true
|
||||
case (Some(va), Some(vb)) => t.equiv(va, vb)
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq)
|
||||
implicit def uriEquiv: Equiv[URI] = defaultEquiv
|
||||
implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv
|
||||
implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv
|
||||
implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] =
|
||||
new InputCache[Option[T]] {
|
||||
type Internal = Option[t.Internal]
|
||||
def convert(v: Option[T]): Internal = v.map(x => t.convert(x))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val isDefined = BooleanFormat.reads(from)
|
||||
if (isDefined) Some(t.read(from)) else None
|
||||
}
|
||||
def write(to: Out, j: Internal): Unit =
|
||||
{
|
||||
BooleanFormat.writes(to, j.isDefined)
|
||||
j foreach { x => t.write(to, x) }
|
||||
}
|
||||
def equiv = optEquiv(t.equiv)
|
||||
}
|
||||
|
||||
def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] =
|
||||
{
|
||||
val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray }
|
||||
val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs))
|
||||
wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat)
|
||||
}
|
||||
|
||||
implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq)
|
||||
def wrapEquiv[S, T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] =
|
||||
new Equiv[S] {
|
||||
def equiv(a: S, b: S) =
|
||||
eqT.equiv(f(a), f(b))
|
||||
}
|
||||
|
||||
implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] =
|
||||
new InputCache[Seq[T]]
|
||||
{
|
||||
type Internal = Seq[t.Internal]
|
||||
def convert(v: Seq[T]) = v.map(x => t.convert(x))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val size = IntFormat.reads(from)
|
||||
def next(left: Int, acc: List[t.Internal]): Internal =
|
||||
if(left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc)
|
||||
next(size, Nil)
|
||||
}
|
||||
def write(to: Out, vs: Internal)
|
||||
{
|
||||
val size = vs.length
|
||||
IntFormat.writes(to, size)
|
||||
for(v <- vs) t.write(to, v)
|
||||
}
|
||||
def equiv: Equiv[Internal] = seqEquiv(t.equiv)
|
||||
}
|
||||
implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] =
|
||||
new Equiv[Option[T]] {
|
||||
def equiv(a: Option[T], b: Option[T]) =
|
||||
(a, b) match {
|
||||
case (None, None) => true
|
||||
case (Some(va), Some(vb)) => t.equiv(va, vb)
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq)
|
||||
implicit def uriEquiv: Equiv[URI] = defaultEquiv
|
||||
implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv
|
||||
implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv
|
||||
|
||||
implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] =
|
||||
wrapEquiv( (x: Array[T]) => x :Seq[T] )(seqEquiv[T](t))
|
||||
def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] =
|
||||
{
|
||||
val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray }
|
||||
val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs))
|
||||
wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat)
|
||||
}
|
||||
|
||||
implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] =
|
||||
new Equiv[Seq[T]]
|
||||
{
|
||||
def equiv(a: Seq[T], b: Seq[T]) =
|
||||
a.length == b.length &&
|
||||
((a,b).zipped forall t.equiv)
|
||||
}
|
||||
implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] =
|
||||
wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat)
|
||||
|
||||
def wrapIn[I,J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] =
|
||||
new InputCache[I]
|
||||
{
|
||||
type Internal = jCache.Internal
|
||||
def convert(i: I) = jCache.convert(f(i))
|
||||
def read(from: Input) = jCache.read(from)
|
||||
def write(to: Out, j: Internal) = jCache.write(to, j)
|
||||
def equiv = jCache.equiv
|
||||
}
|
||||
implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq)
|
||||
|
||||
def singleton[T](t: T): InputCache[T] =
|
||||
basicInput(trueEquiv, asSingleton(t))
|
||||
implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] =
|
||||
new InputCache[Seq[T]] {
|
||||
type Internal = Seq[t.Internal]
|
||||
def convert(v: Seq[T]) = v.map(x => t.convert(x))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val size = IntFormat.reads(from)
|
||||
def next(left: Int, acc: List[t.Internal]): Internal =
|
||||
if (left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc)
|
||||
next(size, Nil)
|
||||
}
|
||||
def write(to: Out, vs: Internal) {
|
||||
val size = vs.length
|
||||
IntFormat.writes(to, size)
|
||||
for (v <- vs) t.write(to, v)
|
||||
}
|
||||
def equiv: Equiv[Internal] = seqEquiv(t.equiv)
|
||||
}
|
||||
|
||||
def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true }
|
||||
implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] =
|
||||
wrapEquiv((x: Array[T]) => x: Seq[T])(seqEquiv[T](t))
|
||||
|
||||
implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] =
|
||||
new Equiv[Seq[T]] {
|
||||
def equiv(a: Seq[T], b: Seq[T]) =
|
||||
a.length == b.length &&
|
||||
((a, b).zipped forall t.equiv)
|
||||
}
|
||||
implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] =
|
||||
wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat)
|
||||
|
||||
def wrapIn[I, J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] =
|
||||
new InputCache[I] {
|
||||
type Internal = jCache.Internal
|
||||
def convert(i: I) = jCache.convert(f(i))
|
||||
def read(from: Input) = jCache.read(from)
|
||||
def write(to: Out, j: Internal) = jCache.write(to, j)
|
||||
def equiv = jCache.equiv
|
||||
}
|
||||
|
||||
def singleton[T](t: T): InputCache[T] =
|
||||
basicInput(trueEquiv, asSingleton(t))
|
||||
|
||||
def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true }
|
||||
}
|
||||
|
||||
trait HListCacheImplicits
|
||||
{
|
||||
implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] =
|
||||
new InputCache[H :+: T]
|
||||
{
|
||||
type Internal = (head.Internal, tail.Internal)
|
||||
def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val h = head.read(from)
|
||||
val t = tail.read(from)
|
||||
(h, t)
|
||||
}
|
||||
def write(to: Out, j: Internal)
|
||||
{
|
||||
head.write(to, j._1)
|
||||
tail.write(to, j._2)
|
||||
}
|
||||
def equiv = new Equiv[Internal]
|
||||
{
|
||||
def equiv(a: Internal, b: Internal) =
|
||||
head.equiv.equiv(a._1, b._1) &&
|
||||
tail.equiv.equiv(a._2, b._2)
|
||||
}
|
||||
}
|
||||
|
||||
implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil : HNil)
|
||||
trait HListCacheImplicits {
|
||||
implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] =
|
||||
new InputCache[H :+: T] {
|
||||
type Internal = (head.Internal, tail.Internal)
|
||||
def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail))
|
||||
def read(from: Input) =
|
||||
{
|
||||
val h = head.read(from)
|
||||
val t = tail.read(from)
|
||||
(h, t)
|
||||
}
|
||||
def write(to: Out, j: Internal) {
|
||||
head.write(to, j._1)
|
||||
tail.write(to, j._2)
|
||||
}
|
||||
def equiv = new Equiv[Internal] {
|
||||
def equiv(a: Internal, b: Internal) =
|
||||
head.equiv.equiv(a._1, b._1) &&
|
||||
tail.equiv.equiv(a._2, b._2)
|
||||
}
|
||||
}
|
||||
|
||||
implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] {
|
||||
def reads(from: Input) =
|
||||
{
|
||||
val h = head.reads(from)
|
||||
val t = tail.reads(from)
|
||||
HCons(h, t)
|
||||
}
|
||||
def writes(to: Out, hc: H :+: T)
|
||||
{
|
||||
head.writes(to, hc.head)
|
||||
tail.writes(to, hc.tail)
|
||||
}
|
||||
}
|
||||
implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil: HNil)
|
||||
|
||||
implicit def hNilFormat: Format[HNil] = asSingleton(HNil)
|
||||
implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] {
|
||||
def reads(from: Input) =
|
||||
{
|
||||
val h = head.reads(from)
|
||||
val t = tail.reads(from)
|
||||
HCons(h, t)
|
||||
}
|
||||
def writes(to: Out, hc: H :+: T) {
|
||||
head.writes(to, hc.head)
|
||||
tail.writes(to, hc.tail)
|
||||
}
|
||||
}
|
||||
|
||||
implicit def hNilFormat: Format[HNil] = asSingleton(HNil)
|
||||
}
|
||||
trait UnionImplicits
|
||||
{
|
||||
def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] =
|
||||
new InputCache[UB]
|
||||
{
|
||||
type Internal = Found[_]
|
||||
def convert(in: UB) = uc.find(in)
|
||||
def read(in: Input) =
|
||||
{
|
||||
val index = ByteFormat.reads(in)
|
||||
val (cache, clazz) = uc.at(index)
|
||||
val value = cache.read(in)
|
||||
new Found[cache.Internal](cache, clazz, value, index)
|
||||
}
|
||||
def write(to: Out, i: Internal)
|
||||
{
|
||||
def write0[I](f: Found[I])
|
||||
{
|
||||
ByteFormat.writes(to, f.index.toByte)
|
||||
f.cache.write(to, f.value)
|
||||
}
|
||||
write0(i)
|
||||
}
|
||||
def equiv: Equiv[Internal] = new Equiv[Internal]
|
||||
{
|
||||
def equiv(a: Internal, b: Internal) =
|
||||
{
|
||||
if(a.clazz == b.clazz)
|
||||
force(a.cache.equiv, a.value, b.value)
|
||||
else
|
||||
false
|
||||
}
|
||||
def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T])
|
||||
}
|
||||
}
|
||||
trait UnionImplicits {
|
||||
def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] =
|
||||
new InputCache[UB] {
|
||||
type Internal = Found[_]
|
||||
def convert(in: UB) = uc.find(in)
|
||||
def read(in: Input) =
|
||||
{
|
||||
val index = ByteFormat.reads(in)
|
||||
val (cache, clazz) = uc.at(index)
|
||||
val value = cache.read(in)
|
||||
new Found[cache.Internal](cache, clazz, value, index)
|
||||
}
|
||||
def write(to: Out, i: Internal) {
|
||||
def write0[I](f: Found[I]) {
|
||||
ByteFormat.writes(to, f.index.toByte)
|
||||
f.cache.write(to, f.value)
|
||||
}
|
||||
write0(i)
|
||||
}
|
||||
def equiv: Equiv[Internal] = new Equiv[Internal] {
|
||||
def equiv(a: Internal, b: Internal) =
|
||||
{
|
||||
if (a.clazz == b.clazz)
|
||||
force(a.cache.equiv, a.value, b.value)
|
||||
else
|
||||
false
|
||||
}
|
||||
def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T])
|
||||
}
|
||||
}
|
||||
|
||||
implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] =
|
||||
new UnionCache[H :+: T, UB]
|
||||
{
|
||||
val size = 1 + t.size
|
||||
def c = mf.runtimeClass
|
||||
def find(value: UB): Found[_] =
|
||||
if(c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value)
|
||||
def at(i: Int): (InputCache[_ <: UB], Class[_]) = if(size == i + 1) (head, c) else t.at(i)
|
||||
}
|
||||
implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] =
|
||||
new UnionCache[H :+: T, UB] {
|
||||
val size = 1 + t.size
|
||||
def c = mf.runtimeClass
|
||||
def find(value: UB): Found[_] =
|
||||
if (c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value)
|
||||
def at(i: Int): (InputCache[_ <: UB], Class[_]) = if (size == i + 1) (head, c) else t.at(i)
|
||||
}
|
||||
|
||||
implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] {
|
||||
def size = 0
|
||||
def find(value: UB) = sys.error("No valid sum type for " + value)
|
||||
def at(i: Int) = sys.error("Invalid union index " + i)
|
||||
}
|
||||
implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] {
|
||||
def size = 0
|
||||
def find(value: UB) = sys.error("No valid sum type for " + value)
|
||||
def at(i: Int) = sys.error("Invalid union index " + i)
|
||||
}
|
||||
|
||||
final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int)
|
||||
sealed trait UnionCache[HL <: HList, UB]
|
||||
{
|
||||
def size: Int
|
||||
def at(i: Int): (InputCache[_ <: UB], Class[_])
|
||||
def find(forValue: UB): Found[_]
|
||||
}
|
||||
final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int)
|
||||
sealed trait UnionCache[HL <: HList, UB] {
|
||||
def size: Int
|
||||
def at(i: Int): (InputCache[_ <: UB], Class[_])
|
||||
def find(forValue: UB): Found[_]
|
||||
}
|
||||
}
|
||||
|
|
@ -3,43 +3,42 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File, FileNotFoundException}
|
||||
import sbinary.{DefaultProtocol, Format, Operations}
|
||||
import java.io.{ File, FileNotFoundException }
|
||||
import sbinary.{ DefaultProtocol, Format, Operations }
|
||||
import scala.reflect.Manifest
|
||||
|
||||
object CacheIO
|
||||
{
|
||||
def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] =
|
||||
toBytes[T](value)(format, mf)
|
||||
def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] =
|
||||
Operations.toByteArray(value)(stampedFormat(format))
|
||||
def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T =
|
||||
fromBytes(default)(bytes)(format, mf)
|
||||
def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T =
|
||||
if(bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format))
|
||||
|
||||
def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T =
|
||||
fromFile(file, default)(format, mf)
|
||||
def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T =
|
||||
fromFile[T](file) getOrElse default
|
||||
def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] =
|
||||
try { Some( Operations.fromFile(file)(stampedFormat(format)) ) }
|
||||
catch { case e: Exception => None }
|
||||
|
||||
def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit =
|
||||
toFile(value)(file)(format, mf)
|
||||
def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit =
|
||||
{
|
||||
IO.createDirectory(file.getParentFile)
|
||||
Operations.toFile(value)(file)(stampedFormat(format))
|
||||
}
|
||||
def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] =
|
||||
{
|
||||
import DefaultProtocol._
|
||||
withStamp(stamp(format))(format)
|
||||
}
|
||||
def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf)
|
||||
def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode
|
||||
def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf
|
||||
def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf
|
||||
object CacheIO {
|
||||
def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] =
|
||||
toBytes[T](value)(format, mf)
|
||||
def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] =
|
||||
Operations.toByteArray(value)(stampedFormat(format))
|
||||
def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T =
|
||||
fromBytes(default)(bytes)(format, mf)
|
||||
def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T =
|
||||
if (bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format))
|
||||
|
||||
def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T =
|
||||
fromFile(file, default)(format, mf)
|
||||
def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T =
|
||||
fromFile[T](file) getOrElse default
|
||||
def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] =
|
||||
try { Some(Operations.fromFile(file)(stampedFormat(format))) }
|
||||
catch { case e: Exception => None }
|
||||
|
||||
def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit =
|
||||
toFile(value)(file)(format, mf)
|
||||
def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit =
|
||||
{
|
||||
IO.createDirectory(file.getParentFile)
|
||||
Operations.toFile(value)(file)(stampedFormat(format))
|
||||
}
|
||||
def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] =
|
||||
{
|
||||
import DefaultProtocol._
|
||||
withStamp(stamp(format))(format)
|
||||
}
|
||||
def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf)
|
||||
def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode
|
||||
def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf
|
||||
def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf
|
||||
}
|
||||
|
|
@ -3,26 +3,22 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File, IOException}
|
||||
import sbinary.{DefaultProtocol, Format}
|
||||
import java.io.{ File, IOException }
|
||||
import sbinary.{ DefaultProtocol, Format }
|
||||
import DefaultProtocol._
|
||||
import scala.reflect.Manifest
|
||||
|
||||
sealed trait FileInfo extends NotNull
|
||||
{
|
||||
val file: File
|
||||
sealed trait FileInfo extends NotNull {
|
||||
val file: File
|
||||
}
|
||||
sealed trait HashFileInfo extends FileInfo
|
||||
{
|
||||
val hash: List[Byte]
|
||||
sealed trait HashFileInfo extends FileInfo {
|
||||
val hash: List[Byte]
|
||||
}
|
||||
sealed trait ModifiedFileInfo extends FileInfo
|
||||
{
|
||||
val lastModified: Long
|
||||
sealed trait ModifiedFileInfo extends FileInfo {
|
||||
val lastModified: Long
|
||||
}
|
||||
sealed trait PlainFileInfo extends FileInfo
|
||||
{
|
||||
def exists: Boolean
|
||||
sealed trait PlainFileInfo extends FileInfo {
|
||||
def exists: Boolean
|
||||
}
|
||||
sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo
|
||||
|
||||
|
|
@ -31,90 +27,80 @@ private final case class FileHash(file: File, hash: List[Byte]) extends HashFile
|
|||
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
|
||||
private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo
|
||||
|
||||
object FileInfo
|
||||
{
|
||||
implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache
|
||||
implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache
|
||||
implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache
|
||||
implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache
|
||||
object FileInfo {
|
||||
implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache
|
||||
implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache
|
||||
implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache
|
||||
implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache
|
||||
|
||||
sealed trait Style
|
||||
{
|
||||
type F <: FileInfo
|
||||
implicit def apply(file: File): F
|
||||
implicit def unapply(info: F): File = info.file
|
||||
implicit val format: Format[F]
|
||||
import Cache._
|
||||
implicit def fileInfoEquiv: Equiv[F] = defaultEquiv
|
||||
def infoInputCache: InputCache[F] = basicInput
|
||||
implicit def fileInputCache: InputCache[File] = wrapIn[File,F]
|
||||
}
|
||||
object full extends Style
|
||||
{
|
||||
type F = HashModifiedFileInfo
|
||||
implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified)
|
||||
def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified)
|
||||
implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled)
|
||||
}
|
||||
object hash extends Style
|
||||
{
|
||||
type F = HashFileInfo
|
||||
implicit def apply(file: File): HashFileInfo = make(file, computeHash(file))
|
||||
def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash)
|
||||
implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled)
|
||||
private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil }
|
||||
}
|
||||
object lastModified extends Style
|
||||
{
|
||||
type F = ModifiedFileInfo
|
||||
implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified)
|
||||
def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified)
|
||||
implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled)
|
||||
}
|
||||
object exists extends Style
|
||||
{
|
||||
type F = PlainFileInfo
|
||||
implicit def apply(file: File): PlainFileInfo = make(file)
|
||||
def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) }
|
||||
implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists))
|
||||
}
|
||||
sealed trait Style {
|
||||
type F <: FileInfo
|
||||
implicit def apply(file: File): F
|
||||
implicit def unapply(info: F): File = info.file
|
||||
implicit val format: Format[F]
|
||||
import Cache._
|
||||
implicit def fileInfoEquiv: Equiv[F] = defaultEquiv
|
||||
def infoInputCache: InputCache[F] = basicInput
|
||||
implicit def fileInputCache: InputCache[File] = wrapIn[File, F]
|
||||
}
|
||||
object full extends Style {
|
||||
type F = HashModifiedFileInfo
|
||||
implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified)
|
||||
def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified)
|
||||
implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled)
|
||||
}
|
||||
object hash extends Style {
|
||||
type F = HashFileInfo
|
||||
implicit def apply(file: File): HashFileInfo = make(file, computeHash(file))
|
||||
def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash)
|
||||
implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled)
|
||||
private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil }
|
||||
}
|
||||
object lastModified extends Style {
|
||||
type F = ModifiedFileInfo
|
||||
implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified)
|
||||
def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified)
|
||||
implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled)
|
||||
}
|
||||
object exists extends Style {
|
||||
type F = PlainFileInfo
|
||||
implicit def apply(file: File): PlainFileInfo = make(file)
|
||||
def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) }
|
||||
implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists))
|
||||
}
|
||||
}
|
||||
|
||||
final case class FilesInfo[F <: FileInfo] private(files: Set[F])
|
||||
object FilesInfo
|
||||
{
|
||||
sealed abstract class Style
|
||||
{
|
||||
type F <: FileInfo
|
||||
val fileStyle: FileInfo.Style { type F = Style.this.F }
|
||||
final case class FilesInfo[F <: FileInfo] private (files: Set[F])
|
||||
object FilesInfo {
|
||||
sealed abstract class Style {
|
||||
type F <: FileInfo
|
||||
val fileStyle: FileInfo.Style { type F = Style.this.F }
|
||||
|
||||
//def manifest: Manifest[F] = fileStyle.manifest
|
||||
implicit def apply(files: Set[File]): FilesInfo[F]
|
||||
implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file)
|
||||
implicit val formats: Format[FilesInfo[F]]
|
||||
val manifest: Manifest[Format[FilesInfo[F]]]
|
||||
def empty: FilesInfo[F] = new FilesInfo[F](Set.empty)
|
||||
import Cache._
|
||||
def infosInputCache: InputCache[FilesInfo[F]] = basicInput
|
||||
implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File],FilesInfo[F]]
|
||||
implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv
|
||||
}
|
||||
private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI })
|
||||
(implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style
|
||||
{
|
||||
type F = FI
|
||||
val fileStyle: FileInfo.Style { type F = FI } = style
|
||||
private implicit val infoFormat: Format[FI] = fileStyle.format
|
||||
implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo( files.map(_.getAbsoluteFile).map(fileStyle.apply) )
|
||||
implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs))
|
||||
}
|
||||
lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full)
|
||||
lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash)
|
||||
lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified)
|
||||
lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists)
|
||||
//def manifest: Manifest[F] = fileStyle.manifest
|
||||
implicit def apply(files: Set[File]): FilesInfo[F]
|
||||
implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file)
|
||||
implicit val formats: Format[FilesInfo[F]]
|
||||
val manifest: Manifest[Format[FilesInfo[F]]]
|
||||
def empty: FilesInfo[F] = new FilesInfo[F](Set.empty)
|
||||
import Cache._
|
||||
def infosInputCache: InputCache[FilesInfo[F]] = basicInput
|
||||
implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File], FilesInfo[F]]
|
||||
implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv
|
||||
}
|
||||
private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI })(implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style {
|
||||
type F = FI
|
||||
val fileStyle: FileInfo.Style { type F = FI } = style
|
||||
private implicit val infoFormat: Format[FI] = fileStyle.format
|
||||
implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files.map(_.getAbsoluteFile).map(fileStyle.apply))
|
||||
implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs))
|
||||
}
|
||||
lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full)
|
||||
lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash)
|
||||
lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified)
|
||||
lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists)
|
||||
|
||||
implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache
|
||||
implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache
|
||||
implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache
|
||||
implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache
|
||||
implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache
|
||||
implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache
|
||||
implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache
|
||||
implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache
|
||||
}
|
||||
|
|
@ -4,64 +4,59 @@
|
|||
package sbt
|
||||
|
||||
import Types.:+:
|
||||
import sbinary.{DefaultProtocol, Format, Input, Output => Out}
|
||||
import sbinary.{ DefaultProtocol, Format, Input, Output => Out }
|
||||
import DefaultProtocol.ByteFormat
|
||||
import java.io.{File, InputStream, OutputStream}
|
||||
import java.io.{ File, InputStream, OutputStream }
|
||||
|
||||
trait InputCache[I]
|
||||
{
|
||||
type Internal
|
||||
def convert(i: I): Internal
|
||||
def read(from: Input): Internal
|
||||
def write(to: Out, j: Internal): Unit
|
||||
def equiv: Equiv[Internal]
|
||||
trait InputCache[I] {
|
||||
type Internal
|
||||
def convert(i: I): Internal
|
||||
def read(from: Input): Internal
|
||||
def write(to: Out, j: Internal): Unit
|
||||
def equiv: Equiv[Internal]
|
||||
}
|
||||
object InputCache
|
||||
{
|
||||
implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] =
|
||||
new InputCache[I]
|
||||
{
|
||||
type Internal = I
|
||||
def convert(i: I) = i
|
||||
def read(from: Input): I = fmt.reads(from)
|
||||
def write(to: Out, i: I) = fmt.writes(to, i)
|
||||
def equiv = eqv
|
||||
}
|
||||
def lzy[I](mkIn: => InputCache[I]): InputCache[I] =
|
||||
new InputCache[I]
|
||||
{
|
||||
lazy val ic = mkIn
|
||||
type Internal = ic.Internal
|
||||
def convert(i: I) = ic convert i
|
||||
def read(from: Input): ic.Internal = ic.read(from)
|
||||
def write(to: Out, i: ic.Internal) = ic.write(to, i)
|
||||
def equiv = ic.equiv
|
||||
}
|
||||
object InputCache {
|
||||
implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] =
|
||||
new InputCache[I] {
|
||||
type Internal = I
|
||||
def convert(i: I) = i
|
||||
def read(from: Input): I = fmt.reads(from)
|
||||
def write(to: Out, i: I) = fmt.writes(to, i)
|
||||
def equiv = eqv
|
||||
}
|
||||
def lzy[I](mkIn: => InputCache[I]): InputCache[I] =
|
||||
new InputCache[I] {
|
||||
lazy val ic = mkIn
|
||||
type Internal = ic.Internal
|
||||
def convert(i: I) = ic convert i
|
||||
def read(from: Input): ic.Internal = ic.read(from)
|
||||
def write(to: Out, i: ic.Internal) = ic.write(to, i)
|
||||
def equiv = ic.equiv
|
||||
}
|
||||
}
|
||||
|
||||
class BasicCache[I,O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I,O]
|
||||
{
|
||||
def apply(file: File)(in: I) =
|
||||
{
|
||||
val j = input.convert(in)
|
||||
try { applyImpl(file, j) }
|
||||
catch { case e: Exception => Right(update(file)(j)) }
|
||||
}
|
||||
protected def applyImpl(file: File, in: input.Internal) =
|
||||
{
|
||||
Using.fileInputStream(file) { stream =>
|
||||
val previousIn = input.read(stream)
|
||||
if(input.equiv.equiv(in, previousIn))
|
||||
Left(outFormat.reads(stream))
|
||||
else
|
||||
Right(update(file)(in))
|
||||
}
|
||||
}
|
||||
protected def update(file: File)(in: input.Internal) = (out: O) =>
|
||||
{
|
||||
Using.fileOutputStream(false)(file) { stream =>
|
||||
input.write(stream, in)
|
||||
outFormat.writes(stream, out)
|
||||
}
|
||||
}
|
||||
class BasicCache[I, O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I, O] {
|
||||
def apply(file: File)(in: I) =
|
||||
{
|
||||
val j = input.convert(in)
|
||||
try { applyImpl(file, j) }
|
||||
catch { case e: Exception => Right(update(file)(j)) }
|
||||
}
|
||||
protected def applyImpl(file: File, in: input.Internal) =
|
||||
{
|
||||
Using.fileInputStream(file) { stream =>
|
||||
val previousIn = input.read(stream)
|
||||
if (input.equiv.equiv(in, previousIn))
|
||||
Left(outFormat.reads(stream))
|
||||
else
|
||||
Right(update(file)(in))
|
||||
}
|
||||
}
|
||||
protected def update(file: File)(in: input.Internal) = (out: O) =>
|
||||
{
|
||||
Using.fileOutputStream(false)(file) { stream =>
|
||||
input.write(stream, in)
|
||||
outFormat.writes(stream, out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -3,71 +3,68 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
object ChangeReport
|
||||
{
|
||||
def modified[T](files: Set[T]) =
|
||||
new EmptyChangeReport[T]
|
||||
{
|
||||
override def checked = files
|
||||
override def modified = files
|
||||
override def markAllModified = this
|
||||
}
|
||||
def unmodified[T](files: Set[T]) =
|
||||
new EmptyChangeReport[T]
|
||||
{
|
||||
override def checked = files
|
||||
override def unmodified = files
|
||||
}
|
||||
object ChangeReport {
|
||||
def modified[T](files: Set[T]) =
|
||||
new EmptyChangeReport[T] {
|
||||
override def checked = files
|
||||
override def modified = files
|
||||
override def markAllModified = this
|
||||
}
|
||||
def unmodified[T](files: Set[T]) =
|
||||
new EmptyChangeReport[T] {
|
||||
override def checked = files
|
||||
override def unmodified = files
|
||||
}
|
||||
}
|
||||
/** The result of comparing some current set of objects against a previous set of objects.*/
|
||||
trait ChangeReport[T] extends NotNull
|
||||
{
|
||||
/** The set of all of the objects in the current set.*/
|
||||
def checked: Set[T]
|
||||
/** All of the objects that are in the same state in the current and reference sets.*/
|
||||
def unmodified: Set[T]
|
||||
/** All checked objects that are not in the same state as the reference. This includes objects that are in both
|
||||
* sets but have changed and files that are only in one set.*/
|
||||
def modified: Set[T] // all changes, including added
|
||||
/** All objects that are only in the current set.*/
|
||||
def added: Set[T]
|
||||
/** All objects only in the previous set*/
|
||||
def removed: Set[T]
|
||||
def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other)
|
||||
/** Generate a new report with this report's unmodified set included in the new report's modified set. The new report's
|
||||
* unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report. */
|
||||
def markAllModified: ChangeReport[T] =
|
||||
new ChangeReport[T]
|
||||
{
|
||||
def checked = ChangeReport.this.checked
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = ChangeReport.this.checked
|
||||
def added = ChangeReport.this.added
|
||||
def removed = ChangeReport.this.removed
|
||||
override def markAllModified = this
|
||||
}
|
||||
override def toString =
|
||||
{
|
||||
val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed")
|
||||
val sets = List(checked, modified, unmodified, added, removed)
|
||||
val keyValues = labels.zip(sets).map{ case (label, set) => label + ": " + set.mkString(", ") }
|
||||
keyValues.mkString("Change report:\n\t", "\n\t", "")
|
||||
}
|
||||
trait ChangeReport[T] extends NotNull {
|
||||
/** The set of all of the objects in the current set.*/
|
||||
def checked: Set[T]
|
||||
/** All of the objects that are in the same state in the current and reference sets.*/
|
||||
def unmodified: Set[T]
|
||||
/**
|
||||
* All checked objects that are not in the same state as the reference. This includes objects that are in both
|
||||
* sets but have changed and files that are only in one set.
|
||||
*/
|
||||
def modified: Set[T] // all changes, including added
|
||||
/** All objects that are only in the current set.*/
|
||||
def added: Set[T]
|
||||
/** All objects only in the previous set*/
|
||||
def removed: Set[T]
|
||||
def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other)
|
||||
/**
|
||||
* Generate a new report with this report's unmodified set included in the new report's modified set. The new report's
|
||||
* unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report.
|
||||
*/
|
||||
def markAllModified: ChangeReport[T] =
|
||||
new ChangeReport[T] {
|
||||
def checked = ChangeReport.this.checked
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = ChangeReport.this.checked
|
||||
def added = ChangeReport.this.added
|
||||
def removed = ChangeReport.this.removed
|
||||
override def markAllModified = this
|
||||
}
|
||||
override def toString =
|
||||
{
|
||||
val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed")
|
||||
val sets = List(checked, modified, unmodified, added, removed)
|
||||
val keyValues = labels.zip(sets).map { case (label, set) => label + ": " + set.mkString(", ") }
|
||||
keyValues.mkString("Change report:\n\t", "\n\t", "")
|
||||
}
|
||||
}
|
||||
class EmptyChangeReport[T] extends ChangeReport[T]
|
||||
{
|
||||
def checked = Set.empty[T]
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = Set.empty[T]
|
||||
def added = Set.empty[T]
|
||||
def removed = Set.empty[T]
|
||||
override def toString = "No changes"
|
||||
class EmptyChangeReport[T] extends ChangeReport[T] {
|
||||
def checked = Set.empty[T]
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = Set.empty[T]
|
||||
def added = Set.empty[T]
|
||||
def removed = Set.empty[T]
|
||||
override def toString = "No changes"
|
||||
}
|
||||
private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T]
|
||||
{
|
||||
lazy val checked = a.checked ++ b.checked
|
||||
lazy val unmodified = a.unmodified ++ b.unmodified
|
||||
lazy val modified = a.modified ++ b.modified
|
||||
lazy val added = a.added ++ b.added
|
||||
lazy val removed = a.removed ++ b.removed
|
||||
private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] {
|
||||
lazy val checked = a.checked ++ b.checked
|
||||
lazy val unmodified = a.unmodified ++ b.unmodified
|
||||
lazy val modified = a.modified ++ b.modified
|
||||
lazy val added = a.added ++ b.added
|
||||
lazy val removed = a.removed ++ b.removed
|
||||
}
|
||||
|
|
@ -4,204 +4,202 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import CacheIO.{fromFile, toFile}
|
||||
import CacheIO.{ fromFile, toFile }
|
||||
import sbinary.Format
|
||||
import scala.reflect.Manifest
|
||||
import scala.collection.mutable
|
||||
import IO.{delete, read, write}
|
||||
import IO.{ delete, read, write }
|
||||
|
||||
object Tracked {
|
||||
/**
|
||||
* Creates a tracker that provides the last time it was evaluated.
|
||||
* If 'useStartTime' is true, the recorded time is the start of the evaluated function.
|
||||
* If 'useStartTime' is false, the recorded time is when the evaluated function completes.
|
||||
* In both cases, the timestamp is not updated if the function throws an exception.
|
||||
*/
|
||||
def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime)
|
||||
/** Creates a tracker that only evaluates a function when the input has changed.*/
|
||||
//def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] =
|
||||
// new Changed[O](cacheFile)
|
||||
|
||||
object Tracked
|
||||
{
|
||||
/** Creates a tracker that provides the last time it was evaluated.
|
||||
* If 'useStartTime' is true, the recorded time is the start of the evaluated function.
|
||||
* If 'useStartTime' is false, the recorded time is when the evaluated function completes.
|
||||
* In both cases, the timestamp is not updated if the function throws an exception.*/
|
||||
def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime)
|
||||
/** Creates a tracker that only evaluates a function when the input has changed.*/
|
||||
//def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] =
|
||||
// new Changed[O](cacheFile)
|
||||
|
||||
/** Creates a tracker that provides the difference between a set of input files for successive invocations.*/
|
||||
def diffInputs(cache: File, style: FilesInfo.Style): Difference =
|
||||
Difference.inputs(cache, style)
|
||||
/** Creates a tracker that provides the difference between a set of output files for successive invocations.*/
|
||||
def diffOutputs(cache: File, style: FilesInfo.Style): Difference =
|
||||
Difference.outputs(cache, style)
|
||||
/** Creates a tracker that provides the difference between a set of input files for successive invocations.*/
|
||||
def diffInputs(cache: File, style: FilesInfo.Style): Difference =
|
||||
Difference.inputs(cache, style)
|
||||
/** Creates a tracker that provides the difference between a set of output files for successive invocations.*/
|
||||
def diffOutputs(cache: File, style: FilesInfo.Style): Difference =
|
||||
Difference.outputs(cache, style)
|
||||
|
||||
def lastOutput[I,O](cacheFile: File)(f: (I,Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in =>
|
||||
{
|
||||
val previous: Option[O] = fromFile[O](cacheFile)
|
||||
val next = f(in, previous)
|
||||
toFile(next)(cacheFile)
|
||||
next
|
||||
}
|
||||
def lastOutput[I, O](cacheFile: File)(f: (I, Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in =>
|
||||
{
|
||||
val previous: Option[O] = fromFile[O](cacheFile)
|
||||
val next = f(in, previous)
|
||||
toFile(next)(cacheFile)
|
||||
next
|
||||
}
|
||||
|
||||
def inputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in =>
|
||||
{
|
||||
val help = new CacheHelp(ic)
|
||||
val conv = help.convert(in)
|
||||
val changed = help.changed(cacheFile, conv)
|
||||
val result = f(changed, in)
|
||||
|
||||
if(changed)
|
||||
help.save(cacheFile, conv)
|
||||
def inputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in =>
|
||||
{
|
||||
val help = new CacheHelp(ic)
|
||||
val conv = help.convert(in)
|
||||
val changed = help.changed(cacheFile, conv)
|
||||
val result = f(changed, in)
|
||||
|
||||
result
|
||||
}
|
||||
def outputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in =>
|
||||
{
|
||||
val initial = in()
|
||||
val help = new CacheHelp(ic)
|
||||
val changed = help.changed(cacheFile, help.convert(initial))
|
||||
val result = f(changed, initial)
|
||||
|
||||
if(changed)
|
||||
help.save(cacheFile, help.convert(in()))
|
||||
if (changed)
|
||||
help.save(cacheFile, conv)
|
||||
|
||||
result
|
||||
}
|
||||
final class CacheHelp[I](val ic: InputCache[I])
|
||||
{
|
||||
def convert(i: I): ic.Internal = ic.convert(i)
|
||||
def save(cacheFile: File, value: ic.Internal): Unit =
|
||||
Using.fileOutputStream()(cacheFile)(out => ic.write(out, value) )
|
||||
def changed(cacheFile: File, converted: ic.Internal): Boolean =
|
||||
try {
|
||||
val prev = Using.fileInputStream(cacheFile)(x => ic.read(x))
|
||||
!ic.equiv.equiv(converted, prev)
|
||||
} catch { case e: Exception => true }
|
||||
}
|
||||
result
|
||||
}
|
||||
def outputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in =>
|
||||
{
|
||||
val initial = in()
|
||||
val help = new CacheHelp(ic)
|
||||
val changed = help.changed(cacheFile, help.convert(initial))
|
||||
val result = f(changed, initial)
|
||||
|
||||
if (changed)
|
||||
help.save(cacheFile, help.convert(in()))
|
||||
|
||||
result
|
||||
}
|
||||
final class CacheHelp[I](val ic: InputCache[I]) {
|
||||
def convert(i: I): ic.Internal = ic.convert(i)
|
||||
def save(cacheFile: File, value: ic.Internal): Unit =
|
||||
Using.fileOutputStream()(cacheFile)(out => ic.write(out, value))
|
||||
def changed(cacheFile: File, converted: ic.Internal): Boolean =
|
||||
try {
|
||||
val prev = Using.fileInputStream(cacheFile)(x => ic.read(x))
|
||||
!ic.equiv.equiv(converted, prev)
|
||||
} catch { case e: Exception => true }
|
||||
}
|
||||
}
|
||||
|
||||
trait Tracked
|
||||
{
|
||||
/** Cleans outputs and clears the cache.*/
|
||||
def clean(): Unit
|
||||
trait Tracked {
|
||||
/** Cleans outputs and clears the cache.*/
|
||||
def clean(): Unit
|
||||
}
|
||||
class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked
|
||||
{
|
||||
def clean() = delete(cacheFile)
|
||||
/** Reads the previous timestamp, evaluates the provided function,
|
||||
* and then updates the timestamp if the function completes normally.*/
|
||||
def apply[T](f: Long => T): T =
|
||||
{
|
||||
val start = now()
|
||||
val result = f(readTimestamp)
|
||||
write(cacheFile, (if(useStartTime) start else now()).toString)
|
||||
result
|
||||
}
|
||||
private def now() = System.currentTimeMillis
|
||||
def readTimestamp: Long =
|
||||
try { read(cacheFile).toLong }
|
||||
catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 }
|
||||
class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked {
|
||||
def clean() = delete(cacheFile)
|
||||
/**
|
||||
* Reads the previous timestamp, evaluates the provided function,
|
||||
* and then updates the timestamp if the function completes normally.
|
||||
*/
|
||||
def apply[T](f: Long => T): T =
|
||||
{
|
||||
val start = now()
|
||||
val result = f(readTimestamp)
|
||||
write(cacheFile, (if (useStartTime) start else now()).toString)
|
||||
result
|
||||
}
|
||||
private def now() = System.currentTimeMillis
|
||||
def readTimestamp: Long =
|
||||
try { read(cacheFile).toLong }
|
||||
catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 }
|
||||
}
|
||||
|
||||
class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked
|
||||
{
|
||||
def clean() = delete(cacheFile)
|
||||
def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value =>
|
||||
{
|
||||
if(uptodate(value))
|
||||
ifUnchanged(value)
|
||||
else
|
||||
{
|
||||
update(value)
|
||||
ifChanged(value)
|
||||
}
|
||||
}
|
||||
class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked {
|
||||
def clean() = delete(cacheFile)
|
||||
def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value =>
|
||||
{
|
||||
if (uptodate(value))
|
||||
ifUnchanged(value)
|
||||
else {
|
||||
update(value)
|
||||
ifChanged(value)
|
||||
}
|
||||
}
|
||||
|
||||
def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value))
|
||||
def uptodate(value: O): Boolean =
|
||||
try {
|
||||
Using.fileInputStream(cacheFile) {
|
||||
stream => equiv.equiv(value, format.reads(stream))
|
||||
}
|
||||
} catch {
|
||||
case _: Exception => false
|
||||
}
|
||||
def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value))
|
||||
def uptodate(value: O): Boolean =
|
||||
try {
|
||||
Using.fileInputStream(cacheFile) {
|
||||
stream => equiv.equiv(value, format.reads(stream))
|
||||
}
|
||||
} catch {
|
||||
case _: Exception => false
|
||||
}
|
||||
}
|
||||
object Difference
|
||||
{
|
||||
def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference =
|
||||
(cache, style) => new Difference(cache, style, defineClean, filesAreOutputs)
|
||||
object Difference {
|
||||
def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference =
|
||||
(cache, style) => new Difference(cache, style, defineClean, filesAreOutputs)
|
||||
|
||||
/** Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the
|
||||
* hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice:
|
||||
* before and after running the function.*/
|
||||
val outputs = constructor(true, true)
|
||||
/** Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the
|
||||
* hash/last modified time of the files as they were prior to running the function.*/
|
||||
val inputs = constructor(false, false)
|
||||
/**
|
||||
* Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the
|
||||
* hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice:
|
||||
* before and after running the function.
|
||||
*/
|
||||
val outputs = constructor(true, true)
|
||||
/**
|
||||
* Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the
|
||||
* hash/last modified time of the files as they were prior to running the function.
|
||||
*/
|
||||
val inputs = constructor(false, false)
|
||||
}
|
||||
class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked
|
||||
{
|
||||
def clean() =
|
||||
{
|
||||
if(defineClean) delete(raw(cachedFilesInfo)) else ()
|
||||
clearCache()
|
||||
}
|
||||
private def clearCache() = delete(cache)
|
||||
|
||||
private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files
|
||||
private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file)
|
||||
|
||||
def apply[T](files: Set[File])(f: ChangeReport[File] => T): T =
|
||||
{
|
||||
val lastFilesInfo = cachedFilesInfo
|
||||
apply(files, lastFilesInfo)(f)(_ => files)
|
||||
}
|
||||
|
||||
def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T =
|
||||
{
|
||||
val lastFilesInfo = cachedFilesInfo
|
||||
apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles)
|
||||
}
|
||||
|
||||
private def abs(files: Set[File]) = files.map(_.getAbsoluteFile)
|
||||
private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T =
|
||||
{
|
||||
val lastFiles = raw(lastFilesInfo)
|
||||
val currentFiles = abs(files)
|
||||
val currentFilesInfo = style(currentFiles)
|
||||
class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked {
|
||||
def clean() =
|
||||
{
|
||||
if (defineClean) delete(raw(cachedFilesInfo)) else ()
|
||||
clearCache()
|
||||
}
|
||||
private def clearCache() = delete(cache)
|
||||
|
||||
val report = new ChangeReport[File]
|
||||
{
|
||||
lazy val checked = currentFiles
|
||||
lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist.
|
||||
lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist.
|
||||
lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added
|
||||
lazy val unmodified = checked -- modified
|
||||
}
|
||||
private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files
|
||||
private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file)
|
||||
|
||||
val result = f(report)
|
||||
val info = if(filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo
|
||||
toFile(style.formats)(info)(cache)(style.manifest)
|
||||
result
|
||||
}
|
||||
def apply[T](files: Set[File])(f: ChangeReport[File] => T): T =
|
||||
{
|
||||
val lastFilesInfo = cachedFilesInfo
|
||||
apply(files, lastFilesInfo)(f)(_ => files)
|
||||
}
|
||||
|
||||
def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T =
|
||||
{
|
||||
val lastFilesInfo = cachedFilesInfo
|
||||
apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles)
|
||||
}
|
||||
|
||||
private def abs(files: Set[File]) = files.map(_.getAbsoluteFile)
|
||||
private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T =
|
||||
{
|
||||
val lastFiles = raw(lastFilesInfo)
|
||||
val currentFiles = abs(files)
|
||||
val currentFilesInfo = style(currentFiles)
|
||||
|
||||
val report = new ChangeReport[File] {
|
||||
lazy val checked = currentFiles
|
||||
lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist.
|
||||
lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist.
|
||||
lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added
|
||||
lazy val unmodified = checked -- modified
|
||||
}
|
||||
|
||||
val result = f(report)
|
||||
val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo
|
||||
toFile(style.formats)(info)(cache)(style.manifest)
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
object FileFunction {
|
||||
type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File]
|
||||
|
||||
def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] =
|
||||
cached(cacheBaseDirectory)(inStyle, outStyle)( (in, out) => action(in.checked) )
|
||||
|
||||
def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] =
|
||||
{
|
||||
import Path._
|
||||
lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle)
|
||||
lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle)
|
||||
inputs =>
|
||||
{
|
||||
inCache(inputs) { inReport =>
|
||||
outCache { outReport =>
|
||||
if(inReport.modified.isEmpty && outReport.modified.isEmpty)
|
||||
outReport.checked
|
||||
else
|
||||
action(inReport, outReport)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File]
|
||||
|
||||
def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] =
|
||||
cached(cacheBaseDirectory)(inStyle, outStyle)((in, out) => action(in.checked))
|
||||
|
||||
def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] =
|
||||
{
|
||||
import Path._
|
||||
lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle)
|
||||
lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle)
|
||||
inputs =>
|
||||
{
|
||||
inCache(inputs) { inReport =>
|
||||
outCache { outReport =>
|
||||
if (inReport.modified.isEmpty && outReport.modified.isEmpty)
|
||||
outReport.checked
|
||||
else
|
||||
action(inReport, outReport)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
package sbt
|
||||
|
||||
import java.lang.reflect.{Array => _, _}
|
||||
import java.lang.reflect.{ Array => _, _ }
|
||||
import java.lang.annotation.Annotation
|
||||
import annotation.tailrec
|
||||
import xsbti.api
|
||||
|
|
@ -8,329 +8,323 @@ import xsbti.SafeLazy
|
|||
import SafeLazy.strict
|
||||
import collection.mutable
|
||||
|
||||
object ClassToAPI
|
||||
{
|
||||
def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1
|
||||
object ClassToAPI {
|
||||
def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1
|
||||
|
||||
// (api, public inherited classes)
|
||||
def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) =
|
||||
{
|
||||
val pkgs = packages(c).map(p => new api.Package(p))
|
||||
val cmap = emptyClassMap
|
||||
val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap))
|
||||
val source = new api.SourceAPI(pkgs.toArray, defs.toArray)
|
||||
cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded
|
||||
val inDeps = cmap.inherited.toSet
|
||||
cmap.clear()
|
||||
(source, inDeps)
|
||||
}
|
||||
// (api, public inherited classes)
|
||||
def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) =
|
||||
{
|
||||
val pkgs = packages(c).map(p => new api.Package(p))
|
||||
val cmap = emptyClassMap
|
||||
val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap))
|
||||
val source = new api.SourceAPI(pkgs.toArray, defs.toArray)
|
||||
cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded
|
||||
val inDeps = cmap.inherited.toSet
|
||||
cmap.clear()
|
||||
(source, inDeps)
|
||||
}
|
||||
|
||||
// Avoiding implicit allocation.
|
||||
private def arrayMap[T <: AnyRef, U <: AnyRef : ClassManifest](xs: Array[T])(f: T => U): Array[U] = {
|
||||
val len = xs.length
|
||||
var i = 0
|
||||
val res = new Array[U](len)
|
||||
while (i < len) {
|
||||
res(i) = f(xs(i))
|
||||
i += 1
|
||||
}
|
||||
res
|
||||
}
|
||||
// Avoiding implicit allocation.
|
||||
private def arrayMap[T <: AnyRef, U <: AnyRef: ClassManifest](xs: Array[T])(f: T => U): Array[U] = {
|
||||
val len = xs.length
|
||||
var i = 0
|
||||
val res = new Array[U](len)
|
||||
while (i < len) {
|
||||
res(i) = f(xs(i))
|
||||
i += 1
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
def packages(c: Seq[Class[_]]): Set[String] =
|
||||
c.flatMap(packageName).toSet
|
||||
def packages(c: Seq[Class[_]]): Set[String] =
|
||||
c.flatMap(packageName).toSet
|
||||
|
||||
def isTopLevel(c: Class[_]): Boolean =
|
||||
c.getEnclosingClass eq null
|
||||
def isTopLevel(c: Class[_]): Boolean =
|
||||
c.getEnclosingClass eq null
|
||||
|
||||
final class ClassMap private[sbt](private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) {
|
||||
def clear() { memo.clear(); inherited.clear(); lz.clear() }
|
||||
}
|
||||
def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer)
|
||||
final class ClassMap private[sbt] (private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) {
|
||||
def clear() { memo.clear(); inherited.clear(); lz.clear() }
|
||||
}
|
||||
def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer)
|
||||
|
||||
def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] =
|
||||
cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap))
|
||||
def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] =
|
||||
{
|
||||
import api.DefinitionType.{ClassDef, Module, Trait}
|
||||
val enclPkg = packageName(c)
|
||||
val mods = modifiers(c.getModifiers)
|
||||
val acc = access(c.getModifiers, enclPkg)
|
||||
val annots = annotations(c.getAnnotations)
|
||||
val name = c.getName
|
||||
val tpe = if(Modifier.isInterface(c.getModifiers)) Trait else ClassDef
|
||||
lazy val (static, instance) = structure(c, enclPkg, cmap)
|
||||
val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots)
|
||||
val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots)
|
||||
val defs = cls :: stat :: Nil
|
||||
cmap.memo(c.getName) = defs
|
||||
defs
|
||||
}
|
||||
def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] =
|
||||
cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap))
|
||||
def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] =
|
||||
{
|
||||
import api.DefinitionType.{ ClassDef, Module, Trait }
|
||||
val enclPkg = packageName(c)
|
||||
val mods = modifiers(c.getModifiers)
|
||||
val acc = access(c.getModifiers, enclPkg)
|
||||
val annots = annotations(c.getAnnotations)
|
||||
val name = c.getName
|
||||
val tpe = if (Modifier.isInterface(c.getModifiers)) Trait else ClassDef
|
||||
lazy val (static, instance) = structure(c, enclPkg, cmap)
|
||||
val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots)
|
||||
val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots)
|
||||
val defs = cls :: stat :: Nil
|
||||
cmap.memo(c.getName) = defs
|
||||
defs
|
||||
}
|
||||
|
||||
/** Returns the (static structure, instance structure, inherited classes) for `c`. */
|
||||
def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) =
|
||||
{
|
||||
val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg))
|
||||
val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(enclPkg))
|
||||
val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg))
|
||||
val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c)
|
||||
val all = (methods ++ fields ++ constructors ++ classes)
|
||||
val parentJavaTypes = allSuperTypes(c)
|
||||
if(!Modifier.isPrivate(c.getModifiers))
|
||||
cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c }
|
||||
val parentTypes = types(parentJavaTypes)
|
||||
val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray))
|
||||
val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray))
|
||||
(staticStructure, instanceStructure)
|
||||
}
|
||||
private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t)
|
||||
def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = xsbti.SafeLazy(t)
|
||||
private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = {
|
||||
val s = lzy(t)
|
||||
cmap.lz += s
|
||||
s
|
||||
}
|
||||
/** Returns the (static structure, instance structure, inherited classes) for `c`. */
|
||||
def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) =
|
||||
{
|
||||
val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg))
|
||||
val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(enclPkg))
|
||||
val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg))
|
||||
val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c)
|
||||
val all = (methods ++ fields ++ constructors ++ classes)
|
||||
val parentJavaTypes = allSuperTypes(c)
|
||||
if (!Modifier.isPrivate(c.getModifiers))
|
||||
cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c }
|
||||
val parentTypes = types(parentJavaTypes)
|
||||
val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray))
|
||||
val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray))
|
||||
(staticStructure, instanceStructure)
|
||||
}
|
||||
private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t)
|
||||
def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = xsbti.SafeLazy(t)
|
||||
private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = {
|
||||
val s = lzy(t)
|
||||
cmap.lz += s
|
||||
s
|
||||
}
|
||||
|
||||
private val emptyStringArray = new Array[String](0)
|
||||
private val emptyTypeArray = new Array[xsbti.api.Type](0)
|
||||
private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0)
|
||||
private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0)
|
||||
private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0)
|
||||
private val lzyEmptyTpeArray = lzyS(emptyTypeArray)
|
||||
private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0))
|
||||
private val emptyStringArray = new Array[String](0)
|
||||
private val emptyTypeArray = new Array[xsbti.api.Type](0)
|
||||
private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0)
|
||||
private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0)
|
||||
private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0)
|
||||
private val lzyEmptyTpeArray = lzyS(emptyTypeArray)
|
||||
private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0))
|
||||
|
||||
private def allSuperTypes(t: Type): Seq[Type] =
|
||||
{
|
||||
@tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match {
|
||||
case c: Class[_] =>
|
||||
val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces)
|
||||
accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces))
|
||||
case p: ParameterizedType =>
|
||||
accumulate(p.getRawType, accum)
|
||||
case _ =>
|
||||
accum
|
||||
}
|
||||
@tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] =
|
||||
{
|
||||
if (!interfaces.isEmpty) {
|
||||
val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i }
|
||||
val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty }
|
||||
flattenAll(children, accum ++ interfaces ++ children)
|
||||
}
|
||||
else
|
||||
accum
|
||||
}
|
||||
accumulate(t).filterNot(_ == null).distinct
|
||||
}
|
||||
private def allSuperTypes(t: Type): Seq[Type] =
|
||||
{
|
||||
@tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match {
|
||||
case c: Class[_] =>
|
||||
val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces)
|
||||
accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces))
|
||||
case p: ParameterizedType =>
|
||||
accumulate(p.getRawType, accum)
|
||||
case _ =>
|
||||
accum
|
||||
}
|
||||
@tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] =
|
||||
{
|
||||
if (!interfaces.isEmpty) {
|
||||
val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i }
|
||||
val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty }
|
||||
flattenAll(children, accum ++ interfaces ++ children)
|
||||
} else
|
||||
accum
|
||||
}
|
||||
accumulate(t).filterNot(_ == null).distinct
|
||||
}
|
||||
|
||||
@deprecated("No longer used", "0.13.0")
|
||||
def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c))
|
||||
def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray;
|
||||
def upperBounds(ts: Array[Type]): api.Type =
|
||||
new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray)
|
||||
@deprecated("No longer used", "0.13.0")
|
||||
def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c))
|
||||
def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray;
|
||||
def upperBounds(ts: Array[Type]): api.Type =
|
||||
new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray)
|
||||
|
||||
def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike =
|
||||
{
|
||||
val name = f.getName
|
||||
val accs = access(f.getModifiers, enclPkg)
|
||||
val mods = modifiers(f.getModifiers)
|
||||
val annots = annotations(f.getDeclaredAnnotations)
|
||||
val tpe = reference(returnType(f))
|
||||
if(mods.isFinal) new api.Val(tpe, name, accs, mods, annots) else new api.Var(tpe, name, accs, mods, annots)
|
||||
}
|
||||
def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike =
|
||||
{
|
||||
val name = f.getName
|
||||
val accs = access(f.getModifiers, enclPkg)
|
||||
val mods = modifiers(f.getModifiers)
|
||||
val annots = annotations(f.getDeclaredAnnotations)
|
||||
val tpe = reference(returnType(f))
|
||||
if (mods.isFinal) new api.Val(tpe, name, accs, mods, annots) else new api.Var(tpe, name, accs, mods, annots)
|
||||
}
|
||||
|
||||
def methodToDef(enclPkg: Option[String])(m: Method): api.Def =
|
||||
defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Some(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg)
|
||||
def methodToDef(enclPkg: Option[String])(m: Method): api.Def =
|
||||
defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Some(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg)
|
||||
|
||||
def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def =
|
||||
defLike("<init>", c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg)
|
||||
def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def =
|
||||
defLike("<init>", c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg)
|
||||
|
||||
def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def =
|
||||
{
|
||||
val varArgPosition = if(varArgs) paramTypes.length - 1 else -1
|
||||
val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition)
|
||||
val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a,p,v) => parameter(a,p,v) }
|
||||
val params = new api.ParameterList(pa, false)
|
||||
val ret = retType match { case Some(rt) => reference(rt); case None => Empty }
|
||||
new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ exceptionAnnotations(exceptions))
|
||||
}
|
||||
def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def =
|
||||
{
|
||||
val varArgPosition = if (varArgs) paramTypes.length - 1 else -1
|
||||
val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition)
|
||||
val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a, p, v) => parameter(a, p, v) }
|
||||
val params = new api.ParameterList(pa, false)
|
||||
val ret = retType match { case Some(rt) => reference(rt); case None => Empty }
|
||||
new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ exceptionAnnotations(exceptions))
|
||||
}
|
||||
|
||||
def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] =
|
||||
if (exceptions.length == 0) emptyAnnotationArray
|
||||
else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString))))
|
||||
def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] =
|
||||
if (exceptions.length == 0) emptyAnnotationArray
|
||||
else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString))))
|
||||
|
||||
def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter =
|
||||
new api.MethodParameter("", annotated(reference(parameter),annots), false, if(varArgs) api.ParameterModifier.Repeated else api.ParameterModifier.Plain)
|
||||
def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter =
|
||||
new api.MethodParameter("", annotated(reference(parameter), annots), false, if (varArgs) api.ParameterModifier.Repeated else api.ParameterModifier.Plain)
|
||||
|
||||
def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = (
|
||||
if (annots.length == 0) t
|
||||
else new api.Annotated(t, annotations(annots))
|
||||
)
|
||||
def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = (
|
||||
if (annots.length == 0) t
|
||||
else new api.Annotated(t, annotations(annots))
|
||||
)
|
||||
|
||||
case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: Seq[api.Definition])
|
||||
{
|
||||
def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited)
|
||||
}
|
||||
def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs =
|
||||
merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of)
|
||||
case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: Seq[api.Definition]) {
|
||||
def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited)
|
||||
}
|
||||
def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs =
|
||||
merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of)
|
||||
|
||||
def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T],Seq[T]), isInherited: T => Boolean): Defs =
|
||||
{
|
||||
val (selfStatic, selfInstance) = splitStatic(self)
|
||||
val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited)
|
||||
Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f)
|
||||
}
|
||||
def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T], Seq[T]), isInherited: T => Boolean): Defs =
|
||||
{
|
||||
val (selfStatic, selfInstance) = splitStatic(self)
|
||||
val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited)
|
||||
Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f)
|
||||
}
|
||||
|
||||
def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) =
|
||||
defs partition isStatic
|
||||
def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) =
|
||||
defs partition isStatic
|
||||
|
||||
def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers)
|
||||
def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers)
|
||||
def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers)
|
||||
def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers)
|
||||
|
||||
def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] =
|
||||
if (tps.length == 0) emptyTypeParameterArray
|
||||
else arrayMap(tps)(typeParameter)
|
||||
def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] =
|
||||
if (tps.length == 0) emptyTypeParameterArray
|
||||
else arrayMap(tps)(typeParameter)
|
||||
|
||||
def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter =
|
||||
new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds))
|
||||
def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter =
|
||||
new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds))
|
||||
|
||||
// needs to be stable across compilations
|
||||
def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String =
|
||||
name(tv.getGenericDeclaration) + " " + tv.getName
|
||||
// needs to be stable across compilations
|
||||
def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String =
|
||||
name(tv.getGenericDeclaration) + " " + tv.getName
|
||||
|
||||
def reduceHash(in: Array[Byte]): Int =
|
||||
(0 /: in)( (acc, b) => (acc * 43) ^ b)
|
||||
def reduceHash(in: Array[Byte]): Int =
|
||||
(0 /: in)((acc, b) => (acc * 43) ^ b)
|
||||
|
||||
def name(gd: GenericDeclaration): String =
|
||||
gd match
|
||||
{
|
||||
case c: Class[_] => c.getName
|
||||
case m: Method => m.getName
|
||||
case c: Constructor[_] => c.getName
|
||||
}
|
||||
def name(gd: GenericDeclaration): String =
|
||||
gd match {
|
||||
case c: Class[_] => c.getName
|
||||
case m: Method => m.getName
|
||||
case c: Constructor[_] => c.getName
|
||||
}
|
||||
|
||||
def modifiers(i: Int): api.Modifiers =
|
||||
{
|
||||
import Modifier.{isAbstract, isFinal}
|
||||
new api.Modifiers( isAbstract(i), false, isFinal(i), false, false, false, false)
|
||||
}
|
||||
def access(i: Int, pkg: Option[String]): api.Access =
|
||||
{
|
||||
import Modifier.{isPublic, isPrivate, isProtected}
|
||||
if(isPublic(i)) Public else if(isPrivate(i)) Private else if(isProtected(i)) Protected else packagePrivate(pkg)
|
||||
}
|
||||
def modifiers(i: Int): api.Modifiers =
|
||||
{
|
||||
import Modifier.{ isAbstract, isFinal }
|
||||
new api.Modifiers(isAbstract(i), false, isFinal(i), false, false, false, false)
|
||||
}
|
||||
def access(i: Int, pkg: Option[String]): api.Access =
|
||||
{
|
||||
import Modifier.{ isPublic, isPrivate, isProtected }
|
||||
if (isPublic(i)) Public else if (isPrivate(i)) Private else if (isProtected(i)) Protected else packagePrivate(pkg)
|
||||
}
|
||||
|
||||
def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation)
|
||||
def annotation(a: Annotation): api.Annotation =
|
||||
new api.Annotation( reference(a.annotationType), Array(javaAnnotation(a.toString)))
|
||||
def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation)
|
||||
def annotation(a: Annotation): api.Annotation =
|
||||
new api.Annotation(reference(a.annotationType), Array(javaAnnotation(a.toString)))
|
||||
|
||||
// full information not available from reflection
|
||||
def javaAnnotation(s: String): api.AnnotationArgument =
|
||||
new api.AnnotationArgument("toString", s)
|
||||
// full information not available from reflection
|
||||
def javaAnnotation(s: String): api.AnnotationArgument =
|
||||
new api.AnnotationArgument("toString", s)
|
||||
|
||||
def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe))
|
||||
def reference(c: Class[_]): api.SimpleType =
|
||||
if(c.isArray) array(reference(c.getComponentType)) else if(c.isPrimitive) primitive(c.getName) else reference(c.getName)
|
||||
def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe))
|
||||
def reference(c: Class[_]): api.SimpleType =
|
||||
if (c.isArray) array(reference(c.getComponentType)) else if (c.isPrimitive) primitive(c.getName) else reference(c.getName)
|
||||
|
||||
// does not handle primitives
|
||||
def reference(s: String): api.SimpleType =
|
||||
{
|
||||
val (pkg, cls) = packageAndName(s)
|
||||
pkg match
|
||||
{
|
||||
// translate all primitives?
|
||||
case None => new api.Projection(Empty, cls)
|
||||
case Some(p) =>
|
||||
new api.Projection(new api.Singleton(pathFromString(p)), cls)
|
||||
}
|
||||
}
|
||||
def referenceP(t: ParameterizedType): api.Parameterized =
|
||||
{
|
||||
val targs = t.getActualTypeArguments
|
||||
val args = if (targs.length == 0) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type)
|
||||
val base = reference(t.getRawType)
|
||||
new api.Parameterized(base, args.toArray[api.Type])
|
||||
}
|
||||
def reference(t: Type): api.SimpleType =
|
||||
t match
|
||||
{
|
||||
case w: WildcardType => reference("_")
|
||||
case tv: TypeVariable[_] => new api.ParameterRef(typeVariable(tv))
|
||||
case pt: ParameterizedType => referenceP(pt)
|
||||
case gat: GenericArrayType => array(reference(gat.getGenericComponentType))
|
||||
case c: Class[_] => reference(c)
|
||||
}
|
||||
// does not handle primitives
|
||||
def reference(s: String): api.SimpleType =
|
||||
{
|
||||
val (pkg, cls) = packageAndName(s)
|
||||
pkg match {
|
||||
// translate all primitives?
|
||||
case None => new api.Projection(Empty, cls)
|
||||
case Some(p) =>
|
||||
new api.Projection(new api.Singleton(pathFromString(p)), cls)
|
||||
}
|
||||
}
|
||||
def referenceP(t: ParameterizedType): api.Parameterized =
|
||||
{
|
||||
val targs = t.getActualTypeArguments
|
||||
val args = if (targs.length == 0) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type)
|
||||
val base = reference(t.getRawType)
|
||||
new api.Parameterized(base, args.toArray[api.Type])
|
||||
}
|
||||
def reference(t: Type): api.SimpleType =
|
||||
t match {
|
||||
case w: WildcardType => reference("_")
|
||||
case tv: TypeVariable[_] => new api.ParameterRef(typeVariable(tv))
|
||||
case pt: ParameterizedType => referenceP(pt)
|
||||
case gat: GenericArrayType => array(reference(gat.getGenericComponentType))
|
||||
case c: Class[_] => reference(c)
|
||||
}
|
||||
|
||||
def pathFromString(s: String): api.Path =
|
||||
new api.Path(s.split("\\.").map(new api.Id(_)) :+ ThisRef )
|
||||
def packageName(c: Class[_]) = packageAndName(c)._1
|
||||
def packageAndName(c: Class[_]): (Option[String], String) =
|
||||
packageAndName(c.getName)
|
||||
def packageAndName(name: String): (Option[String], String) =
|
||||
{
|
||||
val lastDot = name.lastIndexOf('.')
|
||||
if(lastDot >= 0)
|
||||
(Some(name.substring(0, lastDot)), name.substring(lastDot+1))
|
||||
else
|
||||
(None, name)
|
||||
}
|
||||
def pathFromString(s: String): api.Path =
|
||||
new api.Path(s.split("\\.").map(new api.Id(_)) :+ ThisRef)
|
||||
def packageName(c: Class[_]) = packageAndName(c)._1
|
||||
def packageAndName(c: Class[_]): (Option[String], String) =
|
||||
packageAndName(c.getName)
|
||||
def packageAndName(name: String): (Option[String], String) =
|
||||
{
|
||||
val lastDot = name.lastIndexOf('.')
|
||||
if (lastDot >= 0)
|
||||
(Some(name.substring(0, lastDot)), name.substring(lastDot + 1))
|
||||
else
|
||||
(None, name)
|
||||
}
|
||||
|
||||
val Empty = new api.EmptyType
|
||||
val ThisRef = new api.This
|
||||
val Empty = new api.EmptyType
|
||||
val ThisRef = new api.This
|
||||
|
||||
val Public = new api.Public
|
||||
val Unqualified = new api.Unqualified
|
||||
val Private = new api.Private(Unqualified)
|
||||
val Protected = new api.Protected(Unqualified)
|
||||
def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse ""))
|
||||
val Public = new api.Public
|
||||
val Unqualified = new api.Unqualified
|
||||
val Private = new api.Private(Unqualified)
|
||||
val Protected = new api.Protected(Unqualified)
|
||||
def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse ""))
|
||||
|
||||
val ArrayRef = reference("scala.Array")
|
||||
val Throws = reference("scala.throws")
|
||||
val NothingRef = reference("scala.Nothing")
|
||||
val ArrayRef = reference("scala.Array")
|
||||
val Throws = reference("scala.throws")
|
||||
val NothingRef = reference("scala.Nothing")
|
||||
|
||||
private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double")
|
||||
private[this] def PrimitiveMap = PrimitiveNames.map( j => (j, j.capitalize)) :+ ("void" -> "Unit")
|
||||
private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." + sn)) }.toMap
|
||||
def primitive(name: String): api.SimpleType = PrimitiveRefs(name)
|
||||
private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double")
|
||||
private[this] def PrimitiveMap = PrimitiveNames.map(j => (j, j.capitalize)) :+ ("void" -> "Unit")
|
||||
private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." + sn)) }.toMap
|
||||
def primitive(name: String): api.SimpleType = PrimitiveRefs(name)
|
||||
|
||||
// Workarounds for https://github.com/sbt/sbt/issues/1035
|
||||
// these catch the GenericSignatureFormatError and return the erased type
|
||||
// Workarounds for https://github.com/sbt/sbt/issues/1035
|
||||
// these catch the GenericSignatureFormatError and return the erased type
|
||||
|
||||
private[this] def returnType(f: Field): Type = try f.getGenericType catch {
|
||||
case _: GenericSignatureFormatError => f.getType
|
||||
}
|
||||
private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(c.getParameterTypes)
|
||||
}
|
||||
private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(c.getExceptionTypes)
|
||||
}
|
||||
private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(m.getParameterTypes)
|
||||
}
|
||||
private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch {
|
||||
case _: GenericSignatureFormatError => m.getReturnType
|
||||
}
|
||||
private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(m.getExceptionTypes)
|
||||
}
|
||||
private[this] def returnType(f: Field): Type = try f.getGenericType catch {
|
||||
case _: GenericSignatureFormatError => f.getType
|
||||
}
|
||||
private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(c.getParameterTypes)
|
||||
}
|
||||
private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(c.getExceptionTypes)
|
||||
}
|
||||
private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(m.getParameterTypes)
|
||||
}
|
||||
private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch {
|
||||
case _: GenericSignatureFormatError => m.getReturnType
|
||||
}
|
||||
private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch {
|
||||
case _: GenericSignatureFormatError => convert(m.getExceptionTypes)
|
||||
}
|
||||
|
||||
private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch{
|
||||
case _: GenericSignatureFormatError => c.getSuperclass
|
||||
}
|
||||
private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch{
|
||||
case _: GenericSignatureFormatError => convert(c.getInterfaces)
|
||||
}
|
||||
private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch {
|
||||
case _: GenericSignatureFormatError => new Array(0)
|
||||
}
|
||||
private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch {
|
||||
case _: GenericSignatureFormatError => c.getSuperclass
|
||||
}
|
||||
private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch {
|
||||
case _: GenericSignatureFormatError => convert(c.getInterfaces)
|
||||
}
|
||||
|
||||
private[this] def convert(classes: Array[Class[_]]): Array[Type] =
|
||||
classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only
|
||||
private[this] def convert(classes: Array[Class[_]]): Array[Type] =
|
||||
classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,71 +1,66 @@
|
|||
package xsbt.api
|
||||
|
||||
import xsbti.SafeLazy
|
||||
import xsbti.api._
|
||||
import scala.collection.mutable.HashSet
|
||||
import xsbti.SafeLazy
|
||||
import xsbti.api._
|
||||
import scala.collection.mutable.HashSet
|
||||
|
||||
object APIUtil
|
||||
{
|
||||
val modifiersToByte = (m: Modifiers) => {
|
||||
import m._
|
||||
def x(b: Boolean, bit: Int) = if(b) 1 << bit else 0
|
||||
( x(isAbstract, 0) | x(isOverride, 1) | x(isFinal, 2) | x(isSealed, 3) | x(isImplicit, 4) | x(isLazy, 5) | x(isMacro, 6) ).toByte
|
||||
}
|
||||
val byteToModifiers = (b: Byte) => {
|
||||
def x(bit: Int) = (b & (1 << bit)) != 0
|
||||
new Modifiers( x(0), x(1), x(2), x(3), x(4), x(5), x(6) )
|
||||
}
|
||||
object APIUtil {
|
||||
val modifiersToByte = (m: Modifiers) => {
|
||||
import m._
|
||||
def x(b: Boolean, bit: Int) = if (b) 1 << bit else 0
|
||||
(x(isAbstract, 0) | x(isOverride, 1) | x(isFinal, 2) | x(isSealed, 3) | x(isImplicit, 4) | x(isLazy, 5) | x(isMacro, 6)).toByte
|
||||
}
|
||||
val byteToModifiers = (b: Byte) => {
|
||||
def x(bit: Int) = (b & (1 << bit)) != 0
|
||||
new Modifiers(x(0), x(1), x(2), x(3), x(4), x(5), x(6))
|
||||
}
|
||||
|
||||
def isScalaSourceName(name: String): Boolean = name.endsWith(".scala")
|
||||
def isScalaSourceName(name: String): Boolean = name.endsWith(".scala")
|
||||
|
||||
def hasMacro(s: SourceAPI): Boolean =
|
||||
{
|
||||
val check = new HasMacro
|
||||
check.visitAPI(s)
|
||||
check.hasMacro
|
||||
}
|
||||
def hasMacro(s: SourceAPI): Boolean =
|
||||
{
|
||||
val check = new HasMacro
|
||||
check.visitAPI(s)
|
||||
check.hasMacro
|
||||
}
|
||||
|
||||
private[this] class HasMacro extends Visit
|
||||
{
|
||||
var hasMacro = false
|
||||
private[this] class HasMacro extends Visit {
|
||||
var hasMacro = false
|
||||
|
||||
// Don't visit inherited definitions since we consider that a class
|
||||
// that inherits a macro does not have a macro.
|
||||
override def visitStructure0(structure: Structure)
|
||||
{
|
||||
visitTypes(structure.parents)
|
||||
visitDefinitions(structure.declared)
|
||||
}
|
||||
// Don't visit inherited definitions since we consider that a class
|
||||
// that inherits a macro does not have a macro.
|
||||
override def visitStructure0(structure: Structure) {
|
||||
visitTypes(structure.parents)
|
||||
visitDefinitions(structure.declared)
|
||||
}
|
||||
|
||||
override def visitModifiers(m: Modifiers)
|
||||
{
|
||||
hasMacro ||= m.isMacro
|
||||
super.visitModifiers(m)
|
||||
}
|
||||
}
|
||||
override def visitModifiers(m: Modifiers) {
|
||||
hasMacro ||= m.isMacro
|
||||
super.visitModifiers(m)
|
||||
}
|
||||
}
|
||||
|
||||
def minimize(api: SourceAPI): SourceAPI =
|
||||
new SourceAPI(api.packages, minimizeDefinitions(api.definitions))
|
||||
def minimizeDefinitions(ds: Array[Definition]): Array[Definition] =
|
||||
ds flatMap minimizeDefinition
|
||||
def minimizeDefinition(d: Definition): Array[Definition] =
|
||||
d match
|
||||
{
|
||||
case c: ClassLike => Array(minimizeClass(c))
|
||||
case _ => Array()
|
||||
}
|
||||
def minimizeClass(c: ClassLike): ClassLike =
|
||||
{
|
||||
val savedAnnotations = Discovery.defAnnotations(c.structure, (_: Any) => true).toArray[String]
|
||||
val struct = minimizeStructure(c.structure, c.definitionType == DefinitionType.Module)
|
||||
new ClassLike(c.definitionType, lzy(emptyType), lzy(struct), savedAnnotations, c.typeParameters, c.name, c.access, c.modifiers, c.annotations)
|
||||
}
|
||||
def minimize(api: SourceAPI): SourceAPI =
|
||||
new SourceAPI(api.packages, minimizeDefinitions(api.definitions))
|
||||
def minimizeDefinitions(ds: Array[Definition]): Array[Definition] =
|
||||
ds flatMap minimizeDefinition
|
||||
def minimizeDefinition(d: Definition): Array[Definition] =
|
||||
d match {
|
||||
case c: ClassLike => Array(minimizeClass(c))
|
||||
case _ => Array()
|
||||
}
|
||||
def minimizeClass(c: ClassLike): ClassLike =
|
||||
{
|
||||
val savedAnnotations = Discovery.defAnnotations(c.structure, (_: Any) => true).toArray[String]
|
||||
val struct = minimizeStructure(c.structure, c.definitionType == DefinitionType.Module)
|
||||
new ClassLike(c.definitionType, lzy(emptyType), lzy(struct), savedAnnotations, c.typeParameters, c.name, c.access, c.modifiers, c.annotations)
|
||||
}
|
||||
|
||||
def minimizeStructure(s: Structure, isModule: Boolean): Structure =
|
||||
new Structure(lzy(s.parents), filterDefinitions(s.declared, isModule), filterDefinitions(s.inherited, isModule))
|
||||
def filterDefinitions(ds: Array[Definition], isModule: Boolean): Lazy[Array[Definition]] =
|
||||
lzy(if(isModule) ds filter Discovery.isMainMethod else Array())
|
||||
private[this] def lzy[T <: AnyRef](t: T): Lazy[T] = SafeLazy.strict(t)
|
||||
def minimizeStructure(s: Structure, isModule: Boolean): Structure =
|
||||
new Structure(lzy(s.parents), filterDefinitions(s.declared, isModule), filterDefinitions(s.inherited, isModule))
|
||||
def filterDefinitions(ds: Array[Definition], isModule: Boolean): Lazy[Array[Definition]] =
|
||||
lzy(if (isModule) ds filter Discovery.isMainMethod else Array())
|
||||
private[this] def lzy[T <: AnyRef](t: T): Lazy[T] = SafeLazy.strict(t)
|
||||
|
||||
private[this] val emptyType = new EmptyType
|
||||
private[this] val emptyType = new EmptyType
|
||||
}
|
||||
|
|
@ -3,11 +3,9 @@
|
|||
*/
|
||||
package xsbt.api
|
||||
|
||||
final case class Discovered(baseClasses: Set[String], annotations: Set[String], hasMain: Boolean, isModule: Boolean)
|
||||
{
|
||||
def isEmpty = baseClasses.isEmpty && annotations.isEmpty
|
||||
final case class Discovered(baseClasses: Set[String], annotations: Set[String], hasMain: Boolean, isModule: Boolean) {
|
||||
def isEmpty = baseClasses.isEmpty && annotations.isEmpty
|
||||
}
|
||||
object Discovered
|
||||
{
|
||||
def empty = new Discovered(Set.empty, Set.empty, false, false)
|
||||
object Discovered {
|
||||
def empty = new Discovered(Set.empty, Set.empty, false, false)
|
||||
}
|
||||
|
|
@ -3,105 +3,101 @@
|
|||
*/
|
||||
package xsbt.api
|
||||
|
||||
import xsbti.api.{Path => APath, _}
|
||||
import xsbti.api.{ Path => APath, _ }
|
||||
|
||||
import Discovery._
|
||||
|
||||
class Discovery(baseClasses: Set[String], annotations: Set[String])
|
||||
{
|
||||
def apply(s: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
s.map { d => (d, apply(d)) }
|
||||
def apply(d: Definition): Discovered =
|
||||
d match
|
||||
{
|
||||
case c: ClassLike if isConcrete(c.modifiers) =>
|
||||
if(isPublic(c))
|
||||
discover(c)
|
||||
else if(isModule(c) && hasMainMethod(c)) // jvm does not require a main class to be public
|
||||
new Discovered(Set.empty, Set.empty, true, true)
|
||||
else
|
||||
Discovered.empty
|
||||
case _ => Discovered.empty
|
||||
}
|
||||
def discover(c: ClassLike): Discovered =
|
||||
{
|
||||
val onClass = Discovery.findAnnotations(c.annotations, annotations)
|
||||
val onDefs = Discovery.defAnnotations(c.structure, annotations) ++ c.savedAnnotations.filter(annotations)
|
||||
val module = isModule(c)
|
||||
new Discovered( bases(c.name, c.structure.parents), onClass ++ onDefs, module && hasMainMethod(c), module )
|
||||
}
|
||||
class Discovery(baseClasses: Set[String], annotations: Set[String]) {
|
||||
def apply(s: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
s.map { d => (d, apply(d)) }
|
||||
def apply(d: Definition): Discovered =
|
||||
d match {
|
||||
case c: ClassLike if isConcrete(c.modifiers) =>
|
||||
if (isPublic(c))
|
||||
discover(c)
|
||||
else if (isModule(c) && hasMainMethod(c)) // jvm does not require a main class to be public
|
||||
new Discovered(Set.empty, Set.empty, true, true)
|
||||
else
|
||||
Discovered.empty
|
||||
case _ => Discovered.empty
|
||||
}
|
||||
def discover(c: ClassLike): Discovered =
|
||||
{
|
||||
val onClass = Discovery.findAnnotations(c.annotations, annotations)
|
||||
val onDefs = Discovery.defAnnotations(c.structure, annotations) ++ c.savedAnnotations.filter(annotations)
|
||||
val module = isModule(c)
|
||||
new Discovered(bases(c.name, c.structure.parents), onClass ++ onDefs, module && hasMainMethod(c), module)
|
||||
}
|
||||
|
||||
def bases(own: String, c: Seq[Type]): Set[String] =
|
||||
(own +: c.flatMap(simpleName)).filter(baseClasses).toSet
|
||||
def bases(own: String, c: Seq[Type]): Set[String] =
|
||||
(own +: c.flatMap(simpleName)).filter(baseClasses).toSet
|
||||
|
||||
}
|
||||
object Discovery
|
||||
{
|
||||
def apply(subclasses: Set[String], annotations: Set[String])(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
{
|
||||
val d = new Discovery(subclasses, annotations)
|
||||
d(definitions)
|
||||
}
|
||||
def applications(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
apply(Set.empty, Set.empty)( definitions )
|
||||
object Discovery {
|
||||
def apply(subclasses: Set[String], annotations: Set[String])(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
{
|
||||
val d = new Discovery(subclasses, annotations)
|
||||
d(definitions)
|
||||
}
|
||||
def applications(definitions: Seq[Definition]): Seq[(Definition, Discovered)] =
|
||||
apply(Set.empty, Set.empty)(definitions)
|
||||
|
||||
def findAnnotations(as: Seq[Annotation], pred: String => Boolean): Set[String] =
|
||||
as.flatMap { a => simpleName(a.base).filter(pred) }.toSet
|
||||
def defAnnotations(s: Structure, pred: String => Boolean): Set[String] =
|
||||
defAnnotations(s.declared, pred) ++ defAnnotations(s.inherited, pred)
|
||||
def defAnnotations(defs: Seq[Definition], pred: String => Boolean): Set[String] =
|
||||
findAnnotations( defs.flatMap { case d: Def if isPublic(d) => d.annotations.toSeq; case _ => Nil }, pred )
|
||||
def findAnnotations(as: Seq[Annotation], pred: String => Boolean): Set[String] =
|
||||
as.flatMap { a => simpleName(a.base).filter(pred) }.toSet
|
||||
def defAnnotations(s: Structure, pred: String => Boolean): Set[String] =
|
||||
defAnnotations(s.declared, pred) ++ defAnnotations(s.inherited, pred)
|
||||
def defAnnotations(defs: Seq[Definition], pred: String => Boolean): Set[String] =
|
||||
findAnnotations(defs.flatMap { case d: Def if isPublic(d) => d.annotations.toSeq; case _ => Nil }, pred)
|
||||
|
||||
def isConcrete(a: Definition): Boolean = isConcrete(a.modifiers)
|
||||
def isConcrete(m: Modifiers) = !m.isAbstract
|
||||
def isPublic(a: Definition): Boolean = isPublic(a.access)
|
||||
def isPublic(a: Access): Boolean = a.isInstanceOf[Public]
|
||||
def isModule(c: ClassLike) = c.definitionType == DefinitionType.Module
|
||||
def isConcrete(a: Definition): Boolean = isConcrete(a.modifiers)
|
||||
def isConcrete(m: Modifiers) = !m.isAbstract
|
||||
def isPublic(a: Definition): Boolean = isPublic(a.access)
|
||||
def isPublic(a: Access): Boolean = a.isInstanceOf[Public]
|
||||
def isModule(c: ClassLike) = c.definitionType == DefinitionType.Module
|
||||
|
||||
def hasMainMethod(c: ClassLike): Boolean =
|
||||
hasMainMethod(c.structure.declared) || hasMainMethod(c.structure.inherited)
|
||||
def hasMainMethod(defs: Seq[Definition]): Boolean =
|
||||
defs.exists(isMainMethod)
|
||||
def isMainMethod(d: Definition): Boolean =
|
||||
d match {
|
||||
case d: Def => d.name == "main" && isPublic(d) && isConcrete(d) && isUnit(d.returnType) && isStringArray(d.valueParameters)
|
||||
case _ => false
|
||||
}
|
||||
def isStringArray(vp: IndexedSeq[ParameterList]): Boolean = vp.length == 1 && isStringArray(vp(0).parameters)
|
||||
def isStringArray(params: Seq[MethodParameter]): Boolean = params.length == 1 && isStringArray(params(0))
|
||||
def isStringArray(p: MethodParameter): Boolean = (p.modifier == ParameterModifier.Plain || p.modifier == ParameterModifier.Repeated) && isStringArray(p.tpe)
|
||||
def isStringArray(t: Type): Boolean = isParameterized(t, "scala.Array", "java.lang.String") // doesn't handle scala.this#Predef#String, should API phase dealias?
|
||||
def hasMainMethod(c: ClassLike): Boolean =
|
||||
hasMainMethod(c.structure.declared) || hasMainMethod(c.structure.inherited)
|
||||
def hasMainMethod(defs: Seq[Definition]): Boolean =
|
||||
defs.exists(isMainMethod)
|
||||
def isMainMethod(d: Definition): Boolean =
|
||||
d match {
|
||||
case d: Def => d.name == "main" && isPublic(d) && isConcrete(d) && isUnit(d.returnType) && isStringArray(d.valueParameters)
|
||||
case _ => false
|
||||
}
|
||||
def isStringArray(vp: IndexedSeq[ParameterList]): Boolean = vp.length == 1 && isStringArray(vp(0).parameters)
|
||||
def isStringArray(params: Seq[MethodParameter]): Boolean = params.length == 1 && isStringArray(params(0))
|
||||
def isStringArray(p: MethodParameter): Boolean = (p.modifier == ParameterModifier.Plain || p.modifier == ParameterModifier.Repeated) && isStringArray(p.tpe)
|
||||
def isStringArray(t: Type): Boolean = isParameterized(t, "scala.Array", "java.lang.String") // doesn't handle scala.this#Predef#String, should API phase dealias?
|
||||
|
||||
def isParameterized(t: Type, base: String, args: String*): Boolean = t match {
|
||||
case p: Parameterized =>
|
||||
named(p.baseType, base) && p.typeArguments.length == args.length && p.typeArguments.flatMap(simpleName).sameElements(args)
|
||||
case _ => false
|
||||
}
|
||||
def named(t: Type, nme: String) = simpleName(t) == Some(nme)
|
||||
def isParameterized(t: Type, base: String, args: String*): Boolean = t match {
|
||||
case p: Parameterized =>
|
||||
named(p.baseType, base) && p.typeArguments.length == args.length && p.typeArguments.flatMap(simpleName).sameElements(args)
|
||||
case _ => false
|
||||
}
|
||||
def named(t: Type, nme: String) = simpleName(t) == Some(nme)
|
||||
|
||||
def simpleName(t: Type): Option[String] = t match {
|
||||
case a: Annotated => simpleName(a.baseType)
|
||||
case sing: Singleton => None
|
||||
case p: Projection =>
|
||||
p.prefix match {
|
||||
case s: Singleton => pathName(s.path, p.id)
|
||||
case e: EmptyType => Some( p.id )
|
||||
case _ => None
|
||||
}
|
||||
case _ => None
|
||||
}
|
||||
def simpleName(t: Type): Option[String] = t match {
|
||||
case a: Annotated => simpleName(a.baseType)
|
||||
case sing: Singleton => None
|
||||
case p: Projection =>
|
||||
p.prefix match {
|
||||
case s: Singleton => pathName(s.path, p.id)
|
||||
case e: EmptyType => Some(p.id)
|
||||
case _ => None
|
||||
}
|
||||
case _ => None
|
||||
}
|
||||
|
||||
def pathName(p: APath, id: String): Option[String] =
|
||||
{
|
||||
val cs = p.components
|
||||
cs.last match
|
||||
{
|
||||
case _: This =>
|
||||
val ids = cs.init.collect { case i: Id => i.id }
|
||||
if(ids.length == cs.length - 1) Some( (ids ++ Seq(id)).mkString(".") ) else None
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
def pathName(p: APath, id: String): Option[String] =
|
||||
{
|
||||
val cs = p.components
|
||||
cs.last match {
|
||||
case _: This =>
|
||||
val ids = cs.init.collect { case i: Id => i.id }
|
||||
if (ids.length == cs.length - 1) Some((ids ++ Seq(id)).mkString(".")) else None
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
|
||||
def isUnit(t: Type): Boolean = named(t, "scala.Unit")
|
||||
def isUnit(t: Type): Boolean = named(t, "scala.Unit")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,23 +8,22 @@ import xsbti.api._
|
|||
import util.MurmurHash
|
||||
import HashAPI.Hash
|
||||
|
||||
object HashAPI
|
||||
{
|
||||
type Hash = Int
|
||||
def apply(a: SourceAPI): Hash =
|
||||
(new HashAPI(false, true, true)).hashAPI(a)
|
||||
object HashAPI {
|
||||
type Hash = Int
|
||||
def apply(a: SourceAPI): Hash =
|
||||
(new HashAPI(false, true, true)).hashAPI(a)
|
||||
|
||||
def apply(x: Def): Hash = {
|
||||
val hashApi = new HashAPI(false, true, true)
|
||||
hashApi.hashDefinition(x)
|
||||
hashApi.finalizeHash
|
||||
}
|
||||
def apply(x: Def): Hash = {
|
||||
val hashApi = new HashAPI(false, true, true)
|
||||
hashApi.hashDefinition(x)
|
||||
hashApi.finalizeHash
|
||||
}
|
||||
|
||||
def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Hash = {
|
||||
val hashAPI = new HashAPI(false, true, false)
|
||||
hashAPI.hashDefinitionsWithExtraHashes(ds)
|
||||
hashAPI.finalizeHash
|
||||
}
|
||||
def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Hash = {
|
||||
val hashAPI = new HashAPI(false, true, false)
|
||||
hashAPI.hashDefinitionsWithExtraHashes(ds)
|
||||
hashAPI.finalizeHash
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -36,357 +35,328 @@ object HashAPI
|
|||
* be included in a hash sum. Structure can appear as a type (in structural type) and in that case we
|
||||
* always include definitions in a hash sum.
|
||||
*/
|
||||
final class HashAPI(includePrivate: Boolean, includeParamNames: Boolean, includeDefinitions: Boolean)
|
||||
{
|
||||
// this constructor variant is for source and binary backwards compatibility with sbt 0.13.0
|
||||
def this(includePrivate: Boolean, includeParamNames: Boolean) {
|
||||
// in the old logic we used to always include definitions hence
|
||||
// includeDefinitions=true
|
||||
this(includePrivate, includeParamNames, includeDefinitions=true)
|
||||
}
|
||||
final class HashAPI(includePrivate: Boolean, includeParamNames: Boolean, includeDefinitions: Boolean) {
|
||||
// this constructor variant is for source and binary backwards compatibility with sbt 0.13.0
|
||||
def this(includePrivate: Boolean, includeParamNames: Boolean) {
|
||||
// in the old logic we used to always include definitions hence
|
||||
// includeDefinitions=true
|
||||
this(includePrivate, includeParamNames, includeDefinitions = true)
|
||||
}
|
||||
|
||||
import scala.collection.mutable
|
||||
import MurmurHash.{extendHash, nextMagicA, nextMagicB, startHash, startMagicA, startMagicB, stringHash, symmetricHash}
|
||||
import scala.collection.mutable
|
||||
import MurmurHash.{ extendHash, nextMagicA, nextMagicB, startHash, startMagicA, startMagicB, stringHash, symmetricHash }
|
||||
|
||||
private[this] val visitedStructures = visitedMap[Structure]
|
||||
private[this] val visitedClassLike = visitedMap[ClassLike]
|
||||
private[this] def visitedMap[T] = new mutable.HashMap[T, List[Hash]]
|
||||
private[this] def visit[T](map: mutable.Map[T, List[Hash]], t: T)(hashF: T => Unit)
|
||||
{
|
||||
map.put(t, hash :: map.getOrElse(t,Nil)) match {
|
||||
case Some(x :: _) => extend(x)
|
||||
case _ =>
|
||||
hashF(t)
|
||||
for(hs <- map(t))
|
||||
extend(hs)
|
||||
map.put(t, hash :: Nil)
|
||||
}
|
||||
}
|
||||
private[this] val visitedStructures = visitedMap[Structure]
|
||||
private[this] val visitedClassLike = visitedMap[ClassLike]
|
||||
private[this] def visitedMap[T] = new mutable.HashMap[T, List[Hash]]
|
||||
private[this] def visit[T](map: mutable.Map[T, List[Hash]], t: T)(hashF: T => Unit) {
|
||||
map.put(t, hash :: map.getOrElse(t, Nil)) match {
|
||||
case Some(x :: _) => extend(x)
|
||||
case _ =>
|
||||
hashF(t)
|
||||
for (hs <- map(t))
|
||||
extend(hs)
|
||||
map.put(t, hash :: Nil)
|
||||
}
|
||||
}
|
||||
|
||||
private[this] final val ValHash = 1
|
||||
private[this] final val VarHash = 2
|
||||
private[this] final val DefHash = 3
|
||||
private[this] final val ClassHash = 4
|
||||
private[this] final val TypeDeclHash = 5
|
||||
private[this] final val TypeAliasHash = 6
|
||||
private[this] final val ValHash = 1
|
||||
private[this] final val VarHash = 2
|
||||
private[this] final val DefHash = 3
|
||||
private[this] final val ClassHash = 4
|
||||
private[this] final val TypeDeclHash = 5
|
||||
private[this] final val TypeAliasHash = 6
|
||||
|
||||
private[this] final val PublicHash = 30
|
||||
private[this] final val ProtectedHash = 31
|
||||
private[this] final val PrivateHash = 32
|
||||
private[this] final val UnqualifiedHash = 33
|
||||
private[this] final val ThisQualifierHash = 34
|
||||
private[this] final val IdQualifierHash = 35
|
||||
private[this] final val PublicHash = 30
|
||||
private[this] final val ProtectedHash = 31
|
||||
private[this] final val PrivateHash = 32
|
||||
private[this] final val UnqualifiedHash = 33
|
||||
private[this] final val ThisQualifierHash = 34
|
||||
private[this] final val IdQualifierHash = 35
|
||||
|
||||
private[this] final val IdPathHash = 20
|
||||
private[this] final val SuperHash = 21
|
||||
private[this] final val ThisPathHash = 22
|
||||
private[this] final val IdPathHash = 20
|
||||
private[this] final val SuperHash = 21
|
||||
private[this] final val ThisPathHash = 22
|
||||
|
||||
private[this] final val ValueParamsHash = 40
|
||||
private[this] final val ClassPendingHash = 41
|
||||
private[this] final val StructurePendingHash = 42
|
||||
private[this] final val ValueParamsHash = 40
|
||||
private[this] final val ClassPendingHash = 41
|
||||
private[this] final val StructurePendingHash = 42
|
||||
|
||||
private[this] final val EmptyTypeHash = 51
|
||||
private[this] final val ParameterRefHash = 52
|
||||
private[this] final val SingletonHash = 53
|
||||
private[this] final val ProjectionHash = 54
|
||||
private[this] final val ParameterizedHash = 55
|
||||
private[this] final val AnnotatedHash = 56
|
||||
private[this] final val PolymorphicHash = 57
|
||||
private[this] final val ConstantHash = 58
|
||||
private[this] final val ExistentialHash = 59
|
||||
private[this] final val StructureHash = 60
|
||||
private[this] final val EmptyTypeHash = 51
|
||||
private[this] final val ParameterRefHash = 52
|
||||
private[this] final val SingletonHash = 53
|
||||
private[this] final val ProjectionHash = 54
|
||||
private[this] final val ParameterizedHash = 55
|
||||
private[this] final val AnnotatedHash = 56
|
||||
private[this] final val PolymorphicHash = 57
|
||||
private[this] final val ConstantHash = 58
|
||||
private[this] final val ExistentialHash = 59
|
||||
private[this] final val StructureHash = 60
|
||||
|
||||
private[this] final val TrueHash = 97
|
||||
private[this] final val FalseHash = 98
|
||||
private[this] final val TrueHash = 97
|
||||
private[this] final val FalseHash = 98
|
||||
|
||||
private[this] var hash: Hash = startHash(0)
|
||||
private[this] var magicA: Hash = startMagicA
|
||||
private[this] var magicB: Hash = startMagicB
|
||||
|
||||
private[this] var hash: Hash = startHash(0)
|
||||
private[this] var magicA: Hash = startMagicA
|
||||
private[this] var magicB: Hash = startMagicB
|
||||
@inline final def hashString(s: String): Unit = extend(stringHash(s))
|
||||
@inline final def hashBoolean(b: Boolean): Unit = extend(if (b) TrueHash else FalseHash)
|
||||
@inline final def hashSeq[T](s: Seq[T], hashF: T => Unit) {
|
||||
extend(s.length)
|
||||
s foreach hashF
|
||||
}
|
||||
final def hashSymmetric[T](ts: TraversableOnce[T], hashF: T => Unit) {
|
||||
val current = hash
|
||||
val mA = magicA
|
||||
val mB = magicB
|
||||
val (hashes, mAs, mBs) = ts.toList.map { t =>
|
||||
hash = startHash(1)
|
||||
magicA = startMagicA
|
||||
magicB = startMagicB
|
||||
hashF(t)
|
||||
(finalizeHash, magicA, magicB)
|
||||
} unzip3;
|
||||
hash = current
|
||||
magicA = mA
|
||||
magicB = mB
|
||||
extend(symmetricHash(hashes, 0xb592f7ae)) // constant from MurmurHash3
|
||||
}
|
||||
|
||||
@inline final def hashString(s: String): Unit = extend(stringHash(s))
|
||||
@inline final def hashBoolean(b: Boolean): Unit = extend(if(b) TrueHash else FalseHash)
|
||||
@inline final def hashSeq[T](s: Seq[T], hashF: T => Unit)
|
||||
{
|
||||
extend(s.length)
|
||||
s foreach hashF
|
||||
}
|
||||
final def hashSymmetric[T](ts: TraversableOnce[T], hashF: T => Unit)
|
||||
{
|
||||
val current = hash
|
||||
val mA = magicA
|
||||
val mB = magicB
|
||||
val (hashes, mAs, mBs) = ts.toList.map { t =>
|
||||
hash = startHash(1)
|
||||
magicA = startMagicA
|
||||
magicB = startMagicB
|
||||
hashF(t)
|
||||
(finalizeHash, magicA, magicB)
|
||||
} unzip3;
|
||||
hash = current
|
||||
magicA = mA
|
||||
magicB = mB
|
||||
extend(symmetricHash(hashes, 0xb592f7ae)) // constant from MurmurHash3
|
||||
}
|
||||
@inline final def extend(a: Hash) {
|
||||
hash = extendHash(hash, a, magicA, magicB)
|
||||
magicA = nextMagicA(magicA)
|
||||
magicB = nextMagicB(magicB)
|
||||
}
|
||||
|
||||
@inline final def extend(a: Hash)
|
||||
{
|
||||
hash = extendHash(hash, a, magicA, magicB)
|
||||
magicA = nextMagicA(magicA)
|
||||
magicB = nextMagicB(magicB)
|
||||
}
|
||||
def finalizeHash: Hash = MurmurHash.finalizeHash(hash)
|
||||
|
||||
def finalizeHash: Hash = MurmurHash.finalizeHash(hash)
|
||||
def hashModifiers(m: Modifiers) = extend(m.raw)
|
||||
|
||||
def hashModifiers(m: Modifiers) = extend(m.raw)
|
||||
def hashAPI(s: SourceAPI): Hash =
|
||||
{
|
||||
hash = startHash(0)
|
||||
hashSymmetric(s.packages, hashPackage)
|
||||
hashDefinitions(s.definitions, true)
|
||||
finalizeHash
|
||||
}
|
||||
|
||||
def hashAPI(s: SourceAPI): Hash =
|
||||
{
|
||||
hash = startHash(0)
|
||||
hashSymmetric(s.packages, hashPackage)
|
||||
hashDefinitions(s.definitions, true)
|
||||
finalizeHash
|
||||
}
|
||||
def hashPackage(p: Package) = hashString(p.name)
|
||||
|
||||
def hashPackage(p: Package) = hashString(p.name)
|
||||
def hashDefinitions(ds: Seq[Definition], topLevel: Boolean): Unit =
|
||||
{
|
||||
val defs = SameAPI.filterDefinitions(ds, topLevel, includePrivate)
|
||||
hashSymmetric(defs, hashDefinition)
|
||||
}
|
||||
|
||||
def hashDefinitions(ds: Seq[Definition], topLevel: Boolean): Unit =
|
||||
{
|
||||
val defs = SameAPI.filterDefinitions(ds, topLevel, includePrivate)
|
||||
hashSymmetric(defs, hashDefinition)
|
||||
}
|
||||
/**
|
||||
* Hashes a sequence of definitions by combining each definition's own
|
||||
* hash with extra one supplied as first element of a pair.
|
||||
*
|
||||
* It's useful when one wants to influence hash of a definition by some
|
||||
* external (to definition) factor (e.g. location of definition).
|
||||
*
|
||||
* NOTE: This method doesn't perform any filtering of passed definitions.
|
||||
*/
|
||||
def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Unit =
|
||||
{
|
||||
def hashDefinitionCombined(d: Definition, extraHash: Hash): Unit = {
|
||||
hashDefinition(d)
|
||||
extend(extraHash)
|
||||
}
|
||||
hashSymmetric(ds, (hashDefinitionCombined _).tupled)
|
||||
}
|
||||
def hashDefinition(d: Definition) {
|
||||
hashString(d.name)
|
||||
hashAnnotations(d.annotations)
|
||||
hashModifiers(d.modifiers)
|
||||
hashAccess(d.access)
|
||||
d match {
|
||||
case c: ClassLike => hashClass(c)
|
||||
case f: FieldLike => hashField(f)
|
||||
case d: Def => hashDef(d)
|
||||
case t: TypeDeclaration => hashTypeDeclaration(t)
|
||||
case t: TypeAlias => hashTypeAlias(t)
|
||||
}
|
||||
}
|
||||
final def hashClass(c: ClassLike): Unit = visit(visitedClassLike, c)(hashClass0)
|
||||
def hashClass0(c: ClassLike) {
|
||||
extend(ClassHash)
|
||||
hashParameterizedDefinition(c)
|
||||
hashType(c.selfType)
|
||||
hashStructure(c.structure, includeDefinitions)
|
||||
}
|
||||
def hashField(f: FieldLike) {
|
||||
f match {
|
||||
case v: Var => extend(VarHash)
|
||||
case v: Val => extend(ValHash)
|
||||
}
|
||||
hashType(f.tpe)
|
||||
}
|
||||
def hashDef(d: Def) {
|
||||
extend(DefHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashValueParameters(d.valueParameters)
|
||||
hashType(d.returnType)
|
||||
}
|
||||
def hashAccess(a: Access): Unit =
|
||||
a match {
|
||||
case pub: Public => extend(PublicHash)
|
||||
case qual: Qualified => hashQualified(qual)
|
||||
}
|
||||
def hashQualified(qual: Qualified): Unit =
|
||||
{
|
||||
qual match {
|
||||
case p: Protected => extend(ProtectedHash)
|
||||
case p: Private => extend(PrivateHash)
|
||||
}
|
||||
hashQualifier(qual.qualifier)
|
||||
}
|
||||
def hashQualifier(qual: Qualifier): Unit =
|
||||
qual match {
|
||||
case _: Unqualified => extend(UnqualifiedHash)
|
||||
case _: ThisQualifier => extend(ThisQualifierHash)
|
||||
case id: IdQualifier =>
|
||||
extend(IdQualifierHash)
|
||||
hashString(id.value)
|
||||
}
|
||||
|
||||
/**
|
||||
* Hashes a sequence of definitions by combining each definition's own
|
||||
* hash with extra one supplied as first element of a pair.
|
||||
*
|
||||
* It's useful when one wants to influence hash of a definition by some
|
||||
* external (to definition) factor (e.g. location of definition).
|
||||
*
|
||||
* NOTE: This method doesn't perform any filtering of passed definitions.
|
||||
*/
|
||||
def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Unit =
|
||||
{
|
||||
def hashDefinitionCombined(d: Definition, extraHash: Hash): Unit = {
|
||||
hashDefinition(d)
|
||||
extend(extraHash)
|
||||
}
|
||||
hashSymmetric(ds, (hashDefinitionCombined _).tupled)
|
||||
}
|
||||
def hashDefinition(d: Definition)
|
||||
{
|
||||
hashString(d.name)
|
||||
hashAnnotations(d.annotations)
|
||||
hashModifiers(d.modifiers)
|
||||
hashAccess(d.access)
|
||||
d match
|
||||
{
|
||||
case c: ClassLike => hashClass(c)
|
||||
case f: FieldLike => hashField(f)
|
||||
case d: Def => hashDef(d)
|
||||
case t: TypeDeclaration => hashTypeDeclaration(t)
|
||||
case t: TypeAlias => hashTypeAlias(t)
|
||||
}
|
||||
}
|
||||
final def hashClass(c: ClassLike): Unit = visit(visitedClassLike, c)(hashClass0)
|
||||
def hashClass0(c: ClassLike)
|
||||
{
|
||||
extend(ClassHash)
|
||||
hashParameterizedDefinition(c)
|
||||
hashType(c.selfType)
|
||||
hashStructure(c.structure, includeDefinitions)
|
||||
}
|
||||
def hashField(f: FieldLike)
|
||||
{
|
||||
f match
|
||||
{
|
||||
case v: Var => extend(VarHash)
|
||||
case v: Val => extend(ValHash)
|
||||
}
|
||||
hashType(f.tpe)
|
||||
}
|
||||
def hashDef(d: Def)
|
||||
{
|
||||
extend(DefHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashValueParameters(d.valueParameters)
|
||||
hashType(d.returnType)
|
||||
}
|
||||
def hashAccess(a: Access): Unit =
|
||||
a match
|
||||
{
|
||||
case pub: Public => extend(PublicHash)
|
||||
case qual: Qualified => hashQualified(qual)
|
||||
}
|
||||
def hashQualified(qual: Qualified): Unit =
|
||||
{
|
||||
qual match
|
||||
{
|
||||
case p: Protected => extend(ProtectedHash)
|
||||
case p: Private => extend(PrivateHash)
|
||||
}
|
||||
hashQualifier(qual.qualifier)
|
||||
}
|
||||
def hashQualifier(qual: Qualifier): Unit =
|
||||
qual match
|
||||
{
|
||||
case _: Unqualified => extend(UnqualifiedHash)
|
||||
case _: ThisQualifier => extend(ThisQualifierHash)
|
||||
case id: IdQualifier =>
|
||||
extend(IdQualifierHash)
|
||||
hashString(id.value)
|
||||
}
|
||||
def hashValueParameters(valueParameters: Seq[ParameterList]) = hashSeq(valueParameters, hashValueParameterList)
|
||||
def hashValueParameterList(list: ParameterList) =
|
||||
{
|
||||
extend(ValueParamsHash)
|
||||
hashBoolean(list.isImplicit)
|
||||
hashSeq(list.parameters, hashValueParameter)
|
||||
}
|
||||
def hashValueParameter(parameter: MethodParameter) =
|
||||
{
|
||||
hashString(parameter.name)
|
||||
hashType(parameter.tpe)
|
||||
extend(parameter.modifier.ordinal)
|
||||
hashBoolean(parameter.hasDefault)
|
||||
}
|
||||
|
||||
def hashValueParameters(valueParameters: Seq[ParameterList]) = hashSeq(valueParameters, hashValueParameterList)
|
||||
def hashValueParameterList(list: ParameterList) =
|
||||
{
|
||||
extend(ValueParamsHash)
|
||||
hashBoolean(list.isImplicit)
|
||||
hashSeq(list.parameters, hashValueParameter)
|
||||
}
|
||||
def hashValueParameter(parameter: MethodParameter) =
|
||||
{
|
||||
hashString(parameter.name)
|
||||
hashType(parameter.tpe)
|
||||
extend(parameter.modifier.ordinal)
|
||||
hashBoolean(parameter.hasDefault)
|
||||
}
|
||||
def hashParameterizedDefinition[T <: ParameterizedDefinition](d: T) {
|
||||
hashTypeParameters(d.typeParameters)
|
||||
}
|
||||
def hashTypeDeclaration(d: TypeDeclaration) {
|
||||
extend(TypeDeclHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashType(d.lowerBound)
|
||||
hashType(d.upperBound)
|
||||
}
|
||||
def hashTypeAlias(d: TypeAlias) {
|
||||
extend(TypeAliasHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashType(d.tpe)
|
||||
}
|
||||
|
||||
def hashParameterizedDefinition[T <: ParameterizedDefinition](d: T)
|
||||
{
|
||||
hashTypeParameters(d.typeParameters)
|
||||
}
|
||||
def hashTypeDeclaration(d: TypeDeclaration)
|
||||
{
|
||||
extend(TypeDeclHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashType(d.lowerBound)
|
||||
hashType(d.upperBound)
|
||||
}
|
||||
def hashTypeAlias(d: TypeAlias)
|
||||
{
|
||||
extend(TypeAliasHash)
|
||||
hashParameterizedDefinition(d)
|
||||
hashType(d.tpe)
|
||||
}
|
||||
def hashTypeParameters(parameters: Seq[TypeParameter]) = hashSeq(parameters, hashTypeParameter)
|
||||
def hashTypeParameter(parameter: TypeParameter) {
|
||||
hashString(parameter.id)
|
||||
extend(parameter.variance.ordinal)
|
||||
hashTypeParameters(parameter.typeParameters)
|
||||
hashType(parameter.lowerBound)
|
||||
hashType(parameter.upperBound)
|
||||
hashAnnotations(parameter.annotations)
|
||||
}
|
||||
def hashAnnotations(annotations: Seq[Annotation]) = hashSeq(annotations, hashAnnotation)
|
||||
def hashAnnotation(annotation: Annotation) =
|
||||
{
|
||||
hashType(annotation.base)
|
||||
hashAnnotationArguments(annotation.arguments)
|
||||
}
|
||||
def hashAnnotationArguments(args: Seq[AnnotationArgument]) = hashSeq(args, hashAnnotationArgument)
|
||||
def hashAnnotationArgument(arg: AnnotationArgument) {
|
||||
hashString(arg.name)
|
||||
hashString(arg.value)
|
||||
}
|
||||
|
||||
def hashTypeParameters(parameters: Seq[TypeParameter]) = hashSeq(parameters, hashTypeParameter)
|
||||
def hashTypeParameter(parameter: TypeParameter)
|
||||
{
|
||||
hashString(parameter.id)
|
||||
extend(parameter.variance.ordinal)
|
||||
hashTypeParameters(parameter.typeParameters)
|
||||
hashType(parameter.lowerBound)
|
||||
hashType(parameter.upperBound)
|
||||
hashAnnotations(parameter.annotations)
|
||||
}
|
||||
def hashAnnotations(annotations: Seq[Annotation]) = hashSeq(annotations, hashAnnotation)
|
||||
def hashAnnotation(annotation: Annotation) =
|
||||
{
|
||||
hashType(annotation.base)
|
||||
hashAnnotationArguments(annotation.arguments)
|
||||
}
|
||||
def hashAnnotationArguments(args: Seq[AnnotationArgument]) = hashSeq(args, hashAnnotationArgument)
|
||||
def hashAnnotationArgument(arg: AnnotationArgument)
|
||||
{
|
||||
hashString(arg.name)
|
||||
hashString(arg.value)
|
||||
}
|
||||
def hashTypes(ts: Seq[Type], includeDefinitions: Boolean = true) =
|
||||
hashSeq(ts, (t: Type) => hashType(t, includeDefinitions))
|
||||
def hashType(t: Type, includeDefinitions: Boolean = true): Unit =
|
||||
t match {
|
||||
case s: Structure => hashStructure(s, includeDefinitions)
|
||||
case e: Existential => hashExistential(e)
|
||||
case c: Constant => hashConstant(c)
|
||||
case p: Polymorphic => hashPolymorphic(p)
|
||||
case a: Annotated => hashAnnotated(a)
|
||||
case p: Parameterized => hashParameterized(p)
|
||||
case p: Projection => hashProjection(p)
|
||||
case _: EmptyType => extend(EmptyTypeHash)
|
||||
case s: Singleton => hashSingleton(s)
|
||||
case pr: ParameterRef => hashParameterRef(pr)
|
||||
}
|
||||
|
||||
def hashTypes(ts: Seq[Type], includeDefinitions: Boolean = true) =
|
||||
hashSeq(ts, (t: Type) => hashType(t, includeDefinitions))
|
||||
def hashType(t: Type, includeDefinitions: Boolean = true): Unit =
|
||||
t match
|
||||
{
|
||||
case s: Structure => hashStructure(s, includeDefinitions)
|
||||
case e: Existential => hashExistential(e)
|
||||
case c: Constant => hashConstant(c)
|
||||
case p: Polymorphic => hashPolymorphic(p)
|
||||
case a: Annotated => hashAnnotated(a)
|
||||
case p: Parameterized => hashParameterized(p)
|
||||
case p: Projection => hashProjection(p)
|
||||
case _: EmptyType => extend(EmptyTypeHash)
|
||||
case s: Singleton => hashSingleton(s)
|
||||
case pr: ParameterRef => hashParameterRef(pr)
|
||||
}
|
||||
def hashParameterRef(p: ParameterRef) {
|
||||
extend(ParameterRefHash)
|
||||
hashString(p.id)
|
||||
}
|
||||
def hashSingleton(s: Singleton) {
|
||||
extend(SingletonHash)
|
||||
hashPath(s.path)
|
||||
}
|
||||
def hashPath(path: Path) = hashSeq(path.components, hashPathComponent)
|
||||
def hashPathComponent(pc: PathComponent) = pc match {
|
||||
case _: This => extend(ThisPathHash)
|
||||
case s: Super => hashSuperPath(s)
|
||||
case id: Id => hashIdPath(id)
|
||||
}
|
||||
def hashSuperPath(s: Super) {
|
||||
extend(SuperHash)
|
||||
hashPath(s.qualifier)
|
||||
}
|
||||
def hashIdPath(id: Id) {
|
||||
extend(IdPathHash)
|
||||
hashString(id.id)
|
||||
}
|
||||
|
||||
def hashParameterRef(p: ParameterRef)
|
||||
{
|
||||
extend(ParameterRefHash)
|
||||
hashString(p.id)
|
||||
}
|
||||
def hashSingleton(s: Singleton)
|
||||
{
|
||||
extend(SingletonHash)
|
||||
hashPath(s.path)
|
||||
}
|
||||
def hashPath(path: Path) = hashSeq(path.components, hashPathComponent)
|
||||
def hashPathComponent(pc: PathComponent) = pc match
|
||||
{
|
||||
case _: This => extend(ThisPathHash)
|
||||
case s: Super => hashSuperPath(s)
|
||||
case id: Id => hashIdPath(id)
|
||||
}
|
||||
def hashSuperPath(s: Super)
|
||||
{
|
||||
extend(SuperHash)
|
||||
hashPath(s.qualifier)
|
||||
}
|
||||
def hashIdPath(id: Id)
|
||||
{
|
||||
extend(IdPathHash)
|
||||
hashString(id.id)
|
||||
}
|
||||
|
||||
def hashConstant(c: Constant) =
|
||||
{
|
||||
extend(ConstantHash)
|
||||
hashString(c.value)
|
||||
hashType(c.baseType)
|
||||
}
|
||||
def hashExistential(e: Existential) =
|
||||
{
|
||||
extend(ExistentialHash)
|
||||
hashParameters(e.clause, e.baseType)
|
||||
}
|
||||
def hashPolymorphic(p: Polymorphic) =
|
||||
{
|
||||
extend(PolymorphicHash)
|
||||
hashParameters(p.parameters, p.baseType)
|
||||
}
|
||||
def hashProjection(p: Projection) =
|
||||
{
|
||||
extend(ProjectionHash)
|
||||
hashString(p.id)
|
||||
hashType(p.prefix)
|
||||
}
|
||||
def hashParameterized(p: Parameterized)
|
||||
{
|
||||
extend(ParameterizedHash)
|
||||
hashType(p.baseType)
|
||||
hashTypes(p.typeArguments)
|
||||
}
|
||||
def hashAnnotated(a: Annotated)
|
||||
{
|
||||
extend(AnnotatedHash)
|
||||
hashType(a.baseType)
|
||||
hashAnnotations(a.annotations)
|
||||
}
|
||||
final def hashStructure(structure: Structure, includeDefinitions: Boolean) =
|
||||
visit(visitedStructures, structure)(structure => hashStructure0(structure, includeDefinitions))
|
||||
def hashStructure0(structure: Structure, includeDefinitions: Boolean)
|
||||
{
|
||||
extend(StructureHash)
|
||||
hashTypes(structure.parents, includeDefinitions)
|
||||
if (includeDefinitions) {
|
||||
hashDefinitions(structure.declared, false)
|
||||
hashDefinitions(structure.inherited, false)
|
||||
}
|
||||
}
|
||||
def hashParameters(parameters: Seq[TypeParameter], base: Type): Unit =
|
||||
{
|
||||
hashTypeParameters(parameters)
|
||||
hashType(base)
|
||||
}
|
||||
def hashConstant(c: Constant) =
|
||||
{
|
||||
extend(ConstantHash)
|
||||
hashString(c.value)
|
||||
hashType(c.baseType)
|
||||
}
|
||||
def hashExistential(e: Existential) =
|
||||
{
|
||||
extend(ExistentialHash)
|
||||
hashParameters(e.clause, e.baseType)
|
||||
}
|
||||
def hashPolymorphic(p: Polymorphic) =
|
||||
{
|
||||
extend(PolymorphicHash)
|
||||
hashParameters(p.parameters, p.baseType)
|
||||
}
|
||||
def hashProjection(p: Projection) =
|
||||
{
|
||||
extend(ProjectionHash)
|
||||
hashString(p.id)
|
||||
hashType(p.prefix)
|
||||
}
|
||||
def hashParameterized(p: Parameterized) {
|
||||
extend(ParameterizedHash)
|
||||
hashType(p.baseType)
|
||||
hashTypes(p.typeArguments)
|
||||
}
|
||||
def hashAnnotated(a: Annotated) {
|
||||
extend(AnnotatedHash)
|
||||
hashType(a.baseType)
|
||||
hashAnnotations(a.annotations)
|
||||
}
|
||||
final def hashStructure(structure: Structure, includeDefinitions: Boolean) =
|
||||
visit(visitedStructures, structure)(structure => hashStructure0(structure, includeDefinitions))
|
||||
def hashStructure0(structure: Structure, includeDefinitions: Boolean) {
|
||||
extend(StructureHash)
|
||||
hashTypes(structure.parents, includeDefinitions)
|
||||
if (includeDefinitions) {
|
||||
hashDefinitions(structure.declared, false)
|
||||
hashDefinitions(structure.inherited, false)
|
||||
}
|
||||
}
|
||||
def hashParameters(parameters: Seq[TypeParameter], base: Type): Unit =
|
||||
{
|
||||
hashTypeParameters(parameters)
|
||||
hashType(base)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -18,129 +18,130 @@ import xsbti.api.DefinitionType.Trait
|
|||
*/
|
||||
class NameHashing {
|
||||
|
||||
import NameHashing._
|
||||
import NameHashing._
|
||||
|
||||
/**
|
||||
* This method takes an API representation and extracts a flat collection of all
|
||||
* definitions contained in that API representation. Then it groups definition
|
||||
* by a simple name. Lastly, it computes a hash sum of all definitions in a single
|
||||
* group.
|
||||
*
|
||||
* NOTE: The hashing sum used for hashing a group of definition is insensitive
|
||||
* to order of definitions.
|
||||
*/
|
||||
def nameHashes(source: SourceAPI): _internalOnly_NameHashes = {
|
||||
val apiPublicDefs = publicDefs(source)
|
||||
val (regularDefs, implicitDefs) = apiPublicDefs.partition(locDef => !locDef.definition.modifiers.isImplicit)
|
||||
val regularNameHashes = nameHashesForLocatedDefinitions(regularDefs)
|
||||
val implicitNameHashes = nameHashesForLocatedDefinitions(implicitDefs)
|
||||
new _internalOnly_NameHashes(regularNameHashes.toArray, implicitNameHashes.toArray)
|
||||
}
|
||||
/**
|
||||
* This method takes an API representation and extracts a flat collection of all
|
||||
* definitions contained in that API representation. Then it groups definition
|
||||
* by a simple name. Lastly, it computes a hash sum of all definitions in a single
|
||||
* group.
|
||||
*
|
||||
* NOTE: The hashing sum used for hashing a group of definition is insensitive
|
||||
* to order of definitions.
|
||||
*/
|
||||
def nameHashes(source: SourceAPI): _internalOnly_NameHashes = {
|
||||
val apiPublicDefs = publicDefs(source)
|
||||
val (regularDefs, implicitDefs) = apiPublicDefs.partition(locDef => !locDef.definition.modifiers.isImplicit)
|
||||
val regularNameHashes = nameHashesForLocatedDefinitions(regularDefs)
|
||||
val implicitNameHashes = nameHashesForLocatedDefinitions(implicitDefs)
|
||||
new _internalOnly_NameHashes(regularNameHashes.toArray, implicitNameHashes.toArray)
|
||||
}
|
||||
|
||||
private def nameHashesForLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Iterable[_internalOnly_NameHash] = {
|
||||
val groupedBySimpleName = locatedDefs.groupBy(locatedDef => localName(locatedDef.definition.name))
|
||||
val hashes = groupedBySimpleName.mapValues(hashLocatedDefinitions)
|
||||
hashes.toIterable.map({ case (name: String, hash: Int) => new _internalOnly_NameHash(name, hash) })
|
||||
}
|
||||
private def nameHashesForLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Iterable[_internalOnly_NameHash] = {
|
||||
val groupedBySimpleName = locatedDefs.groupBy(locatedDef => localName(locatedDef.definition.name))
|
||||
val hashes = groupedBySimpleName.mapValues(hashLocatedDefinitions)
|
||||
hashes.toIterable.map({ case (name: String, hash: Int) => new _internalOnly_NameHash(name, hash) })
|
||||
}
|
||||
|
||||
private def hashLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Int = {
|
||||
val defsWithExtraHashes = locatedDefs.toSeq.map(ld => ld.definition -> ld.location.hashCode)
|
||||
xsbt.api.HashAPI.hashDefinitionsWithExtraHashes(defsWithExtraHashes)
|
||||
}
|
||||
private def hashLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Int = {
|
||||
val defsWithExtraHashes = locatedDefs.toSeq.map(ld => ld.definition -> ld.location.hashCode)
|
||||
xsbt.api.HashAPI.hashDefinitionsWithExtraHashes(defsWithExtraHashes)
|
||||
}
|
||||
|
||||
/**
|
||||
* A visitor that visits given API object and extracts all nested public
|
||||
* definitions it finds. The extracted definitions have Location attached
|
||||
* to them which identifies API object's location.
|
||||
*
|
||||
* The returned location is basically a path to a definition that contains
|
||||
* the located definition. For example, if we have:
|
||||
*
|
||||
* object Foo {
|
||||
* class Bar { def abc: Int }
|
||||
* }
|
||||
*
|
||||
* then location of `abc` is Seq((TermName, Foo), (TypeName, Bar))
|
||||
*/
|
||||
private class ExtractPublicDefinitions extends Visit {
|
||||
val locatedDefs = scala.collection.mutable.Buffer[LocatedDefinition]()
|
||||
private var currentLocation: Location = Location()
|
||||
override def visitAPI(s: SourceAPI): Unit = {
|
||||
s.packages foreach visitPackage
|
||||
s.definitions foreach { case topLevelDef: ClassLike =>
|
||||
val packageName = {
|
||||
val fullName = topLevelDef.name()
|
||||
val lastDotIndex = fullName.lastIndexOf('.')
|
||||
if (lastDotIndex <= 0) "" else fullName.substring(0, lastDotIndex-1)
|
||||
}
|
||||
currentLocation = packageAsLocation(packageName)
|
||||
visitDefinition(topLevelDef)
|
||||
}
|
||||
}
|
||||
override def visitDefinition(d: Definition): Unit = {
|
||||
val locatedDef = LocatedDefinition(currentLocation, d)
|
||||
locatedDefs += locatedDef
|
||||
d match {
|
||||
case cl: xsbti.api.ClassLike =>
|
||||
val savedLocation = currentLocation
|
||||
currentLocation = classLikeAsLocation(currentLocation, cl)
|
||||
super.visitDefinition(d)
|
||||
currentLocation = savedLocation
|
||||
case _ =>
|
||||
super.visitDefinition(d)
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A visitor that visits given API object and extracts all nested public
|
||||
* definitions it finds. The extracted definitions have Location attached
|
||||
* to them which identifies API object's location.
|
||||
*
|
||||
* The returned location is basically a path to a definition that contains
|
||||
* the located definition. For example, if we have:
|
||||
*
|
||||
* object Foo {
|
||||
* class Bar { def abc: Int }
|
||||
* }
|
||||
*
|
||||
* then location of `abc` is Seq((TermName, Foo), (TypeName, Bar))
|
||||
*/
|
||||
private class ExtractPublicDefinitions extends Visit {
|
||||
val locatedDefs = scala.collection.mutable.Buffer[LocatedDefinition]()
|
||||
private var currentLocation: Location = Location()
|
||||
override def visitAPI(s: SourceAPI): Unit = {
|
||||
s.packages foreach visitPackage
|
||||
s.definitions foreach {
|
||||
case topLevelDef: ClassLike =>
|
||||
val packageName = {
|
||||
val fullName = topLevelDef.name()
|
||||
val lastDotIndex = fullName.lastIndexOf('.')
|
||||
if (lastDotIndex <= 0) "" else fullName.substring(0, lastDotIndex - 1)
|
||||
}
|
||||
currentLocation = packageAsLocation(packageName)
|
||||
visitDefinition(topLevelDef)
|
||||
}
|
||||
}
|
||||
override def visitDefinition(d: Definition): Unit = {
|
||||
val locatedDef = LocatedDefinition(currentLocation, d)
|
||||
locatedDefs += locatedDef
|
||||
d match {
|
||||
case cl: xsbti.api.ClassLike =>
|
||||
val savedLocation = currentLocation
|
||||
currentLocation = classLikeAsLocation(currentLocation, cl)
|
||||
super.visitDefinition(d)
|
||||
currentLocation = savedLocation
|
||||
case _ =>
|
||||
super.visitDefinition(d)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def publicDefs(source: SourceAPI): Iterable[LocatedDefinition] = {
|
||||
val visitor = new ExtractPublicDefinitions
|
||||
visitor.visitAPI(source)
|
||||
visitor.locatedDefs
|
||||
}
|
||||
private def publicDefs(source: SourceAPI): Iterable[LocatedDefinition] = {
|
||||
val visitor = new ExtractPublicDefinitions
|
||||
visitor.visitAPI(source)
|
||||
visitor.locatedDefs
|
||||
}
|
||||
|
||||
private def localName(name: String): String = {
|
||||
// when there's no dot in name `lastIndexOf` returns -1 so we handle
|
||||
// that case properly
|
||||
val index = name.lastIndexOf('.') + 1
|
||||
name.substring(index)
|
||||
}
|
||||
private def localName(name: String): String = {
|
||||
// when there's no dot in name `lastIndexOf` returns -1 so we handle
|
||||
// that case properly
|
||||
val index = name.lastIndexOf('.') + 1
|
||||
name.substring(index)
|
||||
}
|
||||
|
||||
private def packageAsLocation(pkg: String): Location = if (pkg != "") {
|
||||
val selectors = pkg.split('.').map(name => Selector(name, TermName)).toSeq
|
||||
Location(selectors: _*)
|
||||
} else Location.Empty
|
||||
private def packageAsLocation(pkg: String): Location = if (pkg != "") {
|
||||
val selectors = pkg.split('.').map(name => Selector(name, TermName)).toSeq
|
||||
Location(selectors: _*)
|
||||
} else Location.Empty
|
||||
|
||||
private def classLikeAsLocation(prefix: Location, cl: ClassLike): Location = {
|
||||
val selector = {
|
||||
val clNameType = NameType(cl.definitionType)
|
||||
Selector(localName(cl.name), clNameType)
|
||||
}
|
||||
Location((prefix.selectors :+ selector): _*)
|
||||
}
|
||||
private def classLikeAsLocation(prefix: Location, cl: ClassLike): Location = {
|
||||
val selector = {
|
||||
val clNameType = NameType(cl.definitionType)
|
||||
Selector(localName(cl.name), clNameType)
|
||||
}
|
||||
Location((prefix.selectors :+ selector): _*)
|
||||
}
|
||||
}
|
||||
|
||||
object NameHashing {
|
||||
private case class LocatedDefinition(location: Location, definition: Definition)
|
||||
/**
|
||||
* Location is expressed as sequence of annotated names. The annotation denotes
|
||||
* a type of a name, i.e. whether it's a term name or type name.
|
||||
*
|
||||
* Using Scala compiler terminology, location is defined as a sequence of member
|
||||
* selections that uniquely identify a given Symbol.
|
||||
*/
|
||||
private case class Location(selectors: Selector*)
|
||||
private object Location {
|
||||
val Empty = Location(Seq.empty: _*)
|
||||
}
|
||||
private case class Selector(name: String, nameType: NameType)
|
||||
private sealed trait NameType
|
||||
private object NameType {
|
||||
import DefinitionType._
|
||||
def apply(dt: DefinitionType): NameType = dt match {
|
||||
case Trait | ClassDef => TypeName
|
||||
case Module | PackageModule => TermName
|
||||
}
|
||||
}
|
||||
private case object TermName extends NameType
|
||||
private case object TypeName extends NameType
|
||||
private case class LocatedDefinition(location: Location, definition: Definition)
|
||||
/**
|
||||
* Location is expressed as sequence of annotated names. The annotation denotes
|
||||
* a type of a name, i.e. whether it's a term name or type name.
|
||||
*
|
||||
* Using Scala compiler terminology, location is defined as a sequence of member
|
||||
* selections that uniquely identify a given Symbol.
|
||||
*/
|
||||
private case class Location(selectors: Selector*)
|
||||
private object Location {
|
||||
val Empty = Location(Seq.empty: _*)
|
||||
}
|
||||
private case class Selector(name: String, nameType: NameType)
|
||||
private sealed trait NameType
|
||||
private object NameType {
|
||||
import DefinitionType._
|
||||
def apply(dt: DefinitionType): NameType = dt match {
|
||||
case Trait | ClassDef => TypeName
|
||||
case Module | PackageModule => TermName
|
||||
}
|
||||
}
|
||||
private case object TermName extends NameType
|
||||
private case object TypeName extends NameType
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,397 +6,386 @@ package xsbt.api
|
|||
import xsbti.api._
|
||||
|
||||
import Function.tupled
|
||||
import scala.collection.{immutable, mutable}
|
||||
import scala.collection.{ immutable, mutable }
|
||||
|
||||
@deprecated("This class is not used in incremental compiler and will be removed in next major version.", "0.13.2")
|
||||
class NameChanges(val newTypes: Set[String], val removedTypes: Set[String], val newTerms: Set[String], val removedTerms: Set[String])
|
||||
{
|
||||
override def toString =
|
||||
(("New types", newTypes) :: ("Removed types", removedTypes) :: ("New terms", newTerms) :: ("Removed terms", removedTerms) :: Nil).map {
|
||||
case (label,set) => label + ":\n\t" + set.mkString("\n\t")
|
||||
}.mkString("Name changes:\n ", "\n ", "\n")
|
||||
class NameChanges(val newTypes: Set[String], val removedTypes: Set[String], val newTerms: Set[String], val removedTerms: Set[String]) {
|
||||
override def toString =
|
||||
(("New types", newTypes) :: ("Removed types", removedTypes) :: ("New terms", newTerms) :: ("Removed terms", removedTerms) :: Nil).map {
|
||||
case (label, set) => label + ":\n\t" + set.mkString("\n\t")
|
||||
}.mkString("Name changes:\n ", "\n ", "\n")
|
||||
|
||||
}
|
||||
|
||||
object TopLevel
|
||||
{
|
||||
@deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
|
||||
def nameChanges(a: Iterable[Source], b: Iterable[Source]): NameChanges = {
|
||||
val api = (_: Source).api
|
||||
apiNameChanges(a map api, b map api)
|
||||
}
|
||||
/** Identifies removed and new top-level definitions by name. */
|
||||
@deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
|
||||
def apiNameChanges(a: Iterable[SourceAPI], b: Iterable[SourceAPI]): NameChanges =
|
||||
{
|
||||
def changes(s: Set[String], t: Set[String]) = (s -- t, t -- s)
|
||||
object TopLevel {
|
||||
@deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
|
||||
def nameChanges(a: Iterable[Source], b: Iterable[Source]): NameChanges = {
|
||||
val api = (_: Source).api
|
||||
apiNameChanges(a map api, b map api)
|
||||
}
|
||||
/** Identifies removed and new top-level definitions by name. */
|
||||
@deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2")
|
||||
def apiNameChanges(a: Iterable[SourceAPI], b: Iterable[SourceAPI]): NameChanges =
|
||||
{
|
||||
def changes(s: Set[String], t: Set[String]) = (s -- t, t -- s)
|
||||
|
||||
val (avalues, atypes) = definitions(a)
|
||||
val (bvalues, btypes) = definitions(b)
|
||||
val (avalues, atypes) = definitions(a)
|
||||
val (bvalues, btypes) = definitions(b)
|
||||
|
||||
val (newTypes, removedTypes) = changes(names(atypes), names(btypes))
|
||||
val (newTerms, removedTerms) = changes(names(avalues), names(bvalues))
|
||||
val (newTypes, removedTypes) = changes(names(atypes), names(btypes))
|
||||
val (newTerms, removedTerms) = changes(names(avalues), names(bvalues))
|
||||
|
||||
new NameChanges(newTypes, removedTypes, newTerms, removedTerms)
|
||||
}
|
||||
def definitions(i: Iterable[SourceAPI]) = SameAPI.separateDefinitions(i.toSeq.flatMap( _.definitions ))
|
||||
def names(s: Iterable[Definition]): Set[String] = Set() ++ s.map(_.name)
|
||||
new NameChanges(newTypes, removedTypes, newTerms, removedTerms)
|
||||
}
|
||||
def definitions(i: Iterable[SourceAPI]) = SameAPI.separateDefinitions(i.toSeq.flatMap(_.definitions))
|
||||
def names(s: Iterable[Definition]): Set[String] = Set() ++ s.map(_.name)
|
||||
}
|
||||
|
||||
/** Checks the API of two source files for equality.*/
|
||||
object SameAPI
|
||||
{
|
||||
def apply(a: Source, b: Source): Boolean =
|
||||
a.apiHash == b.apiHash && (a.hash.length > 0 && b.hash.length > 0) && apply(a.api, b.api)
|
||||
object SameAPI {
|
||||
def apply(a: Source, b: Source): Boolean =
|
||||
a.apiHash == b.apiHash && (a.hash.length > 0 && b.hash.length > 0) && apply(a.api, b.api)
|
||||
|
||||
def apply(a: Def, b: Def): Boolean =
|
||||
(new SameAPI(false, true)).sameDefinitions(List(a), List(b), true)
|
||||
def apply(a: Def, b: Def): Boolean =
|
||||
(new SameAPI(false, true)).sameDefinitions(List(a), List(b), true)
|
||||
|
||||
def apply(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
{
|
||||
val start = System.currentTimeMillis
|
||||
def apply(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
{
|
||||
val start = System.currentTimeMillis
|
||||
|
||||
/*println("\n=========== API #1 ================")
|
||||
/*println("\n=========== API #1 ================")
|
||||
import DefaultShowAPI._
|
||||
println(ShowAPI.show(a))
|
||||
println("\n=========== API #2 ================")
|
||||
println(ShowAPI.show(b))*/
|
||||
|
||||
val result = (new SameAPI(false, true)).check(a,b)
|
||||
val end = System.currentTimeMillis
|
||||
//println(" API comparison took: " + (end - start) / 1000.0 + " s")
|
||||
result
|
||||
}
|
||||
val result = (new SameAPI(false, true)).check(a, b)
|
||||
val end = System.currentTimeMillis
|
||||
//println(" API comparison took: " + (end - start) / 1000.0 + " s")
|
||||
result
|
||||
}
|
||||
|
||||
def separateDefinitions(s: Seq[Definition]): (Seq[Definition], Seq[Definition]) =
|
||||
s.partition(isValueDefinition)
|
||||
def isValueDefinition(d: Definition): Boolean =
|
||||
d match
|
||||
{
|
||||
case _: FieldLike | _: Def=> true
|
||||
case c: ClassLike => isValue(c.definitionType)
|
||||
case _ => false
|
||||
}
|
||||
def isValue(d: DefinitionType): Boolean =
|
||||
d == DefinitionType.Module || d == DefinitionType.PackageModule
|
||||
/** Puts the given definitions in a map according to their names.*/
|
||||
def byName(s: Seq[Definition]): Map[String, List[Definition]] =
|
||||
{
|
||||
var map = Map[String, List[Definition]]()
|
||||
for(d <- s; name = d.name)
|
||||
map = map.updated(name, d :: map.getOrElse(name, Nil) )
|
||||
map
|
||||
}
|
||||
def separateDefinitions(s: Seq[Definition]): (Seq[Definition], Seq[Definition]) =
|
||||
s.partition(isValueDefinition)
|
||||
def isValueDefinition(d: Definition): Boolean =
|
||||
d match {
|
||||
case _: FieldLike | _: Def => true
|
||||
case c: ClassLike => isValue(c.definitionType)
|
||||
case _ => false
|
||||
}
|
||||
def isValue(d: DefinitionType): Boolean =
|
||||
d == DefinitionType.Module || d == DefinitionType.PackageModule
|
||||
/** Puts the given definitions in a map according to their names.*/
|
||||
def byName(s: Seq[Definition]): Map[String, List[Definition]] =
|
||||
{
|
||||
var map = Map[String, List[Definition]]()
|
||||
for (d <- s; name = d.name)
|
||||
map = map.updated(name, d :: map.getOrElse(name, Nil))
|
||||
map
|
||||
}
|
||||
|
||||
/** Removes definitions that should not be considered for API equality.
|
||||
* All top-level definitions are always considered: 'private' only means package-private.
|
||||
* Other definitions are considered if they are not qualified with 'private[this]' or 'private'.*/
|
||||
def filterDefinitions(d: Seq[Definition], topLevel: Boolean, includePrivate: Boolean) = if(topLevel || includePrivate) d else d.filter(isNonPrivate)
|
||||
def isNonPrivate(d: Definition): Boolean = isNonPrivate(d.access)
|
||||
/** Returns false if the `access` is `Private` and qualified, true otherwise.*/
|
||||
def isNonPrivate(access: Access): Boolean =
|
||||
access match
|
||||
{
|
||||
case p: Private if !p.qualifier.isInstanceOf[IdQualifier] => false
|
||||
case _ => true
|
||||
}
|
||||
/**
|
||||
* Removes definitions that should not be considered for API equality.
|
||||
* All top-level definitions are always considered: 'private' only means package-private.
|
||||
* Other definitions are considered if they are not qualified with 'private[this]' or 'private'.
|
||||
*/
|
||||
def filterDefinitions(d: Seq[Definition], topLevel: Boolean, includePrivate: Boolean) = if (topLevel || includePrivate) d else d.filter(isNonPrivate)
|
||||
def isNonPrivate(d: Definition): Boolean = isNonPrivate(d.access)
|
||||
/** Returns false if the `access` is `Private` and qualified, true otherwise.*/
|
||||
def isNonPrivate(access: Access): Boolean =
|
||||
access match {
|
||||
case p: Private if !p.qualifier.isInstanceOf[IdQualifier] => false
|
||||
case _ => true
|
||||
}
|
||||
}
|
||||
/** Used to implement API equality.
|
||||
*
|
||||
* If `includePrivate` is true, `private` and `private[this]` members are included in the comparison. Otherwise, those members are excluded.
|
||||
*/
|
||||
class SameAPI(includePrivate: Boolean, includeParamNames: Boolean)
|
||||
{
|
||||
import SameAPI._
|
||||
/**
|
||||
* Used to implement API equality.
|
||||
*
|
||||
* If `includePrivate` is true, `private` and `private[this]` members are included in the comparison. Otherwise, those members are excluded.
|
||||
*/
|
||||
class SameAPI(includePrivate: Boolean, includeParamNames: Boolean) {
|
||||
import SameAPI._
|
||||
|
||||
private val pending = new mutable.HashSet[AnyRef]
|
||||
private[this] val debugEnabled = java.lang.Boolean.getBoolean("xsbt.api.debug")
|
||||
def debug(flag: Boolean, msg: => String): Boolean =
|
||||
{
|
||||
if(debugEnabled && !flag) println(msg)
|
||||
flag
|
||||
}
|
||||
private val pending = new mutable.HashSet[AnyRef]
|
||||
private[this] val debugEnabled = java.lang.Boolean.getBoolean("xsbt.api.debug")
|
||||
def debug(flag: Boolean, msg: => String): Boolean =
|
||||
{
|
||||
if (debugEnabled && !flag) println(msg)
|
||||
flag
|
||||
}
|
||||
|
||||
/** Returns true if source `a` has the same API as source `b`.*/
|
||||
def check(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
{
|
||||
samePackages(a, b) &&
|
||||
debug(sameDefinitions(a, b), "Definitions differed")
|
||||
}
|
||||
/** Returns true if source `a` has the same API as source `b`.*/
|
||||
def check(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
{
|
||||
samePackages(a, b) &&
|
||||
debug(sameDefinitions(a, b), "Definitions differed")
|
||||
}
|
||||
|
||||
def samePackages(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
sameStrings(packages(a), packages(b))
|
||||
def packages(s: SourceAPI): Set[String] =
|
||||
Set() ++ s.packages.map(_.name)
|
||||
def samePackages(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
sameStrings(packages(a), packages(b))
|
||||
def packages(s: SourceAPI): Set[String] =
|
||||
Set() ++ s.packages.map(_.name)
|
||||
|
||||
def sameDefinitions(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
sameDefinitions(a.definitions, b.definitions, true)
|
||||
def sameDefinitions(a: Seq[Definition], b: Seq[Definition], topLevel: Boolean): Boolean =
|
||||
{
|
||||
val (avalues, atypes) = separateDefinitions(filterDefinitions(a, topLevel, includePrivate))
|
||||
val (bvalues, btypes) = separateDefinitions(filterDefinitions(b, topLevel, includePrivate))
|
||||
debug(sameDefinitions(byName(avalues), byName(bvalues)), "Value definitions differed") &&
|
||||
debug(sameDefinitions(byName(atypes), byName(btypes)), "Type definitions differed")
|
||||
}
|
||||
def sameDefinitions(a: scala.collection.Map[String, List[Definition]], b: scala.collection.Map[String, List[Definition]]): Boolean =
|
||||
debug(sameStrings(a.keySet, b.keySet), "\tDefinition strings differed (a: " + (a.keySet -- b.keySet) + ", b: " + (b.keySet -- a.keySet) + ")") &&
|
||||
zippedEntries(a,b).forall(tupled(sameNamedDefinitions))
|
||||
def sameDefinitions(a: SourceAPI, b: SourceAPI): Boolean =
|
||||
sameDefinitions(a.definitions, b.definitions, true)
|
||||
def sameDefinitions(a: Seq[Definition], b: Seq[Definition], topLevel: Boolean): Boolean =
|
||||
{
|
||||
val (avalues, atypes) = separateDefinitions(filterDefinitions(a, topLevel, includePrivate))
|
||||
val (bvalues, btypes) = separateDefinitions(filterDefinitions(b, topLevel, includePrivate))
|
||||
debug(sameDefinitions(byName(avalues), byName(bvalues)), "Value definitions differed") &&
|
||||
debug(sameDefinitions(byName(atypes), byName(btypes)), "Type definitions differed")
|
||||
}
|
||||
def sameDefinitions(a: scala.collection.Map[String, List[Definition]], b: scala.collection.Map[String, List[Definition]]): Boolean =
|
||||
debug(sameStrings(a.keySet, b.keySet), "\tDefinition strings differed (a: " + (a.keySet -- b.keySet) + ", b: " + (b.keySet -- a.keySet) + ")") &&
|
||||
zippedEntries(a, b).forall(tupled(sameNamedDefinitions))
|
||||
|
||||
/** Checks that the definitions in `a` are the same as those in `b`, ignoring order.
|
||||
* Each list is assumed to have already been checked to have the same names (by `sameDefinitions`, for example).*/
|
||||
def sameNamedDefinitions(a: List[Definition], b: List[Definition]): Boolean =
|
||||
{
|
||||
def sameDefs(a: List[Definition], b: List[Definition]): Boolean =
|
||||
{
|
||||
a match
|
||||
{
|
||||
case adef :: atail =>
|
||||
def sameDef(seen: List[Definition], remaining: List[Definition]): Boolean =
|
||||
remaining match
|
||||
{
|
||||
case Nil => debug(false, "Definition different in new API: \n" + adef.name )
|
||||
case bdef :: btail =>
|
||||
val eq = sameDefinitionContent(adef, bdef)
|
||||
if(eq) sameDefs(atail, seen ::: btail) else sameDef(bdef :: seen, btail)
|
||||
}
|
||||
sameDef(Nil, b)
|
||||
case Nil => true
|
||||
}
|
||||
}
|
||||
debug((a.length == b.length), "\t\tLength differed for " + a.headOption.map(_.name).getOrElse("empty")) && sameDefs(a, b)
|
||||
}
|
||||
/**
|
||||
* Checks that the definitions in `a` are the same as those in `b`, ignoring order.
|
||||
* Each list is assumed to have already been checked to have the same names (by `sameDefinitions`, for example).
|
||||
*/
|
||||
def sameNamedDefinitions(a: List[Definition], b: List[Definition]): Boolean =
|
||||
{
|
||||
def sameDefs(a: List[Definition], b: List[Definition]): Boolean =
|
||||
{
|
||||
a match {
|
||||
case adef :: atail =>
|
||||
def sameDef(seen: List[Definition], remaining: List[Definition]): Boolean =
|
||||
remaining match {
|
||||
case Nil => debug(false, "Definition different in new API: \n" + adef.name)
|
||||
case bdef :: btail =>
|
||||
val eq = sameDefinitionContent(adef, bdef)
|
||||
if (eq) sameDefs(atail, seen ::: btail) else sameDef(bdef :: seen, btail)
|
||||
}
|
||||
sameDef(Nil, b)
|
||||
case Nil => true
|
||||
}
|
||||
}
|
||||
debug((a.length == b.length), "\t\tLength differed for " + a.headOption.map(_.name).getOrElse("empty")) && sameDefs(a, b)
|
||||
}
|
||||
|
||||
/** Checks that the two definitions are the same, other than their name.*/
|
||||
def sameDefinitionContent(a: Definition, b: Definition): Boolean =
|
||||
samePending(a,b)(sameDefinitionContentDirect)
|
||||
def sameDefinitionContentDirect(a: Definition, b: Definition): Boolean =
|
||||
{
|
||||
//a.name == b.name &&
|
||||
debug(sameAccess(a.access, b.access), "Access differed") &&
|
||||
debug(sameModifiers(a.modifiers, b.modifiers), "Modifiers differed") &&
|
||||
debug(sameAnnotations(a.annotations, b.annotations), "Annotations differed") &&
|
||||
debug(sameDefinitionSpecificAPI(a, b), "Definition-specific differed")
|
||||
}
|
||||
/** Checks that the two definitions are the same, other than their name.*/
|
||||
def sameDefinitionContent(a: Definition, b: Definition): Boolean =
|
||||
samePending(a, b)(sameDefinitionContentDirect)
|
||||
def sameDefinitionContentDirect(a: Definition, b: Definition): Boolean =
|
||||
{
|
||||
//a.name == b.name &&
|
||||
debug(sameAccess(a.access, b.access), "Access differed") &&
|
||||
debug(sameModifiers(a.modifiers, b.modifiers), "Modifiers differed") &&
|
||||
debug(sameAnnotations(a.annotations, b.annotations), "Annotations differed") &&
|
||||
debug(sameDefinitionSpecificAPI(a, b), "Definition-specific differed")
|
||||
}
|
||||
|
||||
def sameAccess(a: Access, b: Access): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (_: Public, _: Public) => true
|
||||
case (qa: Protected, qb: Protected) => sameQualifier(qa, qb)
|
||||
case (qa: Private, qb: Private) => sameQualifier(qa, qb)
|
||||
case _ => debug(false, "Different access categories")
|
||||
}
|
||||
def sameQualifier(a: Qualified, b: Qualified): Boolean =
|
||||
sameQualifier(a.qualifier, b.qualifier)
|
||||
def sameQualifier(a: Qualifier, b: Qualifier): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (_: Unqualified, _: Unqualified) => true
|
||||
case (_: ThisQualifier, _: ThisQualifier) => true
|
||||
case (ia: IdQualifier, ib: IdQualifier) => debug(ia.value == ib.value, "Different qualifiers")
|
||||
case _ => debug(false, "Different qualifier categories: " + a.getClass.getName + " -- " +b.getClass.getName)
|
||||
}
|
||||
def sameAccess(a: Access, b: Access): Boolean =
|
||||
(a, b) match {
|
||||
case (_: Public, _: Public) => true
|
||||
case (qa: Protected, qb: Protected) => sameQualifier(qa, qb)
|
||||
case (qa: Private, qb: Private) => sameQualifier(qa, qb)
|
||||
case _ => debug(false, "Different access categories")
|
||||
}
|
||||
def sameQualifier(a: Qualified, b: Qualified): Boolean =
|
||||
sameQualifier(a.qualifier, b.qualifier)
|
||||
def sameQualifier(a: Qualifier, b: Qualifier): Boolean =
|
||||
(a, b) match {
|
||||
case (_: Unqualified, _: Unqualified) => true
|
||||
case (_: ThisQualifier, _: ThisQualifier) => true
|
||||
case (ia: IdQualifier, ib: IdQualifier) => debug(ia.value == ib.value, "Different qualifiers")
|
||||
case _ => debug(false, "Different qualifier categories: " + a.getClass.getName + " -- " + b.getClass.getName)
|
||||
}
|
||||
|
||||
def sameModifiers(a: Modifiers, b: Modifiers): Boolean =
|
||||
bitSet(a) == bitSet(b)
|
||||
def sameModifiers(a: Modifiers, b: Modifiers): Boolean =
|
||||
bitSet(a) == bitSet(b)
|
||||
|
||||
def bitSet(m: Modifiers): immutable.BitSet =
|
||||
{
|
||||
import m._
|
||||
val bs = new mutable.BitSet
|
||||
setIf(bs, isAbstract, 0)
|
||||
setIf(bs, isOverride, 1)
|
||||
setIf(bs, isFinal, 2)
|
||||
setIf(bs, isSealed, 3)
|
||||
setIf(bs, isImplicit, 4)
|
||||
setIf(bs, isLazy, 5)
|
||||
setIf(bs, isMacro, 6)
|
||||
bs.toImmutable
|
||||
}
|
||||
def setIf(bs: mutable.BitSet, flag: Boolean, i: Int): Unit =
|
||||
if(flag) bs += i
|
||||
def bitSet(m: Modifiers): immutable.BitSet =
|
||||
{
|
||||
import m._
|
||||
val bs = new mutable.BitSet
|
||||
setIf(bs, isAbstract, 0)
|
||||
setIf(bs, isOverride, 1)
|
||||
setIf(bs, isFinal, 2)
|
||||
setIf(bs, isSealed, 3)
|
||||
setIf(bs, isImplicit, 4)
|
||||
setIf(bs, isLazy, 5)
|
||||
setIf(bs, isMacro, 6)
|
||||
bs.toImmutable
|
||||
}
|
||||
def setIf(bs: mutable.BitSet, flag: Boolean, i: Int): Unit =
|
||||
if (flag) bs += i
|
||||
|
||||
def sameAnnotations(a: Seq[Annotation], b: Seq[Annotation]): Boolean =
|
||||
sameSeq(a, b)(sameAnnotation)
|
||||
def sameAnnotation(a: Annotation, b: Annotation): Boolean =
|
||||
debug(sameType(a.base, b.base), "Annotation base type differed") &&
|
||||
debug(sameAnnotationArguments(a.arguments, b.arguments), "Annotation arguments differed (" + a + ") and (" + b + ")")
|
||||
def sameAnnotationArguments(a: Seq[AnnotationArgument], b: Seq[AnnotationArgument]): Boolean =
|
||||
argumentMap(a) == argumentMap(b)
|
||||
def argumentMap(a: Seq[AnnotationArgument]): Map[String,String] =
|
||||
Map() ++ a.map(arg => (arg.name, arg.value))
|
||||
def sameAnnotations(a: Seq[Annotation], b: Seq[Annotation]): Boolean =
|
||||
sameSeq(a, b)(sameAnnotation)
|
||||
def sameAnnotation(a: Annotation, b: Annotation): Boolean =
|
||||
debug(sameType(a.base, b.base), "Annotation base type differed") &&
|
||||
debug(sameAnnotationArguments(a.arguments, b.arguments), "Annotation arguments differed (" + a + ") and (" + b + ")")
|
||||
def sameAnnotationArguments(a: Seq[AnnotationArgument], b: Seq[AnnotationArgument]): Boolean =
|
||||
argumentMap(a) == argumentMap(b)
|
||||
def argumentMap(a: Seq[AnnotationArgument]): Map[String, String] =
|
||||
Map() ++ a.map(arg => (arg.name, arg.value))
|
||||
|
||||
def sameDefinitionSpecificAPI(a: Definition, b: Definition): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (fa: FieldLike, fb: FieldLike) => sameFieldSpecificAPI(fa, fb)
|
||||
case (pa: ParameterizedDefinition, pb: ParameterizedDefinition) => sameParameterizedDefinition(pa, pb)
|
||||
case _ => false
|
||||
}
|
||||
def sameDefinitionSpecificAPI(a: Definition, b: Definition): Boolean =
|
||||
(a, b) match {
|
||||
case (fa: FieldLike, fb: FieldLike) => sameFieldSpecificAPI(fa, fb)
|
||||
case (pa: ParameterizedDefinition, pb: ParameterizedDefinition) => sameParameterizedDefinition(pa, pb)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
def sameParameterizedDefinition(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
|
||||
debug(sameTypeParameters(a.typeParameters, b.typeParameters), "Different type parameters for " + a.name) &&
|
||||
sameParameterizedSpecificAPI(a, b)
|
||||
def sameParameterizedDefinition(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
|
||||
debug(sameTypeParameters(a.typeParameters, b.typeParameters), "Different type parameters for " + a.name) &&
|
||||
sameParameterizedSpecificAPI(a, b)
|
||||
|
||||
def sameParameterizedSpecificAPI(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (da: Def, db: Def) => sameDefSpecificAPI(da, db)
|
||||
case (ca: ClassLike, cb: ClassLike) => sameClassLikeSpecificAPI(ca, cb)
|
||||
case (ta: TypeAlias, tb: TypeAlias) => sameAliasSpecificAPI(ta, tb)
|
||||
case (ta: TypeDeclaration, tb: TypeDeclaration) => sameDeclarationSpecificAPI(ta, tb)
|
||||
case _ => false
|
||||
}
|
||||
def sameParameterizedSpecificAPI(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean =
|
||||
(a, b) match {
|
||||
case (da: Def, db: Def) => sameDefSpecificAPI(da, db)
|
||||
case (ca: ClassLike, cb: ClassLike) => sameClassLikeSpecificAPI(ca, cb)
|
||||
case (ta: TypeAlias, tb: TypeAlias) => sameAliasSpecificAPI(ta, tb)
|
||||
case (ta: TypeDeclaration, tb: TypeDeclaration) => sameDeclarationSpecificAPI(ta, tb)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
def sameDefSpecificAPI(a: Def, b: Def): Boolean =
|
||||
debug(sameValueParameters(a.valueParameters, b.valueParameters), "Different def value parameters for " + a.name) &&
|
||||
debug(sameType(a.returnType, b.returnType), "Different def return type for " + a.name)
|
||||
def sameAliasSpecificAPI(a: TypeAlias, b: TypeAlias): Boolean =
|
||||
debug(sameType(a.tpe, b.tpe), "Different alias type for " + a.name)
|
||||
def sameDeclarationSpecificAPI(a: TypeDeclaration, b: TypeDeclaration): Boolean =
|
||||
debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound for declaration " + a.name) &&
|
||||
debug(sameType(a.upperBound, b.upperBound), "Different upper bound for declaration " + a.name)
|
||||
def sameFieldSpecificAPI(a: FieldLike, b: FieldLike): Boolean =
|
||||
debug(sameFieldCategory(a, b), "Different field categories (" + a.name + "=" + a.getClass.getName + " -- " +a.name + "=" + a.getClass.getName + ")")&&
|
||||
debug(sameType(a.tpe, b.tpe), "Different field type for " + a.name)
|
||||
def sameDefSpecificAPI(a: Def, b: Def): Boolean =
|
||||
debug(sameValueParameters(a.valueParameters, b.valueParameters), "Different def value parameters for " + a.name) &&
|
||||
debug(sameType(a.returnType, b.returnType), "Different def return type for " + a.name)
|
||||
def sameAliasSpecificAPI(a: TypeAlias, b: TypeAlias): Boolean =
|
||||
debug(sameType(a.tpe, b.tpe), "Different alias type for " + a.name)
|
||||
def sameDeclarationSpecificAPI(a: TypeDeclaration, b: TypeDeclaration): Boolean =
|
||||
debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound for declaration " + a.name) &&
|
||||
debug(sameType(a.upperBound, b.upperBound), "Different upper bound for declaration " + a.name)
|
||||
def sameFieldSpecificAPI(a: FieldLike, b: FieldLike): Boolean =
|
||||
debug(sameFieldCategory(a, b), "Different field categories (" + a.name + "=" + a.getClass.getName + " -- " + a.name + "=" + a.getClass.getName + ")") &&
|
||||
debug(sameType(a.tpe, b.tpe), "Different field type for " + a.name)
|
||||
|
||||
def sameFieldCategory(a: FieldLike, b: FieldLike): Boolean =
|
||||
(a,b) match
|
||||
{
|
||||
case (_: Val, _: Val) => true
|
||||
case (_: Var, _: Var) => true
|
||||
case _=> false
|
||||
}
|
||||
def sameFieldCategory(a: FieldLike, b: FieldLike): Boolean =
|
||||
(a, b) match {
|
||||
case (_: Val, _: Val) => true
|
||||
case (_: Var, _: Var) => true
|
||||
case _ => false
|
||||
}
|
||||
|
||||
def sameClassLikeSpecificAPI(a: ClassLike, b: ClassLike): Boolean =
|
||||
sameDefinitionType(a.definitionType, b.definitionType) &&
|
||||
sameType(a.selfType, b.selfType) &&
|
||||
sameStructure(a.structure, b.structure)
|
||||
def sameClassLikeSpecificAPI(a: ClassLike, b: ClassLike): Boolean =
|
||||
sameDefinitionType(a.definitionType, b.definitionType) &&
|
||||
sameType(a.selfType, b.selfType) &&
|
||||
sameStructure(a.structure, b.structure)
|
||||
|
||||
def sameValueParameters(a: Seq[ParameterList], b: Seq[ParameterList]): Boolean =
|
||||
sameSeq(a, b)(sameParameterList)
|
||||
def sameValueParameters(a: Seq[ParameterList], b: Seq[ParameterList]): Boolean =
|
||||
sameSeq(a, b)(sameParameterList)
|
||||
|
||||
def sameParameterList(a: ParameterList, b: ParameterList): Boolean =
|
||||
(a.isImplicit == b.isImplicit) &&
|
||||
sameParameters(a.parameters, b.parameters)
|
||||
def sameParameters(a: Seq[MethodParameter], b: Seq[MethodParameter]): Boolean =
|
||||
sameSeq(a, b)(sameMethodParameter)
|
||||
def sameMethodParameter(a: MethodParameter, b: MethodParameter): Boolean =
|
||||
(!includeParamNames || a.name == b.name) &&
|
||||
sameType(a.tpe, b.tpe) &&
|
||||
(a.hasDefault == b.hasDefault) &&
|
||||
sameParameterModifier(a.modifier, b.modifier)
|
||||
def sameParameterModifier(a: ParameterModifier, b: ParameterModifier) =
|
||||
a == b
|
||||
def sameDefinitionType(a: DefinitionType, b: DefinitionType): Boolean =
|
||||
a == b
|
||||
def sameVariance(a: Variance, b: Variance): Boolean =
|
||||
a == b
|
||||
def sameParameterList(a: ParameterList, b: ParameterList): Boolean =
|
||||
(a.isImplicit == b.isImplicit) &&
|
||||
sameParameters(a.parameters, b.parameters)
|
||||
def sameParameters(a: Seq[MethodParameter], b: Seq[MethodParameter]): Boolean =
|
||||
sameSeq(a, b)(sameMethodParameter)
|
||||
def sameMethodParameter(a: MethodParameter, b: MethodParameter): Boolean =
|
||||
(!includeParamNames || a.name == b.name) &&
|
||||
sameType(a.tpe, b.tpe) &&
|
||||
(a.hasDefault == b.hasDefault) &&
|
||||
sameParameterModifier(a.modifier, b.modifier)
|
||||
def sameParameterModifier(a: ParameterModifier, b: ParameterModifier) =
|
||||
a == b
|
||||
def sameDefinitionType(a: DefinitionType, b: DefinitionType): Boolean =
|
||||
a == b
|
||||
def sameVariance(a: Variance, b: Variance): Boolean =
|
||||
a == b
|
||||
|
||||
def sameTypeParameters(a: Seq[TypeParameter], b: Seq[TypeParameter]): Boolean =
|
||||
debug(sameSeq(a, b)(sameTypeParameter), "Different type parameters")
|
||||
def sameTypeParameter(a: TypeParameter, b: TypeParameter): Boolean =
|
||||
{
|
||||
sameTypeParameters(a.typeParameters, b.typeParameters) &&
|
||||
debug(sameAnnotations(a.annotations, b.annotations), "Different type parameter annotations") &&
|
||||
debug(sameVariance(a.variance, b.variance), "Different variance") &&
|
||||
debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound") &&
|
||||
debug(sameType(a.upperBound, b.upperBound), "Different upper bound") &&
|
||||
sameTags(a.id, b.id)
|
||||
}
|
||||
def sameTags(a: String, b: String): Boolean =
|
||||
debug(a == b, "Different type parameter bindings: " + a + ", " + b)
|
||||
def sameTypeParameters(a: Seq[TypeParameter], b: Seq[TypeParameter]): Boolean =
|
||||
debug(sameSeq(a, b)(sameTypeParameter), "Different type parameters")
|
||||
def sameTypeParameter(a: TypeParameter, b: TypeParameter): Boolean =
|
||||
{
|
||||
sameTypeParameters(a.typeParameters, b.typeParameters) &&
|
||||
debug(sameAnnotations(a.annotations, b.annotations), "Different type parameter annotations") &&
|
||||
debug(sameVariance(a.variance, b.variance), "Different variance") &&
|
||||
debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound") &&
|
||||
debug(sameType(a.upperBound, b.upperBound), "Different upper bound") &&
|
||||
sameTags(a.id, b.id)
|
||||
}
|
||||
def sameTags(a: String, b: String): Boolean =
|
||||
debug(a == b, "Different type parameter bindings: " + a + ", " + b)
|
||||
|
||||
def sameType(a: Type, b: Type): Boolean =
|
||||
samePending(a,b)(sameTypeDirect)
|
||||
def sameTypeDirect(a: Type, b: Type): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (sa: SimpleType, sb: SimpleType) => debug(sameSimpleTypeDirect(sa, sb), "Different simple types: " + DefaultShowAPI(sa) + " and " + DefaultShowAPI(sb))
|
||||
case (ca: Constant, cb: Constant) => debug(sameConstantType(ca, cb), "Different constant types: " + DefaultShowAPI(ca) + " and " + DefaultShowAPI(cb))
|
||||
case (aa: Annotated, ab: Annotated) => debug(sameAnnotatedType(aa, ab), "Different annotated types")
|
||||
case (sa: Structure, sb: Structure) => debug(sameStructureDirect(sa, sb), "Different structure type")
|
||||
case (ea: Existential, eb: Existential) => debug(sameExistentialType(ea, eb), "Different existential type")
|
||||
case (pa: Polymorphic, pb: Polymorphic) => debug(samePolymorphicType(pa, pb), "Different polymorphic type")
|
||||
case _ => differentCategory("type", a, b)
|
||||
}
|
||||
def sameType(a: Type, b: Type): Boolean =
|
||||
samePending(a, b)(sameTypeDirect)
|
||||
def sameTypeDirect(a: Type, b: Type): Boolean =
|
||||
(a, b) match {
|
||||
case (sa: SimpleType, sb: SimpleType) => debug(sameSimpleTypeDirect(sa, sb), "Different simple types: " + DefaultShowAPI(sa) + " and " + DefaultShowAPI(sb))
|
||||
case (ca: Constant, cb: Constant) => debug(sameConstantType(ca, cb), "Different constant types: " + DefaultShowAPI(ca) + " and " + DefaultShowAPI(cb))
|
||||
case (aa: Annotated, ab: Annotated) => debug(sameAnnotatedType(aa, ab), "Different annotated types")
|
||||
case (sa: Structure, sb: Structure) => debug(sameStructureDirect(sa, sb), "Different structure type")
|
||||
case (ea: Existential, eb: Existential) => debug(sameExistentialType(ea, eb), "Different existential type")
|
||||
case (pa: Polymorphic, pb: Polymorphic) => debug(samePolymorphicType(pa, pb), "Different polymorphic type")
|
||||
case _ => differentCategory("type", a, b)
|
||||
}
|
||||
|
||||
def sameConstantType(ca: Constant, cb: Constant): Boolean =
|
||||
sameType(ca.baseType, cb.baseType) &&
|
||||
ca.value == cb.value
|
||||
def sameExistentialType(a: Existential, b: Existential): Boolean =
|
||||
sameTypeParameters(a.clause, b.clause) &&
|
||||
sameType(a.baseType, b.baseType)
|
||||
def samePolymorphicType(a: Polymorphic, b: Polymorphic): Boolean =
|
||||
sameTypeParameters(a.parameters, b.parameters) &&
|
||||
sameType(a.baseType, b.baseType)
|
||||
def sameAnnotatedType(a: Annotated, b: Annotated): Boolean =
|
||||
sameType(a.baseType, b.baseType) &&
|
||||
sameAnnotations(a.annotations, b.annotations)
|
||||
def sameStructure(a: Structure, b: Structure): Boolean =
|
||||
samePending(a,b)(sameStructureDirect)
|
||||
def sameConstantType(ca: Constant, cb: Constant): Boolean =
|
||||
sameType(ca.baseType, cb.baseType) &&
|
||||
ca.value == cb.value
|
||||
def sameExistentialType(a: Existential, b: Existential): Boolean =
|
||||
sameTypeParameters(a.clause, b.clause) &&
|
||||
sameType(a.baseType, b.baseType)
|
||||
def samePolymorphicType(a: Polymorphic, b: Polymorphic): Boolean =
|
||||
sameTypeParameters(a.parameters, b.parameters) &&
|
||||
sameType(a.baseType, b.baseType)
|
||||
def sameAnnotatedType(a: Annotated, b: Annotated): Boolean =
|
||||
sameType(a.baseType, b.baseType) &&
|
||||
sameAnnotations(a.annotations, b.annotations)
|
||||
def sameStructure(a: Structure, b: Structure): Boolean =
|
||||
samePending(a, b)(sameStructureDirect)
|
||||
|
||||
private[this] def samePending[T](a: T, b: T)(f: (T,T) => Boolean): Boolean =
|
||||
if(pending add ((a,b)) ) f(a,b) else true
|
||||
private[this] def samePending[T](a: T, b: T)(f: (T, T) => Boolean): Boolean =
|
||||
if (pending add ((a, b))) f(a, b) else true
|
||||
|
||||
def sameStructureDirect(a: Structure, b: Structure): Boolean =
|
||||
{
|
||||
sameSeq(a.parents, b.parents)(sameType) &&
|
||||
sameMembers(a.declared, b.declared) &&
|
||||
sameMembers(a.inherited, b.inherited)
|
||||
}
|
||||
def sameStructureDirect(a: Structure, b: Structure): Boolean =
|
||||
{
|
||||
sameSeq(a.parents, b.parents)(sameType) &&
|
||||
sameMembers(a.declared, b.declared) &&
|
||||
sameMembers(a.inherited, b.inherited)
|
||||
}
|
||||
|
||||
def sameMembers(a: Seq[Definition], b: Seq[Definition]): Boolean =
|
||||
sameDefinitions(a, b, false)
|
||||
def sameMembers(a: Seq[Definition], b: Seq[Definition]): Boolean =
|
||||
sameDefinitions(a, b, false)
|
||||
|
||||
def sameSimpleType(a: SimpleType, b: SimpleType): Boolean =
|
||||
samePending(a,b)(sameSimpleTypeDirect)
|
||||
def sameSimpleTypeDirect(a: SimpleType, b: SimpleType): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (pa: Projection, pb: Projection) => debug(sameProjection(pa, pb), "Different projection")
|
||||
case (pa: ParameterRef, pb: ParameterRef) => debug(sameParameterRef(pa, pb), "Different parameter ref")
|
||||
case (sa: Singleton, sb: Singleton) => debug(sameSingleton(sa, sb), "Different singleton")
|
||||
case (_: EmptyType, _: EmptyType) => true
|
||||
case (pa: Parameterized, pb: Parameterized) => debug(sameParameterized(pa, pb), "Different parameterized")
|
||||
case _ => differentCategory("simple type", a, b)
|
||||
}
|
||||
def differentCategory(label: String, a: AnyRef, b: AnyRef): Boolean =
|
||||
debug(false, "Different category of " + label + " (" + a.getClass.getName + " and " + b.getClass.getName + ") for (" + a + " and " + b + ")")
|
||||
def sameSimpleType(a: SimpleType, b: SimpleType): Boolean =
|
||||
samePending(a, b)(sameSimpleTypeDirect)
|
||||
def sameSimpleTypeDirect(a: SimpleType, b: SimpleType): Boolean =
|
||||
(a, b) match {
|
||||
case (pa: Projection, pb: Projection) => debug(sameProjection(pa, pb), "Different projection")
|
||||
case (pa: ParameterRef, pb: ParameterRef) => debug(sameParameterRef(pa, pb), "Different parameter ref")
|
||||
case (sa: Singleton, sb: Singleton) => debug(sameSingleton(sa, sb), "Different singleton")
|
||||
case (_: EmptyType, _: EmptyType) => true
|
||||
case (pa: Parameterized, pb: Parameterized) => debug(sameParameterized(pa, pb), "Different parameterized")
|
||||
case _ => differentCategory("simple type", a, b)
|
||||
}
|
||||
def differentCategory(label: String, a: AnyRef, b: AnyRef): Boolean =
|
||||
debug(false, "Different category of " + label + " (" + a.getClass.getName + " and " + b.getClass.getName + ") for (" + a + " and " + b + ")")
|
||||
|
||||
def sameParameterized(a: Parameterized, b: Parameterized): Boolean =
|
||||
sameSimpleType(a.baseType, b.baseType) &&
|
||||
sameSeq(a.typeArguments, b.typeArguments)(sameType)
|
||||
def sameParameterRef(a: ParameterRef, b: ParameterRef): Boolean = sameTags(a.id, b.id)
|
||||
def sameSingleton(a: Singleton, b: Singleton): Boolean =
|
||||
samePath(a.path, b.path)
|
||||
def sameProjection(a: Projection, b: Projection): Boolean =
|
||||
sameSimpleType(a.prefix, b.prefix) &&
|
||||
(a.id == b.id)
|
||||
def sameParameterized(a: Parameterized, b: Parameterized): Boolean =
|
||||
sameSimpleType(a.baseType, b.baseType) &&
|
||||
sameSeq(a.typeArguments, b.typeArguments)(sameType)
|
||||
def sameParameterRef(a: ParameterRef, b: ParameterRef): Boolean = sameTags(a.id, b.id)
|
||||
def sameSingleton(a: Singleton, b: Singleton): Boolean =
|
||||
samePath(a.path, b.path)
|
||||
def sameProjection(a: Projection, b: Projection): Boolean =
|
||||
sameSimpleType(a.prefix, b.prefix) &&
|
||||
(a.id == b.id)
|
||||
|
||||
def samePath(a: Path, b: Path): Boolean =
|
||||
samePathComponents(a.components, b.components)
|
||||
def samePathComponents(a: Seq[PathComponent], b: Seq[PathComponent]): Boolean =
|
||||
sameSeq(a, b)(samePathComponent)
|
||||
def samePathComponent(a: PathComponent, b: PathComponent): Boolean =
|
||||
(a, b) match
|
||||
{
|
||||
case (_: This, _: This) => true
|
||||
case (sa: Super, sb: Super) => samePathSuper(sa, sb)
|
||||
case (ia: Id, ib: Id) => samePathId(ia, ib)
|
||||
case _ => false
|
||||
}
|
||||
def samePathSuper(a: Super, b: Super): Boolean =
|
||||
samePath(a.qualifier, b.qualifier)
|
||||
def samePathId(a: Id, b: Id): Boolean =
|
||||
a.id == b.id
|
||||
def samePath(a: Path, b: Path): Boolean =
|
||||
samePathComponents(a.components, b.components)
|
||||
def samePathComponents(a: Seq[PathComponent], b: Seq[PathComponent]): Boolean =
|
||||
sameSeq(a, b)(samePathComponent)
|
||||
def samePathComponent(a: PathComponent, b: PathComponent): Boolean =
|
||||
(a, b) match {
|
||||
case (_: This, _: This) => true
|
||||
case (sa: Super, sb: Super) => samePathSuper(sa, sb)
|
||||
case (ia: Id, ib: Id) => samePathId(ia, ib)
|
||||
case _ => false
|
||||
}
|
||||
def samePathSuper(a: Super, b: Super): Boolean =
|
||||
samePath(a.qualifier, b.qualifier)
|
||||
def samePathId(a: Id, b: Id): Boolean =
|
||||
a.id == b.id
|
||||
|
||||
// precondition: a.keySet == b.keySet
|
||||
protected def zippedEntries[A,B](a: scala.collection.Map[A,B], b: scala.collection.Map[A,B]): Iterable[(B,B)] =
|
||||
for( (key, avalue) <- a) yield (avalue, b(key))
|
||||
// precondition: a.keySet == b.keySet
|
||||
protected def zippedEntries[A, B](a: scala.collection.Map[A, B], b: scala.collection.Map[A, B]): Iterable[(B, B)] =
|
||||
for ((key, avalue) <- a) yield (avalue, b(key))
|
||||
|
||||
def sameStrings(a: scala.collection.Set[String], b: scala.collection.Set[String]): Boolean =
|
||||
a == b
|
||||
final def sameSeq[T](a: Seq[T], b: Seq[T])(eq: (T,T) => Boolean): Boolean =
|
||||
(a.length == b.length) && (a zip b).forall(tupled(eq))
|
||||
def sameStrings(a: scala.collection.Set[String], b: scala.collection.Set[String]): Boolean =
|
||||
a == b
|
||||
final def sameSeq[T](a: Seq[T], b: Seq[T])(eq: (T, T) => Boolean): Boolean =
|
||||
(a.length == b.length) && (a zip b).forall(tupled(eq))
|
||||
}
|
||||
|
|
@ -3,314 +3,286 @@
|
|||
*/
|
||||
package xsbt.api
|
||||
|
||||
import xsbti.api._
|
||||
import xsbti.api._
|
||||
|
||||
trait Show[A]
|
||||
{
|
||||
def show(a: A): String
|
||||
trait Show[A] {
|
||||
def show(a: A): String
|
||||
}
|
||||
|
||||
final class ShowLazy[A](delegate: => Show[A]) extends Show[A]
|
||||
{
|
||||
private lazy val s = delegate
|
||||
def show(a: A) = s.show(a)
|
||||
final class ShowLazy[A](delegate: => Show[A]) extends Show[A] {
|
||||
private lazy val s = delegate
|
||||
def show(a: A) = s.show(a)
|
||||
}
|
||||
|
||||
import ShowAPI._
|
||||
import ShowAPI._
|
||||
|
||||
object ShowAPI
|
||||
{
|
||||
def Show[T](implicit s: Show[T]): Show[T] = s
|
||||
def show[T](t: T)(implicit s: Show[T]): String = s.show(t)
|
||||
|
||||
def bounds(lower: Type, upper: Type)(implicit t: Show[Type]): String =
|
||||
">: " + t.show(lower) + " <: " + t.show(upper)
|
||||
object ShowAPI {
|
||||
def Show[T](implicit s: Show[T]): Show[T] = s
|
||||
def show[T](t: T)(implicit s: Show[T]): String = s.show(t)
|
||||
|
||||
import ParameterModifier._
|
||||
def parameterModifier(base: String, pm: ParameterModifier): String =
|
||||
pm match
|
||||
{
|
||||
case Plain => base
|
||||
case Repeated => base + "*"
|
||||
case ByName => "=> " + base
|
||||
}
|
||||
|
||||
def concat[A](list: Seq[A], as: Show[A], sep: String): String = mapSeq(list, as).mkString(sep)
|
||||
def commas[A](list: Seq[A], as: Show[A]): String = concat(list, as, ", ")
|
||||
def spaced[A](list: Seq[A], as: Show[A]): String = concat(list, as, " ")
|
||||
def lines[A](list: Seq[A], as: Show[A]): String = mapSeq(list, as).mkString("\n")
|
||||
def mapSeq[A](list: Seq[A], as: Show[A]): Seq[String] = list.map(as.show)
|
||||
def bounds(lower: Type, upper: Type)(implicit t: Show[Type]): String =
|
||||
">: " + t.show(lower) + " <: " + t.show(upper)
|
||||
|
||||
import ParameterModifier._
|
||||
def parameterModifier(base: String, pm: ParameterModifier): String =
|
||||
pm match {
|
||||
case Plain => base
|
||||
case Repeated => base + "*"
|
||||
case ByName => "=> " + base
|
||||
}
|
||||
|
||||
def concat[A](list: Seq[A], as: Show[A], sep: String): String = mapSeq(list, as).mkString(sep)
|
||||
def commas[A](list: Seq[A], as: Show[A]): String = concat(list, as, ", ")
|
||||
def spaced[A](list: Seq[A], as: Show[A]): String = concat(list, as, " ")
|
||||
def lines[A](list: Seq[A], as: Show[A]): String = mapSeq(list, as).mkString("\n")
|
||||
def mapSeq[A](list: Seq[A], as: Show[A]): Seq[String] = list.map(as.show)
|
||||
}
|
||||
|
||||
trait ShowBase
|
||||
{
|
||||
implicit def showAnnotation(implicit as: Show[AnnotationArgument], t: Show[Type]): Show[Annotation] =
|
||||
new Show[Annotation] { def show(a: Annotation) = "@" + t.show(a.base) + (if(a.arguments.isEmpty) "" else "(" + commas(a.arguments, as) + ")") }
|
||||
|
||||
implicit def showAnnotationArgument: Show[AnnotationArgument] =
|
||||
new Show[AnnotationArgument] { def show(a: AnnotationArgument) = a.name + " = " + a.value }
|
||||
|
||||
import Variance._
|
||||
implicit def showVariance: Show[Variance] =
|
||||
new Show[Variance] { def show(v: Variance) = v match { case Invariant => ""; case Covariant => "+"; case Contravariant => "-" } }
|
||||
|
||||
implicit def showSource(implicit ps: Show[Package], ds: Show[Definition]): Show[SourceAPI] =
|
||||
new Show[SourceAPI] { def show(a: SourceAPI) = lines(a.packages, ps) + "\n" + lines(a.definitions, ds) }
|
||||
trait ShowBase {
|
||||
implicit def showAnnotation(implicit as: Show[AnnotationArgument], t: Show[Type]): Show[Annotation] =
|
||||
new Show[Annotation] { def show(a: Annotation) = "@" + t.show(a.base) + (if (a.arguments.isEmpty) "" else "(" + commas(a.arguments, as) + ")") }
|
||||
|
||||
implicit def showPackage: Show[Package] =
|
||||
new Show[Package] { def show(pkg: Package) = "package " + pkg.name }
|
||||
implicit def showAnnotationArgument: Show[AnnotationArgument] =
|
||||
new Show[AnnotationArgument] { def show(a: AnnotationArgument) = a.name + " = " + a.value }
|
||||
|
||||
implicit def showAccess(implicit sq: Show[Qualified]): Show[Access] =
|
||||
new Show[Access]
|
||||
{
|
||||
def show(a: Access) =
|
||||
a match
|
||||
{
|
||||
case p: Public => ""
|
||||
case q: Qualified => sq.show(q)
|
||||
}
|
||||
}
|
||||
implicit def showQualified(implicit sq: Show[Qualifier]): Show[Qualified] =
|
||||
new Show[Qualified]
|
||||
{
|
||||
def show(q: Qualified) =
|
||||
((q match
|
||||
{
|
||||
case p: Protected => "protected"
|
||||
case p: Private => "private"
|
||||
})
|
||||
+ sq.show(q.qualifier) )
|
||||
}
|
||||
implicit def showQualifier: Show[Qualifier] =
|
||||
new Show[Qualifier]
|
||||
{
|
||||
def show(q: Qualifier) =
|
||||
q match
|
||||
{
|
||||
case _: Unqualified => ""
|
||||
case _: ThisQualifier => "[this]"
|
||||
case i: IdQualifier => "[" + i.value + "]"
|
||||
}
|
||||
}
|
||||
implicit def showModifiers: Show[Modifiers] =
|
||||
new Show[Modifiers]
|
||||
{
|
||||
def show(m: Modifiers) =
|
||||
{
|
||||
val mods =
|
||||
(m.isOverride, "override") ::
|
||||
(m.isFinal, "final") ::
|
||||
(m.isSealed, "sealed") ::
|
||||
(m.isImplicit, "implicit") ::
|
||||
(m.isAbstract, "abstract") ::
|
||||
(m.isLazy, "lazy") ::
|
||||
Nil
|
||||
mods.filter(_._1).map(_._2).mkString(" ")
|
||||
}
|
||||
}
|
||||
|
||||
implicit def showDefinitionType: Show[DefinitionType] =
|
||||
new Show[DefinitionType] {
|
||||
import DefinitionType._
|
||||
def show(dt: DefinitionType) =
|
||||
dt match
|
||||
{
|
||||
case Trait => "trait"
|
||||
case ClassDef => "class"
|
||||
case Module => "object"
|
||||
case PackageModule => "package object"
|
||||
}
|
||||
}
|
||||
}
|
||||
trait ShowDefinitions
|
||||
{
|
||||
implicit def showVal(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Val] =
|
||||
new Show[Val] { def show(v: Val) = definitionBase(v, "val")(acs, ms, ans) + ": " + t.show(v.tpe) }
|
||||
|
||||
implicit def showVar(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Var] =
|
||||
new Show[Var] { def show(v: Var) = definitionBase(v, "var")(acs, ms, ans) + ": " + t.show(v.tpe) }
|
||||
|
||||
implicit def showDef(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], vp: Show[Seq[ParameterList]], t: Show[Type]): Show[Def] =
|
||||
new Show[Def] { def show(d: Def) = parameterizedDef(d, "def")(acs, ms, ans, tp) + vp.show(d.valueParameters) + ": " + t.show(d.returnType) }
|
||||
|
||||
implicit def showClassLike(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType], s: Show[Structure], t: Show[Type]): Show[ClassLike] =
|
||||
new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) + " requires " + t.show(cl.selfType) + " extends " + s.show(cl.structure) }
|
||||
|
||||
implicit def showTypeAlias(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeAlias] =
|
||||
new Show[TypeAlias] { def show(ta: TypeAlias) = parameterizedDef(ta, "type")(acs, ms, ans, tp) + " = " + t.show(ta.tpe) }
|
||||
|
||||
implicit def showTypeDeclaration(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeDeclaration] =
|
||||
new Show[TypeDeclaration] { def show(td: TypeDeclaration) = parameterizedDef(td, "type")(acs, ms, ans, tp) + bounds(td.lowerBound, td.upperBound) }
|
||||
def showClassLikeSimple(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType]): Show[ClassLike] =
|
||||
new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) }
|
||||
import Variance._
|
||||
implicit def showVariance: Show[Variance] =
|
||||
new Show[Variance] { def show(v: Variance) = v match { case Invariant => ""; case Covariant => "+"; case Contravariant => "-" } }
|
||||
|
||||
def parameterizedDef(d: ParameterizedDefinition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]]): String =
|
||||
definitionBase(d, label)(acs, ms, ans) + tp.show(d.typeParameters)
|
||||
def definitionBase(d: Definition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation]): String =
|
||||
space(spaced(d.annotations, ans)) + space(acs.show(d.access)) + space(ms.show(d.modifiers)) + space(label) + d.name
|
||||
def space(s: String) = if(s.isEmpty) s else s + " "
|
||||
implicit def showSource(implicit ps: Show[Package], ds: Show[Definition]): Show[SourceAPI] =
|
||||
new Show[SourceAPI] { def show(a: SourceAPI) = lines(a.packages, ps) + "\n" + lines(a.definitions, ds) }
|
||||
|
||||
implicit def showPackage: Show[Package] =
|
||||
new Show[Package] { def show(pkg: Package) = "package " + pkg.name }
|
||||
|
||||
implicit def showAccess(implicit sq: Show[Qualified]): Show[Access] =
|
||||
new Show[Access] {
|
||||
def show(a: Access) =
|
||||
a match {
|
||||
case p: Public => ""
|
||||
case q: Qualified => sq.show(q)
|
||||
}
|
||||
}
|
||||
implicit def showQualified(implicit sq: Show[Qualifier]): Show[Qualified] =
|
||||
new Show[Qualified] {
|
||||
def show(q: Qualified) =
|
||||
((q match {
|
||||
case p: Protected => "protected"
|
||||
case p: Private => "private"
|
||||
})
|
||||
+ sq.show(q.qualifier))
|
||||
}
|
||||
implicit def showQualifier: Show[Qualifier] =
|
||||
new Show[Qualifier] {
|
||||
def show(q: Qualifier) =
|
||||
q match {
|
||||
case _: Unqualified => ""
|
||||
case _: ThisQualifier => "[this]"
|
||||
case i: IdQualifier => "[" + i.value + "]"
|
||||
}
|
||||
}
|
||||
implicit def showModifiers: Show[Modifiers] =
|
||||
new Show[Modifiers] {
|
||||
def show(m: Modifiers) =
|
||||
{
|
||||
val mods =
|
||||
(m.isOverride, "override") ::
|
||||
(m.isFinal, "final") ::
|
||||
(m.isSealed, "sealed") ::
|
||||
(m.isImplicit, "implicit") ::
|
||||
(m.isAbstract, "abstract") ::
|
||||
(m.isLazy, "lazy") ::
|
||||
Nil
|
||||
mods.filter(_._1).map(_._2).mkString(" ")
|
||||
}
|
||||
}
|
||||
|
||||
implicit def showDefinitionType: Show[DefinitionType] =
|
||||
new Show[DefinitionType] {
|
||||
import DefinitionType._
|
||||
def show(dt: DefinitionType) =
|
||||
dt match {
|
||||
case Trait => "trait"
|
||||
case ClassDef => "class"
|
||||
case Module => "object"
|
||||
case PackageModule => "package object"
|
||||
}
|
||||
}
|
||||
}
|
||||
trait ShowDefinition
|
||||
{
|
||||
implicit def showDefinition(implicit vl: Show[Val], vr: Show[Var], ds: Show[Def], cl: Show[ClassLike], ta: Show[TypeAlias], td: Show[TypeDeclaration]): Show[Definition] =
|
||||
new Show[Definition]
|
||||
{
|
||||
def show(d: Definition) =
|
||||
d match
|
||||
{
|
||||
case v: Val => vl.show(v)
|
||||
case v: Var => vr.show(v)
|
||||
case d: Def => ds.show(d)
|
||||
case c: ClassLike => cl.show(c)
|
||||
case t: TypeAlias => ta.show(t)
|
||||
case t: TypeDeclaration => td.show(t)
|
||||
}
|
||||
}
|
||||
trait ShowDefinitions {
|
||||
implicit def showVal(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Val] =
|
||||
new Show[Val] { def show(v: Val) = definitionBase(v, "val")(acs, ms, ans) + ": " + t.show(v.tpe) }
|
||||
|
||||
implicit def showVar(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Var] =
|
||||
new Show[Var] { def show(v: Var) = definitionBase(v, "var")(acs, ms, ans) + ": " + t.show(v.tpe) }
|
||||
|
||||
implicit def showDef(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], vp: Show[Seq[ParameterList]], t: Show[Type]): Show[Def] =
|
||||
new Show[Def] { def show(d: Def) = parameterizedDef(d, "def")(acs, ms, ans, tp) + vp.show(d.valueParameters) + ": " + t.show(d.returnType) }
|
||||
|
||||
implicit def showClassLike(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType], s: Show[Structure], t: Show[Type]): Show[ClassLike] =
|
||||
new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) + " requires " + t.show(cl.selfType) + " extends " + s.show(cl.structure) }
|
||||
|
||||
implicit def showTypeAlias(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeAlias] =
|
||||
new Show[TypeAlias] { def show(ta: TypeAlias) = parameterizedDef(ta, "type")(acs, ms, ans, tp) + " = " + t.show(ta.tpe) }
|
||||
|
||||
implicit def showTypeDeclaration(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeDeclaration] =
|
||||
new Show[TypeDeclaration] { def show(td: TypeDeclaration) = parameterizedDef(td, "type")(acs, ms, ans, tp) + bounds(td.lowerBound, td.upperBound) }
|
||||
def showClassLikeSimple(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType]): Show[ClassLike] =
|
||||
new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) }
|
||||
|
||||
def parameterizedDef(d: ParameterizedDefinition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]]): String =
|
||||
definitionBase(d, label)(acs, ms, ans) + tp.show(d.typeParameters)
|
||||
def definitionBase(d: Definition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation]): String =
|
||||
space(spaced(d.annotations, ans)) + space(acs.show(d.access)) + space(ms.show(d.modifiers)) + space(label) + d.name
|
||||
def space(s: String) = if (s.isEmpty) s else s + " "
|
||||
}
|
||||
trait ShowType
|
||||
{
|
||||
implicit def showType(implicit s: Show[SimpleType], a: Show[Annotated], st: Show[Structure], c: Show[Constant], e: Show[Existential], po: Show[Polymorphic]): Show[Type] =
|
||||
new Show[Type]
|
||||
{
|
||||
def show(t: Type) =
|
||||
t match
|
||||
{
|
||||
case q: SimpleType => s.show(q)
|
||||
case q: Constant => c.show(q)
|
||||
case q: Annotated => a.show(q)
|
||||
case q: Structure => st.show(q)
|
||||
case q: Existential => e.show(q)
|
||||
case q: Polymorphic => po.show(q)
|
||||
}
|
||||
}
|
||||
|
||||
implicit def showSimpleType(implicit pr: Show[Projection], pa: Show[ParameterRef], si: Show[Singleton], et: Show[EmptyType], p: Show[Parameterized]): Show[SimpleType] =
|
||||
new Show[SimpleType] {
|
||||
def show(t: SimpleType) =
|
||||
t match
|
||||
{
|
||||
case q: Projection => pr.show(q)
|
||||
case q: ParameterRef => pa.show(q)
|
||||
case q: Singleton => si.show(q)
|
||||
case q: EmptyType => et.show(q)
|
||||
case q: Parameterized => p.show(q)
|
||||
}
|
||||
}
|
||||
trait ShowDefinition {
|
||||
implicit def showDefinition(implicit vl: Show[Val], vr: Show[Var], ds: Show[Def], cl: Show[ClassLike], ta: Show[TypeAlias], td: Show[TypeDeclaration]): Show[Definition] =
|
||||
new Show[Definition] {
|
||||
def show(d: Definition) =
|
||||
d match {
|
||||
case v: Val => vl.show(v)
|
||||
case v: Var => vr.show(v)
|
||||
case d: Def => ds.show(d)
|
||||
case c: ClassLike => cl.show(c)
|
||||
case t: TypeAlias => ta.show(t)
|
||||
case t: TypeDeclaration => td.show(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
trait ShowBasicTypes
|
||||
{
|
||||
implicit def showSingleton(implicit p: Show[Path]): Show[Singleton] =
|
||||
new Show[Singleton] { def show(s: Singleton) = p.show(s.path) }
|
||||
implicit def showEmptyType: Show[EmptyType] =
|
||||
new Show[EmptyType] { def show(e: EmptyType) = "<empty>" }
|
||||
implicit def showParameterRef: Show[ParameterRef] =
|
||||
new Show[ParameterRef] { def show(p: ParameterRef) = "<" + p.id + ">" }
|
||||
trait ShowType {
|
||||
implicit def showType(implicit s: Show[SimpleType], a: Show[Annotated], st: Show[Structure], c: Show[Constant], e: Show[Existential], po: Show[Polymorphic]): Show[Type] =
|
||||
new Show[Type] {
|
||||
def show(t: Type) =
|
||||
t match {
|
||||
case q: SimpleType => s.show(q)
|
||||
case q: Constant => c.show(q)
|
||||
case q: Annotated => a.show(q)
|
||||
case q: Structure => st.show(q)
|
||||
case q: Existential => e.show(q)
|
||||
case q: Polymorphic => po.show(q)
|
||||
}
|
||||
}
|
||||
|
||||
implicit def showSimpleType(implicit pr: Show[Projection], pa: Show[ParameterRef], si: Show[Singleton], et: Show[EmptyType], p: Show[Parameterized]): Show[SimpleType] =
|
||||
new Show[SimpleType] {
|
||||
def show(t: SimpleType) =
|
||||
t match {
|
||||
case q: Projection => pr.show(q)
|
||||
case q: ParameterRef => pa.show(q)
|
||||
case q: Singleton => si.show(q)
|
||||
case q: EmptyType => et.show(q)
|
||||
case q: Parameterized => p.show(q)
|
||||
}
|
||||
}
|
||||
}
|
||||
trait ShowTypes
|
||||
{
|
||||
implicit def showStructure(implicit t: Show[Type], d: Show[Definition]): Show[Structure] =
|
||||
new Show[Structure] {
|
||||
def show(s: Structure) = {
|
||||
// don't show inherited class like definitions to avoid dealing with cycles
|
||||
val safeInherited = s.inherited.filterNot(_.isInstanceOf[ClassLike])
|
||||
val showInherited: Show[Definition] = new Show[Definition] {
|
||||
def show(deff: Definition): String = "^inherited^ " + d.show(deff)
|
||||
}
|
||||
concat(s.parents, t, " with ") + "\n{\n" + lines(safeInherited, showInherited) + "\n" + lines(s.declared, d) + "\n}"
|
||||
}
|
||||
}
|
||||
implicit def showAnnotated(implicit as: Show[Annotation], t: Show[Type]): Show[Annotated] =
|
||||
new Show[Annotated] { def show(a: Annotated) = spaced(a.annotations, as) + " " + t.show(a.baseType) }
|
||||
implicit def showProjection(implicit t: Show[SimpleType]): Show[Projection] =
|
||||
new Show[Projection] { def show(p: Projection) = t.show(p.prefix) + "#" + p.id }
|
||||
implicit def showParameterized(implicit t: Show[Type]): Show[Parameterized] =
|
||||
new Show[Parameterized] { def show(p: Parameterized) = t.show(p.baseType) + mapSeq(p.typeArguments, t).mkString("[", ", ", "]") }
|
||||
implicit def showConstant(implicit t: Show[Type]): Show[Constant] =
|
||||
new Show[Constant] { def show(c: Constant) = t.show(c.baseType) + "(" + c.value + ")" }
|
||||
implicit def showExistential(implicit t: Show[Type], tp: Show[TypeParameter]): Show[Existential] =
|
||||
new Show[Existential] {
|
||||
def show(e: Existential) =
|
||||
t.show(e.baseType) + e.clause.map(t => "type " + tp.show(t)).mkString(" forSome { ", "; ", "}")
|
||||
}
|
||||
implicit def showPolymorphic(implicit t: Show[Type], tps: Show[Seq[TypeParameter]]): Show[Polymorphic] =
|
||||
new Show[Polymorphic] { def show(p: Polymorphic) = t.show(p.baseType) + tps.show(p.parameters) }
|
||||
|
||||
trait ShowBasicTypes {
|
||||
implicit def showSingleton(implicit p: Show[Path]): Show[Singleton] =
|
||||
new Show[Singleton] { def show(s: Singleton) = p.show(s.path) }
|
||||
implicit def showEmptyType: Show[EmptyType] =
|
||||
new Show[EmptyType] { def show(e: EmptyType) = "<empty>" }
|
||||
implicit def showParameterRef: Show[ParameterRef] =
|
||||
new Show[ParameterRef] { def show(p: ParameterRef) = "<" + p.id + ">" }
|
||||
}
|
||||
trait ShowTypes {
|
||||
implicit def showStructure(implicit t: Show[Type], d: Show[Definition]): Show[Structure] =
|
||||
new Show[Structure] {
|
||||
def show(s: Structure) = {
|
||||
// don't show inherited class like definitions to avoid dealing with cycles
|
||||
val safeInherited = s.inherited.filterNot(_.isInstanceOf[ClassLike])
|
||||
val showInherited: Show[Definition] = new Show[Definition] {
|
||||
def show(deff: Definition): String = "^inherited^ " + d.show(deff)
|
||||
}
|
||||
concat(s.parents, t, " with ") + "\n{\n" + lines(safeInherited, showInherited) + "\n" + lines(s.declared, d) + "\n}"
|
||||
}
|
||||
}
|
||||
implicit def showAnnotated(implicit as: Show[Annotation], t: Show[Type]): Show[Annotated] =
|
||||
new Show[Annotated] { def show(a: Annotated) = spaced(a.annotations, as) + " " + t.show(a.baseType) }
|
||||
implicit def showProjection(implicit t: Show[SimpleType]): Show[Projection] =
|
||||
new Show[Projection] { def show(p: Projection) = t.show(p.prefix) + "#" + p.id }
|
||||
implicit def showParameterized(implicit t: Show[Type]): Show[Parameterized] =
|
||||
new Show[Parameterized] { def show(p: Parameterized) = t.show(p.baseType) + mapSeq(p.typeArguments, t).mkString("[", ", ", "]") }
|
||||
implicit def showConstant(implicit t: Show[Type]): Show[Constant] =
|
||||
new Show[Constant] { def show(c: Constant) = t.show(c.baseType) + "(" + c.value + ")" }
|
||||
implicit def showExistential(implicit t: Show[Type], tp: Show[TypeParameter]): Show[Existential] =
|
||||
new Show[Existential] {
|
||||
def show(e: Existential) =
|
||||
t.show(e.baseType) + e.clause.map(t => "type " + tp.show(t)).mkString(" forSome { ", "; ", "}")
|
||||
}
|
||||
implicit def showPolymorphic(implicit t: Show[Type], tps: Show[Seq[TypeParameter]]): Show[Polymorphic] =
|
||||
new Show[Polymorphic] { def show(p: Polymorphic) = t.show(p.baseType) + tps.show(p.parameters) }
|
||||
|
||||
}
|
||||
|
||||
trait ShowPath
|
||||
{
|
||||
implicit def showPath(implicit pc: Show[PathComponent]): Show[Path] =
|
||||
new Show[Path] { def show(p: Path) = mapSeq(p.components, pc).mkString(".") }
|
||||
|
||||
implicit def showPathComponent(implicit sp: Show[Path]): Show[PathComponent] =
|
||||
new Show[PathComponent] {
|
||||
def show(p: PathComponent) =
|
||||
p match
|
||||
{
|
||||
case s: Super => "super[" + sp.show(s.qualifier) + "]"
|
||||
case _: This => "this"
|
||||
case i: Id => i.id
|
||||
}
|
||||
}
|
||||
trait ShowPath {
|
||||
implicit def showPath(implicit pc: Show[PathComponent]): Show[Path] =
|
||||
new Show[Path] { def show(p: Path) = mapSeq(p.components, pc).mkString(".") }
|
||||
|
||||
implicit def showPathComponent(implicit sp: Show[Path]): Show[PathComponent] =
|
||||
new Show[PathComponent] {
|
||||
def show(p: PathComponent) =
|
||||
p match {
|
||||
case s: Super => "super[" + sp.show(s.qualifier) + "]"
|
||||
case _: This => "this"
|
||||
case i: Id => i.id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait ShowValueParameters
|
||||
{
|
||||
implicit def showParameterLists(implicit pl: Show[ParameterList]): Show[Seq[ParameterList]] =
|
||||
new Show[Seq[ParameterList]] { def show(p: Seq[ParameterList]) = concat(p,pl, "") }
|
||||
implicit def showParameterList(implicit mp: Show[MethodParameter]): Show[ParameterList] =
|
||||
new Show[ParameterList] { def show(pl: ParameterList) = "(" + (if(pl.isImplicit) "implicit " else "") + commas(pl.parameters, mp) + ")" }
|
||||
|
||||
implicit def showMethodParameter(implicit t: Show[Type]): Show[MethodParameter] =
|
||||
new Show[MethodParameter] {
|
||||
def show(mp: MethodParameter) =
|
||||
mp.name + ": " + parameterModifier(t.show(mp.tpe), mp.modifier) + (if(mp.hasDefault) "= ..." else "")
|
||||
}
|
||||
trait ShowValueParameters {
|
||||
implicit def showParameterLists(implicit pl: Show[ParameterList]): Show[Seq[ParameterList]] =
|
||||
new Show[Seq[ParameterList]] { def show(p: Seq[ParameterList]) = concat(p, pl, "") }
|
||||
implicit def showParameterList(implicit mp: Show[MethodParameter]): Show[ParameterList] =
|
||||
new Show[ParameterList] { def show(pl: ParameterList) = "(" + (if (pl.isImplicit) "implicit " else "") + commas(pl.parameters, mp) + ")" }
|
||||
|
||||
implicit def showMethodParameter(implicit t: Show[Type]): Show[MethodParameter] =
|
||||
new Show[MethodParameter] {
|
||||
def show(mp: MethodParameter) =
|
||||
mp.name + ": " + parameterModifier(t.show(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "")
|
||||
}
|
||||
}
|
||||
trait ShowTypeParameters
|
||||
{
|
||||
implicit def showTypeParameters(implicit as: Show[TypeParameter]): Show[Seq[TypeParameter]] =
|
||||
new Show[Seq[TypeParameter]] { def show(tps: Seq[TypeParameter]) = if(tps.isEmpty) "" else mapSeq(tps, as).mkString("[", ",", "]") }
|
||||
implicit def showTypeParameter(implicit as: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type], v: Show[Variance]): Show[TypeParameter] =
|
||||
new Show[TypeParameter] {
|
||||
def show(tps: TypeParameter) =
|
||||
spaced(tps.annotations, as) + " " + v.show(tps.variance) + tps.id + tp.show(tps.typeParameters) + " " + bounds(tps.lowerBound, tps.upperBound)
|
||||
}
|
||||
trait ShowTypeParameters {
|
||||
implicit def showTypeParameters(implicit as: Show[TypeParameter]): Show[Seq[TypeParameter]] =
|
||||
new Show[Seq[TypeParameter]] { def show(tps: Seq[TypeParameter]) = if (tps.isEmpty) "" else mapSeq(tps, as).mkString("[", ",", "]") }
|
||||
implicit def showTypeParameter(implicit as: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type], v: Show[Variance]): Show[TypeParameter] =
|
||||
new Show[TypeParameter] {
|
||||
def show(tps: TypeParameter) =
|
||||
spaced(tps.annotations, as) + " " + v.show(tps.variance) + tps.id + tp.show(tps.typeParameters) + " " + bounds(tps.lowerBound, tps.upperBound)
|
||||
}
|
||||
}
|
||||
|
||||
// this class is a hack to resolve some diverging implicit errors.
|
||||
// I'm pretty sure the cause is the Show[Seq[T]] dominating Show[X] issue.
|
||||
// It could probably be reduced a bit if that is the case (below was trial and error)
|
||||
object DefaultShowAPI extends ShowBase with ShowBasicTypes with ShowValueParameters
|
||||
{
|
||||
def apply(d: Definition) = ShowAPI.show(d)
|
||||
def apply(d: Type) = ShowAPI.show(d)
|
||||
object DefaultShowAPI extends ShowBase with ShowBasicTypes with ShowValueParameters {
|
||||
def apply(d: Definition) = ShowAPI.show(d)
|
||||
def apply(d: Type) = ShowAPI.show(d)
|
||||
|
||||
implicit lazy val showVal: Show[Val] = Cyclic.showVal
|
||||
implicit lazy val showVar: Show[Var] = Cyclic.showVar
|
||||
implicit lazy val showClassLike: Show[ClassLike] = Cyclic.showClassLike
|
||||
implicit lazy val showTypeDeclaration: Show[TypeDeclaration] = Cyclic.showTypeDeclaration
|
||||
implicit lazy val showTypeAlias: Show[TypeAlias] = Cyclic.showTypeAlias
|
||||
implicit lazy val showDef: Show[Def] = Cyclic.showDef
|
||||
|
||||
implicit lazy val showProj: Show[Projection] = Cyclic.showProjection
|
||||
implicit lazy val showPoly: Show[Polymorphic] = Cyclic.showPolymorphic
|
||||
|
||||
implicit lazy val showSimple: Show[SimpleType] = new ShowLazy(Cyclic.showSimpleType)
|
||||
implicit lazy val showAnnotated: Show[Annotated] = Cyclic.showAnnotated
|
||||
implicit lazy val showExistential: Show[Existential] = Cyclic.showExistential
|
||||
implicit lazy val showConstant: Show[Constant] = Cyclic.showConstant
|
||||
implicit lazy val showParameterized: Show[Parameterized] = Cyclic.showParameterized
|
||||
|
||||
implicit lazy val showTypeParameters: Show[Seq[TypeParameter]] = new ShowLazy(Cyclic.showTypeParameters)
|
||||
implicit lazy val showTypeParameter: Show[TypeParameter] = Cyclic.showTypeParameter
|
||||
|
||||
implicit lazy val showDefinition: Show[Definition] = new ShowLazy(Cyclic.showDefinition)
|
||||
implicit lazy val showType: Show[Type] = new ShowLazy(Cyclic.showType)
|
||||
implicit lazy val showStructure: Show[Structure] = new ShowLazy(Cyclic.showStructure)
|
||||
|
||||
implicit lazy val showPath: Show[Path] = new ShowLazy(Cyclic.showPath)
|
||||
implicit lazy val showPathComponent: Show[PathComponent] = Cyclic.showPathComponent
|
||||
implicit lazy val showVal: Show[Val] = Cyclic.showVal
|
||||
implicit lazy val showVar: Show[Var] = Cyclic.showVar
|
||||
implicit lazy val showClassLike: Show[ClassLike] = Cyclic.showClassLike
|
||||
implicit lazy val showTypeDeclaration: Show[TypeDeclaration] = Cyclic.showTypeDeclaration
|
||||
implicit lazy val showTypeAlias: Show[TypeAlias] = Cyclic.showTypeAlias
|
||||
implicit lazy val showDef: Show[Def] = Cyclic.showDef
|
||||
|
||||
private object Cyclic extends ShowTypes with ShowType with ShowPath with ShowDefinition with ShowDefinitions with ShowTypeParameters
|
||||
implicit lazy val showProj: Show[Projection] = Cyclic.showProjection
|
||||
implicit lazy val showPoly: Show[Polymorphic] = Cyclic.showPolymorphic
|
||||
|
||||
implicit lazy val showSimple: Show[SimpleType] = new ShowLazy(Cyclic.showSimpleType)
|
||||
implicit lazy val showAnnotated: Show[Annotated] = Cyclic.showAnnotated
|
||||
implicit lazy val showExistential: Show[Existential] = Cyclic.showExistential
|
||||
implicit lazy val showConstant: Show[Constant] = Cyclic.showConstant
|
||||
implicit lazy val showParameterized: Show[Parameterized] = Cyclic.showParameterized
|
||||
|
||||
implicit lazy val showTypeParameters: Show[Seq[TypeParameter]] = new ShowLazy(Cyclic.showTypeParameters)
|
||||
implicit lazy val showTypeParameter: Show[TypeParameter] = Cyclic.showTypeParameter
|
||||
|
||||
implicit lazy val showDefinition: Show[Definition] = new ShowLazy(Cyclic.showDefinition)
|
||||
implicit lazy val showType: Show[Type] = new ShowLazy(Cyclic.showType)
|
||||
implicit lazy val showStructure: Show[Structure] = new ShowLazy(Cyclic.showStructure)
|
||||
|
||||
implicit lazy val showPath: Show[Path] = new ShowLazy(Cyclic.showPath)
|
||||
implicit lazy val showPathComponent: Show[PathComponent] = Cyclic.showPathComponent
|
||||
|
||||
private object Cyclic extends ShowTypes with ShowType with ShowPath with ShowDefinition with ShowDefinitions with ShowTypeParameters
|
||||
}
|
||||
|
|
@ -3,207 +3,183 @@
|
|||
*/
|
||||
package xsbt.api
|
||||
|
||||
import xsbti.api._
|
||||
import scala.collection.mutable
|
||||
import xsbti.api._
|
||||
import scala.collection.mutable
|
||||
|
||||
class Visit
|
||||
{
|
||||
private[this] val visitedStructures = new mutable.HashSet[Structure]
|
||||
private[this] val visitedClassLike = new mutable.HashSet[ClassLike]
|
||||
class Visit {
|
||||
private[this] val visitedStructures = new mutable.HashSet[Structure]
|
||||
private[this] val visitedClassLike = new mutable.HashSet[ClassLike]
|
||||
|
||||
def visit(s: Source): Unit = visitAPI(s.api)
|
||||
def visitAPI(s: SourceAPI): Unit =
|
||||
{
|
||||
s.packages foreach visitPackage
|
||||
s.definitions foreach visitDefinition
|
||||
}
|
||||
def visit(s: Source): Unit = visitAPI(s.api)
|
||||
def visitAPI(s: SourceAPI): Unit =
|
||||
{
|
||||
s.packages foreach visitPackage
|
||||
s.definitions foreach visitDefinition
|
||||
}
|
||||
|
||||
def visitPackage(p: Package)
|
||||
{
|
||||
visitString(p.name)
|
||||
}
|
||||
def visitPackage(p: Package) {
|
||||
visitString(p.name)
|
||||
}
|
||||
|
||||
def visitDefinitions(ds: Seq[Definition]) = ds foreach visitDefinition
|
||||
def visitDefinition(d: Definition)
|
||||
{
|
||||
visitString(d.name)
|
||||
visitAnnotations(d.annotations)
|
||||
visitModifiers(d.modifiers)
|
||||
visitAccess(d.access)
|
||||
d match
|
||||
{
|
||||
case c: ClassLike => visitClass(c)
|
||||
case f: FieldLike => visitField(f)
|
||||
case d: Def => visitDef(d)
|
||||
case t: TypeDeclaration => visitTypeDeclaration(t)
|
||||
case t: TypeAlias => visitTypeAlias(t)
|
||||
}
|
||||
}
|
||||
final def visitClass(c: ClassLike): Unit = if(visitedClassLike add c) visitClass0(c)
|
||||
def visitClass0(c: ClassLike)
|
||||
{
|
||||
visitParameterizedDefinition(c)
|
||||
visitType(c.selfType)
|
||||
visitStructure(c.structure)
|
||||
}
|
||||
def visitField(f: FieldLike)
|
||||
{
|
||||
visitType(f.tpe)
|
||||
f match
|
||||
{
|
||||
case v: Var => visitVar(v)
|
||||
case v: Val => visitVal(v)
|
||||
}
|
||||
}
|
||||
def visitVar(v: Var) {}
|
||||
def visitVal(v: Val) {}
|
||||
def visitDef(d: Def)
|
||||
{
|
||||
visitParameterizedDefinition(d)
|
||||
visitValueParameters(d.valueParameters)
|
||||
visitType(d.returnType)
|
||||
}
|
||||
def visitAccess(a: Access): Unit =
|
||||
a match
|
||||
{
|
||||
case pub: Public => visitPublic(pub)
|
||||
case qual: Qualified => visitQualified(qual)
|
||||
}
|
||||
def visitQualified(qual: Qualified): Unit =
|
||||
qual match
|
||||
{
|
||||
case p: Protected => visitProtected(p)
|
||||
case p: Private => visitPrivate(p)
|
||||
}
|
||||
def visitQualifier(qual: Qualifier): Unit =
|
||||
qual match
|
||||
{
|
||||
case unq: Unqualified => visitUnqualified(unq)
|
||||
case thisq: ThisQualifier => visitThisQualifier(thisq)
|
||||
case id: IdQualifier => visitIdQualifier(id)
|
||||
}
|
||||
def visitIdQualifier(id: IdQualifier)
|
||||
{
|
||||
visitString(id.value)
|
||||
}
|
||||
def visitUnqualified(unq: Unqualified) {}
|
||||
def visitThisQualifier(thisq: ThisQualifier) {}
|
||||
def visitPublic(pub: Public) {}
|
||||
def visitPrivate(p: Private) { visitQualifier(p.qualifier) }
|
||||
def visitProtected(p: Protected) { visitQualifier(p.qualifier) }
|
||||
def visitModifiers(m: Modifiers) {}
|
||||
|
||||
def visitValueParameters(valueParameters: Seq[ParameterList]) = valueParameters foreach visitValueParameterList
|
||||
def visitValueParameterList(list: ParameterList) = list.parameters foreach visitValueParameter
|
||||
def visitValueParameter(parameter: MethodParameter) =
|
||||
{
|
||||
visitString(parameter.name)
|
||||
visitType(parameter.tpe)
|
||||
}
|
||||
|
||||
def visitParameterizedDefinition[T <: ParameterizedDefinition](d: T)
|
||||
{
|
||||
visitTypeParameters(d.typeParameters)
|
||||
}
|
||||
def visitTypeDeclaration(d: TypeDeclaration)
|
||||
{
|
||||
visitParameterizedDefinition(d)
|
||||
visitType(d.lowerBound)
|
||||
visitType(d.upperBound)
|
||||
}
|
||||
def visitTypeAlias(d: TypeAlias)
|
||||
{
|
||||
visitParameterizedDefinition(d)
|
||||
visitType(d.tpe)
|
||||
}
|
||||
|
||||
def visitTypeParameters(parameters: Seq[TypeParameter]) = parameters foreach visitTypeParameter
|
||||
def visitTypeParameter(parameter: TypeParameter)
|
||||
{
|
||||
visitTypeParameters(parameter.typeParameters)
|
||||
visitType(parameter.lowerBound)
|
||||
visitType(parameter.upperBound)
|
||||
visitAnnotations(parameter.annotations)
|
||||
}
|
||||
def visitAnnotations(annotations: Seq[Annotation]) = annotations foreach visitAnnotation
|
||||
def visitAnnotation(annotation: Annotation) =
|
||||
{
|
||||
visitType(annotation.base)
|
||||
visitAnnotationArguments(annotation.arguments)
|
||||
}
|
||||
def visitAnnotationArguments(args: Seq[AnnotationArgument]) = args foreach visitAnnotationArgument
|
||||
def visitAnnotationArgument(arg: AnnotationArgument)
|
||||
{
|
||||
visitString(arg.name)
|
||||
visitString(arg.value)
|
||||
}
|
||||
|
||||
def visitTypes(ts: Seq[Type]) = ts.foreach(visitType)
|
||||
def visitType(t: Type)
|
||||
{
|
||||
t match
|
||||
{
|
||||
case s: Structure => visitStructure(s)
|
||||
case e: Existential => visitExistential(e)
|
||||
case c: Constant => visitConstant(c)
|
||||
case p: Polymorphic => visitPolymorphic(p)
|
||||
case a: Annotated => visitAnnotated(a)
|
||||
case p: Parameterized => visitParameterized(p)
|
||||
case p: Projection => visitProjection(p)
|
||||
case _: EmptyType => visitEmptyType()
|
||||
case s: Singleton => visitSingleton(s)
|
||||
case pr: ParameterRef => visitParameterRef(pr)
|
||||
}
|
||||
}
|
||||
|
||||
def visitEmptyType() {}
|
||||
def visitParameterRef(p: ParameterRef) {}
|
||||
def visitSingleton(s: Singleton) { visitPath(s.path) }
|
||||
def visitPath(path: Path) = path.components foreach visitPathComponent
|
||||
def visitPathComponent(pc: PathComponent) = pc match
|
||||
{
|
||||
case t: This => visitThisPath(t)
|
||||
case s: Super => visitSuperPath(s)
|
||||
case id: Id => visitIdPath(id)
|
||||
}
|
||||
def visitThisPath(t: This) {}
|
||||
def visitSuperPath(s: Super) { visitPath(s.qualifier) }
|
||||
def visitIdPath(id: Id) { visitString(id.id) }
|
||||
def visitDefinitions(ds: Seq[Definition]) = ds foreach visitDefinition
|
||||
def visitDefinition(d: Definition) {
|
||||
visitString(d.name)
|
||||
visitAnnotations(d.annotations)
|
||||
visitModifiers(d.modifiers)
|
||||
visitAccess(d.access)
|
||||
d match {
|
||||
case c: ClassLike => visitClass(c)
|
||||
case f: FieldLike => visitField(f)
|
||||
case d: Def => visitDef(d)
|
||||
case t: TypeDeclaration => visitTypeDeclaration(t)
|
||||
case t: TypeAlias => visitTypeAlias(t)
|
||||
}
|
||||
}
|
||||
final def visitClass(c: ClassLike): Unit = if (visitedClassLike add c) visitClass0(c)
|
||||
def visitClass0(c: ClassLike) {
|
||||
visitParameterizedDefinition(c)
|
||||
visitType(c.selfType)
|
||||
visitStructure(c.structure)
|
||||
}
|
||||
def visitField(f: FieldLike) {
|
||||
visitType(f.tpe)
|
||||
f match {
|
||||
case v: Var => visitVar(v)
|
||||
case v: Val => visitVal(v)
|
||||
}
|
||||
}
|
||||
def visitVar(v: Var) {}
|
||||
def visitVal(v: Val) {}
|
||||
def visitDef(d: Def) {
|
||||
visitParameterizedDefinition(d)
|
||||
visitValueParameters(d.valueParameters)
|
||||
visitType(d.returnType)
|
||||
}
|
||||
def visitAccess(a: Access): Unit =
|
||||
a match {
|
||||
case pub: Public => visitPublic(pub)
|
||||
case qual: Qualified => visitQualified(qual)
|
||||
}
|
||||
def visitQualified(qual: Qualified): Unit =
|
||||
qual match {
|
||||
case p: Protected => visitProtected(p)
|
||||
case p: Private => visitPrivate(p)
|
||||
}
|
||||
def visitQualifier(qual: Qualifier): Unit =
|
||||
qual match {
|
||||
case unq: Unqualified => visitUnqualified(unq)
|
||||
case thisq: ThisQualifier => visitThisQualifier(thisq)
|
||||
case id: IdQualifier => visitIdQualifier(id)
|
||||
}
|
||||
def visitIdQualifier(id: IdQualifier) {
|
||||
visitString(id.value)
|
||||
}
|
||||
def visitUnqualified(unq: Unqualified) {}
|
||||
def visitThisQualifier(thisq: ThisQualifier) {}
|
||||
def visitPublic(pub: Public) {}
|
||||
def visitPrivate(p: Private) { visitQualifier(p.qualifier) }
|
||||
def visitProtected(p: Protected) { visitQualifier(p.qualifier) }
|
||||
def visitModifiers(m: Modifiers) {}
|
||||
|
||||
def visitValueParameters(valueParameters: Seq[ParameterList]) = valueParameters foreach visitValueParameterList
|
||||
def visitValueParameterList(list: ParameterList) = list.parameters foreach visitValueParameter
|
||||
def visitValueParameter(parameter: MethodParameter) =
|
||||
{
|
||||
visitString(parameter.name)
|
||||
visitType(parameter.tpe)
|
||||
}
|
||||
|
||||
def visitConstant(c: Constant) =
|
||||
{
|
||||
visitString(c.value)
|
||||
visitType(c.baseType)
|
||||
}
|
||||
def visitExistential(e: Existential) = visitParameters(e.clause, e.baseType)
|
||||
def visitPolymorphic(p: Polymorphic) = visitParameters(p.parameters, p.baseType)
|
||||
def visitProjection(p: Projection) =
|
||||
{
|
||||
visitString(p.id)
|
||||
visitType(p.prefix)
|
||||
}
|
||||
def visitParameterized(p: Parameterized)
|
||||
{
|
||||
visitType(p.baseType)
|
||||
visitTypes(p.typeArguments)
|
||||
}
|
||||
def visitAnnotated(a: Annotated)
|
||||
{
|
||||
visitType(a.baseType)
|
||||
visitAnnotations(a.annotations)
|
||||
}
|
||||
final def visitStructure(structure: Structure) = if(visitedStructures add structure) visitStructure0(structure)
|
||||
def visitStructure0(structure: Structure)
|
||||
{
|
||||
visitTypes(structure.parents)
|
||||
visitDefinitions(structure.declared)
|
||||
visitDefinitions(structure.inherited)
|
||||
}
|
||||
def visitParameters(parameters: Seq[TypeParameter], base: Type): Unit =
|
||||
{
|
||||
visitTypeParameters(parameters)
|
||||
visitType(base)
|
||||
}
|
||||
def visitString(s: String) {}
|
||||
def visitParameterizedDefinition[T <: ParameterizedDefinition](d: T) {
|
||||
visitTypeParameters(d.typeParameters)
|
||||
}
|
||||
def visitTypeDeclaration(d: TypeDeclaration) {
|
||||
visitParameterizedDefinition(d)
|
||||
visitType(d.lowerBound)
|
||||
visitType(d.upperBound)
|
||||
}
|
||||
def visitTypeAlias(d: TypeAlias) {
|
||||
visitParameterizedDefinition(d)
|
||||
visitType(d.tpe)
|
||||
}
|
||||
|
||||
def visitTypeParameters(parameters: Seq[TypeParameter]) = parameters foreach visitTypeParameter
|
||||
def visitTypeParameter(parameter: TypeParameter) {
|
||||
visitTypeParameters(parameter.typeParameters)
|
||||
visitType(parameter.lowerBound)
|
||||
visitType(parameter.upperBound)
|
||||
visitAnnotations(parameter.annotations)
|
||||
}
|
||||
def visitAnnotations(annotations: Seq[Annotation]) = annotations foreach visitAnnotation
|
||||
def visitAnnotation(annotation: Annotation) =
|
||||
{
|
||||
visitType(annotation.base)
|
||||
visitAnnotationArguments(annotation.arguments)
|
||||
}
|
||||
def visitAnnotationArguments(args: Seq[AnnotationArgument]) = args foreach visitAnnotationArgument
|
||||
def visitAnnotationArgument(arg: AnnotationArgument) {
|
||||
visitString(arg.name)
|
||||
visitString(arg.value)
|
||||
}
|
||||
|
||||
def visitTypes(ts: Seq[Type]) = ts.foreach(visitType)
|
||||
def visitType(t: Type) {
|
||||
t match {
|
||||
case s: Structure => visitStructure(s)
|
||||
case e: Existential => visitExistential(e)
|
||||
case c: Constant => visitConstant(c)
|
||||
case p: Polymorphic => visitPolymorphic(p)
|
||||
case a: Annotated => visitAnnotated(a)
|
||||
case p: Parameterized => visitParameterized(p)
|
||||
case p: Projection => visitProjection(p)
|
||||
case _: EmptyType => visitEmptyType()
|
||||
case s: Singleton => visitSingleton(s)
|
||||
case pr: ParameterRef => visitParameterRef(pr)
|
||||
}
|
||||
}
|
||||
|
||||
def visitEmptyType() {}
|
||||
def visitParameterRef(p: ParameterRef) {}
|
||||
def visitSingleton(s: Singleton) { visitPath(s.path) }
|
||||
def visitPath(path: Path) = path.components foreach visitPathComponent
|
||||
def visitPathComponent(pc: PathComponent) = pc match {
|
||||
case t: This => visitThisPath(t)
|
||||
case s: Super => visitSuperPath(s)
|
||||
case id: Id => visitIdPath(id)
|
||||
}
|
||||
def visitThisPath(t: This) {}
|
||||
def visitSuperPath(s: Super) { visitPath(s.qualifier) }
|
||||
def visitIdPath(id: Id) { visitString(id.id) }
|
||||
|
||||
def visitConstant(c: Constant) =
|
||||
{
|
||||
visitString(c.value)
|
||||
visitType(c.baseType)
|
||||
}
|
||||
def visitExistential(e: Existential) = visitParameters(e.clause, e.baseType)
|
||||
def visitPolymorphic(p: Polymorphic) = visitParameters(p.parameters, p.baseType)
|
||||
def visitProjection(p: Projection) =
|
||||
{
|
||||
visitString(p.id)
|
||||
visitType(p.prefix)
|
||||
}
|
||||
def visitParameterized(p: Parameterized) {
|
||||
visitType(p.baseType)
|
||||
visitTypes(p.typeArguments)
|
||||
}
|
||||
def visitAnnotated(a: Annotated) {
|
||||
visitType(a.baseType)
|
||||
visitAnnotations(a.annotations)
|
||||
}
|
||||
final def visitStructure(structure: Structure) = if (visitedStructures add structure) visitStructure0(structure)
|
||||
def visitStructure0(structure: Structure) {
|
||||
visitTypes(structure.parents)
|
||||
visitDefinitions(structure.declared)
|
||||
visitDefinitions(structure.inherited)
|
||||
}
|
||||
def visitParameters(parameters: Seq[TypeParameter], base: Type): Unit =
|
||||
{
|
||||
visitTypeParameters(parameters)
|
||||
visitType(base)
|
||||
}
|
||||
def visitString(s: String) {}
|
||||
}
|
||||
|
|
@ -2,25 +2,23 @@
|
|||
// and be accessible to the compiler-side interface
|
||||
package xsbti
|
||||
|
||||
object SafeLazy
|
||||
{
|
||||
def apply[T <: AnyRef](eval: xsbti.F0[T]): xsbti.api.Lazy[T] =
|
||||
apply( eval() )
|
||||
def apply[T <: AnyRef](eval: => T): xsbti.api.Lazy[T] =
|
||||
fromFunction0( eval _ )
|
||||
def fromFunction0[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] =
|
||||
new Impl( eval )
|
||||
object SafeLazy {
|
||||
def apply[T <: AnyRef](eval: xsbti.F0[T]): xsbti.api.Lazy[T] =
|
||||
apply(eval())
|
||||
def apply[T <: AnyRef](eval: => T): xsbti.api.Lazy[T] =
|
||||
fromFunction0(eval _)
|
||||
def fromFunction0[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] =
|
||||
new Impl(eval)
|
||||
|
||||
def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = apply(value)
|
||||
def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = apply(value)
|
||||
|
||||
private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T]
|
||||
{
|
||||
private[this] lazy val _t =
|
||||
{
|
||||
val t = eval()
|
||||
eval = null // clear the reference, ensuring the only memory we hold onto is the result
|
||||
t
|
||||
}
|
||||
def get: T = _t
|
||||
}
|
||||
private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] {
|
||||
private[this] lazy val _t =
|
||||
{
|
||||
val t = eval()
|
||||
eval = null // clear the reference, ensuring the only memory we hold onto is the result
|
||||
t
|
||||
}
|
||||
def get: T = _t
|
||||
}
|
||||
}
|
||||
|
|
@ -3,8 +3,8 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput}
|
||||
import java.io.File
|
||||
import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput }
|
||||
import java.io.File
|
||||
|
||||
// this class exists because of Scala's restriction on implicit parameter search.
|
||||
// We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup]
|
||||
|
|
@ -12,50 +12,49 @@ package sbt
|
|||
// (6 > 4)
|
||||
final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String])
|
||||
final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String,
|
||||
val order: CompileOrder, val nameHashing: Boolean) {
|
||||
@deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2")
|
||||
def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = {
|
||||
this(output, options, compilerVersion, order, false)
|
||||
}
|
||||
val order: CompileOrder, val nameHashing: Boolean) {
|
||||
@deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2")
|
||||
def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = {
|
||||
this(output, options, compilerVersion, order, false)
|
||||
}
|
||||
}
|
||||
|
||||
object CompileSetup
|
||||
{
|
||||
// Equiv[CompileOrder.Value] dominates Equiv[CompileSetup]
|
||||
implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String]/*, equivOrder: Equiv[CompileOrder]*/): Equiv[CompileSetup] = new Equiv[CompileSetup] {
|
||||
def equiv(a: CompileSetup, b: CompileSetup) =
|
||||
equivOutput.equiv(a.output, b.output) &&
|
||||
equivOpts.equiv(a.options, b.options) &&
|
||||
equivComp.equiv(a.compilerVersion, b.compilerVersion) &&
|
||||
a.order == b.order && // equivOrder.equiv(a.order, b.order)
|
||||
a.nameHashing == b.nameHashing
|
||||
}
|
||||
implicit val equivFile: Equiv[File] = new Equiv[File] {
|
||||
def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
|
||||
}
|
||||
implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] {
|
||||
implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory)
|
||||
def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match {
|
||||
case (m1: MultipleOutput, m2: MultipleOutput) =>
|
||||
(m1.outputGroups.length == m2.outputGroups.length) &&
|
||||
(m1.outputGroups.sorted zip m2.outputGroups.sorted forall {
|
||||
case (a,b) =>
|
||||
equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory)
|
||||
})
|
||||
case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory)
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
|
||||
def equiv(a: CompileOptions, b: CompileOptions) =
|
||||
(a.options sameElements b.options) &&
|
||||
(a.javacOptions sameElements b.javacOptions)
|
||||
}
|
||||
implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
|
||||
def equiv(a: String, b: String) = a == b
|
||||
}
|
||||
object CompileSetup {
|
||||
// Equiv[CompileOrder.Value] dominates Equiv[CompileSetup]
|
||||
implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String] /*, equivOrder: Equiv[CompileOrder]*/ ): Equiv[CompileSetup] = new Equiv[CompileSetup] {
|
||||
def equiv(a: CompileSetup, b: CompileSetup) =
|
||||
equivOutput.equiv(a.output, b.output) &&
|
||||
equivOpts.equiv(a.options, b.options) &&
|
||||
equivComp.equiv(a.compilerVersion, b.compilerVersion) &&
|
||||
a.order == b.order && // equivOrder.equiv(a.order, b.order)
|
||||
a.nameHashing == b.nameHashing
|
||||
}
|
||||
implicit val equivFile: Equiv[File] = new Equiv[File] {
|
||||
def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
|
||||
}
|
||||
implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] {
|
||||
implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory)
|
||||
def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match {
|
||||
case (m1: MultipleOutput, m2: MultipleOutput) =>
|
||||
(m1.outputGroups.length == m2.outputGroups.length) &&
|
||||
(m1.outputGroups.sorted zip m2.outputGroups.sorted forall {
|
||||
case (a, b) =>
|
||||
equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory)
|
||||
})
|
||||
case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory)
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
|
||||
def equiv(a: CompileOptions, b: CompileOptions) =
|
||||
(a.options sameElements b.options) &&
|
||||
(a.javacOptions sameElements b.javacOptions)
|
||||
}
|
||||
implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
|
||||
def equiv(a: String, b: String) = a == b
|
||||
}
|
||||
|
||||
implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] {
|
||||
def equiv(a: CompileOrder, b: CompileOrder) = a == b
|
||||
}
|
||||
implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] {
|
||||
def equiv(a: CompileOrder, b: CompileOrder) = a == b
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import xsbti.api.SourceAPI
|
|||
import xsbt.api.ShowAPI
|
||||
import xsbt.api.DefaultShowAPI._
|
||||
import java.lang.reflect.Method
|
||||
import java.util.{List => JList}
|
||||
import java.util.{ List => JList }
|
||||
|
||||
/**
|
||||
* A class which computes diffs (unified diffs) between two textual representations of an API.
|
||||
|
|
@ -21,47 +21,47 @@ import java.util.{List => JList}
|
|||
*/
|
||||
private[inc] class APIDiff {
|
||||
|
||||
import APIDiff._
|
||||
import APIDiff._
|
||||
|
||||
private val diffUtilsClass = Class.forName(diffUtilsClassName)
|
||||
// method signature: diff(List<?>, List<?>)
|
||||
private val diffMethod: Method =
|
||||
diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]])
|
||||
private val diffUtilsClass = Class.forName(diffUtilsClassName)
|
||||
// method signature: diff(List<?>, List<?>)
|
||||
private val diffMethod: Method =
|
||||
diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]])
|
||||
|
||||
private val generateUnifiedDiffMethod: Method = {
|
||||
val patchClass = Class.forName(patchClassName)
|
||||
// method signature: generateUnifiedDiff(String, String, List<String>, Patch, int)
|
||||
diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String],
|
||||
classOf[String], classOf[JList[String]], patchClass, classOf[Int])
|
||||
}
|
||||
private val generateUnifiedDiffMethod: Method = {
|
||||
val patchClass = Class.forName(patchClassName)
|
||||
// method signature: generateUnifiedDiff(String, String, List<String>, Patch, int)
|
||||
diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String],
|
||||
classOf[String], classOf[JList[String]], patchClass, classOf[Int])
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an unified diff between textual representations of `api1` and `api2`.
|
||||
*/
|
||||
def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = {
|
||||
val api1Str = ShowAPI.show(api1)
|
||||
val api2Str = ShowAPI.show(api2)
|
||||
generateApiDiff(fileName, api1Str, api2Str, contextSize)
|
||||
}
|
||||
/**
|
||||
* Generates an unified diff between textual representations of `api1` and `api2`.
|
||||
*/
|
||||
def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = {
|
||||
val api1Str = ShowAPI.show(api1)
|
||||
val api2Str = ShowAPI.show(api2)
|
||||
generateApiDiff(fileName, api1Str, api2Str, contextSize)
|
||||
}
|
||||
|
||||
private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = {
|
||||
assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.")
|
||||
import scala.collection.JavaConverters._
|
||||
def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava
|
||||
val f1Lines = asJavaList(f1.lines)
|
||||
val f2Lines = asJavaList(f2.lines)
|
||||
//val diff = DiffUtils.diff(f1Lines, f2Lines)
|
||||
val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines)
|
||||
val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff,
|
||||
(contextSize: java.lang.Integer)).asInstanceOf[JList[String]]
|
||||
unifiedPatch.asScala.mkString("\n")
|
||||
}
|
||||
private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = {
|
||||
assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.")
|
||||
import scala.collection.JavaConverters._
|
||||
def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava
|
||||
val f1Lines = asJavaList(f1.lines)
|
||||
val f2Lines = asJavaList(f2.lines)
|
||||
//val diff = DiffUtils.diff(f1Lines, f2Lines)
|
||||
val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines)
|
||||
val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff,
|
||||
(contextSize: java.lang.Integer)).asInstanceOf[JList[String]]
|
||||
unifiedPatch.asScala.mkString("\n")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private[inc] object APIDiff {
|
||||
private val diffUtilsClassName = "difflib.DiffUtils"
|
||||
private val patchClassName = "difflib.Patch"
|
||||
private val diffMethodName = "diff"
|
||||
private val generateUnifiedDiffMethodName = "generateUnifiedDiff"
|
||||
private val diffUtilsClassName = "difflib.DiffUtils"
|
||||
private val patchClassName = "difflib.Patch"
|
||||
private val diffMethodName = "diff"
|
||||
private val generateUnifiedDiffMethodName = "generateUnifiedDiff"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,81 +11,82 @@ import xsbti.api._internalOnly_NameHashes
|
|||
import scala.util.Sorting
|
||||
import xsbt.api.SameAPI
|
||||
|
||||
trait APIs
|
||||
{
|
||||
/** The API for the source file `src` at the time represented by this instance.
|
||||
* This method returns an empty API if the file had no API or is not known to this instance. */
|
||||
def internalAPI(src: File): Source
|
||||
/** The API for the external class `ext` at the time represented by this instance.
|
||||
* This method returns an empty API if the file had no API or is not known to this instance. */
|
||||
def externalAPI(ext: String): Source
|
||||
trait APIs {
|
||||
/**
|
||||
* The API for the source file `src` at the time represented by this instance.
|
||||
* This method returns an empty API if the file had no API or is not known to this instance.
|
||||
*/
|
||||
def internalAPI(src: File): Source
|
||||
/**
|
||||
* The API for the external class `ext` at the time represented by this instance.
|
||||
* This method returns an empty API if the file had no API or is not known to this instance.
|
||||
*/
|
||||
def externalAPI(ext: String): Source
|
||||
|
||||
def allExternals: collection.Set[String]
|
||||
def allInternalSources: collection.Set[File]
|
||||
def allExternals: collection.Set[String]
|
||||
def allInternalSources: collection.Set[File]
|
||||
|
||||
def ++ (o: APIs): APIs
|
||||
def ++(o: APIs): APIs
|
||||
|
||||
def markInternalSource(src: File, api: Source): APIs
|
||||
def markExternalAPI(ext: String, api: Source): APIs
|
||||
def markInternalSource(src: File, api: Source): APIs
|
||||
def markExternalAPI(ext: String, api: Source): APIs
|
||||
|
||||
def removeInternal(remove: Iterable[File]): APIs
|
||||
def filterExt(keep: String => Boolean): APIs
|
||||
@deprecated("OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](internal: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs]
|
||||
def removeInternal(remove: Iterable[File]): APIs
|
||||
def filterExt(keep: String => Boolean): APIs
|
||||
@deprecated("OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](internal: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs]
|
||||
|
||||
def internal: Map[File, Source]
|
||||
def external: Map[String, Source]
|
||||
def internal: Map[File, Source]
|
||||
def external: Map[String, Source]
|
||||
}
|
||||
object APIs
|
||||
{
|
||||
def apply(internal: Map[File, Source], external: Map[String, Source]): APIs = new MAPIs(internal, external)
|
||||
def empty: APIs = apply(Map.empty, Map.empty)
|
||||
object APIs {
|
||||
def apply(internal: Map[File, Source], external: Map[String, Source]): APIs = new MAPIs(internal, external)
|
||||
def empty: APIs = apply(Map.empty, Map.empty)
|
||||
|
||||
val emptyAPI = new xsbti.api.SourceAPI(Array(), Array())
|
||||
val emptyCompilation = new xsbti.api.Compilation(-1, Array())
|
||||
val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)
|
||||
val emptySource = new xsbti.api.Source(emptyCompilation, Array(), emptyAPI, 0, emptyNameHashes, false)
|
||||
def getAPI[T](map: Map[T, Source], src: T): Source = map.getOrElse(src, emptySource)
|
||||
val emptyAPI = new xsbti.api.SourceAPI(Array(), Array())
|
||||
val emptyCompilation = new xsbti.api.Compilation(-1, Array())
|
||||
val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)
|
||||
val emptySource = new xsbti.api.Source(emptyCompilation, Array(), emptyAPI, 0, emptyNameHashes, false)
|
||||
def getAPI[T](map: Map[T, Source], src: T): Source = map.getOrElse(src, emptySource)
|
||||
}
|
||||
|
||||
private class MAPIs(val internal: Map[File, Source], val external: Map[String, Source]) extends APIs
|
||||
{
|
||||
def allInternalSources: collection.Set[File] = internal.keySet
|
||||
def allExternals: collection.Set[String] = external.keySet
|
||||
private class MAPIs(val internal: Map[File, Source], val external: Map[String, Source]) extends APIs {
|
||||
def allInternalSources: collection.Set[File] = internal.keySet
|
||||
def allExternals: collection.Set[String] = external.keySet
|
||||
|
||||
def ++ (o: APIs): APIs = new MAPIs(internal ++ o.internal, external ++ o.external)
|
||||
def ++(o: APIs): APIs = new MAPIs(internal ++ o.internal, external ++ o.external)
|
||||
|
||||
def markInternalSource(src: File, api: Source): APIs =
|
||||
new MAPIs(internal.updated(src, api), external)
|
||||
def markInternalSource(src: File, api: Source): APIs =
|
||||
new MAPIs(internal.updated(src, api), external)
|
||||
|
||||
def markExternalAPI(ext: String, api: Source): APIs =
|
||||
new MAPIs(internal, external.updated(ext, api))
|
||||
def markExternalAPI(ext: String, api: Source): APIs =
|
||||
new MAPIs(internal, external.updated(ext, api))
|
||||
|
||||
def removeInternal(remove: Iterable[File]): APIs = new MAPIs(internal -- remove, external)
|
||||
def filterExt(keep: String => Boolean): APIs = new MAPIs(internal, external.filterKeys(keep))
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] =
|
||||
internal.groupBy(item => f(item._1)) map { group => (group._1, new MAPIs(group._2, external).filterExt(keepExternal.getOrElse(group._1, _ => false)))}
|
||||
def removeInternal(remove: Iterable[File]): APIs = new MAPIs(internal -- remove, external)
|
||||
def filterExt(keep: String => Boolean): APIs = new MAPIs(internal, external.filterKeys(keep))
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] =
|
||||
internal.groupBy(item => f(item._1)) map { group => (group._1, new MAPIs(group._2, external).filterExt(keepExternal.getOrElse(group._1, _ => false))) }
|
||||
|
||||
def internalAPI(src: File) = getAPI(internal, src)
|
||||
def externalAPI(ext: String) = getAPI(external, ext)
|
||||
def internalAPI(src: File) = getAPI(internal, src)
|
||||
def externalAPI(ext: String) = getAPI(external, ext)
|
||||
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: MAPIs => {
|
||||
def areEqual[T](x: Map[T, Source], y: Map[T, Source])(implicit ord: math.Ordering[T]) = {
|
||||
x.size == y.size && (sorted(x) zip sorted(y) forall { z => z._1._1 == z._2._1 && SameAPI(z._1._2, z._2._2)})
|
||||
}
|
||||
areEqual(internal, o.internal) && areEqual(external, o.external)
|
||||
}
|
||||
case _ => false
|
||||
}
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: MAPIs => {
|
||||
def areEqual[T](x: Map[T, Source], y: Map[T, Source])(implicit ord: math.Ordering[T]) = {
|
||||
x.size == y.size && (sorted(x) zip sorted(y) forall { z => z._1._1 == z._2._1 && SameAPI(z._1._2, z._2._2) })
|
||||
}
|
||||
areEqual(internal, o.internal) && areEqual(external, o.external)
|
||||
}
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override lazy val hashCode: Int = {
|
||||
def hash[T](m: Map[T, Source])(implicit ord: math.Ordering[T]) = sorted(m).map(x => (x._1, x._2.apiHash).hashCode).hashCode
|
||||
(hash(internal), hash(external)).hashCode
|
||||
}
|
||||
override lazy val hashCode: Int = {
|
||||
def hash[T](m: Map[T, Source])(implicit ord: math.Ordering[T]) = sorted(m).map(x => (x._1, x._2.apiHash).hashCode).hashCode
|
||||
(hash(internal), hash(external)).hashCode
|
||||
}
|
||||
|
||||
override def toString: String = "API(internal: %d, external: %d)".format(internal.size, external.size)
|
||||
override def toString: String = "API(internal: %d, external: %d)".format(internal.size, external.size)
|
||||
|
||||
private[this] def sorted[T](m: Map[T, Source])(implicit ord: math.Ordering[T]): Seq[(T, Source)] = m.toSeq.sortBy(_._1)
|
||||
private[this] def sorted[T](m: Map[T, Source])(implicit ord: math.Ordering[T]): Seq[(T, Source)] = m.toSeq.sortBy(_._1)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ package inc
|
|||
import xsbti.api.Source
|
||||
import java.io.File
|
||||
|
||||
|
||||
/**
|
||||
* The merge/groupBy functionality requires understanding of the concepts of internalizing/externalizing dependencies:
|
||||
*
|
||||
|
|
@ -23,248 +22,243 @@ import java.io.File
|
|||
* These transformations are complicated by the fact that internal dependencies are expressed as source file -> source file,
|
||||
* but external dependencies are expressed as source file -> fully-qualified class name.
|
||||
*/
|
||||
trait Analysis
|
||||
{
|
||||
val stamps: Stamps
|
||||
val apis: APIs
|
||||
/** Mappings between sources, classes, and binaries. */
|
||||
val relations: Relations
|
||||
val infos: SourceInfos
|
||||
/**
|
||||
* Information about compiler runs accumulated since `clean` command has been run.
|
||||
*
|
||||
* The main use-case for using `compilations` field is to determine how
|
||||
* many iterations it took to compilen give code. The `Compilation` object
|
||||
* are also stored in `Source` objects so there's an indirect way to recover
|
||||
* information about files being recompiled in every iteration.
|
||||
*
|
||||
* The incremental compilation algorithm doesn't use information stored in
|
||||
* `compilations`. It's safe to prune contents of that field without breaking
|
||||
* internal consistency of the entire Analysis object.
|
||||
*/
|
||||
val compilations: Compilations
|
||||
trait Analysis {
|
||||
val stamps: Stamps
|
||||
val apis: APIs
|
||||
/** Mappings between sources, classes, and binaries. */
|
||||
val relations: Relations
|
||||
val infos: SourceInfos
|
||||
/**
|
||||
* Information about compiler runs accumulated since `clean` command has been run.
|
||||
*
|
||||
* The main use-case for using `compilations` field is to determine how
|
||||
* many iterations it took to compilen give code. The `Compilation` object
|
||||
* are also stored in `Source` objects so there's an indirect way to recover
|
||||
* information about files being recompiled in every iteration.
|
||||
*
|
||||
* The incremental compilation algorithm doesn't use information stored in
|
||||
* `compilations`. It's safe to prune contents of that field without breaking
|
||||
* internal consistency of the entire Analysis object.
|
||||
*/
|
||||
val compilations: Compilations
|
||||
|
||||
/** Concatenates Analysis objects naively, i.e., doesn't internalize external deps on added files. See `Analysis.merge`. */
|
||||
def ++ (other: Analysis): Analysis
|
||||
/** Concatenates Analysis objects naively, i.e., doesn't internalize external deps on added files. See `Analysis.merge`. */
|
||||
def ++(other: Analysis): Analysis
|
||||
|
||||
/** Drops all analysis information for `sources` naively, i.e., doesn't externalize internal deps on removed files. */
|
||||
def -- (sources: Iterable[File]): Analysis
|
||||
/** Drops all analysis information for `sources` naively, i.e., doesn't externalize internal deps on removed files. */
|
||||
def --(sources: Iterable[File]): Analysis
|
||||
|
||||
def copy(stamps: Stamps = stamps, apis: APIs = apis, relations: Relations = relations, infos: SourceInfos = infos,
|
||||
compilations: Compilations = compilations): Analysis
|
||||
def copy(stamps: Stamps = stamps, apis: APIs = apis, relations: Relations = relations, infos: SourceInfos = infos,
|
||||
compilations: Compilations = compilations): Analysis
|
||||
|
||||
def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis
|
||||
def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis
|
||||
def addExternalDep(src: File, dep: String, api: Source, inherited: Boolean): Analysis
|
||||
def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis
|
||||
def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis
|
||||
def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis
|
||||
def addExternalDep(src: File, dep: String, api: Source, inherited: Boolean): Analysis
|
||||
def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis
|
||||
|
||||
/** Partitions this Analysis using the discriminator function. Externalizes internal deps that cross partitions. */
|
||||
def groupBy[K](discriminator: (File => K)): Map[K, Analysis]
|
||||
/** Partitions this Analysis using the discriminator function. Externalizes internal deps that cross partitions. */
|
||||
def groupBy[K](discriminator: (File => K)): Map[K, Analysis]
|
||||
|
||||
override lazy val toString = Analysis.summary(this)
|
||||
override lazy val toString = Analysis.summary(this)
|
||||
}
|
||||
|
||||
object Analysis
|
||||
{
|
||||
lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty)
|
||||
private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty,
|
||||
Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty)
|
||||
object Analysis {
|
||||
lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty)
|
||||
private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty,
|
||||
Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty)
|
||||
|
||||
/** Merge multiple analysis objects into one. Deps will be internalized as needed. */
|
||||
def merge(analyses: Traversable[Analysis]): Analysis = {
|
||||
if (analyses.exists(_.relations.nameHashing))
|
||||
throw new IllegalArgumentException("Merging of Analyses that have" +
|
||||
"`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")
|
||||
/** Merge multiple analysis objects into one. Deps will be internalized as needed. */
|
||||
def merge(analyses: Traversable[Analysis]): Analysis = {
|
||||
if (analyses.exists(_.relations.nameHashing))
|
||||
throw new IllegalArgumentException("Merging of Analyses that have" +
|
||||
"`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")
|
||||
|
||||
// Merge the Relations, internalizing deps as needed.
|
||||
val mergedSrcProd = Relation.merge(analyses map { _.relations.srcProd })
|
||||
val mergedBinaryDep = Relation.merge(analyses map { _.relations.binaryDep })
|
||||
val mergedClasses = Relation.merge(analyses map { _.relations.classes })
|
||||
// Merge the Relations, internalizing deps as needed.
|
||||
val mergedSrcProd = Relation.merge(analyses map { _.relations.srcProd })
|
||||
val mergedBinaryDep = Relation.merge(analyses map { _.relations.binaryDep })
|
||||
val mergedClasses = Relation.merge(analyses map { _.relations.classes })
|
||||
|
||||
val stillInternal = Relation.merge(analyses map { _.relations.direct.internal })
|
||||
val (internalized, stillExternal) = Relation.merge(analyses map { _.relations.direct.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
|
||||
val internalizedFiles = Relation.reconstruct(internalized.forwardMap mapValues { _ flatMap mergedClasses.reverse })
|
||||
val mergedInternal = stillInternal ++ internalizedFiles
|
||||
val stillInternal = Relation.merge(analyses map { _.relations.direct.internal })
|
||||
val (internalized, stillExternal) = Relation.merge(analyses map { _.relations.direct.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
|
||||
val internalizedFiles = Relation.reconstruct(internalized.forwardMap mapValues { _ flatMap mergedClasses.reverse })
|
||||
val mergedInternal = stillInternal ++ internalizedFiles
|
||||
|
||||
val stillInternalPI = Relation.merge(analyses map { _.relations.publicInherited.internal })
|
||||
val (internalizedPI, stillExternalPI) = Relation.merge(analyses map { _.relations.publicInherited.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
|
||||
val internalizedFilesPI = Relation.reconstruct(internalizedPI.forwardMap mapValues { _ flatMap mergedClasses.reverse })
|
||||
val mergedInternalPI = stillInternalPI ++ internalizedFilesPI
|
||||
val stillInternalPI = Relation.merge(analyses map { _.relations.publicInherited.internal })
|
||||
val (internalizedPI, stillExternalPI) = Relation.merge(analyses map { _.relations.publicInherited.external }) partition { case (a, b) => mergedClasses._2s.contains(b) }
|
||||
val internalizedFilesPI = Relation.reconstruct(internalizedPI.forwardMap mapValues { _ flatMap mergedClasses.reverse })
|
||||
val mergedInternalPI = stillInternalPI ++ internalizedFilesPI
|
||||
|
||||
val mergedRelations = Relations.make(
|
||||
mergedSrcProd,
|
||||
mergedBinaryDep,
|
||||
Relations.makeSource(mergedInternal, stillExternal),
|
||||
Relations.makeSource(mergedInternalPI, stillExternalPI),
|
||||
mergedClasses
|
||||
)
|
||||
val mergedRelations = Relations.make(
|
||||
mergedSrcProd,
|
||||
mergedBinaryDep,
|
||||
Relations.makeSource(mergedInternal, stillExternal),
|
||||
Relations.makeSource(mergedInternalPI, stillExternalPI),
|
||||
mergedClasses
|
||||
)
|
||||
|
||||
// Merge the APIs, internalizing APIs for targets of dependencies we internalized above.
|
||||
val concatenatedAPIs = (APIs.empty /: (analyses map {_.apis}))(_ ++ _)
|
||||
val stillInternalAPIs = concatenatedAPIs.internal
|
||||
val (internalizedAPIs, stillExternalAPIs) = concatenatedAPIs.external partition { x: (String, Source) => internalized._2s.contains(x._1) }
|
||||
val internalizedFilesAPIs = internalizedAPIs flatMap {
|
||||
case (cls: String, source: Source) => mergedRelations.definesClass(cls) map { file: File => (file, concatenatedAPIs.internalAPI(file)) }
|
||||
}
|
||||
val mergedAPIs = APIs(stillInternalAPIs ++ internalizedFilesAPIs, stillExternalAPIs)
|
||||
// Merge the APIs, internalizing APIs for targets of dependencies we internalized above.
|
||||
val concatenatedAPIs = (APIs.empty /: (analyses map { _.apis }))(_ ++ _)
|
||||
val stillInternalAPIs = concatenatedAPIs.internal
|
||||
val (internalizedAPIs, stillExternalAPIs) = concatenatedAPIs.external partition { x: (String, Source) => internalized._2s.contains(x._1) }
|
||||
val internalizedFilesAPIs = internalizedAPIs flatMap {
|
||||
case (cls: String, source: Source) => mergedRelations.definesClass(cls) map { file: File => (file, concatenatedAPIs.internalAPI(file)) }
|
||||
}
|
||||
val mergedAPIs = APIs(stillInternalAPIs ++ internalizedFilesAPIs, stillExternalAPIs)
|
||||
|
||||
val mergedStamps = Stamps.merge(analyses map { _.stamps })
|
||||
val mergedInfos = SourceInfos.merge(analyses map { _.infos })
|
||||
val mergedCompilations = Compilations.merge(analyses map { _.compilations })
|
||||
val mergedStamps = Stamps.merge(analyses map { _.stamps })
|
||||
val mergedInfos = SourceInfos.merge(analyses map { _.infos })
|
||||
val mergedCompilations = Compilations.merge(analyses map { _.compilations })
|
||||
|
||||
new MAnalysis(mergedStamps, mergedAPIs, mergedRelations, mergedInfos, mergedCompilations)
|
||||
}
|
||||
new MAnalysis(mergedStamps, mergedAPIs, mergedRelations, mergedInfos, mergedCompilations)
|
||||
}
|
||||
|
||||
def summary(a: Analysis): String =
|
||||
{
|
||||
val (j, s) = a.apis.allInternalSources.partition(_.getName.endsWith(".java"))
|
||||
val c = a.stamps.allProducts
|
||||
val ext = a.apis.allExternals
|
||||
val jars = a.relations.allBinaryDeps.filter(_.getName.endsWith(".jar"))
|
||||
val unreportedCount = a.infos.allInfos.values.map(_.unreportedProblems.size).sum
|
||||
val sections =
|
||||
counted("Scala source", "", "s", s.size) ++
|
||||
counted("Java source", "", "s", j.size) ++
|
||||
counted("class", "", "es", c.size) ++
|
||||
counted("external source dependenc", "y", "ies", ext.size) ++
|
||||
counted("binary dependenc", "y", "ies", jars.size) ++
|
||||
counted("unreported warning", "", "s", unreportedCount)
|
||||
sections.mkString("Analysis: ", ", ", "")
|
||||
}
|
||||
def summary(a: Analysis): String =
|
||||
{
|
||||
val (j, s) = a.apis.allInternalSources.partition(_.getName.endsWith(".java"))
|
||||
val c = a.stamps.allProducts
|
||||
val ext = a.apis.allExternals
|
||||
val jars = a.relations.allBinaryDeps.filter(_.getName.endsWith(".jar"))
|
||||
val unreportedCount = a.infos.allInfos.values.map(_.unreportedProblems.size).sum
|
||||
val sections =
|
||||
counted("Scala source", "", "s", s.size) ++
|
||||
counted("Java source", "", "s", j.size) ++
|
||||
counted("class", "", "es", c.size) ++
|
||||
counted("external source dependenc", "y", "ies", ext.size) ++
|
||||
counted("binary dependenc", "y", "ies", jars.size) ++
|
||||
counted("unreported warning", "", "s", unreportedCount)
|
||||
sections.mkString("Analysis: ", ", ", "")
|
||||
}
|
||||
|
||||
def counted(prefix: String, single: String, plural: String, count: Int): Option[String] =
|
||||
count match
|
||||
{
|
||||
case 0 => None
|
||||
case 1 => Some("1 " + prefix + single)
|
||||
case x => Some(x.toString + " " + prefix + plural)
|
||||
}
|
||||
def counted(prefix: String, single: String, plural: String, count: Int): Option[String] =
|
||||
count match {
|
||||
case 0 => None
|
||||
case 1 => Some("1 " + prefix + single)
|
||||
case x => Some(x.toString + " " + prefix + plural)
|
||||
}
|
||||
|
||||
}
|
||||
private class MAnalysis(val stamps: Stamps, val apis: APIs, val relations: Relations, val infos: SourceInfos, val compilations: Compilations) extends Analysis
|
||||
{
|
||||
def ++ (o: Analysis): Analysis = new MAnalysis(stamps ++ o.stamps, apis ++ o.apis, relations ++ o.relations,
|
||||
infos ++ o.infos, compilations ++ o.compilations)
|
||||
private class MAnalysis(val stamps: Stamps, val apis: APIs, val relations: Relations, val infos: SourceInfos, val compilations: Compilations) extends Analysis {
|
||||
def ++(o: Analysis): Analysis = new MAnalysis(stamps ++ o.stamps, apis ++ o.apis, relations ++ o.relations,
|
||||
infos ++ o.infos, compilations ++ o.compilations)
|
||||
|
||||
def -- (sources: Iterable[File]): Analysis =
|
||||
{
|
||||
val newRelations = relations -- sources
|
||||
def keep[T](f: (Relations, T) => Set[_]): T => Boolean = !f(newRelations, _).isEmpty
|
||||
def --(sources: Iterable[File]): Analysis =
|
||||
{
|
||||
val newRelations = relations -- sources
|
||||
def keep[T](f: (Relations, T) => Set[_]): T => Boolean = !f(newRelations, _).isEmpty
|
||||
|
||||
val newAPIs = apis.removeInternal(sources).filterExt( keep(_ usesExternal _) )
|
||||
val newStamps = stamps.filter( keep(_ produced _), sources, keep(_ usesBinary _))
|
||||
val newInfos = infos -- sources
|
||||
new MAnalysis(newStamps, newAPIs, newRelations, newInfos, compilations)
|
||||
}
|
||||
val newAPIs = apis.removeInternal(sources).filterExt(keep(_ usesExternal _))
|
||||
val newStamps = stamps.filter(keep(_ produced _), sources, keep(_ usesBinary _))
|
||||
val newInfos = infos -- sources
|
||||
new MAnalysis(newStamps, newAPIs, newRelations, newInfos, compilations)
|
||||
}
|
||||
|
||||
def copy(stamps: Stamps, apis: APIs, relations: Relations, infos: SourceInfos, compilations: Compilations = compilations): Analysis =
|
||||
new MAnalysis(stamps, apis, relations, infos, compilations)
|
||||
def copy(stamps: Stamps, apis: APIs, relations: Relations, infos: SourceInfos, compilations: Compilations = compilations): Analysis =
|
||||
new MAnalysis(stamps, apis, relations, infos, compilations)
|
||||
|
||||
def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis =
|
||||
copy( stamps.markInternalSource(src, stamp), apis.markInternalSource(src, api), relations.addInternalSrcDeps(src, directInternal, inheritedInternal), infos.add(src, info) )
|
||||
def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis =
|
||||
copy(stamps.markInternalSource(src, stamp), apis.markInternalSource(src, api), relations.addInternalSrcDeps(src, directInternal, inheritedInternal), infos.add(src, info))
|
||||
|
||||
def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis =
|
||||
copy( stamps.markBinary(dep, className, stamp), apis, relations.addBinaryDep(src, dep), infos )
|
||||
def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis =
|
||||
copy(stamps.markBinary(dep, className, stamp), apis, relations.addBinaryDep(src, dep), infos)
|
||||
|
||||
def addExternalDep(src: File, dep: String, depAPI: Source, inherited: Boolean): Analysis =
|
||||
copy( stamps, apis.markExternalAPI(dep, depAPI), relations.addExternalDep(src, dep, inherited), infos )
|
||||
def addExternalDep(src: File, dep: String, depAPI: Source, inherited: Boolean): Analysis =
|
||||
copy(stamps, apis.markExternalAPI(dep, depAPI), relations.addExternalDep(src, dep, inherited), infos)
|
||||
|
||||
def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis =
|
||||
copy( stamps.markProduct(product, stamp), apis, relations.addProduct(src, product, name), infos )
|
||||
def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis =
|
||||
copy(stamps.markProduct(product, stamp), apis, relations.addProduct(src, product, name), infos)
|
||||
|
||||
def groupBy[K](discriminator: File => K): Map[K, Analysis] = {
|
||||
if (relations.nameHashing)
|
||||
throw new UnsupportedOperationException("Grouping of Analyses that have" +
|
||||
"`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")
|
||||
def groupBy[K](discriminator: File => K): Map[K, Analysis] = {
|
||||
if (relations.nameHashing)
|
||||
throw new UnsupportedOperationException("Grouping of Analyses that have" +
|
||||
"`relations.memberRefAndInheritanceDeps` set to `true` is not supported.")
|
||||
|
||||
def discriminator1(x: (File, _)) = discriminator(x._1) // Apply the discriminator to the first coordinate.
|
||||
def discriminator1(x: (File, _)) = discriminator(x._1) // Apply the discriminator to the first coordinate.
|
||||
|
||||
val kSrcProd = relations.srcProd.groupBy(discriminator1)
|
||||
val kBinaryDep = relations.binaryDep.groupBy(discriminator1)
|
||||
val kClasses = relations.classes.groupBy(discriminator1)
|
||||
val kSourceInfos = infos.allInfos.groupBy(discriminator1)
|
||||
val kSrcProd = relations.srcProd.groupBy(discriminator1)
|
||||
val kBinaryDep = relations.binaryDep.groupBy(discriminator1)
|
||||
val kClasses = relations.classes.groupBy(discriminator1)
|
||||
val kSourceInfos = infos.allInfos.groupBy(discriminator1)
|
||||
|
||||
val (kStillInternal, kExternalized) = relations.direct.internal partition { case (a, b) => discriminator(a) == discriminator(b) } match {
|
||||
case (i, e) => (i.groupBy(discriminator1), e.groupBy(discriminator1))
|
||||
}
|
||||
val kStillExternal = relations.direct.external.groupBy(discriminator1)
|
||||
val (kStillInternal, kExternalized) = relations.direct.internal partition { case (a, b) => discriminator(a) == discriminator(b) } match {
|
||||
case (i, e) => (i.groupBy(discriminator1), e.groupBy(discriminator1))
|
||||
}
|
||||
val kStillExternal = relations.direct.external.groupBy(discriminator1)
|
||||
|
||||
// Find all possible groups.
|
||||
val allMaps = kSrcProd :: kBinaryDep :: kStillInternal :: kExternalized :: kStillExternal :: kClasses :: kSourceInfos :: Nil
|
||||
val allKeys: Set[K] = (Set.empty[K] /: (allMaps map { _.keySet }))(_ ++ _)
|
||||
|
||||
// Find all possible groups.
|
||||
val allMaps = kSrcProd :: kBinaryDep :: kStillInternal :: kExternalized :: kStillExternal :: kClasses :: kSourceInfos :: Nil
|
||||
val allKeys: Set[K] = (Set.empty[K] /: (allMaps map { _.keySet }))(_ ++ _)
|
||||
// Map from file to a single representative class defined in that file.
|
||||
// This is correct (for now): currently all classes in an external dep share the same Source object,
|
||||
// and a change to any of them will act like a change to all of them.
|
||||
// We don't use all the top-level classes in source.api.definitions, even though that's more intuitively
|
||||
// correct, because this can cause huge bloat of the analysis file.
|
||||
def getRepresentativeClass(file: File): Option[String] = apis.internalAPI(file).api.definitions.headOption map { _.name }
|
||||
|
||||
// Map from file to a single representative class defined in that file.
|
||||
// This is correct (for now): currently all classes in an external dep share the same Source object,
|
||||
// and a change to any of them will act like a change to all of them.
|
||||
// We don't use all the top-level classes in source.api.definitions, even though that's more intuitively
|
||||
// correct, because this can cause huge bloat of the analysis file.
|
||||
def getRepresentativeClass(file: File): Option[String] = apis.internalAPI(file).api.definitions.headOption map { _.name }
|
||||
// Create an Analysis for each group.
|
||||
(for (k <- allKeys) yield {
|
||||
def getFrom[A, B](m: Map[K, Relation[A, B]]): Relation[A, B] = m.getOrElse(k, Relation.empty)
|
||||
|
||||
// Create an Analysis for each group.
|
||||
(for (k <- allKeys) yield {
|
||||
def getFrom[A, B](m: Map[K, Relation[A, B]]): Relation[A, B] = m.getOrElse(k, Relation.empty)
|
||||
// Products and binary deps.
|
||||
val srcProd = getFrom(kSrcProd)
|
||||
val binaryDep = getFrom(kBinaryDep)
|
||||
|
||||
// Products and binary deps.
|
||||
val srcProd = getFrom(kSrcProd)
|
||||
val binaryDep = getFrom(kBinaryDep)
|
||||
// Direct Sources.
|
||||
val stillInternal = getFrom(kStillInternal)
|
||||
val stillExternal = getFrom(kStillExternal)
|
||||
val externalized = getFrom(kExternalized)
|
||||
val externalizedClasses = Relation.reconstruct(externalized.forwardMap mapValues { _ flatMap getRepresentativeClass })
|
||||
val newExternal = stillExternal ++ externalizedClasses
|
||||
|
||||
// Direct Sources.
|
||||
val stillInternal = getFrom(kStillInternal)
|
||||
val stillExternal = getFrom(kStillExternal)
|
||||
val externalized = getFrom(kExternalized)
|
||||
val externalizedClasses = Relation.reconstruct(externalized.forwardMap mapValues { _ flatMap getRepresentativeClass })
|
||||
val newExternal = stillExternal ++ externalizedClasses
|
||||
// Public inherited sources.
|
||||
val stillInternalPI = stillInternal filter relations.publicInherited.internal.contains
|
||||
val stillExternalPI = stillExternal filter relations.publicInherited.external.contains
|
||||
val externalizedPI = externalized filter relations.publicInherited.internal.contains
|
||||
val externalizedClassesPI = Relation.reconstruct(externalizedPI.forwardMap mapValues { _ flatMap getRepresentativeClass })
|
||||
val newExternalPI = stillExternalPI ++ externalizedClassesPI
|
||||
|
||||
// Public inherited sources.
|
||||
val stillInternalPI = stillInternal filter relations.publicInherited.internal.contains
|
||||
val stillExternalPI = stillExternal filter relations.publicInherited.external.contains
|
||||
val externalizedPI = externalized filter relations.publicInherited.internal.contains
|
||||
val externalizedClassesPI = Relation.reconstruct(externalizedPI.forwardMap mapValues { _ flatMap getRepresentativeClass })
|
||||
val newExternalPI = stillExternalPI ++ externalizedClassesPI
|
||||
// Class names.
|
||||
val classes = getFrom(kClasses)
|
||||
|
||||
// Class names.
|
||||
val classes = getFrom(kClasses)
|
||||
// Create new relations for this group.
|
||||
val newRelations = Relations.make(
|
||||
srcProd,
|
||||
binaryDep,
|
||||
Relations.makeSource(stillInternal, newExternal),
|
||||
Relations.makeSource(stillInternalPI, newExternalPI),
|
||||
classes
|
||||
)
|
||||
|
||||
// Create new relations for this group.
|
||||
val newRelations = Relations.make(
|
||||
srcProd,
|
||||
binaryDep,
|
||||
Relations.makeSource(stillInternal, newExternal),
|
||||
Relations.makeSource(stillInternalPI, newExternalPI),
|
||||
classes
|
||||
)
|
||||
// Compute new API mappings.
|
||||
def apisFor[T](m: Map[T, Source], x: Traversable[T]): Map[T, Source] =
|
||||
(x map { e: T => (e, m.get(e)) } collect { case (t, Some(source)) => (t, source) }).toMap
|
||||
val stillInternalAPIs = apisFor(apis.internal, srcProd._1s)
|
||||
val stillExternalAPIs = apisFor(apis.external, stillExternal._2s)
|
||||
val externalizedAPIs = apisFor(apis.internal, externalized._2s)
|
||||
val externalizedClassesAPIs = externalizedAPIs flatMap {
|
||||
case (file: File, source: Source) => getRepresentativeClass(file) map { cls: String => (cls, source) }
|
||||
}
|
||||
val newAPIs = APIs(stillInternalAPIs, stillExternalAPIs ++ externalizedClassesAPIs)
|
||||
|
||||
// Compute new API mappings.
|
||||
def apisFor[T](m: Map[T, Source], x: Traversable[T]): Map[T, Source] =
|
||||
(x map { e: T => (e, m.get(e)) } collect { case (t, Some(source)) => (t, source)}).toMap
|
||||
val stillInternalAPIs = apisFor(apis.internal, srcProd._1s)
|
||||
val stillExternalAPIs = apisFor(apis.external, stillExternal._2s)
|
||||
val externalizedAPIs = apisFor(apis.internal, externalized._2s)
|
||||
val externalizedClassesAPIs = externalizedAPIs flatMap {
|
||||
case (file: File, source: Source) => getRepresentativeClass(file) map { cls: String => (cls, source) }
|
||||
}
|
||||
val newAPIs = APIs(stillInternalAPIs, stillExternalAPIs ++ externalizedClassesAPIs)
|
||||
// New stamps.
|
||||
val newStamps = Stamps(
|
||||
stamps.products.filterKeys(srcProd._2s.contains),
|
||||
stamps.sources.filterKeys({ discriminator(_) == k }),
|
||||
stamps.binaries.filterKeys(binaryDep._2s.contains),
|
||||
stamps.classNames.filterKeys(binaryDep._2s.contains))
|
||||
|
||||
// New stamps.
|
||||
val newStamps = Stamps(
|
||||
stamps.products.filterKeys(srcProd._2s.contains),
|
||||
stamps.sources.filterKeys({ discriminator(_) == k }),
|
||||
stamps.binaries.filterKeys(binaryDep._2s.contains),
|
||||
stamps.classNames.filterKeys(binaryDep._2s.contains))
|
||||
// New infos.
|
||||
val newSourceInfos = SourceInfos.make(kSourceInfos.getOrElse(k, Map.empty))
|
||||
|
||||
// New infos.
|
||||
val newSourceInfos = SourceInfos.make(kSourceInfos.getOrElse(k, Map.empty))
|
||||
(k, new MAnalysis(newStamps, newAPIs, newRelations, newSourceInfos, compilations))
|
||||
}).toMap
|
||||
}
|
||||
|
||||
(k, new MAnalysis(newStamps, newAPIs, newRelations, newSourceInfos, compilations))
|
||||
}).toMap
|
||||
}
|
||||
override def equals(other: Any) = other match {
|
||||
// Note: Equality doesn't consider source infos or compilations.
|
||||
case o: MAnalysis => stamps == o.stamps && apis == o.apis && relations == o.relations
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
// Note: Equality doesn't consider source infos or compilations.
|
||||
case o: MAnalysis => stamps == o.stamps && apis == o.apis && relations == o.relations
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override lazy val hashCode = (stamps :: apis :: relations :: Nil).hashCode
|
||||
override lazy val hashCode = (stamps :: apis :: relations :: Nil).hashCode
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,30 +4,27 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
trait AnalysisStore
|
||||
{
|
||||
def set(analysis: Analysis, setup: CompileSetup): Unit
|
||||
def get(): Option[(Analysis, CompileSetup)]
|
||||
trait AnalysisStore {
|
||||
def set(analysis: Analysis, setup: CompileSetup): Unit
|
||||
def get(): Option[(Analysis, CompileSetup)]
|
||||
}
|
||||
|
||||
object AnalysisStore
|
||||
{
|
||||
def cached(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
|
||||
private var last: Option[(Analysis, CompileSetup)] = None
|
||||
def set(analysis: Analysis, setup: CompileSetup)
|
||||
{
|
||||
backing.set(analysis, setup)
|
||||
last = Some( (analysis, setup) )
|
||||
}
|
||||
def get(): Option[(Analysis, CompileSetup)] =
|
||||
{
|
||||
if(last.isEmpty)
|
||||
last = backing.get()
|
||||
last
|
||||
}
|
||||
}
|
||||
def sync(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
|
||||
def set(analysis: Analysis, setup: CompileSetup): Unit = synchronized { backing.set(analysis, setup) }
|
||||
def get(): Option[(Analysis, CompileSetup)] = synchronized { backing.get() }
|
||||
}
|
||||
object AnalysisStore {
|
||||
def cached(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
|
||||
private var last: Option[(Analysis, CompileSetup)] = None
|
||||
def set(analysis: Analysis, setup: CompileSetup) {
|
||||
backing.set(analysis, setup)
|
||||
last = Some((analysis, setup))
|
||||
}
|
||||
def get(): Option[(Analysis, CompileSetup)] =
|
||||
{
|
||||
if (last.isEmpty)
|
||||
last = backing.get()
|
||||
last
|
||||
}
|
||||
}
|
||||
def sync(backing: AnalysisStore): AnalysisStore = new AnalysisStore {
|
||||
def set(analysis: Analysis, setup: CompileSetup): Unit = synchronized { backing.set(analysis, setup) }
|
||||
def get(): Option[(Analysis, CompileSetup)] = synchronized { backing.get() }
|
||||
}
|
||||
}
|
||||
|
|
@ -6,14 +6,13 @@ package inc
|
|||
|
||||
import xsbt.api.NameChanges
|
||||
import java.io.File
|
||||
import xsbti.api.{_internalOnly_NameHashes => NameHashes}
|
||||
import xsbti.api.{_internalOnly_NameHash => NameHash}
|
||||
import xsbti.api.{ _internalOnly_NameHashes => NameHashes }
|
||||
import xsbti.api.{ _internalOnly_NameHash => NameHash }
|
||||
|
||||
final case class InitialChanges(internalSrc: Changes[File], removedProducts: Set[File], binaryDeps: Set[File], external: APIChanges[String])
|
||||
final class APIChanges[T](val apiChanges: Iterable[APIChange[T]])
|
||||
{
|
||||
override def toString = "API Changes: " + apiChanges
|
||||
def allModified: Iterable[T] = apiChanges.map(_.modified)
|
||||
final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) {
|
||||
override def toString = "API Changes: " + apiChanges
|
||||
def allModified: Iterable[T] = apiChanges.map(_.modified)
|
||||
}
|
||||
|
||||
sealed abstract class APIChange[T](val modified: T)
|
||||
|
|
@ -40,28 +39,26 @@ final case class NamesChange[T](modified0: T, modifiedNames: ModifiedNames) exte
|
|||
* due to difficulty of reasoning about the implicit scope.
|
||||
*/
|
||||
final case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) {
|
||||
override def toString: String =
|
||||
s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})"
|
||||
override def toString: String =
|
||||
s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})"
|
||||
}
|
||||
object ModifiedNames {
|
||||
def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = {
|
||||
val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet)
|
||||
val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet)
|
||||
ModifiedNames(modifiedRegularNames, modifiedImplicitNames)
|
||||
}
|
||||
private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = {
|
||||
val differentNameHashes = (xs union ys) diff (xs intersect ys)
|
||||
differentNameHashes.map(_.name)
|
||||
}
|
||||
def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = {
|
||||
val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet)
|
||||
val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet)
|
||||
ModifiedNames(modifiedRegularNames, modifiedImplicitNames)
|
||||
}
|
||||
private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = {
|
||||
val differentNameHashes = (xs union ys) diff (xs intersect ys)
|
||||
differentNameHashes.map(_.name)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
trait Changes[A]
|
||||
{
|
||||
def added: Set[A]
|
||||
def removed: Set[A]
|
||||
def changed: Set[A]
|
||||
def unmodified: Set[A]
|
||||
trait Changes[A] {
|
||||
def added: Set[A]
|
||||
def removed: Set[A]
|
||||
def changed: Set[A]
|
||||
def unmodified: Set[A]
|
||||
}
|
||||
|
||||
sealed abstract class Change(val file: File)
|
||||
|
|
|
|||
|
|
@ -1,81 +1,79 @@
|
|||
package sbt.inc
|
||||
|
||||
import sbt.IO
|
||||
import java.io.File
|
||||
import collection.mutable
|
||||
import sbt.IO
|
||||
import java.io.File
|
||||
import collection.mutable
|
||||
|
||||
/** During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files.
|
||||
* A ClassfileManager can be used only once.*/
|
||||
trait ClassfileManager
|
||||
{
|
||||
/** Called once per compilation step with the class files to delete prior to that step's compilation.
|
||||
* The files in `classes` must not exist if this method returns normally.
|
||||
* Any empty ancestor directories of deleted files must not exist either.*/
|
||||
def delete(classes: Iterable[File]): Unit
|
||||
/**
|
||||
* During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files.
|
||||
* A ClassfileManager can be used only once.
|
||||
*/
|
||||
trait ClassfileManager {
|
||||
/**
|
||||
* Called once per compilation step with the class files to delete prior to that step's compilation.
|
||||
* The files in `classes` must not exist if this method returns normally.
|
||||
* Any empty ancestor directories of deleted files must not exist either.
|
||||
*/
|
||||
def delete(classes: Iterable[File]): Unit
|
||||
|
||||
/** Called once per compilation step with the class files generated during that step.*/
|
||||
def generated(classes: Iterable[File]): Unit
|
||||
/** Called once per compilation step with the class files generated during that step.*/
|
||||
def generated(classes: Iterable[File]): Unit
|
||||
|
||||
/** Called once at the end of the whole compilation run, with `success` indicating whether compilation succeeded (true) or not (false).*/
|
||||
def complete(success: Boolean): Unit
|
||||
/** Called once at the end of the whole compilation run, with `success` indicating whether compilation succeeded (true) or not (false).*/
|
||||
def complete(success: Boolean): Unit
|
||||
}
|
||||
|
||||
object ClassfileManager
|
||||
{
|
||||
/** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. */
|
||||
val deleteImmediately: () => ClassfileManager = () => new ClassfileManager
|
||||
{
|
||||
def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes)
|
||||
def generated(classes: Iterable[File]) {}
|
||||
def complete(success: Boolean) {}
|
||||
}
|
||||
@deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5")
|
||||
def transactional(tempDir0: File): () => ClassfileManager =
|
||||
transactional(tempDir0, sbt.Logger.Null)
|
||||
/** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/
|
||||
def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager
|
||||
{
|
||||
val tempDir = tempDir0.getCanonicalFile
|
||||
IO.delete(tempDir)
|
||||
IO.createDirectory(tempDir)
|
||||
logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir")
|
||||
object ClassfileManager {
|
||||
/** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. */
|
||||
val deleteImmediately: () => ClassfileManager = () => new ClassfileManager {
|
||||
def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes)
|
||||
def generated(classes: Iterable[File]) {}
|
||||
def complete(success: Boolean) {}
|
||||
}
|
||||
@deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5")
|
||||
def transactional(tempDir0: File): () => ClassfileManager =
|
||||
transactional(tempDir0, sbt.Logger.Null)
|
||||
/** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/
|
||||
def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager {
|
||||
val tempDir = tempDir0.getCanonicalFile
|
||||
IO.delete(tempDir)
|
||||
IO.createDirectory(tempDir)
|
||||
logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir")
|
||||
|
||||
private[this] val generatedClasses = new mutable.HashSet[File]
|
||||
private[this] val movedClasses = new mutable.HashMap[File, File]
|
||||
private[this] val generatedClasses = new mutable.HashSet[File]
|
||||
private[this] val movedClasses = new mutable.HashMap[File, File]
|
||||
|
||||
private def showFiles(files: Iterable[File]): String = files.map(f => s"\t$f").mkString("\n")
|
||||
def delete(classes: Iterable[File])
|
||||
{
|
||||
logger.debug(s"About to delete class files:\n${showFiles(classes)}")
|
||||
val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c))
|
||||
logger.debug(s"We backup classs files:\n${showFiles(toBeBackedUp)}")
|
||||
for(c <- toBeBackedUp) {
|
||||
movedClasses.put(c, move(c))
|
||||
}
|
||||
IO.deleteFilesEmptyDirs(classes)
|
||||
}
|
||||
def generated(classes: Iterable[File]): Unit = {
|
||||
logger.debug(s"Registering generated classes:\n${showFiles(classes)}")
|
||||
generatedClasses ++= classes
|
||||
}
|
||||
def complete(success: Boolean)
|
||||
{
|
||||
if(!success) {
|
||||
logger.debug("Rolling back changes to class files.")
|
||||
logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}")
|
||||
IO.deleteFilesEmptyDirs(generatedClasses)
|
||||
logger.debug(s"Restoring class files: \n${showFiles(movedClasses.map(_._1))}")
|
||||
for( (orig, tmp) <- movedClasses ) IO.move(tmp, orig)
|
||||
}
|
||||
logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir")
|
||||
IO.delete(tempDir)
|
||||
}
|
||||
private def showFiles(files: Iterable[File]): String = files.map(f => s"\t$f").mkString("\n")
|
||||
def delete(classes: Iterable[File]) {
|
||||
logger.debug(s"About to delete class files:\n${showFiles(classes)}")
|
||||
val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c))
|
||||
logger.debug(s"We backup classs files:\n${showFiles(toBeBackedUp)}")
|
||||
for (c <- toBeBackedUp) {
|
||||
movedClasses.put(c, move(c))
|
||||
}
|
||||
IO.deleteFilesEmptyDirs(classes)
|
||||
}
|
||||
def generated(classes: Iterable[File]): Unit = {
|
||||
logger.debug(s"Registering generated classes:\n${showFiles(classes)}")
|
||||
generatedClasses ++= classes
|
||||
}
|
||||
def complete(success: Boolean) {
|
||||
if (!success) {
|
||||
logger.debug("Rolling back changes to class files.")
|
||||
logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}")
|
||||
IO.deleteFilesEmptyDirs(generatedClasses)
|
||||
logger.debug(s"Restoring class files: \n${showFiles(movedClasses.map(_._1))}")
|
||||
for ((orig, tmp) <- movedClasses) IO.move(tmp, orig)
|
||||
}
|
||||
logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir")
|
||||
IO.delete(tempDir)
|
||||
}
|
||||
|
||||
def move(c: File): File =
|
||||
{
|
||||
val target = File.createTempFile("sbt", ".class", tempDir)
|
||||
IO.move(c, target)
|
||||
target
|
||||
}
|
||||
}
|
||||
def move(c: File): File =
|
||||
{
|
||||
val target = File.createTempFile("sbt", ".class", tempDir)
|
||||
IO.move(c, target)
|
||||
target
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -12,7 +12,7 @@ trait Compilations {
|
|||
object Compilations {
|
||||
val empty: Compilations = new MCompilations(Seq.empty)
|
||||
def make(s: Seq[Compilation]): Compilations = new MCompilations(s)
|
||||
def merge(s: Traversable[Compilations]): Compilations = make((s flatMap { _.allCompilations }).toSeq.distinct)
|
||||
def merge(s: Traversable[Compilations]): Compilations = make((s flatMap { _.allCompilations }).toSeq.distinct)
|
||||
}
|
||||
|
||||
private final class MCompilations(val allCompilations: Seq[Compilation]) extends Compilations {
|
||||
|
|
|
|||
|
|
@ -4,194 +4,192 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import xsbti.api.{Source, SourceAPI, Compilation, OutputSetting, _internalOnly_NameHashes}
|
||||
import xsbti.compile.{DependencyChanges, Output, SingleOutput, MultipleOutput}
|
||||
import xsbti.{Position,Problem,Severity}
|
||||
import Logger.{m2o, problem}
|
||||
import xsbti.api.{ Source, SourceAPI, Compilation, OutputSetting, _internalOnly_NameHashes }
|
||||
import xsbti.compile.{ DependencyChanges, Output, SingleOutput, MultipleOutput }
|
||||
import xsbti.{ Position, Problem, Severity }
|
||||
import Logger.{ m2o, problem }
|
||||
import java.io.File
|
||||
import xsbti.api.Definition
|
||||
|
||||
object IncrementalCompile
|
||||
{
|
||||
def apply(sources: Set[File], entry: String => Option[File],
|
||||
compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit,
|
||||
previous: Analysis,
|
||||
forEntry: File => Option[Analysis],
|
||||
output: Output, log: Logger,
|
||||
options: IncOptions): (Boolean, Analysis) =
|
||||
{
|
||||
val current = Stamps.initial(Stamp.lastModified, Stamp.hash, Stamp.lastModified)
|
||||
val internalMap = (f: File) => previous.relations.produced(f).headOption
|
||||
val externalAPI = getExternalAPI(entry, forEntry)
|
||||
try {
|
||||
Incremental.compile(sources, entry, previous, current, forEntry, doCompile(compile, internalMap, externalAPI, current, output, options), log, options)
|
||||
} catch {
|
||||
case e: xsbti.CompileCancelled =>
|
||||
log.info("Compilation has been cancelled")
|
||||
// in case compilation got cancelled potential partial compilation results (e.g. produced classs files) got rolled back
|
||||
// and we can report back as there was no change (false) and return a previous Analysis which is still up-to-date
|
||||
(false, previous)
|
||||
}
|
||||
}
|
||||
def doCompile(compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) =
|
||||
(srcs: Set[File], changes: DependencyChanges) => {
|
||||
val callback = new AnalysisCallback(internalMap, externalAPI, current, output, options)
|
||||
compile(srcs, changes, callback)
|
||||
callback.get
|
||||
}
|
||||
def getExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): (File, String) => Option[Source] =
|
||||
(file: File,className: String) =>
|
||||
entry(className) flatMap { defines =>
|
||||
if(file != Locate.resolve(defines, className) )
|
||||
None
|
||||
else
|
||||
forEntry(defines) flatMap { analysis =>
|
||||
analysis.relations.definesClass(className).headOption flatMap { src =>
|
||||
analysis.apis.internal get src
|
||||
}
|
||||
}
|
||||
}
|
||||
object IncrementalCompile {
|
||||
def apply(sources: Set[File], entry: String => Option[File],
|
||||
compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit,
|
||||
previous: Analysis,
|
||||
forEntry: File => Option[Analysis],
|
||||
output: Output, log: Logger,
|
||||
options: IncOptions): (Boolean, Analysis) =
|
||||
{
|
||||
val current = Stamps.initial(Stamp.lastModified, Stamp.hash, Stamp.lastModified)
|
||||
val internalMap = (f: File) => previous.relations.produced(f).headOption
|
||||
val externalAPI = getExternalAPI(entry, forEntry)
|
||||
try {
|
||||
Incremental.compile(sources, entry, previous, current, forEntry, doCompile(compile, internalMap, externalAPI, current, output, options), log, options)
|
||||
} catch {
|
||||
case e: xsbti.CompileCancelled =>
|
||||
log.info("Compilation has been cancelled")
|
||||
// in case compilation got cancelled potential partial compilation results (e.g. produced classs files) got rolled back
|
||||
// and we can report back as there was no change (false) and return a previous Analysis which is still up-to-date
|
||||
(false, previous)
|
||||
}
|
||||
}
|
||||
def doCompile(compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) =
|
||||
(srcs: Set[File], changes: DependencyChanges) => {
|
||||
val callback = new AnalysisCallback(internalMap, externalAPI, current, output, options)
|
||||
compile(srcs, changes, callback)
|
||||
callback.get
|
||||
}
|
||||
def getExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): (File, String) => Option[Source] =
|
||||
(file: File, className: String) =>
|
||||
entry(className) flatMap { defines =>
|
||||
if (file != Locate.resolve(defines, className))
|
||||
None
|
||||
else
|
||||
forEntry(defines) flatMap { analysis =>
|
||||
analysis.relations.definesClass(className).headOption flatMap { src =>
|
||||
analysis.apis.internal get src
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
private final class AnalysisCallback(internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) extends xsbti.AnalysisCallback
|
||||
{
|
||||
val compilation = {
|
||||
val outputSettings = output match {
|
||||
case single: SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath))
|
||||
case multi: MultipleOutput =>
|
||||
multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)).toArray
|
||||
}
|
||||
new Compilation(System.currentTimeMillis, outputSettings)
|
||||
}
|
||||
private final class AnalysisCallback(internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) extends xsbti.AnalysisCallback {
|
||||
val compilation = {
|
||||
val outputSettings = output match {
|
||||
case single: SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath))
|
||||
case multi: MultipleOutput =>
|
||||
multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)).toArray
|
||||
}
|
||||
new Compilation(System.currentTimeMillis, outputSettings)
|
||||
}
|
||||
|
||||
override def toString = ( List("APIs", "Binary deps", "Products", "Source deps") zip List(apis, binaryDeps, classes, sourceDeps)).map { case (label, map) => label + "\n\t" + map.mkString("\n\t") }.mkString("\n")
|
||||
override def toString = (List("APIs", "Binary deps", "Products", "Source deps") zip List(apis, binaryDeps, classes, sourceDeps)).map { case (label, map) => label + "\n\t" + map.mkString("\n\t") }.mkString("\n")
|
||||
|
||||
import collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set}
|
||||
import collection.mutable.{ HashMap, HashSet, ListBuffer, Map, Set }
|
||||
|
||||
private[this] val apis = new HashMap[File, (Int, SourceAPI)]
|
||||
private[this] val usedNames = new HashMap[File, Set[String]]
|
||||
private[this] val publicNameHashes = new HashMap[File, _internalOnly_NameHashes]
|
||||
private[this] val unreporteds = new HashMap[File, ListBuffer[Problem]]
|
||||
private[this] val reporteds = new HashMap[File, ListBuffer[Problem]]
|
||||
private[this] val binaryDeps = new HashMap[File, Set[File]]
|
||||
// source file to set of generated (class file, class name)
|
||||
private[this] val classes = new HashMap[File, Set[(File, String)]]
|
||||
// generated class file to its source file
|
||||
private[this] val classToSource = new HashMap[File, File]
|
||||
// all internal source depenencies, including direct and inherited
|
||||
private[this] val sourceDeps = new HashMap[File, Set[File]]
|
||||
// inherited internal source dependencies
|
||||
private[this] val inheritedSourceDeps = new HashMap[File, Set[File]]
|
||||
// external source dependencies:
|
||||
// (internal source, external source depended on, API of external dependency, true if an inheritance dependency)
|
||||
private[this] val extSrcDeps = new ListBuffer[(File, String, Source, Boolean)]
|
||||
private[this] val binaryClassName = new HashMap[File, String]
|
||||
// source files containing a macro def.
|
||||
private[this] val macroSources = Set[File]()
|
||||
private[this] val apis = new HashMap[File, (Int, SourceAPI)]
|
||||
private[this] val usedNames = new HashMap[File, Set[String]]
|
||||
private[this] val publicNameHashes = new HashMap[File, _internalOnly_NameHashes]
|
||||
private[this] val unreporteds = new HashMap[File, ListBuffer[Problem]]
|
||||
private[this] val reporteds = new HashMap[File, ListBuffer[Problem]]
|
||||
private[this] val binaryDeps = new HashMap[File, Set[File]]
|
||||
// source file to set of generated (class file, class name)
|
||||
private[this] val classes = new HashMap[File, Set[(File, String)]]
|
||||
// generated class file to its source file
|
||||
private[this] val classToSource = new HashMap[File, File]
|
||||
// all internal source depenencies, including direct and inherited
|
||||
private[this] val sourceDeps = new HashMap[File, Set[File]]
|
||||
// inherited internal source dependencies
|
||||
private[this] val inheritedSourceDeps = new HashMap[File, Set[File]]
|
||||
// external source dependencies:
|
||||
// (internal source, external source depended on, API of external dependency, true if an inheritance dependency)
|
||||
private[this] val extSrcDeps = new ListBuffer[(File, String, Source, Boolean)]
|
||||
private[this] val binaryClassName = new HashMap[File, String]
|
||||
// source files containing a macro def.
|
||||
private[this] val macroSources = Set[File]()
|
||||
|
||||
private def add[A,B](map: Map[A,Set[B]], a: A, b: B): Unit =
|
||||
map.getOrElseUpdate(a, new HashSet[B]) += b
|
||||
private def add[A, B](map: Map[A, Set[B]], a: A, b: B): Unit =
|
||||
map.getOrElseUpdate(a, new HashSet[B]) += b
|
||||
|
||||
def problem(category: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit =
|
||||
{
|
||||
for(source <- m2o(pos.sourceFile)) {
|
||||
val map = if(reported) reporteds else unreporteds
|
||||
map.getOrElseUpdate(source, ListBuffer.empty) += Logger.problem(category, pos, msg, severity)
|
||||
}
|
||||
}
|
||||
def problem(category: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit =
|
||||
{
|
||||
for (source <- m2o(pos.sourceFile)) {
|
||||
val map = if (reported) reporteds else unreporteds
|
||||
map.getOrElseUpdate(source, ListBuffer.empty) += Logger.problem(category, pos, msg, severity)
|
||||
}
|
||||
}
|
||||
|
||||
def sourceDependency(dependsOn: File, source: File, inherited: Boolean) =
|
||||
{
|
||||
add(sourceDeps, source, dependsOn)
|
||||
if(inherited) add(inheritedSourceDeps, source, dependsOn)
|
||||
}
|
||||
def externalBinaryDependency(binary: File, className: String, source: File, inherited: Boolean)
|
||||
{
|
||||
binaryClassName.put(binary, className)
|
||||
add(binaryDeps, source, binary)
|
||||
}
|
||||
def externalSourceDependency(t4: (File, String, Source, Boolean)) = extSrcDeps += t4
|
||||
def sourceDependency(dependsOn: File, source: File, inherited: Boolean) =
|
||||
{
|
||||
add(sourceDeps, source, dependsOn)
|
||||
if (inherited) add(inheritedSourceDeps, source, dependsOn)
|
||||
}
|
||||
def externalBinaryDependency(binary: File, className: String, source: File, inherited: Boolean) {
|
||||
binaryClassName.put(binary, className)
|
||||
add(binaryDeps, source, binary)
|
||||
}
|
||||
def externalSourceDependency(t4: (File, String, Source, Boolean)) = extSrcDeps += t4
|
||||
|
||||
def binaryDependency(classFile: File, name: String, source: File, inherited: Boolean) =
|
||||
internalMap(classFile) match
|
||||
{
|
||||
case Some(dependsOn) =>
|
||||
// dependency is a product of a source not included in this compilation
|
||||
sourceDependency(dependsOn, source, inherited)
|
||||
case None =>
|
||||
classToSource.get(classFile) match
|
||||
{
|
||||
case Some(dependsOn) =>
|
||||
// dependency is a product of a source in this compilation step,
|
||||
// but not in the same compiler run (as in javac v. scalac)
|
||||
sourceDependency(dependsOn, source, inherited)
|
||||
case None =>
|
||||
externalDependency(classFile, name, source, inherited)
|
||||
}
|
||||
}
|
||||
def binaryDependency(classFile: File, name: String, source: File, inherited: Boolean) =
|
||||
internalMap(classFile) match {
|
||||
case Some(dependsOn) =>
|
||||
// dependency is a product of a source not included in this compilation
|
||||
sourceDependency(dependsOn, source, inherited)
|
||||
case None =>
|
||||
classToSource.get(classFile) match {
|
||||
case Some(dependsOn) =>
|
||||
// dependency is a product of a source in this compilation step,
|
||||
// but not in the same compiler run (as in javac v. scalac)
|
||||
sourceDependency(dependsOn, source, inherited)
|
||||
case None =>
|
||||
externalDependency(classFile, name, source, inherited)
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def externalDependency(classFile: File, name: String, source: File, inherited: Boolean): Unit =
|
||||
externalAPI(classFile, name) match
|
||||
{
|
||||
case Some(api) =>
|
||||
// dependency is a product of a source in another project
|
||||
externalSourceDependency( (source, name, api, inherited) )
|
||||
case None =>
|
||||
// dependency is some other binary on the classpath
|
||||
externalBinaryDependency(classFile, name, source, inherited)
|
||||
}
|
||||
private[this] def externalDependency(classFile: File, name: String, source: File, inherited: Boolean): Unit =
|
||||
externalAPI(classFile, name) match {
|
||||
case Some(api) =>
|
||||
// dependency is a product of a source in another project
|
||||
externalSourceDependency((source, name, api, inherited))
|
||||
case None =>
|
||||
// dependency is some other binary on the classpath
|
||||
externalBinaryDependency(classFile, name, source, inherited)
|
||||
}
|
||||
|
||||
def generatedClass(source: File, module: File, name: String) =
|
||||
{
|
||||
add(classes, source, (module, name))
|
||||
classToSource.put(module, source)
|
||||
}
|
||||
def generatedClass(source: File, module: File, name: String) =
|
||||
{
|
||||
add(classes, source, (module, name))
|
||||
classToSource.put(module, source)
|
||||
}
|
||||
|
||||
// empty value used when name hashing algorithm is disabled
|
||||
private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)
|
||||
// empty value used when name hashing algorithm is disabled
|
||||
private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty)
|
||||
|
||||
def api(sourceFile: File, source: SourceAPI) {
|
||||
import xsbt.api.{APIUtil, HashAPI}
|
||||
if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile
|
||||
publicNameHashes(sourceFile) = {
|
||||
if (nameHashing)
|
||||
(new xsbt.api.NameHashing).nameHashes(source)
|
||||
else
|
||||
emptyNameHashes
|
||||
}
|
||||
val shouldMinimize = !Incremental.apiDebug(options)
|
||||
val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source
|
||||
apis(sourceFile) = (HashAPI(source), savedSource)
|
||||
}
|
||||
def api(sourceFile: File, source: SourceAPI) {
|
||||
import xsbt.api.{ APIUtil, HashAPI }
|
||||
if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile
|
||||
publicNameHashes(sourceFile) = {
|
||||
if (nameHashing)
|
||||
(new xsbt.api.NameHashing).nameHashes(source)
|
||||
else
|
||||
emptyNameHashes
|
||||
}
|
||||
val shouldMinimize = !Incremental.apiDebug(options)
|
||||
val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source
|
||||
apis(sourceFile) = (HashAPI(source), savedSource)
|
||||
}
|
||||
|
||||
def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name)
|
||||
def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name)
|
||||
|
||||
def nameHashing: Boolean = options.nameHashing
|
||||
def nameHashing: Boolean = options.nameHashing
|
||||
|
||||
def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.empty(nameHashing = nameHashing)) ) ) ) ) )
|
||||
def addProducts(base: Analysis): Analysis = addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name ) }
|
||||
def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)( (a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin) )
|
||||
def addSources(base: Analysis): Analysis =
|
||||
(base /: apis) { case (a, (src, api) ) =>
|
||||
val stamp = current.internalSource(src)
|
||||
val hash = stamp match { case h: Hash => h.value; case _ => new Array[Byte](0) }
|
||||
// TODO store this in Relations, rather than Source.
|
||||
val hasMacro: Boolean = macroSources.contains(src)
|
||||
val s = new xsbti.api.Source(compilation, hash, api._2, api._1, publicNameHashes(src), hasMacro)
|
||||
val info = SourceInfos.makeInfo(getOrNil(reporteds, src), getOrNil(unreporteds, src))
|
||||
val direct = sourceDeps.getOrElse(src, Nil: Iterable[File])
|
||||
val publicInherited = inheritedSourceDeps.getOrElse(src, Nil: Iterable[File])
|
||||
a.addSource(src, s, stamp, direct, publicInherited, info)
|
||||
}
|
||||
def getOrNil[A,B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten
|
||||
def addExternals(base: Analysis): Analysis = (base /: extSrcDeps) { case (a, (source, name, api, inherited)) => a.addExternalDep(source, name, api, inherited) }
|
||||
def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation))
|
||||
def addUsedNames(base: Analysis): Analysis = (base /: usedNames) { case (a, (src, names)) =>
|
||||
(a /: names) { case (a, name) => a.copy(relations = a.relations.addUsedName(src, name)) }
|
||||
}
|
||||
def get: Analysis = addUsedNames(addCompilation(addExternals(addBinaries(addProducts(addSources(Analysis.empty(nameHashing = nameHashing)))))))
|
||||
def addProducts(base: Analysis): Analysis = addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name) }
|
||||
def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)((a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin))
|
||||
def addSources(base: Analysis): Analysis =
|
||||
(base /: apis) {
|
||||
case (a, (src, api)) =>
|
||||
val stamp = current.internalSource(src)
|
||||
val hash = stamp match { case h: Hash => h.value; case _ => new Array[Byte](0) }
|
||||
// TODO store this in Relations, rather than Source.
|
||||
val hasMacro: Boolean = macroSources.contains(src)
|
||||
val s = new xsbti.api.Source(compilation, hash, api._2, api._1, publicNameHashes(src), hasMacro)
|
||||
val info = SourceInfos.makeInfo(getOrNil(reporteds, src), getOrNil(unreporteds, src))
|
||||
val direct = sourceDeps.getOrElse(src, Nil: Iterable[File])
|
||||
val publicInherited = inheritedSourceDeps.getOrElse(src, Nil: Iterable[File])
|
||||
a.addSource(src, s, stamp, direct, publicInherited, info)
|
||||
}
|
||||
def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten
|
||||
def addExternals(base: Analysis): Analysis = (base /: extSrcDeps) { case (a, (source, name, api, inherited)) => a.addExternalDep(source, name, api, inherited) }
|
||||
def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation))
|
||||
def addUsedNames(base: Analysis): Analysis = (base /: usedNames) {
|
||||
case (a, (src, names)) =>
|
||||
(a /: names) { case (a, name) => a.copy(relations = a.relations.addUsedName(src, name)) }
|
||||
}
|
||||
|
||||
def addAll[A,B](base: Analysis, m: Map[A, Set[B]])( f: (Analysis, A, B) => Analysis): Analysis =
|
||||
(base /: m) { case (outer, (a, bs)) =>
|
||||
(outer /: bs) { (inner, b) =>
|
||||
f(inner, a, b)
|
||||
} }
|
||||
def addAll[A, B](base: Analysis, m: Map[A, Set[B]])(f: (Analysis, A, B) => Analysis): Analysis =
|
||||
(base /: m) {
|
||||
case (outer, (a, bs)) =>
|
||||
(outer /: bs) { (inner, b) =>
|
||||
f(inner, a, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,49 +1,43 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import java.io.File
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import java.io.File
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
sealed trait FileValueCache[T]
|
||||
{
|
||||
def clear(): Unit
|
||||
def get: File => T
|
||||
sealed trait FileValueCache[T] {
|
||||
def clear(): Unit
|
||||
def get: File => T
|
||||
}
|
||||
|
||||
private[this] final class FileValueCache0[T](getStamp: File => Stamp, make: File => T)(implicit equiv: Equiv[Stamp]) extends FileValueCache[T]
|
||||
{
|
||||
private[this] val backing = new ConcurrentHashMap[File, FileCache]
|
||||
private[this] final class FileValueCache0[T](getStamp: File => Stamp, make: File => T)(implicit equiv: Equiv[Stamp]) extends FileValueCache[T] {
|
||||
private[this] val backing = new ConcurrentHashMap[File, FileCache]
|
||||
|
||||
def clear(): Unit = backing.clear()
|
||||
def get = file => {
|
||||
val ifAbsent = new FileCache(file)
|
||||
val cache = backing.putIfAbsent(file, ifAbsent)
|
||||
(if(cache eq null) ifAbsent else cache).get()
|
||||
}
|
||||
def clear(): Unit = backing.clear()
|
||||
def get = file => {
|
||||
val ifAbsent = new FileCache(file)
|
||||
val cache = backing.putIfAbsent(file, ifAbsent)
|
||||
(if (cache eq null) ifAbsent else cache).get()
|
||||
}
|
||||
|
||||
private[this] final class FileCache(file: File)
|
||||
{
|
||||
private[this] var stampedValue: Option[(Stamp,T)] = None
|
||||
def get(): T = synchronized
|
||||
{
|
||||
val latest = getStamp(file)
|
||||
stampedValue match
|
||||
{
|
||||
case Some( (stamp, value) ) if(equiv.equiv(latest, stamp)) => value
|
||||
case _ => update(latest)
|
||||
}
|
||||
}
|
||||
private[this] final class FileCache(file: File) {
|
||||
private[this] var stampedValue: Option[(Stamp, T)] = None
|
||||
def get(): T = synchronized {
|
||||
val latest = getStamp(file)
|
||||
stampedValue match {
|
||||
case Some((stamp, value)) if (equiv.equiv(latest, stamp)) => value
|
||||
case _ => update(latest)
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def update(stamp: Stamp): T =
|
||||
{
|
||||
val value = make(file)
|
||||
stampedValue = Some((stamp, value))
|
||||
value
|
||||
}
|
||||
}
|
||||
private[this] def update(stamp: Stamp): T =
|
||||
{
|
||||
val value = make(file)
|
||||
stampedValue = Some((stamp, value))
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
object FileValueCache
|
||||
{
|
||||
def apply[T](f: File => T): FileValueCache[T] = make(Stamp.lastModified)(f)
|
||||
def make[T](stamp: File => Stamp)(f: File => T): FileValueCache[T] = new FileValueCache0[T](stamp, f)
|
||||
object FileValueCache {
|
||||
def apply[T](f: File => T): FileValueCache[T] = make(Stamp.lastModified)(f)
|
||||
def make[T](stamp: File => Stamp)(f: File => T): FileValueCache[T] = new FileValueCache0[T](stamp, f)
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
package sbt.inc
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
/**
|
||||
* Represents all configuration options for the incremental compiler itself and
|
||||
|
|
@ -12,295 +12,294 @@ package sbt.inc
|
|||
* defined explicitly.
|
||||
*/
|
||||
final class IncOptions(
|
||||
/** After which step include whole transitive closure of invalidated source files. */
|
||||
val transitiveStep: Int,
|
||||
/**
|
||||
* What's the fraction of invalidated source files when we switch to recompiling
|
||||
* all files and giving up incremental compilation altogether. That's useful in
|
||||
* cases when probability that we end up recompiling most of source files but
|
||||
* in multiple steps is high. Multi-step incremental recompilation is slower
|
||||
* than recompiling everything in one step.
|
||||
*/
|
||||
val recompileAllFraction: Double,
|
||||
/** Print very detailed information about relations, such as dependencies between source files. */
|
||||
val relationsDebug: Boolean,
|
||||
/**
|
||||
* Enable tools for debugging API changes. At the moment this option is unused but in the
|
||||
* future it will enable for example:
|
||||
* - disabling API hashing and API minimization (potentially very memory consuming)
|
||||
* - diffing textual API representation which helps understanding what kind of changes
|
||||
* to APIs are visible to the incremental compiler
|
||||
*/
|
||||
val apiDebug: Boolean,
|
||||
/**
|
||||
* Controls context size (in lines) displayed when diffs are produced for textual API
|
||||
* representation.
|
||||
*
|
||||
* This option is used only when `apiDebug == true`.
|
||||
*/
|
||||
val apiDiffContextSize: Int,
|
||||
/**
|
||||
* The directory where we dump textual representation of APIs. This method might be called
|
||||
* only if apiDebug returns true. This is unused option at the moment as the needed functionality
|
||||
* is not implemented yet.
|
||||
*/
|
||||
val apiDumpDirectory: Option[java.io.File],
|
||||
/** Creates a new ClassfileManager that will handle class file deletion and addition during a single incremental compilation run. */
|
||||
val newClassfileManager: () => ClassfileManager,
|
||||
/**
|
||||
* Determines whether incremental compiler should recompile all dependencies of a file
|
||||
* that contains a macro definition.
|
||||
*/
|
||||
val recompileOnMacroDef: Boolean,
|
||||
/**
|
||||
* Determines whether incremental compiler uses the new algorithm known as name hashing.
|
||||
*
|
||||
* This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0.
|
||||
*
|
||||
* IMPLEMENTATION NOTE:
|
||||
* Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm:
|
||||
*
|
||||
* 1. New dependency source tracking is used. See `sbt.inc.Relations` for details.
|
||||
* 2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well.
|
||||
* 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details.
|
||||
*
|
||||
*/
|
||||
val nameHashing: Boolean
|
||||
) extends Product with Serializable {
|
||||
/** After which step include whole transitive closure of invalidated source files. */
|
||||
val transitiveStep: Int,
|
||||
/**
|
||||
* What's the fraction of invalidated source files when we switch to recompiling
|
||||
* all files and giving up incremental compilation altogether. That's useful in
|
||||
* cases when probability that we end up recompiling most of source files but
|
||||
* in multiple steps is high. Multi-step incremental recompilation is slower
|
||||
* than recompiling everything in one step.
|
||||
*/
|
||||
val recompileAllFraction: Double,
|
||||
/** Print very detailed information about relations, such as dependencies between source files. */
|
||||
val relationsDebug: Boolean,
|
||||
/**
|
||||
* Enable tools for debugging API changes. At the moment this option is unused but in the
|
||||
* future it will enable for example:
|
||||
* - disabling API hashing and API minimization (potentially very memory consuming)
|
||||
* - diffing textual API representation which helps understanding what kind of changes
|
||||
* to APIs are visible to the incremental compiler
|
||||
*/
|
||||
val apiDebug: Boolean,
|
||||
/**
|
||||
* Controls context size (in lines) displayed when diffs are produced for textual API
|
||||
* representation.
|
||||
*
|
||||
* This option is used only when `apiDebug == true`.
|
||||
*/
|
||||
val apiDiffContextSize: Int,
|
||||
/**
|
||||
* The directory where we dump textual representation of APIs. This method might be called
|
||||
* only if apiDebug returns true. This is unused option at the moment as the needed functionality
|
||||
* is not implemented yet.
|
||||
*/
|
||||
val apiDumpDirectory: Option[java.io.File],
|
||||
/** Creates a new ClassfileManager that will handle class file deletion and addition during a single incremental compilation run. */
|
||||
val newClassfileManager: () => ClassfileManager,
|
||||
/**
|
||||
* Determines whether incremental compiler should recompile all dependencies of a file
|
||||
* that contains a macro definition.
|
||||
*/
|
||||
val recompileOnMacroDef: Boolean,
|
||||
/**
|
||||
* Determines whether incremental compiler uses the new algorithm known as name hashing.
|
||||
*
|
||||
* This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0.
|
||||
*
|
||||
* IMPLEMENTATION NOTE:
|
||||
* Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm:
|
||||
*
|
||||
* 1. New dependency source tracking is used. See `sbt.inc.Relations` for details.
|
||||
* 2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well.
|
||||
* 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details.
|
||||
*
|
||||
*/
|
||||
val nameHashing: Boolean) extends Product with Serializable {
|
||||
|
||||
/**
|
||||
* Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have
|
||||
* `recompileOnMacroDef` and `nameHashing` fields defined.
|
||||
*/
|
||||
def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = {
|
||||
this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault)
|
||||
}
|
||||
/**
|
||||
* Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have
|
||||
* `recompileOnMacroDef` and `nameHashing` fields defined.
|
||||
*/
|
||||
def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = {
|
||||
this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault)
|
||||
}
|
||||
|
||||
def withTransitiveStep(transitiveStep: Int): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withTransitiveStep(transitiveStep: Int): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withRelationsDebug(relationsDebug: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withRelationsDebug(relationsDebug: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withApiDebug(apiDebug: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withApiDebug(apiDebug: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
def withNameHashing(nameHashing: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
def withNameHashing(nameHashing: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
//- EXPANDED CASE CLASS METHOD BEGIN -//
|
||||
@deprecated("Use `with$nameOfTheField` copying methods instead.", "0.13.2")
|
||||
def copy(transitiveStep: Int = this.transitiveStep, recompileAllFraction: Double = this.recompileAllFraction,
|
||||
relationsDebug: Boolean = this.relationsDebug, apiDebug: Boolean = this.apiDebug,
|
||||
apiDiffContextSize: Int = this.apiDiffContextSize,
|
||||
apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory,
|
||||
newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
//- EXPANDED CASE CLASS METHOD BEGIN -//
|
||||
@deprecated("Use `with$nameOfTheField` copying methods instead.", "0.13.2")
|
||||
def copy(transitiveStep: Int = this.transitiveStep, recompileAllFraction: Double = this.recompileAllFraction,
|
||||
relationsDebug: Boolean = this.relationsDebug, apiDebug: Boolean = this.apiDebug,
|
||||
apiDiffContextSize: Int = this.apiDiffContextSize,
|
||||
apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory,
|
||||
newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
override def productPrefix: String = "IncOptions"
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
override def productPrefix: String = "IncOptions"
|
||||
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def productArity: Int = 9
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def productArity: Int = 9
|
||||
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def productElement(x$1: Int): Any = x$1 match {
|
||||
case 0 => IncOptions.this.transitiveStep
|
||||
case 1 => IncOptions.this.recompileAllFraction
|
||||
case 2 => IncOptions.this.relationsDebug
|
||||
case 3 => IncOptions.this.apiDebug
|
||||
case 4 => IncOptions.this.apiDiffContextSize
|
||||
case 5 => IncOptions.this.apiDumpDirectory
|
||||
case 6 => IncOptions.this.newClassfileManager
|
||||
case 7 => IncOptions.this.recompileOnMacroDef
|
||||
case 8 => IncOptions.this.nameHashing
|
||||
case _ => throw new IndexOutOfBoundsException(x$1.toString())
|
||||
}
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def productElement(x$1: Int): Any = x$1 match {
|
||||
case 0 => IncOptions.this.transitiveStep
|
||||
case 1 => IncOptions.this.recompileAllFraction
|
||||
case 2 => IncOptions.this.relationsDebug
|
||||
case 3 => IncOptions.this.apiDebug
|
||||
case 4 => IncOptions.this.apiDiffContextSize
|
||||
case 5 => IncOptions.this.apiDumpDirectory
|
||||
case 6 => IncOptions.this.newClassfileManager
|
||||
case 7 => IncOptions.this.recompileOnMacroDef
|
||||
case 8 => IncOptions.this.nameHashing
|
||||
case _ => throw new IndexOutOfBoundsException(x$1.toString())
|
||||
}
|
||||
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](IncOptions.this)
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](IncOptions.this)
|
||||
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def canEqual(x$1: Any): Boolean = x$1.isInstanceOf[IncOptions]
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def canEqual(x$1: Any): Boolean = x$1.isInstanceOf[IncOptions]
|
||||
|
||||
override def hashCode(): Int = {
|
||||
import scala.runtime.Statics
|
||||
var acc: Int = -889275714
|
||||
acc = Statics.mix(acc, transitiveStep)
|
||||
acc = Statics.mix(acc, Statics.doubleHash(recompileAllFraction))
|
||||
acc = Statics.mix(acc, if (relationsDebug) 1231 else 1237)
|
||||
acc = Statics.mix(acc, if (apiDebug) 1231 else 1237)
|
||||
acc = Statics.mix(acc, apiDiffContextSize)
|
||||
acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory))
|
||||
acc = Statics.mix(acc, Statics.anyHash(newClassfileManager))
|
||||
acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237)
|
||||
acc = Statics.mix(acc, if (nameHashing) 1231 else 1237)
|
||||
Statics.finalizeHash(acc, 9)
|
||||
}
|
||||
override def hashCode(): Int = {
|
||||
import scala.runtime.Statics
|
||||
var acc: Int = -889275714
|
||||
acc = Statics.mix(acc, transitiveStep)
|
||||
acc = Statics.mix(acc, Statics.doubleHash(recompileAllFraction))
|
||||
acc = Statics.mix(acc, if (relationsDebug) 1231 else 1237)
|
||||
acc = Statics.mix(acc, if (apiDebug) 1231 else 1237)
|
||||
acc = Statics.mix(acc, apiDiffContextSize)
|
||||
acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory))
|
||||
acc = Statics.mix(acc, Statics.anyHash(newClassfileManager))
|
||||
acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237)
|
||||
acc = Statics.mix(acc, if (nameHashing) 1231 else 1237)
|
||||
Statics.finalizeHash(acc, 9)
|
||||
}
|
||||
|
||||
override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this)
|
||||
override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this)
|
||||
|
||||
override def equals(x$1: Any): Boolean = {
|
||||
this.eq(x$1.asInstanceOf[Object]) || (x$1.isInstanceOf[IncOptions] && ({
|
||||
val IncOptions$1: IncOptions = x$1.asInstanceOf[IncOptions]
|
||||
transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction &&
|
||||
relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug &&
|
||||
override def equals(x$1: Any): Boolean = {
|
||||
this.eq(x$1.asInstanceOf[Object]) || (x$1.isInstanceOf[IncOptions] && ({
|
||||
val IncOptions$1: IncOptions = x$1.asInstanceOf[IncOptions]
|
||||
transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction &&
|
||||
relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug &&
|
||||
apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory &&
|
||||
newClassfileManager == IncOptions$1.newClassfileManager &&
|
||||
recompileOnMacroDef == IncOptions$1.recompileOnMacroDef && nameHashing == IncOptions$1.nameHashing
|
||||
}))
|
||||
}
|
||||
//- EXPANDED CASE CLASS METHOD END -//
|
||||
}))
|
||||
}
|
||||
//- EXPANDED CASE CLASS METHOD END -//
|
||||
}
|
||||
|
||||
object IncOptions extends Serializable {
|
||||
private val recompileOnMacroDefDefault: Boolean = true
|
||||
private val nameHashingDefault: Boolean = false
|
||||
val Default = IncOptions(
|
||||
// 1. recompile changed sources
|
||||
// 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2).
|
||||
// 4. further changes invalidate all dependencies transitively to avoid too many steps
|
||||
transitiveStep = 3,
|
||||
recompileAllFraction = 0.5,
|
||||
relationsDebug = false,
|
||||
apiDebug = false,
|
||||
apiDiffContextSize = 5,
|
||||
apiDumpDirectory = None,
|
||||
newClassfileManager = ClassfileManager.deleteImmediately,
|
||||
recompileOnMacroDef = recompileOnMacroDefDefault,
|
||||
nameHashing = nameHashingDefault
|
||||
)
|
||||
//- EXPANDED CASE CLASS METHOD BEGIN -//
|
||||
final override def toString(): String = "IncOptions"
|
||||
@deprecated("Use overloaded variant of `apply` with complete list of arguments instead.", "0.13.2")
|
||||
def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
|
||||
newClassfileManager: () => ClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager)
|
||||
}
|
||||
def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
|
||||
newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean,
|
||||
nameHashing: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = {
|
||||
if (x$0 == null) None
|
||||
else Some.apply[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)](
|
||||
Tuple7.apply[Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef](
|
||||
x$0.transitiveStep, x$0.recompileAllFraction, x$0.relationsDebug, x$0.apiDebug, x$0.apiDiffContextSize,
|
||||
x$0.apiDumpDirectory, x$0.newClassfileManager))
|
||||
}
|
||||
private def readResolve(): Object = IncOptions
|
||||
//- EXPANDED CASE CLASS METHOD END -//
|
||||
private val recompileOnMacroDefDefault: Boolean = true
|
||||
private val nameHashingDefault: Boolean = false
|
||||
val Default = IncOptions(
|
||||
// 1. recompile changed sources
|
||||
// 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2).
|
||||
// 4. further changes invalidate all dependencies transitively to avoid too many steps
|
||||
transitiveStep = 3,
|
||||
recompileAllFraction = 0.5,
|
||||
relationsDebug = false,
|
||||
apiDebug = false,
|
||||
apiDiffContextSize = 5,
|
||||
apiDumpDirectory = None,
|
||||
newClassfileManager = ClassfileManager.deleteImmediately,
|
||||
recompileOnMacroDef = recompileOnMacroDefDefault,
|
||||
nameHashing = nameHashingDefault
|
||||
)
|
||||
//- EXPANDED CASE CLASS METHOD BEGIN -//
|
||||
final override def toString(): String = "IncOptions"
|
||||
@deprecated("Use overloaded variant of `apply` with complete list of arguments instead.", "0.13.2")
|
||||
def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
|
||||
newClassfileManager: () => ClassfileManager): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager)
|
||||
}
|
||||
def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean,
|
||||
apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File],
|
||||
newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean,
|
||||
nameHashing: Boolean): IncOptions = {
|
||||
new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize,
|
||||
apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing)
|
||||
}
|
||||
@deprecated("Methods generated for case class will be removed in the future.", "0.13.2")
|
||||
def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = {
|
||||
if (x$0 == null) None
|
||||
else Some.apply[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)](
|
||||
Tuple7.apply[Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef](
|
||||
x$0.transitiveStep, x$0.recompileAllFraction, x$0.relationsDebug, x$0.apiDebug, x$0.apiDiffContextSize,
|
||||
x$0.apiDumpDirectory, x$0.newClassfileManager))
|
||||
}
|
||||
private def readResolve(): Object = IncOptions
|
||||
//- EXPANDED CASE CLASS METHOD END -//
|
||||
|
||||
@deprecated("Use IncOptions.Default.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
|
||||
def defaultTransactional(tempDir: File): IncOptions =
|
||||
setTransactional(Default, tempDir)
|
||||
@deprecated("Use opts.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
|
||||
def setTransactional(opts: IncOptions, tempDir: File): IncOptions =
|
||||
opts.withNewClassfileManager(ClassfileManager.transactional(tempDir, sbt.Logger.Null))
|
||||
@deprecated("Use IncOptions.Default.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
|
||||
def defaultTransactional(tempDir: File): IncOptions =
|
||||
setTransactional(Default, tempDir)
|
||||
@deprecated("Use opts.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5")
|
||||
def setTransactional(opts: IncOptions, tempDir: File): IncOptions =
|
||||
opts.withNewClassfileManager(ClassfileManager.transactional(tempDir, sbt.Logger.Null))
|
||||
|
||||
private val transitiveStepKey = "transitiveStep"
|
||||
private val recompileAllFractionKey = "recompileAllFraction"
|
||||
private val relationsDebugKey = "relationsDebug"
|
||||
private val apiDebugKey = "apiDebug"
|
||||
private val apiDumpDirectoryKey = "apiDumpDirectory"
|
||||
private val apiDiffContextSizeKey = "apiDiffContextSize"
|
||||
private val recompileOnMacroDefKey = "recompileOnMacroDef"
|
||||
private val nameHashingKey = "nameHashing"
|
||||
private val transitiveStepKey = "transitiveStep"
|
||||
private val recompileAllFractionKey = "recompileAllFraction"
|
||||
private val relationsDebugKey = "relationsDebug"
|
||||
private val apiDebugKey = "apiDebug"
|
||||
private val apiDumpDirectoryKey = "apiDumpDirectory"
|
||||
private val apiDiffContextSizeKey = "apiDiffContextSize"
|
||||
private val recompileOnMacroDefKey = "recompileOnMacroDef"
|
||||
private val nameHashingKey = "nameHashing"
|
||||
|
||||
def fromStringMap(m: java.util.Map[String, String]): IncOptions = {
|
||||
// all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API
|
||||
def getTransitiveStep: Int = {
|
||||
val k = transitiveStepKey
|
||||
if (m.containsKey(k)) m.get(k).toInt else Default.transitiveStep
|
||||
}
|
||||
def getRecompileAllFraction: Double = {
|
||||
val k = recompileAllFractionKey
|
||||
if (m.containsKey(k)) m.get(k).toDouble else Default.recompileAllFraction
|
||||
}
|
||||
def getRelationsDebug: Boolean = {
|
||||
val k = relationsDebugKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.relationsDebug
|
||||
}
|
||||
def getApiDebug: Boolean = {
|
||||
val k = apiDebugKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.apiDebug
|
||||
}
|
||||
def getApiDiffContextSize: Int = {
|
||||
val k = apiDiffContextSizeKey
|
||||
if (m.containsKey(k)) m.get(k).toInt else Default.apiDiffContextSize
|
||||
}
|
||||
def getApiDumpDirectory: Option[java.io.File] = {
|
||||
val k = apiDumpDirectoryKey
|
||||
if (m.containsKey(k))
|
||||
Some(new java.io.File(m.get(k)))
|
||||
else None
|
||||
}
|
||||
def getRecompileOnMacroDef: Boolean = {
|
||||
val k = recompileOnMacroDefKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef
|
||||
}
|
||||
def getNameHashing: Boolean = {
|
||||
val k = nameHashingKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing
|
||||
}
|
||||
def fromStringMap(m: java.util.Map[String, String]): IncOptions = {
|
||||
// all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API
|
||||
def getTransitiveStep: Int = {
|
||||
val k = transitiveStepKey
|
||||
if (m.containsKey(k)) m.get(k).toInt else Default.transitiveStep
|
||||
}
|
||||
def getRecompileAllFraction: Double = {
|
||||
val k = recompileAllFractionKey
|
||||
if (m.containsKey(k)) m.get(k).toDouble else Default.recompileAllFraction
|
||||
}
|
||||
def getRelationsDebug: Boolean = {
|
||||
val k = relationsDebugKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.relationsDebug
|
||||
}
|
||||
def getApiDebug: Boolean = {
|
||||
val k = apiDebugKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.apiDebug
|
||||
}
|
||||
def getApiDiffContextSize: Int = {
|
||||
val k = apiDiffContextSizeKey
|
||||
if (m.containsKey(k)) m.get(k).toInt else Default.apiDiffContextSize
|
||||
}
|
||||
def getApiDumpDirectory: Option[java.io.File] = {
|
||||
val k = apiDumpDirectoryKey
|
||||
if (m.containsKey(k))
|
||||
Some(new java.io.File(m.get(k)))
|
||||
else None
|
||||
}
|
||||
def getRecompileOnMacroDef: Boolean = {
|
||||
val k = recompileOnMacroDefKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef
|
||||
}
|
||||
def getNameHashing: Boolean = {
|
||||
val k = nameHashingKey
|
||||
if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing
|
||||
}
|
||||
|
||||
new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize,
|
||||
getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing)
|
||||
}
|
||||
new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize,
|
||||
getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing)
|
||||
}
|
||||
|
||||
def toStringMap(o: IncOptions): java.util.Map[String, String] = {
|
||||
val m = new java.util.HashMap[String, String]
|
||||
m.put(transitiveStepKey, o.transitiveStep.toString)
|
||||
m.put(recompileAllFractionKey, o.recompileAllFraction.toString)
|
||||
m.put(relationsDebugKey, o.relationsDebug.toString)
|
||||
m.put(apiDebugKey, o.apiDebug.toString)
|
||||
o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString))
|
||||
m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString)
|
||||
m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString)
|
||||
m.put(nameHashingKey, o.nameHashing.toString)
|
||||
m
|
||||
}
|
||||
def toStringMap(o: IncOptions): java.util.Map[String, String] = {
|
||||
val m = new java.util.HashMap[String, String]
|
||||
m.put(transitiveStepKey, o.transitiveStep.toString)
|
||||
m.put(recompileAllFractionKey, o.recompileAllFraction.toString)
|
||||
m.put(relationsDebugKey, o.relationsDebug.toString)
|
||||
m.put(apiDebugKey, o.apiDebug.toString)
|
||||
o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString))
|
||||
m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString)
|
||||
m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString)
|
||||
m.put(nameHashingKey, o.nameHashing.toString)
|
||||
m
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,406 +4,407 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import xsbt.api.{NameChanges, SameAPI, TopLevel}
|
||||
import xsbt.api.{ NameChanges, SameAPI, TopLevel }
|
||||
import annotation.tailrec
|
||||
import xsbti.api.{Compilation, Source}
|
||||
import xsbti.api.{ Compilation, Source }
|
||||
import xsbti.compile.DependencyChanges
|
||||
import java.io.File
|
||||
|
||||
object Incremental
|
||||
{
|
||||
def compile(sources: Set[File],
|
||||
entry: String => Option[File],
|
||||
previous: Analysis,
|
||||
current: ReadStamps,
|
||||
forEntry: File => Option[Analysis],
|
||||
doCompile: (Set[File], DependencyChanges) => Analysis,
|
||||
log: Logger,
|
||||
options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) =
|
||||
{
|
||||
val incremental: IncrementalCommon =
|
||||
if (!options.nameHashing)
|
||||
new IncrementalDefaultImpl(log, options)
|
||||
else
|
||||
new IncrementalNameHashing(log, options)
|
||||
val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry)
|
||||
val binaryChanges = new DependencyChanges {
|
||||
val modifiedBinaries = initialChanges.binaryDeps.toArray
|
||||
val modifiedClasses = initialChanges.external.allModified.toArray
|
||||
def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty
|
||||
}
|
||||
val initialInv = incremental.invalidateInitial(previous.relations, initialChanges)
|
||||
log.debug("All initially invalidated sources: " + initialInv + "\n")
|
||||
val analysis = manageClassfiles(options) { classfileManager =>
|
||||
incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1)
|
||||
}
|
||||
(!initialInv.isEmpty, analysis)
|
||||
}
|
||||
object Incremental {
|
||||
def compile(sources: Set[File],
|
||||
entry: String => Option[File],
|
||||
previous: Analysis,
|
||||
current: ReadStamps,
|
||||
forEntry: File => Option[Analysis],
|
||||
doCompile: (Set[File], DependencyChanges) => Analysis,
|
||||
log: Logger,
|
||||
options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) =
|
||||
{
|
||||
val incremental: IncrementalCommon =
|
||||
if (!options.nameHashing)
|
||||
new IncrementalDefaultImpl(log, options)
|
||||
else
|
||||
new IncrementalNameHashing(log, options)
|
||||
val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry)
|
||||
val binaryChanges = new DependencyChanges {
|
||||
val modifiedBinaries = initialChanges.binaryDeps.toArray
|
||||
val modifiedClasses = initialChanges.external.allModified.toArray
|
||||
def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty
|
||||
}
|
||||
val initialInv = incremental.invalidateInitial(previous.relations, initialChanges)
|
||||
log.debug("All initially invalidated sources: " + initialInv + "\n")
|
||||
val analysis = manageClassfiles(options) { classfileManager =>
|
||||
incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1)
|
||||
}
|
||||
(!initialInv.isEmpty, analysis)
|
||||
}
|
||||
|
||||
// the name of system property that was meant to enable debugging mode of incremental compiler but
|
||||
// it ended up being used just to enable debugging of relations. That's why if you migrate to new
|
||||
// API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug`
|
||||
// flag to achieve the same effect as using `incDebugProp`.
|
||||
@deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2")
|
||||
val incDebugProp = "xsbt.inc.debug"
|
||||
// the name of system property that was meant to enable debugging mode of incremental compiler but
|
||||
// it ended up being used just to enable debugging of relations. That's why if you migrate to new
|
||||
// API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug`
|
||||
// flag to achieve the same effect as using `incDebugProp`.
|
||||
@deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2")
|
||||
val incDebugProp = "xsbt.inc.debug"
|
||||
|
||||
private[inc] val apiDebugProp = "xsbt.api.debug"
|
||||
private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp)
|
||||
private[inc] val apiDebugProp = "xsbt.api.debug"
|
||||
private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp)
|
||||
|
||||
private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis =
|
||||
prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately())
|
||||
private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis =
|
||||
prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately())
|
||||
|
||||
private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis =
|
||||
{
|
||||
classfileManager.delete( invalidatedSrcs.flatMap(previous.relations.products) )
|
||||
previous -- invalidatedSrcs
|
||||
}
|
||||
private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis =
|
||||
{
|
||||
classfileManager.delete(invalidatedSrcs.flatMap(previous.relations.products))
|
||||
previous -- invalidatedSrcs
|
||||
}
|
||||
|
||||
private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T =
|
||||
{
|
||||
val classfileManager = options.newClassfileManager()
|
||||
val result = try run(classfileManager) catch { case e: Exception =>
|
||||
classfileManager.complete(success = false)
|
||||
throw e
|
||||
}
|
||||
classfileManager.complete(success = true)
|
||||
result
|
||||
}
|
||||
private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T =
|
||||
{
|
||||
val classfileManager = options.newClassfileManager()
|
||||
val result = try run(classfileManager) catch {
|
||||
case e: Exception =>
|
||||
classfileManager.complete(success = false)
|
||||
throw e
|
||||
}
|
||||
classfileManager.complete(success = true)
|
||||
result
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private abstract class IncrementalCommon(log: Logger, options: IncOptions) {
|
||||
|
||||
private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp)
|
||||
private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp)
|
||||
|
||||
// setting the related system property to true will skip checking that the class name
|
||||
// still comes from the same classpath entry. This can workaround bugs in classpath construction,
|
||||
// such as the currently problematic -javabootclasspath. This is subject to removal at any time.
|
||||
private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")
|
||||
// setting the related system property to true will skip checking that the class name
|
||||
// still comes from the same classpath entry. This can workaround bugs in classpath construction,
|
||||
// such as the currently problematic -javabootclasspath. This is subject to removal at any time.
|
||||
private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")
|
||||
|
||||
// TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success
|
||||
// TODO: full external name changes, scopeInvalidations
|
||||
@tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis,
|
||||
doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis =
|
||||
if(invalidatedRaw.isEmpty)
|
||||
previous
|
||||
else
|
||||
{
|
||||
def debug(s: => String) = if (incDebug(options)) log.debug(s) else ()
|
||||
val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations)
|
||||
val invalidated = expand(withPackageObjects, allSources)
|
||||
val pruned = Incremental.prune(invalidated, previous, classfileManager)
|
||||
debug("********* Pruned: \n" + pruned.relations + "\n*********")
|
||||
// TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success
|
||||
// TODO: full external name changes, scopeInvalidations
|
||||
@tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis,
|
||||
doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis =
|
||||
if (invalidatedRaw.isEmpty)
|
||||
previous
|
||||
else {
|
||||
def debug(s: => String) = if (incDebug(options)) log.debug(s) else ()
|
||||
val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations)
|
||||
val invalidated = expand(withPackageObjects, allSources)
|
||||
val pruned = Incremental.prune(invalidated, previous, classfileManager)
|
||||
debug("********* Pruned: \n" + pruned.relations + "\n*********")
|
||||
|
||||
val fresh = doCompile(invalidated, binaryChanges)
|
||||
classfileManager.generated(fresh.relations.allProducts)
|
||||
debug("********* Fresh: \n" + fresh.relations + "\n*********")
|
||||
val merged = pruned ++ fresh//.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis)
|
||||
debug("********* Merged: \n" + merged.relations + "\n*********")
|
||||
val fresh = doCompile(invalidated, binaryChanges)
|
||||
classfileManager.generated(fresh.relations.allProducts)
|
||||
debug("********* Fresh: \n" + fresh.relations + "\n*********")
|
||||
val merged = pruned ++ fresh //.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis)
|
||||
debug("********* Merged: \n" + merged.relations + "\n*********")
|
||||
|
||||
val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _)
|
||||
debug("\nChanges:\n" + incChanges)
|
||||
val transitiveStep = options.transitiveStep
|
||||
val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep)
|
||||
cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1)
|
||||
}
|
||||
private[this] def emptyChanges: DependencyChanges = new DependencyChanges {
|
||||
val modifiedBinaries = new Array[File](0)
|
||||
val modifiedClasses = new Array[String](0)
|
||||
def isEmpty = true
|
||||
}
|
||||
private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = {
|
||||
val recompileAllFraction = options.recompileAllFraction
|
||||
if(invalidated.size > all.size * recompileAllFraction) {
|
||||
log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction*100.0) + "% of all sources")
|
||||
all ++ invalidated // need the union because all doesn't contain removed sources
|
||||
}
|
||||
else invalidated
|
||||
}
|
||||
|
||||
protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File]
|
||||
|
||||
/**
|
||||
* Logs API changes using debug-level logging. The API are obtained using the APIDiff class.
|
||||
*
|
||||
* NOTE: This method creates a new APIDiff instance on every invocation.
|
||||
*/
|
||||
private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source,
|
||||
newAPIMapping: T => Source): Unit = {
|
||||
val contextSize = options.apiDiffContextSize
|
||||
try {
|
||||
val apiDiff = new APIDiff
|
||||
apiChanges foreach {
|
||||
case APIChangeDueToMacroDefinition(src) =>
|
||||
log.debug(s"Public API is considered to be changed because $src contains a macro definition.")
|
||||
case apiChange@(_: SourceAPIChange[T] | _: NamesChange[T]) =>
|
||||
val src = apiChange.modified
|
||||
val oldApi = oldAPIMapping(src)
|
||||
val newApi = newAPIMapping(src)
|
||||
val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize)
|
||||
log.debug(s"Detected a change in a public API (${src.toString}):\n"
|
||||
+ apiUnifiedPatch)
|
||||
}
|
||||
} catch {
|
||||
case e: ClassNotFoundException =>
|
||||
log.error("You have api debugging enabled but DiffUtils library cannot be found on sbt's classpath")
|
||||
case e: LinkageError =>
|
||||
log.error("Encoutared linkage error while trying to load DiffUtils library.")
|
||||
log.trace(e)
|
||||
case e: Exception =>
|
||||
log.error("An exception has been thrown while trying to dump an api diff.")
|
||||
log.trace(e)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Accepts the sources that were recompiled during the last step and functions
|
||||
* providing the API before and after the last step. The functions should return
|
||||
* an empty API if the file did not/does not exist.
|
||||
*/
|
||||
def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] =
|
||||
{
|
||||
val oldApis = lastSources.toSeq map oldAPI
|
||||
val newApis = lastSources.toSeq map newAPI
|
||||
val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) }
|
||||
|
||||
if (Incremental.apiDebug(options) && apiChanges.nonEmpty) {
|
||||
logApiChanges(apiChanges, oldAPI, newAPI)
|
||||
}
|
||||
|
||||
new APIChanges(apiChanges)
|
||||
}
|
||||
def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
|
||||
// Clients of a modified source file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled.
|
||||
val hasMacro = a.hasMacro || b.hasMacro
|
||||
if (shortcutSameSource(a, b)) {
|
||||
None
|
||||
} else {
|
||||
if (hasMacro && options.recompileOnMacroDef) {
|
||||
Some(APIChangeDueToMacroDefinition(src))
|
||||
} else sameAPI(src, a, b)
|
||||
}
|
||||
}
|
||||
|
||||
protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]]
|
||||
|
||||
def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep)
|
||||
def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs){
|
||||
case (co1, co2) => co1.sourceDirectory == co2.sourceDirectory && co1.outputDirectory == co2.outputDirectory
|
||||
val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _)
|
||||
debug("\nChanges:\n" + incChanges)
|
||||
val transitiveStep = options.transitiveStep
|
||||
val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep)
|
||||
cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum + 1)
|
||||
}
|
||||
private[this] def emptyChanges: DependencyChanges = new DependencyChanges {
|
||||
val modifiedBinaries = new Array[File](0)
|
||||
val modifiedClasses = new Array[String](0)
|
||||
def isEmpty = true
|
||||
}
|
||||
private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = {
|
||||
val recompileAllFraction = options.recompileAllFraction
|
||||
if (invalidated.size > all.size * recompileAllFraction) {
|
||||
log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction * 100.0) + "% of all sources")
|
||||
all ++ invalidated // need the union because all doesn't contain removed sources
|
||||
} else invalidated
|
||||
}
|
||||
|
||||
def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps,
|
||||
forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges =
|
||||
{
|
||||
val previous = previousAnalysis.stamps
|
||||
val previousAPIs = previousAnalysis.apis
|
||||
protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File]
|
||||
|
||||
val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv( previous.internalSource(f), current.internalSource(f) ) )
|
||||
val removedProducts = previous.allProducts.filter( p => !equivS.equiv( previous.product(p), current.product(p) ) ).toSet
|
||||
val binaryDepChanges = previous.allBinaries.filter( externalBinaryModified(entry, forEntry, previous, current)).toSet
|
||||
val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry))
|
||||
/**
|
||||
* Logs API changes using debug-level logging. The API are obtained using the APIDiff class.
|
||||
*
|
||||
* NOTE: This method creates a new APIDiff instance on every invocation.
|
||||
*/
|
||||
private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source,
|
||||
newAPIMapping: T => Source): Unit = {
|
||||
val contextSize = options.apiDiffContextSize
|
||||
try {
|
||||
val apiDiff = new APIDiff
|
||||
apiChanges foreach {
|
||||
case APIChangeDueToMacroDefinition(src) =>
|
||||
log.debug(s"Public API is considered to be changed because $src contains a macro definition.")
|
||||
case apiChange @ (_: SourceAPIChange[T] | _: NamesChange[T]) =>
|
||||
val src = apiChange.modified
|
||||
val oldApi = oldAPIMapping(src)
|
||||
val newApi = newAPIMapping(src)
|
||||
val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize)
|
||||
log.debug(s"Detected a change in a public API (${src.toString}):\n"
|
||||
+ apiUnifiedPatch)
|
||||
}
|
||||
} catch {
|
||||
case e: ClassNotFoundException =>
|
||||
log.error("You have api debugging enabled but DiffUtils library cannot be found on sbt's classpath")
|
||||
case e: LinkageError =>
|
||||
log.error("Encoutared linkage error while trying to load DiffUtils library.")
|
||||
log.trace(e)
|
||||
case e: Exception =>
|
||||
log.error("An exception has been thrown while trying to dump an api diff.")
|
||||
log.trace(e)
|
||||
}
|
||||
}
|
||||
|
||||
InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges )
|
||||
}
|
||||
/**
|
||||
* Accepts the sources that were recompiled during the last step and functions
|
||||
* providing the API before and after the last step. The functions should return
|
||||
* an empty API if the file did not/does not exist.
|
||||
*/
|
||||
def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] =
|
||||
{
|
||||
val oldApis = lastSources.toSeq map oldAPI
|
||||
val newApis = lastSources.toSeq map newAPI
|
||||
val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) }
|
||||
|
||||
def changes(previous: Set[File], current: Set[File], existingModified: File => Boolean): Changes[File] =
|
||||
new Changes[File]
|
||||
{
|
||||
private val inBoth = previous & current
|
||||
val removed = previous -- inBoth
|
||||
val added = current -- inBoth
|
||||
val (changed, unmodified) = inBoth.partition(existingModified)
|
||||
}
|
||||
if (Incremental.apiDebug(options) && apiChanges.nonEmpty) {
|
||||
logApiChanges(apiChanges, oldAPI, newAPI)
|
||||
}
|
||||
|
||||
def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] =
|
||||
{
|
||||
val dependsOnSrc = previous.usesInternalSrc _
|
||||
val propagated =
|
||||
if(transitive)
|
||||
transitiveDependencies(dependsOnSrc, changes.allModified.toSet)
|
||||
else
|
||||
invalidateIntermediate(previous, changes)
|
||||
new APIChanges(apiChanges)
|
||||
}
|
||||
def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
|
||||
// Clients of a modified source file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled.
|
||||
val hasMacro = a.hasMacro || b.hasMacro
|
||||
if (shortcutSameSource(a, b)) {
|
||||
None
|
||||
} else {
|
||||
if (hasMacro && options.recompileOnMacroDef) {
|
||||
Some(APIChangeDueToMacroDefinition(src))
|
||||
} else sameAPI(src, a, b)
|
||||
}
|
||||
}
|
||||
|
||||
val dups = invalidateDuplicates(previous)
|
||||
if(dups.nonEmpty)
|
||||
log.debug("Invalidated due to generated class file collision: " + dups)
|
||||
protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]]
|
||||
|
||||
val inv = propagated ++ dups // ++ scopeInvalidations(previous.extAPI _, changes.modified, changes.names)
|
||||
val newlyInvalidated = inv -- recompiledSources
|
||||
log.debug("All newly invalidated sources after taking into account (previously) recompiled sources:" + newlyInvalidated)
|
||||
if(newlyInvalidated.isEmpty) Set.empty else inv
|
||||
}
|
||||
def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep)
|
||||
def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs) {
|
||||
case (co1, co2) => co1.sourceDirectory == co2.sourceDirectory && co1.outputDirectory == co2.outputDirectory
|
||||
}
|
||||
|
||||
/** Invalidate all sources that claim to produce the same class file as another source file. */
|
||||
def invalidateDuplicates(merged: Relations): Set[File] =
|
||||
merged.srcProd.reverseMap.flatMap { case (classFile, sources) =>
|
||||
if(sources.size > 1) sources else Nil
|
||||
} toSet;
|
||||
def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps,
|
||||
forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges =
|
||||
{
|
||||
val previous = previousAnalysis.stamps
|
||||
val previousAPIs = previousAnalysis.apis
|
||||
|
||||
/** Returns the transitive source dependencies of `initial`.
|
||||
* Because the intermediate steps do not pull in cycles, this result includes the initial files
|
||||
* if they are part of a cycle containing newly invalidated files . */
|
||||
def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] =
|
||||
{
|
||||
val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc)
|
||||
val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc)
|
||||
log.debug("Final step, transitive dependencies:\n\t" + transitivePartial)
|
||||
transitivePartial
|
||||
}
|
||||
val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv(previous.internalSource(f), current.internalSource(f)))
|
||||
val removedProducts = previous.allProducts.filter(p => !equivS.equiv(previous.product(p), current.product(p))).toSet
|
||||
val binaryDepChanges = previous.allBinaries.filter(externalBinaryModified(entry, forEntry, previous, current)).toSet
|
||||
val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry))
|
||||
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] =
|
||||
{
|
||||
val srcChanges = changes.internalSrc
|
||||
val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed
|
||||
val byProduct = changes.removedProducts.flatMap(previous.produced)
|
||||
val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary)
|
||||
val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations
|
||||
checkAbsolute(srcChanges.added.toList)
|
||||
log.debug(
|
||||
"\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed +
|
||||
"\nRemoved products: " + changes.removedProducts +
|
||||
"\nExternal API changes: " + changes.external +
|
||||
"\nModified binary dependencies: " + changes.binaryDeps +
|
||||
"\nInitial directly invalidated sources: " + srcDirect +
|
||||
"\n\nSources indirectly invalidated by:" +
|
||||
"\n\tproduct: " + byProduct +
|
||||
"\n\tbinary dep: " + byBinaryDep +
|
||||
"\n\texternal source: " + byExtSrcDep
|
||||
)
|
||||
InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges)
|
||||
}
|
||||
|
||||
srcDirect ++ byProduct ++ byBinaryDep ++ byExtSrcDep
|
||||
}
|
||||
private[this] def checkAbsolute(addedSources: List[File]): Unit =
|
||||
if(addedSources.nonEmpty) {
|
||||
addedSources.filterNot(_.isAbsolute) match {
|
||||
case first :: more =>
|
||||
val fileStrings = more match {
|
||||
case Nil => first.toString
|
||||
case x :: Nil => s"$first and $x"
|
||||
case _ => s"$first and ${more.size} others"
|
||||
}
|
||||
sys.error(s"The incremental compiler requires absolute sources, but some were relative: $fileStrings")
|
||||
case Nil =>
|
||||
}
|
||||
}
|
||||
def changes(previous: Set[File], current: Set[File], existingModified: File => Boolean): Changes[File] =
|
||||
new Changes[File] {
|
||||
private val inBoth = previous & current
|
||||
val removed = previous -- inBoth
|
||||
val added = current -- inBoth
|
||||
val (changed, unmodified) = inBoth.partition(existingModified)
|
||||
}
|
||||
|
||||
def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = {
|
||||
(externalAPIChanges.apiChanges.flatMap { externalAPIChange =>
|
||||
invalidateByExternal(relations, externalAPIChange)
|
||||
}).toSet
|
||||
}
|
||||
def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] =
|
||||
{
|
||||
val dependsOnSrc = previous.usesInternalSrc _
|
||||
val propagated =
|
||||
if (transitive)
|
||||
transitiveDependencies(dependsOnSrc, changes.allModified.toSet)
|
||||
else
|
||||
invalidateIntermediate(previous, changes)
|
||||
|
||||
/** Sources invalidated by `external` sources in other projects according to the previous `relations`. */
|
||||
protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File]
|
||||
val dups = invalidateDuplicates(previous)
|
||||
if (dups.nonEmpty)
|
||||
log.debug("Invalidated due to generated class file collision: " + dups)
|
||||
|
||||
/** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */
|
||||
def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
invalidateSources(relations, changes)
|
||||
}
|
||||
/** Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not
|
||||
* included in a cycle with newly invalidated sources. */
|
||||
private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
val initial = changes.allModified.toSet
|
||||
val all = (changes.apiChanges flatMap { change =>
|
||||
invalidateSource(relations, change)
|
||||
}).toSet
|
||||
includeInitialCond(initial, all, allDeps(relations))
|
||||
}
|
||||
val inv = propagated ++ dups // ++ scopeInvalidations(previous.extAPI _, changes.modified, changes.names)
|
||||
val newlyInvalidated = inv -- recompiledSources
|
||||
log.debug("All newly invalidated sources after taking into account (previously) recompiled sources:" + newlyInvalidated)
|
||||
if (newlyInvalidated.isEmpty) Set.empty else inv
|
||||
}
|
||||
|
||||
protected def allDeps(relations: Relations): File => Set[File]
|
||||
/** Invalidate all sources that claim to produce the same class file as another source file. */
|
||||
def invalidateDuplicates(merged: Relations): Set[File] =
|
||||
merged.srcProd.reverseMap.flatMap {
|
||||
case (classFile, sources) =>
|
||||
if (sources.size > 1) sources else Nil
|
||||
} toSet;
|
||||
|
||||
protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File]
|
||||
/**
|
||||
* Returns the transitive source dependencies of `initial`.
|
||||
* Because the intermediate steps do not pull in cycles, this result includes the initial files
|
||||
* if they are part of a cycle containing newly invalidated files .
|
||||
*/
|
||||
def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] =
|
||||
{
|
||||
val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc)
|
||||
val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc)
|
||||
log.debug("Final step, transitive dependencies:\n\t" + transitivePartial)
|
||||
transitivePartial
|
||||
}
|
||||
|
||||
/** Conditionally include initial sources that are dependencies of newly invalidated sources.
|
||||
** Initial sources included in this step can be because of a cycle, but not always. */
|
||||
private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] =
|
||||
{
|
||||
val newInv = currentInvalidations -- initial
|
||||
log.debug("New invalidations:\n\t" + newInv)
|
||||
val transitiveOfNew = transitiveDeps(newInv)(allDeps)
|
||||
val initialDependsOnNew = transitiveOfNew & initial
|
||||
log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew)
|
||||
newInv ++ initialDependsOnNew
|
||||
}
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] =
|
||||
{
|
||||
val srcChanges = changes.internalSrc
|
||||
val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed
|
||||
val byProduct = changes.removedProducts.flatMap(previous.produced)
|
||||
val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary)
|
||||
val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations
|
||||
checkAbsolute(srcChanges.added.toList)
|
||||
log.debug(
|
||||
"\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed +
|
||||
"\nRemoved products: " + changes.removedProducts +
|
||||
"\nExternal API changes: " + changes.external +
|
||||
"\nModified binary dependencies: " + changes.binaryDeps +
|
||||
"\nInitial directly invalidated sources: " + srcDirect +
|
||||
"\n\nSources indirectly invalidated by:" +
|
||||
"\n\tproduct: " + byProduct +
|
||||
"\n\tbinary dep: " + byBinaryDep +
|
||||
"\n\texternal source: " + byExtSrcDep
|
||||
)
|
||||
|
||||
def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean =
|
||||
dependsOn =>
|
||||
{
|
||||
def inv(reason: String): Boolean = {
|
||||
log.debug("Invalidating " + dependsOn + ": " + reason)
|
||||
true
|
||||
}
|
||||
def entryModified(className: String, classpathEntry: File): Boolean =
|
||||
{
|
||||
val resolved = Locate.resolve(classpathEntry, className)
|
||||
if(resolved.getCanonicalPath != dependsOn.getCanonicalPath)
|
||||
inv("class " + className + " now provided by " + resolved.getCanonicalPath)
|
||||
else
|
||||
fileModified(dependsOn, resolved)
|
||||
}
|
||||
def fileModified(previousFile: File, currentFile: File): Boolean =
|
||||
{
|
||||
val previousStamp = previous.binary(previousFile)
|
||||
val currentStamp = current.binary(currentFile)
|
||||
if(equivS.equiv(previousStamp, currentStamp))
|
||||
false
|
||||
else
|
||||
inv("stamp changed from " + previousStamp + " to " + currentStamp)
|
||||
}
|
||||
def dependencyModified(file: File): Boolean =
|
||||
previous.className(file) match {
|
||||
case None => inv("no class name was mapped for it.")
|
||||
case Some(name) => entry(name) match {
|
||||
case None => inv("could not find class " + name + " on the classpath.")
|
||||
case Some(e) => entryModified(name, e)
|
||||
}
|
||||
}
|
||||
srcDirect ++ byProduct ++ byBinaryDep ++ byExtSrcDep
|
||||
}
|
||||
private[this] def checkAbsolute(addedSources: List[File]): Unit =
|
||||
if (addedSources.nonEmpty) {
|
||||
addedSources.filterNot(_.isAbsolute) match {
|
||||
case first :: more =>
|
||||
val fileStrings = more match {
|
||||
case Nil => first.toString
|
||||
case x :: Nil => s"$first and $x"
|
||||
case _ => s"$first and ${more.size} others"
|
||||
}
|
||||
sys.error(s"The incremental compiler requires absolute sources, but some were relative: $fileStrings")
|
||||
case Nil =>
|
||||
}
|
||||
}
|
||||
|
||||
analysis(dependsOn).isEmpty &&
|
||||
(if(skipClasspathLookup) fileModified(dependsOn, dependsOn) else dependencyModified(dependsOn))
|
||||
def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = {
|
||||
(externalAPIChanges.apiChanges.flatMap { externalAPIChange =>
|
||||
invalidateByExternal(relations, externalAPIChange)
|
||||
}).toSet
|
||||
}
|
||||
|
||||
}
|
||||
/** Sources invalidated by `external` sources in other projects according to the previous `relations`. */
|
||||
protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File]
|
||||
|
||||
def currentExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): String => Source =
|
||||
className =>
|
||||
orEmpty(
|
||||
for {
|
||||
e <- entry(className)
|
||||
analysis <- forEntry(e)
|
||||
src <- analysis.relations.definesClass(className).headOption
|
||||
} yield
|
||||
analysis.apis.internalAPI(src)
|
||||
)
|
||||
/** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */
|
||||
def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
invalidateSources(relations, changes)
|
||||
}
|
||||
/**
|
||||
* Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not
|
||||
* included in a cycle with newly invalidated sources.
|
||||
*/
|
||||
private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] =
|
||||
{
|
||||
val initial = changes.allModified.toSet
|
||||
val all = (changes.apiChanges flatMap { change =>
|
||||
invalidateSource(relations, change)
|
||||
}).toSet
|
||||
includeInitialCond(initial, all, allDeps(relations))
|
||||
}
|
||||
|
||||
def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource
|
||||
def orTrue(o: Option[Boolean]): Boolean = o getOrElse true
|
||||
protected def allDeps(relations: Relations): File => Set[File]
|
||||
|
||||
protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] =
|
||||
{
|
||||
val xs = new collection.mutable.HashSet[T]
|
||||
def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to))
|
||||
def visit(from: T, to: T): Unit =
|
||||
if (!xs.contains(to)) {
|
||||
log.debug(s"Including $to by $from")
|
||||
xs += to
|
||||
all(to, dependencies(to))
|
||||
}
|
||||
log.debug("Initial set of included nodes: " + nodes)
|
||||
nodes foreach { start =>
|
||||
xs += start
|
||||
all(start, dependencies(start))
|
||||
}
|
||||
xs.toSet
|
||||
}
|
||||
protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File]
|
||||
|
||||
/**
|
||||
* Conditionally include initial sources that are dependencies of newly invalidated sources.
|
||||
* * Initial sources included in this step can be because of a cycle, but not always.
|
||||
*/
|
||||
private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] =
|
||||
{
|
||||
val newInv = currentInvalidations -- initial
|
||||
log.debug("New invalidations:\n\t" + newInv)
|
||||
val transitiveOfNew = transitiveDeps(newInv)(allDeps)
|
||||
val initialDependsOnNew = transitiveOfNew & initial
|
||||
log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew)
|
||||
newInv ++ initialDependsOnNew
|
||||
}
|
||||
|
||||
// unmodifiedSources should not contain any sources in the previous compilation run
|
||||
// (this may unnecessarily invalidate them otherwise)
|
||||
/*def scopeInvalidation(previous: Analysis, otherSources: Set[File], names: NameChanges): Set[File] =
|
||||
def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean =
|
||||
dependsOn =>
|
||||
{
|
||||
def inv(reason: String): Boolean = {
|
||||
log.debug("Invalidating " + dependsOn + ": " + reason)
|
||||
true
|
||||
}
|
||||
def entryModified(className: String, classpathEntry: File): Boolean =
|
||||
{
|
||||
val resolved = Locate.resolve(classpathEntry, className)
|
||||
if (resolved.getCanonicalPath != dependsOn.getCanonicalPath)
|
||||
inv("class " + className + " now provided by " + resolved.getCanonicalPath)
|
||||
else
|
||||
fileModified(dependsOn, resolved)
|
||||
}
|
||||
def fileModified(previousFile: File, currentFile: File): Boolean =
|
||||
{
|
||||
val previousStamp = previous.binary(previousFile)
|
||||
val currentStamp = current.binary(currentFile)
|
||||
if (equivS.equiv(previousStamp, currentStamp))
|
||||
false
|
||||
else
|
||||
inv("stamp changed from " + previousStamp + " to " + currentStamp)
|
||||
}
|
||||
def dependencyModified(file: File): Boolean =
|
||||
previous.className(file) match {
|
||||
case None => inv("no class name was mapped for it.")
|
||||
case Some(name) => entry(name) match {
|
||||
case None => inv("could not find class " + name + " on the classpath.")
|
||||
case Some(e) => entryModified(name, e)
|
||||
}
|
||||
}
|
||||
|
||||
analysis(dependsOn).isEmpty &&
|
||||
(if (skipClasspathLookup) fileModified(dependsOn, dependsOn) else dependencyModified(dependsOn))
|
||||
|
||||
}
|
||||
|
||||
def currentExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): String => Source =
|
||||
className =>
|
||||
orEmpty(
|
||||
for {
|
||||
e <- entry(className)
|
||||
analysis <- forEntry(e)
|
||||
src <- analysis.relations.definesClass(className).headOption
|
||||
} yield analysis.apis.internalAPI(src)
|
||||
)
|
||||
|
||||
def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource
|
||||
def orTrue(o: Option[Boolean]): Boolean = o getOrElse true
|
||||
|
||||
protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] =
|
||||
{
|
||||
val xs = new collection.mutable.HashSet[T]
|
||||
def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to))
|
||||
def visit(from: T, to: T): Unit =
|
||||
if (!xs.contains(to)) {
|
||||
log.debug(s"Including $to by $from")
|
||||
xs += to
|
||||
all(to, dependencies(to))
|
||||
}
|
||||
log.debug("Initial set of included nodes: " + nodes)
|
||||
nodes foreach { start =>
|
||||
xs += start
|
||||
all(start, dependencies(start))
|
||||
}
|
||||
xs.toSet
|
||||
}
|
||||
|
||||
// unmodifiedSources should not contain any sources in the previous compilation run
|
||||
// (this may unnecessarily invalidate them otherwise)
|
||||
/*def scopeInvalidation(previous: Analysis, otherSources: Set[File], names: NameChanges): Set[File] =
|
||||
{
|
||||
val newNames = newTypes ++ names.newTerms
|
||||
val newMap = pkgNameMap(newNames)
|
||||
|
|
@ -445,51 +446,51 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) {
|
|||
|
||||
private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) {
|
||||
|
||||
// Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error
|
||||
// This might be too conservative: we probably only need package objects for packages of invalidated sources.
|
||||
override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
|
||||
invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" }
|
||||
// Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error
|
||||
// This might be too conservative: we probably only need package objects for packages of invalidated sources.
|
||||
override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
|
||||
invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" }
|
||||
|
||||
override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = {
|
||||
if (SameAPI(a,b))
|
||||
None
|
||||
else {
|
||||
val sourceApiChange = SourceAPIChange(src)
|
||||
Some(sourceApiChange)
|
||||
}
|
||||
}
|
||||
override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = {
|
||||
if (SameAPI(a, b))
|
||||
None
|
||||
else {
|
||||
val sourceApiChange = SourceAPIChange(src)
|
||||
Some(sourceApiChange)
|
||||
}
|
||||
}
|
||||
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
|
||||
val modified = externalAPIChange.modified
|
||||
// Propagate public inheritance dependencies transitively.
|
||||
// This differs from normal because we need the initial crossing from externals to sources in this project.
|
||||
val externalInheritedR = relations.publicInherited.external
|
||||
val byExternalInherited = externalInheritedR.reverse(modified)
|
||||
val internalInheritedR = relations.publicInherited.internal
|
||||
val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _)
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
|
||||
val modified = externalAPIChange.modified
|
||||
// Propagate public inheritance dependencies transitively.
|
||||
// This differs from normal because we need the initial crossing from externals to sources in this project.
|
||||
val externalInheritedR = relations.publicInherited.external
|
||||
val byExternalInherited = externalInheritedR.reverse(modified)
|
||||
val internalInheritedR = relations.publicInherited.internal
|
||||
val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _)
|
||||
|
||||
// Get the direct dependencies of all sources transitively invalidated by inheritance
|
||||
val directA = transitiveInherited flatMap relations.direct.internal.reverse
|
||||
// Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive.
|
||||
val directB = relations.direct.external.reverse(modified)
|
||||
transitiveInherited ++ directA ++ directB
|
||||
}
|
||||
// Get the direct dependencies of all sources transitively invalidated by inheritance
|
||||
val directA = transitiveInherited flatMap relations.direct.internal.reverse
|
||||
// Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive.
|
||||
val directB = relations.direct.external.reverse(modified)
|
||||
transitiveInherited ++ directA ++ directB
|
||||
}
|
||||
|
||||
override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
|
||||
def reverse(r: Relations.Source) = r.internal.reverse _
|
||||
val directDeps: File => Set[File] = reverse(relations.direct)
|
||||
val publicInherited: File => Set[File] = reverse(relations.publicInherited)
|
||||
log.debug("Invalidating by inheritance (transitively)...")
|
||||
val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited)
|
||||
log.debug("Invalidated by transitive public inheritance: " + transitiveInherited)
|
||||
val direct = transitiveInherited flatMap directDeps
|
||||
log.debug("Invalidated by direct dependency: " + direct)
|
||||
transitiveInherited ++ direct
|
||||
}
|
||||
override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
|
||||
def reverse(r: Relations.Source) = r.internal.reverse _
|
||||
val directDeps: File => Set[File] = reverse(relations.direct)
|
||||
val publicInherited: File => Set[File] = reverse(relations.publicInherited)
|
||||
log.debug("Invalidating by inheritance (transitively)...")
|
||||
val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited)
|
||||
log.debug("Invalidated by transitive public inheritance: " + transitiveInherited)
|
||||
val direct = transitiveInherited flatMap directDeps
|
||||
log.debug("Invalidated by direct dependency: " + direct)
|
||||
transitiveInherited ++ direct
|
||||
}
|
||||
|
||||
override protected def allDeps(relations: Relations): File => Set[File] =
|
||||
f => relations.direct.internal.reverse(f)
|
||||
override protected def allDeps(relations: Relations): File => Set[File] =
|
||||
f => relations.direct.internal.reverse(f)
|
||||
|
||||
}
|
||||
|
||||
|
|
@ -501,74 +502,74 @@ private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) ext
|
|||
*/
|
||||
private final class IncrementalNameHashing(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) {
|
||||
|
||||
private val memberRefInvalidator = new MemberRefInvalidator(log)
|
||||
private val memberRefInvalidator = new MemberRefInvalidator(log)
|
||||
|
||||
// Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error
|
||||
// This might be too conservative: we probably only need package objects for packages of invalidated sources.
|
||||
override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
|
||||
invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" }
|
||||
// Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error
|
||||
// This might be too conservative: we probably only need package objects for packages of invalidated sources.
|
||||
override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] =
|
||||
invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" }
|
||||
|
||||
override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
|
||||
if (SameAPI(a,b))
|
||||
None
|
||||
else {
|
||||
val aNameHashes = a._internalOnly_nameHashes
|
||||
val bNameHashes = b._internalOnly_nameHashes
|
||||
val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes)
|
||||
val apiChange = NamesChange(src, modifiedNames)
|
||||
Some(apiChange)
|
||||
}
|
||||
}
|
||||
override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = {
|
||||
if (SameAPI(a, b))
|
||||
None
|
||||
else {
|
||||
val aNameHashes = a._internalOnly_nameHashes
|
||||
val bNameHashes = b._internalOnly_nameHashes
|
||||
val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes)
|
||||
val apiChange = NamesChange(src, modifiedNames)
|
||||
Some(apiChange)
|
||||
}
|
||||
}
|
||||
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
|
||||
val modified = externalAPIChange.modified
|
||||
val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange)
|
||||
log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.")
|
||||
// Propagate inheritance dependencies transitively.
|
||||
// This differs from normal because we need the initial crossing from externals to sources in this project.
|
||||
val externalInheritanceR = relations.inheritance.external
|
||||
val byExternalInheritance = externalInheritanceR.reverse(modified)
|
||||
log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).")
|
||||
val transitiveInheritance = byExternalInheritance flatMap { file =>
|
||||
invalidateByInheritance(relations, file)
|
||||
}
|
||||
val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal,
|
||||
relations.names, externalAPIChange)
|
||||
val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external,
|
||||
relations.names, externalAPIChange)
|
||||
/** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/
|
||||
override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = {
|
||||
val modified = externalAPIChange.modified
|
||||
val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange)
|
||||
log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.")
|
||||
// Propagate inheritance dependencies transitively.
|
||||
// This differs from normal because we need the initial crossing from externals to sources in this project.
|
||||
val externalInheritanceR = relations.inheritance.external
|
||||
val byExternalInheritance = externalInheritanceR.reverse(modified)
|
||||
log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).")
|
||||
val transitiveInheritance = byExternalInheritance flatMap { file =>
|
||||
invalidateByInheritance(relations, file)
|
||||
}
|
||||
val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal,
|
||||
relations.names, externalAPIChange)
|
||||
val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external,
|
||||
relations.names, externalAPIChange)
|
||||
|
||||
// Get the member reference dependencies of all sources transitively invalidated by inheritance
|
||||
log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.")
|
||||
val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal
|
||||
// Get the sources that depend on externals by member reference.
|
||||
// This includes non-inheritance dependencies and is not transitive.
|
||||
log.debug(s"Getting sources that directly depend on (external) $modified.")
|
||||
val memberRefB = memberRefInvalidationExternal(modified)
|
||||
transitiveInheritance ++ memberRefA ++ memberRefB
|
||||
}
|
||||
// Get the member reference dependencies of all sources transitively invalidated by inheritance
|
||||
log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.")
|
||||
val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal
|
||||
// Get the sources that depend on externals by member reference.
|
||||
// This includes non-inheritance dependencies and is not transitive.
|
||||
log.debug(s"Getting sources that directly depend on (external) $modified.")
|
||||
val memberRefB = memberRefInvalidationExternal(modified)
|
||||
transitiveInheritance ++ memberRefA ++ memberRefB
|
||||
}
|
||||
|
||||
private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = {
|
||||
val inheritanceDeps = relations.inheritance.internal.reverse _
|
||||
log.debug(s"Invalidating (transitively) by inheritance from $modified...")
|
||||
val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps)
|
||||
log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance)
|
||||
transitiveInheritance
|
||||
}
|
||||
private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = {
|
||||
val inheritanceDeps = relations.inheritance.internal.reverse _
|
||||
log.debug(s"Invalidating (transitively) by inheritance from $modified...")
|
||||
val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps)
|
||||
log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance)
|
||||
transitiveInheritance
|
||||
}
|
||||
|
||||
override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
|
||||
log.debug(s"Invalidating ${change.modified}...")
|
||||
val transitiveInheritance = invalidateByInheritance(relations, change.modified)
|
||||
val reasonForInvalidation = memberRefInvalidator.invalidationReason(change)
|
||||
log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.")
|
||||
val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal,
|
||||
relations.names, change)
|
||||
val memberRef = transitiveInheritance flatMap memberRefInvalidation
|
||||
val all = transitiveInheritance ++ memberRef
|
||||
all
|
||||
}
|
||||
override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = {
|
||||
log.debug(s"Invalidating ${change.modified}...")
|
||||
val transitiveInheritance = invalidateByInheritance(relations, change.modified)
|
||||
val reasonForInvalidation = memberRefInvalidator.invalidationReason(change)
|
||||
log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.")
|
||||
val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal,
|
||||
relations.names, change)
|
||||
val memberRef = transitiveInheritance flatMap memberRefInvalidation
|
||||
val all = transitiveInheritance ++ memberRef
|
||||
all
|
||||
}
|
||||
|
||||
override protected def allDeps(relations: Relations): File => Set[File] =
|
||||
f => relations.memberRef.internal.reverse(f)
|
||||
override protected def allDeps(relations: Relations): File => Set[File] =
|
||||
f => relations.memberRef.internal.reverse(f)
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,89 +5,91 @@ package sbt
|
|||
package inc
|
||||
|
||||
import java.io.File
|
||||
import java.util.zip.{ZipException, ZipFile}
|
||||
import java.util.zip.{ ZipException, ZipFile }
|
||||
import Function.const
|
||||
|
||||
object Locate
|
||||
{
|
||||
type DefinesClass = File => String => Boolean
|
||||
object Locate {
|
||||
type DefinesClass = File => String => Boolean
|
||||
|
||||
/** Right(src) provides the value for the found class
|
||||
* Left(true) means that the class was found, but it had no associated value
|
||||
* Left(false) means that the class was not found */
|
||||
def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] =
|
||||
{
|
||||
val gets = classpath.toStream.map(getValue(get))
|
||||
className => find(className, gets)
|
||||
}
|
||||
|
||||
def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] =
|
||||
if(gets.isEmpty)
|
||||
Left(false)
|
||||
else
|
||||
gets.head(name) match
|
||||
{
|
||||
case Left(false) => find(name, gets.tail)
|
||||
case x => x
|
||||
}
|
||||
|
||||
/** Returns a function that searches the provided class path for
|
||||
* a class name and returns the entry that defines that class.*/
|
||||
def entry(classpath: Seq[File], f: DefinesClass): String => Option[File] =
|
||||
{
|
||||
val entries = classpath.toStream.map { entry => (entry, f(entry)) }
|
||||
className => entries collect { case (entry, defines) if defines(className) => entry } headOption;
|
||||
}
|
||||
def resolve(f: File, className: String): File = if(f.isDirectory) classFile(f, className) else f
|
||||
|
||||
def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] =
|
||||
{
|
||||
val defClass = definesClass(entry)
|
||||
val getF = get(entry)
|
||||
className => if(defClass(className)) getF(className).toRight(true) else Left(false)
|
||||
}
|
||||
|
||||
def definesClass(entry: File): String => Boolean =
|
||||
if(entry.isDirectory)
|
||||
directoryDefinesClass(entry)
|
||||
else if(entry.exists && classpath.ClasspathUtilities.isArchive(entry, contentFallback=true))
|
||||
jarDefinesClass(entry)
|
||||
else
|
||||
const(false)
|
||||
|
||||
def jarDefinesClass(entry: File): String => Boolean =
|
||||
{
|
||||
import collection.JavaConversions._
|
||||
val jar = try { new ZipFile(entry, ZipFile.OPEN_READ) } catch {
|
||||
// ZipException doesn't include the file name :(
|
||||
case e: ZipException => throw new RuntimeException("Error opening zip file: " + entry.getName, e)
|
||||
}
|
||||
val entries = try { jar.entries.map(e => toClassName(e.getName)).toSet } finally { jar.close() }
|
||||
entries.contains _
|
||||
}
|
||||
|
||||
def toClassName(entry: String): String =
|
||||
entry.stripSuffix(ClassExt).replace('/', '.')
|
||||
|
||||
val ClassExt = ".class"
|
||||
|
||||
def directoryDefinesClass(entry: File): String => Boolean =
|
||||
className => classFile(entry, className).isFile
|
||||
|
||||
def classFile(baseDir: File, className: String): File =
|
||||
{
|
||||
val (pkg, name) = components(className)
|
||||
val dir = subDirectory(baseDir, pkg)
|
||||
new File(dir, name + ClassExt)
|
||||
}
|
||||
|
||||
def subDirectory(base: File, parts: Seq[String]): File =
|
||||
(base /: parts) ( (b, p) => new File(b,p) )
|
||||
|
||||
def components(className: String): (Seq[String], String) =
|
||||
{
|
||||
assume(!className.isEmpty)
|
||||
val parts = className.split("\\.")
|
||||
if(parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last)
|
||||
}
|
||||
/**
|
||||
* Right(src) provides the value for the found class
|
||||
* Left(true) means that the class was found, but it had no associated value
|
||||
* Left(false) means that the class was not found
|
||||
*/
|
||||
def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] =
|
||||
{
|
||||
val gets = classpath.toStream.map(getValue(get))
|
||||
className => find(className, gets)
|
||||
}
|
||||
|
||||
def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] =
|
||||
if (gets.isEmpty)
|
||||
Left(false)
|
||||
else
|
||||
gets.head(name) match {
|
||||
case Left(false) => find(name, gets.tail)
|
||||
case x => x
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a function that searches the provided class path for
|
||||
* a class name and returns the entry that defines that class.
|
||||
*/
|
||||
def entry(classpath: Seq[File], f: DefinesClass): String => Option[File] =
|
||||
{
|
||||
val entries = classpath.toStream.map { entry => (entry, f(entry)) }
|
||||
className => entries collect { case (entry, defines) if defines(className) => entry } headOption;
|
||||
}
|
||||
def resolve(f: File, className: String): File = if (f.isDirectory) classFile(f, className) else f
|
||||
|
||||
def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] =
|
||||
{
|
||||
val defClass = definesClass(entry)
|
||||
val getF = get(entry)
|
||||
className => if (defClass(className)) getF(className).toRight(true) else Left(false)
|
||||
}
|
||||
|
||||
def definesClass(entry: File): String => Boolean =
|
||||
if (entry.isDirectory)
|
||||
directoryDefinesClass(entry)
|
||||
else if (entry.exists && classpath.ClasspathUtilities.isArchive(entry, contentFallback = true))
|
||||
jarDefinesClass(entry)
|
||||
else
|
||||
const(false)
|
||||
|
||||
def jarDefinesClass(entry: File): String => Boolean =
|
||||
{
|
||||
import collection.JavaConversions._
|
||||
val jar = try { new ZipFile(entry, ZipFile.OPEN_READ) } catch {
|
||||
// ZipException doesn't include the file name :(
|
||||
case e: ZipException => throw new RuntimeException("Error opening zip file: " + entry.getName, e)
|
||||
}
|
||||
val entries = try { jar.entries.map(e => toClassName(e.getName)).toSet } finally { jar.close() }
|
||||
entries.contains _
|
||||
}
|
||||
|
||||
def toClassName(entry: String): String =
|
||||
entry.stripSuffix(ClassExt).replace('/', '.')
|
||||
|
||||
val ClassExt = ".class"
|
||||
|
||||
def directoryDefinesClass(entry: File): String => Boolean =
|
||||
className => classFile(entry, className).isFile
|
||||
|
||||
def classFile(baseDir: File, className: String): File =
|
||||
{
|
||||
val (pkg, name) = components(className)
|
||||
val dir = subDirectory(baseDir, pkg)
|
||||
new File(dir, name + ClassExt)
|
||||
}
|
||||
|
||||
def subDirectory(base: File, parts: Seq[String]): File =
|
||||
(base /: parts)((b, p) => new File(b, p))
|
||||
|
||||
def components(className: String): (Seq[String], String) =
|
||||
{
|
||||
assume(!className.isEmpty)
|
||||
val parts = className.split("\\.")
|
||||
if (parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last)
|
||||
}
|
||||
}
|
||||
|
|
@ -51,74 +51,73 @@ import xsbt.api.APIUtil
|
|||
* of regular members then we'll invalidate sources that use those names.
|
||||
*/
|
||||
private[inc] class MemberRefInvalidator(log: Logger) {
|
||||
def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]):
|
||||
T => Set[File] = apiChange match {
|
||||
case _: APIChangeDueToMacroDefinition[_] =>
|
||||
new InvalidateUnconditionally(memberRef)
|
||||
case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty =>
|
||||
new InvalidateUnconditionally(memberRef)
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) =>
|
||||
new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames)
|
||||
case _: SourceAPIChange[_] =>
|
||||
sys.error(wrongAPIChangeMsg)
|
||||
}
|
||||
def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): T => Set[File] = apiChange match {
|
||||
case _: APIChangeDueToMacroDefinition[_] =>
|
||||
new InvalidateUnconditionally(memberRef)
|
||||
case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty =>
|
||||
new InvalidateUnconditionally(memberRef)
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) =>
|
||||
new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames)
|
||||
case _: SourceAPIChange[_] =>
|
||||
sys.error(wrongAPIChangeMsg)
|
||||
}
|
||||
|
||||
def invalidationReason(apiChange: APIChange[_]): String = apiChange match {
|
||||
case APIChangeDueToMacroDefinition(modifiedSrcFile) =>
|
||||
s"The $modifiedSrcFile source file declares a macro."
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty =>
|
||||
s"""|The $modifiedSrcFile source file has the following implicit definitions changed:
|
||||
def invalidationReason(apiChange: APIChange[_]): String = apiChange match {
|
||||
case APIChangeDueToMacroDefinition(modifiedSrcFile) =>
|
||||
s"The $modifiedSrcFile source file declares a macro."
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty =>
|
||||
s"""|The $modifiedSrcFile source file has the following implicit definitions changed:
|
||||
|\t${modifiedNames.implicitNames.mkString(", ")}.""".stripMargin
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) =>
|
||||
s"""|The $modifiedSrcFile source file has the following regular definitions changed:
|
||||
case NamesChange(modifiedSrcFile, modifiedNames) =>
|
||||
s"""|The $modifiedSrcFile source file has the following regular definitions changed:
|
||||
|\t${modifiedNames.regularNames.mkString(", ")}.""".stripMargin
|
||||
case _: SourceAPIChange[_] =>
|
||||
sys.error(wrongAPIChangeMsg)
|
||||
}
|
||||
case _: SourceAPIChange[_] =>
|
||||
sys.error(wrongAPIChangeMsg)
|
||||
}
|
||||
|
||||
private val wrongAPIChangeMsg =
|
||||
"MemberReferenceInvalidator.get should be called when name hashing is enabled " +
|
||||
"and in that case we shouldn't have SourceAPIChange as an api change."
|
||||
private val wrongAPIChangeMsg =
|
||||
"MemberReferenceInvalidator.get should be called when name hashing is enabled " +
|
||||
"and in that case we shouldn't have SourceAPIChange as an api change."
|
||||
|
||||
private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) {
|
||||
def apply(from: T): Set[File] = {
|
||||
val invalidated = memberRef.reverse(from)
|
||||
if (!invalidated.isEmpty)
|
||||
log.debug(s"The following member ref dependencies of $from are invalidated:\n" +
|
||||
formatInvalidated(invalidated))
|
||||
invalidated
|
||||
}
|
||||
private def formatInvalidated(invalidated: Set[File]): String = {
|
||||
val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath)
|
||||
sortedFiles.map(file => "\t"+file).mkString("\n")
|
||||
}
|
||||
}
|
||||
private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) {
|
||||
def apply(from: T): Set[File] = {
|
||||
val invalidated = memberRef.reverse(from)
|
||||
if (!invalidated.isEmpty)
|
||||
log.debug(s"The following member ref dependencies of $from are invalidated:\n" +
|
||||
formatInvalidated(invalidated))
|
||||
invalidated
|
||||
}
|
||||
private def formatInvalidated(invalidated: Set[File]): String = {
|
||||
val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath)
|
||||
sortedFiles.map(file => "\t" + file).mkString("\n")
|
||||
}
|
||||
}
|
||||
|
||||
private class NameHashFilteredInvalidator[T](
|
||||
usedNames: Relation[File, String],
|
||||
memberRef: Relation[File, T],
|
||||
modifiedNames: Set[String]) extends (T => Set[File]) {
|
||||
private class NameHashFilteredInvalidator[T](
|
||||
usedNames: Relation[File, String],
|
||||
memberRef: Relation[File, T],
|
||||
modifiedNames: Set[String]) extends (T => Set[File]) {
|
||||
|
||||
def apply(to: T): Set[File] = {
|
||||
val dependent = memberRef.reverse(to)
|
||||
filteredDependencies(dependent)
|
||||
}
|
||||
private def filteredDependencies(dependent: Set[File]): Set[File] = {
|
||||
dependent.filter {
|
||||
case from if APIUtil.isScalaSourceName(from.getName) =>
|
||||
val usedNamesInDependent = usedNames.forward(from)
|
||||
val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent
|
||||
if (modifiedAndUsedNames.isEmpty) {
|
||||
log.debug(s"None of the modified names appears in $from. This dependency is not being considered for invalidation.")
|
||||
false
|
||||
} else {
|
||||
log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames")
|
||||
true
|
||||
}
|
||||
case from =>
|
||||
log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from")
|
||||
true
|
||||
}
|
||||
}
|
||||
}
|
||||
def apply(to: T): Set[File] = {
|
||||
val dependent = memberRef.reverse(to)
|
||||
filteredDependencies(dependent)
|
||||
}
|
||||
private def filteredDependencies(dependent: Set[File]): Set[File] = {
|
||||
dependent.filter {
|
||||
case from if APIUtil.isScalaSourceName(from.getName) =>
|
||||
val usedNamesInDependent = usedNames.forward(from)
|
||||
val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent
|
||||
if (modifiedAndUsedNames.isEmpty) {
|
||||
log.debug(s"None of the modified names appears in $from. This dependency is not being considered for invalidation.")
|
||||
false
|
||||
} else {
|
||||
log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames")
|
||||
true
|
||||
}
|
||||
case from =>
|
||||
log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from")
|
||||
true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,244 +8,244 @@ import java.io.File
|
|||
import Relations.Source
|
||||
import Relations.SourceDependencies
|
||||
|
||||
/**
|
||||
* Provides mappings between source files, generated classes (products), and binaries.
|
||||
* Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project),
|
||||
* external: a dependency on a source in another compilation group (tracked as the name of the class),
|
||||
* binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group,
|
||||
* inherited: a dependency that resulted from a public template inheriting,
|
||||
* direct: any type of dependency, including inheritance.
|
||||
*/
|
||||
trait Relations {
|
||||
/** All sources _with at least one product_ . */
|
||||
def allSources: collection.Set[File]
|
||||
|
||||
/** Provides mappings between source files, generated classes (products), and binaries.
|
||||
* Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project),
|
||||
* external: a dependency on a source in another compilation group (tracked as the name of the class),
|
||||
* binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group,
|
||||
* inherited: a dependency that resulted from a public template inheriting,
|
||||
* direct: any type of dependency, including inheritance. */
|
||||
trait Relations
|
||||
{
|
||||
/** All sources _with at least one product_ . */
|
||||
def allSources: collection.Set[File]
|
||||
/** All products associated with sources. */
|
||||
def allProducts: collection.Set[File]
|
||||
|
||||
/** All products associated with sources. */
|
||||
def allProducts: collection.Set[File]
|
||||
/** All files that are recorded as a binary dependency of a source file.*/
|
||||
def allBinaryDeps: collection.Set[File]
|
||||
|
||||
/** All files that are recorded as a binary dependency of a source file.*/
|
||||
def allBinaryDeps: collection.Set[File]
|
||||
/** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/
|
||||
def allInternalSrcDeps: collection.Set[File]
|
||||
|
||||
/** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/
|
||||
def allInternalSrcDeps: collection.Set[File]
|
||||
/** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/
|
||||
def allExternalDeps: collection.Set[String]
|
||||
|
||||
/** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/
|
||||
def allExternalDeps: collection.Set[String]
|
||||
/** Fully qualified names of classes generated from source file `src`. */
|
||||
def classNames(src: File): Set[String]
|
||||
|
||||
/** Fully qualified names of classes generated from source file `src`. */
|
||||
def classNames(src: File): Set[String]
|
||||
/** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */
|
||||
def definesClass(name: String): Set[File]
|
||||
|
||||
/** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */
|
||||
def definesClass(name: String): Set[File]
|
||||
/** The classes that were generated for source file `src`. */
|
||||
def products(src: File): Set[File]
|
||||
/** The source files that generated class file `prod`. This is typically a set containing a single file. */
|
||||
def produced(prod: File): Set[File]
|
||||
|
||||
/** The classes that were generated for source file `src`. */
|
||||
def products(src: File): Set[File]
|
||||
/** The source files that generated class file `prod`. This is typically a set containing a single file. */
|
||||
def produced(prod: File): Set[File]
|
||||
/** The binary dependencies for the source file `src`. */
|
||||
def binaryDeps(src: File): Set[File]
|
||||
/** The source files that depend on binary file `dep`. */
|
||||
def usesBinary(dep: File): Set[File]
|
||||
|
||||
/** The binary dependencies for the source file `src`. */
|
||||
def binaryDeps(src: File): Set[File]
|
||||
/** The source files that depend on binary file `dep`. */
|
||||
def usesBinary(dep: File): Set[File]
|
||||
/** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */
|
||||
def internalSrcDeps(src: File): Set[File]
|
||||
/** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */
|
||||
def usesInternalSrc(dep: File): Set[File]
|
||||
|
||||
/** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */
|
||||
def internalSrcDeps(src: File): Set[File]
|
||||
/** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */
|
||||
def usesInternalSrc(dep: File): Set[File]
|
||||
/** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */
|
||||
def externalDeps(src: File): Set[String]
|
||||
/** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */
|
||||
def usesExternal(dep: String): Set[File]
|
||||
|
||||
/** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */
|
||||
def externalDeps(src: File): Set[String]
|
||||
/** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */
|
||||
def usesExternal(dep: String): Set[File]
|
||||
private[inc] def usedNames(src: File): Set[String]
|
||||
|
||||
private[inc] def usedNames(src: File): Set[String]
|
||||
/** Records internal source file `src` as generating class file `prod` with top-level class `name`. */
|
||||
def addProduct(src: File, prod: File, name: String): Relations
|
||||
|
||||
/** Records internal source file `src` as generating class file `prod` with top-level class `name`. */
|
||||
def addProduct(src: File, prod: File, name: String): Relations
|
||||
/**
|
||||
* Records internal source file `src` as depending on class `dependsOn` in an external source file.
|
||||
* If `inherited` is true, this dependency is recorded as coming from a public template in `src` extending something in `dependsOn` (an inheritance dependency).
|
||||
* Whatever the value of `inherited`, the dependency is also recorded as a direct dependency.
|
||||
*/
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations
|
||||
|
||||
/** Records internal source file `src` as depending on class `dependsOn` in an external source file.
|
||||
* If `inherited` is true, this dependency is recorded as coming from a public template in `src` extending something in `dependsOn` (an inheritance dependency).
|
||||
* Whatever the value of `inherited`, the dependency is also recorded as a direct dependency. */
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations
|
||||
/** Records internal source file `src` depending on a dependency binary dependency `dependsOn`.*/
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations
|
||||
|
||||
/** Records internal source file `src` depending on a dependency binary dependency `dependsOn`.*/
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations
|
||||
/**
|
||||
* Records internal source file `src` as having direct dependencies on internal source files `directDependsOn`
|
||||
* and inheritance dependencies on `inheritedDependsOn`. Everything in `inheritedDependsOn` must be included in `directDependsOn`;
|
||||
* this method does not automatically record direct dependencies like `addExternalDep` does.
|
||||
*/
|
||||
def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations
|
||||
|
||||
/** Records internal source file `src` as having direct dependencies on internal source files `directDependsOn`
|
||||
* and inheritance dependencies on `inheritedDependsOn`. Everything in `inheritedDependsOn` must be included in `directDependsOn`;
|
||||
* this method does not automatically record direct dependencies like `addExternalDep` does.*/
|
||||
def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations
|
||||
private[inc] def addUsedName(src: File, name: String): Relations
|
||||
|
||||
private[inc] def addUsedName(src: File, name: String): Relations
|
||||
/** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */
|
||||
def ++(o: Relations): Relations
|
||||
|
||||
/** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */
|
||||
def ++ (o: Relations): Relations
|
||||
/** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */
|
||||
def --(sources: Iterable[File]): Relations
|
||||
|
||||
/** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */
|
||||
def -- (sources: Iterable[File]): Relations
|
||||
@deprecated("OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: (File => K)): Map[K, Relations]
|
||||
|
||||
@deprecated("OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: (File => K)): Map[K, Relations]
|
||||
/** The relation between internal sources and generated class files. */
|
||||
def srcProd: Relation[File, File]
|
||||
|
||||
/** The relation between internal sources and generated class files. */
|
||||
def srcProd: Relation[File, File]
|
||||
/** The dependency relation between internal sources and binaries. */
|
||||
def binaryDep: Relation[File, File]
|
||||
|
||||
/** The dependency relation between internal sources and binaries. */
|
||||
def binaryDep: Relation[File, File]
|
||||
/** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/
|
||||
def internalSrcDep: Relation[File, File]
|
||||
|
||||
/** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/
|
||||
def internalSrcDep: Relation[File, File]
|
||||
/** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/
|
||||
def externalDep: Relation[File, String]
|
||||
|
||||
/** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/
|
||||
def externalDep: Relation[File, String]
|
||||
/**
|
||||
* The source dependency relation between source files introduced by member reference.
|
||||
*
|
||||
* NOTE: All inheritance dependencies are included in this relation because in order to
|
||||
* inherit from a member you have to refer to it. If you check documentation of `inheritance`
|
||||
* you'll see that there's small oddity related to traits being the first parent of a
|
||||
* class/trait that results in additional parents being introduced due to normalization.
|
||||
* This relation properly accounts for that so the invariant that `memberRef` is a superset
|
||||
* of `inheritance` is preserved.
|
||||
*/
|
||||
private[inc] def memberRef: SourceDependencies
|
||||
|
||||
/**
|
||||
* The source dependency relation between source files introduced by member reference.
|
||||
*
|
||||
* NOTE: All inheritance dependencies are included in this relation because in order to
|
||||
* inherit from a member you have to refer to it. If you check documentation of `inheritance`
|
||||
* you'll see that there's small oddity related to traits being the first parent of a
|
||||
* class/trait that results in additional parents being introduced due to normalization.
|
||||
* This relation properly accounts for that so the invariant that `memberRef` is a superset
|
||||
* of `inheritance` is preserved.
|
||||
*/
|
||||
private[inc] def memberRef: SourceDependencies
|
||||
/**
|
||||
* The source dependency relation between source files introduced by inheritance.
|
||||
* The dependency by inheritance is introduced when a template (class or trait) mentions
|
||||
* a given type in a parent position.
|
||||
*
|
||||
* NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency
|
||||
* on a class being introduced. An example illustrates the best the problem. Let's consider
|
||||
* the following structure:
|
||||
*
|
||||
* trait A extends B
|
||||
* trait B extends C
|
||||
* trait C extends D
|
||||
* class D
|
||||
*
|
||||
* We are interested in dependencies by inheritance of `A`. One would expect it to be just `B`
|
||||
* but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain
|
||||
* normalization so the first parent of a type is a class. Therefore the example above is normalized
|
||||
* to the following form:
|
||||
*
|
||||
* trait A extends D with B
|
||||
* trait B extends D with C
|
||||
* trait C extends D
|
||||
* class D
|
||||
*
|
||||
* Therefore if you inherit from a trait you'll get an additional dependency on a class that is
|
||||
* resolved transitively. You should not rely on this behavior, though.
|
||||
*
|
||||
*/
|
||||
private[inc] def inheritance: SourceDependencies
|
||||
|
||||
/**
|
||||
* The source dependency relation between source files introduced by inheritance.
|
||||
* The dependency by inheritance is introduced when a template (class or trait) mentions
|
||||
* a given type in a parent position.
|
||||
*
|
||||
* NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency
|
||||
* on a class being introduced. An example illustrates the best the problem. Let's consider
|
||||
* the following structure:
|
||||
*
|
||||
* trait A extends B
|
||||
* trait B extends C
|
||||
* trait C extends D
|
||||
* class D
|
||||
*
|
||||
* We are interested in dependencies by inheritance of `A`. One would expect it to be just `B`
|
||||
* but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain
|
||||
* normalization so the first parent of a type is a class. Therefore the example above is normalized
|
||||
* to the following form:
|
||||
*
|
||||
* trait A extends D with B
|
||||
* trait B extends D with C
|
||||
* trait C extends D
|
||||
* class D
|
||||
*
|
||||
* Therefore if you inherit from a trait you'll get an additional dependency on a class that is
|
||||
* resolved transitively. You should not rely on this behavior, though.
|
||||
*
|
||||
*/
|
||||
private[inc] def inheritance: SourceDependencies
|
||||
/** The dependency relations between sources. These include both direct and inherited dependencies.*/
|
||||
def direct: Source
|
||||
|
||||
/** The dependency relations between sources. These include both direct and inherited dependencies.*/
|
||||
def direct: Source
|
||||
/** The inheritance dependency relations between sources.*/
|
||||
def publicInherited: Source
|
||||
|
||||
/** The inheritance dependency relations between sources.*/
|
||||
def publicInherited: Source
|
||||
/** The relation between a source file and the fully qualified names of classes generated from it.*/
|
||||
def classes: Relation[File, String]
|
||||
|
||||
/** The relation between a source file and the fully qualified names of classes generated from it.*/
|
||||
def classes: Relation[File, String]
|
||||
|
||||
/**
|
||||
* Flag which indicates whether given Relations object supports operations needed by name hashing algorithm.
|
||||
*
|
||||
* At the moment the list includes the following operations:
|
||||
*
|
||||
* - memberRef: SourceDependencies
|
||||
* - inheritance: SourceDependencies
|
||||
*
|
||||
* The `memberRef` and `inheritance` implement a new style source dependency tracking. When this flag is
|
||||
* enabled access to `direct` and `publicInherited` relations is illegal and will cause runtime exception
|
||||
* being thrown. That is done as an optimization that prevents from storing two overlapping sets of
|
||||
* dependencies.
|
||||
*
|
||||
* Conversely, when `nameHashing` flag is disabled access to `memberRef` and `inheritance`
|
||||
* relations is illegal and will cause runtime exception being thrown.
|
||||
*/
|
||||
private[inc] def nameHashing: Boolean
|
||||
/**
|
||||
* Relation between source files and _unqualified_ term and type names used in given source file.
|
||||
*/
|
||||
private[inc] def names: Relation[File, String]
|
||||
/**
|
||||
* Flag which indicates whether given Relations object supports operations needed by name hashing algorithm.
|
||||
*
|
||||
* At the moment the list includes the following operations:
|
||||
*
|
||||
* - memberRef: SourceDependencies
|
||||
* - inheritance: SourceDependencies
|
||||
*
|
||||
* The `memberRef` and `inheritance` implement a new style source dependency tracking. When this flag is
|
||||
* enabled access to `direct` and `publicInherited` relations is illegal and will cause runtime exception
|
||||
* being thrown. That is done as an optimization that prevents from storing two overlapping sets of
|
||||
* dependencies.
|
||||
*
|
||||
* Conversely, when `nameHashing` flag is disabled access to `memberRef` and `inheritance`
|
||||
* relations is illegal and will cause runtime exception being thrown.
|
||||
*/
|
||||
private[inc] def nameHashing: Boolean
|
||||
/**
|
||||
* Relation between source files and _unqualified_ term and type names used in given source file.
|
||||
*/
|
||||
private[inc] def names: Relation[File, String]
|
||||
}
|
||||
|
||||
object Relations {
|
||||
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
|
||||
final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) {
|
||||
def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external)
|
||||
def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn))
|
||||
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
|
||||
def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources)
|
||||
def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external)
|
||||
|
||||
object Relations
|
||||
{
|
||||
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
|
||||
final class Source private[sbt](val internal: Relation[File,File], val external: Relation[File,String]) {
|
||||
def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external)
|
||||
def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn))
|
||||
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
|
||||
def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources)
|
||||
def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external)
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBySource[K](f: File => K): Map[K, Source] = {
|
||||
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBySource[K](f: File => K): Map[K, Source] = {
|
||||
val i = internal.groupBy { case (a, b) => f(a) }
|
||||
val e = external.groupBy { case (a, b) => f(a) }
|
||||
val pairs = for (k <- i.keySet ++ e.keySet) yield (k, new Source(getOrEmpty(i, k), getOrEmpty(e, k)))
|
||||
pairs.toMap
|
||||
}
|
||||
|
||||
val i = internal.groupBy { case (a,b) => f(a) }
|
||||
val e = external.groupBy { case (a,b) => f(a) }
|
||||
val pairs = for( k <- i.keySet ++ e.keySet ) yield
|
||||
(k, new Source( getOrEmpty(i, k), getOrEmpty(e, k) ))
|
||||
pairs.toMap
|
||||
}
|
||||
override def equals(other: Any) = other match {
|
||||
case o: Source => internal == o.internal && external == o.external
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: Source => internal == o.internal && external == o.external
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode = (internal, external).hashCode
|
||||
}
|
||||
|
||||
override def hashCode = (internal, external).hashCode
|
||||
}
|
||||
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
|
||||
private[inc] final class SourceDependencies(val internal: Relation[File, File], val external: Relation[File, String]) {
|
||||
def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external)
|
||||
def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn))
|
||||
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
|
||||
def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources)
|
||||
def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external)
|
||||
|
||||
/** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
|
||||
private[inc] final class SourceDependencies(val internal: Relation[File,File], val external: Relation[File,String]) {
|
||||
def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external)
|
||||
def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn))
|
||||
/** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/
|
||||
def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources)
|
||||
def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external)
|
||||
override def equals(other: Any) = other match {
|
||||
case o: SourceDependencies => internal == o.internal && external == o.external
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: SourceDependencies => internal == o.internal && external == o.external
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode = (internal, external).hashCode
|
||||
}
|
||||
|
||||
override def hashCode = (internal, external).hashCode
|
||||
}
|
||||
private[sbt] def getOrEmpty[A, B, K](m: Map[K, Relation[A, B]], k: K): Relation[A, B] = m.getOrElse(k, Relation.empty)
|
||||
|
||||
private[sbt] def getOrEmpty[A,B,K](m: Map[K, Relation[A,B]], k: K): Relation[A,B] = m.getOrElse(k, Relation.empty)
|
||||
private[this] lazy val e = Relation.empty[File, File]
|
||||
private[this] lazy val estr = Relation.empty[File, String]
|
||||
private[this] lazy val es = new Source(e, estr)
|
||||
|
||||
private[this] lazy val e = Relation.empty[File, File]
|
||||
private[this] lazy val estr = Relation.empty[File, String]
|
||||
private[this] lazy val es = new Source(e, estr)
|
||||
def emptySource: Source = es
|
||||
private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr)
|
||||
def empty: Relations = empty(nameHashing = false)
|
||||
private[inc] def empty(nameHashing: Boolean): Relations =
|
||||
if (nameHashing)
|
||||
new MRelationsNameHashing(e, e, emptySourceDependencies, emptySourceDependencies, estr, estr)
|
||||
else
|
||||
new MRelationsDefaultImpl(e, e, es, es, estr)
|
||||
|
||||
def emptySource: Source = es
|
||||
private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr)
|
||||
def empty: Relations = empty(nameHashing = false)
|
||||
private[inc] def empty(nameHashing: Boolean): Relations =
|
||||
if (nameHashing)
|
||||
new MRelationsNameHashing(e, e, emptySourceDependencies, emptySourceDependencies, estr, estr)
|
||||
else
|
||||
new MRelationsDefaultImpl(e, e, es, es, estr)
|
||||
def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations =
|
||||
new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes)
|
||||
|
||||
def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations =
|
||||
new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes)
|
||||
|
||||
private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File],
|
||||
memberRef: SourceDependencies, inheritance: SourceDependencies, classes: Relation[File, String],
|
||||
names: Relation[File, String]): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, inheritance = inheritance,
|
||||
classes, names)
|
||||
def makeSource(internal: Relation[File,File], external: Relation[File,String]): Source = new Source(internal, external)
|
||||
private[inc] def makeSourceDependencies(internal: Relation[File,File], external: Relation[File,String]): SourceDependencies = new SourceDependencies(internal, external)
|
||||
private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File],
|
||||
memberRef: SourceDependencies, inheritance: SourceDependencies, classes: Relation[File, String],
|
||||
names: Relation[File, String]): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, inheritance = inheritance,
|
||||
classes, names)
|
||||
def makeSource(internal: Relation[File, File], external: Relation[File, String]): Source = new Source(internal, external)
|
||||
private[inc] def makeSourceDependencies(internal: Relation[File, File], external: Relation[File, String]): SourceDependencies = new SourceDependencies(internal, external)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* An abstract class that contains common functionality inherited by two implementations of Relations trait.
|
||||
*
|
||||
|
|
@ -267,43 +267,41 @@ object Relations
|
|||
* `classes` is a relation between a source file and its generated fully-qualified class names.
|
||||
*/
|
||||
private abstract class MRelationsCommon(val srcProd: Relation[File, File], val binaryDep: Relation[File, File],
|
||||
val classes: Relation[File, String]) extends Relations
|
||||
{
|
||||
def allSources: collection.Set[File] = srcProd._1s
|
||||
val classes: Relation[File, String]) extends Relations {
|
||||
def allSources: collection.Set[File] = srcProd._1s
|
||||
|
||||
def allProducts: collection.Set[File] = srcProd._2s
|
||||
def allBinaryDeps: collection.Set[File] = binaryDep._2s
|
||||
def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s
|
||||
def allExternalDeps: collection.Set[String] = externalDep._2s
|
||||
def allProducts: collection.Set[File] = srcProd._2s
|
||||
def allBinaryDeps: collection.Set[File] = binaryDep._2s
|
||||
def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s
|
||||
def allExternalDeps: collection.Set[String] = externalDep._2s
|
||||
|
||||
def classNames(src: File): Set[String] = classes.forward(src)
|
||||
def definesClass(name: String): Set[File] = classes.reverse(name)
|
||||
def classNames(src: File): Set[String] = classes.forward(src)
|
||||
def definesClass(name: String): Set[File] = classes.reverse(name)
|
||||
|
||||
def products(src: File): Set[File] = srcProd.forward(src)
|
||||
def produced(prod: File): Set[File] = srcProd.reverse(prod)
|
||||
def products(src: File): Set[File] = srcProd.forward(src)
|
||||
def produced(prod: File): Set[File] = srcProd.reverse(prod)
|
||||
|
||||
def binaryDeps(src: File): Set[File] = binaryDep.forward(src)
|
||||
def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep)
|
||||
def binaryDeps(src: File): Set[File] = binaryDep.forward(src)
|
||||
def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep)
|
||||
|
||||
def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src)
|
||||
def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep)
|
||||
def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src)
|
||||
def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep)
|
||||
|
||||
def externalDeps(src: File): Set[String] = externalDep.forward(src)
|
||||
def usesExternal(dep: String): Set[File] = externalDep.reverse(dep)
|
||||
def externalDeps(src: File): Set[String] = externalDep.forward(src)
|
||||
def usesExternal(dep: String): Set[File] = externalDep.reverse(dep)
|
||||
|
||||
def usedNames(src: File): Set[String] = names.forward(src)
|
||||
def usedNames(src: File): Set[String] = names.forward(src)
|
||||
|
||||
/** Making large Relations a little readable. */
|
||||
private val userDir = sys.props("user.dir").stripSuffix("/") + "/"
|
||||
private def nocwd(s: String) = s stripPrefix userDir
|
||||
private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n"
|
||||
private def nocwd(s: String) = s stripPrefix userDir
|
||||
private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n"
|
||||
protected def relation_s(r: Relation[_, _]) = (
|
||||
if (r.forwardMap.isEmpty) "Relation [ ]"
|
||||
else (r.all.toSeq map line_s sorted) mkString ("Relation [\n", "", "]")
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This class implements Relations trait with support for tracking of `direct` and `publicInherited` source
|
||||
* dependencies. Therefore this class preserves the "old" (from sbt 0.13.0) dependency tracking logic and it's
|
||||
|
|
@ -317,107 +315,106 @@ private abstract class MRelationsCommon(val srcProd: Relation[File, File], val b
|
|||
*
|
||||
*/
|
||||
private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Relation[File, File],
|
||||
// direct should include everything in inherited
|
||||
val direct: Source, val publicInherited: Source,
|
||||
classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes)
|
||||
{
|
||||
def internalSrcDep: Relation[File, File] = direct.internal
|
||||
def externalDep: Relation[File, String] = direct.external
|
||||
// direct should include everything in inherited
|
||||
val direct: Source, val publicInherited: Source,
|
||||
classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
|
||||
def internalSrcDep: Relation[File, File] = direct.internal
|
||||
def externalDep: Relation[File, String] = direct.external
|
||||
|
||||
def nameHashing: Boolean = false
|
||||
def nameHashing: Boolean = false
|
||||
|
||||
def memberRef: SourceDependencies =
|
||||
throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
def inheritance: SourceDependencies =
|
||||
throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
def memberRef: SourceDependencies =
|
||||
throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
def inheritance: SourceDependencies =
|
||||
throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
|
||||
def addProduct(src: File, prod: File, name: String): Relations =
|
||||
new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct,
|
||||
publicInherited = publicInherited, classes + (src, name))
|
||||
def addProduct(src: File, prod: File, name: String): Relations =
|
||||
new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct,
|
||||
publicInherited = publicInherited, classes + (src, name))
|
||||
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = {
|
||||
val newI = if(inherited) publicInherited.addExternal(src, dependsOn) else publicInherited
|
||||
val newD = direct.addExternal(src, dependsOn)
|
||||
new MRelationsDefaultImpl( srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
|
||||
}
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = {
|
||||
val newI = if (inherited) publicInherited.addExternal(src, dependsOn) else publicInherited
|
||||
val newD = direct.addExternal(src, dependsOn)
|
||||
new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
|
||||
}
|
||||
|
||||
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations =
|
||||
{
|
||||
val newI = publicInherited.addInternal(src, inherited)
|
||||
val newD = direct.addInternal(src, dependsOn)
|
||||
new MRelationsDefaultImpl( srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
|
||||
}
|
||||
|
||||
def names: Relation[File, String] =
|
||||
throw new UnsupportedOperationException("Tracking of used names is not supported " +
|
||||
"when `nameHashing` is disabled.")
|
||||
|
||||
def addUsedName(src: File, name: String): Relations =
|
||||
throw new UnsupportedOperationException("Tracking of used names is not supported " +
|
||||
"when `nameHashing` is disabled.")
|
||||
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations =
|
||||
new MRelationsDefaultImpl( srcProd, binaryDep + (src, dependsOn), direct = direct,
|
||||
publicInherited = publicInherited, classes)
|
||||
|
||||
def ++ (o: Relations): Relations = {
|
||||
if (nameHashing != o.nameHashing)
|
||||
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
|
||||
"with different values of `nameHashing` flag.")
|
||||
new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct,
|
||||
publicInherited ++ o.publicInherited, classes ++ o.classes)
|
||||
}
|
||||
def -- (sources: Iterable[File]) =
|
||||
new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources,
|
||||
publicInherited = publicInherited -- sources, classes -- sources)
|
||||
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: File => K): Map[K, Relations] =
|
||||
{
|
||||
type MapRel[T] = Map[K, Relation[File, T]]
|
||||
def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source],
|
||||
inherited: Map[K, Source], classesMap: MapRel[String],
|
||||
namesMap: MapRel[String]): Map[K, Relations] =
|
||||
{
|
||||
def kRelations(k: K): Relations = {
|
||||
def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k)
|
||||
def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource)
|
||||
def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies =
|
||||
m.getOrElse(k, Relations.emptySourceDependencies)
|
||||
new MRelationsDefaultImpl( get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited),
|
||||
get(classesMap))
|
||||
}
|
||||
val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList
|
||||
Map( keys.map( (k: K) => (k, kRelations(k)) ) : _*)
|
||||
}
|
||||
|
||||
def f1[B](item: (File, B)): K = f(item._1)
|
||||
|
||||
outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f),
|
||||
publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1))
|
||||
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations =
|
||||
{
|
||||
val newI = publicInherited.addInternal(src, inherited)
|
||||
val newD = direct.addInternal(src, dependsOn)
|
||||
new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes)
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: MRelationsDefaultImpl =>
|
||||
srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct &&
|
||||
publicInherited == o.publicInherited && classes == o.classes
|
||||
case _ => false
|
||||
}
|
||||
def names: Relation[File, String] =
|
||||
throw new UnsupportedOperationException("Tracking of used names is not supported " +
|
||||
"when `nameHashing` is disabled.")
|
||||
|
||||
override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode
|
||||
def addUsedName(src: File, name: String): Relations =
|
||||
throw new UnsupportedOperationException("Tracking of used names is not supported " +
|
||||
"when `nameHashing` is disabled.")
|
||||
|
||||
override def toString = (
|
||||
"""
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations =
|
||||
new MRelationsDefaultImpl(srcProd, binaryDep + (src, dependsOn), direct = direct,
|
||||
publicInherited = publicInherited, classes)
|
||||
|
||||
def ++(o: Relations): Relations = {
|
||||
if (nameHashing != o.nameHashing)
|
||||
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
|
||||
"with different values of `nameHashing` flag.")
|
||||
new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct,
|
||||
publicInherited ++ o.publicInherited, classes ++ o.classes)
|
||||
}
|
||||
def --(sources: Iterable[File]) =
|
||||
new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources,
|
||||
publicInherited = publicInherited -- sources, classes -- sources)
|
||||
|
||||
@deprecated("Broken implementation. OK to remove in 0.14", "0.13.1")
|
||||
def groupBy[K](f: File => K): Map[K, Relations] =
|
||||
{
|
||||
type MapRel[T] = Map[K, Relation[File, T]]
|
||||
def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source],
|
||||
inherited: Map[K, Source], classesMap: MapRel[String],
|
||||
namesMap: MapRel[String]): Map[K, Relations] =
|
||||
{
|
||||
def kRelations(k: K): Relations = {
|
||||
def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k)
|
||||
def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource)
|
||||
def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies =
|
||||
m.getOrElse(k, Relations.emptySourceDependencies)
|
||||
new MRelationsDefaultImpl(get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited),
|
||||
get(classesMap))
|
||||
}
|
||||
val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList
|
||||
Map(keys.map((k: K) => (k, kRelations(k))): _*)
|
||||
}
|
||||
|
||||
def f1[B](item: (File, B)): K = f(item._1)
|
||||
|
||||
outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f),
|
||||
publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1))
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: MRelationsDefaultImpl =>
|
||||
srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct &&
|
||||
publicInherited == o.publicInherited && classes == o.classes
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode
|
||||
|
||||
override def toString = (
|
||||
"""
|
||||
|Relations:
|
||||
| products: %s
|
||||
| bin deps: %s
|
||||
| src deps: %s
|
||||
| ext deps: %s
|
||||
| class names: %s
|
||||
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s : _*)
|
||||
)
|
||||
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s: _*)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -426,78 +423,77 @@ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Re
|
|||
* needed by the name hashing invalidation algorithm.
|
||||
*/
|
||||
private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Relation[File, File],
|
||||
// memberRef should include everything in inherited
|
||||
val memberRef: SourceDependencies, val inheritance: SourceDependencies,
|
||||
classes: Relation[File, String],
|
||||
val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes)
|
||||
{
|
||||
def direct: Source =
|
||||
throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
def publicInherited: Source =
|
||||
throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
// memberRef should include everything in inherited
|
||||
val memberRef: SourceDependencies, val inheritance: SourceDependencies,
|
||||
classes: Relation[File, String],
|
||||
val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) {
|
||||
def direct: Source =
|
||||
throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
def publicInherited: Source =
|
||||
throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " +
|
||||
"when `nameHashing` flag is disabled.")
|
||||
|
||||
val nameHashing: Boolean = true
|
||||
val nameHashing: Boolean = true
|
||||
|
||||
def internalSrcDep: Relation[File, File] = memberRef.internal
|
||||
def externalDep: Relation[File, String] = memberRef.external
|
||||
def internalSrcDep: Relation[File, File] = memberRef.internal
|
||||
def externalDep: Relation[File, String] = memberRef.external
|
||||
|
||||
def addProduct(src: File, prod: File, name: String): Relations =
|
||||
new MRelationsNameHashing(srcProd + (src, prod), binaryDep, memberRef = memberRef,
|
||||
inheritance = inheritance, classes + (src, name), names = names)
|
||||
def addProduct(src: File, prod: File, name: String): Relations =
|
||||
new MRelationsNameHashing(srcProd + (src, prod), binaryDep, memberRef = memberRef,
|
||||
inheritance = inheritance, classes + (src, name), names = names)
|
||||
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = {
|
||||
val newIH = if(inherited) inheritance.addExternal(src, dependsOn) else inheritance
|
||||
val newMR = memberRef.addExternal(src, dependsOn)
|
||||
new MRelationsNameHashing( srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes,
|
||||
names = names)
|
||||
}
|
||||
def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = {
|
||||
val newIH = if (inherited) inheritance.addExternal(src, dependsOn) else inheritance
|
||||
val newMR = memberRef.addExternal(src, dependsOn)
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes,
|
||||
names = names)
|
||||
}
|
||||
|
||||
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = {
|
||||
val newIH = inheritance.addInternal(src, inherited)
|
||||
val newMR = memberRef.addInternal(src, dependsOn)
|
||||
new MRelationsNameHashing( srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes,
|
||||
names = names)
|
||||
}
|
||||
def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = {
|
||||
val newIH = inheritance.addInternal(src, inherited)
|
||||
val newMR = memberRef.addInternal(src, dependsOn)
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes,
|
||||
names = names)
|
||||
}
|
||||
|
||||
def addUsedName(src: File, name: String): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef,
|
||||
inheritance = inheritance, classes, names = names + (src, name))
|
||||
def addUsedName(src: File, name: String): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef,
|
||||
inheritance = inheritance, classes, names = names + (src, name))
|
||||
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), memberRef = memberRef,
|
||||
inheritance = inheritance, classes, names = names)
|
||||
def addBinaryDep(src: File, dependsOn: File): Relations =
|
||||
new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), memberRef = memberRef,
|
||||
inheritance = inheritance, classes, names = names)
|
||||
|
||||
def ++ (o: Relations): Relations = {
|
||||
if (!o.nameHashing)
|
||||
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
|
||||
"with different values of `nameHashing` flag.")
|
||||
new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep,
|
||||
memberRef = memberRef ++ o.memberRef, inheritance = inheritance ++ o.inheritance,
|
||||
classes ++ o.classes, names = names ++ o.names)
|
||||
}
|
||||
def -- (sources: Iterable[File]) =
|
||||
new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources,
|
||||
memberRef = memberRef -- sources, inheritance = inheritance -- sources, classes -- sources,
|
||||
names = names -- sources)
|
||||
def ++(o: Relations): Relations = {
|
||||
if (!o.nameHashing)
|
||||
throw new UnsupportedOperationException("The `++` operation is not supported for relations " +
|
||||
"with different values of `nameHashing` flag.")
|
||||
new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep,
|
||||
memberRef = memberRef ++ o.memberRef, inheritance = inheritance ++ o.inheritance,
|
||||
classes ++ o.classes, names = names ++ o.names)
|
||||
}
|
||||
def --(sources: Iterable[File]) =
|
||||
new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources,
|
||||
memberRef = memberRef -- sources, inheritance = inheritance -- sources, classes -- sources,
|
||||
names = names -- sources)
|
||||
|
||||
def groupBy[K](f: File => K): Map[K, Relations] = {
|
||||
throw new UnsupportedOperationException("Merging of Analyses that have" +
|
||||
"`relations.nameHashing` set to `true` is not supported.")
|
||||
}
|
||||
def groupBy[K](f: File => K): Map[K, Relations] = {
|
||||
throw new UnsupportedOperationException("Merging of Analyses that have" +
|
||||
"`relations.nameHashing` set to `true` is not supported.")
|
||||
}
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
case o: MRelationsNameHashing =>
|
||||
srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef &&
|
||||
inheritance == o.inheritance && classes == o.classes
|
||||
case _ => false
|
||||
}
|
||||
override def equals(other: Any) = other match {
|
||||
case o: MRelationsNameHashing =>
|
||||
srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef &&
|
||||
inheritance == o.inheritance && classes == o.classes
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode
|
||||
override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode
|
||||
|
||||
override def toString = (
|
||||
"""
|
||||
override def toString = (
|
||||
"""
|
||||
|Relations (with name hashing enabled):
|
||||
| products: %s
|
||||
| bin deps: %s
|
||||
|
|
@ -505,7 +501,7 @@ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Re
|
|||
| ext deps: %s
|
||||
| class names: %s
|
||||
| used names: %s
|
||||
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s : _*)
|
||||
)
|
||||
""".trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s: _*)
|
||||
)
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,40 +1,36 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import xsbti.Problem
|
||||
import xsbti.Problem
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
trait SourceInfo
|
||||
{
|
||||
def reportedProblems: Seq[Problem]
|
||||
def unreportedProblems: Seq[Problem]
|
||||
trait SourceInfo {
|
||||
def reportedProblems: Seq[Problem]
|
||||
def unreportedProblems: Seq[Problem]
|
||||
}
|
||||
trait SourceInfos
|
||||
{
|
||||
def ++(o: SourceInfos): SourceInfos
|
||||
def add(file: File, info: SourceInfo): SourceInfos
|
||||
def --(files: Iterable[File]): SourceInfos
|
||||
def groupBy[K](f: (File) => K): Map[K, SourceInfos]
|
||||
def get(file: File): SourceInfo
|
||||
def allInfos: Map[File, SourceInfo]
|
||||
trait SourceInfos {
|
||||
def ++(o: SourceInfos): SourceInfos
|
||||
def add(file: File, info: SourceInfo): SourceInfos
|
||||
def --(files: Iterable[File]): SourceInfos
|
||||
def groupBy[K](f: (File) => K): Map[K, SourceInfos]
|
||||
def get(file: File): SourceInfo
|
||||
def allInfos: Map[File, SourceInfo]
|
||||
}
|
||||
object SourceInfos
|
||||
{
|
||||
def empty: SourceInfos = make(Map.empty)
|
||||
def make(m: Map[File, SourceInfo]): SourceInfos = new MSourceInfos(m)
|
||||
object SourceInfos {
|
||||
def empty: SourceInfos = make(Map.empty)
|
||||
def make(m: Map[File, SourceInfo]): SourceInfos = new MSourceInfos(m)
|
||||
|
||||
val emptyInfo: SourceInfo = makeInfo(Nil, Nil)
|
||||
def makeInfo(reported: Seq[Problem], unreported: Seq[Problem]): SourceInfo =
|
||||
new MSourceInfo(reported, unreported)
|
||||
def merge(infos: Traversable[SourceInfos]): SourceInfos = (SourceInfos.empty /: infos)(_ ++ _)
|
||||
val emptyInfo: SourceInfo = makeInfo(Nil, Nil)
|
||||
def makeInfo(reported: Seq[Problem], unreported: Seq[Problem]): SourceInfo =
|
||||
new MSourceInfo(reported, unreported)
|
||||
def merge(infos: Traversable[SourceInfos]): SourceInfos = (SourceInfos.empty /: infos)(_ ++ _)
|
||||
}
|
||||
private final class MSourceInfos(val allInfos: Map[File, SourceInfo]) extends SourceInfos
|
||||
{
|
||||
def ++(o: SourceInfos) = new MSourceInfos(allInfos ++ o.allInfos)
|
||||
def --(sources: Iterable[File]) = new MSourceInfos(allInfos -- sources)
|
||||
def groupBy[K](f: File => K): Map[K, SourceInfos] = allInfos groupBy (x => f(x._1)) map { x => (x._1, new MSourceInfos(x._2)) }
|
||||
def add(file: File, info: SourceInfo) = new MSourceInfos(allInfos + ((file, info)))
|
||||
def get(file:File) = allInfos.getOrElse(file, SourceInfos.emptyInfo)
|
||||
private final class MSourceInfos(val allInfos: Map[File, SourceInfo]) extends SourceInfos {
|
||||
def ++(o: SourceInfos) = new MSourceInfos(allInfos ++ o.allInfos)
|
||||
def --(sources: Iterable[File]) = new MSourceInfos(allInfos -- sources)
|
||||
def groupBy[K](f: File => K): Map[K, SourceInfos] = allInfos groupBy (x => f(x._1)) map { x => (x._1, new MSourceInfos(x._2)) }
|
||||
def add(file: File, info: SourceInfo) = new MSourceInfos(allInfos + ((file, info)))
|
||||
def get(file: File) = allInfos.getOrElse(file, SourceInfos.emptyInfo)
|
||||
}
|
||||
private final class MSourceInfo(val reportedProblems: Seq[Problem], val unreportedProblems: Seq[Problem]) extends SourceInfo
|
||||
|
|
|
|||
|
|
@ -4,191 +4,186 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import java.io.{File, IOException}
|
||||
import java.io.{ File, IOException }
|
||||
import Stamp.getStamp
|
||||
import scala.util.matching.Regex
|
||||
|
||||
trait ReadStamps
|
||||
{
|
||||
/** The Stamp for the given product at the time represented by this Stamps instance.*/
|
||||
def product(prod: File): Stamp
|
||||
/** The Stamp for the given source file at the time represented by this Stamps instance.*/
|
||||
def internalSource(src: File): Stamp
|
||||
/** The Stamp for the given binary dependency at the time represented by this Stamps instance.*/
|
||||
def binary(bin: File): Stamp
|
||||
trait ReadStamps {
|
||||
/** The Stamp for the given product at the time represented by this Stamps instance.*/
|
||||
def product(prod: File): Stamp
|
||||
/** The Stamp for the given source file at the time represented by this Stamps instance.*/
|
||||
def internalSource(src: File): Stamp
|
||||
/** The Stamp for the given binary dependency at the time represented by this Stamps instance.*/
|
||||
def binary(bin: File): Stamp
|
||||
}
|
||||
|
||||
/** Provides information about files as they were at a specific time.*/
|
||||
trait Stamps extends ReadStamps
|
||||
{
|
||||
def allInternalSources: collection.Set[File]
|
||||
def allBinaries: collection.Set[File]
|
||||
def allProducts: collection.Set[File]
|
||||
|
||||
def sources: Map[File, Stamp]
|
||||
def binaries: Map[File, Stamp]
|
||||
def products: Map[File, Stamp]
|
||||
def classNames: Map[File, String]
|
||||
|
||||
def className(bin: File): Option[String]
|
||||
|
||||
def markInternalSource(src: File, s: Stamp): Stamps
|
||||
def markBinary(bin: File, className: String, s: Stamp): Stamps
|
||||
def markProduct(prod: File, s: Stamp): Stamps
|
||||
|
||||
def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps
|
||||
|
||||
def ++ (o: Stamps): Stamps
|
||||
def groupBy[K](prod: Map[K, File => Boolean], sourcesGrouping: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps]
|
||||
trait Stamps extends ReadStamps {
|
||||
def allInternalSources: collection.Set[File]
|
||||
def allBinaries: collection.Set[File]
|
||||
def allProducts: collection.Set[File]
|
||||
|
||||
def sources: Map[File, Stamp]
|
||||
def binaries: Map[File, Stamp]
|
||||
def products: Map[File, Stamp]
|
||||
def classNames: Map[File, String]
|
||||
|
||||
def className(bin: File): Option[String]
|
||||
|
||||
def markInternalSource(src: File, s: Stamp): Stamps
|
||||
def markBinary(bin: File, className: String, s: Stamp): Stamps
|
||||
def markProduct(prod: File, s: Stamp): Stamps
|
||||
|
||||
def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps
|
||||
|
||||
def ++(o: Stamps): Stamps
|
||||
def groupBy[K](prod: Map[K, File => Boolean], sourcesGrouping: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps]
|
||||
}
|
||||
|
||||
sealed trait Stamp
|
||||
{
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: Stamp => Stamp.equivStamp.equiv(this, o)
|
||||
case _ => false
|
||||
}
|
||||
sealed trait Stamp {
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: Stamp => Stamp.equivStamp.equiv(this, o)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def toString: String = Stamp.toString(this)
|
||||
}
|
||||
|
||||
final class Hash(val value: Array[Byte]) extends Stamp {
|
||||
override def hashCode: Int = java.util.Arrays.hashCode(value)
|
||||
override def hashCode: Int = java.util.Arrays.hashCode(value)
|
||||
}
|
||||
final class LastModified(val value: Long) extends Stamp {
|
||||
override def hashCode: Int = (value ^ (value >>> 32)).toInt
|
||||
override def hashCode: Int = (value ^ (value >>> 32)).toInt
|
||||
}
|
||||
final class Exists(val value: Boolean) extends Stamp {
|
||||
override def hashCode: Int = if(value) 0 else 1
|
||||
override def hashCode: Int = if (value) 0 else 1
|
||||
}
|
||||
|
||||
object Stamp
|
||||
{
|
||||
implicit val equivStamp: Equiv[Stamp] = new Equiv[Stamp] {
|
||||
def equiv(a: Stamp, b: Stamp) = (a,b) match {
|
||||
case (h1: Hash, h2: Hash) => h1.value sameElements h2.value
|
||||
case (e1: Exists, e2: Exists) => e1.value == e2.value
|
||||
case (lm1: LastModified, lm2: LastModified) => lm1.value == lm2.value
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
object Stamp {
|
||||
implicit val equivStamp: Equiv[Stamp] = new Equiv[Stamp] {
|
||||
def equiv(a: Stamp, b: Stamp) = (a, b) match {
|
||||
case (h1: Hash, h2: Hash) => h1.value sameElements h2.value
|
||||
case (e1: Exists, e2: Exists) => e1.value == e2.value
|
||||
case (lm1: LastModified, lm2: LastModified) => lm1.value == lm2.value
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: toString/fromString used for serialization, not just for debug prints.
|
||||
|
||||
def toString(s: Stamp): String = s match {
|
||||
case e: Exists => if(e.value) "exists" else "absent"
|
||||
case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
|
||||
case e: Exists => if (e.value) "exists" else "absent"
|
||||
case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
|
||||
case lm: LastModified => "lastModified(" + lm.value + ")"
|
||||
}
|
||||
|
||||
private val hashPattern = """hash\((\w+)\)""".r
|
||||
private val lastModifiedPattern = """lastModified\((\d+)\)""".r
|
||||
private val hashPattern = """hash\((\w+)\)""".r
|
||||
private val lastModifiedPattern = """lastModified\((\d+)\)""".r
|
||||
|
||||
def fromString(s: String): Stamp = s match {
|
||||
case "exists" => new Exists(true)
|
||||
case "absent" => new Exists(false)
|
||||
case hashPattern(value) => new Hash(Hash.fromHex(value))
|
||||
case lastModifiedPattern(value) => new LastModified(java.lang.Long.parseLong(value))
|
||||
case _ => throw new IllegalArgumentException("Unrecognized Stamp string representation: " + s)
|
||||
}
|
||||
case "exists" => new Exists(true)
|
||||
case "absent" => new Exists(false)
|
||||
case hashPattern(value) => new Hash(Hash.fromHex(value))
|
||||
case lastModifiedPattern(value) => new LastModified(java.lang.Long.parseLong(value))
|
||||
case _ => throw new IllegalArgumentException("Unrecognized Stamp string representation: " + s)
|
||||
}
|
||||
|
||||
def show(s: Stamp): String = s match {
|
||||
case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
|
||||
case e: Exists => if(e.value) "exists" else "does not exist"
|
||||
case lm: LastModified => "last modified(" + lm.value + ")"
|
||||
}
|
||||
|
||||
val hash = (f: File) => tryStamp(new Hash(Hash(f)))
|
||||
val lastModified = (f: File) => tryStamp(new LastModified(f.lastModified))
|
||||
val exists = (f: File) => tryStamp(if(f.exists) present else notPresent)
|
||||
|
||||
def tryStamp(g: => Stamp): Stamp = try { g } catch { case i: IOException => notPresent }
|
||||
|
||||
val notPresent = new Exists(false)
|
||||
val present = new Exists(true)
|
||||
|
||||
def getStamp(map: Map[File, Stamp], src: File): Stamp = map.getOrElse(src, notPresent)
|
||||
def show(s: Stamp): String = s match {
|
||||
case h: Hash => "hash(" + Hash.toHex(h.value) + ")"
|
||||
case e: Exists => if (e.value) "exists" else "does not exist"
|
||||
case lm: LastModified => "last modified(" + lm.value + ")"
|
||||
}
|
||||
|
||||
val hash = (f: File) => tryStamp(new Hash(Hash(f)))
|
||||
val lastModified = (f: File) => tryStamp(new LastModified(f.lastModified))
|
||||
val exists = (f: File) => tryStamp(if (f.exists) present else notPresent)
|
||||
|
||||
def tryStamp(g: => Stamp): Stamp = try { g } catch { case i: IOException => notPresent }
|
||||
|
||||
val notPresent = new Exists(false)
|
||||
val present = new Exists(true)
|
||||
|
||||
def getStamp(map: Map[File, Stamp], src: File): Stamp = map.getOrElse(src, notPresent)
|
||||
}
|
||||
|
||||
object Stamps
|
||||
{
|
||||
/** Creates a ReadStamps instance that will calculate and cache the stamp for sources and binaries
|
||||
* on the first request according to the provided `srcStamp` and `binStamp` functions. Each
|
||||
* stamp is calculated separately on demand.
|
||||
* The stamp for a product is always recalculated. */
|
||||
def initial(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp): ReadStamps = new InitialStamps(prodStamp, srcStamp, binStamp)
|
||||
|
||||
def empty: Stamps =
|
||||
{
|
||||
val eSt = Map.empty[File, Stamp]
|
||||
apply(eSt, eSt, eSt, Map.empty[File, String])
|
||||
}
|
||||
def apply(products: Map[File, Stamp], sources: Map[File, Stamp], binaries: Map[File, Stamp], binaryClassNames: Map[File, String]): Stamps =
|
||||
new MStamps(products, sources, binaries, binaryClassNames)
|
||||
object Stamps {
|
||||
/**
|
||||
* Creates a ReadStamps instance that will calculate and cache the stamp for sources and binaries
|
||||
* on the first request according to the provided `srcStamp` and `binStamp` functions. Each
|
||||
* stamp is calculated separately on demand.
|
||||
* The stamp for a product is always recalculated.
|
||||
*/
|
||||
def initial(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp): ReadStamps = new InitialStamps(prodStamp, srcStamp, binStamp)
|
||||
|
||||
def merge(stamps: Traversable[Stamps]): Stamps = (Stamps.empty /: stamps)(_ ++ _)
|
||||
def empty: Stamps =
|
||||
{
|
||||
val eSt = Map.empty[File, Stamp]
|
||||
apply(eSt, eSt, eSt, Map.empty[File, String])
|
||||
}
|
||||
def apply(products: Map[File, Stamp], sources: Map[File, Stamp], binaries: Map[File, Stamp], binaryClassNames: Map[File, String]): Stamps =
|
||||
new MStamps(products, sources, binaries, binaryClassNames)
|
||||
|
||||
def merge(stamps: Traversable[Stamps]): Stamps = (Stamps.empty /: stamps)(_ ++ _)
|
||||
}
|
||||
|
||||
private class MStamps(val products: Map[File, Stamp], val sources: Map[File, Stamp], val binaries: Map[File, Stamp], val classNames: Map[File, String]) extends Stamps
|
||||
{
|
||||
def allInternalSources: collection.Set[File] = sources.keySet
|
||||
def allBinaries: collection.Set[File] = binaries.keySet
|
||||
def allProducts: collection.Set[File] = products.keySet
|
||||
|
||||
def ++ (o: Stamps): Stamps =
|
||||
new MStamps(products ++ o.products, sources ++ o.sources, binaries ++ o.binaries, classNames ++ o.classNames)
|
||||
|
||||
def markInternalSource(src: File, s: Stamp): Stamps =
|
||||
new MStamps(products, sources.updated(src, s), binaries, classNames)
|
||||
private class MStamps(val products: Map[File, Stamp], val sources: Map[File, Stamp], val binaries: Map[File, Stamp], val classNames: Map[File, String]) extends Stamps {
|
||||
def allInternalSources: collection.Set[File] = sources.keySet
|
||||
def allBinaries: collection.Set[File] = binaries.keySet
|
||||
def allProducts: collection.Set[File] = products.keySet
|
||||
|
||||
def markBinary(bin: File, className: String, s: Stamp): Stamps =
|
||||
new MStamps(products, sources, binaries.updated(bin, s), classNames.updated(bin, className))
|
||||
def ++(o: Stamps): Stamps =
|
||||
new MStamps(products ++ o.products, sources ++ o.sources, binaries ++ o.binaries, classNames ++ o.classNames)
|
||||
|
||||
def markProduct(prod: File, s: Stamp): Stamps =
|
||||
new MStamps(products.updated(prod, s), sources, binaries, classNames)
|
||||
|
||||
def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps =
|
||||
new MStamps(products.filterKeys(prod), sources -- removeSources, binaries.filterKeys(bin), classNames.filterKeys(bin))
|
||||
|
||||
def groupBy[K](prod: Map[K, File => Boolean], f: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] =
|
||||
{
|
||||
val sourcesMap: Map[K, Map[File, Stamp]] = sources.groupBy(x => f(x._1))
|
||||
def markInternalSource(src: File, s: Stamp): Stamps =
|
||||
new MStamps(products, sources.updated(src, s), binaries, classNames)
|
||||
|
||||
val constFalse = (f: File) => false
|
||||
def kStamps(k: K): Stamps = new MStamps(
|
||||
products.filterKeys(prod.getOrElse(k, constFalse)),
|
||||
sourcesMap.getOrElse(k, Map.empty[File,Stamp]),
|
||||
binaries.filterKeys(bin.getOrElse(k, constFalse)),
|
||||
classNames.filterKeys(bin.getOrElse(k, constFalse))
|
||||
)
|
||||
def markBinary(bin: File, className: String, s: Stamp): Stamps =
|
||||
new MStamps(products, sources, binaries.updated(bin, s), classNames.updated(bin, className))
|
||||
|
||||
(for (k <- prod.keySet ++ sourcesMap.keySet ++ bin.keySet) yield (k, kStamps(k))).toMap
|
||||
}
|
||||
def markProduct(prod: File, s: Stamp): Stamps =
|
||||
new MStamps(products.updated(prod, s), sources, binaries, classNames)
|
||||
|
||||
def product(prod: File) = getStamp(products, prod)
|
||||
def internalSource(src: File) = getStamp(sources, src)
|
||||
def binary(bin: File) = getStamp(binaries, bin)
|
||||
def className(bin: File) = classNames get bin
|
||||
def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps =
|
||||
new MStamps(products.filterKeys(prod), sources -- removeSources, binaries.filterKeys(bin), classNames.filterKeys(bin))
|
||||
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: MStamps => products == o.products && sources == o.sources && binaries == o.binaries && classNames == o.classNames
|
||||
case _ => false
|
||||
}
|
||||
def groupBy[K](prod: Map[K, File => Boolean], f: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] =
|
||||
{
|
||||
val sourcesMap: Map[K, Map[File, Stamp]] = sources.groupBy(x => f(x._1))
|
||||
|
||||
override lazy val hashCode: Int = (products :: sources :: binaries :: classNames :: Nil).hashCode
|
||||
|
||||
override def toString: String =
|
||||
"Stamps for: %d products, %d sources, %d binaries, %d classNames".format(products.size, sources.size, binaries.size, classNames.size)
|
||||
val constFalse = (f: File) => false
|
||||
def kStamps(k: K): Stamps = new MStamps(
|
||||
products.filterKeys(prod.getOrElse(k, constFalse)),
|
||||
sourcesMap.getOrElse(k, Map.empty[File, Stamp]),
|
||||
binaries.filterKeys(bin.getOrElse(k, constFalse)),
|
||||
classNames.filterKeys(bin.getOrElse(k, constFalse))
|
||||
)
|
||||
|
||||
(for (k <- prod.keySet ++ sourcesMap.keySet ++ bin.keySet) yield (k, kStamps(k))).toMap
|
||||
}
|
||||
|
||||
def product(prod: File) = getStamp(products, prod)
|
||||
def internalSource(src: File) = getStamp(sources, src)
|
||||
def binary(bin: File) = getStamp(binaries, bin)
|
||||
def className(bin: File) = classNames get bin
|
||||
|
||||
override def equals(other: Any): Boolean = other match {
|
||||
case o: MStamps => products == o.products && sources == o.sources && binaries == o.binaries && classNames == o.classNames
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override lazy val hashCode: Int = (products :: sources :: binaries :: classNames :: Nil).hashCode
|
||||
|
||||
override def toString: String =
|
||||
"Stamps for: %d products, %d sources, %d binaries, %d classNames".format(products.size, sources.size, binaries.size, classNames.size)
|
||||
}
|
||||
|
||||
private class InitialStamps(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp) extends ReadStamps
|
||||
{
|
||||
import collection.mutable.{HashMap, Map}
|
||||
// cached stamps for files that do not change during compilation
|
||||
private val sources: Map[File, Stamp] = new HashMap
|
||||
private val binaries: Map[File, Stamp] = new HashMap
|
||||
|
||||
def product(prod: File): Stamp = prodStamp(prod)
|
||||
def internalSource(src: File): Stamp = synchronized { sources.getOrElseUpdate(src, srcStamp(src)) }
|
||||
def binary(bin: File): Stamp = synchronized { binaries.getOrElseUpdate(bin, binStamp(bin)) }
|
||||
private class InitialStamps(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp) extends ReadStamps {
|
||||
import collection.mutable.{ HashMap, Map }
|
||||
// cached stamps for files that do not change during compilation
|
||||
private val sources: Map[File, Stamp] = new HashMap
|
||||
private val binaries: Map[File, Stamp] = new HashMap
|
||||
|
||||
def product(prod: File): Stamp = prodStamp(prod)
|
||||
def internalSource(src: File): Stamp = synchronized { sources.getOrElseUpdate(src, srcStamp(src)) }
|
||||
def binary(bin: File): Stamp = synchronized { binaries.getOrElseUpdate(bin, binStamp(bin)) }
|
||||
}
|
||||
|
|
@ -6,230 +6,223 @@ package compiler
|
|||
|
||||
import inc._
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import java.io.File
|
||||
import classpath.ClasspathUtilities
|
||||
import classfile.Analyze
|
||||
import inc.Locate.DefinesClass
|
||||
import inc.IncOptions
|
||||
import CompileSetup._
|
||||
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat }
|
||||
import scala.annotation.tailrec
|
||||
import java.io.File
|
||||
import classpath.ClasspathUtilities
|
||||
import classfile.Analyze
|
||||
import inc.Locate.DefinesClass
|
||||
import inc.IncOptions
|
||||
import CompileSetup._
|
||||
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat }
|
||||
|
||||
import xsbti.{ Reporter, AnalysisCallback }
|
||||
import xsbti.api.Source
|
||||
import xsbti.compile.{CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress}
|
||||
import CompileOrder.{JavaThenScala, Mixed, ScalaThenJava}
|
||||
import xsbti.{ Reporter, AnalysisCallback }
|
||||
import xsbti.api.Source
|
||||
import xsbti.compile.{ CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress }
|
||||
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }
|
||||
|
||||
final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File],
|
||||
val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val progress: Option[CompileProgress], val getAnalysis: File => Option[Analysis], val definesClass: DefinesClass,
|
||||
val reporter: Reporter, val compiler: AnalyzingCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions)
|
||||
val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val progress: Option[CompileProgress], val getAnalysis: File => Option[Analysis], val definesClass: DefinesClass,
|
||||
val reporter: Reporter, val compiler: AnalyzingCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions)
|
||||
|
||||
class AggressiveCompile(cacheFile: File)
|
||||
{
|
||||
def apply(compiler: AnalyzingCompiler,
|
||||
javac: xsbti.compile.JavaCompiler,
|
||||
sources: Seq[File], classpath: Seq[File],
|
||||
output: Output,
|
||||
cache: GlobalsCache,
|
||||
progress: Option[CompileProgress] = None,
|
||||
options: Seq[String] = Nil,
|
||||
javacOptions: Seq[String] = Nil,
|
||||
analysisMap: File => Option[Analysis] = { _ => None },
|
||||
definesClass: DefinesClass = Locate.definesClass _,
|
||||
reporter: Reporter,
|
||||
compileOrder: CompileOrder = Mixed,
|
||||
skip: Boolean = false,
|
||||
incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
|
||||
{
|
||||
val setup = new CompileSetup(output, new CompileOptions(options, javacOptions),
|
||||
compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing)
|
||||
compile1(sources, classpath, setup, progress, store, analysisMap, definesClass,
|
||||
compiler, javac, reporter, skip, cache, incrementalCompilerOptions)
|
||||
}
|
||||
class AggressiveCompile(cacheFile: File) {
|
||||
def apply(compiler: AnalyzingCompiler,
|
||||
javac: xsbti.compile.JavaCompiler,
|
||||
sources: Seq[File], classpath: Seq[File],
|
||||
output: Output,
|
||||
cache: GlobalsCache,
|
||||
progress: Option[CompileProgress] = None,
|
||||
options: Seq[String] = Nil,
|
||||
javacOptions: Seq[String] = Nil,
|
||||
analysisMap: File => Option[Analysis] = { _ => None },
|
||||
definesClass: DefinesClass = Locate.definesClass _,
|
||||
reporter: Reporter,
|
||||
compileOrder: CompileOrder = Mixed,
|
||||
skip: Boolean = false,
|
||||
incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
|
||||
{
|
||||
val setup = new CompileSetup(output, new CompileOptions(options, javacOptions),
|
||||
compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing)
|
||||
compile1(sources, classpath, setup, progress, store, analysisMap, definesClass,
|
||||
compiler, javac, reporter, skip, cache, incrementalCompilerOptions)
|
||||
}
|
||||
|
||||
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
|
||||
args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)
|
||||
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
|
||||
args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)
|
||||
|
||||
def compile1(sources: Seq[File],
|
||||
classpath: Seq[File],
|
||||
setup: CompileSetup, progress: Option[CompileProgress],
|
||||
store: AnalysisStore,
|
||||
analysis: File => Option[Analysis],
|
||||
definesClass: DefinesClass,
|
||||
compiler: AnalyzingCompiler,
|
||||
javac: xsbti.compile.JavaCompiler,
|
||||
reporter: Reporter, skip: Boolean,
|
||||
cache: GlobalsCache,
|
||||
incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
|
||||
{
|
||||
val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions)
|
||||
if(skip)
|
||||
previousAnalysis
|
||||
else {
|
||||
val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup,
|
||||
progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions)
|
||||
val (modified, result) = compile2(config)
|
||||
if(modified)
|
||||
store.set(result, setup)
|
||||
result
|
||||
}
|
||||
}
|
||||
def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Boolean, Analysis) =
|
||||
{
|
||||
import config._
|
||||
import currentSetup._
|
||||
val absClasspath = classpath.map(_.getAbsoluteFile)
|
||||
val apiOption = (api: Either[Boolean, Source]) => api.right.toOption
|
||||
val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
|
||||
val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath)
|
||||
val entry = Locate.entry(searchClasspath, definesClass)
|
||||
def compile1(sources: Seq[File],
|
||||
classpath: Seq[File],
|
||||
setup: CompileSetup, progress: Option[CompileProgress],
|
||||
store: AnalysisStore,
|
||||
analysis: File => Option[Analysis],
|
||||
definesClass: DefinesClass,
|
||||
compiler: AnalyzingCompiler,
|
||||
javac: xsbti.compile.JavaCompiler,
|
||||
reporter: Reporter, skip: Boolean,
|
||||
cache: GlobalsCache,
|
||||
incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis =
|
||||
{
|
||||
val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions)
|
||||
if (skip)
|
||||
previousAnalysis
|
||||
else {
|
||||
val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup,
|
||||
progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions)
|
||||
val (modified, result) = compile2(config)
|
||||
if (modified)
|
||||
store.set(result, setup)
|
||||
result
|
||||
}
|
||||
}
|
||||
def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Boolean, Analysis) =
|
||||
{
|
||||
import config._
|
||||
import currentSetup._
|
||||
val absClasspath = classpath.map(_.getAbsoluteFile)
|
||||
val apiOption = (api: Either[Boolean, Source]) => api.right.toOption
|
||||
val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
|
||||
val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath)
|
||||
val entry = Locate.entry(searchClasspath, definesClass)
|
||||
|
||||
val compile0 = (include: Set[File], changes: DependencyChanges, callback: AnalysisCallback) => {
|
||||
val outputDirs = outputDirectories(output)
|
||||
outputDirs foreach (IO.createDirectory)
|
||||
val incSrc = sources.filter(include)
|
||||
val (javaSrcs, scalaSrcs) = incSrc partition javaOnly
|
||||
logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
|
||||
def compileScala() =
|
||||
if(!scalaSrcs.isEmpty)
|
||||
{
|
||||
val sources = if(order == Mixed) incSrc else scalaSrcs
|
||||
val arguments = cArgs(Nil, absClasspath, None, options.options)
|
||||
timed("Scala compilation", log) {
|
||||
compiler.compile(sources, changes, arguments, output, callback, reporter, cache, log, progress)
|
||||
}
|
||||
}
|
||||
def compileJava() =
|
||||
if(!javaSrcs.isEmpty)
|
||||
{
|
||||
import Path._
|
||||
@tailrec def ancestor(f1: File, f2: File): Boolean =
|
||||
if (f2 eq null) false else
|
||||
if (f1 == f2) true else ancestor(f1, f2.getParentFile)
|
||||
val compile0 = (include: Set[File], changes: DependencyChanges, callback: AnalysisCallback) => {
|
||||
val outputDirs = outputDirectories(output)
|
||||
outputDirs foreach (IO.createDirectory)
|
||||
val incSrc = sources.filter(include)
|
||||
val (javaSrcs, scalaSrcs) = incSrc partition javaOnly
|
||||
logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
|
||||
def compileScala() =
|
||||
if (!scalaSrcs.isEmpty) {
|
||||
val sources = if (order == Mixed) incSrc else scalaSrcs
|
||||
val arguments = cArgs(Nil, absClasspath, None, options.options)
|
||||
timed("Scala compilation", log) {
|
||||
compiler.compile(sources, changes, arguments, output, callback, reporter, cache, log, progress)
|
||||
}
|
||||
}
|
||||
def compileJava() =
|
||||
if (!javaSrcs.isEmpty) {
|
||||
import Path._
|
||||
@tailrec def ancestor(f1: File, f2: File): Boolean =
|
||||
if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)
|
||||
|
||||
val chunks: Map[Option[File], Seq[File]] = output match {
|
||||
case single: SingleOutput => Map(Some(single.outputDirectory) -> javaSrcs)
|
||||
case multi: MultipleOutput =>
|
||||
javaSrcs groupBy { src =>
|
||||
multi.outputGroups find {out => ancestor(out.sourceDirectory, src)} map (_.outputDirectory)
|
||||
}
|
||||
}
|
||||
chunks.get(None) foreach { srcs =>
|
||||
log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(","))
|
||||
}
|
||||
val memo = for ((Some(outputDirectory), srcs) <- chunks) yield {
|
||||
val classesFinder = PathFinder(outputDirectory) ** "*.class"
|
||||
(classesFinder, classesFinder.get, srcs)
|
||||
}
|
||||
val chunks: Map[Option[File], Seq[File]] = output match {
|
||||
case single: SingleOutput => Map(Some(single.outputDirectory) -> javaSrcs)
|
||||
case multi: MultipleOutput =>
|
||||
javaSrcs groupBy { src =>
|
||||
multi.outputGroups find { out => ancestor(out.sourceDirectory, src) } map (_.outputDirectory)
|
||||
}
|
||||
}
|
||||
chunks.get(None) foreach { srcs =>
|
||||
log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(","))
|
||||
}
|
||||
val memo = for ((Some(outputDirectory), srcs) <- chunks) yield {
|
||||
val classesFinder = PathFinder(outputDirectory) ** "*.class"
|
||||
(classesFinder, classesFinder.get, srcs)
|
||||
}
|
||||
|
||||
val loader = ClasspathUtilities.toLoader(searchClasspath)
|
||||
timed("Java compilation", log) {
|
||||
javac.compile(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, log)
|
||||
}
|
||||
val loader = ClasspathUtilities.toLoader(searchClasspath)
|
||||
timed("Java compilation", log) {
|
||||
javac.compile(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, log)
|
||||
}
|
||||
|
||||
def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = {
|
||||
val (api, inherits) = ClassToAPI.process(classes)
|
||||
callback.api(source, api)
|
||||
inherits.map(_.getName)
|
||||
}
|
||||
def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = {
|
||||
val (api, inherits) = ClassToAPI.process(classes)
|
||||
callback.api(source, api)
|
||||
inherits.map(_.getName)
|
||||
}
|
||||
|
||||
timed("Java analysis", log) {
|
||||
for ((classesFinder, oldClasses, srcs) <- memo) {
|
||||
val newClasses = Set(classesFinder.get: _*) -- oldClasses
|
||||
Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI)
|
||||
}
|
||||
}
|
||||
}
|
||||
if(order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() }
|
||||
}
|
||||
timed("Java analysis", log) {
|
||||
for ((classesFinder, oldClasses, srcs) <- memo) {
|
||||
val newClasses = Set(classesFinder.get: _*) -- oldClasses
|
||||
Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI)
|
||||
}
|
||||
}
|
||||
}
|
||||
if (order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() }
|
||||
}
|
||||
|
||||
val sourcesSet = sources.toSet
|
||||
val analysis = previousSetup match {
|
||||
case Some(previous) if previous.nameHashing != currentSetup.nameHashing =>
|
||||
// if the value of `nameHashing` flag has changed we have to throw away
|
||||
// previous Analysis completely and start with empty Analysis object
|
||||
// that supports the particular value of the `nameHashing` flag.
|
||||
// Otherwise we'll be getting UnsupportedOperationExceptions
|
||||
Analysis.empty(currentSetup.nameHashing)
|
||||
case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
|
||||
case _ => Incremental.prune(sourcesSet, previousAnalysis)
|
||||
}
|
||||
IncrementalCompile(sourcesSet, entry, compile0, analysis, getAnalysis, output, log, incOptions)
|
||||
}
|
||||
private[this] def outputDirectories(output: Output): Seq[File] = output match {
|
||||
case single: SingleOutput => List(single.outputDirectory)
|
||||
case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory)
|
||||
}
|
||||
private[this] def timed[T](label: String, log: Logger)(t: => T): T =
|
||||
{
|
||||
val start = System.nanoTime
|
||||
val result = t
|
||||
val elapsed = System.nanoTime - start
|
||||
log.debug(label + " took " + (elapsed/1e9) + " s")
|
||||
result
|
||||
}
|
||||
private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File])
|
||||
{
|
||||
val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
|
||||
val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
|
||||
val combined = scalaMsg ++ javaMsg
|
||||
if(!combined.isEmpty)
|
||||
log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
|
||||
}
|
||||
private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) =
|
||||
previous match
|
||||
{
|
||||
case Some((an, setup)) => (an, Some(setup))
|
||||
case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None)
|
||||
}
|
||||
def javaOnly(f: File) = f.getName.endsWith(".java")
|
||||
val sourcesSet = sources.toSet
|
||||
val analysis = previousSetup match {
|
||||
case Some(previous) if previous.nameHashing != currentSetup.nameHashing =>
|
||||
// if the value of `nameHashing` flag has changed we have to throw away
|
||||
// previous Analysis completely and start with empty Analysis object
|
||||
// that supports the particular value of the `nameHashing` flag.
|
||||
// Otherwise we'll be getting UnsupportedOperationExceptions
|
||||
Analysis.empty(currentSetup.nameHashing)
|
||||
case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
|
||||
case _ => Incremental.prune(sourcesSet, previousAnalysis)
|
||||
}
|
||||
IncrementalCompile(sourcesSet, entry, compile0, analysis, getAnalysis, output, log, incOptions)
|
||||
}
|
||||
private[this] def outputDirectories(output: Output): Seq[File] = output match {
|
||||
case single: SingleOutput => List(single.outputDirectory)
|
||||
case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory)
|
||||
}
|
||||
private[this] def timed[T](label: String, log: Logger)(t: => T): T =
|
||||
{
|
||||
val start = System.nanoTime
|
||||
val result = t
|
||||
val elapsed = System.nanoTime - start
|
||||
log.debug(label + " took " + (elapsed / 1e9) + " s")
|
||||
result
|
||||
}
|
||||
private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]) {
|
||||
val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
|
||||
val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
|
||||
val combined = scalaMsg ++ javaMsg
|
||||
if (!combined.isEmpty)
|
||||
log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
|
||||
}
|
||||
private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) =
|
||||
previous match {
|
||||
case Some((an, setup)) => (an, Some(setup))
|
||||
case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None)
|
||||
}
|
||||
def javaOnly(f: File) = f.getName.endsWith(".java")
|
||||
|
||||
private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
|
||||
options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
|
||||
private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
|
||||
options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
|
||||
|
||||
val store = AggressiveCompile.staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile))))
|
||||
val store = AggressiveCompile.staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile))))
|
||||
}
|
||||
object AggressiveCompile
|
||||
{
|
||||
import collection.mutable
|
||||
import java.lang.ref.{Reference,SoftReference}
|
||||
private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
|
||||
private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
|
||||
synchronized {
|
||||
cache get file flatMap { ref => Option(ref.get) } getOrElse {
|
||||
val b = backing
|
||||
cache.put(file, new SoftReference(b))
|
||||
b
|
||||
}
|
||||
}
|
||||
object AggressiveCompile {
|
||||
import collection.mutable
|
||||
import java.lang.ref.{ Reference, SoftReference }
|
||||
private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
|
||||
private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
|
||||
synchronized {
|
||||
cache get file flatMap { ref => Option(ref.get) } getOrElse {
|
||||
val b = backing
|
||||
cache.put(file, new SoftReference(b))
|
||||
b
|
||||
}
|
||||
}
|
||||
|
||||
def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool =
|
||||
if(javaHome.isDefined)
|
||||
JavaCompiler.fork(cpOptions, instance)(forkJavac(javaHome))
|
||||
else
|
||||
JavaCompiler.directOrFork(cpOptions, instance)(forkJavac(None))
|
||||
def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool =
|
||||
if (javaHome.isDefined)
|
||||
JavaCompiler.fork(cpOptions, instance)(forkJavac(javaHome))
|
||||
else
|
||||
JavaCompiler.directOrFork(cpOptions, instance)(forkJavac(None))
|
||||
|
||||
def forkJavac(javaHome: Option[File]): JavaCompiler.Fork =
|
||||
{
|
||||
import Path._
|
||||
def exec(jc: JavacContract) = javaHome match { case None => jc.name; case Some(jh) => (jh / "bin" / jc.name).absolutePath }
|
||||
(contract: JavacContract, args: Seq[String], log: Logger) => {
|
||||
log.debug("Forking " + contract.name + ": " + exec(contract) + " " + args.mkString(" "))
|
||||
val javacLogger = new JavacLogger(log)
|
||||
var exitCode = -1
|
||||
try {
|
||||
exitCode = Process(exec(contract), args) ! javacLogger
|
||||
} finally {
|
||||
javacLogger.flush(exitCode)
|
||||
}
|
||||
exitCode
|
||||
}
|
||||
}
|
||||
def forkJavac(javaHome: Option[File]): JavaCompiler.Fork =
|
||||
{
|
||||
import Path._
|
||||
def exec(jc: JavacContract) = javaHome match { case None => jc.name; case Some(jh) => (jh / "bin" / jc.name).absolutePath }
|
||||
(contract: JavacContract, args: Seq[String], log: Logger) => {
|
||||
log.debug("Forking " + contract.name + ": " + exec(contract) + " " + args.mkString(" "))
|
||||
val javacLogger = new JavacLogger(log)
|
||||
var exitCode = -1
|
||||
try {
|
||||
exitCode = Process(exec(contract), args) ! javacLogger
|
||||
} finally {
|
||||
javacLogger.flush(exitCode)
|
||||
}
|
||||
exitCode
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] class JavacLogger(log: Logger) extends ProcessLogger {
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import Level.{Info, Warn, Error, Value => LogLevel}
|
||||
import Level.{ Info, Warn, Error, Value => LogLevel }
|
||||
|
||||
private val msgs: ListBuffer[(LogLevel, String)] = new ListBuffer()
|
||||
|
||||
|
|
@ -242,7 +235,7 @@ private[sbt] class JavacLogger(log: Logger) extends ProcessLogger {
|
|||
def buffer[T](f: => T): T = f
|
||||
|
||||
private def print(desiredLevel: LogLevel)(t: (LogLevel, String)) = t match {
|
||||
case (Info, msg) => log.info(msg)
|
||||
case (Info, msg) => log.info(msg)
|
||||
case (Error, msg) => log.log(desiredLevel, msg)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,57 +1,54 @@
|
|||
package sbt.compiler
|
||||
|
||||
import java.io.File
|
||||
import sbt.{CompileSetup, IO, Using}
|
||||
import sbt.inc.{Analysis, IncOptions, TextAnalysisFormat}
|
||||
import xsbti.{Logger, Maybe}
|
||||
import xsbti.compile._
|
||||
import java.io.File
|
||||
import sbt.{ CompileSetup, IO, Using }
|
||||
import sbt.inc.{ Analysis, IncOptions, TextAnalysisFormat }
|
||||
import xsbti.{ Logger, Maybe }
|
||||
import xsbti.compile._
|
||||
|
||||
object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] {
|
||||
def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis =
|
||||
{
|
||||
val setup = in.setup; import setup._
|
||||
val options = in.options; import options.{ options => scalacOptions, _ }
|
||||
val compilers = in.compilers; import compilers._
|
||||
val agg = new AggressiveCompile(setup.cacheFile)
|
||||
val aMap = (f: File) => m2o(analysisMap(f))
|
||||
val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) }
|
||||
val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions)
|
||||
agg(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, aMap,
|
||||
defClass, reporter, order, skip, incOptions)(log)
|
||||
}
|
||||
|
||||
object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler]
|
||||
{
|
||||
def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis =
|
||||
{
|
||||
val setup = in.setup; import setup._
|
||||
val options = in.options; import options.{options => scalacOptions, _}
|
||||
val compilers = in.compilers; import compilers._
|
||||
val agg = new AggressiveCompile(setup.cacheFile)
|
||||
val aMap = (f: File) => m2o(analysisMap(f))
|
||||
val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) }
|
||||
val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions)
|
||||
agg(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, aMap,
|
||||
defClass, reporter, order, skip, incOptions)(log)
|
||||
}
|
||||
private[this] def m2o[S](opt: Maybe[S]): Option[S] = if (opt.isEmpty) None else Some(opt.get)
|
||||
|
||||
private[this] def m2o[S](opt: Maybe[S]): Option[S] = if(opt.isEmpty) None else Some(opt.get)
|
||||
def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler =
|
||||
new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options, log)
|
||||
|
||||
def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler =
|
||||
new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options, log)
|
||||
def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger) {
|
||||
val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log)
|
||||
AnalyzingCompiler.compileSources(sourceJar :: Nil, targetJar, interfaceJar :: Nil, label, raw, log)
|
||||
}
|
||||
|
||||
def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger)
|
||||
{
|
||||
val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log)
|
||||
AnalyzingCompiler.compileSources(sourceJar :: Nil, targetJar, interfaceJar :: Nil, label, raw, log)
|
||||
}
|
||||
def readCache(file: File): Maybe[(Analysis, CompileSetup)] =
|
||||
try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() }
|
||||
|
||||
def readCache(file: File): Maybe[(Analysis, CompileSetup)] =
|
||||
try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() }
|
||||
@deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2")
|
||||
def readAnalysis(file: File): Analysis =
|
||||
try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty }
|
||||
|
||||
@deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2")
|
||||
def readAnalysis(file: File): Analysis =
|
||||
try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty }
|
||||
def readAnalysis(file: File, incOptions: IncOptions): Analysis =
|
||||
try { readCacheUncaught(file)._1 } catch {
|
||||
case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing)
|
||||
}
|
||||
|
||||
def readAnalysis(file: File, incOptions: IncOptions): Analysis =
|
||||
try { readCacheUncaught(file)._1 } catch {
|
||||
case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing)
|
||||
}
|
||||
|
||||
def readCacheUncaught(file: File): (Analysis, CompileSetup) =
|
||||
Using.fileReader(IO.utf8)(file) { reader =>
|
||||
try {
|
||||
TextAnalysisFormat.read(reader)
|
||||
} catch {
|
||||
case ex: sbt.inc.ReadException =>
|
||||
throw new java.io.IOException(s"Error while reading $file", ex)
|
||||
}
|
||||
}
|
||||
def readCacheUncaught(file: File): (Analysis, CompileSetup) =
|
||||
Using.fileReader(IO.utf8)(file) { reader =>
|
||||
try {
|
||||
TextAnalysisFormat.read(reader)
|
||||
} catch {
|
||||
case ex: sbt.inc.ReadException =>
|
||||
throw new java.io.IOException(s"Error while reading $file", ex)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,99 +4,86 @@
|
|||
package xsbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.{Arrays,Comparator}
|
||||
import scala.tools.nsc.{io, plugins, symtab, Global, Phase}
|
||||
import io.{AbstractFile, PlainFile, ZipArchive}
|
||||
import plugins.{Plugin, PluginComponent}
|
||||
import java.util.{ Arrays, Comparator }
|
||||
import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
|
||||
import io.{ AbstractFile, PlainFile, ZipArchive }
|
||||
import plugins.{ Plugin, PluginComponent }
|
||||
import symtab.Flags
|
||||
import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
|
||||
import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType}
|
||||
import scala.collection.mutable.{ HashMap, HashSet, ListBuffer }
|
||||
import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType }
|
||||
|
||||
object API
|
||||
{
|
||||
val name = "xsbt-api"
|
||||
object API {
|
||||
val name = "xsbt-api"
|
||||
}
|
||||
|
||||
final class API(val global: CallbackGlobal) extends Compat
|
||||
{
|
||||
import global._
|
||||
final class API(val global: CallbackGlobal) extends Compat {
|
||||
import global._
|
||||
|
||||
@inline def debug(msg: => String) = if(settings.verbose.value) inform(msg)
|
||||
@inline def debug(msg: => String) = if (settings.verbose.value) inform(msg)
|
||||
|
||||
def newPhase(prev: Phase) = new ApiPhase(prev)
|
||||
class ApiPhase(prev: Phase) extends Phase(prev)
|
||||
{
|
||||
override def description = "Extracts the public API from source files."
|
||||
def name = API.name
|
||||
def run: Unit =
|
||||
{
|
||||
val start = System.currentTimeMillis
|
||||
currentRun.units.foreach(processUnit)
|
||||
val stop = System.currentTimeMillis
|
||||
debug("API phase took : " + ((stop - start)/1000.0) + " s")
|
||||
}
|
||||
def processUnit(unit: CompilationUnit) = if(!unit.isJava) processScalaUnit(unit)
|
||||
def processScalaUnit(unit: CompilationUnit)
|
||||
{
|
||||
val sourceFile = unit.source.file.file
|
||||
debug("Traversing " + sourceFile)
|
||||
val extractApi = new ExtractAPI[global.type](global, sourceFile)
|
||||
val traverser = new TopLevelHandler(extractApi)
|
||||
traverser.apply(unit.body)
|
||||
if (global.callback.nameHashing) {
|
||||
val extractUsedNames = new ExtractUsedNames[global.type](global)
|
||||
val names = extractUsedNames.extract(unit)
|
||||
debug("The " + sourceFile + " contains the following used names " + names)
|
||||
names foreach { (name: String) => callback.usedName(sourceFile, name) }
|
||||
}
|
||||
val packages = traverser.packages.toArray[String].map(p => new xsbti.api.Package(p))
|
||||
val source = new xsbti.api.SourceAPI(packages, traverser.definitions.toArray[xsbti.api.Definition])
|
||||
extractApi.forceStructures()
|
||||
callback.api(sourceFile, source)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private final class TopLevelHandler(extractApi: ExtractAPI[global.type]) extends TopLevelTraverser
|
||||
{
|
||||
val packages = new HashSet[String]
|
||||
val definitions = new ListBuffer[xsbti.api.Definition]
|
||||
def `class`(c: Symbol): Unit = {
|
||||
definitions += extractApi.classLike(c.owner, c)
|
||||
}
|
||||
/** Record packages declared in the source file*/
|
||||
def `package`(p: Symbol)
|
||||
{
|
||||
if( (p eq null) || p == NoSymbol || p.isRoot || p.isRootPackage || p.isEmptyPackageClass || p.isEmptyPackage)
|
||||
()
|
||||
else
|
||||
{
|
||||
packages += p.fullName
|
||||
`package`(p.enclosingPackage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class TopLevelTraverser extends Traverser
|
||||
{
|
||||
def `class`(s: Symbol)
|
||||
def `package`(s: Symbol)
|
||||
override def traverse(tree: Tree)
|
||||
{
|
||||
tree match
|
||||
{
|
||||
case (_: ClassDef | _ : ModuleDef) if isTopLevel(tree.symbol) => `class`(tree.symbol)
|
||||
case p: PackageDef =>
|
||||
`package`(p.symbol)
|
||||
super.traverse(tree)
|
||||
case _ =>
|
||||
}
|
||||
}
|
||||
def isTopLevel(sym: Symbol): Boolean =
|
||||
(sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic &&
|
||||
!sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA)
|
||||
}
|
||||
def newPhase(prev: Phase) = new ApiPhase(prev)
|
||||
class ApiPhase(prev: Phase) extends Phase(prev) {
|
||||
override def description = "Extracts the public API from source files."
|
||||
def name = API.name
|
||||
def run: Unit =
|
||||
{
|
||||
val start = System.currentTimeMillis
|
||||
currentRun.units.foreach(processUnit)
|
||||
val stop = System.currentTimeMillis
|
||||
debug("API phase took : " + ((stop - start) / 1000.0) + " s")
|
||||
}
|
||||
def processUnit(unit: CompilationUnit) = if (!unit.isJava) processScalaUnit(unit)
|
||||
def processScalaUnit(unit: CompilationUnit) {
|
||||
val sourceFile = unit.source.file.file
|
||||
debug("Traversing " + sourceFile)
|
||||
val extractApi = new ExtractAPI[global.type](global, sourceFile)
|
||||
val traverser = new TopLevelHandler(extractApi)
|
||||
traverser.apply(unit.body)
|
||||
if (global.callback.nameHashing) {
|
||||
val extractUsedNames = new ExtractUsedNames[global.type](global)
|
||||
val names = extractUsedNames.extract(unit)
|
||||
debug("The " + sourceFile + " contains the following used names " + names)
|
||||
names foreach { (name: String) => callback.usedName(sourceFile, name) }
|
||||
}
|
||||
val packages = traverser.packages.toArray[String].map(p => new xsbti.api.Package(p))
|
||||
val source = new xsbti.api.SourceAPI(packages, traverser.definitions.toArray[xsbti.api.Definition])
|
||||
extractApi.forceStructures()
|
||||
callback.api(sourceFile, source)
|
||||
}
|
||||
}
|
||||
|
||||
private final class TopLevelHandler(extractApi: ExtractAPI[global.type]) extends TopLevelTraverser {
|
||||
val packages = new HashSet[String]
|
||||
val definitions = new ListBuffer[xsbti.api.Definition]
|
||||
def `class`(c: Symbol): Unit = {
|
||||
definitions += extractApi.classLike(c.owner, c)
|
||||
}
|
||||
/** Record packages declared in the source file*/
|
||||
def `package`(p: Symbol) {
|
||||
if ((p eq null) || p == NoSymbol || p.isRoot || p.isRootPackage || p.isEmptyPackageClass || p.isEmptyPackage)
|
||||
()
|
||||
else {
|
||||
packages += p.fullName
|
||||
`package`(p.enclosingPackage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class TopLevelTraverser extends Traverser {
|
||||
def `class`(s: Symbol)
|
||||
def `package`(s: Symbol)
|
||||
override def traverse(tree: Tree) {
|
||||
tree match {
|
||||
case (_: ClassDef | _: ModuleDef) if isTopLevel(tree.symbol) => `class`(tree.symbol)
|
||||
case p: PackageDef =>
|
||||
`package`(p.symbol)
|
||||
super.traverse(tree)
|
||||
case _ =>
|
||||
}
|
||||
}
|
||||
def isTopLevel(sym: Symbol): Boolean =
|
||||
(sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic &&
|
||||
!sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,53 +3,44 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import scala.tools.nsc.{io, plugins, symtab, Global, Phase}
|
||||
import io.{AbstractFile, PlainFile, ZipArchive}
|
||||
import plugins.{Plugin, PluginComponent}
|
||||
import scala.collection.mutable.{HashMap, HashSet, Map, Set}
|
||||
import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
|
||||
import io.{ AbstractFile, PlainFile, ZipArchive }
|
||||
import plugins.{ Plugin, PluginComponent }
|
||||
import scala.collection.mutable.{ HashMap, HashSet, Map, Set }
|
||||
|
||||
import java.io.File
|
||||
import java.util.zip.ZipFile
|
||||
import xsbti.AnalysisCallback
|
||||
|
||||
object Analyzer
|
||||
{
|
||||
def name = "xsbt-analyzer"
|
||||
object Analyzer {
|
||||
def name = "xsbt-analyzer"
|
||||
}
|
||||
final class Analyzer(val global: CallbackGlobal) extends LocateClassFile
|
||||
{
|
||||
import global._
|
||||
final class Analyzer(val global: CallbackGlobal) extends LocateClassFile {
|
||||
import global._
|
||||
|
||||
def newPhase(prev: Phase): Phase = new AnalyzerPhase(prev)
|
||||
private class AnalyzerPhase(prev: Phase) extends Phase(prev)
|
||||
{
|
||||
override def description = "Finds concrete instances of provided superclasses, and application entry points."
|
||||
def name = Analyzer.name
|
||||
def run
|
||||
{
|
||||
for(unit <- currentRun.units if !unit.isJava)
|
||||
{
|
||||
val sourceFile = unit.source.file.file
|
||||
// build list of generated classes
|
||||
for(iclass <- unit.icode)
|
||||
{
|
||||
val sym = iclass.symbol
|
||||
def addGenerated(separatorRequired: Boolean)
|
||||
{
|
||||
for(classFile <- outputDirs map (fileForClass(_, sym, separatorRequired)) find (_.exists))
|
||||
callback.generatedClass(sourceFile, classFile, className(sym, '.', separatorRequired))
|
||||
}
|
||||
if(sym.isModuleClass && !sym.isImplClass)
|
||||
{
|
||||
if(isTopLevelModule(sym) && sym.companionClass == NoSymbol)
|
||||
addGenerated(false)
|
||||
addGenerated(true)
|
||||
}
|
||||
else
|
||||
addGenerated(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
def newPhase(prev: Phase): Phase = new AnalyzerPhase(prev)
|
||||
private class AnalyzerPhase(prev: Phase) extends Phase(prev) {
|
||||
override def description = "Finds concrete instances of provided superclasses, and application entry points."
|
||||
def name = Analyzer.name
|
||||
def run {
|
||||
for (unit <- currentRun.units if !unit.isJava) {
|
||||
val sourceFile = unit.source.file.file
|
||||
// build list of generated classes
|
||||
for (iclass <- unit.icode) {
|
||||
val sym = iclass.symbol
|
||||
def addGenerated(separatorRequired: Boolean) {
|
||||
for (classFile <- outputDirs map (fileForClass(_, sym, separatorRequired)) find (_.exists))
|
||||
callback.generatedClass(sourceFile, classFile, className(sym, '.', separatorRequired))
|
||||
}
|
||||
if (sym.isModuleClass && !sym.isImplClass) {
|
||||
if (isTopLevelModule(sym) && sym.companionClass == NoSymbol)
|
||||
addGenerated(false)
|
||||
addGenerated(true)
|
||||
} else
|
||||
addGenerated(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,27 +3,26 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import scala.tools.nsc.{CompilerCommand, Settings}
|
||||
import scala.tools.nsc.{ CompilerCommand, Settings }
|
||||
|
||||
object Command
|
||||
{
|
||||
/**
|
||||
* Construct a CompilerCommand using reflection, to be compatible with Scalac before and after
|
||||
* <a href="https://lampsvn.epfl.ch/trac/scala/changeset/21274">r21274</a>
|
||||
*/
|
||||
def apply(arguments: List[String], settings: Settings): CompilerCommand = {
|
||||
def constr(params: Class[_]*) = classOf[CompilerCommand].getConstructor(params: _*)
|
||||
try {
|
||||
constr(classOf[List[_]], classOf[Settings]).newInstance(arguments, settings)
|
||||
} catch {
|
||||
case e: NoSuchMethodException =>
|
||||
constr(classOf[List[_]], classOf[Settings], classOf[Function1[_, _]], classOf[Boolean]).newInstance(arguments, settings, (s: String) => throw new RuntimeException(s), false.asInstanceOf[AnyRef])
|
||||
}
|
||||
}
|
||||
|
||||
def getWarnFatal(settings: Settings): Boolean =
|
||||
settings.Xwarnfatal.value
|
||||
object Command {
|
||||
/**
|
||||
* Construct a CompilerCommand using reflection, to be compatible with Scalac before and after
|
||||
* <a href="https://lampsvn.epfl.ch/trac/scala/changeset/21274">r21274</a>
|
||||
*/
|
||||
def apply(arguments: List[String], settings: Settings): CompilerCommand = {
|
||||
def constr(params: Class[_]*) = classOf[CompilerCommand].getConstructor(params: _*)
|
||||
try {
|
||||
constr(classOf[List[_]], classOf[Settings]).newInstance(arguments, settings)
|
||||
} catch {
|
||||
case e: NoSuchMethodException =>
|
||||
constr(classOf[List[_]], classOf[Settings], classOf[Function1[_, _]], classOf[Boolean]).newInstance(arguments, settings, (s: String) => throw new RuntimeException(s), false.asInstanceOf[AnyRef])
|
||||
}
|
||||
}
|
||||
|
||||
def getNoWarn(settings: Settings): Boolean =
|
||||
settings.nowarn.value
|
||||
def getWarnFatal(settings: Settings): Boolean =
|
||||
settings.Xwarnfatal.value
|
||||
|
||||
def getNoWarn(settings: Settings): Boolean =
|
||||
settings.nowarn.value
|
||||
}
|
||||
|
|
|
|||
|
|
@ -38,95 +38,92 @@ import scala.tools.nsc.symtab.Flags
|
|||
* The technique described above is used in several places below.
|
||||
*
|
||||
*/
|
||||
abstract class Compat
|
||||
{
|
||||
val global: Global
|
||||
import global._
|
||||
val LocalChild = global.tpnme.LOCAL_CHILD
|
||||
val Nullary = global.NullaryMethodType
|
||||
val ScalaObjectClass = definitions.ScalaObjectClass
|
||||
abstract class Compat {
|
||||
val global: Global
|
||||
import global._
|
||||
val LocalChild = global.tpnme.LOCAL_CHILD
|
||||
val Nullary = global.NullaryMethodType
|
||||
val ScalaObjectClass = definitions.ScalaObjectClass
|
||||
|
||||
private[this] final class MiscCompat
|
||||
{
|
||||
// in 2.9, nme.LOCALCHILD was renamed to tpnme.LOCAL_CHILD
|
||||
def tpnme = nme
|
||||
def LOCAL_CHILD = nme.LOCALCHILD
|
||||
def LOCALCHILD = sourceCompatibilityOnly
|
||||
private[this] final class MiscCompat {
|
||||
// in 2.9, nme.LOCALCHILD was renamed to tpnme.LOCAL_CHILD
|
||||
def tpnme = nme
|
||||
def LOCAL_CHILD = nme.LOCALCHILD
|
||||
def LOCALCHILD = sourceCompatibilityOnly
|
||||
|
||||
// in 2.10, ScalaObject was removed
|
||||
def ScalaObjectClass = definitions.ObjectClass
|
||||
// in 2.10, ScalaObject was removed
|
||||
def ScalaObjectClass = definitions.ObjectClass
|
||||
|
||||
def NullaryMethodType = NullaryMethodTpe
|
||||
def NullaryMethodType = NullaryMethodTpe
|
||||
|
||||
def MACRO = DummyValue
|
||||
def MACRO = DummyValue
|
||||
|
||||
// in 2.10, sym.moduleSuffix exists, but genJVM.moduleSuffix(Symbol) does not
|
||||
def moduleSuffix(sym: Symbol): String = sourceCompatibilityOnly
|
||||
// in 2.11 genJVM does not exist
|
||||
def genJVM = this
|
||||
}
|
||||
// in 2.9, NullaryMethodType was added to Type
|
||||
object NullaryMethodTpe {
|
||||
def unapply(t: Type): Option[Type] = None
|
||||
}
|
||||
// in 2.10, sym.moduleSuffix exists, but genJVM.moduleSuffix(Symbol) does not
|
||||
def moduleSuffix(sym: Symbol): String = sourceCompatibilityOnly
|
||||
// in 2.11 genJVM does not exist
|
||||
def genJVM = this
|
||||
}
|
||||
// in 2.9, NullaryMethodType was added to Type
|
||||
object NullaryMethodTpe {
|
||||
def unapply(t: Type): Option[Type] = None
|
||||
}
|
||||
|
||||
protected implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym)
|
||||
protected final class SymbolCompat(sym: Symbol) {
|
||||
// before 2.10, sym.moduleSuffix doesn't exist, but genJVM.moduleSuffix does
|
||||
def moduleSuffix = global.genJVM.moduleSuffix(sym)
|
||||
protected implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym)
|
||||
protected final class SymbolCompat(sym: Symbol) {
|
||||
// before 2.10, sym.moduleSuffix doesn't exist, but genJVM.moduleSuffix does
|
||||
def moduleSuffix = global.genJVM.moduleSuffix(sym)
|
||||
|
||||
def enclosingTopLevelClass: Symbol = sym.toplevelClass
|
||||
def toplevelClass: Symbol = sourceCompatibilityOnly
|
||||
}
|
||||
def enclosingTopLevelClass: Symbol = sym.toplevelClass
|
||||
def toplevelClass: Symbol = sourceCompatibilityOnly
|
||||
}
|
||||
|
||||
val DummyValue = 0
|
||||
def hasMacro(s: Symbol): Boolean =
|
||||
{
|
||||
val MACRO = Flags.MACRO // will be DummyValue for versions before 2.10
|
||||
MACRO != DummyValue && s.hasFlag(MACRO)
|
||||
}
|
||||
def moduleSuffix(s: Symbol): String = s.moduleSuffix
|
||||
|
||||
val DummyValue = 0
|
||||
def hasMacro(s: Symbol): Boolean =
|
||||
{
|
||||
val MACRO = Flags.MACRO // will be DummyValue for versions before 2.10
|
||||
MACRO != DummyValue && s.hasFlag(MACRO)
|
||||
}
|
||||
def moduleSuffix(s: Symbol): String = s.moduleSuffix
|
||||
private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.")
|
||||
|
||||
private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.")
|
||||
private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat
|
||||
|
||||
private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat
|
||||
object MacroExpansionOf {
|
||||
def unapply(tree: Tree): Option[Tree] = {
|
||||
|
||||
object MacroExpansionOf {
|
||||
def unapply(tree: Tree): Option[Tree] = {
|
||||
// MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x
|
||||
object Compat {
|
||||
class MacroExpansionAttachment(val original: Tree)
|
||||
|
||||
// MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x
|
||||
object Compat {
|
||||
class MacroExpansionAttachment(val original: Tree)
|
||||
// Trees have no attachments in 2.8.x and 2.9.x
|
||||
implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree)
|
||||
class WithAttachments(val tree: Tree) {
|
||||
object EmptyAttachments {
|
||||
def all = Set.empty[Any]
|
||||
}
|
||||
val attachments = EmptyAttachments
|
||||
}
|
||||
}
|
||||
import Compat._
|
||||
|
||||
// Trees have no attachments in 2.8.x and 2.9.x
|
||||
implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree)
|
||||
class WithAttachments(val tree: Tree) {
|
||||
object EmptyAttachments {
|
||||
def all = Set.empty[Any]
|
||||
}
|
||||
val attachments = EmptyAttachments
|
||||
}
|
||||
}
|
||||
import Compat._
|
||||
locally {
|
||||
// Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all
|
||||
import global._ // this is where MEA lives in 2.10.x
|
||||
|
||||
locally {
|
||||
// Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all
|
||||
import global._ // this is where MEA lives in 2.10.x
|
||||
// `original` has been renamed to `expandee` in 2.11.x
|
||||
implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att)
|
||||
class WithExpandee(att: MacroExpansionAttachment) {
|
||||
def expandee: Tree = att.original
|
||||
}
|
||||
|
||||
// `original` has been renamed to `expandee` in 2.11.x
|
||||
implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att)
|
||||
class WithExpandee(att: MacroExpansionAttachment) {
|
||||
def expandee: Tree = att.original
|
||||
}
|
||||
|
||||
locally {
|
||||
import analyzer._ // this is where MEA lives in 2.11.x
|
||||
tree.attachments.all.collect {
|
||||
case att: MacroExpansionAttachment => att.expandee
|
||||
} headOption
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
locally {
|
||||
import analyzer._ // this is where MEA lives in 2.11.x
|
||||
tree.attachments.all.collect {
|
||||
case att: MacroExpansionAttachment => att.expandee
|
||||
} headOption
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,273 +3,252 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import xsbti.{AnalysisCallback,Logger,Problem,Reporter,Severity}
|
||||
import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity }
|
||||
import xsbti.compile._
|
||||
import scala.tools.nsc.{backend, io, reporters, symtab, util, Phase, Global, Settings, SubComponent}
|
||||
import scala.tools.nsc.{ backend, io, reporters, symtab, util, Phase, Global, Settings, SubComponent }
|
||||
import scala.tools.nsc.interactive.RangePositions
|
||||
import backend.JavaPlatform
|
||||
import scala.tools.util.PathResolver
|
||||
import symtab.SymbolLoaders
|
||||
import util.{ClassPath,DirectoryClassPath,MergedClassPath,JavaClassPath}
|
||||
import ClassPath.{ClassPathContext,JavaContext}
|
||||
import util.{ ClassPath, DirectoryClassPath, MergedClassPath, JavaClassPath }
|
||||
import ClassPath.{ ClassPathContext, JavaContext }
|
||||
import io.AbstractFile
|
||||
import scala.annotation.tailrec
|
||||
import scala.collection.mutable
|
||||
import Log.debug
|
||||
import java.io.File
|
||||
|
||||
final class CompilerInterface
|
||||
{
|
||||
def newCompiler(options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter, resident: Boolean): CachedCompiler =
|
||||
new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate), resident)
|
||||
final class CompilerInterface {
|
||||
def newCompiler(options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter, resident: Boolean): CachedCompiler =
|
||||
new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate), resident)
|
||||
|
||||
def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, cached: CachedCompiler): Unit =
|
||||
cached.run(sources, changes, callback, log, delegate, progress)
|
||||
def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, cached: CachedCompiler): Unit =
|
||||
cached.run(sources, changes, callback, log, delegate, progress)
|
||||
}
|
||||
// for compatibility with Scala versions without Global.registerTopLevelSym (2.8.1 and earlier)
|
||||
sealed trait GlobalCompat { self: Global =>
|
||||
def registerTopLevelSym(sym: Symbol): Unit
|
||||
sealed trait RunCompat {
|
||||
def informUnitStarting(phase: Phase, unit: CompilationUnit) {}
|
||||
}
|
||||
def registerTopLevelSym(sym: Symbol): Unit
|
||||
sealed trait RunCompat {
|
||||
def informUnitStarting(phase: Phase, unit: CompilationUnit) {}
|
||||
}
|
||||
}
|
||||
sealed abstract class CallbackGlobal(settings: Settings, reporter: reporters.Reporter, output: Output) extends Global(settings, reporter) with GlobalCompat {
|
||||
def callback: AnalysisCallback
|
||||
def findClass(name: String): Option[(AbstractFile,Boolean)]
|
||||
lazy val outputDirs: Iterable[File] = {
|
||||
output match {
|
||||
case single: SingleOutput => List(single.outputDirectory)
|
||||
case multi: MultipleOutput => multi.outputGroups.toStream map (_.outputDirectory)
|
||||
}
|
||||
}
|
||||
// Map source files to public inherited dependencies. These dependencies are tracked as the symbol for the dealiased base class.
|
||||
val inheritedDependencies = new mutable.HashMap[File, mutable.Set[Symbol]]
|
||||
def addInheritedDependencies(file: File, deps: Iterable[Symbol]) {
|
||||
inheritedDependencies.getOrElseUpdate(file, new mutable.HashSet) ++= deps
|
||||
}
|
||||
def callback: AnalysisCallback
|
||||
def findClass(name: String): Option[(AbstractFile, Boolean)]
|
||||
lazy val outputDirs: Iterable[File] = {
|
||||
output match {
|
||||
case single: SingleOutput => List(single.outputDirectory)
|
||||
case multi: MultipleOutput => multi.outputGroups.toStream map (_.outputDirectory)
|
||||
}
|
||||
}
|
||||
// Map source files to public inherited dependencies. These dependencies are tracked as the symbol for the dealiased base class.
|
||||
val inheritedDependencies = new mutable.HashMap[File, mutable.Set[Symbol]]
|
||||
def addInheritedDependencies(file: File, deps: Iterable[Symbol]) {
|
||||
inheritedDependencies.getOrElseUpdate(file, new mutable.HashSet) ++= deps
|
||||
}
|
||||
}
|
||||
class InterfaceCompileFailed(val arguments: Array[String], val problems: Array[Problem], override val toString: String) extends xsbti.CompileFailed
|
||||
|
||||
class InterfaceCompileCancelled(val arguments: Array[String], override val toString: String) extends xsbti.CompileCancelled
|
||||
|
||||
private final class WeakLog(private[this] var log: Logger, private[this] var delegate: Reporter)
|
||||
{
|
||||
def apply(message: String) {
|
||||
assert(log ne null, "Stale reference to logger")
|
||||
log.error(Message(message))
|
||||
}
|
||||
def logger: Logger = log
|
||||
def reporter: Reporter = delegate
|
||||
def clear() {
|
||||
log = null
|
||||
delegate = null
|
||||
}
|
||||
private final class WeakLog(private[this] var log: Logger, private[this] var delegate: Reporter) {
|
||||
def apply(message: String) {
|
||||
assert(log ne null, "Stale reference to logger")
|
||||
log.error(Message(message))
|
||||
}
|
||||
def logger: Logger = log
|
||||
def reporter: Reporter = delegate
|
||||
def clear() {
|
||||
log = null
|
||||
delegate = null
|
||||
}
|
||||
}
|
||||
|
||||
private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog, resident: Boolean) extends CachedCompiler
|
||||
{
|
||||
val settings = new Settings(s => initialLog(s))
|
||||
output match {
|
||||
case multi: MultipleOutput =>
|
||||
for (out <- multi.outputGroups)
|
||||
settings.outputDirs.add(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)
|
||||
case single: SingleOutput =>
|
||||
settings.outputDirs.setSingleOutput(single.outputDirectory.getAbsolutePath)
|
||||
}
|
||||
private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog, resident: Boolean) extends CachedCompiler {
|
||||
val settings = new Settings(s => initialLog(s))
|
||||
output match {
|
||||
case multi: MultipleOutput =>
|
||||
for (out <- multi.outputGroups)
|
||||
settings.outputDirs.add(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)
|
||||
case single: SingleOutput =>
|
||||
settings.outputDirs.setSingleOutput(single.outputDirectory.getAbsolutePath)
|
||||
}
|
||||
|
||||
val command = Command(args.toList, settings)
|
||||
private[this] val dreporter = DelegatingReporter(settings, initialLog.reporter)
|
||||
try {
|
||||
if(!noErrors(dreporter)) {
|
||||
dreporter.printSummary()
|
||||
handleErrors(dreporter, initialLog.logger)
|
||||
}
|
||||
} finally
|
||||
initialLog.clear()
|
||||
val command = Command(args.toList, settings)
|
||||
private[this] val dreporter = DelegatingReporter(settings, initialLog.reporter)
|
||||
try {
|
||||
if (!noErrors(dreporter)) {
|
||||
dreporter.printSummary()
|
||||
handleErrors(dreporter, initialLog.logger)
|
||||
}
|
||||
} finally
|
||||
initialLog.clear()
|
||||
|
||||
def noErrors(dreporter: DelegatingReporter) = !dreporter.hasErrors && command.ok
|
||||
def noErrors(dreporter: DelegatingReporter) = !dreporter.hasErrors && command.ok
|
||||
|
||||
def commandArguments(sources: Array[File]): Array[String] =
|
||||
(command.settings.recreateArgs ++ sources.map(_.getAbsolutePath)).toArray[String]
|
||||
def commandArguments(sources: Array[File]): Array[String] =
|
||||
(command.settings.recreateArgs ++ sources.map(_.getAbsolutePath)).toArray[String]
|
||||
|
||||
def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress): Unit = synchronized
|
||||
{
|
||||
debug(log, "Running cached compiler " + hashCode.toHexString + ", interfacing (CompilerInterface) with Scala compiler " + scala.tools.nsc.Properties.versionString)
|
||||
val dreporter = DelegatingReporter(settings, delegate)
|
||||
try { run(sources.toList, changes, callback, log, dreporter, progress) }
|
||||
finally { dreporter.dropDelegate() }
|
||||
}
|
||||
private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, dreporter: DelegatingReporter, compileProgress: CompileProgress)
|
||||
{
|
||||
if(command.shouldStopWithInfo)
|
||||
{
|
||||
dreporter.info(null, command.getInfoMessage(compiler), true)
|
||||
throw new InterfaceCompileFailed(args, Array(), "Compiler option supplied that disabled actual compilation.")
|
||||
}
|
||||
if(noErrors(dreporter))
|
||||
{
|
||||
debug(log, args.mkString("Calling Scala compiler with arguments (CompilerInterface):\n\t", "\n\t", ""))
|
||||
compiler.set(callback, dreporter)
|
||||
val run = new compiler.Run with compiler.RunCompat {
|
||||
override def informUnitStarting(phase: Phase, unit: compiler.CompilationUnit) {
|
||||
compileProgress.startUnit(phase.name, unit.source.path)
|
||||
}
|
||||
override def progress(current: Int, total: Int) {
|
||||
if (!compileProgress.advance(current, total))
|
||||
cancel
|
||||
}
|
||||
}
|
||||
val sortedSourceFiles = sources.map(_.getAbsolutePath).sortWith(_ < _)
|
||||
run compile sortedSourceFiles
|
||||
processUnreportedWarnings(run)
|
||||
dreporter.problems foreach { p => callback.problem(p.category, p.position, p.message, p.severity, true) }
|
||||
}
|
||||
dreporter.printSummary()
|
||||
if(!noErrors(dreporter)) handleErrors(dreporter, log)
|
||||
// the case where we cancelled compilation _after_ some compilation errors got reported
|
||||
// will be handled by line above so errors still will be reported properly just potentially not
|
||||
// all of them (because we cancelled the compilation)
|
||||
if (dreporter.cancelled) handleCompilationCancellation(dreporter, log)
|
||||
}
|
||||
def handleErrors(dreporter: DelegatingReporter, log: Logger): Nothing =
|
||||
{
|
||||
debug(log, "Compilation failed (CompilerInterface)")
|
||||
throw new InterfaceCompileFailed(args, dreporter.problems, "Compilation failed")
|
||||
}
|
||||
def handleCompilationCancellation(dreporter: DelegatingReporter, log: Logger): Nothing = {
|
||||
assert(dreporter.cancelled, "We should get here only if when compilation got cancelled")
|
||||
debug(log, "Compilation cancelled (CompilerInterface)")
|
||||
throw new InterfaceCompileCancelled(args, "Compilation has been cancelled")
|
||||
}
|
||||
def processUnreportedWarnings(run: compiler.Run)
|
||||
{
|
||||
// allConditionalWarnings and the ConditionalWarning class are only in 2.10+
|
||||
final class CondWarnCompat(val what: String, val warnings: mutable.ListBuffer[(compiler.Position, String)])
|
||||
implicit def compat(run: AnyRef): Compat = new Compat
|
||||
final class Compat { def allConditionalWarnings = List[CondWarnCompat]() }
|
||||
def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress): Unit = synchronized {
|
||||
debug(log, "Running cached compiler " + hashCode.toHexString + ", interfacing (CompilerInterface) with Scala compiler " + scala.tools.nsc.Properties.versionString)
|
||||
val dreporter = DelegatingReporter(settings, delegate)
|
||||
try { run(sources.toList, changes, callback, log, dreporter, progress) }
|
||||
finally { dreporter.dropDelegate() }
|
||||
}
|
||||
private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, dreporter: DelegatingReporter, compileProgress: CompileProgress) {
|
||||
if (command.shouldStopWithInfo) {
|
||||
dreporter.info(null, command.getInfoMessage(compiler), true)
|
||||
throw new InterfaceCompileFailed(args, Array(), "Compiler option supplied that disabled actual compilation.")
|
||||
}
|
||||
if (noErrors(dreporter)) {
|
||||
debug(log, args.mkString("Calling Scala compiler with arguments (CompilerInterface):\n\t", "\n\t", ""))
|
||||
compiler.set(callback, dreporter)
|
||||
val run = new compiler.Run with compiler.RunCompat {
|
||||
override def informUnitStarting(phase: Phase, unit: compiler.CompilationUnit) {
|
||||
compileProgress.startUnit(phase.name, unit.source.path)
|
||||
}
|
||||
override def progress(current: Int, total: Int) {
|
||||
if (!compileProgress.advance(current, total))
|
||||
cancel
|
||||
}
|
||||
}
|
||||
val sortedSourceFiles = sources.map(_.getAbsolutePath).sortWith(_ < _)
|
||||
run compile sortedSourceFiles
|
||||
processUnreportedWarnings(run)
|
||||
dreporter.problems foreach { p => callback.problem(p.category, p.position, p.message, p.severity, true) }
|
||||
}
|
||||
dreporter.printSummary()
|
||||
if (!noErrors(dreporter)) handleErrors(dreporter, log)
|
||||
// the case where we cancelled compilation _after_ some compilation errors got reported
|
||||
// will be handled by line above so errors still will be reported properly just potentially not
|
||||
// all of them (because we cancelled the compilation)
|
||||
if (dreporter.cancelled) handleCompilationCancellation(dreporter, log)
|
||||
}
|
||||
def handleErrors(dreporter: DelegatingReporter, log: Logger): Nothing =
|
||||
{
|
||||
debug(log, "Compilation failed (CompilerInterface)")
|
||||
throw new InterfaceCompileFailed(args, dreporter.problems, "Compilation failed")
|
||||
}
|
||||
def handleCompilationCancellation(dreporter: DelegatingReporter, log: Logger): Nothing = {
|
||||
assert(dreporter.cancelled, "We should get here only if when compilation got cancelled")
|
||||
debug(log, "Compilation cancelled (CompilerInterface)")
|
||||
throw new InterfaceCompileCancelled(args, "Compilation has been cancelled")
|
||||
}
|
||||
def processUnreportedWarnings(run: compiler.Run) {
|
||||
// allConditionalWarnings and the ConditionalWarning class are only in 2.10+
|
||||
final class CondWarnCompat(val what: String, val warnings: mutable.ListBuffer[(compiler.Position, String)])
|
||||
implicit def compat(run: AnyRef): Compat = new Compat
|
||||
final class Compat { def allConditionalWarnings = List[CondWarnCompat]() }
|
||||
|
||||
val warnings = run.allConditionalWarnings
|
||||
if(!warnings.isEmpty)
|
||||
compiler.logUnreportedWarnings(warnings.map(cw => ("" /*cw.what*/, cw.warnings.toList)))
|
||||
}
|
||||
val warnings = run.allConditionalWarnings
|
||||
if (!warnings.isEmpty)
|
||||
compiler.logUnreportedWarnings(warnings.map(cw => ("" /*cw.what*/ , cw.warnings.toList)))
|
||||
}
|
||||
|
||||
val compiler: Compiler = {
|
||||
if (command.settings.Yrangepos.value)
|
||||
new Compiler() with RangePositions // unnecessary in 2.11
|
||||
else
|
||||
new Compiler()
|
||||
}
|
||||
class Compiler extends CallbackGlobal(command.settings, dreporter, output)
|
||||
{
|
||||
object dummy // temporary fix for #4426
|
||||
object sbtAnalyzer extends
|
||||
{
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = Analyzer.name
|
||||
val runsAfter = List("jvm")
|
||||
override val runsBefore = List("terminal")
|
||||
val runsRightAfter = None
|
||||
}
|
||||
with SubComponent
|
||||
{
|
||||
val analyzer = new Analyzer(global)
|
||||
def newPhase(prev: Phase) = analyzer.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
val compiler: Compiler = {
|
||||
if (command.settings.Yrangepos.value)
|
||||
new Compiler() with RangePositions // unnecessary in 2.11
|
||||
else
|
||||
new Compiler()
|
||||
}
|
||||
class Compiler extends CallbackGlobal(command.settings, dreporter, output) {
|
||||
object dummy // temporary fix for #4426
|
||||
object sbtAnalyzer extends {
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = Analyzer.name
|
||||
val runsAfter = List("jvm")
|
||||
override val runsBefore = List("terminal")
|
||||
val runsRightAfter = None
|
||||
} with SubComponent {
|
||||
val analyzer = new Analyzer(global)
|
||||
def newPhase(prev: Phase) = analyzer.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
|
||||
/** Phase that extracts dependency information */
|
||||
object sbtDependency extends
|
||||
{
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = Dependency.name
|
||||
val runsAfter = List(API.name)
|
||||
override val runsBefore = List("refchecks")
|
||||
// keep API and dependency close to each other
|
||||
// we might want to merge them in the future and even if don't
|
||||
// do that then it makes sense to run those phases next to each other
|
||||
val runsRightAfter = Some(API.name)
|
||||
}
|
||||
with SubComponent
|
||||
{
|
||||
val dependency = new Dependency(global)
|
||||
def newPhase(prev: Phase) = dependency.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
/** Phase that extracts dependency information */
|
||||
object sbtDependency extends {
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = Dependency.name
|
||||
val runsAfter = List(API.name)
|
||||
override val runsBefore = List("refchecks")
|
||||
// keep API and dependency close to each other
|
||||
// we might want to merge them in the future and even if don't
|
||||
// do that then it makes sense to run those phases next to each other
|
||||
val runsRightAfter = Some(API.name)
|
||||
} with SubComponent {
|
||||
val dependency = new Dependency(global)
|
||||
def newPhase(prev: Phase) = dependency.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
|
||||
/** This phase walks trees and constructs a representation of the public API, which is used for incremental recompilation.
|
||||
*
|
||||
* We extract the api after picklers, since that way we see the same symbol information/structure
|
||||
* irrespective of whether we were typechecking from source / unpickling previously compiled classes.
|
||||
*/
|
||||
object apiExtractor extends
|
||||
{
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = API.name
|
||||
val runsAfter = List("typer")
|
||||
override val runsBefore = List("erasure")
|
||||
// allow apiExtractor's phase to be overridden using the sbt.api.phase property
|
||||
// (in case someone would like the old timing, which was right after typer)
|
||||
// TODO: consider migrating to simply specifying "pickler" for `runsAfter` and "uncurry" for `runsBefore`
|
||||
val runsRightAfter = Option(System.getProperty("sbt.api.phase")) orElse Some("pickler")
|
||||
}
|
||||
with SubComponent
|
||||
{
|
||||
val api = new API(global)
|
||||
def newPhase(prev: Phase) = api.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
/**
|
||||
* This phase walks trees and constructs a representation of the public API, which is used for incremental recompilation.
|
||||
*
|
||||
* We extract the api after picklers, since that way we see the same symbol information/structure
|
||||
* irrespective of whether we were typechecking from source / unpickling previously compiled classes.
|
||||
*/
|
||||
object apiExtractor extends {
|
||||
val global: Compiler.this.type = Compiler.this
|
||||
val phaseName = API.name
|
||||
val runsAfter = List("typer")
|
||||
override val runsBefore = List("erasure")
|
||||
// allow apiExtractor's phase to be overridden using the sbt.api.phase property
|
||||
// (in case someone would like the old timing, which was right after typer)
|
||||
// TODO: consider migrating to simply specifying "pickler" for `runsAfter` and "uncurry" for `runsBefore`
|
||||
val runsRightAfter = Option(System.getProperty("sbt.api.phase")) orElse Some("pickler")
|
||||
} with SubComponent {
|
||||
val api = new API(global)
|
||||
def newPhase(prev: Phase) = api.newPhase(prev)
|
||||
def name = phaseName
|
||||
}
|
||||
|
||||
override lazy val phaseDescriptors =
|
||||
{
|
||||
phasesSet += sbtAnalyzer
|
||||
phasesSet += sbtDependency
|
||||
phasesSet += apiExtractor
|
||||
superComputePhaseDescriptors
|
||||
}
|
||||
// Required because computePhaseDescriptors is private in 2.8 (changed to protected sometime later).
|
||||
private[this] def superComputePhaseDescriptors() = superCall("computePhaseDescriptors").asInstanceOf[List[SubComponent]]
|
||||
private[this] def superDropRun(): Unit =
|
||||
try { superCall("dropRun") } catch { case e: NoSuchMethodException => () } // dropRun not in 2.8.1
|
||||
private[this] def superCall(methodName: String): AnyRef =
|
||||
{
|
||||
val meth = classOf[Global].getDeclaredMethod(methodName)
|
||||
meth.setAccessible(true)
|
||||
meth.invoke(this)
|
||||
}
|
||||
def logUnreportedWarnings(seq: Seq[(String, List[(Position,String)])]): Unit = // Scala 2.10.x and later
|
||||
{
|
||||
val drep = reporter.asInstanceOf[DelegatingReporter]
|
||||
for( (what, warnings) <- seq; (pos, msg) <- warnings) yield
|
||||
callback.problem(what, drep.convert(pos), msg, Severity.Warn, false)
|
||||
}
|
||||
override lazy val phaseDescriptors =
|
||||
{
|
||||
phasesSet += sbtAnalyzer
|
||||
phasesSet += sbtDependency
|
||||
phasesSet += apiExtractor
|
||||
superComputePhaseDescriptors
|
||||
}
|
||||
// Required because computePhaseDescriptors is private in 2.8 (changed to protected sometime later).
|
||||
private[this] def superComputePhaseDescriptors() = superCall("computePhaseDescriptors").asInstanceOf[List[SubComponent]]
|
||||
private[this] def superDropRun(): Unit =
|
||||
try { superCall("dropRun") } catch { case e: NoSuchMethodException => () } // dropRun not in 2.8.1
|
||||
private[this] def superCall(methodName: String): AnyRef =
|
||||
{
|
||||
val meth = classOf[Global].getDeclaredMethod(methodName)
|
||||
meth.setAccessible(true)
|
||||
meth.invoke(this)
|
||||
}
|
||||
def logUnreportedWarnings(seq: Seq[(String, List[(Position, String)])]): Unit = // Scala 2.10.x and later
|
||||
{
|
||||
val drep = reporter.asInstanceOf[DelegatingReporter]
|
||||
for ((what, warnings) <- seq; (pos, msg) <- warnings) yield callback.problem(what, drep.convert(pos), msg, Severity.Warn, false)
|
||||
}
|
||||
|
||||
def set(callback: AnalysisCallback, dreporter: DelegatingReporter)
|
||||
{
|
||||
this.callback0 = callback
|
||||
reporter = dreporter
|
||||
}
|
||||
def clear()
|
||||
{
|
||||
callback0 = null
|
||||
superDropRun()
|
||||
reporter = null
|
||||
}
|
||||
def set(callback: AnalysisCallback, dreporter: DelegatingReporter) {
|
||||
this.callback0 = callback
|
||||
reporter = dreporter
|
||||
}
|
||||
def clear() {
|
||||
callback0 = null
|
||||
superDropRun()
|
||||
reporter = null
|
||||
}
|
||||
|
||||
def findClass(name: String): Option[(AbstractFile, Boolean)] =
|
||||
getOutputClass(name).map(f => (f,true)) orElse findOnClassPath(name).map(f =>(f, false))
|
||||
def findClass(name: String): Option[(AbstractFile, Boolean)] =
|
||||
getOutputClass(name).map(f => (f, true)) orElse findOnClassPath(name).map(f => (f, false))
|
||||
|
||||
def getOutputClass(name: String): Option[AbstractFile] =
|
||||
{
|
||||
// This could be improved if a hint where to look is given.
|
||||
val className = name.replace('.', '/') + ".class"
|
||||
outputDirs map (new File(_, className)) find (_.exists) map (AbstractFile.getFile(_))
|
||||
}
|
||||
def getOutputClass(name: String): Option[AbstractFile] =
|
||||
{
|
||||
// This could be improved if a hint where to look is given.
|
||||
val className = name.replace('.', '/') + ".class"
|
||||
outputDirs map (new File(_, className)) find (_.exists) map (AbstractFile.getFile(_))
|
||||
}
|
||||
|
||||
def findOnClassPath(name: String): Option[AbstractFile] =
|
||||
classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]])
|
||||
def findOnClassPath(name: String): Option[AbstractFile] =
|
||||
classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]])
|
||||
|
||||
|
||||
private[this] var callback0: AnalysisCallback = null
|
||||
def callback: AnalysisCallback = callback0
|
||||
}
|
||||
private[this] var callback0: AnalysisCallback = null
|
||||
def callback: AnalysisCallback = callback0
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,102 +4,94 @@
|
|||
package xsbt
|
||||
|
||||
import xsbti.Logger
|
||||
import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings}
|
||||
import scala.tools.nsc.{ GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings }
|
||||
import scala.tools.nsc.interpreter.InteractiveReader
|
||||
import scala.tools.nsc.reporters.Reporter
|
||||
import scala.tools.nsc.util.ClassPath
|
||||
|
||||
class ConsoleInterface
|
||||
{
|
||||
def commandArguments(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Array[String] =
|
||||
MakeSettings.sync(args, bootClasspathString, classpathString, log).recreateArgs.toArray[String]
|
||||
class ConsoleInterface {
|
||||
def commandArguments(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Array[String] =
|
||||
MakeSettings.sync(args, bootClasspathString, classpathString, log).recreateArgs.toArray[String]
|
||||
|
||||
def run(args: Array[String], bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, bindNames: Array[String], bindValues: Array[Any], log: Logger)
|
||||
{
|
||||
lazy val interpreterSettings = MakeSettings.sync(args.toList, log)
|
||||
val compilerSettings = MakeSettings.sync(args, bootClasspathString, classpathString, log)
|
||||
|
||||
if(!bootClasspathString.isEmpty)
|
||||
compilerSettings.bootclasspath.value = bootClasspathString
|
||||
compilerSettings.classpath.value = classpathString
|
||||
log.info(Message("Starting scala interpreter..."))
|
||||
log.info(Message(""))
|
||||
val loop = new InterpreterLoop {
|
||||
|
||||
override def createInterpreter() = {
|
||||
|
||||
if(loader ne null)
|
||||
{
|
||||
in = InteractiveReader.createDefault()
|
||||
interpreter = new Interpreter(settings)
|
||||
{
|
||||
override protected def parentClassLoader = if(loader eq null) super.parentClassLoader else loader
|
||||
override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
|
||||
}
|
||||
interpreter.setContextClassLoader()
|
||||
}
|
||||
else
|
||||
super.createInterpreter()
|
||||
def run(args: Array[String], bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, bindNames: Array[String], bindValues: Array[Any], log: Logger) {
|
||||
lazy val interpreterSettings = MakeSettings.sync(args.toList, log)
|
||||
val compilerSettings = MakeSettings.sync(args, bootClasspathString, classpathString, log)
|
||||
|
||||
def bind(values: Seq[(String,Any)])
|
||||
{
|
||||
// for 2.8 compatibility
|
||||
final class Compat {
|
||||
def bindValue(id: String, value: Any) =
|
||||
interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value)
|
||||
}
|
||||
implicit def compat(a: AnyRef): Compat = new Compat
|
||||
if (!bootClasspathString.isEmpty)
|
||||
compilerSettings.bootclasspath.value = bootClasspathString
|
||||
compilerSettings.classpath.value = classpathString
|
||||
log.info(Message("Starting scala interpreter..."))
|
||||
log.info(Message(""))
|
||||
val loop = new InterpreterLoop {
|
||||
|
||||
for( (id, value) <- values )
|
||||
interpreter.beQuietDuring(interpreter.bindValue(id, value))
|
||||
}
|
||||
override def createInterpreter() = {
|
||||
|
||||
bind(bindNames zip bindValues)
|
||||
|
||||
if(!initialCommands.isEmpty)
|
||||
interpreter.interpret(initialCommands)
|
||||
}
|
||||
override def closeInterpreter()
|
||||
{
|
||||
if(!cleanupCommands.isEmpty)
|
||||
interpreter.interpret(cleanupCommands)
|
||||
super.closeInterpreter()
|
||||
}
|
||||
}
|
||||
loop.main(if(loader eq null) compilerSettings else interpreterSettings)
|
||||
}
|
||||
if (loader ne null) {
|
||||
in = InteractiveReader.createDefault()
|
||||
interpreter = new Interpreter(settings) {
|
||||
override protected def parentClassLoader = if (loader eq null) super.parentClassLoader else loader
|
||||
override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter)
|
||||
}
|
||||
interpreter.setContextClassLoader()
|
||||
} else
|
||||
super.createInterpreter()
|
||||
|
||||
def bind(values: Seq[(String, Any)]) {
|
||||
// for 2.8 compatibility
|
||||
final class Compat {
|
||||
def bindValue(id: String, value: Any) =
|
||||
interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value)
|
||||
}
|
||||
implicit def compat(a: AnyRef): Compat = new Compat
|
||||
|
||||
for ((id, value) <- values)
|
||||
interpreter.beQuietDuring(interpreter.bindValue(id, value))
|
||||
}
|
||||
|
||||
bind(bindNames zip bindValues)
|
||||
|
||||
if (!initialCommands.isEmpty)
|
||||
interpreter.interpret(initialCommands)
|
||||
}
|
||||
override def closeInterpreter() {
|
||||
if (!cleanupCommands.isEmpty)
|
||||
interpreter.interpret(cleanupCommands)
|
||||
super.closeInterpreter()
|
||||
}
|
||||
}
|
||||
loop.main(if (loader eq null) compilerSettings else interpreterSettings)
|
||||
}
|
||||
}
|
||||
object MakeSettings
|
||||
{
|
||||
def apply(args: List[String], log: Logger) =
|
||||
{
|
||||
val command = new GenericRunnerCommand(args, message => log.error(Message(message)))
|
||||
if(command.ok)
|
||||
command.settings
|
||||
else
|
||||
throw new InterfaceCompileFailed(Array(), Array(), command.usageMsg)
|
||||
}
|
||||
object MakeSettings {
|
||||
def apply(args: List[String], log: Logger) =
|
||||
{
|
||||
val command = new GenericRunnerCommand(args, message => log.error(Message(message)))
|
||||
if (command.ok)
|
||||
command.settings
|
||||
else
|
||||
throw new InterfaceCompileFailed(Array(), Array(), command.usageMsg)
|
||||
}
|
||||
|
||||
def sync(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Settings =
|
||||
{
|
||||
val compilerSettings = sync(args.toList, log)
|
||||
if(!bootClasspathString.isEmpty)
|
||||
compilerSettings.bootclasspath.value = bootClasspathString
|
||||
compilerSettings.classpath.value = classpathString
|
||||
compilerSettings
|
||||
}
|
||||
def sync(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Settings =
|
||||
{
|
||||
val compilerSettings = sync(args.toList, log)
|
||||
if (!bootClasspathString.isEmpty)
|
||||
compilerSettings.bootclasspath.value = bootClasspathString
|
||||
compilerSettings.classpath.value = classpathString
|
||||
compilerSettings
|
||||
}
|
||||
|
||||
def sync(options: List[String], log: Logger) =
|
||||
{
|
||||
val settings = apply(options, log)
|
||||
def sync(options: List[String], log: Logger) =
|
||||
{
|
||||
val settings = apply(options, log)
|
||||
|
||||
// -Yrepl-sync is only in 2.9.1+
|
||||
final class Compat {
|
||||
def Yreplsync = settings.BooleanSetting("-Yrepl-sync", "For compatibility only.")
|
||||
}
|
||||
implicit def compat(s: Settings): Compat = new Compat
|
||||
// -Yrepl-sync is only in 2.9.1+
|
||||
final class Compat {
|
||||
def Yreplsync = settings.BooleanSetting("-Yrepl-sync", "For compatibility only.")
|
||||
}
|
||||
implicit def compat(s: Settings): Compat = new Compat
|
||||
|
||||
settings.Yreplsync.value = true
|
||||
settings
|
||||
}
|
||||
settings.Yreplsync.value = true
|
||||
settings
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,102 +3,95 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import xsbti.{F0,Logger,Maybe}
|
||||
import java.io.File
|
||||
import xsbti.{ F0, Logger, Maybe }
|
||||
import java.io.File
|
||||
|
||||
private object DelegatingReporter
|
||||
{
|
||||
def apply(settings: scala.tools.nsc.Settings, delegate: xsbti.Reporter): DelegatingReporter =
|
||||
new DelegatingReporter(Command.getWarnFatal(settings), Command.getNoWarn(settings), delegate)
|
||||
private object DelegatingReporter {
|
||||
def apply(settings: scala.tools.nsc.Settings, delegate: xsbti.Reporter): DelegatingReporter =
|
||||
new DelegatingReporter(Command.getWarnFatal(settings), Command.getNoWarn(settings), delegate)
|
||||
}
|
||||
|
||||
// The following code is based on scala.tools.nsc.reporters.{AbstractReporter, ConsoleReporter}
|
||||
// Copyright 2002-2009 LAMP/EPFL
|
||||
// Original author: Martin Odersky
|
||||
private final class DelegatingReporter(warnFatal: Boolean, noWarn: Boolean, private[this] var delegate: xsbti.Reporter) extends scala.tools.nsc.reporters.Reporter
|
||||
{
|
||||
import scala.tools.nsc.util.{FakePos,NoPosition,Position}
|
||||
private final class DelegatingReporter(warnFatal: Boolean, noWarn: Boolean, private[this] var delegate: xsbti.Reporter) extends scala.tools.nsc.reporters.Reporter {
|
||||
import scala.tools.nsc.util.{ FakePos, NoPosition, Position }
|
||||
|
||||
def dropDelegate() { delegate = null }
|
||||
def error(msg: String) { error(FakePos("scalac"), msg) }
|
||||
def dropDelegate() { delegate = null }
|
||||
def error(msg: String) { error(FakePos("scalac"), msg) }
|
||||
|
||||
def printSummary() = delegate.printSummary()
|
||||
def printSummary() = delegate.printSummary()
|
||||
|
||||
override def hasErrors = delegate.hasErrors
|
||||
override def hasWarnings = delegate.hasWarnings
|
||||
def problems = delegate.problems
|
||||
override def comment(pos: Position, msg: String) = delegate.comment(convert(pos), msg)
|
||||
override def hasErrors = delegate.hasErrors
|
||||
override def hasWarnings = delegate.hasWarnings
|
||||
def problems = delegate.problems
|
||||
override def comment(pos: Position, msg: String) = delegate.comment(convert(pos), msg)
|
||||
|
||||
override def reset =
|
||||
{
|
||||
super.reset
|
||||
delegate.reset
|
||||
}
|
||||
protected def info0(pos: Position, msg: String, rawSeverity: Severity, force: Boolean)
|
||||
{
|
||||
val skip = rawSeverity == WARNING && noWarn
|
||||
if (!skip) {
|
||||
val severity = if(warnFatal && rawSeverity == WARNING) ERROR else rawSeverity
|
||||
delegate.log(convert(pos), msg, convert(severity))
|
||||
}
|
||||
}
|
||||
def convert(posIn: Position): xsbti.Position =
|
||||
{
|
||||
val pos =
|
||||
posIn match
|
||||
{
|
||||
case null | NoPosition => NoPosition
|
||||
case x: FakePos => x
|
||||
case x =>
|
||||
posIn.inUltimateSource(posIn.source)
|
||||
}
|
||||
pos match
|
||||
{
|
||||
case NoPosition | FakePos(_) => position(None, None, None, "", None, None, None)
|
||||
case _ => makePosition(pos)
|
||||
}
|
||||
}
|
||||
private[this] def makePosition(pos: Position): xsbti.Position =
|
||||
{
|
||||
val src = pos.source
|
||||
val sourcePath = src.file.path
|
||||
val sourceFile = src.file.file
|
||||
val line = pos.line
|
||||
val lineContent = pos.lineContent.stripLineEnd
|
||||
val offset = getOffset(pos)
|
||||
val pointer = offset - src.lineToOffset(src.offsetToLine(offset))
|
||||
val pointerSpace = ((lineContent: Seq[Char]).take(pointer).map { case '\t' => '\t'; case x => ' ' }).mkString
|
||||
position(Some(sourcePath), Some(sourceFile), Some(line), lineContent, Some(offset), Some(pointer), Some(pointerSpace))
|
||||
}
|
||||
private[this] def getOffset(pos: Position): Int =
|
||||
{
|
||||
// for compatibility with 2.8
|
||||
implicit def withPoint(p: Position): WithPoint = new WithPoint(pos)
|
||||
final class WithPoint(val p: Position) { def point = p.offset.get }
|
||||
pos.point
|
||||
}
|
||||
private[this] def position(sourcePath0: Option[String], sourceFile0: Option[File], line0: Option[Int], lineContent0: String, offset0: Option[Int], pointer0: Option[Int], pointerSpace0: Option[String]) =
|
||||
new xsbti.Position
|
||||
{
|
||||
val line = o2mi(line0)
|
||||
val lineContent = lineContent0
|
||||
val offset = o2mi(offset0)
|
||||
val sourcePath = o2m(sourcePath0)
|
||||
val sourceFile = o2m(sourceFile0)
|
||||
val pointer = o2mi(pointer0)
|
||||
val pointerSpace = o2m(pointerSpace0)
|
||||
}
|
||||
override def reset =
|
||||
{
|
||||
super.reset
|
||||
delegate.reset
|
||||
}
|
||||
protected def info0(pos: Position, msg: String, rawSeverity: Severity, force: Boolean) {
|
||||
val skip = rawSeverity == WARNING && noWarn
|
||||
if (!skip) {
|
||||
val severity = if (warnFatal && rawSeverity == WARNING) ERROR else rawSeverity
|
||||
delegate.log(convert(pos), msg, convert(severity))
|
||||
}
|
||||
}
|
||||
def convert(posIn: Position): xsbti.Position =
|
||||
{
|
||||
val pos =
|
||||
posIn match {
|
||||
case null | NoPosition => NoPosition
|
||||
case x: FakePos => x
|
||||
case x =>
|
||||
posIn.inUltimateSource(posIn.source)
|
||||
}
|
||||
pos match {
|
||||
case NoPosition | FakePos(_) => position(None, None, None, "", None, None, None)
|
||||
case _ => makePosition(pos)
|
||||
}
|
||||
}
|
||||
private[this] def makePosition(pos: Position): xsbti.Position =
|
||||
{
|
||||
val src = pos.source
|
||||
val sourcePath = src.file.path
|
||||
val sourceFile = src.file.file
|
||||
val line = pos.line
|
||||
val lineContent = pos.lineContent.stripLineEnd
|
||||
val offset = getOffset(pos)
|
||||
val pointer = offset - src.lineToOffset(src.offsetToLine(offset))
|
||||
val pointerSpace = ((lineContent: Seq[Char]).take(pointer).map { case '\t' => '\t'; case x => ' ' }).mkString
|
||||
position(Some(sourcePath), Some(sourceFile), Some(line), lineContent, Some(offset), Some(pointer), Some(pointerSpace))
|
||||
}
|
||||
private[this] def getOffset(pos: Position): Int =
|
||||
{
|
||||
// for compatibility with 2.8
|
||||
implicit def withPoint(p: Position): WithPoint = new WithPoint(pos)
|
||||
final class WithPoint(val p: Position) { def point = p.offset.get }
|
||||
pos.point
|
||||
}
|
||||
private[this] def position(sourcePath0: Option[String], sourceFile0: Option[File], line0: Option[Int], lineContent0: String, offset0: Option[Int], pointer0: Option[Int], pointerSpace0: Option[String]) =
|
||||
new xsbti.Position {
|
||||
val line = o2mi(line0)
|
||||
val lineContent = lineContent0
|
||||
val offset = o2mi(offset0)
|
||||
val sourcePath = o2m(sourcePath0)
|
||||
val sourceFile = o2m(sourceFile0)
|
||||
val pointer = o2mi(pointer0)
|
||||
val pointerSpace = o2m(pointerSpace0)
|
||||
}
|
||||
|
||||
import xsbti.Severity.{Info, Warn, Error}
|
||||
private[this] def convert(sev: Severity): xsbti.Severity =
|
||||
sev match
|
||||
{
|
||||
case INFO => Info
|
||||
case WARNING => Warn
|
||||
case ERROR => Error
|
||||
}
|
||||
import xsbti.Severity.{ Info, Warn, Error }
|
||||
private[this] def convert(sev: Severity): xsbti.Severity =
|
||||
sev match {
|
||||
case INFO => Info
|
||||
case WARNING => Warn
|
||||
case ERROR => Error
|
||||
}
|
||||
|
||||
import java.lang.{Integer => I}
|
||||
private[this] def o2mi(opt: Option[Int]): Maybe[I] = opt match { case None => Maybe.nothing[I]; case Some(s) => Maybe.just[I](s) }
|
||||
private[this] def o2m[S](opt: Option[S]): Maybe[S] = opt match { case None => Maybe.nothing[S]; case Some(s) => Maybe.just(s) }
|
||||
import java.lang.{ Integer => I }
|
||||
private[this] def o2mi(opt: Option[Int]): Maybe[I] = opt match { case None => Maybe.nothing[I]; case Some(s) => Maybe.just[I](s) }
|
||||
private[this] def o2m[S](opt: Option[S]): Maybe[S] = opt match { case None => Maybe.nothing[S]; case Some(s) => Maybe.just(s) }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,15 +3,14 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import scala.tools.nsc.{io, symtab, Phase}
|
||||
import io.{AbstractFile, PlainFile, ZipArchive}
|
||||
import scala.tools.nsc.{ io, symtab, Phase }
|
||||
import io.{ AbstractFile, PlainFile, ZipArchive }
|
||||
import symtab.Flags
|
||||
|
||||
import java.io.File
|
||||
|
||||
object Dependency
|
||||
{
|
||||
def name = "xsbt-dependency"
|
||||
object Dependency {
|
||||
def name = "xsbt-dependency"
|
||||
}
|
||||
/**
|
||||
* Extracts dependency information from each compilation unit.
|
||||
|
|
@ -28,106 +27,97 @@ object Dependency
|
|||
* where it originates from. The Symbol->Classfile mapping is implemented by
|
||||
* LocateClassFile that we inherit from.
|
||||
*/
|
||||
final class Dependency(val global: CallbackGlobal) extends LocateClassFile
|
||||
{
|
||||
import global._
|
||||
final class Dependency(val global: CallbackGlobal) extends LocateClassFile {
|
||||
import global._
|
||||
|
||||
def newPhase(prev: Phase): Phase = new DependencyPhase(prev)
|
||||
private class DependencyPhase(prev: Phase) extends Phase(prev)
|
||||
{
|
||||
override def description = "Extracts dependency information"
|
||||
def name = Dependency.name
|
||||
def run
|
||||
{
|
||||
for(unit <- currentRun.units if !unit.isJava)
|
||||
{
|
||||
// build dependencies structure
|
||||
val sourceFile = unit.source.file.file
|
||||
if (global.callback.nameHashing) {
|
||||
val dependenciesByMemberRef = extractDependenciesByMemberRef(unit)
|
||||
for(on <- dependenciesByMemberRef)
|
||||
processDependency(on, inherited=false)
|
||||
def newPhase(prev: Phase): Phase = new DependencyPhase(prev)
|
||||
private class DependencyPhase(prev: Phase) extends Phase(prev) {
|
||||
override def description = "Extracts dependency information"
|
||||
def name = Dependency.name
|
||||
def run {
|
||||
for (unit <- currentRun.units if !unit.isJava) {
|
||||
// build dependencies structure
|
||||
val sourceFile = unit.source.file.file
|
||||
if (global.callback.nameHashing) {
|
||||
val dependenciesByMemberRef = extractDependenciesByMemberRef(unit)
|
||||
for (on <- dependenciesByMemberRef)
|
||||
processDependency(on, inherited = false)
|
||||
|
||||
val dependenciesByInheritance = extractDependenciesByInheritance(unit)
|
||||
for(on <- dependenciesByInheritance)
|
||||
processDependency(on, inherited=true)
|
||||
} else {
|
||||
for(on <- unit.depends) processDependency(on, inherited=false)
|
||||
for(on <- inheritedDependencies.getOrElse(sourceFile, Nil: Iterable[Symbol])) processDependency(on, inherited=true)
|
||||
}
|
||||
/**
|
||||
* Handles dependency on given symbol by trying to figure out if represents a term
|
||||
* that is coming from either source code (not necessarily compiled in this compilation
|
||||
* run) or from class file and calls respective callback method.
|
||||
*/
|
||||
def processDependency(on: Symbol, inherited: Boolean)
|
||||
{
|
||||
def binaryDependency(file: File, className: String) = callback.binaryDependency(file, className, sourceFile, inherited)
|
||||
val onSource = on.sourceFile
|
||||
if(onSource == null)
|
||||
{
|
||||
classFile(on) match
|
||||
{
|
||||
case Some((f,className,inOutDir)) =>
|
||||
if(inOutDir && on.isJavaDefined) registerTopLevelSym(on)
|
||||
f match
|
||||
{
|
||||
case ze: ZipArchive#Entry => for(zip <- ze.underlyingSource; zipFile <- Option(zip.file) ) binaryDependency(zipFile, className)
|
||||
case pf: PlainFile => binaryDependency(pf.file, className)
|
||||
case _ => ()
|
||||
}
|
||||
case None => ()
|
||||
}
|
||||
}
|
||||
else if (onSource.file != sourceFile)
|
||||
callback.sourceDependency(onSource.file, sourceFile, inherited)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
val dependenciesByInheritance = extractDependenciesByInheritance(unit)
|
||||
for (on <- dependenciesByInheritance)
|
||||
processDependency(on, inherited = true)
|
||||
} else {
|
||||
for (on <- unit.depends) processDependency(on, inherited = false)
|
||||
for (on <- inheritedDependencies.getOrElse(sourceFile, Nil: Iterable[Symbol])) processDependency(on, inherited = true)
|
||||
}
|
||||
/**
|
||||
* Handles dependency on given symbol by trying to figure out if represents a term
|
||||
* that is coming from either source code (not necessarily compiled in this compilation
|
||||
* run) or from class file and calls respective callback method.
|
||||
*/
|
||||
def processDependency(on: Symbol, inherited: Boolean) {
|
||||
def binaryDependency(file: File, className: String) = callback.binaryDependency(file, className, sourceFile, inherited)
|
||||
val onSource = on.sourceFile
|
||||
if (onSource == null) {
|
||||
classFile(on) match {
|
||||
case Some((f, className, inOutDir)) =>
|
||||
if (inOutDir && on.isJavaDefined) registerTopLevelSym(on)
|
||||
f match {
|
||||
case ze: ZipArchive#Entry => for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) binaryDependency(zipFile, className)
|
||||
case pf: PlainFile => binaryDependency(pf.file, className)
|
||||
case _ => ()
|
||||
}
|
||||
case None => ()
|
||||
}
|
||||
} else if (onSource.file != sourceFile)
|
||||
callback.sourceDependency(onSource.file, sourceFile, inherited)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverses given type and collects result of applying a partial function `pf`.
|
||||
*
|
||||
* NOTE: This class exists in Scala 2.10 as CollectTypeCollector but does not in earlier
|
||||
* versions (like 2.9) of Scala compiler that incremental cmpiler supports so we had to
|
||||
* reimplement that class here.
|
||||
*/
|
||||
private final class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser {
|
||||
var collected: List[T] = Nil
|
||||
def traverse(tpe: Type): Unit = {
|
||||
if (pf.isDefinedAt(tpe))
|
||||
collected = pf(tpe) :: collected
|
||||
mapOver(tpe)
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Traverses given type and collects result of applying a partial function `pf`.
|
||||
*
|
||||
* NOTE: This class exists in Scala 2.10 as CollectTypeCollector but does not in earlier
|
||||
* versions (like 2.9) of Scala compiler that incremental cmpiler supports so we had to
|
||||
* reimplement that class here.
|
||||
*/
|
||||
private final class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser {
|
||||
var collected: List[T] = Nil
|
||||
def traverse(tpe: Type): Unit = {
|
||||
if (pf.isDefinedAt(tpe))
|
||||
collected = pf(tpe) :: collected
|
||||
mapOver(tpe)
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class ExtractDependenciesTraverser extends Traverser {
|
||||
protected val depBuf = collection.mutable.ArrayBuffer.empty[Symbol]
|
||||
protected def addDependency(dep: Symbol): Unit = depBuf += dep
|
||||
def dependencies: collection.immutable.Set[Symbol] = {
|
||||
// convert to immutable set and remove NoSymbol if we have one
|
||||
depBuf.toSet - NoSymbol
|
||||
}
|
||||
}
|
||||
private abstract class ExtractDependenciesTraverser extends Traverser {
|
||||
protected val depBuf = collection.mutable.ArrayBuffer.empty[Symbol]
|
||||
protected def addDependency(dep: Symbol): Unit = depBuf += dep
|
||||
def dependencies: collection.immutable.Set[Symbol] = {
|
||||
// convert to immutable set and remove NoSymbol if we have one
|
||||
depBuf.toSet - NoSymbol
|
||||
}
|
||||
}
|
||||
|
||||
private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser {
|
||||
override def traverse(tree: Tree): Unit = {
|
||||
tree match {
|
||||
case Import(expr, selectors) =>
|
||||
selectors.foreach {
|
||||
case ImportSelector(nme.WILDCARD, _, null, _) =>
|
||||
// in case of wildcard import we do not rely on any particular name being defined
|
||||
// on `expr`; all symbols that are being used will get caught through selections
|
||||
case ImportSelector(name: Name, _, _, _) =>
|
||||
def lookupImported(name: Name) = expr.symbol.info.member(name)
|
||||
// importing a name means importing both a term and a type (if they exist)
|
||||
addDependency(lookupImported(name.toTermName))
|
||||
addDependency(lookupImported(name.toTypeName))
|
||||
}
|
||||
case select: Select =>
|
||||
addDependency(select.symbol)
|
||||
/*
|
||||
private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser {
|
||||
override def traverse(tree: Tree): Unit = {
|
||||
tree match {
|
||||
case Import(expr, selectors) =>
|
||||
selectors.foreach {
|
||||
case ImportSelector(nme.WILDCARD, _, null, _) =>
|
||||
// in case of wildcard import we do not rely on any particular name being defined
|
||||
// on `expr`; all symbols that are being used will get caught through selections
|
||||
case ImportSelector(name: Name, _, _, _) =>
|
||||
def lookupImported(name: Name) = expr.symbol.info.member(name)
|
||||
// importing a name means importing both a term and a type (if they exist)
|
||||
addDependency(lookupImported(name.toTermName))
|
||||
addDependency(lookupImported(name.toTypeName))
|
||||
}
|
||||
case select: Select =>
|
||||
addDependency(select.symbol)
|
||||
/*
|
||||
* Idents are used in number of situations:
|
||||
* - to refer to local variable
|
||||
* - to refer to a top-level package (other packages are nested selections)
|
||||
|
|
@ -135,70 +125,70 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile
|
|||
* this looks fishy, see this thread:
|
||||
* https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion
|
||||
*/
|
||||
case ident: Ident =>
|
||||
addDependency(ident.symbol)
|
||||
case typeTree: TypeTree =>
|
||||
val typeSymbolCollector = new CollectTypeTraverser({
|
||||
case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol
|
||||
})
|
||||
typeSymbolCollector.traverse(typeTree.tpe)
|
||||
val deps = typeSymbolCollector.collected.toSet
|
||||
deps.foreach(addDependency)
|
||||
case Template(parents, self, body) =>
|
||||
traverseTrees(body)
|
||||
/*
|
||||
case ident: Ident =>
|
||||
addDependency(ident.symbol)
|
||||
case typeTree: TypeTree =>
|
||||
val typeSymbolCollector = new CollectTypeTraverser({
|
||||
case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol
|
||||
})
|
||||
typeSymbolCollector.traverse(typeTree.tpe)
|
||||
val deps = typeSymbolCollector.collected.toSet
|
||||
deps.foreach(addDependency)
|
||||
case Template(parents, self, body) =>
|
||||
traverseTrees(body)
|
||||
/*
|
||||
* Some macros appear to contain themselves as original tree
|
||||
* In this case, we don't need to inspect the original tree because
|
||||
* we already inspected its expansion, which is equal.
|
||||
* See https://issues.scala-lang.org/browse/SI-8486
|
||||
*/
|
||||
case MacroExpansionOf(original) if original != tree =>
|
||||
this.traverse(original)
|
||||
case other => ()
|
||||
}
|
||||
super.traverse(tree)
|
||||
}
|
||||
}
|
||||
case MacroExpansionOf(original) if original != tree =>
|
||||
this.traverse(original)
|
||||
case other => ()
|
||||
}
|
||||
super.traverse(tree)
|
||||
}
|
||||
}
|
||||
|
||||
private def extractDependenciesByMemberRef(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
|
||||
val traverser = new ExtractDependenciesByMemberRefTraverser
|
||||
traverser.traverse(unit.body)
|
||||
val dependencies = traverser.dependencies
|
||||
dependencies.map(enclosingTopLevelClass)
|
||||
}
|
||||
private def extractDependenciesByMemberRef(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
|
||||
val traverser = new ExtractDependenciesByMemberRefTraverser
|
||||
traverser.traverse(unit.body)
|
||||
val dependencies = traverser.dependencies
|
||||
dependencies.map(enclosingTopLevelClass)
|
||||
}
|
||||
|
||||
/** Copied straight from Scala 2.10 as it does not exist in Scala 2.9 compiler */
|
||||
private final def debuglog(msg: => String) {
|
||||
if (settings.debug.value)
|
||||
log(msg)
|
||||
}
|
||||
/** Copied straight from Scala 2.10 as it does not exist in Scala 2.9 compiler */
|
||||
private final def debuglog(msg: => String) {
|
||||
if (settings.debug.value)
|
||||
log(msg)
|
||||
}
|
||||
|
||||
private final class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser {
|
||||
override def traverse(tree: Tree): Unit = tree match {
|
||||
case Template(parents, self, body) =>
|
||||
// we are using typeSymbol and not typeSymbolDirect because we want
|
||||
// type aliases to be expanded
|
||||
val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet
|
||||
debuglog("Parent type symbols for " + tree.pos + ": " + parentTypeSymbols.map(_.fullName))
|
||||
parentTypeSymbols.foreach(addDependency)
|
||||
traverseTrees(body)
|
||||
case tree => super.traverse(tree)
|
||||
}
|
||||
}
|
||||
private final class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser {
|
||||
override def traverse(tree: Tree): Unit = tree match {
|
||||
case Template(parents, self, body) =>
|
||||
// we are using typeSymbol and not typeSymbolDirect because we want
|
||||
// type aliases to be expanded
|
||||
val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet
|
||||
debuglog("Parent type symbols for " + tree.pos + ": " + parentTypeSymbols.map(_.fullName))
|
||||
parentTypeSymbols.foreach(addDependency)
|
||||
traverseTrees(body)
|
||||
case tree => super.traverse(tree)
|
||||
}
|
||||
}
|
||||
|
||||
private def extractDependenciesByInheritance(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
|
||||
val traverser = new ExtractDependenciesByInheritanceTraverser
|
||||
traverser.traverse(unit.body)
|
||||
val dependencies = traverser.dependencies
|
||||
dependencies.map(enclosingTopLevelClass)
|
||||
}
|
||||
private def extractDependenciesByInheritance(unit: CompilationUnit): collection.immutable.Set[Symbol] = {
|
||||
val traverser = new ExtractDependenciesByInheritanceTraverser
|
||||
traverser.traverse(unit.body)
|
||||
val dependencies = traverser.dependencies
|
||||
dependencies.map(enclosingTopLevelClass)
|
||||
}
|
||||
|
||||
/**
|
||||
* We capture enclosing classes only because that's what CompilationUnit.depends does and we don't want
|
||||
* to deviate from old behaviour too much for now.
|
||||
*/
|
||||
private def enclosingTopLevelClass(sym: Symbol): Symbol =
|
||||
// for Scala 2.8 and 2.9 this method is provided through SymbolCompat
|
||||
sym.enclosingTopLevelClass
|
||||
/**
|
||||
* We capture enclosing classes only because that's what CompilationUnit.depends does and we don't want
|
||||
* to deviate from old behaviour too much for now.
|
||||
*/
|
||||
private def enclosingTopLevelClass(sym: Symbol): Symbol =
|
||||
// for Scala 2.8 and 2.9 this method is provided through SymbolCompat
|
||||
sym.enclosingTopLevelClass
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
package xsbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.{Arrays,Comparator}
|
||||
import scala.tools.nsc.{io, plugins, symtab, Global, Phase}
|
||||
import io.{AbstractFile, PlainFile, ZipArchive}
|
||||
import plugins.{Plugin, PluginComponent}
|
||||
import java.util.{ Arrays, Comparator }
|
||||
import scala.tools.nsc.{ io, plugins, symtab, Global, Phase }
|
||||
import io.{ AbstractFile, PlainFile, ZipArchive }
|
||||
import plugins.{ Plugin, PluginComponent }
|
||||
import symtab.Flags
|
||||
import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
|
||||
import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType}
|
||||
import scala.collection.mutable.{ HashMap, HashSet, ListBuffer }
|
||||
import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType }
|
||||
|
||||
/**
|
||||
* Extracts API representation out of Symbols and Types.
|
||||
|
|
@ -20,365 +20,356 @@ import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType}
|
|||
* exposed to a client that can pass them to an instance of CallbackGlobal it holds.
|
||||
*/
|
||||
class ExtractAPI[GlobalType <: CallbackGlobal](val global: GlobalType,
|
||||
// Tracks the source file associated with the CompilationUnit currently being processed by the API phase.
|
||||
// This is used when recording inheritance dependencies.
|
||||
sourceFile: File) extends Compat {
|
||||
// Tracks the source file associated with the CompilationUnit currently being processed by the API phase.
|
||||
// This is used when recording inheritance dependencies.
|
||||
sourceFile: File) extends Compat {
|
||||
|
||||
import global._
|
||||
import global._
|
||||
|
||||
private def error(msg: String) = throw new RuntimeException(msg)
|
||||
private def error(msg: String) = throw new RuntimeException(msg)
|
||||
|
||||
// this cache reduces duplicate work both here and when persisting
|
||||
// caches on other structures had minimal effect on time and cache size
|
||||
// (tried: Definition, Modifier, Path, Id, String)
|
||||
private[this] val typeCache = new HashMap[(Symbol,Type), xsbti.api.Type]
|
||||
// these caches are necessary for correctness
|
||||
private[this] val structureCache = new HashMap[Symbol, xsbti.api.Structure]
|
||||
private[this] val classLikeCache = new HashMap[(Symbol,Symbol), xsbti.api.ClassLike]
|
||||
private[this] val pending = new HashSet[xsbti.api.Lazy[_]]
|
||||
// this cache reduces duplicate work both here and when persisting
|
||||
// caches on other structures had minimal effect on time and cache size
|
||||
// (tried: Definition, Modifier, Path, Id, String)
|
||||
private[this] val typeCache = new HashMap[(Symbol, Type), xsbti.api.Type]
|
||||
// these caches are necessary for correctness
|
||||
private[this] val structureCache = new HashMap[Symbol, xsbti.api.Structure]
|
||||
private[this] val classLikeCache = new HashMap[(Symbol, Symbol), xsbti.api.ClassLike]
|
||||
private[this] val pending = new HashSet[xsbti.api.Lazy[_]]
|
||||
|
||||
private[this] val emptyStringArray = new Array[String](0)
|
||||
private[this] val emptyStringArray = new Array[String](0)
|
||||
|
||||
/**
|
||||
* Implements a work-around for https://github.com/sbt/sbt/issues/823
|
||||
*
|
||||
* The strategy is to rename all type variables bound by existential type to stable
|
||||
* names by assigning to each type variable a De Bruijn-like index. As a result, each
|
||||
* type variable gets name of this shape:
|
||||
*
|
||||
* "existential_${nestingLevel}_${i}"
|
||||
*
|
||||
* where `nestingLevel` indicates nesting level of existential types and `i` variable
|
||||
* indicates position of type variable in given existential type.
|
||||
*
|
||||
* For example, let's assume we have the following classes declared:
|
||||
*
|
||||
* class A[T]; class B[T,U]
|
||||
*
|
||||
* and we have type A[_] that is expanded by Scala compiler into
|
||||
*
|
||||
* A[_$1] forSome { type _$1 }
|
||||
*
|
||||
* After applying our renaming strategy we get
|
||||
*
|
||||
* A[existential_0_0] forSome { type existential_0_0 }
|
||||
*
|
||||
* Let's consider a bit more complicated example which shows how our strategy deals with
|
||||
* nested existential types:
|
||||
*
|
||||
* A[_ <: B[_, _]]
|
||||
*
|
||||
* which gets expanded into:
|
||||
*
|
||||
* A[_$1] forSome {
|
||||
* type _$1 <: B[_$2, _$3] forSome { type _$2; type _$3 }
|
||||
* }
|
||||
*
|
||||
* After applying our renaming strategy we get
|
||||
*
|
||||
* A[existential_0_0] forSome {
|
||||
* type existential_0_0 <: B[existential_1_0, existential_1_1] forSome {
|
||||
* type existential_1_0; type existential_1_1
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* Note how the first index (nesting level) is bumped for both existential types.
|
||||
*
|
||||
* This way, all names of existential type variables depend only on the structure of
|
||||
* existential types and are kept stable.
|
||||
*
|
||||
* Both examples presented above used placeholder syntax for existential types but our
|
||||
* strategy is applied uniformly to all existential types no matter if they are written
|
||||
* using placeholder syntax or explicitly.
|
||||
*/
|
||||
private[this] object existentialRenamings {
|
||||
private var nestingLevel: Int = 0
|
||||
import scala.collection.mutable.Map
|
||||
private var renameTo: Map[Symbol, String] = Map.empty
|
||||
/**
|
||||
* Implements a work-around for https://github.com/sbt/sbt/issues/823
|
||||
*
|
||||
* The strategy is to rename all type variables bound by existential type to stable
|
||||
* names by assigning to each type variable a De Bruijn-like index. As a result, each
|
||||
* type variable gets name of this shape:
|
||||
*
|
||||
* "existential_${nestingLevel}_${i}"
|
||||
*
|
||||
* where `nestingLevel` indicates nesting level of existential types and `i` variable
|
||||
* indicates position of type variable in given existential type.
|
||||
*
|
||||
* For example, let's assume we have the following classes declared:
|
||||
*
|
||||
* class A[T]; class B[T,U]
|
||||
*
|
||||
* and we have type A[_] that is expanded by Scala compiler into
|
||||
*
|
||||
* A[_$1] forSome { type _$1 }
|
||||
*
|
||||
* After applying our renaming strategy we get
|
||||
*
|
||||
* A[existential_0_0] forSome { type existential_0_0 }
|
||||
*
|
||||
* Let's consider a bit more complicated example which shows how our strategy deals with
|
||||
* nested existential types:
|
||||
*
|
||||
* A[_ <: B[_, _]]
|
||||
*
|
||||
* which gets expanded into:
|
||||
*
|
||||
* A[_$1] forSome {
|
||||
* type _$1 <: B[_$2, _$3] forSome { type _$2; type _$3 }
|
||||
* }
|
||||
*
|
||||
* After applying our renaming strategy we get
|
||||
*
|
||||
* A[existential_0_0] forSome {
|
||||
* type existential_0_0 <: B[existential_1_0, existential_1_1] forSome {
|
||||
* type existential_1_0; type existential_1_1
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* Note how the first index (nesting level) is bumped for both existential types.
|
||||
*
|
||||
* This way, all names of existential type variables depend only on the structure of
|
||||
* existential types and are kept stable.
|
||||
*
|
||||
* Both examples presented above used placeholder syntax for existential types but our
|
||||
* strategy is applied uniformly to all existential types no matter if they are written
|
||||
* using placeholder syntax or explicitly.
|
||||
*/
|
||||
private[this] object existentialRenamings {
|
||||
private var nestingLevel: Int = 0
|
||||
import scala.collection.mutable.Map
|
||||
private var renameTo: Map[Symbol, String] = Map.empty
|
||||
|
||||
def leaveExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
|
||||
nestingLevel -= 1
|
||||
assert(nestingLevel >= 0)
|
||||
typeVariables.foreach(renameTo.remove)
|
||||
}
|
||||
def enterExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
|
||||
nestingLevel += 1
|
||||
typeVariables.zipWithIndex foreach { case (tv, i) =>
|
||||
val newName = "existential_" + nestingLevel + "_" + i
|
||||
renameTo(tv) = newName
|
||||
}
|
||||
}
|
||||
def renaming(symbol: Symbol): Option[String] = renameTo.get(symbol)
|
||||
}
|
||||
def leaveExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
|
||||
nestingLevel -= 1
|
||||
assert(nestingLevel >= 0)
|
||||
typeVariables.foreach(renameTo.remove)
|
||||
}
|
||||
def enterExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = {
|
||||
nestingLevel += 1
|
||||
typeVariables.zipWithIndex foreach {
|
||||
case (tv, i) =>
|
||||
val newName = "existential_" + nestingLevel + "_" + i
|
||||
renameTo(tv) = newName
|
||||
}
|
||||
}
|
||||
def renaming(symbol: Symbol): Option[String] = renameTo.get(symbol)
|
||||
}
|
||||
|
||||
// call back to the xsbti.SafeLazy class in main sbt code to construct a SafeLazy instance
|
||||
// we pass a thunk, whose class is loaded by the interface class loader (this class's loader)
|
||||
// SafeLazy ensures that once the value is forced, the thunk is nulled out and so
|
||||
// references to the thunk's classes are not retained. Specifically, it allows the interface classes
|
||||
// (those in this subproject) to be garbage collected after compilation.
|
||||
private[this] val safeLazy = Class.forName("xsbti.SafeLazy").getMethod("apply", classOf[xsbti.F0[_]])
|
||||
private def lzy[S <: AnyRef](s: => S): xsbti.api.Lazy[S] =
|
||||
{
|
||||
val z = safeLazy.invoke(null, Message(s)).asInstanceOf[xsbti.api.Lazy[S]]
|
||||
pending += z
|
||||
z
|
||||
}
|
||||
// call back to the xsbti.SafeLazy class in main sbt code to construct a SafeLazy instance
|
||||
// we pass a thunk, whose class is loaded by the interface class loader (this class's loader)
|
||||
// SafeLazy ensures that once the value is forced, the thunk is nulled out and so
|
||||
// references to the thunk's classes are not retained. Specifically, it allows the interface classes
|
||||
// (those in this subproject) to be garbage collected after compilation.
|
||||
private[this] val safeLazy = Class.forName("xsbti.SafeLazy").getMethod("apply", classOf[xsbti.F0[_]])
|
||||
private def lzy[S <: AnyRef](s: => S): xsbti.api.Lazy[S] =
|
||||
{
|
||||
val z = safeLazy.invoke(null, Message(s)).asInstanceOf[xsbti.api.Lazy[S]]
|
||||
pending += z
|
||||
z
|
||||
}
|
||||
|
||||
/**
|
||||
* Force all lazy structures. This is necessary so that we see the symbols/types at this phase and
|
||||
* so that we don't hold on to compiler objects and classes
|
||||
*/
|
||||
def forceStructures(): Unit =
|
||||
if(pending.isEmpty)
|
||||
structureCache.clear()
|
||||
else
|
||||
{
|
||||
val toProcess = pending.toList
|
||||
pending.clear()
|
||||
toProcess foreach { _.get() }
|
||||
forceStructures()
|
||||
}
|
||||
/**
|
||||
* Force all lazy structures. This is necessary so that we see the symbols/types at this phase and
|
||||
* so that we don't hold on to compiler objects and classes
|
||||
*/
|
||||
def forceStructures(): Unit =
|
||||
if (pending.isEmpty)
|
||||
structureCache.clear()
|
||||
else {
|
||||
val toProcess = pending.toList
|
||||
pending.clear()
|
||||
toProcess foreach { _.get() }
|
||||
forceStructures()
|
||||
}
|
||||
|
||||
private def thisPath(sym: Symbol) = path(pathComponents(sym, Constants.thisPath :: Nil))
|
||||
private def path(components: List[PathComponent]) = new xsbti.api.Path(components.toArray[PathComponent])
|
||||
private def pathComponents(sym: Symbol, postfix: List[PathComponent]): List[PathComponent] =
|
||||
{
|
||||
if(sym == NoSymbol || sym.isRoot || sym.isEmptyPackageClass || sym.isRootPackage) postfix
|
||||
else pathComponents(sym.owner, new xsbti.api.Id(simpleName(sym)) :: postfix)
|
||||
}
|
||||
private def simpleType(in: Symbol, t: Type): SimpleType =
|
||||
processType(in, t) match
|
||||
{
|
||||
case s: SimpleType => s
|
||||
case x => warning("Not a simple type:\n\tType: " + t + " (" + t.getClass + ")\n\tTransformed: " + x.getClass); Constants.emptyType
|
||||
}
|
||||
private def types(in: Symbol, t: List[Type]): Array[xsbti.api.Type] = t.toArray[Type].map(processType(in, _))
|
||||
private def projectionType(in: Symbol, pre: Type, sym: Symbol) =
|
||||
{
|
||||
if(pre == NoPrefix)
|
||||
{
|
||||
if(sym.isLocalClass || sym.isRoot || sym.isRootPackage) Constants.emptyType
|
||||
else if(sym.isTypeParameterOrSkolem || sym.isExistentiallyBound) reference(sym)
|
||||
else {
|
||||
// this appears to come from an existential type in an inherited member- not sure why isExistential is false here
|
||||
/*println("Warning: Unknown prefixless type: " + sym + " in " + sym.owner + " in " + sym.enclClass)
|
||||
private def thisPath(sym: Symbol) = path(pathComponents(sym, Constants.thisPath :: Nil))
|
||||
private def path(components: List[PathComponent]) = new xsbti.api.Path(components.toArray[PathComponent])
|
||||
private def pathComponents(sym: Symbol, postfix: List[PathComponent]): List[PathComponent] =
|
||||
{
|
||||
if (sym == NoSymbol || sym.isRoot || sym.isEmptyPackageClass || sym.isRootPackage) postfix
|
||||
else pathComponents(sym.owner, new xsbti.api.Id(simpleName(sym)) :: postfix)
|
||||
}
|
||||
private def simpleType(in: Symbol, t: Type): SimpleType =
|
||||
processType(in, t) match {
|
||||
case s: SimpleType => s
|
||||
case x => warning("Not a simple type:\n\tType: " + t + " (" + t.getClass + ")\n\tTransformed: " + x.getClass); Constants.emptyType
|
||||
}
|
||||
private def types(in: Symbol, t: List[Type]): Array[xsbti.api.Type] = t.toArray[Type].map(processType(in, _))
|
||||
private def projectionType(in: Symbol, pre: Type, sym: Symbol) =
|
||||
{
|
||||
if (pre == NoPrefix) {
|
||||
if (sym.isLocalClass || sym.isRoot || sym.isRootPackage) Constants.emptyType
|
||||
else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound) reference(sym)
|
||||
else {
|
||||
// this appears to come from an existential type in an inherited member- not sure why isExistential is false here
|
||||
/*println("Warning: Unknown prefixless type: " + sym + " in " + sym.owner + " in " + sym.enclClass)
|
||||
println("\tFlags: " + sym.flags + ", istype: " + sym.isType + ", absT: " + sym.isAbstractType + ", alias: " + sym.isAliasType + ", nonclass: " + isNonClassType(sym))*/
|
||||
reference(sym)
|
||||
}
|
||||
}
|
||||
else if(sym.isRoot || sym.isRootPackage) Constants.emptyType
|
||||
else new xsbti.api.Projection(simpleType(in, pre), simpleName(sym))
|
||||
}
|
||||
private def reference(sym: Symbol): xsbti.api.ParameterRef = new xsbti.api.ParameterRef(tparamID(sym))
|
||||
reference(sym)
|
||||
}
|
||||
} else if (sym.isRoot || sym.isRootPackage) Constants.emptyType
|
||||
else new xsbti.api.Projection(simpleType(in, pre), simpleName(sym))
|
||||
}
|
||||
private def reference(sym: Symbol): xsbti.api.ParameterRef = new xsbti.api.ParameterRef(tparamID(sym))
|
||||
|
||||
private def annotations(in: Symbol, as: List[AnnotationInfo]): Array[xsbti.api.Annotation] = as.toArray[AnnotationInfo].map(annotation(in,_))
|
||||
private def annotation(in: Symbol, a: AnnotationInfo) =
|
||||
new xsbti.api.Annotation(processType(in, a.atp),
|
||||
if(a.assocs.isEmpty) Array(new xsbti.api.AnnotationArgument("", a.args.mkString("(", ",", ")"))) // what else to do with a Tree?
|
||||
else a.assocs.map { case (name, value) => new xsbti.api.AnnotationArgument(name.toString, value.toString) }.toArray[xsbti.api.AnnotationArgument]
|
||||
)
|
||||
private def annotated(in: Symbol, as: List[AnnotationInfo], tpe: Type) = new xsbti.api.Annotated(processType(in, tpe), annotations(in, as))
|
||||
private def annotations(in: Symbol, as: List[AnnotationInfo]): Array[xsbti.api.Annotation] = as.toArray[AnnotationInfo].map(annotation(in, _))
|
||||
private def annotation(in: Symbol, a: AnnotationInfo) =
|
||||
new xsbti.api.Annotation(processType(in, a.atp),
|
||||
if (a.assocs.isEmpty) Array(new xsbti.api.AnnotationArgument("", a.args.mkString("(", ",", ")"))) // what else to do with a Tree?
|
||||
else a.assocs.map { case (name, value) => new xsbti.api.AnnotationArgument(name.toString, value.toString) }.toArray[xsbti.api.AnnotationArgument]
|
||||
)
|
||||
private def annotated(in: Symbol, as: List[AnnotationInfo], tpe: Type) = new xsbti.api.Annotated(processType(in, tpe), annotations(in, as))
|
||||
|
||||
private def viewer(s: Symbol) = (if(s.isModule) s.moduleClass else s).thisType
|
||||
private def printMember(label: String, in: Symbol, t: Type) = println(label + " in " + in + " : " + t + " (debug: " + debugString(t) + " )")
|
||||
private def defDef(in: Symbol, s: Symbol) =
|
||||
{
|
||||
def build(t: Type, typeParams: Array[xsbti.api.TypeParameter], valueParameters: List[xsbti.api.ParameterList]): xsbti.api.Def =
|
||||
{
|
||||
def parameterList(syms: List[Symbol]): xsbti.api.ParameterList =
|
||||
{
|
||||
val isImplicitList = syms match { case head :: _ => isImplicit(head); case _ => false }
|
||||
new xsbti.api.ParameterList(syms.map(parameterS).toArray, isImplicitList)
|
||||
}
|
||||
t match
|
||||
{
|
||||
case PolyType(typeParams0, base) =>
|
||||
assert(typeParams.isEmpty)
|
||||
assert(valueParameters.isEmpty)
|
||||
build(base, typeParameters(in, typeParams0), Nil)
|
||||
case MethodType(params, resultType) =>
|
||||
build(resultType, typeParams, parameterList(params) :: valueParameters)
|
||||
case Nullary(resultType) => // 2.9 and later
|
||||
build(resultType, typeParams, valueParameters)
|
||||
case returnType =>
|
||||
val t2 = processType(in, dropConst(returnType))
|
||||
new xsbti.api.Def(valueParameters.reverse.toArray, t2, typeParams, simpleName(s), getAccess(s), getModifiers(s), annotations(in,s))
|
||||
}
|
||||
}
|
||||
def parameterS(s: Symbol): xsbti.api.MethodParameter =
|
||||
makeParameter(simpleName(s), s.info, s.info.typeSymbol, s)
|
||||
private def viewer(s: Symbol) = (if (s.isModule) s.moduleClass else s).thisType
|
||||
private def printMember(label: String, in: Symbol, t: Type) = println(label + " in " + in + " : " + t + " (debug: " + debugString(t) + " )")
|
||||
private def defDef(in: Symbol, s: Symbol) =
|
||||
{
|
||||
def build(t: Type, typeParams: Array[xsbti.api.TypeParameter], valueParameters: List[xsbti.api.ParameterList]): xsbti.api.Def =
|
||||
{
|
||||
def parameterList(syms: List[Symbol]): xsbti.api.ParameterList =
|
||||
{
|
||||
val isImplicitList = syms match { case head :: _ => isImplicit(head); case _ => false }
|
||||
new xsbti.api.ParameterList(syms.map(parameterS).toArray, isImplicitList)
|
||||
}
|
||||
t match {
|
||||
case PolyType(typeParams0, base) =>
|
||||
assert(typeParams.isEmpty)
|
||||
assert(valueParameters.isEmpty)
|
||||
build(base, typeParameters(in, typeParams0), Nil)
|
||||
case MethodType(params, resultType) =>
|
||||
build(resultType, typeParams, parameterList(params) :: valueParameters)
|
||||
case Nullary(resultType) => // 2.9 and later
|
||||
build(resultType, typeParams, valueParameters)
|
||||
case returnType =>
|
||||
val t2 = processType(in, dropConst(returnType))
|
||||
new xsbti.api.Def(valueParameters.reverse.toArray, t2, typeParams, simpleName(s), getAccess(s), getModifiers(s), annotations(in, s))
|
||||
}
|
||||
}
|
||||
def parameterS(s: Symbol): xsbti.api.MethodParameter =
|
||||
makeParameter(simpleName(s), s.info, s.info.typeSymbol, s)
|
||||
|
||||
// paramSym is only for 2.8 and is to determine if the parameter has a default
|
||||
def makeParameter(name: String, tpe: Type, ts: Symbol, paramSym: Symbol): xsbti.api.MethodParameter =
|
||||
{
|
||||
import xsbti.api.ParameterModifier._
|
||||
val (t, special) =
|
||||
if(ts == definitions.RepeatedParamClass)// || s == definitions.JavaRepeatedParamClass)
|
||||
(tpe.typeArgs(0), Repeated)
|
||||
else if(ts == definitions.ByNameParamClass)
|
||||
(tpe.typeArgs(0), ByName)
|
||||
else
|
||||
(tpe, Plain)
|
||||
new xsbti.api.MethodParameter(name, processType(in, t), hasDefault(paramSym), special)
|
||||
}
|
||||
val t = viewer(in).memberInfo(s)
|
||||
build(t, Array(), Nil)
|
||||
}
|
||||
private def hasDefault(s: Symbol) = s != NoSymbol && s.hasFlag(Flags.DEFAULTPARAM)
|
||||
private def fieldDef[T](in: Symbol, s: Symbol, keepConst: Boolean, create: (xsbti.api.Type, String, xsbti.api.Access, xsbti.api.Modifiers, Array[xsbti.api.Annotation]) => T): T =
|
||||
{
|
||||
val t = dropNullary(viewer(in).memberType(s))
|
||||
val t2 = if(keepConst) t else dropConst(t)
|
||||
create(processType(in, t2), simpleName(s), getAccess(s), getModifiers(s), annotations(in, s))
|
||||
}
|
||||
private def dropConst(t: Type): Type = t match {
|
||||
case ConstantType(constant) => constant.tpe
|
||||
case _ => t
|
||||
}
|
||||
private def dropNullary(t: Type): Type = t match {
|
||||
case Nullary(un) => un
|
||||
case _ => t
|
||||
}
|
||||
// paramSym is only for 2.8 and is to determine if the parameter has a default
|
||||
def makeParameter(name: String, tpe: Type, ts: Symbol, paramSym: Symbol): xsbti.api.MethodParameter =
|
||||
{
|
||||
import xsbti.api.ParameterModifier._
|
||||
val (t, special) =
|
||||
if (ts == definitions.RepeatedParamClass) // || s == definitions.JavaRepeatedParamClass)
|
||||
(tpe.typeArgs(0), Repeated)
|
||||
else if (ts == definitions.ByNameParamClass)
|
||||
(tpe.typeArgs(0), ByName)
|
||||
else
|
||||
(tpe, Plain)
|
||||
new xsbti.api.MethodParameter(name, processType(in, t), hasDefault(paramSym), special)
|
||||
}
|
||||
val t = viewer(in).memberInfo(s)
|
||||
build(t, Array(), Nil)
|
||||
}
|
||||
private def hasDefault(s: Symbol) = s != NoSymbol && s.hasFlag(Flags.DEFAULTPARAM)
|
||||
private def fieldDef[T](in: Symbol, s: Symbol, keepConst: Boolean, create: (xsbti.api.Type, String, xsbti.api.Access, xsbti.api.Modifiers, Array[xsbti.api.Annotation]) => T): T =
|
||||
{
|
||||
val t = dropNullary(viewer(in).memberType(s))
|
||||
val t2 = if (keepConst) t else dropConst(t)
|
||||
create(processType(in, t2), simpleName(s), getAccess(s), getModifiers(s), annotations(in, s))
|
||||
}
|
||||
private def dropConst(t: Type): Type = t match {
|
||||
case ConstantType(constant) => constant.tpe
|
||||
case _ => t
|
||||
}
|
||||
private def dropNullary(t: Type): Type = t match {
|
||||
case Nullary(un) => un
|
||||
case _ => t
|
||||
}
|
||||
|
||||
private def typeDef(in: Symbol, s: Symbol): xsbti.api.TypeMember =
|
||||
{
|
||||
val (typeParams, tpe) =
|
||||
viewer(in).memberInfo(s) match
|
||||
{
|
||||
case PolyType(typeParams0, base) => (typeParameters(in, typeParams0), base)
|
||||
case t => (Array[xsbti.api.TypeParameter](), t)
|
||||
}
|
||||
val name = simpleName(s)
|
||||
val access = getAccess(s)
|
||||
val modifiers = getModifiers(s)
|
||||
val as = annotations(in, s)
|
||||
private def typeDef(in: Symbol, s: Symbol): xsbti.api.TypeMember =
|
||||
{
|
||||
val (typeParams, tpe) =
|
||||
viewer(in).memberInfo(s) match {
|
||||
case PolyType(typeParams0, base) => (typeParameters(in, typeParams0), base)
|
||||
case t => (Array[xsbti.api.TypeParameter](), t)
|
||||
}
|
||||
val name = simpleName(s)
|
||||
val access = getAccess(s)
|
||||
val modifiers = getModifiers(s)
|
||||
val as = annotations(in, s)
|
||||
|
||||
if(s.isAliasType)
|
||||
new xsbti.api.TypeAlias(processType(in, tpe), typeParams, name, access, modifiers, as)
|
||||
else if(s.isAbstractType)
|
||||
{
|
||||
val bounds = tpe.bounds
|
||||
new xsbti.api.TypeDeclaration(processType(in, bounds.lo), processType(in, bounds.hi), typeParams, name, access, modifiers, as)
|
||||
}
|
||||
else
|
||||
error("Unknown type member" + s)
|
||||
}
|
||||
if (s.isAliasType)
|
||||
new xsbti.api.TypeAlias(processType(in, tpe), typeParams, name, access, modifiers, as)
|
||||
else if (s.isAbstractType) {
|
||||
val bounds = tpe.bounds
|
||||
new xsbti.api.TypeDeclaration(processType(in, bounds.lo), processType(in, bounds.hi), typeParams, name, access, modifiers, as)
|
||||
} else
|
||||
error("Unknown type member" + s)
|
||||
}
|
||||
|
||||
private def structure(in: Symbol, s: Symbol): xsbti.api.Structure = structure(viewer(in).memberInfo(s), s, true)
|
||||
private def structure(info: Type): xsbti.api.Structure = structure(info, info.typeSymbol, false)
|
||||
private def structure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
|
||||
structureCache.getOrElseUpdate( s, mkStructure(info, s, inherit))
|
||||
private def structure(in: Symbol, s: Symbol): xsbti.api.Structure = structure(viewer(in).memberInfo(s), s, true)
|
||||
private def structure(info: Type): xsbti.api.Structure = structure(info, info.typeSymbol, false)
|
||||
private def structure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
|
||||
structureCache.getOrElseUpdate(s, mkStructure(info, s, inherit))
|
||||
|
||||
private def removeConstructors(ds: List[Symbol]): List[Symbol] = ds filter { !_.isConstructor}
|
||||
private def removeConstructors(ds: List[Symbol]): List[Symbol] = ds filter { !_.isConstructor }
|
||||
|
||||
private def mkStructure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
|
||||
{
|
||||
val (declared, inherited) = info.members.reverse.partition(_.owner == s)
|
||||
val baseTypes = info.baseClasses.tail.map(info.baseType)
|
||||
val ds = if(s.isModuleClass) removeConstructors(declared) else declared
|
||||
val is = if(inherit) removeConstructors(inherited) else Nil
|
||||
mkStructure(s, baseTypes, ds, is)
|
||||
}
|
||||
private def mkStructure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure =
|
||||
{
|
||||
val (declared, inherited) = info.members.reverse.partition(_.owner == s)
|
||||
val baseTypes = info.baseClasses.tail.map(info.baseType)
|
||||
val ds = if (s.isModuleClass) removeConstructors(declared) else declared
|
||||
val is = if (inherit) removeConstructors(inherited) else Nil
|
||||
mkStructure(s, baseTypes, ds, is)
|
||||
}
|
||||
|
||||
// If true, this template is publicly visible and should be processed as a public inheritance dependency.
|
||||
// Local classes and local refinements will never be traversed by the api phase, so we don't need to check for that.
|
||||
private[this] def isPublicStructure(s: Symbol): Boolean =
|
||||
s.isStructuralRefinement ||
|
||||
// do not consider templates that are private[this] or private
|
||||
!(s.isPrivate && (s.privateWithin == NoSymbol || s.isLocal))
|
||||
// If true, this template is publicly visible and should be processed as a public inheritance dependency.
|
||||
// Local classes and local refinements will never be traversed by the api phase, so we don't need to check for that.
|
||||
private[this] def isPublicStructure(s: Symbol): Boolean =
|
||||
s.isStructuralRefinement ||
|
||||
// do not consider templates that are private[this] or private
|
||||
!(s.isPrivate && (s.privateWithin == NoSymbol || s.isLocal))
|
||||
|
||||
private def mkStructure(s: Symbol, bases: List[Type], declared: List[Symbol], inherited: List[Symbol]): xsbti.api.Structure = {
|
||||
if(isPublicStructure(s))
|
||||
addInheritedDependencies(sourceFile, bases.map(_.dealias.typeSymbol))
|
||||
new xsbti.api.Structure(lzy(types(s, bases)), lzy(processDefinitions(s, declared)), lzy(processDefinitions(s, inherited)))
|
||||
}
|
||||
private def processDefinitions(in: Symbol, defs: List[Symbol]): Array[xsbti.api.Definition] =
|
||||
sort(defs.toArray).flatMap( (d: Symbol) => definition(in, d))
|
||||
private[this] def sort(defs: Array[Symbol]): Array[Symbol] = {
|
||||
Arrays.sort(defs, sortClasses)
|
||||
defs
|
||||
}
|
||||
private def mkStructure(s: Symbol, bases: List[Type], declared: List[Symbol], inherited: List[Symbol]): xsbti.api.Structure = {
|
||||
if (isPublicStructure(s))
|
||||
addInheritedDependencies(sourceFile, bases.map(_.dealias.typeSymbol))
|
||||
new xsbti.api.Structure(lzy(types(s, bases)), lzy(processDefinitions(s, declared)), lzy(processDefinitions(s, inherited)))
|
||||
}
|
||||
private def processDefinitions(in: Symbol, defs: List[Symbol]): Array[xsbti.api.Definition] =
|
||||
sort(defs.toArray).flatMap((d: Symbol) => definition(in, d))
|
||||
private[this] def sort(defs: Array[Symbol]): Array[Symbol] = {
|
||||
Arrays.sort(defs, sortClasses)
|
||||
defs
|
||||
}
|
||||
|
||||
private def definition(in: Symbol, sym: Symbol): Option[xsbti.api.Definition] =
|
||||
{
|
||||
def mkVar = Some(fieldDef(in, sym, false, new xsbti.api.Var(_,_,_,_,_)))
|
||||
def mkVal = Some(fieldDef(in, sym, true, new xsbti.api.Val(_,_,_,_,_)))
|
||||
if(isClass(sym))
|
||||
if(ignoreClass(sym)) None else Some(classLike(in, sym))
|
||||
else if(sym.isNonClassType)
|
||||
Some(typeDef(in, sym))
|
||||
else if(sym.isVariable)
|
||||
if(isSourceField(sym)) mkVar else None
|
||||
else if(sym.isStable)
|
||||
if(isSourceField(sym)) mkVal else None
|
||||
else if(sym.isSourceMethod && !sym.isSetter)
|
||||
if(sym.isGetter) mkVar else Some(defDef(in, sym))
|
||||
else
|
||||
None
|
||||
}
|
||||
private def ignoreClass(sym: Symbol): Boolean =
|
||||
sym.isLocalClass || sym.isAnonymousClass || sym.fullName.endsWith(LocalChild.toString)
|
||||
private def definition(in: Symbol, sym: Symbol): Option[xsbti.api.Definition] =
|
||||
{
|
||||
def mkVar = Some(fieldDef(in, sym, false, new xsbti.api.Var(_, _, _, _, _)))
|
||||
def mkVal = Some(fieldDef(in, sym, true, new xsbti.api.Val(_, _, _, _, _)))
|
||||
if (isClass(sym))
|
||||
if (ignoreClass(sym)) None else Some(classLike(in, sym))
|
||||
else if (sym.isNonClassType)
|
||||
Some(typeDef(in, sym))
|
||||
else if (sym.isVariable)
|
||||
if (isSourceField(sym)) mkVar else None
|
||||
else if (sym.isStable)
|
||||
if (isSourceField(sym)) mkVal else None
|
||||
else if (sym.isSourceMethod && !sym.isSetter)
|
||||
if (sym.isGetter) mkVar else Some(defDef(in, sym))
|
||||
else
|
||||
None
|
||||
}
|
||||
private def ignoreClass(sym: Symbol): Boolean =
|
||||
sym.isLocalClass || sym.isAnonymousClass || sym.fullName.endsWith(LocalChild.toString)
|
||||
|
||||
// This filters private[this] vals/vars that were not in the original source.
|
||||
// The getter will be used for processing instead.
|
||||
private def isSourceField(sym: Symbol): Boolean =
|
||||
{
|
||||
val getter = sym.getter(sym.enclClass)
|
||||
// the check `getter eq sym` is a precaution against infinite recursion
|
||||
// `isParamAccessor` does not exist in all supported versions of Scala, so the flag check is done directly
|
||||
(getter == NoSymbol && !sym.hasFlag(Flags.PARAMACCESSOR)) || (getter eq sym)
|
||||
}
|
||||
private def getModifiers(s: Symbol): xsbti.api.Modifiers =
|
||||
{
|
||||
import Flags._
|
||||
val absOver = s.hasFlag(ABSOVERRIDE)
|
||||
val abs = s.hasFlag(ABSTRACT) || s.hasFlag(DEFERRED) || absOver
|
||||
val over = s.hasFlag(OVERRIDE) || absOver
|
||||
new xsbti.api.Modifiers(abs, over, s.isFinal, s.hasFlag(SEALED), isImplicit(s), s.hasFlag(LAZY), hasMacro(s))
|
||||
}
|
||||
// This filters private[this] vals/vars that were not in the original source.
|
||||
// The getter will be used for processing instead.
|
||||
private def isSourceField(sym: Symbol): Boolean =
|
||||
{
|
||||
val getter = sym.getter(sym.enclClass)
|
||||
// the check `getter eq sym` is a precaution against infinite recursion
|
||||
// `isParamAccessor` does not exist in all supported versions of Scala, so the flag check is done directly
|
||||
(getter == NoSymbol && !sym.hasFlag(Flags.PARAMACCESSOR)) || (getter eq sym)
|
||||
}
|
||||
private def getModifiers(s: Symbol): xsbti.api.Modifiers =
|
||||
{
|
||||
import Flags._
|
||||
val absOver = s.hasFlag(ABSOVERRIDE)
|
||||
val abs = s.hasFlag(ABSTRACT) || s.hasFlag(DEFERRED) || absOver
|
||||
val over = s.hasFlag(OVERRIDE) || absOver
|
||||
new xsbti.api.Modifiers(abs, over, s.isFinal, s.hasFlag(SEALED), isImplicit(s), s.hasFlag(LAZY), hasMacro(s))
|
||||
}
|
||||
|
||||
private def isImplicit(s: Symbol) = s.hasFlag(Flags.IMPLICIT)
|
||||
private def getAccess(c: Symbol): xsbti.api.Access =
|
||||
{
|
||||
if(c.isPublic) Constants.public
|
||||
else if(c.isPrivateLocal) Constants.privateLocal
|
||||
else if(c.isProtectedLocal) Constants.protectedLocal
|
||||
else
|
||||
{
|
||||
val within = c.privateWithin
|
||||
val qualifier = if(within == NoSymbol) Constants.unqualified else new xsbti.api.IdQualifier(within.fullName)
|
||||
if(c.hasFlag(Flags.PROTECTED)) new xsbti.api.Protected(qualifier)
|
||||
else new xsbti.api.Private(qualifier)
|
||||
}
|
||||
}
|
||||
private def isImplicit(s: Symbol) = s.hasFlag(Flags.IMPLICIT)
|
||||
private def getAccess(c: Symbol): xsbti.api.Access =
|
||||
{
|
||||
if (c.isPublic) Constants.public
|
||||
else if (c.isPrivateLocal) Constants.privateLocal
|
||||
else if (c.isProtectedLocal) Constants.protectedLocal
|
||||
else {
|
||||
val within = c.privateWithin
|
||||
val qualifier = if (within == NoSymbol) Constants.unqualified else new xsbti.api.IdQualifier(within.fullName)
|
||||
if (c.hasFlag(Flags.PROTECTED)) new xsbti.api.Protected(qualifier)
|
||||
else new xsbti.api.Private(qualifier)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace all types that directly refer to the `forbidden` symbol by `NoType`.
|
||||
* (a specialized version of substThisAndSym)
|
||||
*/
|
||||
class SuppressSymbolRef(forbidden: Symbol) extends TypeMap {
|
||||
def apply(tp: Type) =
|
||||
if (tp.typeSymbolDirect == forbidden) NoType
|
||||
else mapOver(tp)
|
||||
}
|
||||
/**
|
||||
* Replace all types that directly refer to the `forbidden` symbol by `NoType`.
|
||||
* (a specialized version of substThisAndSym)
|
||||
*/
|
||||
class SuppressSymbolRef(forbidden: Symbol) extends TypeMap {
|
||||
def apply(tp: Type) =
|
||||
if (tp.typeSymbolDirect == forbidden) NoType
|
||||
else mapOver(tp)
|
||||
}
|
||||
|
||||
private def processType(in: Symbol, t: Type): xsbti.api.Type = typeCache.getOrElseUpdate((in, t), makeType(in, t))
|
||||
private def makeType(in: Symbol, t: Type): xsbti.api.Type =
|
||||
{
|
||||
private def processType(in: Symbol, t: Type): xsbti.api.Type = typeCache.getOrElseUpdate((in, t), makeType(in, t))
|
||||
private def makeType(in: Symbol, t: Type): xsbti.api.Type =
|
||||
{
|
||||
|
||||
val dealiased = t match {
|
||||
case TypeRef(_, sym, _) if sym.isAliasType => t.dealias
|
||||
case _ => t
|
||||
}
|
||||
val dealiased = t match {
|
||||
case TypeRef(_, sym, _) if sym.isAliasType => t.dealias
|
||||
case _ => t
|
||||
}
|
||||
|
||||
dealiased match
|
||||
{
|
||||
case NoPrefix => Constants.emptyType
|
||||
case ThisType(sym) => new xsbti.api.Singleton(thisPath(sym))
|
||||
case SingleType(pre, sym) => projectionType(in, pre, sym)
|
||||
case ConstantType(constant) => new xsbti.api.Constant(processType(in, constant.tpe), constant.stringValue)
|
||||
dealiased match {
|
||||
case NoPrefix => Constants.emptyType
|
||||
case ThisType(sym) => new xsbti.api.Singleton(thisPath(sym))
|
||||
case SingleType(pre, sym) => projectionType(in, pre, sym)
|
||||
case ConstantType(constant) => new xsbti.api.Constant(processType(in, constant.tpe), constant.stringValue)
|
||||
|
||||
/* explaining the special-casing of references to refinement classes (https://support.typesafe.com/tickets/1882)
|
||||
/* explaining the special-casing of references to refinement classes (https://support.typesafe.com/tickets/1882)
|
||||
*
|
||||
* goal: a representation of type references to refinement classes that's stable across compilation runs
|
||||
* (and thus insensitive to typing from source or unpickling from bytecode)
|
||||
|
|
@ -393,152 +384,150 @@ class ExtractAPI[GlobalType <: CallbackGlobal](val global: GlobalType,
|
|||
* + expand the reference to the corresponding refinement type: doing that recursively may not terminate, but we can deal with that by approximating recursive references
|
||||
* (all we care about is being sound for recompilation: recompile iff a dependency changes, and this will happen as long as we have one unrolling of the reference to the refinement)
|
||||
*/
|
||||
case TypeRef(pre, sym, Nil) if sym.isRefinementClass =>
|
||||
// Since we only care about detecting changes reliably, we unroll a reference to a refinement class once.
|
||||
// Recursive references are simply replaced by NoType -- changes to the type will be seen in the first unrolling.
|
||||
// The API need not be type correct, so this truncation is acceptable. Most of all, the API should be compact.
|
||||
val unrolling = pre.memberInfo(sym) // this is a refinement type
|
||||
case TypeRef(pre, sym, Nil) if sym.isRefinementClass =>
|
||||
// Since we only care about detecting changes reliably, we unroll a reference to a refinement class once.
|
||||
// Recursive references are simply replaced by NoType -- changes to the type will be seen in the first unrolling.
|
||||
// The API need not be type correct, so this truncation is acceptable. Most of all, the API should be compact.
|
||||
val unrolling = pre.memberInfo(sym) // this is a refinement type
|
||||
|
||||
// in case there are recursive references, suppress them -- does this ever happen?
|
||||
// we don't have a test case for this, so warn and hope we'll get a contribution for it :-)
|
||||
val withoutRecursiveRefs = new SuppressSymbolRef(sym).mapOver(unrolling)
|
||||
if (unrolling ne withoutRecursiveRefs)
|
||||
reporter.warning(sym.pos, "sbt-api: approximated refinement ref"+ t +" (== "+ unrolling +") to "+ withoutRecursiveRefs +"\nThis is currently untested, please report the code you were compiling.")
|
||||
// in case there are recursive references, suppress them -- does this ever happen?
|
||||
// we don't have a test case for this, so warn and hope we'll get a contribution for it :-)
|
||||
val withoutRecursiveRefs = new SuppressSymbolRef(sym).mapOver(unrolling)
|
||||
if (unrolling ne withoutRecursiveRefs)
|
||||
reporter.warning(sym.pos, "sbt-api: approximated refinement ref" + t + " (== " + unrolling + ") to " + withoutRecursiveRefs + "\nThis is currently untested, please report the code you were compiling.")
|
||||
|
||||
structure(withoutRecursiveRefs)
|
||||
case tr @ TypeRef(pre, sym, args) =>
|
||||
val base = projectionType(in, pre, sym)
|
||||
if(args.isEmpty)
|
||||
if(isRawType(tr))
|
||||
processType(in, rawToExistential(tr))
|
||||
else
|
||||
base
|
||||
else
|
||||
new xsbti.api.Parameterized(base, types(in, args))
|
||||
case SuperType(thistpe: Type, supertpe: Type) => warning("sbt-api: Super type (not implemented): this=" + thistpe + ", super=" + supertpe); Constants.emptyType
|
||||
case at: AnnotatedType => annotatedType(in, at)
|
||||
case rt: CompoundType => structure(rt)
|
||||
case t: ExistentialType => makeExistentialType(in, t)
|
||||
case NoType => Constants.emptyType // this can happen when there is an error that will be reported by a later phase
|
||||
case PolyType(typeParams, resultType) => new xsbti.api.Polymorphic(processType(in, resultType), typeParameters(in, typeParams))
|
||||
case Nullary(resultType) => warning("sbt-api: Unexpected nullary method type " + in + " in " + in.owner); Constants.emptyType
|
||||
case _ => warning("sbt-api: Unhandled type " + t.getClass + " : " + t); Constants.emptyType
|
||||
}
|
||||
}
|
||||
private def makeExistentialType(in: Symbol, t: ExistentialType): xsbti.api.Existential = {
|
||||
val ExistentialType(typeVariables, qualified) = t
|
||||
existentialRenamings.enterExistentialTypeVariables(typeVariables)
|
||||
try {
|
||||
val typeVariablesConverted = typeParameters(in, typeVariables)
|
||||
val qualifiedConverted = processType(in, qualified)
|
||||
new xsbti.api.Existential(qualifiedConverted, typeVariablesConverted)
|
||||
} finally {
|
||||
existentialRenamings.leaveExistentialTypeVariables(typeVariables)
|
||||
}
|
||||
}
|
||||
private def typeParameters(in: Symbol, s: Symbol): Array[xsbti.api.TypeParameter] = typeParameters(in, s.typeParams)
|
||||
private def typeParameters(in: Symbol, s: List[Symbol]): Array[xsbti.api.TypeParameter] = s.map(typeParameter(in,_)).toArray[xsbti.api.TypeParameter]
|
||||
private def typeParameter(in: Symbol, s: Symbol): xsbti.api.TypeParameter =
|
||||
{
|
||||
val varianceInt = s.variance
|
||||
import xsbti.api.Variance._
|
||||
val annots = annotations(in, s)
|
||||
val variance = if(varianceInt < 0) Contravariant else if(varianceInt > 0) Covariant else Invariant
|
||||
viewer(in).memberInfo(s) match
|
||||
{
|
||||
case TypeBounds(low, high) => new xsbti.api.TypeParameter( tparamID(s), annots, typeParameters(in, s), variance, processType(in, low), processType(in, high) )
|
||||
case PolyType(typeParams, base) => new xsbti.api.TypeParameter( tparamID(s), annots, typeParameters(in, typeParams), variance, processType(in, base.bounds.lo), processType(in, base.bounds.hi))
|
||||
case x => error("Unknown type parameter info: " + x.getClass)
|
||||
}
|
||||
}
|
||||
private def tparamID(s: Symbol): String = {
|
||||
val renameTo = existentialRenamings.renaming(s)
|
||||
renameTo match {
|
||||
case Some(rename) =>
|
||||
// can't use debuglog because it doesn't exist in Scala 2.9.x
|
||||
if (settings.debug.value)
|
||||
log("Renaming existential type variable " + s.fullName + " to " + rename)
|
||||
rename
|
||||
case None =>
|
||||
s.fullName
|
||||
}
|
||||
}
|
||||
private def selfType(in: Symbol, s: Symbol): xsbti.api.Type = processType(in, s.thisSym.typeOfThis)
|
||||
structure(withoutRecursiveRefs)
|
||||
case tr @ TypeRef(pre, sym, args) =>
|
||||
val base = projectionType(in, pre, sym)
|
||||
if (args.isEmpty)
|
||||
if (isRawType(tr))
|
||||
processType(in, rawToExistential(tr))
|
||||
else
|
||||
base
|
||||
else
|
||||
new xsbti.api.Parameterized(base, types(in, args))
|
||||
case SuperType(thistpe: Type, supertpe: Type) =>
|
||||
warning("sbt-api: Super type (not implemented): this=" + thistpe + ", super=" + supertpe); Constants.emptyType
|
||||
case at: AnnotatedType => annotatedType(in, at)
|
||||
case rt: CompoundType => structure(rt)
|
||||
case t: ExistentialType => makeExistentialType(in, t)
|
||||
case NoType => Constants.emptyType // this can happen when there is an error that will be reported by a later phase
|
||||
case PolyType(typeParams, resultType) => new xsbti.api.Polymorphic(processType(in, resultType), typeParameters(in, typeParams))
|
||||
case Nullary(resultType) =>
|
||||
warning("sbt-api: Unexpected nullary method type " + in + " in " + in.owner); Constants.emptyType
|
||||
case _ => warning("sbt-api: Unhandled type " + t.getClass + " : " + t); Constants.emptyType
|
||||
}
|
||||
}
|
||||
private def makeExistentialType(in: Symbol, t: ExistentialType): xsbti.api.Existential = {
|
||||
val ExistentialType(typeVariables, qualified) = t
|
||||
existentialRenamings.enterExistentialTypeVariables(typeVariables)
|
||||
try {
|
||||
val typeVariablesConverted = typeParameters(in, typeVariables)
|
||||
val qualifiedConverted = processType(in, qualified)
|
||||
new xsbti.api.Existential(qualifiedConverted, typeVariablesConverted)
|
||||
} finally {
|
||||
existentialRenamings.leaveExistentialTypeVariables(typeVariables)
|
||||
}
|
||||
}
|
||||
private def typeParameters(in: Symbol, s: Symbol): Array[xsbti.api.TypeParameter] = typeParameters(in, s.typeParams)
|
||||
private def typeParameters(in: Symbol, s: List[Symbol]): Array[xsbti.api.TypeParameter] = s.map(typeParameter(in, _)).toArray[xsbti.api.TypeParameter]
|
||||
private def typeParameter(in: Symbol, s: Symbol): xsbti.api.TypeParameter =
|
||||
{
|
||||
val varianceInt = s.variance
|
||||
import xsbti.api.Variance._
|
||||
val annots = annotations(in, s)
|
||||
val variance = if (varianceInt < 0) Contravariant else if (varianceInt > 0) Covariant else Invariant
|
||||
viewer(in).memberInfo(s) match {
|
||||
case TypeBounds(low, high) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, s), variance, processType(in, low), processType(in, high))
|
||||
case PolyType(typeParams, base) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, typeParams), variance, processType(in, base.bounds.lo), processType(in, base.bounds.hi))
|
||||
case x => error("Unknown type parameter info: " + x.getClass)
|
||||
}
|
||||
}
|
||||
private def tparamID(s: Symbol): String = {
|
||||
val renameTo = existentialRenamings.renaming(s)
|
||||
renameTo match {
|
||||
case Some(rename) =>
|
||||
// can't use debuglog because it doesn't exist in Scala 2.9.x
|
||||
if (settings.debug.value)
|
||||
log("Renaming existential type variable " + s.fullName + " to " + rename)
|
||||
rename
|
||||
case None =>
|
||||
s.fullName
|
||||
}
|
||||
}
|
||||
private def selfType(in: Symbol, s: Symbol): xsbti.api.Type = processType(in, s.thisSym.typeOfThis)
|
||||
|
||||
def classLike(in: Symbol, c: Symbol): ClassLike = classLikeCache.getOrElseUpdate( (in,c), mkClassLike(in, c))
|
||||
private def mkClassLike(in: Symbol, c: Symbol): ClassLike =
|
||||
{
|
||||
val name = c.fullName
|
||||
val isModule = c.isModuleClass || c.isModule
|
||||
val struct = if(isModule) c.moduleClass else c
|
||||
val defType =
|
||||
if(c.isTrait) DefinitionType.Trait
|
||||
else if(isModule)
|
||||
{
|
||||
if(c.isPackage) DefinitionType.PackageModule
|
||||
else DefinitionType.Module
|
||||
}
|
||||
else DefinitionType.ClassDef
|
||||
new xsbti.api.ClassLike(defType, lzy(selfType(in, c)), lzy(structure(in, struct)), emptyStringArray, typeParameters(in, c), name, getAccess(c), getModifiers(c), annotations(in, c))
|
||||
}
|
||||
def classLike(in: Symbol, c: Symbol): ClassLike = classLikeCache.getOrElseUpdate((in, c), mkClassLike(in, c))
|
||||
private def mkClassLike(in: Symbol, c: Symbol): ClassLike =
|
||||
{
|
||||
val name = c.fullName
|
||||
val isModule = c.isModuleClass || c.isModule
|
||||
val struct = if (isModule) c.moduleClass else c
|
||||
val defType =
|
||||
if (c.isTrait) DefinitionType.Trait
|
||||
else if (isModule) {
|
||||
if (c.isPackage) DefinitionType.PackageModule
|
||||
else DefinitionType.Module
|
||||
} else DefinitionType.ClassDef
|
||||
new xsbti.api.ClassLike(defType, lzy(selfType(in, c)), lzy(structure(in, struct)), emptyStringArray, typeParameters(in, c), name, getAccess(c), getModifiers(c), annotations(in, c))
|
||||
}
|
||||
|
||||
private[this] def isClass(s: Symbol) = s.isClass || s.isModule
|
||||
// necessary to ensure a stable ordering of classes in the definitions list:
|
||||
// modules and classes come first and are sorted by name
|
||||
// all other definitions come later and are not sorted
|
||||
private[this] val sortClasses = new Comparator[Symbol] {
|
||||
def compare(a: Symbol, b: Symbol) = {
|
||||
val aIsClass = isClass(a)
|
||||
val bIsClass = isClass(b)
|
||||
if(aIsClass == bIsClass)
|
||||
if(aIsClass)
|
||||
if(a.isModule == b.isModule)
|
||||
a.fullName.compareTo(b.fullName)
|
||||
else if(a.isModule)
|
||||
-1
|
||||
else
|
||||
1
|
||||
else
|
||||
0 // substantial performance hit if fullNames are compared here
|
||||
else if(aIsClass)
|
||||
-1
|
||||
else
|
||||
1
|
||||
}
|
||||
}
|
||||
private object Constants
|
||||
{
|
||||
val local = new xsbti.api.ThisQualifier
|
||||
val public = new xsbti.api.Public
|
||||
val privateLocal = new xsbti.api.Private(local)
|
||||
val protectedLocal = new xsbti.api.Protected(local)
|
||||
val unqualified = new xsbti.api.Unqualified
|
||||
val emptyPath = new xsbti.api.Path(Array())
|
||||
val thisPath = new xsbti.api.This
|
||||
val emptyType = new xsbti.api.EmptyType
|
||||
}
|
||||
private[this] def isClass(s: Symbol) = s.isClass || s.isModule
|
||||
// necessary to ensure a stable ordering of classes in the definitions list:
|
||||
// modules and classes come first and are sorted by name
|
||||
// all other definitions come later and are not sorted
|
||||
private[this] val sortClasses = new Comparator[Symbol] {
|
||||
def compare(a: Symbol, b: Symbol) = {
|
||||
val aIsClass = isClass(a)
|
||||
val bIsClass = isClass(b)
|
||||
if (aIsClass == bIsClass)
|
||||
if (aIsClass)
|
||||
if (a.isModule == b.isModule)
|
||||
a.fullName.compareTo(b.fullName)
|
||||
else if (a.isModule)
|
||||
-1
|
||||
else
|
||||
1
|
||||
else
|
||||
0 // substantial performance hit if fullNames are compared here
|
||||
else if (aIsClass)
|
||||
-1
|
||||
else
|
||||
1
|
||||
}
|
||||
}
|
||||
private object Constants {
|
||||
val local = new xsbti.api.ThisQualifier
|
||||
val public = new xsbti.api.Public
|
||||
val privateLocal = new xsbti.api.Private(local)
|
||||
val protectedLocal = new xsbti.api.Protected(local)
|
||||
val unqualified = new xsbti.api.Unqualified
|
||||
val emptyPath = new xsbti.api.Path(Array())
|
||||
val thisPath = new xsbti.api.This
|
||||
val emptyType = new xsbti.api.EmptyType
|
||||
}
|
||||
|
||||
private def simpleName(s: Symbol): String =
|
||||
{
|
||||
val n = s.originalName
|
||||
val n2 = if(n.toString == "<init>") n else n.decode
|
||||
n2.toString.trim
|
||||
}
|
||||
private def simpleName(s: Symbol): String =
|
||||
{
|
||||
val n = s.originalName
|
||||
val n2 = if (n.toString == "<init>") n else n.decode
|
||||
n2.toString.trim
|
||||
}
|
||||
|
||||
private def annotations(in: Symbol, s: Symbol): Array[xsbti.api.Annotation] =
|
||||
atPhase(currentRun.typerPhase) {
|
||||
val base = if(s.hasFlag(Flags.ACCESSOR)) s.accessed else NoSymbol
|
||||
val b = if(base == NoSymbol) s else base
|
||||
// annotations from bean methods are not handled because:
|
||||
// a) they are recorded as normal source methods anyway
|
||||
// b) there is no way to distinguish them from user-defined methods
|
||||
val associated = List(b, b.getter(b.enclClass), b.setter(b.enclClass)).filter(_ != NoSymbol)
|
||||
associated.flatMap( ss => annotations(in, ss.annotations) ).distinct.toArray ;
|
||||
}
|
||||
private def annotatedType(in: Symbol, at: AnnotatedType): xsbti.api.Type =
|
||||
{
|
||||
val annots = at.annotations
|
||||
if(annots.isEmpty) processType(in, at.underlying) else annotated(in, annots, at.underlying)
|
||||
}
|
||||
private def annotations(in: Symbol, s: Symbol): Array[xsbti.api.Annotation] =
|
||||
atPhase(currentRun.typerPhase) {
|
||||
val base = if (s.hasFlag(Flags.ACCESSOR)) s.accessed else NoSymbol
|
||||
val b = if (base == NoSymbol) s else base
|
||||
// annotations from bean methods are not handled because:
|
||||
// a) they are recorded as normal source methods anyway
|
||||
// b) there is no way to distinguish them from user-defined methods
|
||||
val associated = List(b, b.getter(b.enclClass), b.setter(b.enclClass)).filter(_ != NoSymbol)
|
||||
associated.flatMap(ss => annotations(in, ss.annotations)).distinct.toArray;
|
||||
}
|
||||
private def annotatedType(in: Symbol, at: AnnotatedType): xsbti.api.Type =
|
||||
{
|
||||
val annots = at.annotations
|
||||
if (annots.isEmpty) processType(in, at.underlying) else annotated(in, annots, at.underlying)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -39,86 +39,85 @@ import scala.tools.nsc._
|
|||
*
|
||||
*/
|
||||
class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) extends Compat {
|
||||
import global._
|
||||
import global._
|
||||
|
||||
def extract(unit: CompilationUnit): Set[String] = {
|
||||
val tree = unit.body
|
||||
val extractedByTreeWalk = extractByTreeWalk(tree)
|
||||
extractedByTreeWalk
|
||||
}
|
||||
def extract(unit: CompilationUnit): Set[String] = {
|
||||
val tree = unit.body
|
||||
val extractedByTreeWalk = extractByTreeWalk(tree)
|
||||
extractedByTreeWalk
|
||||
}
|
||||
|
||||
private def extractByTreeWalk(tree: Tree): Set[String] = {
|
||||
val namesBuffer = collection.mutable.ListBuffer.empty[String]
|
||||
def addSymbol(symbol: Symbol): Unit = {
|
||||
val symbolNameAsString = symbol.name.decode.trim
|
||||
namesBuffer += symbolNameAsString
|
||||
}
|
||||
private def extractByTreeWalk(tree: Tree): Set[String] = {
|
||||
val namesBuffer = collection.mutable.ListBuffer.empty[String]
|
||||
def addSymbol(symbol: Symbol): Unit = {
|
||||
val symbolNameAsString = symbol.name.decode.trim
|
||||
namesBuffer += symbolNameAsString
|
||||
}
|
||||
|
||||
def handleTreeNode(node: Tree): Unit = {
|
||||
def handleMacroExpansion(original: Tree): Unit = {
|
||||
// Some macros seem to have themselves registered as original tree.
|
||||
// In this case, we only need to handle the children of the original tree,
|
||||
// because we already handled the expanded tree.
|
||||
// See https://issues.scala-lang.org/browse/SI-8486
|
||||
if(original == node) original.children.foreach(handleTreeNode)
|
||||
else original.foreach(handleTreeNode)
|
||||
}
|
||||
def handleTreeNode(node: Tree): Unit = {
|
||||
def handleMacroExpansion(original: Tree): Unit = {
|
||||
// Some macros seem to have themselves registered as original tree.
|
||||
// In this case, we only need to handle the children of the original tree,
|
||||
// because we already handled the expanded tree.
|
||||
// See https://issues.scala-lang.org/browse/SI-8486
|
||||
if (original == node) original.children.foreach(handleTreeNode)
|
||||
else original.foreach(handleTreeNode)
|
||||
}
|
||||
|
||||
def handleClassicTreeNode(node: Tree): Unit = node match {
|
||||
case _: DefTree | _: Template => ()
|
||||
// turns out that Import node has a TermSymbol associated with it
|
||||
// I (Grzegorz) tried to understand why it's there and what does it represent but
|
||||
// that logic was introduced in 2005 without any justification I'll just ignore the
|
||||
// import node altogether and just process the selectors in the import node
|
||||
case Import(_, selectors: List[ImportSelector]) =>
|
||||
def usedNameInImportSelector(name: Name): Unit =
|
||||
if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString
|
||||
selectors foreach { selector =>
|
||||
usedNameInImportSelector(selector.name)
|
||||
usedNameInImportSelector(selector.rename)
|
||||
}
|
||||
// TODO: figure out whether we should process the original tree or walk the type
|
||||
// the argument for processing the original tree: we process what user wrote
|
||||
// the argument for processing the type: we catch all transformations that typer applies
|
||||
// to types but that might be a bad thing because it might expand aliases eagerly which
|
||||
// not what we need
|
||||
case t: TypeTree if t.original != null =>
|
||||
t.original.foreach(handleTreeNode)
|
||||
case t if t.hasSymbol && eligibleAsUsedName(t.symbol) =>
|
||||
addSymbol(t.symbol)
|
||||
case _ => ()
|
||||
}
|
||||
def handleClassicTreeNode(node: Tree): Unit = node match {
|
||||
case _: DefTree | _: Template => ()
|
||||
// turns out that Import node has a TermSymbol associated with it
|
||||
// I (Grzegorz) tried to understand why it's there and what does it represent but
|
||||
// that logic was introduced in 2005 without any justification I'll just ignore the
|
||||
// import node altogether and just process the selectors in the import node
|
||||
case Import(_, selectors: List[ImportSelector]) =>
|
||||
def usedNameInImportSelector(name: Name): Unit =
|
||||
if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString
|
||||
selectors foreach { selector =>
|
||||
usedNameInImportSelector(selector.name)
|
||||
usedNameInImportSelector(selector.rename)
|
||||
}
|
||||
// TODO: figure out whether we should process the original tree or walk the type
|
||||
// the argument for processing the original tree: we process what user wrote
|
||||
// the argument for processing the type: we catch all transformations that typer applies
|
||||
// to types but that might be a bad thing because it might expand aliases eagerly which
|
||||
// not what we need
|
||||
case t: TypeTree if t.original != null =>
|
||||
t.original.foreach(handleTreeNode)
|
||||
case t if t.hasSymbol && eligibleAsUsedName(t.symbol) =>
|
||||
addSymbol(t.symbol)
|
||||
case _ => ()
|
||||
}
|
||||
|
||||
node match {
|
||||
case MacroExpansionOf(original) =>
|
||||
handleClassicTreeNode(node)
|
||||
handleMacroExpansion(original)
|
||||
case _ =>
|
||||
handleClassicTreeNode(node)
|
||||
}
|
||||
}
|
||||
node match {
|
||||
case MacroExpansionOf(original) =>
|
||||
handleClassicTreeNode(node)
|
||||
handleMacroExpansion(original)
|
||||
case _ =>
|
||||
handleClassicTreeNode(node)
|
||||
}
|
||||
}
|
||||
|
||||
tree.foreach(handleTreeNode)
|
||||
namesBuffer.toSet
|
||||
}
|
||||
tree.foreach(handleTreeNode)
|
||||
namesBuffer.toSet
|
||||
}
|
||||
|
||||
/**
|
||||
* Needed for compatibility with Scala 2.8 which doesn't define `tpnme`
|
||||
*/
|
||||
private object tpnme {
|
||||
val EMPTY = nme.EMPTY.toTypeName
|
||||
val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName
|
||||
}
|
||||
|
||||
/**
|
||||
* Needed for compatibility with Scala 2.8 which doesn't define `tpnme`
|
||||
*/
|
||||
private object tpnme {
|
||||
val EMPTY = nme.EMPTY.toTypeName
|
||||
val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName
|
||||
}
|
||||
private def eligibleAsUsedName(symbol: Symbol): Boolean = {
|
||||
def emptyName(name: Name): Boolean = name match {
|
||||
case nme.EMPTY | nme.EMPTY_PACKAGE_NAME | tpnme.EMPTY | tpnme.EMPTY_PACKAGE_NAME => true
|
||||
case _ => false
|
||||
}
|
||||
|
||||
private def eligibleAsUsedName(symbol: Symbol): Boolean = {
|
||||
def emptyName(name: Name): Boolean = name match {
|
||||
case nme.EMPTY | nme.EMPTY_PACKAGE_NAME | tpnme.EMPTY | tpnme.EMPTY_PACKAGE_NAME => true
|
||||
case _ => false
|
||||
}
|
||||
|
||||
(symbol != NoSymbol) &&
|
||||
!symbol.isSynthetic &&
|
||||
!emptyName(symbol.name)
|
||||
}
|
||||
(symbol != NoSymbol) &&
|
||||
!symbol.isSynthetic &&
|
||||
!emptyName(symbol.name)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,41 +11,37 @@ import java.io.File
|
|||
/**
|
||||
* Contains utility methods for looking up class files corresponding to Symbols.
|
||||
*/
|
||||
abstract class LocateClassFile extends Compat
|
||||
{
|
||||
val global: CallbackGlobal
|
||||
import global._
|
||||
abstract class LocateClassFile extends Compat {
|
||||
val global: CallbackGlobal
|
||||
import global._
|
||||
|
||||
private[this] final val classSeparator = '.'
|
||||
protected def classFile(sym: Symbol): Option[(AbstractFile, String, Boolean)] =
|
||||
// package can never have a corresponding class file; this test does not
|
||||
// catch package objects (that do not have this flag set)
|
||||
if (sym hasFlag scala.tools.nsc.symtab.Flags.PACKAGE) None else
|
||||
{
|
||||
import scala.tools.nsc.symtab.Flags
|
||||
val name = flatname(sym, classSeparator) + moduleSuffix(sym)
|
||||
findClass(name).map { case (file,inOut) => (file, name,inOut) } orElse {
|
||||
if(isTopLevelModule(sym))
|
||||
{
|
||||
val linked = sym.companionClass
|
||||
if(linked == NoSymbol)
|
||||
None
|
||||
else
|
||||
classFile(linked)
|
||||
}
|
||||
else
|
||||
None
|
||||
}
|
||||
}
|
||||
private def flatname(s: Symbol, separator: Char) =
|
||||
atPhase(currentRun.flattenPhase.next) { s fullName separator }
|
||||
private[this] final val classSeparator = '.'
|
||||
protected def classFile(sym: Symbol): Option[(AbstractFile, String, Boolean)] =
|
||||
// package can never have a corresponding class file; this test does not
|
||||
// catch package objects (that do not have this flag set)
|
||||
if (sym hasFlag scala.tools.nsc.symtab.Flags.PACKAGE) None else {
|
||||
import scala.tools.nsc.symtab.Flags
|
||||
val name = flatname(sym, classSeparator) + moduleSuffix(sym)
|
||||
findClass(name).map { case (file, inOut) => (file, name, inOut) } orElse {
|
||||
if (isTopLevelModule(sym)) {
|
||||
val linked = sym.companionClass
|
||||
if (linked == NoSymbol)
|
||||
None
|
||||
else
|
||||
classFile(linked)
|
||||
} else
|
||||
None
|
||||
}
|
||||
}
|
||||
private def flatname(s: Symbol, separator: Char) =
|
||||
atPhase(currentRun.flattenPhase.next) { s fullName separator }
|
||||
|
||||
protected def isTopLevelModule(sym: Symbol): Boolean =
|
||||
atPhase (currentRun.picklerPhase.next) {
|
||||
sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
|
||||
}
|
||||
protected def className(s: Symbol, sep: Char, dollarRequired: Boolean): String =
|
||||
flatname(s, sep) + (if(dollarRequired) "$" else "")
|
||||
protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File =
|
||||
new File(outputDirectory, className(s, File.separatorChar, separatorRequired) + ".class")
|
||||
protected def isTopLevelModule(sym: Symbol): Boolean =
|
||||
atPhase(currentRun.picklerPhase.next) {
|
||||
sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
|
||||
}
|
||||
protected def className(s: Symbol, sep: Char, dollarRequired: Boolean): String =
|
||||
flatname(s, sep) + (if (dollarRequired) "$" else "")
|
||||
protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File =
|
||||
new File(outputDirectory, className(s, File.separatorChar, separatorRequired) + ".class")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,9 +3,8 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
object Log
|
||||
{
|
||||
def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg))
|
||||
def settingsError(log: xsbti.Logger): String => Unit =
|
||||
s => log.error(Message(s))
|
||||
object Log {
|
||||
def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg))
|
||||
def settingsError(log: xsbti.Logger): String => Unit =
|
||||
s => log.error(Message(s))
|
||||
}
|
||||
|
|
@ -3,7 +3,6 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
object Message
|
||||
{
|
||||
def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s }
|
||||
object Message {
|
||||
def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s }
|
||||
}
|
||||
|
|
@ -3,75 +3,66 @@
|
|||
*/
|
||||
package xsbt
|
||||
|
||||
import xsbti.Logger
|
||||
import Log.debug
|
||||
import xsbti.Logger
|
||||
import Log.debug
|
||||
|
||||
class ScaladocInterface
|
||||
{
|
||||
def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(args, log, delegate)).run
|
||||
class ScaladocInterface {
|
||||
def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(args, log, delegate)).run
|
||||
}
|
||||
private class Runner(args: Array[String], log: Logger, delegate: xsbti.Reporter)
|
||||
{
|
||||
import scala.tools.nsc.{doc, Global, reporters}
|
||||
import reporters.Reporter
|
||||
val docSettings: doc.Settings = new doc.Settings(Log.settingsError(log))
|
||||
val command = Command(args.toList, docSettings)
|
||||
val reporter = DelegatingReporter(docSettings, delegate)
|
||||
def noErrors = !reporter.hasErrors && command.ok
|
||||
private class Runner(args: Array[String], log: Logger, delegate: xsbti.Reporter) {
|
||||
import scala.tools.nsc.{ doc, Global, reporters }
|
||||
import reporters.Reporter
|
||||
val docSettings: doc.Settings = new doc.Settings(Log.settingsError(log))
|
||||
val command = Command(args.toList, docSettings)
|
||||
val reporter = DelegatingReporter(docSettings, delegate)
|
||||
def noErrors = !reporter.hasErrors && command.ok
|
||||
|
||||
import forScope._
|
||||
def run()
|
||||
{
|
||||
debug(log, "Calling Scaladoc with arguments:\n\t" + args.mkString("\n\t"))
|
||||
if(noErrors)
|
||||
{
|
||||
import doc._ // 2.8 trunk and Beta1-RC4 have doc.DocFactory. For other Scala versions, the next line creates forScope.DocFactory
|
||||
val processor = new DocFactory(reporter, docSettings)
|
||||
processor.document(command.files)
|
||||
}
|
||||
reporter.printSummary()
|
||||
if(!noErrors) throw new InterfaceCompileFailed(args, reporter.problems, "Scaladoc generation failed")
|
||||
}
|
||||
import forScope._
|
||||
def run() {
|
||||
debug(log, "Calling Scaladoc with arguments:\n\t" + args.mkString("\n\t"))
|
||||
if (noErrors) {
|
||||
import doc._ // 2.8 trunk and Beta1-RC4 have doc.DocFactory. For other Scala versions, the next line creates forScope.DocFactory
|
||||
val processor = new DocFactory(reporter, docSettings)
|
||||
processor.document(command.files)
|
||||
}
|
||||
reporter.printSummary()
|
||||
if (!noErrors) throw new InterfaceCompileFailed(args, reporter.problems, "Scaladoc generation failed")
|
||||
}
|
||||
|
||||
object forScope
|
||||
{
|
||||
class DocFactory(reporter: Reporter, docSettings: doc.Settings) // 2.7 compatibility
|
||||
{
|
||||
// see https://github.com/paulp/scala-full/commit/649823703a574641407d75d5c073be325ea31307
|
||||
trait GlobalCompat
|
||||
{
|
||||
def onlyPresentation = false
|
||||
object forScope {
|
||||
class DocFactory(reporter: Reporter, docSettings: doc.Settings) // 2.7 compatibility
|
||||
{
|
||||
// see https://github.com/paulp/scala-full/commit/649823703a574641407d75d5c073be325ea31307
|
||||
trait GlobalCompat {
|
||||
def onlyPresentation = false
|
||||
|
||||
def forScaladoc = false
|
||||
}
|
||||
def forScaladoc = false
|
||||
}
|
||||
|
||||
object compiler extends Global(command.settings, reporter) with GlobalCompat
|
||||
{
|
||||
override def onlyPresentation = true
|
||||
override def forScaladoc = true
|
||||
class DefaultDocDriver // 2.8 source compatibility
|
||||
{
|
||||
assert(false)
|
||||
def process(units: Iterator[CompilationUnit]) = error("for 2.8 compatibility only")
|
||||
}
|
||||
}
|
||||
def document(ignore: Seq[String])
|
||||
{
|
||||
import compiler._
|
||||
val run = new Run
|
||||
run compile command.files
|
||||
object compiler extends Global(command.settings, reporter) with GlobalCompat {
|
||||
override def onlyPresentation = true
|
||||
override def forScaladoc = true
|
||||
class DefaultDocDriver // 2.8 source compatibility
|
||||
{
|
||||
assert(false)
|
||||
def process(units: Iterator[CompilationUnit]) = error("for 2.8 compatibility only")
|
||||
}
|
||||
}
|
||||
def document(ignore: Seq[String]) {
|
||||
import compiler._
|
||||
val run = new Run
|
||||
run compile command.files
|
||||
|
||||
val generator =
|
||||
{
|
||||
import doc._
|
||||
new DefaultDocDriver
|
||||
{
|
||||
lazy val global: compiler.type = compiler
|
||||
lazy val settings = docSettings
|
||||
}
|
||||
}
|
||||
generator.process(run.units)
|
||||
}
|
||||
}
|
||||
}
|
||||
val generator =
|
||||
{
|
||||
import doc._
|
||||
new DefaultDocDriver {
|
||||
lazy val global: compiler.type = compiler
|
||||
lazy val settings = docSettings
|
||||
}
|
||||
}
|
||||
generator.process(run.units)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -6,60 +6,62 @@ package compiler
|
|||
|
||||
import java.io.File
|
||||
|
||||
object ComponentCompiler
|
||||
{
|
||||
val xsbtiID = "xsbti"
|
||||
val srcExtension = "-src"
|
||||
val binSeparator = "-bin_"
|
||||
val compilerInterfaceID = "compiler-interface"
|
||||
val compilerInterfaceSrcID = compilerInterfaceID + srcExtension
|
||||
val javaVersion = System.getProperty("java.class.version")
|
||||
object ComponentCompiler {
|
||||
val xsbtiID = "xsbti"
|
||||
val srcExtension = "-src"
|
||||
val binSeparator = "-bin_"
|
||||
val compilerInterfaceID = "compiler-interface"
|
||||
val compilerInterfaceSrcID = compilerInterfaceID + srcExtension
|
||||
val javaVersion = System.getProperty("java.class.version")
|
||||
|
||||
def interfaceProvider(manager: ComponentManager): CompilerInterfaceProvider = new CompilerInterfaceProvider
|
||||
{
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File =
|
||||
{
|
||||
// this is the instance used to compile the interface component
|
||||
val componentCompiler = new ComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), manager)
|
||||
log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version)
|
||||
componentCompiler(compilerInterfaceID)
|
||||
}
|
||||
}
|
||||
def interfaceProvider(manager: ComponentManager): CompilerInterfaceProvider = new CompilerInterfaceProvider {
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File =
|
||||
{
|
||||
// this is the instance used to compile the interface component
|
||||
val componentCompiler = new ComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), manager)
|
||||
log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version)
|
||||
componentCompiler(compilerInterfaceID)
|
||||
}
|
||||
}
|
||||
}
|
||||
/** This class provides source components compiled with the provided RawCompiler.
|
||||
* The compiled classes are cached using the provided component manager according
|
||||
* to the actualVersion field of the RawCompiler.*/
|
||||
class ComponentCompiler(compiler: RawCompiler, manager: ComponentManager)
|
||||
{
|
||||
import ComponentCompiler._
|
||||
def apply(id: String): File =
|
||||
try { getPrecompiled(id) }
|
||||
catch { case _: InvalidComponent => getLocallyCompiled(id) }
|
||||
/**
|
||||
* This class provides source components compiled with the provided RawCompiler.
|
||||
* The compiled classes are cached using the provided component manager according
|
||||
* to the actualVersion field of the RawCompiler.
|
||||
*/
|
||||
class ComponentCompiler(compiler: RawCompiler, manager: ComponentManager) {
|
||||
import ComponentCompiler._
|
||||
def apply(id: String): File =
|
||||
try { getPrecompiled(id) }
|
||||
catch { case _: InvalidComponent => getLocallyCompiled(id) }
|
||||
|
||||
/** Gets the precompiled (distributed with sbt) component with the given 'id'
|
||||
* If the component has not been precompiled, this throws InvalidComponent. */
|
||||
def getPrecompiled(id: String): File = manager.file( binaryID(id, false) )(IfMissing.Fail)
|
||||
/** Get the locally compiled component with the given 'id' or compiles it if it has not been compiled yet.
|
||||
* If the component does not exist, this throws InvalidComponent. */
|
||||
def getLocallyCompiled(id: String): File =
|
||||
{
|
||||
val binID = binaryID(id, true)
|
||||
manager.file(binID)( new IfMissing.Define(true, compileAndInstall(id, binID)) )
|
||||
}
|
||||
def clearCache(id: String): Unit = manager.clearCache(binaryID(id, true))
|
||||
protected def binaryID(id: String, withJavaVersion: Boolean) =
|
||||
{
|
||||
val base = id + binSeparator + compiler.scalaInstance.actualVersion
|
||||
if(withJavaVersion) base + "__" + javaVersion else base
|
||||
}
|
||||
protected def compileAndInstall(id: String, binID: String)
|
||||
{
|
||||
val srcID = id + srcExtension
|
||||
IO.withTemporaryDirectory { binaryDirectory =>
|
||||
val targetJar = new File(binaryDirectory, id + ".jar")
|
||||
val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail)
|
||||
AnalyzingCompiler.compileSources(manager.files(srcID)(IfMissing.Fail), targetJar, xsbtiJars, id, compiler, manager.log)
|
||||
manager.define(binID, Seq(targetJar))
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Gets the precompiled (distributed with sbt) component with the given 'id'
|
||||
* If the component has not been precompiled, this throws InvalidComponent.
|
||||
*/
|
||||
def getPrecompiled(id: String): File = manager.file(binaryID(id, false))(IfMissing.Fail)
|
||||
/**
|
||||
* Get the locally compiled component with the given 'id' or compiles it if it has not been compiled yet.
|
||||
* If the component does not exist, this throws InvalidComponent.
|
||||
*/
|
||||
def getLocallyCompiled(id: String): File =
|
||||
{
|
||||
val binID = binaryID(id, true)
|
||||
manager.file(binID)(new IfMissing.Define(true, compileAndInstall(id, binID)))
|
||||
}
|
||||
def clearCache(id: String): Unit = manager.clearCache(binaryID(id, true))
|
||||
protected def binaryID(id: String, withJavaVersion: Boolean) =
|
||||
{
|
||||
val base = id + binSeparator + compiler.scalaInstance.actualVersion
|
||||
if (withJavaVersion) base + "__" + javaVersion else base
|
||||
}
|
||||
protected def compileAndInstall(id: String, binID: String) {
|
||||
val srcID = id + srcExtension
|
||||
IO.withTemporaryDirectory { binaryDirectory =>
|
||||
val targetJar = new File(binaryDirectory, id + ".jar")
|
||||
val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail)
|
||||
AnalyzingCompiler.compileSources(manager.files(srcID)(IfMissing.Fail), targetJar, xsbtiJars, id, compiler, manager.log)
|
||||
manager.define(binID, Seq(targetJar))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -4,147 +4,142 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import xsbti.api.{Source, Compilation}
|
||||
import xsbti.{Position,Problem,Severity}
|
||||
import xsbti.compile.{CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput}
|
||||
import MultipleOutput.OutputGroup
|
||||
import java.io.File
|
||||
import sbinary._
|
||||
import DefaultProtocol._
|
||||
import DefaultProtocol.tuple2Format
|
||||
import Logger.{m2o, position, problem}
|
||||
import Relations.{Source => RSource, SourceDependencies}
|
||||
import xsbti.api.{ Source, Compilation }
|
||||
import xsbti.{ Position, Problem, Severity }
|
||||
import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput }
|
||||
import MultipleOutput.OutputGroup
|
||||
import java.io.File
|
||||
import sbinary._
|
||||
import DefaultProtocol._
|
||||
import DefaultProtocol.tuple2Format
|
||||
import Logger.{ m2o, position, problem }
|
||||
import Relations.{ Source => RSource, SourceDependencies }
|
||||
|
||||
@deprecated("Replaced by TextAnalysisFormat. OK to remove in 0.14.", since="0.13.1")
|
||||
object AnalysisFormats
|
||||
{
|
||||
type RFF = Relation[File, File]
|
||||
type RFS = Relation[File, String]
|
||||
@deprecated("Replaced by TextAnalysisFormat. OK to remove in 0.14.", since = "0.13.1")
|
||||
object AnalysisFormats {
|
||||
type RFF = Relation[File, File]
|
||||
type RFS = Relation[File, String]
|
||||
|
||||
import System.{ currentTimeMillis => now }
|
||||
val start = now
|
||||
def time(label: String) =
|
||||
{
|
||||
val end = now
|
||||
println(label + ": " + (end - start) + " ms")
|
||||
}
|
||||
|
||||
import System.{currentTimeMillis => now}
|
||||
val start = now
|
||||
def time(label: String) =
|
||||
{
|
||||
val end = now
|
||||
println(label + ": " + (end - start) + " ms")
|
||||
}
|
||||
def debug[T](label: String, f: Format[T]): Format[T] = new Format[T] {
|
||||
def reads(in: Input): T =
|
||||
{
|
||||
time(label + ".read.start")
|
||||
val r = f.reads(in)
|
||||
time(label + ".read.end")
|
||||
r
|
||||
}
|
||||
def writes(out: Output, t: T) {
|
||||
time(label + ".write.start")
|
||||
f.writes(out, t)
|
||||
time(label + ".write.end")
|
||||
}
|
||||
}
|
||||
|
||||
def debug[T](label: String, f: Format[T]): Format[T] = new Format[T]
|
||||
{
|
||||
def reads(in: Input): T =
|
||||
{
|
||||
time(label + ".read.start")
|
||||
val r = f.reads(in)
|
||||
time(label + ".read.end")
|
||||
r
|
||||
}
|
||||
def writes(out: Output, t: T)
|
||||
{
|
||||
time(label + ".write.start")
|
||||
f.writes(out,t)
|
||||
time(label + ".write.end")
|
||||
}
|
||||
}
|
||||
implicit def analysisFormat(implicit stampsF: Format[Stamps], apisF: Format[APIs], relationsF: Format[Relations],
|
||||
infosF: Format[SourceInfos], compilationsF: Format[Compilations]): Format[Analysis] =
|
||||
asProduct5(Analysis.Empty.copy _)(a => (a.stamps, a.apis, a.relations, a.infos, a.compilations))(stampsF, apisF, relationsF, infosF, compilationsF)
|
||||
|
||||
implicit def analysisFormat(implicit stampsF: Format[Stamps], apisF: Format[APIs], relationsF: Format[Relations],
|
||||
infosF: Format[SourceInfos], compilationsF: Format[Compilations]): Format[Analysis] =
|
||||
asProduct5( Analysis.Empty.copy _)( a => (a.stamps, a.apis, a.relations, a.infos, a.compilations))(stampsF, apisF, relationsF, infosF, compilationsF)
|
||||
implicit def infosFormat(implicit infoF: Format[Map[File, SourceInfo]]): Format[SourceInfos] =
|
||||
wrap[SourceInfos, Map[File, SourceInfo]](_.allInfos, SourceInfos.make _)
|
||||
|
||||
implicit def infosFormat(implicit infoF: Format[Map[File, SourceInfo]]): Format[SourceInfos] =
|
||||
wrap[SourceInfos, Map[File, SourceInfo]]( _.allInfos, SourceInfos.make _)
|
||||
implicit def infoFormat: Format[SourceInfo] =
|
||||
wrap[SourceInfo, (Seq[Problem], Seq[Problem])](si => (si.reportedProblems, si.unreportedProblems), { case (a, b) => SourceInfos.makeInfo(a, b) })
|
||||
|
||||
implicit def infoFormat: Format[SourceInfo] =
|
||||
wrap[SourceInfo, (Seq[Problem],Seq[Problem])](si => (si.reportedProblems, si.unreportedProblems), { case (a,b) => SourceInfos.makeInfo(a,b)})
|
||||
implicit def problemFormat: Format[Problem] = asProduct4(problem _)(p => (p.category, p.position, p.message, p.severity))
|
||||
|
||||
implicit def problemFormat: Format[Problem] = asProduct4(problem _)( p => (p.category, p.position, p.message, p.severity))
|
||||
implicit def compilationsFormat: Format[Compilations] = {
|
||||
implicit val compilationSeqF = seqFormat(xsbt.api.CompilationFormat)
|
||||
wrap[Compilations, Seq[Compilation]](_.allCompilations, Compilations.make _)
|
||||
}
|
||||
|
||||
implicit def compilationsFormat: Format[Compilations] = {
|
||||
implicit val compilationSeqF = seqFormat(xsbt.api.CompilationFormat)
|
||||
wrap[Compilations, Seq[Compilation]](_.allCompilations, Compilations.make _)
|
||||
}
|
||||
implicit def positionFormat: Format[Position] =
|
||||
asProduct7(position _)(p => (m2o(p.line), p.lineContent, m2o(p.offset), m2o(p.pointer), m2o(p.pointerSpace), m2o(p.sourcePath), m2o(p.sourceFile)))
|
||||
|
||||
implicit def positionFormat: Format[Position] =
|
||||
asProduct7( position _ )( p => (m2o(p.line), p.lineContent, m2o(p.offset), m2o(p.pointer), m2o(p.pointerSpace), m2o(p.sourcePath), m2o(p.sourceFile)))
|
||||
implicit val fileOptionFormat: Format[Option[File]] = optionsAreFormat[File](fileFormat)
|
||||
implicit val integerFormat: Format[Integer] = wrap[Integer, Int](_.toInt, Integer.valueOf)
|
||||
implicit val severityFormat: Format[Severity] =
|
||||
wrap[Severity, Byte](_.ordinal.toByte, b => Severity.values.apply(b.toInt))
|
||||
|
||||
implicit val fileOptionFormat: Format[Option[File]] = optionsAreFormat[File](fileFormat)
|
||||
implicit val integerFormat: Format[Integer] = wrap[Integer, Int](_.toInt, Integer.valueOf)
|
||||
implicit val severityFormat: Format[Severity] =
|
||||
wrap[Severity, Byte]( _.ordinal.toByte, b => Severity.values.apply(b.toInt) )
|
||||
implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] =
|
||||
asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]((a, b, c, d, e) => new CompileSetup(a, b, c, d, e))(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF)
|
||||
|
||||
implicit val outputGroupFormat: Format[OutputGroup] =
|
||||
asProduct2((a: File, b: File) => new OutputGroup { def sourceDirectory = a; def outputDirectory = b }) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat)
|
||||
implicit val multipleOutputFormat: Format[MultipleOutput] =
|
||||
wrap[MultipleOutput, Array[OutputGroup]](
|
||||
(_.outputGroups),
|
||||
{ groups => new MultipleOutput { def outputGroups = groups } }
|
||||
)
|
||||
implicit val singleOutputFormat: Format[SingleOutput] =
|
||||
wrap[SingleOutput, File](
|
||||
(_.outputDirectory),
|
||||
{ out => new SingleOutput { def outputDirectory = out } }
|
||||
)(fileFormat)
|
||||
implicit val outputFormat: Format[APIOutput] = asUnion(singleOutputFormat, multipleOutputFormat)
|
||||
|
||||
implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] =
|
||||
asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]( (a,b,c,d,e) => new CompileSetup(a,b,c,d,e) )(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF)
|
||||
implicit def stampsFormat(implicit prodF: Format[Map[File, Stamp]], srcF: Format[Map[File, Stamp]], binF: Format[Map[File, Stamp]], nameF: Format[Map[File, String]]): Format[Stamps] =
|
||||
asProduct4(Stamps.apply _)(s => (s.products, s.sources, s.binaries, s.classNames))(prodF, srcF, binF, nameF)
|
||||
|
||||
implicit val outputGroupFormat: Format[OutputGroup] =
|
||||
asProduct2((a: File,b: File) => new OutputGroup{def sourceDirectory = a; def outputDirectory = b}) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat)
|
||||
implicit val multipleOutputFormat: Format[MultipleOutput] =
|
||||
wrap[MultipleOutput, Array[OutputGroup]](
|
||||
(_.outputGroups),
|
||||
{ groups => new MultipleOutput { def outputGroups = groups } }
|
||||
)
|
||||
implicit val singleOutputFormat: Format[SingleOutput] =
|
||||
wrap[SingleOutput, File](
|
||||
(_.outputDirectory),
|
||||
{out => new SingleOutput{def outputDirectory = out}}
|
||||
)(fileFormat)
|
||||
implicit val outputFormat: Format[APIOutput] = asUnion(singleOutputFormat, multipleOutputFormat)
|
||||
implicit def stampFormat(implicit hashF: Format[Hash], modF: Format[LastModified], existsF: Format[Exists]): Format[Stamp] =
|
||||
asUnion(hashF, modF, existsF)
|
||||
|
||||
implicit def stampsFormat(implicit prodF: Format[Map[File, Stamp]], srcF: Format[Map[File, Stamp]], binF: Format[Map[File, Stamp]], nameF: Format[Map[File, String]]): Format[Stamps] =
|
||||
asProduct4( Stamps.apply _ )( s => (s.products, s.sources, s.binaries, s.classNames) )(prodF, srcF, binF, nameF)
|
||||
implicit def apisFormat(implicit internalF: Format[Map[File, Source]], externalF: Format[Map[String, Source]]): Format[APIs] =
|
||||
asProduct2(APIs.apply _)(as => (as.internal, as.external))(internalF, externalF)
|
||||
|
||||
implicit def stampFormat(implicit hashF: Format[Hash], modF: Format[LastModified], existsF: Format[Exists]): Format[Stamp] =
|
||||
asUnion(hashF, modF, existsF)
|
||||
implicit def relationsFormat(implicit prodF: Format[RFF], binF: Format[RFF], directF: Format[RSource], inheritedF: Format[RSource], memberRefF: Format[SourceDependencies], inheritanceF: Format[SourceDependencies], csF: Format[RFS], namesF: Format[RFS]): Format[Relations] =
|
||||
{
|
||||
def makeRelation(srcProd: RFF, binaryDep: RFF, direct: RSource, publicInherited: RSource,
|
||||
memberRef: SourceDependencies, inheritance: SourceDependencies, classes: RFS,
|
||||
nameHashing: Boolean, names: RFS): Relations = if (nameHashing) {
|
||||
def isEmpty(sourceDependencies: RSource): Boolean =
|
||||
sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
|
||||
// we check direct dependencies only because publicInherited dependencies are subset of direct
|
||||
assert(isEmpty(direct), "Direct dependencies are not empty but `nameHashing` flag is enabled.")
|
||||
Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names)
|
||||
} else {
|
||||
def isEmpty(sourceDependencies: SourceDependencies): Boolean =
|
||||
sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
|
||||
// we check memberRef dependencies only because inheritance dependencies are subset of memberRef
|
||||
assert(isEmpty(memberRef), "Direct dependencies are not empty but `nameHashing` flag is enabled.")
|
||||
Relations.make(srcProd, binaryDep, direct, publicInherited, classes)
|
||||
}
|
||||
asProduct9[Relations, RFF, RFF, RSource, RSource, SourceDependencies, SourceDependencies, RFS, Boolean, RFS]((a, b, c, d, e, f, g, h, i) => makeRelation(a, b, c, d, e, f, g, h, i))(
|
||||
rs => (rs.srcProd, rs.binaryDep, rs.direct, rs.publicInherited, rs.memberRef, rs.inheritance, rs.classes, rs.nameHashing, rs.names))(
|
||||
prodF, binF, directF, inheritedF, memberRefF, inheritanceF, csF, implicitly[Format[Boolean]], namesF)
|
||||
}
|
||||
|
||||
implicit def apisFormat(implicit internalF: Format[Map[File, Source]], externalF: Format[Map[String, Source]]): Format[APIs] =
|
||||
asProduct2( APIs.apply _)( as => (as.internal, as.external) )(internalF, externalF)
|
||||
implicit def relationsSourceFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[RSource] =
|
||||
asProduct2[RSource, RFF, RFS]((a, b) => Relations.makeSource(a, b))(rs => (rs.internal, rs.external))
|
||||
|
||||
implicit def relationsFormat(implicit prodF: Format[RFF], binF: Format[RFF], directF: Format[RSource], inheritedF: Format[RSource], memberRefF: Format[SourceDependencies], inheritanceF: Format[SourceDependencies], csF: Format[RFS], namesF: Format[RFS]): Format[Relations] =
|
||||
{
|
||||
def makeRelation(srcProd: RFF, binaryDep: RFF, direct: RSource, publicInherited: RSource,
|
||||
memberRef: SourceDependencies, inheritance: SourceDependencies, classes: RFS,
|
||||
nameHashing: Boolean, names: RFS): Relations = if (nameHashing) {
|
||||
def isEmpty(sourceDependencies: RSource): Boolean =
|
||||
sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
|
||||
// we check direct dependencies only because publicInherited dependencies are subset of direct
|
||||
assert(isEmpty(direct), "Direct dependencies are not empty but `nameHashing` flag is enabled.")
|
||||
Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names)
|
||||
} else {
|
||||
def isEmpty(sourceDependencies: SourceDependencies): Boolean =
|
||||
sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty
|
||||
// we check memberRef dependencies only because inheritance dependencies are subset of memberRef
|
||||
assert(isEmpty(memberRef), "Direct dependencies are not empty but `nameHashing` flag is enabled.")
|
||||
Relations.make(srcProd, binaryDep, direct, publicInherited, classes)
|
||||
}
|
||||
asProduct9[Relations, RFF, RFF, RSource, RSource, SourceDependencies, SourceDependencies, RFS, Boolean, RFS]( (a,b,c,d,e,f,g,h,i) =>makeRelation(a,b,c,d,e,f,g,h,i) )(
|
||||
rs => (rs.srcProd, rs.binaryDep, rs.direct, rs.publicInherited, rs.memberRef, rs.inheritance, rs.classes, rs.nameHashing, rs.names) )(
|
||||
prodF, binF, directF, inheritedF, memberRefF, inheritanceF, csF, implicitly[Format[Boolean]], namesF)
|
||||
}
|
||||
implicit def relationsSourceDependenciesFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[SourceDependencies] =
|
||||
asProduct2[SourceDependencies, RFF, RFS]((a, b) => Relations.makeSourceDependencies(a, b))(rs => (rs.internal, rs.external))
|
||||
|
||||
implicit def relationsSourceFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File,String]]): Format[RSource] =
|
||||
asProduct2[RSource, RFF, RFS]( (a, b) => Relations.makeSource(a,b))( rs => (rs.internal, rs.external))
|
||||
implicit def relationFormat[A, B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A, B]] =
|
||||
asProduct2[Relation[A, B], Map[A, Set[B]], Map[B, Set[A]]](Relation.make _)(r => (r.forwardMap, r.reverseMap))(af, bf)
|
||||
|
||||
implicit def relationsSourceDependenciesFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File,String]]): Format[SourceDependencies] =
|
||||
asProduct2[SourceDependencies, RFF, RFS]( (a, b) => Relations.makeSourceDependencies(a,b))( rs => (rs.internal, rs.external))
|
||||
implicit val sourceFormat: Format[Source] = xsbt.api.SourceFormat
|
||||
|
||||
implicit def relationFormat[A,B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A,B]] =
|
||||
asProduct2[Relation[A,B], Map[A, Set[B]], Map[B, Set[A]]]( Relation.make _ )( r => (r.forwardMap, r.reverseMap) )(af, bf)
|
||||
implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
|
||||
// can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions]
|
||||
implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] =
|
||||
wrap[CompileOptions, (Seq[String], Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2))
|
||||
|
||||
implicit val sourceFormat: Format[Source] = xsbt.api.SourceFormat
|
||||
implicit val orderFormat: Format[CompileOrder] =
|
||||
{
|
||||
val values = CompileOrder.values
|
||||
wrap[CompileOrder, Int](_.ordinal, values)
|
||||
}
|
||||
implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x)
|
||||
|
||||
implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s))
|
||||
// can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions]
|
||||
implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] =
|
||||
wrap[CompileOptions, (Seq[String],Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2))
|
||||
|
||||
implicit val orderFormat: Format[CompileOrder] =
|
||||
{
|
||||
val values = CompileOrder.values
|
||||
wrap[CompileOrder, Int](_.ordinal, values)
|
||||
}
|
||||
implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x)
|
||||
|
||||
implicit def hashStampFormat: Format[Hash] = wrap[Hash, Array[Byte]](_.value, new Hash(_))
|
||||
implicit def lastModFormat: Format[LastModified] = wrap[LastModified, Long](_.value, new LastModified(_))
|
||||
implicit def existsFormat: Format[Exists] = wrap[Exists, Boolean](_.value, new Exists(_))
|
||||
implicit def hashStampFormat: Format[Hash] = wrap[Hash, Array[Byte]](_.value, new Hash(_))
|
||||
implicit def lastModFormat: Format[LastModified] = wrap[LastModified, Long](_.value, new LastModified(_))
|
||||
implicit def existsFormat: Format[Exists] = wrap[Exists, Boolean](_.value, new Exists(_))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,18 +4,17 @@
|
|||
package sbt
|
||||
package inc
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
object FileBasedStore
|
||||
{
|
||||
def apply(file: File): AnalysisStore = new AnalysisStore {
|
||||
def set(analysis: Analysis, setup: CompileSetup) {
|
||||
Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) }
|
||||
object FileBasedStore {
|
||||
def apply(file: File): AnalysisStore = new AnalysisStore {
|
||||
def set(analysis: Analysis, setup: CompileSetup) {
|
||||
Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) }
|
||||
}
|
||||
|
||||
def get(): Option[(Analysis, CompileSetup)] =
|
||||
try { Some(getUncaught()) } catch { case _: Exception => None }
|
||||
def getUncaught(): (Analysis, CompileSetup) =
|
||||
Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) }
|
||||
}
|
||||
def get(): Option[(Analysis, CompileSetup)] =
|
||||
try { Some(getUncaught()) } catch { case _: Exception => None }
|
||||
def getUncaught(): (Analysis, CompileSetup) =
|
||||
Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) }
|
||||
}
|
||||
}
|
||||
|
|
@ -2,465 +2,469 @@ package sbt
|
|||
package inc
|
||||
|
||||
import java.io._
|
||||
import sbt.{CompileSetup, Relation}
|
||||
import xsbti.api.{Compilation, Source}
|
||||
import xsbti.compile.{MultipleOutput, SingleOutput}
|
||||
import sbt.{ CompileSetup, Relation }
|
||||
import xsbti.api.{ Compilation, Source }
|
||||
import xsbti.compile.{ MultipleOutput, SingleOutput }
|
||||
import javax.xml.bind.DatatypeConverter
|
||||
|
||||
|
||||
// Very simple timer for timing repeated code sections.
|
||||
// TODO: Temporary. Remove once we've milked all available performance gains.
|
||||
private[inc] object FormatTimer {
|
||||
private val timers = scala.collection.mutable.Map[String, Long]()
|
||||
private val printTimings = "true" == System.getProperty("sbt.analysis.debug.timing")
|
||||
private val timers = scala.collection.mutable.Map[String, Long]()
|
||||
private val printTimings = "true" == System.getProperty("sbt.analysis.debug.timing")
|
||||
|
||||
def aggregate[T](key: String)(f: => T) = {
|
||||
val start = System.nanoTime()
|
||||
val ret = f
|
||||
val elapsed = System.nanoTime() - start
|
||||
timers.update(key, timers.getOrElseUpdate(key, 0) + elapsed)
|
||||
ret
|
||||
}
|
||||
def aggregate[T](key: String)(f: => T) = {
|
||||
val start = System.nanoTime()
|
||||
val ret = f
|
||||
val elapsed = System.nanoTime() - start
|
||||
timers.update(key, timers.getOrElseUpdate(key, 0) + elapsed)
|
||||
ret
|
||||
}
|
||||
|
||||
def time[T](key: String)(f: => T) = {
|
||||
val ret = aggregate(key)(f)
|
||||
close(key)
|
||||
ret
|
||||
}
|
||||
def time[T](key: String)(f: => T) = {
|
||||
val ret = aggregate(key)(f)
|
||||
close(key)
|
||||
ret
|
||||
}
|
||||
|
||||
def close(key: String) {
|
||||
if (printTimings) {
|
||||
println("[%s] %dms".format(key, timers.getOrElse(key, 0L) / 1000000))
|
||||
}
|
||||
timers.remove(key)
|
||||
}
|
||||
def close(key: String) {
|
||||
if (printTimings) {
|
||||
println("[%s] %dms".format(key, timers.getOrElse(key, 0L) / 1000000))
|
||||
}
|
||||
timers.remove(key)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class ReadException(s: String) extends Exception(s) {
|
||||
def this(expected: String, found: String) = this("Expected: %s. Found: %s.".format(expected, found))
|
||||
def this(expected: String, found: String) = this("Expected: %s. Found: %s.".format(expected, found))
|
||||
}
|
||||
|
||||
class EOFException extends ReadException("Unexpected EOF.")
|
||||
|
||||
|
||||
// A text-based serialization format for Analysis objects.
|
||||
// This code has been tuned for high performance, and therefore has non-idiomatic areas.
|
||||
// Please refrain from making changes that significantly degrade read/write performance on large analysis files.
|
||||
object TextAnalysisFormat {
|
||||
// Some types are not required for external inspection/manipulation of the analysis file,
|
||||
// and are complex to serialize as text. So we serialize them as base64-encoded sbinary-serialized blobs.
|
||||
// TODO: This is a big performance hit. Figure out a more efficient way to serialize API objects?
|
||||
import sbinary.DefaultProtocol.{immutableMapFormat, immutableSetFormat, StringFormat, tuple2Format}
|
||||
import AnalysisFormats._
|
||||
implicit val compilationF = xsbt.api.CompilationFormat
|
||||
// Some types are not required for external inspection/manipulation of the analysis file,
|
||||
// and are complex to serialize as text. So we serialize them as base64-encoded sbinary-serialized blobs.
|
||||
// TODO: This is a big performance hit. Figure out a more efficient way to serialize API objects?
|
||||
import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat, tuple2Format }
|
||||
import AnalysisFormats._
|
||||
implicit val compilationF = xsbt.api.CompilationFormat
|
||||
|
||||
def write(out: Writer, analysis: Analysis, setup: CompileSetup) {
|
||||
VersionF.write(out)
|
||||
// We start with writing compile setup which contains value of the `nameHashing`
|
||||
// flag that is needed to properly deserialize relations
|
||||
FormatTimer.time("write setup") { CompileSetupF.write(out, setup) }
|
||||
// Next we write relations because that's the part of greatest interest to external readers,
|
||||
// who can abort reading early once they're read them.
|
||||
FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) }
|
||||
FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) }
|
||||
FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) }
|
||||
FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) }
|
||||
FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) }
|
||||
out.flush()
|
||||
}
|
||||
|
||||
def read(in: BufferedReader): (Analysis, CompileSetup) = {
|
||||
VersionF.read(in)
|
||||
val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) }
|
||||
val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) }
|
||||
val stamps = FormatTimer.time("read stamps") { StampsF.read(in) }
|
||||
val apis = FormatTimer.time("read apis") { APIsF.read(in) }
|
||||
val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) }
|
||||
val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) }
|
||||
|
||||
(Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup)
|
||||
}
|
||||
|
||||
private[this] object VersionF {
|
||||
val currentVersion = "5"
|
||||
|
||||
def write(out: Writer) {
|
||||
out.write("format version: %s\n".format(currentVersion))
|
||||
}
|
||||
|
||||
private val versionPattern = """format version: (\w+)""".r
|
||||
def read(in: BufferedReader) {
|
||||
in.readLine() match {
|
||||
case versionPattern(version) => validateVersion(version)
|
||||
case s: String => throw new ReadException("\"format version: <version>\"", s)
|
||||
case null => throw new EOFException
|
||||
}
|
||||
}
|
||||
|
||||
def validateVersion(version: String) {
|
||||
// TODO: Support backwards compatibility?
|
||||
if (version != currentVersion) {
|
||||
throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion))
|
||||
}
|
||||
}
|
||||
def write(out: Writer, analysis: Analysis, setup: CompileSetup) {
|
||||
VersionF.write(out)
|
||||
// We start with writing compile setup which contains value of the `nameHashing`
|
||||
// flag that is needed to properly deserialize relations
|
||||
FormatTimer.time("write setup") { CompileSetupF.write(out, setup) }
|
||||
// Next we write relations because that's the part of greatest interest to external readers,
|
||||
// who can abort reading early once they're read them.
|
||||
FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) }
|
||||
FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) }
|
||||
FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) }
|
||||
FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) }
|
||||
FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) }
|
||||
out.flush()
|
||||
}
|
||||
|
||||
private[this] object RelationsF {
|
||||
object Headers {
|
||||
val srcProd = "products"
|
||||
val binaryDep = "binary dependencies"
|
||||
val directSrcDep = "direct source dependencies"
|
||||
val directExternalDep = "direct external dependencies"
|
||||
val internalSrcDepPI = "public inherited source dependencies"
|
||||
val externalDepPI = "public inherited external dependencies"
|
||||
val classes = "class names"
|
||||
def read(in: BufferedReader): (Analysis, CompileSetup) = {
|
||||
VersionF.read(in)
|
||||
val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) }
|
||||
val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) }
|
||||
val stamps = FormatTimer.time("read stamps") { StampsF.read(in) }
|
||||
val apis = FormatTimer.time("read apis") { APIsF.read(in) }
|
||||
val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) }
|
||||
val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) }
|
||||
|
||||
val memberRefInternalDep = "member reference internal dependencies"
|
||||
val memberRefExternalDep = "member reference external dependencies"
|
||||
val inheritanceInternalDep = "inheritance internal dependencies"
|
||||
val inheritanceExternalDep = "inheritance external dependencies"
|
||||
(Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup)
|
||||
}
|
||||
|
||||
val usedNames = "used names"
|
||||
}
|
||||
private[this] object VersionF {
|
||||
val currentVersion = "5"
|
||||
|
||||
def write(out: Writer, relations: Relations) {
|
||||
def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) {
|
||||
writeHeader(out, header)
|
||||
writeSize(out, rel.size)
|
||||
// We sort for ease of debugging and for more efficient reconstruction when reading.
|
||||
// Note that we don't share code with writeMap. Each is implemented more efficiently
|
||||
// than the shared code would be, and the difference is measurable on large analyses.
|
||||
rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) =>
|
||||
val kStr = k.toString
|
||||
vs.toSeq.sorted foreach { v =>
|
||||
out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
def write(out: Writer) {
|
||||
out.write("format version: %s\n".format(currentVersion))
|
||||
}
|
||||
|
||||
val nameHashing = relations.nameHashing
|
||||
writeRelation(Headers.srcProd, relations.srcProd)
|
||||
writeRelation(Headers.binaryDep, relations.binaryDep)
|
||||
private val versionPattern = """format version: (\w+)""".r
|
||||
def read(in: BufferedReader) {
|
||||
in.readLine() match {
|
||||
case versionPattern(version) => validateVersion(version)
|
||||
case s: String => throw new ReadException("\"format version: <version>\"", s)
|
||||
case null => throw new EOFException
|
||||
}
|
||||
}
|
||||
|
||||
val direct = if (nameHashing) Relations.emptySource else relations.direct
|
||||
val publicInherited = if (nameHashing)
|
||||
Relations.emptySource else relations.publicInherited
|
||||
def validateVersion(version: String) {
|
||||
// TODO: Support backwards compatibility?
|
||||
if (version != currentVersion) {
|
||||
throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val memberRef = if (nameHashing)
|
||||
relations.memberRef else Relations.emptySourceDependencies
|
||||
val inheritance = if (nameHashing)
|
||||
relations.inheritance else Relations.emptySourceDependencies
|
||||
val names = if (nameHashing) relations.names else Relation.empty[File, String]
|
||||
private[this] object RelationsF {
|
||||
object Headers {
|
||||
val srcProd = "products"
|
||||
val binaryDep = "binary dependencies"
|
||||
val directSrcDep = "direct source dependencies"
|
||||
val directExternalDep = "direct external dependencies"
|
||||
val internalSrcDepPI = "public inherited source dependencies"
|
||||
val externalDepPI = "public inherited external dependencies"
|
||||
val classes = "class names"
|
||||
|
||||
writeRelation(Headers.directSrcDep, direct.internal)
|
||||
writeRelation(Headers.directExternalDep, direct.external)
|
||||
writeRelation(Headers.internalSrcDepPI, publicInherited.internal)
|
||||
writeRelation(Headers.externalDepPI, publicInherited.external)
|
||||
val memberRefInternalDep = "member reference internal dependencies"
|
||||
val memberRefExternalDep = "member reference external dependencies"
|
||||
val inheritanceInternalDep = "inheritance internal dependencies"
|
||||
val inheritanceExternalDep = "inheritance external dependencies"
|
||||
|
||||
writeRelation(Headers.memberRefInternalDep, memberRef.internal)
|
||||
writeRelation(Headers.memberRefExternalDep, memberRef.external)
|
||||
writeRelation(Headers.inheritanceInternalDep, inheritance.internal)
|
||||
writeRelation(Headers.inheritanceExternalDep, inheritance.external)
|
||||
val usedNames = "used names"
|
||||
}
|
||||
|
||||
writeRelation(Headers.classes, relations.classes)
|
||||
writeRelation(Headers.usedNames, names)
|
||||
}
|
||||
def write(out: Writer, relations: Relations) {
|
||||
def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) {
|
||||
writeHeader(out, header)
|
||||
writeSize(out, rel.size)
|
||||
// We sort for ease of debugging and for more efficient reconstruction when reading.
|
||||
// Note that we don't share code with writeMap. Each is implemented more efficiently
|
||||
// than the shared code would be, and the difference is measurable on large analyses.
|
||||
rel.forwardMap.toSeq.sortBy(_._1).foreach {
|
||||
case (k, vs) =>
|
||||
val kStr = k.toString
|
||||
vs.toSeq.sorted foreach { v =>
|
||||
out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def read(in: BufferedReader, nameHashing: Boolean): Relations = {
|
||||
def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = {
|
||||
val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator
|
||||
// Reconstruct the forward map. This is more efficient than Relation.empty ++ items.
|
||||
var forward: List[(File, Set[T])] = Nil
|
||||
var currentItem: (File, T) = null
|
||||
var currentFile: File = null
|
||||
var currentVals: List[T] = Nil
|
||||
def closeEntry() {
|
||||
if (currentFile != null) forward = (currentFile, currentVals.toSet) :: forward
|
||||
currentFile = currentItem._1
|
||||
currentVals = currentItem._2 :: Nil
|
||||
}
|
||||
while (items.hasNext) {
|
||||
currentItem = items.next()
|
||||
if (currentItem._1 == currentFile) currentVals = currentItem._2 :: currentVals else closeEntry()
|
||||
}
|
||||
if (currentItem != null) closeEntry()
|
||||
Relation.reconstruct(forward.toMap)
|
||||
}
|
||||
val nameHashing = relations.nameHashing
|
||||
writeRelation(Headers.srcProd, relations.srcProd)
|
||||
writeRelation(Headers.binaryDep, relations.binaryDep)
|
||||
|
||||
def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) })
|
||||
def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String])
|
||||
val direct = if (nameHashing) Relations.emptySource else relations.direct
|
||||
val publicInherited = if (nameHashing)
|
||||
Relations.emptySource else relations.publicInherited
|
||||
|
||||
val srcProd = readFileRelation(Headers.srcProd)
|
||||
val binaryDep = readFileRelation(Headers.binaryDep)
|
||||
val memberRef = if (nameHashing)
|
||||
relations.memberRef else Relations.emptySourceDependencies
|
||||
val inheritance = if (nameHashing)
|
||||
relations.inheritance else Relations.emptySourceDependencies
|
||||
val names = if (nameHashing) relations.names else Relation.empty[File, String]
|
||||
|
||||
import sbt.inc.Relations.{Source, SourceDependencies, makeSourceDependencies, emptySource,
|
||||
makeSource, emptySourceDependencies}
|
||||
val directSrcDeps: Source = {
|
||||
val internalSrcDep = readFileRelation(Headers.directSrcDep)
|
||||
val externalDep = readStringRelation(Headers.directExternalDep)
|
||||
makeSource(internalSrcDep, externalDep)
|
||||
}
|
||||
val publicInheritedSrcDeps: Source = {
|
||||
val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI)
|
||||
val externalDepPI = readStringRelation(Headers.externalDepPI)
|
||||
makeSource(internalSrcDepPI, externalDepPI)
|
||||
}
|
||||
val memberRefSrcDeps: SourceDependencies = {
|
||||
val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep)
|
||||
val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep)
|
||||
makeSourceDependencies(internalMemberRefDep, externalMemberRefDep)
|
||||
}
|
||||
val inheritanceSrcDeps: SourceDependencies = {
|
||||
val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep)
|
||||
val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep)
|
||||
makeSourceDependencies(internalInheritanceDep, externalInheritanceDep)
|
||||
}
|
||||
// we don't check for emptiness of publicInherited/inheritance relations because
|
||||
// we assume that invariant that says they are subsets of direct/memberRef holds
|
||||
assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies),
|
||||
"When name hashing is disabled the `memberRef` relation should be empty.")
|
||||
assert(!nameHashing || (directSrcDeps == emptySource),
|
||||
"When name hashing is enabled the `direct` relation should be empty.")
|
||||
val classes = readStringRelation(Headers.classes)
|
||||
val names = readStringRelation(Headers.usedNames)
|
||||
writeRelation(Headers.directSrcDep, direct.internal)
|
||||
writeRelation(Headers.directExternalDep, direct.external)
|
||||
writeRelation(Headers.internalSrcDepPI, publicInherited.internal)
|
||||
writeRelation(Headers.externalDepPI, publicInherited.external)
|
||||
|
||||
if (nameHashing)
|
||||
Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names)
|
||||
else {
|
||||
assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " +
|
||||
s"should be empty: $names")
|
||||
Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes)
|
||||
}
|
||||
}
|
||||
}
|
||||
writeRelation(Headers.memberRefInternalDep, memberRef.internal)
|
||||
writeRelation(Headers.memberRefExternalDep, memberRef.external)
|
||||
writeRelation(Headers.inheritanceInternalDep, inheritance.internal)
|
||||
writeRelation(Headers.inheritanceExternalDep, inheritance.external)
|
||||
|
||||
private[this] object StampsF {
|
||||
object Headers {
|
||||
val products = "product stamps"
|
||||
val sources = "source stamps"
|
||||
val binaries = "binary stamps"
|
||||
val classNames = "class names"
|
||||
}
|
||||
writeRelation(Headers.classes, relations.classes)
|
||||
writeRelation(Headers.usedNames, names)
|
||||
}
|
||||
|
||||
def write(out: Writer, stamps: Stamps) {
|
||||
def doWriteMap[V](header: String, m: Map[File, V]) = writeMap(out)(header, m, { v: V => v.toString })
|
||||
def read(in: BufferedReader, nameHashing: Boolean): Relations = {
|
||||
def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = {
|
||||
val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator
|
||||
// Reconstruct the forward map. This is more efficient than Relation.empty ++ items.
|
||||
var forward: List[(File, Set[T])] = Nil
|
||||
var currentItem: (File, T) = null
|
||||
var currentFile: File = null
|
||||
var currentVals: List[T] = Nil
|
||||
def closeEntry() {
|
||||
if (currentFile != null) forward = (currentFile, currentVals.toSet) :: forward
|
||||
currentFile = currentItem._1
|
||||
currentVals = currentItem._2 :: Nil
|
||||
}
|
||||
while (items.hasNext) {
|
||||
currentItem = items.next()
|
||||
if (currentItem._1 == currentFile) currentVals = currentItem._2 :: currentVals else closeEntry()
|
||||
}
|
||||
if (currentItem != null) closeEntry()
|
||||
Relation.reconstruct(forward.toMap)
|
||||
}
|
||||
|
||||
doWriteMap(Headers.products, stamps.products)
|
||||
doWriteMap(Headers.sources, stamps.sources)
|
||||
doWriteMap(Headers.binaries, stamps.binaries)
|
||||
doWriteMap(Headers.classNames, stamps.classNames)
|
||||
}
|
||||
def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) })
|
||||
def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String])
|
||||
|
||||
def read(in: BufferedReader): Stamps = {
|
||||
def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v)
|
||||
val products = doReadMap(Headers.products, Stamp.fromString)
|
||||
val sources = doReadMap(Headers.sources, Stamp.fromString)
|
||||
val binaries = doReadMap(Headers.binaries, Stamp.fromString)
|
||||
val classNames = doReadMap(Headers.classNames, identity[String])
|
||||
val srcProd = readFileRelation(Headers.srcProd)
|
||||
val binaryDep = readFileRelation(Headers.binaryDep)
|
||||
|
||||
Stamps(products, sources, binaries, classNames)
|
||||
}
|
||||
}
|
||||
import sbt.inc.Relations.{
|
||||
Source,
|
||||
SourceDependencies,
|
||||
makeSourceDependencies,
|
||||
emptySource,
|
||||
makeSource,
|
||||
emptySourceDependencies
|
||||
}
|
||||
val directSrcDeps: Source = {
|
||||
val internalSrcDep = readFileRelation(Headers.directSrcDep)
|
||||
val externalDep = readStringRelation(Headers.directExternalDep)
|
||||
makeSource(internalSrcDep, externalDep)
|
||||
}
|
||||
val publicInheritedSrcDeps: Source = {
|
||||
val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI)
|
||||
val externalDepPI = readStringRelation(Headers.externalDepPI)
|
||||
makeSource(internalSrcDepPI, externalDepPI)
|
||||
}
|
||||
val memberRefSrcDeps: SourceDependencies = {
|
||||
val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep)
|
||||
val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep)
|
||||
makeSourceDependencies(internalMemberRefDep, externalMemberRefDep)
|
||||
}
|
||||
val inheritanceSrcDeps: SourceDependencies = {
|
||||
val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep)
|
||||
val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep)
|
||||
makeSourceDependencies(internalInheritanceDep, externalInheritanceDep)
|
||||
}
|
||||
// we don't check for emptiness of publicInherited/inheritance relations because
|
||||
// we assume that invariant that says they are subsets of direct/memberRef holds
|
||||
assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies),
|
||||
"When name hashing is disabled the `memberRef` relation should be empty.")
|
||||
assert(!nameHashing || (directSrcDeps == emptySource),
|
||||
"When name hashing is enabled the `direct` relation should be empty.")
|
||||
val classes = readStringRelation(Headers.classes)
|
||||
val names = readStringRelation(Headers.usedNames)
|
||||
|
||||
private[this] object APIsF {
|
||||
object Headers {
|
||||
val internal = "internal apis"
|
||||
val external = "external apis"
|
||||
}
|
||||
if (nameHashing)
|
||||
Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names)
|
||||
else {
|
||||
assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " +
|
||||
s"should be empty: $names")
|
||||
Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val stringToSource = ObjectStringifier.stringToObj[Source] _
|
||||
val sourceToString = ObjectStringifier.objToString[Source] _
|
||||
private[this] object StampsF {
|
||||
object Headers {
|
||||
val products = "product stamps"
|
||||
val sources = "source stamps"
|
||||
val binaries = "binary stamps"
|
||||
val classNames = "class names"
|
||||
}
|
||||
|
||||
def write(out: Writer, apis: APIs) {
|
||||
writeMap(out)(Headers.internal, apis.internal, sourceToString, inlineVals=false)
|
||||
writeMap(out)(Headers.external, apis.external, sourceToString, inlineVals=false)
|
||||
FormatTimer.close("bytes -> base64")
|
||||
FormatTimer.close("byte copy")
|
||||
FormatTimer.close("sbinary write")
|
||||
}
|
||||
def write(out: Writer, stamps: Stamps) {
|
||||
def doWriteMap[V](header: String, m: Map[File, V]) = writeMap(out)(header, m, { v: V => v.toString })
|
||||
|
||||
def read(in: BufferedReader): APIs = {
|
||||
val internal = readMap(in)(Headers.internal, new File(_), stringToSource)
|
||||
val external = readMap(in)(Headers.external, identity[String], stringToSource)
|
||||
FormatTimer.close("base64 -> bytes")
|
||||
FormatTimer.close("sbinary read")
|
||||
APIs(internal, external)
|
||||
}
|
||||
}
|
||||
doWriteMap(Headers.products, stamps.products)
|
||||
doWriteMap(Headers.sources, stamps.sources)
|
||||
doWriteMap(Headers.binaries, stamps.binaries)
|
||||
doWriteMap(Headers.classNames, stamps.classNames)
|
||||
}
|
||||
|
||||
private[this] object SourceInfosF {
|
||||
object Headers {
|
||||
val infos = "source infos"
|
||||
}
|
||||
def read(in: BufferedReader): Stamps = {
|
||||
def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v)
|
||||
val products = doReadMap(Headers.products, Stamp.fromString)
|
||||
val sources = doReadMap(Headers.sources, Stamp.fromString)
|
||||
val binaries = doReadMap(Headers.binaries, Stamp.fromString)
|
||||
val classNames = doReadMap(Headers.classNames, identity[String])
|
||||
|
||||
val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _
|
||||
val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _
|
||||
Stamps(products, sources, binaries, classNames)
|
||||
}
|
||||
}
|
||||
|
||||
def write(out: Writer, infos: SourceInfos) { writeMap(out)(Headers.infos, infos.allInfos, sourceInfoToString, inlineVals=false) }
|
||||
def read(in: BufferedReader): SourceInfos = SourceInfos.make(readMap(in)(Headers.infos, new File(_), stringToSourceInfo))
|
||||
}
|
||||
private[this] object APIsF {
|
||||
object Headers {
|
||||
val internal = "internal apis"
|
||||
val external = "external apis"
|
||||
}
|
||||
|
||||
private[this] object CompilationsF {
|
||||
object Headers {
|
||||
val compilations = "compilations"
|
||||
}
|
||||
val stringToSource = ObjectStringifier.stringToObj[Source] _
|
||||
val sourceToString = ObjectStringifier.objToString[Source] _
|
||||
|
||||
val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _
|
||||
val compilationToString = ObjectStringifier.objToString[Compilation] _
|
||||
def write(out: Writer, apis: APIs) {
|
||||
writeMap(out)(Headers.internal, apis.internal, sourceToString, inlineVals = false)
|
||||
writeMap(out)(Headers.external, apis.external, sourceToString, inlineVals = false)
|
||||
FormatTimer.close("bytes -> base64")
|
||||
FormatTimer.close("byte copy")
|
||||
FormatTimer.close("sbinary write")
|
||||
}
|
||||
|
||||
def write(out: Writer, compilations: Compilations) {
|
||||
writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString)
|
||||
}
|
||||
def read(in: BufferedReader): APIs = {
|
||||
val internal = readMap(in)(Headers.internal, new File(_), stringToSource)
|
||||
val external = readMap(in)(Headers.external, identity[String], stringToSource)
|
||||
FormatTimer.close("base64 -> bytes")
|
||||
FormatTimer.close("sbinary read")
|
||||
APIs(internal, external)
|
||||
}
|
||||
}
|
||||
|
||||
def read(in: BufferedReader): Compilations = Compilations.make(
|
||||
readSeq[Compilation](in)(Headers.compilations, stringToCompilation))
|
||||
}
|
||||
private[this] object SourceInfosF {
|
||||
object Headers {
|
||||
val infos = "source infos"
|
||||
}
|
||||
|
||||
private[this] object CompileSetupF {
|
||||
object Headers {
|
||||
val outputMode = "output mode"
|
||||
val outputDir = "output directories"
|
||||
val compileOptions = "compile options"
|
||||
val javacOptions = "javac options"
|
||||
val compilerVersion = "compiler version"
|
||||
val compileOrder = "compile order"
|
||||
val nameHashing = "name hashing"
|
||||
}
|
||||
val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _
|
||||
val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _
|
||||
|
||||
private[this] val singleOutputMode = "single"
|
||||
private[this] val multipleOutputMode = "multiple"
|
||||
private[this] val singleOutputKey = new File("output dir")
|
||||
def write(out: Writer, infos: SourceInfos) { writeMap(out)(Headers.infos, infos.allInfos, sourceInfoToString, inlineVals = false) }
|
||||
def read(in: BufferedReader): SourceInfos = SourceInfos.make(readMap(in)(Headers.infos, new File(_), stringToSourceInfo))
|
||||
}
|
||||
|
||||
def write(out: Writer, setup: CompileSetup) {
|
||||
val (mode, outputAsMap) = setup.output match {
|
||||
case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory))
|
||||
case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap)
|
||||
}
|
||||
private[this] object CompilationsF {
|
||||
object Headers {
|
||||
val compilations = "compilations"
|
||||
}
|
||||
|
||||
writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String])
|
||||
writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath })
|
||||
writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String])
|
||||
writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String])
|
||||
writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String])
|
||||
writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String])
|
||||
writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString)
|
||||
}
|
||||
val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _
|
||||
val compilationToString = ObjectStringifier.objToString[Compilation] _
|
||||
|
||||
def read(in: BufferedReader): CompileSetup = {
|
||||
def s2f(s: String) = new File(s)
|
||||
def s2b(s: String): Boolean = s.toBoolean
|
||||
val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption
|
||||
val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f)
|
||||
val compileOptions = readSeq(in)(Headers.compileOptions, identity[String])
|
||||
val javacOptions = readSeq(in)(Headers.javacOptions, identity[String])
|
||||
val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head
|
||||
val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head
|
||||
val nameHashing = readSeq(in)(Headers.nameHashing, s2b).head
|
||||
def write(out: Writer, compilations: Compilations) {
|
||||
writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString)
|
||||
}
|
||||
|
||||
val output = outputDirMode match {
|
||||
case Some(s) => s match {
|
||||
case `singleOutputMode` => new SingleOutput {
|
||||
val outputDirectory = outputAsMap(singleOutputKey)
|
||||
}
|
||||
case `multipleOutputMode` => new MultipleOutput {
|
||||
val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map {
|
||||
case (src: File, out: File) => new MultipleOutput.OutputGroup {
|
||||
val sourceDirectory = src
|
||||
val outputDirectory = out
|
||||
}
|
||||
}
|
||||
}
|
||||
case str: String => throw new ReadException("Unrecognized output mode: " + str)
|
||||
}
|
||||
case None => throw new ReadException("No output mode specified")
|
||||
}
|
||||
def read(in: BufferedReader): Compilations = Compilations.make(
|
||||
readSeq[Compilation](in)(Headers.compilations, stringToCompilation))
|
||||
}
|
||||
|
||||
new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion,
|
||||
xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing)
|
||||
}
|
||||
}
|
||||
private[this] object CompileSetupF {
|
||||
object Headers {
|
||||
val outputMode = "output mode"
|
||||
val outputDir = "output directories"
|
||||
val compileOptions = "compile options"
|
||||
val javacOptions = "javac options"
|
||||
val compilerVersion = "compiler version"
|
||||
val compileOrder = "compile order"
|
||||
val nameHashing = "name hashing"
|
||||
}
|
||||
|
||||
private[this] object ObjectStringifier {
|
||||
def objToString[T](o: T)(implicit fmt: sbinary.Format[T]) = {
|
||||
val baos = new ByteArrayOutputStream()
|
||||
val out = new sbinary.JavaOutput(baos)
|
||||
FormatTimer.aggregate("sbinary write") { try { fmt.writes(out, o) } finally { baos.close() } }
|
||||
val bytes = FormatTimer.aggregate("byte copy") { baos.toByteArray }
|
||||
FormatTimer.aggregate("bytes -> base64") { DatatypeConverter.printBase64Binary(bytes) }
|
||||
}
|
||||
private[this] val singleOutputMode = "single"
|
||||
private[this] val multipleOutputMode = "multiple"
|
||||
private[this] val singleOutputKey = new File("output dir")
|
||||
|
||||
def stringToObj[T](s: String)(implicit fmt: sbinary.Format[T]) = {
|
||||
val bytes = FormatTimer.aggregate("base64 -> bytes") { DatatypeConverter.parseBase64Binary(s) }
|
||||
val in = new sbinary.JavaInput(new ByteArrayInputStream(bytes))
|
||||
FormatTimer.aggregate("sbinary read") { fmt.reads(in) }
|
||||
}
|
||||
}
|
||||
def write(out: Writer, setup: CompileSetup) {
|
||||
val (mode, outputAsMap) = setup.output match {
|
||||
case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory))
|
||||
case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap)
|
||||
}
|
||||
|
||||
// Various helper functions.
|
||||
writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String])
|
||||
writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath })
|
||||
writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String])
|
||||
writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String])
|
||||
writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String])
|
||||
writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String])
|
||||
writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString)
|
||||
}
|
||||
|
||||
private[this] def writeHeader(out: Writer, header: String) {
|
||||
out.write(header + ":\n")
|
||||
}
|
||||
def read(in: BufferedReader): CompileSetup = {
|
||||
def s2f(s: String) = new File(s)
|
||||
def s2b(s: String): Boolean = s.toBoolean
|
||||
val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption
|
||||
val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f)
|
||||
val compileOptions = readSeq(in)(Headers.compileOptions, identity[String])
|
||||
val javacOptions = readSeq(in)(Headers.javacOptions, identity[String])
|
||||
val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head
|
||||
val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head
|
||||
val nameHashing = readSeq(in)(Headers.nameHashing, s2b).head
|
||||
|
||||
private[this] def expectHeader(in: BufferedReader, expectedHeader: String) {
|
||||
val header = in.readLine()
|
||||
if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header)
|
||||
}
|
||||
val output = outputDirMode match {
|
||||
case Some(s) => s match {
|
||||
case `singleOutputMode` => new SingleOutput {
|
||||
val outputDirectory = outputAsMap(singleOutputKey)
|
||||
}
|
||||
case `multipleOutputMode` => new MultipleOutput {
|
||||
val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map {
|
||||
case (src: File, out: File) => new MultipleOutput.OutputGroup {
|
||||
val sourceDirectory = src
|
||||
val outputDirectory = out
|
||||
}
|
||||
}
|
||||
}
|
||||
case str: String => throw new ReadException("Unrecognized output mode: " + str)
|
||||
}
|
||||
case None => throw new ReadException("No output mode specified")
|
||||
}
|
||||
|
||||
private[this] def writeSize(out: Writer, n: Int) {
|
||||
out.write("%d items\n".format(n))
|
||||
}
|
||||
new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion,
|
||||
xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing)
|
||||
}
|
||||
}
|
||||
|
||||
private val itemsPattern = """(\d+) items""".r
|
||||
private[this] def readSize(in: BufferedReader): Int = {
|
||||
in.readLine() match {
|
||||
case itemsPattern(nStr) => Integer.parseInt(nStr)
|
||||
case s: String => throw new ReadException("\"<n> items\"", s)
|
||||
case null => throw new EOFException
|
||||
}
|
||||
}
|
||||
private[this] object ObjectStringifier {
|
||||
def objToString[T](o: T)(implicit fmt: sbinary.Format[T]) = {
|
||||
val baos = new ByteArrayOutputStream()
|
||||
val out = new sbinary.JavaOutput(baos)
|
||||
FormatTimer.aggregate("sbinary write") { try { fmt.writes(out, o) } finally { baos.close() } }
|
||||
val bytes = FormatTimer.aggregate("byte copy") { baos.toByteArray }
|
||||
FormatTimer.aggregate("bytes -> base64") { DatatypeConverter.printBase64Binary(bytes) }
|
||||
}
|
||||
|
||||
private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) {
|
||||
// We write sequences as idx -> element maps, for uniformity with maps/relations.
|
||||
def n = s.length
|
||||
val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1
|
||||
val fmtStr = "%%0%dd".format(numDigits)
|
||||
// We only use this for relatively short seqs, so creating this extra map won't be a performance hit.
|
||||
val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap
|
||||
writeMap(out)(header, m, t2s)
|
||||
}
|
||||
def stringToObj[T](s: String)(implicit fmt: sbinary.Format[T]) = {
|
||||
val bytes = FormatTimer.aggregate("base64 -> bytes") { DatatypeConverter.parseBase64Binary(s) }
|
||||
val in = new sbinary.JavaInput(new ByteArrayInputStream(bytes))
|
||||
FormatTimer.aggregate("sbinary read") { fmt.reads(in) }
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] =
|
||||
(readPairs(in)(expectedHeader, identity[String], s2t) map(_._2)).toSeq
|
||||
// Various helper functions.
|
||||
|
||||
private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean=true)(implicit ord: Ordering[K]) {
|
||||
writeHeader(out, header)
|
||||
writeSize(out, m.size)
|
||||
m.keys.toSeq.sorted foreach { k =>
|
||||
out.write(k.toString)
|
||||
out.write(" -> ")
|
||||
if (!inlineVals) out.write("\n") // Put large vals on their own line, to save string munging on read.
|
||||
out.write(v2s(m(k)))
|
||||
out.write("\n")
|
||||
}
|
||||
}
|
||||
private[this] def writeHeader(out: Writer, header: String) {
|
||||
out.write(header + ":\n")
|
||||
}
|
||||
|
||||
private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = {
|
||||
def toPair(s: String): (K, V) = {
|
||||
if (s == null) throw new EOFException
|
||||
val p = s.indexOf(" -> ")
|
||||
val k = s2k(s.substring(0, p))
|
||||
// Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob.
|
||||
val v = s2v(if (p == s.length - 4) in.readLine() else s.substring(p + 4))
|
||||
(k, v)
|
||||
}
|
||||
expectHeader(in, expectedHeader)
|
||||
val n = readSize(in)
|
||||
for (i <- 0 until n) yield toPair(in.readLine())
|
||||
}
|
||||
private[this] def expectHeader(in: BufferedReader, expectedHeader: String) {
|
||||
val header = in.readLine()
|
||||
if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header)
|
||||
}
|
||||
|
||||
private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = {
|
||||
readPairs(in)(expectedHeader, s2k, s2v).toMap
|
||||
}
|
||||
private[this] def writeSize(out: Writer, n: Int) {
|
||||
out.write("%d items\n".format(n))
|
||||
}
|
||||
|
||||
private val itemsPattern = """(\d+) items""".r
|
||||
private[this] def readSize(in: BufferedReader): Int = {
|
||||
in.readLine() match {
|
||||
case itemsPattern(nStr) => Integer.parseInt(nStr)
|
||||
case s: String => throw new ReadException("\"<n> items\"", s)
|
||||
case null => throw new EOFException
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) {
|
||||
// We write sequences as idx -> element maps, for uniformity with maps/relations.
|
||||
def n = s.length
|
||||
val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1
|
||||
val fmtStr = "%%0%dd".format(numDigits)
|
||||
// We only use this for relatively short seqs, so creating this extra map won't be a performance hit.
|
||||
val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap
|
||||
writeMap(out)(header, m, t2s)
|
||||
}
|
||||
|
||||
private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] =
|
||||
(readPairs(in)(expectedHeader, identity[String], s2t) map (_._2)).toSeq
|
||||
|
||||
private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean = true)(implicit ord: Ordering[K]) {
|
||||
writeHeader(out, header)
|
||||
writeSize(out, m.size)
|
||||
m.keys.toSeq.sorted foreach { k =>
|
||||
out.write(k.toString)
|
||||
out.write(" -> ")
|
||||
if (!inlineVals) out.write("\n") // Put large vals on their own line, to save string munging on read.
|
||||
out.write(v2s(m(k)))
|
||||
out.write("\n")
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = {
|
||||
def toPair(s: String): (K, V) = {
|
||||
if (s == null) throw new EOFException
|
||||
val p = s.indexOf(" -> ")
|
||||
val k = s2k(s.substring(0, p))
|
||||
// Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob.
|
||||
val v = s2v(if (p == s.length - 4) in.readLine() else s.substring(p + 4))
|
||||
(k, v)
|
||||
}
|
||||
expectHeader(in, expectedHeader)
|
||||
val n = readSize(in)
|
||||
for (i <- 0 until n) yield toPair(in.readLine())
|
||||
}
|
||||
|
||||
private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = {
|
||||
readPairs(in)(expectedHeader, s2k, s2v).toMap
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,11 +6,11 @@ import sbinary._
|
|||
object CompilationFormat extends Format[Compilation] {
|
||||
import java.io._
|
||||
def reads(in: Input): Compilation = {
|
||||
val oin = new ObjectInputStream(new InputWrapperStream(in))
|
||||
try { oin.readObject.asInstanceOf[Compilation] } finally { oin.close() }
|
||||
val oin = new ObjectInputStream(new InputWrapperStream(in))
|
||||
try { oin.readObject.asInstanceOf[Compilation] } finally { oin.close() }
|
||||
}
|
||||
def writes(out: Output, src: Compilation) {
|
||||
val oout = new ObjectOutputStream(new OutputWrapperStream(out))
|
||||
try { oout.writeObject(src) } finally { oout.close() }
|
||||
try { oout.writeObject(src) } finally { oout.close() }
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,37 +3,33 @@
|
|||
*/
|
||||
package xsbt.api
|
||||
|
||||
import xsbti.SafeLazy
|
||||
import xsbti.api._
|
||||
import sbt.Using
|
||||
import sbinary._
|
||||
import DefaultProtocol._
|
||||
import Operations.{read,write}
|
||||
import java.io.File
|
||||
import scala.collection.mutable
|
||||
import xsbti.SafeLazy
|
||||
import xsbti.api._
|
||||
import sbt.Using
|
||||
import sbinary._
|
||||
import DefaultProtocol._
|
||||
import Operations.{ read, write }
|
||||
import java.io.File
|
||||
import scala.collection.mutable
|
||||
|
||||
object SourceFormat extends Format[Source]
|
||||
{
|
||||
import java.io._
|
||||
def reads(in: Input): Source =
|
||||
{
|
||||
val oin = new ObjectInputStream(new InputWrapperStream(in))
|
||||
try { oin.readObject.asInstanceOf[Source] } finally { oin.close() }
|
||||
}
|
||||
def writes(out: Output, src: Source)
|
||||
{
|
||||
val oout = new ObjectOutputStream(new OutputWrapperStream(out))
|
||||
try { oout.writeObject(src) } finally { oout.close() }
|
||||
}
|
||||
object SourceFormat extends Format[Source] {
|
||||
import java.io._
|
||||
def reads(in: Input): Source =
|
||||
{
|
||||
val oin = new ObjectInputStream(new InputWrapperStream(in))
|
||||
try { oin.readObject.asInstanceOf[Source] } finally { oin.close() }
|
||||
}
|
||||
def writes(out: Output, src: Source) {
|
||||
val oout = new ObjectOutputStream(new OutputWrapperStream(out))
|
||||
try { oout.writeObject(src) } finally { oout.close() }
|
||||
}
|
||||
}
|
||||
final class InputWrapperStream(in: Input) extends java.io.InputStream
|
||||
{
|
||||
def toInt(b: Byte) = if(b < 0) b + 256 else b.toInt
|
||||
def read() = try { toInt(in.readByte) } catch { case e: sbinary.EOF => -1 }
|
||||
override def read(b: Array[Byte], off: Int, len: Int) = in.readTo(b, off, len)
|
||||
final class InputWrapperStream(in: Input) extends java.io.InputStream {
|
||||
def toInt(b: Byte) = if (b < 0) b + 256 else b.toInt
|
||||
def read() = try { toInt(in.readByte) } catch { case e: sbinary.EOF => -1 }
|
||||
override def read(b: Array[Byte], off: Int, len: Int) = in.readTo(b, off, len)
|
||||
}
|
||||
final class OutputWrapperStream(out: Output) extends java.io.OutputStream
|
||||
{
|
||||
override def write(bs: Array[Byte], off: Int, len: Int) = out.writeAll(bs, off, len)
|
||||
def write(b: Int) = out.writeByte(b.toByte)
|
||||
final class OutputWrapperStream(out: Output) extends java.io.OutputStream {
|
||||
override def write(bs: Array[Byte], off: Int, len: Int) = out.writeAll(bs, off, len)
|
||||
def write(b: Int) = out.writeByte(b.toByte)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,11 +4,10 @@
|
|||
package sbt
|
||||
|
||||
final case class ClasspathOptions(bootLibrary: Boolean, compiler: Boolean, extra: Boolean, autoBoot: Boolean, filterLibrary: Boolean) extends xsbti.compile.ClasspathOptions
|
||||
object ClasspathOptions
|
||||
{
|
||||
def manual = ClasspathOptions(false, false, false, true, false)
|
||||
def boot = ClasspathOptions(true, false, false, true, true)
|
||||
def repl = auto
|
||||
def javac(compiler: Boolean) = new ClasspathOptions(false, compiler, false, false, false)
|
||||
def auto = ClasspathOptions(true, true, true, true, true)
|
||||
object ClasspathOptions {
|
||||
def manual = ClasspathOptions(false, false, false, true, false)
|
||||
def boot = ClasspathOptions(true, false, false, true, true)
|
||||
def repl = auto
|
||||
def javac(compiler: Boolean) = new ClasspathOptions(false, compiler, false, false, false)
|
||||
def auto = ClasspathOptions(true, true, true, true, true)
|
||||
}
|
||||
|
|
@ -8,141 +8,128 @@ package sbt
|
|||
// see licenses/LICENSE_Scala
|
||||
// Original author: Martin Odersky
|
||||
|
||||
import xsbti.{Maybe,Position,Problem,Reporter,Severity}
|
||||
import java.io.File
|
||||
import java.util.EnumMap
|
||||
import scala.collection.mutable
|
||||
import LoggerReporter._
|
||||
import Logger.{m2o,o2m,position,problem}
|
||||
import Severity.{Error,Info => SInfo,Warn}
|
||||
import xsbti.{ Maybe, Position, Problem, Reporter, Severity }
|
||||
import java.io.File
|
||||
import java.util.EnumMap
|
||||
import scala.collection.mutable
|
||||
import LoggerReporter._
|
||||
import Logger.{ m2o, o2m, position, problem }
|
||||
import Severity.{ Error, Info => SInfo, Warn }
|
||||
|
||||
object LoggerReporter
|
||||
{
|
||||
final class PositionKey(pos: Position)
|
||||
{
|
||||
def offset = pos.offset
|
||||
def sourceFile = pos.sourceFile
|
||||
object LoggerReporter {
|
||||
final class PositionKey(pos: Position) {
|
||||
def offset = pos.offset
|
||||
def sourceFile = pos.sourceFile
|
||||
|
||||
override def equals(o: Any) =
|
||||
o match { case pk: PositionKey => equalsKey(pk); case _ => false }
|
||||
override def equals(o: Any) =
|
||||
o match { case pk: PositionKey => equalsKey(pk); case _ => false }
|
||||
|
||||
def equalsKey(o: PositionKey) =
|
||||
m2o(pos.offset) == m2o(o.offset) &&
|
||||
m2o(pos.sourceFile) == m2o(o.sourceFile)
|
||||
override def hashCode =
|
||||
m2o(pos.offset).hashCode * 31
|
||||
m2o(pos.sourceFile).hashCode
|
||||
}
|
||||
def equalsKey(o: PositionKey) =
|
||||
m2o(pos.offset) == m2o(o.offset) &&
|
||||
m2o(pos.sourceFile) == m2o(o.sourceFile)
|
||||
override def hashCode =
|
||||
m2o(pos.offset).hashCode * 31
|
||||
m2o(pos.sourceFile).hashCode
|
||||
}
|
||||
|
||||
def countElementsAsString(n: Int, elements: String): String =
|
||||
n match {
|
||||
case 0 => "no " + elements + "s"
|
||||
case 1 => "one " + elements
|
||||
case 2 => "two " + elements + "s"
|
||||
case 3 => "three " + elements + "s"
|
||||
case 4 => "four " + elements + "s"
|
||||
case _ => "" + n + " " + elements + "s"
|
||||
}
|
||||
def countElementsAsString(n: Int, elements: String): String =
|
||||
n match {
|
||||
case 0 => "no " + elements + "s"
|
||||
case 1 => "one " + elements
|
||||
case 2 => "two " + elements + "s"
|
||||
case 3 => "three " + elements + "s"
|
||||
case 4 => "four " + elements + "s"
|
||||
case _ => "" + n + " " + elements + "s"
|
||||
}
|
||||
}
|
||||
|
||||
class LoggerReporter(maximumErrors: Int, log: Logger, sourcePositionMapper: Position => Position = {p => p}) extends xsbti.Reporter
|
||||
{
|
||||
val positions = new mutable.HashMap[PositionKey, Severity]
|
||||
val count = new EnumMap[Severity, Int](classOf[Severity])
|
||||
private[this] val allProblems = new mutable.ListBuffer[Problem]
|
||||
|
||||
reset()
|
||||
|
||||
def reset()
|
||||
{
|
||||
count.put(Warn, 0)
|
||||
count.put(SInfo, 0)
|
||||
count.put(Error, 0)
|
||||
positions.clear()
|
||||
allProblems.clear()
|
||||
}
|
||||
def hasWarnings = count.get(Warn) > 0
|
||||
def hasErrors = count.get(Error) > 0
|
||||
def problems: Array[Problem] = allProblems.toArray
|
||||
def comment(pos: Position, msg: String) {}
|
||||
class LoggerReporter(maximumErrors: Int, log: Logger, sourcePositionMapper: Position => Position = { p => p }) extends xsbti.Reporter {
|
||||
val positions = new mutable.HashMap[PositionKey, Severity]
|
||||
val count = new EnumMap[Severity, Int](classOf[Severity])
|
||||
private[this] val allProblems = new mutable.ListBuffer[Problem]
|
||||
|
||||
def printSummary()
|
||||
{
|
||||
val warnings = count.get(Severity.Warn)
|
||||
if(warnings > 0)
|
||||
log.warn(countElementsAsString(warnings, "warning") + " found")
|
||||
val errors = count.get(Severity.Error)
|
||||
if(errors > 0)
|
||||
log.error(countElementsAsString(errors, "error") + " found")
|
||||
}
|
||||
reset()
|
||||
|
||||
def inc(sev: Severity) = count.put(sev, count.get(sev) + 1)
|
||||
def reset() {
|
||||
count.put(Warn, 0)
|
||||
count.put(SInfo, 0)
|
||||
count.put(Error, 0)
|
||||
positions.clear()
|
||||
allProblems.clear()
|
||||
}
|
||||
def hasWarnings = count.get(Warn) > 0
|
||||
def hasErrors = count.get(Error) > 0
|
||||
def problems: Array[Problem] = allProblems.toArray
|
||||
def comment(pos: Position, msg: String) {}
|
||||
|
||||
def display(pos: Position, msg: String, severity: Severity)
|
||||
{
|
||||
inc(severity)
|
||||
if(severity != Error || maximumErrors <= 0 || count.get(severity) <= maximumErrors)
|
||||
print(severityLogger(severity), pos, msg)
|
||||
}
|
||||
def severityLogger(severity: Severity): (=> String) => Unit =
|
||||
m =>
|
||||
{
|
||||
(severity match
|
||||
{
|
||||
case Error => log.error(m)
|
||||
case Warn => log.warn(m)
|
||||
case SInfo => log.info(m)
|
||||
})
|
||||
}
|
||||
def printSummary() {
|
||||
val warnings = count.get(Severity.Warn)
|
||||
if (warnings > 0)
|
||||
log.warn(countElementsAsString(warnings, "warning") + " found")
|
||||
val errors = count.get(Severity.Error)
|
||||
if (errors > 0)
|
||||
log.error(countElementsAsString(errors, "error") + " found")
|
||||
}
|
||||
|
||||
def print(log: (=> String) => Unit, pos: Position, msg: String)
|
||||
{
|
||||
if(pos.sourcePath.isEmpty && pos.line.isEmpty)
|
||||
log(msg)
|
||||
else
|
||||
{
|
||||
val sourcePrefix = m2o(pos.sourcePath).getOrElse("")
|
||||
val lineNumberString = m2o(pos.line).map(":" + _ + ":").getOrElse(":") + " "
|
||||
log(sourcePrefix + lineNumberString + msg)
|
||||
val lineContent = pos.lineContent
|
||||
if(!lineContent.isEmpty)
|
||||
{
|
||||
log(lineContent)
|
||||
for(space <- m2o(pos.pointerSpace))
|
||||
log(space + "^") // pointer to the column position of the error/warning
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def log(pos: Position, msg: String, severity: Severity): Unit =
|
||||
{
|
||||
val mappedPos = sourcePositionMapper(pos)
|
||||
allProblems += problem("", mappedPos, msg, severity)
|
||||
severity match
|
||||
{
|
||||
case Warn | Error =>
|
||||
{
|
||||
if(!testAndLog(mappedPos, severity))
|
||||
display(mappedPos, msg, severity)
|
||||
}
|
||||
case _ => display(mappedPos, msg, severity)
|
||||
}
|
||||
}
|
||||
def inc(sev: Severity) = count.put(sev, count.get(sev) + 1)
|
||||
|
||||
def testAndLog(pos: Position, severity: Severity): Boolean =
|
||||
{
|
||||
if(pos.offset.isEmpty || pos.sourceFile.isEmpty)
|
||||
false
|
||||
else
|
||||
{
|
||||
val key = new PositionKey(pos)
|
||||
if(positions.get(key).map(_.ordinal >= severity.ordinal).getOrElse(false))
|
||||
true
|
||||
else
|
||||
{
|
||||
positions(key) = severity
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
def display(pos: Position, msg: String, severity: Severity) {
|
||||
inc(severity)
|
||||
if (severity != Error || maximumErrors <= 0 || count.get(severity) <= maximumErrors)
|
||||
print(severityLogger(severity), pos, msg)
|
||||
}
|
||||
def severityLogger(severity: Severity): (=> String) => Unit =
|
||||
m =>
|
||||
{
|
||||
(severity match {
|
||||
case Error => log.error(m)
|
||||
case Warn => log.warn(m)
|
||||
case SInfo => log.info(m)
|
||||
})
|
||||
}
|
||||
|
||||
def print(log: (=> String) => Unit, pos: Position, msg: String) {
|
||||
if (pos.sourcePath.isEmpty && pos.line.isEmpty)
|
||||
log(msg)
|
||||
else {
|
||||
val sourcePrefix = m2o(pos.sourcePath).getOrElse("")
|
||||
val lineNumberString = m2o(pos.line).map(":" + _ + ":").getOrElse(":") + " "
|
||||
log(sourcePrefix + lineNumberString + msg)
|
||||
val lineContent = pos.lineContent
|
||||
if (!lineContent.isEmpty) {
|
||||
log(lineContent)
|
||||
for (space <- m2o(pos.pointerSpace))
|
||||
log(space + "^") // pointer to the column position of the error/warning
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def log(pos: Position, msg: String, severity: Severity): Unit =
|
||||
{
|
||||
val mappedPos = sourcePositionMapper(pos)
|
||||
allProblems += problem("", mappedPos, msg, severity)
|
||||
severity match {
|
||||
case Warn | Error =>
|
||||
{
|
||||
if (!testAndLog(mappedPos, severity))
|
||||
display(mappedPos, msg, severity)
|
||||
}
|
||||
case _ => display(mappedPos, msg, severity)
|
||||
}
|
||||
}
|
||||
|
||||
def testAndLog(pos: Position, severity: Severity): Boolean =
|
||||
{
|
||||
if (pos.offset.isEmpty || pos.sourceFile.isEmpty)
|
||||
false
|
||||
else {
|
||||
val key = new PositionKey(pos)
|
||||
if (positions.get(key).map(_.ordinal >= severity.ordinal).getOrElse(false))
|
||||
true
|
||||
else {
|
||||
positions(key) = severity
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -4,160 +4,158 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import xsbti.{AnalysisCallback, Logger => xLogger, Reporter}
|
||||
import xsbti.compile.{CachedCompiler, CachedCompilerProvider, DependencyChanges, GlobalsCache, CompileProgress, Output}
|
||||
import java.io.File
|
||||
import java.net.{URL, URLClassLoader}
|
||||
import xsbti.{ AnalysisCallback, Logger => xLogger, Reporter }
|
||||
import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, DependencyChanges, GlobalsCache, CompileProgress, Output }
|
||||
import java.io.File
|
||||
import java.net.{ URL, URLClassLoader }
|
||||
|
||||
/** Interface to the Scala compiler that uses the dependency analysis plugin. This class uses the Scala library and compiler
|
||||
* provided by scalaInstance. This class requires a ComponentManager in order to obtain the interface code to scalac and
|
||||
* the analysis plugin. Because these call Scala code for a different Scala version than the one used for this class, they must
|
||||
* be compiled for the version of Scala being used.*/
|
||||
final class AnalyzingCompiler private(val scalaInstance: xsbti.compile.ScalaInstance, val provider: CompilerInterfaceProvider, val cp: xsbti.compile.ClasspathOptions, onArgsF: Seq[String] => Unit) extends CachedCompilerProvider
|
||||
{
|
||||
def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions) =
|
||||
this(scalaInstance, provider, cp, _ => ())
|
||||
def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider) = this(scalaInstance, provider, ClasspathOptions.auto)
|
||||
/**
|
||||
* Interface to the Scala compiler that uses the dependency analysis plugin. This class uses the Scala library and compiler
|
||||
* provided by scalaInstance. This class requires a ComponentManager in order to obtain the interface code to scalac and
|
||||
* the analysis plugin. Because these call Scala code for a different Scala version than the one used for this class, they must
|
||||
* be compiled for the version of Scala being used.
|
||||
*/
|
||||
final class AnalyzingCompiler private (val scalaInstance: xsbti.compile.ScalaInstance, val provider: CompilerInterfaceProvider, val cp: xsbti.compile.ClasspathOptions, onArgsF: Seq[String] => Unit) extends CachedCompilerProvider {
|
||||
def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions) =
|
||||
this(scalaInstance, provider, cp, _ => ())
|
||||
def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider) = this(scalaInstance, provider, ClasspathOptions.auto)
|
||||
|
||||
@deprecated("A Logger is no longer needed.", "0.13.0")
|
||||
def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider, log: Logger) = this(scalaInstance, provider)
|
||||
@deprecated("A Logger is no longer needed.", "0.13.0")
|
||||
def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider, log: Logger) = this(scalaInstance, provider)
|
||||
|
||||
@deprecated("A Logger is no longer needed.", "0.13.0")
|
||||
def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions, log: Logger) = this(scalaInstance, provider, cp)
|
||||
@deprecated("A Logger is no longer needed.", "0.13.0")
|
||||
def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions, log: Logger) = this(scalaInstance, provider, cp)
|
||||
|
||||
def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, cp, f)
|
||||
def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, cp, f)
|
||||
|
||||
def apply(sources: Seq[File], changes: DependencyChanges, classpath: Seq[File], singleOutput: File, options: Seq[String], callback: AnalysisCallback, maximumErrors: Int, cache: GlobalsCache, log: Logger)
|
||||
{
|
||||
val arguments = (new CompilerArguments(scalaInstance, cp))(Nil, classpath, None, options)
|
||||
val output = CompileOutput(singleOutput)
|
||||
compile(sources, changes, arguments, output, callback, new LoggerReporter(maximumErrors, log, p => p), cache, log, None)
|
||||
}
|
||||
def apply(sources: Seq[File], changes: DependencyChanges, classpath: Seq[File], singleOutput: File, options: Seq[String], callback: AnalysisCallback, maximumErrors: Int, cache: GlobalsCache, log: Logger) {
|
||||
val arguments = (new CompilerArguments(scalaInstance, cp))(Nil, classpath, None, options)
|
||||
val output = CompileOutput(singleOutput)
|
||||
compile(sources, changes, arguments, output, callback, new LoggerReporter(maximumErrors, log, p => p), cache, log, None)
|
||||
}
|
||||
|
||||
def compile(sources: Seq[File], changes: DependencyChanges, options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, log: Logger, progressOpt: Option[CompileProgress]): Unit =
|
||||
{
|
||||
val cached = cache(options.toArray, output, !changes.isEmpty, this, log, reporter)
|
||||
val progress = progressOpt getOrElse IgnoreProgress
|
||||
compile(sources, changes, callback, log, reporter, progress, cached)
|
||||
}
|
||||
def compile(sources: Seq[File], changes: DependencyChanges, options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, log: Logger, progressOpt: Option[CompileProgress]): Unit =
|
||||
{
|
||||
val cached = cache(options.toArray, output, !changes.isEmpty, this, log, reporter)
|
||||
val progress = progressOpt getOrElse IgnoreProgress
|
||||
compile(sources, changes, callback, log, reporter, progress, cached)
|
||||
}
|
||||
|
||||
def compile(sources: Seq[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, reporter: Reporter, progress: CompileProgress, compiler: CachedCompiler)
|
||||
{
|
||||
onArgsF(compiler.commandArguments(sources.toArray))
|
||||
call("xsbt.CompilerInterface", "run", log)(
|
||||
classOf[Array[File]], classOf[DependencyChanges], classOf[AnalysisCallback], classOf[xLogger], classOf[Reporter], classOf[CompileProgress], classOf[CachedCompiler]) (
|
||||
sources.toArray, changes, callback, log, reporter, progress, compiler )
|
||||
}
|
||||
def newCachedCompiler(arguments: Array[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
|
||||
newCachedCompiler(arguments: Seq[String], output, log, reporter, resident)
|
||||
def compile(sources: Seq[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, reporter: Reporter, progress: CompileProgress, compiler: CachedCompiler) {
|
||||
onArgsF(compiler.commandArguments(sources.toArray))
|
||||
call("xsbt.CompilerInterface", "run", log)(
|
||||
classOf[Array[File]], classOf[DependencyChanges], classOf[AnalysisCallback], classOf[xLogger], classOf[Reporter], classOf[CompileProgress], classOf[CachedCompiler])(
|
||||
sources.toArray, changes, callback, log, reporter, progress, compiler)
|
||||
}
|
||||
def newCachedCompiler(arguments: Array[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
|
||||
newCachedCompiler(arguments: Seq[String], output, log, reporter, resident)
|
||||
|
||||
def newCachedCompiler(arguments: Seq[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
|
||||
{
|
||||
call("xsbt.CompilerInterface", "newCompiler", log)(
|
||||
classOf[Array[String]], classOf[Output], classOf[xLogger], classOf[Reporter], classOf[Boolean] ) (
|
||||
arguments.toArray[String] : Array[String], output, log, reporter, resident: java.lang.Boolean ).
|
||||
asInstanceOf[CachedCompiler]
|
||||
}
|
||||
def newCachedCompiler(arguments: Seq[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler =
|
||||
{
|
||||
call("xsbt.CompilerInterface", "newCompiler", log)(
|
||||
classOf[Array[String]], classOf[Output], classOf[xLogger], classOf[Reporter], classOf[Boolean])(
|
||||
arguments.toArray[String]: Array[String], output, log, reporter, resident: java.lang.Boolean).
|
||||
asInstanceOf[CachedCompiler]
|
||||
}
|
||||
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit =
|
||||
doc(sources, classpath, outputDirectory, options, log, new LoggerReporter(maximumErrors, log))
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger, reporter: Reporter): Unit =
|
||||
{
|
||||
val arguments = (new CompilerArguments(scalaInstance, cp))(sources, classpath, Some(outputDirectory), options)
|
||||
onArgsF(arguments)
|
||||
call("xsbt.ScaladocInterface", "run", log) (classOf[Array[String]], classOf[xLogger], classOf[Reporter]) (
|
||||
arguments.toArray[String] : Array[String], log, reporter)
|
||||
}
|
||||
def console(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit =
|
||||
{
|
||||
onArgsF(consoleCommandArguments(classpath, options, log))
|
||||
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
|
||||
val (names, values) = bindings.unzip
|
||||
call("xsbt.ConsoleInterface", "run", log)(
|
||||
classOf[Array[String]], classOf[String], classOf[String], classOf[String], classOf[String], classOf[ClassLoader], classOf[Array[String]], classOf[Array[Any]], classOf[xLogger])(
|
||||
options.toArray[String]: Array[String], bootClasspath, classpathString, initialCommands, cleanupCommands, loader.orNull, names.toArray[String], values.toArray[Any], log)
|
||||
}
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit =
|
||||
doc(sources, classpath, outputDirectory, options, log, new LoggerReporter(maximumErrors, log))
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger, reporter: Reporter): Unit =
|
||||
{
|
||||
val arguments = (new CompilerArguments(scalaInstance, cp))(sources, classpath, Some(outputDirectory), options)
|
||||
onArgsF(arguments)
|
||||
call("xsbt.ScaladocInterface", "run", log)(classOf[Array[String]], classOf[xLogger], classOf[Reporter])(
|
||||
arguments.toArray[String]: Array[String], log, reporter)
|
||||
}
|
||||
def console(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit =
|
||||
{
|
||||
onArgsF(consoleCommandArguments(classpath, options, log))
|
||||
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
|
||||
val (names, values) = bindings.unzip
|
||||
call("xsbt.ConsoleInterface", "run", log)(
|
||||
classOf[Array[String]], classOf[String], classOf[String], classOf[String], classOf[String], classOf[ClassLoader], classOf[Array[String]], classOf[Array[Any]], classOf[xLogger])(
|
||||
options.toArray[String]: Array[String], bootClasspath, classpathString, initialCommands, cleanupCommands, loader.orNull, names.toArray[String], values.toArray[Any], log)
|
||||
}
|
||||
|
||||
private[this] def consoleClasspaths(classpath: Seq[File]): (String, String) =
|
||||
{
|
||||
val arguments = new CompilerArguments(scalaInstance, cp)
|
||||
val classpathString = CompilerArguments.absString(arguments.finishClasspath(classpath))
|
||||
val bootClasspath = if(cp.autoBoot) arguments.createBootClasspathFor(classpath) else ""
|
||||
(classpathString, bootClasspath)
|
||||
}
|
||||
def consoleCommandArguments(classpath: Seq[File], options: Seq[String], log: Logger): Seq[String] =
|
||||
{
|
||||
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
|
||||
val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)(
|
||||
classOf[Array[String]], classOf[String], classOf[String], classOf[xLogger])(
|
||||
options.toArray[String]: Array[String], bootClasspath, classpathString, log)
|
||||
argsObj.asInstanceOf[Array[String]].toSeq
|
||||
}
|
||||
def force(log: Logger): Unit = provider(scalaInstance, log)
|
||||
private def call(interfaceClassName: String, methodName: String, log: Logger)(argTypes: Class[_]*)(args: AnyRef*): AnyRef =
|
||||
{
|
||||
val interfaceClass = getInterfaceClass(interfaceClassName, log)
|
||||
val interface = interfaceClass.newInstance.asInstanceOf[AnyRef]
|
||||
val method = interfaceClass.getMethod(methodName, argTypes : _*)
|
||||
try { method.invoke(interface, args: _*) }
|
||||
catch { case e: java.lang.reflect.InvocationTargetException =>
|
||||
e.getCause match {
|
||||
case c: xsbti.CompileFailed => throw new CompileFailed(c.arguments, c.toString, c.problems)
|
||||
case t => throw t
|
||||
}
|
||||
}
|
||||
}
|
||||
private[this] def loader(log: Logger) =
|
||||
{
|
||||
val interfaceJar = provider(scalaInstance, log)
|
||||
// this goes to scalaInstance.loader for scala classes and the loader of this class for xsbti classes
|
||||
val dual = createDualLoader(scalaInstance.loader, getClass.getClassLoader)
|
||||
new URLClassLoader(Array(interfaceJar.toURI.toURL), dual)
|
||||
}
|
||||
private[this] def getInterfaceClass(name: String, log: Logger) = Class.forName(name, true, loader(log))
|
||||
protected def createDualLoader(scalaLoader: ClassLoader, sbtLoader: ClassLoader): ClassLoader =
|
||||
{
|
||||
val xsbtiFilter = (name: String) => name.startsWith("xsbti.")
|
||||
val notXsbtiFilter = (name: String) => !xsbtiFilter(name)
|
||||
new classpath.DualLoader(scalaLoader, notXsbtiFilter, x => true, sbtLoader, xsbtiFilter, x => false)
|
||||
}
|
||||
override def toString = "Analyzing compiler (Scala " + scalaInstance.actualVersion + ")"
|
||||
private[this] def consoleClasspaths(classpath: Seq[File]): (String, String) =
|
||||
{
|
||||
val arguments = new CompilerArguments(scalaInstance, cp)
|
||||
val classpathString = CompilerArguments.absString(arguments.finishClasspath(classpath))
|
||||
val bootClasspath = if (cp.autoBoot) arguments.createBootClasspathFor(classpath) else ""
|
||||
(classpathString, bootClasspath)
|
||||
}
|
||||
def consoleCommandArguments(classpath: Seq[File], options: Seq[String], log: Logger): Seq[String] =
|
||||
{
|
||||
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
|
||||
val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)(
|
||||
classOf[Array[String]], classOf[String], classOf[String], classOf[xLogger])(
|
||||
options.toArray[String]: Array[String], bootClasspath, classpathString, log)
|
||||
argsObj.asInstanceOf[Array[String]].toSeq
|
||||
}
|
||||
def force(log: Logger): Unit = provider(scalaInstance, log)
|
||||
private def call(interfaceClassName: String, methodName: String, log: Logger)(argTypes: Class[_]*)(args: AnyRef*): AnyRef =
|
||||
{
|
||||
val interfaceClass = getInterfaceClass(interfaceClassName, log)
|
||||
val interface = interfaceClass.newInstance.asInstanceOf[AnyRef]
|
||||
val method = interfaceClass.getMethod(methodName, argTypes: _*)
|
||||
try { method.invoke(interface, args: _*) }
|
||||
catch {
|
||||
case e: java.lang.reflect.InvocationTargetException =>
|
||||
e.getCause match {
|
||||
case c: xsbti.CompileFailed => throw new CompileFailed(c.arguments, c.toString, c.problems)
|
||||
case t => throw t
|
||||
}
|
||||
}
|
||||
}
|
||||
private[this] def loader(log: Logger) =
|
||||
{
|
||||
val interfaceJar = provider(scalaInstance, log)
|
||||
// this goes to scalaInstance.loader for scala classes and the loader of this class for xsbti classes
|
||||
val dual = createDualLoader(scalaInstance.loader, getClass.getClassLoader)
|
||||
new URLClassLoader(Array(interfaceJar.toURI.toURL), dual)
|
||||
}
|
||||
private[this] def getInterfaceClass(name: String, log: Logger) = Class.forName(name, true, loader(log))
|
||||
protected def createDualLoader(scalaLoader: ClassLoader, sbtLoader: ClassLoader): ClassLoader =
|
||||
{
|
||||
val xsbtiFilter = (name: String) => name.startsWith("xsbti.")
|
||||
val notXsbtiFilter = (name: String) => !xsbtiFilter(name)
|
||||
new classpath.DualLoader(scalaLoader, notXsbtiFilter, x => true, sbtLoader, xsbtiFilter, x => false)
|
||||
}
|
||||
override def toString = "Analyzing compiler (Scala " + scalaInstance.actualVersion + ")"
|
||||
}
|
||||
object AnalyzingCompiler
|
||||
{
|
||||
import sbt.IO.{copy, createDirectory, zip, jars, unzip, withTemporaryDirectory}
|
||||
object AnalyzingCompiler {
|
||||
import sbt.IO.{ copy, createDirectory, zip, jars, unzip, withTemporaryDirectory }
|
||||
|
||||
// Note: The Scala build now depends on some details of this method:
|
||||
// https://github.com/jsuereth/scala/commit/3431860048df8d2a381fb85a526097e00154eae0
|
||||
/** Extract sources from source jars, compile them with the xsbti interfaces on the classpath, and package the compiled classes and
|
||||
* any resources from the source jars into a final jar.*/
|
||||
def compileSources(sourceJars: Iterable[File], targetJar: File, xsbtiJars: Iterable[File], id: String, compiler: RawCompiler, log: Logger)
|
||||
{
|
||||
val isSource = (f: File) => isSourceName(f.getName)
|
||||
def keepIfSource(files: Set[File]): Set[File] = if(files.exists(isSource)) files else Set()
|
||||
// Note: The Scala build now depends on some details of this method:
|
||||
// https://github.com/jsuereth/scala/commit/3431860048df8d2a381fb85a526097e00154eae0
|
||||
/**
|
||||
* Extract sources from source jars, compile them with the xsbti interfaces on the classpath, and package the compiled classes and
|
||||
* any resources from the source jars into a final jar.
|
||||
*/
|
||||
def compileSources(sourceJars: Iterable[File], targetJar: File, xsbtiJars: Iterable[File], id: String, compiler: RawCompiler, log: Logger) {
|
||||
val isSource = (f: File) => isSourceName(f.getName)
|
||||
def keepIfSource(files: Set[File]): Set[File] = if (files.exists(isSource)) files else Set()
|
||||
|
||||
withTemporaryDirectory { dir =>
|
||||
val extractedSources = (Set[File]() /: sourceJars) { (extracted, sourceJar)=> extracted ++ keepIfSource(unzip(sourceJar, dir)) }
|
||||
val (sourceFiles, resources) = extractedSources.partition(isSource)
|
||||
withTemporaryDirectory { outputDirectory =>
|
||||
log.info("'" + id + "' not yet compiled for Scala " + compiler.scalaInstance.actualVersion + ". Compiling...")
|
||||
val start = System.currentTimeMillis
|
||||
try
|
||||
{
|
||||
compiler(sourceFiles.toSeq, compiler.scalaInstance.libraryJar +: (xsbtiJars.toSeq ++ sourceJars), outputDirectory, "-nowarn" :: Nil)
|
||||
log.info(" Compilation completed in " + (System.currentTimeMillis - start) / 1000.0 + " s")
|
||||
}
|
||||
catch { case e: xsbti.CompileFailed => throw new CompileFailed(e.arguments, "Error compiling sbt component '" + id + "'", e.problems) }
|
||||
import sbt.Path._
|
||||
copy(resources x rebase(dir, outputDirectory))
|
||||
zip((outputDirectory ***) x_! relativeTo(outputDirectory), targetJar)
|
||||
}
|
||||
}
|
||||
}
|
||||
private def isSourceName(name: String): Boolean = name.endsWith(".scala") || name.endsWith(".java")
|
||||
withTemporaryDirectory { dir =>
|
||||
val extractedSources = (Set[File]() /: sourceJars) { (extracted, sourceJar) => extracted ++ keepIfSource(unzip(sourceJar, dir)) }
|
||||
val (sourceFiles, resources) = extractedSources.partition(isSource)
|
||||
withTemporaryDirectory { outputDirectory =>
|
||||
log.info("'" + id + "' not yet compiled for Scala " + compiler.scalaInstance.actualVersion + ". Compiling...")
|
||||
val start = System.currentTimeMillis
|
||||
try {
|
||||
compiler(sourceFiles.toSeq, compiler.scalaInstance.libraryJar +: (xsbtiJars.toSeq ++ sourceJars), outputDirectory, "-nowarn" :: Nil)
|
||||
log.info(" Compilation completed in " + (System.currentTimeMillis - start) / 1000.0 + " s")
|
||||
} catch { case e: xsbti.CompileFailed => throw new CompileFailed(e.arguments, "Error compiling sbt component '" + id + "'", e.problems) }
|
||||
import sbt.Path._
|
||||
copy(resources x rebase(dir, outputDirectory))
|
||||
zip((outputDirectory ***) x_! relativeTo(outputDirectory), targetJar)
|
||||
}
|
||||
}
|
||||
}
|
||||
private def isSourceName(name: String): Boolean = name.endsWith(".scala") || name.endsWith(".java")
|
||||
}
|
||||
|
||||
private[this] object IgnoreProgress extends CompileProgress {
|
||||
def startUnit(phase: String, unitPath: String) {}
|
||||
def advance(current: Int, total: Int) = true
|
||||
def startUnit(phase: String, unitPath: String) {}
|
||||
def advance(current: Int, total: Int) = true
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,70 +4,67 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import xsbti.ArtifactInfo
|
||||
import scala.util
|
||||
import java.io.File
|
||||
import CompilerArguments.{abs, absString, BootClasspathOption}
|
||||
import xsbti.ArtifactInfo
|
||||
import scala.util
|
||||
import java.io.File
|
||||
import CompilerArguments.{ abs, absString, BootClasspathOption }
|
||||
|
||||
/** Forms the list of options that is passed to the compiler from the required inputs and other options.
|
||||
* The directory containing scala-library.jar and scala-compiler.jar (scalaLibDirectory) is required in
|
||||
* order to add these jars to the boot classpath. The 'scala.home' property must be unset because Scala
|
||||
* puts jars in that directory on the bootclasspath. Because we use multiple Scala versions,
|
||||
* this would lead to compiling against the wrong library jar.*/
|
||||
final class CompilerArguments(scalaInstance: xsbti.compile.ScalaInstance, cp: xsbti.compile.ClasspathOptions)
|
||||
{
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: Option[File], options: Seq[String]): Seq[String] =
|
||||
{
|
||||
checkScalaHomeUnset()
|
||||
val cpWithCompiler = finishClasspath(classpath)
|
||||
// Scala compiler's treatment of empty classpath is troublesome (as of 2.9.1).
|
||||
// We append a random dummy element as workaround.
|
||||
val dummy = "dummy_" + Integer.toHexString(util.Random.nextInt)
|
||||
val classpathOption = Seq("-classpath", if(cpWithCompiler.isEmpty) dummy else absString(cpWithCompiler))
|
||||
val outputOption = outputDirectory map {out => Seq("-d", out.getAbsolutePath)} getOrElse Seq()
|
||||
options ++ outputOption ++ bootClasspathOption(hasLibrary(classpath)) ++ classpathOption ++ abs(sources)
|
||||
}
|
||||
def finishClasspath(classpath: Seq[File]): Seq[File] =
|
||||
filterLibrary(classpath) ++ include(cp.compiler, scalaInstance.compilerJar) ++ include(cp.extra, scalaInstance.otherJars : _*)
|
||||
private[this] def include(flag: Boolean, jars: File*) = if(flag) jars else Nil
|
||||
private[this] def abs(files: Seq[File]) = files.map(_.getAbsolutePath).sortWith(_ < _)
|
||||
private[this] def checkScalaHomeUnset()
|
||||
{
|
||||
val scalaHome = System.getProperty("scala.home")
|
||||
assert((scalaHome eq null) || scalaHome.isEmpty, "'scala.home' should not be set (was " + scalaHome + ")")
|
||||
}
|
||||
def createBootClasspathFor(classpath: Seq[File]) = createBootClasspath(hasLibrary(classpath) || cp.compiler || cp.extra)
|
||||
/**
|
||||
* Forms the list of options that is passed to the compiler from the required inputs and other options.
|
||||
* The directory containing scala-library.jar and scala-compiler.jar (scalaLibDirectory) is required in
|
||||
* order to add these jars to the boot classpath. The 'scala.home' property must be unset because Scala
|
||||
* puts jars in that directory on the bootclasspath. Because we use multiple Scala versions,
|
||||
* this would lead to compiling against the wrong library jar.
|
||||
*/
|
||||
final class CompilerArguments(scalaInstance: xsbti.compile.ScalaInstance, cp: xsbti.compile.ClasspathOptions) {
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: Option[File], options: Seq[String]): Seq[String] =
|
||||
{
|
||||
checkScalaHomeUnset()
|
||||
val cpWithCompiler = finishClasspath(classpath)
|
||||
// Scala compiler's treatment of empty classpath is troublesome (as of 2.9.1).
|
||||
// We append a random dummy element as workaround.
|
||||
val dummy = "dummy_" + Integer.toHexString(util.Random.nextInt)
|
||||
val classpathOption = Seq("-classpath", if (cpWithCompiler.isEmpty) dummy else absString(cpWithCompiler))
|
||||
val outputOption = outputDirectory map { out => Seq("-d", out.getAbsolutePath) } getOrElse Seq()
|
||||
options ++ outputOption ++ bootClasspathOption(hasLibrary(classpath)) ++ classpathOption ++ abs(sources)
|
||||
}
|
||||
def finishClasspath(classpath: Seq[File]): Seq[File] =
|
||||
filterLibrary(classpath) ++ include(cp.compiler, scalaInstance.compilerJar) ++ include(cp.extra, scalaInstance.otherJars: _*)
|
||||
private[this] def include(flag: Boolean, jars: File*) = if (flag) jars else Nil
|
||||
private[this] def abs(files: Seq[File]) = files.map(_.getAbsolutePath).sortWith(_ < _)
|
||||
private[this] def checkScalaHomeUnset() {
|
||||
val scalaHome = System.getProperty("scala.home")
|
||||
assert((scalaHome eq null) || scalaHome.isEmpty, "'scala.home' should not be set (was " + scalaHome + ")")
|
||||
}
|
||||
def createBootClasspathFor(classpath: Seq[File]) = createBootClasspath(hasLibrary(classpath) || cp.compiler || cp.extra)
|
||||
|
||||
/** Add the correct Scala library jar to the boot classpath if `addLibrary` is true.*/
|
||||
def createBootClasspath(addLibrary: Boolean) =
|
||||
{
|
||||
val originalBoot = System.getProperty("sun.boot.class.path", "")
|
||||
if(addLibrary)
|
||||
{
|
||||
val newBootPrefix = if(originalBoot.isEmpty) "" else originalBoot + File.pathSeparator
|
||||
newBootPrefix + scalaInstance.libraryJar.getAbsolutePath
|
||||
}
|
||||
else
|
||||
originalBoot
|
||||
}
|
||||
def filterLibrary(classpath: Seq[File]) = if(cp.filterLibrary) classpath filterNot isScalaLibrary else classpath
|
||||
def hasLibrary(classpath: Seq[File]) = classpath exists isScalaLibrary
|
||||
private[this] val isScalaLibrary: File => Boolean = file => {
|
||||
val name = file.getName
|
||||
(name contains ArtifactInfo.ScalaLibraryID) || file.getName == scalaInstance.libraryJar.getName
|
||||
}
|
||||
def bootClasspathOption(addLibrary: Boolean) = if(cp.autoBoot) Seq(BootClasspathOption, createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspath(addLibrary: Boolean) = if(cp.autoBoot) IO.parseClasspath(createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspathFor(classpath: Seq[File]) = bootClasspath(hasLibrary(classpath))
|
||||
/** Add the correct Scala library jar to the boot classpath if `addLibrary` is true.*/
|
||||
def createBootClasspath(addLibrary: Boolean) =
|
||||
{
|
||||
val originalBoot = System.getProperty("sun.boot.class.path", "")
|
||||
if (addLibrary) {
|
||||
val newBootPrefix = if (originalBoot.isEmpty) "" else originalBoot + File.pathSeparator
|
||||
newBootPrefix + scalaInstance.libraryJar.getAbsolutePath
|
||||
} else
|
||||
originalBoot
|
||||
}
|
||||
def filterLibrary(classpath: Seq[File]) = if (cp.filterLibrary) classpath filterNot isScalaLibrary else classpath
|
||||
def hasLibrary(classpath: Seq[File]) = classpath exists isScalaLibrary
|
||||
private[this] val isScalaLibrary: File => Boolean = file => {
|
||||
val name = file.getName
|
||||
(name contains ArtifactInfo.ScalaLibraryID) || file.getName == scalaInstance.libraryJar.getName
|
||||
}
|
||||
def bootClasspathOption(addLibrary: Boolean) = if (cp.autoBoot) Seq(BootClasspathOption, createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspath(addLibrary: Boolean) = if (cp.autoBoot) IO.parseClasspath(createBootClasspath(addLibrary)) else Nil
|
||||
def bootClasspathFor(classpath: Seq[File]) = bootClasspath(hasLibrary(classpath))
|
||||
|
||||
import Path._
|
||||
def extClasspath: Seq[File] = ( IO.parseClasspath(System.getProperty("java.ext.dirs")) * "*.jar" ).get
|
||||
import Path._
|
||||
def extClasspath: Seq[File] = (IO.parseClasspath(System.getProperty("java.ext.dirs")) * "*.jar").get
|
||||
}
|
||||
object CompilerArguments
|
||||
{
|
||||
val BootClasspathOption = "-bootclasspath"
|
||||
def abs(files: Seq[File]): Seq[String] = files.map(_.getAbsolutePath)
|
||||
def abs(files: Set[File]): Seq[String] = abs(files.toSeq)
|
||||
def absString(files: Seq[File]): String = abs(files).mkString(File.pathSeparator)
|
||||
def absString(files: Set[File]): String = absString(files.toSeq)
|
||||
object CompilerArguments {
|
||||
val BootClasspathOption = "-bootclasspath"
|
||||
def abs(files: Seq[File]): Seq[String] = files.map(_.getAbsolutePath)
|
||||
def abs(files: Set[File]): Seq[String] = abs(files.toSeq)
|
||||
def absString(files: Seq[File]): String = abs(files).mkString(File.pathSeparator)
|
||||
def absString(files: Set[File]): String = absString(files.toSeq)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,52 +1,49 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import xsbti.{Logger => xLogger, Reporter}
|
||||
import xsbti.compile.{CachedCompiler, CachedCompilerProvider, GlobalsCache, Output}
|
||||
import Logger.f0
|
||||
import java.io.File
|
||||
import java.util.{LinkedHashMap,Map}
|
||||
import xsbti.{ Logger => xLogger, Reporter }
|
||||
import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, GlobalsCache, Output }
|
||||
import Logger.f0
|
||||
import java.io.File
|
||||
import java.util.{ LinkedHashMap, Map }
|
||||
|
||||
private final class CompilerCache(val maxInstances: Int) extends GlobalsCache
|
||||
{
|
||||
private[this] val cache = lru[CompilerKey, CachedCompiler](maxInstances)
|
||||
private[this] def lru[A,B](max: Int) = new LinkedHashMap[A,B](8, 0.75f, true) {
|
||||
override def removeEldestEntry(eldest: Map.Entry[A,B]): Boolean = size > max
|
||||
}
|
||||
def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = synchronized
|
||||
{
|
||||
val key = CompilerKey(dropSources(args.toList), c.scalaInstance.actualVersion)
|
||||
if(forceNew) cache.remove(key)
|
||||
cache.get(key) match {
|
||||
case null =>
|
||||
log.debug(f0("Compiler cache miss. " + key.toString))
|
||||
put(key, c.newCachedCompiler(args, output, log, reporter, /* resident = */ !forceNew))
|
||||
case cc =>
|
||||
log.debug(f0("Compiler cache hit (" + cc.hashCode.toHexString + "). " + key.toString))
|
||||
cc
|
||||
}
|
||||
}
|
||||
def clear(): Unit = synchronized { cache.clear() }
|
||||
private final class CompilerCache(val maxInstances: Int) extends GlobalsCache {
|
||||
private[this] val cache = lru[CompilerKey, CachedCompiler](maxInstances)
|
||||
private[this] def lru[A, B](max: Int) = new LinkedHashMap[A, B](8, 0.75f, true) {
|
||||
override def removeEldestEntry(eldest: Map.Entry[A, B]): Boolean = size > max
|
||||
}
|
||||
def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = synchronized {
|
||||
val key = CompilerKey(dropSources(args.toList), c.scalaInstance.actualVersion)
|
||||
if (forceNew) cache.remove(key)
|
||||
cache.get(key) match {
|
||||
case null =>
|
||||
log.debug(f0("Compiler cache miss. " + key.toString))
|
||||
put(key, c.newCachedCompiler(args, output, log, reporter, /* resident = */ !forceNew))
|
||||
case cc =>
|
||||
log.debug(f0("Compiler cache hit (" + cc.hashCode.toHexString + "). " + key.toString))
|
||||
cc
|
||||
}
|
||||
}
|
||||
def clear(): Unit = synchronized { cache.clear() }
|
||||
|
||||
private[this] def dropSources(args: Seq[String]): Seq[String] =
|
||||
args.filterNot(arg => arg.endsWith(".scala") || arg.endsWith(".java"))
|
||||
private[this] def dropSources(args: Seq[String]): Seq[String] =
|
||||
args.filterNot(arg => arg.endsWith(".scala") || arg.endsWith(".java"))
|
||||
|
||||
private[this] def put(key: CompilerKey, cc: CachedCompiler): CachedCompiler =
|
||||
{
|
||||
cache.put(key, cc)
|
||||
cc
|
||||
}
|
||||
private[this] final case class CompilerKey(args: Seq[String], scalaVersion: String) {
|
||||
override def toString = "scala " + scalaVersion + ", args: " + args.mkString(" ")
|
||||
}
|
||||
private[this] def put(key: CompilerKey, cc: CachedCompiler): CachedCompiler =
|
||||
{
|
||||
cache.put(key, cc)
|
||||
cc
|
||||
}
|
||||
private[this] final case class CompilerKey(args: Seq[String], scalaVersion: String) {
|
||||
override def toString = "scala " + scalaVersion + ", args: " + args.mkString(" ")
|
||||
}
|
||||
}
|
||||
object CompilerCache
|
||||
{
|
||||
def apply(maxInstances: Int): GlobalsCache = new CompilerCache(maxInstances)
|
||||
object CompilerCache {
|
||||
def apply(maxInstances: Int): GlobalsCache = new CompilerCache(maxInstances)
|
||||
|
||||
val fresh: GlobalsCache = new GlobalsCache {
|
||||
def clear() {}
|
||||
def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler =
|
||||
c.newCachedCompiler(args, output, log, reporter, /*resident = */ false)
|
||||
}
|
||||
val fresh: GlobalsCache = new GlobalsCache {
|
||||
def clear() {}
|
||||
def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler =
|
||||
c.newCachedCompiler(args, output, log, reporter, /*resident = */ false)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,15 +1,13 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
trait CompilerInterfaceProvider
|
||||
{
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File
|
||||
trait CompilerInterfaceProvider {
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File
|
||||
}
|
||||
object CompilerInterfaceProvider
|
||||
{
|
||||
def constant(file: File): CompilerInterfaceProvider = new CompilerInterfaceProvider {
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = file
|
||||
}
|
||||
object CompilerInterfaceProvider {
|
||||
def constant(file: File): CompilerInterfaceProvider = new CompilerInterfaceProvider {
|
||||
def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = file
|
||||
}
|
||||
}
|
||||
|
|
@ -5,20 +5,20 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import xsbti.compile.{Output, SingleOutput, MultipleOutput}
|
||||
import java.io.File
|
||||
import xsbti.compile.{ Output, SingleOutput, MultipleOutput }
|
||||
import java.io.File
|
||||
|
||||
object CompileOutput {
|
||||
def apply(dir: File): Output = new SingleOutput {
|
||||
def outputDirectory = dir
|
||||
}
|
||||
def apply(dir: File): Output = new SingleOutput {
|
||||
def outputDirectory = dir
|
||||
}
|
||||
|
||||
def apply(groups: (File, File)*): Output = new MultipleOutput {
|
||||
def outputGroups = groups.toArray map {
|
||||
case (src, out) => new MultipleOutput.OutputGroup {
|
||||
def sourceDirectory = src
|
||||
def outputDirectory = out
|
||||
}
|
||||
}
|
||||
}
|
||||
def apply(groups: (File, File)*): Output = new MultipleOutput {
|
||||
def outputGroups = groups.toArray map {
|
||||
case (src, out) => new MultipleOutput.OutputGroup {
|
||||
def sourceDirectory = src
|
||||
def outputDirectory = out
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,124 +4,120 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import java.io.{File, PrintWriter}
|
||||
import java.io.{ File, PrintWriter }
|
||||
|
||||
abstract class JavacContract(val name: String, val clazz: String) {
|
||||
def exec(args: Array[String], writer: PrintWriter): Int
|
||||
def exec(args: Array[String], writer: PrintWriter): Int
|
||||
}
|
||||
trait JavaCompiler extends xsbti.compile.JavaCompiler
|
||||
{
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger)
|
||||
trait JavaCompiler extends xsbti.compile.JavaCompiler {
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger)
|
||||
|
||||
def compile(sources: Array[File], classpath: Array[File], output: xsbti.compile.Output, options: Array[String], log: xsbti.Logger): Unit = {
|
||||
val outputDirectory = output match {
|
||||
case single: xsbti.compile.SingleOutput => single.outputDirectory
|
||||
case _ => throw new RuntimeException("Javac doesn't support multiple output directories")
|
||||
}
|
||||
apply(sources, classpath, outputDirectory, options)(log)
|
||||
}
|
||||
def compile(sources: Array[File], classpath: Array[File], output: xsbti.compile.Output, options: Array[String], log: xsbti.Logger): Unit = {
|
||||
val outputDirectory = output match {
|
||||
case single: xsbti.compile.SingleOutput => single.outputDirectory
|
||||
case _ => throw new RuntimeException("Javac doesn't support multiple output directories")
|
||||
}
|
||||
apply(sources, classpath, outputDirectory, options)(log)
|
||||
}
|
||||
|
||||
def onArgs(f: Seq[String] => Unit): JavaCompiler
|
||||
def onArgs(f: Seq[String] => Unit): JavaCompiler
|
||||
}
|
||||
trait Javadoc
|
||||
{
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger)
|
||||
trait Javadoc {
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger)
|
||||
|
||||
def onArgs(f: Seq[String] => Unit): Javadoc
|
||||
def onArgs(f: Seq[String] => Unit): Javadoc
|
||||
}
|
||||
trait JavaTool extends Javadoc with JavaCompiler
|
||||
{
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) =
|
||||
compile(JavaCompiler.javac, sources, classpath, outputDirectory, options)(log)
|
||||
trait JavaTool extends Javadoc with JavaCompiler {
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) =
|
||||
compile(JavaCompiler.javac, sources, classpath, outputDirectory, options)(log)
|
||||
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) =
|
||||
compile(JavaCompiler.javadoc, sources, classpath, outputDirectory, options)(log)
|
||||
def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) =
|
||||
compile(JavaCompiler.javadoc, sources, classpath, outputDirectory, options)(log)
|
||||
|
||||
def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit
|
||||
def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit
|
||||
|
||||
def onArgs(f: Seq[String] => Unit): JavaTool
|
||||
def onArgs(f: Seq[String] => Unit): JavaTool
|
||||
}
|
||||
object JavaCompiler
|
||||
{
|
||||
type Fork = (JavacContract, Seq[String], Logger) => Int
|
||||
object JavaCompiler {
|
||||
type Fork = (JavacContract, Seq[String], Logger) => Int
|
||||
|
||||
val javac = new JavacContract("javac", "com.sun.tools.javac.Main") {
|
||||
def exec(args: Array[String], writer: PrintWriter) = {
|
||||
val m = Class.forName(clazz).getDeclaredMethod("compile", classOf[Array[String]], classOf[PrintWriter])
|
||||
m.invoke(null, args, writer).asInstanceOf[java.lang.Integer].intValue
|
||||
}
|
||||
}
|
||||
val javadoc = new JavacContract("javadoc", "com.sun.tools.javadoc.Main") {
|
||||
def exec(args: Array[String], writer: PrintWriter) = {
|
||||
val m = Class.forName(clazz).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]])
|
||||
m.invoke(null, name, writer, writer, writer, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue
|
||||
}
|
||||
}
|
||||
val javac = new JavacContract("javac", "com.sun.tools.javac.Main") {
|
||||
def exec(args: Array[String], writer: PrintWriter) = {
|
||||
val m = Class.forName(clazz).getDeclaredMethod("compile", classOf[Array[String]], classOf[PrintWriter])
|
||||
m.invoke(null, args, writer).asInstanceOf[java.lang.Integer].intValue
|
||||
}
|
||||
}
|
||||
val javadoc = new JavacContract("javadoc", "com.sun.tools.javadoc.Main") {
|
||||
def exec(args: Array[String], writer: PrintWriter) = {
|
||||
val m = Class.forName(clazz).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]])
|
||||
m.invoke(null, name, writer, writer, writer, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue
|
||||
}
|
||||
}
|
||||
|
||||
def construct(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = new JavaTool0(f, cp, scalaInstance, _ => ())
|
||||
def construct(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = new JavaTool0(f, cp, scalaInstance, _ => ())
|
||||
|
||||
private[this] class JavaTool0(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance, onArgsF: Seq[String] => Unit) extends JavaTool
|
||||
{
|
||||
def onArgs(g: Seq[String] => Unit): JavaTool = new JavaTool0(f, cp, scalaInstance, g)
|
||||
def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] =
|
||||
{
|
||||
val augmentedClasspath = if(cp.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath
|
||||
val javaCp = ClasspathOptions.javac(cp.compiler)
|
||||
(new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options)
|
||||
}
|
||||
def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) {
|
||||
val arguments = commandArguments(sources, classpath, outputDirectory, options, log)
|
||||
onArgsF(arguments)
|
||||
val code: Int = f(contract, arguments, log)
|
||||
log.debug(contract.name + " returned exit code: " + code)
|
||||
if( code != 0 ) throw new CompileFailed(arguments.toArray, contract.name + " returned nonzero exit code", Array())
|
||||
}
|
||||
}
|
||||
def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
|
||||
construct(directOrForkJavac, cp, scalaInstance)
|
||||
|
||||
def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool =
|
||||
construct(directJavac, cp, scalaInstance)
|
||||
|
||||
def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
|
||||
construct(forkJavac, cp, scalaInstance)
|
||||
|
||||
def directOrForkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
try { directJavac(contract, arguments, log) }
|
||||
catch { case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
|
||||
log.debug(contract.clazz + " not found with appropriate method signature; forking " + contract.name + " instead")
|
||||
forkJavac(doFork)(contract, arguments, log)
|
||||
}
|
||||
private[this] class JavaTool0(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance, onArgsF: Seq[String] => Unit) extends JavaTool {
|
||||
def onArgs(g: Seq[String] => Unit): JavaTool = new JavaTool0(f, cp, scalaInstance, g)
|
||||
def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] =
|
||||
{
|
||||
val augmentedClasspath = if (cp.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath
|
||||
val javaCp = ClasspathOptions.javac(cp.compiler)
|
||||
(new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options)
|
||||
}
|
||||
def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) {
|
||||
val arguments = commandArguments(sources, classpath, outputDirectory, options, log)
|
||||
onArgsF(arguments)
|
||||
val code: Int = f(contract, arguments, log)
|
||||
log.debug(contract.name + " returned exit code: " + code)
|
||||
if (code != 0) throw new CompileFailed(arguments.toArray, contract.name + " returned nonzero exit code", Array())
|
||||
}
|
||||
}
|
||||
def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
|
||||
construct(directOrForkJavac, cp, scalaInstance)
|
||||
|
||||
/** `doFork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/
|
||||
def forkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
{
|
||||
val (jArgs, nonJArgs) = arguments.partition(_.startsWith("-J"))
|
||||
def externalJavac(argFile: File) = doFork(contract, jArgs :+ ("@" + normalizeSlash(argFile.getAbsolutePath)), log)
|
||||
withArgumentFile(nonJArgs)(externalJavac)
|
||||
}
|
||||
val directJavac = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
{
|
||||
val logger = new LoggerWriter(log)
|
||||
val writer = new PrintWriter(logger)
|
||||
val argsArray = arguments.toArray
|
||||
log.debug("Attempting to call " + contract.name + " directly...")
|
||||
def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool =
|
||||
construct(directJavac, cp, scalaInstance)
|
||||
|
||||
var exitCode = -1
|
||||
try { exitCode = contract.exec(argsArray, writer) }
|
||||
finally { logger.flushLines( if(exitCode == 0) Level.Warn else Level.Error) }
|
||||
exitCode
|
||||
}
|
||||
def withArgumentFile[T](args: Seq[String])(f: File => T): T =
|
||||
{
|
||||
import IO.{Newline, withTemporaryDirectory, write}
|
||||
withTemporaryDirectory { tmp =>
|
||||
val argFile = new File(tmp, "argfile")
|
||||
write(argFile, args.map(escapeSpaces).mkString(Newline))
|
||||
f(argFile)
|
||||
}
|
||||
}
|
||||
// javac's argument file seems to allow naive space escaping with quotes. escaping a quote with a backslash does not work
|
||||
def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
|
||||
def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
|
||||
def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool =
|
||||
construct(forkJavac, cp, scalaInstance)
|
||||
|
||||
def directOrForkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
try { directJavac(contract, arguments, log) }
|
||||
catch {
|
||||
case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
|
||||
log.debug(contract.clazz + " not found with appropriate method signature; forking " + contract.name + " instead")
|
||||
forkJavac(doFork)(contract, arguments, log)
|
||||
}
|
||||
|
||||
/** `doFork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/
|
||||
def forkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
{
|
||||
val (jArgs, nonJArgs) = arguments.partition(_.startsWith("-J"))
|
||||
def externalJavac(argFile: File) = doFork(contract, jArgs :+ ("@" + normalizeSlash(argFile.getAbsolutePath)), log)
|
||||
withArgumentFile(nonJArgs)(externalJavac)
|
||||
}
|
||||
val directJavac = (contract: JavacContract, arguments: Seq[String], log: Logger) =>
|
||||
{
|
||||
val logger = new LoggerWriter(log)
|
||||
val writer = new PrintWriter(logger)
|
||||
val argsArray = arguments.toArray
|
||||
log.debug("Attempting to call " + contract.name + " directly...")
|
||||
|
||||
var exitCode = -1
|
||||
try { exitCode = contract.exec(argsArray, writer) }
|
||||
finally { logger.flushLines(if (exitCode == 0) Level.Warn else Level.Error) }
|
||||
exitCode
|
||||
}
|
||||
def withArgumentFile[T](args: Seq[String])(f: File => T): T =
|
||||
{
|
||||
import IO.{ Newline, withTemporaryDirectory, write }
|
||||
withTemporaryDirectory { tmp =>
|
||||
val argFile = new File(tmp, "argfile")
|
||||
write(argFile, args.map(escapeSpaces).mkString(Newline))
|
||||
f(argFile)
|
||||
}
|
||||
}
|
||||
// javac's argument file seems to allow naive space escaping with quotes. escaping a quote with a backslash does not work
|
||||
def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"'
|
||||
def normalizeSlash(s: String) = s.replace(File.separatorChar, '/')
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,34 +4,33 @@
|
|||
package sbt
|
||||
package compiler
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
/** A basic interface to the compiler. It is called in the same virtual machine, but no dependency analysis is done. This
|
||||
* is used, for example, to compile the interface/plugin code.
|
||||
* If `explicitClasspath` is true, the bootclasspath and classpath are not augmented. If it is false,
|
||||
* the scala-library.jar from `scalaInstance` is put on bootclasspath and the scala-compiler jar goes on the classpath.*/
|
||||
class RawCompiler(val scalaInstance: xsbti.compile.ScalaInstance, cp: ClasspathOptions, log: Logger)
|
||||
{
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])
|
||||
{
|
||||
// reflection is required for binary compatibility
|
||||
// The following import ensures there is a compile error if the identifiers change,
|
||||
// but should not be otherwise directly referenced
|
||||
import scala.tools.nsc.Main.{process => _}
|
||||
/**
|
||||
* A basic interface to the compiler. It is called in the same virtual machine, but no dependency analysis is done. This
|
||||
* is used, for example, to compile the interface/plugin code.
|
||||
* If `explicitClasspath` is true, the bootclasspath and classpath are not augmented. If it is false,
|
||||
* the scala-library.jar from `scalaInstance` is put on bootclasspath and the scala-compiler jar goes on the classpath.
|
||||
*/
|
||||
class RawCompiler(val scalaInstance: xsbti.compile.ScalaInstance, cp: ClasspathOptions, log: Logger) {
|
||||
def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String]) {
|
||||
// reflection is required for binary compatibility
|
||||
// The following import ensures there is a compile error if the identifiers change,
|
||||
// but should not be otherwise directly referenced
|
||||
import scala.tools.nsc.Main.{ process => _ }
|
||||
|
||||
val arguments = compilerArguments(sources, classpath, Some(outputDirectory), options)
|
||||
log.debug("Plain interface to Scala compiler " + scalaInstance.actualVersion + " with arguments: " + arguments.mkString("\n\t", "\n\t", ""))
|
||||
val mainClass = Class.forName("scala.tools.nsc.Main", true, scalaInstance.loader)
|
||||
val process = mainClass.getMethod("process", classOf[Array[String]])
|
||||
process.invoke(null, arguments.toArray)
|
||||
checkForFailure(mainClass, arguments.toArray)
|
||||
}
|
||||
def compilerArguments = new CompilerArguments(scalaInstance, cp)
|
||||
protected def checkForFailure(mainClass: Class[_], args: Array[String])
|
||||
{
|
||||
val reporter = mainClass.getMethod("reporter").invoke(null)
|
||||
val failed = reporter.getClass.getMethod("hasErrors").invoke(reporter).asInstanceOf[Boolean]
|
||||
if(failed) throw new CompileFailed(args, "Plain compile failed", Array())
|
||||
}
|
||||
val arguments = compilerArguments(sources, classpath, Some(outputDirectory), options)
|
||||
log.debug("Plain interface to Scala compiler " + scalaInstance.actualVersion + " with arguments: " + arguments.mkString("\n\t", "\n\t", ""))
|
||||
val mainClass = Class.forName("scala.tools.nsc.Main", true, scalaInstance.loader)
|
||||
val process = mainClass.getMethod("process", classOf[Array[String]])
|
||||
process.invoke(null, arguments.toArray)
|
||||
checkForFailure(mainClass, arguments.toArray)
|
||||
}
|
||||
def compilerArguments = new CompilerArguments(scalaInstance, cp)
|
||||
protected def checkForFailure(mainClass: Class[_], args: Array[String]) {
|
||||
val reporter = mainClass.getMethod("reporter").invoke(null)
|
||||
val failed = reporter.getClass.getMethod("hasErrors").invoke(reporter).asInstanceOf[Boolean]
|
||||
if (failed) throw new CompileFailed(args, "Plain compile failed", Array())
|
||||
}
|
||||
}
|
||||
class CompileFailed(val arguments: Array[String], override val toString: String, val problems: Array[xsbti.Problem]) extends xsbti.CompileFailed with FeedbackProvidedException
|
||||
|
|
|
|||
|
|
@ -6,71 +6,69 @@ package sbt
|
|||
import java.io.File
|
||||
import java.net.URL
|
||||
|
||||
final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL], extraAttributes: Map[String,String])
|
||||
{
|
||||
def extra(attributes: (String,String)*) = Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes ++ ModuleID.checkE(attributes))
|
||||
final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL], extraAttributes: Map[String, String]) {
|
||||
def extra(attributes: (String, String)*) = Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes ++ ModuleID.checkE(attributes))
|
||||
}
|
||||
|
||||
import Configurations.{config, Docs, Optional, Pom, Sources, Test}
|
||||
import Configurations.{ config, Docs, Optional, Pom, Sources, Test }
|
||||
|
||||
object Artifact
|
||||
{
|
||||
def apply(name: String): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None)
|
||||
def apply(name: String, extra: Map[String,String]): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None, extra)
|
||||
def apply(name: String, classifier: String): Artifact = Artifact(name, DefaultType, DefaultExtension, Some(classifier), Nil, None)
|
||||
def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, None, Nil, None)
|
||||
def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = Artifact(name, `type`, extension, Some(classifier), Nil, None)
|
||||
def apply(name: String, url: URL): Artifact =Artifact(name, extract(url, DefaultType), extract(url, DefaultExtension), None, Nil, Some(url))
|
||||
def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]): Artifact =
|
||||
Artifact(name, `type`, extension, classifier, configurations, url, Map.empty)
|
||||
object Artifact {
|
||||
def apply(name: String): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None)
|
||||
def apply(name: String, extra: Map[String, String]): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None, extra)
|
||||
def apply(name: String, classifier: String): Artifact = Artifact(name, DefaultType, DefaultExtension, Some(classifier), Nil, None)
|
||||
def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, None, Nil, None)
|
||||
def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = Artifact(name, `type`, extension, Some(classifier), Nil, None)
|
||||
def apply(name: String, url: URL): Artifact = Artifact(name, extract(url, DefaultType), extract(url, DefaultExtension), None, Nil, Some(url))
|
||||
def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]): Artifact =
|
||||
Artifact(name, `type`, extension, classifier, configurations, url, Map.empty)
|
||||
|
||||
val DefaultExtension = "jar"
|
||||
val DefaultType = "jar"
|
||||
val DefaultExtension = "jar"
|
||||
val DefaultType = "jar"
|
||||
|
||||
def sources(name: String) = classified(name, SourceClassifier)
|
||||
def javadoc(name: String) = classified(name, DocClassifier)
|
||||
def pom(name: String) = Artifact(name, PomType, PomType, None, Pom :: Nil, None)
|
||||
def sources(name: String) = classified(name, SourceClassifier)
|
||||
def javadoc(name: String) = classified(name, DocClassifier)
|
||||
def pom(name: String) = Artifact(name, PomType, PomType, None, Pom :: Nil, None)
|
||||
|
||||
val DocClassifier = "javadoc"
|
||||
val SourceClassifier = "sources"
|
||||
val DocType = "doc"
|
||||
val SourceType = "src"
|
||||
val PomType = "pom"
|
||||
val TestsClassifier = "tests"
|
||||
val DocClassifier = "javadoc"
|
||||
val SourceClassifier = "sources"
|
||||
val DocType = "doc"
|
||||
val SourceType = "src"
|
||||
val PomType = "pom"
|
||||
val TestsClassifier = "tests"
|
||||
|
||||
def extract(url: URL, default: String): String = extract(url.toString, default)
|
||||
def extract(name: String, default: String): String =
|
||||
{
|
||||
val i = name.lastIndexOf('.')
|
||||
if(i >= 0)
|
||||
name.substring(i+1)
|
||||
else
|
||||
default
|
||||
}
|
||||
def defaultArtifact(file: File) =
|
||||
{
|
||||
val name = file.getName
|
||||
val i = name.lastIndexOf('.')
|
||||
val base = if(i >= 0) name.substring(0, i) else name
|
||||
Artifact(base, extract(name, DefaultType), extract(name, DefaultExtension), None, Nil, Some(file.toURI.toURL))
|
||||
}
|
||||
def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String =
|
||||
{
|
||||
import artifact._
|
||||
val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c }
|
||||
val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary)
|
||||
val base = CrossVersion.applyCross(artifact.name, cross)
|
||||
base + "-" + module.revision + classifierStr + "." + artifact.extension
|
||||
}
|
||||
def extract(url: URL, default: String): String = extract(url.toString, default)
|
||||
def extract(name: String, default: String): String =
|
||||
{
|
||||
val i = name.lastIndexOf('.')
|
||||
if (i >= 0)
|
||||
name.substring(i + 1)
|
||||
else
|
||||
default
|
||||
}
|
||||
def defaultArtifact(file: File) =
|
||||
{
|
||||
val name = file.getName
|
||||
val i = name.lastIndexOf('.')
|
||||
val base = if (i >= 0) name.substring(0, i) else name
|
||||
Artifact(base, extract(name, DefaultType), extract(name, DefaultExtension), None, Nil, Some(file.toURI.toURL))
|
||||
}
|
||||
def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String =
|
||||
{
|
||||
import artifact._
|
||||
val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c }
|
||||
val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary)
|
||||
val base = CrossVersion.applyCross(artifact.name, cross)
|
||||
base + "-" + module.revision + classifierStr + "." + artifact.extension
|
||||
}
|
||||
|
||||
val classifierConfMap = Map(SourceClassifier -> Sources, DocClassifier -> Docs)
|
||||
val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType)
|
||||
def classifierConf(classifier: String): Configuration =
|
||||
if(classifier.startsWith(TestsClassifier))
|
||||
Test
|
||||
else
|
||||
classifierConfMap.getOrElse(classifier, Optional)
|
||||
def classifierType(classifier: String): String = classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType)
|
||||
def classified(name: String, classifier: String): Artifact =
|
||||
Artifact(name, classifierType(classifier), DefaultExtension, Some(classifier), classifierConf(classifier) :: Nil, None)
|
||||
val classifierConfMap = Map(SourceClassifier -> Sources, DocClassifier -> Docs)
|
||||
val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType)
|
||||
def classifierConf(classifier: String): Configuration =
|
||||
if (classifier.startsWith(TestsClassifier))
|
||||
Test
|
||||
else
|
||||
classifierConfMap.getOrElse(classifier, Optional)
|
||||
def classifierType(classifier: String): String = classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType)
|
||||
def classified(name: String, classifier: String): Artifact =
|
||||
Artifact(name, classifierType(classifier), DefaultExtension, Some(classifier), classifierConf(classifier) :: Nil, None)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,90 +3,87 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.{File,FileOutputStream}
|
||||
import java.io.{ File, FileOutputStream }
|
||||
import java.util.concurrent.Callable
|
||||
|
||||
/** A component manager provides access to the pieces of xsbt that are distributed as components.
|
||||
* There are two types of components. The first type is compiled subproject jars with their dependencies.
|
||||
* The second type is a subproject distributed as a source jar so that it can be compiled against a specific
|
||||
* version of Scala.
|
||||
*
|
||||
* The component manager provides services to install and retrieve components to the local repository.
|
||||
* This is used for compiled source jars so that the compilation need not be repeated for other projects on the same
|
||||
* machine.
|
||||
*/
|
||||
class ComponentManager(globalLock: xsbti.GlobalLock, provider: xsbti.ComponentProvider, ivyHome: Option[File], val log: Logger)
|
||||
{
|
||||
private[this] val ivyCache = new IvyCache(ivyHome)
|
||||
/** Get all of the files for component 'id', throwing an exception if no files exist for the component. */
|
||||
def files(id: String)(ifMissing: IfMissing): Iterable[File] =
|
||||
{
|
||||
def fromGlobal =
|
||||
lockGlobalCache {
|
||||
try { update(id); getOrElse(createAndCache) }
|
||||
catch { case e: NotInCache => createAndCache }
|
||||
}
|
||||
def getOrElse(orElse: => Iterable[File]): Iterable[File] =
|
||||
{
|
||||
val existing = provider.component(id)
|
||||
if(existing.isEmpty) orElse else existing
|
||||
}
|
||||
def notFound = invalid("Could not find required component '" + id + "'")
|
||||
def createAndCache =
|
||||
ifMissing match {
|
||||
case IfMissing.Fail => notFound
|
||||
case d: IfMissing.Define =>
|
||||
d()
|
||||
if(d.cache) cache(id)
|
||||
getOrElse(notFound)
|
||||
}
|
||||
/**
|
||||
* A component manager provides access to the pieces of xsbt that are distributed as components.
|
||||
* There are two types of components. The first type is compiled subproject jars with their dependencies.
|
||||
* The second type is a subproject distributed as a source jar so that it can be compiled against a specific
|
||||
* version of Scala.
|
||||
*
|
||||
* The component manager provides services to install and retrieve components to the local repository.
|
||||
* This is used for compiled source jars so that the compilation need not be repeated for other projects on the same
|
||||
* machine.
|
||||
*/
|
||||
class ComponentManager(globalLock: xsbti.GlobalLock, provider: xsbti.ComponentProvider, ivyHome: Option[File], val log: Logger) {
|
||||
private[this] val ivyCache = new IvyCache(ivyHome)
|
||||
/** Get all of the files for component 'id', throwing an exception if no files exist for the component. */
|
||||
def files(id: String)(ifMissing: IfMissing): Iterable[File] =
|
||||
{
|
||||
def fromGlobal =
|
||||
lockGlobalCache {
|
||||
try { update(id); getOrElse(createAndCache) }
|
||||
catch { case e: NotInCache => createAndCache }
|
||||
}
|
||||
def getOrElse(orElse: => Iterable[File]): Iterable[File] =
|
||||
{
|
||||
val existing = provider.component(id)
|
||||
if (existing.isEmpty) orElse else existing
|
||||
}
|
||||
def notFound = invalid("Could not find required component '" + id + "'")
|
||||
def createAndCache =
|
||||
ifMissing match {
|
||||
case IfMissing.Fail => notFound
|
||||
case d: IfMissing.Define =>
|
||||
d()
|
||||
if (d.cache) cache(id)
|
||||
getOrElse(notFound)
|
||||
}
|
||||
|
||||
lockLocalCache { getOrElse(fromGlobal) }
|
||||
}
|
||||
/** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. */
|
||||
private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)( action )
|
||||
/** This is used to ensure atomic access to components in the global Ivy cache.*/
|
||||
private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)( action )
|
||||
private def lock[T](file: File)(action: => T): T = globalLock(file, new Callable[T] { def call = action })
|
||||
/** Get the file for component 'id', throwing an exception if no files or multiple files exist for the component. */
|
||||
def file(id: String)(ifMissing: IfMissing): File =
|
||||
files(id)(ifMissing).toList match {
|
||||
case x :: Nil => x
|
||||
case xs => invalid("Expected single file for component '" + id + "', found: " + xs.mkString(", "))
|
||||
}
|
||||
private def invalid(msg: String) = throw new InvalidComponent(msg)
|
||||
private def invalid(e: NotInCache) = throw new InvalidComponent(e.getMessage, e)
|
||||
lockLocalCache { getOrElse(fromGlobal) }
|
||||
}
|
||||
/** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. */
|
||||
private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)(action)
|
||||
/** This is used to ensure atomic access to components in the global Ivy cache.*/
|
||||
private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)(action)
|
||||
private def lock[T](file: File)(action: => T): T = globalLock(file, new Callable[T] { def call = action })
|
||||
/** Get the file for component 'id', throwing an exception if no files or multiple files exist for the component. */
|
||||
def file(id: String)(ifMissing: IfMissing): File =
|
||||
files(id)(ifMissing).toList match {
|
||||
case x :: Nil => x
|
||||
case xs => invalid("Expected single file for component '" + id + "', found: " + xs.mkString(", "))
|
||||
}
|
||||
private def invalid(msg: String) = throw new InvalidComponent(msg)
|
||||
private def invalid(e: NotInCache) = throw new InvalidComponent(e.getMessage, e)
|
||||
|
||||
def define(id: String, files: Iterable[File]) = lockLocalCache { provider.defineComponent(id, files.toSeq.toArray) }
|
||||
/** Retrieve the file for component 'id' from the local repository. */
|
||||
private def update(id: String): Unit = ivyCache.withCachedJar(sbtModuleID(id), Some(globalLock), log)(jar => define(id, Seq(jar)) )
|
||||
def define(id: String, files: Iterable[File]) = lockLocalCache { provider.defineComponent(id, files.toSeq.toArray) }
|
||||
/** Retrieve the file for component 'id' from the local repository. */
|
||||
private def update(id: String): Unit = ivyCache.withCachedJar(sbtModuleID(id), Some(globalLock), log)(jar => define(id, Seq(jar)))
|
||||
|
||||
private def sbtModuleID(id: String) = ModuleID(SbtArtifacts.Organization, id, ComponentManager.stampedVersion)
|
||||
/** Install the files for component 'id' to the local repository. This is usually used after writing files to the directory returned by 'location'. */
|
||||
def cache(id: String): Unit = ivyCache.cacheJar(sbtModuleID(id), file(id)(IfMissing.Fail), Some(globalLock), log)
|
||||
def clearCache(id: String): Unit = lockGlobalCache { ivyCache.clearCachedJar(sbtModuleID(id), Some(globalLock), log) }
|
||||
private def sbtModuleID(id: String) = ModuleID(SbtArtifacts.Organization, id, ComponentManager.stampedVersion)
|
||||
/** Install the files for component 'id' to the local repository. This is usually used after writing files to the directory returned by 'location'. */
|
||||
def cache(id: String): Unit = ivyCache.cacheJar(sbtModuleID(id), file(id)(IfMissing.Fail), Some(globalLock), log)
|
||||
def clearCache(id: String): Unit = lockGlobalCache { ivyCache.clearCachedJar(sbtModuleID(id), Some(globalLock), log) }
|
||||
}
|
||||
class InvalidComponent(msg: String, cause: Throwable) extends RuntimeException(msg, cause)
|
||||
{
|
||||
def this(msg: String) = this(msg, null)
|
||||
class InvalidComponent(msg: String, cause: Throwable) extends RuntimeException(msg, cause) {
|
||||
def this(msg: String) = this(msg, null)
|
||||
}
|
||||
sealed trait IfMissing extends NotNull
|
||||
object IfMissing
|
||||
{
|
||||
object Fail extends IfMissing
|
||||
final class Define(val cache: Boolean, define: => Unit) extends IfMissing { def apply() = define }
|
||||
object IfMissing {
|
||||
object Fail extends IfMissing
|
||||
final class Define(val cache: Boolean, define: => Unit) extends IfMissing { def apply() = define }
|
||||
}
|
||||
object ComponentManager
|
||||
{
|
||||
lazy val (version, timestamp) =
|
||||
{
|
||||
val properties = new java.util.Properties
|
||||
val propertiesStream = versionResource.openStream
|
||||
try { properties.load(propertiesStream) } finally { propertiesStream.close() }
|
||||
(properties.getProperty("version"), properties.getProperty("timestamp"))
|
||||
}
|
||||
lazy val stampedVersion = version + "_" + timestamp
|
||||
object ComponentManager {
|
||||
lazy val (version, timestamp) =
|
||||
{
|
||||
val properties = new java.util.Properties
|
||||
val propertiesStream = versionResource.openStream
|
||||
try { properties.load(propertiesStream) } finally { propertiesStream.close() }
|
||||
(properties.getProperty("version"), properties.getProperty("timestamp"))
|
||||
}
|
||||
lazy val stampedVersion = version + "_" + timestamp
|
||||
|
||||
import java.net.URL
|
||||
private def versionResource: URL = getClass.getResource("/xsbt.version.properties")
|
||||
import java.net.URL
|
||||
private def versionResource: URL = getClass.getResource("/xsbt.version.properties")
|
||||
}
|
||||
|
|
@ -3,63 +3,61 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
object Configurations
|
||||
{
|
||||
def config(name: String) = new Configuration(name)
|
||||
def default: Seq[Configuration] = defaultMavenConfigurations
|
||||
def defaultMavenConfigurations: Seq[Configuration] = Seq(Compile, Runtime, Test, Provided, Optional)
|
||||
def defaultInternal: Seq[Configuration] = Seq(CompileInternal, RuntimeInternal, TestInternal)
|
||||
def auxiliary: Seq[Configuration] = Seq(Sources, Docs, Pom)
|
||||
def names(cs: Seq[Configuration]) = cs.map(_.name)
|
||||
object Configurations {
|
||||
def config(name: String) = new Configuration(name)
|
||||
def default: Seq[Configuration] = defaultMavenConfigurations
|
||||
def defaultMavenConfigurations: Seq[Configuration] = Seq(Compile, Runtime, Test, Provided, Optional)
|
||||
def defaultInternal: Seq[Configuration] = Seq(CompileInternal, RuntimeInternal, TestInternal)
|
||||
def auxiliary: Seq[Configuration] = Seq(Sources, Docs, Pom)
|
||||
def names(cs: Seq[Configuration]) = cs.map(_.name)
|
||||
|
||||
lazy val RuntimeInternal = optionalInternal(Runtime)
|
||||
lazy val TestInternal = fullInternal(Test)
|
||||
lazy val IntegrationTestInternal = fullInternal(IntegrationTest)
|
||||
lazy val CompileInternal = fullInternal(Compile)
|
||||
lazy val RuntimeInternal = optionalInternal(Runtime)
|
||||
lazy val TestInternal = fullInternal(Test)
|
||||
lazy val IntegrationTestInternal = fullInternal(IntegrationTest)
|
||||
lazy val CompileInternal = fullInternal(Compile)
|
||||
|
||||
def internalMap(c: Configuration) = c match {
|
||||
case Compile => CompileInternal
|
||||
case Test => TestInternal
|
||||
case Runtime => RuntimeInternal
|
||||
case IntegrationTest => IntegrationTestInternal
|
||||
case _ => c
|
||||
}
|
||||
def internalMap(c: Configuration) = c match {
|
||||
case Compile => CompileInternal
|
||||
case Test => TestInternal
|
||||
case Runtime => RuntimeInternal
|
||||
case IntegrationTest => IntegrationTestInternal
|
||||
case _ => c
|
||||
}
|
||||
|
||||
def internal(base: Configuration, ext: Configuration*) = config(base.name + "-internal") extend(ext : _*) hide;
|
||||
def fullInternal(base: Configuration): Configuration = internal(base, base, Optional, Provided)
|
||||
def optionalInternal(base: Configuration): Configuration = internal(base, base, Optional)
|
||||
def internal(base: Configuration, ext: Configuration*) = config(base.name + "-internal") extend (ext: _*) hide;
|
||||
def fullInternal(base: Configuration): Configuration = internal(base, base, Optional, Provided)
|
||||
def optionalInternal(base: Configuration): Configuration = internal(base, base, Optional)
|
||||
|
||||
lazy val Default = config("default")
|
||||
lazy val Compile = config("compile")
|
||||
lazy val IntegrationTest = config("it") extend(Runtime)
|
||||
lazy val Provided = config("provided") ;
|
||||
lazy val Docs = config("docs")
|
||||
lazy val Runtime = config("runtime") extend(Compile)
|
||||
lazy val Test = config("test") extend(Runtime)
|
||||
lazy val Sources = config("sources")
|
||||
lazy val System = config("system")
|
||||
lazy val Optional = config("optional")
|
||||
lazy val Pom = config("pom")
|
||||
lazy val Default = config("default")
|
||||
lazy val Compile = config("compile")
|
||||
lazy val IntegrationTest = config("it") extend (Runtime)
|
||||
lazy val Provided = config("provided");
|
||||
lazy val Docs = config("docs")
|
||||
lazy val Runtime = config("runtime") extend (Compile)
|
||||
lazy val Test = config("test") extend (Runtime)
|
||||
lazy val Sources = config("sources")
|
||||
lazy val System = config("system")
|
||||
lazy val Optional = config("optional")
|
||||
lazy val Pom = config("pom")
|
||||
|
||||
lazy val ScalaTool = config("scala-tool") hide
|
||||
lazy val CompilerPlugin = config("plugin") hide
|
||||
lazy val ScalaTool = config("scala-tool") hide
|
||||
lazy val CompilerPlugin = config("plugin") hide
|
||||
|
||||
private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true)
|
||||
private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false)
|
||||
private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration
|
||||
private[sbt] def defaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) Configurations.Compile else Configurations.Default
|
||||
private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*)
|
||||
private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true)
|
||||
private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false)
|
||||
private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration
|
||||
private[sbt] def defaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) Configurations.Compile else Configurations.Default
|
||||
private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*)
|
||||
}
|
||||
/** Represents an Ivy configuration. */
|
||||
final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean)
|
||||
{
|
||||
require(name != null && !name.isEmpty)
|
||||
require(description != null)
|
||||
def this(name: String) = this(name, "", true, Nil, true)
|
||||
def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive)
|
||||
def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive)
|
||||
def notTransitive = intransitive
|
||||
def intransitive = Configuration(name, description, isPublic, extendsConfigs, false)
|
||||
def hide = Configuration(name, description, false, extendsConfigs, transitive)
|
||||
override def toString = name
|
||||
final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) {
|
||||
require(name != null && !name.isEmpty)
|
||||
require(description != null)
|
||||
def this(name: String) = this(name, "", true, Nil, true)
|
||||
def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive)
|
||||
def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive)
|
||||
def notTransitive = intransitive
|
||||
def intransitive = Configuration(name, description, isPublic, extendsConfigs, false)
|
||||
def hide = Configuration(name, description, false, extendsConfigs, transitive)
|
||||
override def toString = name
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,80 +1,74 @@
|
|||
package sbt
|
||||
|
||||
import DependencyFilter._
|
||||
import DependencyFilter._
|
||||
|
||||
final case class ConflictWarning(label: String, level: Level.Value, failOnConflict: Boolean)
|
||||
{
|
||||
@deprecated("`filter` is no longer used", "0.13.0")
|
||||
val filter: ModuleFilter = (_: ModuleID) => false
|
||||
@deprecated("`group` is no longer used", "0.13.0")
|
||||
val group: ModuleID => String = ConflictWarning.org
|
||||
final case class ConflictWarning(label: String, level: Level.Value, failOnConflict: Boolean) {
|
||||
@deprecated("`filter` is no longer used", "0.13.0")
|
||||
val filter: ModuleFilter = (_: ModuleID) => false
|
||||
@deprecated("`group` is no longer used", "0.13.0")
|
||||
val group: ModuleID => String = ConflictWarning.org
|
||||
}
|
||||
object ConflictWarning
|
||||
{
|
||||
@deprecated("`group` and `filter` are no longer used. Use a standard Ivy conflict manager.", "0.13.0")
|
||||
def apply(label: String, filter: ModuleFilter, group: ModuleID => String, level: Level.Value, failOnConflict: Boolean): ConflictWarning =
|
||||
ConflictWarning(label, level, failOnConflict)
|
||||
object ConflictWarning {
|
||||
@deprecated("`group` and `filter` are no longer used. Use a standard Ivy conflict manager.", "0.13.0")
|
||||
def apply(label: String, filter: ModuleFilter, group: ModuleID => String, level: Level.Value, failOnConflict: Boolean): ConflictWarning =
|
||||
ConflictWarning(label, level, failOnConflict)
|
||||
|
||||
def disable: ConflictWarning = ConflictWarning("", Level.Debug, false)
|
||||
def disable: ConflictWarning = ConflictWarning("", Level.Debug, false)
|
||||
|
||||
private def org = (_: ModuleID).organization
|
||||
private[this] def idString(org: String, name: String) = s"$org:$name"
|
||||
private def org = (_: ModuleID).organization
|
||||
private[this] def idString(org: String, name: String) = s"$org:$name"
|
||||
|
||||
def default(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true)
|
||||
def default(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true)
|
||||
|
||||
@deprecated("Warning on evicted modules is no longer done, so this is the same as `default`. Use a standard Ivy conflict manager.", "0.13.0")
|
||||
def strict(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true)
|
||||
@deprecated("Warning on evicted modules is no longer done, so this is the same as `default`. Use a standard Ivy conflict manager.", "0.13.0")
|
||||
def strict(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true)
|
||||
|
||||
def apply(config: ConflictWarning, report: UpdateReport, log: Logger)
|
||||
{
|
||||
processCrossVersioned(config, report, log)
|
||||
}
|
||||
private[this] def processCrossVersioned(config: ConflictWarning, report: UpdateReport, log: Logger)
|
||||
{
|
||||
val crossMismatches = crossVersionMismatches(report)
|
||||
if(!crossMismatches.isEmpty)
|
||||
{
|
||||
val pre = s"Modules were resolved with conflicting cross-version suffixes in ${config.label}:\n "
|
||||
val conflictMsgs =
|
||||
for( ((org,rawName), fullNames) <- crossMismatches ) yield
|
||||
{
|
||||
val suffixes = fullNames.map(getCrossSuffix).mkString(", ")
|
||||
s"${idString(org,rawName)} $suffixes"
|
||||
}
|
||||
log.log(config.level, conflictMsgs.mkString(pre, "\n ", ""))
|
||||
if(config.failOnConflict) {
|
||||
val summary = crossMismatches.map{ case ((org,raw),_) => idString(org,raw)}.mkString(", ")
|
||||
sys.error("Conflicting cross-version suffixes in: " + summary)
|
||||
}
|
||||
}
|
||||
}
|
||||
def apply(config: ConflictWarning, report: UpdateReport, log: Logger) {
|
||||
processCrossVersioned(config, report, log)
|
||||
}
|
||||
private[this] def processCrossVersioned(config: ConflictWarning, report: UpdateReport, log: Logger) {
|
||||
val crossMismatches = crossVersionMismatches(report)
|
||||
if (!crossMismatches.isEmpty) {
|
||||
val pre = s"Modules were resolved with conflicting cross-version suffixes in ${config.label}:\n "
|
||||
val conflictMsgs =
|
||||
for (((org, rawName), fullNames) <- crossMismatches) yield {
|
||||
val suffixes = fullNames.map(getCrossSuffix).mkString(", ")
|
||||
s"${idString(org, rawName)} $suffixes"
|
||||
}
|
||||
log.log(config.level, conflictMsgs.mkString(pre, "\n ", ""))
|
||||
if (config.failOnConflict) {
|
||||
val summary = crossMismatches.map { case ((org, raw), _) => idString(org, raw) }.mkString(", ")
|
||||
sys.error("Conflicting cross-version suffixes in: " + summary)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Map from (organization, rawName) to set of multiple full names. */
|
||||
def crossVersionMismatches(report: UpdateReport): Map[(String,String), Set[String]] =
|
||||
{
|
||||
val mismatches = report.configurations.flatMap { confReport =>
|
||||
groupByRawName(confReport.allModules).mapValues { modules =>
|
||||
val differentFullNames = modules.map(_.name).toSet
|
||||
if(differentFullNames.size > 1) differentFullNames else Set.empty[String]
|
||||
}
|
||||
}
|
||||
(Map.empty[(String,String),Set[String]] /: mismatches)(merge)
|
||||
}
|
||||
private[this] def merge[A,B](m: Map[A, Set[B]], b: (A, Set[B])): Map[A, Set[B]] =
|
||||
if(b._2.isEmpty) m else
|
||||
m.updated(b._1, m.getOrElse(b._1, Set.empty) ++ b._2)
|
||||
/** Map from (organization, rawName) to set of multiple full names. */
|
||||
def crossVersionMismatches(report: UpdateReport): Map[(String, String), Set[String]] =
|
||||
{
|
||||
val mismatches = report.configurations.flatMap { confReport =>
|
||||
groupByRawName(confReport.allModules).mapValues { modules =>
|
||||
val differentFullNames = modules.map(_.name).toSet
|
||||
if (differentFullNames.size > 1) differentFullNames else Set.empty[String]
|
||||
}
|
||||
}
|
||||
(Map.empty[(String, String), Set[String]] /: mismatches)(merge)
|
||||
}
|
||||
private[this] def merge[A, B](m: Map[A, Set[B]], b: (A, Set[B])): Map[A, Set[B]] =
|
||||
if (b._2.isEmpty) m else
|
||||
m.updated(b._1, m.getOrElse(b._1, Set.empty) ++ b._2)
|
||||
|
||||
private[this] def groupByRawName(ms: Seq[ModuleID]): Map[(String,String), Seq[ModuleID]] =
|
||||
ms.groupBy(m => (m.organization, dropCrossSuffix(m.name)))
|
||||
private[this] def groupByRawName(ms: Seq[ModuleID]): Map[(String, String), Seq[ModuleID]] =
|
||||
ms.groupBy(m => (m.organization, dropCrossSuffix(m.name)))
|
||||
|
||||
private[this] val CrossSuffixPattern = """(.+)_(\d+\.\d+(?:\.\d+)?(?:-.+)?)""".r
|
||||
private[this] def dropCrossSuffix(s: String): String = s match {
|
||||
case CrossSuffixPattern(raw, _) => raw
|
||||
case _ => s
|
||||
}
|
||||
private[this] def getCrossSuffix(s: String): String = s match {
|
||||
case CrossSuffixPattern(_, v) => "_" + v
|
||||
case _ => "<none>"
|
||||
}
|
||||
private[this] val CrossSuffixPattern = """(.+)_(\d+\.\d+(?:\.\d+)?(?:-.+)?)""".r
|
||||
private[this] def dropCrossSuffix(s: String): String = s match {
|
||||
case CrossSuffixPattern(raw, _) => raw
|
||||
case _ => s
|
||||
}
|
||||
private[this] def getCrossSuffix(s: String): String = s match {
|
||||
case CrossSuffixPattern(_, v) => "_" + v
|
||||
case _ => "<none>"
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,239 +5,234 @@ package sbt
|
|||
|
||||
import java.net.URL
|
||||
import java.util.Collections
|
||||
import org.apache.ivy.{core,plugins}
|
||||
import org.apache.ivy.{ core, plugins }
|
||||
import core.module.id.ModuleRevisionId
|
||||
import core.module.descriptor.DependencyDescriptor
|
||||
import core.resolve.ResolveData
|
||||
import core.settings.IvySettings
|
||||
import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver}
|
||||
import plugins.resolver.{AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver}
|
||||
import plugins.repository.url.{URLRepository => URLRepo}
|
||||
import plugins.repository.file.{FileRepository => FileRepo, FileResource}
|
||||
import plugins.resolver.{ BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver }
|
||||
import plugins.resolver.{ AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver }
|
||||
import plugins.repository.url.{ URLRepository => URLRepo }
|
||||
import plugins.repository.file.{ FileRepository => FileRepo, FileResource }
|
||||
import java.io.File
|
||||
import org.apache.ivy.util.ChecksumHelper
|
||||
import org.apache.ivy.core.module.descriptor.{Artifact=>IArtifact}
|
||||
import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact }
|
||||
|
||||
|
||||
private object ConvertResolver
|
||||
{
|
||||
/** This class contains all the reflective lookups used in the
|
||||
* checksum-friendly URL publishing shim.
|
||||
*/
|
||||
private object ChecksumFriendlyURLResolver {
|
||||
// TODO - When we dump JDK6 support we can remove this hackery
|
||||
// import java.lang.reflect.AccessibleObject
|
||||
type AccessibleObject = {
|
||||
def setAccessible(value: Boolean): Unit
|
||||
}
|
||||
private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] =
|
||||
try {
|
||||
val cls = classOf[RepositoryResolver]
|
||||
val thing = f(cls)
|
||||
import scala.language.reflectiveCalls
|
||||
thing.setAccessible(true)
|
||||
Some(thing)
|
||||
} catch {
|
||||
case (_: java.lang.NoSuchFieldException) |
|
||||
(_: java.lang.SecurityException) |
|
||||
(_: java.lang.NoSuchMethodException) => None
|
||||
}
|
||||
private val signerNameField: Option[java.lang.reflect.Field] =
|
||||
reflectiveLookup(_.getDeclaredField("signerName"))
|
||||
private val putChecksumMethod: Option[java.lang.reflect.Method] =
|
||||
reflectiveLookup(_.getDeclaredMethod("putChecksum",
|
||||
classOf[IArtifact], classOf[File], classOf[String],
|
||||
classOf[Boolean], classOf[String]))
|
||||
private val putSignatureMethod: Option[java.lang.reflect.Method] =
|
||||
reflectiveLookup(_.getDeclaredMethod("putSignature",
|
||||
classOf[IArtifact], classOf[File], classOf[String],
|
||||
classOf[Boolean]))
|
||||
}
|
||||
/**
|
||||
* The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories
|
||||
* will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore
|
||||
* if we succeed in publishing an artifact, we need to just blast the checksums in place.
|
||||
* This acts as a "shim" on RepositoryResolvers so that we can hook our methods into
|
||||
* both the IBiblioResolver + URLResolver without having to duplicate the code in two
|
||||
* places. However, this does mean our use of reflection is awesome.
|
||||
*
|
||||
* TODO - See about contributing back to ivy.
|
||||
*/
|
||||
private trait ChecksumFriendlyURLResolver extends RepositoryResolver {
|
||||
import ChecksumFriendlyURLResolver._
|
||||
private def signerName: String = signerNameField match {
|
||||
case Some(field) => field.get(this).asInstanceOf[String]
|
||||
case None => null
|
||||
}
|
||||
override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = {
|
||||
// verify the checksum algorithms before uploading artifacts!
|
||||
val checksums = getChecksumAlgorithms()
|
||||
val repository = getRepository()
|
||||
for {
|
||||
checksum <- checksums
|
||||
if !ChecksumHelper.isKnownAlgorithm(checksum)
|
||||
} throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum)
|
||||
repository.put(artifact, src, dest, overwrite);
|
||||
// Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so
|
||||
// we need to overwrite what it has.
|
||||
for (checksum <- checksums) {
|
||||
putChecksumMethod match {
|
||||
case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum)
|
||||
case None => // TODO - issue warning?
|
||||
}
|
||||
}
|
||||
if (signerName != null) {
|
||||
putSignatureMethod match {
|
||||
case None => ()
|
||||
case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean)
|
||||
}
|
||||
}
|
||||
private object ConvertResolver {
|
||||
/**
|
||||
* This class contains all the reflective lookups used in the
|
||||
* checksum-friendly URL publishing shim.
|
||||
*/
|
||||
private object ChecksumFriendlyURLResolver {
|
||||
// TODO - When we dump JDK6 support we can remove this hackery
|
||||
// import java.lang.reflect.AccessibleObject
|
||||
type AccessibleObject = {
|
||||
def setAccessible(value: Boolean): Unit
|
||||
}
|
||||
}
|
||||
private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] =
|
||||
try {
|
||||
val cls = classOf[RepositoryResolver]
|
||||
val thing = f(cls)
|
||||
import scala.language.reflectiveCalls
|
||||
thing.setAccessible(true)
|
||||
Some(thing)
|
||||
} catch {
|
||||
case (_: java.lang.NoSuchFieldException) |
|
||||
(_: java.lang.SecurityException) |
|
||||
(_: java.lang.NoSuchMethodException) => None
|
||||
}
|
||||
private val signerNameField: Option[java.lang.reflect.Field] =
|
||||
reflectiveLookup(_.getDeclaredField("signerName"))
|
||||
private val putChecksumMethod: Option[java.lang.reflect.Method] =
|
||||
reflectiveLookup(_.getDeclaredMethod("putChecksum",
|
||||
classOf[IArtifact], classOf[File], classOf[String],
|
||||
classOf[Boolean], classOf[String]))
|
||||
private val putSignatureMethod: Option[java.lang.reflect.Method] =
|
||||
reflectiveLookup(_.getDeclaredMethod("putSignature",
|
||||
classOf[IArtifact], classOf[File], classOf[String],
|
||||
classOf[Boolean]))
|
||||
}
|
||||
/**
|
||||
* The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories
|
||||
* will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore
|
||||
* if we succeed in publishing an artifact, we need to just blast the checksums in place.
|
||||
* This acts as a "shim" on RepositoryResolvers so that we can hook our methods into
|
||||
* both the IBiblioResolver + URLResolver without having to duplicate the code in two
|
||||
* places. However, this does mean our use of reflection is awesome.
|
||||
*
|
||||
* TODO - See about contributing back to ivy.
|
||||
*/
|
||||
private trait ChecksumFriendlyURLResolver extends RepositoryResolver {
|
||||
import ChecksumFriendlyURLResolver._
|
||||
private def signerName: String = signerNameField match {
|
||||
case Some(field) => field.get(this).asInstanceOf[String]
|
||||
case None => null
|
||||
}
|
||||
override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = {
|
||||
// verify the checksum algorithms before uploading artifacts!
|
||||
val checksums = getChecksumAlgorithms()
|
||||
val repository = getRepository()
|
||||
for {
|
||||
checksum <- checksums
|
||||
if !ChecksumHelper.isKnownAlgorithm(checksum)
|
||||
} throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum)
|
||||
repository.put(artifact, src, dest, overwrite);
|
||||
// Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so
|
||||
// we need to overwrite what it has.
|
||||
for (checksum <- checksums) {
|
||||
putChecksumMethod match {
|
||||
case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum)
|
||||
case None => // TODO - issue warning?
|
||||
}
|
||||
}
|
||||
if (signerName != null) {
|
||||
putSignatureMethod match {
|
||||
case None => ()
|
||||
case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Converts the given sbt resolver into an Ivy resolver..*/
|
||||
def apply(r: Resolver, settings: IvySettings, log: Logger) =
|
||||
{
|
||||
r match
|
||||
{
|
||||
case repo: MavenRepository =>
|
||||
{
|
||||
val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern))
|
||||
final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired {
|
||||
def setPatterns() { // done this way for access to protected methods.
|
||||
setArtifactPatterns(pattern)
|
||||
setIvyPatterns(pattern)
|
||||
}
|
||||
}
|
||||
val resolver = new PluginCapableResolver
|
||||
resolver.setRepository(new LocalIfFileRepo)
|
||||
initializeMavenStyle(resolver, repo.name, repo.root)
|
||||
resolver.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns
|
||||
resolver
|
||||
}
|
||||
case r: JavaNet1Repository =>
|
||||
{
|
||||
// Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy:
|
||||
// http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html
|
||||
val resolver = new IBiblioResolver with DescriptorRequired { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid }
|
||||
initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/")
|
||||
resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]")
|
||||
resolver
|
||||
}
|
||||
case repo: SshRepository =>
|
||||
{
|
||||
val resolver = new SshResolver with DescriptorRequired
|
||||
initializeSSHResolver(resolver, repo, settings)
|
||||
repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
|
||||
resolver
|
||||
}
|
||||
case repo: SftpRepository =>
|
||||
{
|
||||
val resolver = new SFTPResolver
|
||||
initializeSSHResolver(resolver, repo, settings)
|
||||
resolver
|
||||
}
|
||||
case repo: FileRepository =>
|
||||
{
|
||||
val resolver = new FileSystemResolver with DescriptorRequired {
|
||||
// Workaround for #1156
|
||||
// Temporarily in sbt 0.13.x we deprecate overwriting
|
||||
// in local files for non-changing revisions.
|
||||
// This will be fully enforced in sbt 1.0.
|
||||
setRepository(new WarnOnOverwriteFileRepo())
|
||||
}
|
||||
resolver.setName(repo.name)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
import repo.configuration.{isLocal, isTransactional}
|
||||
resolver.setLocal(isLocal)
|
||||
isTransactional.foreach(value => resolver.setTransactional(value.toString))
|
||||
resolver
|
||||
}
|
||||
case repo: URLRepository =>
|
||||
{
|
||||
val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired
|
||||
resolver.setName(repo.name)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
resolver
|
||||
}
|
||||
case repo: ChainedResolver => IvySbt.resolverChain(repo.name, repo.resolvers, false, settings, log)
|
||||
case repo: RawRepository => repo.resolver
|
||||
}
|
||||
}
|
||||
|
||||
private sealed trait DescriptorRequired extends BasicResolver
|
||||
{
|
||||
override def getDependency(dd: DependencyDescriptor, data: ResolveData) =
|
||||
{
|
||||
val prev = descriptorString(isAllownomd)
|
||||
setDescriptor(descriptorString(hasExplicitURL(dd)))
|
||||
try super.getDependency(dd, data) finally setDescriptor(prev)
|
||||
}
|
||||
def descriptorString(optional: Boolean) =
|
||||
if(optional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED
|
||||
def hasExplicitURL(dd: DependencyDescriptor): Boolean =
|
||||
dd.getAllDependencyArtifacts.exists(_.getUrl != null)
|
||||
}
|
||||
private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String)
|
||||
{
|
||||
resolver.setName(name)
|
||||
resolver.setM2compatible(true)
|
||||
resolver.setRoot(root)
|
||||
}
|
||||
private def initializeSSHResolver(resolver: AbstractSshBasedResolver, repo: SshBasedRepository, settings: IvySettings)
|
||||
{
|
||||
resolver.setName(repo.name)
|
||||
resolver.setPassfile(null)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
initializeConnection(resolver, repo.connection)
|
||||
}
|
||||
private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection)
|
||||
{
|
||||
import resolver._
|
||||
import connection._
|
||||
hostname.foreach(setHost)
|
||||
port.foreach(setPort)
|
||||
authentication foreach
|
||||
{
|
||||
case RepositoryHelpers.PasswordAuthentication(user, password) =>
|
||||
setUser(user)
|
||||
password.foreach(setUserPassword)
|
||||
case RepositoryHelpers.KeyFileAuthentication(user, file, password) =>
|
||||
setKeyFile(file)
|
||||
password.foreach(setKeyFilePassword)
|
||||
setUser(user)
|
||||
}
|
||||
}
|
||||
private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: Patterns, settings: IvySettings)
|
||||
{
|
||||
resolver.setM2compatible(patterns.isMavenCompatible)
|
||||
resolver.setDescriptor(if (patterns.descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED)
|
||||
resolver.setCheckconsistency(!patterns.skipConsistencyCheck)
|
||||
patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p))
|
||||
patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p))
|
||||
}
|
||||
/** A custom Ivy URLRepository that returns FileResources for file URLs.
|
||||
* This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. */
|
||||
private[this] final class LocalIfFileRepo extends URLRepo {
|
||||
private[this] val repo = new WarnOnOverwriteFileRepo()
|
||||
override def getResource(source: String) = {
|
||||
val url = new URL(source)
|
||||
if(url.getProtocol == IO.FileScheme)
|
||||
new FileResource(repo, IO.toFile(url))
|
||||
else
|
||||
super.getResource(source)
|
||||
}
|
||||
}
|
||||
/** Converts the given sbt resolver into an Ivy resolver..*/
|
||||
def apply(r: Resolver, settings: IvySettings, log: Logger) =
|
||||
{
|
||||
r match {
|
||||
case repo: MavenRepository =>
|
||||
{
|
||||
val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern))
|
||||
final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired {
|
||||
def setPatterns() { // done this way for access to protected methods.
|
||||
setArtifactPatterns(pattern)
|
||||
setIvyPatterns(pattern)
|
||||
}
|
||||
}
|
||||
val resolver = new PluginCapableResolver
|
||||
resolver.setRepository(new LocalIfFileRepo)
|
||||
initializeMavenStyle(resolver, repo.name, repo.root)
|
||||
resolver.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns
|
||||
resolver
|
||||
}
|
||||
case r: JavaNet1Repository =>
|
||||
{
|
||||
// Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy:
|
||||
// http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html
|
||||
val resolver = new IBiblioResolver with DescriptorRequired { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid }
|
||||
initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/")
|
||||
resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]")
|
||||
resolver
|
||||
}
|
||||
case repo: SshRepository =>
|
||||
{
|
||||
val resolver = new SshResolver with DescriptorRequired
|
||||
initializeSSHResolver(resolver, repo, settings)
|
||||
repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
|
||||
resolver
|
||||
}
|
||||
case repo: SftpRepository =>
|
||||
{
|
||||
val resolver = new SFTPResolver
|
||||
initializeSSHResolver(resolver, repo, settings)
|
||||
resolver
|
||||
}
|
||||
case repo: FileRepository =>
|
||||
{
|
||||
val resolver = new FileSystemResolver with DescriptorRequired {
|
||||
// Workaround for #1156
|
||||
// Temporarily in sbt 0.13.x we deprecate overwriting
|
||||
// in local files for non-changing revisions.
|
||||
// This will be fully enforced in sbt 1.0.
|
||||
setRepository(new WarnOnOverwriteFileRepo())
|
||||
}
|
||||
resolver.setName(repo.name)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
import repo.configuration.{ isLocal, isTransactional }
|
||||
resolver.setLocal(isLocal)
|
||||
isTransactional.foreach(value => resolver.setTransactional(value.toString))
|
||||
resolver
|
||||
}
|
||||
case repo: URLRepository =>
|
||||
{
|
||||
val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired
|
||||
resolver.setName(repo.name)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
resolver
|
||||
}
|
||||
case repo: ChainedResolver => IvySbt.resolverChain(repo.name, repo.resolvers, false, settings, log)
|
||||
case repo: RawRepository => repo.resolver
|
||||
}
|
||||
}
|
||||
|
||||
private[this] final class WarnOnOverwriteFileRepo extends FileRepo() {
|
||||
override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = {
|
||||
try super.put(source, destination, overwrite)
|
||||
catch {
|
||||
case e: java.io.IOException if e.getMessage.contains("destination already exists") =>
|
||||
import org.apache.ivy.util.Message
|
||||
Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.")
|
||||
super.put(source, destination, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
private sealed trait DescriptorRequired extends BasicResolver {
|
||||
override def getDependency(dd: DependencyDescriptor, data: ResolveData) =
|
||||
{
|
||||
val prev = descriptorString(isAllownomd)
|
||||
setDescriptor(descriptorString(hasExplicitURL(dd)))
|
||||
try super.getDependency(dd, data) finally setDescriptor(prev)
|
||||
}
|
||||
def descriptorString(optional: Boolean) =
|
||||
if (optional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED
|
||||
def hasExplicitURL(dd: DependencyDescriptor): Boolean =
|
||||
dd.getAllDependencyArtifacts.exists(_.getUrl != null)
|
||||
}
|
||||
private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String) {
|
||||
resolver.setName(name)
|
||||
resolver.setM2compatible(true)
|
||||
resolver.setRoot(root)
|
||||
}
|
||||
private def initializeSSHResolver(resolver: AbstractSshBasedResolver, repo: SshBasedRepository, settings: IvySettings) {
|
||||
resolver.setName(repo.name)
|
||||
resolver.setPassfile(null)
|
||||
initializePatterns(resolver, repo.patterns, settings)
|
||||
initializeConnection(resolver, repo.connection)
|
||||
}
|
||||
private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection) {
|
||||
import resolver._
|
||||
import connection._
|
||||
hostname.foreach(setHost)
|
||||
port.foreach(setPort)
|
||||
authentication foreach
|
||||
{
|
||||
case RepositoryHelpers.PasswordAuthentication(user, password) =>
|
||||
setUser(user)
|
||||
password.foreach(setUserPassword)
|
||||
case RepositoryHelpers.KeyFileAuthentication(user, file, password) =>
|
||||
setKeyFile(file)
|
||||
password.foreach(setKeyFilePassword)
|
||||
setUser(user)
|
||||
}
|
||||
}
|
||||
private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: Patterns, settings: IvySettings) {
|
||||
resolver.setM2compatible(patterns.isMavenCompatible)
|
||||
resolver.setDescriptor(if (patterns.descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED)
|
||||
resolver.setCheckconsistency(!patterns.skipConsistencyCheck)
|
||||
patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p))
|
||||
patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p))
|
||||
}
|
||||
/**
|
||||
* A custom Ivy URLRepository that returns FileResources for file URLs.
|
||||
* This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache.
|
||||
*/
|
||||
private[this] final class LocalIfFileRepo extends URLRepo {
|
||||
private[this] val repo = new WarnOnOverwriteFileRepo()
|
||||
override def getResource(source: String) = {
|
||||
val url = new URL(source)
|
||||
if (url.getProtocol == IO.FileScheme)
|
||||
new FileResource(repo, IO.toFile(url))
|
||||
else
|
||||
super.getResource(source)
|
||||
}
|
||||
}
|
||||
|
||||
private[this] final class WarnOnOverwriteFileRepo extends FileRepo() {
|
||||
override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = {
|
||||
try super.put(source, destination, overwrite)
|
||||
catch {
|
||||
case e: java.io.IOException if e.getMessage.contains("destination already exists") =>
|
||||
import org.apache.ivy.util.Message
|
||||
Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.")
|
||||
super.put(source, destination, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,71 +6,66 @@ package sbt
|
|||
import java.io.File
|
||||
import org.apache.ivy.util.url.CredentialsStore
|
||||
|
||||
object Credentials
|
||||
{
|
||||
def apply(realm: String, host: String, userName: String, passwd: String): Credentials =
|
||||
new DirectCredentials(realm, host, userName, passwd)
|
||||
def apply(file: File): Credentials =
|
||||
new FileCredentials(file)
|
||||
object Credentials {
|
||||
def apply(realm: String, host: String, userName: String, passwd: String): Credentials =
|
||||
new DirectCredentials(realm, host, userName, passwd)
|
||||
def apply(file: File): Credentials =
|
||||
new FileCredentials(file)
|
||||
|
||||
/** Add the provided credentials to Ivy's credentials cache.*/
|
||||
def add(realm: String, host: String, userName: String, passwd: String): Unit =
|
||||
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
|
||||
/** Load credentials from the given file into Ivy's credentials cache.*/
|
||||
def add(path: File, log: Logger): Unit =
|
||||
loadCredentials(path) match
|
||||
{
|
||||
case Left(err) => log.warn(err)
|
||||
case Right(dc) => add(dc.realm, dc.host, dc.userName, dc.passwd)
|
||||
}
|
||||
/** Add the provided credentials to Ivy's credentials cache.*/
|
||||
def add(realm: String, host: String, userName: String, passwd: String): Unit =
|
||||
CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd)
|
||||
/** Load credentials from the given file into Ivy's credentials cache.*/
|
||||
def add(path: File, log: Logger): Unit =
|
||||
loadCredentials(path) match {
|
||||
case Left(err) => log.warn(err)
|
||||
case Right(dc) => add(dc.realm, dc.host, dc.userName, dc.passwd)
|
||||
}
|
||||
|
||||
def forHost(sc: Seq[Credentials], host: String) = allDirect(sc) find { _.host == host }
|
||||
def allDirect(sc: Seq[Credentials]): Seq[DirectCredentials] = sc map toDirect
|
||||
def toDirect(c: Credentials): DirectCredentials = c match {
|
||||
case dc: DirectCredentials => dc
|
||||
case fc: FileCredentials => loadCredentials(fc.path) match {
|
||||
case Left(err) => error(err)
|
||||
case Right(dc) => dc
|
||||
}
|
||||
}
|
||||
def forHost(sc: Seq[Credentials], host: String) = allDirect(sc) find { _.host == host }
|
||||
def allDirect(sc: Seq[Credentials]): Seq[DirectCredentials] = sc map toDirect
|
||||
def toDirect(c: Credentials): DirectCredentials = c match {
|
||||
case dc: DirectCredentials => dc
|
||||
case fc: FileCredentials => loadCredentials(fc.path) match {
|
||||
case Left(err) => error(err)
|
||||
case Right(dc) => dc
|
||||
}
|
||||
}
|
||||
|
||||
def loadCredentials(path: File): Either[String, DirectCredentials] =
|
||||
if(path.exists)
|
||||
{
|
||||
val properties = read(path)
|
||||
def get(keys: List[String]) = keys.flatMap(properties.get).headOption.toRight(keys.head + " not specified in credentials file: " + path)
|
||||
def loadCredentials(path: File): Either[String, DirectCredentials] =
|
||||
if (path.exists) {
|
||||
val properties = read(path)
|
||||
def get(keys: List[String]) = keys.flatMap(properties.get).headOption.toRight(keys.head + " not specified in credentials file: " + path)
|
||||
|
||||
IvyUtil.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match
|
||||
{
|
||||
case (Nil, List(realm, host, user, pass)) => Right( new DirectCredentials(realm, host, user, pass) )
|
||||
case (errors, _) => Left(errors.mkString("\n"))
|
||||
}
|
||||
}
|
||||
else
|
||||
Left("Credentials file " + path + " does not exist")
|
||||
IvyUtil.separate(List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get)) match {
|
||||
case (Nil, List(realm, host, user, pass)) => Right(new DirectCredentials(realm, host, user, pass))
|
||||
case (errors, _) => Left(errors.mkString("\n"))
|
||||
}
|
||||
} else
|
||||
Left("Credentials file " + path + " does not exist")
|
||||
|
||||
/** Registers each credential in `cs` with Ivy's global credentials store. */
def register(cs: Seq[Credentials], log: Logger): Unit =
  cs foreach {
    case f: FileCredentials   => add(f.path, log)
    case d: DirectCredentials => add(d.realm, d.host, d.userName, d.passwd)
  }
|
||||
|
||||
// Alias keys accepted for each setting in a credentials properties file.
// Order matters: the first alias present in the file wins, and the first
// alias names the setting in error messages (see loadCredentials).
private[this] val RealmKeys = "realm" :: Nil
private[this] val HostKeys = "host" :: "hostname" :: Nil
private[this] val UserKeys = "user" :: "user.name" :: "username" :: Nil
private[this] val PasswordKeys = "password" :: "pwd" :: "pass" :: "passwd" :: Nil
|
||||
|
||||
/**
 * Loads the properties file `from` into an immutable map, trimming each value.
 * Keys and values are converted to `String` explicitly because
 * `java.util.Properties` is a `Hashtable[AnyRef, AnyRef]`.
 */
private[this] def read(from: File): Map[String, String] =
  {
    // Explicit JavaConverters (.asScala) replaces the deprecated implicit
    // collection.JavaConversions import used previously; the conversion
    // behavior is identical but no implicit view is left in scope.
    import collection.JavaConverters._
    val properties = new java.util.Properties
    IO.load(properties, from)
    properties.asScala.map { case (k, v) => (k.toString, v.toString.trim) }.toMap
  }
|
||||
}
|
||||
|
||||
/** A credential entry usable for authenticating against a repository host. */
sealed trait Credentials

/** Credentials stored in a properties file at `path`; loaded on demand by `Credentials.loadCredentials`. */
final class FileCredentials(val path: File) extends Credentials {
  override def toString = "FileCredentials('" + path + "')"
}

/** Fully specified, in-memory credentials. */
final class DirectCredentials(val realm: String, val host: String, val userName: String, val passwd: String) extends Credentials
|
||||
|
|
|
|||
|
|
@ -7,142 +7,159 @@ final case class ScalaVersion(full: String, binary: String)
|
|||
/** Configures how a module will be cross-versioned. */
sealed trait CrossVersion

object CrossVersion {
  /** The first `major.minor` Scala version that the Scala binary version should be used for cross-versioning instead of the full version. */
  val TransitionScalaVersion = CrossVersionUtil.TransitionScalaVersion

  /** The first `major.minor` sbt version that the sbt binary version should be used for cross-versioning instead of the full version. */
  val TransitionSbtVersion = CrossVersionUtil.TransitionSbtVersion

  /** Disables cross versioning for a module.*/
  object Disabled extends CrossVersion { override def toString = "disabled" }

  /**
   * Cross-versions a module using the result of applying `remapVersion` to the binary version.
   * For example, if `remapVersion = v => "2.10"` and the binary version is "2.9.2" or "2.10",
   * the module is cross-versioned with "2.10".
   */
  final class Binary(val remapVersion: String => String) extends CrossVersion {
    override def toString = "Binary"
  }

  /**
   * Cross-versions a module with the result of applying `remapVersion` to the full version.
   * For example, if `remapVersion = v => "2.10"` and the full version is "2.9.2" or "2.10.3",
   * the module is cross-versioned with "2.10".
   */
  final class Full(val remapVersion: String => String) extends CrossVersion {
    override def toString = "Full"
  }

  /** Cross-versions a module with the full version (typically the full Scala version). */
  def full: CrossVersion = new Full(idFun)

  /**
   * Cross-versions a module with the result of applying `remapVersion` to the full version
   * (typically the full Scala version). See also [[sbt.CrossVersion.Full]].
   */
  def fullMapped(remapVersion: String => String): CrossVersion = new Full(remapVersion)

  /** Cross-versions a module with the binary version (typically the binary Scala version). */
  def binary: CrossVersion = new Binary(idFun)

  /**
   * Cross-versions a module with the result of applying `remapVersion` to the binary version
   * (typically the binary Scala version). See also [[sbt.CrossVersion.Binary]].
   */
  def binaryMapped(remapVersion: String => String): CrossVersion = new Binary(remapVersion)

  private[this] def idFun[T]: T => T = x => x

  @deprecated("Will be made private.", "0.13.1")
  def append(s: String): Option[String => String] = Some(x => crossName(x, s))

  /**
   * Construct a cross-versioning function given cross-versioning configuration `cross`,
   * full version `fullVersion` and binary version `binaryVersion`. The behavior of the
   * constructed function is as documented for the [[sbt.CrossVersion]] datatypes.
   */
  def apply(cross: CrossVersion, fullVersion: String, binaryVersion: String): Option[String => String] =
    cross match {
      case Disabled  => None
      case b: Binary => append(b.remapVersion(binaryVersion))
      case f: Full   => append(f.remapVersion(fullVersion))
    }

  /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */
  def apply(module: ModuleID, is: IvyScala): Option[String => String] =
    CrossVersion(module.crossVersion, is.scalaFullVersion, is.scalaBinaryVersion)

  /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */
  def apply(module: ModuleID, is: Option[IvyScala]): Option[String => String] =
    is flatMap { i => apply(module, i) }

  /** Cross-version each `Artifact` in `artifacts` according to cross-version function `cross`. */
  def substituteCross(artifacts: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] =
    cross match {
      case None    => artifacts
      case Some(_) => substituteCrossA(artifacts, cross)
    }

  @deprecated("Will be made private.", "0.13.1")
  def applyCross(s: String, fopt: Option[String => String]): String =
    fopt match {
      case None    => s
      case Some(f) => f(s)
    }

  @deprecated("Will be made private.", "0.13.1")
  def crossName(name: String, cross: String): String =
    name + "_" + cross

  /** Cross-versions `a` according to cross-version function `cross`. */
  def substituteCross(a: Artifact, cross: Option[String => String]): Artifact =
    a.copy(name = applyCross(a.name, cross))

  @deprecated("Will be made private.", "0.13.1")
  def substituteCrossA(as: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] =
    as.map(art => substituteCross(art, cross))

  /**
   * Constructs a function that will cross-version a ModuleID
   * for the given full and binary Scala versions `scalaFullVersion` and `scalaBinaryVersion`
   * according to the ModuleID's cross-versioning setting.
   */
  def apply(scalaFullVersion: String, scalaBinaryVersion: String): ModuleID => ModuleID = m => {
    val cross = apply(m.crossVersion, scalaFullVersion, scalaBinaryVersion)
    if (cross.isDefined)
      m.copy(name = applyCross(m.name, cross), explicitArtifacts = substituteCrossA(m.explicitArtifacts, cross))
    else
      m
  }

  @deprecated("Use CrossVersion.isScalaApiCompatible or CrossVersion.isSbtApiCompatible", "0.13.0")
  def isStable(v: String): Boolean = isScalaApiCompatible(v)

  @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0")
  def selectVersion(full: String, binary: String): String = if (isStable(full)) binary else full

  def isSbtApiCompatible(v: String): Boolean = CrossVersionUtil.isSbtApiCompatible(v)

  /**
   * Returns sbt binary interface x.y API compatible with the given version string v.
   * RCs for x.y.0 are considered API compatible.
   * Compatible versions include 0.12.0-1 and 0.12.0-RC1 for Some(0, 12).
   */
  def sbtApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.sbtApiVersion(v)

  def isScalaApiCompatible(v: String): Boolean = CrossVersionUtil.isScalaApiCompatible(v)

  /**
   * Returns Scala binary interface x.y API compatible with the given version string v.
   * Compatible versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1.
   */
  def scalaApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.scalaApiVersion(v)

  /** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2.*/
  val PartialVersion = CrossVersionUtil.PartialVersion

  /** Extracts the major and minor components of a version string `s` or returns `None` if the version is improperly formatted. */
  def partialVersion(s: String): Option[(Int, Int)] = CrossVersionUtil.partialVersion(s)

  /**
   * Computes the binary Scala version from the `full` version.
   * Full Scala versions earlier than [[sbt.CrossVersion.TransitionScalaVersion]] are returned as is.
   */
  def binaryScalaVersion(full: String): String = CrossVersionUtil.binaryScalaVersion(full)

  /**
   * Computes the binary sbt version from the `full` version.
   * Full sbt versions earlier than [[sbt.CrossVersion.TransitionSbtVersion]] are returned as is.
   */
  def binarySbtVersion(full: String): String = CrossVersionUtil.binarySbtVersion(full)

  @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0")
  def binaryVersion(full: String, cutoff: String): String = CrossVersionUtil.binaryVersion(full, cutoff)
}
|
||||
|
|
|
|||
|
|
@ -1,224 +1,222 @@
|
|||
package sbt
|
||||
|
||||
import org.apache.ivy.{core, plugins, util}
|
||||
import core.module.id.ModuleRevisionId
|
||||
import core.module.descriptor.{DefaultArtifact, DefaultExtendsDescriptor, DefaultModuleDescriptor, ModuleDescriptor}
|
||||
import core.module.descriptor.{DefaultDependencyDescriptor, DependencyDescriptor}
|
||||
import plugins.parser.{m2, ModuleDescriptorParser, ModuleDescriptorParserRegistry, ParserSettings}
|
||||
import m2.{PomModuleDescriptorBuilder, PomModuleDescriptorParser}
|
||||
import plugins.repository.Resource
|
||||
import plugins.namespace.NamespaceTransformer
|
||||
import util.extendable.ExtendableItem
|
||||
import org.apache.ivy.{ core, plugins, util }
|
||||
import core.module.id.ModuleRevisionId
|
||||
import core.module.descriptor.{ DefaultArtifact, DefaultExtendsDescriptor, DefaultModuleDescriptor, ModuleDescriptor }
|
||||
import core.module.descriptor.{ DefaultDependencyDescriptor, DependencyDescriptor }
|
||||
import plugins.parser.{ m2, ModuleDescriptorParser, ModuleDescriptorParserRegistry, ParserSettings }
|
||||
import m2.{ PomModuleDescriptorBuilder, PomModuleDescriptorParser }
|
||||
import plugins.repository.Resource
|
||||
import plugins.namespace.NamespaceTransformer
|
||||
import util.extendable.ExtendableItem
|
||||
|
||||
import java.io.{File, InputStream}
|
||||
import java.net.URL
|
||||
import java.util.regex.Pattern
|
||||
import java.io.{ File, InputStream }
|
||||
import java.net.URL
|
||||
import java.util.regex.Pattern
|
||||
|
||||
/**
 * A `ModuleDescriptorParser` that delegates parsing to `delegate` and then
 * post-processes the parsed descriptor with `transform`.
 */
final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (ModuleDescriptorParser, ModuleDescriptor) => ModuleDescriptor) extends ModuleDescriptorParser {
  override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, validate: Boolean) =
    transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, validate))

  override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, res: Resource, validate: Boolean) =
    transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, res, validate))

  // Remaining operations are forwarded untouched to the underlying parser.
  override def toIvyFile(is: InputStream, res: Resource, destFile: File, md: ModuleDescriptor) = delegate.toIvyFile(is, res, destFile, md)

  override def accept(res: Resource) = delegate.accept(res)
  override def getType() = delegate.getType()
  override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = delegate.getMetadataArtifact(mrid, res)
}
|
||||
object CustomPomParser {
  /** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/
  val InfoKeyPrefix = "info."
  val ApiURLKey = "info.apiURL"

  val SbtVersionKey = "sbtVersion"
  val ScalaVersionKey = "scalaVersion"
  val ExtraAttributesKey = "extraDependencyAttributes"
  // Pom property keys that are promoted to module extra attributes.
  private[this] val unqualifiedKeys = Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey)

  // packagings that should be jars, but that Ivy doesn't handle as jars
  val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit")
  val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform)
|
||||
|
||||
private[this] val TransformedHashKey = "e:sbtTransformHash"
// A hash of the parameters transformation is based on.
// If a descriptor has a different hash, we need to retransform it.
private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted)
// Hex digest over the UTF-8 bytes of all strings in `ss`, concatenated in order.
private[this] def hash(ss: Seq[String]): String = Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray))

// Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance.
lazy val registerDefault: Unit = ModuleDescriptorParserRegistry.getInstance.addParser(default)
|
||||
|
||||
/**
 * Applies sbt's descriptor transformation to `md` unless this exact version of
 * the transformation already ran, as recorded by the transform-hash extra info.
 */
def defaultTransform(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor =
  if (transformedByThisVersion(md)) md else defaultTransformImpl(parser, md)

/** True when `md` carries extra info whose transform hash matches the current transformation parameters. */
private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = {
  val oldTransformedHashKey = "sbtTransformHash"
  val extraInfo = md.getExtraInfo
  // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both
  Option(extraInfo).isDefined &&
    ((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match {
      case Some(TransformHash) => true
      case _                   => false
    })
}
|
||||
|
||||
/** Performs the actual descriptor transformation; returns `md` unchanged when nothing needs rewriting. */
private[this] def defaultTransformImpl(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = {
  val properties = getPomProperties(md)

  // Extracts extra attributes (currently, sbt and Scala versions) stored in the <properties> element of the pom.
  // These are attached to the module itself.
  val filtered = shouldBeUnqualified(properties)

  // Extracts extra attributes for the dependencies.
  // Because the <dependency> tag in pom.xml cannot include additional metadata,
  // sbt includes extra attributes in a 'extraDependencyAttributes' property.
  // This is read/written from/to a pure string (no element structure) because Ivy only
  // parses the immediate text nodes of the property.
  val extraDepAttributes = getDependencyExtra(filtered)

  // Fixes up the detected extension in some cases missed by Ivy.
  val convertArtifacts = artifactExtIncorrect(md)

  // Merges artifact sections for duplicate dependency definitions
  val mergeDuplicates = IvySbt.hasDuplicateDependencies(md.getDependencies)

  val unqualify = toUnqualify(filtered)
  if (unqualify.isEmpty && extraDepAttributes.isEmpty && !convertArtifacts && !mergeDuplicates)
    md
  else
    addExtra(unqualify, extraDepAttributes, parser, md)
}

// The <properties> element of the pom is used to store additional metadata, such as for sbt plugins or for the base URL for API docs.
// This is done because the pom XSD does not appear to allow extra metadata anywhere else.
// The extra sbt plugin metadata in pom.xml does not need to be readable by maven, but the other information may be.
// However, the pom.xml needs to be valid in all cases because other tools like repository managers may read the pom.xml.
private[sbt] def getPomProperties(md: ModuleDescriptor): Map[String, String] = {
  import collection.JavaConverters._
  PomModuleDescriptorBuilder.extractPomProperties(md.getExtraInfo).asInstanceOf[java.util.Map[String, String]].asScala.toMap
}

/** Prefixes each property key (other than the dependency-extra key) with "e:" so it becomes a qualified extra attribute. */
private[sbt] def toUnqualify(propertyAttributes: Map[String, String]): Map[String, String] =
  (propertyAttributes - ExtraAttributesKey) map { case (k, v) => ("e:" + k, v) }
|
||||
|
||||
/** True when any artifact in `md` uses a packaging that should be treated as a jar (see `JarPackagings`). */
private[this] def artifactExtIncorrect(md: ModuleDescriptor): Boolean =
  md.getConfigurations.exists(conf => md.getArtifacts(conf.getName).exists(art => JarPackagings(art.getExt)))

/** Keeps only the properties whose keys should become unqualified module attributes. */
private[this] def shouldBeUnqualified(m: Map[String, String]): Map[String, String] = m.filterKeys(unqualifiedKeys)

private[this] def condAddExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId =
  if (properties.isEmpty) id else addExtra(properties, id)

/** Returns a copy of `id` with `properties` merged into its qualified extra attributes. */
private[this] def addExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId = {
  import collection.JavaConverters._
  val oldExtra = qualifiedExtra(id)
  val newExtra = (oldExtra ++ properties).asJava
  ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, newExtra)
}

/** Decodes per-dependency extra attributes from the `ExtraAttributesKey` property, if present. */
private[this] def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String, String]] =
  (m get ExtraAttributesKey) match {
    case None => Map.empty
    case Some(str) =>
      def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true))
      readDependencyExtra(str).map(processDep).toMap
  }

def qualifiedExtra(item: ExtendableItem): Map[String, String] = {
  import collection.JavaConverters._
  item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap
}

/** Selects the qualified extra attributes of `item` whose keys are (when `include`) or are not custom extras. */
def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] =
  (qualifiedExtra(item) filterKeys { k => qualifiedIsExtra(k) == include })

// parses the sequence of dependencies with extra attribute information, with one dependency per line
def readDependencyExtra(s: String): Seq[ModuleRevisionId] =
  LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode)

/** Encodes, one string per dependency, those dependencies of `s` that carry custom extra attributes. */
def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] =
  s.flatMap { dd =>
    val revId = dd.getDependencyRevisionId
    if (filterCustomExtra(revId, include = true).isEmpty)
      Nil
    else
      revId.encodeToString :: Nil
  }
|
||||
|
||||
private[this] val LinesP = Pattern.compile("(?m)^")
|
||||
// parses the sequence of dependencies with extra attribute information, with one dependency per line
|
||||
def readDependencyExtra(s: String): Seq[ModuleRevisionId] =
|
||||
LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode)
|
||||
|
||||
def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey)
|
||||
private[this] val LinesP = Pattern.compile("(?m)^")
|
||||
|
||||
// Reduces the id to exclude custom extra attributes
|
||||
// This makes the id suitable as a key to associate a dependency parsed from a <dependency> element
|
||||
// with the extra attributes from the <properties> section
|
||||
def simplify(id: ModuleRevisionId): ModuleRevisionId =
|
||||
{
|
||||
import collection.JavaConverters._
|
||||
ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include=false).asJava)
|
||||
}
|
||||
def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey)
|
||||
|
||||
private[this] def addExtra(dep: DependencyDescriptor, extra: Map[ModuleRevisionId, Map[String, String]]): DependencyDescriptor =
|
||||
{
|
||||
val extras = if(extra.isEmpty) None else extra get simplify(dep.getDependencyRevisionId)
|
||||
extras match {
|
||||
case None => dep
|
||||
case Some(extraAttrs) => transform(dep, revId => addExtra(extraAttrs, revId))
|
||||
}
|
||||
}
|
||||
private[this] def transform(dep: DependencyDescriptor, f: ModuleRevisionId => ModuleRevisionId): DependencyDescriptor =
|
||||
DefaultDependencyDescriptor.transformInstance(dep, namespaceTransformer(dep.getDependencyRevisionId, f), false)
|
||||
private[this] def extraTransformer(txId: ModuleRevisionId, extra: Map[String, String]): NamespaceTransformer =
|
||||
namespaceTransformer(txId, revId => addExtra(extra, revId) )
|
||||
// Reduces the id to exclude custom extra attributes
|
||||
// This makes the id suitable as a key to associate a dependency parsed from a <dependency> element
|
||||
// with the extra attributes from the <properties> section
|
||||
def simplify(id: ModuleRevisionId): ModuleRevisionId =
|
||||
{
|
||||
import collection.JavaConverters._
|
||||
ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include = false).asJava)
|
||||
}
|
||||
|
||||
private[this] def namespaceTransformer(txId: ModuleRevisionId, f: ModuleRevisionId => ModuleRevisionId): NamespaceTransformer =
|
||||
new NamespaceTransformer {
|
||||
def transform(revId: ModuleRevisionId): ModuleRevisionId = if(revId == txId) f(revId) else revId
|
||||
def isIdentity = false
|
||||
}
|
||||
private[this] def addExtra(dep: DependencyDescriptor, extra: Map[ModuleRevisionId, Map[String, String]]): DependencyDescriptor =
|
||||
{
|
||||
val extras = if (extra.isEmpty) None else extra get simplify(dep.getDependencyRevisionId)
|
||||
extras match {
|
||||
case None => dep
|
||||
case Some(extraAttrs) => transform(dep, revId => addExtra(extraAttrs, revId))
|
||||
}
|
||||
}
|
||||
private[this] def transform(dep: DependencyDescriptor, f: ModuleRevisionId => ModuleRevisionId): DependencyDescriptor =
|
||||
DefaultDependencyDescriptor.transformInstance(dep, namespaceTransformer(dep.getDependencyRevisionId, f), false)
|
||||
private[this] def extraTransformer(txId: ModuleRevisionId, extra: Map[String, String]): NamespaceTransformer =
|
||||
namespaceTransformer(txId, revId => addExtra(extra, revId))
|
||||
|
||||
import collection.JavaConverters._
|
||||
def addExtra(properties: Map[String, String], dependencyExtra: Map[ModuleRevisionId, Map[String,String]], parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor =
|
||||
{
|
||||
val dmd = new DefaultModuleDescriptor(parser, md.getResource)
|
||||
private[this] def namespaceTransformer(txId: ModuleRevisionId, f: ModuleRevisionId => ModuleRevisionId): NamespaceTransformer =
|
||||
new NamespaceTransformer {
|
||||
def transform(revId: ModuleRevisionId): ModuleRevisionId = if (revId == txId) f(revId) else revId
|
||||
def isIdentity = false
|
||||
}
|
||||
|
||||
val mrid = addExtra(properties, md.getModuleRevisionId)
|
||||
val resolvedMrid = addExtra(properties, md.getResolvedModuleRevisionId)
|
||||
dmd.setModuleRevisionId(mrid)
|
||||
dmd.setResolvedModuleRevisionId(resolvedMrid)
|
||||
import collection.JavaConverters._
|
||||
def addExtra(properties: Map[String, String], dependencyExtra: Map[ModuleRevisionId, Map[String, String]], parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor =
|
||||
{
|
||||
val dmd = new DefaultModuleDescriptor(parser, md.getResource)
|
||||
|
||||
dmd.setDefault(md.isDefault)
|
||||
dmd.setHomePage(md.getHomePage)
|
||||
dmd.setDescription(md.getDescription)
|
||||
dmd.setLastModified(md.getLastModified)
|
||||
dmd.setStatus(md.getStatus())
|
||||
dmd.setPublicationDate(md.getPublicationDate())
|
||||
dmd.setResolvedPublicationDate(md.getResolvedPublicationDate())
|
||||
val mrid = addExtra(properties, md.getModuleRevisionId)
|
||||
val resolvedMrid = addExtra(properties, md.getResolvedModuleRevisionId)
|
||||
dmd.setModuleRevisionId(mrid)
|
||||
dmd.setResolvedModuleRevisionId(resolvedMrid)
|
||||
|
||||
for(l <- md.getLicenses) dmd.addLicense(l)
|
||||
for( (key,value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String,String]].asScala ) dmd.addExtraInfo(key, value)
|
||||
dmd.addExtraInfo(TransformedHashKey, TransformHash) // mark as transformed by this version, so we don't need to do it again
|
||||
for( (key, value) <- md.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String,String]].asScala ) dmd.addExtraAttributeNamespace(key, value)
|
||||
IvySbt.addExtraNamespace(dmd)
|
||||
dmd.setDefault(md.isDefault)
|
||||
dmd.setHomePage(md.getHomePage)
|
||||
dmd.setDescription(md.getDescription)
|
||||
dmd.setLastModified(md.getLastModified)
|
||||
dmd.setStatus(md.getStatus())
|
||||
dmd.setPublicationDate(md.getPublicationDate())
|
||||
dmd.setResolvedPublicationDate(md.getResolvedPublicationDate())
|
||||
|
||||
val withExtra = md.getDependencies map { dd => addExtra(dd, dependencyExtra) }
|
||||
val unique = IvySbt.mergeDuplicateDefinitions(withExtra)
|
||||
unique foreach dmd.addDependency
|
||||
for (l <- md.getLicenses) dmd.addLicense(l)
|
||||
for ((key, value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraInfo(key, value)
|
||||
dmd.addExtraInfo(TransformedHashKey, TransformHash) // mark as transformed by this version, so we don't need to do it again
|
||||
for ((key, value) <- md.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraAttributeNamespace(key, value)
|
||||
IvySbt.addExtraNamespace(dmd)
|
||||
|
||||
for( ed <- md.getInheritedDescriptors) dmd.addInheritedDescriptor( new DefaultExtendsDescriptor( md, ed.getLocation, ed.getExtendsTypes) )
|
||||
for( conf <- md.getConfigurations) {
|
||||
dmd.addConfiguration(conf)
|
||||
for(art <- md.getArtifacts(conf.getName)) {
|
||||
val ext = art.getExt
|
||||
val newExt = if( JarPackagings(ext) ) "jar" else ext
|
||||
val nart = new DefaultArtifact(mrid, art.getPublicationDate, art.getName, art.getType, newExt, art.getUrl, art.getQualifiedExtraAttributes)
|
||||
dmd.addArtifact(conf.getName, nart)
|
||||
}
|
||||
}
|
||||
dmd
|
||||
}
|
||||
val withExtra = md.getDependencies map { dd => addExtra(dd, dependencyExtra) }
|
||||
val unique = IvySbt.mergeDuplicateDefinitions(withExtra)
|
||||
unique foreach dmd.addDependency
|
||||
|
||||
for (ed <- md.getInheritedDescriptors) dmd.addInheritedDescriptor(new DefaultExtendsDescriptor(md, ed.getLocation, ed.getExtendsTypes))
|
||||
for (conf <- md.getConfigurations) {
|
||||
dmd.addConfiguration(conf)
|
||||
for (art <- md.getArtifacts(conf.getName)) {
|
||||
val ext = art.getExt
|
||||
val newExt = if (JarPackagings(ext)) "jar" else ext
|
||||
val nart = new DefaultArtifact(mrid, art.getPublicationDate, art.getName, art.getType, newExt, art.getUrl, art.getQualifiedExtraAttributes)
|
||||
dmd.addArtifact(conf.getName, nart)
|
||||
}
|
||||
}
|
||||
dmd
|
||||
}
|
||||
}
|
||||
|
|
@ -6,33 +6,31 @@ package sbt
|
|||
import java.io.ByteArrayInputStream
|
||||
import java.net.URL
|
||||
|
||||
import org.apache.ivy.{core, plugins}
|
||||
import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor}
|
||||
import org.apache.ivy.{ core, plugins }
|
||||
import core.module.descriptor.{ DefaultDependencyDescriptor, DefaultModuleDescriptor }
|
||||
import core.settings.IvySettings
|
||||
import plugins.parser.xml.XmlModuleDescriptorParser
|
||||
import plugins.repository.Resource
|
||||
import plugins.repository.url.URLResource
|
||||
|
||||
/** Subclasses the default Ivy file parser in order to provide access to protected methods.*/
|
||||
private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser
|
||||
{
|
||||
import XmlModuleDescriptorParser.Parser
|
||||
class CustomParser(settings: IvySettings, defaultConfig: Option[String]) extends Parser(CustomXmlParser, settings)
|
||||
{
|
||||
def setSource(url: URL) =
|
||||
{
|
||||
super.setResource(new URLResource(url))
|
||||
super.setInput(url)
|
||||
}
|
||||
def setInput(bytes: Array[Byte]) { setInput(new ByteArrayInputStream(bytes)) }
|
||||
/** Overridden because the super implementation overwrites the module descriptor.*/
|
||||
override def setResource(res: Resource) {}
|
||||
override def setMd(md: DefaultModuleDescriptor) =
|
||||
{
|
||||
super.setMd(md)
|
||||
if(defaultConfig.isDefined) setDefaultConfMapping("*->default(compile)")
|
||||
}
|
||||
override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd)
|
||||
override def getDefaultConf = defaultConfig.getOrElse(super.getDefaultConf)
|
||||
}
|
||||
private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser {
|
||||
import XmlModuleDescriptorParser.Parser
|
||||
class CustomParser(settings: IvySettings, defaultConfig: Option[String]) extends Parser(CustomXmlParser, settings) {
|
||||
def setSource(url: URL) =
|
||||
{
|
||||
super.setResource(new URLResource(url))
|
||||
super.setInput(url)
|
||||
}
|
||||
def setInput(bytes: Array[Byte]) { setInput(new ByteArrayInputStream(bytes)) }
|
||||
/** Overridden because the super implementation overwrites the module descriptor.*/
|
||||
override def setResource(res: Resource) {}
|
||||
override def setMd(md: DefaultModuleDescriptor) =
|
||||
{
|
||||
super.setMd(md)
|
||||
if (defaultConfig.isDefined) setDefaultConfMapping("*->default(compile)")
|
||||
}
|
||||
override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd)
|
||||
override def getDefaultConf = defaultConfig.getOrElse(super.getDefaultConf)
|
||||
}
|
||||
}
|
||||
|
|
@ -3,65 +3,58 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
trait DependencyFilterExtra
|
||||
{
|
||||
def moduleFilter(organization: NameFilter = AllPassFilter, name: NameFilter = AllPassFilter, revision: NameFilter = AllPassFilter): ModuleFilter =
|
||||
new ModuleFilter {
|
||||
def apply(m: ModuleID): Boolean = organization.accept(m.organization) && name.accept(m.name) && revision.accept(m.revision)
|
||||
}
|
||||
def artifactFilter(name: NameFilter = AllPassFilter, `type`: NameFilter = AllPassFilter, extension: NameFilter = AllPassFilter, classifier: NameFilter = AllPassFilter): ArtifactFilter =
|
||||
new ArtifactFilter {
|
||||
def apply(a: Artifact): Boolean = name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(a.extension) && classifier.accept(a.classifier getOrElse "")
|
||||
}
|
||||
def configurationFilter(name: NameFilter = AllPassFilter): ConfigurationFilter =
|
||||
new ConfigurationFilter {
|
||||
def apply(c: String): Boolean = name.accept(c)
|
||||
}
|
||||
trait DependencyFilterExtra {
|
||||
def moduleFilter(organization: NameFilter = AllPassFilter, name: NameFilter = AllPassFilter, revision: NameFilter = AllPassFilter): ModuleFilter =
|
||||
new ModuleFilter {
|
||||
def apply(m: ModuleID): Boolean = organization.accept(m.organization) && name.accept(m.name) && revision.accept(m.revision)
|
||||
}
|
||||
def artifactFilter(name: NameFilter = AllPassFilter, `type`: NameFilter = AllPassFilter, extension: NameFilter = AllPassFilter, classifier: NameFilter = AllPassFilter): ArtifactFilter =
|
||||
new ArtifactFilter {
|
||||
def apply(a: Artifact): Boolean = name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(a.extension) && classifier.accept(a.classifier getOrElse "")
|
||||
}
|
||||
def configurationFilter(name: NameFilter = AllPassFilter): ConfigurationFilter =
|
||||
new ConfigurationFilter {
|
||||
def apply(c: String): Boolean = name.accept(c)
|
||||
}
|
||||
}
|
||||
object DependencyFilter extends DependencyFilterExtra
|
||||
{
|
||||
def make(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): DependencyFilter =
|
||||
new DependencyFilter {
|
||||
def apply(c: String, m: ModuleID, a: Artifact): Boolean = configuration(c) && module(m) && artifact(a)
|
||||
}
|
||||
def apply(x: DependencyFilter, y: DependencyFilter, combine: (Boolean, Boolean) => Boolean): DependencyFilter =
|
||||
new DependencyFilter {
|
||||
def apply(c: String, m: ModuleID, a: Artifact): Boolean = combine(x(c, m, a), y(c, m, a))
|
||||
}
|
||||
def allPass: DependencyFilter = configurationFilter()
|
||||
implicit def fnToModuleFilter(f: ModuleID => Boolean): ModuleFilter = new ModuleFilter { def apply(m: ModuleID) = f(m) }
|
||||
implicit def fnToArtifactFilter(f: Artifact => Boolean): ArtifactFilter = new ArtifactFilter { def apply(m: Artifact) = f(m) }
|
||||
implicit def fnToConfigurationFilter(f: String => Boolean): ConfigurationFilter = new ConfigurationFilter { def apply(c: String) = f(c) }
|
||||
implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _
|
||||
object DependencyFilter extends DependencyFilterExtra {
|
||||
def make(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): DependencyFilter =
|
||||
new DependencyFilter {
|
||||
def apply(c: String, m: ModuleID, a: Artifact): Boolean = configuration(c) && module(m) && artifact(a)
|
||||
}
|
||||
def apply(x: DependencyFilter, y: DependencyFilter, combine: (Boolean, Boolean) => Boolean): DependencyFilter =
|
||||
new DependencyFilter {
|
||||
def apply(c: String, m: ModuleID, a: Artifact): Boolean = combine(x(c, m, a), y(c, m, a))
|
||||
}
|
||||
def allPass: DependencyFilter = configurationFilter()
|
||||
implicit def fnToModuleFilter(f: ModuleID => Boolean): ModuleFilter = new ModuleFilter { def apply(m: ModuleID) = f(m) }
|
||||
implicit def fnToArtifactFilter(f: Artifact => Boolean): ArtifactFilter = new ArtifactFilter { def apply(m: Artifact) = f(m) }
|
||||
implicit def fnToConfigurationFilter(f: String => Boolean): ConfigurationFilter = new ConfigurationFilter { def apply(c: String) = f(c) }
|
||||
implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _
|
||||
}
|
||||
trait DependencyFilter
|
||||
{
|
||||
def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean
|
||||
final def &&(o: DependencyFilter) = DependencyFilter(this, o, _ && _)
|
||||
final def ||(o: DependencyFilter) = DependencyFilter(this, o, _ || _)
|
||||
final def -- (o: DependencyFilter) = DependencyFilter(this, o, _ && !_)
|
||||
trait DependencyFilter {
|
||||
def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean
|
||||
final def &&(o: DependencyFilter) = DependencyFilter(this, o, _ && _)
|
||||
final def ||(o: DependencyFilter) = DependencyFilter(this, o, _ || _)
|
||||
final def --(o: DependencyFilter) = DependencyFilter(this, o, _ && !_)
|
||||
}
|
||||
sealed trait SubDepFilter[Arg, Self <: SubDepFilter[Arg, Self]] extends DependencyFilter
|
||||
{ self: Self =>
|
||||
def apply(a: Arg): Boolean
|
||||
protected def make(f: Arg => Boolean): Self
|
||||
final def &(o: Self): Self = combine(o, _ && _)
|
||||
final def |(o: Self): Self = combine(o, _ || _)
|
||||
final def -(o: Self): Self = combine(o, _ && !_)
|
||||
private[this] def combine(o: Self, f: (Boolean, Boolean) => Boolean): Self = make( (m: Arg) => f(this(m), o(m)) )
|
||||
sealed trait SubDepFilter[Arg, Self <: SubDepFilter[Arg, Self]] extends DependencyFilter { self: Self =>
|
||||
def apply(a: Arg): Boolean
|
||||
protected def make(f: Arg => Boolean): Self
|
||||
final def &(o: Self): Self = combine(o, _ && _)
|
||||
final def |(o: Self): Self = combine(o, _ || _)
|
||||
final def -(o: Self): Self = combine(o, _ && !_)
|
||||
private[this] def combine(o: Self, f: (Boolean, Boolean) => Boolean): Self = make((m: Arg) => f(this(m), o(m)))
|
||||
}
|
||||
trait ModuleFilter extends SubDepFilter[ModuleID, ModuleFilter]
|
||||
{
|
||||
protected final def make(f: ModuleID => Boolean) = new ModuleFilter { def apply(m: ModuleID) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(module)
|
||||
trait ModuleFilter extends SubDepFilter[ModuleID, ModuleFilter] {
|
||||
protected final def make(f: ModuleID => Boolean) = new ModuleFilter { def apply(m: ModuleID) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(module)
|
||||
}
|
||||
trait ArtifactFilter extends SubDepFilter[Artifact, ArtifactFilter]
|
||||
{
|
||||
protected final def make(f: Artifact => Boolean) = new ArtifactFilter { def apply(m: Artifact) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(artifact)
|
||||
trait ArtifactFilter extends SubDepFilter[Artifact, ArtifactFilter] {
|
||||
protected final def make(f: Artifact => Boolean) = new ArtifactFilter { def apply(m: Artifact) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(artifact)
|
||||
}
|
||||
trait ConfigurationFilter extends SubDepFilter[String, ConfigurationFilter]
|
||||
{
|
||||
protected final def make(f: String => Boolean) = new ConfigurationFilter { def apply(m: String) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(configuration)
|
||||
trait ConfigurationFilter extends SubDepFilter[String, ConfigurationFilter] {
|
||||
protected final def make(f: String => Boolean) = new ConfigurationFilter { def apply(m: String) = f(m) }
|
||||
final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(configuration)
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -4,276 +4,269 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import scala.xml.{Node => XNode, NodeSeq}
|
||||
import scala.xml.{ Node => XNode, NodeSeq }
|
||||
|
||||
import org.apache.ivy.{core, plugins, Ivy}
|
||||
import core.{IvyPatternHelper, LogOptions}
|
||||
import org.apache.ivy.{ core, plugins, Ivy }
|
||||
import core.{ IvyPatternHelper, LogOptions }
|
||||
import core.deliver.DeliverOptions
|
||||
import core.install.InstallOptions
|
||||
import core.module.descriptor.{Artifact => IArtifact, MDArtifact, ModuleDescriptor, DefaultModuleDescriptor}
|
||||
import core.module.descriptor.{ Artifact => IArtifact, MDArtifact, ModuleDescriptor, DefaultModuleDescriptor }
|
||||
import core.report.ResolveReport
|
||||
import core.resolve.ResolveOptions
|
||||
import plugins.resolver.{BasicResolver, DependencyResolver}
|
||||
import plugins.resolver.{ BasicResolver, DependencyResolver }
|
||||
|
||||
final class DeliverConfiguration(val deliverIvyPattern: String, val status: String, val configurations: Option[Seq[Configuration]], val logging: UpdateLogging.Value)
|
||||
final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value,
|
||||
val overwrite: Boolean) {
|
||||
def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) =
|
||||
this(ivyFile, resolverName, artifacts, checksums, logging, false)
|
||||
val overwrite: Boolean) {
|
||||
def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) =
|
||||
this(ivyFile, resolverName, artifacts, checksums, logging, false)
|
||||
}
|
||||
|
||||
final class UpdateConfiguration(val retrieve: Option[RetrieveConfiguration], val missingOk: Boolean, val logging: UpdateLogging.Value)
|
||||
final class RetrieveConfiguration(val retrieveDirectory: File, val outputPattern: String)
|
||||
final case class MakePomConfiguration(file: File, moduleInfo: ModuleInfo, configurations: Option[Seq[Configuration]] = None, extra: NodeSeq = NodeSeq.Empty, process: XNode => XNode = n => n, filterRepositories: MavenRepository => Boolean = _ => true, allRepositories: Boolean, includeTypes: Set[String] = Set(Artifact.DefaultType, Artifact.PomType))
|
||||
// exclude is a map on a restricted ModuleID
|
||||
// exclude is a map on a restricted ModuleID
|
||||
final case class GetClassifiersConfiguration(module: GetClassifiersModule, exclude: Map[ModuleID, Set[String]], configuration: UpdateConfiguration, ivyScala: Option[IvyScala])
|
||||
final case class GetClassifiersModule(id: ModuleID, modules: Seq[ModuleID], configurations: Seq[Configuration], classifiers: Seq[String])
|
||||
|
||||
/** Configures logging during an 'update'. `level` determines the amount of other information logged.
|
||||
* `Full` is the default and logs the most.
|
||||
* `DownloadOnly` only logs what is downloaded.
|
||||
* `Quiet` only displays errors.*/
|
||||
object UpdateLogging extends Enumeration
|
||||
{
|
||||
val Full, DownloadOnly, Quiet = Value
|
||||
/**
|
||||
* Configures logging during an 'update'. `level` determines the amount of other information logged.
|
||||
* `Full` is the default and logs the most.
|
||||
* `DownloadOnly` only logs what is downloaded.
|
||||
* `Quiet` only displays errors.
|
||||
*/
|
||||
object UpdateLogging extends Enumeration {
|
||||
val Full, DownloadOnly, Quiet = Value
|
||||
}
|
||||
|
||||
object IvyActions
|
||||
{
|
||||
/** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'.*/
|
||||
def install(module: IvySbt#Module, from: String, to: String, log: Logger)
|
||||
{
|
||||
module.withModule(log) { (ivy, md, default) =>
|
||||
for(dependency <- md.getDependencies)
|
||||
{
|
||||
log.info("Installing " + dependency)
|
||||
val options = new InstallOptions
|
||||
options.setValidate(module.moduleSettings.validate)
|
||||
options.setTransitive(dependency.isTransitive)
|
||||
ivy.install(dependency.getDependencyRevisionId, from, to, options)
|
||||
}
|
||||
}
|
||||
}
|
||||
object IvyActions {
|
||||
/** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'.*/
|
||||
def install(module: IvySbt#Module, from: String, to: String, log: Logger) {
|
||||
module.withModule(log) { (ivy, md, default) =>
|
||||
for (dependency <- md.getDependencies) {
|
||||
log.info("Installing " + dependency)
|
||||
val options = new InstallOptions
|
||||
options.setValidate(module.moduleSettings.validate)
|
||||
options.setTransitive(dependency.isTransitive)
|
||||
ivy.install(dependency.getDependencyRevisionId, from, to, options)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Clears the Ivy cache, as configured by 'config'. */
|
||||
def cleanCache(ivy: IvySbt, log: Logger) = ivy.withIvy(log) { iv =>
|
||||
iv.getSettings.getResolutionCacheManager.clean()
|
||||
iv.getSettings.getRepositoryCacheManagers.foreach(_.clean())
|
||||
}
|
||||
/** Clears the Ivy cache, as configured by 'config'. */
|
||||
def cleanCache(ivy: IvySbt, log: Logger) = ivy.withIvy(log) { iv =>
|
||||
iv.getSettings.getResolutionCacheManager.clean()
|
||||
iv.getSettings.getRepositoryCacheManagers.foreach(_.clean())
|
||||
}
|
||||
|
||||
/** Creates a Maven pom from the given Ivy configuration*/
|
||||
def makePom(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger)
|
||||
{
|
||||
import configuration.{allRepositories, moduleInfo, configurations, extra, file, filterRepositories, process, includeTypes}
|
||||
module.withModule(log) { (ivy, md, default) =>
|
||||
(new MakePom(log)).write(ivy, md, moduleInfo, configurations, includeTypes, extra, process, filterRepositories, allRepositories, file)
|
||||
log.info("Wrote " + file.getAbsolutePath)
|
||||
}
|
||||
}
|
||||
/** Creates a Maven pom from the given Ivy configuration*/
|
||||
def makePom(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger) {
|
||||
import configuration.{ allRepositories, moduleInfo, configurations, extra, file, filterRepositories, process, includeTypes }
|
||||
module.withModule(log) { (ivy, md, default) =>
|
||||
(new MakePom(log)).write(ivy, md, moduleInfo, configurations, includeTypes, extra, process, filterRepositories, allRepositories, file)
|
||||
log.info("Wrote " + file.getAbsolutePath)
|
||||
}
|
||||
}
|
||||
|
||||
def deliver(module: IvySbt#Module, configuration: DeliverConfiguration, log: Logger): File =
|
||||
{
|
||||
import configuration._
|
||||
module.withModule(log) { case (ivy, md, default) =>
|
||||
val revID = md.getModuleRevisionId
|
||||
val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
|
||||
options.setConfs(IvySbt.getConfigurations(md, configurations))
|
||||
ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
|
||||
deliveredFile(ivy, deliverIvyPattern, md)
|
||||
}
|
||||
}
|
||||
def deliveredFile(ivy: Ivy, pattern: String, md: ModuleDescriptor): File =
|
||||
ivy.getSettings.resolveFile(IvyPatternHelper.substitute(pattern, md.getResolvedModuleRevisionId))
|
||||
def deliver(module: IvySbt#Module, configuration: DeliverConfiguration, log: Logger): File =
|
||||
{
|
||||
import configuration._
|
||||
module.withModule(log) {
|
||||
case (ivy, md, default) =>
|
||||
val revID = md.getModuleRevisionId
|
||||
val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
|
||||
options.setConfs(IvySbt.getConfigurations(md, configurations))
|
||||
ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
|
||||
deliveredFile(ivy, deliverIvyPattern, md)
|
||||
}
|
||||
}
|
||||
def deliveredFile(ivy: Ivy, pattern: String, md: ModuleDescriptor): File =
|
||||
ivy.getSettings.resolveFile(IvyPatternHelper.substitute(pattern, md.getResolvedModuleRevisionId))
|
||||
|
||||
def publish(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger)
|
||||
{
|
||||
import configuration._
|
||||
module.withModule(log) { case (ivy, md, default) =>
|
||||
val resolver = ivy.getSettings.getResolver(resolverName)
|
||||
if(resolver eq null) sys.error("Undefined resolver '" + resolverName + "'")
|
||||
val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) }
|
||||
val cross = crossVersionMap(module.moduleSettings)
|
||||
val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq
|
||||
withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) }
|
||||
}
|
||||
}
|
||||
private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T =
|
||||
resolver match { case br: BasicResolver => withChecksums(br, checksums)(act); case _ => act }
|
||||
private[this] def withChecksums[T](resolver: BasicResolver, checksums: Seq[String])(act: => T): T =
|
||||
{
|
||||
val previous = resolver.getChecksumAlgorithms
|
||||
resolver.setChecksums(checksums mkString ",")
|
||||
try { act }
|
||||
finally { resolver.setChecksums(previous mkString ",") }
|
||||
}
|
||||
private def crossVersionMap(moduleSettings: ModuleSettings): Option[String => String] =
|
||||
moduleSettings match {
|
||||
case i: InlineConfiguration => CrossVersion(i.module, i.ivyScala)
|
||||
case e: EmptyConfiguration => CrossVersion(e.module, e.ivyScala)
|
||||
case _ => None
|
||||
}
|
||||
def mapArtifacts(module: ModuleDescriptor, cross: Option[String => String], artifacts: Map[Artifact, File]): Seq[(IArtifact, File)] =
|
||||
{
|
||||
val rawa = artifacts.keys.toSeq
|
||||
val seqa = CrossVersion.substituteCross(rawa, cross)
|
||||
val zipped = rawa zip IvySbt.mapArtifacts(module, seqa)
|
||||
zipped map { case (a, ivyA) => (ivyA, artifacts(a)) }
|
||||
}
|
||||
/** Resolves and retrieves dependencies. 'ivyConfig' is used to produce an Ivy file and configuration.
|
||||
* 'updateConfig' configures the actual resolution and retrieval process. */
|
||||
def update(module: IvySbt#Module, configuration: UpdateConfiguration, log: Logger): UpdateReport =
|
||||
module.withModule(log) { case (ivy, md, default) =>
|
||||
val (report, err) = resolve(configuration.logging)(ivy, md, default)
|
||||
err match
|
||||
{
|
||||
case Some(x) if !configuration.missingOk =>
|
||||
processUnresolved(x, log)
|
||||
throw x
|
||||
case _ =>
|
||||
val cachedDescriptor = ivy.getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md.getModuleRevisionId)
|
||||
val uReport = IvyRetrieve.updateReport(report, cachedDescriptor)
|
||||
configuration.retrieve match
|
||||
{
|
||||
case Some(rConf) => retrieve(ivy, uReport, rConf)
|
||||
case None => uReport
|
||||
}
|
||||
}
|
||||
}
|
||||
def publish(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger) {
|
||||
import configuration._
|
||||
module.withModule(log) {
|
||||
case (ivy, md, default) =>
|
||||
val resolver = ivy.getSettings.getResolver(resolverName)
|
||||
if (resolver eq null) sys.error("Undefined resolver '" + resolverName + "'")
|
||||
val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) }
|
||||
val cross = crossVersionMap(module.moduleSettings)
|
||||
val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq
|
||||
withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) }
|
||||
}
|
||||
}
|
||||
private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T =
|
||||
resolver match { case br: BasicResolver => withChecksums(br, checksums)(act); case _ => act }
|
||||
private[this] def withChecksums[T](resolver: BasicResolver, checksums: Seq[String])(act: => T): T =
|
||||
{
|
||||
val previous = resolver.getChecksumAlgorithms
|
||||
resolver.setChecksums(checksums mkString ",")
|
||||
try { act }
|
||||
finally { resolver.setChecksums(previous mkString ",") }
|
||||
}
|
||||
private def crossVersionMap(moduleSettings: ModuleSettings): Option[String => String] =
|
||||
moduleSettings match {
|
||||
case i: InlineConfiguration => CrossVersion(i.module, i.ivyScala)
|
||||
case e: EmptyConfiguration => CrossVersion(e.module, e.ivyScala)
|
||||
case _ => None
|
||||
}
|
||||
def mapArtifacts(module: ModuleDescriptor, cross: Option[String => String], artifacts: Map[Artifact, File]): Seq[(IArtifact, File)] =
|
||||
{
|
||||
val rawa = artifacts.keys.toSeq
|
||||
val seqa = CrossVersion.substituteCross(rawa, cross)
|
||||
val zipped = rawa zip IvySbt.mapArtifacts(module, seqa)
|
||||
zipped map { case (a, ivyA) => (ivyA, artifacts(a)) }
|
||||
}
|
||||
/**
|
||||
* Resolves and retrieves dependencies. 'ivyConfig' is used to produce an Ivy file and configuration.
|
||||
* 'updateConfig' configures the actual resolution and retrieval process.
|
||||
*/
|
||||
def update(module: IvySbt#Module, configuration: UpdateConfiguration, log: Logger): UpdateReport =
|
||||
module.withModule(log) {
|
||||
case (ivy, md, default) =>
|
||||
val (report, err) = resolve(configuration.logging)(ivy, md, default)
|
||||
err match {
|
||||
case Some(x) if !configuration.missingOk =>
|
||||
processUnresolved(x, log)
|
||||
throw x
|
||||
case _ =>
|
||||
val cachedDescriptor = ivy.getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md.getModuleRevisionId)
|
||||
val uReport = IvyRetrieve.updateReport(report, cachedDescriptor)
|
||||
configuration.retrieve match {
|
||||
case Some(rConf) => retrieve(ivy, uReport, rConf)
|
||||
case None => uReport
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def processUnresolved(err: ResolveException, log: Logger)
|
||||
{
|
||||
val withExtra = err.failed.filter(!_.extraDependencyAttributes.isEmpty)
|
||||
if(!withExtra.isEmpty)
|
||||
{
|
||||
log.warn("\n\tNote: Some unresolved dependencies have extra attributes. Check that these dependencies exist with the requested attributes.")
|
||||
withExtra foreach { id => log.warn("\t\t" + id) }
|
||||
log.warn("")
|
||||
}
|
||||
}
|
||||
def groupedConflicts[T](moduleFilter: ModuleFilter, grouping: ModuleID => T)(report: UpdateReport): Map[T, Set[String]] =
|
||||
report.configurations.flatMap { confReport =>
|
||||
val evicted = confReport.evicted.filter(moduleFilter)
|
||||
val evictedSet = evicted.map( m => (m.organization, m.name) ).toSet
|
||||
val conflicted = confReport.allModules.filter( mod => evictedSet( (mod.organization, mod.name) ) )
|
||||
grouped(grouping)(conflicted ++ evicted)
|
||||
} toMap;
|
||||
def processUnresolved(err: ResolveException, log: Logger) {
|
||||
val withExtra = err.failed.filter(!_.extraDependencyAttributes.isEmpty)
|
||||
if (!withExtra.isEmpty) {
|
||||
log.warn("\n\tNote: Some unresolved dependencies have extra attributes. Check that these dependencies exist with the requested attributes.")
|
||||
withExtra foreach { id => log.warn("\t\t" + id) }
|
||||
log.warn("")
|
||||
}
|
||||
}
|
||||
def groupedConflicts[T](moduleFilter: ModuleFilter, grouping: ModuleID => T)(report: UpdateReport): Map[T, Set[String]] =
|
||||
report.configurations.flatMap { confReport =>
|
||||
val evicted = confReport.evicted.filter(moduleFilter)
|
||||
val evictedSet = evicted.map(m => (m.organization, m.name)).toSet
|
||||
val conflicted = confReport.allModules.filter(mod => evictedSet((mod.organization, mod.name)))
|
||||
grouped(grouping)(conflicted ++ evicted)
|
||||
} toMap;
|
||||
|
||||
def grouped[T](grouping: ModuleID => T)(mods: Seq[ModuleID]): Map[T, Set[String]] =
|
||||
mods groupBy(grouping) mapValues(_.map(_.revision).toSet)
|
||||
def grouped[T](grouping: ModuleID => T)(mods: Seq[ModuleID]): Map[T, Set[String]] =
|
||||
mods groupBy (grouping) mapValues (_.map(_.revision).toSet)
|
||||
|
||||
def transitiveScratch(ivySbt: IvySbt, label: String, config: GetClassifiersConfiguration, log: Logger): UpdateReport =
|
||||
{
|
||||
import config.{configuration => c, ivyScala, module => mod}
|
||||
import mod.{id, modules => deps}
|
||||
val base = restrictedCopy(id, true).copy(name = id.name + "$" + label)
|
||||
val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala))
|
||||
val report = update(module, c, log)
|
||||
val newConfig = config.copy(module = mod.copy(modules = report.allModules))
|
||||
updateClassifiers(ivySbt, newConfig, log)
|
||||
}
|
||||
def updateClassifiers(ivySbt: IvySbt, config: GetClassifiersConfiguration, log: Logger): UpdateReport =
|
||||
{
|
||||
import config.{configuration => c, module => mod, _}
|
||||
import mod.{configurations => confs, _}
|
||||
assert(!classifiers.isEmpty, "classifiers cannot be empty")
|
||||
val baseModules = modules map { m => restrictedCopy(m, true) }
|
||||
val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exclude)
|
||||
val base = restrictedCopy(id, true).copy(name = id.name + classifiers.mkString("$","_",""))
|
||||
val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala, configurations = confs))
|
||||
val upConf = new UpdateConfiguration(c.retrieve, true, c.logging)
|
||||
update(module, upConf, log)
|
||||
}
|
||||
def classifiedArtifacts(classifiers: Seq[String], exclude: Map[ModuleID, Set[String]])(m: ModuleID): Option[ModuleID] =
|
||||
{
|
||||
val excluded = exclude getOrElse(restrictedCopy(m, false), Set.empty)
|
||||
val included = classifiers filterNot excluded
|
||||
if(included.isEmpty) None else Some(m.copy(isTransitive = false, explicitArtifacts = classifiedArtifacts(m.name, included) ))
|
||||
}
|
||||
def addExcluded(report: UpdateReport, classifiers: Seq[String], exclude: Map[ModuleID, Set[String]]): UpdateReport =
|
||||
report.addMissing { id => classifiedArtifacts(id.name, classifiers filter getExcluded(id, exclude)) }
|
||||
def classifiedArtifacts(name: String, classifiers: Seq[String]): Seq[Artifact] =
|
||||
classifiers map { c => Artifact.classified(name, c) }
|
||||
private[this] def getExcluded(id: ModuleID, exclude: Map[ModuleID, Set[String]]): Set[String] =
|
||||
exclude.getOrElse(restrictedCopy(id, false), Set.empty[String])
|
||||
def transitiveScratch(ivySbt: IvySbt, label: String, config: GetClassifiersConfiguration, log: Logger): UpdateReport =
|
||||
{
|
||||
import config.{ configuration => c, ivyScala, module => mod }
|
||||
import mod.{ id, modules => deps }
|
||||
val base = restrictedCopy(id, true).copy(name = id.name + "$" + label)
|
||||
val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala))
|
||||
val report = update(module, c, log)
|
||||
val newConfig = config.copy(module = mod.copy(modules = report.allModules))
|
||||
updateClassifiers(ivySbt, newConfig, log)
|
||||
}
|
||||
def updateClassifiers(ivySbt: IvySbt, config: GetClassifiersConfiguration, log: Logger): UpdateReport =
|
||||
{
|
||||
import config.{ configuration => c, module => mod, _ }
|
||||
import mod.{ configurations => confs, _ }
|
||||
assert(!classifiers.isEmpty, "classifiers cannot be empty")
|
||||
val baseModules = modules map { m => restrictedCopy(m, true) }
|
||||
val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exclude)
|
||||
val base = restrictedCopy(id, true).copy(name = id.name + classifiers.mkString("$", "_", ""))
|
||||
val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala, configurations = confs))
|
||||
val upConf = new UpdateConfiguration(c.retrieve, true, c.logging)
|
||||
update(module, upConf, log)
|
||||
}
|
||||
def classifiedArtifacts(classifiers: Seq[String], exclude: Map[ModuleID, Set[String]])(m: ModuleID): Option[ModuleID] =
|
||||
{
|
||||
val excluded = exclude getOrElse (restrictedCopy(m, false), Set.empty)
|
||||
val included = classifiers filterNot excluded
|
||||
if (included.isEmpty) None else Some(m.copy(isTransitive = false, explicitArtifacts = classifiedArtifacts(m.name, included)))
|
||||
}
|
||||
def addExcluded(report: UpdateReport, classifiers: Seq[String], exclude: Map[ModuleID, Set[String]]): UpdateReport =
|
||||
report.addMissing { id => classifiedArtifacts(id.name, classifiers filter getExcluded(id, exclude)) }
|
||||
def classifiedArtifacts(name: String, classifiers: Seq[String]): Seq[Artifact] =
|
||||
classifiers map { c => Artifact.classified(name, c) }
|
||||
private[this] def getExcluded(id: ModuleID, exclude: Map[ModuleID, Set[String]]): Set[String] =
|
||||
exclude.getOrElse(restrictedCopy(id, false), Set.empty[String])
|
||||
|
||||
def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] =
|
||||
report.allMissing flatMap { case (_, mod, art) => art.classifier.map { c => (restrictedCopy(mod, false), c) } } groupBy(_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) }
|
||||
def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] =
|
||||
report.allMissing flatMap { case (_, mod, art) => art.classifier.map { c => (restrictedCopy(mod, false), c) } } groupBy (_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) }
|
||||
|
||||
private[this] def restrictedCopy(m: ModuleID, confs: Boolean) =
|
||||
ModuleID(m.organization, m.name, m.revision, crossVersion = m.crossVersion, extraAttributes = m.extraAttributes, configurations = if(confs) m.configurations else None)
|
||||
private[this] def resolve(logging: UpdateLogging.Value)(ivy: Ivy, module: DefaultModuleDescriptor, defaultConf: String): (ResolveReport, Option[ResolveException]) =
|
||||
{
|
||||
val resolveOptions = new ResolveOptions
|
||||
val resolveId = ResolveOptions.getDefaultResolveId(module)
|
||||
resolveOptions.setResolveId(resolveId)
|
||||
resolveOptions.setLog(ivyLogLevel(logging))
|
||||
ResolutionCache.cleanModule(module.getModuleRevisionId, resolveId, ivy.getSettings.getResolutionCacheManager)
|
||||
val resolveReport = ivy.resolve(module, resolveOptions)
|
||||
val err =
|
||||
if(resolveReport.hasError)
|
||||
{
|
||||
val messages = resolveReport.getAllProblemMessages.toArray.map(_.toString).distinct
|
||||
val failed = resolveReport.getUnresolvedDependencies.map(node => IvyRetrieve.toModuleID(node.getId))
|
||||
Some(new ResolveException(messages, failed))
|
||||
}
|
||||
else None
|
||||
(resolveReport, err)
|
||||
}
|
||||
private def retrieve(ivy: Ivy, report: UpdateReport, config: RetrieveConfiguration): UpdateReport =
|
||||
retrieve(ivy, report, config.retrieveDirectory, config.outputPattern)
|
||||
private[this] def restrictedCopy(m: ModuleID, confs: Boolean) =
|
||||
ModuleID(m.organization, m.name, m.revision, crossVersion = m.crossVersion, extraAttributes = m.extraAttributes, configurations = if (confs) m.configurations else None)
|
||||
private[this] def resolve(logging: UpdateLogging.Value)(ivy: Ivy, module: DefaultModuleDescriptor, defaultConf: String): (ResolveReport, Option[ResolveException]) =
|
||||
{
|
||||
val resolveOptions = new ResolveOptions
|
||||
val resolveId = ResolveOptions.getDefaultResolveId(module)
|
||||
resolveOptions.setResolveId(resolveId)
|
||||
resolveOptions.setLog(ivyLogLevel(logging))
|
||||
ResolutionCache.cleanModule(module.getModuleRevisionId, resolveId, ivy.getSettings.getResolutionCacheManager)
|
||||
val resolveReport = ivy.resolve(module, resolveOptions)
|
||||
val err =
|
||||
if (resolveReport.hasError) {
|
||||
val messages = resolveReport.getAllProblemMessages.toArray.map(_.toString).distinct
|
||||
val failed = resolveReport.getUnresolvedDependencies.map(node => IvyRetrieve.toModuleID(node.getId))
|
||||
Some(new ResolveException(messages, failed))
|
||||
} else None
|
||||
(resolveReport, err)
|
||||
}
|
||||
private def retrieve(ivy: Ivy, report: UpdateReport, config: RetrieveConfiguration): UpdateReport =
|
||||
retrieve(ivy, report, config.retrieveDirectory, config.outputPattern)
|
||||
|
||||
private def retrieve(ivy: Ivy, report: UpdateReport, base: File, pattern: String): UpdateReport =
|
||||
{
|
||||
val toCopy = new collection.mutable.HashSet[(File,File)]
|
||||
val retReport = report retrieve { (conf, mid, art, cached) =>
|
||||
val to = retrieveTarget(conf, mid, art, base, pattern)
|
||||
toCopy += ((cached, to))
|
||||
to
|
||||
}
|
||||
IO.copy( toCopy )
|
||||
retReport
|
||||
}
|
||||
private def retrieveTarget(conf: String, mid: ModuleID, art: Artifact, base: File, pattern: String): File =
|
||||
new File(base, substitute(conf, mid, art, pattern))
|
||||
private def retrieve(ivy: Ivy, report: UpdateReport, base: File, pattern: String): UpdateReport =
|
||||
{
|
||||
val toCopy = new collection.mutable.HashSet[(File, File)]
|
||||
val retReport = report retrieve { (conf, mid, art, cached) =>
|
||||
val to = retrieveTarget(conf, mid, art, base, pattern)
|
||||
toCopy += ((cached, to))
|
||||
to
|
||||
}
|
||||
IO.copy(toCopy)
|
||||
retReport
|
||||
}
|
||||
private def retrieveTarget(conf: String, mid: ModuleID, art: Artifact, base: File, pattern: String): File =
|
||||
new File(base, substitute(conf, mid, art, pattern))
|
||||
|
||||
private def substitute(conf: String, mid: ModuleID, art: Artifact, pattern: String): String =
|
||||
{
|
||||
val mextra = IvySbt.javaMap(mid.extraAttributes, true)
|
||||
val aextra = IvySbt.extra(art, true)
|
||||
IvyPatternHelper.substitute(pattern, mid.organization, mid.name, mid.revision, art.name, art.`type`, art.extension, conf, mextra, aextra)
|
||||
}
|
||||
private def substitute(conf: String, mid: ModuleID, art: Artifact, pattern: String): String =
|
||||
{
|
||||
val mextra = IvySbt.javaMap(mid.extraAttributes, true)
|
||||
val aextra = IvySbt.extra(art, true)
|
||||
IvyPatternHelper.substitute(pattern, mid.organization, mid.name, mid.revision, art.name, art.`type`, art.extension, conf, mextra, aextra)
|
||||
}
|
||||
|
||||
import UpdateLogging.{Quiet, Full, DownloadOnly}
|
||||
import LogOptions.{LOG_QUIET, LOG_DEFAULT, LOG_DOWNLOAD_ONLY}
|
||||
private def ivyLogLevel(level: UpdateLogging.Value) =
|
||||
level match
|
||||
{
|
||||
case Quiet => LOG_QUIET
|
||||
case DownloadOnly => LOG_DOWNLOAD_ONLY
|
||||
case Full => LOG_DEFAULT
|
||||
}
|
||||
import UpdateLogging.{ Quiet, Full, DownloadOnly }
|
||||
import LogOptions.{ LOG_QUIET, LOG_DEFAULT, LOG_DOWNLOAD_ONLY }
|
||||
private def ivyLogLevel(level: UpdateLogging.Value) =
|
||||
level match {
|
||||
case Quiet => LOG_QUIET
|
||||
case DownloadOnly => LOG_DOWNLOAD_ONLY
|
||||
case Full => LOG_DEFAULT
|
||||
}
|
||||
|
||||
def publish(module: ModuleDescriptor, artifacts: Seq[(IArtifact, File)], resolver: DependencyResolver, overwrite: Boolean): Unit =
|
||||
{
|
||||
if (artifacts.nonEmpty) {
|
||||
checkFilesPresent(artifacts)
|
||||
try {
|
||||
resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite);
|
||||
for( (artifact, file) <- artifacts)
|
||||
resolver.publish(artifact, file, overwrite)
|
||||
resolver.commitPublishTransaction()
|
||||
} catch {
|
||||
case e: Throwable =>
|
||||
try { resolver.abortPublishTransaction() }
|
||||
finally { throw e }
|
||||
}
|
||||
}
|
||||
}
|
||||
private[this] def checkFilesPresent(artifacts: Seq[(IArtifact, File)])
|
||||
{
|
||||
val missing = artifacts filter { case (a, file) => !file.exists }
|
||||
if(missing.nonEmpty)
|
||||
error("Missing files for publishing:\n\t" + missing.map(_._2.getAbsolutePath).mkString("\n\t"))
|
||||
}
|
||||
def publish(module: ModuleDescriptor, artifacts: Seq[(IArtifact, File)], resolver: DependencyResolver, overwrite: Boolean): Unit =
|
||||
{
|
||||
if (artifacts.nonEmpty) {
|
||||
checkFilesPresent(artifacts)
|
||||
try {
|
||||
resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite);
|
||||
for ((artifact, file) <- artifacts)
|
||||
resolver.publish(artifact, file, overwrite)
|
||||
resolver.commitPublishTransaction()
|
||||
} catch {
|
||||
case e: Throwable =>
|
||||
try { resolver.abortPublishTransaction() }
|
||||
finally { throw e }
|
||||
}
|
||||
}
|
||||
}
|
||||
private[this] def checkFilesPresent(artifacts: Seq[(IArtifact, File)]) {
|
||||
val missing = artifacts filter { case (a, file) => !file.exists }
|
||||
if (missing.nonEmpty)
|
||||
error("Missing files for publishing:\n\t" + missing.map(_._2.getAbsolutePath).mkString("\n\t"))
|
||||
}
|
||||
}
|
||||
final class ResolveException(val messages: Seq[String], val failed: Seq[ModuleID]) extends RuntimeException(messages.mkString("\n"))
|
||||
|
|
|
|||
|
|
@ -6,102 +6,93 @@ package sbt
|
|||
import java.io.File
|
||||
import java.net.URL
|
||||
|
||||
import org.apache.ivy.{core, plugins, util}
|
||||
import core.cache.{ArtifactOrigin, CacheDownloadOptions, DefaultRepositoryCacheManager}
|
||||
import core.module.descriptor.{Artifact => IvyArtifact, DefaultArtifact}
|
||||
import plugins.repository.file.{FileRepository=>IvyFileRepository, FileResource}
|
||||
import plugins.repository.{ArtifactResourceResolver, Resource, ResourceDownloader}
|
||||
import org.apache.ivy.{ core, plugins, util }
|
||||
import core.cache.{ ArtifactOrigin, CacheDownloadOptions, DefaultRepositoryCacheManager }
|
||||
import core.module.descriptor.{ Artifact => IvyArtifact, DefaultArtifact }
|
||||
import plugins.repository.file.{ FileRepository => IvyFileRepository, FileResource }
|
||||
import plugins.repository.{ ArtifactResourceResolver, Resource, ResourceDownloader }
|
||||
import plugins.resolver.util.ResolvedResource
|
||||
import util.FileUtil
|
||||
|
||||
class NotInCache(val id: ModuleID, cause: Throwable)
|
||||
extends RuntimeException(NotInCache(id, cause), cause)
|
||||
{
|
||||
def this(id: ModuleID) = this(id, null)
|
||||
extends RuntimeException(NotInCache(id, cause), cause) {
|
||||
def this(id: ModuleID) = this(id, null)
|
||||
}
|
||||
private object NotInCache
|
||||
{
|
||||
def apply(id: ModuleID, cause: Throwable) =
|
||||
{
|
||||
val postfix = if(cause == null) "" else (": " +cause.toString)
|
||||
"File for " + id + " not in cache" + postfix
|
||||
}
|
||||
private object NotInCache {
|
||||
def apply(id: ModuleID, cause: Throwable) =
|
||||
{
|
||||
val postfix = if (cause == null) "" else (": " + cause.toString)
|
||||
"File for " + id + " not in cache" + postfix
|
||||
}
|
||||
}
|
||||
/** Provides methods for working at the level of a single jar file with the default Ivy cache.*/
|
||||
class IvyCache(val ivyHome: Option[File])
|
||||
{
|
||||
def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock")
|
||||
/** Caches the given 'file' with the given ID. It may be retrieved or cleared using this ID.*/
|
||||
def cacheJar(moduleID: ModuleID, file: File, lock: Option[xsbti.GlobalLock], log: Logger)
|
||||
{
|
||||
val artifact = defaultArtifact(moduleID)
|
||||
val resolved = new ResolvedResource(new FileResource(new IvyFileRepository, file), moduleID.revision)
|
||||
withDefaultCache(lock, log) { cache =>
|
||||
val resolver = new ArtifactResourceResolver { def resolve(artifact: IvyArtifact) = resolved }
|
||||
cache.download(artifact, resolver, new FileDownloader, new CacheDownloadOptions)
|
||||
}
|
||||
}
|
||||
/** Clears the cache of the jar for the given ID.*/
|
||||
def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)
|
||||
{
|
||||
try { withCachedJar(id, lock, log)(_.delete) }
|
||||
catch { case e: Exception => log.debug("Error cleaning cached jar: " + e.toString) }
|
||||
}
|
||||
/** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown.*/
|
||||
def retrieveCachedJar(id: ModuleID, toDirectory: File, lock: Option[xsbti.GlobalLock], log: Logger) =
|
||||
withCachedJar(id, lock, log) { cachedFile =>
|
||||
val copyTo = new File(toDirectory, cachedFile.getName)
|
||||
FileUtil.copy(cachedFile, copyTo, null)
|
||||
copyTo
|
||||
}
|
||||
class IvyCache(val ivyHome: Option[File]) {
|
||||
def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock")
|
||||
/** Caches the given 'file' with the given ID. It may be retrieved or cleared using this ID.*/
|
||||
def cacheJar(moduleID: ModuleID, file: File, lock: Option[xsbti.GlobalLock], log: Logger) {
|
||||
val artifact = defaultArtifact(moduleID)
|
||||
val resolved = new ResolvedResource(new FileResource(new IvyFileRepository, file), moduleID.revision)
|
||||
withDefaultCache(lock, log) { cache =>
|
||||
val resolver = new ArtifactResourceResolver { def resolve(artifact: IvyArtifact) = resolved }
|
||||
cache.download(artifact, resolver, new FileDownloader, new CacheDownloadOptions)
|
||||
}
|
||||
}
|
||||
/** Clears the cache of the jar for the given ID.*/
|
||||
def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger) {
|
||||
try { withCachedJar(id, lock, log)(_.delete) }
|
||||
catch { case e: Exception => log.debug("Error cleaning cached jar: " + e.toString) }
|
||||
}
|
||||
/** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown.*/
|
||||
def retrieveCachedJar(id: ModuleID, toDirectory: File, lock: Option[xsbti.GlobalLock], log: Logger) =
|
||||
withCachedJar(id, lock, log) { cachedFile =>
|
||||
val copyTo = new File(toDirectory, cachedFile.getName)
|
||||
FileUtil.copy(cachedFile, copyTo, null)
|
||||
copyTo
|
||||
}
|
||||
|
||||
/** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown .*/
|
||||
def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)(f: File => T): T =
|
||||
{
|
||||
val cachedFile =
|
||||
try
|
||||
{
|
||||
withDefaultCache(lock, log) { cache =>
|
||||
val artifact = defaultArtifact(id)
|
||||
cache.getArchiveFileInCache(artifact, unknownOrigin(artifact))
|
||||
}
|
||||
}
|
||||
catch { case e: Exception => throw new NotInCache(id, e) }
|
||||
/** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown .*/
|
||||
def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)(f: File => T): T =
|
||||
{
|
||||
val cachedFile =
|
||||
try {
|
||||
withDefaultCache(lock, log) { cache =>
|
||||
val artifact = defaultArtifact(id)
|
||||
cache.getArchiveFileInCache(artifact, unknownOrigin(artifact))
|
||||
}
|
||||
} catch { case e: Exception => throw new NotInCache(id, e) }
|
||||
|
||||
if(cachedFile.exists) f(cachedFile) else throw new NotInCache(id)
|
||||
}
|
||||
/** Calls the given function with the default Ivy cache.*/
|
||||
def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)(f: DefaultRepositoryCacheManager => T): T =
|
||||
{
|
||||
val (ivy, local) = basicLocalIvy(lock, log)
|
||||
ivy.withIvy(log) { ivy =>
|
||||
val cache = ivy.getSettings.getDefaultRepositoryCacheManager.asInstanceOf[DefaultRepositoryCacheManager]
|
||||
cache.setUseOrigin(false)
|
||||
f(cache)
|
||||
}
|
||||
}
|
||||
private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact)
|
||||
/** A minimal Ivy setup with only a local resolver and the current directory as the base directory.*/
|
||||
private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) =
|
||||
{
|
||||
val local = Resolver.defaultLocal
|
||||
val paths = new IvyPaths(new File("."), ivyHome)
|
||||
val conf = new InlineIvyConfiguration(paths, Seq(local), Nil, Nil, false, lock, IvySbt.DefaultChecksums, None, log)
|
||||
(new IvySbt(conf), local)
|
||||
}
|
||||
/** Creates a default jar artifact based on the given ID.*/
|
||||
private def defaultArtifact(moduleID: ModuleID): IvyArtifact =
|
||||
new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar")
|
||||
if (cachedFile.exists) f(cachedFile) else throw new NotInCache(id)
|
||||
}
|
||||
/** Calls the given function with the default Ivy cache.*/
|
||||
def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)(f: DefaultRepositoryCacheManager => T): T =
|
||||
{
|
||||
val (ivy, local) = basicLocalIvy(lock, log)
|
||||
ivy.withIvy(log) { ivy =>
|
||||
val cache = ivy.getSettings.getDefaultRepositoryCacheManager.asInstanceOf[DefaultRepositoryCacheManager]
|
||||
cache.setUseOrigin(false)
|
||||
f(cache)
|
||||
}
|
||||
}
|
||||
private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact)
|
||||
/** A minimal Ivy setup with only a local resolver and the current directory as the base directory.*/
|
||||
private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) =
|
||||
{
|
||||
val local = Resolver.defaultLocal
|
||||
val paths = new IvyPaths(new File("."), ivyHome)
|
||||
val conf = new InlineIvyConfiguration(paths, Seq(local), Nil, Nil, false, lock, IvySbt.DefaultChecksums, None, log)
|
||||
(new IvySbt(conf), local)
|
||||
}
|
||||
/** Creates a default jar artifact based on the given ID.*/
|
||||
private def defaultArtifact(moduleID: ModuleID): IvyArtifact =
|
||||
new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar")
|
||||
}
|
||||
/** Required by Ivy for copying to the cache.*/
|
||||
private class FileDownloader extends ResourceDownloader with NotNull
|
||||
{
|
||||
def download(artifact: IvyArtifact, resource: Resource, dest: File)
|
||||
{
|
||||
if(dest.exists()) dest.delete()
|
||||
val part = new File(dest.getAbsolutePath + ".part")
|
||||
FileUtil.copy(resource.openStream, part, null)
|
||||
if(!part.renameTo(dest))
|
||||
sys.error("Could not move temporary file " + part + " to final location " + dest)
|
||||
}
|
||||
private class FileDownloader extends ResourceDownloader with NotNull {
|
||||
def download(artifact: IvyArtifact, resource: Resource, dest: File) {
|
||||
if (dest.exists()) dest.delete()
|
||||
val part = new File(dest.getAbsolutePath + ".part")
|
||||
FileUtil.copy(resource.openStream, part, null)
|
||||
if (!part.renameTo(dest))
|
||||
sys.error("Could not move temporary file " + part + " to final location " + dest)
|
||||
}
|
||||
}
|
||||
|
|
@ -4,120 +4,104 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URI,URL}
|
||||
import java.net.{ URI, URL }
|
||||
import scala.xml.NodeSeq
|
||||
|
||||
final class IvyPaths(val baseDirectory: File, val ivyHome: Option[File])
|
||||
{
|
||||
def withBase(newBaseDirectory: File) = new IvyPaths(newBaseDirectory, ivyHome)
|
||||
final class IvyPaths(val baseDirectory: File, val ivyHome: Option[File]) {
|
||||
def withBase(newBaseDirectory: File) = new IvyPaths(newBaseDirectory, ivyHome)
|
||||
}
|
||||
sealed trait IvyConfiguration
|
||||
{
|
||||
type This <: IvyConfiguration
|
||||
def lock: Option[xsbti.GlobalLock]
|
||||
def baseDirectory: File
|
||||
def log: Logger
|
||||
def withBase(newBaseDirectory: File): This
|
||||
sealed trait IvyConfiguration {
|
||||
type This <: IvyConfiguration
|
||||
def lock: Option[xsbti.GlobalLock]
|
||||
def baseDirectory: File
|
||||
def log: Logger
|
||||
def withBase(newBaseDirectory: File): This
|
||||
}
|
||||
final class InlineIvyConfiguration(val paths: IvyPaths, val resolvers: Seq[Resolver], val otherResolvers: Seq[Resolver],
|
||||
val moduleConfigurations: Seq[ModuleConfiguration], val localOnly: Boolean, val lock: Option[xsbti.GlobalLock],
|
||||
val checksums: Seq[String], val resolutionCacheDir: Option[File], val log: Logger) extends IvyConfiguration
|
||||
{
|
||||
@deprecated("Use the variant that accepts the resolution cache location.", "0.13.0")
|
||||
def this(paths: IvyPaths, resolvers: Seq[Resolver], otherResolvers: Seq[Resolver],
|
||||
moduleConfigurations: Seq[ModuleConfiguration], localOnly: Boolean, lock: Option[xsbti.GlobalLock],
|
||||
checksums: Seq[String], log: Logger) =
|
||||
this(paths, resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, None, log)
|
||||
val moduleConfigurations: Seq[ModuleConfiguration], val localOnly: Boolean, val lock: Option[xsbti.GlobalLock],
|
||||
val checksums: Seq[String], val resolutionCacheDir: Option[File], val log: Logger) extends IvyConfiguration {
|
||||
@deprecated("Use the variant that accepts the resolution cache location.", "0.13.0")
|
||||
def this(paths: IvyPaths, resolvers: Seq[Resolver], otherResolvers: Seq[Resolver],
|
||||
moduleConfigurations: Seq[ModuleConfiguration], localOnly: Boolean, lock: Option[xsbti.GlobalLock],
|
||||
checksums: Seq[String], log: Logger) =
|
||||
this(paths, resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, None, log)
|
||||
|
||||
type This = InlineIvyConfiguration
|
||||
def baseDirectory = paths.baseDirectory
|
||||
def withBase(newBase: File) = new InlineIvyConfiguration(paths.withBase(newBase), resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log)
|
||||
def changeResolvers(newResolvers: Seq[Resolver]) = new InlineIvyConfiguration(paths, newResolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log)
|
||||
type This = InlineIvyConfiguration
|
||||
def baseDirectory = paths.baseDirectory
|
||||
def withBase(newBase: File) = new InlineIvyConfiguration(paths.withBase(newBase), resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log)
|
||||
def changeResolvers(newResolvers: Seq[Resolver]) = new InlineIvyConfiguration(paths, newResolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log)
|
||||
}
|
||||
final class ExternalIvyConfiguration(val baseDirectory: File, val uri: URI, val lock: Option[xsbti.GlobalLock], val extraResolvers: Seq[Resolver], val log: Logger) extends IvyConfiguration
|
||||
{
|
||||
type This = ExternalIvyConfiguration
|
||||
def withBase(newBase: File) = new ExternalIvyConfiguration(newBase, uri, lock, extraResolvers, log)
|
||||
final class ExternalIvyConfiguration(val baseDirectory: File, val uri: URI, val lock: Option[xsbti.GlobalLock], val extraResolvers: Seq[Resolver], val log: Logger) extends IvyConfiguration {
|
||||
type This = ExternalIvyConfiguration
|
||||
def withBase(newBase: File) = new ExternalIvyConfiguration(newBase, uri, lock, extraResolvers, log)
|
||||
}
|
||||
object ExternalIvyConfiguration
|
||||
{
|
||||
def apply(baseDirectory: File, file: File, lock: Option[xsbti.GlobalLock], log: Logger) = new ExternalIvyConfiguration(baseDirectory, file.toURI, lock, Nil, log)
|
||||
object ExternalIvyConfiguration {
|
||||
def apply(baseDirectory: File, file: File, lock: Option[xsbti.GlobalLock], log: Logger) = new ExternalIvyConfiguration(baseDirectory, file.toURI, lock, Nil, log)
|
||||
}
|
||||
|
||||
object IvyConfiguration
|
||||
{
|
||||
/** Called to configure Ivy when inline resolvers are not specified.
|
||||
* This will configure Ivy with an 'ivy-settings.xml' file if there is one or else use default resolvers.*/
|
||||
@deprecated("Explicitly use either external or inline configuration.", "0.12.0")
|
||||
def apply(paths: IvyPaths, lock: Option[xsbti.GlobalLock], localOnly: Boolean, checksums: Seq[String], log: Logger): IvyConfiguration =
|
||||
{
|
||||
log.debug("Autodetecting configuration.")
|
||||
val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory)
|
||||
if(defaultIvyConfigFile.canRead)
|
||||
ExternalIvyConfiguration(paths.baseDirectory, defaultIvyConfigFile, lock, log)
|
||||
else
|
||||
new InlineIvyConfiguration(paths, Resolver.withDefaultResolvers(Nil), Nil, Nil, localOnly, lock, checksums, None, log)
|
||||
}
|
||||
object IvyConfiguration {
|
||||
/**
|
||||
* Called to configure Ivy when inline resolvers are not specified.
|
||||
* This will configure Ivy with an 'ivy-settings.xml' file if there is one or else use default resolvers.
|
||||
*/
|
||||
@deprecated("Explicitly use either external or inline configuration.", "0.12.0")
|
||||
def apply(paths: IvyPaths, lock: Option[xsbti.GlobalLock], localOnly: Boolean, checksums: Seq[String], log: Logger): IvyConfiguration =
|
||||
{
|
||||
log.debug("Autodetecting configuration.")
|
||||
val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory)
|
||||
if (defaultIvyConfigFile.canRead)
|
||||
ExternalIvyConfiguration(paths.baseDirectory, defaultIvyConfigFile, lock, log)
|
||||
else
|
||||
new InlineIvyConfiguration(paths, Resolver.withDefaultResolvers(Nil), Nil, Nil, localOnly, lock, checksums, None, log)
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait ModuleSettings
|
||||
{
|
||||
def validate: Boolean
|
||||
def ivyScala: Option[IvyScala]
|
||||
def noScala: ModuleSettings
|
||||
sealed trait ModuleSettings {
|
||||
def validate: Boolean
|
||||
def ivyScala: Option[IvyScala]
|
||||
def noScala: ModuleSettings
|
||||
}
|
||||
final case class IvyFileConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings
|
||||
{
|
||||
def noScala = copy(ivyScala = None)
|
||||
final case class IvyFileConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings {
|
||||
def noScala = copy(ivyScala = None)
|
||||
}
|
||||
final case class PomConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings
|
||||
{
|
||||
def noScala = copy(ivyScala = None)
|
||||
final case class PomConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings {
|
||||
def noScala = copy(ivyScala = None)
|
||||
}
|
||||
final case class InlineConfiguration(module: ModuleID, moduleInfo: ModuleInfo, dependencies: Seq[ModuleID], overrides: Set[ModuleID] = Set.empty, ivyXML: NodeSeq = NodeSeq.Empty, configurations: Seq[Configuration] = Nil, defaultConfiguration: Option[Configuration] = None, ivyScala: Option[IvyScala] = None, validate: Boolean = false, conflictManager: ConflictManager = ConflictManager.default) extends ModuleSettings
|
||||
{
|
||||
def withConfigurations(configurations: Seq[Configuration]) = copy(configurations = configurations)
|
||||
def noScala = copy(ivyScala = None)
|
||||
final case class InlineConfiguration(module: ModuleID, moduleInfo: ModuleInfo, dependencies: Seq[ModuleID], overrides: Set[ModuleID] = Set.empty, ivyXML: NodeSeq = NodeSeq.Empty, configurations: Seq[Configuration] = Nil, defaultConfiguration: Option[Configuration] = None, ivyScala: Option[IvyScala] = None, validate: Boolean = false, conflictManager: ConflictManager = ConflictManager.default) extends ModuleSettings {
|
||||
def withConfigurations(configurations: Seq[Configuration]) = copy(configurations = configurations)
|
||||
def noScala = copy(ivyScala = None)
|
||||
}
|
||||
@deprecated("Define a module using inline Scala (InlineConfiguration), a pom.xml (PomConfiguration), or an ivy.xml (IvyFileConfiguration).", "0.13.0")
|
||||
final case class EmptyConfiguration(module: ModuleID, moduleInfo: ModuleInfo, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings
|
||||
{
|
||||
def noScala = copy(ivyScala = None)
|
||||
final case class EmptyConfiguration(module: ModuleID, moduleInfo: ModuleInfo, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings {
|
||||
def noScala = copy(ivyScala = None)
|
||||
}
|
||||
object InlineConfiguration
|
||||
{
|
||||
def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) =
|
||||
if(explicitConfigurations.isEmpty)
|
||||
{
|
||||
defaultConfiguration match
|
||||
{
|
||||
case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil
|
||||
case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations
|
||||
case _ => Nil
|
||||
}
|
||||
}
|
||||
else
|
||||
explicitConfigurations
|
||||
object InlineConfiguration {
|
||||
def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) =
|
||||
if (explicitConfigurations.isEmpty) {
|
||||
defaultConfiguration match {
|
||||
case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil
|
||||
case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations
|
||||
case _ => Nil
|
||||
}
|
||||
} else
|
||||
explicitConfigurations
|
||||
}
|
||||
object ModuleSettings
|
||||
{
|
||||
@deprecated("Explicitly select configuration from pom.xml, ivy.xml, or inline Scala.", "0.13.0")
|
||||
def apply(ivyScala: Option[IvyScala], validate: Boolean, module: => ModuleID, moduleInfo: => ModuleInfo)(baseDirectory: File, log: Logger): ModuleSettings =
|
||||
{
|
||||
log.debug("Autodetecting dependencies.")
|
||||
val defaultPOMFile = IvySbt.defaultPOM(baseDirectory)
|
||||
if(defaultPOMFile.canRead)
|
||||
new PomConfiguration(defaultPOMFile, ivyScala, validate, true)
|
||||
else
|
||||
{
|
||||
val defaultIvy = IvySbt.defaultIvyFile(baseDirectory)
|
||||
if(defaultIvy.canRead)
|
||||
new IvyFileConfiguration(defaultIvy, ivyScala, validate, true)
|
||||
else
|
||||
{
|
||||
log.warn("No dependency configuration found, using defaults.")
|
||||
new EmptyConfiguration(module, moduleInfo, ivyScala, validate)
|
||||
}
|
||||
}
|
||||
}
|
||||
object ModuleSettings {
|
||||
@deprecated("Explicitly select configuration from pom.xml, ivy.xml, or inline Scala.", "0.13.0")
|
||||
def apply(ivyScala: Option[IvyScala], validate: Boolean, module: => ModuleID, moduleInfo: => ModuleInfo)(baseDirectory: File, log: Logger): ModuleSettings =
|
||||
{
|
||||
log.debug("Autodetecting dependencies.")
|
||||
val defaultPOMFile = IvySbt.defaultPOM(baseDirectory)
|
||||
if (defaultPOMFile.canRead)
|
||||
new PomConfiguration(defaultPOMFile, ivyScala, validate, true)
|
||||
else {
|
||||
val defaultIvy = IvySbt.defaultIvyFile(baseDirectory)
|
||||
if (defaultIvy.canRead)
|
||||
new IvyFileConfiguration(defaultIvy, ivyScala, validate, true)
|
||||
else {
|
||||
log.warn("No dependency configuration found, using defaults.")
|
||||
new EmptyConfiguration(module, moduleInfo, ivyScala, validate)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,18 +4,17 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.{URI, URL}
|
||||
import java.net.{ URI, URL }
|
||||
import scala.xml.NodeSeq
|
||||
import org.apache.ivy.plugins.resolver.{DependencyResolver, IBiblioResolver}
|
||||
import org.apache.ivy.plugins.resolver.{ DependencyResolver, IBiblioResolver }
|
||||
import org.apache.ivy.util.url.CredentialsStore
|
||||
|
||||
/** Additional information about a project module */
|
||||
final case class ModuleInfo(nameFormal: String, description: String = "", homepage: Option[URL] = None, startYear: Option[Int] = None, licenses: Seq[(String, URL)] = Nil, organizationName: String = "", organizationHomepage: Option[URL] = None, scmInfo: Option[ScmInfo] = None)
|
||||
{
|
||||
def formally(name: String) = copy(nameFormal = name)
|
||||
def describing(desc: String, home: Option[URL]) = copy(description = desc, homepage = home)
|
||||
def licensed(lics: (String, URL)*) = copy(licenses = lics)
|
||||
def organization(name: String, home: Option[URL]) = copy(organizationName = name, organizationHomepage = home)
|
||||
final case class ModuleInfo(nameFormal: String, description: String = "", homepage: Option[URL] = None, startYear: Option[Int] = None, licenses: Seq[(String, URL)] = Nil, organizationName: String = "", organizationHomepage: Option[URL] = None, scmInfo: Option[ScmInfo] = None) {
|
||||
def formally(name: String) = copy(nameFormal = name)
|
||||
def describing(desc: String, home: Option[URL]) = copy(description = desc, homepage = home)
|
||||
def licensed(lics: (String, URL)*) = copy(licenses = lics)
|
||||
def organization(name: String, home: Option[URL]) = copy(organizationName = name, organizationHomepage = home)
|
||||
}
|
||||
|
||||
/** Basic SCM information for a project module */
|
||||
|
|
@ -25,20 +24,19 @@ final case class ScmInfo(browseUrl: URL, connection: String, devConnection: Opti
|
|||
final case class ExclusionRule(organization: String = "*", name: String = "*", artifact: String = "*", configurations: Seq[String] = Nil)
|
||||
|
||||
final case class ModuleConfiguration(organization: String, name: String, revision: String, resolver: Resolver)
|
||||
object ModuleConfiguration
|
||||
{
|
||||
def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver)
|
||||
def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver)
|
||||
object ModuleConfiguration {
|
||||
def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver)
|
||||
def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver)
|
||||
}
|
||||
|
||||
final case class ConflictManager(name: String, organization: String = "*", module: String = "*")
|
||||
|
||||
/** See http://ant.apache.org/ivy/history/latest-milestone/settings/conflict-managers.html for details of the different conflict managers.*/
|
||||
object ConflictManager {
|
||||
val all = ConflictManager("all")
|
||||
val latestTime = ConflictManager("latest-time")
|
||||
val latestRevision = ConflictManager("latest-revision")
|
||||
val latestCompatible = ConflictManager("latest-compatible")
|
||||
val strict = ConflictManager("strict")
|
||||
val default = latestRevision
|
||||
val all = ConflictManager("all")
|
||||
val latestTime = ConflictManager("latest-time")
|
||||
val latestRevision = ConflictManager("latest-revision")
|
||||
val latestCompatible = ConflictManager("latest-compatible")
|
||||
val strict = ConflictManager("strict")
|
||||
val default = latestRevision
|
||||
}
|
||||
|
|
@ -3,56 +3,51 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import org.apache.ivy.util.{Message, MessageLogger, MessageLoggerEngine}
|
||||
import org.apache.ivy.util.{ Message, MessageLogger, MessageLoggerEngine }
|
||||
|
||||
/** Interface to Ivy logging. */
|
||||
private final class IvyLoggerInterface(logger: Logger) extends MessageLogger
|
||||
{
|
||||
def rawlog(msg: String, level: Int) = log(msg, level)
|
||||
def log(msg: String, level: Int)
|
||||
{
|
||||
import Message.{MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR}
|
||||
level match
|
||||
{
|
||||
case MSG_DEBUG => debug(msg)
|
||||
case MSG_VERBOSE => verbose(msg)
|
||||
case MSG_INFO => info(msg)
|
||||
case MSG_WARN => warn(msg)
|
||||
case MSG_ERR => error(msg)
|
||||
}
|
||||
}
|
||||
//DEBUG level messages are very verbose and rarely useful to users.
|
||||
// TODO: provide access to this information some other way
|
||||
def debug(msg: String) {}
|
||||
def verbose(msg: String) = logger.verbose(msg)
|
||||
def deprecated(msg: String) = warn(msg)
|
||||
def info(msg: String) = logger.info(msg)
|
||||
def rawinfo(msg: String) = info(msg)
|
||||
def warn(msg: String) = logger.warn(msg)
|
||||
def error(msg: String) = if(SbtIvyLogger.acceptError(msg)) logger.error(msg)
|
||||
|
||||
private def emptyList = java.util.Collections.emptyList[String]
|
||||
def getProblems = emptyList
|
||||
def getWarns = emptyList
|
||||
def getErrors = emptyList
|
||||
private final class IvyLoggerInterface(logger: Logger) extends MessageLogger {
|
||||
def rawlog(msg: String, level: Int) = log(msg, level)
|
||||
def log(msg: String, level: Int) {
|
||||
import Message.{ MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR }
|
||||
level match {
|
||||
case MSG_DEBUG => debug(msg)
|
||||
case MSG_VERBOSE => verbose(msg)
|
||||
case MSG_INFO => info(msg)
|
||||
case MSG_WARN => warn(msg)
|
||||
case MSG_ERR => error(msg)
|
||||
}
|
||||
}
|
||||
//DEBUG level messages are very verbose and rarely useful to users.
|
||||
// TODO: provide access to this information some other way
|
||||
def debug(msg: String) {}
|
||||
def verbose(msg: String) = logger.verbose(msg)
|
||||
def deprecated(msg: String) = warn(msg)
|
||||
def info(msg: String) = logger.info(msg)
|
||||
def rawinfo(msg: String) = info(msg)
|
||||
def warn(msg: String) = logger.warn(msg)
|
||||
def error(msg: String) = if (SbtIvyLogger.acceptError(msg)) logger.error(msg)
|
||||
|
||||
def clearProblems = ()
|
||||
def sumupProblems = clearProblems()
|
||||
def progress = ()
|
||||
def endProgress = ()
|
||||
private def emptyList = java.util.Collections.emptyList[String]
|
||||
def getProblems = emptyList
|
||||
def getWarns = emptyList
|
||||
def getErrors = emptyList
|
||||
|
||||
def endProgress(msg: String) = info(msg)
|
||||
def isShowProgress = false
|
||||
def setShowProgress(progress: Boolean) {}
|
||||
def clearProblems = ()
|
||||
def sumupProblems = clearProblems()
|
||||
def progress = ()
|
||||
def endProgress = ()
|
||||
|
||||
def endProgress(msg: String) = info(msg)
|
||||
def isShowProgress = false
|
||||
def setShowProgress(progress: Boolean) {}
|
||||
}
|
||||
private final class SbtMessageLoggerEngine extends MessageLoggerEngine
|
||||
{
|
||||
/** This is a hack to filter error messages about 'unknown resolver ...'. */
|
||||
override def error(msg: String) = if(SbtIvyLogger.acceptError(msg)) super.error(msg)
|
||||
override def sumupProblems = clearProblems()
|
||||
private final class SbtMessageLoggerEngine extends MessageLoggerEngine {
|
||||
/** This is a hack to filter error messages about 'unknown resolver ...'. */
|
||||
override def error(msg: String) = if (SbtIvyLogger.acceptError(msg)) super.error(msg)
|
||||
override def sumupProblems = clearProblems()
|
||||
}
|
||||
private object SbtIvyLogger
|
||||
{
|
||||
val UnknownResolver = "unknown resolver"
|
||||
def acceptError(msg: String) = (msg ne null) && !msg.startsWith(UnknownResolver)
|
||||
private object SbtIvyLogger {
|
||||
val UnknownResolver = "unknown resolver"
|
||||
def acceptError(msg: String) = (msg ne null) && !msg.startsWith(UnknownResolver)
|
||||
}
|
||||
|
|
@ -6,51 +6,49 @@ package sbt
|
|||
import java.io.File
|
||||
import collection.mutable
|
||||
|
||||
import org.apache.ivy.core.{module, report}
|
||||
import module.descriptor.{Artifact => IvyArtifact}
|
||||
import org.apache.ivy.core.{ module, report }
|
||||
import module.descriptor.{ Artifact => IvyArtifact }
|
||||
import module.id.ModuleRevisionId
|
||||
import report.{ArtifactDownloadReport, ConfigurationResolveReport, ResolveReport}
|
||||
import report.{ ArtifactDownloadReport, ConfigurationResolveReport, ResolveReport }
|
||||
|
||||
object IvyRetrieve
|
||||
{
|
||||
def reports(report: ResolveReport): Seq[ConfigurationResolveReport] =
|
||||
report.getConfigurations map report.getConfigurationReport
|
||||
object IvyRetrieve {
|
||||
def reports(report: ResolveReport): Seq[ConfigurationResolveReport] =
|
||||
report.getConfigurations map report.getConfigurationReport
|
||||
|
||||
def moduleReports(confReport: ConfigurationResolveReport): Seq[ModuleReport] =
|
||||
for( revId <- confReport.getModuleRevisionIds.toArray collect { case revId: ModuleRevisionId => revId }) yield
|
||||
artifactReports(toModuleID(revId), confReport getDownloadReports revId)
|
||||
def moduleReports(confReport: ConfigurationResolveReport): Seq[ModuleReport] =
|
||||
for (revId <- confReport.getModuleRevisionIds.toArray collect { case revId: ModuleRevisionId => revId }) yield artifactReports(toModuleID(revId), confReport getDownloadReports revId)
|
||||
|
||||
def artifactReports(mid: ModuleID, artReport: Seq[ArtifactDownloadReport]): ModuleReport =
|
||||
{
|
||||
val missing = new mutable.ListBuffer[Artifact]
|
||||
val resolved = new mutable.ListBuffer[(Artifact,File)]
|
||||
for(r <- artReport) {
|
||||
val file = r.getLocalFile
|
||||
val art = toArtifact(r.getArtifact)
|
||||
if(file eq null)
|
||||
missing += art
|
||||
else
|
||||
resolved += ((art,file))
|
||||
}
|
||||
new ModuleReport(mid, resolved.toSeq, missing.toSeq)
|
||||
}
|
||||
def artifactReports(mid: ModuleID, artReport: Seq[ArtifactDownloadReport]): ModuleReport =
|
||||
{
|
||||
val missing = new mutable.ListBuffer[Artifact]
|
||||
val resolved = new mutable.ListBuffer[(Artifact, File)]
|
||||
for (r <- artReport) {
|
||||
val file = r.getLocalFile
|
||||
val art = toArtifact(r.getArtifact)
|
||||
if (file eq null)
|
||||
missing += art
|
||||
else
|
||||
resolved += ((art, file))
|
||||
}
|
||||
new ModuleReport(mid, resolved.toSeq, missing.toSeq)
|
||||
}
|
||||
|
||||
def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] =
|
||||
confReport.getEvictedNodes.map(node => toModuleID(node.getId))
|
||||
|
||||
def toModuleID(revID: ModuleRevisionId): ModuleID =
|
||||
ModuleID(revID.getOrganisation, revID.getName, revID.getRevision, extraAttributes = IvySbt.getExtraAttributes(revID))
|
||||
|
||||
def toArtifact(art: IvyArtifact): Artifact =
|
||||
{
|
||||
import art._
|
||||
Artifact(getName, getType, getExt, Option(getExtraAttribute("classifier")), getConfigurations map Configurations.config, Option(getUrl))
|
||||
}
|
||||
def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] =
|
||||
confReport.getEvictedNodes.map(node => toModuleID(node.getId))
|
||||
|
||||
def updateReport(report: ResolveReport, cachedDescriptor: File): UpdateReport =
|
||||
new UpdateReport(cachedDescriptor, reports(report) map configurationReport, updateStats(report), Map.empty) recomputeStamps()
|
||||
def updateStats(report: ResolveReport): UpdateStats =
|
||||
new UpdateStats(report.getResolveTime, report.getDownloadTime, report.getDownloadSize, false)
|
||||
def configurationReport(confReport: ConfigurationResolveReport): ConfigurationReport =
|
||||
new ConfigurationReport(confReport.getConfiguration, moduleReports(confReport), evicted(confReport))
|
||||
def toModuleID(revID: ModuleRevisionId): ModuleID =
|
||||
ModuleID(revID.getOrganisation, revID.getName, revID.getRevision, extraAttributes = IvySbt.getExtraAttributes(revID))
|
||||
|
||||
def toArtifact(art: IvyArtifact): Artifact =
|
||||
{
|
||||
import art._
|
||||
Artifact(getName, getType, getExt, Option(getExtraAttribute("classifier")), getConfigurations map Configurations.config, Option(getUrl))
|
||||
}
|
||||
|
||||
def updateReport(report: ResolveReport, cachedDescriptor: File): UpdateReport =
|
||||
new UpdateReport(cachedDescriptor, reports(report) map configurationReport, updateStats(report), Map.empty) recomputeStamps ()
|
||||
def updateStats(report: ResolveReport): UpdateStats =
|
||||
new UpdateStats(report.getResolveTime, report.getDownloadTime, report.getDownloadSize, false)
|
||||
def configurationReport(confReport: ConfigurationResolveReport): ConfigurationReport =
|
||||
new ConfigurationReport(confReport.getConfiguration, moduleReports(confReport), evicted(confReport))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,111 +6,108 @@ package sbt
|
|||
import java.util.Collections.emptyMap
|
||||
import scala.collection.mutable.HashSet
|
||||
|
||||
import org.apache.ivy.{core, plugins}
|
||||
import core.module.descriptor.{DefaultExcludeRule, ExcludeRule}
|
||||
import core.module.descriptor.{DependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor, OverrideDependencyDescriptorMediator}
|
||||
import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId}
|
||||
import org.apache.ivy.{ core, plugins }
|
||||
import core.module.descriptor.{ DefaultExcludeRule, ExcludeRule }
|
||||
import core.module.descriptor.{ DependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor, OverrideDependencyDescriptorMediator }
|
||||
import core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId }
|
||||
import plugins.matcher.ExactPatternMatcher
|
||||
|
||||
object ScalaArtifacts
|
||||
{
|
||||
import xsbti.ArtifactInfo._
|
||||
val Organization = ScalaOrganization
|
||||
val LibraryID = ScalaLibraryID
|
||||
val CompilerID = ScalaCompilerID
|
||||
def libraryDependency(version: String): ModuleID = ModuleID(Organization, LibraryID, version)
|
||||
object ScalaArtifacts {
|
||||
import xsbti.ArtifactInfo._
|
||||
val Organization = ScalaOrganization
|
||||
val LibraryID = ScalaLibraryID
|
||||
val CompilerID = ScalaCompilerID
|
||||
def libraryDependency(version: String): ModuleID = ModuleID(Organization, LibraryID, version)
|
||||
|
||||
private[sbt] def toolDependencies(org: String, version: String): Seq[ModuleID] = Seq(
|
||||
scalaToolDependency(org, ScalaArtifacts.CompilerID, version),
|
||||
scalaToolDependency(org, ScalaArtifacts.LibraryID, version)
|
||||
)
|
||||
private[this] def scalaToolDependency(org: String, id: String, version: String): ModuleID =
|
||||
ModuleID(org, id, version, Some(Configurations.ScalaTool.name + "->default,optional(default)") )
|
||||
private[sbt] def toolDependencies(org: String, version: String): Seq[ModuleID] = Seq(
|
||||
scalaToolDependency(org, ScalaArtifacts.CompilerID, version),
|
||||
scalaToolDependency(org, ScalaArtifacts.LibraryID, version)
|
||||
)
|
||||
private[this] def scalaToolDependency(org: String, id: String, version: String): ModuleID =
|
||||
ModuleID(org, id, version, Some(Configurations.ScalaTool.name + "->default,optional(default)"))
|
||||
}
|
||||
object SbtArtifacts
|
||||
{
|
||||
import xsbti.ArtifactInfo._
|
||||
val Organization = SbtOrganization
|
||||
object SbtArtifacts {
|
||||
import xsbti.ArtifactInfo._
|
||||
val Organization = SbtOrganization
|
||||
}
|
||||
|
||||
import ScalaArtifacts._
|
||||
|
||||
final case class IvyScala(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Iterable[Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean, scalaOrganization: String = ScalaArtifacts.Organization)
|
||||
|
||||
private object IvyScala
|
||||
{
|
||||
/** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). */
|
||||
def checkModule(module: DefaultModuleDescriptor, conf: String, log: Logger)(check: IvyScala)
|
||||
{
|
||||
if(check.checkExplicit)
|
||||
checkDependencies(module, check.scalaBinaryVersion, check.configurations, log)
|
||||
if(check.filterImplicit)
|
||||
excludeScalaJars(module, check.configurations)
|
||||
if(check.overrideScalaVersion)
|
||||
overrideScalaVersion(module, check.scalaFullVersion)
|
||||
}
|
||||
def overrideScalaVersion(module: DefaultModuleDescriptor, version: String)
|
||||
{
|
||||
overrideVersion(module, Organization, LibraryID, version)
|
||||
overrideVersion(module, Organization, CompilerID, version)
|
||||
}
|
||||
def overrideVersion(module: DefaultModuleDescriptor, org: String, name: String, version: String)
|
||||
{
|
||||
val id = new ModuleId(org, name)
|
||||
val over = new OverrideDependencyDescriptorMediator(null, version)
|
||||
module.addDependencyDescriptorMediator(id, ExactPatternMatcher.INSTANCE, over)
|
||||
}
|
||||
|
||||
/** Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the
|
||||
* dependencies matches scalaVersion. */
|
||||
private def checkDependencies(module: ModuleDescriptor, scalaBinaryVersion: String, configurations: Iterable[Configuration], log: Logger)
|
||||
{
|
||||
val configSet = if(configurations.isEmpty) (c: String) => true else configurationSet(configurations)
|
||||
def binaryScalaWarning(dep: DependencyDescriptor): Option[String] =
|
||||
{
|
||||
val id = dep.getDependencyRevisionId
|
||||
val depBinaryVersion = CrossVersion.binaryScalaVersion(id.getRevision)
|
||||
val mismatched = id.getOrganisation == Organization && depBinaryVersion != scalaBinaryVersion && dep.getModuleConfigurations.exists(configSet)
|
||||
if(mismatched)
|
||||
Some("Binary version (" + depBinaryVersion + ") for dependency " + id +
|
||||
"\n\tin " + module.getModuleRevisionId +
|
||||
" differs from Scala binary version in project (" + scalaBinaryVersion + ").")
|
||||
else
|
||||
None
|
||||
}
|
||||
module.getDependencies.toList.flatMap(binaryScalaWarning).toSet foreach { (s: String) => log.warn(s) }
|
||||
}
|
||||
private def configurationSet(configurations: Iterable[Configuration]) = configurations.map(_.toString).toSet
|
||||
private object IvyScala {
|
||||
/** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). */
|
||||
def checkModule(module: DefaultModuleDescriptor, conf: String, log: Logger)(check: IvyScala) {
|
||||
if (check.checkExplicit)
|
||||
checkDependencies(module, check.scalaBinaryVersion, check.configurations, log)
|
||||
if (check.filterImplicit)
|
||||
excludeScalaJars(module, check.configurations)
|
||||
if (check.overrideScalaVersion)
|
||||
overrideScalaVersion(module, check.scalaFullVersion)
|
||||
}
|
||||
def overrideScalaVersion(module: DefaultModuleDescriptor, version: String) {
|
||||
overrideVersion(module, Organization, LibraryID, version)
|
||||
overrideVersion(module, Organization, CompilerID, version)
|
||||
}
|
||||
def overrideVersion(module: DefaultModuleDescriptor, org: String, name: String, version: String) {
|
||||
val id = new ModuleId(org, name)
|
||||
val over = new OverrideDependencyDescriptorMediator(null, version)
|
||||
module.addDependencyDescriptorMediator(id, ExactPatternMatcher.INSTANCE, over)
|
||||
}
|
||||
|
||||
/** Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is
|
||||
* done because these jars are provided by the ScalaInstance of the project. The version of Scala to use
|
||||
* is done by setting scalaVersion in the project definition. */
|
||||
private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration])
|
||||
{
|
||||
val configurationNames =
|
||||
{
|
||||
val names = module.getConfigurationsNames
|
||||
if(configurations.isEmpty)
|
||||
names
|
||||
else
|
||||
{
|
||||
val configSet = configurationSet(configurations)
|
||||
configSet.intersect(HashSet(names : _*))
|
||||
configSet.toArray
|
||||
}
|
||||
}
|
||||
def excludeScalaJar(name: String): Unit =
|
||||
module.addExcludeRule(excludeRule(Organization, name, configurationNames, "jar"))
|
||||
excludeScalaJar(LibraryID)
|
||||
excludeScalaJar(CompilerID)
|
||||
}
|
||||
/** Creates an ExcludeRule that excludes artifacts with the given module organization and name for
|
||||
* the given configurations. */
|
||||
private[sbt] def excludeRule(organization: String, name: String, configurationNames: Iterable[String], excludeTypePattern: String): ExcludeRule =
|
||||
{
|
||||
val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", excludeTypePattern, "*")
|
||||
val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef,AnyRef])
|
||||
configurationNames.foreach(rule.addConfiguration)
|
||||
rule
|
||||
}
|
||||
/**
|
||||
* Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the
|
||||
* dependencies matches scalaVersion.
|
||||
*/
|
||||
private def checkDependencies(module: ModuleDescriptor, scalaBinaryVersion: String, configurations: Iterable[Configuration], log: Logger) {
|
||||
val configSet = if (configurations.isEmpty) (c: String) => true else configurationSet(configurations)
|
||||
def binaryScalaWarning(dep: DependencyDescriptor): Option[String] =
|
||||
{
|
||||
val id = dep.getDependencyRevisionId
|
||||
val depBinaryVersion = CrossVersion.binaryScalaVersion(id.getRevision)
|
||||
val mismatched = id.getOrganisation == Organization && depBinaryVersion != scalaBinaryVersion && dep.getModuleConfigurations.exists(configSet)
|
||||
if (mismatched)
|
||||
Some("Binary version (" + depBinaryVersion + ") for dependency " + id +
|
||||
"\n\tin " + module.getModuleRevisionId +
|
||||
" differs from Scala binary version in project (" + scalaBinaryVersion + ").")
|
||||
else
|
||||
None
|
||||
}
|
||||
module.getDependencies.toList.flatMap(binaryScalaWarning).toSet foreach { (s: String) => log.warn(s) }
|
||||
}
|
||||
private def configurationSet(configurations: Iterable[Configuration]) = configurations.map(_.toString).toSet
|
||||
|
||||
/**
|
||||
* Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is
|
||||
* done because these jars are provided by the ScalaInstance of the project. The version of Scala to use
|
||||
* is done by setting scalaVersion in the project definition.
|
||||
*/
|
||||
private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration]) {
|
||||
val configurationNames =
|
||||
{
|
||||
val names = module.getConfigurationsNames
|
||||
if (configurations.isEmpty)
|
||||
names
|
||||
else {
|
||||
val configSet = configurationSet(configurations)
|
||||
configSet.intersect(HashSet(names: _*))
|
||||
configSet.toArray
|
||||
}
|
||||
}
|
||||
def excludeScalaJar(name: String): Unit =
|
||||
module.addExcludeRule(excludeRule(Organization, name, configurationNames, "jar"))
|
||||
excludeScalaJar(LibraryID)
|
||||
excludeScalaJar(CompilerID)
|
||||
}
|
||||
/**
|
||||
* Creates an ExcludeRule that excludes artifacts with the given module organization and name for
|
||||
* the given configurations.
|
||||
*/
|
||||
private[sbt] def excludeRule(organization: String, name: String, configurationNames: Iterable[String], excludeTypePattern: String): ExcludeRule =
|
||||
{
|
||||
val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", excludeTypePattern, "*")
|
||||
val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef, AnyRef])
|
||||
configurationNames.foreach(rule.addConfiguration)
|
||||
rule
|
||||
}
|
||||
}
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
package sbt
|
||||
|
||||
private[sbt] object IvyUtil
|
||||
{
|
||||
def separate[A,B](l: Seq[Either[A,B]]): (Seq[A], Seq[B]) =
|
||||
(l.flatMap(_.left.toOption), l.flatMap(_.right.toOption))
|
||||
private[sbt] object IvyUtil {
|
||||
def separate[A, B](l: Seq[Either[A, B]]): (Seq[A], Seq[B]) =
|
||||
(l.flatMap(_.left.toOption), l.flatMap(_.right.toOption))
|
||||
}
|
||||
|
|
@ -10,349 +10,348 @@ package sbt
|
|||
import java.io.File
|
||||
// Node needs to be renamed to XNode because the task subproject contains a Node type that will shadow
|
||||
// scala.xml.Node when generating aggregated API documentation
|
||||
import scala.xml.{Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute}
|
||||
import scala.xml.{ Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute }
|
||||
import Configurations.Optional
|
||||
|
||||
import org.apache.ivy.{core, plugins, Ivy}
|
||||
import org.apache.ivy.{ core, plugins, Ivy }
|
||||
import core.settings.IvySettings
|
||||
import core.module.descriptor.{DependencyArtifactDescriptor, DependencyDescriptor, License, ModuleDescriptor, ExcludeRule}
|
||||
import plugins.resolver.{ChainResolver, DependencyResolver, IBiblioResolver}
|
||||
import core.module.descriptor.{ DependencyArtifactDescriptor, DependencyDescriptor, License, ModuleDescriptor, ExcludeRule }
|
||||
import plugins.resolver.{ ChainResolver, DependencyResolver, IBiblioResolver }
|
||||
|
||||
class MakePom(val log: Logger)
|
||||
{
|
||||
@deprecated("Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", "0.11.2")
|
||||
def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit =
|
||||
write(ivy, module, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], Set(Artifact.DefaultType), extra, process, filterRepositories, allRepositories, output)
|
||||
def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit =
|
||||
write(process(toPom(ivy, module, moduleInfo, configurations, includeTypes, extra, filterRepositories, allRepositories)), output)
|
||||
// use \n as newline because toString uses PrettyPrinter, which hard codes line endings to be \n
|
||||
def write(node: XNode, output: File): Unit = write(toString(node), output, "\n")
|
||||
def write(xmlString: String, output: File, newline: String)
|
||||
{
|
||||
IO.write(output, "<?xml version='1.0' encoding='" + IO.utf8.name + "'?>" + newline + xmlString)
|
||||
}
|
||||
class MakePom(val log: Logger) {
|
||||
@deprecated("Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", "0.11.2")
|
||||
def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit =
|
||||
write(ivy, module, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], Set(Artifact.DefaultType), extra, process, filterRepositories, allRepositories, output)
|
||||
def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit =
|
||||
write(process(toPom(ivy, module, moduleInfo, configurations, includeTypes, extra, filterRepositories, allRepositories)), output)
|
||||
// use \n as newline because toString uses PrettyPrinter, which hard codes line endings to be \n
|
||||
def write(node: XNode, output: File): Unit = write(toString(node), output, "\n")
|
||||
def write(xmlString: String, output: File, newline: String) {
|
||||
IO.write(output, "<?xml version='1.0' encoding='" + IO.utf8.name + "'?>" + newline + xmlString)
|
||||
}
|
||||
|
||||
def toString(node: XNode): String = new PrettyPrinter(1000, 4).format(node)
|
||||
@deprecated("Use `toPom(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, MavenRepository => Boolean, Boolean)` instead", "0.11.2")
|
||||
def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode =
|
||||
toPom(ivy, module, moduleInfo, configurations, Set(Artifact.DefaultType), extra, filterRepositories, allRepositories)
|
||||
def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode =
|
||||
(<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
{ makeModuleID(module) }
|
||||
<name>{moduleInfo.nameFormal}</name>
|
||||
{ makeStartYear(moduleInfo) }
|
||||
{ makeOrganization(moduleInfo) }
|
||||
{ makeScmInfo(moduleInfo) }
|
||||
{ extra }
|
||||
{
|
||||
val deps = depsInConfs(module, configurations)
|
||||
makeProperties(module, deps) ++
|
||||
makeDependencies(deps, includeTypes)
|
||||
}
|
||||
{ makeRepositories(ivy.getSettings, allRepositories, filterRepositories) }
|
||||
</project>)
|
||||
def toString(node: XNode): String = new PrettyPrinter(1000, 4).format(node)
|
||||
@deprecated("Use `toPom(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, MavenRepository => Boolean, Boolean)` instead", "0.11.2")
|
||||
def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode =
|
||||
toPom(ivy, module, moduleInfo, configurations, Set(Artifact.DefaultType), extra, filterRepositories, allRepositories)
|
||||
def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode =
|
||||
(<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
{ makeModuleID(module) }
|
||||
<name>{ moduleInfo.nameFormal }</name>
|
||||
{ makeStartYear(moduleInfo) }
|
||||
{ makeOrganization(moduleInfo) }
|
||||
{ makeScmInfo(moduleInfo) }
|
||||
{ extra }
|
||||
{
|
||||
val deps = depsInConfs(module, configurations)
|
||||
makeProperties(module, deps) ++
|
||||
makeDependencies(deps, includeTypes)
|
||||
}
|
||||
{ makeRepositories(ivy.getSettings, allRepositories, filterRepositories) }
|
||||
</project>)
|
||||
|
||||
def makeModuleID(module: ModuleDescriptor): NodeSeq =
|
||||
{
|
||||
val mrid = moduleDescriptor(module)
|
||||
val a: NodeSeq =
|
||||
(<groupId>{ mrid.getOrganisation }</groupId>
|
||||
<artifactId>{ mrid.getName }</artifactId>
|
||||
<packaging>{ packaging(module) }</packaging>)
|
||||
val b: NodeSeq =
|
||||
( (description(module.getDescription) ++
|
||||
homePage(module.getHomePage) ++
|
||||
revision(mrid.getRevision) ++
|
||||
licenses(module.getLicenses)) : NodeSeq )
|
||||
a ++ b
|
||||
}
|
||||
def makeModuleID(module: ModuleDescriptor): NodeSeq =
|
||||
{
|
||||
val mrid = moduleDescriptor(module)
|
||||
val a: NodeSeq =
|
||||
(<groupId>{ mrid.getOrganisation }</groupId>
|
||||
<artifactId>{ mrid.getName }</artifactId>
|
||||
<packaging>{ packaging(module) }</packaging>)
|
||||
val b: NodeSeq =
|
||||
((description(module.getDescription) ++
|
||||
homePage(module.getHomePage) ++
|
||||
revision(mrid.getRevision) ++
|
||||
licenses(module.getLicenses)): NodeSeq)
|
||||
a ++ b
|
||||
}
|
||||
|
||||
def makeStartYear(moduleInfo: ModuleInfo): NodeSeq =
|
||||
moduleInfo.startYear match {
|
||||
case Some(y) => <inceptionYear>{y}</inceptionYear>
|
||||
case _ => NodeSeq.Empty
|
||||
}
|
||||
def makeOrganization(moduleInfo: ModuleInfo): NodeSeq =
|
||||
{
|
||||
<organization>
|
||||
<name>{moduleInfo.organizationName}</name>
|
||||
{ moduleInfo.organizationHomepage match {
|
||||
case Some(h) => <url>{h}</url>
|
||||
case _ => NodeSeq.Empty
|
||||
}}
|
||||
</organization>
|
||||
}
|
||||
def makeScmInfo(moduleInfo: ModuleInfo): NodeSeq =
|
||||
{
|
||||
moduleInfo.scmInfo match {
|
||||
case Some(s) =>
|
||||
<scm>
|
||||
<url>{s.browseUrl}</url>
|
||||
<connection>{s.connection}</connection>
|
||||
{s.devConnection match {
|
||||
case Some(d) => <developerConnection>{d}</developerConnection>
|
||||
case _ => NodeSeq.Empty
|
||||
}}
|
||||
</scm>
|
||||
case _ => NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq =
|
||||
{
|
||||
val extra = IvySbt.getExtraAttributes(module)
|
||||
val depExtra = CustomPomParser.writeDependencyExtra(dependencies).mkString("\n")
|
||||
val allExtra = if(depExtra.isEmpty) extra else extra.updated(CustomPomParser.ExtraAttributesKey, depExtra)
|
||||
if(allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra)
|
||||
}
|
||||
def makeProperties(extra: Map[String,String]): NodeSeq = {
|
||||
def _extraAttributes(k: String) = if (k == CustomPomParser.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null
|
||||
<properties> {
|
||||
for( (key,value) <- extra ) yield
|
||||
(<x>{value}</x>).copy(label = key, attributes = _extraAttributes(key))
|
||||
} </properties>
|
||||
}
|
||||
def makeStartYear(moduleInfo: ModuleInfo): NodeSeq =
|
||||
moduleInfo.startYear match {
|
||||
case Some(y) => <inceptionYear>{ y }</inceptionYear>
|
||||
case _ => NodeSeq.Empty
|
||||
}
|
||||
def makeOrganization(moduleInfo: ModuleInfo): NodeSeq =
|
||||
{
|
||||
<organization>
|
||||
<name>{ moduleInfo.organizationName }</name>
|
||||
{
|
||||
moduleInfo.organizationHomepage match {
|
||||
case Some(h)=> <url>{ h }</url>
|
||||
case _ => NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
</organization>
|
||||
}
|
||||
def makeScmInfo(moduleInfo: ModuleInfo): NodeSeq =
|
||||
{
|
||||
moduleInfo.scmInfo match {
|
||||
case Some(s) =>
|
||||
<scm>
|
||||
<url>{ s.browseUrl }</url>
|
||||
<connection>{ s.connection }</connection>
|
||||
{
|
||||
s.devConnection match {
|
||||
case Some(d)=> <developerConnection>{ d }</developerConnection>
|
||||
case _=> NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
</scm>
|
||||
case _ => NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq =
|
||||
{
|
||||
val extra = IvySbt.getExtraAttributes(module)
|
||||
val depExtra = CustomPomParser.writeDependencyExtra(dependencies).mkString("\n")
|
||||
val allExtra = if (depExtra.isEmpty) extra else extra.updated(CustomPomParser.ExtraAttributesKey, depExtra)
|
||||
if (allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra)
|
||||
}
|
||||
def makeProperties(extra: Map[String, String]): NodeSeq = {
|
||||
def _extraAttributes(k: String) = if (k == CustomPomParser.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null
|
||||
<properties> {
|
||||
for ((key, value) <- extra) yield (<x>{ value }</x>).copy(label = key, attributes = _extraAttributes(key))
|
||||
} </properties>
|
||||
}
|
||||
|
||||
/**
|
||||
* Attribute tag that PrettyPrinter won't ignore, saying "don't mess with my spaces"
|
||||
* Without this, PrettyPrinter will flatten multiple entries for ExtraDependencyAttributes and make them
|
||||
* unparseable. (e.g. a plugin that depends on multiple plugins will fail)
|
||||
*/
|
||||
def xmlSpacePreserve = new PrefixedAttribute("xml", "space", "preserve", scala.xml.Null)
|
||||
/**
|
||||
* Attribute tag that PrettyPrinter won't ignore, saying "don't mess with my spaces"
|
||||
* Without this, PrettyPrinter will flatten multiple entries for ExtraDependencyAttributes and make them
|
||||
* unparseable. (e.g. a plugin that depends on multiple plugins will fail)
|
||||
*/
|
||||
def xmlSpacePreserve = new PrefixedAttribute("xml", "space", "preserve", scala.xml.Null)
|
||||
|
||||
def description(d: String) = if((d eq null) || d.isEmpty) NodeSeq.Empty else <description>{d}</description>
|
||||
def licenses(ls: Array[License]) = if(ls == null || ls.isEmpty) NodeSeq.Empty else <licenses>{ls.map(license)}</licenses>
|
||||
def license(l: License) =
|
||||
<license>
|
||||
<name>{l.getName}</name>
|
||||
<url>{l.getUrl}</url>
|
||||
<distribution>repo</distribution>
|
||||
</license>
|
||||
def homePage(homePage: String) = if(homePage eq null) NodeSeq.Empty else <url>{homePage}</url>
|
||||
def revision(version: String) = if(version ne null) <version>{version}</version> else NodeSeq.Empty
|
||||
def packaging(module: ModuleDescriptor) =
|
||||
module.getAllArtifacts match
|
||||
{
|
||||
case Array() => "pom"
|
||||
case Array(x) => x.getType
|
||||
case xs =>
|
||||
val types = xs.map(_.getType).toList.filterNot(IgnoreTypes)
|
||||
types match {
|
||||
case Nil => Artifact.PomType
|
||||
case xs if xs.contains(Artifact.DefaultType) => Artifact.DefaultType
|
||||
case x :: xs => x
|
||||
}
|
||||
}
|
||||
val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType)
|
||||
def description(d: String) = if ((d eq null) || d.isEmpty) NodeSeq.Empty else <description>{ d }</description>
|
||||
def licenses(ls: Array[License]) = if (ls == null || ls.isEmpty) NodeSeq.Empty else <licenses>{ ls.map(license) }</licenses>
|
||||
def license(l: License) =
|
||||
<license>
|
||||
<name>{ l.getName }</name>
|
||||
<url>{ l.getUrl }</url>
|
||||
<distribution>repo</distribution>
|
||||
</license>
|
||||
def homePage(homePage: String) = if (homePage eq null) NodeSeq.Empty else <url>{ homePage }</url>
|
||||
def revision(version: String) = if (version ne null) <version>{ version }</version> else NodeSeq.Empty
|
||||
def packaging(module: ModuleDescriptor) =
|
||||
module.getAllArtifacts match {
|
||||
case Array() => "pom"
|
||||
case Array(x) => x.getType
|
||||
case xs =>
|
||||
val types = xs.map(_.getType).toList.filterNot(IgnoreTypes)
|
||||
types match {
|
||||
case Nil => Artifact.PomType
|
||||
case xs if xs.contains(Artifact.DefaultType) => Artifact.DefaultType
|
||||
case x :: xs => x
|
||||
}
|
||||
}
|
||||
val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType)
|
||||
|
||||
def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String]): NodeSeq =
|
||||
if(dependencies.isEmpty)
|
||||
NodeSeq.Empty
|
||||
else
|
||||
<dependencies>
|
||||
{ dependencies.map(makeDependency(_, includeTypes)) }
|
||||
</dependencies>
|
||||
def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String]): NodeSeq =
|
||||
if (dependencies.isEmpty)
|
||||
NodeSeq.Empty
|
||||
else
|
||||
<dependencies>
|
||||
{ dependencies.map(makeDependency(_, includeTypes)) }
|
||||
</dependencies>
|
||||
|
||||
def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq =
|
||||
{
|
||||
val artifacts = dependency.getAllDependencyArtifacts
|
||||
val includeArtifacts = artifacts.filter(d => includeTypes(d.getType))
|
||||
if(artifacts.isEmpty) {
|
||||
val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations)
|
||||
makeDependencyElem(dependency, scope, optional, None, None)
|
||||
}
|
||||
else if(includeArtifacts.isEmpty)
|
||||
NodeSeq.Empty
|
||||
else
|
||||
NodeSeq.fromSeq(artifacts.map( a => makeDependencyElem(dependency, a) ))
|
||||
}
|
||||
def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq =
|
||||
{
|
||||
val artifacts = dependency.getAllDependencyArtifacts
|
||||
val includeArtifacts = artifacts.filter(d => includeTypes(d.getType))
|
||||
if (artifacts.isEmpty) {
|
||||
val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations)
|
||||
makeDependencyElem(dependency, scope, optional, None, None)
|
||||
} else if (includeArtifacts.isEmpty)
|
||||
NodeSeq.Empty
|
||||
else
|
||||
NodeSeq.fromSeq(artifacts.map(a => makeDependencyElem(dependency, a)))
|
||||
}
|
||||
|
||||
def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Elem =
|
||||
{
|
||||
val configs = artifact.getConfigurations.toList match {
|
||||
case Nil | "*" :: Nil => dependency.getModuleConfigurations
|
||||
case x => x.toArray
|
||||
}
|
||||
val (scope, optional) = getScopeAndOptional(configs)
|
||||
val classifier = artifactClassifier(artifact)
|
||||
val baseType = artifactType(artifact)
|
||||
val tpe = (classifier, baseType) match {
|
||||
case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None
|
||||
case _ => baseType
|
||||
}
|
||||
makeDependencyElem(dependency, scope, optional, classifier, tpe)
|
||||
}
|
||||
def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String]): Elem =
|
||||
{
|
||||
val mrid = dependency.getDependencyRevisionId
|
||||
<dependency>
|
||||
<groupId>{mrid.getOrganisation}</groupId>
|
||||
<artifactId>{mrid.getName}</artifactId>
|
||||
<version>{makeDependencyVersion(mrid.getRevision)}</version>
|
||||
{ scopeElem(scope) }
|
||||
{ optionalElem(optional) }
|
||||
{ classifierElem(classifier) }
|
||||
{ typeElem(tpe) }
|
||||
{ exclusions(dependency) }
|
||||
</dependency>
|
||||
}
|
||||
def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Elem =
|
||||
{
|
||||
val configs = artifact.getConfigurations.toList match {
|
||||
case Nil | "*" :: Nil => dependency.getModuleConfigurations
|
||||
case x => x.toArray
|
||||
}
|
||||
val (scope, optional) = getScopeAndOptional(configs)
|
||||
val classifier = artifactClassifier(artifact)
|
||||
val baseType = artifactType(artifact)
|
||||
val tpe = (classifier, baseType) match {
|
||||
case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None
|
||||
case _ => baseType
|
||||
}
|
||||
makeDependencyElem(dependency, scope, optional, classifier, tpe)
|
||||
}
|
||||
def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String]): Elem =
|
||||
{
|
||||
val mrid = dependency.getDependencyRevisionId
|
||||
<dependency>
|
||||
<groupId>{ mrid.getOrganisation }</groupId>
|
||||
<artifactId>{ mrid.getName }</artifactId>
|
||||
<version>{ makeDependencyVersion(mrid.getRevision) }</version>
|
||||
{ scopeElem(scope) }
|
||||
{ optionalElem(optional) }
|
||||
{ classifierElem(classifier) }
|
||||
{ typeElem(tpe) }
|
||||
{ exclusions(dependency) }
|
||||
</dependency>
|
||||
}
|
||||
|
||||
def makeDependencyVersion(revision: String): String = {
|
||||
def plusRange(s: String, shift: Int = 0) = {
|
||||
def pow(i: Int): Int = if (i > 0) 10 * pow(i - 1) else 1
|
||||
val (prefixVersion, lastVersion) = (s + "0" * shift).reverse.split("\\.", 2) match {
|
||||
case Array(revLast, revRest) =>
|
||||
(revRest.reverse + ".", revLast.reverse)
|
||||
case Array(revLast) => ("", revLast.reverse)
|
||||
}
|
||||
val lastVersionInt = lastVersion.toInt
|
||||
s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt + pow(shift)})"
|
||||
}
|
||||
val startSym = Set(']', '[', '(')
|
||||
val stopSym = Set(']', '[', ')')
|
||||
try {
|
||||
if (revision endsWith ".+") {
|
||||
plusRange(revision.substring(0, revision.length - 2))
|
||||
} else if (revision endsWith "+") {
|
||||
val base = revision.take(revision.length - 1)
|
||||
// This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so
|
||||
// we assume version ranges never go beyond 5 siginificant digits.
|
||||
(0 to 5).map(plusRange(base, _)).mkString(",")
|
||||
} else if (startSym(revision(0)) && stopSym(revision(revision.length - 1))) {
|
||||
val start = revision(0)
|
||||
val stop = revision(revision.length - 1)
|
||||
val mid = revision.substring(1, revision.length - 1)
|
||||
(if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop)
|
||||
} else revision
|
||||
} catch {
|
||||
case e: NumberFormatException =>
|
||||
// TODO - if the version doesn't meet our expectations, maybe we just issue a hard
|
||||
// error instead of softly ignoring the attempt to rewrite.
|
||||
//sys.error(s"Could not fix version [$revision] into maven style version")
|
||||
revision
|
||||
}
|
||||
}
|
||||
|
||||
@deprecated("No longer used and will be removed.", "0.12.1")
|
||||
def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq =
|
||||
{
|
||||
val jarDep = dependency.getAllDependencyArtifacts.filter(d => includeTypes(d.getType)).headOption
|
||||
jarDep match {
|
||||
case Some(a) => classifierElem(artifactClassifier(a))
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
def artifactType(artifact: DependencyArtifactDescriptor): Option[String] =
|
||||
Option(artifact.getType).flatMap { tpe => if (tpe == "jar") None else Some(tpe) }
|
||||
def typeElem(tpe: Option[String]): NodeSeq =
|
||||
tpe match {
|
||||
case Some(t) => <type>{ t }</type>
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
|
||||
def makeDependencyVersion(revision: String): String = {
|
||||
def plusRange(s:String, shift:Int = 0) = {
|
||||
def pow(i:Int):Int = if (i>0) 10 * pow(i-1) else 1
|
||||
val (prefixVersion, lastVersion) = (s+"0"*shift).reverse.split("\\.",2) match {
|
||||
case Array(revLast,revRest) =>
|
||||
( revRest.reverse + ".", revLast.reverse )
|
||||
case Array(revLast) => ("", revLast.reverse)
|
||||
}
|
||||
val lastVersionInt = lastVersion.toInt
|
||||
s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt+pow(shift)})"
|
||||
}
|
||||
val startSym=Set(']','[','(')
|
||||
val stopSym=Set(']','[',')')
|
||||
try {
|
||||
if (revision endsWith ".+") {
|
||||
plusRange(revision.substring(0,revision.length-2))
|
||||
} else if (revision endsWith "+") {
|
||||
val base = revision.take(revision.length-1)
|
||||
// This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so
|
||||
// we assume version ranges never go beyond 5 siginificant digits.
|
||||
(0 to 5).map(plusRange(base,_)).mkString(",")
|
||||
} else if (startSym(revision(0)) && stopSym(revision(revision.length-1))) {
|
||||
val start = revision(0)
|
||||
val stop = revision(revision.length-1)
|
||||
val mid = revision.substring(1,revision.length-1)
|
||||
(if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop)
|
||||
} else revision
|
||||
} catch {
|
||||
case e: NumberFormatException =>
|
||||
// TODO - if the version doesn't meet our expectations, maybe we just issue a hard
|
||||
// error instead of softly ignoring the attempt to rewrite.
|
||||
//sys.error(s"Could not fix version [$revision] into maven style version")
|
||||
revision
|
||||
}
|
||||
}
|
||||
def artifactClassifier(artifact: DependencyArtifactDescriptor): Option[String] =
|
||||
Option(artifact.getExtraAttribute("classifier"))
|
||||
def classifierElem(classifier: Option[String]): NodeSeq =
|
||||
classifier match {
|
||||
case Some(c) => <classifier>{ c }</classifier>
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
|
||||
@deprecated("No longer used and will be removed.", "0.12.1")
|
||||
def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq =
|
||||
{
|
||||
val jarDep = dependency.getAllDependencyArtifacts.filter(d => includeTypes(d.getType)).headOption
|
||||
jarDep match {
|
||||
case Some(a) => classifierElem(artifactClassifier(a))
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
}
|
||||
def artifactType(artifact: DependencyArtifactDescriptor): Option[String] =
|
||||
Option(artifact.getType).flatMap { tpe => if(tpe == "jar") None else Some(tpe) }
|
||||
def typeElem(tpe: Option[String]): NodeSeq =
|
||||
tpe match {
|
||||
case Some(t) => <type>{t}</type>
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
|
||||
def artifactClassifier(artifact: DependencyArtifactDescriptor): Option[String] =
|
||||
Option(artifact.getExtraAttribute("classifier"))
|
||||
def classifierElem(classifier: Option[String]): NodeSeq =
|
||||
classifier match {
|
||||
case Some(c) => <classifier>{c}</classifier>
|
||||
case None => NodeSeq.Empty
|
||||
}
|
||||
@deprecated("No longer used and will be removed.", "0.12.1")
|
||||
def scopeAndOptional(dependency: DependencyDescriptor): NodeSeq =
|
||||
{
|
||||
val (scope, opt) = getScopeAndOptional(dependency.getModuleConfigurations)
|
||||
scopeElem(scope) ++ optionalElem(opt)
|
||||
}
|
||||
def scopeElem(scope: Option[String]): NodeSeq = scope match {
|
||||
case None | Some(Configurations.Compile.name) => NodeSeq.Empty
|
||||
case Some(s) => <scope>{ s }</scope>
|
||||
}
|
||||
def optionalElem(opt: Boolean) = if (opt) <optional>true</optional> else NodeSeq.Empty
|
||||
def moduleDescriptor(module: ModuleDescriptor) = module.getModuleRevisionId
|
||||
|
||||
@deprecated("No longer used and will be removed.", "0.12.1")
|
||||
def scopeAndOptional(dependency: DependencyDescriptor): NodeSeq =
|
||||
{
|
||||
val (scope, opt) = getScopeAndOptional(dependency.getModuleConfigurations)
|
||||
scopeElem(scope) ++ optionalElem(opt)
|
||||
}
|
||||
def scopeElem(scope: Option[String]): NodeSeq = scope match {
|
||||
case None | Some(Configurations.Compile.name) => NodeSeq.Empty
|
||||
case Some(s) => <scope>{s}</scope>
|
||||
}
|
||||
def optionalElem(opt: Boolean) = if(opt) <optional>true</optional> else NodeSeq.Empty
|
||||
def moduleDescriptor(module: ModuleDescriptor) = module.getModuleRevisionId
|
||||
def getScopeAndOptional(confs: Array[String]): (Option[String], Boolean) =
|
||||
{
|
||||
val (opt, notOptional) = confs.partition(_ == Optional.name)
|
||||
val defaultNotOptional = Configurations.defaultMavenConfigurations.find(notOptional contains _.name)
|
||||
val scope = defaultNotOptional.map(_.name)
|
||||
(scope, !opt.isEmpty)
|
||||
}
|
||||
|
||||
def getScopeAndOptional(confs: Array[String]): (Option[String], Boolean) =
|
||||
{
|
||||
val (opt, notOptional) = confs.partition(_ == Optional.name)
|
||||
val defaultNotOptional = Configurations.defaultMavenConfigurations.find(notOptional contains _.name)
|
||||
val scope = defaultNotOptional.map(_.name)
|
||||
(scope, !opt.isEmpty)
|
||||
}
|
||||
def exclusions(dependency: DependencyDescriptor): NodeSeq =
|
||||
{
|
||||
val excl = dependency.getExcludeRules(dependency.getModuleConfigurations)
|
||||
val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion))
|
||||
if (!warns.isEmpty) log.warn(warns.mkString(IO.Newline))
|
||||
if (!excls.isEmpty) <exclusions>{ excls }</exclusions>
|
||||
else NodeSeq.Empty
|
||||
}
|
||||
def makeExclusion(exclRule: ExcludeRule): Either[String, NodeSeq] =
|
||||
{
|
||||
val m = exclRule.getId.getModuleId
|
||||
val (g, a) = (m.getOrganisation, m.getName)
|
||||
if (g == null || g.isEmpty || g == "*" || a.isEmpty || a == "*")
|
||||
Left("Skipped generating '<exclusion/>' for %s. Dependency exclusion should have both 'org' and 'module' to comply with Maven POM's schema.".format(m))
|
||||
else
|
||||
Right(
|
||||
<exclusion>
|
||||
<groupId>{ g }</groupId>
|
||||
<artifactId>{ a }</artifactId>
|
||||
</exclusion>
|
||||
)
|
||||
}
|
||||
|
||||
def exclusions(dependency: DependencyDescriptor): NodeSeq =
|
||||
{
|
||||
val excl = dependency.getExcludeRules(dependency.getModuleConfigurations)
|
||||
val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion))
|
||||
if(!warns.isEmpty) log.warn(warns.mkString(IO.Newline))
|
||||
if(!excls.isEmpty) <exclusions>{excls}</exclusions>
|
||||
else NodeSeq.Empty
|
||||
}
|
||||
def makeExclusion(exclRule: ExcludeRule): Either[String, NodeSeq] =
|
||||
{
|
||||
val m = exclRule.getId.getModuleId
|
||||
val (g, a) = (m.getOrganisation, m.getName)
|
||||
if(g == null || g.isEmpty || g == "*" || a.isEmpty || a == "*")
|
||||
Left("Skipped generating '<exclusion/>' for %s. Dependency exclusion should have both 'org' and 'module' to comply with Maven POM's schema.".format(m))
|
||||
else
|
||||
Right(
|
||||
<exclusion>
|
||||
<groupId>{g}</groupId>
|
||||
<artifactId>{a}</artifactId>
|
||||
</exclusion>
|
||||
)
|
||||
}
|
||||
def makeRepositories(settings: IvySettings, includeAll: Boolean, filterRepositories: MavenRepository => Boolean) =
|
||||
{
|
||||
class MavenRepo(name: String, snapshots: Boolean, releases: Boolean)
|
||||
val repositories = if (includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver)
|
||||
val mavenRepositories =
|
||||
repositories.flatMap {
|
||||
case m: IBiblioResolver if m.isM2compatible && m.getRoot != IBiblioResolver.DEFAULT_M2_ROOT =>
|
||||
MavenRepository(m.getName, m.getRoot) :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
val repositoryElements = mavenRepositories.filter(filterRepositories).map(mavenRepository)
|
||||
if (repositoryElements.isEmpty) repositoryElements else <repositories>{ repositoryElements }</repositories>
|
||||
}
|
||||
def allResolvers(settings: IvySettings): Seq[DependencyResolver] = flatten(castResolvers(settings.getResolvers)).distinct
|
||||
def flatten(rs: Seq[DependencyResolver]): Seq[DependencyResolver] = if (rs eq null) Nil else rs.flatMap(resolvers)
|
||||
def resolvers(r: DependencyResolver): Seq[DependencyResolver] =
|
||||
r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil }
|
||||
|
||||
def makeRepositories(settings: IvySettings, includeAll: Boolean, filterRepositories: MavenRepository => Boolean) =
|
||||
{
|
||||
class MavenRepo(name: String, snapshots: Boolean, releases: Boolean)
|
||||
val repositories = if(includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver)
|
||||
val mavenRepositories =
|
||||
repositories.flatMap {
|
||||
case m: IBiblioResolver if m.isM2compatible && m.getRoot != IBiblioResolver.DEFAULT_M2_ROOT =>
|
||||
MavenRepository(m.getName, m.getRoot) :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
val repositoryElements = mavenRepositories.filter(filterRepositories).map(mavenRepository)
|
||||
if(repositoryElements.isEmpty) repositoryElements else <repositories>{repositoryElements}</repositories>
|
||||
}
|
||||
def allResolvers(settings: IvySettings): Seq[DependencyResolver] = flatten(castResolvers(settings.getResolvers)).distinct
|
||||
def flatten(rs: Seq[DependencyResolver]): Seq[DependencyResolver] = if(rs eq null) Nil else rs.flatMap(resolvers)
|
||||
def resolvers(r: DependencyResolver): Seq[DependencyResolver] =
|
||||
r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil }
|
||||
// cast the contents of a pre-generics collection
|
||||
private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] =
|
||||
s.toArray.map(_.asInstanceOf[DependencyResolver])
|
||||
|
||||
// cast the contents of a pre-generics collection
|
||||
private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] =
|
||||
s.toArray.map(_.asInstanceOf[DependencyResolver])
|
||||
def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name)
|
||||
def isValidIDCharacter(c: Char) = c.isLetterOrDigit
|
||||
private def checkID(id: String, name: String) = if (id.isEmpty) sys.error("Could not convert '" + name + "' to an ID") else id
|
||||
def mavenRepository(repo: MavenRepository): XNode =
|
||||
mavenRepository(toID(repo.name), repo.name, repo.root)
|
||||
def mavenRepository(id: String, name: String, root: String): XNode =
|
||||
<repository>
|
||||
<id>{ id }</id>
|
||||
<name>{ name }</name>
|
||||
<url>{ root }</url>
|
||||
<layout>{ if (name == JavaNet1Repository.name) "legacy" else "default" }</layout>
|
||||
</repository>
|
||||
|
||||
def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name)
|
||||
def isValidIDCharacter(c: Char) = c.isLetterOrDigit
|
||||
private def checkID(id: String, name: String) = if(id.isEmpty) sys.error("Could not convert '" + name + "' to an ID") else id
|
||||
def mavenRepository(repo: MavenRepository): XNode =
|
||||
mavenRepository(toID(repo.name), repo.name, repo.root)
|
||||
def mavenRepository(id: String, name: String, root: String): XNode =
|
||||
<repository>
|
||||
<id>{id}</id>
|
||||
<name>{name}</name>
|
||||
<url>{root}</url>
|
||||
<layout>{ if(name == JavaNet1Repository.name) "legacy" else "default" }</layout>
|
||||
</repository>
|
||||
|
||||
/** Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None.
|
||||
* This currently only preserves the information required by makePom*/
|
||||
private def depsInConfs(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): Seq[DependencyDescriptor] =
|
||||
{
|
||||
val keepConfigurations = IvySbt.getConfigurations(module, configurations)
|
||||
val keepSet = Set(keepConfigurations.toSeq : _*)
|
||||
def translate(dependency: DependencyDescriptor) =
|
||||
{
|
||||
val keep = dependency.getModuleConfigurations.filter(keepSet.contains)
|
||||
if(keep.isEmpty)
|
||||
None
|
||||
else // TODO: translate the dependency to contain only configurations to keep
|
||||
Some(dependency)
|
||||
}
|
||||
module.getDependencies flatMap translate
|
||||
}
|
||||
/**
|
||||
* Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None.
|
||||
* This currently only preserves the information required by makePom
|
||||
*/
|
||||
private def depsInConfs(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): Seq[DependencyDescriptor] =
|
||||
{
|
||||
val keepConfigurations = IvySbt.getConfigurations(module, configurations)
|
||||
val keepSet = Set(keepConfigurations.toSeq: _*)
|
||||
def translate(dependency: DependencyDescriptor) =
|
||||
{
|
||||
val keep = dependency.getModuleConfigurations.filter(keepSet.contains)
|
||||
if (keep.isEmpty)
|
||||
None
|
||||
else // TODO: translate the dependency to contain only configurations to keep
|
||||
Some(dependency)
|
||||
}
|
||||
module.getDependencies flatMap translate
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,103 +3,121 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.net.URL
|
||||
import java.net.URL
|
||||
|
||||
final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String] = None, isChanging: Boolean = false, isTransitive: Boolean = true, isForce: Boolean = false, explicitArtifacts: Seq[Artifact] = Nil, exclusions: Seq[ExclusionRule] = Nil, extraAttributes: Map[String,String] = Map.empty, crossVersion: CrossVersion = CrossVersion.Disabled)
|
||||
{
|
||||
override def toString: String =
|
||||
organization + ":" + name + ":" + revision +
|
||||
(configurations match { case Some(s) => ":" + s; case None => "" }) +
|
||||
(if(extraAttributes.isEmpty) "" else " " + extraString)
|
||||
final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String] = None, isChanging: Boolean = false, isTransitive: Boolean = true, isForce: Boolean = false, explicitArtifacts: Seq[Artifact] = Nil, exclusions: Seq[ExclusionRule] = Nil, extraAttributes: Map[String, String] = Map.empty, crossVersion: CrossVersion = CrossVersion.Disabled) {
|
||||
override def toString: String =
|
||||
organization + ":" + name + ":" + revision +
|
||||
(configurations match { case Some(s) => ":" + s; case None => "" }) +
|
||||
(if (extraAttributes.isEmpty) "" else " " + extraString)
|
||||
|
||||
/** String representation of the extra attributes, excluding any information only attributes. */
|
||||
def extraString: String = extraDependencyAttributes.map { case (k,v) => k + "=" + v } mkString("(",", ",")")
|
||||
/** String representation of the extra attributes, excluding any information only attributes. */
|
||||
def extraString: String = extraDependencyAttributes.map { case (k, v) => k + "=" + v } mkString ("(", ", ", ")")
|
||||
|
||||
/** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */
|
||||
def extraDependencyAttributes: Map[String,String] = extraAttributes.filterKeys(!_.startsWith(CustomPomParser.InfoKeyPrefix))
|
||||
/** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */
|
||||
def extraDependencyAttributes: Map[String, String] = extraAttributes.filterKeys(!_.startsWith(CustomPomParser.InfoKeyPrefix))
|
||||
|
||||
@deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0")
|
||||
def cross(v: Boolean): ModuleID = cross(if(v) CrossVersion.binary else CrossVersion.Disabled)
|
||||
@deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0")
|
||||
def cross(v: Boolean): ModuleID = cross(if (v) CrossVersion.binary else CrossVersion.Disabled)
|
||||
|
||||
@deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0")
|
||||
def cross(v: Boolean, verRemap: String => String): ModuleID = cross(if(v) CrossVersion.binaryMapped(verRemap) else CrossVersion.Disabled)
|
||||
@deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0")
|
||||
def cross(v: Boolean, verRemap: String => String): ModuleID = cross(if (v) CrossVersion.binaryMapped(verRemap) else CrossVersion.Disabled)
|
||||
|
||||
/** Specifies the cross-version behavior for this module. See [CrossVersion] for details.*/
|
||||
def cross(v: CrossVersion): ModuleID = copy(crossVersion = v)
|
||||
/** Specifies the cross-version behavior for this module. See [CrossVersion] for details.*/
|
||||
def cross(v: CrossVersion): ModuleID = copy(crossVersion = v)
|
||||
|
||||
// () required for chaining
|
||||
/** Do not follow dependencies of this module. Synonym for `intransitive`.*/
|
||||
def notTransitive() = intransitive()
|
||||
// () required for chaining
|
||||
/** Do not follow dependencies of this module. Synonym for `intransitive`.*/
|
||||
def notTransitive() = intransitive()
|
||||
|
||||
/** Do not follow dependencies of this module. Synonym for `notTransitive`.*/
|
||||
def intransitive() = copy(isTransitive = false)
|
||||
/** Do not follow dependencies of this module. Synonym for `notTransitive`.*/
|
||||
def intransitive() = copy(isTransitive = false)
|
||||
|
||||
/** Marks this dependency as "changing". Ivy will always check if the metadata has changed and then if the artifact has changed,
|
||||
* redownload it. sbt configures all -SNAPSHOT dependencies to be changing.
|
||||
*
|
||||
* See the "Changes in artifacts" section of https://ant.apache.org/ivy/history/trunk/concept.html for full details.
|
||||
* */
|
||||
def changing() = copy(isChanging = true)
|
||||
/**
|
||||
* Marks this dependency as "changing". Ivy will always check if the metadata has changed and then if the artifact has changed,
|
||||
* redownload it. sbt configures all -SNAPSHOT dependencies to be changing.
|
||||
*
|
||||
* See the "Changes in artifacts" section of https://ant.apache.org/ivy/history/trunk/concept.html for full details.
|
||||
*/
|
||||
def changing() = copy(isChanging = true)
|
||||
|
||||
/** Indicates that conflict resolution should only select this module's revision.
|
||||
* This prevents a newer revision from being pulled in by a transitive dependency, for example.*/
|
||||
def force() = copy(isForce = true)
|
||||
/**
|
||||
* Indicates that conflict resolution should only select this module's revision.
|
||||
* This prevents a newer revision from being pulled in by a transitive dependency, for example.
|
||||
*/
|
||||
def force() = copy(isForce = true)
|
||||
|
||||
/** Specifies a URL from which the main artifact for this dependency can be downloaded.
|
||||
* This value is only consulted if the module is not found in a repository.
|
||||
* It is not included in published metadata.*/
|
||||
def from(url: String) = artifacts(Artifact(name, new URL(url)))
|
||||
/**
|
||||
* Specifies a URL from which the main artifact for this dependency can be downloaded.
|
||||
* This value is only consulted if the module is not found in a repository.
|
||||
* It is not included in published metadata.
|
||||
*/
|
||||
def from(url: String) = artifacts(Artifact(name, new URL(url)))
|
||||
|
||||
/** Adds a dependency on the artifact for this module with classifier `c`. */
|
||||
def classifier(c: String) = artifacts(Artifact(name, c))
|
||||
/** Adds a dependency on the artifact for this module with classifier `c`. */
|
||||
def classifier(c: String) = artifacts(Artifact(name, c))
|
||||
|
||||
/** Declares the explicit artifacts for this module. If this ModuleID represents a dependency,
|
||||
* these artifact definitions override the information in the dependency's published metadata. */
|
||||
def artifacts(newArtifacts: Artifact*) = copy(explicitArtifacts = newArtifacts ++ this.explicitArtifacts)
|
||||
/**
|
||||
* Declares the explicit artifacts for this module. If this ModuleID represents a dependency,
|
||||
* these artifact definitions override the information in the dependency's published metadata.
|
||||
*/
|
||||
def artifacts(newArtifacts: Artifact*) = copy(explicitArtifacts = newArtifacts ++ this.explicitArtifacts)
|
||||
|
||||
/** Applies the provided exclusions to dependencies of this module. Note that only exclusions that specify
|
||||
* both the exact organization and name and nothing else will be included in a pom.xml.*/
|
||||
def excludeAll(rules: ExclusionRule*) = copy(exclusions = this.exclusions ++ rules)
|
||||
/**
|
||||
* Applies the provided exclusions to dependencies of this module. Note that only exclusions that specify
|
||||
* both the exact organization and name and nothing else will be included in a pom.xml.
|
||||
*/
|
||||
def excludeAll(rules: ExclusionRule*) = copy(exclusions = this.exclusions ++ rules)
|
||||
|
||||
/** Excludes the dependency with organization `org` and `name` from being introduced by this dependency during resolution. */
|
||||
def exclude(org: String, name: String) = excludeAll(ExclusionRule(org, name))
|
||||
/** Excludes the dependency with organization `org` and `name` from being introduced by this dependency during resolution. */
|
||||
def exclude(org: String, name: String) = excludeAll(ExclusionRule(org, name))
|
||||
|
||||
/** Adds extra attributes for this module. All keys are prefixed with `e:` if they are not already so prefixed.
|
||||
* This information will only be published in an ivy.xml and not in a pom.xml. */
|
||||
def extra(attributes: (String,String)*) = copy(extraAttributes = this.extraAttributes ++ ModuleID.checkE(attributes))
|
||||
/**
|
||||
* Adds extra attributes for this module. All keys are prefixed with `e:` if they are not already so prefixed.
|
||||
* This information will only be published in an ivy.xml and not in a pom.xml.
|
||||
*/
|
||||
def extra(attributes: (String, String)*) = copy(extraAttributes = this.extraAttributes ++ ModuleID.checkE(attributes))
|
||||
|
||||
/** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "sources"
|
||||
* classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withSources()` instead.*/
|
||||
def sources() = artifacts(Artifact.sources(name))
|
||||
/**
|
||||
* Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "sources"
|
||||
* classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withSources()` instead.
|
||||
*/
|
||||
def sources() = artifacts(Artifact.sources(name))
|
||||
|
||||
/** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc"
|
||||
* classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withJavadoc()` instead.*/
|
||||
def javadoc() = artifacts(Artifact.javadoc(name))
|
||||
/**
|
||||
* Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc"
|
||||
* classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withJavadoc()` instead.
|
||||
*/
|
||||
def javadoc() = artifacts(Artifact.javadoc(name))
|
||||
|
||||
def pomOnly() = artifacts(Artifact.pom(name))
|
||||
def pomOnly() = artifacts(Artifact.pom(name))
|
||||
|
||||
/** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "sources"
|
||||
* classifier. If there is not already an explicit dependency on the main artifact, this adds one.*/
|
||||
def withSources() = jarIfEmpty.sources()
|
||||
/**
|
||||
* Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "sources"
|
||||
* classifier. If there is not already an explicit dependency on the main artifact, this adds one.
|
||||
*/
|
||||
def withSources() = jarIfEmpty.sources()
|
||||
|
||||
/** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc"
|
||||
* classifier. If there is not already an explicit dependency on the main artifact, this adds one.*/
|
||||
def withJavadoc() = jarIfEmpty.javadoc()
|
||||
/**
|
||||
* Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred
|
||||
* for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc"
|
||||
* classifier. If there is not already an explicit dependency on the main artifact, this adds one.
|
||||
*/
|
||||
def withJavadoc() = jarIfEmpty.javadoc()
|
||||
|
||||
private def jarIfEmpty = if(explicitArtifacts.isEmpty) jar() else this
|
||||
private def jarIfEmpty = if (explicitArtifacts.isEmpty) jar() else this
|
||||
|
||||
/** Declares a dependency on the main artifact. This is implied by default unless artifacts are explicitly declared, such
|
||||
* as when adding a dependency on an artifact with a classifier.*/
|
||||
def jar() = artifacts(Artifact(name))
|
||||
/**
|
||||
* Declares a dependency on the main artifact. This is implied by default unless artifacts are explicitly declared, such
|
||||
* as when adding a dependency on an artifact with a classifier.
|
||||
*/
|
||||
def jar() = artifacts(Artifact(name))
|
||||
}
|
||||
object ModuleID
|
||||
{
|
||||
/** Prefixes all keys with `e:` if they are not already so prefixed. */
|
||||
def checkE(attributes: Seq[(String, String)]) =
|
||||
for ( (key, value) <- attributes) yield
|
||||
if(key.startsWith("e:")) (key, value) else ("e:" + key, value)
|
||||
object ModuleID {
|
||||
/** Prefixes all keys with `e:` if they are not already so prefixed. */
|
||||
def checkE(attributes: Seq[(String, String)]) =
|
||||
for ((key, value) <- attributes) yield if (key.startsWith("e:")) (key, value) else ("e:" + key, value)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,87 +3,88 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.Date
|
||||
import java.io.File
|
||||
import java.util.Date
|
||||
|
||||
import org.apache.ivy.{core,plugins}
|
||||
import core.{cache,module, report, resolve,search}
|
||||
import cache.{ArtifactOrigin,RepositoryCacheManager}
|
||||
import search.{ModuleEntry, OrganisationEntry, RevisionEntry}
|
||||
import module.id.ModuleRevisionId
|
||||
import module.descriptor.{Artifact => IArtifact, DefaultArtifact, DependencyDescriptor, ModuleDescriptor}
|
||||
import plugins.namespace.Namespace
|
||||
import plugins.resolver.{DependencyResolver,ResolverSettings}
|
||||
import report.{ArtifactDownloadReport, DownloadReport, DownloadStatus, MetadataArtifactDownloadReport}
|
||||
import resolve.{DownloadOptions, ResolveData, ResolvedModuleRevision}
|
||||
import org.apache.ivy.{ core, plugins }
|
||||
import core.{ cache, module, report, resolve, search }
|
||||
import cache.{ ArtifactOrigin, RepositoryCacheManager }
|
||||
import search.{ ModuleEntry, OrganisationEntry, RevisionEntry }
|
||||
import module.id.ModuleRevisionId
|
||||
import module.descriptor.{ Artifact => IArtifact, DefaultArtifact, DependencyDescriptor, ModuleDescriptor }
|
||||
import plugins.namespace.Namespace
|
||||
import plugins.resolver.{ DependencyResolver, ResolverSettings }
|
||||
import report.{ ArtifactDownloadReport, DownloadReport, DownloadStatus, MetadataArtifactDownloadReport }
|
||||
import resolve.{ DownloadOptions, ResolveData, ResolvedModuleRevision }
|
||||
|
||||
/**A Resolver that uses a predefined mapping from module ids to in-memory descriptors.
|
||||
* It does not handle artifacts.*/
|
||||
class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]) extends ResolverAdapter
|
||||
{
|
||||
def getName = name
|
||||
def setName(name: String) = sys.error("Setting name not supported by ProjectResolver")
|
||||
override def toString = "ProjectResolver(" + name + ", mapped: " + map.keys.mkString(", ") + ")"
|
||||
/**
|
||||
* A Resolver that uses a predefined mapping from module ids to in-memory descriptors.
|
||||
* It does not handle artifacts.
|
||||
*/
|
||||
class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]) extends ResolverAdapter {
|
||||
def getName = name
|
||||
def setName(name: String) = sys.error("Setting name not supported by ProjectResolver")
|
||||
override def toString = "ProjectResolver(" + name + ", mapped: " + map.keys.mkString(", ") + ")"
|
||||
|
||||
def getDependency(dd: DependencyDescriptor, data: ResolveData): ResolvedModuleRevision =
|
||||
getDependency(dd.getDependencyRevisionId).orNull
|
||||
def getDependency(dd: DependencyDescriptor, data: ResolveData): ResolvedModuleRevision =
|
||||
getDependency(dd.getDependencyRevisionId).orNull
|
||||
|
||||
private[this] def getDependency(revisionId: ModuleRevisionId): Option[ResolvedModuleRevision] =
|
||||
{
|
||||
def constructResult(descriptor: ModuleDescriptor) = new ResolvedModuleRevision(this, this, descriptor, report(revisionId), true)
|
||||
map get revisionId map constructResult
|
||||
}
|
||||
private[this] def getDependency(revisionId: ModuleRevisionId): Option[ResolvedModuleRevision] =
|
||||
{
|
||||
def constructResult(descriptor: ModuleDescriptor) = new ResolvedModuleRevision(this, this, descriptor, report(revisionId), true)
|
||||
map get revisionId map constructResult
|
||||
}
|
||||
|
||||
def report(revisionId: ModuleRevisionId): MetadataArtifactDownloadReport =
|
||||
{
|
||||
val artifact = DefaultArtifact.newIvyArtifact(revisionId, new Date)
|
||||
val r = new MetadataArtifactDownloadReport(artifact)
|
||||
r.setSearched(false)
|
||||
r.setDownloadStatus(DownloadStatus.FAILED)
|
||||
r
|
||||
}
|
||||
def report(revisionId: ModuleRevisionId): MetadataArtifactDownloadReport =
|
||||
{
|
||||
val artifact = DefaultArtifact.newIvyArtifact(revisionId, new Date)
|
||||
val r = new MetadataArtifactDownloadReport(artifact)
|
||||
r.setSearched(false)
|
||||
r.setDownloadStatus(DownloadStatus.FAILED)
|
||||
r
|
||||
}
|
||||
|
||||
// this resolver nevers locates artifacts, only resolves dependencies
|
||||
def exists(artifact: IArtifact) = false
|
||||
def locate(artifact: IArtifact) = null
|
||||
def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport =
|
||||
{
|
||||
val r = new DownloadReport
|
||||
for(artifact <- artifacts)
|
||||
if(getDependency(artifact.getModuleRevisionId).isEmpty)
|
||||
r.addArtifactReport(notDownloaded(artifact))
|
||||
r
|
||||
}
|
||||
// this resolver nevers locates artifacts, only resolves dependencies
|
||||
def exists(artifact: IArtifact) = false
|
||||
def locate(artifact: IArtifact) = null
|
||||
def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport =
|
||||
{
|
||||
val r = new DownloadReport
|
||||
for (artifact <- artifacts)
|
||||
if (getDependency(artifact.getModuleRevisionId).isEmpty)
|
||||
r.addArtifactReport(notDownloaded(artifact))
|
||||
r
|
||||
}
|
||||
|
||||
def download(artifact: ArtifactOrigin, options: DownloadOptions): ArtifactDownloadReport =
|
||||
notDownloaded(artifact.getArtifact)
|
||||
def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData) = null
|
||||
def download(artifact: ArtifactOrigin, options: DownloadOptions): ArtifactDownloadReport =
|
||||
notDownloaded(artifact.getArtifact)
|
||||
def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData) = null
|
||||
|
||||
def notDownloaded(artifact: IArtifact): ArtifactDownloadReport=
|
||||
{
|
||||
val r = new ArtifactDownloadReport(artifact)
|
||||
r.setDownloadStatus(DownloadStatus.FAILED)
|
||||
r
|
||||
}
|
||||
def notDownloaded(artifact: IArtifact): ArtifactDownloadReport =
|
||||
{
|
||||
val r = new ArtifactDownloadReport(artifact)
|
||||
r.setDownloadStatus(DownloadStatus.FAILED)
|
||||
r
|
||||
}
|
||||
|
||||
// doesn't support publishing
|
||||
def publish(artifact: IArtifact, src: File, overwrite: Boolean) = sys.error("Publish not supported by ProjectResolver")
|
||||
def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean) {}
|
||||
def abortPublishTransaction() {}
|
||||
def commitPublishTransaction() {}
|
||||
// doesn't support publishing
|
||||
def publish(artifact: IArtifact, src: File, overwrite: Boolean) = sys.error("Publish not supported by ProjectResolver")
|
||||
def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean) {}
|
||||
def abortPublishTransaction() {}
|
||||
def commitPublishTransaction() {}
|
||||
|
||||
def reportFailure() {}
|
||||
def reportFailure(art: IArtifact) {}
|
||||
def reportFailure() {}
|
||||
def reportFailure(art: IArtifact) {}
|
||||
|
||||
def listOrganisations() = new Array[OrganisationEntry](0)
|
||||
def listModules(org: OrganisationEntry) = new Array[ModuleEntry](0)
|
||||
def listRevisions(module: ModuleEntry) = new Array[RevisionEntry](0)
|
||||
def listOrganisations() = new Array[OrganisationEntry](0)
|
||||
def listModules(org: OrganisationEntry) = new Array[ModuleEntry](0)
|
||||
def listRevisions(module: ModuleEntry) = new Array[RevisionEntry](0)
|
||||
|
||||
def getNamespace = Namespace.SYSTEM_NAMESPACE
|
||||
def getNamespace = Namespace.SYSTEM_NAMESPACE
|
||||
|
||||
private[this] var settings: Option[ResolverSettings] = None
|
||||
private[this] var settings: Option[ResolverSettings] = None
|
||||
|
||||
def dumpSettings() {}
|
||||
def setSettings(settings: ResolverSettings) { this.settings = Some(settings) }
|
||||
def getRepositoryCacheManager = settings match { case Some(s) => s.getDefaultRepositoryCacheManager; case None => sys.error("No settings defined for ProjectResolver") }
|
||||
def dumpSettings() {}
|
||||
def setSettings(settings: ResolverSettings) { this.settings = Some(settings) }
|
||||
def getRepositoryCacheManager = settings match { case Some(s) => s.getDefaultRepositoryCacheManager; case None => sys.error("No settings defined for ProjectResolver") }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,84 +7,84 @@ import org.apache.ivy.core
|
|||
import org.apache.ivy.plugins.parser
|
||||
import core.IvyPatternHelper
|
||||
import core.settings.IvySettings
|
||||
import core.cache.{CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager, ResolutionCacheManager}
|
||||
import core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager, ResolutionCacheManager }
|
||||
import core.module.id.ModuleRevisionId
|
||||
import core.module.descriptor.ModuleDescriptor
|
||||
import ResolutionCache.{Name, ReportDirectory, ResolvedName, ResolvedPattern}
|
||||
import ResolutionCache.{ Name, ReportDirectory, ResolvedName, ResolvedPattern }
|
||||
import parser.xml.XmlModuleDescriptorParser
|
||||
|
||||
/** Replaces the standard Ivy resolution cache in order to:
|
||||
* 1. Separate cached resolved Ivy files from resolution reports, making the resolution reports easier to find.
|
||||
* 2. Have them per-project for easier cleaning (possible with standard cache, but central to this custom one).
|
||||
* 3. Cache location includes extra attributes so that cross builds of a plugin do not overwrite each other.
|
||||
*/
|
||||
private[sbt] final class ResolutionCache(base: File, settings: IvySettings) extends ResolutionCacheManager
|
||||
{
|
||||
private[this] def resolvedFileInCache(m: ModuleRevisionId, name: String, ext: String): File = {
|
||||
val p = ResolvedPattern
|
||||
val f = IvyPatternHelper.substitute(p, m.getOrganisation, m.getName, m.getBranch, m.getRevision, name, name, ext, null, null, m.getAttributes, null)
|
||||
new File(base, f)
|
||||
}
|
||||
private[this] val reportBase: File = new File(base, ReportDirectory)
|
||||
/**
|
||||
* Replaces the standard Ivy resolution cache in order to:
|
||||
* 1. Separate cached resolved Ivy files from resolution reports, making the resolution reports easier to find.
|
||||
* 2. Have them per-project for easier cleaning (possible with standard cache, but central to this custom one).
|
||||
* 3. Cache location includes extra attributes so that cross builds of a plugin do not overwrite each other.
|
||||
*/
|
||||
private[sbt] final class ResolutionCache(base: File, settings: IvySettings) extends ResolutionCacheManager {
|
||||
private[this] def resolvedFileInCache(m: ModuleRevisionId, name: String, ext: String): File = {
|
||||
val p = ResolvedPattern
|
||||
val f = IvyPatternHelper.substitute(p, m.getOrganisation, m.getName, m.getBranch, m.getRevision, name, name, ext, null, null, m.getAttributes, null)
|
||||
new File(base, f)
|
||||
}
|
||||
private[this] val reportBase: File = new File(base, ReportDirectory)
|
||||
|
||||
def getResolutionCacheRoot: File = base
|
||||
def clean() { IO.delete(base) }
|
||||
override def toString = Name
|
||||
def getResolutionCacheRoot: File = base
|
||||
def clean() { IO.delete(base) }
|
||||
override def toString = Name
|
||||
|
||||
def getResolvedIvyFileInCache(mrid: ModuleRevisionId): File =
|
||||
resolvedFileInCache(mrid, ResolvedName, "xml")
|
||||
def getResolvedIvyPropertiesInCache(mrid: ModuleRevisionId): File =
|
||||
resolvedFileInCache(mrid, ResolvedName, "properties")
|
||||
// name needs to be the same as Ivy's default because the ivy-report.xsl stylesheet assumes this
|
||||
// when making links to reports for other configurations
|
||||
def getConfigurationResolveReportInCache(resolveId: String, conf: String): File =
|
||||
new File(reportBase, resolveId + "-" + conf + ".xml")
|
||||
def getConfigurationResolveReportsInCache(resolveId: String): Array[File] =
|
||||
IO.listFiles(reportBase).filter(_.getName.startsWith(resolveId + "-"))
|
||||
|
||||
// XXX: this method is required by ResolutionCacheManager in Ivy 2.3.0 final,
|
||||
// but it is apparently unused by Ivy as sbt uses Ivy. Therefore, it is
|
||||
// unexercised in tests. Note that the implementation of this method in Ivy 2.3.0's
|
||||
// DefaultResolutionCache also resolves parent properties for a given mrid
|
||||
def getResolvedModuleDescriptor(mrid: ModuleRevisionId): ModuleDescriptor = {
|
||||
val ivyFile = getResolvedIvyFileInCache(mrid)
|
||||
if (!ivyFile.exists()) {
|
||||
throw new IllegalStateException("Ivy file not found in cache for " + mrid + "!")
|
||||
}
|
||||
|
||||
return XmlModuleDescriptorParser.getInstance().parseDescriptor(settings, ivyFile.toURI().toURL(), false)
|
||||
}
|
||||
|
||||
def saveResolvedModuleDescriptor(md: ModuleDescriptor): Unit = {
|
||||
val mrid = md.getResolvedModuleRevisionId
|
||||
val cachedIvyFile = getResolvedIvyFileInCache(mrid)
|
||||
md.toIvyFile(cachedIvyFile)
|
||||
}
|
||||
def getResolvedIvyFileInCache(mrid: ModuleRevisionId): File =
|
||||
resolvedFileInCache(mrid, ResolvedName, "xml")
|
||||
def getResolvedIvyPropertiesInCache(mrid: ModuleRevisionId): File =
|
||||
resolvedFileInCache(mrid, ResolvedName, "properties")
|
||||
// name needs to be the same as Ivy's default because the ivy-report.xsl stylesheet assumes this
|
||||
// when making links to reports for other configurations
|
||||
def getConfigurationResolveReportInCache(resolveId: String, conf: String): File =
|
||||
new File(reportBase, resolveId + "-" + conf + ".xml")
|
||||
def getConfigurationResolveReportsInCache(resolveId: String): Array[File] =
|
||||
IO.listFiles(reportBase).filter(_.getName.startsWith(resolveId + "-"))
|
||||
|
||||
// XXX: this method is required by ResolutionCacheManager in Ivy 2.3.0 final,
|
||||
// but it is apparently unused by Ivy as sbt uses Ivy. Therefore, it is
|
||||
// unexercised in tests. Note that the implementation of this method in Ivy 2.3.0's
|
||||
// DefaultResolutionCache also resolves parent properties for a given mrid
|
||||
def getResolvedModuleDescriptor(mrid: ModuleRevisionId): ModuleDescriptor = {
|
||||
val ivyFile = getResolvedIvyFileInCache(mrid)
|
||||
if (!ivyFile.exists()) {
|
||||
throw new IllegalStateException("Ivy file not found in cache for " + mrid + "!")
|
||||
}
|
||||
|
||||
return XmlModuleDescriptorParser.getInstance().parseDescriptor(settings, ivyFile.toURI().toURL(), false)
|
||||
}
|
||||
|
||||
def saveResolvedModuleDescriptor(md: ModuleDescriptor): Unit = {
|
||||
val mrid = md.getResolvedModuleRevisionId
|
||||
val cachedIvyFile = getResolvedIvyFileInCache(mrid)
|
||||
md.toIvyFile(cachedIvyFile)
|
||||
}
|
||||
}
|
||||
private[sbt] object ResolutionCache
|
||||
{
|
||||
/** Removes cached files from the resolution cache for the module with ID `mrid`
|
||||
* and the resolveId (as set on `ResolveOptions`). */
|
||||
private[sbt] def cleanModule(mrid: ModuleRevisionId, resolveId: String, manager: ResolutionCacheManager)
|
||||
{
|
||||
val files =
|
||||
Option(manager.getResolvedIvyFileInCache(mrid)).toList :::
|
||||
Option(manager.getResolvedIvyPropertiesInCache(mrid)).toList :::
|
||||
Option(manager.getConfigurationResolveReportsInCache(resolveId)).toList.flatten
|
||||
IO.delete(files)
|
||||
}
|
||||
private[sbt] object ResolutionCache {
|
||||
/**
|
||||
* Removes cached files from the resolution cache for the module with ID `mrid`
|
||||
* and the resolveId (as set on `ResolveOptions`).
|
||||
*/
|
||||
private[sbt] def cleanModule(mrid: ModuleRevisionId, resolveId: String, manager: ResolutionCacheManager) {
|
||||
val files =
|
||||
Option(manager.getResolvedIvyFileInCache(mrid)).toList :::
|
||||
Option(manager.getResolvedIvyPropertiesInCache(mrid)).toList :::
|
||||
Option(manager.getConfigurationResolveReportsInCache(resolveId)).toList.flatten
|
||||
IO.delete(files)
|
||||
}
|
||||
|
||||
private val ReportDirectory = "reports"
|
||||
private val ReportDirectory = "reports"
|
||||
|
||||
// name of the file providing a dependency resolution report for a configuration
|
||||
private val ReportFileName = "report.xml"
|
||||
// name of the file providing a dependency resolution report for a configuration
|
||||
private val ReportFileName = "report.xml"
|
||||
|
||||
// base name (name except for extension) of resolution report file
|
||||
private val ResolvedName = "resolved.xml"
|
||||
// base name (name except for extension) of resolution report file
|
||||
private val ResolvedName = "resolved.xml"
|
||||
|
||||
// Cache name
|
||||
private val Name = "sbt-resolution-cache"
|
||||
// Cache name
|
||||
private val Name = "sbt-resolution-cache"
|
||||
|
||||
// use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions
|
||||
private val ResolvedPattern = "[organisation]/[module]/" + Resolver.PluginPattern + "[revision]/[artifact].[ext]"
|
||||
// use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions
|
||||
private val ResolvedPattern = "[organisation]/[module]/" + Resolver.PluginPattern + "[revision]/[artifact].[ext]"
|
||||
}
|
||||
|
|
@ -6,145 +6,131 @@ package sbt
|
|||
import java.io.File
|
||||
import java.net.URL
|
||||
import scala.xml.NodeSeq
|
||||
import org.apache.ivy.plugins.resolver.{DependencyResolver, IBiblioResolver}
|
||||
import org.apache.ivy.plugins.resolver.{ DependencyResolver, IBiblioResolver }
|
||||
|
||||
sealed trait Resolver
|
||||
{
|
||||
def name: String
|
||||
sealed trait Resolver {
|
||||
def name: String
|
||||
}
|
||||
final class RawRepository(val resolver: DependencyResolver) extends Resolver
|
||||
{
|
||||
def name = resolver.getName
|
||||
override def toString = "Raw(" + resolver.toString + ")"
|
||||
final class RawRepository(val resolver: DependencyResolver) extends Resolver {
|
||||
def name = resolver.getName
|
||||
override def toString = "Raw(" + resolver.toString + ")"
|
||||
}
|
||||
sealed case class ChainedResolver(name: String, resolvers: Seq[Resolver]) extends Resolver
|
||||
sealed case class MavenRepository(name: String, root: String) extends Resolver
|
||||
{
|
||||
override def toString = name + ": " + root
|
||||
sealed case class MavenRepository(name: String, root: String) extends Resolver {
|
||||
override def toString = name + ": " + root
|
||||
}
|
||||
|
||||
final class Patterns(val ivyPatterns: Seq[String], val artifactPatterns: Seq[String], val isMavenCompatible: Boolean, val descriptorOptional: Boolean, val skipConsistencyCheck: Boolean)
|
||||
{
|
||||
private[sbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true)
|
||||
private[sbt] def withDescriptorOptional(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, true, skipConsistencyCheck)
|
||||
private[sbt] def withoutConsistencyCheck(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, true)
|
||||
private[sbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible)
|
||||
private[sbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible)
|
||||
override def toString = "Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)".format(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck)
|
||||
override def equals(obj: Any): Boolean = {
|
||||
obj match {
|
||||
case other: Patterns =>
|
||||
ivyPatterns == other.ivyPatterns && artifactPatterns == other.artifactPatterns && isMavenCompatible == other.isMavenCompatible && descriptorOptional == other.descriptorOptional && skipConsistencyCheck == other.skipConsistencyCheck
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
override def hashCode: Int = (ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck).hashCode
|
||||
final class Patterns(val ivyPatterns: Seq[String], val artifactPatterns: Seq[String], val isMavenCompatible: Boolean, val descriptorOptional: Boolean, val skipConsistencyCheck: Boolean) {
|
||||
private[sbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true)
|
||||
private[sbt] def withDescriptorOptional(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, true, skipConsistencyCheck)
|
||||
private[sbt] def withoutConsistencyCheck(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, true)
|
||||
private[sbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible)
|
||||
private[sbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible)
|
||||
override def toString = "Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)".format(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck)
|
||||
override def equals(obj: Any): Boolean = {
|
||||
obj match {
|
||||
case other: Patterns =>
|
||||
ivyPatterns == other.ivyPatterns && artifactPatterns == other.artifactPatterns && isMavenCompatible == other.isMavenCompatible && descriptorOptional == other.descriptorOptional && skipConsistencyCheck == other.skipConsistencyCheck
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
override def hashCode: Int = (ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck).hashCode
|
||||
|
||||
@deprecated
|
||||
def this(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean) = this(ivyPatterns, artifactPatterns, isMavenCompatible, false, false)
|
||||
@deprecated
|
||||
def this(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean) = this(ivyPatterns, artifactPatterns, isMavenCompatible, false, false)
|
||||
}
|
||||
object Patterns
|
||||
{
|
||||
implicit def defaultPatterns: Patterns = Resolver.defaultPatterns
|
||||
object Patterns {
|
||||
implicit def defaultPatterns: Patterns = Resolver.defaultPatterns
|
||||
|
||||
def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns : _*)
|
||||
def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = Patterns(artifactPatterns, artifactPatterns, isMavenCompatible)
|
||||
def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean): Patterns = apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, false, false)
|
||||
def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean): Patterns = new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck)
|
||||
def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns: _*)
|
||||
def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = Patterns(artifactPatterns, artifactPatterns, isMavenCompatible)
|
||||
def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean): Patterns = apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, false, false)
|
||||
def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean): Patterns = new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck)
|
||||
}
|
||||
object RepositoryHelpers
|
||||
{
|
||||
final case class SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int])
|
||||
{
|
||||
def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port)
|
||||
}
|
||||
/** Configuration specific to an Ivy filesystem resolver. */
|
||||
final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean])
|
||||
{
|
||||
def transactional() = FileConfiguration(isLocal, Some(true))
|
||||
def nontransactional() = FileConfiguration(isLocal, Some(false))
|
||||
def nonlocal() = FileConfiguration(false, isTransactional)
|
||||
}
|
||||
sealed trait SshAuthentication extends NotNull
|
||||
final case class PasswordAuthentication(user: String, password: Option[String]) extends SshAuthentication
|
||||
final case class KeyFileAuthentication(user: String, keyfile: File, password: Option[String]) extends SshAuthentication
|
||||
object RepositoryHelpers {
|
||||
final case class SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int]) {
|
||||
def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port)
|
||||
}
|
||||
/** Configuration specific to an Ivy filesystem resolver. */
|
||||
final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean]) {
|
||||
def transactional() = FileConfiguration(isLocal, Some(true))
|
||||
def nontransactional() = FileConfiguration(isLocal, Some(false))
|
||||
def nonlocal() = FileConfiguration(false, isTransactional)
|
||||
}
|
||||
sealed trait SshAuthentication extends NotNull
|
||||
final case class PasswordAuthentication(user: String, password: Option[String]) extends SshAuthentication
|
||||
final case class KeyFileAuthentication(user: String, keyfile: File, password: Option[String]) extends SshAuthentication
|
||||
}
|
||||
import RepositoryHelpers.{SshConnection, FileConfiguration}
|
||||
import RepositoryHelpers.{KeyFileAuthentication, PasswordAuthentication, SshAuthentication}
|
||||
import RepositoryHelpers.{ SshConnection, FileConfiguration }
|
||||
import RepositoryHelpers.{ KeyFileAuthentication, PasswordAuthentication, SshAuthentication }
|
||||
|
||||
/** sbt interface to an Ivy repository based on patterns, which is most Ivy repositories.*/
|
||||
sealed abstract class PatternsBasedRepository extends Resolver
|
||||
{
|
||||
type RepositoryType <: PatternsBasedRepository
|
||||
/** Should be implemented to create a new copy of this repository but with `patterns` as given.*/
|
||||
protected def copy(patterns: Patterns): RepositoryType
|
||||
sealed abstract class PatternsBasedRepository extends Resolver {
|
||||
type RepositoryType <: PatternsBasedRepository
|
||||
/** Should be implemented to create a new copy of this repository but with `patterns` as given.*/
|
||||
protected def copy(patterns: Patterns): RepositoryType
|
||||
|
||||
/** The object representing the configured patterns for this repository. */
|
||||
def patterns: Patterns
|
||||
/** The object representing the configured patterns for this repository. */
|
||||
def patterns: Patterns
|
||||
|
||||
/** Enables maven 2 compatibility for this repository. */
|
||||
def mavenStyle() = copy(patterns.mavenStyle())
|
||||
/** Enables maven 2 compatibility for this repository. */
|
||||
def mavenStyle() = copy(patterns.mavenStyle())
|
||||
|
||||
/** Makes descriptor metadata optional for this repository. */
|
||||
def descriptorOptional() = copy(patterns.withDescriptorOptional())
|
||||
/** Makes descriptor metadata optional for this repository. */
|
||||
def descriptorOptional() = copy(patterns.withDescriptorOptional())
|
||||
|
||||
/** Disables consistency checking for this repository. */
|
||||
def skipConsistencyCheck() = copy(patterns.withoutConsistencyCheck())
|
||||
/** Disables consistency checking for this repository. */
|
||||
def skipConsistencyCheck() = copy(patterns.withoutConsistencyCheck())
|
||||
|
||||
/** Adds the given patterns for resolving/publishing Ivy files.*/
|
||||
def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns))
|
||||
/** Adds the given patterns for resolving/publishing artifacts.*/
|
||||
def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns))
|
||||
/** Adds the given patterns for resolving/publishing Ivy files.*/
|
||||
def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns))
|
||||
/** Adds the given patterns for resolving/publishing artifacts.*/
|
||||
def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns))
|
||||
}
|
||||
/** sbt interface for an Ivy filesystem repository. More convenient construction is done using Resolver.file. */
|
||||
final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository
|
||||
{
|
||||
type RepositoryType = FileRepository
|
||||
protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns)
|
||||
private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns)
|
||||
def transactional() = copy(configuration.transactional())
|
||||
def nonlocal() = copy(configuration.nonlocal())
|
||||
final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository {
|
||||
type RepositoryType = FileRepository
|
||||
protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns)
|
||||
private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns)
|
||||
def transactional() = copy(configuration.transactional())
|
||||
def nonlocal() = copy(configuration.nonlocal())
|
||||
}
|
||||
final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository
|
||||
{
|
||||
type RepositoryType = URLRepository
|
||||
protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns)
|
||||
final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository {
|
||||
type RepositoryType = URLRepository
|
||||
protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns)
|
||||
}
|
||||
/** sbt interface for an Ivy ssh-based repository (ssh and sftp). Requires the Jsch library.. */
|
||||
sealed abstract class SshBasedRepository extends PatternsBasedRepository
|
||||
{
|
||||
type RepositoryType <: SshBasedRepository
|
||||
protected def copy(connection: SshConnection): RepositoryType
|
||||
private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication)))
|
||||
sealed abstract class SshBasedRepository extends PatternsBasedRepository {
|
||||
type RepositoryType <: SshBasedRepository
|
||||
protected def copy(connection: SshConnection): RepositoryType
|
||||
private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication)))
|
||||
|
||||
/** The object representing the configured ssh connection for this repository. */
|
||||
def connection: SshConnection
|
||||
/** The object representing the configured ssh connection for this repository. */
|
||||
def connection: SshConnection
|
||||
|
||||
/** Configures this to use the specified user name and password when connecting to the remote repository. */
|
||||
def as(user: String, password: String): RepositoryType = as(user, Some(password))
|
||||
def as(user: String): RepositoryType = as(user, None)
|
||||
def as(user: String, password: Option[String]) = copy(new PasswordAuthentication(user, password))
|
||||
/** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */
|
||||
def as(user: String, keyfile: File): RepositoryType = as(user, keyfile, None)
|
||||
def as(user: String, keyfile: File, password: String): RepositoryType = as(user, keyfile, Some(password))
|
||||
def as(user: String, keyfile: File, password: Option[String]): RepositoryType = copy(new KeyFileAuthentication(user, keyfile, password))
|
||||
/** Configures this to use the specified user name and password when connecting to the remote repository. */
|
||||
def as(user: String, password: String): RepositoryType = as(user, Some(password))
|
||||
def as(user: String): RepositoryType = as(user, None)
|
||||
def as(user: String, password: Option[String]) = copy(new PasswordAuthentication(user, password))
|
||||
/** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */
|
||||
def as(user: String, keyfile: File): RepositoryType = as(user, keyfile, None)
|
||||
def as(user: String, keyfile: File, password: String): RepositoryType = as(user, keyfile, Some(password))
|
||||
def as(user: String, keyfile: File, password: Option[String]): RepositoryType = copy(new KeyFileAuthentication(user, keyfile, password))
|
||||
}
|
||||
/** sbt interface for an Ivy repository over ssh. More convenient construction is done using Resolver.ssh. */
|
||||
final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository
|
||||
{
|
||||
type RepositoryType = SshRepository
|
||||
protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
/** Defines the permissions to set when publishing to this repository. */
|
||||
def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions))
|
||||
def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository {
|
||||
type RepositoryType = SshRepository
|
||||
protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
/** Defines the permissions to set when publishing to this repository. */
|
||||
def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions))
|
||||
def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
|
||||
}
|
||||
/** sbt interface for an Ivy repository over sftp. More convenient construction is done using Resolver.sftp. */
|
||||
final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository
|
||||
{
|
||||
type RepositoryType = SftpRepository
|
||||
protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns)
|
||||
protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns)
|
||||
final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository {
|
||||
type RepositoryType = SftpRepository
|
||||
protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns)
|
||||
protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns)
|
||||
}
|
||||
|
||||
import Resolver._
|
||||
|
|
@ -152,151 +138,163 @@ import Resolver._
|
|||
object DefaultMavenRepository extends MavenRepository("public", IBiblioResolver.DEFAULT_M2_ROOT)
|
||||
object JavaNet2Repository extends MavenRepository(JavaNet2RepositoryName, JavaNet2RepositoryRoot)
|
||||
object JavaNet1Repository extends JavaNet1Repository
|
||||
sealed trait JavaNet1Repository extends Resolver
|
||||
{
|
||||
def name = "java.net Maven1 Repository"
|
||||
sealed trait JavaNet1Repository extends Resolver {
|
||||
def name = "java.net Maven1 Repository"
|
||||
}
|
||||
|
||||
object Resolver
|
||||
{
|
||||
val TypesafeRepositoryRoot = "http://repo.typesafe.com/typesafe"
|
||||
val SbtPluginRepositoryRoot = "http://repo.scala-sbt.org/scalasbt"
|
||||
val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories"
|
||||
object Resolver {
|
||||
val TypesafeRepositoryRoot = "http://repo.typesafe.com/typesafe"
|
||||
val SbtPluginRepositoryRoot = "http://repo.scala-sbt.org/scalasbt"
|
||||
val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories"
|
||||
|
||||
// obsolete: kept only for launcher compatibility
|
||||
private[sbt] val ScalaToolsReleasesName = "Sonatype OSS Releases"
|
||||
private[sbt] val ScalaToolsSnapshotsName = "Sonatype OSS Snapshots"
|
||||
private[sbt] val ScalaToolsReleasesRoot = SonatypeRepositoryRoot + "/releases"
|
||||
private[sbt] val ScalaToolsSnapshotsRoot = SonatypeRepositoryRoot + "/snapshots"
|
||||
private[sbt] val ScalaToolsReleases = new MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot)
|
||||
private[sbt] val ScalaToolsSnapshots = new MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot)
|
||||
// obsolete: kept only for launcher compatibility
|
||||
private[sbt] val ScalaToolsReleasesName = "Sonatype OSS Releases"
|
||||
private[sbt] val ScalaToolsSnapshotsName = "Sonatype OSS Snapshots"
|
||||
private[sbt] val ScalaToolsReleasesRoot = SonatypeRepositoryRoot + "/releases"
|
||||
private[sbt] val ScalaToolsSnapshotsRoot = SonatypeRepositoryRoot + "/snapshots"
|
||||
private[sbt] val ScalaToolsReleases = new MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot)
|
||||
private[sbt] val ScalaToolsSnapshots = new MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot)
|
||||
|
||||
val JavaNet2RepositoryName = "java.net Maven2 Repository"
|
||||
val JavaNet2RepositoryRoot = "http://download.java.net/maven/2"
|
||||
val JavaNet2RepositoryName = "java.net Maven2 Repository"
|
||||
val JavaNet2RepositoryRoot = "http://download.java.net/maven/2"
|
||||
|
||||
def typesafeRepo(status: String) = new MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status)
|
||||
def typesafeIvyRepo(status: String) = url("typesafe-ivy-" + status, new URL(TypesafeRepositoryRoot + "/ivy-" + status + "/"))(ivyStylePatterns)
|
||||
def sbtPluginRepo(status: String) = url("sbt-plugin-" + status, new URL(SbtPluginRepositoryRoot + "/sbt-plugin-" + status + "/"))(ivyStylePatterns)
|
||||
def sonatypeRepo(status: String) = new MavenRepository("sonatype-" + status, SonatypeRepositoryRoot + "/" + status)
|
||||
def typesafeRepo(status: String) = new MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status)
|
||||
def typesafeIvyRepo(status: String) = url("typesafe-ivy-" + status, new URL(TypesafeRepositoryRoot + "/ivy-" + status + "/"))(ivyStylePatterns)
|
||||
def sbtPluginRepo(status: String) = url("sbt-plugin-" + status, new URL(SbtPluginRepositoryRoot + "/sbt-plugin-" + status + "/"))(ivyStylePatterns)
|
||||
def sonatypeRepo(status: String) = new MavenRepository("sonatype-" + status, SonatypeRepositoryRoot + "/" + status)
|
||||
|
||||
/** Add the local and Maven Central repositories to the user repositories. */
|
||||
def withDefaultResolvers(userResolvers: Seq[Resolver]): Seq[Resolver] =
|
||||
withDefaultResolvers(userResolvers, true)
|
||||
/** Add the local Ivy repository to the user repositories.
|
||||
* If `mavenCentral` is true, add the Maven Central repository. */
|
||||
def withDefaultResolvers(userResolvers: Seq[Resolver], mavenCentral: Boolean): Seq[Resolver] =
|
||||
Seq(Resolver.defaultLocal) ++
|
||||
userResolvers ++
|
||||
single(DefaultMavenRepository, mavenCentral)
|
||||
private def single[T](value: T, nonEmpty: Boolean): Seq[T] = if(nonEmpty) Seq(value) else Nil
|
||||
/** Add the local and Maven Central repositories to the user repositories. */
|
||||
def withDefaultResolvers(userResolvers: Seq[Resolver]): Seq[Resolver] =
|
||||
withDefaultResolvers(userResolvers, true)
|
||||
/**
|
||||
* Add the local Ivy repository to the user repositories.
|
||||
* If `mavenCentral` is true, add the Maven Central repository.
|
||||
*/
|
||||
def withDefaultResolvers(userResolvers: Seq[Resolver], mavenCentral: Boolean): Seq[Resolver] =
|
||||
Seq(Resolver.defaultLocal) ++
|
||||
userResolvers ++
|
||||
single(DefaultMavenRepository, mavenCentral)
|
||||
private def single[T](value: T, nonEmpty: Boolean): Seq[T] = if (nonEmpty) Seq(value) else Nil
|
||||
|
||||
/** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */
|
||||
sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull
|
||||
{
|
||||
/** Subclasses should implement this method to */
|
||||
protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType
|
||||
/** Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. A ManagedProject
|
||||
* has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, None, None, None)
|
||||
/** Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), None, None)
|
||||
/** Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), None, Some(basePath))
|
||||
/** Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), Some(port), None)
|
||||
/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), Some(port), Some(basePath))
|
||||
/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None).
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
|
||||
def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType =
|
||||
construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns))
|
||||
}
|
||||
/** A factory to construct an interface to an Ivy SSH resolver.*/
|
||||
object ssh extends Define[SshRepository]
|
||||
{
|
||||
protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SshRepository(name, connection, patterns, None)
|
||||
}
|
||||
/** A factory to construct an interface to an Ivy SFTP resolver.*/
|
||||
object sftp extends Define[SftpRepository]
|
||||
{
|
||||
protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SftpRepository(name, connection, patterns)
|
||||
}
|
||||
/** A factory to construct an interface to an Ivy filesytem resolver. */
|
||||
object file
|
||||
{
|
||||
/** Constructs a file resolver with the given name. The patterns to use must be explicitly specified
|
||||
* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
|
||||
def apply(name: String): FileRepository = FileRepository(name, defaultFileConfiguration, Patterns(false))
|
||||
/** Constructs a file resolver with the given name and base directory. */
|
||||
def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository =
|
||||
baseRepository(new File(baseDirectory.toURI.normalize) getAbsolutePath)(FileRepository(name, defaultFileConfiguration, _))
|
||||
}
|
||||
object url
|
||||
{
|
||||
/** Constructs a URL resolver with the given name. The patterns to use must be explicitly specified
|
||||
* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
|
||||
def apply(name: String): URLRepository = URLRepository(name, Patterns(false))
|
||||
/** Constructs a file resolver with the given name and base directory. */
|
||||
def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository =
|
||||
baseRepository(baseURL.toURI.normalize.toString)(URLRepository(name, _))
|
||||
}
|
||||
private def baseRepository[T](base: String)(construct: Patterns => T)(implicit basePatterns: Patterns): T =
|
||||
construct(resolvePatterns(base, basePatterns))
|
||||
/** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */
|
||||
sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull {
|
||||
/** Subclasses should implement this method to */
|
||||
protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType
|
||||
/**
|
||||
* Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. A ManagedProject
|
||||
* has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, None, None, None)
|
||||
/**
|
||||
* Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), None, None)
|
||||
/**
|
||||
* Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), None, Some(basePath))
|
||||
/**
|
||||
* Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), Some(port), None)
|
||||
/**
|
||||
* Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use.
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
|
||||
apply(name, Some(hostname), Some(port), Some(basePath))
|
||||
/**
|
||||
* Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
|
||||
* patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None).
|
||||
* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.
|
||||
*/
|
||||
def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType =
|
||||
construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns))
|
||||
}
|
||||
/** A factory to construct an interface to an Ivy SSH resolver. */
object ssh extends Define[SshRepository] {
  // The final `None` is the publish permissions (not set by default).
  protected def construct(name: String, connection: SshConnection, patterns: Patterns) =
    SshRepository(name, connection, patterns, None)
}
|
||||
/** A factory to construct an interface to an Ivy SFTP resolver. */
object sftp extends Define[SftpRepository] {
  protected def construct(name: String, connection: SshConnection, patterns: Patterns) =
    SftpRepository(name, connection, patterns)
}
|
||||
/** A factory to construct an interface to an Ivy filesystem resolver. */
object file {
  /**
   * Constructs a file resolver with the given name. The patterns to use must be explicitly specified
   * using the `ivys` or `artifacts` methods on the constructed resolver object.
   */
  def apply(name: String): FileRepository =
    FileRepository(name, defaultFileConfiguration, Patterns(false))

  /** Constructs a file resolver with the given name and base directory. */
  def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository = {
    // Normalize the URI first so "." and ".." segments don't leak into the patterns.
    val base = new File(baseDirectory.toURI.normalize).getAbsolutePath
    baseRepository(base)(FileRepository(name, defaultFileConfiguration, _))
  }
}
|
||||
/** A factory to construct an interface to an Ivy URL resolver. */
object url {
  /**
   * Constructs a URL resolver with the given name. The patterns to use must be explicitly specified
   * using the `ivys` or `artifacts` methods on the constructed resolver object.
   */
  def apply(name: String): URLRepository =
    URLRepository(name, Patterns(false))

  /** Constructs a URL resolver with the given name and base URL. */
  def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository = {
    val base = baseURL.toURI.normalize.toString
    baseRepository(base)(URLRepository(name, _))
  }
}
|
||||
/** Resolves `basePatterns` against `base` and feeds the result to the repository `construct`or. */
private def baseRepository[T](base: String)(construct: Patterns => T)(implicit basePatterns: Patterns): T =
  construct(resolvePatterns(base, basePatterns))
|
||||
|
||||
/**
 * If `base` is None, `patterns` is returned unchanged.
 * Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base.
 */
private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns =
  base.fold(patterns)(path => resolvePatterns(path, patterns))

/** Resolves the ivy file and artifact patterns in `patterns` against the given base. */
private def resolvePatterns(base: String, basePatterns: Patterns): Patterns = {
  def prefixed(ps: Seq[String]) = ps.map(resolvePattern(base, _))
  Patterns(prefixed(basePatterns.ivyPatterns), prefixed(basePatterns.artifactPatterns), basePatterns.isMavenCompatible)
}

/** Joins `base` and `pattern` with exactly one '/' between them, normalizing backslashes in `base`. */
private[sbt] def resolvePattern(base: String, pattern: String): String = {
  val normalized = base.replace('\\', '/')
  val separator = if (normalized.endsWith("/") || pattern.startsWith("/")) "" else "/"
  normalized + separator + pattern
}

/** Default configuration for file repositories: local = true, no explicit transactional setting. */
def defaultFileConfiguration = FileConfiguration(true, None)
/** Patterns for a Maven-compatible layout: no ivy patterns, the standard Maven artifact pattern. */
def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true)
/** Patterns for an Ivy-style layout. */
def ivyStylePatterns = defaultIvyPatterns //Patterns(Nil, Nil, false)
|
||||
/**
 * If `base` is None, `patterns` is returned unchanged.
 * Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base.
 */
private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns =
  base match {
    case None       => patterns
    case Some(path) => resolvePatterns(path, patterns)
  }

/** Resolves the ivy file and artifact patterns in `patterns` against the given base. */
private def resolvePatterns(base: String, basePatterns: Patterns): Patterns = {
  def resolveAll(patterns: Seq[String]) = patterns.map(resolvePattern(base, _))
  Patterns(resolveAll(basePatterns.ivyPatterns), resolveAll(basePatterns.artifactPatterns), basePatterns.isMavenCompatible)
}
|
||||
/**
 * Joins `base` and `pattern` into a single Ivy pattern string.
 * Backslashes in `base` are normalized to forward slashes; exactly one '/'
 * separates the two parts (none is inserted when one is already present).
 */
private[sbt] def resolvePattern(base: String, pattern: String): String = {
  val normalized = base.replace('\\', '/')
  val needsSeparator = !(normalized.endsWith("/") || pattern.startsWith("/"))
  if (needsSeparator) normalized + "/" + pattern else normalized + pattern
}
|
||||
/** Default configuration for file repositories: local = true, no explicit transactional setting. */
def defaultFileConfiguration = FileConfiguration(true, None)
/** Patterns for a Maven-compatible layout: no ivy patterns, the standard Maven artifact pattern. */
def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true)
/** Patterns for an Ivy-style layout. */
def ivyStylePatterns = defaultIvyPatterns //Patterns(Nil, Nil, false)
|
||||
|
||||
/** The default pattern set is the Maven-style one. */
def defaultPatterns = mavenStylePatterns
/** Standard Maven repository layout, with optional cross-version suffixes on the module name. */
def mavenStyleBasePattern = "[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext]"
/** Layout used by sbt's local Ivy repositories (includes optional plugin cross-version directories). */
def localBasePattern = "[organisation]/[module]/" + PluginPattern + "[revision]/[type]s/[artifact](-[classifier]).[ext]"
/** Default pattern used when retrieving managed dependencies into a project directory. */
def defaultRetrievePattern = "[type]s/[organisation]/[module]/" + PluginPattern + "[artifact](-[revision])(-[classifier]).[ext]"
/** Optional directory segments encoding the Scala and sbt versions a plugin was built for. */
final val PluginPattern = "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)"
|
||||
|
||||
// Location of the user's local Maven repository (~/.m2/repository/).
private[this] def mavenLocalDir = new File(Path.userHome, ".m2/repository/")
/** A file resolver for publishing to the local Maven repository. */
def publishMavenLocal = Resolver.file("publish-m2-local", mavenLocalDir)
/** A Maven-style resolver reading from the local Maven repository. */
def mavenLocal = MavenRepository("Maven2 Local", mavenLocalDir.toURI.toString)
/** The standard per-user "local" Ivy repository. */
def defaultLocal = defaultUserFileRepository("local")
/** The standard per-user "shared" Ivy repository. */
def defaultShared = defaultUserFileRepository("shared")

/**
 * A file repository named `id` rooted at `${ivy.home}/<id>/`, using the local base pattern
 * for both ivy files and artifacts (not Maven compatible).
 */
def defaultUserFileRepository(id: String) = {
  val patterns = ("${ivy.home}/" + id + "/" + localBasePattern) :: Nil
  FileRepository(id, defaultFileConfiguration, Patterns(patterns, patterns, false))
}

/** Default Ivy-style patterns: the local base pattern for both ivy files and artifacts. */
def defaultIvyPatterns = {
  val patterns = List(localBasePattern)
  Patterns(patterns, patterns, false)
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,13 +5,11 @@ package sbt
|
|||
|
||||
import java.util.Locale
|
||||
|
||||
/** Small string helpers shared across sbt's dependency DSL. */
object StringUtilities {
  @deprecated("Different use cases require different normalization. Use Project.normalizeModuleID or normalizeProjectID instead.", "0.13.0")
  def normalize(s: String) = s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-")

  /**
   * Requires `s` to contain at least one non-whitespace character.
   * Throws `IllegalArgumentException` with a message naming `label` otherwise.
   */
  // Explicit `: Unit =` replaces deprecated procedure syntax; `trim.nonEmpty` replaces `trim.length > 0`.
  def nonEmpty(s: String, label: String): Unit =
    require(s.trim.nonEmpty, label + " cannot be empty.")

  /** Returns `s` prefixed with '_' for appending to an identifier, or "" when `s` is empty. */
  def appendable(s: String) = if (s.isEmpty) "" else "_" + s
}
|
||||
|
|
|
|||
|
|
@ -3,142 +3,138 @@
|
|||
*/
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
/**
 * Provides information about dependency resolution.
 * It does not include information about evicted modules, only about the modules ultimately selected by the conflict manager.
 * This means that for a given configuration, there should only be one revision for a given organization and module name.
 * @param cachedDescriptor the location of the resolved module descriptor in the cache
 * @param configurations a sequence containing one report for each configuration resolved.
 * @param stats information about the update that produced this report
 * @param stamps last-modified timestamps recorded for the files referenced by this report
 * @see sbt.RichUpdateReport
 */
final class UpdateReport(val cachedDescriptor: File, val configurations: Seq[ConfigurationReport], val stats: UpdateStats, private[sbt] val stamps: Map[File, Long]) {
  @deprecated("Use the variant that provides timestamps of files.", "0.13.0")
  def this(cachedDescriptor: File, configurations: Seq[ConfigurationReport], stats: UpdateStats) =
    this(cachedDescriptor, configurations, stats, Map.empty)

  override def toString = "Update report:\n\t" + stats + "\n" + configurations.mkString

  /** All resolved modules in all configurations. */
  def allModules: Seq[ModuleID] = configurations.flatMap(_.allModules).distinct

  /** Applies `f` to every retrieved artifact, producing a report with the relocated files. */
  def retrieve(f: (String, ModuleID, Artifact, File) => File): UpdateReport =
    new UpdateReport(cachedDescriptor, configurations.map(_.retrieve(f)), stats, stamps)

  /** Gets the report for the given configuration, or `None` if the configuration was not resolved. */
  def configuration(s: String) = configurations.find(_.configuration == s)

  /** Gets the names of all resolved configurations. This `UpdateReport` contains one `ConfigurationReport` for each configuration in this list. */
  def allConfigurations: Seq[String] = configurations.map(_.configuration)
}
|
||||
|
||||
/**
 * Provides information about resolution of a single configuration.
 * @param configuration the configuration this report is for.
 * @param modules a sequence containing one report for each module resolved for this configuration.
 * @param evicted the modules that were resolved but evicted by the conflict manager.
 */
final class ConfigurationReport(val configuration: String, val modules: Seq[ModuleReport], val evicted: Seq[ModuleID]) {
  override def toString = "\t" + configuration + ":\n" + modules.mkString + evicted.map("\t\t(EVICTED) " + _ + "\n").mkString

  /**
   * All resolved modules for this configuration.
   * For a given organization and module name, there is only one revision/`ModuleID` in this sequence.
   */
  def allModules: Seq[ModuleID] = modules.map(report => addConfiguration(report.module))

  // Tags a module with this configuration unless it already declares configurations.
  private[this] def addConfiguration(mod: ModuleID): ModuleID =
    if (mod.configurations.isEmpty) mod.copy(configurations = Some(configuration)) else mod

  /** Applies `f` to every retrieved artifact in this configuration, producing an updated report. */
  def retrieve(f: (String, ModuleID, Artifact, File) => File): ConfigurationReport = {
    val retrieved = modules.map(m => m.retrieve((mid, art, file) => f(configuration, mid, art, file)))
    new ConfigurationReport(configuration, retrieved, evicted)
  }
}
|
||||
|
||||
/**
 * Provides information about the resolution of a module.
 * This information is in the context of a specific configuration.
 * @param module the `ModuleID` this report is for.
 * @param artifacts the resolved artifacts for this module, paired with the File the artifact was retrieved to.
 * @param missingArtifacts the artifacts that could not be resolved for this module.
 */
final class ModuleReport(val module: ModuleID, val artifacts: Seq[(Artifact, File)], val missingArtifacts: Seq[Artifact]) {
  override def toString = {
    val entries = artifacts.map(_.toString) ++ missingArtifacts.map(art => "(MISSING) " + art)
    // Multi-artifact reports get each artifact on its own indented line.
    val prefix = if (entries.size <= 1) "" else "\n\t\t\t"
    "\t\t" + module + ": " + prefix + entries.mkString("\n\t\t\t") + "\n"
  }

  /** Applies `f` to each retrieved artifact file, producing a report with the relocated files. */
  def retrieve(f: (ModuleID, Artifact, File) => File): ModuleReport = {
    val relocated = artifacts.map { case (art, file) => (art, f(module, art, file)) }
    new ModuleReport(module, relocated, missingArtifacts)
  }
}
|
||||
object UpdateReport {
  /** Enriches an `UpdateReport` with filtering/selection helpers. */
  implicit def richUpdateReport(report: UpdateReport): RichUpdateReport = new RichUpdateReport(report)

  /** Provides extra methods for filtering the contents of an `UpdateReport` and for obtaining references to a selected subset of the underlying files. */
  final class RichUpdateReport(report: UpdateReport) {
    /** Rebuilds the report's file timestamps from the files currently on disk. */
    def recomputeStamps(): UpdateReport = {
      val files = report.cachedDescriptor +: allFiles
      val stamps = files.map(f => (f, f.lastModified)).toMap
      new UpdateReport(report.cachedDescriptor, report.configurations, report.stats, stamps)
    }

    import DependencyFilter._
    /** Obtains all successfully retrieved files in all configurations and modules. */
    def allFiles: Seq[File] = matching(DependencyFilter.allPass)

    /** Obtains all successfully retrieved files in configurations, modules, and artifacts matching the specified filter. */
    def matching(f: DependencyFilter): Seq[File] = select0(f).distinct

    /** Obtains all successfully retrieved files matching all provided filters. An unspecified argument matches all files. */
    def select(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): Seq[File] =
      matching(DependencyFilter.make(configuration, module, artifact))

    private[this] def select0(f: DependencyFilter): Seq[File] =
      for (cReport <- report.configurations; mReport <- cReport.modules; (artifact, file) <- mReport.artifacts if f(cReport.configuration, mReport.module, artifact)) yield {
        // Replaces deprecated Predef.error; behavior (throw RuntimeException) is unchanged.
        if (file == null) sys.error("Null file: conf=" + cReport.configuration + ", module=" + mReport.module + ", art: " + artifact)
        file
      }

    /** Constructs a new report that only contains files matching the specified filter. */
    def filter(f: DependencyFilter): UpdateReport =
      moduleReportMap { (configuration, modReport) =>
        import modReport._
        val newArtifacts = artifacts filter { case (art, file) => f(configuration, module, art) }
        val newMissing = missingArtifacts filter { art => f(configuration, module, art) }
        new ModuleReport(module, newArtifacts, newMissing)
      }

    /** Replaces each module's artifact list with the result of `f`; missing artifacts are cleared. */
    def substitute(f: (String, ModuleID, Seq[(Artifact, File)]) => Seq[(Artifact, File)]): UpdateReport =
      moduleReportMap { (configuration, modReport) =>
        val newArtifacts = f(configuration, modReport.module, modReport.artifacts)
        new ModuleReport(modReport.module, newArtifacts, Nil)
      }

    /** Flattens the report into (configuration, module, artifact, file) tuples. */
    def toSeq: Seq[(String, ModuleID, Artifact, File)] =
      for (confReport <- report.configurations; modReport <- confReport.modules; (artifact, file) <- modReport.artifacts) yield (confReport.configuration, modReport.module, artifact, file)

    /** Flattens all missing artifacts into (configuration, module, artifact) tuples. */
    def allMissing: Seq[(String, ModuleID, Artifact)] =
      for (confReport <- report.configurations; modReport <- confReport.modules; artifact <- modReport.missingArtifacts) yield (confReport.configuration, modReport.module, artifact)

    /** Adds the artifacts produced by `f` for each module to that module's missing list (deduplicated). */
    def addMissing(f: ModuleID => Seq[Artifact]): UpdateReport =
      moduleReportMap { (configuration, modReport) =>
        import modReport._
        new ModuleReport(module, artifacts, (missingArtifacts ++ f(module)).distinct)
      }

    /** Rebuilds the report by transforming each `ModuleReport` with `f`; stats and stamps are preserved. */
    def moduleReportMap(f: (String, ModuleReport) => ModuleReport): UpdateReport = {
      val newConfigurations = report.configurations.map { confReport =>
        import confReport._
        val newModules = modules map { modReport => f(configuration, modReport) }
        new ConfigurationReport(configuration, newModules, evicted)
      }
      new UpdateReport(report.cachedDescriptor, newConfigurations, report.stats, report.stamps)
    }
  }
}
|
||||
/** Timing and size statistics for a dependency update. Times are in milliseconds, size in bytes. */
final class UpdateStats(val resolveTime: Long, val downloadTime: Long, val downloadSize: Long, val cached: Boolean) {
  override def toString = {
    val parts = Seq(
      "Resolve time: " + resolveTime + " ms",
      "Download time: " + downloadTime + " ms",
      "Download size: " + downloadSize + " bytes"
    )
    parts.mkString(", ")
  }
}
|
||||
|
|
@ -6,67 +6,62 @@ package impl
|
|||
|
||||
import StringUtilities.nonEmpty
|
||||
|
||||
/** Mixin providing the implicit conversions behind sbt's dependency-construction DSL. */
trait DependencyBuilders {
  /** Lifts a non-empty string into a `GroupID` so that `"org" % "name" % "rev"` syntax works. */
  final implicit def toGroupID(groupID: String): GroupID = {
    nonEmpty(groupID, "Group ID")
    new GroupID(groupID)
  }

  /** Lifts a non-empty string into a `RepositoryName` so that `"id" at "location"` syntax works. */
  final implicit def toRepositoryName(name: String): RepositoryName = {
    nonEmpty(name, "Repository name")
    new RepositoryName(name)
  }

  /** Enables `% "config"` on a `ModuleID` that does not yet declare configurations. */
  final implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable = {
    require(m.configurations.isEmpty, "Configurations already specified for module " + m)
    new ModuleIDConfigurable(m)
  }
}
|
||||
|
||||
/** First stage of the dependency DSL: an organization/group awaiting an artifact name. */
final class GroupID private[sbt] (groupID: String) {
  /** Pairs this group with `artifactID`, with cross-versioning disabled. */
  def %(artifactID: String) = groupArtifact(artifactID, CrossVersion.Disabled)

  /** Pairs this group with `artifactID`, cross-versioned against the binary Scala version. */
  def %%(artifactID: String): GroupArtifactID = groupArtifact(artifactID, CrossVersion.binary)

  @deprecated(deprecationMessage, "0.12.0")
  def %%(artifactID: String, crossVersion: String => String) = groupArtifact(artifactID, CrossVersion.binaryMapped(crossVersion))

  @deprecated(deprecationMessage, "0.12.0")
  def %%(artifactID: String, alternatives: (String, String)*) = groupArtifact(artifactID, CrossVersion.binaryMapped(Map(alternatives: _*) orElse { case s => s }))

  // Shared constructor path for all the % / %% variants; validates the artifact ID.
  private def groupArtifact(artifactID: String, cross: CrossVersion) = {
    nonEmpty(artifactID, "Artifact ID")
    new GroupArtifactID(groupID, artifactID, cross)
  }

  private[this] def deprecationMessage = """Use the cross method on the constructed ModuleID. For example: ("a" % "b" % "1").cross(...)"""
}
|
||||
/** Second stage of the dependency DSL: group + artifact awaiting a revision. */
final class GroupArtifactID private[sbt] (groupID: String, artifactID: String, crossVersion: CrossVersion) {
  /** Completes the module with a non-empty `revision`, applying the configured cross-version scheme. */
  def %(revision: String): ModuleID = {
    nonEmpty(revision, "Revision")
    ModuleID(groupID, artifactID, revision).cross(crossVersion)
  }
}
|
||||
/** Adds `% configuration` syntax to a `ModuleID` with no configurations yet declared. */
final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) {
  /** Restricts the dependency to the named `Configuration`. */
  def %(configuration: Configuration): ModuleID = %(configuration.name)

  /** Restricts the dependency to the given non-empty configurations string (e.g. "test->default"). */
  def %(configurations: String): ModuleID = {
    nonEmpty(configurations, "Configurations")
    moduleID.copy(configurations = Some(configurations))
  }
}
|
||||
/** Adds `at` syntax for declaring a Maven repository from a name and location string. */
final class RepositoryName private[sbt] (name: String) {
  /** Creates a `MavenRepository` resolving against the given non-empty `location`. */
  def at(location: String) = {
    nonEmpty(location, "Repository location")
    new MavenRepository(name, location)
  }
}
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ import org.apache.ivy.util.url.CredentialsStore
|
|||
*/
|
||||
object ErrorMessageAuthenticator {
|
||||
private var securityWarningLogged = false
|
||||
|
||||
|
||||
private def originalAuthenticator: Option[Authenticator] = {
|
||||
try {
|
||||
val f = classOf[Authenticator].getDeclaredField("theAuthenticator");
|
||||
|
|
@ -29,7 +29,7 @@ object ErrorMessageAuthenticator {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private lazy val ivyOriginalField = {
|
||||
val field = classOf[IvyAuthenticator].getDeclaredField("original")
|
||||
field.setAccessible(true)
|
||||
|
|
@ -42,17 +42,17 @@ object ErrorMessageAuthenticator {
|
|||
val newOriginal = new ErrorMessageAuthenticator(original)
|
||||
ivyOriginalField.set(ivy, newOriginal)
|
||||
}
|
||||
|
||||
|
||||
try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match {
|
||||
case Some(alreadyThere: ErrorMessageAuthenticator) => // We're already installed, no need to do the work again.
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
case originalOpt => installIntoIvyImpl(originalOpt)
|
||||
} catch {
|
||||
case t: Throwable =>
|
||||
case t: Throwable =>
|
||||
Message.debug("Error occurred will trying to install debug messages into Ivy Authentication" + t.getMessage)
|
||||
}
|
||||
Some(ivy)
|
||||
}
|
||||
|
||||
|
||||
/** Installs the error message authenticator so we have nicer error messages when using java's URL for downloading. */
|
||||
def install() {
|
||||
// Actually installs the error message authenticator.
|
||||
|
|
@ -62,67 +62,68 @@ object ErrorMessageAuthenticator {
|
|||
case e: SecurityException if !securityWarningLogged =>
|
||||
securityWarningLogged = true;
|
||||
Message.warn("Not enough permissions to set the ErorrMessageAuthenticator. "
|
||||
+ "Helpful debug messages disabled!");
|
||||
}
|
||||
// We will try to use the original authenticator as backup authenticator.
|
||||
// Since there is no getter available, so try to use some reflection to
|
||||
// obtain it. If that doesn't work, assume there is no original authenticator
|
||||
def doInstallIfIvy(original: Option[Authenticator]): Unit =
|
||||
original match {
|
||||
case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed
|
||||
case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy)
|
||||
case original => doInstall(original)
|
||||
}
|
||||
doInstallIfIvy(originalAuthenticator)
|
||||
+ "Helpful debug messages disabled!");
|
||||
}
|
||||
// We will try to use the original authenticator as backup authenticator.
|
||||
// Since there is no getter available, so try to use some reflection to
|
||||
// obtain it. If that doesn't work, assume there is no original authenticator
|
||||
def doInstallIfIvy(original: Option[Authenticator]): Unit =
|
||||
original match {
|
||||
case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed
|
||||
case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy)
|
||||
case original => doInstall(original)
|
||||
}
|
||||
doInstallIfIvy(originalAuthenticator)
|
||||
}
|
||||
}
|
||||
/**
|
||||
* An authenticator which just delegates to a previous authenticator and issues *nice*
|
||||
* error messages on failure to find credentials.
|
||||
*
|
||||
* Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to
|
||||
* An authenticator which just delegates to a previous authenticator and issues *nice*
|
||||
* error messages on failure to find credentials.
|
||||
*
|
||||
* Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to
|
||||
* install this one at some point and eventually ivy will capture it and use it.
|
||||
*/
|
||||
private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticator]) extends Authenticator {
|
||||
|
||||
protected override def getPasswordAuthentication(): PasswordAuthentication = {
|
||||
// We're guaranteed to only get here if Ivy's authentication fails
|
||||
if (!isProxyAuthentication) {
|
||||
val host = getRequestingHost
|
||||
// TODO - levenshtein distance "did you mean" message.
|
||||
Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].")
|
||||
val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty)
|
||||
if(!configuredRealms.isEmpty) {
|
||||
Message.error(s" Is one of these realms mispelled for host [${host}]:")
|
||||
configuredRealms foreach { realm =>
|
||||
Message.error(s" * ${realm}")
|
||||
}
|
||||
}
|
||||
protected override def getPasswordAuthentication(): PasswordAuthentication = {
|
||||
// We're guaranteed to only get here if Ivy's authentication fails
|
||||
if (!isProxyAuthentication) {
|
||||
val host = getRequestingHost
|
||||
// TODO - levenshtein distance "did you mean" message.
|
||||
Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].")
|
||||
val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty)
|
||||
if (!configuredRealms.isEmpty) {
|
||||
Message.error(s" Is one of these realms mispelled for host [${host}]:")
|
||||
configuredRealms foreach { realm =>
|
||||
Message.error(s" * ${realm}")
|
||||
}
|
||||
// TODO - Maybe we should work on a helpful proxy message...
|
||||
|
||||
// TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it.
|
||||
// or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be).
|
||||
|
||||
// Grabs the authentication that would have been provided had we not been installed...
|
||||
def originalAuthentication: Option[PasswordAuthentication] = {
|
||||
Authenticator.setDefault(original.getOrElse(null))
|
||||
try Option(Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme))
|
||||
finally Authenticator.setDefault(this)
|
||||
}
|
||||
originalAuthentication.getOrElse(null)
|
||||
}
|
||||
}
|
||||
// TODO - Maybe we should work on a helpful proxy message...
|
||||
|
||||
/** Returns true if this authentication if for a proxy and not for an HTTP server.
|
||||
* We want to display different error messages, depending.
|
||||
*/
|
||||
private def isProxyAuthentication: Boolean =
|
||||
getRequestorType == Authenticator.RequestorType.PROXY
|
||||
// TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it.
|
||||
// or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be).
|
||||
|
||||
// Grabs the authentication that would have been provided had we not been installed...
|
||||
def originalAuthentication: Option[PasswordAuthentication] = {
|
||||
Authenticator.setDefault(original.getOrElse(null))
|
||||
try Option(Authenticator.requestPasswordAuthentication(
|
||||
getRequestingHost,
|
||||
getRequestingSite,
|
||||
getRequestingPort,
|
||||
getRequestingProtocol,
|
||||
getRequestingPrompt,
|
||||
getRequestingScheme))
|
||||
finally Authenticator.setDefault(this)
|
||||
}
|
||||
originalAuthentication.getOrElse(null)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if this authentication if for a proxy and not for an HTTP server.
|
||||
* We want to display different error messages, depending.
|
||||
*/
|
||||
private def isProxyAuthentication: Boolean =
|
||||
getRequestorType == Authenticator.RequestorType.PROXY
|
||||
|
||||
}
|
||||
|
|
@ -13,7 +13,7 @@ private[sbt] case class Realm(host: String, realm: String) extends CredentialKey
|
|||
|
||||
/**
|
||||
* Helper mechanism to improve credential related error messages.
|
||||
*
|
||||
*
|
||||
* This evil class exposes to us the necessary information to warn on credential failure and offer
|
||||
* spelling/typo suggestions.
|
||||
*/
|
||||
|
|
@ -21,17 +21,18 @@ private[sbt] object IvyCredentialsLookup {
|
|||
|
||||
/** Helper extractor for Ivy's key-value store of credentials. */
|
||||
private object KeySplit {
|
||||
def unapply(key: String): Option[(String,String)] = {
|
||||
def unapply(key: String): Option[(String, String)] = {
|
||||
key.indexOf('@') match {
|
||||
case -1 => None
|
||||
case n => Some(key.take(n) -> key.drop(n+1))
|
||||
case n => Some(key.take(n) -> key.drop(n + 1))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Here we cheat runtime private so we can look in the credentials store.
|
||||
/**
|
||||
* Here we cheat runtime private so we can look in the credentials store.
|
||||
*
|
||||
* TODO - Don't bomb at class load time...
|
||||
* TODO - Don't bomb at class load time...
|
||||
*/
|
||||
private val credKeyringField = {
|
||||
val tmp = classOf[CredentialsStore].getDeclaredField("KEYRING")
|
||||
|
|
@ -45,10 +46,10 @@ private[sbt] object IvyCredentialsLookup {
|
|||
// make a clone of the set...
|
||||
(map.keySet.asScala.map {
|
||||
case KeySplit(realm, host) => Realm(host, realm)
|
||||
case host => Host(host)
|
||||
case host => Host(host)
|
||||
})(collection.breakOut)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A mapping of host -> realms in the ivy credentials store.
|
||||
*/
|
||||
|
|
@ -58,6 +59,6 @@ private[sbt] object IvyCredentialsLookup {
|
|||
} groupBy { realm =>
|
||||
realm.host
|
||||
} mapValues { realms =>
|
||||
realms map (_.realm)
|
||||
realms map (_.realm)
|
||||
}
|
||||
}
|
||||
|
|
@ -3,134 +3,132 @@ package ivyint
|
|||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.util.{Collection, Collections => CS}
|
||||
import java.util.{ Collection, Collections => CS }
|
||||
import CS.singleton
|
||||
|
||||
import org.apache.ivy.{core, plugins, util, Ivy}
|
||||
import core.module.descriptor.{DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor}
|
||||
import core.module.descriptor.{DefaultDependencyDescriptor => DDD, DependencyDescriptor}
|
||||
import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId}
|
||||
import org.apache.ivy.{ core, plugins, util, Ivy }
|
||||
import core.module.descriptor.{ DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor }
|
||||
import core.module.descriptor.{ DefaultDependencyDescriptor => DDD, DependencyDescriptor }
|
||||
import core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId }
|
||||
import plugins.namespace.Namespace
|
||||
import util.extendable.ExtendableItem
|
||||
|
||||
private[sbt] object MergeDescriptors
|
||||
{
|
||||
def mergeable(a: DependencyDescriptor, b: DependencyDescriptor): Boolean =
|
||||
a.isForce == b.isForce &&
|
||||
a.isChanging == b.isChanging &&
|
||||
a.isTransitive == b.isTransitive &&
|
||||
a.getParentRevisionId == b.getParentRevisionId &&
|
||||
a.getNamespace == b.getNamespace && {
|
||||
val amrid = a.getDependencyRevisionId
|
||||
val bmrid = b.getDependencyRevisionId
|
||||
amrid == bmrid
|
||||
} && {
|
||||
val adyn = a.getDynamicConstraintDependencyRevisionId
|
||||
val bdyn = b.getDynamicConstraintDependencyRevisionId
|
||||
adyn == bdyn
|
||||
}
|
||||
private[sbt] object MergeDescriptors {
|
||||
def mergeable(a: DependencyDescriptor, b: DependencyDescriptor): Boolean =
|
||||
a.isForce == b.isForce &&
|
||||
a.isChanging == b.isChanging &&
|
||||
a.isTransitive == b.isTransitive &&
|
||||
a.getParentRevisionId == b.getParentRevisionId &&
|
||||
a.getNamespace == b.getNamespace && {
|
||||
val amrid = a.getDependencyRevisionId
|
||||
val bmrid = b.getDependencyRevisionId
|
||||
amrid == bmrid
|
||||
} && {
|
||||
val adyn = a.getDynamicConstraintDependencyRevisionId
|
||||
val bdyn = b.getDynamicConstraintDependencyRevisionId
|
||||
adyn == bdyn
|
||||
}
|
||||
|
||||
def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor =
|
||||
{
|
||||
assert(mergeable(a,b))
|
||||
new MergedDescriptors(a,b)
|
||||
}
|
||||
def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor =
|
||||
{
|
||||
assert(mergeable(a, b))
|
||||
new MergedDescriptors(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
// combines the artifacts, configurations, includes, and excludes for DependencyDescriptors `a` and `b`
|
||||
// that otherwise have equal IDs
|
||||
private final class MergedDescriptors(a: DependencyDescriptor, b: DependencyDescriptor) extends DependencyDescriptor
|
||||
{
|
||||
def getDependencyId = a.getDependencyId
|
||||
def isForce = a.isForce
|
||||
def isChanging = a.isChanging
|
||||
def isTransitive = a.isTransitive
|
||||
def getNamespace = a.getNamespace
|
||||
def getParentRevisionId = a.getParentRevisionId
|
||||
def getDependencyRevisionId = a.getDependencyRevisionId
|
||||
def getDynamicConstraintDependencyRevisionId = a.getDynamicConstraintDependencyRevisionId
|
||||
private final class MergedDescriptors(a: DependencyDescriptor, b: DependencyDescriptor) extends DependencyDescriptor {
|
||||
def getDependencyId = a.getDependencyId
|
||||
def isForce = a.isForce
|
||||
def isChanging = a.isChanging
|
||||
def isTransitive = a.isTransitive
|
||||
def getNamespace = a.getNamespace
|
||||
def getParentRevisionId = a.getParentRevisionId
|
||||
def getDependencyRevisionId = a.getDependencyRevisionId
|
||||
def getDynamicConstraintDependencyRevisionId = a.getDynamicConstraintDependencyRevisionId
|
||||
|
||||
def getModuleConfigurations = concat(a.getModuleConfigurations, b.getModuleConfigurations)
|
||||
def getModuleConfigurations = concat(a.getModuleConfigurations, b.getModuleConfigurations)
|
||||
|
||||
def getDependencyConfigurations(moduleConfiguration: String, requestedConfiguration: String) =
|
||||
concat(a.getDependencyConfigurations(moduleConfiguration, requestedConfiguration), b.getDependencyConfigurations(moduleConfiguration))
|
||||
def getDependencyConfigurations(moduleConfiguration: String, requestedConfiguration: String) =
|
||||
concat(a.getDependencyConfigurations(moduleConfiguration, requestedConfiguration), b.getDependencyConfigurations(moduleConfiguration))
|
||||
|
||||
def getDependencyConfigurations(moduleConfiguration: String) =
|
||||
concat(a.getDependencyConfigurations(moduleConfiguration), b.getDependencyConfigurations(moduleConfiguration))
|
||||
def getDependencyConfigurations(moduleConfiguration: String) =
|
||||
concat(a.getDependencyConfigurations(moduleConfiguration), b.getDependencyConfigurations(moduleConfiguration))
|
||||
|
||||
def getDependencyConfigurations(moduleConfigurations: Array[String]) =
|
||||
concat(a.getDependencyConfigurations(moduleConfigurations), b.getDependencyConfigurations(moduleConfigurations))
|
||||
def getDependencyConfigurations(moduleConfigurations: Array[String]) =
|
||||
concat(a.getDependencyConfigurations(moduleConfigurations), b.getDependencyConfigurations(moduleConfigurations))
|
||||
|
||||
def getAllDependencyArtifacts = concatArtifacts(a, a.getAllDependencyArtifacts, b, b.getAllDependencyArtifacts)
|
||||
def getAllDependencyArtifacts = concatArtifacts(a, a.getAllDependencyArtifacts, b, b.getAllDependencyArtifacts)
|
||||
|
||||
def getDependencyArtifacts(moduleConfigurations: String) =
|
||||
concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations))
|
||||
def getDependencyArtifacts(moduleConfigurations: String) =
|
||||
concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations))
|
||||
|
||||
def getDependencyArtifacts(moduleConfigurations: Array[String]) =
|
||||
concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations))
|
||||
def getDependencyArtifacts(moduleConfigurations: Array[String]) =
|
||||
concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations))
|
||||
|
||||
def getAllIncludeRules = concat(a.getAllIncludeRules, b.getAllIncludeRules)
|
||||
def getAllIncludeRules = concat(a.getAllIncludeRules, b.getAllIncludeRules)
|
||||
|
||||
def getIncludeRules(moduleConfigurations: String) =
|
||||
concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations))
|
||||
def getIncludeRules(moduleConfigurations: String) =
|
||||
concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations))
|
||||
|
||||
def getIncludeRules(moduleConfigurations: Array[String]) =
|
||||
concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations))
|
||||
def getIncludeRules(moduleConfigurations: Array[String]) =
|
||||
concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations))
|
||||
|
||||
private[this] def concatArtifacts(a: DependencyDescriptor, as: Array[DependencyArtifactDescriptor], b: DependencyDescriptor, bs: Array[DependencyArtifactDescriptor]) =
|
||||
{
|
||||
if(as.isEmpty)
|
||||
if(bs.isEmpty) as
|
||||
else defaultArtifact(a) +: explicitConfigurations(b, bs)
|
||||
else if(bs.isEmpty) explicitConfigurations(a, as) :+ defaultArtifact(b)
|
||||
else concat(explicitConfigurations(a, as), explicitConfigurations(b, bs))
|
||||
}
|
||||
private[this] def explicitConfigurations(base: DependencyDescriptor, arts: Array[DependencyArtifactDescriptor]): Array[DependencyArtifactDescriptor] =
|
||||
arts map { art => explicitConfigurations(base, art) }
|
||||
private[this] def explicitConfigurations(base: DependencyDescriptor, art: DependencyArtifactDescriptor): DependencyArtifactDescriptor =
|
||||
{
|
||||
val aConfs = art.getConfigurations
|
||||
if(aConfs == null || aConfs.isEmpty)
|
||||
copyWithConfigurations(art, base.getModuleConfigurations)
|
||||
else
|
||||
art
|
||||
}
|
||||
private[this] def defaultArtifact(a: DependencyDescriptor): DependencyArtifactDescriptor =
|
||||
{
|
||||
val dd = new DefaultDependencyArtifactDescriptor(a, a.getDependencyRevisionId.getName, "jar", "jar", null, null)
|
||||
addConfigurations(dd, a.getModuleConfigurations)
|
||||
dd
|
||||
}
|
||||
private[this] def copyWithConfigurations(dd: DependencyArtifactDescriptor, confs: Seq[String]): DependencyArtifactDescriptor =
|
||||
{
|
||||
val dextra = dd.getQualifiedExtraAttributes
|
||||
val newd = new DefaultDependencyArtifactDescriptor(dd.getDependencyDescriptor, dd.getName, dd.getType, dd.getExt, dd.getUrl, dextra)
|
||||
addConfigurations(newd, confs)
|
||||
newd
|
||||
}
|
||||
private[this] def addConfigurations(dd: DefaultDependencyArtifactDescriptor, confs: Seq[String]): Unit =
|
||||
confs foreach dd.addConfiguration
|
||||
private[this] def concatArtifacts(a: DependencyDescriptor, as: Array[DependencyArtifactDescriptor], b: DependencyDescriptor, bs: Array[DependencyArtifactDescriptor]) =
|
||||
{
|
||||
if (as.isEmpty)
|
||||
if (bs.isEmpty) as
|
||||
else defaultArtifact(a) +: explicitConfigurations(b, bs)
|
||||
else if (bs.isEmpty) explicitConfigurations(a, as) :+ defaultArtifact(b)
|
||||
else concat(explicitConfigurations(a, as), explicitConfigurations(b, bs))
|
||||
}
|
||||
private[this] def explicitConfigurations(base: DependencyDescriptor, arts: Array[DependencyArtifactDescriptor]): Array[DependencyArtifactDescriptor] =
|
||||
arts map { art => explicitConfigurations(base, art) }
|
||||
private[this] def explicitConfigurations(base: DependencyDescriptor, art: DependencyArtifactDescriptor): DependencyArtifactDescriptor =
|
||||
{
|
||||
val aConfs = art.getConfigurations
|
||||
if (aConfs == null || aConfs.isEmpty)
|
||||
copyWithConfigurations(art, base.getModuleConfigurations)
|
||||
else
|
||||
art
|
||||
}
|
||||
private[this] def defaultArtifact(a: DependencyDescriptor): DependencyArtifactDescriptor =
|
||||
{
|
||||
val dd = new DefaultDependencyArtifactDescriptor(a, a.getDependencyRevisionId.getName, "jar", "jar", null, null)
|
||||
addConfigurations(dd, a.getModuleConfigurations)
|
||||
dd
|
||||
}
|
||||
private[this] def copyWithConfigurations(dd: DependencyArtifactDescriptor, confs: Seq[String]): DependencyArtifactDescriptor =
|
||||
{
|
||||
val dextra = dd.getQualifiedExtraAttributes
|
||||
val newd = new DefaultDependencyArtifactDescriptor(dd.getDependencyDescriptor, dd.getName, dd.getType, dd.getExt, dd.getUrl, dextra)
|
||||
addConfigurations(newd, confs)
|
||||
newd
|
||||
}
|
||||
private[this] def addConfigurations(dd: DefaultDependencyArtifactDescriptor, confs: Seq[String]): Unit =
|
||||
confs foreach dd.addConfiguration
|
||||
|
||||
private[this] def concat[T: ClassManifest](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct.toArray
|
||||
private[this] def concat[T: ClassManifest](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct.toArray
|
||||
|
||||
def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules)
|
||||
def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules)
|
||||
|
||||
def getExcludeRules(moduleConfigurations: String) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations))
|
||||
def getExcludeRules(moduleConfigurations: String) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations))
|
||||
|
||||
def getExcludeRules(moduleConfigurations: Array[String]) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations))
|
||||
def getExcludeRules(moduleConfigurations: Array[String]) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations))
|
||||
|
||||
def doesExclude(moduleConfigurations: Array[String], artifactId: ArtifactId) = a.doesExclude(moduleConfigurations, artifactId) || b.doesExclude(moduleConfigurations, artifactId)
|
||||
def doesExclude(moduleConfigurations: Array[String], artifactId: ArtifactId) = a.doesExclude(moduleConfigurations, artifactId) || b.doesExclude(moduleConfigurations, artifactId)
|
||||
|
||||
def canExclude = a.canExclude || b.canExclude
|
||||
def canExclude = a.canExclude || b.canExclude
|
||||
|
||||
def asSystem = this
|
||||
def asSystem = this
|
||||
|
||||
def clone(revision: ModuleRevisionId) = new MergedDescriptors(a.clone(revision), b.clone(revision))
|
||||
def clone(revision: ModuleRevisionId) = new MergedDescriptors(a.clone(revision), b.clone(revision))
|
||||
|
||||
def getAttribute(name: String): String = a.getAttribute(name)
|
||||
def getAttributes = a.getAttributes
|
||||
def getExtraAttribute(name: String) = a.getExtraAttribute(name)
|
||||
def getExtraAttributes = a.getExtraAttributes
|
||||
def getQualifiedExtraAttributes = a.getQualifiedExtraAttributes
|
||||
def getSourceModule = a.getSourceModule
|
||||
def getAttribute(name: String): String = a.getAttribute(name)
|
||||
def getAttributes = a.getAttributes
|
||||
def getExtraAttribute(name: String) = a.getExtraAttribute(name)
|
||||
def getExtraAttributes = a.getExtraAttributes
|
||||
def getQualifiedExtraAttributes = a.getQualifiedExtraAttributes
|
||||
def getSourceModule = a.getSourceModule
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,62 +1,58 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package xsbt.boot
|
||||
package xsbt.boot
|
||||
|
||||
import java.io.File
|
||||
|
||||
|
||||
// The entry point to the launcher
|
||||
object Boot
|
||||
{
|
||||
def main(args: Array[String])
|
||||
{
|
||||
val config = parseArgs(args)
|
||||
// If we havne't exited, we set up some hooks and launch
|
||||
System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM
|
||||
System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks
|
||||
System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise
|
||||
CheckProxy()
|
||||
run(config)
|
||||
}
|
||||
def parseArgs(args: Array[String]): LauncherArguments = {
|
||||
@annotation.tailrec
|
||||
def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments =
|
||||
args match {
|
||||
case "--version" :: rest =>
|
||||
println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion)
|
||||
exit(1)
|
||||
case "--locate" :: rest => parse(rest, true, remaining)
|
||||
case next :: rest => parse(rest, isLocate, next :: remaining)
|
||||
case Nil => new LauncherArguments(remaining.reverse, isLocate)
|
||||
}
|
||||
parse(args.toList, false, Nil)
|
||||
}
|
||||
|
||||
// this arrangement is because Scala does not always properly optimize away
|
||||
// the tail recursion in a catch statement
|
||||
final def run(args: LauncherArguments): Unit = runImpl(args) match {
|
||||
case Some(newArgs) => run(newArgs)
|
||||
case None => ()
|
||||
}
|
||||
private def runImpl(args: LauncherArguments): Option[LauncherArguments] =
|
||||
try
|
||||
Launch(args) map exit
|
||||
catch
|
||||
{
|
||||
case b: BootException => errorAndExit(b.toString)
|
||||
case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage)
|
||||
case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false))
|
||||
case e: Throwable =>
|
||||
e.printStackTrace
|
||||
errorAndExit(Pre.prefixError(e.toString))
|
||||
}
|
||||
object Boot {
|
||||
def main(args: Array[String]) {
|
||||
val config = parseArgs(args)
|
||||
// If we havne't exited, we set up some hooks and launch
|
||||
System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM
|
||||
System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks
|
||||
System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise
|
||||
CheckProxy()
|
||||
run(config)
|
||||
}
|
||||
def parseArgs(args: Array[String]): LauncherArguments = {
|
||||
@annotation.tailrec
|
||||
def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments =
|
||||
args match {
|
||||
case "--version" :: rest =>
|
||||
println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion)
|
||||
exit(1)
|
||||
case "--locate" :: rest => parse(rest, true, remaining)
|
||||
case next :: rest => parse(rest, isLocate, next :: remaining)
|
||||
case Nil => new LauncherArguments(remaining.reverse, isLocate)
|
||||
}
|
||||
parse(args.toList, false, Nil)
|
||||
}
|
||||
|
||||
private def errorAndExit(msg: String): Nothing =
|
||||
{
|
||||
System.out.println(msg)
|
||||
exit(1)
|
||||
}
|
||||
private def exit(code: Int): Nothing =
|
||||
System.exit(code).asInstanceOf[Nothing]
|
||||
// this arrangement is because Scala does not always properly optimize away
|
||||
// the tail recursion in a catch statement
|
||||
final def run(args: LauncherArguments): Unit = runImpl(args) match {
|
||||
case Some(newArgs) => run(newArgs)
|
||||
case None => ()
|
||||
}
|
||||
private def runImpl(args: LauncherArguments): Option[LauncherArguments] =
|
||||
try
|
||||
Launch(args) map exit
|
||||
catch {
|
||||
case b: BootException => errorAndExit(b.toString)
|
||||
case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage)
|
||||
case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false))
|
||||
case e: Throwable =>
|
||||
e.printStackTrace
|
||||
errorAndExit(Pre.prefixError(e.toString))
|
||||
}
|
||||
|
||||
private def errorAndExit(msg: String): Nothing =
|
||||
{
|
||||
System.out.println(msg)
|
||||
exit(1)
|
||||
}
|
||||
private def exit(code: Int): Nothing =
|
||||
System.exit(code).asInstanceOf[Nothing]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package xsbt.boot
|
||||
package xsbt.boot
|
||||
|
||||
import java.io.File
|
||||
import java.io.File
|
||||
|
||||
// <boot.directory>
|
||||
// [<scala-org>.]scala-<scala.version>/ [baseDirectoryName]
|
||||
|
|
@ -12,107 +12,111 @@
|
|||
//
|
||||
// see also ProjectProperties for the set of constants that apply to the build.properties file in a project
|
||||
// The scala organization is used as a prefix in baseDirectoryName when a non-standard organization is used.
|
||||
private object BootConfiguration
|
||||
{
|
||||
// these are the Scala module identifiers to resolve/retrieve
|
||||
val ScalaOrg = "org.scala-lang"
|
||||
val CompilerModuleName = "scala-compiler"
|
||||
val LibraryModuleName = "scala-library"
|
||||
private object BootConfiguration {
|
||||
// these are the Scala module identifiers to resolve/retrieve
|
||||
val ScalaOrg = "org.scala-lang"
|
||||
val CompilerModuleName = "scala-compiler"
|
||||
val LibraryModuleName = "scala-library"
|
||||
|
||||
val JUnitName = "junit"
|
||||
val JAnsiVersion = "1.11"
|
||||
val JUnitName = "junit"
|
||||
val JAnsiVersion = "1.11"
|
||||
|
||||
val SbtOrg = "org.scala-sbt"
|
||||
val SbtOrg = "org.scala-sbt"
|
||||
|
||||
/** The Ivy conflict manager to use for updating.*/
|
||||
val ConflictManagerName = "latest-revision"
|
||||
/** The name of the local Ivy repository, which is used when compiling sbt from source.*/
|
||||
val LocalIvyName = "local"
|
||||
/** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/
|
||||
val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]"
|
||||
/** The artifact pattern used for the local Ivy repository.*/
|
||||
def LocalArtifactPattern = LocalPattern
|
||||
/** The Ivy pattern used for the local Ivy repository.*/
|
||||
def LocalIvyPattern = LocalPattern
|
||||
/** The Ivy conflict manager to use for updating.*/
|
||||
val ConflictManagerName = "latest-revision"
|
||||
/** The name of the local Ivy repository, which is used when compiling sbt from source.*/
|
||||
val LocalIvyName = "local"
|
||||
/** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/
|
||||
val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]"
|
||||
/** The artifact pattern used for the local Ivy repository.*/
|
||||
def LocalArtifactPattern = LocalPattern
|
||||
/** The Ivy pattern used for the local Ivy repository.*/
|
||||
def LocalIvyPattern = LocalPattern
|
||||
|
||||
final val FjbgPackage = "ch.epfl.lamp.fjbg."
|
||||
/** The class name prefix used to hide the Scala classes used by this loader from the application */
|
||||
final val ScalaPackage = "scala."
|
||||
/** The class name prefix used to hide the Ivy classes used by this loader from the application*/
|
||||
final val IvyPackage = "org.apache.ivy."
|
||||
/** The class name prefix used to hide the launcher classes from the application.
|
||||
* Note that access to xsbti classes are allowed.*/
|
||||
final val SbtBootPackage = "xsbt.boot."
|
||||
/** The loader will check that these classes can be loaded and will assume that their presence indicates
|
||||
* the Scala compiler and library have been downloaded.*/
|
||||
val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil
|
||||
final val FjbgPackage = "ch.epfl.lamp.fjbg."
|
||||
/** The class name prefix used to hide the Scala classes used by this loader from the application */
|
||||
final val ScalaPackage = "scala."
|
||||
/** The class name prefix used to hide the Ivy classes used by this loader from the application*/
|
||||
final val IvyPackage = "org.apache.ivy."
|
||||
/**
|
||||
* The class name prefix used to hide the launcher classes from the application.
|
||||
* Note that access to xsbti classes are allowed.
|
||||
*/
|
||||
final val SbtBootPackage = "xsbt.boot."
|
||||
/**
|
||||
* The loader will check that these classes can be loaded and will assume that their presence indicates
|
||||
* the Scala compiler and library have been downloaded.
|
||||
*/
|
||||
val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil
|
||||
|
||||
val ScalaHomeProperty = "scala.home"
|
||||
val UpdateLogName = "update.log"
|
||||
val DefaultChecksums = "sha1" :: "md5" :: Nil
|
||||
val ScalaHomeProperty = "scala.home"
|
||||
val UpdateLogName = "update.log"
|
||||
val DefaultChecksums = "sha1" :: "md5" :: Nil
|
||||
|
||||
val DefaultIvyConfiguration = "default"
|
||||
val DefaultIvyConfiguration = "default"
|
||||
|
||||
/** The name of the directory within the boot directory to retrieve scala to. */
|
||||
val ScalaDirectoryName = "lib"
|
||||
/** The name of the directory within the boot directory to retrieve scala to. */
|
||||
val ScalaDirectoryName = "lib"
|
||||
|
||||
/** The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory
|
||||
* containing all jars for the requested version of scala. */
|
||||
val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]"
|
||||
|
||||
def artifactType(classifier: String) =
|
||||
classifier match
|
||||
{
|
||||
case "sources" => "src"
|
||||
case "javadoc" => "doc"
|
||||
case _ => "jar"
|
||||
}
|
||||
|
||||
/** The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory
|
||||
* containing all jars for the requested version of scala. */
|
||||
def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]"
|
||||
/**
|
||||
* The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory
|
||||
* containing all jars for the requested version of scala.
|
||||
*/
|
||||
val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]"
|
||||
|
||||
val ScalaVersionPrefix = "scala-"
|
||||
def artifactType(classifier: String) =
|
||||
classifier match {
|
||||
case "sources" => "src"
|
||||
case "javadoc" => "doc"
|
||||
case _ => "jar"
|
||||
}
|
||||
|
||||
/** The name of the directory to retrieve the application and its dependencies to.*/
|
||||
def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version
|
||||
/** The name of the directory in the boot directory to put all jars for the given version of scala in.*/
|
||||
def baseDirectoryName(scalaOrg: String, scalaVersion: Option[String]) = scalaVersion match {
|
||||
case None => "other"
|
||||
case Some(sv) => (if (scalaOrg == ScalaOrg) "" else scalaOrg + ".") + ScalaVersionPrefix + sv
|
||||
}
|
||||
/**
|
||||
* The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory
|
||||
* containing all jars for the requested version of scala.
|
||||
*/
|
||||
def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]"
|
||||
|
||||
def extractScalaVersion(dir: File): Option[String] =
|
||||
{
|
||||
val name = dir.getName
|
||||
if(name.contains(ScalaVersionPrefix))
|
||||
Some(name.substring(name.lastIndexOf(ScalaVersionPrefix) + ScalaVersionPrefix.length))
|
||||
else
|
||||
None
|
||||
}
|
||||
val ScalaVersionPrefix = "scala-"
|
||||
|
||||
/** The name of the directory to retrieve the application and its dependencies to.*/
|
||||
def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version
|
||||
/** The name of the directory in the boot directory to put all jars for the given version of scala in.*/
|
||||
def baseDirectoryName(scalaOrg: String, scalaVersion: Option[String]) = scalaVersion match {
|
||||
case None => "other"
|
||||
case Some(sv) => (if (scalaOrg == ScalaOrg) "" else scalaOrg + ".") + ScalaVersionPrefix + sv
|
||||
}
|
||||
|
||||
def extractScalaVersion(dir: File): Option[String] =
|
||||
{
|
||||
val name = dir.getName
|
||||
if (name.contains(ScalaVersionPrefix))
|
||||
Some(name.substring(name.lastIndexOf(ScalaVersionPrefix) + ScalaVersionPrefix.length))
|
||||
else
|
||||
None
|
||||
}
|
||||
}
|
||||
private final class ProxyProperties(
|
||||
val envURL: String,
|
||||
val envUser: String,
|
||||
val envPassword: String,
|
||||
val sysHost: String,
|
||||
val sysPort: String,
|
||||
val sysUser: String,
|
||||
val sysPassword: String
|
||||
)
|
||||
private object ProxyProperties
|
||||
{
|
||||
val http = apply("http")
|
||||
val https = apply("https")
|
||||
val ftp = apply("ftp")
|
||||
val envURL: String,
|
||||
val envUser: String,
|
||||
val envPassword: String,
|
||||
val sysHost: String,
|
||||
val sysPort: String,
|
||||
val sysUser: String,
|
||||
val sysPassword: String)
|
||||
private object ProxyProperties {
|
||||
val http = apply("http")
|
||||
val https = apply("https")
|
||||
val ftp = apply("ftp")
|
||||
|
||||
def apply(pre: String) = new ProxyProperties(
|
||||
pre+"_proxy",
|
||||
pre+"_proxy_user",
|
||||
pre+"_proxy_pass",
|
||||
pre+".proxyHost",
|
||||
pre+".proxyPort",
|
||||
pre+".proxyUser",
|
||||
pre+".proxyPassword"
|
||||
)
|
||||
def apply(pre: String) = new ProxyProperties(
|
||||
pre + "_proxy",
|
||||
pre + "_proxy_user",
|
||||
pre + "_proxy_pass",
|
||||
pre + ".proxyHost",
|
||||
pre + ".proxyPort",
|
||||
pre + ".proxyUser",
|
||||
pre + ".proxyPassword"
|
||||
)
|
||||
}
|
||||
|
|
@ -3,20 +3,19 @@
|
|||
*/
|
||||
package xsbt.boot
|
||||
|
||||
import java.lang.ref.{Reference, SoftReference}
|
||||
import java.lang.ref.{ Reference, SoftReference }
|
||||
import java.util.HashMap
|
||||
|
||||
final class Cache[K,X,V](create: (K,X) => V)
|
||||
{
|
||||
private[this] val delegate = new HashMap[K,Reference[V]]
|
||||
def apply(k: K, x: X): V = synchronized { getFromReference(k, x, delegate.get(k)) }
|
||||
private[this] def getFromReference(k: K, x: X, existingRef: Reference[V]) = if(existingRef eq null) newEntry(k, x) else get(k, x, existingRef.get)
|
||||
private[this] def get(k: K, x: X, existing: V) = if(existing == null) newEntry(k, x) else existing
|
||||
private[this] def newEntry(k: K, x: X): V =
|
||||
{
|
||||
val v = create(k, x)
|
||||
Pre.assert(v != null, "Value for key " + k + " was null")
|
||||
delegate.put(k, new SoftReference(v))
|
||||
v
|
||||
}
|
||||
final class Cache[K, X, V](create: (K, X) => V) {
|
||||
private[this] val delegate = new HashMap[K, Reference[V]]
|
||||
def apply(k: K, x: X): V = synchronized { getFromReference(k, x, delegate.get(k)) }
|
||||
private[this] def getFromReference(k: K, x: X, existingRef: Reference[V]) = if (existingRef eq null) newEntry(k, x) else get(k, x, existingRef.get)
|
||||
private[this] def get(k: K, x: X, existing: V) = if (existing == null) newEntry(k, x) else existing
|
||||
private[this] def newEntry(k: K, x: X): V =
|
||||
{
|
||||
val v = create(k, x)
|
||||
Pre.assert(v != null, "Value for key " + k + " was null")
|
||||
delegate.put(k, new SoftReference(v))
|
||||
v
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,43 +4,36 @@
|
|||
package xsbt.boot
|
||||
|
||||
import Pre._
|
||||
import java.net.{MalformedURLException, URL}
|
||||
import java.net.{ MalformedURLException, URL }
|
||||
|
||||
object CheckProxy
|
||||
{
|
||||
def apply()
|
||||
{
|
||||
import ProxyProperties._
|
||||
for( pp <- Seq(http, https, ftp))
|
||||
setFromEnv(pp)
|
||||
}
|
||||
object CheckProxy {
|
||||
def apply() {
|
||||
import ProxyProperties._
|
||||
for (pp <- Seq(http, https, ftp))
|
||||
setFromEnv(pp)
|
||||
}
|
||||
|
||||
private[this] def setFromEnv(conf: ProxyProperties)
|
||||
{
|
||||
import conf._
|
||||
val proxyURL = System.getenv(envURL)
|
||||
if(isDefined(proxyURL) && !isPropertyDefined(sysHost) && !isPropertyDefined(sysPort))
|
||||
{
|
||||
try
|
||||
{
|
||||
val proxy = new URL(proxyURL)
|
||||
setProperty(sysHost, proxy.getHost)
|
||||
val port = proxy.getPort
|
||||
if(port >= 0)
|
||||
System.setProperty(sysPort, port.toString)
|
||||
copyEnv(envUser, sysUser)
|
||||
copyEnv(envPassword, sysPassword)
|
||||
}
|
||||
catch
|
||||
{
|
||||
case e: MalformedURLException =>
|
||||
System.out.println(s"Warning: could not parse $envURL setting: ${e.toString}")
|
||||
}
|
||||
}
|
||||
}
|
||||
private[this] def setFromEnv(conf: ProxyProperties) {
|
||||
import conf._
|
||||
val proxyURL = System.getenv(envURL)
|
||||
if (isDefined(proxyURL) && !isPropertyDefined(sysHost) && !isPropertyDefined(sysPort)) {
|
||||
try {
|
||||
val proxy = new URL(proxyURL)
|
||||
setProperty(sysHost, proxy.getHost)
|
||||
val port = proxy.getPort
|
||||
if (port >= 0)
|
||||
System.setProperty(sysPort, port.toString)
|
||||
copyEnv(envUser, sysUser)
|
||||
copyEnv(envPassword, sysPassword)
|
||||
} catch {
|
||||
case e: MalformedURLException =>
|
||||
System.out.println(s"Warning: could not parse $envURL setting: ${e.toString}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def copyEnv(envKey: String, sysKey: String) { setProperty(sysKey, System.getenv(envKey)) }
|
||||
private def setProperty(key: String, value: String) { if(value != null) System.setProperty(key, value) }
|
||||
private def isPropertyDefined(k: String) = isDefined(System.getProperty(k))
|
||||
private def isDefined(s: String) = s != null && isNonEmpty(s)
|
||||
private def copyEnv(envKey: String, sysKey: String) { setProperty(sysKey, System.getenv(envKey)) }
|
||||
private def setProperty(key: String, value: String) { if (value != null) System.setProperty(key, value) }
|
||||
private def isPropertyDefined(k: String) = isDefined(System.getProperty(k))
|
||||
private def isDefined(s: String) = s != null && isNonEmpty(s)
|
||||
}
|
||||
|
|
@ -4,8 +4,8 @@
|
|||
package xsbt.boot
|
||||
|
||||
import Pre._
|
||||
import java.io.{File, FileInputStream, InputStreamReader}
|
||||
import java.net.{MalformedURLException, URI, URL}
|
||||
import java.io.{ File, FileInputStream, InputStreamReader }
|
||||
import java.net.{ MalformedURLException, URI, URL }
|
||||
import java.util.regex.Pattern
|
||||
import scala.collection.immutable.List
|
||||
import annotation.tailrec
|
||||
|
|
@ -15,152 +15,147 @@ object ConfigurationStorageState extends Enumeration {
|
|||
val SerializedFile = value("serialized-file")
|
||||
}
|
||||
|
||||
object Configuration
|
||||
{
|
||||
import ConfigurationStorageState._
|
||||
final val SysPropPrefix = "-D"
|
||||
def parse(file: URL, baseDirectory: File) = Using( new InputStreamReader(file.openStream, "utf8") )( (new ConfigurationParser).apply )
|
||||
|
||||
/**
|
||||
* Finds the configuration location.
|
||||
*
|
||||
* Note: Configuration may be previously serialized by a launcher.
|
||||
*/
|
||||
@tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) =
|
||||
args match
|
||||
{
|
||||
case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile)
|
||||
case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile)
|
||||
case head :: tail if head.startsWith(SysPropPrefix) =>
|
||||
setProperty(head stripPrefix SysPropPrefix)
|
||||
find(tail, baseDirectory)
|
||||
case _ =>
|
||||
val propertyConfigured = System.getProperty("sbt.boot.properties")
|
||||
val url = if(propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory)
|
||||
(url, args, PropertiesFile)
|
||||
}
|
||||
def setProperty(head: String)
|
||||
{
|
||||
val keyValue = head.split("=",2)
|
||||
if(keyValue.length != 2)
|
||||
System.err.println("Warning: invalid system property '" + head + "'")
|
||||
else
|
||||
System.setProperty(keyValue(0), keyValue(1))
|
||||
}
|
||||
def configurationOnClasspath: URL =
|
||||
{
|
||||
val paths = resourcePaths(guessSbtVersion)
|
||||
paths.iterator.map(getClass.getResource).find(neNull) getOrElse
|
||||
( multiPartError("Could not finder sbt launch configuration. Searched classpath for:", paths))
|
||||
}
|
||||
def directConfiguration(path: String, baseDirectory: File): URL =
|
||||
{
|
||||
try { new URL(path) }
|
||||
catch { case _: MalformedURLException => configurationFromFile(path, baseDirectory) }
|
||||
}
|
||||
def configurationFromFile(path: String, baseDirectory: File): URL =
|
||||
{
|
||||
val pathURI = filePathURI(path)
|
||||
def resolve(against: URI): Option[URL] =
|
||||
{
|
||||
val resolved = against.resolve(pathURI) // variant that accepts String doesn't properly escape (#725)
|
||||
val exists = try { (new File(resolved)).exists } catch { case _: IllegalArgumentException => false }
|
||||
if(exists) Some(resolved.toURL) else None
|
||||
}
|
||||
val against = resolveAgainst(baseDirectory)
|
||||
// use Iterators so that resolution occurs lazily, for performance
|
||||
val resolving = against.iterator.flatMap(e => resolve(e).toList.iterator)
|
||||
if(!resolving.hasNext) multiPartError("Could not find configuration file '" + path + "'. Searched:", against)
|
||||
resolving.next()
|
||||
}
|
||||
def multiPartError[T](firstLine: String, lines: List[T]) = error( (firstLine :: lines).mkString("\n\t") )
|
||||
object Configuration {
|
||||
import ConfigurationStorageState._
|
||||
final val SysPropPrefix = "-D"
|
||||
def parse(file: URL, baseDirectory: File) = Using(new InputStreamReader(file.openStream, "utf8"))((new ConfigurationParser).apply)
|
||||
|
||||
def UnspecifiedVersionPart = "Unspecified"
|
||||
def DefaultVersionPart = "Default"
|
||||
def DefaultBuildProperties = "project/build.properties"
|
||||
def SbtVersionProperty = "sbt.version"
|
||||
val ConfigurationName = "sbt.boot.properties"
|
||||
val JarBasePath = "/sbt/"
|
||||
def userConfigurationPath = "/" + ConfigurationName
|
||||
def defaultConfigurationPath = JarBasePath + ConfigurationName
|
||||
val baseResourcePaths: List[String] = userConfigurationPath :: defaultConfigurationPath :: Nil
|
||||
def resourcePaths(sbtVersion: Option[String]): List[String] =
|
||||
versionParts(sbtVersion) flatMap { part =>
|
||||
baseResourcePaths map { base =>
|
||||
base + part
|
||||
}
|
||||
}
|
||||
def fallbackParts: List[String] = "" :: Nil
|
||||
def versionParts(version: Option[String]): List[String] =
|
||||
version match {
|
||||
case None => UnspecifiedVersionPart :: fallbackParts
|
||||
case Some(v) => versionParts(v)
|
||||
}
|
||||
def versionParts(version: String): List[String] =
|
||||
{
|
||||
val pattern = Pattern.compile("""(\d+)(\.\d+)(\.\d+)(-.*)?""")
|
||||
val m = pattern.matcher(version)
|
||||
if(m.matches())
|
||||
subPartsIndices flatMap { is => fullMatchOnly(is.map(m.group)) }
|
||||
else
|
||||
noMatchParts
|
||||
}
|
||||
def noMatchParts: List[String] = DefaultVersionPart :: fallbackParts
|
||||
private[this] def fullMatchOnly(groups: List[String]): Option[String] =
|
||||
if(groups.forall(neNull)) Some(groups.mkString) else None
|
||||
/**
|
||||
* Finds the configuration location.
|
||||
*
|
||||
* Note: Configuration may be previously serialized by a launcher.
|
||||
*/
|
||||
@tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) =
|
||||
args match {
|
||||
case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile)
|
||||
case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile)
|
||||
case head :: tail if head.startsWith(SysPropPrefix) =>
|
||||
setProperty(head stripPrefix SysPropPrefix)
|
||||
find(tail, baseDirectory)
|
||||
case _ =>
|
||||
val propertyConfigured = System.getProperty("sbt.boot.properties")
|
||||
val url = if (propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory)
|
||||
(url, args, PropertiesFile)
|
||||
}
|
||||
def setProperty(head: String) {
|
||||
val keyValue = head.split("=", 2)
|
||||
if (keyValue.length != 2)
|
||||
System.err.println("Warning: invalid system property '" + head + "'")
|
||||
else
|
||||
System.setProperty(keyValue(0), keyValue(1))
|
||||
}
|
||||
def configurationOnClasspath: URL =
|
||||
{
|
||||
val paths = resourcePaths(guessSbtVersion)
|
||||
paths.iterator.map(getClass.getResource).find(neNull) getOrElse
|
||||
(multiPartError("Could not finder sbt launch configuration. Searched classpath for:", paths))
|
||||
}
|
||||
def directConfiguration(path: String, baseDirectory: File): URL =
|
||||
{
|
||||
try { new URL(path) }
|
||||
catch { case _: MalformedURLException => configurationFromFile(path, baseDirectory) }
|
||||
}
|
||||
def configurationFromFile(path: String, baseDirectory: File): URL =
|
||||
{
|
||||
val pathURI = filePathURI(path)
|
||||
def resolve(against: URI): Option[URL] =
|
||||
{
|
||||
val resolved = against.resolve(pathURI) // variant that accepts String doesn't properly escape (#725)
|
||||
val exists = try { (new File(resolved)).exists } catch { case _: IllegalArgumentException => false }
|
||||
if (exists) Some(resolved.toURL) else None
|
||||
}
|
||||
val against = resolveAgainst(baseDirectory)
|
||||
// use Iterators so that resolution occurs lazily, for performance
|
||||
val resolving = against.iterator.flatMap(e => resolve(e).toList.iterator)
|
||||
if (!resolving.hasNext) multiPartError("Could not find configuration file '" + path + "'. Searched:", against)
|
||||
resolving.next()
|
||||
}
|
||||
def multiPartError[T](firstLine: String, lines: List[T]) = error((firstLine :: lines).mkString("\n\t"))
|
||||
|
||||
private[this] def subPartsIndices =
|
||||
(1 :: 2 :: 3 :: 4 :: Nil) ::
|
||||
(1 :: 2 :: 3 :: Nil) ::
|
||||
(1 :: 2 :: Nil) ::
|
||||
(Nil) ::
|
||||
Nil
|
||||
def UnspecifiedVersionPart = "Unspecified"
|
||||
def DefaultVersionPart = "Default"
|
||||
def DefaultBuildProperties = "project/build.properties"
|
||||
def SbtVersionProperty = "sbt.version"
|
||||
val ConfigurationName = "sbt.boot.properties"
|
||||
val JarBasePath = "/sbt/"
|
||||
def userConfigurationPath = "/" + ConfigurationName
|
||||
def defaultConfigurationPath = JarBasePath + ConfigurationName
|
||||
val baseResourcePaths: List[String] = userConfigurationPath :: defaultConfigurationPath :: Nil
|
||||
def resourcePaths(sbtVersion: Option[String]): List[String] =
|
||||
versionParts(sbtVersion) flatMap { part =>
|
||||
baseResourcePaths map { base =>
|
||||
base + part
|
||||
}
|
||||
}
|
||||
def fallbackParts: List[String] = "" :: Nil
|
||||
def versionParts(version: Option[String]): List[String] =
|
||||
version match {
|
||||
case None => UnspecifiedVersionPart :: fallbackParts
|
||||
case Some(v) => versionParts(v)
|
||||
}
|
||||
def versionParts(version: String): List[String] =
|
||||
{
|
||||
val pattern = Pattern.compile("""(\d+)(\.\d+)(\.\d+)(-.*)?""")
|
||||
val m = pattern.matcher(version)
|
||||
if (m.matches())
|
||||
subPartsIndices flatMap { is => fullMatchOnly(is.map(m.group)) }
|
||||
else
|
||||
noMatchParts
|
||||
}
|
||||
def noMatchParts: List[String] = DefaultVersionPart :: fallbackParts
|
||||
private[this] def fullMatchOnly(groups: List[String]): Option[String] =
|
||||
if (groups.forall(neNull)) Some(groups.mkString) else None
|
||||
|
||||
// the location of project/build.properties and the name of the property within that file
|
||||
// that configures the sbt version is configured in sbt.boot.properties.
|
||||
// We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself
|
||||
def guessSbtVersion: Option[String] =
|
||||
{
|
||||
val props = Pre.readProperties(new File(DefaultBuildProperties))
|
||||
Option(props.getProperty(SbtVersionProperty))
|
||||
}
|
||||
private[this] def subPartsIndices =
|
||||
(1 :: 2 :: 3 :: 4 :: Nil) ::
|
||||
(1 :: 2 :: 3 :: Nil) ::
|
||||
(1 :: 2 :: Nil) ::
|
||||
(Nil) ::
|
||||
Nil
|
||||
|
||||
def resolveAgainst(baseDirectory: File): List[URI] =
|
||||
directoryURI(baseDirectory) ::
|
||||
directoryURI(new File(System.getProperty("user.home"))) ::
|
||||
toDirectory(classLocation(getClass).toURI) ::
|
||||
Nil
|
||||
// the location of project/build.properties and the name of the property within that file
|
||||
// that configures the sbt version is configured in sbt.boot.properties.
|
||||
// We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself
|
||||
def guessSbtVersion: Option[String] =
|
||||
{
|
||||
val props = Pre.readProperties(new File(DefaultBuildProperties))
|
||||
Option(props.getProperty(SbtVersionProperty))
|
||||
}
|
||||
|
||||
def classLocation(cl: Class[_]): URL =
|
||||
{
|
||||
val codeSource = cl.getProtectionDomain.getCodeSource
|
||||
if(codeSource == null) error("No class location for " + cl)
|
||||
else codeSource.getLocation
|
||||
}
|
||||
// single-arg constructor doesn't properly escape
|
||||
def filePathURI(path: String): URI = {
|
||||
if(path.startsWith("file:")) new URI(path)
|
||||
else {
|
||||
val f = new File(path)
|
||||
new URI(if(f.isAbsolute) "file" else null, path, null)
|
||||
}
|
||||
}
|
||||
def directoryURI(dir: File): URI = directoryURI(dir.toURI)
|
||||
def directoryURI(uri: URI): URI =
|
||||
{
|
||||
assert(uri.isAbsolute)
|
||||
val str = uri.toASCIIString
|
||||
val dirStr = if(str.endsWith("/")) str else str + "/"
|
||||
(new URI(dirStr)).normalize
|
||||
}
|
||||
def resolveAgainst(baseDirectory: File): List[URI] =
|
||||
directoryURI(baseDirectory) ::
|
||||
directoryURI(new File(System.getProperty("user.home"))) ::
|
||||
toDirectory(classLocation(getClass).toURI) ::
|
||||
Nil
|
||||
|
||||
def toDirectory(uri: URI): URI =
|
||||
try
|
||||
{
|
||||
val file = new File(uri)
|
||||
val newFile = if(file.isFile) file.getParentFile else file
|
||||
directoryURI(newFile)
|
||||
}
|
||||
catch { case _: Exception => uri }
|
||||
private[this] def neNull: AnyRef => Boolean = _ ne null
|
||||
def classLocation(cl: Class[_]): URL =
|
||||
{
|
||||
val codeSource = cl.getProtectionDomain.getCodeSource
|
||||
if (codeSource == null) error("No class location for " + cl)
|
||||
else codeSource.getLocation
|
||||
}
|
||||
// single-arg constructor doesn't properly escape
|
||||
def filePathURI(path: String): URI = {
|
||||
if (path.startsWith("file:")) new URI(path)
|
||||
else {
|
||||
val f = new File(path)
|
||||
new URI(if (f.isAbsolute) "file" else null, path, null)
|
||||
}
|
||||
}
|
||||
def directoryURI(dir: File): URI = directoryURI(dir.toURI)
|
||||
def directoryURI(uri: URI): URI =
|
||||
{
|
||||
assert(uri.isAbsolute)
|
||||
val str = uri.toASCIIString
|
||||
val dirStr = if (str.endsWith("/")) str else str + "/"
|
||||
(new URI(dirStr)).normalize
|
||||
}
|
||||
|
||||
def toDirectory(uri: URI): URI =
|
||||
try {
|
||||
val file = new File(uri)
|
||||
val newFile = if (file.isFile) file.getParentFile else file
|
||||
directoryURI(newFile)
|
||||
} catch { case _: Exception => uri }
|
||||
private[this] def neNull: AnyRef => Boolean = _ ne null
|
||||
}
|
||||
|
|
@ -3,263 +3,255 @@
|
|||
*/
|
||||
package xsbt.boot
|
||||
|
||||
|
||||
import Pre._
|
||||
import ConfigurationParser._
|
||||
import java.lang.Character.isWhitespace
|
||||
import java.io.{BufferedReader, File, FileInputStream, InputStreamReader, Reader, StringReader}
|
||||
import java.net.{MalformedURLException, URL}
|
||||
import java.util.regex.{Matcher,Pattern}
|
||||
import java.io.{ BufferedReader, File, FileInputStream, InputStreamReader, Reader, StringReader }
|
||||
import java.net.{ MalformedURLException, URL }
|
||||
import java.util.regex.{ Matcher, Pattern }
|
||||
import Matcher.quoteReplacement
|
||||
import scala.collection.immutable.List
|
||||
|
||||
object ConfigurationParser
|
||||
{
|
||||
def trim(s: Array[String]) = s.map(_.trim).toList
|
||||
def ids(value: String) = trim(substituteVariables(value).split(",")).filter(isNonEmpty)
|
||||
object ConfigurationParser {
|
||||
def trim(s: Array[String]) = s.map(_.trim).toList
|
||||
def ids(value: String) = trim(substituteVariables(value).split(",")).filter(isNonEmpty)
|
||||
|
||||
private[this] lazy val VarPattern = Pattern.compile("""\$\{([\w.]+)(\-(.*))?\}""")
|
||||
def substituteVariables(s: String): String = if(s.indexOf('$') >= 0) substituteVariables0(s) else s
|
||||
// scala.util.Regex brought in 30kB, so we code it explicitly
|
||||
def substituteVariables0(s: String): String =
|
||||
{
|
||||
val m = VarPattern.matcher(s)
|
||||
val b = new StringBuffer
|
||||
while(m.find())
|
||||
{
|
||||
val key = m.group(1)
|
||||
val defined = System.getProperty(key)
|
||||
val value =
|
||||
if(defined ne null)
|
||||
defined
|
||||
else
|
||||
{
|
||||
val default = m.group(3)
|
||||
if(default eq null) m.group() else substituteVariables(default)
|
||||
}
|
||||
m.appendReplacement(b, quoteReplacement(value))
|
||||
}
|
||||
m.appendTail(b)
|
||||
b.toString
|
||||
}
|
||||
|
||||
implicit val readIDs = ids _
|
||||
private[this] lazy val VarPattern = Pattern.compile("""\$\{([\w.]+)(\-(.*))?\}""")
|
||||
def substituteVariables(s: String): String = if (s.indexOf('$') >= 0) substituteVariables0(s) else s
|
||||
// scala.util.Regex brought in 30kB, so we code it explicitly
|
||||
def substituteVariables0(s: String): String =
|
||||
{
|
||||
val m = VarPattern.matcher(s)
|
||||
val b = new StringBuffer
|
||||
while (m.find()) {
|
||||
val key = m.group(1)
|
||||
val defined = System.getProperty(key)
|
||||
val value =
|
||||
if (defined ne null)
|
||||
defined
|
||||
else {
|
||||
val default = m.group(3)
|
||||
if (default eq null) m.group() else substituteVariables(default)
|
||||
}
|
||||
m.appendReplacement(b, quoteReplacement(value))
|
||||
}
|
||||
m.appendTail(b)
|
||||
b.toString
|
||||
}
|
||||
|
||||
implicit val readIDs = ids _
|
||||
}
|
||||
class ConfigurationParser
|
||||
{
|
||||
def apply(file: File): LaunchConfiguration = Using(newReader(file))(apply)
|
||||
def apply(s: String): LaunchConfiguration = Using(new StringReader(s))(apply)
|
||||
def apply(reader: Reader): LaunchConfiguration = Using(new BufferedReader(reader))(apply)
|
||||
private def apply(in: BufferedReader): LaunchConfiguration =
|
||||
processSections(processLines(readLine(in, Nil, 0)))
|
||||
private final def readLine(in: BufferedReader, accum: List[Line], index: Int): List[Line] =
|
||||
in.readLine match {
|
||||
case null => accum.reverse
|
||||
case line => readLine(in, ParseLine(line,index) ::: accum, index+1)
|
||||
}
|
||||
private def newReader(file: File) = new InputStreamReader(new FileInputStream(file), "UTF-8")
|
||||
def readRepositoriesConfig(file: File): List[Repository.Repository] =
|
||||
Using(newReader(file))(readRepositoriesConfig)
|
||||
def readRepositoriesConfig(reader: Reader): List[Repository.Repository] =
|
||||
Using(new BufferedReader(reader))(readRepositoriesConfig)
|
||||
def readRepositoriesConfig(s: String): List[Repository.Repository] =
|
||||
Using(new StringReader(s))(readRepositoriesConfig)
|
||||
private def readRepositoriesConfig(in: BufferedReader): List[Repository.Repository] =
|
||||
processRepositoriesConfig(processLines(readLine(in, Nil, 0)))
|
||||
def processRepositoriesConfig(sections: SectionMap): List[Repository.Repository] =
|
||||
processSection(sections, "repositories", getRepositories)._1
|
||||
// section -> configuration instance processing
|
||||
def processSections(sections: SectionMap): LaunchConfiguration =
|
||||
{
|
||||
val ((scalaVersion, scalaClassifiers), m1) = processSection(sections, "scala", getScala)
|
||||
val ((app, appClassifiers), m2) = processSection(m1, "app", getApplication)
|
||||
val (defaultRepositories, m3) = processSection(m2, "repositories", getRepositories)
|
||||
val (boot, m4) = processSection(m3, "boot", getBoot)
|
||||
val (logging, m5) = processSection(m4, "log", getLogging)
|
||||
val (properties, m6) = processSection(m5, "app-properties", getAppProperties)
|
||||
val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy)
|
||||
val (serverOptions, m8) = processSection(m7, "server", getServer)
|
||||
check(m8, "section")
|
||||
val classifiers = Classifiers(scalaClassifiers, appClassifiers)
|
||||
val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories
|
||||
val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos)
|
||||
|
||||
// TODO - Read server properties...
|
||||
new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions)
|
||||
}
|
||||
def getScala(m: LabelMap) =
|
||||
{
|
||||
val (scalaVersion, m1) = getVersion(m, "Scala version", "scala.version")
|
||||
val (scalaClassifiers, m2) = getClassifiers(m1, "Scala classifiers")
|
||||
check(m2, "label")
|
||||
(scalaVersion, scalaClassifiers)
|
||||
}
|
||||
def getClassifiers(m: LabelMap, label: String): (Value[List[String]], LabelMap) =
|
||||
process(m, "classifiers", processClassifiers(label))
|
||||
def processClassifiers(label: String)(value: Option[String]): Value[List[String]] =
|
||||
value.map(readValue[List[String]](label)) getOrElse new Explicit(Nil)
|
||||
|
||||
def getVersion(m: LabelMap, label: String, defaultName: String): (Value[String], LabelMap) = process(m, "version", processVersion(label, defaultName))
|
||||
def processVersion(label: String, defaultName: String)(value: Option[String]): Value[String] =
|
||||
value.map(readValue[String](label)).getOrElse(new Implicit(defaultName, None))
|
||||
|
||||
def readValue[T](label: String)(implicit read: String => T): String => Value[T] = value0 =>
|
||||
{
|
||||
val value = substituteVariables(value0)
|
||||
if(isEmpty(value)) error(label + " cannot be empty (omit declaration to use the default)")
|
||||
try { parsePropertyValue(label, value)(Value.readImplied[T]) }
|
||||
catch { case e: BootException => new Explicit(read(value)) }
|
||||
}
|
||||
def processSection[T](sections: SectionMap, name: String, f: LabelMap => T) =
|
||||
process[String,LabelMap,T](sections, name, m => f(m default(x => None)))
|
||||
def process[K,V,T](sections: ListMap[K,V], name: K, f: V => T): (T, ListMap[K,V]) = ( f(sections(name)), sections - name)
|
||||
def check(map: ListMap[String, _], label: String): Unit = if(map.isEmpty) () else error(map.keys.mkString("Invalid " + label + "(s): ", ",",""))
|
||||
def check[T](label: String, pair: (T, ListMap[String, _])): T = { check(pair._2, label); pair._1 }
|
||||
def id(map: LabelMap, name: String, default: String): (String, LabelMap) =
|
||||
(substituteVariables(orElse(getOrNone(map, name), default)), map - name)
|
||||
def getOrNone[K,V](map: ListMap[K,Option[V]], k: K) = orElse(map.get(k), None)
|
||||
def ids(map: LabelMap, name: String, default: List[String]) =
|
||||
{
|
||||
val result = map(name) map ConfigurationParser.ids
|
||||
(orElse(result, default), map - name)
|
||||
}
|
||||
def bool(map: LabelMap, name: String, default: Boolean): (Boolean, LabelMap) =
|
||||
{
|
||||
val (b, m) = id(map, name, default.toString)
|
||||
(toBoolean(b), m)
|
||||
}
|
||||
|
||||
def toFiles(paths: List[String]): List[File] = paths.map(toFile)
|
||||
def toFile(path: String): File = new File(substituteVariables(path).replace('/', File.separatorChar))// if the path is relative, it will be resolved by Launch later
|
||||
def file(map: LabelMap, name: String, default: File): (File, LabelMap) =
|
||||
(orElse(getOrNone(map, name).map(toFile), default), map - name)
|
||||
def optfile(map: LabelMap, name: String): (Option[File], LabelMap) =
|
||||
(getOrNone(map, name).map(toFile), map - name)
|
||||
def getIvy(m: LabelMap): (Option[File], List[String], Boolean, Option[File]) =
|
||||
{
|
||||
val (ivyHome, m1) = optfile(m, "ivy-home")
|
||||
val (checksums, m2) = ids(m1, "checksums", BootConfiguration.DefaultChecksums)
|
||||
val (overrideRepos, m3) = bool(m2, "override-build-repos", false)
|
||||
val (repoConfig, m4) = optfile(m3, "repository-config")
|
||||
check(m4, "label")
|
||||
(ivyHome, checksums, overrideRepos, repoConfig filter (_.exists))
|
||||
}
|
||||
def getBoot(m: LabelMap): BootSetup =
|
||||
{
|
||||
val (dir, m1) = file(m, "directory", toFile("project/boot"))
|
||||
val (props, m2) = file(m1, "properties", toFile("project/build.properties"))
|
||||
val (search, m3) = getSearch(m2, props)
|
||||
val (enableQuick, m4) = bool(m3, "quick-option", false)
|
||||
val (promptFill, m5) = bool(m4, "prompt-fill", false)
|
||||
val (promptCreate, m6) = id(m5, "prompt-create", "")
|
||||
val (lock, m7) = bool(m6, "lock", true)
|
||||
check(m7, "label")
|
||||
BootSetup(dir, lock, props, search, promptCreate, enableQuick, promptFill)
|
||||
}
|
||||
def getLogging(m: LabelMap): Logging = check("label", process(m, "level", getLevel))
|
||||
def getLevel(m: Option[String]) = m.map(LogLevel.apply).getOrElse(new Logging(LogLevel.Info))
|
||||
def getSearch(m: LabelMap, defaultPath: File): (Search, LabelMap) =
|
||||
ids(m, "search", Nil) match
|
||||
{
|
||||
case (Nil, newM) => (Search.none, newM)
|
||||
case (tpe :: Nil, newM) => (Search(tpe, List(defaultPath)), newM)
|
||||
case (tpe :: paths, newM) => (Search(tpe, toFiles(paths)), newM)
|
||||
}
|
||||
class ConfigurationParser {
|
||||
def apply(file: File): LaunchConfiguration = Using(newReader(file))(apply)
|
||||
def apply(s: String): LaunchConfiguration = Using(new StringReader(s))(apply)
|
||||
def apply(reader: Reader): LaunchConfiguration = Using(new BufferedReader(reader))(apply)
|
||||
private def apply(in: BufferedReader): LaunchConfiguration =
|
||||
processSections(processLines(readLine(in, Nil, 0)))
|
||||
private final def readLine(in: BufferedReader, accum: List[Line], index: Int): List[Line] =
|
||||
in.readLine match {
|
||||
case null => accum.reverse
|
||||
case line => readLine(in, ParseLine(line, index) ::: accum, index + 1)
|
||||
}
|
||||
private def newReader(file: File) = new InputStreamReader(new FileInputStream(file), "UTF-8")
|
||||
def readRepositoriesConfig(file: File): List[Repository.Repository] =
|
||||
Using(newReader(file))(readRepositoriesConfig)
|
||||
def readRepositoriesConfig(reader: Reader): List[Repository.Repository] =
|
||||
Using(new BufferedReader(reader))(readRepositoriesConfig)
|
||||
def readRepositoriesConfig(s: String): List[Repository.Repository] =
|
||||
Using(new StringReader(s))(readRepositoriesConfig)
|
||||
private def readRepositoriesConfig(in: BufferedReader): List[Repository.Repository] =
|
||||
processRepositoriesConfig(processLines(readLine(in, Nil, 0)))
|
||||
def processRepositoriesConfig(sections: SectionMap): List[Repository.Repository] =
|
||||
processSection(sections, "repositories", getRepositories)._1
|
||||
// section -> configuration instance processing
|
||||
def processSections(sections: SectionMap): LaunchConfiguration =
|
||||
{
|
||||
val ((scalaVersion, scalaClassifiers), m1) = processSection(sections, "scala", getScala)
|
||||
val ((app, appClassifiers), m2) = processSection(m1, "app", getApplication)
|
||||
val (defaultRepositories, m3) = processSection(m2, "repositories", getRepositories)
|
||||
val (boot, m4) = processSection(m3, "boot", getBoot)
|
||||
val (logging, m5) = processSection(m4, "log", getLogging)
|
||||
val (properties, m6) = processSection(m5, "app-properties", getAppProperties)
|
||||
val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy)
|
||||
val (serverOptions, m8) = processSection(m7, "server", getServer)
|
||||
check(m8, "section")
|
||||
val classifiers = Classifiers(scalaClassifiers, appClassifiers)
|
||||
val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories
|
||||
val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos)
|
||||
|
||||
def getApplication(m: LabelMap): (Application, Value[List[String]]) =
|
||||
{
|
||||
val (org, m1) = id(m, "org", BootConfiguration.SbtOrg)
|
||||
val (name, m2) = id(m1, "name", "sbt")
|
||||
val (rev, m3) = getVersion(m2, name + " version", name + ".version")
|
||||
val (main, m4) = id(m3, "class", "xsbt.Main")
|
||||
val (components, m5) = ids(m4, "components", List("default"))
|
||||
val (crossVersioned, m6) = id(m5, "cross-versioned", CrossVersionUtil.binaryString)
|
||||
val (resources, m7) = ids(m6, "resources", Nil)
|
||||
val (classifiers, m8) = getClassifiers(m7, "Application classifiers")
|
||||
check(m8, "label")
|
||||
val classpathExtra = toArray(toFiles(resources))
|
||||
val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra)
|
||||
(app, classifiers)
|
||||
}
|
||||
def getServer(m: LabelMap): (Option[ServerConfiguration]) =
|
||||
{
|
||||
val (lock, m1) = optfile(m, "lock")
|
||||
// TODO - JVM args
|
||||
val (args, m2) = optfile(m1, "jvmargs")
|
||||
val (props, m3) = optfile(m2, "jvmprops")
|
||||
lock map { file =>
|
||||
ServerConfiguration(file, args, props)
|
||||
}
|
||||
}
|
||||
def getRepositories(m: LabelMap): List[Repository.Repository] =
|
||||
{
|
||||
import Repository.{Ivy, Maven, Predefined}
|
||||
val BootOnly = "bootOnly"
|
||||
val MvnComp = "mavenCompatible"
|
||||
val DescriptorOptional = "descriptorOptional"
|
||||
val DontCheckConsistency = "skipConsistencyCheck"
|
||||
val OptSet = Set(BootOnly, MvnComp, DescriptorOptional, DontCheckConsistency)
|
||||
m.toList.map {
|
||||
case (key, None) => Predefined(key)
|
||||
case (key, Some(BootOnly)) => Predefined(key, true)
|
||||
case (key, Some(value)) =>
|
||||
val r = trim(substituteVariables(value).split(",",7))
|
||||
val url = try { new URL(r(0)) } catch { case e: MalformedURLException => error("Invalid URL specified for '" + key + "': " + e.getMessage) }
|
||||
val (optionPart, patterns) = r.tail.partition (OptSet.contains(_))
|
||||
val options = (optionPart.contains(BootOnly), optionPart.contains(MvnComp), optionPart.contains(DescriptorOptional), optionPart.contains(DontCheckConsistency))
|
||||
(patterns, options) match {
|
||||
case (both :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, both, both, mavenCompatible=mc, bootOnly=bo, descriptorOptional=dso, skipConsistencyCheck=cc)
|
||||
case (ivy :: art :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, ivy, art, mavenCompatible=mc, bootOnly=bo, descriptorOptional=dso, skipConsistencyCheck=cc)
|
||||
case (Nil, (true, false, false, cc)) => Maven(key, url, bootOnly=true)
|
||||
case (Nil, (false, false, false, false)) => Maven(key, url)
|
||||
case _ => error("Could not parse %s: %s".format(key, value))
|
||||
}
|
||||
}
|
||||
}
|
||||
def getAppProperties(m: LabelMap): List[AppProperty] =
|
||||
for((name, Some(value)) <- m.toList) yield
|
||||
{
|
||||
val map = ListMap( trim(value.split(",")).map(parsePropertyDefinition(name)) : _*)
|
||||
AppProperty(name)(map.get("quick"), map.get("new"), map.get("fill"))
|
||||
}
|
||||
def parsePropertyDefinition(name: String)(value: String) = value.split("=",2) match {
|
||||
case Array(mode,value) => (mode, parsePropertyValue(name, value)(defineProperty(name)))
|
||||
case x => error("Invalid property definition '" + x + "' for property '" + name + "'")
|
||||
}
|
||||
def defineProperty(name: String)(action: String, requiredArg: String, optionalArg: Option[String]) =
|
||||
action match
|
||||
{
|
||||
case "prompt" => new PromptProperty(requiredArg, optionalArg)
|
||||
case "set" => new SetProperty(requiredArg)
|
||||
case _ => error("Unknown action '" + action + "' for property '" + name + "'")
|
||||
}
|
||||
private[this] lazy val propertyPattern = Pattern.compile("""(.+)\((.*)\)(?:\[(.*)\])?""") // examples: prompt(Version)[1.0] or set(1.0)
|
||||
def parsePropertyValue[T](name: String, definition: String)(f: (String, String, Option[String]) => T): T =
|
||||
{
|
||||
val m = propertyPattern.matcher(definition)
|
||||
if(!m.matches()) error("Invalid property definition '" + definition + "' for property '" + name + "'")
|
||||
val optionalArg = m.group(3)
|
||||
f(m.group(1), m.group(2), if(optionalArg eq null) None else Some(optionalArg))
|
||||
}
|
||||
// TODO - Read server properties...
|
||||
new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions)
|
||||
}
|
||||
def getScala(m: LabelMap) =
|
||||
{
|
||||
val (scalaVersion, m1) = getVersion(m, "Scala version", "scala.version")
|
||||
val (scalaClassifiers, m2) = getClassifiers(m1, "Scala classifiers")
|
||||
check(m2, "label")
|
||||
(scalaVersion, scalaClassifiers)
|
||||
}
|
||||
def getClassifiers(m: LabelMap, label: String): (Value[List[String]], LabelMap) =
|
||||
process(m, "classifiers", processClassifiers(label))
|
||||
def processClassifiers(label: String)(value: Option[String]): Value[List[String]] =
|
||||
value.map(readValue[List[String]](label)) getOrElse new Explicit(Nil)
|
||||
|
||||
type LabelMap = ListMap[String, Option[String]]
|
||||
// section-name -> label -> value
|
||||
type SectionMap = ListMap[String, LabelMap]
|
||||
def processLines(lines: List[Line]): SectionMap =
|
||||
{
|
||||
type State = (SectionMap, Option[String])
|
||||
val s: State =
|
||||
( ( (ListMap.empty.default(x => ListMap.empty[String,Option[String]]), None): State) /: lines ) {
|
||||
case (x, Comment) => x
|
||||
case ( (map, _), s: Section ) => (map, Some(s.name))
|
||||
case ( (_, None), l: Labeled ) => error("Label " + l.label + " is not in a section")
|
||||
case ( (map, s @ Some(section)), l: Labeled ) =>
|
||||
val sMap = map(section)
|
||||
if( sMap.contains(l.label) ) error("Duplicate label '" + l.label + "' in section '" + section + "'")
|
||||
else ( map(section) = (sMap(l.label) = l.value), s )
|
||||
}
|
||||
s._1
|
||||
}
|
||||
def getVersion(m: LabelMap, label: String, defaultName: String): (Value[String], LabelMap) = process(m, "version", processVersion(label, defaultName))
|
||||
def processVersion(label: String, defaultName: String)(value: Option[String]): Value[String] =
|
||||
value.map(readValue[String](label)).getOrElse(new Implicit(defaultName, None))
|
||||
|
||||
def readValue[T](label: String)(implicit read: String => T): String => Value[T] = value0 =>
|
||||
{
|
||||
val value = substituteVariables(value0)
|
||||
if (isEmpty(value)) error(label + " cannot be empty (omit declaration to use the default)")
|
||||
try { parsePropertyValue(label, value)(Value.readImplied[T]) }
|
||||
catch { case e: BootException => new Explicit(read(value)) }
|
||||
}
|
||||
def processSection[T](sections: SectionMap, name: String, f: LabelMap => T) =
|
||||
process[String, LabelMap, T](sections, name, m => f(m default (x => None)))
|
||||
def process[K, V, T](sections: ListMap[K, V], name: K, f: V => T): (T, ListMap[K, V]) = (f(sections(name)), sections - name)
|
||||
def check(map: ListMap[String, _], label: String): Unit = if (map.isEmpty) () else error(map.keys.mkString("Invalid " + label + "(s): ", ",", ""))
|
||||
def check[T](label: String, pair: (T, ListMap[String, _])): T = { check(pair._2, label); pair._1 }
|
||||
def id(map: LabelMap, name: String, default: String): (String, LabelMap) =
|
||||
(substituteVariables(orElse(getOrNone(map, name), default)), map - name)
|
||||
def getOrNone[K, V](map: ListMap[K, Option[V]], k: K) = orElse(map.get(k), None)
|
||||
def ids(map: LabelMap, name: String, default: List[String]) =
|
||||
{
|
||||
val result = map(name) map ConfigurationParser.ids
|
||||
(orElse(result, default), map - name)
|
||||
}
|
||||
def bool(map: LabelMap, name: String, default: Boolean): (Boolean, LabelMap) =
|
||||
{
|
||||
val (b, m) = id(map, name, default.toString)
|
||||
(toBoolean(b), m)
|
||||
}
|
||||
|
||||
def toFiles(paths: List[String]): List[File] = paths.map(toFile)
|
||||
def toFile(path: String): File = new File(substituteVariables(path).replace('/', File.separatorChar)) // if the path is relative, it will be resolved by Launch later
|
||||
def file(map: LabelMap, name: String, default: File): (File, LabelMap) =
|
||||
(orElse(getOrNone(map, name).map(toFile), default), map - name)
|
||||
def optfile(map: LabelMap, name: String): (Option[File], LabelMap) =
|
||||
(getOrNone(map, name).map(toFile), map - name)
|
||||
def getIvy(m: LabelMap): (Option[File], List[String], Boolean, Option[File]) =
|
||||
{
|
||||
val (ivyHome, m1) = optfile(m, "ivy-home")
|
||||
val (checksums, m2) = ids(m1, "checksums", BootConfiguration.DefaultChecksums)
|
||||
val (overrideRepos, m3) = bool(m2, "override-build-repos", false)
|
||||
val (repoConfig, m4) = optfile(m3, "repository-config")
|
||||
check(m4, "label")
|
||||
(ivyHome, checksums, overrideRepos, repoConfig filter (_.exists))
|
||||
}
|
||||
def getBoot(m: LabelMap): BootSetup =
|
||||
{
|
||||
val (dir, m1) = file(m, "directory", toFile("project/boot"))
|
||||
val (props, m2) = file(m1, "properties", toFile("project/build.properties"))
|
||||
val (search, m3) = getSearch(m2, props)
|
||||
val (enableQuick, m4) = bool(m3, "quick-option", false)
|
||||
val (promptFill, m5) = bool(m4, "prompt-fill", false)
|
||||
val (promptCreate, m6) = id(m5, "prompt-create", "")
|
||||
val (lock, m7) = bool(m6, "lock", true)
|
||||
check(m7, "label")
|
||||
BootSetup(dir, lock, props, search, promptCreate, enableQuick, promptFill)
|
||||
}
|
||||
def getLogging(m: LabelMap): Logging = check("label", process(m, "level", getLevel))
|
||||
def getLevel(m: Option[String]) = m.map(LogLevel.apply).getOrElse(new Logging(LogLevel.Info))
|
||||
def getSearch(m: LabelMap, defaultPath: File): (Search, LabelMap) =
|
||||
ids(m, "search", Nil) match {
|
||||
case (Nil, newM) => (Search.none, newM)
|
||||
case (tpe :: Nil, newM) => (Search(tpe, List(defaultPath)), newM)
|
||||
case (tpe :: paths, newM) => (Search(tpe, toFiles(paths)), newM)
|
||||
}
|
||||
|
||||
def getApplication(m: LabelMap): (Application, Value[List[String]]) =
|
||||
{
|
||||
val (org, m1) = id(m, "org", BootConfiguration.SbtOrg)
|
||||
val (name, m2) = id(m1, "name", "sbt")
|
||||
val (rev, m3) = getVersion(m2, name + " version", name + ".version")
|
||||
val (main, m4) = id(m3, "class", "xsbt.Main")
|
||||
val (components, m5) = ids(m4, "components", List("default"))
|
||||
val (crossVersioned, m6) = id(m5, "cross-versioned", CrossVersionUtil.binaryString)
|
||||
val (resources, m7) = ids(m6, "resources", Nil)
|
||||
val (classifiers, m8) = getClassifiers(m7, "Application classifiers")
|
||||
check(m8, "label")
|
||||
val classpathExtra = toArray(toFiles(resources))
|
||||
val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra)
|
||||
(app, classifiers)
|
||||
}
|
||||
def getServer(m: LabelMap): (Option[ServerConfiguration]) =
|
||||
{
|
||||
val (lock, m1) = optfile(m, "lock")
|
||||
// TODO - JVM args
|
||||
val (args, m2) = optfile(m1, "jvmargs")
|
||||
val (props, m3) = optfile(m2, "jvmprops")
|
||||
lock map { file =>
|
||||
ServerConfiguration(file, args, props)
|
||||
}
|
||||
}
|
||||
def getRepositories(m: LabelMap): List[Repository.Repository] =
|
||||
{
|
||||
import Repository.{ Ivy, Maven, Predefined }
|
||||
val BootOnly = "bootOnly"
|
||||
val MvnComp = "mavenCompatible"
|
||||
val DescriptorOptional = "descriptorOptional"
|
||||
val DontCheckConsistency = "skipConsistencyCheck"
|
||||
val OptSet = Set(BootOnly, MvnComp, DescriptorOptional, DontCheckConsistency)
|
||||
m.toList.map {
|
||||
case (key, None) => Predefined(key)
|
||||
case (key, Some(BootOnly)) => Predefined(key, true)
|
||||
case (key, Some(value)) =>
|
||||
val r = trim(substituteVariables(value).split(",", 7))
|
||||
val url = try { new URL(r(0)) } catch { case e: MalformedURLException => error("Invalid URL specified for '" + key + "': " + e.getMessage) }
|
||||
val (optionPart, patterns) = r.tail.partition(OptSet.contains(_))
|
||||
val options = (optionPart.contains(BootOnly), optionPart.contains(MvnComp), optionPart.contains(DescriptorOptional), optionPart.contains(DontCheckConsistency))
|
||||
(patterns, options) match {
|
||||
case (both :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, both, both, mavenCompatible = mc, bootOnly = bo, descriptorOptional = dso, skipConsistencyCheck = cc)
|
||||
case (ivy :: art :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, ivy, art, mavenCompatible = mc, bootOnly = bo, descriptorOptional = dso, skipConsistencyCheck = cc)
|
||||
case (Nil, (true, false, false, cc)) => Maven(key, url, bootOnly = true)
|
||||
case (Nil, (false, false, false, false)) => Maven(key, url)
|
||||
case _ => error("Could not parse %s: %s".format(key, value))
|
||||
}
|
||||
}
|
||||
}
|
||||
def getAppProperties(m: LabelMap): List[AppProperty] =
|
||||
for ((name, Some(value)) <- m.toList) yield {
|
||||
val map = ListMap(trim(value.split(",")).map(parsePropertyDefinition(name)): _*)
|
||||
AppProperty(name)(map.get("quick"), map.get("new"), map.get("fill"))
|
||||
}
|
||||
def parsePropertyDefinition(name: String)(value: String) = value.split("=", 2) match {
|
||||
case Array(mode, value) => (mode, parsePropertyValue(name, value)(defineProperty(name)))
|
||||
case x => error("Invalid property definition '" + x + "' for property '" + name + "'")
|
||||
}
|
||||
def defineProperty(name: String)(action: String, requiredArg: String, optionalArg: Option[String]) =
|
||||
action match {
|
||||
case "prompt" => new PromptProperty(requiredArg, optionalArg)
|
||||
case "set" => new SetProperty(requiredArg)
|
||||
case _ => error("Unknown action '" + action + "' for property '" + name + "'")
|
||||
}
|
||||
private[this] lazy val propertyPattern = Pattern.compile("""(.+)\((.*)\)(?:\[(.*)\])?""") // examples: prompt(Version)[1.0] or set(1.0)
|
||||
def parsePropertyValue[T](name: String, definition: String)(f: (String, String, Option[String]) => T): T =
|
||||
{
|
||||
val m = propertyPattern.matcher(definition)
|
||||
if (!m.matches()) error("Invalid property definition '" + definition + "' for property '" + name + "'")
|
||||
val optionalArg = m.group(3)
|
||||
f(m.group(1), m.group(2), if (optionalArg eq null) None else Some(optionalArg))
|
||||
}
|
||||
|
||||
type LabelMap = ListMap[String, Option[String]]
|
||||
// section-name -> label -> value
|
||||
type SectionMap = ListMap[String, LabelMap]
|
||||
def processLines(lines: List[Line]): SectionMap =
|
||||
{
|
||||
type State = (SectionMap, Option[String])
|
||||
val s: State =
|
||||
(((ListMap.empty.default(x => ListMap.empty[String, Option[String]]), None): State) /: lines) {
|
||||
case (x, Comment) => x
|
||||
case ((map, _), s: Section) => (map, Some(s.name))
|
||||
case ((_, None), l: Labeled) => error("Label " + l.label + " is not in a section")
|
||||
case ((map, s @ Some(section)), l: Labeled) =>
|
||||
val sMap = map(section)
|
||||
if (sMap.contains(l.label)) error("Duplicate label '" + l.label + "' in section '" + section + "'")
|
||||
else (map(section) = (sMap(l.label) = l.value), s)
|
||||
}
|
||||
s._1
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
@ -269,49 +261,46 @@ final class Section(val name: String) extends Line
|
|||
object Comment extends Line
|
||||
|
||||
class ParseException(val content: String, val line: Int, val col: Int, val msg: String)
|
||||
extends BootException( "[" + (line+1) + ", " + (col+1) + "]" + msg + "\n" + content + "\n" + List.fill(col)(" ").mkString + "^" )
|
||||
extends BootException("[" + (line + 1) + ", " + (col + 1) + "]" + msg + "\n" + content + "\n" + List.fill(col)(" ").mkString + "^")
|
||||
|
||||
object ParseLine
|
||||
{
|
||||
def apply(content: String, line: Int) =
|
||||
{
|
||||
def error(col: Int, msg: String) = throw new ParseException(content, line, col, msg)
|
||||
def check(condition: Boolean)(col: Int, msg: String) = if(condition) () else error(col, msg)
|
||||
object ParseLine {
|
||||
def apply(content: String, line: Int) =
|
||||
{
|
||||
def error(col: Int, msg: String) = throw new ParseException(content, line, col, msg)
|
||||
def check(condition: Boolean)(col: Int, msg: String) = if (condition) () else error(col, msg)
|
||||
|
||||
val trimmed = trimLeading(content)
|
||||
val offset = content.length - trimmed.length
|
||||
val trimmed = trimLeading(content)
|
||||
val offset = content.length - trimmed.length
|
||||
|
||||
def section =
|
||||
{
|
||||
val closing = trimmed.indexOf(']', 1)
|
||||
check(closing > 0)(content.length, "Expected ']', found end of line")
|
||||
val extra = trimmed.substring(closing+1)
|
||||
val trimmedExtra = trimLeading(extra)
|
||||
check(isEmpty(trimmedExtra))(content.length - trimmedExtra.length, "Expected end of line, found '" + extra + "'")
|
||||
new Section(trimmed.substring(1,closing).trim)
|
||||
}
|
||||
def labeled =
|
||||
{
|
||||
trimmed.split(":",2) match {
|
||||
case Array(label, value) =>
|
||||
val trimmedValue = value.trim
|
||||
check(isNonEmpty(trimmedValue))(content.indexOf(':'), "Value for '" + label + "' was empty")
|
||||
new Labeled(label, Some(trimmedValue))
|
||||
case x => new Labeled(x.mkString, None)
|
||||
}
|
||||
}
|
||||
|
||||
if(isEmpty(trimmed)) Nil
|
||||
else
|
||||
{
|
||||
val processed =
|
||||
trimmed.charAt(0) match
|
||||
{
|
||||
case '#' => Comment
|
||||
case '[' => section
|
||||
case _ => labeled
|
||||
}
|
||||
processed :: Nil
|
||||
}
|
||||
}
|
||||
def section =
|
||||
{
|
||||
val closing = trimmed.indexOf(']', 1)
|
||||
check(closing > 0)(content.length, "Expected ']', found end of line")
|
||||
val extra = trimmed.substring(closing + 1)
|
||||
val trimmedExtra = trimLeading(extra)
|
||||
check(isEmpty(trimmedExtra))(content.length - trimmedExtra.length, "Expected end of line, found '" + extra + "'")
|
||||
new Section(trimmed.substring(1, closing).trim)
|
||||
}
|
||||
def labeled =
|
||||
{
|
||||
trimmed.split(":", 2) match {
|
||||
case Array(label, value) =>
|
||||
val trimmedValue = value.trim
|
||||
check(isNonEmpty(trimmedValue))(content.indexOf(':'), "Value for '" + label + "' was empty")
|
||||
new Labeled(label, Some(trimmedValue))
|
||||
case x => new Labeled(x.mkString, None)
|
||||
}
|
||||
}
|
||||
|
||||
if (isEmpty(trimmed)) Nil
|
||||
else {
|
||||
val processed =
|
||||
trimmed.charAt(0) match {
|
||||
case '#' => Comment
|
||||
case '[' => section
|
||||
case _ => labeled
|
||||
}
|
||||
processed :: Nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue