Merge branch '1.x' into help-sbt-new

This commit is contained in:
PanAeon 2017-12-11 16:57:40 +00:00 committed by GitHub
commit 184390fed2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
98 changed files with 2928 additions and 546 deletions

View File

@ -200,3 +200,22 @@ Building Documentation
======================
The scala-sbt.org site documentation is a separate project [website](https://github.com/sbt/website). Follow [the steps in the README](https://github.com/sbt/website#scala-sbtorg) to generate the documentation.
Note for maintainers
====================
Publishing VS Code Extensions
-----------------------------
https://code.visualstudio.com/docs/extensions/publish-extension
```
$ sbt
> vscodePlugin/compile
> exit
$ cd vscode-sbt-scala/client
# update version number in vscode-sbt-scala/client/package.json
$ vsce package
$ vsce publish
```

View File

@ -34,7 +34,7 @@ def buildLevelSettings: Seq[Setting[_]] =
scmInfo := Some(ScmInfo(url("https://github.com/sbt/sbt"), "git@github.com:sbt/sbt.git")),
resolvers += Resolver.mavenLocal,
scalafmtOnCompile := true,
scalafmtVersion := "1.2.0",
scalafmtVersion := "1.3.0",
))
def commonSettings: Seq[Setting[_]] =
@ -51,10 +51,11 @@ def commonSettings: Seq[Setting[_]] =
resolvers += Resolver.typesafeIvyRepo("releases"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/",
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "2"),
javacOptions in compile ++= Seq("-target", "6", "-source", "6", "-Xlint", "-Xlint:-serial"),
javacOptions in compile ++= Seq("-Xlint", "-Xlint:-serial"),
crossScalaVersions := Seq(baseScalaVersion),
bintrayPackage := (bintrayPackage in ThisBuild).value,
bintrayRepository := (bintrayRepository in ThisBuild).value,
@ -74,10 +75,9 @@ def testedBaseSettings: Seq[Setting[_]] =
baseSettings ++ testDependencies
val mimaSettings = Def settings (
mimaPreviousArtifacts := Set(
organization.value % moduleName.value % "1.0.0"
cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
)
mimaPreviousArtifacts := (0 to 4).map { v =>
organization.value % moduleName.value % s"1.0.$v" cross (if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled)
}.toSet
)
lazy val sbtRoot: Project = (project in file("."))
@ -107,7 +107,9 @@ lazy val sbtRoot: Project = (project in file("."))
Transform.conscriptSettings(bundledLauncherProj),
publish := {},
publishLocal := {},
skip in publish := true
skip in publish := true,
commands in Global += Command.single("sbtOn")((state, dir) =>
s"sbtProj/test:runMain sbt.RunFromSourceMain $dir" :: state),
)
// This is used to configure an sbt-launcher for this version of sbt.
@ -143,6 +145,18 @@ val collectionProj = (project in file("internal") / "util-collection")
mimaBinaryIssueFilters ++= Seq(
// Added private[sbt] method to capture State attributes.
exclude[ReversedMissingMethodProblem]("sbt.internal.util.AttributeMap.setCond"),
// Dropped in favour of kind-projector's inline type lambda syntax
exclude[MissingClassProblem]("sbt.internal.util.TypeFunctions$P1of2"),
// Dropped in favour of kind-projector's polymorphic lambda literals
exclude[MissingClassProblem]("sbt.internal.util.Param"),
exclude[MissingClassProblem]("sbt.internal.util.Param$"),
// Dropped in favour of plain scala.Function, and its compose method
exclude[MissingClassProblem]("sbt.internal.util.Fn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.TypeFunctions.toFn1"),
exclude[DirectMissingMethodProblem]("sbt.internal.util.Types.toFn1"),
),
)
.configure(addSbtUtilPosition)
@ -296,7 +310,8 @@ lazy val commandProj = (project in file("main-command"))
.settings(
testedBaseSettings,
name := "Command",
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson.value, templateResolverApi),
libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson.value, templateResolverApi,
jna, jnaPlatform),
managedSourceDirectories in Compile +=
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
@ -309,7 +324,13 @@ lazy val commandProj = (project in file("main-command"))
exclude[ReversedMissingMethodProblem]("sbt.internal.server.ServerInstance.*"),
// Added method to CommandChannel. internal.
exclude[ReversedMissingMethodProblem]("sbt.internal.CommandChannel.*"),
)
// Added an overload to reboot. The overload is private[sbt].
exclude[ReversedMissingMethodProblem]("sbt.StateOps.reboot"),
),
unmanagedSources in (Compile, headerCreate) := {
val old = (unmanagedSources in (Compile, headerCreate)).value
old filterNot { x => (x.getName startsWith "NG") || (x.getName == "ReferenceCountedFileDescriptor.java") }
},
)
.configure(
addSbtIO,
@ -376,7 +397,7 @@ lazy val mainProj = (project in file("main"))
.settings(
testedBaseSettings,
name := "Main",
libraryDependencies ++= scalaXml.value ++ Seq(launcherInterface) ++ log4jDependencies,
libraryDependencies ++= scalaXml.value ++ Seq(launcherInterface) ++ log4jDependencies ++ Seq(scalaCacheCaffeine),
managedSourceDirectories in Compile +=
baseDirectory.value / "src" / "main" / "contraband-scala",
sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala",
@ -409,17 +430,49 @@ lazy val mainProj = (project in file("main"))
// with the sole purpose of providing certain identifiers without qualification (with a package object)
lazy val sbtProj = (project in file("sbt"))
.dependsOn(mainProj, scriptedSbtProj % "test->test")
.enablePlugins(BuildInfoPlugin)
.settings(
baseSettings,
name := "sbt",
normalizedName := "sbt",
crossScalaVersions := Seq(baseScalaVersion),
crossPaths := false,
javaOptions ++= Seq("-Xdebug", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"),
mimaSettings,
mimaBinaryIssueFilters ++= sbtIgnoredProblems,
addBuildInfoToConfig(Test),
buildInfoObject in Test := "TestBuildInfo",
buildInfoKeys in Test := Seq[BuildInfoKey](fullClasspath in Compile),
connectInput in run in Test := true,
)
.configure(addSbtCompilerBridge)
// Binary-compatibility (MiMa) problems deliberately ignored for the sbt module,
// grouped by the change that introduced them.
lazy val sbtIgnoredProblems = {
  val importTraitAdditions = Vector(
    // Added more items to Import trait.
    exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$WatchSource_="),
    exclude[ReversedMissingMethodProblem]("sbt.Import.WatchSource"),
  )
  val kindProjectorCleanup = Vector(
    // Dropped in favour of kind-projector's polymorphic lambda literals
    exclude[DirectMissingMethodProblem]("sbt.Import.Param"),
    exclude[DirectMissingMethodProblem]("sbt.package.Param"),
  )
  val fn1Cleanup = Vector(
    // Dropped in favour of plain scala.Function, and its compose method
    exclude[DirectMissingMethodProblem]("sbt.package.toFn1"),
  )
  importTraitAdditions ++ kindProjectorCleanup ++ fn1Cleanup
}
/**
 * Runs `npm <command>` in the directory `base`, streaming process output to `log`.
 * A non-zero exit code is raised as an exception; a failure to launch npm at all
 * (java.io.IOException, e.g. npm not installed) is only logged as a warning.
 */
def runNpm(command: String, base: File, log: sbt.internal.util.ManagedLogger) = {
  import scala.sys.process._
  // npm ships as an "npm.cmd" shim on Windows.
  val npmExecutable = if (sbt.internal.util.Util.isWindows) "npm.cmd" else "npm"
  try {
    val exit = Process(s"$npmExecutable $command", Option(base)) ! log
    if (exit != 0) throw new Exception("Process returned exit code: " + exit)
  } catch {
    case e: java.io.IOException => log.warn("failed to run npm " + e.getMessage)
  }
}
lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
.settings(
crossPaths := false,
@ -427,12 +480,8 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
skip in publish := true,
compile in Compile := {
val u = update.value
val log = streams.value.log
import sbt.internal.inc.Analysis
import scala.sys.process._
val exitCode = Process(s"npm run compile", Option(baseDirectory.value)) ! log
if (exitCode != 0) throw new Exception("Process returned exit code: " + exitCode)
Analysis.empty
runNpm("run compile", baseDirectory.value, streams.value.log)
sbt.internal.inc.Analysis.empty
},
update := {
val old = update.value
@ -441,9 +490,7 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
val log = streams.value.log
if (t.exists) ()
else {
import scala.sys.process._
val exitCode = Process("npm install", Option(base)) ! log
if (exitCode != 0) throw new Exception("Process returned exit code: " + exitCode)
runNpm("install", base, log)
IO.touch(t)
}
old
@ -459,14 +506,6 @@ lazy val vscodePlugin = (project in file("vscode-sbt-scala"))
}
)
lazy val sbtIgnoredProblems = {
Seq(
// Added more items to Import trait.
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$WatchSource_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.WatchSource")
)
}
def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
// publishLocalBinAll.value // TODO: Restore scripted needing only binary jars.

View File

@ -32,6 +32,9 @@ import scala.reflect._
import macros._
object Instance {
type Aux[M0[_]] = Instance { type M[x] = M0[x] }
type Aux2[M0[_], N[_]] = Instance { type M[x] = M0[N[x]] }
final val ApplyName = "app"
final val FlattenName = "flatten"
final val PureName = "pure"
@ -204,19 +207,18 @@ object Instance {
import Types._
implicit def applicativeInstance[A[_]](
implicit ap: Applicative[A]): Instance { type M[x] = A[x] } = new Instance {
type M[x] = A[x]
def app[K[L[x]], Z](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A, Z](in, f)
def map[S, T](in: A[S], f: S => T) = ap.map(f, in)
def pure[S](s: () => S): M[S] = ap.pure(s())
}
implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance.Aux[A] =
new Instance {
type M[x] = A[x]
def app[K[L[x]], Z](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A, Z](in, f)
def map[S, T](in: A[S], f: S => T) = ap.map(f, in)
def pure[S](s: () => S): M[S] = ap.pure(s())
}
type AI[A[_]] = Instance { type M[x] = A[x] }
def compose[A[_], B[_]](implicit a: AI[A], b: AI[B]): Instance { type M[x] = A[B[x]] } =
def compose[A[_], B[_]](implicit a: Aux[A], b: Aux[B]): Instance.Aux2[A, B] =
new Composed[A, B](a, b)
// made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object
class Composed[A[_], B[_]](a: AI[A], b: AI[B]) extends Instance {
class Composed[A[_], B[_]](a: Aux[A], b: Aux[B]) extends Instance {
type M[x] = A[B[x]]
def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s))
def map[S, T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f))

View File

@ -27,7 +27,7 @@ trait AList[K[L[x]]] {
}
object AList {
type Empty = AList[({ type l[L[x]] = Unit })#l]
type Empty = AList[ConstK[Unit]#l]
/** AList for Unit, which represents a sequence that is always empty.*/
val empty: Empty = new Empty {
@ -37,7 +37,7 @@ object AList {
def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(())
}
type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l]
type SeqList[T] = AList[λ[L[x] => List[L[T]]]]
/** AList for a homogeneous sequence. */
def seq[T]: SeqList[T] = new SeqList[T] {
@ -59,7 +59,7 @@ object AList {
}
/** AList for the arbitrary arity data structure KList. */
def klist[KL[M[_]] <: KList[M] { type Transform[N[_]] = KL[N] }]: AList[KL] = new AList[KL] {
def klist[KL[M[_]] <: KList.Aux[M, KL]]: AList[KL] = new AList[KL] {
def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f)
def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init)
override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app)
@ -67,7 +67,7 @@ object AList {
override def toList[M[_]](k: KL[M]) = k.toList
}
type Single[A] = AList[({ type l[L[x]] = L[A] })#l]
type Single[A] = AList[λ[L[x] => L[A]]]
/** AList for a single value. */
def single[A]: Single[A] = new Single[A] {
@ -76,7 +76,7 @@ object AList {
def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a)
}
type ASplit[K[L[x]], B[x]] = AList[({ type l[L[x]] = K[(L B)#l] })#l]
type ASplit[K[L[x]], B[x]] = AList[λ[L[x] => K[(L B)#l]]]
/** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/
def asplit[K[L[x]], B[x]](base: AList[K]): ASplit[K, B] = new ASplit[K, B] {

View File

@ -196,11 +196,9 @@ object AttributeMap {
def apply(entries: AttributeEntry[_]*): AttributeMap = empty ++ entries
/** Presents an `AttributeMap` as a natural transformation. */
implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = new (AttributeKey ~> Id) {
def apply[T](key: AttributeKey[T]): T = map(key)
}
implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = λ[AttributeKey ~> Id](map(_))
}
private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any])
extends AttributeMap {

View File

@ -32,27 +32,25 @@ abstract class EvaluateSettings[Scope] {
private[this] def getStatic[T](key: ScopedKey[T]): INode[T] =
static get key getOrElse sys.error("Illegal reference to key " + key)
private[this] val transform: Initialize ~> INode = new (Initialize ~> INode) {
def apply[T](i: Initialize[T]): INode[T] = i match {
case k: Keyed[s, T] @unchecked => single(getStatic(k.scopedKey), k.transform)
case a: Apply[k, T] @unchecked =>
new MixedNode[k, T](
a.alist.transform[Initialize, INode](a.inputs, transform),
a.f,
a.alist
)
case b: Bind[s, T] @unchecked => new BindNode[s, T](transform(b.in), x => transform(b.f(x)))
case v: Value[T] @unchecked => constant(v.value)
case v: ValidationCapture[T] @unchecked => strictConstant(v.key)
case t: TransformCapture => strictConstant(t.f)
case o: Optional[s, T] @unchecked =>
o.a match {
case None => constant(() => o.f(None))
case Some(i) => single[s, T](transform(i), x => o.f(Some(x)))
}
case x if x == StaticScopes =>
strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope]
}
private[this] val transform: Initialize ~> INode = λ[Initialize ~> INode] {
case k: Keyed[s, A1$] @unchecked => single(getStatic(k.scopedKey), k.transform)
case a: Apply[k, A1$] @unchecked =>
new MixedNode[k, A1$](
a.alist.transform[Initialize, INode](a.inputs, transform),
a.f,
a.alist
)
case b: Bind[s, A1$] @unchecked => new BindNode[s, A1$](transform(b.in), x => transform(b.f(x)))
case v: Value[A1$] @unchecked => constant(v.value)
case v: ValidationCapture[A1$] @unchecked => strictConstant(v.key)
case t: TransformCapture => strictConstant(t.f)
case o: Optional[s, A1$] @unchecked =>
o.a match {
case None => constant(() => o.f(None))
case Some(i) => single[s, A1$](transform(i), x => o.f(Some(x)))
}
case x if x == StaticScopes =>
strictConstant(allScopes.asInstanceOf[A1$]) // can't convince scalac that StaticScopes => T == Set[Scope]
}
private[this] lazy val roots: Seq[INode[_]] = compiledSettings flatMap { cs =>
@ -84,7 +82,7 @@ abstract class EvaluateSettings[Scope] {
if (key.key.isLocal) ss else ss.set(key.scope, key.key, node.get)
}
private[this] val getValue = new (INode ~> Id) { def apply[T](node: INode[T]) = node.get }
private[this] val getValue = λ[INode ~> Id](_.get)
private[this] def submitEvaluate(node: INode[_]) = submit(node.evaluate())
@ -204,7 +202,7 @@ abstract class EvaluateSettings[Scope] {
new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty)
private[this] def single[S, T](in: INode[S], f: S => T): INode[T] =
new MixedNode[({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S])
new MixedNode[λ[L[x] => L[S]], T](in, f, AList.single[S])
private[this] final class BindNode[S, T](in: INode[S], f: S => INode[T]) extends INode[T] {
protected def dependsOn = in :: Nil

View File

@ -29,6 +29,9 @@ sealed trait KList[+M[_]] {
/** Discards the heterogeneous type information and constructs a plain List from this KList's elements. */
def toList: List[M[_]]
}
object KList {
type Aux[+M[_], Transform0[N[_]]] = KList[M] { type Transform[N[_]] = Transform0[N] }
}
final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] {
final type Transform[N[_]] = KCons[H, tail.Transform[N], N]

View File

@ -31,8 +31,7 @@ trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
def remove[T](k: K[T]): IMap[K, V]
def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V]
def mapValues[V2[_]](f: V ~> V2): IMap[K, V2]
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l)
: (IMap[K, VL], IMap[K, VR])
def mapSeparate[VL[_], VR[_]](f: V ~> λ[T => Either[VL[T], VR[T]]]): (IMap[K, VL], IMap[K, VR])
}
trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
@ -69,7 +68,7 @@ object IMap {
def mapValues[V2[_]](f: V ~> V2) =
new IMap0[K, V2](backing.mapValues(x => f(x)))
def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) = {
def mapSeparate[VL[_], VR[_]](f: V ~> λ[T => Either[VL[T], VR[T]]]) = {
val mapped = backing.iterator.map {
case (k, v) =>
f(v) match {

View File

@ -1,32 +0,0 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.internal.util
// Used to emulate ~> literals
/**
 * Emulates a call site of a natural transformation `A ~> B` at a single,
 * unknown element type `T`. An instance supplies the input (`in`) and a slot
 * for the caller-provided result (`ret`). See `Param.pToT` for how a function
 * over `Param` is turned into an `A ~> B`.
 */
trait Param[A[_], B[_]] {
  // The element type this particular application is made at (existential to callers).
  type T
  // The input value of the transformation, at type A[T].
  def in: A[T]
  // Records the result of the transformation; expected to be called by the user function.
  def ret(out: B[T]): Unit
  // Retrieves the previously recorded result.
  def ret: B[T]
}
object Param {
  /**
   * Implicitly converts a side-effecting function over `Param` into a natural
   * transformation `A ~> B`, letting users write `~>` "literals" as plain
   * function literals that call `p.ret(...)`.
   */
  implicit def pToT[A[_], B[_]](p: Param[A, B] => Unit): A ~> B = new (A ~> B) {
    def apply[s](a: A[s]): B[s] = {
      // Build a Param fixed at element type `s`; the result is captured by
      // mutation because `p` returns Unit.
      val v: Param[A, B] { type T = s } = new Param[A, B] {
        type T = s
        def in = a
        // Holds the result; null (default value) until `p` invokes ret(b).
        private var r: B[T] = _
        def ret(b: B[T]): Unit = { r = b }
        def ret: B[T] = r
      }
      p(v)
      // NOTE(review): yields null if `p` never called ret — assumes every
      // user function records exactly one result.
      v.ret
    }
  }
}

View File

@ -101,14 +101,14 @@ trait Init[Scope] {
def bind[S, T](in: Initialize[S])(f: S => Initialize[T]): Initialize[T] = new Bind(f, in)
def map[S, T](in: Initialize[S])(f: S => T): Initialize[T] =
new Apply[({ type l[L[x]] = L[S] })#l, T](f, in, AList.single[S])
new Apply[λ[L[x] => L[S]], T](f, in, AList.single[S])
def app[K[L[x]], T](inputs: K[Initialize])(f: K[Id] => T)(
implicit alist: AList[K]
): Initialize[T] = new Apply[K, T](f, inputs, alist)
def uniform[S, T](inputs: Seq[Initialize[S]])(f: Seq[S] => T): Initialize[T] =
new Apply[({ type l[L[x]] = List[L[S]] })#l, T](f, inputs.toList, AList.seq[S])
new Apply[λ[L[x] => List[L[S]]], T](f, inputs.toList, AList.seq[S])
/**
* The result of this initialization is the validated `key`.
@ -156,9 +156,7 @@ trait Init[Scope] {
def empty(implicit delegates: Scope => Seq[Scope]): Settings[Scope] =
new Settings0(Map.empty, delegates)
def asTransform(s: Settings[Scope]): ScopedKey ~> Id = new (ScopedKey ~> Id) {
def apply[T](k: ScopedKey[T]): T = getValue(s, k)
}
def asTransform(s: Settings[Scope]): ScopedKey ~> Id = λ[ScopedKey ~> Id](k => getValue(s, k))
def getValue[T](s: Settings[Scope], k: ScopedKey[T]) =
s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k))
@ -246,13 +244,11 @@ trait Init[Scope] {
type ValidatedSettings[T] = Either[Seq[Undefined], SettingSeq[T]]
val f = new (SettingSeq ~> ValidatedSettings) {
def apply[T](ks: Seq[Setting[T]]) = {
val (undefs, valid) = Util.separate(ks.zipWithIndex) {
case (s, i) => s validateKeyReferenced refMap(s, i == 0)
}
if (undefs.isEmpty) Right(valid) else Left(undefs.flatten)
val f = λ[SettingSeq ~> ValidatedSettings] { (ks: Seq[Setting[_]]) =>
val (undefs, valid) = Util.separate(ks.zipWithIndex) {
case (s, i) => s validateKeyReferenced refMap(s, i == 0)
}
if (undefs.isEmpty) Right(valid) else Left(undefs.flatten)
}
type Undefs[_] = Seq[Undefined]
@ -560,7 +556,7 @@ trait Init[Scope] {
def zip[S](o: Initialize[S]): Initialize[(T, S)] = zipTupled(o)(idFun)
def zipWith[S, U](o: Initialize[S])(f: (T, S) => U): Initialize[U] = zipTupled(o)(f.tupled)
private[this] def zipTupled[S, U](o: Initialize[S])(f: ((T, S)) => U): Initialize[U] =
new Apply[({ type l[L[x]] = (L[T], L[S]) })#l, U](f, (this, o), AList.tuple2[T, S])
new Apply[λ[L[x] => (L[T], L[S])], U](f, (this, o), AList.tuple2[T, S])
/** A fold on the static attributes of this and nested Initializes. */
private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S
@ -577,8 +573,7 @@ trait Init[Scope] {
def join[T](inits: Seq[Initialize[T]]): Initialize[Seq[T]] = uniform(inits)(idFun)
def joinAny[M[_]](inits: Seq[Initialize[M[T]] forSome { type T }]): Initialize[Seq[M[_]]] =
join(inits.asInstanceOf[Seq[Initialize[M[Any]]]])
.asInstanceOf[Initialize[Seq[M[T] forSome { type T }]]]
join(inits.asInstanceOf[Seq[Initialize[M[_]]]])
}
object SettingsDefinition {
@ -686,23 +681,15 @@ trait Init[Scope] {
case Right(x) => x
}
private[this] lazy val getValidated =
new (ValidatedInit ~> Initialize) { def apply[T](v: ValidatedInit[T]) = handleUndefined[T](v) }
private[this] lazy val getValidated = λ[ValidatedInit ~> Initialize](handleUndefined(_))
// mainly for reducing generated class count
private[this] def validateKeyReferencedT(g: ValidateKeyRef) =
new (Initialize ~> ValidatedInit) {
def apply[T](i: Initialize[T]) = i validateKeyReferenced g
}
λ[Initialize ~> ValidatedInit](_ validateKeyReferenced g)
private[this] def mapReferencedT(g: MapScoped) =
new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapReferenced g }
private[this] def mapConstantT(g: MapConstant) =
new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapConstant g }
private[this] def evaluateT(g: Settings[Scope]) =
new (Initialize ~> Id) { def apply[T](i: Initialize[T]) = i evaluate g }
private[this] def mapReferencedT(g: MapScoped) = λ[Initialize ~> Initialize](_ mapReferenced g)
private[this] def mapConstantT(g: MapConstant) = λ[Initialize ~> Initialize](_ mapConstant g)
private[this] def evaluateT(g: Settings[Scope]) = λ[Initialize ~> Id](_ evaluate g)
private[this] def deps(ls: Seq[Initialize[_]]): Seq[ScopedKey[_]] = ls.flatMap(_.dependencies)
@ -854,9 +841,7 @@ trait Init[Scope] {
def validateKeyReferenced(g: ValidateKeyRef) = {
val tx = alist.transform(inputs, validateKeyReferencedT(g))
val undefs = alist.toList(tx).flatMap(_.left.toSeq.flatten)
val get = new (ValidatedInit ~> Initialize) {
def apply[B](vr: ValidatedInit[B]) = vr.right.get
}
val get = λ[ValidatedInit ~> Initialize](_.right.get)
if (undefs.isEmpty) Right(new Apply(f, alist.transform(tx, get), alist)) else Left(undefs)
}

View File

@ -13,25 +13,18 @@ trait TypeFunctions {
sealed trait ConstK[A] { type l[L[x]] = A }
sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] }
sealed trait [A[_], B[_]] { type l[T] = A[B[T]] }
sealed trait P1of2[M[_, _], A] { type Apply[B] = M[A, B]; type Flip[B] = M[B, A] }
final val left = new (Id ~> P1of2[Left, Nothing]#Flip) { def apply[T](t: T) = Left(t) }
final val right = new (Id ~> P1of2[Right, Nothing]#Apply) { def apply[T](t: T) = Right(t) }
final val some = new (Id ~> Some) { def apply[T](t: T) = Some(t) }
final val left = λ[Id ~> Left[?, Nothing]](Left(_))
final val right = λ[Id ~> Right[Nothing, ?]](Right(_))
final val some = λ[Id ~> Some](Some(_))
final def idFun[T] = (t: T) => t
final def const[A, B](b: B): A => B = _ => b
final def idK[M[_]]: M ~> M = new (M ~> M) { def apply[T](m: M[T]): M[T] = m }
final def idK[M[_]]: M ~> M = λ[M ~> M](m => m)
def nestCon[M[_], N[_], G[_]](f: M ~> N): (M G)#l ~> (N G)#l =
f.asInstanceOf[(M G)#l ~> (N G)#l] // implemented with a cast to avoid extra object+method call. castless version:
/* new ( (M ∙ G)#l ~> (N ∙ G)#l ) {
def apply[T](mg: M[G[T]]): N[G[T]] = f(mg)
} */
implicit def toFn1[A, B](f: A => B): Fn1[A, B] = new Fn1[A, B] {
def [C](g: C => A) = f compose g
}
f.asInstanceOf[(M G)#l ~> (N G)#l] // implemented with a cast to avoid extra object+method call.
// castless version:
// λ[(M G)#l ~> (N G)#l](f(_))
type Endo[T] = T => T
type ~>|[A[_], B[_]] = A ~> Compose[Option, B]#Apply
@ -42,17 +35,13 @@ object TypeFunctions extends TypeFunctions
trait ~>[-A[_], +B[_]] { outer =>
def apply[T](a: A[T]): B[T]
// directly on ~> because of type inference limitations
final def [C[_]](g: C ~> A): C ~> B = new (C ~> B) { def apply[T](c: C[T]) = outer.apply(g(c)) }
final def [C[_]](g: C ~> A): C ~> B = λ[C ~> B](c => outer.apply(g(c)))
final def [C, D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i)))
final def fn[T] = (t: A[T]) => apply[T](t)
}
object ~> {
import TypeFunctions._
val Id: Id ~> Id = new (Id ~> Id) { def apply[T](a: T): T = a }
val Id: Id ~> Id = idK[Id]
implicit def tcIdEquals: (Id ~> Id) = Id
}
trait Fn1[A, B] {
def [C](g: C => A): C => B
}

View File

@ -1,21 +0,0 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt.internal.util
// compilation test
// Compile-time check that Param-based ~> "literals" infer and convert correctly.
object LiteralTest {
  // Forces the argument to be viewed as a natural transformation A ~> B.
  def x[A[_], B[_]](f: A ~> B) = f
  import Param._
  // The function literal over Param is implicitly converted (via Param.pToT)
  // into an Option ~> List transformation.
  val f = x { (p: Param[Option, List]) =>
    p.ret(p.in.toList)
  }
  // The resulting transformation is polymorphic in the element type:
  val a: List[Int] = f(Some(3))
  val b: List[String] = f(Some("aa"))
}

View File

@ -151,16 +151,23 @@ object Completion {
val empty: Completion = suggestion("")
def single(c: Char): Completion = suggestion(c.toString)
// TODO: make strict in 0.13.0 to match DisplayOnly
def displayOnly(value: => String): Completion = new DisplayOnly(value)
def displayOnly(value: String): Completion = new DisplayOnly(value)
// TODO: make strict in 0.13.0 to match Token
def token(prepend: => String, append: => String): Completion =
def token(prepend: String, append: String): Completion =
new Token(prepend + append, append)
/** @since 0.12.1 */
def tokenDisplay(append: String, display: String): Completion = new Token(display, append)
// TODO: make strict in 0.13.0 to match Suggestion
def suggestion(value: => String): Completion = new Suggestion(value)
def suggestion(value: String): Completion = new Suggestion(value)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def displayOnly(value: => String): Completion = new DisplayOnly(value)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def token(prepend: => String, append: => String): Completion =
new Token(prepend + append, append)
@deprecated("No longer used. for binary compatibility", "1.1.0")
private[complete] def suggestion(value: => String): Completion = new Suggestion(value)
}

View File

@ -13,6 +13,7 @@ import sbt.internal.inc.Analysis
import TaskExtra._
import sbt.internal.util.FeedbackProvidedException
import xsbti.api.Definition
import xsbti.api.ClassLike
import xsbti.compile.CompileAnalysis
import ConcurrentRestrictions.Tag
@ -389,7 +390,11 @@ object Tests {
defined(subclasses, d.baseClasses, d.isModule) ++
defined(annotations, d.annotations, d.isModule)
val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions)
val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions.filter {
case c: ClassLike =>
c.topLevel
case _ => false
})
// TODO: To pass in correct explicitlySpecified and selectors
val tests = for ((df, di) <- discovered; fingerprint <- toFingerprints(di))
yield new TestDefinition(df.name, fingerprint, false, Array(new SuiteSelector))

View File

@ -0,0 +1,28 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
// JSON codec for sbt.ConnectionType, (de)serialized as the bare strings
// "Local" / "Tcp". Generated by sbt-contraband — regenerate rather than edit.
trait ConnectionTypeFormats { self: sjsonnew.BasicJsonProtocol =>
  implicit lazy val ConnectionTypeFormat: JsonFormat[sbt.ConnectionType] = new JsonFormat[sbt.ConnectionType] {
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.ConnectionType = {
      jsOpt match {
        case Some(js) =>
          // NOTE(review): any string other than "Local"/"Tcp" escapes as a
          // MatchError rather than a deserializationError.
          unbuilder.readString(js) match {
            case "Local" => sbt.ConnectionType.Local
            case "Tcp" => sbt.ConnectionType.Tcp
          }
        case None =>
          deserializationError("Expected JsString but found None")
      }
    }
    override def write[J](obj: sbt.ConnectionType, builder: Builder[J]): Unit = {
      val str = obj match {
        case sbt.ConnectionType.Local => "Local"
        case sbt.ConnectionType.Tcp => "Tcp"
      }
      builder.writeString(str)
    }
  }
}

View File

@ -0,0 +1,13 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt
/** The kind of transport used for a connection. Generated by sbt-contraband. */
sealed abstract class ConnectionType extends Serializable
object ConnectionType {
  /** This uses Unix domain socket on POSIX, and named pipe on Windows. */
  case object Local extends ConnectionType
  /** Connection over a TCP socket. */
  case object Tcp extends ConnectionType
}

View File

@ -16,3 +16,10 @@ type CommandSource {
enum ServerAuthentication {
Token
}
enum ConnectionType {
## This uses Unix domain socket on POSIX, and named pipe on Windows.
Local
Tcp
# Ssh
}

View File

@ -0,0 +1,178 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainServerSocket.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.concurrent.atomic.AtomicInteger;
import com.sun.jna.LastErrorException;
import com.sun.jna.ptr.IntByReference;
/**
* Implements a {@link ServerSocket} which binds to a local Unix domain socket
* and returns instances of {@link NGUnixDomainSocket} from
* {@link #accept()}.
*/
public class NGUnixDomainServerSocket extends ServerSocket {
private static final int DEFAULT_BACKLOG = 50;
// We use an AtomicInteger to prevent a race in this situation which
// could happen if fd were just an int:
//
// Thread 1 -> NGUnixDomainServerSocket.accept()
// -> lock this
// -> check isBound and isClosed
// -> unlock this
// -> descheduled while still in method
// Thread 2 -> NGUnixDomainServerSocket.close()
// -> lock this
// -> check isClosed
// -> NGUnixDomainSocketLibrary.close(fd)
// -> now fd is invalid
// -> unlock this
// Thread 1 -> re-scheduled while still in method
// -> NGUnixDomainSocketLibrary.accept(fd, which is invalid and maybe re-used)
//
// By using an AtomicInteger, we'll set this to -1 after it's closed, which
// will cause the accept() call above to cleanly fail instead of possibly
// being called on an unrelated fd (which may or may not fail).
private final AtomicInteger fd;
private final int backlog;
private boolean isBound;
private boolean isClosed;
public static class NGUnixDomainServerSocketAddress extends SocketAddress {
private final String path;
public NGUnixDomainServerSocketAddress(String path) {
this.path = path;
}
public String getPath() {
return path;
}
}
/**
* Constructs an unbound Unix domain server socket.
*/
public NGUnixDomainServerSocket() throws IOException {
this(DEFAULT_BACKLOG, null);
}
/**
* Constructs an unbound Unix domain server socket with the specified listen backlog.
*/
public NGUnixDomainServerSocket(int backlog) throws IOException {
this(backlog, null);
}
/**
* Constructs and binds a Unix domain server socket to the specified path.
*/
public NGUnixDomainServerSocket(String path) throws IOException {
this(DEFAULT_BACKLOG, path);
}
/**
* Constructs and binds a Unix domain server socket to the specified path
* with the specified listen backlog.
*/
public NGUnixDomainServerSocket(int backlog, String path) throws IOException {
try {
fd = new AtomicInteger(
NGUnixDomainSocketLibrary.socket(
NGUnixDomainSocketLibrary.PF_LOCAL,
NGUnixDomainSocketLibrary.SOCK_STREAM,
0));
this.backlog = backlog;
if (path != null) {
bind(new NGUnixDomainServerSocketAddress(path));
}
} catch (LastErrorException e) {
throw new IOException(e);
}
}
/**
* Binds this socket to the path in the given
* {@link NGUnixDomainServerSocketAddress} and starts listening with the
* configured backlog.
*
* @throws IllegalArgumentException if endpoint is not an NGUnixDomainServerSocketAddress
* @throws IllegalStateException if the socket is already bound or already closed
* @throws IOException if the native bind(2) or listen(2) call fails
*/
public synchronized void bind(SocketAddress endpoint) throws IOException {
if (!(endpoint instanceof NGUnixDomainServerSocketAddress)) {
throw new IllegalArgumentException(
"endpoint must be an instance of NGUnixDomainServerSocketAddress");
}
if (isBound) {
throw new IllegalStateException("Socket is already bound");
}
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
NGUnixDomainServerSocketAddress unEndpoint = (NGUnixDomainServerSocketAddress) endpoint;
NGUnixDomainSocketLibrary.SockaddrUn address =
new NGUnixDomainSocketLibrary.SockaddrUn(unEndpoint.getPath());
try {
// Read the fd once so bind and listen act on the same descriptor.
int socketFd = fd.get();
NGUnixDomainSocketLibrary.bind(socketFd, address, address.size());
NGUnixDomainSocketLibrary.listen(socketFd, backlog);
isBound = true;
} catch (LastErrorException e) {
throw new IOException(e);
}
}
/**
* Blocks until a client connects, then returns the connection wrapped in an
* {@link NGUnixDomainSocket}.
*
* @throws IllegalStateException if the socket is not bound or already closed
* @throws IOException if the native accept(2) call fails (including when a
*         concurrent close() has invalidated the fd)
*/
public Socket accept() throws IOException {
// We explicitly do not make this method synchronized, since the
// call to NGUnixDomainSocketLibrary.accept() will block
// indefinitely, causing another thread's call to close() to deadlock.
synchronized (this) {
if (!isBound) {
throw new IllegalStateException("Socket is not bound");
}
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
}
try {
NGUnixDomainSocketLibrary.SockaddrUn sockaddrUn =
new NGUnixDomainSocketLibrary.SockaddrUn();
IntByReference addressLen = new IntByReference();
addressLen.setValue(sockaddrUn.size());
// fd.get() may legitimately return -1 if close() ran after the check
// above; accept(2) then fails cleanly instead of using a recycled fd.
int clientFd = NGUnixDomainSocketLibrary.accept(fd.get(), sockaddrUn, addressLen);
return new NGUnixDomainSocket(clientFd);
} catch (LastErrorException e) {
throw new IOException(e);
}
}
/**
* Closes the listening socket and invalidates the stored fd so a pending or
* future accept() fails instead of acting on a reused descriptor.
*
* @throws IllegalStateException if the socket is already closed
* @throws IOException if the native close(2) call fails
*/
public synchronized void close() throws IOException {
if (isClosed) {
throw new IllegalStateException("Socket is already closed");
}
try {
// Ensure any pending call to accept() fails.
NGUnixDomainSocketLibrary.close(fd.getAndSet(-1));
isClosed = true;
} catch (LastErrorException e) {
throw new IOException(e);
}
}
}

View File

@ -0,0 +1,171 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainSocket.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.net.Socket;
/**
* Implements a {@link Socket} backed by a native Unix domain socket.
*
* Instances of this class always return {@code null} for
* {@link Socket#getInetAddress()}, {@link Socket#getLocalAddress()},
* {@link Socket#getLocalSocketAddress()}, {@link Socket#getRemoteSocketAddress()}.
*/
public class NGUnixDomainSocket extends Socket {
// Reference-counted so a concurrent close() cannot free the fd while a
// read/write on another thread is still using it (see close() below).
private final ReferenceCountedFileDescriptor fd;
private final InputStream is;
private final OutputStream os;
/**
* Creates a Unix domain socket backed by a native file descriptor.
*/
public NGUnixDomainSocket(int fd) {
this.fd = new ReferenceCountedFileDescriptor(fd);
this.is = new NGUnixDomainSocketInputStream();
this.os = new NGUnixDomainSocketOutputStream();
}
public InputStream getInputStream() {
return is;
}
public OutputStream getOutputStream() {
return os;
}
/** Shuts down the read half of the connection via shutdown(2). */
public void shutdownInput() throws IOException {
doShutdown(NGUnixDomainSocketLibrary.SHUT_RD);
}
/** Shuts down the write half of the connection via shutdown(2). */
public void shutdownOutput() throws IOException {
doShutdown(NGUnixDomainSocketLibrary.SHUT_WR);
}
// Calls shutdown(2) with the given direction while holding a reference to
// the fd. A fd of -1 means the socket was already closed; then this is a no-op.
private void doShutdown(int how) throws IOException {
try {
int socketFd = fd.acquire();
if (socketFd != -1) {
NGUnixDomainSocketLibrary.shutdown(socketFd, how);
}
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
public void close() throws IOException {
super.close();
try {
// This might not close the FD right away. In case we are about
// to read or write on another thread, it will delay the close
// until the read or write completes, to prevent the FD from
// being re-used for a different purpose and the other thread
// reading from a different FD.
fd.close();
} catch (LastErrorException e) {
throw new IOException(e);
}
}
// Stream adapter over read(2) on the underlying fd.
private class NGUnixDomainSocketInputStream extends InputStream {
public int read() throws IOException {
ByteBuffer buf = ByteBuffer.allocate(1);
int result;
if (doRead(buf) == 0) {
// Zero bytes read is translated to the InputStream EOF convention.
result = -1;
} else {
// Make sure to & with 0xFF to avoid sign extension
result = 0xFF & buf.get();
}
return result;
}
public int read(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return 0;
}
ByteBuffer buf = ByteBuffer.wrap(b, off, len);
int result = doRead(buf);
if (result == 0) {
// Zero bytes read is translated to the InputStream EOF convention.
result = -1;
}
return result;
}
// Reads into buf, holding a reference to the fd for the duration of the
// native call; returns -1 if the socket has already been closed.
private int doRead(ByteBuffer buf) throws IOException {
try {
int fdToRead = fd.acquire();
if (fdToRead == -1) {
return -1;
}
return NGUnixDomainSocketLibrary.read(fdToRead, buf, buf.remaining());
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
}
// Stream adapter over write(2) on the underlying fd.
private class NGUnixDomainSocketOutputStream extends OutputStream {
public void write(int b) throws IOException {
ByteBuffer buf = ByteBuffer.allocate(1);
buf.put(0, (byte) (0xFF & b));
doWrite(buf);
}
public void write(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return;
}
ByteBuffer buf = ByteBuffer.wrap(b, off, len);
doWrite(buf);
}
// Writes the buffer, holding a reference to the fd for the duration of the
// native call. If the socket has already been closed (fd == -1), the write
// is silently dropped.
private void doWrite(ByteBuffer buf) throws IOException {
try {
int fdToWrite = fd.acquire();
if (fdToWrite == -1) {
return;
}
int ret = NGUnixDomainSocketLibrary.write(fdToWrite, buf, buf.remaining());
if (ret != buf.remaining()) {
// This shouldn't happen with standard blocking Unix domain sockets.
throw new IOException("Could not write " + buf.remaining() + " bytes as requested " +
"(wrote " + ret + " bytes instead)");
}
} catch (LastErrorException e) {
throw new IOException(e);
} finally {
fd.release();
}
}
}
}

View File

@ -0,0 +1,140 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGUnixDomainSocketLibrary.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import com.sun.jna.Native;
import com.sun.jna.Platform;
import com.sun.jna.Structure;
import com.sun.jna.Union;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
/**
* Utility class to bridge native Unix domain socket calls to Java using JNA.
*/
public class NGUnixDomainSocketLibrary {
// Domain/type constants passed to socket(2).
public static final int PF_LOCAL = 1;
public static final int AF_LOCAL = 1;
public static final int SOCK_STREAM = 1;
// Direction constants passed to shutdown(2).
public static final int SHUT_RD = 0;
public static final int SHUT_WR = 1;
// Utility class, do not instantiate.
private NGUnixDomainSocketLibrary() { }
// BSD platforms write a length byte at the start of struct sockaddr_un.
private static final boolean HAS_SUN_LEN =
Platform.isMac() || Platform.isFreeBSD() || Platform.isNetBSD() ||
Platform.isOpenBSD() || Platform.iskFreeBSD();
/**
* Bridges {@code struct sockaddr_un} to and from native code.
*/
public static class SockaddrUn extends Structure implements Structure.ByReference {
/**
* On BSD platforms, the {@code sun_len} and {@code sun_family} values in
* {@code struct sockaddr_un}.
*/
public static class SunLenAndFamily extends Structure {
public byte sunLen;
public byte sunFamily;
protected List getFieldOrder() {
return Arrays.asList(new String[] { "sunLen", "sunFamily" });
}
}
/**
* On BSD platforms, {@code sunLenAndFamily} will be present.
* On other platforms, only {@code sunFamily} will be present.
*/
public static class SunFamily extends Union {
public SunLenAndFamily sunLenAndFamily;
public short sunFamily;
}
public SunFamily sunFamily = new SunFamily();
// NOTE(review): 104 bytes is presumably the BSD sun_path limit (Linux
// allows 108) — socket paths must fit in this buffer; confirm if longer
// paths are ever needed.
public byte[] sunPath = new byte[104];
/**
* Constructs an empty {@code struct sockaddr_un}.
*/
public SockaddrUn() {
// Select the active union variant before JNA computes the native layout.
if (HAS_SUN_LEN) {
sunFamily.sunLenAndFamily = new SunLenAndFamily();
sunFamily.setType(SunLenAndFamily.class);
} else {
sunFamily.setType(Short.TYPE);
}
allocateMemory();
}
/**
* Constructs a {@code struct sockaddr_un} with a path whose bytes are encoded
* using the default encoding of the platform.
*
* @throws IOException if the encoded path does not fit in sun_path
*         (including the trailing NUL)
*/
public SockaddrUn(String path) throws IOException {
byte[] pathBytes = path.getBytes();
if (pathBytes.length > sunPath.length - 1) {
throw new IOException("Cannot fit name [" + path + "] in maximum unix domain socket length");
}
System.arraycopy(pathBytes, 0, sunPath, 0, pathBytes.length);
// NUL-terminate the path inside the fixed-size buffer.
sunPath[pathBytes.length] = (byte) 0;
if (HAS_SUN_LEN) {
// BSD convention: sun_len is the offset of sun_path plus the path length.
int len = fieldOffset("sunPath") + pathBytes.length;
sunFamily.sunLenAndFamily = new SunLenAndFamily();
sunFamily.sunLenAndFamily.sunLen = (byte) len;
sunFamily.sunLenAndFamily.sunFamily = AF_LOCAL;
sunFamily.setType(SunLenAndFamily.class);
} else {
sunFamily.sunFamily = AF_LOCAL;
sunFamily.setType(Short.TYPE);
}
allocateMemory();
}
protected List getFieldOrder() {
return Arrays.asList(new String[] { "sunFamily", "sunPath" });
}
}
static {
// Direct-map the native functions below against the platform C library.
Native.register(Platform.C_LIBRARY_NAME);
}
public static native int socket(int domain, int type, int protocol) throws LastErrorException;
public static native int bind(int fd, SockaddrUn address, int addressLen)
throws LastErrorException;
public static native int listen(int fd, int backlog) throws LastErrorException;
public static native int accept(int fd, SockaddrUn address, IntByReference addressLen)
throws LastErrorException;
public static native int read(int fd, ByteBuffer buffer, int count)
throws LastErrorException;
public static native int write(int fd, ByteBuffer buffer, int count)
throws LastErrorException;
public static native int close(int fd) throws LastErrorException;
public static native int shutdown(int fd, int how) throws LastErrorException;
}

View File

@ -0,0 +1,90 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeLibrary.java
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import java.nio.ByteBuffer;
import com.sun.jna.*;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.platform.win32.WinNT.*;
import com.sun.jna.platform.win32.WinBase.*;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.win32.W32APIOptions;
/**
 * JNA binding for the subset of kernel32 named-pipe functions used by the
 * NGWin32NamedPipe* classes.
 */
public interface NGWin32NamedPipeLibrary extends WinNT {
// Mirrors the Win32 constant PIPE_ACCESS_DUPLEX (0x00000003).
int PIPE_ACCESS_DUPLEX = 3;
// Mirrors the Win32 constant PIPE_UNLIMITED_INSTANCES (255).
int PIPE_UNLIMITED_INSTANCES = 255;
// Mirrors the Win32 constant FILE_FLAG_FIRST_PIPE_INSTANCE (0x00080000):
// CreateNamedPipe fails if the pipe name already exists.
int FILE_FLAG_FIRST_PIPE_INSTANCE = 524288;
NGWin32NamedPipeLibrary INSTANCE =
(NGWin32NamedPipeLibrary) Native.loadLibrary(
"kernel32",
NGWin32NamedPipeLibrary.class,
W32APIOptions.UNICODE_OPTIONS);
HANDLE CreateNamedPipe(
String lpName,
int dwOpenMode,
int dwPipeMode,
int nMaxInstances,
int nOutBufferSize,
int nInBufferSize,
int nDefaultTimeOut,
SECURITY_ATTRIBUTES lpSecurityAttributes);
boolean ConnectNamedPipe(
HANDLE hNamedPipe,
Pointer lpOverlapped);
boolean DisconnectNamedPipe(
HANDLE hObject);
boolean ReadFile(
HANDLE hFile,
Memory lpBuffer,
int nNumberOfBytesToRead,
IntByReference lpNumberOfBytesRead,
Pointer lpOverlapped);
boolean WriteFile(
HANDLE hFile,
ByteBuffer lpBuffer,
int nNumberOfBytesToWrite,
IntByReference lpNumberOfBytesWritten,
Pointer lpOverlapped);
boolean CloseHandle(
HANDLE hObject);
boolean GetOverlappedResult(
HANDLE hFile,
Pointer lpOverlapped,
IntByReference lpNumberOfBytesTransferred,
boolean wait);
boolean CancelIoEx(
HANDLE hObject,
Pointer lpOverlapped);
HANDLE CreateEvent(
SECURITY_ATTRIBUTES lpEventAttributes,
boolean bManualReset,
boolean bInitialState,
String lpName);
int WaitForSingleObject(
HANDLE hHandle,
int dwMilliseconds
);
int GetLastError();
}

View File

@ -0,0 +1,173 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeServerSocket.java
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.platform.win32.WinBase;
import com.sun.jna.platform.win32.WinError;
import com.sun.jna.platform.win32.WinNT;
import com.sun.jna.platform.win32.WinNT.HANDLE;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
/**
 * A {@link ServerSocket} implemented on top of Win32 named pipes.
 *
 * Each call to {@link #accept()} creates a fresh pipe instance for the given
 * path and waits (via an overlapped ConnectNamedPipe) for a client to connect.
 * A companion "_lock" pipe is created eagerly in the constructor so that a
 * second server on the same path fails fast.
 */
public class NGWin32NamedPipeServerSocket extends ServerSocket {
private static final NGWin32NamedPipeLibrary API = NGWin32NamedPipeLibrary.INSTANCE;
// All named pipes live under this namespace prefix on Windows.
private static final String WIN32_PIPE_PREFIX = "\\\\.\\pipe\\";
private static final int BUFFER_SIZE = 65535;
// Pipe instances created but not yet connected to a client.
private final LinkedBlockingQueue<HANDLE> openHandles;
// Pipe instances currently connected to a client.
private final LinkedBlockingQueue<HANDLE> connectedHandles;
// Passed to each NGWin32NamedPipeSocket so its close() releases the handle here.
private final NGWin32NamedPipeSocket.CloseCallback closeCallback;
private final String path;
private final int maxInstances;
// Handle of the "_lock" pipe used to detect a second server on the same path.
private final HANDLE lockHandle;
public NGWin32NamedPipeServerSocket(String path) throws IOException {
this(NGWin32NamedPipeLibrary.PIPE_UNLIMITED_INSTANCES, path);
}
public NGWin32NamedPipeServerSocket(int maxInstances, String path) throws IOException {
this.openHandles = new LinkedBlockingQueue<>();
this.connectedHandles = new LinkedBlockingQueue<>();
this.closeCallback = handle -> {
if (connectedHandles.remove(handle)) {
closeConnectedPipe(handle, false);
}
if (openHandles.remove(handle)) {
closeOpenPipe(handle);
}
};
this.maxInstances = maxInstances;
// Normalize the path into the \\.\pipe\ namespace if needed.
if (!path.startsWith(WIN32_PIPE_PREFIX)) {
this.path = WIN32_PIPE_PREFIX + path;
} else {
this.path = path;
}
// FILE_FLAG_FIRST_PIPE_INSTANCE makes this fail if another process already
// holds the lock pipe, i.e. another server is bound to the same path.
String lockPath = this.path + "_lock";
lockHandle = API.CreateNamedPipe(
lockPath,
NGWin32NamedPipeLibrary.FILE_FLAG_FIRST_PIPE_INSTANCE | NGWin32NamedPipeLibrary.PIPE_ACCESS_DUPLEX,
0,
1,
BUFFER_SIZE,
BUFFER_SIZE,
0,
null);
if (lockHandle == NGWin32NamedPipeLibrary.INVALID_HANDLE_VALUE) {
throw new IOException(String.format("Could not create lock for %s, error %d", lockPath, API.GetLastError()));
} else {
if (!API.DisconnectNamedPipe(lockHandle)) {
throw new IOException(String.format("Could not disconnect lock %d", API.GetLastError()));
}
}
}
/** Always fails: the pipe path must be supplied to the constructor. */
public void bind(SocketAddress endpoint) throws IOException {
throw new IOException("Win32 named pipes do not support bind(), pass path to constructor");
}
/**
* Creates a new pipe instance and blocks until a client connects to it,
* returning the connection as an {@link NGWin32NamedPipeSocket}.
*/
public Socket accept() throws IOException {
HANDLE handle = API.CreateNamedPipe(
path,
NGWin32NamedPipeLibrary.PIPE_ACCESS_DUPLEX | WinNT.FILE_FLAG_OVERLAPPED,
0,
maxInstances,
BUFFER_SIZE,
BUFFER_SIZE,
0,
null);
if (handle == NGWin32NamedPipeLibrary.INVALID_HANDLE_VALUE) {
throw new IOException(String.format("Could not create named pipe, error %d", API.GetLastError()));
}
openHandles.add(handle);
// Overlapped connect: wait on an event rather than blocking in the API call.
HANDLE connWaitable = API.CreateEvent(null, true, false, null);
WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
olap.hEvent = connWaitable;
olap.write();
boolean immediate = API.ConnectNamedPipe(handle, olap.getPointer());
if (immediate) {
openHandles.remove(handle);
connectedHandles.add(handle);
return new NGWin32NamedPipeSocket(handle, closeCallback);
}
int connectError = API.GetLastError();
if (connectError == WinError.ERROR_PIPE_CONNECTED) {
// Client connected between CreateNamedPipe() and ConnectNamedPipe().
openHandles.remove(handle);
connectedHandles.add(handle);
return new NGWin32NamedPipeSocket(handle, closeCallback);
} else if (connectError == WinError.ERROR_NO_DATA) {
// The client connected and disconnected between CreateNamedPipe() and
// ConnectNamedPipe(). The pipe is broken, but it is returned anyway to
// avoid looping here; the actual error will surface when the session
// tries to read from or write to the pipe.
return new NGWin32NamedPipeSocket(handle, closeCallback);
} else if (connectError == WinError.ERROR_IO_PENDING) {
// Connect is in flight; block until it completes.
if (!API.GetOverlappedResult(handle, olap.getPointer(), new IntByReference(), true)) {
openHandles.remove(handle);
closeOpenPipe(handle);
throw new IOException("GetOverlappedResult() failed for connect operation: " + API.GetLastError());
}
openHandles.remove(handle);
connectedHandles.add(handle);
return new NGWin32NamedPipeSocket(handle, closeCallback);
} else {
throw new IOException("ConnectNamedPipe() failed with: " + connectError);
}
}
/**
* Closes all open and connected pipe instances, then releases the lock pipe.
*/
public void close() throws IOException {
try {
List<HANDLE> handlesToClose = new ArrayList<>();
openHandles.drainTo(handlesToClose);
for (HANDLE handle : handlesToClose) {
closeOpenPipe(handle);
}
List<HANDLE> handlesToDisconnect = new ArrayList<>();
connectedHandles.drainTo(handlesToDisconnect);
for (HANDLE handle : handlesToDisconnect) {
closeConnectedPipe(handle, true);
}
} finally {
API.CloseHandle(lockHandle);
}
}
// Cancels any pending overlapped I/O on the handle before closing it.
private void closeOpenPipe(HANDLE handle) throws IOException {
API.CancelIoEx(handle, null);
API.CloseHandle(handle);
}
// Disconnects and closes a connected pipe. When not shutting down, waits up
// to 10s for outstanding activity on the handle first.
private void closeConnectedPipe(HANDLE handle, boolean shutdown) throws IOException {
if (!shutdown) {
API.WaitForSingleObject(handle, 10000);
}
API.DisconnectNamedPipe(handle);
API.CloseHandle(handle);
}
}

View File

@ -0,0 +1,172 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/NGWin32NamedPipeSocket.java
// Made change in `read` to read just the amount of bytes available.
/*
Copyright 2004-2017, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.Memory;
import com.sun.jna.platform.win32.WinBase;
import com.sun.jna.platform.win32.WinError;
import com.sun.jna.platform.win32.WinNT.HANDLE;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.ByteBuffer;
/**
 * Implements a {@link Socket} backed by a Win32 named-pipe handle.
 *
 * Reads and writes use overlapped (asynchronous) ReadFile/WriteFile calls that
 * are immediately waited on with GetOverlappedResult, so the streams behave
 * synchronously to callers.
 */
public class NGWin32NamedPipeSocket extends Socket {
  private static final NGWin32NamedPipeLibrary API = NGWin32NamedPipeLibrary.INSTANCE;
  private final HANDLE handle;
  // Invoked from close() so the owning server socket can release the handle.
  private final CloseCallback closeCallback;
  private final InputStream is;
  private final OutputStream os;
  // Manual-reset events used as the hEvent of the OVERLAPPED structures for
  // reads and writes respectively.
  private final HANDLE readerWaitable;
  private final HANDLE writerWaitable;
  interface CloseCallback {
    void onNamedPipeSocketClose(HANDLE handle) throws IOException;
  }
  /**
   * Wraps an already-connected pipe handle.
   *
   * @throws IOException if either waitable event cannot be created
   */
  public NGWin32NamedPipeSocket(
      HANDLE handle,
      NGWin32NamedPipeSocket.CloseCallback closeCallback) throws IOException {
    this.handle = handle;
    this.closeCallback = closeCallback;
    this.readerWaitable = API.CreateEvent(null, true, false, null);
    if (readerWaitable == null) {
      throw new IOException("CreateEvent() failed ");
    }
    writerWaitable = API.CreateEvent(null, true, false, null);
    if (writerWaitable == null) {
      throw new IOException("CreateEvent() failed ");
    }
    this.is = new NGWin32NamedPipeSocketInputStream(handle);
    this.os = new NGWin32NamedPipeSocketOutputStream(handle);
  }
  @Override
  public InputStream getInputStream() {
    return is;
  }
  @Override
  public OutputStream getOutputStream() {
    return os;
  }
  @Override
  public void close() throws IOException {
    // Delegate to the server socket, which disconnects and closes the handle.
    closeCallback.onNamedPipeSocketClose(handle);
  }
  @Override
  public void shutdownInput() throws IOException {
    // Named pipes have no half-close; intentionally a no-op.
  }
  @Override
  public void shutdownOutput() throws IOException {
    // Named pipes have no half-close; intentionally a no-op.
  }
  private class NGWin32NamedPipeSocketInputStream extends InputStream {
    private final HANDLE handle;
    NGWin32NamedPipeSocketInputStream(HANDLE handle) {
      this.handle = handle;
    }
    @Override
    public int read() throws IOException {
      int result;
      byte[] b = new byte[1];
      if (read(b) == 0) {
        result = -1;
      } else {
        result = 0xFF & b[0];
      }
      return result;
    }
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      // Per the InputStream contract, a zero-length read returns 0 without
      // touching the pipe. This also avoids new Memory(0), whose constructor
      // rejects non-positive allocation sizes.
      if (len == 0) {
        return 0;
      }
      Memory readBuffer = new Memory(len);
      WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
      olap.hEvent = readerWaitable;
      olap.write();
      boolean immediate = API.ReadFile(handle, readBuffer, len, null, olap.getPointer());
      if (!immediate) {
        int lastError = API.GetLastError();
        if (lastError != WinError.ERROR_IO_PENDING) {
          throw new IOException("ReadFile() failed: " + lastError);
        }
      }
      // Block until the overlapped read finishes and fetch the byte count.
      IntByReference read = new IntByReference();
      if (!API.GetOverlappedResult(handle, olap.getPointer(), read, true)) {
        int lastError = API.GetLastError();
        throw new IOException("GetOverlappedResult() failed for read operation: " + lastError);
      }
      // Copy just the bytes actually read into the caller's array.
      // NOTE(review): a 0-byte result is returned as 0, not -1; the
      // single-byte read() above treats 0 as end-of-stream.
      int actualLen = read.getValue();
      byte[] byteArray = readBuffer.getByteArray(0, actualLen);
      System.arraycopy(byteArray, 0, b, off, actualLen);
      return actualLen;
    }
  }
  private class NGWin32NamedPipeSocketOutputStream extends OutputStream {
    private final HANDLE handle;
    NGWin32NamedPipeSocketOutputStream(HANDLE handle) {
      this.handle = handle;
    }
    @Override
    public void write(int b) throws IOException {
      write(new byte[]{(byte) (0xFF & b)});
    }
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      ByteBuffer data = ByteBuffer.wrap(b, off, len);
      WinBase.OVERLAPPED olap = new WinBase.OVERLAPPED();
      olap.hEvent = writerWaitable;
      olap.write();
      boolean immediate = API.WriteFile(handle, data, len, null, olap.getPointer());
      if (!immediate) {
        int lastError = API.GetLastError();
        if (lastError != WinError.ERROR_IO_PENDING) {
          throw new IOException("WriteFile() failed: " + lastError);
        }
      }
      // Block until the overlapped write finishes and verify the byte count.
      IntByReference written = new IntByReference();
      if (!API.GetOverlappedResult(handle, olap.getPointer(), written, true)) {
        int lastError = API.GetLastError();
        throw new IOException("GetOverlappedResult() failed for write operation: " + lastError);
      }
      if (written.getValue() != len) {
        throw new IOException("WriteFile() wrote less bytes than requested");
      }
    }
  }
}

View File

@ -0,0 +1,82 @@
// Copied from https://github.com/facebook/nailgun/blob/af623fddedfdca010df46302a0711ce0e2cc1ba6/nailgun-server/src/main/java/com/martiansoftware/nailgun/ReferenceCountedFileDescriptor.java
/*
Copyright 2004-2015, Martian Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sbt.internal;
import com.sun.jna.LastErrorException;
import java.io.IOException;
/**
* Encapsulates a file descriptor plus a reference count to ensure close requests
* only close the file descriptor once the last reference to the file descriptor
* is released.
*
* If not explicitly closed, the file descriptor will be closed when
* this object is finalized.
*/
public class ReferenceCountedFileDescriptor {
// -1 once the descriptor has actually been closed.
private int fd;
// Number of outstanding acquire() calls without a matching release().
private int fdRefCount;
// True when close() was requested while references were still held.
private boolean closePending;
public ReferenceCountedFileDescriptor(int fd) {
this.fd = fd;
this.fdRefCount = 0;
this.closePending = false;
}
// NOTE(review): finalize() is deprecated since Java 9; consider
// java.lang.ref.Cleaner if this class is ever modernized.
protected void finalize() throws IOException {
close();
}
/**
* Takes a reference to the descriptor and returns it; may return -1 if the
* descriptor was already closed. Callers must pair this with release().
*/
public synchronized int acquire() {
fdRefCount++;
return fd;
}
/**
* Drops a reference; performs the deferred close if this was the last
* reference and a close was requested while it was held.
*/
public synchronized void release() throws IOException {
fdRefCount--;
if (fdRefCount == 0 && closePending && fd != -1) {
doClose();
}
}
/**
* Closes the descriptor now if unreferenced, otherwise marks it for close
* when the last reference is released. Idempotent.
*/
public synchronized void close() throws IOException {
if (fd == -1 || closePending) {
return;
}
if (fdRefCount == 0) {
doClose();
} else {
// Another thread has the FD. We'll close it when they release the reference.
closePending = true;
}
}
// Performs the actual close(2) and invalidates the stored descriptor.
private void doClose() throws IOException {
try {
NGUnixDomainSocketLibrary.close(fd);
fd = -1;
} catch (LastErrorException e) {
throw new IOException(e);
}
}
}

View File

@ -123,14 +123,17 @@ $HelpCommand <regular expression>
def RebootCommand = "reboot"
def RebootDetailed =
RebootCommand + """ [full]
RebootCommand + """ [dev | full]
This command is equivalent to exiting sbt, restarting, and running the
remaining commands with the exception that the JVM is not shut down.
If 'full' is specified, the boot directory (`~/.sbt/boot` by default)
is deleted before restarting. This forces an update of sbt and Scala
and is useful when working with development versions of sbt or Scala."""
If 'dev' is specified, the current sbt artifacts from the boot directory
(`~/.sbt/boot` by default) are deleted before restarting.
This forces an update of sbt and Scala, which is useful when working with development
versions of sbt.
If 'full' is specified, the boot directory is wiped out before restarting.
"""
def Multi = ";"
def MultiBrief =
@ -197,7 +200,7 @@ $AliasCommand name=
deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure)
def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall)
private[this] def deprecatedAlias(oldName: String, newName: String): String =
s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in 0.14.0"
s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in a later version"
}
def FailureWall = "resumeFromFailure"

View File

@ -185,10 +185,19 @@ object BasicCommands {
}
def reboot: Command =
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootParser)((s, full) =>
s reboot full)
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootOptionParser) {
case (s, (full, currentOnly)) =>
s.reboot(full, currentOnly)
}
def rebootParser(s: State): Parser[Boolean] = token(Space ~> "full" ^^^ true) ?? false
@deprecated("Use rebootOptionParser", "1.1.0")
def rebootParser(s: State): Parser[Boolean] =
rebootOptionParser(s) map { case (full, currentOnly) => full }
private[sbt] def rebootOptionParser(s: State): Parser[(Boolean, Boolean)] =
token(
Space ~> (("full" ^^^ ((true, false))) |
("dev" ^^^ ((false, true))))) ?? ((false, false))
def call: Command =
Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) {

View File

@ -33,6 +33,11 @@ object BasicKeys {
"Method of authenticating server command.",
10000)
val serverConnectionType =
AttributeKey[ConnectionType]("serverConnectionType",
"The wire protocol for the server command.",
10000)
private[sbt] val interactive = AttributeKey[Boolean](
"interactive",
"True if commands are currently being entered from an interactive environment.",

View File

@ -25,6 +25,11 @@ sealed trait Command {
def tags: AttributeMap
def tag[T](key: AttributeKey[T], value: T): Command
def nameOption: Option[String] = this match {
case sc: SimpleCommand => Some(sc.name)
case _ => None
}
}
private[sbt] final class SimpleCommand(

View File

@ -89,6 +89,17 @@ trait StateOps {
*/
def reboot(full: Boolean): State
/**
* Reboots sbt. A reboot restarts execution from the entry point of the launcher.
* A reboot is designed to be as close as possible to actually restarting the JVM without actually doing so.
* Because the JVM is not restarted, JVM exit hooks are not run.
* State.exitHooks should be used instead and those will be run before rebooting.
* If `full` is true, the boot directory is deleted before starting again.
* If `currentOnly` is true, the artifacts for the current sbt version is deleted.
* This command is currently implemented to not return, but may be implemented in the future to only reboot at the next command processing step.
*/
private[sbt] def reboot(full: Boolean, currentOnly: Boolean): State
/** Sets the next command processing action to do.*/
def setNext(n: State.Next): State
@ -248,12 +259,18 @@ object State {
def baseDir: File = s.configuration.baseDirectory
def setNext(n: Next) = s.copy(next = n)
def continue = setNext(Continue)
def reboot(full: Boolean) = {
runExitHooks();
throw new xsbti.FullReload(
(s.remainingCommands map { case e: Exec => e.commandLine }).toArray,
full)
/** Implementation of reboot. */
def reboot(full: Boolean): State = reboot(full, false)
/** Implementation of reboot. */
private[sbt] def reboot(full: Boolean, currentOnly: Boolean): State = {
runExitHooks()
val rs = s.remainingCommands map { case e: Exec => e.commandLine }
if (currentOnly) throw new RebootCurrent(rs)
else throw new xsbti.FullReload(rs.toArray, full)
}
def reload = runExitHooks().setNext(new Return(defaultReload(s)))
def clearGlobalLog = setNext(ClearGlobalLog)
def keepLastLog = setNext(KeepLastLog)
@ -320,3 +337,5 @@ object State {
private[sbt] def getBoolean(s: State, key: AttributeKey[Boolean], default: Boolean): Boolean =
s.get(key) getOrElse default
}
private[sbt] final class RebootCurrent(val arguments: List[String]) extends RuntimeException

View File

@ -9,7 +9,8 @@ package sbt
package internal
package client
import java.net.{ URI, Socket, InetAddress, SocketException }
import java.io.IOException
import java.net.{ URI, Socket, InetAddress }
import java.util.UUID
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
import scala.collection.mutable.ListBuffer
@ -111,7 +112,7 @@ class NetworkClient(arguments: List[String]) { self =>
try {
connection.publish(bytes)
} catch {
case _: SocketException =>
case _: IOException =>
// log.debug(e.getMessage)
// toDel += client
}

View File

@ -17,13 +17,14 @@ import java.security.SecureRandom
import java.math.BigInteger
import scala.concurrent.{ Future, Promise }
import scala.util.{ Try, Success, Failure }
import sbt.internal.util.ErrorHandling
import sbt.internal.protocol.{ PortFile, TokenFile }
import sbt.util.Logger
import sbt.io.IO
import sbt.io.syntax._
import sjsonnew.support.scalajson.unsafe.{ Converter, CompactPrinter }
import sbt.internal.protocol.codec._
import sbt.internal.util.ErrorHandling
import sbt.internal.util.Util.isWindows
private[sbt] sealed trait ServerInstance {
def shutdown(): Unit
@ -38,31 +39,37 @@ private[sbt] object Server {
with TokenFileFormats
object JsonProtocol extends JsonProtocol
def start(host: String,
port: Int,
def start(connection: ServerConnection,
onIncomingSocket: (Socket, ServerInstance) => Unit,
auth: Set[ServerAuthentication],
portfile: File,
tokenfile: File,
log: Logger): ServerInstance =
new ServerInstance { self =>
import connection._
val running = new AtomicBoolean(false)
val p: Promise[Unit] = Promise[Unit]()
val ready: Future[Unit] = p.future
private[this] val rand = new SecureRandom
private[this] var token: String = nextToken
private[this] var serverSocketOpt: Option[ServerSocket] = None
val serverThread = new Thread("sbt-socket-server") {
override def run(): Unit = {
Try {
ErrorHandling.translate(s"server failed to start on $host:$port. ") {
new ServerSocket(port, 50, InetAddress.getByName(host))
ErrorHandling.translate(s"server failed to start on ${connection.shortName}. ") {
connection.connectionType match {
case ConnectionType.Local if isWindows =>
new NGWin32NamedPipeServerSocket(pipeName)
case ConnectionType.Local =>
prepareSocketfile()
new NGUnixDomainServerSocket(socketfile.getAbsolutePath)
case ConnectionType.Tcp => new ServerSocket(port, 50, InetAddress.getByName(host))
}
}
} match {
case Failure(e) => p.failure(e)
case Success(serverSocket) =>
serverSocket.setSoTimeout(5000)
log.info(s"sbt server started at $host:$port")
serverSocketOpt = Option(serverSocket)
log.info(s"sbt server started at ${connection.shortName}")
writePortfile()
running.set(true)
p.success(())
@ -74,6 +81,7 @@ private[sbt] object Server {
case _: SocketTimeoutException => // its ok
}
}
serverSocket.close()
}
}
}
@ -106,7 +114,7 @@ private[sbt] object Server {
private[this] def writeTokenfile(): Unit = {
import JsonProtocol._
val uri = s"tcp://$host:$port"
val uri = connection.shortName
val t = TokenFile(uri, token)
val jsonToken = Converter.toJson(t).get
@ -141,7 +149,7 @@ private[sbt] object Server {
private[this] def writePortfile(): Unit = {
import JsonProtocol._
val uri = s"tcp://$host:$port"
val uri = connection.shortName
val p =
auth match {
case _ if auth(ServerAuthentication.Token) =>
@ -153,5 +161,32 @@ private[sbt] object Server {
val json = Converter.toJson(p).get
IO.write(portfile, CompactPrinter(json))
}
// Ensures a clean location for the unix-domain socket: removes any stale
// socket file left by a previous server instance, then makes sure the
// parent directory exists so the socket can be created.
private[sbt] def prepareSocketfile(): Unit = {
  if (socketfile.exists) IO.delete(socketfile)
  IO.createDirectory(socketfile.getParentFile)
}
}
}
/**
 * Describes how the sbt server listens for clients: a local transport
 * (named pipe on Windows, unix-domain socket elsewhere) or a plain TCP socket.
 * Carries everything Server.start needs: endpoint details, authentication
 * modes, and the port/token/socket file locations.
 */
private[sbt] case class ServerConnection(
    connectionType: ConnectionType,
    host: String,
    port: Int,
    auth: Set[ServerAuthentication],
    portfile: File,
    tokenfile: File,
    socketfile: File,
    pipeName: String
) {
  /** Human-readable endpoint string, also written to the port/token files. */
  def shortName: String =
    connectionType match {
      case ConnectionType.Tcp => s"tcp://$host:$port"
      case ConnectionType.Local =>
        // Windows uses named pipes; other platforms use unix-domain sockets.
        if (isWindows) s"local:$pipeName" else s"local://$socketfile"
      // case ConnectionType.Ssh => s"ssh://$host:$port"
    }
}

View File

@ -21,9 +21,7 @@ import scala.util.control.NonFatal
*/
private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskKey, Referenced]) {
private[this] val map = referenced.mapValues(toValue)
private[this] def toValue = new (Referenced ~> ReferencedValue) {
def apply[T](x: Referenced[T]) = new ReferencedValue(x)
}
private[this] def toValue = λ[Referenced ~> ReferencedValue](new ReferencedValue(_))
private[this] final class ReferencedValue[T](referenced: Referenced[T]) {
import referenced.{ stamped, task }

View File

@ -434,14 +434,10 @@ object Scoped {
protected def convert[M[_], Ret](f: Fun[M, Ret]): K[M] => Ret
private[this] val inputs: K[App] =
a.transform(
keys,
new (ScopedTaskable ~> App) { def apply[T](in: ScopedTaskable[T]): App[T] = in.toTask }
)
private[this] val inputs: K[App] = a.transform(keys, λ[ScopedTaskable ~> App](_.toTask))
private[this] def onTasks[T](f: K[Task] => Task[T]): App[T] =
Def.app[({ type l[L[x]] = K[(L Task)#l] })#l, Task[T]](inputs)(f)(AList.asplit[K, Task](a))
Def.app[λ[L[x] => K[(L Task)#l]], Task[T]](inputs)(f)(AList.asplit[K, Task](a))
def flatMap[T](f: Fun[Id, Task[T]]): App[T] = onTasks(_.flatMap(convert(f)))
def flatMapR[T](f: Fun[Result, Task[T]]): App[T] = onTasks(_.flatMapR(convert(f)))

View File

@ -26,7 +26,7 @@ import sbt.internal.librarymanagement.mavenint.{
PomExtraDependencyAttributes,
SbtPomExtraProperties
}
import sbt.internal.server.LanguageServerReporter
import sbt.internal.server.{ LanguageServerReporter, Definition }
import sbt.internal.testing.TestLogger
import sbt.internal.util._
import sbt.internal.util.Attributed.data
@ -272,7 +272,12 @@ object Defaults extends BuildCommon {
serverPort := 5000 + (Hash
.toHex(Hash(appConfiguration.value.baseDirectory.toString))
.## % 1000),
serverAuthentication := Set(ServerAuthentication.Token),
serverConnectionType := ConnectionType.Local,
serverAuthentication := {
if (serverConnectionType.value == ConnectionType.Tcp) Set(ServerAuthentication.Token)
else Set()
},
insideCI :== sys.env.contains("BUILD_NUMBER") || sys.env.contains("CI"),
))
def defaultTestTasks(key: Scoped): Seq[Setting[_]] =
@ -322,11 +327,13 @@ object Defaults extends BuildCommon {
excludeFilter in unmanagedSources).value,
watchSources in ConfigGlobal ++= {
val baseDir = baseDirectory.value
val bases = unmanagedSourceDirectories.value ++ (if (sourcesInBase.value) Seq(baseDir)
else Seq.empty)
val bases = unmanagedSourceDirectories.value
val include = (includeFilter in unmanagedSources).value
val exclude = (excludeFilter in unmanagedSources).value
bases.map(b => new Source(b, include, exclude))
val baseSources =
if (sourcesInBase.value) Seq(new Source(baseDir, include, exclude, recursive = false))
else Nil
bases.map(b => new Source(b, include, exclude)) ++ baseSources
},
managedSourceDirectories := Seq(sourceManaged.value),
managedSources := generate(sourceGenerators).value,
@ -493,6 +500,7 @@ object Defaults extends BuildCommon {
},
compileIncSetup := compileIncSetupTask.value,
console := consoleTask.value,
collectAnalyses := Definition.collectAnalysesTask.value,
consoleQuick := consoleQuickTask.value,
discoveredMainClasses := (compile map discoverMainClasses storeAs discoveredMainClasses xtriggeredBy compile).value,
discoveredSbtPlugins := discoverSbtPluginNames.value,
@ -771,21 +779,24 @@ object Defaults extends BuildCommon {
}
def intlStamp(c: String, analysis: Analysis, s: Set[String]): Long = {
if (s contains c) Long.MinValue
else {
val x = {
import analysis.{ relations => rel, apis }
rel.internalClassDeps(c).map(intlStamp(_, analysis, s + c)) ++
rel.externalDeps(c).map(stamp) +
(apis.internal.get(c) match {
case Some(x) => x.compilationTimestamp
case _ => Long.MinValue
})
}.max
if (x != Long.MinValue) {
stamps(c) = x
}
x
}
else
stamps.getOrElse(
c, {
val x = {
import analysis.{ relations => rel, apis }
rel.internalClassDeps(c).map(intlStamp(_, analysis, s + c)) ++
rel.externalDeps(c).map(stamp) +
(apis.internal.get(c) match {
case Some(x) => x.compilationTimestamp
case _ => Long.MinValue
})
}.max
if (x != Long.MinValue) {
stamps(c) = x
}
x
}
)
}
def noSuccessYet(test: String) = succeeded.get(test) match {
case None => true
@ -1749,12 +1760,10 @@ object Classpaths {
dependencyOverrides :== Vector.empty,
libraryDependencies :== Nil,
excludeDependencies :== Nil,
ivyLoggingLevel :== {
// This will suppress "Resolving..." logs on Jenkins and Travis.
if (sys.env.get("BUILD_NUMBER").isDefined || sys.env.get("CI").isDefined)
UpdateLogging.Quiet
else UpdateLogging.Default
},
ivyLoggingLevel := (// This will suppress "Resolving..." logs on Jenkins and Travis.
if (insideCI.value)
UpdateLogging.Quiet
else UpdateLogging.Default),
ivyXML :== NodeSeq.Empty,
ivyValidate :== false,
moduleConfigurations :== Nil,

View File

@ -133,6 +133,8 @@ object Keys {
val serverPort = SettingKey(BasicKeys.serverPort)
val serverHost = SettingKey(BasicKeys.serverHost)
val serverAuthentication = SettingKey(BasicKeys.serverAuthentication)
val serverConnectionType = SettingKey(BasicKeys.serverConnectionType)
val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
val watch = SettingKey(BasicKeys.watch)
val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting)
@ -445,6 +447,10 @@ object Keys {
val skip = taskKey[Boolean]("For tasks that support it (currently only 'compile' and 'update'), setting skip to true will force the task to not to do its work. This exact semantics may vary by task.").withRank(BSetting)
val templateResolverInfos = settingKey[Seq[TemplateResolverInfo]]("Template resolvers used for 'new'.").withRank(BSetting)
val interactionService = taskKey[InteractionService]("Service used to ask for user input through the current user interface(s).").withRank(CTask)
val insideCI = SettingKey[Boolean]("insideCI", "Determines if the SBT is running in a Continuous Integration environment", AMinusSetting)
// sbt server internal
val collectAnalyses = taskKey[Unit]("Collect analysis file locations for later use.")
// special
val sessionVars = AttributeKey[SessionVar.Map]("sessionVars", "Bindings that exist for the duration of the session.", Invisible)

View File

@ -56,7 +56,8 @@ import StandardMain._
import java.io.{ File, IOException }
import java.net.URI
import java.util.Locale
import java.util.{ Locale, Properties }
import scala.util.control.NonFatal
import BasicCommandStrings.{ Shell, TemplateCommand }
import CommandStrings.BootCommand
@ -100,6 +101,9 @@ final class ConsoleMain extends xsbti.AppMain {
object StandardMain {
private[sbt] lazy val exchange = new CommandExchange()
import scalacache._
import scalacache.caffeine._
private[sbt] val cache: Cache[Any] = CaffeineCache[Any]
def runManaged(s: State): xsbti.MainResult = {
val previous = TrapExit.installManager()
@ -131,6 +135,9 @@ object StandardMain {
Exec(x, None)
}
val initAttrs = BuiltinCommands.initialAttributes
import scalacache.modes.scalaFuture._
import scala.concurrent.ExecutionContext.Implicits.global
cache.removeAll()
val s = State(
configuration,
initialDefinitions,
@ -156,7 +163,7 @@ import TemplateCommandUtil.templateCommand
object BuiltinCommands {
def initialAttributes = AttributeMap.empty
import BasicCommands.exit
def ConsoleCommands: Seq[Command] =
Seq(ignore, exit, IvyConsole.command, setLogLevel, early, act, nop)
@ -672,7 +679,26 @@ object BuiltinCommands {
def loadProjectImpl: Command =
Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
// Warns when the sbt version pinned in project/build.properties differs from
// the version of the running launcher, telling the user to `reboot` so the
// declared version takes effect. Does nothing if the file or key is absent.
def checkSBTVersionChanged(state: State): Unit = {
  import sbt.io.syntax._
  val app = state.configuration.provider
  val buildProps = state.baseDir / "project" / "build.properties"
  // Read sbt.version from build.properties when the file exists.
  val declaredVersion =
    if (!buildProps.exists) None
    else {
      val props = new Properties()
      IO.load(props, buildProps)
      Option(props.getProperty("sbt.version"))
    }
  for (version <- declaredVersion if version != app.id.version()) {
    state.log.warn(
      s"""sbt version mismatch, current: ${app.id.version()}, in build.properties: "$version", use 'reboot' to use the new value.""")
  }
}
def doLoadProject(s0: State, action: LoadAction.Value): State = {
checkSBTVersionChanged(s0)
val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
IO.createDirectory(base)
val s = if (s1 has Keys.stateCompilerCache) s1 else registerCompilerCache(s1)

View File

@ -7,11 +7,12 @@
package sbt
import java.util.Properties
import scala.annotation.tailrec
import scala.util.control.NonFatal
import jline.TerminalFactory
import sbt.io.Using
import sbt.io.{ IO, Using }
import sbt.internal.util.{ ErrorHandling, GlobalLogBacking }
import sbt.internal.util.complete.DefaultParsers
import sbt.util.Logger
@ -58,6 +59,10 @@ object MainLoop {
case e: xsbti.FullReload =>
deleteLastLog(logBacking)
throw e // pass along a reboot request
case e: RebootCurrent =>
deleteLastLog(logBacking)
deleteCurrentArtifacts(state)
throw new xsbti.FullReload(e.arguments.toArray, false)
case NonFatal(e) =>
System.err.println(
"sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file)
@ -69,6 +74,28 @@ object MainLoop {
def deleteLastLog(logBacking: GlobalLogBacking): Unit =
logBacking.last.foreach(_.delete())
/**
 * Deletes the current sbt version's artifact directories from the launcher
 * boot directory, so the next launch re-fetches them. Invoked on the
 * RebootCurrent path (see the catch clause in runAndClearLast above).
 *
 * The version removed is the one pinned in project/build.properties when
 * present, otherwise the running application's own version.
 */
private[sbt] def deleteCurrentArtifacts(state: State): Unit = {
import sbt.io.syntax._
val provider = state.configuration.provider
val appId = provider.id
// If we can obtain boot directory more accurately it'd be better.
// NOTE(review): assumes boot lives under the default global base; a custom
// boot directory configured elsewhere would not be covered — confirm.
val defaultBoot = BuildPaths.defaultGlobalBase / "boot"
val buildProps = state.baseDir / "project" / "build.properties"
// First try reading the sbt version from build.properties file.
val sbtVersionOpt = if (buildProps.exists) {
val buildProperties = new Properties()
IO.load(buildProperties, buildProps)
Option(buildProperties.getProperty("sbt.version"))
} else None
val sbtVersion = sbtVersionOpt.getOrElse(appId.version)
// Glob: <boot>/*/<groupID>/<name>/<version> — one directory per matching layout.
val currentArtDirs = defaultBoot * "*" / appId.groupID / appId.name / sbtVersion
currentArtDirs.get foreach { dir =>
state.log.info(s"Deleting $dir")
IO.delete(dir)
}
}
/** Runs the next sequence of commands with global logging in place. */
def runWithNewLog(state: State, logBacking: GlobalLogBacking): RunNext =
Using.fileWriter(append = true)(logBacking.file) { writer =>
@ -109,6 +136,7 @@ object MainLoop {
ErrorHandling.wideConvert { state.process(processCommand) } match {
case Right(s) => s
case Left(t: xsbti.FullReload) => throw t
case Left(t: RebootCurrent) => throw t
case Left(t) => state.handleError(t)
}

View File

@ -23,6 +23,7 @@ import Keys.{
serverHost,
serverPort,
serverAuthentication,
serverConnectionType,
watch
}
import Scope.{ Global, ThisScope }
@ -461,6 +462,7 @@ object Project extends ProjectExtra {
val host: Option[String] = get(serverHost)
val port: Option[Int] = get(serverPort)
val authentication: Option[Set[ServerAuthentication]] = get(serverAuthentication)
val connectionType: Option[ConnectionType] = get(serverConnectionType)
val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(s.definedCommands,
projectCommand)
@ -471,6 +473,7 @@ object Project extends ProjectExtra {
.setCond(serverPort.key, port)
.setCond(serverHost.key, host)
.setCond(serverAuthentication.key, authentication)
.setCond(serverConnectionType.key, connectionType)
.put(historyPath.key, history)
.put(templateResolverInfos.key, trs)
.setCond(shellPrompt.key, prompt)
@ -515,10 +518,7 @@ object Project extends ProjectExtra {
def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] =
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
def mapScope(f: Scope => Scope) = new (ScopedKey ~> ScopedKey) {
def apply[T](key: ScopedKey[T]) =
ScopedKey(f(key.scope), key.key)
}
def mapScope(f: Scope => Scope) = λ[ScopedKey ~> ScopedKey](k => ScopedKey(f(k.scope), k.key))
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
val f = mapScope(g)

View File

@ -8,12 +8,12 @@
package sbt
package internal
import java.net.SocketException
import java.io.IOException
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
import BasicKeys.{ serverHost, serverPort, serverAuthentication }
import BasicKeys.{ serverHost, serverPort, serverAuthentication, serverConnectionType }
import java.net.Socket
import sjsonnew.JsonFormat
import sjsonnew.shaded.scalajson.ast.unsafe._
@ -23,9 +23,10 @@ import scala.util.{ Success, Failure }
import sbt.io.syntax._
import sbt.io.Hash
import sbt.internal.server._
import sbt.internal.util.{ StringEvent, ObjectEvent, ConsoleOut, MainAppender }
import sbt.internal.langserver.{ LogMessageParams, MessageType }
import sbt.internal.util.{ StringEvent, ObjectEvent, MainAppender }
import sbt.internal.util.codec.JValueFormats
import sbt.protocol.{ EventMessage, Serialization, ChannelAcceptedEvent }
import sbt.protocol.{ EventMessage, ExecStatusEvent }
import sbt.util.{ Level, Logger, LogExchange }
/**
@ -72,7 +73,7 @@ private[sbt] final class CommandExchange {
def run(s: State): State = {
consoleChannel match {
case Some(x) => // do nothing
case Some(_) => // do nothing
case _ =>
val x = new ConsoleChannel("console0")
consoleChannel = Some(x)
@ -83,6 +84,7 @@ private[sbt] final class CommandExchange {
}
private def newChannelName: String = s"channel-${nextChannelId.incrementAndGet()}"
private def newNetworkName: String = s"network-${nextChannelId.incrementAndGet()}"
/**
* Check if a server instance is running already, and start one if it isn't.
@ -100,28 +102,43 @@ private[sbt] final class CommandExchange {
case Some(xs) => xs
case None => Set(ServerAuthentication.Token)
}
lazy val connectionType = (s get serverConnectionType) match {
case Some(x) => x
case None => ConnectionType.Tcp
}
val serverLogLevel: Level.Value = Level.Debug
def onIncomingSocket(socket: Socket, instance: ServerInstance): Unit = {
s.log.info(s"new client connected from: ${socket.getPort}")
val name = newNetworkName
s.log.info(s"new client connected: $name")
val logger: Logger = {
val loggerName = s"network-${socket.getPort}"
val log = LogExchange.logger(loggerName, None, None)
LogExchange.unbindLoggerAppenders(loggerName)
val log = LogExchange.logger(name, None, None)
LogExchange.unbindLoggerAppenders(name)
val appender = MainAppender.defaultScreen(s.globalLogging.console)
LogExchange.bindLoggerAppenders(loggerName, List(appender -> serverLogLevel))
LogExchange.bindLoggerAppenders(name, List(appender -> serverLogLevel))
log
}
val channel =
new NetworkChannel(newChannelName, socket, Project structure s, auth, instance, logger)
new NetworkChannel(name, socket, Project structure s, auth, instance, logger)
subscribe(channel)
}
server match {
case Some(x) => // do nothing
case Some(_) => // do nothing
case _ =>
val portfile = (new File(".")).getAbsoluteFile / "project" / "target" / "active.json"
val h = Hash.halfHashString(portfile.toURI.toString)
val tokenfile = BuildPaths.getGlobalBase(s) / "server" / h / "token.json"
val x = Server.start(host, port, onIncomingSocket, auth, portfile, tokenfile, s.log)
val socketfile = BuildPaths.getGlobalBase(s) / "server" / h / "sock"
val pipeName = "sbt-server-" + h
val connection =
ServerConnection(connectionType,
host,
port,
auth,
portfile,
tokenfile,
socketfile,
pipeName)
val x = Server.start(connection, onIncomingSocket, s.log)
Await.ready(x.ready, Duration("10s"))
x.ready.value match {
case Some(Success(_)) =>
@ -147,13 +164,13 @@ private[sbt] final class CommandExchange {
private[sbt] def notifyEvent[A: JsonFormat](method: String, params: A): Unit = {
val toDel: ListBuffer[CommandChannel] = ListBuffer.empty
channels.foreach {
case c: ConsoleChannel =>
case _: ConsoleChannel =>
// c.publishEvent(event)
case c: NetworkChannel =>
try {
c.notifyEvent(method, params)
} catch {
case e: SocketException =>
case _: IOException =>
toDel += c
}
}
@ -167,33 +184,48 @@ private[sbt] final class CommandExchange {
}
def publishEvent[A: JsonFormat](event: A): Unit = {
val broadcastStringMessage = true
val toDel: ListBuffer[CommandChannel] = ListBuffer.empty
event match {
case entry: StringEvent =>
channels.foreach {
val params = toLogMessageParams(entry)
channels collect {
case c: ConsoleChannel =>
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {
if (broadcastStringMessage) {
c.publishEvent(event)
} else {
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {
c.publishEvent(event)
}
}
case c: NetworkChannel =>
try {
if (entry.channelName == Some(c.name)) {
c.publishEvent(event)
// Note that language server's LogMessageParams does not hold the execid,
// so this is weaker than the StringMessage. We might want to double-send
// in case we have a better client that can utilize the knowledge.
import sbt.internal.langserver.codec.JsonProtocol._
if (broadcastStringMessage) {
c.langNotify("window/logMessage", params)
} else {
if (entry.channelName == Some(c.name)) {
c.langNotify("window/logMessage", params)
}
}
} catch {
case e: SocketException =>
case _: IOException =>
toDel += c
}
}
case _ =>
channels.foreach {
channels collect {
case c: ConsoleChannel =>
c.publishEvent(event)
case c: NetworkChannel =>
try {
c.publishEvent(event)
} catch {
case e: SocketException =>
case _: IOException =>
toDel += c
}
}
@ -207,6 +239,10 @@ private[sbt] final class CommandExchange {
}
}
// Adapts a logged StringEvent into the LSP `window/logMessage` payload,
// translating the sbt log level name into an LSP MessageType code.
private[sbt] def toLogMessageParams(event: StringEvent): LogMessageParams = {
  val messageType = MessageType.fromLevelString(event.level)
  LogMessageParams(messageType, event.message)
}
/**
* This publishes object events. The type information has been
* erased because it went through logging.
@ -224,14 +260,14 @@ private[sbt] final class CommandExchange {
JField("execId", JString(execId))
})): _*
)
channels.foreach {
channels collect {
case c: ConsoleChannel =>
c.publishEvent(json)
case c: NetworkChannel =>
try {
c.publishObjectEvent(event)
} catch {
case e: SocketException =>
case _: IOException =>
toDel += c
}
}
@ -249,23 +285,39 @@ private[sbt] final class CommandExchange {
val toDel: ListBuffer[CommandChannel] = ListBuffer.empty
event match {
// Special treatment for ConsolePromptEvent since it's hand coded without codec.
case e: ConsolePromptEvent =>
case entry: ConsolePromptEvent =>
channels collect {
case c: ConsoleChannel => c.publishEventMessage(e)
case c: ConsoleChannel => c.publishEventMessage(entry)
}
case e: ConsoleUnpromptEvent =>
case entry: ConsoleUnpromptEvent =>
channels collect {
case c: ConsoleChannel => c.publishEventMessage(e)
case c: ConsoleChannel => c.publishEventMessage(entry)
}
case entry: ExecStatusEvent =>
channels collect {
case c: ConsoleChannel =>
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {
c.publishEventMessage(event)
}
case c: NetworkChannel =>
try {
if (entry.channelName == Some(c.name)) {
c.publishEventMessage(event)
}
} catch {
case e: IOException =>
toDel += c
}
}
case _ =>
channels.foreach {
channels collect {
case c: ConsoleChannel =>
c.publishEventMessage(event)
case c: NetworkChannel =>
try {
c.publishEventMessage(event)
} catch {
case e: SocketException =>
case _: IOException =>
toDel += c
}
}

View File

@ -56,7 +56,7 @@ object IvyConsole {
depSettings)
val newStructure = Load.reapply(session.original ++ append, structure)
val newState = state.copy(remainingCommands = Exec("console-quick", None) :: Nil)
val newState = state.copy(remainingCommands = Exec(Keys.consoleQuick.key.label, None) :: Nil)
Project.setProject(session, newStructure, newState)
}

View File

@ -291,12 +291,12 @@ private[sbt] object Load {
// 3. resolvedScoped is replaced with the defining key as a value
// Note: this must be idempotent.
def finalTransforms(ss: Seq[Setting[_]]): Seq[Setting[_]] = {
def mapSpecial(to: ScopedKey[_]) = new (ScopedKey ~> ScopedKey) {
def apply[T](key: ScopedKey[T]) =
def mapSpecial(to: ScopedKey[_]) = λ[ScopedKey ~> ScopedKey](
(key: ScopedKey[_]) =>
if (key.key == streams.key)
ScopedKey(Scope.fillTaskAxis(Scope.replaceThis(to.scope)(key.scope), to.key), key.key)
else key
}
)
def setDefining[T] =
(key: ScopedKey[T], value: T) =>
value match {
@ -304,13 +304,13 @@ private[sbt] object Load {
case ik: InputTask[t] => ik.mapTask(tk => setDefinitionKey(tk, key)).asInstanceOf[T]
case _ => value
}
def setResolved(defining: ScopedKey[_]) = new (ScopedKey ~> Option) {
def apply[T](key: ScopedKey[T]): Option[T] =
def setResolved(defining: ScopedKey[_]) = λ[ScopedKey ~> Option](
(key: ScopedKey[_]) =>
key.key match {
case resolvedScoped.key => Some(defining.asInstanceOf[T])
case resolvedScoped.key => Some(defining.asInstanceOf[A1$])
case _ => None
}
}
}
)
ss.map(s =>
s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining)
}

View File

@ -109,6 +109,14 @@ object LogManager {
}
}
// to change from global being the default to overriding, switch the order of state.get and data.get
// Scoped setting lookup with fallbacks: the value defined in `data` at
// `scope` wins; otherwise the state-level attribute; otherwise `default`.
// (To make the global value override the scoped one instead, swap the
// order of the first two lookups.)
def getOr[T](key: AttributeKey[T],
             data: Settings[Scope],
             scope: Scope,
             state: State,
             default: T): T =
  data.get(scope, key).orElse(state.get(key)).getOrElse(default)
// This is the main function that is used to generate the logger for tasks.
def defaultLogger(
data: Settings[Scope],
@ -125,13 +133,10 @@ object LogManager {
val execId: Option[String] = execOpt flatMap { _.execId }
val log = LogExchange.logger(loggerName, channelName, execId)
val scope = task.scope
// to change from global being the default to overriding, switch the order of state.get and data.get
def getOr[T](key: AttributeKey[T], default: T): T =
data.get(scope, key) orElse state.get(key) getOrElse default
val screenLevel = getOr(logLevel.key, Level.Info)
val backingLevel = getOr(persistLogLevel.key, Level.Debug)
val screenTrace = getOr(traceLevel.key, defaultTraceLevel(state))
val backingTrace = getOr(persistTraceLevel.key, Int.MaxValue)
val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info)
val backingLevel = getOr(persistLogLevel.key, data, scope, state, Level.Debug)
val screenTrace = getOr(traceLevel.key, data, scope, state, defaultTraceLevel(state))
val backingTrace = getOr(persistTraceLevel.key, data, scope, state, Int.MaxValue)
val extraBacked = state.globalLogging.backed :: relay :: Nil
val consoleOpt = consoleLocally(state, console)
val config = MainAppender.MainAppenderConfig(
@ -188,6 +193,9 @@ object LogManager {
relay: Appender,
extra: List[Appender]
): ManagedLogger = {
val scope = task.scope
val screenLevel = getOr(logLevel.key, data, scope, state, Level.Info)
val backingLevel = getOr(persistLogLevel.key, data, scope, state, Level.Debug)
val execOpt = state.currentCommand
val loggerName: String = s"bg-${task.key.label}-${generateId.incrementAndGet}"
val channelName: Option[String] = execOpt flatMap (_.source map (_.channelName))
@ -197,7 +205,7 @@ object LogManager {
val consoleOpt = consoleLocally(state, console)
LogExchange.bindLoggerAppenders(
loggerName,
(consoleOpt.toList map { _ -> Level.Debug }) ::: (relay -> Level.Debug) :: Nil)
(consoleOpt.toList map { _ -> screenLevel }) ::: (relay -> backingLevel) :: Nil)
log
}

View File

@ -116,7 +116,9 @@ private[sbt] object SbtParser {
scalacGlobalInitReporter = Some(new ConsoleReporter(settings))
// Mix Positions, otherwise global ignores -Yrangepos
val global = new Global(settings, globalReporter) with Positions
val global = new Global(settings, globalReporter) with Positions {
override protected def synchronizeNames = true // https://github.com/scala/bug/issues/10605
}
val run = new global.Run
// Add required dummy unit for initialization...
val initFile = new BatchSourceFile("<wrapper-init>", "")

View File

@ -0,0 +1,329 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
package internal
package server
import sbt.internal.inc.MixedAnalyzingCompiler
import sbt.internal.langserver.ErrorCodes
import sbt.util.Logger
import scala.annotation.tailrec
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration.Duration.Inf
import scala.util.matching.Regex.MatchIterator
import java.nio.file.{ Files, Paths }
import sbt.StandardMain
private[sbt] object Definition {
import java.net.URI
import Keys._
import sbt.internal.inc.Analysis
import sbt.internal.inc.JavaInterfaceUtil._
val AnalysesKey = "lsp.definition.analyses.key"
import sjsonnew.JsonFormat
// Publishes `params` (tagged with `execId`) to the single channel whose name
// matches the command's originating channel; does nothing when no channel
// with that name is registered.
def send[A: JsonFormat](source: CommandSource, execId: String)(params: A): Unit =
  StandardMain.exchange.channels
    .collectFirst { case channel if channel.name == source.channelName => channel }
    .foreach(_.publishEvent(params, Option(execId)))
object textProcessor {
// Runtime predicate: is the given string usable as a Scala identifier?
// Decided empirically by compiling a tiny synthetic snippet with the
// reflection ToolBox: a ToolBoxError means the name is not legal.
// NOTE: the toolbox is created lazily and reused, but every call invokes the
// compiler, so this check is comparatively expensive.
private val isIdentifier = {
import scala.tools.reflect.{ ToolBox, ToolBoxError }
lazy val tb =
scala.reflect.runtime.universe
.runtimeMirror(this.getClass.getClassLoader)
.mkToolBox()
import tb._
// parse, then compile — both must succeed for the snippet to be accepted.
lazy val check = parse _ andThen compile _
(identifier: String) =>
try {
// NOTE(review): also declares the doubled name and re-uses the identifier
// on the right-hand side — presumably to reject names only valid in some
// positions; confirm the intent.
check(s"val $identifier = 0; val ${identifier}${identifier} = $identifier")
true
} catch {
case _: ToolBoxError => false
}
}
// Returns the backtick-quoted token (backticks included) that spans `point`,
// pairing backtick occurrences in order (1st with 2nd, 3rd with 4th, ...);
// an unmatched trailing backtick is ignored. None when point is outside
// every quoted span.
private def findInBackticks(line: String, point: Int): Option[String] = {
  val backtickPositions =
    for ((char, idx) <- line.zipWithIndex if char == '`') yield idx
  backtickPositions
    .grouped(2)
    .collectFirst {
      case Seq(open, close) if open <= point && point <= close =>
        line.slice(open, close + 1)
    }
}
// Extracts the identifier under `point` (0-based column) in `line`.
// Backtick-quoted identifiers are handled first; otherwise candidate spans
// are grown between the nearest whitespace/dot boundaries around point and
// the longest fragment that compiles as a legal identifier is returned.
def identifier(line: String, point: Int): Option[String] = findInBackticks(line, point).orElse {
// Boundaries are runs of whitespace or dots (selector separators).
val whiteSpaceReg = "(\\s|\\.)+".r
// (zero, end): closest separator edge at-or-before point and at-or-after
// point, defaulting to the full line when no separator exists on a side.
val (zero, end) = fold(Seq.empty)(whiteSpaceReg.findAllIn(line))
.collect {
case (white, ind) => (ind, ind + white.length)
}
.fold((0, line.length)) { (z, e) =>
val (from, to) = e
val (left, right) = z
(if (to > left && to <= point) to else left,
if (from < right && from >= point) from else right)
}
// Every span containing point, later sorted widest-first.
val ranges = for {
from <- zero to point
to <- point to end
} yield (from -> to)
ranges
.sortBy {
case (from, to) => -(to - from)
}
.foldLeft(Seq.empty[String]) { (z, e) =>
val (from, to) = e
val fragment = line.slice(from, to).trim
z match {
// first valid fragment found
case Nil if fragment.nonEmpty && isIdentifier(fragment) => fragment +: z
// NOTE(review): spans are visited widest-first, yet a later *longer*
// trimmed fragment replaces earlier shorter ones — confirm this
// ordering interaction with trim is intentional.
case h +: _ if h.length < fragment.length && isIdentifier(fragment) =>
Seq(fragment)
case h +: _ if h.length == fragment.length && isIdentifier(fragment) =>
fragment +: z
case z => z
}
}
.headOption
}
// The suffix/prefix shapes under which `sym` can appear inside a compiled
// class name: member (".Sym"), companion/object (".Sym$"), and the nested
// forms ("$Sym", "$Sym$").
private def asClassObjectIdentifier(sym: String) =
  Seq("." + sym, "." + sym + "$", "$" + sym, "$" + sym + "$")
// Builds a partial function that accepts a candidate class-file-style name
// when it can correspond to symbol `sym`: either it ends with one of the
// member/nested shapes of the encoded symbol, or it is exactly the encoded
// symbol (or its companion-object "$" form).
def potentialClsOrTraitOrObj(sym: String): PartialFunction[String, String] = {
  import scala.reflect.NameTransformer
  // Strip one surrounding pair of backticks, if present, before encoding.
  val bare =
    if (sym.length >= 2 && sym.head == '`' && sym.last == '`') sym.tail.init
    else sym
  val encodedSym = NameTransformer.encode(bare)
  val suffixes = asClassObjectIdentifier(encodedSym)
  {
    case candidate
        if suffixes.exists(candidate.endsWith) ||
          candidate == encodedSym ||
          candidate == s"$encodedSym$$" =>
      candidate
  }
}
// Tail-recursively drains the match iterator, appending one
// (matched text, start offset) pair per match to `z`.
@tailrec
private def fold(z: Seq[(String, Int)])(it: MatchIterator): Seq[(String, Int)] =
  if (!it.hasNext) z
  else {
    val matched = it.next() // advance first: `start` is only valid after next()
    fold(z :+ (matched -> it.start))(it)
  }
// Finds definitions of `sym` in a single source line, returning each match
// (keyword + name, trimmed) with its start offset. class/trait patterns
// tolerate an opening type-parameter bracket after the name.
def classTraitObjectInLine(sym: String)(line: String): Seq[(String, Int)] = {
  import scala.util.matching.Regex.quote
  val patterns = Seq(
    s"object\\s+${quote(sym)}".r,
    s"trait\\s+${quote(sym)} *\\[?".r,
    s"class\\s+${quote(sym)} *\\[?".r
  )
  for {
    regex <- patterns
    (matched, offset) <- fold(Seq.empty)(regex.findAllIn(line))
  } yield {
    // Drop the trailing "[" captured by the optional bracket, then trim.
    val cleaned = if (matched.endsWith("[")) matched.init.trim else matched.trim
    cleaned -> offset
  }
}
import java.io.File
/**
 * Scans `file` line by line for definitions of `sym` and returns one tuple
 * per match: (file, zero-based line number, start column, end column).
 * Duplicate positions are removed.
 *
 * Fix: the stream returned by Files.lines was never closed, leaking a file
 * handle (and keeping the file locked on Windows). The result is now
 * materialized with toVector (still a Seq) before the stream is closed in
 * a finally block, so laziness cannot escape the stream's lifetime.
 */
def markPosition(file: File, sym: String): Seq[(File, Long, Long, Long)] = {
  import java.nio.file._
  import scala.collection.JavaConverters._
  val findInLine = classTraitObjectInLine(sym)(_)
  val stream = Files.lines(file.toPath)
  try {
    stream.iterator.asScala.zipWithIndex
      .flatMap {
        case (line, lineNumber) =>
          findInLine(line).map {
            case (matchedSym, column) =>
              (file, lineNumber.toLong, column.toLong, column.toLong + matchedSym.length)
          }
      }
      .toVector // force evaluation before the underlying stream is closed
      .distinct
  } finally stream.close()
}
}
import sbt.internal.langserver.TextDocumentPositionParams
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
// Decodes the raw LSP JSON payload into TextDocumentPositionParams;
// yields None when the JSON does not conform to the expected shape.
private def getDefinition(jsonDefinition: JValue): Option[TextDocumentPositionParams] = {
import sbt.internal.langserver.codec.JsonProtocol._
import sjsonnew.support.scalajson.unsafe.Converter
Converter.fromJson[TextDocumentPositionParams](jsonDefinition).toOption
}
import java.io.File
// Loads the persisted incremental-compilation analysis from `cacheFile`
// (text store when `useBinary` is false), returning None when the store is
// missing, unreadable, or does not contain an sbt `Analysis` instance.
private def storeAnalysis(cacheFile: File, useBinary: Boolean): Option[Analysis] =
  MixedAnalyzingCompiler
    .staticCachedStore(cacheFile, !useBinary)
    .get
    .toOption
    .map(_.getAnalysis)
    .collect { case analysis: Analysis => analysis }
import scalacache._
/**
 * Registers `cacheFile` (with its binary/text flag) in the shared analyses
 * cache under AnalysesKey, replacing any stale entry for the same file path.
 * The analysis value itself is stored as None (a placeholder) and loaded
 * lazily later by getAnalyses.
 *
 * Fix: removed the former `case _ => mode.M.pure(())` branch — `None` and
 * `Some(_)` already cover Option exhaustively, so that branch was
 * unreachable dead code (flagged by the compiler's unreachability check).
 */
private[sbt] def updateCache[F[_]](cache: Cache[Any])(cacheFile: String, useBinary: Boolean)(
    implicit
    mode: Mode[F],
    flags: Flags): F[Any] = {
  mode.M.flatMap(cache.get(AnalysesKey)) {
    case None =>
      // First registration: seed the set with this entry, analysis unloaded.
      cache.put(AnalysesKey)(Set(cacheFile -> useBinary -> None), Option(Inf))
    case Some(set) =>
      // Drop any previous entry for the same cache file before re-adding it,
      // so a changed `useBinary` flag cannot leave a duplicate behind.
      cache.put(AnalysesKey)(
        set.asInstanceOf[Set[((String, Boolean), Option[Analysis])]].filterNot {
          case ((file, _), _) => file == cacheFile
        } + (cacheFile -> useBinary -> None),
        Option(Inf))
  }
}
/**
 * Task that records this project's incremental-compile analysis location
 * (path plus binary/text flag) in the server-wide analyses cache so that
 * `getAnalyses` can load it on demand for definition lookups.
 */
def collectAnalysesTask = Def.task {
  val cacheFile = compileIncSetup.value.cacheFile.getAbsolutePath
  val useBinary = enableBinaryCompileAnalysis.value
  val s = state.value
  s.log.debug(s"analysis location ${(cacheFile -> useBinary)}")
  // Synchronous cache mode: the update completes inline within the task.
  import scalacache.modes.sync._
  updateCache(StandardMain.cache)(cacheFile, useBinary)
}
/**
 * Loads every compile analysis registered under [[AnalysesKey]].
 *
 * Entries still cached as `None` are read from disk on demand (when their
 * cache file exists) and the refreshed set is written back to the cache.
 *
 * NOTE(review): when the cache holds no entry at all, the returned future
 * fails (the `collect` below is partial); callers recover from this.
 */
private[sbt] def getAnalyses(log: Logger): Future[Seq[Analysis]] = {
  import scalacache.modes.scalaFuture._
  import scala.concurrent.ExecutionContext.Implicits.global
  StandardMain.cache
    .get(AnalysesKey)
    .collect {
      case Some(raw) => raw.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
    }
    .map { entries =>
      // Split into entries that already carry an analysis and those that
      // still need to be loaded from their cache file.
      val (ready, pending) = entries.partition { case (_, analysis) => analysis.isDefined }
      val loaded = pending.collect {
        case (key @ (path, binary), _) if Files.exists(Paths.get(path)) =>
          key -> storeAnalysis(Paths.get(path).toFile, !binary)
      }
      val merged = ready ++ loaded
      if (loaded.nonEmpty)
        StandardMain.cache.put(AnalysesKey)(merged, Option(Inf))
      merged.toSeq.collect { case (_, Some(analysis)) => analysis }
    }
}
def lspDefinition(jsonDefinition: JValue,
requestId: String,
commandSource: CommandSource,
log: Logger)(implicit ec: ExecutionContext): Future[Unit] = Future {
val LspDefinitionLogHead = "lsp-definition"
import sjsonnew.support.scalajson.unsafe.CompactPrinter
log.debug(s"$LspDefinitionLogHead json request: ${CompactPrinter(jsonDefinition)}")
lazy val analyses = getAnalyses(log)
val definition = getDefinition(jsonDefinition)
definition
.flatMap { definition =>
val uri = URI.create(definition.textDocument.uri)
import java.nio.file._
Files
.lines(Paths.get(uri))
.skip(definition.position.line)
.findFirst
.toOption
.flatMap { line =>
log.debug(s"$LspDefinitionLogHead found line: $line")
textProcessor
.identifier(line, definition.position.character.toInt)
}
}
.map { sym =>
log.debug(s"symbol $sym")
analyses
.map { analyses =>
val locations = analyses.par.flatMap { analysis =>
val selectPotentials = textProcessor.potentialClsOrTraitOrObj(sym)
val classes =
(analysis.apis.allInternalClasses ++ analysis.apis.allExternals).collect {
selectPotentials
}
log.debug(s"$LspDefinitionLogHead potentials: $classes")
classes
.flatMap { className =>
analysis.relations.definesClass(className) ++ analysis.relations
.libraryDefinesClass(className)
}
.flatMap { classFile =>
textProcessor.markPosition(classFile, sym).collect {
case (file, line, from, to) =>
import sbt.internal.langserver.{ Location, Position, Range }
Location(file.toURI.toURL.toString,
Range(Position(line, from), Position(line, to)))
}
}
}.seq
log.debug(s"$LspDefinitionLogHead locations ${locations}")
import sbt.internal.langserver.codec.JsonProtocol._
send(commandSource, requestId)(locations.toArray)
}
.recover {
case anyException @ _ =>
log.warn(
s"Problem with processing analyses $anyException for ${CompactPrinter(jsonDefinition)}")
import sbt.internal.protocol.JsonRpcResponseError
import sbt.internal.protocol.codec.JsonRPCProtocol._
send(commandSource, requestId)(
JsonRpcResponseError(ErrorCodes.InternalError,
"Problem with processing analyses.",
None))
}
}
.orElse {
log.info(s"Symbol not found in definition request ${CompactPrinter(jsonDefinition)}")
import sbt.internal.langserver.Location
import sbt.internal.langserver.codec.JsonProtocol._
send(commandSource, requestId)(Array.empty[Location])
None
}
}
}

View File

@ -34,11 +34,18 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
protected def log: Logger
protected def onSettingQuery(execId: Option[String], req: Q): Unit
protected def onRequestMessage(request: JsonRpcRequestMessage): Unit = {
protected def onNotification(notification: JsonRpcNotificationMessage): Unit = {
log.debug(s"onNotification: $notification")
notification.method match {
case "textDocument/didSave" =>
append(Exec(";compile; collectAnalyses", None, Some(CommandSource(name))))
case u => log.debug(s"Unhandled notification received: $u")
}
}
protected def onRequestMessage(request: JsonRpcRequestMessage): Unit = {
import sbt.internal.langserver.codec.JsonProtocol._
import internalJsonProtocol._
def json =
request.params.getOrElse(
throw LangServerError(ErrorCodes.InvalidParams,
@ -57,9 +64,11 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
else throw LangServerError(ErrorCodes.InvalidRequest, "invalid token")
} else ()
setInitialized(true)
append(Exec(s"collectAnalyses", Some(request.id), Some(CommandSource(name))))
langRespond(InitializeResult(serverCapabilities), Option(request.id))
case "textDocument/didSave" =>
append(Exec("compile", Some(request.id), Some(CommandSource(name))))
case "textDocument/definition" =>
import scala.concurrent.ExecutionContext.Implicits.global
Definition.lspDefinition(json, request.id, CommandSource(name), log)
case "sbt/exec" =>
val param = Converter.fromJson[SbtExecParams](json).get
append(Exec(param.commandLine, Some(request.id), Some(CommandSource(name))))
@ -68,7 +77,7 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
val param = Converter.fromJson[Q](json).get
onSettingQuery(Option(request.id), param)
}
case _ => ()
case unhandledRequest => log.debug(s"Unhandled request received: $unhandledRequest")
}
}
@ -138,9 +147,18 @@ private[sbt] trait LanguageServerProtocol extends CommandChannel {
publishBytes(bytes)
}
def logMessage(level: String, message: String): Unit = {
import sbt.internal.langserver.codec.JsonProtocol._
langNotify(
"window/logMessage",
LogMessageParams(MessageType.fromLevelString(level), message)
)
}
private[sbt] lazy val serverCapabilities: ServerCapabilities = {
ServerCapabilities(textDocumentSync =
TextDocumentSyncOptions(true, 0, false, false, SaveOptions(false)),
hoverProvider = false)
hoverProvider = false,
definitionProvider = true)
}
}

View File

@ -16,8 +16,9 @@ import sjsonnew._
import scala.annotation.tailrec
import sbt.protocol._
import sbt.internal.langserver.ErrorCodes
import sbt.internal.util.ObjectEvent
import sbt.internal.util.{ ObjectEvent, StringEvent }
import sbt.internal.util.codec.JValueFormats
import sbt.internal.protocol.{ JsonRpcRequestMessage, JsonRpcNotificationMessage }
import sbt.util.Logger
final class NetworkChannel(val name: String,
@ -166,8 +167,8 @@ final class NetworkChannel(val name: String,
def handleBody(chunk: Vector[Byte]): Unit = {
if (isLanguageServerProtocol) {
Serialization.deserializeJsonRequest(chunk) match {
case Right(req) =>
Serialization.deserializeJsonMessage(chunk) match {
case Right(req: JsonRpcRequestMessage) =>
try {
onRequestMessage(req)
} catch {
@ -175,6 +176,16 @@ final class NetworkChannel(val name: String,
log.debug(s"sending error: $code: $message")
langError(Option(req.id), code, message)
}
case Right(ntf: JsonRpcNotificationMessage) =>
try {
onNotification(ntf)
} catch {
case LangServerError(code, message) =>
log.debug(s"sending error: $code: $message")
langError(None, code, message) // new id?
}
case Right(msg) =>
log.debug(s"Unhandled message: $msg")
case Left(errorDesc) =>
val msg = s"Got invalid chunk from client (${new String(chunk.toArray, "UTF-8")}): " + errorDesc
langError(None, ErrorCodes.ParseError, msg)
@ -227,7 +238,10 @@ final class NetworkChannel(val name: String,
def publishEvent[A: JsonFormat](event: A, execId: Option[String]): Unit = {
if (isLanguageServerProtocol) {
langRespond(event, execId)
event match {
case entry: StringEvent => logMessage(entry.level, entry.message)
case _ => langRespond(event, execId)
}
} else {
contentType match {
case SbtX1Protocol =>
@ -241,11 +255,19 @@ final class NetworkChannel(val name: String,
def publishEvent[A: JsonFormat](event: A): Unit = publishEvent(event, None)
def publishEventMessage(event: EventMessage): Unit = {
contentType match {
case SbtX1Protocol =>
val bytes = Serialization.serializeEventMessage(event)
publishBytes(bytes, true)
case _ =>
if (isLanguageServerProtocol) {
event match {
case entry: LogEvent => logMessage(entry.level, entry.message)
case entry: ExecStatusEvent => logMessage("debug", entry.status)
case _ => ()
}
} else {
contentType match {
case SbtX1Protocol =>
val bytes = Serialization.serializeEventMessage(event)
publishBytes(bytes, true)
case _ => ()
}
}
}

View File

@ -20,7 +20,7 @@ trait SplitExpression {
trait SplitExpressionsBehavior extends SplitExpression { this: SpecificationLike =>
def newExpressionsSplitter(implicit splitter: SplitExpressions.SplitExpression): Unit = {
def newExpressionsSplitter(implicit splitter: SplitExpressions.SplitExpression) = {
"parse a two settings without intervening blank line" in {
val (imports, settings) = split("""version := "1.0"

View File

@ -0,0 +1,177 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
package internal
package server
import sbt.internal.inc.Analysis
/**
 * Unit tests for [[Definition]]: identifier extraction from source lines,
 * class/trait/object name matching, and analysis-cache bookkeeping.
 *
 * NOTE(review): whitespace inside some fixture strings was lost in a
 * whitespace-collapsing rendering; runs of leading spaces were reconstructed
 * to the minimal width consistent with the asserted offsets (e.g. offset 3
 * implies three leading spaces) — confirm against the original file.
 */
class DefinitionTest extends org.specs2.mutable.Specification {
  import Definition.textProcessor

  // Pure text-processing helpers: caret-based identifier lookup and
  // declaration matching within a single line.
  "text processor" should {
    "find valid standard scala identifier when caret is set at the start of it" in {
      textProcessor.identifier("val identifier = 0", 4) must beSome("identifier")
    }

    "not find valid standard scala identifier because it is '='" in {
      textProcessor.identifier("val identifier = 0", 15) must beNone
    }

    "find valid standard scala identifier when caret is set in the middle of it" in {
      textProcessor.identifier("val identifier = 0", 11) must beSome("identifier")
    }

    "find valid standard scala identifier with comma" in {
      textProcessor.identifier("def foo(a: identifier, b: other) = ???", 13) must beSome(
        "identifier")
    }

    "find valid standard short scala identifier when caret is set at the start of it" in {
      textProcessor.identifier("val a = 0", 4) must beSome("a")
    }

    "find valid standard short scala identifier when caret is set at the end of it" in {
      textProcessor.identifier("def foo(f: Int) = Foo(f)", 19) must beSome("Foo")
    }

    "find valid non-standard short scala identifier when caret is set at the start of it" in {
      textProcessor.identifier("val == = 0", 4) must beSome("==")
    }

    "find valid non-standard short scala identifier when caret is set in the middle of it" in {
      textProcessor.identifier("val == = 0", 5) must beSome("==")
    }

    "find valid non-standard short scala identifier when caret is set at the end of it" in {
      textProcessor.identifier("val == = 0", 6) must beSome("==")
    }

    "choose longest valid standard scala identifier from scala keyword when caret is set at the start of it" in {
      textProcessor.identifier("val k = 0", 0) must beSome("va") or beSome("al")
    }

    "choose longest valid standard scala identifier from scala keyword when caret is set in the middle of it" in {
      textProcessor.identifier("val k = 0", 1) must beSome("va") or beSome("al")
    }

    "match symbol as class name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("com.acme.A") must be_==("com.acme.A")
    }

    "match symbol as object name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("com.acme.A$") must be_==("com.acme.A$")
    }

    "match symbol as inner class name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("com.acme.A$A") must be_==("com.acme.A$A")
    }

    "match symbol as inner object name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("com.acme.A$A$") must be_==("com.acme.A$A$")
    }

    "match symbol as default package class name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("A") must be_==("A")
    }

    "match symbol as default package object name" in {
      textProcessor.potentialClsOrTraitOrObj("A")("A$") must be_==("A$")
    }

    "match object in line version 1" in {
      textProcessor.classTraitObjectInLine("A")("   object A ") must contain(("object A", 3))
    }

    "match object in line version 2" in {
      textProcessor.classTraitObjectInLine("A")("   object A ") must contain(("object A", 3))
    }

    "match object in line version 3" in {
      textProcessor.classTraitObjectInLine("A")("object A {") must contain(("object A", 0))
    }

    "not match object in line" in {
      textProcessor.classTraitObjectInLine("B")("object A ") must be empty
    }

    "match class in line version 1" in {
      textProcessor.classTraitObjectInLine("A")("   class A ") must contain(("class A", 3))
    }

    "match class in line version 2" in {
      textProcessor.classTraitObjectInLine("A")("   class A ") must contain(("class A", 3))
    }

    "match class in line version 3" in {
      textProcessor.classTraitObjectInLine("A")("class A {") must contain(("class A", 0))
    }

    "match class in line version 4" in {
      textProcessor.classTraitObjectInLine("A")("   class A[A] ") must contain(
        ("class A", 3))
    }

    "match class in line version 5" in {
      textProcessor.classTraitObjectInLine("A")("   class A [A] ") must contain(
        ("class A", 3))
    }

    "match class in line version 6" in {
      textProcessor.classTraitObjectInLine("A")("class A[A[_]] {") must contain(("class A", 0))
    }

    "not match class in line" in {
      textProcessor.classTraitObjectInLine("B")("class A ") must be empty
    }

    "match trait in line version 1" in {
      textProcessor.classTraitObjectInLine("A")("   trait A ") must contain(("trait A", 3))
    }

    "match trait in line version 2" in {
      textProcessor.classTraitObjectInLine("A")("   trait A ") must contain(("trait A", 3))
    }

    "match trait in line version 3" in {
      textProcessor.classTraitObjectInLine("A")("trait A {") must contain(("trait A", 0))
    }

    "match trait in line version 4" in {
      textProcessor.classTraitObjectInLine("A")("   trait A[A] ") must contain(
        ("trait A", 3))
    }

    "match trait in line version 5" in {
      textProcessor.classTraitObjectInLine("A")("   trait A [A] ") must contain(
        ("trait A", 3))
    }

    "match trait in line version 6" in {
      textProcessor.classTraitObjectInLine("A")("trait A[A[_]] {") must contain(("trait A", 0))
    }

    "not match trait in line" in {
      textProcessor.classTraitObjectInLine("B")("trait A ") must be empty
    }
  }

  // Analysis-cache bookkeeping using an in-memory Caffeine cache.
  "definition" should {
    import scalacache.caffeine._
    import scalacache.modes.sync._

    "cache data in cache" in {
      val cache = CaffeineCache[Any]
      val cacheFile = "Test.scala"
      val useBinary = true
      Definition.updateCache(cache)(cacheFile, useBinary)
      val actual = cache.get(Definition.AnalysesKey)
      actual.collect {
        case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
      }.get should contain("Test.scala" -> true -> None)
    }

    "replace cache data in cache" in {
      val cache = CaffeineCache[Any]
      val cacheFile = "Test.scala"
      val useBinary = true
      val falseUseBinary = false
      // Second update for the same file must replace the first entry.
      Definition.updateCache(cache)(cacheFile, falseUseBinary)
      Definition.updateCache(cache)(cacheFile, useBinary)
      val actual = cache.get(Definition.AnalysesKey)
      actual.collect {
        case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
      }.get should contain("Test.scala" -> true -> None)
    }

    "cache more data in cache" in {
      val cache = CaffeineCache[Any]
      val cacheFile = "Test.scala"
      val useBinary = true
      val otherCacheFile = "OtherTest.scala"
      val otherUseBinary = false
      // Updates for distinct files accumulate rather than replace.
      Definition.updateCache(cache)(otherCacheFile, otherUseBinary)
      Definition.updateCache(cache)(cacheFile, useBinary)
      val actual = cache.get(Definition.AnalysesKey)
      actual.collect {
        case s => s.asInstanceOf[Set[((String, Boolean), Option[Analysis])]]
      }.get should contain("Test.scala" -> true -> None, "OtherTest.scala" -> false -> None)
    }
  }
}

View File

@ -51,7 +51,7 @@ object SettingQueryTest extends org.specs2.mutable.Specification {
def apply[T](lockFile: File, run: Callable[T]) = run.call()
}
lazy val structure: BuildStructure = {
lazy val buildStructure: BuildStructure = {
val projectSettings: Seq[Setting[_]] = Seq(scalaVersion := "2.12.1")
val appConfig: AppConfiguration = new AppConfiguration {
@ -180,7 +180,7 @@ object SettingQueryTest extends org.specs2.mutable.Specification {
def query(setting: String): String = {
import sbt.protocol._
val req: SettingQuery = sbt.protocol.SettingQuery(setting)
val rsp: SettingQueryResponse = server.SettingQuery.handleSettingQuery(req, structure)
val rsp: SettingQueryResponse = server.SettingQuery.handleSettingQuery(req, buildStructure)
val bytes: Array[Byte] = Serialization serializeEventMessage rsp
val payload: String = new String(bytes, java.nio.charset.StandardCharsets.UTF_8)
payload

View File

@ -1,7 +0,0 @@
### Bug fixes
- Fixes `addSbtPlugin` to use the correct version of sbt. [#3393][]/[#3397][] by [@dwijnand][]
[#3393]: https://github.com/sbt/sbt/issues/3393
[#3397]: https://github.com/sbt/sbt/pull/3397
[@dwijnand]: http://github.com/dwijnand

61
notes/1.0.3.markdown Normal file
View File

@ -0,0 +1,61 @@
This is a hotfix release for sbt 1.0.x series.
### Bug fixes
- Fixes `~` recompiling in loop (when a source generator or sbt-buildinfo is present). [#3501][3501]/[#3634][3634] by [@dwijnand][@dwijnand]
- Fixes undercompilation on inheritance on same source. [zinc#424][zinc424] by [@eed3si9n][@eed3si9n]
- Fixes the compilation of package-protected objects. [zinc#431][zinc431] by [@jvican][@jvican]
- Workaround for Java returning `null` for `getGenericParameterTypes`. [zinc#446][zinc446] by [@jvican][@jvican]
- Fixes test detection regression. sbt 1.0.3 filters out nested objects/classes from the list, restoring compatibility with 0.13. [#3669][3669] by [@cunei][@cunei]
- Uses Scala 2.12.4 for the build definition. This includes fix for runtime reflection of empty package members under Java 9. [#3587][3587] by [@eed3si9n][@eed3si9n]
- Fixes extra `/` in Ivy style patterns. [lm#170][lm170] by [@laughedelic][@laughedelic]
- Fixes "destination file exist" error message by including the file name. [lm#171][lm171] by [@leonardehrenfried][@leonardehrenfried]
- Fixes JDK 9 warning "Illegal reflective access" in library management module and Ivy. [lm#173][lm173] by [@dwijnand][@dwijnand]
### Improvements
- Adds `sbt.watch.mode` system property to allow switching back to old polling behaviour for watch. See below for more details.
#### Alternative watch mode
sbt 1.0.0 introduced a new mechanism for watching for source changes based on the NIO `WatchService` in Java 1.7. On
some platforms (namely macOS) this has led to long delays before changes are picked up. An alternative `WatchService`
for these platforms is planned for sbt 1.1.0 ([#3527][3527]), in the meantime an option to select which watch service
has been added.
The new `sbt.watch.mode` JVM flag has been added with the following supported values:
- `polling`: (default for macOS) poll the filesystem for changes (mechanism used in sbt 0.13).
- `nio` (default for other platforms): use the NIO based `WatchService`.
If you are experiencing long delays on a non-macOS machine then try adding `-Dsbt.watch.mode=polling` to your sbt
options.
[#3597][3597] by [@stringbean][@stringbean]
### Contributors
A huge thank you to everyone who's helped improve sbt and Zinc 1 by using them, reporting bugs, improving our documentation, porting builds, porting plugins, and submitting and reviewing pull requests.
This release was brought to you by 15 contributors, according to `git shortlog -sn --no-merges v1.0.2..v1.0.3` on sbt, zinc, librarymanagement, util, io, and website: Eugene Yokota, Dale Wijnand, Michael Stringer, Jorge Vicente Cantero (jvican), Alexey Alekhin, Antonio Cunei, Andrey Artemov, Jeffrey Olchovy, Kenji Yoshida (xuwei-k), Dominik Winter, Long Jinwei, Arnout Engelen, Justin Kaeser, Leonard Ehrenfried, Sakib Hadžiavdić. Thank you!
[@dwijnand]: https://github.com/dwijnand
[@cunei]: https://github.com/cunei
[@eed3si9n]: https://github.com/eed3si9n
[@jvican]: https://github.com/jvican
[@stringbean]: https://github.com/stringbean
[@laughedelic]: https://github.com/laughedelic
[@leonardehrenfried]: https://github.com/leonardehrenfried
[3669]: https://github.com/sbt/sbt/pull/3669
[3583]: https://github.com/sbt/sbt/issues/3583
[3587]: https://github.com/sbt/sbt/issues/3587
[3527]: https://github.com/sbt/sbt/issues/3527
[3597]: https://github.com/sbt/sbt/pull/3597
[3501]: https://github.com/sbt/sbt/issues/3501
[3634]: https://github.com/sbt/sbt/pull/3634
[lm170]: https://github.com/sbt/librarymanagement/pull/170
[lm171]: https://github.com/sbt/librarymanagement/pull/171
[lm173]: https://github.com/sbt/librarymanagement/pull/173
[zinc424]: https://github.com/sbt/zinc/pull/424
[zinc431]: https://github.com/sbt/zinc/pull/431
[zinc446]: https://github.com/sbt/zinc/pull/446

View File

@ -1,9 +0,0 @@
[@panaeon]: https://github.com/panaeon
[#3464]: https://github.com/sbt/sbt/issues/3464
[#3566]: https://github.com/sbt/sbt/pull/3566
### Bug fixes
- Escape imports from sbt files, so if user creates a backquoted definition then task evaluation will not fail.

View File

@ -1,26 +0,0 @@
### Fixes with compatibility implications
### Improvements
- Add `sbt.watch.mode` system property to allow switching back to old polling behaviour for watch. See below for more details. [#3597][3597] by [@stringbean][@stringbean]
### Bug fixes
#### Alternative watch mode
sbt 1.0.0 introduced a new mechanism for watching for source changes based on the NIO `WatchService` in Java 1.7. On
some platforms (namely macOS) this has led to long delays before changes are picked up. An alternative `WatchService`
for these platforms is planned for sbt 1.1.0 ([#3527][3527]), in the meantime an option to select which watch service
has been added.
The new `sbt.watch.mode` JVM flag has been added with the following supported values:
- `polling`: (default for macOS) poll the filesystem for changes (mechanism used in sbt 0.13).
- `nio` (default for other platforms): use the NIO based `WatchService`.
If you are experiencing long delays on a non-macOS machine then try adding `-Dsbt.watch.mode=polling` to your sbt
options.
[@stringbean]: https://github.com/stringbean
[3527]: https://github.com/sbt/sbt/issues/3527
[3597]: https://github.com/sbt/sbt/pull/3597

51
notes/1.0.4.markdown Normal file
View File

@ -0,0 +1,51 @@
This is a hotfix release for sbt 1.0.x series.
### Bug fixes
- Fixes undercompilation of value classes when the underlying type changes. [zinc#444][zinc444] by [@smarter][@smarter]
- Fixes `ArrayIndexOutOfBoundsException` on Ivy when running on Java 9. [ivy#27][ivy27] by [@xuwei-k][@xuwei-k]
- Fixes Java 9 warning by upgrading to launcher 1.0.2. [ivy#26][ivy26]/[launcher#45][launcher45] by [@dwijnand][@dwijnand]
- Fixes `run` outputting debug level logs. [#3655][3655]/[#3717][3717] by [@cunei][@cunei]
- Fixes performance regression caused by classpath hashing. [zinc#452][zinc452] by [@jvican][@jvican]
- Fixes performance regression of `testQuick`. [#3680][3680]/[#3720][3720] by [@OlegYch][@OlegYch]
- Disables Ivy log4j caller location calculation for performance regression reported in [#3711][3711]. [util#132][util132] by [@leonardehrenfried][@leonardehrenfried]
- Works around Scala compiler's `templateStats()` not being thread-safe. [#3743][3743] by [@cunei][@cunei]
- Fixes "Attempting to overwrite" error message. [lm#174][lm174] by [@dwijnand][@dwijnand]
- Fixes incorrect eviction warning message. [lm#179][lm179] by [@xuwei-k][@xuwei-k]
- Registers Ivy protocol only for `http:` and `https:` to be more plugin friendly. [lm#183][lm183] by [@tpunder][@tpunder]
### Enhancement
- Adds Scala 2.13.0-M2 support. [zinc#453][zinc453] by [@eed3si9n][@eed3si9n] and [@jan0sch][@jan0sch]
### Internal
- Improves Zinc scripted testing. [zinc#440][zinc440] by [@jvican][@jvican]
[@dwijnand]: https://github.com/dwijnand
[@cunei]: https://github.com/cunei
[@eed3si9n]: https://github.com/eed3si9n
[@jvican]: https://github.com/jvican
[@OlegYch]: https://github.com/OlegYch
[@leonardehrenfried]: https://github.com/leonardehrenfried
[@xuwei-k]: https://github.com/xuwei-k
[@tpunder]: https://github.com/tpunder
[@smarter]: https://github.com/smarter
[@jan0sch]: https://github.com/jan0sch
[3655]: https://github.com/sbt/sbt/issues/3655
[3717]: https://github.com/sbt/sbt/pull/3717
[ivy26]: https://github.com/sbt/ivy/pull/26
[ivy27]: https://github.com/sbt/ivy/pull/27
[launcher45]: https://github.com/sbt/launcher/pull/45
[3680]: https://github.com/sbt/sbt/issues/3680
[3720]: https://github.com/sbt/sbt/pull/3720
[3743]: https://github.com/sbt/sbt/pull/3743
[3711]: https://github.com/sbt/sbt/issues/3711
[util132]: https://github.com/sbt/util/pull/132
[lm174]: https://github.com/sbt/librarymanagement/pull/174
[lm179]: https://github.com/sbt/librarymanagement/pull/179
[lm183]: https://github.com/sbt/librarymanagement/pull/183
[zinc452]: https://github.com/sbt/zinc/pull/452
[zinc444]: https://github.com/sbt/zinc/pull/444
[zinc453]: https://github.com/sbt/zinc/pull/453
[zinc440]: https://github.com/sbt/zinc/pull/440

170
notes/1.1.0.markdown Normal file
View File

@ -0,0 +1,170 @@
### Features, fixes, changes with compatibility implications
- sbt server feature is reworked in sbt 1.1.0. See below.
- Changes `version` setting default to `0.1.0-SNAPSHOT` for compatibility with Semantic Versioning. [#3577][3577] by [@laughedelic][@laughedelic]
### Features
- Unifies sbt shell and build.sbt syntax. See below.
### Fixes
- Fixes over-compilation bug with Java 9. [zinc#450][zinc450] by [@retronym][@retronym]
- Fixes handling of deeply nested Java classes. [zinc#423][zinc423] by [@romanowski][@romanowski]
- Fixes JavaDoc not printing all errors. [zinc#415][zinc415] by [@raboof][@raboof]
- Preserves JAR order in `ScalaInstance.otherJars`. [zinc#411][zinc411] by [@dwijnand][@dwijnand]
- Fixes used name when it contains NL. [zinc#449][zinc449] by [@jilen][@jilen]
- Fixes handling of `ThisProject`. [#3609][3609] by [@dwijnand][@dwijnand]
- Escapes imports from sbt files, so if user creates a backquoted definition then task evaluation will not fail. [#3635][3635] by [@panaeon][@panaeon]
- Removes reference to version 0.14.0 from a warning message. [#3693][3693] by [@saniyatech][@saniyatech]
- Fixes screpl throwing "Not a valid key: console-quick". [#3762][3762] by [@xuwei-k][@xuwei-k]
### Improvements
- Filters scripted tests based on optional `project/build.properties`. See below.
- Adds `Project#withId` to change a project's id. [#3601][3601] by [@dwijnand][@dwijnand]
- Adds `reboot dev` command, which deletes the current artifact from the boot directory. This is useful when working with development versions of sbt. [#3659][3659] by [@eed3si9n][@eed3si9n]
- Adds a check for a change in sbt version before `reload`. [#1055][1055]/[#3673][3673] by [@RomanIakovlev][@RomanIakovlev]
- Adds a new setting `insideCI`, which indicates that sbt is likely running in an Continuous Integration environment. [#3672][3672] by [@RomanIakovlev][@RomanIakovlev]
- Adds `nameOption` to `Command` trait. [#3671][3671] by [@miklos-martin][@miklos-martin]
- Adds POSIX persmission operations in IO, such as `IO.chmod(..)`. [io#76][io76] by [@eed3si9n][@eed3si9n]
- Treat sbt 1 modules using Semantic Versioning in the eviction warning. [lm#188][lm188] by [@eed3si9n][@eed3si9n]
- Uses kind-projector in the code. [#3650][3650] by [@dwijnand][@dwijnand]
- Make `displayOnly` etc methods strict in `Completions`. [#3763][3763] by [@xuwei-k][@xuwei-k]
### Unified slash syntax for sbt shell and build.sbt
This adds unified slash syntax for both sbt shell and the build.sbt DSL.
Instead of the current `<project-id>/config:intask::key`, this adds
`<project-id>/<config-ident>/intask/key` where `<config-ident>` is the Scala identifier
notation for the configurations like `Compile` and `Test`. (The old shell syntax will continue to function)
These examples work both from the shell and in build.sbt.
Global / cancelable
ThisBuild / scalaVersion
Test / test
root / Compile / compile / scalacOptions
ProjectRef(uri("file:/xxx/helloworld/"),"root")/Compile/scalacOptions
Zero / Zero / name
The inspect command now outputs something that can be copy-pasted:
> inspect compile
[info] Task: sbt.inc.Analysis
[info] Description:
[info] Compiles sources.
[info] Provided by:
[info] ProjectRef(uri("file:/xxx/helloworld/"),"root")/Compile/compile
[info] Defined at:
[info] (sbt.Defaults) Defaults.scala:326
[info] Dependencies:
[info] Compile/manipulateBytecode
[info] Compile/incCompileSetup
....
[#1812][1812]/[#3434][3434]/[#3617][3617]/[#3620][3620] by [@eed3si9n][@eed3si9n] and [@dwijnand][@dwijnand]
### sbt server
sbt server feature was reworked to use Language Server Protocol 3.0 (LSP) as the wire protocol, a protocol created by Microsoft for Visual Studio Code.
To discover a running server, sbt 1.1.0 creates a port file at `./project/target/active.json` relative to a build:
```
{"uri":"local:///Users/foo/.sbt/1.0/server/0845deda85cb41abcdef/sock"}
```
`local:` indicates a UNIX domain socket. Here's how we can say hello to the server using `nc`. (`^M` can be sent `Ctrl-V` then `Return`):
```
$ nc -U /Users/foo/.sbt/1.0/server/0845deda85cb41abcdef/sock
Content-Length: 99^M
^M
{ "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": { "initializationOptions": { } } }^M
```
sbt server adds network access to sbt's shell command so, in addition to accepting input from the terminal, the server also accepts input from the network. Here's how we can call `compile`:
```
Content-Length: 93^M
^M
{ "jsonrpc": "2.0", "id": 2, "method": "sbt/exec", "params": { "commandLine": "compile" } }^M
```
The running sbt session should now queue `compile`, and return back with compiler warnings and errors, if any:
```
Content-Length: 296
Content-Type: application/vscode-jsonrpc; charset=utf-8
{"jsonrpc":"2.0","method":"textDocument/publishDiagnostics","params":{"uri":"file:/Users/foo/work/hellotest/Hello.scala","diagnostics":[{"range":{"start":{"line":2,"character":26},"end":{"line":2,"character":27}},"severity":1,"source":"sbt","message":"object X is not a member of package foo"}]}}
```
[#3524][3524]/[#3556][3556] by [@eed3si9n][@eed3si9n]
### VS Code extension
The primary use case we have in mind for the sbt server is tooling integration such as editors and IDEs. As a proof of concept, we created a Visual Studio Code extension called [Scala (sbt)][vscode-sbt-scala].
Currently this extension is able to:
- Run `compile` at the root project when `*.scala` files are saved. [#3524][3524] by [@eed3si9n][@eed3si9n]
- Display compiler errors.
- Display log messages. [#3740][3740] by [@laughedelic][@laughedelic]
- Jump to class definitions. [#3660][3660]
### Filtering scripted tests using `project/build.properties`
For all scripted tests in which `project/build.properties` exist, the value of the `sbt.version` property is read. If its binary version is different from `sbtBinaryVersion in pluginCrossBuild` the test will be skipped and a message indicating this will be logged.
This allows you to define scripted tests that track the minimum supported sbt versions, e.g. 0.13.9 and 1.0.0-RC2. [#3564][3564]/[#3566][3566] by [@jonas][@jonas]
[@eed3si9n]: https://github.com/eed3si9n
[@dwijnand]: http://github.com/dwijnand
[@jvican]: https://github.com/jvican
[@Duhemm]: https://github.com/Duhemm
[@jonas]: https://github.com/jonas
[@laughedelic]: https://github.com/laughedelic
[@panaeon]: https://github.com/panaeon
[@RomanIakovlev]: https://github.com/RomanIakovlev
[@miklos-martin]: https://github.com/miklos-martin
[@saniyatech]: https://github.com/saniyatech
[@xuwei-k]: https://github.com/xuwei-k
[@wpopielarski]: https://github.com/wpopielarski
[@retronym]: https://github.com/retronym
[@romanowski]: https://github.com/romanowski
[@raboof]: https://github.com/raboof
[@jilen]: https://github.com/jilen
[vscode-sbt-scala]: https://marketplace.visualstudio.com/items?itemName=lightbend.vscode-sbt-scala
[1812]: https://github.com/sbt/sbt/issues/1812
[3524]: https://github.com/sbt/sbt/pull/3524
[3556]: https://github.com/sbt/sbt/pull/3556
[3564]: https://github.com/sbt/sbt/issues/3564
[3566]: https://github.com/sbt/sbt/pull/3566
[3577]: https://github.com/sbt/sbt/pull/3577
[3434]: https://github.com/sbt/sbt/pull/3434
[3601]: https://github.com/sbt/sbt/pull/3601
[3609]: https://github.com/sbt/sbt/pull/3609
[3617]: https://github.com/sbt/sbt/pull/3617
[3620]: https://github.com/sbt/sbt/pull/3620
[3464]: https://github.com/sbt/sbt/issues/3464
[3635]: https://github.com/sbt/sbt/pull/3635
[3659]: https://github.com/sbt/sbt/pull/3659
[3650]: https://github.com/sbt/sbt/pull/3650
[3673]: https://github.com/sbt/sbt/pull/3673
[1055]: https://github.com/sbt/sbt/issues/1055
[3672]: https://github.com/sbt/sbt/pull/3672
[3671]: https://github.com/sbt/sbt/pull/3671
[3693]: https://github.com/sbt/sbt/issues/3693
[3763]: https://github.com/sbt/sbt/pull/3763
[3762]: https://github.com/sbt/sbt/pull/3762
[3740]: https://github.com/sbt/sbt/pull/3740
[3660]: https://github.com/sbt/sbt/pull/3660
[io76]: https://github.com/sbt/io/pull/76
[lm188]: https://github.com/sbt/librarymanagement/pull/188
[zinc450]: https://github.com/sbt/zinc/pull/450
[zinc423]: https://github.com/sbt/zinc/pull/423
[zinc415]: https://github.com/sbt/zinc/issues/415
[zinc411]: https://github.com/sbt/zinc/pull/411
[zinc449]: https://github.com/sbt/zinc/pull/449

View File

@ -1,14 +0,0 @@
[@jonas]: https://github.com/jonas
[#3564]: https://github.com/sbt/sbt/issues/3564
[#3566]: https://github.com/sbt/sbt/pull/3566
### Improvements
- Filter scripted tests based on optional `project/build.properties`. [#3564]/[#3566] by [@jonas]
### Filtering scripted tests using `project/build.properties`.
For all scripted tests in which `project/build.properties` exist, the value of the `sbt.version` property is read. If its binary version is different from `sbtBinaryVersion in pluginCrossBuild` the test will be skipped and a message indicating this will be logged.
This allows you to define scripted tests that track the minimum supported sbt versions, e.g. 0.13.9 and 1.0.0-RC2.

View File

@ -1,3 +0,0 @@
### Improvements
- Changes `version` setting default to `0.1.0-SNAPSHOT` for compatibility with Semantic Versioning

View File

@ -1,11 +0,0 @@
[@dwijnand]: https://github.com/dwijnand
[#3601]: https://github.com/sbt/sbt/pull/3601
### Fixes with compatibility implications
### Improvements
- Adds `Project#withId` to change a project's id. [#3601][] by [@dwijnand][]
### Bug fixes

View File

@ -1,49 +0,0 @@
### Fixes with compatibility implications
-
### Improvements
- Unifies sbt shell and build.sbt syntax. See below.
### Bug fixes
-
### Unified slash syntax for sbt shell and build.sbt
This adds unified slash syntax for both sbt shell and the build.sbt DSL.
Instead of the current `<project-id>/config:intask::key`, this adds
`<project-id>/<config-ident>/intask/key` where `<config-ident>` is the Scala identifier
notation for the configurations like `Compile` and `Test`. (The old shell syntax will continue to function)
These examples work both from the shell and in build.sbt.
Global / cancelable
ThisBuild / scalaVersion
Test / test
root / Compile / compile / scalacOptions
ProjectRef(uri("file:/xxx/helloworld/"),"root")/Compile/scalacOptions
Zero / Zero / name
The inspect command now outputs something that can be copy-pasted:
> inspect compile
[info] Task: sbt.inc.Analysis
[info] Description:
[info] Compiles sources.
[info] Provided by:
[info] ProjectRef(uri("file:/xxx/helloworld/"),"root")/Compile/compile
[info] Defined at:
[info] (sbt.Defaults) Defaults.scala:326
[info] Dependencies:
[info] Compile/manipulateBytecode
[info] Compile/incCompileSetup
....
[#3434][3434] by [@eed3si9n][@eed3si9n]
[3434]: https://github.com/sbt/sbt/pull/3434
[@eed3si9n]: https://github.com/eed3si9n
[@dwijnand]: https://github.com/dwijnand

View File

@ -6,16 +6,16 @@ object Dependencies {
val scala282 = "2.8.2"
val scala292 = "2.9.2"
val scala293 = "2.9.3"
val scala210 = "2.10.6"
val scala211 = "2.11.8"
val scala212 = "2.12.3"
val scala210 = "2.10.7"
val scala211 = "2.11.12"
val scala212 = "2.12.4"
val baseScalaVersion = scala212
// sbt modules
private val ioVersion = "1.1.0"
private val utilVersion = "1.0.2"
private val lmVersion = "1.0.2"
private val zincVersion = "1.0.2"
private val ioVersion = "1.1.1"
private val utilVersion = "1.1.0"
private val lmVersion = "1.1.0"
private val zincVersion = "1.1.0-RC1"
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
@ -30,8 +30,8 @@ object Dependencies {
private val libraryManagementCore = "org.scala-sbt" %% "librarymanagement-core" % lmVersion
private val libraryManagementIvy = "org.scala-sbt" %% "librarymanagement-ivy" % lmVersion
val launcherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.0"
val rawLauncher = "org.scala-sbt" % "launcher" % "1.0.0"
val launcherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.2"
val rawLauncher = "org.scala-sbt" % "launcher" % "1.0.2"
val testInterface = "org.scala-sbt" % "test-interface" % "1.0"
private val compilerInterface = "org.scala-sbt" % "compiler-interface" % zincVersion
@ -101,11 +101,13 @@ object Dependencies {
}
val jline = "jline" % "jline" % "2.14.4"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.1"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.4"
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4"
val specs2 = "org.specs2" %% "specs2" % "2.4.17"
val specs2 = "org.specs2" %% "specs2-junit" % "4.0.1"
val junit = "junit" % "junit" % "4.11"
val templateResolverApi = "org.scala-sbt" % "template-resolver" % "0.1"
val jna = "net.java.dev.jna" % "jna" % "4.1.0"
val jnaPlatform = "net.java.dev.jna" % "jna-platform" % "4.1.0"
private def scala211Module(name: String, moduleVersion: String) = Def setting (
scalaBinaryVersion.value match {
@ -123,4 +125,6 @@ object Dependencies {
val log4jSlf4jImpl = "org.apache.logging.log4j" % "log4j-slf4j-impl" % log4jVersion
// specify all of log4j modules to prevent misalignment
val log4jDependencies = Vector(log4jApi, log4jCore, log4jSlf4jImpl)
val scalaCacheCaffeine = "com.github.cb372" %% "scalacache-caffeine" % "0.20.0"
}

View File

@ -108,6 +108,8 @@ object Scripted {
prescripted: File => Unit,
launchOpts: Seq[String]): Unit = {
System.err.println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}")
// Force Log4J to not use a thread context classloader otherwise it throws a CCE
sys.props(org.apache.logging.log4j.util.LoaderUtil.IGNORE_TCCL_PROPERTY) = "true"
val noJLine = new classpath.FilteredLoader(scriptedSbtInstance.loader, "jline." :: Nil)
val loader = classpath.ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine)
val bridgeClass = Class.forName("sbt.test.ScriptedRunner", true, loader)

View File

@ -10,5 +10,6 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.1")
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.1")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0-M1")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.10")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.14")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.2")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.7.0")

View File

@ -1,26 +0,0 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait InitializeResultFormats { self: ServerCapabilitiesFormats with sjsonnew.BasicJsonProtocol =>
implicit lazy val InitializeResultFormat: JsonFormat[sbt.internal.langserver.InitializeResult] = new JsonFormat[sbt.internal.langserver.InitializeResult] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.langserver.InitializeResult = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val capabilities = unbuilder.readField[Option[sbt.internal.langserver.ServerCapabilities]]("capabilities")
unbuilder.endObject()
sbt.internal.langserver.InitializeResult(capabilities)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.langserver.InitializeResult, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("capabilities", obj.capabilities)
builder.endObject()
}
}
}

View File

@ -1,26 +0,0 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait ServerCapabilitiesFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val ServerCapabilitiesFormat: JsonFormat[sbt.internal.langserver.ServerCapabilities] = new JsonFormat[sbt.internal.langserver.ServerCapabilities] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.langserver.ServerCapabilities = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val hoverProvider = unbuilder.readField[Option[Boolean]]("hoverProvider")
unbuilder.endObject()
sbt.internal.langserver.ServerCapabilities(hoverProvider)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.langserver.ServerCapabilities, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("hoverProvider", obj.hoverProvider)
builder.endObject()
}
}
}

View File

@ -0,0 +1,38 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver
// Contraband-generated immutable payload for the LSP `window/logMessage` notification.
// Regenerate via sbt-contraband instead of editing by hand (see the header above).
final class LogMessageParams private (
/** The message type. */
// NOTE(review): presumably one of the MessageType constants (1=Error..4=Log) — confirm against the LSP spec.
val `type`: Long,
/** The actual message */
val message: String) extends Serializable {
override def equals(o: Any): Boolean = o match {
case x: LogMessageParams => (this.`type` == x.`type`) && (this.message == x.message)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.internal.langserver.LogMessageParams".##) + `type`.##) + message.##)
}
override def toString: String = {
"LogMessageParams(" + `type` + ", " + message + ")"
}
// copy is protected: external callers evolve instances via the with* methods only.
protected[this] def copy(`type`: Long = `type`, message: String = message): LogMessageParams = {
new LogMessageParams(`type`, message)
}
def withType(`type`: Long): LogMessageParams = {
copy(`type` = `type`)
}
def withMessage(message: String): LogMessageParams = {
copy(message = message)
}
}
object LogMessageParams {
def apply(`type`: Long, message: String): LogMessageParams = new LogMessageParams(`type`, message)
}

View File

@ -7,22 +7,24 @@ package sbt.internal.langserver
final class ServerCapabilities private (
val textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions],
/** The server provides hover support. */
val hoverProvider: Option[Boolean]) extends Serializable {
val hoverProvider: Option[Boolean],
/** Goto definition */
val definitionProvider: Option[Boolean]) extends Serializable {
override def equals(o: Any): Boolean = o match {
case x: ServerCapabilities => (this.textDocumentSync == x.textDocumentSync) && (this.hoverProvider == x.hoverProvider)
case x: ServerCapabilities => (this.textDocumentSync == x.textDocumentSync) && (this.hoverProvider == x.hoverProvider) && (this.definitionProvider == x.definitionProvider)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.internal.langserver.ServerCapabilities".##) + textDocumentSync.##) + hoverProvider.##)
37 * (37 * (37 * (37 * (17 + "sbt.internal.langserver.ServerCapabilities".##) + textDocumentSync.##) + hoverProvider.##) + definitionProvider.##)
}
override def toString: String = {
"ServerCapabilities(" + textDocumentSync + ", " + hoverProvider + ")"
"ServerCapabilities(" + textDocumentSync + ", " + hoverProvider + ", " + definitionProvider + ")"
}
protected[this] def copy(textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions] = textDocumentSync, hoverProvider: Option[Boolean] = hoverProvider): ServerCapabilities = {
new ServerCapabilities(textDocumentSync, hoverProvider)
protected[this] def copy(textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions] = textDocumentSync, hoverProvider: Option[Boolean] = hoverProvider, definitionProvider: Option[Boolean] = definitionProvider): ServerCapabilities = {
new ServerCapabilities(textDocumentSync, hoverProvider, definitionProvider)
}
def withTextDocumentSync(textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions]): ServerCapabilities = {
copy(textDocumentSync = textDocumentSync)
@ -36,9 +38,15 @@ final class ServerCapabilities private (
def withHoverProvider(hoverProvider: Boolean): ServerCapabilities = {
copy(hoverProvider = Option(hoverProvider))
}
def withDefinitionProvider(definitionProvider: Option[Boolean]): ServerCapabilities = {
copy(definitionProvider = definitionProvider)
}
def withDefinitionProvider(definitionProvider: Boolean): ServerCapabilities = {
copy(definitionProvider = Option(definitionProvider))
}
}
object ServerCapabilities {
def apply(textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions], hoverProvider: Option[Boolean]): ServerCapabilities = new ServerCapabilities(textDocumentSync, hoverProvider)
def apply(textDocumentSync: sbt.internal.langserver.TextDocumentSyncOptions, hoverProvider: Boolean): ServerCapabilities = new ServerCapabilities(Option(textDocumentSync), Option(hoverProvider))
def apply(textDocumentSync: Option[sbt.internal.langserver.TextDocumentSyncOptions], hoverProvider: Option[Boolean], definitionProvider: Option[Boolean]): ServerCapabilities = new ServerCapabilities(textDocumentSync, hoverProvider, definitionProvider)
def apply(textDocumentSync: sbt.internal.langserver.TextDocumentSyncOptions, hoverProvider: Boolean, definitionProvider: Boolean): ServerCapabilities = new ServerCapabilities(Option(textDocumentSync), Option(hoverProvider), Option(definitionProvider))
}

View File

@ -0,0 +1,34 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver
/** Text documents are identified using a URI. On the protocol level, URIs are passed as strings. */
// Contraband-generated value class; regenerate with sbt-contraband rather than hand-editing.
final class TextDocumentIdentifier private (
/** The text document's URI. */
val uri: String) extends Serializable {
override def equals(o: Any): Boolean = o match {
case x: TextDocumentIdentifier => (this.uri == x.uri)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (17 + "sbt.internal.langserver.TextDocumentIdentifier".##) + uri.##)
}
override def toString: String = {
"TextDocumentIdentifier(" + uri + ")"
}
// copy is protected: external callers evolve instances via withUri only.
protected[this] def copy(uri: String = uri): TextDocumentIdentifier = {
new TextDocumentIdentifier(uri)
}
def withUri(uri: String): TextDocumentIdentifier = {
copy(uri = uri)
}
}
object TextDocumentIdentifier {
def apply(uri: String): TextDocumentIdentifier = new TextDocumentIdentifier(uri)
}

View File

@ -0,0 +1,39 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver
/** Goto definition params model */
// Contraband-generated parameter object pairing a document identifier with a position
// (used by textDocument/definition requests). Regenerate via sbt-contraband.
final class TextDocumentPositionParams private (
/** The text document. */
val textDocument: sbt.internal.langserver.TextDocumentIdentifier,
/** The position inside the text document. */
val position: sbt.internal.langserver.Position) extends Serializable {
override def equals(o: Any): Boolean = o match {
case x: TextDocumentPositionParams => (this.textDocument == x.textDocument) && (this.position == x.position)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (37 * (17 + "sbt.internal.langserver.TextDocumentPositionParams".##) + textDocument.##) + position.##)
}
override def toString: String = {
"TextDocumentPositionParams(" + textDocument + ", " + position + ")"
}
// copy is protected: external callers evolve instances via the with* methods only.
protected[this] def copy(textDocument: sbt.internal.langserver.TextDocumentIdentifier = textDocument, position: sbt.internal.langserver.Position = position): TextDocumentPositionParams = {
new TextDocumentPositionParams(textDocument, position)
}
def withTextDocument(textDocument: sbt.internal.langserver.TextDocumentIdentifier): TextDocumentPositionParams = {
copy(textDocument = textDocument)
}
def withPosition(position: sbt.internal.langserver.Position): TextDocumentPositionParams = {
copy(position = position)
}
}
object TextDocumentPositionParams {
def apply(textDocument: sbt.internal.langserver.TextDocumentIdentifier, position: sbt.internal.langserver.Position): TextDocumentPositionParams = new TextDocumentPositionParams(textDocument, position)
}

View File

@ -16,6 +16,9 @@ trait JsonProtocol extends sjsonnew.BasicJsonProtocol
with sbt.internal.langserver.codec.TextDocumentSyncOptionsFormats
with sbt.internal.langserver.codec.ServerCapabilitiesFormats
with sbt.internal.langserver.codec.InitializeResultFormats
with sbt.internal.langserver.codec.LogMessageParamsFormats
with sbt.internal.langserver.codec.PublishDiagnosticsParamsFormats
with sbt.internal.langserver.codec.SbtExecParamsFormats
with sbt.internal.langserver.codec.TextDocumentIdentifierFormats
with sbt.internal.langserver.codec.TextDocumentPositionParamsFormats
object JsonProtocol extends JsonProtocol

View File

@ -0,0 +1,29 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
// Contraband-generated sjson-new codec for LogMessageParams.
// Reads/writes the two JSON fields "type" and "message"; a missing JSON object is a deserialization error.
trait LogMessageParamsFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val LogMessageParamsFormat: JsonFormat[sbt.internal.langserver.LogMessageParams] = new JsonFormat[sbt.internal.langserver.LogMessageParams] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.langserver.LogMessageParams = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val `type` = unbuilder.readField[Long]("type")
val message = unbuilder.readField[String]("message")
unbuilder.endObject()
sbt.internal.langserver.LogMessageParams(`type`, message)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.langserver.LogMessageParams, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("type", obj.`type`)
builder.addField("message", obj.message)
builder.endObject()
}
}
}

View File

@ -13,8 +13,9 @@ implicit lazy val ServerCapabilitiesFormat: JsonFormat[sbt.internal.langserver.S
unbuilder.beginObject(js)
val textDocumentSync = unbuilder.readField[Option[sbt.internal.langserver.TextDocumentSyncOptions]]("textDocumentSync")
val hoverProvider = unbuilder.readField[Option[Boolean]]("hoverProvider")
val definitionProvider = unbuilder.readField[Option[Boolean]]("definitionProvider")
unbuilder.endObject()
sbt.internal.langserver.ServerCapabilities(textDocumentSync, hoverProvider)
sbt.internal.langserver.ServerCapabilities(textDocumentSync, hoverProvider, definitionProvider)
case None =>
deserializationError("Expected JsObject but found None")
}
@ -23,6 +24,7 @@ implicit lazy val ServerCapabilitiesFormat: JsonFormat[sbt.internal.langserver.S
builder.beginObject()
builder.addField("textDocumentSync", obj.textDocumentSync)
builder.addField("hoverProvider", obj.hoverProvider)
builder.addField("definitionProvider", obj.definitionProvider)
builder.endObject()
}
}

View File

@ -0,0 +1,27 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
// Contraband-generated sjson-new codec for TextDocumentIdentifier.
// Round-trips the single "uri" field; a missing JSON object is a deserialization error.
trait TextDocumentIdentifierFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val TextDocumentIdentifierFormat: JsonFormat[sbt.internal.langserver.TextDocumentIdentifier] = new JsonFormat[sbt.internal.langserver.TextDocumentIdentifier] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.langserver.TextDocumentIdentifier = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val uri = unbuilder.readField[String]("uri")
unbuilder.endObject()
sbt.internal.langserver.TextDocumentIdentifier(uri)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.langserver.TextDocumentIdentifier, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("uri", obj.uri)
builder.endObject()
}
}
}

View File

@ -0,0 +1,29 @@
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
// Contraband-generated sjson-new codec for TextDocumentPositionParams.
// Delegates the "textDocument" and "position" fields to the codecs mixed in via the self-type.
trait TextDocumentPositionParamsFormats { self: sbt.internal.langserver.codec.TextDocumentIdentifierFormats with sbt.internal.langserver.codec.PositionFormats with sjsonnew.BasicJsonProtocol =>
implicit lazy val TextDocumentPositionParamsFormat: JsonFormat[sbt.internal.langserver.TextDocumentPositionParams] = new JsonFormat[sbt.internal.langserver.TextDocumentPositionParams] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.langserver.TextDocumentPositionParams = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val textDocument = unbuilder.readField[sbt.internal.langserver.TextDocumentIdentifier]("textDocument")
val position = unbuilder.readField[sbt.internal.langserver.Position]("position")
unbuilder.endObject()
sbt.internal.langserver.TextDocumentPositionParams(textDocument, position)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.langserver.TextDocumentPositionParams, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("textDocument", obj.textDocument)
builder.addField("position", obj.position)
builder.endObject()
}
}
}

View File

@ -83,6 +83,9 @@ type ServerCapabilities {
## The server provides hover support.
hoverProvider: Boolean
## Goto definition
definitionProvider: Boolean
}
type TextDocumentSyncOptions {
@ -98,6 +101,16 @@ type SaveOptions {
includeText: Boolean
}
# LogMessage Notification
type LogMessageParams {
## The message type.
type: Long!
## The actual message
message: String!
}
# Document
# PublishDiagnostics Notification https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#textDocument_publishDiagnostics
@ -117,3 +130,18 @@ type PublishDiagnosticsParams {
type SbtExecParams {
commandLine: String!
}
## Goto definition params model
type TextDocumentPositionParams {
## The text document.
textDocument: sbt.internal.langserver.TextDocumentIdentifier!
## The position inside the text document.
position: sbt.internal.langserver.Position!
}
## Text documents are identified using a URI. On the protocol level, URIs are passed as strings.
type TextDocumentIdentifier {
## The text document's URI.
uri: String!
}

View File

@ -0,0 +1,34 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
package internal
package langserver
/**
 * Severity constants for LSP `window/logMessage` notifications
 * (carried in `LogMessageParams.type`).
 */
object MessageType {

  /** An error message. */
  val Error = 1L

  /** A warning message. */
  val Warning = 2L

  /** An information message. */
  val Info = 3L

  /** A log message. */
  val Log = 4L

  /**
   * Maps an sbt log level name (case-insensitive) to the corresponding
   * LSP message type; unrecognized levels (e.g. "debug") fall back to [[Log]].
   *
   * @param level the log level name, e.g. "info", "warn", "error"
   * @return one of the MessageType constants
   */
  def fromLevelString(level: String): Long =
    // Use Locale.ROOT so matching is stable regardless of the JVM default
    // locale (e.g. "INFO".toLowerCase in a Turkish locale yields "ınfo",
    // which would silently misclassify the level as Log).
    level.toLowerCase(java.util.Locale.ROOT) match {
      case "info"  => Info
      case "warn"  => Warning
      case "error" => Error
      case _       => Log
    }
}

View File

@ -15,6 +15,7 @@ import java.nio.ByteBuffer
import scala.util.{ Success, Failure }
import sbt.internal.util.StringEvent
import sbt.internal.protocol.{
JsonRpcMessage,
JsonRpcRequestMessage,
JsonRpcResponseMessage,
JsonRpcNotificationMessage
@ -143,15 +144,20 @@ object Serialization {
}
}
private[sbt] def deserializeJsonRequest(
bytes: Seq[Byte]): Either[String, JsonRpcRequestMessage] = {
private[sbt] def deserializeJsonMessage(bytes: Seq[Byte]): Either[String, JsonRpcMessage] = {
val buffer = ByteBuffer.wrap(bytes.toArray)
Parser.parseFromByteBuffer(buffer) match {
case Success(json) =>
import sbt.internal.protocol.codec.JsonRPCProtocol._
Converter.fromJson[JsonRpcRequestMessage](json) match {
case Success(msg) => Right(msg)
case Failure(e) => throw e
case Success(request) if (request.id.nonEmpty) => Right(request)
case Failure(e) => throw e
case _ => {
Converter.fromJson[JsonRpcNotificationMessage](json) match {
case Success(notification) => Right(notification)
case Failure(e) => throw e
}
}
}
case Failure(e) =>
Left(s"Parse error: ${e.getMessage}")

View File

@ -133,7 +133,6 @@ trait Import {
type FeedbackProvidedException = sbt.internal.util.FeedbackProvidedException
type FilePosition = sbt.internal.util.FilePosition
type FilterLogger = sbt.internal.util.FilterLogger
type Fn1[A, B] = sbt.internal.util.Fn1[A, B]
val FullLogger = sbt.internal.util.FullLogger
type FullLogger = sbt.internal.util.FullLogger
val FullReader = sbt.internal.util.FullReader
@ -167,8 +166,6 @@ trait Import {
val NoPosition = sbt.internal.util.NoPosition
val PMap = sbt.internal.util.PMap
type PMap[K[_], V[_]] = sbt.internal.util.PMap[K, V]
val Param = sbt.internal.util.Param
type Param[A[_], B[_]] = sbt.internal.util.Param[A, B]
type RMap[K[_], V[_]] = sbt.internal.util.RMap[K, V]
val RangePosition = sbt.internal.util.RangePosition
type RangePosition = sbt.internal.util.RangePosition

View File

@ -0,0 +1,5 @@
// A no-op command whose only purpose is to carry a name for the check below.
val command = Command.command("noop") { s => s }
TaskKey[Unit]("check") := {
  // `nameOption` should report the name the command was registered with.
  // (Fixed: the failure message previously referenced a nonexistent
  // `command.commandName`, which would mislead anyone debugging a failure.)
  assert(command.nameOption.contains("noop"), """command.nameOption should be Some("noop")""")
}

View File

@ -0,0 +1 @@
> check

View File

@ -0,0 +1,7 @@
name := "inside-ci"
organization := "org.example"
// Expose sbt's `insideCI` setting as a task so the scripted test can evaluate it
// just by loading the build. NOTE(review): presumably `insideCI` reports whether
// sbt detects a CI environment — confirm against the sbt Keys documentation.
val t = taskKey[Boolean]("inside-ci")
t := insideCI.value

View File

@ -0,0 +1,2 @@
# just need to verify it loads
> help

View File

@ -0,0 +1,8 @@
// Scripted check: after copying a mismatched project/build.properties and reloading,
// sbt must have written a version-mismatch warning to the global backing log file.
TaskKey[Unit]("checkSbtVersionWarning") := {
val state = Keys.state.value
val logging = state.globalLogging
// Version of the sbt provider actually running this build.
val currVersion = state.configuration.provider.id.version()
// Read the global log and require the exact warning text; "1.1.1" matches the
// sbt.version pinned by the test's changes/build.properties.
val contents = IO.read(logging.backing.file)
assert(contents.contains(s"""sbt version mismatch, current: $currVersion, in build.properties: "1.1.1", use 'reboot' to use the new value."""))
()
}

View File

@ -0,0 +1 @@
sbt.version=1.1.1

View File

@ -0,0 +1,4 @@
> help
$ copy-file changes/build.properties project/build.properties
> reload
> checkSbtVersionWarning

View File

@ -2,6 +2,7 @@ lazy val runClient = taskKey[Unit]("")
lazy val root = (project in file("."))
.settings(
serverConnectionType in Global := ConnectionType.Tcp,
scalaVersion := "2.12.3",
serverPort in Global := 5123,
libraryDependencies += "org.scala-sbt" %% "io" % "1.0.1",

View File

@ -0,0 +1,8 @@
libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.13.5" % "test"
version := "0.0.1"
name := "broken"
organization := "org.catastrophe"
//scalaVersion := "2.10.6"
scalaVersion := "2.12.3"

View File

@ -0,0 +1,25 @@
package q
//
// On 1.0.3+ this test will say:
// [info] + Nesting.startsWith: OK, passed 100 tests.
// [info] Passed: Total 1, Failed 0, Errors 0, Passed 1
//
// On 1.0.0 to 1.0.2 it will crash with:
// [error] java.lang.ClassNotFoundException: q.X.Y$
//
import org.scalacheck.{Prop, Properties}
import Prop.forAll
// Base suite; both the outer and the nested object extend it.
class U extends Properties("Nesting")
object X extends U {
property("startsWith") = forAll { (a: String, b: String) =>
(a+b).startsWith(a)
}
// Y being nested inside X is the whole point of this regression test: per the
// header comment, sbt 1.0.0-1.0.2 crashed with ClassNotFoundException: q.X.Y$
// when loading the nested suite. Do not flatten this structure.
object Y extends U {
property("endsWith") = forAll { (a: String, b: String) =>
(a+b).endsWith(b)
}
}
}

View File

@ -0,0 +1,2 @@
> test

View File

@ -0,0 +1,117 @@
/*
* sbt
* Copyright 2011 - 2017, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under BSD-3-Clause license (see LICENSE)
*/
package sbt
import scala.annotation.tailrec
import xsbti._
// Entry point for running sbt directly from source (e.g. from an IDE or test build),
// bypassing the normal sbt launcher by hand-wiring minimal xsbti launcher interfaces.
object RunFromSourceMain {
// Hard-coded versions the fake launcher reports.
// NOTE(review): sbtVersion is pinned to "1.0.3" rather than the build's own version — confirm this is intentional.
private val sbtVersion = "1.0.3" // "dev"
private val scalaVersion = "2.12.4"

// First argument is the working directory; remaining arguments are forwarded to sbt.
def main(args: Array[String]): Unit = args match {
case Array() => sys.error(s"Must specify working directory as the first argument")
case Array(wd, args @ _*) => run(file(wd), args)
}

// this arrangement is because Scala does not always properly optimize away
// the tail recursion in a catch statement
@tailrec private def run(baseDir: File, args: Seq[String]): Unit =
runImpl(baseDir, args) match {
case Some((baseDir, args)) => run(baseDir, args)
case None => ()
}

// One launch attempt. Returns Some((dir, args)) when sbt requested a full reload
// (xsbti.FullReload), so `run` can loop; `exit` never returns on a normal exit code.
private def runImpl(baseDir: File, args: Seq[String]): Option[(File, Seq[String])] =
try launch(getConf(baseDir, args)) map exit
catch {
case r: xsbti.FullReload => Some((baseDir, r.arguments()))
case scala.util.control.NonFatal(e) => e.printStackTrace(); errorAndExit(e.toString)
}

// Runs xMain, looping in place on Reboot; Exit yields the exit code, Continue yields None.
@tailrec private def launch(conf: AppConfiguration): Option[Int] =
new xMain().run(conf) match {
case e: xsbti.Exit => Some(e.code)
case _: xsbti.Continue => None
case r: xsbti.Reboot => launch(getConf(conf.baseDirectory(), r.arguments()))
case x => handleUnknownMainResult(x)
}

// No cross-process locking when running from source: just run the callable directly.
private val noGlobalLock = new GlobalLock {
def apply[T](lockFile: File, run: java.util.concurrent.Callable[T]) = run.call()
}

// Builds a minimal in-memory AppConfiguration standing in for the real sbt launcher.
// Scala jars are resolved from the conventional boot directory layout
// (~/.sbt/boot/scala-<version>/lib) — NOTE(review): assumes that layout exists; confirm.
private def getConf(baseDir: File, args: Seq[String]): AppConfiguration = new AppConfiguration {
def baseDirectory = baseDir
def arguments = args.toArray
def provider = new AppProvider { appProvider =>
def scalaProvider = new ScalaProvider { scalaProvider =>
def scalaOrg = "org.scala-lang"
def launcher = new Launcher {
def getScala(version: String) = getScala(version, "")
def getScala(version: String, reason: String) = getScala(version, reason, scalaOrg)
def getScala(version: String, reason: String, scalaOrg: String) = scalaProvider
def app(id: xsbti.ApplicationID, version: String) = appProvider
// Parentless classloader: isolates the launched app from this JVM's classpath.
def topLoader = new java.net.URLClassLoader(Array(), null)
def globalLock = noGlobalLock
def bootDirectory = file(sys.props("user.home")) / ".sbt" / "boot"
def ivyRepositories = Array()
def appRepositories = Array()
def isOverrideRepositories = false
def ivyHome = file(sys.props("user.home")) / ".ivy2"
def checksums = Array("sha1", "md5")
}
def version = scalaVersion
def libDir: File = launcher.bootDirectory / s"scala-$version" / "lib"
def jar(name: String): File = libDir / s"$name.jar"
def libraryJar = jar("scala-library")
def compilerJar = jar("scala-compiler")
def jars = libDir.listFiles(f => !f.isDirectory && f.getName.endsWith(".jar"))
def loader = new java.net.URLClassLoader(jars map (_.toURI.toURL), null)
def app(id: xsbti.ApplicationID) = appProvider
}
def id = ApplicationID(
"org.scala-sbt",
"sbt",
sbtVersion,
"sbt.xMain",
Seq("xsbti", "extra"),
CrossValue.Disabled,
Nil
)
// The application classpath comes from generated build info; entries are rendered
// as "Attributed(<path>)" strings, so strip that wrapper to recover the files.
def mainClasspath =
buildinfo.TestBuildInfo.fullClasspath.iterator
.map(s => file(s.stripPrefix("Attributed(").stripSuffix(")")))
.toArray
def loader = new java.net.URLClassLoader(mainClasspath map (_.toURI.toURL), null)
def entryPoint = classOf[xMain]
def mainClass = classOf[xMain]
def newMain = new xMain
// Component management is unsupported in this from-source harness (deliberately ???).
def components = new ComponentProvider {
def componentLocation(id: String) = ???
def component(componentID: String) = ???
def defineComponent(componentID: String, components: Array[File]) = ???
def addToComponent(componentID: String, components: Array[File]) = ???
def lockFile = ???
}
}
}

private def handleUnknownMainResult(x: MainResult): Nothing = {
val clazz = if (x eq null) "" else " (class: " + x.getClass + ")"
errorAndExit("Invalid main result: " + x + clazz)
}
private def errorAndExit(msg: String): Nothing = { System.err.println(msg); exit(1) }
// System.exit never returns; the cast gives this the type Nothing for callers.
private def exit(code: Int): Nothing = System.exit(code).asInstanceOf[Nothing]
}

View File

@ -37,9 +37,7 @@ object Transform {
/** Applies `map`, returning the result if defined or returning the input unchanged otherwise.*/
implicit def getOrId(map: Task ~>| Task): Task ~> Task =
new (Task ~> Task) {
def apply[T](in: Task[T]): Task[T] = map(in).getOrElse(in)
}
λ[Task ~> Task](in => map(in).getOrElse(in))
def apply(dummies: DummyTaskMap) = taskToNode(getOrId(dummyMap(dummies)))
@ -48,7 +46,7 @@ object Transform {
case Pure(eval, _) => uniform(Nil)(_ => Right(eval()))
case m: Mapped[t, k] => toNode[t, k](m.in)(right m.f)(m.alist)
case m: FlatMapped[t, k] => toNode[t, k](m.in)(left m.f)(m.alist)
case DependsOn(in, deps) => uniform(existToAny(deps))(const(Left(in)) all)
case DependsOn(in, deps) => uniform(existToAny(deps))(const(Left(in)) compose all)
case Join(in, f) => uniform(in)(f)
}
def inline[T](t: Task[T]) = t.work match {
@ -58,7 +56,7 @@ object Transform {
}
def uniform[T, D](tasks: Seq[Task[D]])(f: Seq[Result[D]] => Either[Task[T], T]): Node[Task, T] =
toNode[T, ({ type l[L[x]] = List[L[D]] })#l](tasks.toList)(f)(AList.seq[D])
toNode[T, λ[L[x] => List[L[D]]]](tasks.toList)(f)(AList.seq[D])
def toNode[T, k[L[x]]](inputs: k[Task])(f: k[Result] => Either[Task[T], T])(
implicit a: AList[k]): Node[Task, T] = new Node[Task, T] {

View File

@ -118,7 +118,7 @@ trait TaskExtra {
}
final implicit def multT2Task[A, B](in: (Task[A], Task[B])) =
multInputTask[({ type l[L[x]] = (L[A], L[B]) })#l](in)(AList.tuple2[A, B])
multInputTask[λ[L[x] => (L[A], L[B])]](in)(AList.tuple2[A, B])
final implicit def multInputTask[K[L[X]]](tasks: K[Task])(implicit a: AList[K]): MultiInTask[K] =
new MultiInTask[K] {
@ -248,7 +248,7 @@ object TaskExtra extends TaskExtra {
}
def reducePair[S](a: Task[S], b: Task[S], f: (S, S) => S): Task[S] =
multInputTask[({ type l[L[x]] = (L[S], L[S]) })#l]((a, b))(AList.tuple2[S, S]) map f.tupled
multInputTask[λ[L[x] => (L[S], L[S])]]((a, b))(AList.tuple2[S, S]) map f.tupled
def anyFailM[K[L[x]]](implicit a: AList[K]): K[Result] => Seq[Incomplete] = in => {
val incs = failuresM(a)(in)

View File

@ -11,9 +11,9 @@ import sbt.internal.util.AList
object Test extends std.TaskExtra {
def t2[A, B](a: Task[A], b: Task[B]) =
multInputTask[({ type l[L[x]] = (L[A], L[B]) })#l]((a, b))(AList.tuple2)
multInputTask[λ[L[x] => (L[A], L[B])]]((a, b))(AList.tuple2)
def t3[A, B, C](a: Task[A], b: Task[B], c: Task[C]) =
multInputTask[({ type l[L[x]] = (L[A], L[B], L[C]) })#l]((a, b, c))(AList.tuple3)
multInputTask[λ[L[x] => (L[A], L[B], L[C])]]((a, b, c))(AList.tuple3)
val a = task(3)
val b = task[Boolean](sys.error("test"))

View File

@ -54,15 +54,16 @@ private[sbt] final class Execute[A[_] <: AnyRef](
private[this] val reverse = idMap[A[_], Iterable[A[_]]]
private[this] val callers = pMap[A, Compose[IDSet, A]#Apply]
private[this] val state = idMap[A[_], State]
private[this] val viewCache = pMap[A, ({ type l[t] = Node[A, t] })#l]
private[this] val viewCache = pMap[A, Node[A, ?]]
private[this] val results = pMap[A, Result]
private[this] val getResult: A ~> Result = new (A ~> Result) {
def apply[T](a: A[T]): Result[T] = view.inline(a) match {
case Some(v) => Value(v())
case None => results(a)
private[this] val getResult: A ~> Result = λ[A ~> Result](
a =>
view.inline(a) match {
case Some(v) => Value(v())
case None => results(a)
}
}
)
private[this] var progressState: progress.S = progress.initial
private[this] type State = State.Value

View File

@ -28,12 +28,9 @@ final case class Value[+T](value: T) extends Result[T] {
object Result {
type Id[X] = X
val tryValue = new (Result ~> Id) {
def apply[T](r: Result[T]): T =
r match {
case Value(v) => v
case Inc(i) => throw i
}
val tryValue = λ[Result ~> Id] {
case Value(v) => v
case Inc(i) => throw i
}
def tryValues[S](r: Seq[Result[Unit]], v: Result[S]): S = {
r foreach tryValue[Unit]

View File

@ -3,4 +3,7 @@ Scala language support using sbt
This is an experimental Scala language support using sbt as the language server.
To try this, use sbt 1.1.0-M1 and above. Saving `*.scala` will trigger `compile` task.
To try this, use sbt 1.1.0-RC1 or above.
- Saving `*.scala` will trigger `compile` task.
- Jump to definition support for class names.

View File

@ -1,7 +1,7 @@
{
"name": "vscode-sbt-scala",
"displayName": "Scala (sbt)",
"version": "0.0.2",
"version": "0.1.0",
"author": "Lightbend, Inc.",
"license": "BSD-3-Clause",
"publisher": "lightbend",

View File

@ -23,19 +23,25 @@ export function activate(context: ExtensionContext) {
let clientOptions: LanguageClientOptions = {
documentSelector: [{ language: 'scala', scheme: 'file' }, { language: 'java', scheme: 'file' }],
initializationOptions: () => {
return {
token: discoverToken()
};
return discoverToken();
}
}
// the port file is hardcoded to a particular location relative to the build.
function discoverToken(): String {
function discoverToken(): any {
let pf = path.join(workspace.rootPath, 'project', 'target', 'active.json');
let portfile = JSON.parse(fs.readFileSync(pf));
let tf = portfile.tokenfilePath;
let tokenfile = JSON.parse(fs.readFileSync(tf));
return tokenfile.token;
// if tokenfilepath exists, return the token.
if (portfile.hasOwnProperty('tokenfilePath')) {
let tf = portfile.tokenfilePath;
let tokenfile = JSON.parse(fs.readFileSync(tf));
return {
token: tokenfile.token
};
} else {
return {};
}
}
// Create the language client and start the client.

View File

@ -4,6 +4,7 @@ import * as path from 'path';
import * as url from 'url';
let net = require('net'),
fs = require('fs'),
os = require('os'),
stdin = process.stdin,
stdout = process.stdout;
@ -16,7 +17,17 @@ socket.on('data', (chunk: any) => {
}).on('end', () => {
stdin.pause();
});
socket.connect(u.port, '127.0.0.1');
if (u.protocol == 'tcp:') {
socket.connect(u.port, '127.0.0.1');
} else if (u.protocol == 'local:' && os.platform() == 'win32') {
let pipePath = '\\\\.\\pipe\\' + u.hostname;
socket.connect(pipePath);
} else if (u.protocol == 'local:') {
socket.connect(u.path);
} else {
throw 'Unknown protocol ' + u.protocol;
}
stdin.resume();
stdin.on('data', (chunk: any) => {