2015-06-25 01:18:57 +02:00
|
|
|
package coursier
|
|
|
|
|
|
2016-03-04 00:41:07 +01:00
|
|
|
import java.math.BigInteger
|
2016-03-06 14:45:58 +01:00
|
|
|
import java.net.{ HttpURLConnection, URL, URLConnection, URLStreamHandler }
|
2015-07-06 02:48:26 +02:00
|
|
|
import java.nio.channels.{ OverlappingFileLockException, FileLock }
|
2015-12-30 01:34:48 +01:00
|
|
|
import java.nio.file.{ StandardCopyOption, Files => NioFiles }
|
2015-07-05 15:41:38 +02:00
|
|
|
import java.security.MessageDigest
|
2016-03-04 00:41:07 +01:00
|
|
|
import java.util.concurrent.{ ConcurrentHashMap, Executors, ExecutorService }
|
|
|
|
|
import java.util.regex.Pattern
|
2015-06-25 01:18:57 +02:00
|
|
|
|
2015-12-31 16:26:18 +01:00
|
|
|
import coursier.ivy.IvyRepository
|
|
|
|
|
|
2015-06-25 01:18:57 +02:00
|
|
|
import scala.annotation.tailrec
|
2016-02-24 20:17:23 +01:00
|
|
|
|
2015-07-05 15:41:38 +02:00
|
|
|
import scalaz._
|
2016-02-24 20:17:23 +01:00
|
|
|
import scalaz.Scalaz.ToEitherOps
|
2015-07-04 16:19:43 +02:00
|
|
|
import scalaz.concurrent.{ Task, Strategy }
|
2015-06-25 01:18:57 +02:00
|
|
|
|
2015-12-30 01:34:32 +01:00
|
|
|
import java.io.{ Serializable => _, _ }
|
2015-06-25 01:18:57 +02:00
|
|
|
|
2015-12-30 01:34:41 +01:00
|
|
|
object Cache {
|
2015-07-04 16:19:43 +02:00
|
|
|
|
2016-01-14 01:05:36 +01:00
|
|
|
// Check SHA-1 if available, else be fine with no checksum
val defaultChecksums = Seq(Some("SHA-1"), None)

// Characters percent-encoded by `escape` below (in addition to non-ASCII ones)
private val unsafeChars: Set[Char] = " %$&+,:;=?@<>#".toSet
|
|
|
|
|
|
|
|
|
|
// Scala version of http://stackoverflow.com/questions/4571346/how-to-encode-url-to-avoid-special-characters-in-java/4605848#4605848
// '/' was removed from the unsafe character list

/**
 * Percent-encodes the characters of `input` that are unsafe in a path
 * (see `unsafeChars` above, plus characters above code point 128).
 *
 * NOTE(review): the two-hex-digit encoding below is only correct for
 * characters in the 0-255 range (inherited from the original snippet) —
 * code points above 255 produce bogus escapes.
 */
private def escape(input: String): String = {

  // Hex digit for a value in [0, 15]
  def toHex(ch: Int) =
    (if (ch < 10) '0' + ch else 'A' + ch - 10).toChar

  // The previous `ch < 0` test was dropped: Scala's Char is unsigned
  // (0 to 65535), so it could never be true.
  // NOTE(review): char 128 itself is NOT escaped (`>` rather than `>=`) —
  // kept as-is to preserve the original behavior.
  def isUnsafe(ch: Char) =
    ch > 128 || unsafeChars(ch)

  input.flatMap {
    case ch if isUnsafe(ch) =>
      "%" + toHex(ch / 16) + toHex(ch % 16)
    case other =>
      other.toString
  }
}
|
|
|
|
|
|
2016-03-06 14:45:57 +01:00
|
|
|
// Returns `artifact` with a copy of itself, rewritten to point at local cache
// paths, stashed under the "local" key of its `extra` map. Idempotent: if a
// "local" entry is already there, the artifact is returned unchanged.
private def withLocal(artifact: Artifact, cache: File): Artifact = {

  // Maps a URL to the path of its local copy: file: URLs are turned into
  // plain paths, anything else into a path under `cache` (protocol/rest).
  def local(url: String) =
    if (url.startsWith("file:///"))
      url.stripPrefix("file://")
    else if (url.startsWith("file:/"))
      url.stripPrefix("file:")
    else
      // FIXME Should we fully parse the URL here?
      // FIXME Should some safeguards be added against '..' components in paths?
      url.split(":", 2) match {
        case Array(protocol, remaining) =>
          val remaining0 =
            if (remaining.startsWith("///"))
              remaining.stripPrefix("///")
            else if (remaining.startsWith("/"))
              remaining.stripPrefix("/")
            else
              throw new Exception(s"URL $url doesn't contain an absolute path")

          new File(cache, escape(protocol + "/" + remaining0)).toString

        case _ =>
          throw new Exception(s"No protocol found in URL $url")
      }

  if (artifact.extra.contains("local"))
    artifact
  else
    artifact.copy(extra = artifact.extra + ("local" ->
      artifact.copy(
        url = local(artifact.url),
        checksumUrls = artifact.checksumUrls
          .mapValues(local)
          .toVector
          .toMap,
        // avoid infinite nesting of "local" entries
        extra = Map.empty
      )
    ))
}
|
|
|
|
|
|
2015-12-30 01:34:48 +01:00
|
|
|
/**
 * Copies `in` to `out` until EOF, flushing after every chunk, and reporting
 * the cumulative byte count (starting from `alreadyDownloaded`) to `logger`.
 * Neither stream is closed here.
 */
private def readFullyTo(
  in: InputStream,
  out: OutputStream,
  logger: Option[Logger],
  url: String,
  alreadyDownloaded: Long
): Unit = {

  val buf = new Array[Byte](bufferSize)

  @tailrec
  def loop(total: Long): Unit = {
    val n = in.read(buf)
    if (n >= 0) {
      out.write(buf, 0, n)
      out.flush()
      val total0 = total + n
      logger.foreach(_.downloadProgress(url, total0))
      loop(total0)
    }
  }

  loop(alreadyDownloaded)
}
|
|
|
|
|
|
2016-03-13 22:57:29 +01:00
|
|
|
// Per-cache-directory in-JVM monitors, backing `withStructureLock` below
private val processStructureLocks = new ConcurrentHashMap[File, AnyRef]
|
|
|
|
|
|
|
|
|
|
/**
 * Should be acquired when doing operations changing the file structure of the cache (creating
 * new directories, creating / acquiring locks, ...), so that these don't hinder each other.
 *
 * Should hopefully address some transient errors seen on the CI of ensime-server.
 */
private def withStructureLock[T](cache: File)(f: => T): T = {

  // In-JVM monitor for this cache directory (races on put resolved via putIfAbsent)
  val intraProcessLock = Option(processStructureLocks.get(cache)).getOrElse {
    val lock = new AnyRef
    val prev = Option(processStructureLocks.putIfAbsent(cache, lock))
    prev.getOrElse(lock)
  }

  intraProcessLock.synchronized {
    // Cross-process exclusion via a lock file at the cache root
    val lockFile = new File(cache, ".structure.lock")
    lockFile.getParentFile.mkdirs()
    var out = new FileOutputStream(lockFile)

    try {
      var lock: FileLock = null
      try {
        // blocks until the file lock is available
        lock = out.getChannel.lock()

        try f
        finally {
          // Release / close eagerly on the happy path, and null things out so
          // the outer finally blocks don't release or close a second time.
          lock.release()
          lock = null
          out.close()
          out = null
          lockFile.delete()
        }
      }
      finally if (lock != null) lock.release()
    } finally if (out != null) out.close()
  }
}
|
|
|
|
|
|
|
|
|
|
// Runs `f` while holding an exclusive lock on `file`'s companion ".lock" file.
// Returns FileError.Locked when the lock is held by another process (tryLock
// returns null) or by this JVM (OverlappingFileLockException).
private def withLockFor[T](cache: File, file: File)(f: => FileError \/ T): FileError \/ T = {
  val lockFile = new File(file.getParentFile, s"${file.getName}.lock")

  var out: FileOutputStream = null

  // Directory / lock-file creation must go through the cache structure lock
  withStructureLock(cache) {
    lockFile.getParentFile.mkdirs()
    out = new FileOutputStream(lockFile)
  }

  try {
    var lock: FileLock = null
    try {
      // non-blocking attempt — null means someone else holds the lock
      lock = out.getChannel.tryLock()
      if (lock == null)
        -\/(FileError.Locked(file))
      else
        try f
        finally {
          // Null out after releasing / closing so the outer finally blocks
          // don't release or close twice.
          lock.release()
          lock = null
          out.close()
          out = null
          lockFile.delete()
        }
    }
    catch {
      // thrown when this JVM already holds a lock on that file
      case e: OverlappingFileLockException =>
        -\/(FileError.Locked(file))
    }
    finally if (lock != null) lock.release()
  } finally if (out != null) out.close()
}
|
|
|
|
|
|
|
|
|
|
// Runs the download body `f` for `url` -> `file`, ensuring at most one
// download per URL is in flight in this JVM (via `urlLocks`), and notifying
// `logger` of the start and the outcome.
private def downloading[T](
  url: String,
  file: File,
  logger: Option[Logger]
)(
  f: => FileError \/ T
): FileError \/ T =
  try {
    val o = new Object
    val prev = urlLocks.putIfAbsent(url, o)

    // prev == null means we won the race and own this URL's download
    if (prev == null) {
      logger.foreach(_.downloadingArtifact(url, file))

      // Both sides of `res` are FileError \/ T; they are merged below.
      // The left side carries errors caught here, the right the result of `f`.
      val res =
        try \/-(f)
        catch {
          case nfe: FileNotFoundException if nfe.getMessage != null =>
            logger.foreach(_.downloadedArtifact(url, success = false))
            -\/(-\/(FileError.NotFound(nfe.getMessage)))
          case e: Exception =>
            logger.foreach(_.downloadedArtifact(url, success = false))
            throw e
        }
        finally {
          urlLocks.remove(url)
        }

      // only reached when `f` itself ran (right side)
      for (res0 <- res)
        logger.foreach(_.downloadedArtifact(url, success = res0.isRight))

      res.merge
    } else
      -\/(FileError.ConcurrentDownload(url))
  }
  catch { case e: Exception =>
    -\/(FileError.DownloadError(s"Caught $e (${e.getMessage})"))
  }
|
|
|
|
|
|
|
|
|
|
/** The ".part" sibling file a download is staged into before being moved into place. */
private def temporaryFile(file: File): File =
  new File(file.getParentFile, file.getName + ".part")
|
|
|
|
|
|
2015-12-30 01:34:49 +01:00
|
|
|
// HTTP 206, returned when a Range request is honored (used for download resume)
private val partialContentResponseCode = 206

// Per-protocol handler lookups, cached (None = no custom handler class found)
private val handlerClsCache = new ConcurrentHashMap[String, Option[URLStreamHandler]]
|
|
|
|
|
|
|
|
|
|
// Looks up (and caches in `handlerClsCache`) a custom URLStreamHandler for
// the protocol of `url`, by loading a class named
// `coursier.cache.protocol.<Protocol>Handler` from the context class loader.
private def handlerFor(url: String): Option[URLStreamHandler] = {
  val protocol = url.takeWhile(_ != ':')

  Option(handlerClsCache.get(protocol)) match {
    case None =>
      val clsName = s"coursier.cache.protocol.${protocol.capitalize}Handler"
      val clsOpt =
        try Some(Thread.currentThread().getContextClassLoader.loadClass(clsName))
        catch {
          case _: ClassNotFoundException =>
            None
        }

      def printError(e: Exception): Unit =
        scala.Console.err.println(
          // Fix: the previous interpolation embedded the Option itself,
          // printing "Some( (msg))" or "None" instead of the message.
          s"Cannot instantiate $clsName: $e${Option(e.getMessage).fold("")(" (" + _ + ")")}"
        )

      val handlerOpt = clsOpt.flatMap {
        cls =>
          try Some(cls.newInstance().asInstanceOf[URLStreamHandler])
          catch {
            case e: InstantiationException =>
              printError(e)
              None
            case e: IllegalAccessException =>
              printError(e)
              None
            case e: ClassCastException =>
              printError(e)
              None
          }
      }

      // Cache the result; on a race, the first writer wins
      val prevOpt = Option(handlerClsCache.putIfAbsent(protocol, handlerOpt))
      prevOpt.getOrElse(handlerOpt)

    case Some(handlerOpt) =>
      handlerOpt
  }
}
|
|
|
|
|
|
|
|
|
|
/**
 * Returns a `java.net.URL` for `s`, possibly using the custom protocol handlers found under the
 * `coursier.cache.protocol` namespace.
 *
 * E.g. URL `"test://abc.com/foo"`, having protocol `"test"`, can be handled by a
 * `URLStreamHandler` named `coursier.cache.protocol.TestHandler` (protocol name gets
 * capitalized, and suffixed with `Handler` to get the class name).
 *
 * @param s the URL string to parse
 * @return a `URL`, backed by a custom handler if one exists for the protocol of `s`
 */
def url(s: String): URL =
  new URL(null, s, handlerFor(s).orNull)
|
|
|
|
|
|
2015-12-30 01:34:43 +01:00
|
|
|
// Downloads `artifact` (and the files of the requested `checksums` it has)
// into `cache`, honoring `cachePolicy`. Yields one entry per file:
// ((local file, original URL), error-or-unit). Runs downloads in parallel on `pool`.
private def download(
  artifact: Artifact,
  cache: File,
  checksums: Set[String],
  cachePolicy: CachePolicy,
  pool: ExecutorService,
  logger: Option[Logger] = None
): Task[Seq[((File, String), FileError \/ Unit)]] = {

  implicit val pool0 = pool

  // local (cache path) version of the artifact
  val artifact0 = withLocal(artifact, cache)
    .extra
    .getOrElse("local", artifact)

  // Reference file - if it exists, and we get not found errors on some URLs, we assume
  // we can keep track of these missing, and not try to get them again later.
  val referenceFileOpt = {
    val referenceOpt = artifact.extra.get("metadata").map(withLocal(_, cache))
    val referenceOpt0 = referenceOpt.map(a => a.extra.getOrElse("local", a))

    referenceOpt0.map(a => new File(a.url))
  }

  def referenceFileExists: Boolean = referenceFileOpt.exists(_.exists())

  // (local path, remote URL) pairs: the artifact itself plus its requested checksum files
  val pairs =
    Seq(artifact0.url -> artifact.url) ++ {
      checksums
        .intersect(artifact0.checksumUrls.keySet)
        .intersect(artifact.checksumUrls.keySet)
        .toSeq
        .map(sumType => artifact0.checksumUrls(sumType) -> artifact.checksumUrls(sumType))
    }

  def urlConn(url0: String) = {
    val conn = url(url0).openConnection() // FIXME Should this be closed?
    // Dummy user-agent instead of the default "Java/...",
    // so that we are not returned incomplete/erroneous metadata
    // (Maven 2 compatibility? - happens for snapshot versioning metadata,
    // this is SO FSCKING CRAZY)
    conn.setRequestProperty("User-Agent", "")
    conn
  }

  // last-modified time of `file`, None if unavailable
  def fileLastModified(file: File): EitherT[Task, FileError, Option[Long]] =
    EitherT {
      Task {
        \/- {
          val lastModified = file.lastModified()
          if (lastModified > 0L)
            Some(lastModified)
          else
            None
        } : FileError \/ Option[Long]
      }
    }

  // remote last-modified time of `url`, via an HTTP HEAD request
  def urlLastModified(
    url: String,
    currentLastModifiedOpt: Option[Long], // for the logger
    logger: Option[Logger]
  ): EitherT[Task, FileError, Option[Long]] =
    EitherT {
      Task {
        urlConn(url) match {
          case c: HttpURLConnection =>
            logger.foreach(_.checkingUpdates(url, currentLastModifiedOpt))

            var success = false
            try {
              c.setRequestMethod("HEAD")
              val remoteLastModified = c.getLastModified

              // TODO 404 Not found could be checked here

              val res =
                if (remoteLastModified > 0L)
                  Some(remoteLastModified)
                else
                  None

              success = true
              logger.foreach(_.checkingUpdatesResult(url, currentLastModifiedOpt, res))

              res.right
            } finally {
              // make sure the logger always gets a closing event
              if (!success)
                logger.foreach(_.checkingUpdatesResult(url, currentLastModifiedOpt, None))
            }

          case other =>
            -\/(FileError.DownloadError(s"Cannot do HEAD request with connection $other ($url)"))
        }
      }
    }

  def fileExists(file: File): Task[Boolean] =
    Task {
      file.exists()
    }

  // true when `file` is missing, looks older than the remote copy,
  // or the dates cannot be compared
  def shouldDownload(file: File, url: String): EitherT[Task, FileError, Boolean] = {
    def check = for {
      fileLastModOpt <- fileLastModified(file)
      urlLastModOpt <- urlLastModified(url, fileLastModOpt, logger)
    } yield {
      val fromDatesOpt = for {
        fileLastMod <- fileLastModOpt
        urlLastMod <- urlLastModOpt
      } yield fileLastMod < urlLastMod

      // download when either date is unavailable
      fromDatesOpt.getOrElse(true)
    }

    EitherT {
      fileExists(file).flatMap {
        case false =>
          Task.now(true.right)
        case true =>
          check.run
      }
    }
  }

  def is404(conn: URLConnection) =
    conn match {
      case conn0: HttpURLConnection =>
        conn0.getResponseCode == 404
      case _ =>
        false
    }

  // Actual download of `url` to `file`, resuming a previous partial download
  // (".part" file) via an HTTP Range request when possible.
  def remote(file: File, url: String): EitherT[Task, FileError, Unit] =
    EitherT {
      Task {
        withLockFor(cache, file) {
          downloading(url, file, logger) {
            val tmp = temporaryFile(file)

            val alreadyDownloaded = tmp.length()

            val conn0 = urlConn(url)

            val (partialDownload, conn) = conn0 match {
              case conn0: HttpURLConnection if alreadyDownloaded > 0L =>
                conn0.setRequestProperty("Range", s"bytes=$alreadyDownloaded-")

                if (conn0.getResponseCode == partialContentResponseCode) {
                  val ackRange = Option(conn0.getHeaderField("Content-Range")).getOrElse("")

                  if (ackRange.startsWith(s"bytes $alreadyDownloaded-"))
                    (true, conn0)
                  else
                    // unrecognized Content-Range header -> start a new connection with no resume
                    (false, urlConn(url))
                } else
                  (false, conn0)

              case _ => (false, conn0)
            }

            if (is404(conn))
              FileError.NotFound(url, permanent = Some(true)).left
            else {
              for (len0 <- Option(conn.getContentLengthLong) if len0 >= 0L) {
                val len = len0 + (if (partialDownload) alreadyDownloaded else 0L)
                logger.foreach(_.downloadLength(url, len, alreadyDownloaded))
              }

              val in = new BufferedInputStream(conn.getInputStream, bufferSize)

              val result =
                try {
                  val out = withStructureLock(cache) {
                    tmp.getParentFile.mkdirs()
                    // append mode when resuming a partial download
                    new FileOutputStream(tmp, partialDownload)
                  }
                  try \/-(readFullyTo(in, out, logger, url, if (partialDownload) alreadyDownloaded else 0L))
                  finally out.close()
                } finally in.close()

              // atomically promote the ".part" file to the final location
              withStructureLock(cache) {
                file.getParentFile.mkdirs()
                NioFiles.move(tmp.toPath, file.toPath, StandardCopyOption.ATOMIC_MOVE)
              }

              for (lastModified <- Option(conn.getLastModified) if lastModified > 0L)
                file.setLastModified(lastModified)

              result
            }
          }
        }
      }
    }

  // Like `remote`, but remembers permanent not-found results in a hidden
  // ".error" file (only when the reference file exists), so that policies
  // FetchMissing / LocalOnly don't retry them.
  def remoteKeepErrors(file: File, url: String): EitherT[Task, FileError, Unit] = {

    val errFile = new File(file.getParentFile, "." + file.getName + ".error")

    def validErrFileExists =
      EitherT {
        Task {
          (referenceFileExists && errFile.exists()).right[FileError]
        }
      }

    def createErrFile =
      EitherT {
        Task {
          if (referenceFileExists) {
            if (!errFile.exists())
              NioFiles.write(errFile.toPath, "".getBytes("UTF-8"))
          }

          ().right[FileError]
        }
      }

    def deleteErrFile =
      EitherT {
        Task {
          if (errFile.exists())
            errFile.delete()

          ().right[FileError]
        }
      }

    def retainError =
      EitherT {
        remote(file, url).run.flatMap {
          case err @ -\/(FileError.NotFound(_, Some(true))) =>
            createErrFile.run.map(_ => err)
          case other =>
            deleteErrFile.run.map(_ => other)
        }
      }

    cachePolicy match {
      case CachePolicy.FetchMissing | CachePolicy.LocalOnly =>
        // short-circuit with NotFound when a previous attempt failed permanently
        validErrFileExists.flatMap { exists =>
          if (exists)
            EitherT(Task.now(FileError.NotFound(url, Some(true)).left[Unit]))
          else
            retainError
        }

      case CachePolicy.ForceDownload | CachePolicy.Update | CachePolicy.UpdateChanging =>
        retainError
    }
  }

  def checkFileExists(file: File, url: String): EitherT[Task, FileError, Unit] =
    EitherT {
      Task {
        if (file.exists()) {
          logger.foreach(_.foundLocally(url, file))
          \/-(())
        } else
          -\/(FileError.NotFound(file.toString))
      }
    }

  val tasks =
    for ((f, url) <- pairs) yield {
      val file = new File(f)

      val res =
        if (url.startsWith("file:/")) {
          // for debug purposes, flaky with URL-encoded chars anyway
          // def filtered(s: String) =
          //   s.stripPrefix("file:/").stripPrefix("//").stripSuffix("/")
          // assert(
          //   filtered(url) == filtered(file.toURI.toString),
          //   s"URL: ${filtered(url)}, file: ${filtered(file.toURI.toString)}"
          // )
          checkFileExists(file, url)
        } else {
          def update = shouldDownload(file, url).flatMap {
            case true =>
              remoteKeepErrors(file, url)
            case false =>
              EitherT(Task.now[FileError \/ Unit](().right))
          }

          // UpdateChanging only applies to changing artifacts
          val cachePolicy0 = cachePolicy match {
            case CachePolicy.UpdateChanging if !artifact.changing =>
              CachePolicy.FetchMissing
            case other =>
              other
          }

          cachePolicy0 match {
            case CachePolicy.LocalOnly =>
              checkFileExists(file, url)
            case CachePolicy.UpdateChanging | CachePolicy.Update =>
              update
            case CachePolicy.FetchMissing =>
              checkFileExists(file, url) orElse remoteKeepErrors(file, url)
            case CachePolicy.ForceDownload =>
              remoteKeepErrors(file, url)
          }
        }

      res.run.map((file, url) -> _)
    }

  // run all downloads in parallel on the supplied pool
  Nondeterminism[Task].gather(tasks)
}
|
|
|
|
|
|
2016-03-06 14:56:12 +01:00
|
|
|
/** Parses a hexadecimal digest out of a checksum file's `content`, if one can be found. */
def parseChecksum(content: String): Option[BigInteger] = {
  val lines = content
    .linesIterator
    .toVector

  // first try whole lines (whitespace stripped), then whitespace-separated tokens
  parseChecksumLine(lines) orElse parseChecksumAlternative(lines)
}
|
|
|
|
|
|
2016-03-04 00:41:07 +01:00
|
|
|
// matches md5 or sha1
// (32 lowercase hex chars, optionally followed by 8 more — 128- or 160-bit digests)
private val checksumPattern = Pattern.compile("^[0-9a-f]{32}([0-9a-f]{8})?")

// first element that is a full hex digest, parsed in base 16
private def findChecksum(elems: Seq[String]): Option[BigInteger] =
  elems.collectFirst {
    case rawSum if checksumPattern.matcher(rawSum).matches() =>
      new BigInteger(rawSum, 16)
  }

// handles the digest alone on a line (whitespace stripped per line)
private def parseChecksumLine(lines: Seq[String]): Option[BigInteger] =
  findChecksum(lines.map(_.toLowerCase.replaceAll("\\s", "")))

// handles "<digest>  <file name>"-style lines (whitespace-separated tokens)
private def parseChecksumAlternative(lines: Seq[String]): Option[BigInteger] =
  findChecksum(lines.flatMap(_.toLowerCase.split("\\s+")))
|
2016-02-24 10:33:52 +01:00
|
|
|
|
2015-07-05 15:41:38 +02:00
|
|
|
/**
 * Validates the `sumType` (e.g. "SHA-1") checksum of the local copy of `artifact` in `cache`.
 *
 * Expects both the artifact and its checksum file to already be in the cache.
 * Fails with ChecksumNotFound / ChecksumFormatError / WrongChecksum accordingly.
 */
def validateChecksum(
  artifact: Artifact,
  sumType: String,
  cache: File,
  pool: ExecutorService
): EitherT[Task, FileError, Unit] = {

  implicit val pool0 = pool

  // local (cache path) version of the artifact
  val artifact0 = withLocal(artifact, cache)
    .extra
    .getOrElse("local", artifact)

  EitherT {
    artifact0.checksumUrls.get(sumType) match {
      case Some(sumFile) =>
        Task {
          // expected digest, as read from the checksum file
          val sumOpt = parseChecksum(
            new String(NioFiles.readAllBytes(new File(sumFile).toPath), "UTF-8")
          )

          sumOpt match {
            case None =>
              FileError.ChecksumFormatError(sumType, sumFile).left

            case Some(sum) =>
              val md = MessageDigest.getInstance(sumType)

              // digest the local artifact file
              val f = new File(artifact0.url)
              val is = new FileInputStream(f)
              try withContent(is, md.update(_, 0, _))
              finally is.close()

              val digest = md.digest()
              // signum 1: treat the digest bytes as a non-negative value
              val calculatedSum = new BigInteger(1, digest)

              if (sum == calculatedSum)
                ().right
              else
                FileError.WrongChecksum(
                  sumType,
                  calculatedSum.toString(16),
                  sum.toString(16),
                  artifact0.url,
                  sumFile
                ).left
          }
        }

      case None =>
        Task.now(FileError.ChecksumNotFound(sumType, artifact0.url).left)
    }
  }
}
|
|
|
|
|
|
2015-07-05 15:41:38 +02:00
|
|
|
/**
 * Fetches `artifact` into `cache` (downloading it if the policy requires),
 * validates the first available checksum from `checksums` (a `None` entry
 * means "no checksum is fine"), and returns the local file.
 */
def file(
  artifact: Artifact,
  cache: File = default,
  cachePolicy: CachePolicy = CachePolicy.FetchMissing,
  checksums: Seq[Option[String]] = defaultChecksums,
  logger: Option[Logger] = None,
  pool: ExecutorService = defaultPool
): EitherT[Task, FileError, File] = {

  implicit val pool0 = pool

  // an empty checksum list means "no checksum required"
  val checksums0 = if (checksums.isEmpty) Seq(None) else checksums

  // (file, optional checksum type to validate)
  val res = EitherT {
    download(
      artifact,
      cache,
      checksums = checksums0.collect { case Some(c) => c }.toSet,
      cachePolicy,
      pool,
      logger = logger
    ).map { results =>
      // first checksum type whose file was successfully fetched (None always matches)
      val checksum = checksums0.find {
        case None => true
        case Some(c) =>
          artifact.checksumUrls.get(c).exists { cUrl =>
            results.exists { case ((_, u), b) =>
              u == cUrl && b.isRight
            }
          }
      }

      // head is the artifact itself (always first in `pairs` of `download`)
      val ((f, _), res) = results.head
      res.flatMap { _ =>
        checksum match {
          case None =>
            // FIXME All the checksums should be in the error, possibly with their URLs
            // from artifact.checksumUrls
            -\/(FileError.ChecksumNotFound(checksums0.last.get, ""))
          case Some(c) => \/-((f, c))
        }
      }
    }
  }

  res.flatMap {
    case (f, None) => EitherT(Task.now[FileError \/ File](\/-(f)))
    case (f, Some(c)) =>
      validateChecksum(artifact, c, cache, pool).map(_ => f)
  }
}
|
|
|
|
|
|
2015-11-29 20:21:45 +01:00
|
|
|
/**
 * A `Fetch.Content[Task]` that resolves artifacts through `file` above,
 * then reads the resulting local file as a UTF-8 string.
 * Errors are mapped to their message (per the `Fetch.Content` contract).
 */
def fetch(
  cache: File = default,
  cachePolicy: CachePolicy = CachePolicy.FetchMissing,
  checksums: Seq[Option[String]] = defaultChecksums,
  logger: Option[Logger] = None,
  pool: ExecutorService = defaultPool
): Fetch.Content[Task] = {
  artifact =>
    file(
      artifact,
      cache,
      cachePolicy,
      checksums = checksums,
      logger = logger,
      pool = pool
    ).leftMap(_.message).map { f =>
      // FIXME Catch error here?
      new String(NioFiles.readAllBytes(f.toPath), "UTF-8")
    }
}
|
|
|
|
|
|
2016-01-14 00:47:53 +01:00
|
|
|
/** URI (as a string) of the user's `~/.ivy2/` directory, guaranteed to end with a slash. */
private lazy val ivy2HomeUri = {
  // a bit touchy on Windows... - don't try to manually write down the URI with s"file://..."
  val uri = new File(sys.props("user.home") + "/.ivy2/").toURI.toString
  if (uri.endsWith("/")) uri
  else uri + "/"
}
|
|
|
|
|
|
2015-12-31 16:26:18 +01:00
|
|
|
// The ~/.ivy2/local repository (as populated e.g. by sbt's publishLocal)
lazy val ivy2Local = IvyRepository(
  ivy2HomeUri + "local/" +
    "[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/" +
    "[artifact](-[classifier]).[ext]",
  dropInfoAttributes = true
)
|
|
|
|
|
|
2016-01-14 00:47:53 +01:00
|
|
|
// The ~/.ivy2/cache repository, with its distinct metadata layout
// NOTE(review): checksums / signatures are disabled here — presumably the Ivy
// cache doesn't keep them in this layout; confirm before changing.
lazy val ivy2Cache = IvyRepository(
  ivy2HomeUri + "cache/" +
    "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[organisation]/[module]/[type]s/[artifact]-[revision](-[classifier]).[ext]",
  metadataPatternOpt = Some(
    ivy2HomeUri + "cache/" +
      "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[organisation]/[module]/[type]-[revision](-[classifier]).[ext]"
  ),
  withChecksums = false,
  withSignatures = false,
  dropInfoAttributes = true
)
|
|
|
|
|
|
2016-03-06 14:45:57 +01:00
|
|
|
/** Default cache location: $COURSIER_CACHE when set, else ~/.coursier/cache/v1. */
lazy val default: File = {
  val path = sys.env.getOrElse(
    "COURSIER_CACHE",
    sys.props("user.home") + "/.coursier/cache/v1"
  )
  new File(path).getAbsoluteFile
}
|
2016-01-03 16:38:28 +01:00
|
|
|
|
2015-07-04 16:19:43 +02:00
|
|
|
// parallelism of the default download pool below
val defaultConcurrentDownloadCount = 6

// daemon-thread pool used when callers don't supply their own ExecutorService
lazy val defaultPool =
  Executors.newFixedThreadPool(defaultConcurrentDownloadCount, Strategy.DefaultDaemonThreadFactory)

// in-flight downloads, keyed by URL (used by `downloading`)
private val urlLocks = new ConcurrentHashMap[String, Object]
|
|
|
|
|
|
2015-07-04 16:19:36 +02:00
|
|
|
/** Callbacks for cache events (local hits, download progress, update checks). All no-ops by default. */
trait Logger {
  /** `url` was found in the cache as `f` — no download needed. */
  def foundLocally(url: String, f: File): Unit = {}

  /** Download of `url` to `file` is starting. */
  def downloadingArtifact(url: String, file: File): Unit = {}

  @deprecated("Use / override the variant with 3 arguments instead")
  def downloadLength(url: String, length: Long): Unit = {}
  /** Total length of `url` is known; `alreadyDownloaded` bytes come from a resumed partial download. */
  def downloadLength(url: String, totalLength: Long, alreadyDownloaded: Long): Unit = {
    // forward to the deprecated variant so existing overrides keep working
    downloadLength(url, totalLength)
  }

  /** `downloaded` bytes of `url` fetched so far (cumulative). */
  def downloadProgress(url: String, downloaded: Long): Unit = {}

  /** Download of `url` finished, successfully or not. */
  def downloadedArtifact(url: String, success: Boolean): Unit = {}
  /** A HEAD check for updates of `url` is starting (current local timestamp, if any). */
  def checkingUpdates(url: String, currentTimeOpt: Option[Long]): Unit = {}
  /** Update check for `url` finished with the remote timestamp (None on failure or when unavailable). */
  def checkingUpdatesResult(url: String, currentTimeOpt: Option[Long], remoteTimeOpt: Option[Long]): Unit = {}
}
|
|
|
|
|
|
2015-06-25 01:18:57 +02:00
|
|
|
// Size of the download / copy buffer; a public var so callers may tune it
var bufferSize = 1024*1024
|
|
|
|
|
|
|
|
|
|
/** Reads `is` to exhaustion (synchronously) and returns all its bytes. Does not close `is`. */
def readFullySync(is: InputStream) = {
  val out = new ByteArrayOutputStream()
  val chunk = Array.ofDim[Byte](16384)

  @tailrec
  def loop(): Unit = {
    val n = is.read(chunk, 0, chunk.length)
    if (n != -1) {
      out.write(chunk, 0, n)
      loop()
    }
  }

  loop()
  out.flush()
  out.toByteArray
}
|
|
|
|
|
|
|
|
|
|
/**
 * Reads `is` (evaluated lazily, closed when done) as a UTF-8 string,
 * wrapped in a Task of either an error message or the content.
 */
def readFully(is: => InputStream) =
  Task {
    \/.fromTryCatchNonFatal {
      val is0 = is
      val b =
        try readFullySync(is0)
        finally is0.close()

      new String(b, "UTF-8")
    }.leftMap(_.getMessage)
  }
|
|
|
|
|
|
2015-07-05 15:41:38 +02:00
|
|
|
/**
 * Feeds the content of `is` chunk by chunk to `f` as (buffer, valid length).
 * The buffer is reused between calls; `is` is not closed here.
 */
def withContent(is: InputStream, f: (Array[Byte], Int) => Unit): Unit = {
  val chunk = Array.ofDim[Byte](16384)

  Iterator
    .continually(is.read(chunk, 0, chunk.length))
    .takeWhile(_ != -1)
    .foreach(n => f(chunk, n))
}
|
|
|
|
|
|
|
|
|
|
}
|