From b730c00b750e2889ca6665e29542eb0450657fec Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 1 May 2014 12:05:52 -0400 Subject: [PATCH 1/2] sbt 0.13.2 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 37b489cb6..8ac605a3d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.1 +sbt.version=0.13.2 From adb41611cf73260938274915d8462d924df200c8 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 1 May 2014 12:50:07 -0400 Subject: [PATCH 2/2] added scalariform --- cache/src/main/scala/sbt/Cache.scala | 459 ++- cache/src/main/scala/sbt/CacheIO.scala | 73 +- cache/src/main/scala/sbt/FileInfo.scala | 176 +- cache/src/main/scala/sbt/SeparatedCache.scala | 105 +- .../src/main/scala/sbt/ChangeReport.scala | 123 +- .../tracking/src/main/scala/sbt/Tracked.scala | 352 +- .../api/src/main/scala/sbt/ClassToAPI.scala | 566 ++- .../api/src/main/scala/xsbt/api/APIUtil.scala | 113 +- .../src/main/scala/xsbt/api/Discovered.scala | 10 +- .../src/main/scala/xsbt/api/Discovery.scala | 172 +- .../api/src/main/scala/xsbt/api/HashAPI.scala | 658 ++-- .../src/main/scala/xsbt/api/NameHashing.scala | 229 +- .../api/src/main/scala/xsbt/api/SameAPI.scala | 673 ++-- .../api/src/main/scala/xsbt/api/ShowAPI.scala | 536 ++- .../api/src/main/scala/xsbt/api/Visit.scala | 370 +- .../api/src/main/scala/xsbti/SafeLazy.scala | 36 +- .../inc/src/main/scala/sbt/CompileSetup.scala | 89 +- .../inc/src/main/scala/sbt/inc/APIDiff.scala | 72 +- compile/inc/src/main/scala/sbt/inc/APIs.scala | 119 +- .../inc/src/main/scala/sbt/inc/Analysis.scala | 394 +- .../main/scala/sbt/inc/AnalysisStore.scala | 45 +- .../inc/src/main/scala/sbt/inc/Changes.scala | 45 +- .../main/scala/sbt/inc/ClassfileManager.scala | 138 +- .../src/main/scala/sbt/inc/Compilations.scala | 2 +- .../inc/src/main/scala/sbt/inc/Compile.scala | 340 +- 
.../main/scala/sbt/inc/FileValueCache.scala | 70 +- .../src/main/scala/sbt/inc/IncOptions.scala | 523 ++- .../src/main/scala/sbt/inc/Incremental.scala | 917 ++--- .../inc/src/main/scala/sbt/inc/Locate.scala | 166 +- .../scala/sbt/inc/MemberRefInvalidator.scala | 125 +- .../src/main/scala/sbt/inc/Relations.scala | 720 ++-- .../src/main/scala/sbt/inc/SourceInfo.scala | 54 +- .../inc/src/main/scala/sbt/inc/Stamp.scala | 283 +- .../sbt/compiler/AggressiveCompile.scala | 401 +- .../sbt/compiler/IncrementalCompiler.scala | 89 +- .../interface/src/main/scala/xsbt/API.scala | 159 +- .../src/main/scala/xsbt/Analyzer.scala | 73 +- .../src/main/scala/xsbt/Command.scala | 41 +- .../src/main/scala/xsbt/Compat.scala | 147 +- .../main/scala/xsbt/CompilerInterface.scala | 441 ++- .../main/scala/xsbt/ConsoleInterface.scala | 162 +- .../main/scala/xsbt/DelegatingReporter.scala | 165 +- .../src/main/scala/xsbt/Dependency.scala | 300 +- .../src/main/scala/xsbt/ExtractAPI.scala | 945 +++-- .../main/scala/xsbt/ExtractUsedNames.scala | 145 +- .../src/main/scala/xsbt/LocateClassFile.scala | 66 +- .../interface/src/main/scala/xsbt/Log.scala | 9 +- .../src/main/scala/xsbt/Message.scala | 5 +- .../main/scala/xsbt/ScaladocInterface.scala | 119 +- .../sbt/compiler/ComponentCompiler.scala | 108 +- .../main/scala/sbt/inc/AnalysisFormats.scala | 235 +- .../main/scala/sbt/inc/FileBasedStore.scala | 21 +- .../scala/sbt/inc/TextAnalysisFormat.scala | 784 ++-- .../scala/xsbt/api/CompilationFormat.scala | 6 +- .../main/scala/xsbt/api/SourceFormat.scala | 56 +- .../src/main/scala/sbt/ClasspathOptions.scala | 13 +- .../src/main/scala/sbt/LoggerReporter.scala | 239 +- .../sbt/compiler/AnalyzingCompiler.scala | 278 +- .../sbt/compiler/CompilerArguments.scala | 121 +- .../scala/sbt/compiler/CompilerCache.scala | 83 +- .../compiler/CompilerInterfaceProvider.scala | 16 +- .../scala/sbt/compiler/CompilerOutput.scala | 26 +- .../scala/sbt/compiler/JavaCompiler.scala | 202 +- 
.../main/scala/sbt/compiler/RawCompiler.scala | 53 +- ivy/src/main/scala/sbt/Artifact.scala | 116 +- ivy/src/main/scala/sbt/ComponentManager.scala | 149 +- ivy/src/main/scala/sbt/Configuration.scala | 100 +- ivy/src/main/scala/sbt/ConflictWarning.scala | 126 +- ivy/src/main/scala/sbt/ConvertResolver.scala | 445 ++- ivy/src/main/scala/sbt/Credentials.scala | 105 +- ivy/src/main/scala/sbt/CrossVersion.scala | 231 +- ivy/src/main/scala/sbt/CustomPomParser.scala | 376 +- ivy/src/main/scala/sbt/CustomXmlParser.scala | 44 +- ivy/src/main/scala/sbt/DependencyFilter.scala | 103 +- ivy/src/main/scala/sbt/Ivy.scala | 1169 +++--- ivy/src/main/scala/sbt/IvyActions.scala | 473 ++- ivy/src/main/scala/sbt/IvyCache.scala | 165 +- .../main/scala/sbt/IvyConfigurations.scala | 178 +- ivy/src/main/scala/sbt/IvyInterface.scala | 34 +- ivy/src/main/scala/sbt/IvyLogger.scala | 87 +- ivy/src/main/scala/sbt/IvyRetrieve.scala | 80 +- ivy/src/main/scala/sbt/IvyScala.scala | 189 +- ivy/src/main/scala/sbt/IvyUtil.scala | 7 +- ivy/src/main/scala/sbt/MakePom.scala | 645 ++-- ivy/src/main/scala/sbt/ModuleID.scala | 168 +- ivy/src/main/scala/sbt/ProjectResolver.scala | 139 +- ivy/src/main/scala/sbt/ResolutionCache.scala | 136 +- ivy/src/main/scala/sbt/Resolver.scala | 492 ++- ivy/src/main/scala/sbt/StringUtilities.scala | 16 +- ivy/src/main/scala/sbt/UpdateReport.scala | 228 +- .../scala/sbt/impl/DependencyBuilders.scala | 103 +- .../ivyint/ErrorMessageAuthenticator.scala | 117 +- .../sbt/ivyint/IvyCredentialsLookup.scala | 17 +- .../scala/sbt/ivyint/MergeDescriptors.scala | 198 +- launch/src/main/scala/xsbt/boot/Boot.scala | 102 +- .../scala/xsbt/boot/BootConfiguration.scala | 186 +- launch/src/main/scala/xsbt/boot/Cache.scala | 27 +- .../src/main/scala/xsbt/boot/CheckProxy.scala | 65 +- .../main/scala/xsbt/boot/Configuration.scala | 283 +- .../scala/xsbt/boot/ConfigurationParser.scala | 567 ++- launch/src/main/scala/xsbt/boot/Create.scala | 89 +- .../main/scala/xsbt/boot/Enumeration.scala | 38 
+- .../main/scala/xsbt/boot/FilteredLoader.scala | 56 +- launch/src/main/scala/xsbt/boot/Find.scala | 87 +- launch/src/main/scala/xsbt/boot/JAnsi.scala | 29 +- launch/src/main/scala/xsbt/boot/Launch.scala | 718 ++-- .../scala/xsbt/boot/LaunchConfiguration.scala | 185 +- launch/src/main/scala/xsbt/boot/ListMap.scala | 51 +- launch/src/main/scala/xsbt/boot/Locks.scala | 171 +- .../scala/xsbt/boot/ModuleDefinition.scala | 30 +- .../scala/xsbt/boot/PlainApplication.scala | 30 +- launch/src/main/scala/xsbt/boot/Pre.scala | 190 +- .../main/scala/xsbt/boot/ResolveValues.scala | 67 +- .../scala/xsbt/boot/ServerApplication.scala | 54 +- launch/src/main/scala/xsbt/boot/Update.scala | 776 ++-- launch/src/main/scala/xsbt/boot/Using.scala | 68 +- .../src/main/scala/xsbt/boot/test/Apps.scala | 55 +- .../main/scala/xsbt/boot/test/Servers.scala | 132 +- .../actions/src/main/scala/sbt/CacheIvy.scala | 325 +- .../actions/src/main/scala/sbt/Compiler.scala | 133 +- main/actions/src/main/scala/sbt/Console.scala | 40 +- main/actions/src/main/scala/sbt/Doc.scala | 131 +- .../actions/src/main/scala/sbt/DotGraph.scala | 110 +- .../src/main/scala/sbt/ForkTests.scala | 217 +- main/actions/src/main/scala/sbt/Package.scala | 196 +- .../src/main/scala/sbt/RawCompileLike.scala | 136 +- main/actions/src/main/scala/sbt/Sync.scala | 146 +- .../src/main/scala/sbt/TestResultLogger.scala | 32 +- main/actions/src/main/scala/sbt/Tests.scala | 456 +-- .../src/main/scala/sbt/compiler/Eval.scala | 753 ++-- .../main/scala/sbt/BasicCommandStrings.scala | 173 +- .../src/main/scala/sbt/BasicCommands.scala | 491 ++- .../src/main/scala/sbt/BasicKeys.scala | 19 +- main/command/src/main/scala/sbt/Command.scala | 257 +- .../src/main/scala/sbt/CommandUtil.scala | 125 +- .../main/scala/sbt/ExceptionCategory.scala | 32 +- .../src/main/scala/sbt/Highlight.scala | 37 +- .../src/main/scala/sbt/MainControl.scala | 30 +- .../command/src/main/scala/sbt/MainLoop.scala | 159 +- main/command/src/main/scala/sbt/State.scala | 
420 +-- main/command/src/main/scala/sbt/Watched.scala | 135 +- main/settings/src/main/scala/sbt/Append.scala | 85 +- .../src/main/scala/sbt/ConfigKey.scala | 5 +- main/settings/src/main/scala/sbt/Def.scala | 190 +- .../src/main/scala/sbt/DelegateIndex.scala | 26 +- .../src/main/scala/sbt/InputTask.scala | 237 +- .../src/main/scala/sbt/KeyRanks.scala | 69 +- .../src/main/scala/sbt/Previous.scala | 158 +- .../src/main/scala/sbt/Reference.scala | 126 +- main/settings/src/main/scala/sbt/Scope.scala | 404 +- .../src/main/scala/sbt/ScopeAxis.scala | 37 +- .../src/main/scala/sbt/ScopeMask.scala | 23 +- .../src/main/scala/sbt/Structure.scala | 816 ++-- .../src/main/scala/sbt/std/InputConvert.scala | 127 +- .../src/main/scala/sbt/std/InputWrapper.scala | 337 +- .../src/main/scala/sbt/std/KeyMacro.scala | 91 +- .../src/main/scala/sbt/std/SettingMacro.scala | 63 +- .../src/main/scala/sbt/std/TaskMacro.scala | 656 ++-- main/src/main/scala/sbt/APIMappings.scala | 50 +- main/src/main/scala/sbt/Act.scala | 516 ++- main/src/main/scala/sbt/AddSettings.scala | 119 +- main/src/main/scala/sbt/Aggregation.scala | 373 +- main/src/main/scala/sbt/Build.scala | 99 +- .../main/scala/sbt/BuildDependencies.scala | 45 +- main/src/main/scala/sbt/BuildLoader.scala | 241 +- main/src/main/scala/sbt/BuildPaths.scala | 126 +- main/src/main/scala/sbt/BuildStructure.scala | 334 +- main/src/main/scala/sbt/BuildUtil.scala | 176 +- main/src/main/scala/sbt/CommandStrings.scala | 217 +- main/src/main/scala/sbt/ConsoleProject.scala | 43 +- main/src/main/scala/sbt/Cross.scala | 170 +- main/src/main/scala/sbt/Defaults.scala | 3284 +++++++++-------- .../scala/sbt/EvaluateConfigurations.scala | 436 ++- main/src/main/scala/sbt/EvaluateTask.scala | 761 ++-- main/src/main/scala/sbt/Extracted.scala | 121 +- main/src/main/scala/sbt/GlobalPlugin.scala | 141 +- .../main/scala/sbt/GroupedAutoPlugins.scala | 26 +- main/src/main/scala/sbt/Inspect.scala | 92 +- main/src/main/scala/sbt/IvyConsole.scala | 104 +- 
main/src/main/scala/sbt/KeyIndex.scala | 246 +- main/src/main/scala/sbt/Keys.scala | 653 ++-- main/src/main/scala/sbt/Load.scala | 1464 ++++---- main/src/main/scala/sbt/LoadedSbtFile.scala | 24 +- main/src/main/scala/sbt/LogManager.scala | 204 +- main/src/main/scala/sbt/Main.scala | 844 +++-- main/src/main/scala/sbt/Opts.scala | 94 +- main/src/main/scala/sbt/Output.scala | 125 +- main/src/main/scala/sbt/PluginDiscovery.scala | 258 +- .../src/main/scala/sbt/PluginManagement.scala | 70 +- main/src/main/scala/sbt/Project.scala | 965 ++--- .../main/scala/sbt/ProjectNavigation.scala | 108 +- main/src/main/scala/sbt/Resolve.scala | 78 +- main/src/main/scala/sbt/Resolvers.scala | 245 +- main/src/main/scala/sbt/RetrieveUnit.scala | 51 +- main/src/main/scala/sbt/ScopeFilter.scala | 373 +- main/src/main/scala/sbt/ScopedKeyData.scala | 29 +- main/src/main/scala/sbt/Script.scala | 106 +- main/src/main/scala/sbt/SessionSettings.scala | 317 +- main/src/main/scala/sbt/SessionVar.scala | 109 +- .../main/scala/sbt/SettingCompletions.scala | 620 ++-- main/src/main/scala/sbt/SettingGraph.scala | 154 +- main/src/main/scala/sbt/Tags.scala | 190 +- main/src/main/scala/sbt/TaskName.scala | 15 +- main/src/main/scala/sbt/TaskTimings.scala | 65 +- .../main/scala/sbt/plugins/CorePlugin.scala | 6 +- project/Docs.scala | 371 +- project/Formatting.scala | 30 + project/Proguard.scala | 264 +- project/Release.scala | 90 +- project/Sbt.scala | 520 ++- project/SiteMap.scala | 167 +- project/Status.scala | 53 +- project/Sxr.scala | 69 +- project/Transform.scala | 177 +- project/Util.scala | 349 +- project/p.sbt | 8 +- run/src/main/scala/sbt/Fork.scala | 352 +- run/src/main/scala/sbt/Run.scala | 170 +- run/src/main/scala/sbt/SelectMainClass.scala | 72 +- run/src/main/scala/sbt/TrapExit.scala | 886 ++--- .../scala/sbt/TrapExitSecurityException.scala | 46 +- sbt/src/main/scala/package.scala | 81 +- .../main/scala/xsbt/test/CommentHandler.scala | 5 +- .../main/scala/xsbt/test/FileCommands.scala | 222 +- 
.../main/scala/xsbt/test/FilteredLoader.scala | 23 +- .../main/scala/xsbt/test/ScriptRunner.scala | 87 +- .../scala/xsbt/test/StatementHandler.scala | 29 +- .../scala/xsbt/test/TestScriptParser.scala | 112 +- .../src/main/scala/sbt/ScriptedPlugin.scala | 103 +- .../src/main/scala/sbt/test/SbtHandler.scala | 130 +- .../main/scala/sbt/test/ScriptedTests.scala | 329 +- .../main/scala/sbt/CompletionService.scala | 70 +- .../scala/sbt/ConcurrentRestrictions.scala | 342 +- tasks/src/main/scala/sbt/Execute.scala | 600 ++- .../src/main/scala/sbt/ExecuteProgress.scala | 102 +- tasks/src/main/scala/sbt/Incomplete.scala | 99 +- tasks/src/main/scala/sbt/Node.scala | 23 +- tasks/src/main/scala/sbt/Result.scala | 52 +- .../standard/src/main/scala/sbt/Action.scala | 86 +- .../src/main/scala/sbt/std/Streams.scala | 217 +- .../src/main/scala/sbt/std/System.scala | 97 +- .../src/main/scala/sbt/std/TaskExtra.scala | 378 +- .../scala/sbt/JUnitXmlTestsListener.scala | 275 +- .../src/main/scala/sbt/TestFramework.scala | 362 +- .../main/scala/sbt/TestReportListener.scala | 180 +- .../main/scala/sbt/TestStatusReporter.scala | 59 +- .../main/scala/sbt/appmacro/ContextUtil.scala | 423 ++- .../src/main/scala/sbt/appmacro/Convert.scala | 57 +- .../main/scala/sbt/appmacro/Instance.scala | 370 +- .../scala/sbt/appmacro/KListBuilder.scala | 113 +- .../scala/sbt/appmacro/MixedBuilder.scala | 23 +- .../scala/sbt/appmacro/TupleBuilder.scala | 81 +- .../scala/sbt/appmacro/TupleNBuilder.scala | 91 +- .../main/scala/sbt/classfile/Analyze.scala | 253 +- .../main/scala/sbt/classfile/ClassFile.scala | 101 +- .../src/main/scala/sbt/classfile/Parser.scala | 302 +- .../src/main/scala/sbt/ModuleUtilities.scala | 31 +- .../src/main/scala/sbt/ReflectUtilities.scala | 105 +- .../src/main/scala/sbt/ScalaInstance.scala | 224 +- .../sbt/classpath/ClassLoaderCache.scala | 58 +- .../scala/sbt/classpath/ClassLoaders.scala | 275 +- .../sbt/classpath/ClasspathUtilities.scala | 184 +- 
.../main/scala/sbt/classpath/DualLoader.scala | 167 +- .../src/main/scala/sbt/classpath/RawURL.scala | 94 +- .../collection/src/main/scala/sbt/AList.scala | 391 +- .../src/main/scala/sbt/Attributes.scala | 293 +- .../src/main/scala/sbt/Classes.scala | 45 +- util/collection/src/main/scala/sbt/Dag.scala | 218 +- .../collection/src/main/scala/sbt/HList.scala | 36 +- .../collection/src/main/scala/sbt/IDSet.scala | 72 +- .../collection/src/main/scala/sbt/INode.scala | 318 +- .../collection/src/main/scala/sbt/KList.scala | 79 +- util/collection/src/main/scala/sbt/PMap.scala | 170 +- .../collection/src/main/scala/sbt/Param.scala | 39 +- .../src/main/scala/sbt/Positions.scala | 8 +- .../src/main/scala/sbt/Settings.scala | 1124 +++--- util/collection/src/main/scala/sbt/Show.scala | 7 +- .../src/main/scala/sbt/Signal.scala | 148 +- .../src/main/scala/sbt/TypeFunctions.scala | 71 +- .../collection/src/main/scala/sbt/Types.scala | 9 +- util/collection/src/main/scala/sbt/Util.scala | 58 +- .../src/main/scala/sbt/LineReader.scala | 235 +- .../main/scala/sbt/complete/Completions.scala | 243 +- .../scala/sbt/complete/EditDistance.scala | 58 +- .../scala/sbt/complete/ExampleSource.scala | 61 +- .../src/main/scala/sbt/complete/History.scala | 63 +- .../scala/sbt/complete/HistoryCommands.scala | 115 +- .../scala/sbt/complete/JLineCompletion.scala | 281 +- .../src/main/scala/sbt/complete/Parser.scala | 1421 ++++--- .../src/main/scala/sbt/complete/Parsers.scala | 372 +- .../scala/sbt/complete/ProcessError.scala | 53 +- .../scala/sbt/complete/TokenCompletions.scala | 57 +- .../main/scala/sbt/complete/TypeString.scala | 136 +- .../main/scala/sbt/complete/UpperBound.scala | 72 +- .../src/main/scala/sbt/ErrorHandling.scala | 59 +- .../control/src/main/scala/sbt/ExitHook.scala | 25 +- .../main/scala/sbt/MessageOnlyException.scala | 18 +- .../scala/xsbt/datatype/DatatypeParser.scala | 122 +- .../main/scala/xsbt/datatype/Definition.scala | 47 +- .../xsbt/datatype/GenerateDatatypes.scala | 45 
+- .../main/scala/xsbt/datatype/Generator.scala | 294 +- .../src/main/scala/sbt/DeferredWriter.scala | 47 +- util/io/src/main/scala/sbt/Hash.scala | 173 +- util/io/src/main/scala/sbt/IO.scala | 1653 +++++---- util/io/src/main/scala/sbt/NameFilter.scala | 133 +- util/io/src/main/scala/sbt/Pack.scala | 130 +- util/io/src/main/scala/sbt/Path.scala | 405 +- util/io/src/main/scala/sbt/PathMapper.scala | 179 +- util/io/src/main/scala/sbt/Resources.scala | 101 +- util/io/src/main/scala/sbt/RichURI.scala | 74 +- .../scala/sbt/SourceModificationWatch.scala | 63 +- util/io/src/main/scala/sbt/Using.scala | 159 +- util/io/src/main/scala/xsbt/IPC.scala | 128 +- util/log/src/main/scala/sbt/BasicLogger.scala | 21 +- .../src/main/scala/sbt/BufferedLogger.scala | 169 +- .../src/main/scala/sbt/ConsoleLogger.scala | 305 +- util/log/src/main/scala/sbt/ConsoleOut.scala | 104 +- .../log/src/main/scala/sbt/FilterLogger.scala | 58 +- util/log/src/main/scala/sbt/FullLogger.scala | 49 +- .../src/main/scala/sbt/GlobalLogging.scala | 61 +- util/log/src/main/scala/sbt/Level.scala | 39 +- util/log/src/main/scala/sbt/LogEvent.scala | 7 +- util/log/src/main/scala/sbt/Logger.scala | 233 +- .../log/src/main/scala/sbt/LoggerWriter.scala | 84 +- util/log/src/main/scala/sbt/MainLogging.scala | 81 +- util/log/src/main/scala/sbt/MultiLogger.scala | 83 +- util/log/src/main/scala/sbt/StackTrace.scala | 97 +- .../src/main/scala/sbt/logic/Logic.scala | 509 +-- .../src/main/scala/sbt/InheritInput.scala | 19 +- util/process/src/main/scala/sbt/Process.scala | 349 +- .../src/main/scala/sbt/ProcessImpl.scala | 751 ++-- util/process/src/main/scala/sbt/SyncVar.scala | 59 +- .../src/main/scala/sbt/Relation.scala | 255 +- 333 files changed, 35422 insertions(+), 36301 deletions(-) create mode 100644 project/Formatting.scala diff --git a/cache/src/main/scala/sbt/Cache.scala b/cache/src/main/scala/sbt/Cache.scala index 725a103a8..c241394ba 100644 --- a/cache/src/main/scala/sbt/Cache.scala +++ 
b/cache/src/main/scala/sbt/Cache.scala @@ -3,271 +3,246 @@ */ package sbt -import sbinary.{CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out} -import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream} -import java.net.{URI, URL} +import sbinary.{ CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out } +import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream } +import java.net.{ URI, URL } import Types.:+: -import DefaultProtocol.{asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap} +import DefaultProtocol.{ asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap } import scala.xml.NodeSeq -trait Cache[I,O] -{ - def apply(file: File)(i: I): Either[O, O => Unit] +trait Cache[I, O] { + def apply(file: File)(i: I): Either[O, O => Unit] } -trait SBinaryFormats extends CollectionTypes with JavaFormats -{ - implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat - implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat +trait SBinaryFormats extends CollectionTypes with JavaFormats { + implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat + implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat } -object Cache extends CacheImplicits -{ - def cache[I,O](implicit c: Cache[I,O]): Cache[I,O] = c +object Cache extends CacheImplicits { + def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c - def cached[I,O](file: File)(f: I => O)(implicit cache: Cache[I,O]): I => O = - in => - cache(file)(in) match - { - case Left(value) => value - case Right(store) => - val out = f(in) - store(out) - out - } + def cached[I, O](file: File)(f: I => O)(implicit cache: Cache[I, O]): I => O = + in => + cache(file)(in) match { + case Left(value) => value + case Right(store) => + val out = f(in) + store(out) + out + } - def debug[I](label: String, c: InputCache[I]): InputCache[I] = - new InputCache[I] - { - type 
Internal = c.Internal - def convert(i: I) = c.convert(i) - def read(from: Input) = - { - val v = c.read(from) - println(label + ".read: " + v) - v - } - def write(to: Out, v: Internal) - { - println(label + ".write: " + v) - c.write(to, v) - } - def equiv: Equiv[Internal] = new Equiv[Internal] { - def equiv(a: Internal, b: Internal)= - { - val equ = c.equiv.equiv(a,b) - println(label + ".equiv(" + a + ", " + b +"): " + equ) - equ - } - } - } + def debug[I](label: String, c: InputCache[I]): InputCache[I] = + new InputCache[I] { + type Internal = c.Internal + def convert(i: I) = c.convert(i) + def read(from: Input) = + { + val v = c.read(from) + println(label + ".read: " + v) + v + } + def write(to: Out, v: Internal) { + println(label + ".write: " + v) + c.write(to, v) + } + def equiv: Equiv[Internal] = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + { + val equ = c.equiv.equiv(a, b) + println(label + ".equiv(" + a + ", " + b + "): " + equ) + equ + } + } + } } trait CacheImplicits extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits with UnionImplicits -trait BasicCacheImplicits -{ - implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I,O] = - new BasicCache()(in, outFormat) - def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq) +trait BasicCacheImplicits { + implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I, O] = + new BasicCache()(in, outFormat) + def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq) - def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b } - - implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] = - new InputCache[Option[T]] - { - type Internal = Option[t.Internal] - def convert(v: Option[T]): Internal = v.map(x => t.convert(x)) - def read(from: Input) = - { - val isDefined = 
BooleanFormat.reads(from) - if(isDefined) Some(t.read(from)) else None - } - def write(to: Out, j: Internal): Unit = - { - BooleanFormat.writes(to, j.isDefined) - j foreach { x => t.write(to, x) } - } - def equiv = optEquiv(t.equiv) - } - - def wrapEquiv[S,T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] = - new Equiv[S] { - def equiv(a: S, b: S) = - eqT.equiv( f(a), f(b) ) - } + def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b } - implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] = - new Equiv[Option[T]] { - def equiv(a: Option[T], b: Option[T]) = - (a,b) match - { - case (None, None) => true - case (Some(va), Some(vb)) => t.equiv(va, vb) - case _ => false - } - } - implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq) - implicit def uriEquiv: Equiv[URI] = defaultEquiv - implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv - implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv + implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] = + new InputCache[Option[T]] { + type Internal = Option[t.Internal] + def convert(v: Option[T]): Internal = v.map(x => t.convert(x)) + def read(from: Input) = + { + val isDefined = BooleanFormat.reads(from) + if (isDefined) Some(t.read(from)) else None + } + def write(to: Out, j: Internal): Unit = + { + BooleanFormat.writes(to, j.isDefined) + j foreach { x => t.write(to, x) } + } + def equiv = optEquiv(t.equiv) + } - def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] = - { - val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray } - val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs)) - wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat) - } - - implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq) + def wrapEquiv[S, T](f: S => 
T)(implicit eqT: Equiv[T]): Equiv[S] = + new Equiv[S] { + def equiv(a: S, b: S) = + eqT.equiv(f(a), f(b)) + } - implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] = - new InputCache[Seq[T]] - { - type Internal = Seq[t.Internal] - def convert(v: Seq[T]) = v.map(x => t.convert(x)) - def read(from: Input) = - { - val size = IntFormat.reads(from) - def next(left: Int, acc: List[t.Internal]): Internal = - if(left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc) - next(size, Nil) - } - def write(to: Out, vs: Internal) - { - val size = vs.length - IntFormat.writes(to, size) - for(v <- vs) t.write(to, v) - } - def equiv: Equiv[Internal] = seqEquiv(t.equiv) - } + implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] = + new Equiv[Option[T]] { + def equiv(a: Option[T], b: Option[T]) = + (a, b) match { + case (None, None) => true + case (Some(va), Some(vb)) => t.equiv(va, vb) + case _ => false + } + } + implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq) + implicit def uriEquiv: Equiv[URI] = defaultEquiv + implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv + implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv - implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = - wrapEquiv( (x: Array[T]) => x :Seq[T] )(seqEquiv[T](t)) + def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] = + { + val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray } + val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs)) + wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat) + } - implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] = - new Equiv[Seq[T]] - { - def equiv(a: Seq[T], b: Seq[T]) = - a.length == b.length && - ((a,b).zipped forall t.equiv) - } - implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] = - wrap[Seq[T], List[T]](_.toList, 
_.toSeq)(DefaultProtocol.listFormat) - - def wrapIn[I,J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] = - new InputCache[I] - { - type Internal = jCache.Internal - def convert(i: I) = jCache.convert(f(i)) - def read(from: Input) = jCache.read(from) - def write(to: Out, j: Internal) = jCache.write(to, j) - def equiv = jCache.equiv - } + implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq) - def singleton[T](t: T): InputCache[T] = - basicInput(trueEquiv, asSingleton(t)) + implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] = + new InputCache[Seq[T]] { + type Internal = Seq[t.Internal] + def convert(v: Seq[T]) = v.map(x => t.convert(x)) + def read(from: Input) = + { + val size = IntFormat.reads(from) + def next(left: Int, acc: List[t.Internal]): Internal = + if (left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc) + next(size, Nil) + } + def write(to: Out, vs: Internal) { + val size = vs.length + IntFormat.writes(to, size) + for (v <- vs) t.write(to, v) + } + def equiv: Equiv[Internal] = seqEquiv(t.equiv) + } - def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true } + implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = + wrapEquiv((x: Array[T]) => x: Seq[T])(seqEquiv[T](t)) + + implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] = + new Equiv[Seq[T]] { + def equiv(a: Seq[T], b: Seq[T]) = + a.length == b.length && + ((a, b).zipped forall t.equiv) + } + implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] = + wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat) + + def wrapIn[I, J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] = + new InputCache[I] { + type Internal = jCache.Internal + def convert(i: I) = jCache.convert(f(i)) + def read(from: Input) = jCache.read(from) + def write(to: Out, j: Internal) = jCache.write(to, j) + def equiv = jCache.equiv + } + + def singleton[T](t: 
T): InputCache[T] = + basicInput(trueEquiv, asSingleton(t)) + + def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true } } -trait HListCacheImplicits -{ - implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = - new InputCache[H :+: T] - { - type Internal = (head.Internal, tail.Internal) - def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail)) - def read(from: Input) = - { - val h = head.read(from) - val t = tail.read(from) - (h, t) - } - def write(to: Out, j: Internal) - { - head.write(to, j._1) - tail.write(to, j._2) - } - def equiv = new Equiv[Internal] - { - def equiv(a: Internal, b: Internal) = - head.equiv.equiv(a._1, b._1) && - tail.equiv.equiv(a._2, b._2) - } - } - - implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil : HNil) +trait HListCacheImplicits { + implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = + new InputCache[H :+: T] { + type Internal = (head.Internal, tail.Internal) + def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail)) + def read(from: Input) = + { + val h = head.read(from) + val t = tail.read(from) + (h, t) + } + def write(to: Out, j: Internal) { + head.write(to, j._1) + tail.write(to, j._2) + } + def equiv = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + head.equiv.equiv(a._1, b._1) && + tail.equiv.equiv(a._2, b._2) + } + } - implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] { - def reads(from: Input) = - { - val h = head.reads(from) - val t = tail.reads(from) - HCons(h, t) - } - def writes(to: Out, hc: H :+: T) - { - head.writes(to, hc.head) - tail.writes(to, hc.tail) - } - } + implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil: HNil) - implicit def hNilFormat: Format[HNil] = asSingleton(HNil) + implicit def hConsFormat[H, T <: HList](implicit head: Format[H], 
tail: Format[T]): Format[H :+: T] = new Format[H :+: T] { + def reads(from: Input) = + { + val h = head.reads(from) + val t = tail.reads(from) + HCons(h, t) + } + def writes(to: Out, hc: H :+: T) { + head.writes(to, hc.head) + tail.writes(to, hc.tail) + } + } + + implicit def hNilFormat: Format[HNil] = asSingleton(HNil) } -trait UnionImplicits -{ - def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] = - new InputCache[UB] - { - type Internal = Found[_] - def convert(in: UB) = uc.find(in) - def read(in: Input) = - { - val index = ByteFormat.reads(in) - val (cache, clazz) = uc.at(index) - val value = cache.read(in) - new Found[cache.Internal](cache, clazz, value, index) - } - def write(to: Out, i: Internal) - { - def write0[I](f: Found[I]) - { - ByteFormat.writes(to, f.index.toByte) - f.cache.write(to, f.value) - } - write0(i) - } - def equiv: Equiv[Internal] = new Equiv[Internal] - { - def equiv(a: Internal, b: Internal) = - { - if(a.clazz == b.clazz) - force(a.cache.equiv, a.value, b.value) - else - false - } - def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T]) - } - } +trait UnionImplicits { + def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] = + new InputCache[UB] { + type Internal = Found[_] + def convert(in: UB) = uc.find(in) + def read(in: Input) = + { + val index = ByteFormat.reads(in) + val (cache, clazz) = uc.at(index) + val value = cache.read(in) + new Found[cache.Internal](cache, clazz, value, index) + } + def write(to: Out, i: Internal) { + def write0[I](f: Found[I]) { + ByteFormat.writes(to, f.index.toByte) + f.cache.write(to, f.value) + } + write0(i) + } + def equiv: Equiv[Internal] = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + { + if (a.clazz == b.clazz) + force(a.cache.equiv, a.value, b.value) + else + false + } + def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T]) + } + 
} - implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] = - new UnionCache[H :+: T, UB] - { - val size = 1 + t.size - def c = mf.runtimeClass - def find(value: UB): Found[_] = - if(c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value) - def at(i: Int): (InputCache[_ <: UB], Class[_]) = if(size == i + 1) (head, c) else t.at(i) - } + implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] = + new UnionCache[H :+: T, UB] { + val size = 1 + t.size + def c = mf.runtimeClass + def find(value: UB): Found[_] = + if (c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value) + def at(i: Int): (InputCache[_ <: UB], Class[_]) = if (size == i + 1) (head, c) else t.at(i) + } - implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] { - def size = 0 - def find(value: UB) = sys.error("No valid sum type for " + value) - def at(i: Int) = sys.error("Invalid union index " + i) - } + implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] { + def size = 0 + def find(value: UB) = sys.error("No valid sum type for " + value) + def at(i: Int) = sys.error("Invalid union index " + i) + } - final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int) - sealed trait UnionCache[HL <: HList, UB] - { - def size: Int - def at(i: Int): (InputCache[_ <: UB], Class[_]) - def find(forValue: UB): Found[_] - } + final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int) + sealed trait UnionCache[HL <: HList, UB] { + def size: Int + def at(i: Int): (InputCache[_ <: UB], Class[_]) + def find(forValue: UB): Found[_] + } } \ No newline at end of file 
diff --git a/cache/src/main/scala/sbt/CacheIO.scala b/cache/src/main/scala/sbt/CacheIO.scala index ac698c24e..a50da7ee7 100644 --- a/cache/src/main/scala/sbt/CacheIO.scala +++ b/cache/src/main/scala/sbt/CacheIO.scala @@ -3,43 +3,42 @@ */ package sbt -import java.io.{File, FileNotFoundException} -import sbinary.{DefaultProtocol, Format, Operations} +import java.io.{ File, FileNotFoundException } +import sbinary.{ DefaultProtocol, Format, Operations } import scala.reflect.Manifest -object CacheIO -{ - def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] = - toBytes[T](value)(format, mf) - def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] = - Operations.toByteArray(value)(stampedFormat(format)) - def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T = - fromBytes(default)(bytes)(format, mf) - def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T = - if(bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format)) - - def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T = - fromFile(file, default)(format, mf) - def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T = - fromFile[T](file) getOrElse default - def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] = - try { Some( Operations.fromFile(file)(stampedFormat(format)) ) } - catch { case e: Exception => None } - - def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit = - toFile(value)(file)(format, mf) - def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit = - { - IO.createDirectory(file.getParentFile) - Operations.toFile(value)(file)(stampedFormat(format)) - } - def stampedFormat[T](format: 
Format[T])(implicit mf: Manifest[Format[T]]): Format[T] = - { - import DefaultProtocol._ - withStamp(stamp(format))(format) - } - def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf) - def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode - def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf - def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf +object CacheIO { + def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] = + toBytes[T](value)(format, mf) + def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] = + Operations.toByteArray(value)(stampedFormat(format)) + def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T = + fromBytes(default)(bytes)(format, mf) + def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T = + if (bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format)) + + def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T = + fromFile(file, default)(format, mf) + def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T = + fromFile[T](file) getOrElse default + def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] = + try { Some(Operations.fromFile(file)(stampedFormat(format))) } + catch { case e: Exception => None } + + def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit = + toFile(value)(file)(format, mf) + def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit = + { + IO.createDirectory(file.getParentFile) + Operations.toFile(value)(file)(stampedFormat(format)) + } + def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] = + { + import 
DefaultProtocol._ + withStamp(stamp(format))(format) + } + def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf) + def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode + def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf + def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf } \ No newline at end of file diff --git a/cache/src/main/scala/sbt/FileInfo.scala b/cache/src/main/scala/sbt/FileInfo.scala index e4706c1fa..c735adcb0 100644 --- a/cache/src/main/scala/sbt/FileInfo.scala +++ b/cache/src/main/scala/sbt/FileInfo.scala @@ -3,26 +3,22 @@ */ package sbt -import java.io.{File, IOException} -import sbinary.{DefaultProtocol, Format} +import java.io.{ File, IOException } +import sbinary.{ DefaultProtocol, Format } import DefaultProtocol._ import scala.reflect.Manifest -sealed trait FileInfo extends NotNull -{ - val file: File +sealed trait FileInfo extends NotNull { + val file: File } -sealed trait HashFileInfo extends FileInfo -{ - val hash: List[Byte] +sealed trait HashFileInfo extends FileInfo { + val hash: List[Byte] } -sealed trait ModifiedFileInfo extends FileInfo -{ - val lastModified: Long +sealed trait ModifiedFileInfo extends FileInfo { + val lastModified: Long } -sealed trait PlainFileInfo extends FileInfo -{ - def exists: Boolean +sealed trait PlainFileInfo extends FileInfo { + def exists: Boolean } sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo @@ -31,90 +27,80 @@ private final case class FileHash(file: File, hash: List[Byte]) extends HashFile private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo -object FileInfo -{ - implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache - implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache - implicit 
def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache - implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache +object FileInfo { + implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache + implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache + implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache + implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache - sealed trait Style - { - type F <: FileInfo - implicit def apply(file: File): F - implicit def unapply(info: F): File = info.file - implicit val format: Format[F] - import Cache._ - implicit def fileInfoEquiv: Equiv[F] = defaultEquiv - def infoInputCache: InputCache[F] = basicInput - implicit def fileInputCache: InputCache[File] = wrapIn[File,F] - } - object full extends Style - { - type F = HashModifiedFileInfo - implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified) - def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified) - implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled) - } - object hash extends Style - { - type F = HashFileInfo - implicit def apply(file: File): HashFileInfo = make(file, computeHash(file)) - def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash) - implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled) - private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil } - } - object lastModified extends Style - { - type F = ModifiedFileInfo - implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified) - def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified) 
- implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled) - } - object exists extends Style - { - type F = PlainFileInfo - implicit def apply(file: File): PlainFileInfo = make(file) - def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) } - implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists)) - } + sealed trait Style { + type F <: FileInfo + implicit def apply(file: File): F + implicit def unapply(info: F): File = info.file + implicit val format: Format[F] + import Cache._ + implicit def fileInfoEquiv: Equiv[F] = defaultEquiv + def infoInputCache: InputCache[F] = basicInput + implicit def fileInputCache: InputCache[File] = wrapIn[File, F] + } + object full extends Style { + type F = HashModifiedFileInfo + implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified) + def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified) + implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled) + } + object hash extends Style { + type F = HashFileInfo + implicit def apply(file: File): HashFileInfo = make(file, computeHash(file)) + def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash) + implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled) + private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil } + } + object lastModified extends Style { + type F = ModifiedFileInfo + implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified) + def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified) + implicit val format: Format[ModifiedFileInfo] = 
wrap(f => (f.file, f.lastModified), (make _).tupled) + } + object exists extends Style { + type F = PlainFileInfo + implicit def apply(file: File): PlainFileInfo = make(file) + def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) } + implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists)) + } } -final case class FilesInfo[F <: FileInfo] private(files: Set[F]) -object FilesInfo -{ - sealed abstract class Style - { - type F <: FileInfo - val fileStyle: FileInfo.Style { type F = Style.this.F } +final case class FilesInfo[F <: FileInfo] private (files: Set[F]) +object FilesInfo { + sealed abstract class Style { + type F <: FileInfo + val fileStyle: FileInfo.Style { type F = Style.this.F } - //def manifest: Manifest[F] = fileStyle.manifest - implicit def apply(files: Set[File]): FilesInfo[F] - implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file) - implicit val formats: Format[FilesInfo[F]] - val manifest: Manifest[Format[FilesInfo[F]]] - def empty: FilesInfo[F] = new FilesInfo[F](Set.empty) - import Cache._ - def infosInputCache: InputCache[FilesInfo[F]] = basicInput - implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File],FilesInfo[F]] - implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv - } - private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI }) - (implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style - { - type F = FI - val fileStyle: FileInfo.Style { type F = FI } = style - private implicit val infoFormat: Format[FI] = fileStyle.format - implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo( files.map(_.getAbsoluteFile).map(fileStyle.apply) ) - implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs)) - } - lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full) - lazy val 
hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash) - lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified) - lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists) + //def manifest: Manifest[F] = fileStyle.manifest + implicit def apply(files: Set[File]): FilesInfo[F] + implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file) + implicit val formats: Format[FilesInfo[F]] + val manifest: Manifest[Format[FilesInfo[F]]] + def empty: FilesInfo[F] = new FilesInfo[F](Set.empty) + import Cache._ + def infosInputCache: InputCache[FilesInfo[F]] = basicInput + implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File], FilesInfo[F]] + implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv + } + private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI })(implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style { + type F = FI + val fileStyle: FileInfo.Style { type F = FI } = style + private implicit val infoFormat: Format[FI] = fileStyle.format + implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files.map(_.getAbsoluteFile).map(fileStyle.apply)) + implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs)) + } + lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full) + lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash) + lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified) + lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists) - implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache - implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache - implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache - implicit 
def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache + implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache + implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache + implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache + implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache } \ No newline at end of file diff --git a/cache/src/main/scala/sbt/SeparatedCache.scala b/cache/src/main/scala/sbt/SeparatedCache.scala index a126229bd..9d11f1f3c 100644 --- a/cache/src/main/scala/sbt/SeparatedCache.scala +++ b/cache/src/main/scala/sbt/SeparatedCache.scala @@ -4,64 +4,59 @@ package sbt import Types.:+: -import sbinary.{DefaultProtocol, Format, Input, Output => Out} +import sbinary.{ DefaultProtocol, Format, Input, Output => Out } import DefaultProtocol.ByteFormat -import java.io.{File, InputStream, OutputStream} +import java.io.{ File, InputStream, OutputStream } -trait InputCache[I] -{ - type Internal - def convert(i: I): Internal - def read(from: Input): Internal - def write(to: Out, j: Internal): Unit - def equiv: Equiv[Internal] +trait InputCache[I] { + type Internal + def convert(i: I): Internal + def read(from: Input): Internal + def write(to: Out, j: Internal): Unit + def equiv: Equiv[Internal] } -object InputCache -{ - implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = - new InputCache[I] - { - type Internal = I - def convert(i: I) = i - def read(from: Input): I = fmt.reads(from) - def write(to: Out, i: I) = fmt.writes(to, i) - def equiv = eqv - } - def lzy[I](mkIn: => InputCache[I]): InputCache[I] = - new InputCache[I] - { - lazy val ic = mkIn - type Internal = ic.Internal - def convert(i: I) = ic convert i - def read(from: Input): ic.Internal = ic.read(from) - def write(to: Out, i: ic.Internal) = ic.write(to, i) - def equiv = 
ic.equiv - } +object InputCache { + implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = + new InputCache[I] { + type Internal = I + def convert(i: I) = i + def read(from: Input): I = fmt.reads(from) + def write(to: Out, i: I) = fmt.writes(to, i) + def equiv = eqv + } + def lzy[I](mkIn: => InputCache[I]): InputCache[I] = + new InputCache[I] { + lazy val ic = mkIn + type Internal = ic.Internal + def convert(i: I) = ic convert i + def read(from: Input): ic.Internal = ic.read(from) + def write(to: Out, i: ic.Internal) = ic.write(to, i) + def equiv = ic.equiv + } } -class BasicCache[I,O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I,O] -{ - def apply(file: File)(in: I) = - { - val j = input.convert(in) - try { applyImpl(file, j) } - catch { case e: Exception => Right(update(file)(j)) } - } - protected def applyImpl(file: File, in: input.Internal) = - { - Using.fileInputStream(file) { stream => - val previousIn = input.read(stream) - if(input.equiv.equiv(in, previousIn)) - Left(outFormat.reads(stream)) - else - Right(update(file)(in)) - } - } - protected def update(file: File)(in: input.Internal) = (out: O) => - { - Using.fileOutputStream(false)(file) { stream => - input.write(stream, in) - outFormat.writes(stream, out) - } - } +class BasicCache[I, O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I, O] { + def apply(file: File)(in: I) = + { + val j = input.convert(in) + try { applyImpl(file, j) } + catch { case e: Exception => Right(update(file)(j)) } + } + protected def applyImpl(file: File, in: input.Internal) = + { + Using.fileInputStream(file) { stream => + val previousIn = input.read(stream) + if (input.equiv.equiv(in, previousIn)) + Left(outFormat.reads(stream)) + else + Right(update(file)(in)) + } + } + protected def update(file: File)(in: input.Internal) = (out: O) => + { + Using.fileOutputStream(false)(file) { stream => + input.write(stream, in) + outFormat.writes(stream, out) + } + 
} } \ No newline at end of file diff --git a/cache/tracking/src/main/scala/sbt/ChangeReport.scala b/cache/tracking/src/main/scala/sbt/ChangeReport.scala index 634650f20..8502f9d3f 100644 --- a/cache/tracking/src/main/scala/sbt/ChangeReport.scala +++ b/cache/tracking/src/main/scala/sbt/ChangeReport.scala @@ -3,71 +3,68 @@ */ package sbt -object ChangeReport -{ - def modified[T](files: Set[T]) = - new EmptyChangeReport[T] - { - override def checked = files - override def modified = files - override def markAllModified = this - } - def unmodified[T](files: Set[T]) = - new EmptyChangeReport[T] - { - override def checked = files - override def unmodified = files - } +object ChangeReport { + def modified[T](files: Set[T]) = + new EmptyChangeReport[T] { + override def checked = files + override def modified = files + override def markAllModified = this + } + def unmodified[T](files: Set[T]) = + new EmptyChangeReport[T] { + override def checked = files + override def unmodified = files + } } /** The result of comparing some current set of objects against a previous set of objects.*/ -trait ChangeReport[T] extends NotNull -{ - /** The set of all of the objects in the current set.*/ - def checked: Set[T] - /** All of the objects that are in the same state in the current and reference sets.*/ - def unmodified: Set[T] - /** All checked objects that are not in the same state as the reference. This includes objects that are in both - * sets but have changed and files that are only in one set.*/ - def modified: Set[T] // all changes, including added - /** All objects that are only in the current set.*/ - def added: Set[T] - /** All objects only in the previous set*/ - def removed: Set[T] - def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other) - /** Generate a new report with this report's unmodified set included in the new report's modified set. The new report's - * unmodified set is empty. 
The new report's added, removed, and checked sets are the same as in this report. */ - def markAllModified: ChangeReport[T] = - new ChangeReport[T] - { - def checked = ChangeReport.this.checked - def unmodified = Set.empty[T] - def modified = ChangeReport.this.checked - def added = ChangeReport.this.added - def removed = ChangeReport.this.removed - override def markAllModified = this - } - override def toString = - { - val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed") - val sets = List(checked, modified, unmodified, added, removed) - val keyValues = labels.zip(sets).map{ case (label, set) => label + ": " + set.mkString(", ") } - keyValues.mkString("Change report:\n\t", "\n\t", "") - } +trait ChangeReport[T] extends NotNull { + /** The set of all of the objects in the current set.*/ + def checked: Set[T] + /** All of the objects that are in the same state in the current and reference sets.*/ + def unmodified: Set[T] + /** + * All checked objects that are not in the same state as the reference. This includes objects that are in both + * sets but have changed and files that are only in one set. + */ + def modified: Set[T] // all changes, including added + /** All objects that are only in the current set.*/ + def added: Set[T] + /** All objects only in the previous set*/ + def removed: Set[T] + def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other) + /** + * Generate a new report with this report's unmodified set included in the new report's modified set. The new report's + * unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report. 
+ */ + def markAllModified: ChangeReport[T] = + new ChangeReport[T] { + def checked = ChangeReport.this.checked + def unmodified = Set.empty[T] + def modified = ChangeReport.this.checked + def added = ChangeReport.this.added + def removed = ChangeReport.this.removed + override def markAllModified = this + } + override def toString = + { + val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed") + val sets = List(checked, modified, unmodified, added, removed) + val keyValues = labels.zip(sets).map { case (label, set) => label + ": " + set.mkString(", ") } + keyValues.mkString("Change report:\n\t", "\n\t", "") + } } -class EmptyChangeReport[T] extends ChangeReport[T] -{ - def checked = Set.empty[T] - def unmodified = Set.empty[T] - def modified = Set.empty[T] - def added = Set.empty[T] - def removed = Set.empty[T] - override def toString = "No changes" +class EmptyChangeReport[T] extends ChangeReport[T] { + def checked = Set.empty[T] + def unmodified = Set.empty[T] + def modified = Set.empty[T] + def added = Set.empty[T] + def removed = Set.empty[T] + override def toString = "No changes" } -private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] -{ - lazy val checked = a.checked ++ b.checked - lazy val unmodified = a.unmodified ++ b.unmodified - lazy val modified = a.modified ++ b.modified - lazy val added = a.added ++ b.added - lazy val removed = a.removed ++ b.removed +private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] { + lazy val checked = a.checked ++ b.checked + lazy val unmodified = a.unmodified ++ b.unmodified + lazy val modified = a.modified ++ b.modified + lazy val added = a.added ++ b.added + lazy val removed = a.removed ++ b.removed } \ No newline at end of file diff --git a/cache/tracking/src/main/scala/sbt/Tracked.scala b/cache/tracking/src/main/scala/sbt/Tracked.scala index fb0747ed9..c851ef9a5 100644 --- 
a/cache/tracking/src/main/scala/sbt/Tracked.scala +++ b/cache/tracking/src/main/scala/sbt/Tracked.scala @@ -4,204 +4,202 @@ package sbt import java.io.File -import CacheIO.{fromFile, toFile} +import CacheIO.{ fromFile, toFile } import sbinary.Format import scala.reflect.Manifest import scala.collection.mutable -import IO.{delete, read, write} +import IO.{ delete, read, write } +object Tracked { + /** + * Creates a tracker that provides the last time it was evaluated. + * If 'useStartTime' is true, the recorded time is the start of the evaluated function. + * If 'useStartTime' is false, the recorded time is when the evaluated function completes. + * In both cases, the timestamp is not updated if the function throws an exception. + */ + def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime) + /** Creates a tracker that only evaluates a function when the input has changed.*/ + //def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] = + // new Changed[O](cacheFile) -object Tracked -{ - /** Creates a tracker that provides the last time it was evaluated. - * If 'useStartTime' is true, the recorded time is the start of the evaluated function. - * If 'useStartTime' is false, the recorded time is when the evaluated function completes. 
- * In both cases, the timestamp is not updated if the function throws an exception.*/ - def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime) - /** Creates a tracker that only evaluates a function when the input has changed.*/ - //def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] = - // new Changed[O](cacheFile) - - /** Creates a tracker that provides the difference between a set of input files for successive invocations.*/ - def diffInputs(cache: File, style: FilesInfo.Style): Difference = - Difference.inputs(cache, style) - /** Creates a tracker that provides the difference between a set of output files for successive invocations.*/ - def diffOutputs(cache: File, style: FilesInfo.Style): Difference = - Difference.outputs(cache, style) + /** Creates a tracker that provides the difference between a set of input files for successive invocations.*/ + def diffInputs(cache: File, style: FilesInfo.Style): Difference = + Difference.inputs(cache, style) + /** Creates a tracker that provides the difference between a set of output files for successive invocations.*/ + def diffOutputs(cache: File, style: FilesInfo.Style): Difference = + Difference.outputs(cache, style) - def lastOutput[I,O](cacheFile: File)(f: (I,Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in => - { - val previous: Option[O] = fromFile[O](cacheFile) - val next = f(in, previous) - toFile(next)(cacheFile) - next - } + def lastOutput[I, O](cacheFile: File)(f: (I, Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in => + { + val previous: Option[O] = fromFile[O](cacheFile) + val next = f(in, previous) + toFile(next)(cacheFile) + next + } - def inputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in => - { - val help = new CacheHelp(ic) - val conv = help.convert(in) - val changed = help.changed(cacheFile, conv) - val 
result = f(changed, in) - - if(changed) - help.save(cacheFile, conv) + def inputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in => + { + val help = new CacheHelp(ic) + val conv = help.convert(in) + val changed = help.changed(cacheFile, conv) + val result = f(changed, in) - result - } - def outputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in => - { - val initial = in() - val help = new CacheHelp(ic) - val changed = help.changed(cacheFile, help.convert(initial)) - val result = f(changed, initial) - - if(changed) - help.save(cacheFile, help.convert(in())) + if (changed) + help.save(cacheFile, conv) - result - } - final class CacheHelp[I](val ic: InputCache[I]) - { - def convert(i: I): ic.Internal = ic.convert(i) - def save(cacheFile: File, value: ic.Internal): Unit = - Using.fileOutputStream()(cacheFile)(out => ic.write(out, value) ) - def changed(cacheFile: File, converted: ic.Internal): Boolean = - try { - val prev = Using.fileInputStream(cacheFile)(x => ic.read(x)) - !ic.equiv.equiv(converted, prev) - } catch { case e: Exception => true } - } + result + } + def outputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in => + { + val initial = in() + val help = new CacheHelp(ic) + val changed = help.changed(cacheFile, help.convert(initial)) + val result = f(changed, initial) + + if (changed) + help.save(cacheFile, help.convert(in())) + + result + } + final class CacheHelp[I](val ic: InputCache[I]) { + def convert(i: I): ic.Internal = ic.convert(i) + def save(cacheFile: File, value: ic.Internal): Unit = + Using.fileOutputStream()(cacheFile)(out => ic.write(out, value)) + def changed(cacheFile: File, converted: ic.Internal): Boolean = + try { + val prev = Using.fileInputStream(cacheFile)(x => ic.read(x)) + !ic.equiv.equiv(converted, prev) + } catch { case e: Exception => true } + } } -trait Tracked -{ - /** Cleans outputs 
and clears the cache.*/ - def clean(): Unit +trait Tracked { + /** Cleans outputs and clears the cache.*/ + def clean(): Unit } -class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked -{ - def clean() = delete(cacheFile) - /** Reads the previous timestamp, evaluates the provided function, - * and then updates the timestamp if the function completes normally.*/ - def apply[T](f: Long => T): T = - { - val start = now() - val result = f(readTimestamp) - write(cacheFile, (if(useStartTime) start else now()).toString) - result - } - private def now() = System.currentTimeMillis - def readTimestamp: Long = - try { read(cacheFile).toLong } - catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 } +class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked { + def clean() = delete(cacheFile) + /** + * Reads the previous timestamp, evaluates the provided function, + * and then updates the timestamp if the function completes normally. + */ + def apply[T](f: Long => T): T = + { + val start = now() + val result = f(readTimestamp) + write(cacheFile, (if (useStartTime) start else now()).toString) + result + } + private def now() = System.currentTimeMillis + def readTimestamp: Long = + try { read(cacheFile).toLong } + catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 } } -class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked -{ - def clean() = delete(cacheFile) - def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value => - { - if(uptodate(value)) - ifUnchanged(value) - else - { - update(value) - ifChanged(value) - } - } +class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked { + def clean() = delete(cacheFile) + def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value => + { + if (uptodate(value)) + ifUnchanged(value) + else { + update(value) + ifChanged(value) + } + } - 
def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value)) - def uptodate(value: O): Boolean = - try { - Using.fileInputStream(cacheFile) { - stream => equiv.equiv(value, format.reads(stream)) - } - } catch { - case _: Exception => false - } + def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value)) + def uptodate(value: O): Boolean = + try { + Using.fileInputStream(cacheFile) { + stream => equiv.equiv(value, format.reads(stream)) + } + } catch { + case _: Exception => false + } } -object Difference -{ - def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference = - (cache, style) => new Difference(cache, style, defineClean, filesAreOutputs) +object Difference { + def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference = + (cache, style) => new Difference(cache, style, defineClean, filesAreOutputs) - /** Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the - * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice: - * before and after running the function.*/ - val outputs = constructor(true, true) - /** Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the - * hash/last modified time of the files as they were prior to running the function.*/ - val inputs = constructor(false, false) + /** + * Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the + * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice: + * before and after running the function. 
+ */ + val outputs = constructor(true, true) + /** + * Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the + * hash/last modified time of the files as they were prior to running the function. + */ + val inputs = constructor(false, false) } -class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked -{ - def clean() = - { - if(defineClean) delete(raw(cachedFilesInfo)) else () - clearCache() - } - private def clearCache() = delete(cache) - - private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files - private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file) - - def apply[T](files: Set[File])(f: ChangeReport[File] => T): T = - { - val lastFilesInfo = cachedFilesInfo - apply(files, lastFilesInfo)(f)(_ => files) - } - - def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = - { - val lastFilesInfo = cachedFilesInfo - apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles) - } - - private def abs(files: Set[File]) = files.map(_.getAbsoluteFile) - private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T = - { - val lastFiles = raw(lastFilesInfo) - val currentFiles = abs(files) - val currentFilesInfo = style(currentFiles) +class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked { + def clean() = + { + if (defineClean) delete(raw(cachedFilesInfo)) else () + clearCache() + } + private def clearCache() = delete(cache) - val report = new ChangeReport[File] - { - lazy val checked = currentFiles - lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist. - lazy val added = checked -- lastFiles // all files included now but not previously. 
This is independent of whether the files exist. - lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added - lazy val unmodified = checked -- modified - } + private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files + private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file) - val result = f(report) - val info = if(filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo - toFile(style.formats)(info)(cache)(style.manifest) - result - } + def apply[T](files: Set[File])(f: ChangeReport[File] => T): T = + { + val lastFilesInfo = cachedFilesInfo + apply(files, lastFilesInfo)(f)(_ => files) + } + + def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = + { + val lastFilesInfo = cachedFilesInfo + apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles) + } + + private def abs(files: Set[File]) = files.map(_.getAbsoluteFile) + private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T = + { + val lastFiles = raw(lastFilesInfo) + val currentFiles = abs(files) + val currentFilesInfo = style(currentFiles) + + val report = new ChangeReport[File] { + lazy val checked = currentFiles + lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist. + lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist. 
+ lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added + lazy val unmodified = checked -- modified + } + + val result = f(report) + val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo + toFile(style.formats)(info)(cache)(style.manifest) + result + } } object FileFunction { - type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File] - - def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] = - cached(cacheBaseDirectory)(inStyle, outStyle)( (in, out) => action(in.checked) ) - - def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] = - { - import Path._ - lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle) - lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle) - inputs => - { - inCache(inputs) { inReport => - outCache { outReport => - if(inReport.modified.isEmpty && outReport.modified.isEmpty) - outReport.checked - else - action(inReport, outReport) - } - } - } - } + type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File] + + def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] = + cached(cacheBaseDirectory)(inStyle, outStyle)((in, out) => action(in.checked)) + + def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] = + { + import Path._ + lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle) + lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle) + inputs => + { + inCache(inputs) { inReport => + outCache { outReport => + if 
(inReport.modified.isEmpty && outReport.modified.isEmpty) + outReport.checked + else + action(inReport, outReport) + } + } + } + } } \ No newline at end of file diff --git a/compile/api/src/main/scala/sbt/ClassToAPI.scala b/compile/api/src/main/scala/sbt/ClassToAPI.scala index 642dfcf1d..5b92b8d36 100644 --- a/compile/api/src/main/scala/sbt/ClassToAPI.scala +++ b/compile/api/src/main/scala/sbt/ClassToAPI.scala @@ -1,6 +1,6 @@ package sbt -import java.lang.reflect.{Array => _, _} +import java.lang.reflect.{ Array => _, _ } import java.lang.annotation.Annotation import annotation.tailrec import xsbti.api @@ -8,329 +8,323 @@ import xsbti.SafeLazy import SafeLazy.strict import collection.mutable -object ClassToAPI -{ - def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1 +object ClassToAPI { + def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1 - // (api, public inherited classes) - def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) = - { - val pkgs = packages(c).map(p => new api.Package(p)) - val cmap = emptyClassMap - val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap)) - val source = new api.SourceAPI(pkgs.toArray, defs.toArray) - cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded - val inDeps = cmap.inherited.toSet - cmap.clear() - (source, inDeps) - } + // (api, public inherited classes) + def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) = + { + val pkgs = packages(c).map(p => new api.Package(p)) + val cmap = emptyClassMap + val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap)) + val source = new api.SourceAPI(pkgs.toArray, defs.toArray) + cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded + val inDeps = cmap.inherited.toSet + cmap.clear() + (source, inDeps) + } - // Avoiding implicit allocation. 
- private def arrayMap[T <: AnyRef, U <: AnyRef : ClassManifest](xs: Array[T])(f: T => U): Array[U] = { - val len = xs.length - var i = 0 - val res = new Array[U](len) - while (i < len) { - res(i) = f(xs(i)) - i += 1 - } - res - } + // Avoiding implicit allocation. + private def arrayMap[T <: AnyRef, U <: AnyRef: ClassManifest](xs: Array[T])(f: T => U): Array[U] = { + val len = xs.length + var i = 0 + val res = new Array[U](len) + while (i < len) { + res(i) = f(xs(i)) + i += 1 + } + res + } - def packages(c: Seq[Class[_]]): Set[String] = - c.flatMap(packageName).toSet + def packages(c: Seq[Class[_]]): Set[String] = + c.flatMap(packageName).toSet - def isTopLevel(c: Class[_]): Boolean = - c.getEnclosingClass eq null + def isTopLevel(c: Class[_]): Boolean = + c.getEnclosingClass eq null - final class ClassMap private[sbt](private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) { - def clear() { memo.clear(); inherited.clear(); lz.clear() } - } - def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer) + final class ClassMap private[sbt] (private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) { + def clear() { memo.clear(); inherited.clear(); lz.clear() } + } + def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer) - def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] = - cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap)) - def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] = - { - import api.DefinitionType.{ClassDef, Module, Trait} - val enclPkg = packageName(c) - val mods = modifiers(c.getModifiers) - val acc = access(c.getModifiers, enclPkg) - val annots = annotations(c.getAnnotations) - val 
name = c.getName - val tpe = if(Modifier.isInterface(c.getModifiers)) Trait else ClassDef - lazy val (static, instance) = structure(c, enclPkg, cmap) - val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots) - val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots) - val defs = cls :: stat :: Nil - cmap.memo(c.getName) = defs - defs - } + def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] = + cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap)) + def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] = + { + import api.DefinitionType.{ ClassDef, Module, Trait } + val enclPkg = packageName(c) + val mods = modifiers(c.getModifiers) + val acc = access(c.getModifiers, enclPkg) + val annots = annotations(c.getAnnotations) + val name = c.getName + val tpe = if (Modifier.isInterface(c.getModifiers)) Trait else ClassDef + lazy val (static, instance) = structure(c, enclPkg, cmap) + val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots) + val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots) + val defs = cls :: stat :: Nil + cmap.memo(c.getName) = defs + defs + } - /** Returns the (static structure, instance structure, inherited classes) for `c`. 
*/ - def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) = - { - val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg)) - val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(enclPkg)) - val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg)) - val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c) - val all = (methods ++ fields ++ constructors ++ classes) - val parentJavaTypes = allSuperTypes(c) - if(!Modifier.isPrivate(c.getModifiers)) - cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c } - val parentTypes = types(parentJavaTypes) - val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray)) - val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray)) - (staticStructure, instanceStructure) - } - private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t) - def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = xsbti.SafeLazy(t) - private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = { - val s = lzy(t) - cmap.lz += s - s - } + /** Returns the (static structure, instance structure, inherited classes) for `c`. 
*/ + def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) = + { + val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg)) + val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(enclPkg)) + val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg)) + val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c) + val all = (methods ++ fields ++ constructors ++ classes) + val parentJavaTypes = allSuperTypes(c) + if (!Modifier.isPrivate(c.getModifiers)) + cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c } + val parentTypes = types(parentJavaTypes) + val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray)) + val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray)) + (staticStructure, instanceStructure) + } + private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t) + def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = xsbti.SafeLazy(t) + private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = { + val s = lzy(t) + cmap.lz += s + s + } - private val emptyStringArray = new Array[String](0) - private val emptyTypeArray = new Array[xsbti.api.Type](0) - private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0) - private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0) - private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0) - private val lzyEmptyTpeArray = lzyS(emptyTypeArray) - private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0)) + private val emptyStringArray = new Array[String](0) + private val emptyTypeArray = new Array[xsbti.api.Type](0) + private val emptyAnnotationArray = new 
Array[xsbti.api.Annotation](0) + private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0) + private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0) + private val lzyEmptyTpeArray = lzyS(emptyTypeArray) + private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0)) - private def allSuperTypes(t: Type): Seq[Type] = - { - @tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match { - case c: Class[_] => - val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces) - accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces)) - case p: ParameterizedType => - accumulate(p.getRawType, accum) - case _ => - accum - } - @tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] = - { - if (!interfaces.isEmpty) { - val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i } - val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty } - flattenAll(children, accum ++ interfaces ++ children) - } - else - accum - } - accumulate(t).filterNot(_ == null).distinct - } + private def allSuperTypes(t: Type): Seq[Type] = + { + @tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match { + case c: Class[_] => + val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces) + accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces)) + case p: ParameterizedType => + accumulate(p.getRawType, accum) + case _ => + accum + } + @tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] = + { + if (!interfaces.isEmpty) { + val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i } + val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty } + flattenAll(children, accum ++ interfaces ++ children) + } else + accum + } + accumulate(t).filterNot(_ == null).distinct + } - @deprecated("No longer 
used", "0.13.0") - def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c)) - def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray; - def upperBounds(ts: Array[Type]): api.Type = - new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray) + @deprecated("No longer used", "0.13.0") + def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c)) + def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray; + def upperBounds(ts: Array[Type]): api.Type = + new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray) - def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike = - { - val name = f.getName - val accs = access(f.getModifiers, enclPkg) - val mods = modifiers(f.getModifiers) - val annots = annotations(f.getDeclaredAnnotations) - val tpe = reference(returnType(f)) - if(mods.isFinal) new api.Val(tpe, name, accs, mods, annots) else new api.Var(tpe, name, accs, mods, annots) - } + def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike = + { + val name = f.getName + val accs = access(f.getModifiers, enclPkg) + val mods = modifiers(f.getModifiers) + val annots = annotations(f.getDeclaredAnnotations) + val tpe = reference(returnType(f)) + if (mods.isFinal) new api.Val(tpe, name, accs, mods, annots) else new api.Var(tpe, name, accs, mods, annots) + } - def methodToDef(enclPkg: Option[String])(m: Method): api.Def = - defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Some(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg) + def methodToDef(enclPkg: Option[String])(m: Method): api.Def = + defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Some(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg) - def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def = - defLike("", 
c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg) + def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def = + defLike("", c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg) - def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def = - { - val varArgPosition = if(varArgs) paramTypes.length - 1 else -1 - val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition) - val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a,p,v) => parameter(a,p,v) } - val params = new api.ParameterList(pa, false) - val ret = retType match { case Some(rt) => reference(rt); case None => Empty } - new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ exceptionAnnotations(exceptions)) - } + def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def = + { + val varArgPosition = if (varArgs) paramTypes.length - 1 else -1 + val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition) + val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a, p, v) => parameter(a, p, v) } + val params = new api.ParameterList(pa, false) + val ret = retType match { case Some(rt) => reference(rt); case None => Empty } + new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ 
exceptionAnnotations(exceptions)) + } - def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] = - if (exceptions.length == 0) emptyAnnotationArray - else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString)))) + def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] = + if (exceptions.length == 0) emptyAnnotationArray + else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString)))) - def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter = - new api.MethodParameter("", annotated(reference(parameter),annots), false, if(varArgs) api.ParameterModifier.Repeated else api.ParameterModifier.Plain) + def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter = + new api.MethodParameter("", annotated(reference(parameter), annots), false, if (varArgs) api.ParameterModifier.Repeated else api.ParameterModifier.Plain) - def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = ( - if (annots.length == 0) t - else new api.Annotated(t, annotations(annots)) - ) + def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = ( + if (annots.length == 0) t + else new api.Annotated(t, annotations(annots)) + ) - case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: Seq[api.Definition]) - { - def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited) - } - def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs = - merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of) + case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: 
Seq[api.Definition]) { + def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited) + } + def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs = + merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of) - def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T],Seq[T]), isInherited: T => Boolean): Defs = - { - val (selfStatic, selfInstance) = splitStatic(self) - val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited) - Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f) - } + def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T], Seq[T]), isInherited: T => Boolean): Defs = + { + val (selfStatic, selfInstance) = splitStatic(self) + val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited) + Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f) + } - def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) = - defs partition isStatic + def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) = + defs partition isStatic - def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers) - def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers) + def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers) + def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers) - def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] = - if (tps.length == 0) emptyTypeParameterArray - else arrayMap(tps)(typeParameter) + def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] = + if (tps.length == 0) 
emptyTypeParameterArray + else arrayMap(tps)(typeParameter) - def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter = - new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds)) + def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter = + new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds)) - // needs to be stable across compilations - def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String = - name(tv.getGenericDeclaration) + " " + tv.getName + // needs to be stable across compilations + def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String = + name(tv.getGenericDeclaration) + " " + tv.getName - def reduceHash(in: Array[Byte]): Int = - (0 /: in)( (acc, b) => (acc * 43) ^ b) + def reduceHash(in: Array[Byte]): Int = + (0 /: in)((acc, b) => (acc * 43) ^ b) - def name(gd: GenericDeclaration): String = - gd match - { - case c: Class[_] => c.getName - case m: Method => m.getName - case c: Constructor[_] => c.getName - } + def name(gd: GenericDeclaration): String = + gd match { + case c: Class[_] => c.getName + case m: Method => m.getName + case c: Constructor[_] => c.getName + } - def modifiers(i: Int): api.Modifiers = - { - import Modifier.{isAbstract, isFinal} - new api.Modifiers( isAbstract(i), false, isFinal(i), false, false, false, false) - } - def access(i: Int, pkg: Option[String]): api.Access = - { - import Modifier.{isPublic, isPrivate, isProtected} - if(isPublic(i)) Public else if(isPrivate(i)) Private else if(isProtected(i)) Protected else packagePrivate(pkg) - } + def modifiers(i: Int): api.Modifiers = + { + import Modifier.{ isAbstract, isFinal } + new api.Modifiers(isAbstract(i), false, isFinal(i), false, false, false, false) + } + def access(i: Int, pkg: Option[String]): api.Access 
= + { + import Modifier.{ isPublic, isPrivate, isProtected } + if (isPublic(i)) Public else if (isPrivate(i)) Private else if (isProtected(i)) Protected else packagePrivate(pkg) + } - def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation) - def annotation(a: Annotation): api.Annotation = - new api.Annotation( reference(a.annotationType), Array(javaAnnotation(a.toString))) + def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation) + def annotation(a: Annotation): api.Annotation = + new api.Annotation(reference(a.annotationType), Array(javaAnnotation(a.toString))) - // full information not available from reflection - def javaAnnotation(s: String): api.AnnotationArgument = - new api.AnnotationArgument("toString", s) + // full information not available from reflection + def javaAnnotation(s: String): api.AnnotationArgument = + new api.AnnotationArgument("toString", s) - def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe)) - def reference(c: Class[_]): api.SimpleType = - if(c.isArray) array(reference(c.getComponentType)) else if(c.isPrimitive) primitive(c.getName) else reference(c.getName) + def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe)) + def reference(c: Class[_]): api.SimpleType = + if (c.isArray) array(reference(c.getComponentType)) else if (c.isPrimitive) primitive(c.getName) else reference(c.getName) - // does not handle primitives - def reference(s: String): api.SimpleType = - { - val (pkg, cls) = packageAndName(s) - pkg match - { - // translate all primitives? 
- case None => new api.Projection(Empty, cls) - case Some(p) => - new api.Projection(new api.Singleton(pathFromString(p)), cls) - } - } - def referenceP(t: ParameterizedType): api.Parameterized = - { - val targs = t.getActualTypeArguments - val args = if (targs.length == 0) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type) - val base = reference(t.getRawType) - new api.Parameterized(base, args.toArray[api.Type]) - } - def reference(t: Type): api.SimpleType = - t match - { - case w: WildcardType => reference("_") - case tv: TypeVariable[_] => new api.ParameterRef(typeVariable(tv)) - case pt: ParameterizedType => referenceP(pt) - case gat: GenericArrayType => array(reference(gat.getGenericComponentType)) - case c: Class[_] => reference(c) - } + // does not handle primitives + def reference(s: String): api.SimpleType = + { + val (pkg, cls) = packageAndName(s) + pkg match { + // translate all primitives? + case None => new api.Projection(Empty, cls) + case Some(p) => + new api.Projection(new api.Singleton(pathFromString(p)), cls) + } + } + def referenceP(t: ParameterizedType): api.Parameterized = + { + val targs = t.getActualTypeArguments + val args = if (targs.length == 0) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type) + val base = reference(t.getRawType) + new api.Parameterized(base, args.toArray[api.Type]) + } + def reference(t: Type): api.SimpleType = + t match { + case w: WildcardType => reference("_") + case tv: TypeVariable[_] => new api.ParameterRef(typeVariable(tv)) + case pt: ParameterizedType => referenceP(pt) + case gat: GenericArrayType => array(reference(gat.getGenericComponentType)) + case c: Class[_] => reference(c) + } - def pathFromString(s: String): api.Path = - new api.Path(s.split("\\.").map(new api.Id(_)) :+ ThisRef ) - def packageName(c: Class[_]) = packageAndName(c)._1 - def packageAndName(c: Class[_]): (Option[String], String) = - packageAndName(c.getName) - def packageAndName(name: String): (Option[String], 
String) = - { - val lastDot = name.lastIndexOf('.') - if(lastDot >= 0) - (Some(name.substring(0, lastDot)), name.substring(lastDot+1)) - else - (None, name) - } + def pathFromString(s: String): api.Path = + new api.Path(s.split("\\.").map(new api.Id(_)) :+ ThisRef) + def packageName(c: Class[_]) = packageAndName(c)._1 + def packageAndName(c: Class[_]): (Option[String], String) = + packageAndName(c.getName) + def packageAndName(name: String): (Option[String], String) = + { + val lastDot = name.lastIndexOf('.') + if (lastDot >= 0) + (Some(name.substring(0, lastDot)), name.substring(lastDot + 1)) + else + (None, name) + } - val Empty = new api.EmptyType - val ThisRef = new api.This + val Empty = new api.EmptyType + val ThisRef = new api.This - val Public = new api.Public - val Unqualified = new api.Unqualified - val Private = new api.Private(Unqualified) - val Protected = new api.Protected(Unqualified) - def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse "")) + val Public = new api.Public + val Unqualified = new api.Unqualified + val Private = new api.Private(Unqualified) + val Protected = new api.Protected(Unqualified) + def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse "")) - val ArrayRef = reference("scala.Array") - val Throws = reference("scala.throws") - val NothingRef = reference("scala.Nothing") + val ArrayRef = reference("scala.Array") + val Throws = reference("scala.throws") + val NothingRef = reference("scala.Nothing") - private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double") - private[this] def PrimitiveMap = PrimitiveNames.map( j => (j, j.capitalize)) :+ ("void" -> "Unit") - private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." 
+ sn)) }.toMap - def primitive(name: String): api.SimpleType = PrimitiveRefs(name) + private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double") + private[this] def PrimitiveMap = PrimitiveNames.map(j => (j, j.capitalize)) :+ ("void" -> "Unit") + private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." + sn)) }.toMap + def primitive(name: String): api.SimpleType = PrimitiveRefs(name) - // Workarounds for https://github.com/sbt/sbt/issues/1035 - // these catch the GenericSignatureFormatError and return the erased type + // Workarounds for https://github.com/sbt/sbt/issues/1035 + // these catch the GenericSignatureFormatError and return the erased type - private[this] def returnType(f: Field): Type = try f.getGenericType catch { - case _: GenericSignatureFormatError => f.getType - } - private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch { - case _: GenericSignatureFormatError => convert(c.getParameterTypes) - } - private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch { - case _: GenericSignatureFormatError => convert(c.getExceptionTypes) - } - private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch { - case _: GenericSignatureFormatError => convert(m.getParameterTypes) - } - private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch { - case _: GenericSignatureFormatError => m.getReturnType - } - private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch { - case _: GenericSignatureFormatError => convert(m.getExceptionTypes) - } + private[this] def returnType(f: Field): Type = try f.getGenericType catch { + case _: GenericSignatureFormatError => f.getType + } + private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch { + case _: 
GenericSignatureFormatError => convert(c.getParameterTypes) + } + private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch { + case _: GenericSignatureFormatError => convert(c.getExceptionTypes) + } + private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch { + case _: GenericSignatureFormatError => convert(m.getParameterTypes) + } + private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch { + case _: GenericSignatureFormatError => m.getReturnType + } + private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch { + case _: GenericSignatureFormatError => convert(m.getExceptionTypes) + } - private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch { - case _: GenericSignatureFormatError => new Array(0) - } - private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch { - case _: GenericSignatureFormatError => new Array(0) - } - private[this] def typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch { - case _: GenericSignatureFormatError => new Array(0) - } - private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch{ - case _: GenericSignatureFormatError => c.getSuperclass - } - private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch{ - case _: GenericSignatureFormatError => convert(c.getInterfaces) - } + private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch { + case _: GenericSignatureFormatError => new Array(0) + } + private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch { + case _: GenericSignatureFormatError => new Array(0) + } + private[this] def 
typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch { + case _: GenericSignatureFormatError => new Array(0) + } + private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch { + case _: GenericSignatureFormatError => c.getSuperclass + } + private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch { + case _: GenericSignatureFormatError => convert(c.getInterfaces) + } - private[this] def convert(classes: Array[Class[_]]): Array[Type] = - classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only + private[this] def convert(classes: Array[Class[_]]): Array[Type] = + classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only } diff --git a/compile/api/src/main/scala/xsbt/api/APIUtil.scala b/compile/api/src/main/scala/xsbt/api/APIUtil.scala index 50d287fe4..32bec717a 100644 --- a/compile/api/src/main/scala/xsbt/api/APIUtil.scala +++ b/compile/api/src/main/scala/xsbt/api/APIUtil.scala @@ -1,71 +1,66 @@ package xsbt.api - import xsbti.SafeLazy - import xsbti.api._ - import scala.collection.mutable.HashSet +import xsbti.SafeLazy +import xsbti.api._ +import scala.collection.mutable.HashSet -object APIUtil -{ - val modifiersToByte = (m: Modifiers) => { - import m._ - def x(b: Boolean, bit: Int) = if(b) 1 << bit else 0 - ( x(isAbstract, 0) | x(isOverride, 1) | x(isFinal, 2) | x(isSealed, 3) | x(isImplicit, 4) | x(isLazy, 5) | x(isMacro, 6) ).toByte - } - val byteToModifiers = (b: Byte) => { - def x(bit: Int) = (b & (1 << bit)) != 0 - new Modifiers( x(0), x(1), x(2), x(3), x(4), x(5), x(6) ) - } +object APIUtil { + val modifiersToByte = (m: Modifiers) => { + import m._ + def x(b: Boolean, bit: Int) = if (b) 1 << bit else 0 + (x(isAbstract, 0) | x(isOverride, 1) | x(isFinal, 2) | x(isSealed, 3) | x(isImplicit, 4) | x(isLazy, 5) | x(isMacro, 6)).toByte + } + val byteToModifiers = (b: Byte) => { + def x(bit: Int) = (b & (1 << bit)) != 0 + new Modifiers(x(0), x(1), 
x(2), x(3), x(4), x(5), x(6)) + } - def isScalaSourceName(name: String): Boolean = name.endsWith(".scala") + def isScalaSourceName(name: String): Boolean = name.endsWith(".scala") - def hasMacro(s: SourceAPI): Boolean = - { - val check = new HasMacro - check.visitAPI(s) - check.hasMacro - } + def hasMacro(s: SourceAPI): Boolean = + { + val check = new HasMacro + check.visitAPI(s) + check.hasMacro + } - private[this] class HasMacro extends Visit - { - var hasMacro = false + private[this] class HasMacro extends Visit { + var hasMacro = false - // Don't visit inherited definitions since we consider that a class - // that inherits a macro does not have a macro. - override def visitStructure0(structure: Structure) - { - visitTypes(structure.parents) - visitDefinitions(structure.declared) - } + // Don't visit inherited definitions since we consider that a class + // that inherits a macro does not have a macro. + override def visitStructure0(structure: Structure) { + visitTypes(structure.parents) + visitDefinitions(structure.declared) + } - override def visitModifiers(m: Modifiers) - { - hasMacro ||= m.isMacro - super.visitModifiers(m) - } - } + override def visitModifiers(m: Modifiers) { + hasMacro ||= m.isMacro + super.visitModifiers(m) + } + } - def minimize(api: SourceAPI): SourceAPI = - new SourceAPI(api.packages, minimizeDefinitions(api.definitions)) - def minimizeDefinitions(ds: Array[Definition]): Array[Definition] = - ds flatMap minimizeDefinition - def minimizeDefinition(d: Definition): Array[Definition] = - d match - { - case c: ClassLike => Array(minimizeClass(c)) - case _ => Array() - } - def minimizeClass(c: ClassLike): ClassLike = - { - val savedAnnotations = Discovery.defAnnotations(c.structure, (_: Any) => true).toArray[String] - val struct = minimizeStructure(c.structure, c.definitionType == DefinitionType.Module) - new ClassLike(c.definitionType, lzy(emptyType), lzy(struct), savedAnnotations, c.typeParameters, c.name, c.access, c.modifiers, 
c.annotations) - } + def minimize(api: SourceAPI): SourceAPI = + new SourceAPI(api.packages, minimizeDefinitions(api.definitions)) + def minimizeDefinitions(ds: Array[Definition]): Array[Definition] = + ds flatMap minimizeDefinition + def minimizeDefinition(d: Definition): Array[Definition] = + d match { + case c: ClassLike => Array(minimizeClass(c)) + case _ => Array() + } + def minimizeClass(c: ClassLike): ClassLike = + { + val savedAnnotations = Discovery.defAnnotations(c.structure, (_: Any) => true).toArray[String] + val struct = minimizeStructure(c.structure, c.definitionType == DefinitionType.Module) + new ClassLike(c.definitionType, lzy(emptyType), lzy(struct), savedAnnotations, c.typeParameters, c.name, c.access, c.modifiers, c.annotations) + } - def minimizeStructure(s: Structure, isModule: Boolean): Structure = - new Structure(lzy(s.parents), filterDefinitions(s.declared, isModule), filterDefinitions(s.inherited, isModule)) - def filterDefinitions(ds: Array[Definition], isModule: Boolean): Lazy[Array[Definition]] = - lzy(if(isModule) ds filter Discovery.isMainMethod else Array()) - private[this] def lzy[T <: AnyRef](t: T): Lazy[T] = SafeLazy.strict(t) + def minimizeStructure(s: Structure, isModule: Boolean): Structure = + new Structure(lzy(s.parents), filterDefinitions(s.declared, isModule), filterDefinitions(s.inherited, isModule)) + def filterDefinitions(ds: Array[Definition], isModule: Boolean): Lazy[Array[Definition]] = + lzy(if (isModule) ds filter Discovery.isMainMethod else Array()) + private[this] def lzy[T <: AnyRef](t: T): Lazy[T] = SafeLazy.strict(t) - private[this] val emptyType = new EmptyType + private[this] val emptyType = new EmptyType } \ No newline at end of file diff --git a/compile/api/src/main/scala/xsbt/api/Discovered.scala b/compile/api/src/main/scala/xsbt/api/Discovered.scala index e0f04b8c9..f19793190 100644 --- a/compile/api/src/main/scala/xsbt/api/Discovered.scala +++ b/compile/api/src/main/scala/xsbt/api/Discovered.scala @@ 
-3,11 +3,9 @@ */ package xsbt.api -final case class Discovered(baseClasses: Set[String], annotations: Set[String], hasMain: Boolean, isModule: Boolean) -{ - def isEmpty = baseClasses.isEmpty && annotations.isEmpty +final case class Discovered(baseClasses: Set[String], annotations: Set[String], hasMain: Boolean, isModule: Boolean) { + def isEmpty = baseClasses.isEmpty && annotations.isEmpty } -object Discovered -{ - def empty = new Discovered(Set.empty, Set.empty, false, false) +object Discovered { + def empty = new Discovered(Set.empty, Set.empty, false, false) } \ No newline at end of file diff --git a/compile/api/src/main/scala/xsbt/api/Discovery.scala b/compile/api/src/main/scala/xsbt/api/Discovery.scala index fffeea31c..9cd07d4d2 100644 --- a/compile/api/src/main/scala/xsbt/api/Discovery.scala +++ b/compile/api/src/main/scala/xsbt/api/Discovery.scala @@ -3,105 +3,101 @@ */ package xsbt.api - import xsbti.api.{Path => APath, _} +import xsbti.api.{ Path => APath, _ } import Discovery._ -class Discovery(baseClasses: Set[String], annotations: Set[String]) -{ - def apply(s: Seq[Definition]): Seq[(Definition, Discovered)] = - s.map { d => (d, apply(d)) } - def apply(d: Definition): Discovered = - d match - { - case c: ClassLike if isConcrete(c.modifiers) => - if(isPublic(c)) - discover(c) - else if(isModule(c) && hasMainMethod(c)) // jvm does not require a main class to be public - new Discovered(Set.empty, Set.empty, true, true) - else - Discovered.empty - case _ => Discovered.empty - } - def discover(c: ClassLike): Discovered = - { - val onClass = Discovery.findAnnotations(c.annotations, annotations) - val onDefs = Discovery.defAnnotations(c.structure, annotations) ++ c.savedAnnotations.filter(annotations) - val module = isModule(c) - new Discovered( bases(c.name, c.structure.parents), onClass ++ onDefs, module && hasMainMethod(c), module ) - } +class Discovery(baseClasses: Set[String], annotations: Set[String]) { + def apply(s: Seq[Definition]): Seq[(Definition, 
Discovered)] = + s.map { d => (d, apply(d)) } + def apply(d: Definition): Discovered = + d match { + case c: ClassLike if isConcrete(c.modifiers) => + if (isPublic(c)) + discover(c) + else if (isModule(c) && hasMainMethod(c)) // jvm does not require a main class to be public + new Discovered(Set.empty, Set.empty, true, true) + else + Discovered.empty + case _ => Discovered.empty + } + def discover(c: ClassLike): Discovered = + { + val onClass = Discovery.findAnnotations(c.annotations, annotations) + val onDefs = Discovery.defAnnotations(c.structure, annotations) ++ c.savedAnnotations.filter(annotations) + val module = isModule(c) + new Discovered(bases(c.name, c.structure.parents), onClass ++ onDefs, module && hasMainMethod(c), module) + } - def bases(own: String, c: Seq[Type]): Set[String] = - (own +: c.flatMap(simpleName)).filter(baseClasses).toSet + def bases(own: String, c: Seq[Type]): Set[String] = + (own +: c.flatMap(simpleName)).filter(baseClasses).toSet } -object Discovery -{ - def apply(subclasses: Set[String], annotations: Set[String])(definitions: Seq[Definition]): Seq[(Definition, Discovered)] = - { - val d = new Discovery(subclasses, annotations) - d(definitions) - } - def applications(definitions: Seq[Definition]): Seq[(Definition, Discovered)] = - apply(Set.empty, Set.empty)( definitions ) +object Discovery { + def apply(subclasses: Set[String], annotations: Set[String])(definitions: Seq[Definition]): Seq[(Definition, Discovered)] = + { + val d = new Discovery(subclasses, annotations) + d(definitions) + } + def applications(definitions: Seq[Definition]): Seq[(Definition, Discovered)] = + apply(Set.empty, Set.empty)(definitions) - def findAnnotations(as: Seq[Annotation], pred: String => Boolean): Set[String] = - as.flatMap { a => simpleName(a.base).filter(pred) }.toSet - def defAnnotations(s: Structure, pred: String => Boolean): Set[String] = - defAnnotations(s.declared, pred) ++ defAnnotations(s.inherited, pred) - def defAnnotations(defs: 
Seq[Definition], pred: String => Boolean): Set[String] = - findAnnotations( defs.flatMap { case d: Def if isPublic(d) => d.annotations.toSeq; case _ => Nil }, pred ) + def findAnnotations(as: Seq[Annotation], pred: String => Boolean): Set[String] = + as.flatMap { a => simpleName(a.base).filter(pred) }.toSet + def defAnnotations(s: Structure, pred: String => Boolean): Set[String] = + defAnnotations(s.declared, pred) ++ defAnnotations(s.inherited, pred) + def defAnnotations(defs: Seq[Definition], pred: String => Boolean): Set[String] = + findAnnotations(defs.flatMap { case d: Def if isPublic(d) => d.annotations.toSeq; case _ => Nil }, pred) - def isConcrete(a: Definition): Boolean = isConcrete(a.modifiers) - def isConcrete(m: Modifiers) = !m.isAbstract - def isPublic(a: Definition): Boolean = isPublic(a.access) - def isPublic(a: Access): Boolean = a.isInstanceOf[Public] - def isModule(c: ClassLike) = c.definitionType == DefinitionType.Module + def isConcrete(a: Definition): Boolean = isConcrete(a.modifiers) + def isConcrete(m: Modifiers) = !m.isAbstract + def isPublic(a: Definition): Boolean = isPublic(a.access) + def isPublic(a: Access): Boolean = a.isInstanceOf[Public] + def isModule(c: ClassLike) = c.definitionType == DefinitionType.Module - def hasMainMethod(c: ClassLike): Boolean = - hasMainMethod(c.structure.declared) || hasMainMethod(c.structure.inherited) - def hasMainMethod(defs: Seq[Definition]): Boolean = - defs.exists(isMainMethod) - def isMainMethod(d: Definition): Boolean = - d match { - case d: Def => d.name == "main" && isPublic(d) && isConcrete(d) && isUnit(d.returnType) && isStringArray(d.valueParameters) - case _ => false - } - def isStringArray(vp: IndexedSeq[ParameterList]): Boolean = vp.length == 1 && isStringArray(vp(0).parameters) - def isStringArray(params: Seq[MethodParameter]): Boolean = params.length == 1 && isStringArray(params(0)) - def isStringArray(p: MethodParameter): Boolean = (p.modifier == ParameterModifier.Plain || p.modifier == 
ParameterModifier.Repeated) && isStringArray(p.tpe) - def isStringArray(t: Type): Boolean = isParameterized(t, "scala.Array", "java.lang.String") // doesn't handle scala.this#Predef#String, should API phase dealias? + def hasMainMethod(c: ClassLike): Boolean = + hasMainMethod(c.structure.declared) || hasMainMethod(c.structure.inherited) + def hasMainMethod(defs: Seq[Definition]): Boolean = + defs.exists(isMainMethod) + def isMainMethod(d: Definition): Boolean = + d match { + case d: Def => d.name == "main" && isPublic(d) && isConcrete(d) && isUnit(d.returnType) && isStringArray(d.valueParameters) + case _ => false + } + def isStringArray(vp: IndexedSeq[ParameterList]): Boolean = vp.length == 1 && isStringArray(vp(0).parameters) + def isStringArray(params: Seq[MethodParameter]): Boolean = params.length == 1 && isStringArray(params(0)) + def isStringArray(p: MethodParameter): Boolean = (p.modifier == ParameterModifier.Plain || p.modifier == ParameterModifier.Repeated) && isStringArray(p.tpe) + def isStringArray(t: Type): Boolean = isParameterized(t, "scala.Array", "java.lang.String") // doesn't handle scala.this#Predef#String, should API phase dealias? 
- def isParameterized(t: Type, base: String, args: String*): Boolean = t match { - case p: Parameterized => - named(p.baseType, base) && p.typeArguments.length == args.length && p.typeArguments.flatMap(simpleName).sameElements(args) - case _ => false - } - def named(t: Type, nme: String) = simpleName(t) == Some(nme) + def isParameterized(t: Type, base: String, args: String*): Boolean = t match { + case p: Parameterized => + named(p.baseType, base) && p.typeArguments.length == args.length && p.typeArguments.flatMap(simpleName).sameElements(args) + case _ => false + } + def named(t: Type, nme: String) = simpleName(t) == Some(nme) - def simpleName(t: Type): Option[String] = t match { - case a: Annotated => simpleName(a.baseType) - case sing: Singleton => None - case p: Projection => - p.prefix match { - case s: Singleton => pathName(s.path, p.id) - case e: EmptyType => Some( p.id ) - case _ => None - } - case _ => None - } + def simpleName(t: Type): Option[String] = t match { + case a: Annotated => simpleName(a.baseType) + case sing: Singleton => None + case p: Projection => + p.prefix match { + case s: Singleton => pathName(s.path, p.id) + case e: EmptyType => Some(p.id) + case _ => None + } + case _ => None + } - def pathName(p: APath, id: String): Option[String] = - { - val cs = p.components - cs.last match - { - case _: This => - val ids = cs.init.collect { case i: Id => i.id } - if(ids.length == cs.length - 1) Some( (ids ++ Seq(id)).mkString(".") ) else None - case _ => None - } - } + def pathName(p: APath, id: String): Option[String] = + { + val cs = p.components + cs.last match { + case _: This => + val ids = cs.init.collect { case i: Id => i.id } + if (ids.length == cs.length - 1) Some((ids ++ Seq(id)).mkString(".")) else None + case _ => None + } + } - def isUnit(t: Type): Boolean = named(t, "scala.Unit") + def isUnit(t: Type): Boolean = named(t, "scala.Unit") } diff --git a/compile/api/src/main/scala/xsbt/api/HashAPI.scala 
b/compile/api/src/main/scala/xsbt/api/HashAPI.scala index dae3a5a00..30b6d7654 100644 --- a/compile/api/src/main/scala/xsbt/api/HashAPI.scala +++ b/compile/api/src/main/scala/xsbt/api/HashAPI.scala @@ -8,23 +8,22 @@ import xsbti.api._ import util.MurmurHash import HashAPI.Hash -object HashAPI -{ - type Hash = Int - def apply(a: SourceAPI): Hash = - (new HashAPI(false, true, true)).hashAPI(a) +object HashAPI { + type Hash = Int + def apply(a: SourceAPI): Hash = + (new HashAPI(false, true, true)).hashAPI(a) - def apply(x: Def): Hash = { - val hashApi = new HashAPI(false, true, true) - hashApi.hashDefinition(x) - hashApi.finalizeHash - } + def apply(x: Def): Hash = { + val hashApi = new HashAPI(false, true, true) + hashApi.hashDefinition(x) + hashApi.finalizeHash + } - def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Hash = { - val hashAPI = new HashAPI(false, true, false) - hashAPI.hashDefinitionsWithExtraHashes(ds) - hashAPI.finalizeHash - } + def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Hash = { + val hashAPI = new HashAPI(false, true, false) + hashAPI.hashDefinitionsWithExtraHashes(ds) + hashAPI.finalizeHash + } } /** @@ -36,357 +35,328 @@ object HashAPI * be included in a hash sum. Structure can appear as a type (in structural type) and in that case we * always include definitions in a hash sum. 
*/ -final class HashAPI(includePrivate: Boolean, includeParamNames: Boolean, includeDefinitions: Boolean) -{ - // this constructor variant is for source and binary backwards compatibility with sbt 0.13.0 - def this(includePrivate: Boolean, includeParamNames: Boolean) { - // in the old logic we used to always include definitions hence - // includeDefinitions=true - this(includePrivate, includeParamNames, includeDefinitions=true) - } +final class HashAPI(includePrivate: Boolean, includeParamNames: Boolean, includeDefinitions: Boolean) { + // this constructor variant is for source and binary backwards compatibility with sbt 0.13.0 + def this(includePrivate: Boolean, includeParamNames: Boolean) { + // in the old logic we used to always include definitions hence + // includeDefinitions=true + this(includePrivate, includeParamNames, includeDefinitions = true) + } - import scala.collection.mutable - import MurmurHash.{extendHash, nextMagicA, nextMagicB, startHash, startMagicA, startMagicB, stringHash, symmetricHash} + import scala.collection.mutable + import MurmurHash.{ extendHash, nextMagicA, nextMagicB, startHash, startMagicA, startMagicB, stringHash, symmetricHash } - private[this] val visitedStructures = visitedMap[Structure] - private[this] val visitedClassLike = visitedMap[ClassLike] - private[this] def visitedMap[T] = new mutable.HashMap[T, List[Hash]] - private[this] def visit[T](map: mutable.Map[T, List[Hash]], t: T)(hashF: T => Unit) - { - map.put(t, hash :: map.getOrElse(t,Nil)) match { - case Some(x :: _) => extend(x) - case _ => - hashF(t) - for(hs <- map(t)) - extend(hs) - map.put(t, hash :: Nil) - } - } + private[this] val visitedStructures = visitedMap[Structure] + private[this] val visitedClassLike = visitedMap[ClassLike] + private[this] def visitedMap[T] = new mutable.HashMap[T, List[Hash]] + private[this] def visit[T](map: mutable.Map[T, List[Hash]], t: T)(hashF: T => Unit) { + map.put(t, hash :: map.getOrElse(t, Nil)) match { + case Some(x :: _) => 
extend(x) + case _ => + hashF(t) + for (hs <- map(t)) + extend(hs) + map.put(t, hash :: Nil) + } + } - private[this] final val ValHash = 1 - private[this] final val VarHash = 2 - private[this] final val DefHash = 3 - private[this] final val ClassHash = 4 - private[this] final val TypeDeclHash = 5 - private[this] final val TypeAliasHash = 6 + private[this] final val ValHash = 1 + private[this] final val VarHash = 2 + private[this] final val DefHash = 3 + private[this] final val ClassHash = 4 + private[this] final val TypeDeclHash = 5 + private[this] final val TypeAliasHash = 6 - private[this] final val PublicHash = 30 - private[this] final val ProtectedHash = 31 - private[this] final val PrivateHash = 32 - private[this] final val UnqualifiedHash = 33 - private[this] final val ThisQualifierHash = 34 - private[this] final val IdQualifierHash = 35 + private[this] final val PublicHash = 30 + private[this] final val ProtectedHash = 31 + private[this] final val PrivateHash = 32 + private[this] final val UnqualifiedHash = 33 + private[this] final val ThisQualifierHash = 34 + private[this] final val IdQualifierHash = 35 - private[this] final val IdPathHash = 20 - private[this] final val SuperHash = 21 - private[this] final val ThisPathHash = 22 + private[this] final val IdPathHash = 20 + private[this] final val SuperHash = 21 + private[this] final val ThisPathHash = 22 - private[this] final val ValueParamsHash = 40 - private[this] final val ClassPendingHash = 41 - private[this] final val StructurePendingHash = 42 + private[this] final val ValueParamsHash = 40 + private[this] final val ClassPendingHash = 41 + private[this] final val StructurePendingHash = 42 - private[this] final val EmptyTypeHash = 51 - private[this] final val ParameterRefHash = 52 - private[this] final val SingletonHash = 53 - private[this] final val ProjectionHash = 54 - private[this] final val ParameterizedHash = 55 - private[this] final val AnnotatedHash = 56 - private[this] final val PolymorphicHash = 
57 - private[this] final val ConstantHash = 58 - private[this] final val ExistentialHash = 59 - private[this] final val StructureHash = 60 + private[this] final val EmptyTypeHash = 51 + private[this] final val ParameterRefHash = 52 + private[this] final val SingletonHash = 53 + private[this] final val ProjectionHash = 54 + private[this] final val ParameterizedHash = 55 + private[this] final val AnnotatedHash = 56 + private[this] final val PolymorphicHash = 57 + private[this] final val ConstantHash = 58 + private[this] final val ExistentialHash = 59 + private[this] final val StructureHash = 60 - private[this] final val TrueHash = 97 - private[this] final val FalseHash = 98 + private[this] final val TrueHash = 97 + private[this] final val FalseHash = 98 + private[this] var hash: Hash = startHash(0) + private[this] var magicA: Hash = startMagicA + private[this] var magicB: Hash = startMagicB - private[this] var hash: Hash = startHash(0) - private[this] var magicA: Hash = startMagicA - private[this] var magicB: Hash = startMagicB + @inline final def hashString(s: String): Unit = extend(stringHash(s)) + @inline final def hashBoolean(b: Boolean): Unit = extend(if (b) TrueHash else FalseHash) + @inline final def hashSeq[T](s: Seq[T], hashF: T => Unit) { + extend(s.length) + s foreach hashF + } + final def hashSymmetric[T](ts: TraversableOnce[T], hashF: T => Unit) { + val current = hash + val mA = magicA + val mB = magicB + val (hashes, mAs, mBs) = ts.toList.map { t => + hash = startHash(1) + magicA = startMagicA + magicB = startMagicB + hashF(t) + (finalizeHash, magicA, magicB) + } unzip3; + hash = current + magicA = mA + magicB = mB + extend(symmetricHash(hashes, 0xb592f7ae)) // constant from MurmurHash3 + } - @inline final def hashString(s: String): Unit = extend(stringHash(s)) - @inline final def hashBoolean(b: Boolean): Unit = extend(if(b) TrueHash else FalseHash) - @inline final def hashSeq[T](s: Seq[T], hashF: T => Unit) - { - extend(s.length) - s foreach hashF - } 
- final def hashSymmetric[T](ts: TraversableOnce[T], hashF: T => Unit) - { - val current = hash - val mA = magicA - val mB = magicB - val (hashes, mAs, mBs) = ts.toList.map { t => - hash = startHash(1) - magicA = startMagicA - magicB = startMagicB - hashF(t) - (finalizeHash, magicA, magicB) - } unzip3; - hash = current - magicA = mA - magicB = mB - extend(symmetricHash(hashes, 0xb592f7ae)) // constant from MurmurHash3 - } + @inline final def extend(a: Hash) { + hash = extendHash(hash, a, magicA, magicB) + magicA = nextMagicA(magicA) + magicB = nextMagicB(magicB) + } - @inline final def extend(a: Hash) - { - hash = extendHash(hash, a, magicA, magicB) - magicA = nextMagicA(magicA) - magicB = nextMagicB(magicB) - } + def finalizeHash: Hash = MurmurHash.finalizeHash(hash) - def finalizeHash: Hash = MurmurHash.finalizeHash(hash) + def hashModifiers(m: Modifiers) = extend(m.raw) - def hashModifiers(m: Modifiers) = extend(m.raw) + def hashAPI(s: SourceAPI): Hash = + { + hash = startHash(0) + hashSymmetric(s.packages, hashPackage) + hashDefinitions(s.definitions, true) + finalizeHash + } - def hashAPI(s: SourceAPI): Hash = - { - hash = startHash(0) - hashSymmetric(s.packages, hashPackage) - hashDefinitions(s.definitions, true) - finalizeHash - } + def hashPackage(p: Package) = hashString(p.name) - def hashPackage(p: Package) = hashString(p.name) + def hashDefinitions(ds: Seq[Definition], topLevel: Boolean): Unit = + { + val defs = SameAPI.filterDefinitions(ds, topLevel, includePrivate) + hashSymmetric(defs, hashDefinition) + } - def hashDefinitions(ds: Seq[Definition], topLevel: Boolean): Unit = - { - val defs = SameAPI.filterDefinitions(ds, topLevel, includePrivate) - hashSymmetric(defs, hashDefinition) - } + /** + * Hashes a sequence of definitions by combining each definition's own + * hash with extra one supplied as first element of a pair. + * + * It's useful when one wants to influence hash of a definition by some + * external (to definition) factor (e.g. 
location of definition). + * + * NOTE: This method doesn't perform any filtering of passed definitions. + */ + def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Unit = + { + def hashDefinitionCombined(d: Definition, extraHash: Hash): Unit = { + hashDefinition(d) + extend(extraHash) + } + hashSymmetric(ds, (hashDefinitionCombined _).tupled) + } + def hashDefinition(d: Definition) { + hashString(d.name) + hashAnnotations(d.annotations) + hashModifiers(d.modifiers) + hashAccess(d.access) + d match { + case c: ClassLike => hashClass(c) + case f: FieldLike => hashField(f) + case d: Def => hashDef(d) + case t: TypeDeclaration => hashTypeDeclaration(t) + case t: TypeAlias => hashTypeAlias(t) + } + } + final def hashClass(c: ClassLike): Unit = visit(visitedClassLike, c)(hashClass0) + def hashClass0(c: ClassLike) { + extend(ClassHash) + hashParameterizedDefinition(c) + hashType(c.selfType) + hashStructure(c.structure, includeDefinitions) + } + def hashField(f: FieldLike) { + f match { + case v: Var => extend(VarHash) + case v: Val => extend(ValHash) + } + hashType(f.tpe) + } + def hashDef(d: Def) { + extend(DefHash) + hashParameterizedDefinition(d) + hashValueParameters(d.valueParameters) + hashType(d.returnType) + } + def hashAccess(a: Access): Unit = + a match { + case pub: Public => extend(PublicHash) + case qual: Qualified => hashQualified(qual) + } + def hashQualified(qual: Qualified): Unit = + { + qual match { + case p: Protected => extend(ProtectedHash) + case p: Private => extend(PrivateHash) + } + hashQualifier(qual.qualifier) + } + def hashQualifier(qual: Qualifier): Unit = + qual match { + case _: Unqualified => extend(UnqualifiedHash) + case _: ThisQualifier => extend(ThisQualifierHash) + case id: IdQualifier => + extend(IdQualifierHash) + hashString(id.value) + } - /** - * Hashes a sequence of definitions by combining each definition's own - * hash with extra one supplied as first element of a pair. 
- * - * It's useful when one wants to influence hash of a definition by some - * external (to definition) factor (e.g. location of definition). - * - * NOTE: This method doesn't perform any filtering of passed definitions. - */ - def hashDefinitionsWithExtraHashes(ds: Seq[(Definition, Hash)]): Unit = - { - def hashDefinitionCombined(d: Definition, extraHash: Hash): Unit = { - hashDefinition(d) - extend(extraHash) - } - hashSymmetric(ds, (hashDefinitionCombined _).tupled) - } - def hashDefinition(d: Definition) - { - hashString(d.name) - hashAnnotations(d.annotations) - hashModifiers(d.modifiers) - hashAccess(d.access) - d match - { - case c: ClassLike => hashClass(c) - case f: FieldLike => hashField(f) - case d: Def => hashDef(d) - case t: TypeDeclaration => hashTypeDeclaration(t) - case t: TypeAlias => hashTypeAlias(t) - } - } - final def hashClass(c: ClassLike): Unit = visit(visitedClassLike, c)(hashClass0) - def hashClass0(c: ClassLike) - { - extend(ClassHash) - hashParameterizedDefinition(c) - hashType(c.selfType) - hashStructure(c.structure, includeDefinitions) - } - def hashField(f: FieldLike) - { - f match - { - case v: Var => extend(VarHash) - case v: Val => extend(ValHash) - } - hashType(f.tpe) - } - def hashDef(d: Def) - { - extend(DefHash) - hashParameterizedDefinition(d) - hashValueParameters(d.valueParameters) - hashType(d.returnType) - } - def hashAccess(a: Access): Unit = - a match - { - case pub: Public => extend(PublicHash) - case qual: Qualified => hashQualified(qual) - } - def hashQualified(qual: Qualified): Unit = - { - qual match - { - case p: Protected => extend(ProtectedHash) - case p: Private => extend(PrivateHash) - } - hashQualifier(qual.qualifier) - } - def hashQualifier(qual: Qualifier): Unit = - qual match - { - case _: Unqualified => extend(UnqualifiedHash) - case _: ThisQualifier => extend(ThisQualifierHash) - case id: IdQualifier => - extend(IdQualifierHash) - hashString(id.value) - } + def hashValueParameters(valueParameters: 
Seq[ParameterList]) = hashSeq(valueParameters, hashValueParameterList) + def hashValueParameterList(list: ParameterList) = + { + extend(ValueParamsHash) + hashBoolean(list.isImplicit) + hashSeq(list.parameters, hashValueParameter) + } + def hashValueParameter(parameter: MethodParameter) = + { + hashString(parameter.name) + hashType(parameter.tpe) + extend(parameter.modifier.ordinal) + hashBoolean(parameter.hasDefault) + } - def hashValueParameters(valueParameters: Seq[ParameterList]) = hashSeq(valueParameters, hashValueParameterList) - def hashValueParameterList(list: ParameterList) = - { - extend(ValueParamsHash) - hashBoolean(list.isImplicit) - hashSeq(list.parameters, hashValueParameter) - } - def hashValueParameter(parameter: MethodParameter) = - { - hashString(parameter.name) - hashType(parameter.tpe) - extend(parameter.modifier.ordinal) - hashBoolean(parameter.hasDefault) - } + def hashParameterizedDefinition[T <: ParameterizedDefinition](d: T) { + hashTypeParameters(d.typeParameters) + } + def hashTypeDeclaration(d: TypeDeclaration) { + extend(TypeDeclHash) + hashParameterizedDefinition(d) + hashType(d.lowerBound) + hashType(d.upperBound) + } + def hashTypeAlias(d: TypeAlias) { + extend(TypeAliasHash) + hashParameterizedDefinition(d) + hashType(d.tpe) + } - def hashParameterizedDefinition[T <: ParameterizedDefinition](d: T) - { - hashTypeParameters(d.typeParameters) - } - def hashTypeDeclaration(d: TypeDeclaration) - { - extend(TypeDeclHash) - hashParameterizedDefinition(d) - hashType(d.lowerBound) - hashType(d.upperBound) - } - def hashTypeAlias(d: TypeAlias) - { - extend(TypeAliasHash) - hashParameterizedDefinition(d) - hashType(d.tpe) - } + def hashTypeParameters(parameters: Seq[TypeParameter]) = hashSeq(parameters, hashTypeParameter) + def hashTypeParameter(parameter: TypeParameter) { + hashString(parameter.id) + extend(parameter.variance.ordinal) + hashTypeParameters(parameter.typeParameters) + hashType(parameter.lowerBound) + 
hashType(parameter.upperBound) + hashAnnotations(parameter.annotations) + } + def hashAnnotations(annotations: Seq[Annotation]) = hashSeq(annotations, hashAnnotation) + def hashAnnotation(annotation: Annotation) = + { + hashType(annotation.base) + hashAnnotationArguments(annotation.arguments) + } + def hashAnnotationArguments(args: Seq[AnnotationArgument]) = hashSeq(args, hashAnnotationArgument) + def hashAnnotationArgument(arg: AnnotationArgument) { + hashString(arg.name) + hashString(arg.value) + } - def hashTypeParameters(parameters: Seq[TypeParameter]) = hashSeq(parameters, hashTypeParameter) - def hashTypeParameter(parameter: TypeParameter) - { - hashString(parameter.id) - extend(parameter.variance.ordinal) - hashTypeParameters(parameter.typeParameters) - hashType(parameter.lowerBound) - hashType(parameter.upperBound) - hashAnnotations(parameter.annotations) - } - def hashAnnotations(annotations: Seq[Annotation]) = hashSeq(annotations, hashAnnotation) - def hashAnnotation(annotation: Annotation) = - { - hashType(annotation.base) - hashAnnotationArguments(annotation.arguments) - } - def hashAnnotationArguments(args: Seq[AnnotationArgument]) = hashSeq(args, hashAnnotationArgument) - def hashAnnotationArgument(arg: AnnotationArgument) - { - hashString(arg.name) - hashString(arg.value) - } + def hashTypes(ts: Seq[Type], includeDefinitions: Boolean = true) = + hashSeq(ts, (t: Type) => hashType(t, includeDefinitions)) + def hashType(t: Type, includeDefinitions: Boolean = true): Unit = + t match { + case s: Structure => hashStructure(s, includeDefinitions) + case e: Existential => hashExistential(e) + case c: Constant => hashConstant(c) + case p: Polymorphic => hashPolymorphic(p) + case a: Annotated => hashAnnotated(a) + case p: Parameterized => hashParameterized(p) + case p: Projection => hashProjection(p) + case _: EmptyType => extend(EmptyTypeHash) + case s: Singleton => hashSingleton(s) + case pr: ParameterRef => hashParameterRef(pr) + } - def hashTypes(ts: 
Seq[Type], includeDefinitions: Boolean = true) = - hashSeq(ts, (t: Type) => hashType(t, includeDefinitions)) - def hashType(t: Type, includeDefinitions: Boolean = true): Unit = - t match - { - case s: Structure => hashStructure(s, includeDefinitions) - case e: Existential => hashExistential(e) - case c: Constant => hashConstant(c) - case p: Polymorphic => hashPolymorphic(p) - case a: Annotated => hashAnnotated(a) - case p: Parameterized => hashParameterized(p) - case p: Projection => hashProjection(p) - case _: EmptyType => extend(EmptyTypeHash) - case s: Singleton => hashSingleton(s) - case pr: ParameterRef => hashParameterRef(pr) - } + def hashParameterRef(p: ParameterRef) { + extend(ParameterRefHash) + hashString(p.id) + } + def hashSingleton(s: Singleton) { + extend(SingletonHash) + hashPath(s.path) + } + def hashPath(path: Path) = hashSeq(path.components, hashPathComponent) + def hashPathComponent(pc: PathComponent) = pc match { + case _: This => extend(ThisPathHash) + case s: Super => hashSuperPath(s) + case id: Id => hashIdPath(id) + } + def hashSuperPath(s: Super) { + extend(SuperHash) + hashPath(s.qualifier) + } + def hashIdPath(id: Id) { + extend(IdPathHash) + hashString(id.id) + } - def hashParameterRef(p: ParameterRef) - { - extend(ParameterRefHash) - hashString(p.id) - } - def hashSingleton(s: Singleton) - { - extend(SingletonHash) - hashPath(s.path) - } - def hashPath(path: Path) = hashSeq(path.components, hashPathComponent) - def hashPathComponent(pc: PathComponent) = pc match - { - case _: This => extend(ThisPathHash) - case s: Super => hashSuperPath(s) - case id: Id => hashIdPath(id) - } - def hashSuperPath(s: Super) - { - extend(SuperHash) - hashPath(s.qualifier) - } - def hashIdPath(id: Id) - { - extend(IdPathHash) - hashString(id.id) - } - - def hashConstant(c: Constant) = - { - extend(ConstantHash) - hashString(c.value) - hashType(c.baseType) - } - def hashExistential(e: Existential) = - { - extend(ExistentialHash) - hashParameters(e.clause, 
e.baseType) - } - def hashPolymorphic(p: Polymorphic) = - { - extend(PolymorphicHash) - hashParameters(p.parameters, p.baseType) - } - def hashProjection(p: Projection) = - { - extend(ProjectionHash) - hashString(p.id) - hashType(p.prefix) - } - def hashParameterized(p: Parameterized) - { - extend(ParameterizedHash) - hashType(p.baseType) - hashTypes(p.typeArguments) - } - def hashAnnotated(a: Annotated) - { - extend(AnnotatedHash) - hashType(a.baseType) - hashAnnotations(a.annotations) - } - final def hashStructure(structure: Structure, includeDefinitions: Boolean) = - visit(visitedStructures, structure)(structure => hashStructure0(structure, includeDefinitions)) - def hashStructure0(structure: Structure, includeDefinitions: Boolean) - { - extend(StructureHash) - hashTypes(structure.parents, includeDefinitions) - if (includeDefinitions) { - hashDefinitions(structure.declared, false) - hashDefinitions(structure.inherited, false) - } - } - def hashParameters(parameters: Seq[TypeParameter], base: Type): Unit = - { - hashTypeParameters(parameters) - hashType(base) - } + def hashConstant(c: Constant) = + { + extend(ConstantHash) + hashString(c.value) + hashType(c.baseType) + } + def hashExistential(e: Existential) = + { + extend(ExistentialHash) + hashParameters(e.clause, e.baseType) + } + def hashPolymorphic(p: Polymorphic) = + { + extend(PolymorphicHash) + hashParameters(p.parameters, p.baseType) + } + def hashProjection(p: Projection) = + { + extend(ProjectionHash) + hashString(p.id) + hashType(p.prefix) + } + def hashParameterized(p: Parameterized) { + extend(ParameterizedHash) + hashType(p.baseType) + hashTypes(p.typeArguments) + } + def hashAnnotated(a: Annotated) { + extend(AnnotatedHash) + hashType(a.baseType) + hashAnnotations(a.annotations) + } + final def hashStructure(structure: Structure, includeDefinitions: Boolean) = + visit(visitedStructures, structure)(structure => hashStructure0(structure, includeDefinitions)) + def hashStructure0(structure: 
Structure, includeDefinitions: Boolean) { + extend(StructureHash) + hashTypes(structure.parents, includeDefinitions) + if (includeDefinitions) { + hashDefinitions(structure.declared, false) + hashDefinitions(structure.inherited, false) + } + } + def hashParameters(parameters: Seq[TypeParameter], base: Type): Unit = + { + hashTypeParameters(parameters) + hashType(base) + } } diff --git a/compile/api/src/main/scala/xsbt/api/NameHashing.scala b/compile/api/src/main/scala/xsbt/api/NameHashing.scala index 68e4933c4..60221e22f 100644 --- a/compile/api/src/main/scala/xsbt/api/NameHashing.scala +++ b/compile/api/src/main/scala/xsbt/api/NameHashing.scala @@ -18,129 +18,130 @@ import xsbti.api.DefinitionType.Trait */ class NameHashing { - import NameHashing._ + import NameHashing._ - /** - * This method takes an API representation and extracts a flat collection of all - * definitions contained in that API representation. Then it groups definition - * by a simple name. Lastly, it computes a hash sum of all definitions in a single - * group. - * - * NOTE: The hashing sum used for hashing a group of definition is insensitive - * to order of definitions. - */ - def nameHashes(source: SourceAPI): _internalOnly_NameHashes = { - val apiPublicDefs = publicDefs(source) - val (regularDefs, implicitDefs) = apiPublicDefs.partition(locDef => !locDef.definition.modifiers.isImplicit) - val regularNameHashes = nameHashesForLocatedDefinitions(regularDefs) - val implicitNameHashes = nameHashesForLocatedDefinitions(implicitDefs) - new _internalOnly_NameHashes(regularNameHashes.toArray, implicitNameHashes.toArray) - } + /** + * This method takes an API representation and extracts a flat collection of all + * definitions contained in that API representation. Then it groups definition + * by a simple name. Lastly, it computes a hash sum of all definitions in a single + * group. + * + * NOTE: The hashing sum used for hashing a group of definition is insensitive + * to order of definitions. 
+ */ + def nameHashes(source: SourceAPI): _internalOnly_NameHashes = { + val apiPublicDefs = publicDefs(source) + val (regularDefs, implicitDefs) = apiPublicDefs.partition(locDef => !locDef.definition.modifiers.isImplicit) + val regularNameHashes = nameHashesForLocatedDefinitions(regularDefs) + val implicitNameHashes = nameHashesForLocatedDefinitions(implicitDefs) + new _internalOnly_NameHashes(regularNameHashes.toArray, implicitNameHashes.toArray) + } - private def nameHashesForLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Iterable[_internalOnly_NameHash] = { - val groupedBySimpleName = locatedDefs.groupBy(locatedDef => localName(locatedDef.definition.name)) - val hashes = groupedBySimpleName.mapValues(hashLocatedDefinitions) - hashes.toIterable.map({ case (name: String, hash: Int) => new _internalOnly_NameHash(name, hash) }) - } + private def nameHashesForLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Iterable[_internalOnly_NameHash] = { + val groupedBySimpleName = locatedDefs.groupBy(locatedDef => localName(locatedDef.definition.name)) + val hashes = groupedBySimpleName.mapValues(hashLocatedDefinitions) + hashes.toIterable.map({ case (name: String, hash: Int) => new _internalOnly_NameHash(name, hash) }) + } - private def hashLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Int = { - val defsWithExtraHashes = locatedDefs.toSeq.map(ld => ld.definition -> ld.location.hashCode) - xsbt.api.HashAPI.hashDefinitionsWithExtraHashes(defsWithExtraHashes) - } + private def hashLocatedDefinitions(locatedDefs: Iterable[LocatedDefinition]): Int = { + val defsWithExtraHashes = locatedDefs.toSeq.map(ld => ld.definition -> ld.location.hashCode) + xsbt.api.HashAPI.hashDefinitionsWithExtraHashes(defsWithExtraHashes) + } - /** - * A visitor that visits given API object and extracts all nested public - * definitions it finds. The extracted definitions have Location attached - * to them which identifies API object's location. 
- * - * The returned location is basically a path to a definition that contains - * the located definition. For example, if we have: - * - * object Foo { - * class Bar { def abc: Int } - * } - * - * then location of `abc` is Seq((TermName, Foo), (TypeName, Bar)) - */ - private class ExtractPublicDefinitions extends Visit { - val locatedDefs = scala.collection.mutable.Buffer[LocatedDefinition]() - private var currentLocation: Location = Location() - override def visitAPI(s: SourceAPI): Unit = { - s.packages foreach visitPackage - s.definitions foreach { case topLevelDef: ClassLike => - val packageName = { - val fullName = topLevelDef.name() - val lastDotIndex = fullName.lastIndexOf('.') - if (lastDotIndex <= 0) "" else fullName.substring(0, lastDotIndex-1) - } - currentLocation = packageAsLocation(packageName) - visitDefinition(topLevelDef) - } - } - override def visitDefinition(d: Definition): Unit = { - val locatedDef = LocatedDefinition(currentLocation, d) - locatedDefs += locatedDef - d match { - case cl: xsbti.api.ClassLike => - val savedLocation = currentLocation - currentLocation = classLikeAsLocation(currentLocation, cl) - super.visitDefinition(d) - currentLocation = savedLocation - case _ => - super.visitDefinition(d) - } - } - } + /** + * A visitor that visits given API object and extracts all nested public + * definitions it finds. The extracted definitions have Location attached + * to them which identifies API object's location. + * + * The returned location is basically a path to a definition that contains + * the located definition. 
For example, if we have: + * + * object Foo { + * class Bar { def abc: Int } + * } + * + * then location of `abc` is Seq((TermName, Foo), (TypeName, Bar)) + */ + private class ExtractPublicDefinitions extends Visit { + val locatedDefs = scala.collection.mutable.Buffer[LocatedDefinition]() + private var currentLocation: Location = Location() + override def visitAPI(s: SourceAPI): Unit = { + s.packages foreach visitPackage + s.definitions foreach { + case topLevelDef: ClassLike => + val packageName = { + val fullName = topLevelDef.name() + val lastDotIndex = fullName.lastIndexOf('.') + if (lastDotIndex <= 0) "" else fullName.substring(0, lastDotIndex - 1) + } + currentLocation = packageAsLocation(packageName) + visitDefinition(topLevelDef) + } + } + override def visitDefinition(d: Definition): Unit = { + val locatedDef = LocatedDefinition(currentLocation, d) + locatedDefs += locatedDef + d match { + case cl: xsbti.api.ClassLike => + val savedLocation = currentLocation + currentLocation = classLikeAsLocation(currentLocation, cl) + super.visitDefinition(d) + currentLocation = savedLocation + case _ => + super.visitDefinition(d) + } + } + } - private def publicDefs(source: SourceAPI): Iterable[LocatedDefinition] = { - val visitor = new ExtractPublicDefinitions - visitor.visitAPI(source) - visitor.locatedDefs - } + private def publicDefs(source: SourceAPI): Iterable[LocatedDefinition] = { + val visitor = new ExtractPublicDefinitions + visitor.visitAPI(source) + visitor.locatedDefs + } - private def localName(name: String): String = { - // when there's no dot in name `lastIndexOf` returns -1 so we handle - // that case properly - val index = name.lastIndexOf('.') + 1 - name.substring(index) - } + private def localName(name: String): String = { + // when there's no dot in name `lastIndexOf` returns -1 so we handle + // that case properly + val index = name.lastIndexOf('.') + 1 + name.substring(index) + } - private def packageAsLocation(pkg: String): Location = if (pkg != 
"") { - val selectors = pkg.split('.').map(name => Selector(name, TermName)).toSeq - Location(selectors: _*) - } else Location.Empty + private def packageAsLocation(pkg: String): Location = if (pkg != "") { + val selectors = pkg.split('.').map(name => Selector(name, TermName)).toSeq + Location(selectors: _*) + } else Location.Empty - private def classLikeAsLocation(prefix: Location, cl: ClassLike): Location = { - val selector = { - val clNameType = NameType(cl.definitionType) - Selector(localName(cl.name), clNameType) - } - Location((prefix.selectors :+ selector): _*) - } + private def classLikeAsLocation(prefix: Location, cl: ClassLike): Location = { + val selector = { + val clNameType = NameType(cl.definitionType) + Selector(localName(cl.name), clNameType) + } + Location((prefix.selectors :+ selector): _*) + } } object NameHashing { - private case class LocatedDefinition(location: Location, definition: Definition) - /** - * Location is expressed as sequence of annotated names. The annotation denotes - * a type of a name, i.e. whether it's a term name or type name. - * - * Using Scala compiler terminology, location is defined as a sequence of member - * selections that uniquely identify a given Symbol. - */ - private case class Location(selectors: Selector*) - private object Location { - val Empty = Location(Seq.empty: _*) - } - private case class Selector(name: String, nameType: NameType) - private sealed trait NameType - private object NameType { - import DefinitionType._ - def apply(dt: DefinitionType): NameType = dt match { - case Trait | ClassDef => TypeName - case Module | PackageModule => TermName - } - } - private case object TermName extends NameType - private case object TypeName extends NameType + private case class LocatedDefinition(location: Location, definition: Definition) + /** + * Location is expressed as sequence of annotated names. The annotation denotes + * a type of a name, i.e. whether it's a term name or type name. 
+ * + * Using Scala compiler terminology, location is defined as a sequence of member + * selections that uniquely identify a given Symbol. + */ + private case class Location(selectors: Selector*) + private object Location { + val Empty = Location(Seq.empty: _*) + } + private case class Selector(name: String, nameType: NameType) + private sealed trait NameType + private object NameType { + import DefinitionType._ + def apply(dt: DefinitionType): NameType = dt match { + case Trait | ClassDef => TypeName + case Module | PackageModule => TermName + } + } + private case object TermName extends NameType + private case object TypeName extends NameType } diff --git a/compile/api/src/main/scala/xsbt/api/SameAPI.scala b/compile/api/src/main/scala/xsbt/api/SameAPI.scala index ed0aaf276..4cc36084c 100644 --- a/compile/api/src/main/scala/xsbt/api/SameAPI.scala +++ b/compile/api/src/main/scala/xsbt/api/SameAPI.scala @@ -6,397 +6,386 @@ package xsbt.api import xsbti.api._ import Function.tupled -import scala.collection.{immutable, mutable} +import scala.collection.{ immutable, mutable } @deprecated("This class is not used in incremental compiler and will be removed in next major version.", "0.13.2") -class NameChanges(val newTypes: Set[String], val removedTypes: Set[String], val newTerms: Set[String], val removedTerms: Set[String]) -{ - override def toString = - (("New types", newTypes) :: ("Removed types", removedTypes) :: ("New terms", newTerms) :: ("Removed terms", removedTerms) :: Nil).map { - case (label,set) => label + ":\n\t" + set.mkString("\n\t") - }.mkString("Name changes:\n ", "\n ", "\n") +class NameChanges(val newTypes: Set[String], val removedTypes: Set[String], val newTerms: Set[String], val removedTerms: Set[String]) { + override def toString = + (("New types", newTypes) :: ("Removed types", removedTypes) :: ("New terms", newTerms) :: ("Removed terms", removedTerms) :: Nil).map { + case (label, set) => label + ":\n\t" + set.mkString("\n\t") + }.mkString("Name 
changes:\n ", "\n ", "\n") } -object TopLevel -{ - @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2") - def nameChanges(a: Iterable[Source], b: Iterable[Source]): NameChanges = { - val api = (_: Source).api - apiNameChanges(a map api, b map api) - } - /** Identifies removed and new top-level definitions by name. */ - @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2") - def apiNameChanges(a: Iterable[SourceAPI], b: Iterable[SourceAPI]): NameChanges = - { - def changes(s: Set[String], t: Set[String]) = (s -- t, t -- s) +object TopLevel { + @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2") + def nameChanges(a: Iterable[Source], b: Iterable[Source]): NameChanges = { + val api = (_: Source).api + apiNameChanges(a map api, b map api) + } + /** Identifies removed and new top-level definitions by name. */ + @deprecated("The NameChanges class is deprecated and will be removed in next major version.", "0.13.2") + def apiNameChanges(a: Iterable[SourceAPI], b: Iterable[SourceAPI]): NameChanges = + { + def changes(s: Set[String], t: Set[String]) = (s -- t, t -- s) - val (avalues, atypes) = definitions(a) - val (bvalues, btypes) = definitions(b) + val (avalues, atypes) = definitions(a) + val (bvalues, btypes) = definitions(b) - val (newTypes, removedTypes) = changes(names(atypes), names(btypes)) - val (newTerms, removedTerms) = changes(names(avalues), names(bvalues)) + val (newTypes, removedTypes) = changes(names(atypes), names(btypes)) + val (newTerms, removedTerms) = changes(names(avalues), names(bvalues)) - new NameChanges(newTypes, removedTypes, newTerms, removedTerms) - } - def definitions(i: Iterable[SourceAPI]) = SameAPI.separateDefinitions(i.toSeq.flatMap( _.definitions )) - def names(s: Iterable[Definition]): Set[String] = Set() ++ s.map(_.name) + new NameChanges(newTypes, removedTypes, newTerms, removedTerms) + 
} + def definitions(i: Iterable[SourceAPI]) = SameAPI.separateDefinitions(i.toSeq.flatMap(_.definitions)) + def names(s: Iterable[Definition]): Set[String] = Set() ++ s.map(_.name) } /** Checks the API of two source files for equality.*/ -object SameAPI -{ - def apply(a: Source, b: Source): Boolean = - a.apiHash == b.apiHash && (a.hash.length > 0 && b.hash.length > 0) && apply(a.api, b.api) +object SameAPI { + def apply(a: Source, b: Source): Boolean = + a.apiHash == b.apiHash && (a.hash.length > 0 && b.hash.length > 0) && apply(a.api, b.api) - def apply(a: Def, b: Def): Boolean = - (new SameAPI(false, true)).sameDefinitions(List(a), List(b), true) + def apply(a: Def, b: Def): Boolean = + (new SameAPI(false, true)).sameDefinitions(List(a), List(b), true) - def apply(a: SourceAPI, b: SourceAPI): Boolean = - { - val start = System.currentTimeMillis + def apply(a: SourceAPI, b: SourceAPI): Boolean = + { + val start = System.currentTimeMillis - /*println("\n=========== API #1 ================") + /*println("\n=========== API #1 ================") import DefaultShowAPI._ println(ShowAPI.show(a)) println("\n=========== API #2 ================") println(ShowAPI.show(b))*/ - val result = (new SameAPI(false, true)).check(a,b) - val end = System.currentTimeMillis - //println(" API comparison took: " + (end - start) / 1000.0 + " s") - result - } + val result = (new SameAPI(false, true)).check(a, b) + val end = System.currentTimeMillis + //println(" API comparison took: " + (end - start) / 1000.0 + " s") + result + } - def separateDefinitions(s: Seq[Definition]): (Seq[Definition], Seq[Definition]) = - s.partition(isValueDefinition) - def isValueDefinition(d: Definition): Boolean = - d match - { - case _: FieldLike | _: Def=> true - case c: ClassLike => isValue(c.definitionType) - case _ => false - } - def isValue(d: DefinitionType): Boolean = - d == DefinitionType.Module || d == DefinitionType.PackageModule - /** Puts the given definitions in a map according to their names.*/ 
- def byName(s: Seq[Definition]): Map[String, List[Definition]] = - { - var map = Map[String, List[Definition]]() - for(d <- s; name = d.name) - map = map.updated(name, d :: map.getOrElse(name, Nil) ) - map - } + def separateDefinitions(s: Seq[Definition]): (Seq[Definition], Seq[Definition]) = + s.partition(isValueDefinition) + def isValueDefinition(d: Definition): Boolean = + d match { + case _: FieldLike | _: Def => true + case c: ClassLike => isValue(c.definitionType) + case _ => false + } + def isValue(d: DefinitionType): Boolean = + d == DefinitionType.Module || d == DefinitionType.PackageModule + /** Puts the given definitions in a map according to their names.*/ + def byName(s: Seq[Definition]): Map[String, List[Definition]] = + { + var map = Map[String, List[Definition]]() + for (d <- s; name = d.name) + map = map.updated(name, d :: map.getOrElse(name, Nil)) + map + } - /** Removes definitions that should not be considered for API equality. - * All top-level definitions are always considered: 'private' only means package-private. - * Other definitions are considered if they are not qualified with 'private[this]' or 'private'.*/ - def filterDefinitions(d: Seq[Definition], topLevel: Boolean, includePrivate: Boolean) = if(topLevel || includePrivate) d else d.filter(isNonPrivate) - def isNonPrivate(d: Definition): Boolean = isNonPrivate(d.access) - /** Returns false if the `access` is `Private` and qualified, true otherwise.*/ - def isNonPrivate(access: Access): Boolean = - access match - { - case p: Private if !p.qualifier.isInstanceOf[IdQualifier] => false - case _ => true - } + /** + * Removes definitions that should not be considered for API equality. + * All top-level definitions are always considered: 'private' only means package-private. + * Other definitions are considered if they are not qualified with 'private[this]' or 'private'. 
+ */ + def filterDefinitions(d: Seq[Definition], topLevel: Boolean, includePrivate: Boolean) = if (topLevel || includePrivate) d else d.filter(isNonPrivate) + def isNonPrivate(d: Definition): Boolean = isNonPrivate(d.access) + /** Returns false if the `access` is `Private` and qualified, true otherwise.*/ + def isNonPrivate(access: Access): Boolean = + access match { + case p: Private if !p.qualifier.isInstanceOf[IdQualifier] => false + case _ => true + } } -/** Used to implement API equality. -* -* If `includePrivate` is true, `private` and `private[this]` members are included in the comparison. Otherwise, those members are excluded. -*/ -class SameAPI(includePrivate: Boolean, includeParamNames: Boolean) -{ - import SameAPI._ +/** + * Used to implement API equality. + * + * If `includePrivate` is true, `private` and `private[this]` members are included in the comparison. Otherwise, those members are excluded. + */ +class SameAPI(includePrivate: Boolean, includeParamNames: Boolean) { + import SameAPI._ - private val pending = new mutable.HashSet[AnyRef] - private[this] val debugEnabled = java.lang.Boolean.getBoolean("xsbt.api.debug") - def debug(flag: Boolean, msg: => String): Boolean = - { - if(debugEnabled && !flag) println(msg) - flag - } + private val pending = new mutable.HashSet[AnyRef] + private[this] val debugEnabled = java.lang.Boolean.getBoolean("xsbt.api.debug") + def debug(flag: Boolean, msg: => String): Boolean = + { + if (debugEnabled && !flag) println(msg) + flag + } - /** Returns true if source `a` has the same API as source `b`.*/ - def check(a: SourceAPI, b: SourceAPI): Boolean = - { - samePackages(a, b) && - debug(sameDefinitions(a, b), "Definitions differed") - } + /** Returns true if source `a` has the same API as source `b`.*/ + def check(a: SourceAPI, b: SourceAPI): Boolean = + { + samePackages(a, b) && + debug(sameDefinitions(a, b), "Definitions differed") + } - def samePackages(a: SourceAPI, b: SourceAPI): Boolean = - 
sameStrings(packages(a), packages(b)) - def packages(s: SourceAPI): Set[String] = - Set() ++ s.packages.map(_.name) + def samePackages(a: SourceAPI, b: SourceAPI): Boolean = + sameStrings(packages(a), packages(b)) + def packages(s: SourceAPI): Set[String] = + Set() ++ s.packages.map(_.name) - def sameDefinitions(a: SourceAPI, b: SourceAPI): Boolean = - sameDefinitions(a.definitions, b.definitions, true) - def sameDefinitions(a: Seq[Definition], b: Seq[Definition], topLevel: Boolean): Boolean = - { - val (avalues, atypes) = separateDefinitions(filterDefinitions(a, topLevel, includePrivate)) - val (bvalues, btypes) = separateDefinitions(filterDefinitions(b, topLevel, includePrivate)) - debug(sameDefinitions(byName(avalues), byName(bvalues)), "Value definitions differed") && - debug(sameDefinitions(byName(atypes), byName(btypes)), "Type definitions differed") - } - def sameDefinitions(a: scala.collection.Map[String, List[Definition]], b: scala.collection.Map[String, List[Definition]]): Boolean = - debug(sameStrings(a.keySet, b.keySet), "\tDefinition strings differed (a: " + (a.keySet -- b.keySet) + ", b: " + (b.keySet -- a.keySet) + ")") && - zippedEntries(a,b).forall(tupled(sameNamedDefinitions)) + def sameDefinitions(a: SourceAPI, b: SourceAPI): Boolean = + sameDefinitions(a.definitions, b.definitions, true) + def sameDefinitions(a: Seq[Definition], b: Seq[Definition], topLevel: Boolean): Boolean = + { + val (avalues, atypes) = separateDefinitions(filterDefinitions(a, topLevel, includePrivate)) + val (bvalues, btypes) = separateDefinitions(filterDefinitions(b, topLevel, includePrivate)) + debug(sameDefinitions(byName(avalues), byName(bvalues)), "Value definitions differed") && + debug(sameDefinitions(byName(atypes), byName(btypes)), "Type definitions differed") + } + def sameDefinitions(a: scala.collection.Map[String, List[Definition]], b: scala.collection.Map[String, List[Definition]]): Boolean = + debug(sameStrings(a.keySet, b.keySet), "\tDefinition strings 
differed (a: " + (a.keySet -- b.keySet) + ", b: " + (b.keySet -- a.keySet) + ")") && + zippedEntries(a, b).forall(tupled(sameNamedDefinitions)) - /** Checks that the definitions in `a` are the same as those in `b`, ignoring order. - * Each list is assumed to have already been checked to have the same names (by `sameDefinitions`, for example).*/ - def sameNamedDefinitions(a: List[Definition], b: List[Definition]): Boolean = - { - def sameDefs(a: List[Definition], b: List[Definition]): Boolean = - { - a match - { - case adef :: atail => - def sameDef(seen: List[Definition], remaining: List[Definition]): Boolean = - remaining match - { - case Nil => debug(false, "Definition different in new API: \n" + adef.name ) - case bdef :: btail => - val eq = sameDefinitionContent(adef, bdef) - if(eq) sameDefs(atail, seen ::: btail) else sameDef(bdef :: seen, btail) - } - sameDef(Nil, b) - case Nil => true - } - } - debug((a.length == b.length), "\t\tLength differed for " + a.headOption.map(_.name).getOrElse("empty")) && sameDefs(a, b) - } + /** + * Checks that the definitions in `a` are the same as those in `b`, ignoring order. + * Each list is assumed to have already been checked to have the same names (by `sameDefinitions`, for example). 
+ */ + def sameNamedDefinitions(a: List[Definition], b: List[Definition]): Boolean = + { + def sameDefs(a: List[Definition], b: List[Definition]): Boolean = + { + a match { + case adef :: atail => + def sameDef(seen: List[Definition], remaining: List[Definition]): Boolean = + remaining match { + case Nil => debug(false, "Definition different in new API: \n" + adef.name) + case bdef :: btail => + val eq = sameDefinitionContent(adef, bdef) + if (eq) sameDefs(atail, seen ::: btail) else sameDef(bdef :: seen, btail) + } + sameDef(Nil, b) + case Nil => true + } + } + debug((a.length == b.length), "\t\tLength differed for " + a.headOption.map(_.name).getOrElse("empty")) && sameDefs(a, b) + } - /** Checks that the two definitions are the same, other than their name.*/ - def sameDefinitionContent(a: Definition, b: Definition): Boolean = - samePending(a,b)(sameDefinitionContentDirect) - def sameDefinitionContentDirect(a: Definition, b: Definition): Boolean = - { - //a.name == b.name && - debug(sameAccess(a.access, b.access), "Access differed") && - debug(sameModifiers(a.modifiers, b.modifiers), "Modifiers differed") && - debug(sameAnnotations(a.annotations, b.annotations), "Annotations differed") && - debug(sameDefinitionSpecificAPI(a, b), "Definition-specific differed") - } + /** Checks that the two definitions are the same, other than their name.*/ + def sameDefinitionContent(a: Definition, b: Definition): Boolean = + samePending(a, b)(sameDefinitionContentDirect) + def sameDefinitionContentDirect(a: Definition, b: Definition): Boolean = + { + //a.name == b.name && + debug(sameAccess(a.access, b.access), "Access differed") && + debug(sameModifiers(a.modifiers, b.modifiers), "Modifiers differed") && + debug(sameAnnotations(a.annotations, b.annotations), "Annotations differed") && + debug(sameDefinitionSpecificAPI(a, b), "Definition-specific differed") + } - def sameAccess(a: Access, b: Access): Boolean = - (a, b) match - { - case (_: Public, _: Public) => true - case (qa: 
Protected, qb: Protected) => sameQualifier(qa, qb) - case (qa: Private, qb: Private) => sameQualifier(qa, qb) - case _ => debug(false, "Different access categories") - } - def sameQualifier(a: Qualified, b: Qualified): Boolean = - sameQualifier(a.qualifier, b.qualifier) - def sameQualifier(a: Qualifier, b: Qualifier): Boolean = - (a, b) match - { - case (_: Unqualified, _: Unqualified) => true - case (_: ThisQualifier, _: ThisQualifier) => true - case (ia: IdQualifier, ib: IdQualifier) => debug(ia.value == ib.value, "Different qualifiers") - case _ => debug(false, "Different qualifier categories: " + a.getClass.getName + " -- " +b.getClass.getName) - } + def sameAccess(a: Access, b: Access): Boolean = + (a, b) match { + case (_: Public, _: Public) => true + case (qa: Protected, qb: Protected) => sameQualifier(qa, qb) + case (qa: Private, qb: Private) => sameQualifier(qa, qb) + case _ => debug(false, "Different access categories") + } + def sameQualifier(a: Qualified, b: Qualified): Boolean = + sameQualifier(a.qualifier, b.qualifier) + def sameQualifier(a: Qualifier, b: Qualifier): Boolean = + (a, b) match { + case (_: Unqualified, _: Unqualified) => true + case (_: ThisQualifier, _: ThisQualifier) => true + case (ia: IdQualifier, ib: IdQualifier) => debug(ia.value == ib.value, "Different qualifiers") + case _ => debug(false, "Different qualifier categories: " + a.getClass.getName + " -- " + b.getClass.getName) + } - def sameModifiers(a: Modifiers, b: Modifiers): Boolean = - bitSet(a) == bitSet(b) + def sameModifiers(a: Modifiers, b: Modifiers): Boolean = + bitSet(a) == bitSet(b) - def bitSet(m: Modifiers): immutable.BitSet = - { - import m._ - val bs = new mutable.BitSet - setIf(bs, isAbstract, 0) - setIf(bs, isOverride, 1) - setIf(bs, isFinal, 2) - setIf(bs, isSealed, 3) - setIf(bs, isImplicit, 4) - setIf(bs, isLazy, 5) - setIf(bs, isMacro, 6) - bs.toImmutable - } - def setIf(bs: mutable.BitSet, flag: Boolean, i: Int): Unit = - if(flag) bs += i + def bitSet(m: 
Modifiers): immutable.BitSet = + { + import m._ + val bs = new mutable.BitSet + setIf(bs, isAbstract, 0) + setIf(bs, isOverride, 1) + setIf(bs, isFinal, 2) + setIf(bs, isSealed, 3) + setIf(bs, isImplicit, 4) + setIf(bs, isLazy, 5) + setIf(bs, isMacro, 6) + bs.toImmutable + } + def setIf(bs: mutable.BitSet, flag: Boolean, i: Int): Unit = + if (flag) bs += i - def sameAnnotations(a: Seq[Annotation], b: Seq[Annotation]): Boolean = - sameSeq(a, b)(sameAnnotation) - def sameAnnotation(a: Annotation, b: Annotation): Boolean = - debug(sameType(a.base, b.base), "Annotation base type differed") && - debug(sameAnnotationArguments(a.arguments, b.arguments), "Annotation arguments differed (" + a + ") and (" + b + ")") - def sameAnnotationArguments(a: Seq[AnnotationArgument], b: Seq[AnnotationArgument]): Boolean = - argumentMap(a) == argumentMap(b) - def argumentMap(a: Seq[AnnotationArgument]): Map[String,String] = - Map() ++ a.map(arg => (arg.name, arg.value)) + def sameAnnotations(a: Seq[Annotation], b: Seq[Annotation]): Boolean = + sameSeq(a, b)(sameAnnotation) + def sameAnnotation(a: Annotation, b: Annotation): Boolean = + debug(sameType(a.base, b.base), "Annotation base type differed") && + debug(sameAnnotationArguments(a.arguments, b.arguments), "Annotation arguments differed (" + a + ") and (" + b + ")") + def sameAnnotationArguments(a: Seq[AnnotationArgument], b: Seq[AnnotationArgument]): Boolean = + argumentMap(a) == argumentMap(b) + def argumentMap(a: Seq[AnnotationArgument]): Map[String, String] = + Map() ++ a.map(arg => (arg.name, arg.value)) - def sameDefinitionSpecificAPI(a: Definition, b: Definition): Boolean = - (a, b) match - { - case (fa: FieldLike, fb: FieldLike) => sameFieldSpecificAPI(fa, fb) - case (pa: ParameterizedDefinition, pb: ParameterizedDefinition) => sameParameterizedDefinition(pa, pb) - case _ => false - } + def sameDefinitionSpecificAPI(a: Definition, b: Definition): Boolean = + (a, b) match { + case (fa: FieldLike, fb: FieldLike) => 
sameFieldSpecificAPI(fa, fb) + case (pa: ParameterizedDefinition, pb: ParameterizedDefinition) => sameParameterizedDefinition(pa, pb) + case _ => false + } - def sameParameterizedDefinition(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean = - debug(sameTypeParameters(a.typeParameters, b.typeParameters), "Different type parameters for " + a.name) && - sameParameterizedSpecificAPI(a, b) + def sameParameterizedDefinition(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean = + debug(sameTypeParameters(a.typeParameters, b.typeParameters), "Different type parameters for " + a.name) && + sameParameterizedSpecificAPI(a, b) - def sameParameterizedSpecificAPI(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean = - (a, b) match - { - case (da: Def, db: Def) => sameDefSpecificAPI(da, db) - case (ca: ClassLike, cb: ClassLike) => sameClassLikeSpecificAPI(ca, cb) - case (ta: TypeAlias, tb: TypeAlias) => sameAliasSpecificAPI(ta, tb) - case (ta: TypeDeclaration, tb: TypeDeclaration) => sameDeclarationSpecificAPI(ta, tb) - case _ => false - } + def sameParameterizedSpecificAPI(a: ParameterizedDefinition, b: ParameterizedDefinition): Boolean = + (a, b) match { + case (da: Def, db: Def) => sameDefSpecificAPI(da, db) + case (ca: ClassLike, cb: ClassLike) => sameClassLikeSpecificAPI(ca, cb) + case (ta: TypeAlias, tb: TypeAlias) => sameAliasSpecificAPI(ta, tb) + case (ta: TypeDeclaration, tb: TypeDeclaration) => sameDeclarationSpecificAPI(ta, tb) + case _ => false + } - def sameDefSpecificAPI(a: Def, b: Def): Boolean = - debug(sameValueParameters(a.valueParameters, b.valueParameters), "Different def value parameters for " + a.name) && - debug(sameType(a.returnType, b.returnType), "Different def return type for " + a.name) - def sameAliasSpecificAPI(a: TypeAlias, b: TypeAlias): Boolean = - debug(sameType(a.tpe, b.tpe), "Different alias type for " + a.name) - def sameDeclarationSpecificAPI(a: TypeDeclaration, b: TypeDeclaration): Boolean = - 
debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound for declaration " + a.name) && - debug(sameType(a.upperBound, b.upperBound), "Different upper bound for declaration " + a.name) - def sameFieldSpecificAPI(a: FieldLike, b: FieldLike): Boolean = - debug(sameFieldCategory(a, b), "Different field categories (" + a.name + "=" + a.getClass.getName + " -- " +a.name + "=" + a.getClass.getName + ")")&& - debug(sameType(a.tpe, b.tpe), "Different field type for " + a.name) + def sameDefSpecificAPI(a: Def, b: Def): Boolean = + debug(sameValueParameters(a.valueParameters, b.valueParameters), "Different def value parameters for " + a.name) && + debug(sameType(a.returnType, b.returnType), "Different def return type for " + a.name) + def sameAliasSpecificAPI(a: TypeAlias, b: TypeAlias): Boolean = + debug(sameType(a.tpe, b.tpe), "Different alias type for " + a.name) + def sameDeclarationSpecificAPI(a: TypeDeclaration, b: TypeDeclaration): Boolean = + debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound for declaration " + a.name) && + debug(sameType(a.upperBound, b.upperBound), "Different upper bound for declaration " + a.name) + def sameFieldSpecificAPI(a: FieldLike, b: FieldLike): Boolean = + debug(sameFieldCategory(a, b), "Different field categories (" + a.name + "=" + a.getClass.getName + " -- " + a.name + "=" + a.getClass.getName + ")") && + debug(sameType(a.tpe, b.tpe), "Different field type for " + a.name) - def sameFieldCategory(a: FieldLike, b: FieldLike): Boolean = - (a,b) match - { - case (_: Val, _: Val) => true - case (_: Var, _: Var) => true - case _=> false - } + def sameFieldCategory(a: FieldLike, b: FieldLike): Boolean = + (a, b) match { + case (_: Val, _: Val) => true + case (_: Var, _: Var) => true + case _ => false + } - def sameClassLikeSpecificAPI(a: ClassLike, b: ClassLike): Boolean = - sameDefinitionType(a.definitionType, b.definitionType) && - sameType(a.selfType, b.selfType) && - sameStructure(a.structure, b.structure) + def 
sameClassLikeSpecificAPI(a: ClassLike, b: ClassLike): Boolean = + sameDefinitionType(a.definitionType, b.definitionType) && + sameType(a.selfType, b.selfType) && + sameStructure(a.structure, b.structure) - def sameValueParameters(a: Seq[ParameterList], b: Seq[ParameterList]): Boolean = - sameSeq(a, b)(sameParameterList) + def sameValueParameters(a: Seq[ParameterList], b: Seq[ParameterList]): Boolean = + sameSeq(a, b)(sameParameterList) - def sameParameterList(a: ParameterList, b: ParameterList): Boolean = - (a.isImplicit == b.isImplicit) && - sameParameters(a.parameters, b.parameters) - def sameParameters(a: Seq[MethodParameter], b: Seq[MethodParameter]): Boolean = - sameSeq(a, b)(sameMethodParameter) - def sameMethodParameter(a: MethodParameter, b: MethodParameter): Boolean = - (!includeParamNames || a.name == b.name) && - sameType(a.tpe, b.tpe) && - (a.hasDefault == b.hasDefault) && - sameParameterModifier(a.modifier, b.modifier) - def sameParameterModifier(a: ParameterModifier, b: ParameterModifier) = - a == b - def sameDefinitionType(a: DefinitionType, b: DefinitionType): Boolean = - a == b - def sameVariance(a: Variance, b: Variance): Boolean = - a == b + def sameParameterList(a: ParameterList, b: ParameterList): Boolean = + (a.isImplicit == b.isImplicit) && + sameParameters(a.parameters, b.parameters) + def sameParameters(a: Seq[MethodParameter], b: Seq[MethodParameter]): Boolean = + sameSeq(a, b)(sameMethodParameter) + def sameMethodParameter(a: MethodParameter, b: MethodParameter): Boolean = + (!includeParamNames || a.name == b.name) && + sameType(a.tpe, b.tpe) && + (a.hasDefault == b.hasDefault) && + sameParameterModifier(a.modifier, b.modifier) + def sameParameterModifier(a: ParameterModifier, b: ParameterModifier) = + a == b + def sameDefinitionType(a: DefinitionType, b: DefinitionType): Boolean = + a == b + def sameVariance(a: Variance, b: Variance): Boolean = + a == b - def sameTypeParameters(a: Seq[TypeParameter], b: Seq[TypeParameter]): Boolean = - 
debug(sameSeq(a, b)(sameTypeParameter), "Different type parameters") - def sameTypeParameter(a: TypeParameter, b: TypeParameter): Boolean = - { - sameTypeParameters(a.typeParameters, b.typeParameters) && - debug(sameAnnotations(a.annotations, b.annotations), "Different type parameter annotations") && - debug(sameVariance(a.variance, b.variance), "Different variance") && - debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound") && - debug(sameType(a.upperBound, b.upperBound), "Different upper bound") && - sameTags(a.id, b.id) - } - def sameTags(a: String, b: String): Boolean = - debug(a == b, "Different type parameter bindings: " + a + ", " + b) + def sameTypeParameters(a: Seq[TypeParameter], b: Seq[TypeParameter]): Boolean = + debug(sameSeq(a, b)(sameTypeParameter), "Different type parameters") + def sameTypeParameter(a: TypeParameter, b: TypeParameter): Boolean = + { + sameTypeParameters(a.typeParameters, b.typeParameters) && + debug(sameAnnotations(a.annotations, b.annotations), "Different type parameter annotations") && + debug(sameVariance(a.variance, b.variance), "Different variance") && + debug(sameType(a.lowerBound, b.lowerBound), "Different lower bound") && + debug(sameType(a.upperBound, b.upperBound), "Different upper bound") && + sameTags(a.id, b.id) + } + def sameTags(a: String, b: String): Boolean = + debug(a == b, "Different type parameter bindings: " + a + ", " + b) - def sameType(a: Type, b: Type): Boolean = - samePending(a,b)(sameTypeDirect) - def sameTypeDirect(a: Type, b: Type): Boolean = - (a, b) match - { - case (sa: SimpleType, sb: SimpleType) => debug(sameSimpleTypeDirect(sa, sb), "Different simple types: " + DefaultShowAPI(sa) + " and " + DefaultShowAPI(sb)) - case (ca: Constant, cb: Constant) => debug(sameConstantType(ca, cb), "Different constant types: " + DefaultShowAPI(ca) + " and " + DefaultShowAPI(cb)) - case (aa: Annotated, ab: Annotated) => debug(sameAnnotatedType(aa, ab), "Different annotated types") - case (sa: 
Structure, sb: Structure) => debug(sameStructureDirect(sa, sb), "Different structure type") - case (ea: Existential, eb: Existential) => debug(sameExistentialType(ea, eb), "Different existential type") - case (pa: Polymorphic, pb: Polymorphic) => debug(samePolymorphicType(pa, pb), "Different polymorphic type") - case _ => differentCategory("type", a, b) - } + def sameType(a: Type, b: Type): Boolean = + samePending(a, b)(sameTypeDirect) + def sameTypeDirect(a: Type, b: Type): Boolean = + (a, b) match { + case (sa: SimpleType, sb: SimpleType) => debug(sameSimpleTypeDirect(sa, sb), "Different simple types: " + DefaultShowAPI(sa) + " and " + DefaultShowAPI(sb)) + case (ca: Constant, cb: Constant) => debug(sameConstantType(ca, cb), "Different constant types: " + DefaultShowAPI(ca) + " and " + DefaultShowAPI(cb)) + case (aa: Annotated, ab: Annotated) => debug(sameAnnotatedType(aa, ab), "Different annotated types") + case (sa: Structure, sb: Structure) => debug(sameStructureDirect(sa, sb), "Different structure type") + case (ea: Existential, eb: Existential) => debug(sameExistentialType(ea, eb), "Different existential type") + case (pa: Polymorphic, pb: Polymorphic) => debug(samePolymorphicType(pa, pb), "Different polymorphic type") + case _ => differentCategory("type", a, b) + } - def sameConstantType(ca: Constant, cb: Constant): Boolean = - sameType(ca.baseType, cb.baseType) && - ca.value == cb.value - def sameExistentialType(a: Existential, b: Existential): Boolean = - sameTypeParameters(a.clause, b.clause) && - sameType(a.baseType, b.baseType) - def samePolymorphicType(a: Polymorphic, b: Polymorphic): Boolean = - sameTypeParameters(a.parameters, b.parameters) && - sameType(a.baseType, b.baseType) - def sameAnnotatedType(a: Annotated, b: Annotated): Boolean = - sameType(a.baseType, b.baseType) && - sameAnnotations(a.annotations, b.annotations) - def sameStructure(a: Structure, b: Structure): Boolean = - samePending(a,b)(sameStructureDirect) + def sameConstantType(ca: 
Constant, cb: Constant): Boolean = + sameType(ca.baseType, cb.baseType) && + ca.value == cb.value + def sameExistentialType(a: Existential, b: Existential): Boolean = + sameTypeParameters(a.clause, b.clause) && + sameType(a.baseType, b.baseType) + def samePolymorphicType(a: Polymorphic, b: Polymorphic): Boolean = + sameTypeParameters(a.parameters, b.parameters) && + sameType(a.baseType, b.baseType) + def sameAnnotatedType(a: Annotated, b: Annotated): Boolean = + sameType(a.baseType, b.baseType) && + sameAnnotations(a.annotations, b.annotations) + def sameStructure(a: Structure, b: Structure): Boolean = + samePending(a, b)(sameStructureDirect) - private[this] def samePending[T](a: T, b: T)(f: (T,T) => Boolean): Boolean = - if(pending add ((a,b)) ) f(a,b) else true + private[this] def samePending[T](a: T, b: T)(f: (T, T) => Boolean): Boolean = + if (pending add ((a, b))) f(a, b) else true - def sameStructureDirect(a: Structure, b: Structure): Boolean = - { - sameSeq(a.parents, b.parents)(sameType) && - sameMembers(a.declared, b.declared) && - sameMembers(a.inherited, b.inherited) - } + def sameStructureDirect(a: Structure, b: Structure): Boolean = + { + sameSeq(a.parents, b.parents)(sameType) && + sameMembers(a.declared, b.declared) && + sameMembers(a.inherited, b.inherited) + } - def sameMembers(a: Seq[Definition], b: Seq[Definition]): Boolean = - sameDefinitions(a, b, false) + def sameMembers(a: Seq[Definition], b: Seq[Definition]): Boolean = + sameDefinitions(a, b, false) - def sameSimpleType(a: SimpleType, b: SimpleType): Boolean = - samePending(a,b)(sameSimpleTypeDirect) - def sameSimpleTypeDirect(a: SimpleType, b: SimpleType): Boolean = - (a, b) match - { - case (pa: Projection, pb: Projection) => debug(sameProjection(pa, pb), "Different projection") - case (pa: ParameterRef, pb: ParameterRef) => debug(sameParameterRef(pa, pb), "Different parameter ref") - case (sa: Singleton, sb: Singleton) => debug(sameSingleton(sa, sb), "Different singleton") - case (_: 
EmptyType, _: EmptyType) => true - case (pa: Parameterized, pb: Parameterized) => debug(sameParameterized(pa, pb), "Different parameterized") - case _ => differentCategory("simple type", a, b) - } - def differentCategory(label: String, a: AnyRef, b: AnyRef): Boolean = - debug(false, "Different category of " + label + " (" + a.getClass.getName + " and " + b.getClass.getName + ") for (" + a + " and " + b + ")") + def sameSimpleType(a: SimpleType, b: SimpleType): Boolean = + samePending(a, b)(sameSimpleTypeDirect) + def sameSimpleTypeDirect(a: SimpleType, b: SimpleType): Boolean = + (a, b) match { + case (pa: Projection, pb: Projection) => debug(sameProjection(pa, pb), "Different projection") + case (pa: ParameterRef, pb: ParameterRef) => debug(sameParameterRef(pa, pb), "Different parameter ref") + case (sa: Singleton, sb: Singleton) => debug(sameSingleton(sa, sb), "Different singleton") + case (_: EmptyType, _: EmptyType) => true + case (pa: Parameterized, pb: Parameterized) => debug(sameParameterized(pa, pb), "Different parameterized") + case _ => differentCategory("simple type", a, b) + } + def differentCategory(label: String, a: AnyRef, b: AnyRef): Boolean = + debug(false, "Different category of " + label + " (" + a.getClass.getName + " and " + b.getClass.getName + ") for (" + a + " and " + b + ")") - def sameParameterized(a: Parameterized, b: Parameterized): Boolean = - sameSimpleType(a.baseType, b.baseType) && - sameSeq(a.typeArguments, b.typeArguments)(sameType) - def sameParameterRef(a: ParameterRef, b: ParameterRef): Boolean = sameTags(a.id, b.id) - def sameSingleton(a: Singleton, b: Singleton): Boolean = - samePath(a.path, b.path) - def sameProjection(a: Projection, b: Projection): Boolean = - sameSimpleType(a.prefix, b.prefix) && - (a.id == b.id) + def sameParameterized(a: Parameterized, b: Parameterized): Boolean = + sameSimpleType(a.baseType, b.baseType) && + sameSeq(a.typeArguments, b.typeArguments)(sameType) + def sameParameterRef(a: ParameterRef, b: 
ParameterRef): Boolean = sameTags(a.id, b.id) + def sameSingleton(a: Singleton, b: Singleton): Boolean = + samePath(a.path, b.path) + def sameProjection(a: Projection, b: Projection): Boolean = + sameSimpleType(a.prefix, b.prefix) && + (a.id == b.id) - def samePath(a: Path, b: Path): Boolean = - samePathComponents(a.components, b.components) - def samePathComponents(a: Seq[PathComponent], b: Seq[PathComponent]): Boolean = - sameSeq(a, b)(samePathComponent) - def samePathComponent(a: PathComponent, b: PathComponent): Boolean = - (a, b) match - { - case (_: This, _: This) => true - case (sa: Super, sb: Super) => samePathSuper(sa, sb) - case (ia: Id, ib: Id) => samePathId(ia, ib) - case _ => false - } - def samePathSuper(a: Super, b: Super): Boolean = - samePath(a.qualifier, b.qualifier) - def samePathId(a: Id, b: Id): Boolean = - a.id == b.id + def samePath(a: Path, b: Path): Boolean = + samePathComponents(a.components, b.components) + def samePathComponents(a: Seq[PathComponent], b: Seq[PathComponent]): Boolean = + sameSeq(a, b)(samePathComponent) + def samePathComponent(a: PathComponent, b: PathComponent): Boolean = + (a, b) match { + case (_: This, _: This) => true + case (sa: Super, sb: Super) => samePathSuper(sa, sb) + case (ia: Id, ib: Id) => samePathId(ia, ib) + case _ => false + } + def samePathSuper(a: Super, b: Super): Boolean = + samePath(a.qualifier, b.qualifier) + def samePathId(a: Id, b: Id): Boolean = + a.id == b.id - // precondition: a.keySet == b.keySet - protected def zippedEntries[A,B](a: scala.collection.Map[A,B], b: scala.collection.Map[A,B]): Iterable[(B,B)] = - for( (key, avalue) <- a) yield (avalue, b(key)) + // precondition: a.keySet == b.keySet + protected def zippedEntries[A, B](a: scala.collection.Map[A, B], b: scala.collection.Map[A, B]): Iterable[(B, B)] = + for ((key, avalue) <- a) yield (avalue, b(key)) - def sameStrings(a: scala.collection.Set[String], b: scala.collection.Set[String]): Boolean = - a == b - final def sameSeq[T](a: 
Seq[T], b: Seq[T])(eq: (T,T) => Boolean): Boolean = - (a.length == b.length) && (a zip b).forall(tupled(eq)) + def sameStrings(a: scala.collection.Set[String], b: scala.collection.Set[String]): Boolean = + a == b + final def sameSeq[T](a: Seq[T], b: Seq[T])(eq: (T, T) => Boolean): Boolean = + (a.length == b.length) && (a zip b).forall(tupled(eq)) } \ No newline at end of file diff --git a/compile/api/src/main/scala/xsbt/api/ShowAPI.scala b/compile/api/src/main/scala/xsbt/api/ShowAPI.scala index 5f3133005..4412be75d 100644 --- a/compile/api/src/main/scala/xsbt/api/ShowAPI.scala +++ b/compile/api/src/main/scala/xsbt/api/ShowAPI.scala @@ -3,314 +3,286 @@ */ package xsbt.api - import xsbti.api._ +import xsbti.api._ -trait Show[A] -{ - def show(a: A): String +trait Show[A] { + def show(a: A): String } -final class ShowLazy[A](delegate: => Show[A]) extends Show[A] -{ - private lazy val s = delegate - def show(a: A) = s.show(a) +final class ShowLazy[A](delegate: => Show[A]) extends Show[A] { + private lazy val s = delegate + def show(a: A) = s.show(a) } - import ShowAPI._ +import ShowAPI._ -object ShowAPI -{ - def Show[T](implicit s: Show[T]): Show[T] = s - def show[T](t: T)(implicit s: Show[T]): String = s.show(t) - - def bounds(lower: Type, upper: Type)(implicit t: Show[Type]): String = - ">: " + t.show(lower) + " <: " + t.show(upper) +object ShowAPI { + def Show[T](implicit s: Show[T]): Show[T] = s + def show[T](t: T)(implicit s: Show[T]): String = s.show(t) - import ParameterModifier._ - def parameterModifier(base: String, pm: ParameterModifier): String = - pm match - { - case Plain => base - case Repeated => base + "*" - case ByName => "=> " + base - } - - def concat[A](list: Seq[A], as: Show[A], sep: String): String = mapSeq(list, as).mkString(sep) - def commas[A](list: Seq[A], as: Show[A]): String = concat(list, as, ", ") - def spaced[A](list: Seq[A], as: Show[A]): String = concat(list, as, " ") - def lines[A](list: Seq[A], as: Show[A]): String = mapSeq(list, 
as).mkString("\n") - def mapSeq[A](list: Seq[A], as: Show[A]): Seq[String] = list.map(as.show) + def bounds(lower: Type, upper: Type)(implicit t: Show[Type]): String = + ">: " + t.show(lower) + " <: " + t.show(upper) + + import ParameterModifier._ + def parameterModifier(base: String, pm: ParameterModifier): String = + pm match { + case Plain => base + case Repeated => base + "*" + case ByName => "=> " + base + } + + def concat[A](list: Seq[A], as: Show[A], sep: String): String = mapSeq(list, as).mkString(sep) + def commas[A](list: Seq[A], as: Show[A]): String = concat(list, as, ", ") + def spaced[A](list: Seq[A], as: Show[A]): String = concat(list, as, " ") + def lines[A](list: Seq[A], as: Show[A]): String = mapSeq(list, as).mkString("\n") + def mapSeq[A](list: Seq[A], as: Show[A]): Seq[String] = list.map(as.show) } -trait ShowBase -{ - implicit def showAnnotation(implicit as: Show[AnnotationArgument], t: Show[Type]): Show[Annotation] = - new Show[Annotation] { def show(a: Annotation) = "@" + t.show(a.base) + (if(a.arguments.isEmpty) "" else "(" + commas(a.arguments, as) + ")") } - - implicit def showAnnotationArgument: Show[AnnotationArgument] = - new Show[AnnotationArgument] { def show(a: AnnotationArgument) = a.name + " = " + a.value } - - import Variance._ - implicit def showVariance: Show[Variance] = - new Show[Variance] { def show(v: Variance) = v match { case Invariant => ""; case Covariant => "+"; case Contravariant => "-" } } - - implicit def showSource(implicit ps: Show[Package], ds: Show[Definition]): Show[SourceAPI] = - new Show[SourceAPI] { def show(a: SourceAPI) = lines(a.packages, ps) + "\n" + lines(a.definitions, ds) } +trait ShowBase { + implicit def showAnnotation(implicit as: Show[AnnotationArgument], t: Show[Type]): Show[Annotation] = + new Show[Annotation] { def show(a: Annotation) = "@" + t.show(a.base) + (if (a.arguments.isEmpty) "" else "(" + commas(a.arguments, as) + ")") } - implicit def showPackage: Show[Package] = - new Show[Package] { 
def show(pkg: Package) = "package " + pkg.name } + implicit def showAnnotationArgument: Show[AnnotationArgument] = + new Show[AnnotationArgument] { def show(a: AnnotationArgument) = a.name + " = " + a.value } - implicit def showAccess(implicit sq: Show[Qualified]): Show[Access] = - new Show[Access] - { - def show(a: Access) = - a match - { - case p: Public => "" - case q: Qualified => sq.show(q) - } - } - implicit def showQualified(implicit sq: Show[Qualifier]): Show[Qualified] = - new Show[Qualified] - { - def show(q: Qualified) = - ((q match - { - case p: Protected => "protected" - case p: Private => "private" - }) - + sq.show(q.qualifier) ) - } - implicit def showQualifier: Show[Qualifier] = - new Show[Qualifier] - { - def show(q: Qualifier) = - q match - { - case _: Unqualified => "" - case _: ThisQualifier => "[this]" - case i: IdQualifier => "[" + i.value + "]" - } - } - implicit def showModifiers: Show[Modifiers] = - new Show[Modifiers] - { - def show(m: Modifiers) = - { - val mods = - (m.isOverride, "override") :: - (m.isFinal, "final") :: - (m.isSealed, "sealed") :: - (m.isImplicit, "implicit") :: - (m.isAbstract, "abstract") :: - (m.isLazy, "lazy") :: - Nil - mods.filter(_._1).map(_._2).mkString(" ") - } - } - - implicit def showDefinitionType: Show[DefinitionType] = - new Show[DefinitionType] { - import DefinitionType._ - def show(dt: DefinitionType) = - dt match - { - case Trait => "trait" - case ClassDef => "class" - case Module => "object" - case PackageModule => "package object" - } - } -} -trait ShowDefinitions -{ - implicit def showVal(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Val] = - new Show[Val] { def show(v: Val) = definitionBase(v, "val")(acs, ms, ans) + ": " + t.show(v.tpe) } - - implicit def showVar(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Var] = - new Show[Var] { def show(v: Var) = definitionBase(v, "var")(acs, ms, ans) + ": " + 
t.show(v.tpe) } - - implicit def showDef(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], vp: Show[Seq[ParameterList]], t: Show[Type]): Show[Def] = - new Show[Def] { def show(d: Def) = parameterizedDef(d, "def")(acs, ms, ans, tp) + vp.show(d.valueParameters) + ": " + t.show(d.returnType) } - - implicit def showClassLike(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType], s: Show[Structure], t: Show[Type]): Show[ClassLike] = - new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) + " requires " + t.show(cl.selfType) + " extends " + s.show(cl.structure) } - - implicit def showTypeAlias(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeAlias] = - new Show[TypeAlias] { def show(ta: TypeAlias) = parameterizedDef(ta, "type")(acs, ms, ans, tp) + " = " + t.show(ta.tpe) } - - implicit def showTypeDeclaration(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeDeclaration] = - new Show[TypeDeclaration] { def show(td: TypeDeclaration) = parameterizedDef(td, "type")(acs, ms, ans, tp) + bounds(td.lowerBound, td.upperBound) } - def showClassLikeSimple(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType]): Show[ClassLike] = - new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) } + import Variance._ + implicit def showVariance: Show[Variance] = + new Show[Variance] { def show(v: Variance) = v match { case Invariant => ""; case Covariant => "+"; case Contravariant => "-" } } - def parameterizedDef(d: ParameterizedDefinition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], 
tp: Show[Seq[TypeParameter]]): String = - definitionBase(d, label)(acs, ms, ans) + tp.show(d.typeParameters) - def definitionBase(d: Definition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation]): String = - space(spaced(d.annotations, ans)) + space(acs.show(d.access)) + space(ms.show(d.modifiers)) + space(label) + d.name - def space(s: String) = if(s.isEmpty) s else s + " " + implicit def showSource(implicit ps: Show[Package], ds: Show[Definition]): Show[SourceAPI] = + new Show[SourceAPI] { def show(a: SourceAPI) = lines(a.packages, ps) + "\n" + lines(a.definitions, ds) } + + implicit def showPackage: Show[Package] = + new Show[Package] { def show(pkg: Package) = "package " + pkg.name } + + implicit def showAccess(implicit sq: Show[Qualified]): Show[Access] = + new Show[Access] { + def show(a: Access) = + a match { + case p: Public => "" + case q: Qualified => sq.show(q) + } + } + implicit def showQualified(implicit sq: Show[Qualifier]): Show[Qualified] = + new Show[Qualified] { + def show(q: Qualified) = + ((q match { + case p: Protected => "protected" + case p: Private => "private" + }) + + sq.show(q.qualifier)) + } + implicit def showQualifier: Show[Qualifier] = + new Show[Qualifier] { + def show(q: Qualifier) = + q match { + case _: Unqualified => "" + case _: ThisQualifier => "[this]" + case i: IdQualifier => "[" + i.value + "]" + } + } + implicit def showModifiers: Show[Modifiers] = + new Show[Modifiers] { + def show(m: Modifiers) = + { + val mods = + (m.isOverride, "override") :: + (m.isFinal, "final") :: + (m.isSealed, "sealed") :: + (m.isImplicit, "implicit") :: + (m.isAbstract, "abstract") :: + (m.isLazy, "lazy") :: + Nil + mods.filter(_._1).map(_._2).mkString(" ") + } + } + + implicit def showDefinitionType: Show[DefinitionType] = + new Show[DefinitionType] { + import DefinitionType._ + def show(dt: DefinitionType) = + dt match { + case Trait => "trait" + case ClassDef => "class" + case Module => "object" + case 
PackageModule => "package object" + } + } } -trait ShowDefinition -{ - implicit def showDefinition(implicit vl: Show[Val], vr: Show[Var], ds: Show[Def], cl: Show[ClassLike], ta: Show[TypeAlias], td: Show[TypeDeclaration]): Show[Definition] = - new Show[Definition] - { - def show(d: Definition) = - d match - { - case v: Val => vl.show(v) - case v: Var => vr.show(v) - case d: Def => ds.show(d) - case c: ClassLike => cl.show(c) - case t: TypeAlias => ta.show(t) - case t: TypeDeclaration => td.show(t) - } - } +trait ShowDefinitions { + implicit def showVal(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Val] = + new Show[Val] { def show(v: Val) = definitionBase(v, "val")(acs, ms, ans) + ": " + t.show(v.tpe) } + + implicit def showVar(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], t: Show[Type]): Show[Var] = + new Show[Var] { def show(v: Var) = definitionBase(v, "var")(acs, ms, ans) + ": " + t.show(v.tpe) } + + implicit def showDef(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], vp: Show[Seq[ParameterList]], t: Show[Type]): Show[Def] = + new Show[Def] { def show(d: Def) = parameterizedDef(d, "def")(acs, ms, ans, tp) + vp.show(d.valueParameters) + ": " + t.show(d.returnType) } + + implicit def showClassLike(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType], s: Show[Structure], t: Show[Type]): Show[ClassLike] = + new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) + " requires " + t.show(cl.selfType) + " extends " + s.show(cl.structure) } + + implicit def showTypeAlias(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeAlias] = + new Show[TypeAlias] { def show(ta: TypeAlias) = parameterizedDef(ta, "type")(acs, ms, ans, tp) + " = " + 
t.show(ta.tpe) } + + implicit def showTypeDeclaration(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type]): Show[TypeDeclaration] = + new Show[TypeDeclaration] { def show(td: TypeDeclaration) = parameterizedDef(td, "type")(acs, ms, ans, tp) + bounds(td.lowerBound, td.upperBound) } + def showClassLikeSimple(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]], dt: Show[DefinitionType]): Show[ClassLike] = + new Show[ClassLike] { def show(cl: ClassLike) = parameterizedDef(cl, dt.show(cl.definitionType))(acs, ms, ans, tp) } + + def parameterizedDef(d: ParameterizedDefinition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation], tp: Show[Seq[TypeParameter]]): String = + definitionBase(d, label)(acs, ms, ans) + tp.show(d.typeParameters) + def definitionBase(d: Definition, label: String)(implicit acs: Show[Access], ms: Show[Modifiers], ans: Show[Annotation]): String = + space(spaced(d.annotations, ans)) + space(acs.show(d.access)) + space(ms.show(d.modifiers)) + space(label) + d.name + def space(s: String) = if (s.isEmpty) s else s + " " } -trait ShowType -{ - implicit def showType(implicit s: Show[SimpleType], a: Show[Annotated], st: Show[Structure], c: Show[Constant], e: Show[Existential], po: Show[Polymorphic]): Show[Type] = - new Show[Type] - { - def show(t: Type) = - t match - { - case q: SimpleType => s.show(q) - case q: Constant => c.show(q) - case q: Annotated => a.show(q) - case q: Structure => st.show(q) - case q: Existential => e.show(q) - case q: Polymorphic => po.show(q) - } - } - - implicit def showSimpleType(implicit pr: Show[Projection], pa: Show[ParameterRef], si: Show[Singleton], et: Show[EmptyType], p: Show[Parameterized]): Show[SimpleType] = - new Show[SimpleType] { - def show(t: SimpleType) = - t match - { - case q: Projection => pr.show(q) - case q: ParameterRef => pa.show(q) - case q: Singleton => 
si.show(q) - case q: EmptyType => et.show(q) - case q: Parameterized => p.show(q) - } - } +trait ShowDefinition { + implicit def showDefinition(implicit vl: Show[Val], vr: Show[Var], ds: Show[Def], cl: Show[ClassLike], ta: Show[TypeAlias], td: Show[TypeDeclaration]): Show[Definition] = + new Show[Definition] { + def show(d: Definition) = + d match { + case v: Val => vl.show(v) + case v: Var => vr.show(v) + case d: Def => ds.show(d) + case c: ClassLike => cl.show(c) + case t: TypeAlias => ta.show(t) + case t: TypeDeclaration => td.show(t) + } + } } -trait ShowBasicTypes -{ - implicit def showSingleton(implicit p: Show[Path]): Show[Singleton] = - new Show[Singleton] { def show(s: Singleton) = p.show(s.path) } - implicit def showEmptyType: Show[EmptyType] = - new Show[EmptyType] { def show(e: EmptyType) = "" } - implicit def showParameterRef: Show[ParameterRef] = - new Show[ParameterRef] { def show(p: ParameterRef) = "<" + p.id + ">" } +trait ShowType { + implicit def showType(implicit s: Show[SimpleType], a: Show[Annotated], st: Show[Structure], c: Show[Constant], e: Show[Existential], po: Show[Polymorphic]): Show[Type] = + new Show[Type] { + def show(t: Type) = + t match { + case q: SimpleType => s.show(q) + case q: Constant => c.show(q) + case q: Annotated => a.show(q) + case q: Structure => st.show(q) + case q: Existential => e.show(q) + case q: Polymorphic => po.show(q) + } + } + + implicit def showSimpleType(implicit pr: Show[Projection], pa: Show[ParameterRef], si: Show[Singleton], et: Show[EmptyType], p: Show[Parameterized]): Show[SimpleType] = + new Show[SimpleType] { + def show(t: SimpleType) = + t match { + case q: Projection => pr.show(q) + case q: ParameterRef => pa.show(q) + case q: Singleton => si.show(q) + case q: EmptyType => et.show(q) + case q: Parameterized => p.show(q) + } + } } -trait ShowTypes -{ - implicit def showStructure(implicit t: Show[Type], d: Show[Definition]): Show[Structure] = - new Show[Structure] { - def show(s: Structure) = { - // 
don't show inherited class like definitions to avoid dealing with cycles - val safeInherited = s.inherited.filterNot(_.isInstanceOf[ClassLike]) - val showInherited: Show[Definition] = new Show[Definition] { - def show(deff: Definition): String = "^inherited^ " + d.show(deff) - } - concat(s.parents, t, " with ") + "\n{\n" + lines(safeInherited, showInherited) + "\n" + lines(s.declared, d) + "\n}" - } - } - implicit def showAnnotated(implicit as: Show[Annotation], t: Show[Type]): Show[Annotated] = - new Show[Annotated] { def show(a: Annotated) = spaced(a.annotations, as) + " " + t.show(a.baseType) } - implicit def showProjection(implicit t: Show[SimpleType]): Show[Projection] = - new Show[Projection] { def show(p: Projection) = t.show(p.prefix) + "#" + p.id } - implicit def showParameterized(implicit t: Show[Type]): Show[Parameterized] = - new Show[Parameterized] { def show(p: Parameterized) = t.show(p.baseType) + mapSeq(p.typeArguments, t).mkString("[", ", ", "]") } - implicit def showConstant(implicit t: Show[Type]): Show[Constant] = - new Show[Constant] { def show(c: Constant) = t.show(c.baseType) + "(" + c.value + ")" } - implicit def showExistential(implicit t: Show[Type], tp: Show[TypeParameter]): Show[Existential] = - new Show[Existential] { - def show(e: Existential) = - t.show(e.baseType) + e.clause.map(t => "type " + tp.show(t)).mkString(" forSome { ", "; ", "}") - } - implicit def showPolymorphic(implicit t: Show[Type], tps: Show[Seq[TypeParameter]]): Show[Polymorphic] = - new Show[Polymorphic] { def show(p: Polymorphic) = t.show(p.baseType) + tps.show(p.parameters) } - +trait ShowBasicTypes { + implicit def showSingleton(implicit p: Show[Path]): Show[Singleton] = + new Show[Singleton] { def show(s: Singleton) = p.show(s.path) } + implicit def showEmptyType: Show[EmptyType] = + new Show[EmptyType] { def show(e: EmptyType) = "" } + implicit def showParameterRef: Show[ParameterRef] = + new Show[ParameterRef] { def show(p: ParameterRef) = "<" + p.id + ">" } 
+} +trait ShowTypes { + implicit def showStructure(implicit t: Show[Type], d: Show[Definition]): Show[Structure] = + new Show[Structure] { + def show(s: Structure) = { + // don't show inherited class like definitions to avoid dealing with cycles + val safeInherited = s.inherited.filterNot(_.isInstanceOf[ClassLike]) + val showInherited: Show[Definition] = new Show[Definition] { + def show(deff: Definition): String = "^inherited^ " + d.show(deff) + } + concat(s.parents, t, " with ") + "\n{\n" + lines(safeInherited, showInherited) + "\n" + lines(s.declared, d) + "\n}" + } + } + implicit def showAnnotated(implicit as: Show[Annotation], t: Show[Type]): Show[Annotated] = + new Show[Annotated] { def show(a: Annotated) = spaced(a.annotations, as) + " " + t.show(a.baseType) } + implicit def showProjection(implicit t: Show[SimpleType]): Show[Projection] = + new Show[Projection] { def show(p: Projection) = t.show(p.prefix) + "#" + p.id } + implicit def showParameterized(implicit t: Show[Type]): Show[Parameterized] = + new Show[Parameterized] { def show(p: Parameterized) = t.show(p.baseType) + mapSeq(p.typeArguments, t).mkString("[", ", ", "]") } + implicit def showConstant(implicit t: Show[Type]): Show[Constant] = + new Show[Constant] { def show(c: Constant) = t.show(c.baseType) + "(" + c.value + ")" } + implicit def showExistential(implicit t: Show[Type], tp: Show[TypeParameter]): Show[Existential] = + new Show[Existential] { + def show(e: Existential) = + t.show(e.baseType) + e.clause.map(t => "type " + tp.show(t)).mkString(" forSome { ", "; ", "}") + } + implicit def showPolymorphic(implicit t: Show[Type], tps: Show[Seq[TypeParameter]]): Show[Polymorphic] = + new Show[Polymorphic] { def show(p: Polymorphic) = t.show(p.baseType) + tps.show(p.parameters) } + } -trait ShowPath -{ - implicit def showPath(implicit pc: Show[PathComponent]): Show[Path] = - new Show[Path] { def show(p: Path) = mapSeq(p.components, pc).mkString(".") } - - implicit def showPathComponent(implicit sp: 
Show[Path]): Show[PathComponent] = - new Show[PathComponent] { - def show(p: PathComponent) = - p match - { - case s: Super => "super[" + sp.show(s.qualifier) + "]" - case _: This => "this" - case i: Id => i.id - } - } +trait ShowPath { + implicit def showPath(implicit pc: Show[PathComponent]): Show[Path] = + new Show[Path] { def show(p: Path) = mapSeq(p.components, pc).mkString(".") } + + implicit def showPathComponent(implicit sp: Show[Path]): Show[PathComponent] = + new Show[PathComponent] { + def show(p: PathComponent) = + p match { + case s: Super => "super[" + sp.show(s.qualifier) + "]" + case _: This => "this" + case i: Id => i.id + } + } } -trait ShowValueParameters -{ - implicit def showParameterLists(implicit pl: Show[ParameterList]): Show[Seq[ParameterList]] = - new Show[Seq[ParameterList]] { def show(p: Seq[ParameterList]) = concat(p,pl, "") } - implicit def showParameterList(implicit mp: Show[MethodParameter]): Show[ParameterList] = - new Show[ParameterList] { def show(pl: ParameterList) = "(" + (if(pl.isImplicit) "implicit " else "") + commas(pl.parameters, mp) + ")" } - - implicit def showMethodParameter(implicit t: Show[Type]): Show[MethodParameter] = - new Show[MethodParameter] { - def show(mp: MethodParameter) = - mp.name + ": " + parameterModifier(t.show(mp.tpe), mp.modifier) + (if(mp.hasDefault) "= ..." 
else "") - } +trait ShowValueParameters { + implicit def showParameterLists(implicit pl: Show[ParameterList]): Show[Seq[ParameterList]] = + new Show[Seq[ParameterList]] { def show(p: Seq[ParameterList]) = concat(p, pl, "") } + implicit def showParameterList(implicit mp: Show[MethodParameter]): Show[ParameterList] = + new Show[ParameterList] { def show(pl: ParameterList) = "(" + (if (pl.isImplicit) "implicit " else "") + commas(pl.parameters, mp) + ")" } + + implicit def showMethodParameter(implicit t: Show[Type]): Show[MethodParameter] = + new Show[MethodParameter] { + def show(mp: MethodParameter) = + mp.name + ": " + parameterModifier(t.show(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "") + } } -trait ShowTypeParameters -{ - implicit def showTypeParameters(implicit as: Show[TypeParameter]): Show[Seq[TypeParameter]] = - new Show[Seq[TypeParameter]] { def show(tps: Seq[TypeParameter]) = if(tps.isEmpty) "" else mapSeq(tps, as).mkString("[", ",", "]") } - implicit def showTypeParameter(implicit as: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type], v: Show[Variance]): Show[TypeParameter] = - new Show[TypeParameter] { - def show(tps: TypeParameter) = - spaced(tps.annotations, as) + " " + v.show(tps.variance) + tps.id + tp.show(tps.typeParameters) + " " + bounds(tps.lowerBound, tps.upperBound) - } +trait ShowTypeParameters { + implicit def showTypeParameters(implicit as: Show[TypeParameter]): Show[Seq[TypeParameter]] = + new Show[Seq[TypeParameter]] { def show(tps: Seq[TypeParameter]) = if (tps.isEmpty) "" else mapSeq(tps, as).mkString("[", ",", "]") } + implicit def showTypeParameter(implicit as: Show[Annotation], tp: Show[Seq[TypeParameter]], t: Show[Type], v: Show[Variance]): Show[TypeParameter] = + new Show[TypeParameter] { + def show(tps: TypeParameter) = + spaced(tps.annotations, as) + " " + v.show(tps.variance) + tps.id + tp.show(tps.typeParameters) + " " + bounds(tps.lowerBound, tps.upperBound) + } } // this class is a hack to resolve 
some diverging implicit errors. // I'm pretty sure the cause is the Show[Seq[T]] dominating Show[X] issue. // It could probably be reduced a bit if that is the case (below was trial and error) -object DefaultShowAPI extends ShowBase with ShowBasicTypes with ShowValueParameters -{ - def apply(d: Definition) = ShowAPI.show(d) - def apply(d: Type) = ShowAPI.show(d) +object DefaultShowAPI extends ShowBase with ShowBasicTypes with ShowValueParameters { + def apply(d: Definition) = ShowAPI.show(d) + def apply(d: Type) = ShowAPI.show(d) - implicit lazy val showVal: Show[Val] = Cyclic.showVal - implicit lazy val showVar: Show[Var] = Cyclic.showVar - implicit lazy val showClassLike: Show[ClassLike] = Cyclic.showClassLike - implicit lazy val showTypeDeclaration: Show[TypeDeclaration] = Cyclic.showTypeDeclaration - implicit lazy val showTypeAlias: Show[TypeAlias] = Cyclic.showTypeAlias - implicit lazy val showDef: Show[Def] = Cyclic.showDef - - implicit lazy val showProj: Show[Projection] = Cyclic.showProjection - implicit lazy val showPoly: Show[Polymorphic] = Cyclic.showPolymorphic - - implicit lazy val showSimple: Show[SimpleType] = new ShowLazy(Cyclic.showSimpleType) - implicit lazy val showAnnotated: Show[Annotated] = Cyclic.showAnnotated - implicit lazy val showExistential: Show[Existential] = Cyclic.showExistential - implicit lazy val showConstant: Show[Constant] = Cyclic.showConstant - implicit lazy val showParameterized: Show[Parameterized] = Cyclic.showParameterized - - implicit lazy val showTypeParameters: Show[Seq[TypeParameter]] = new ShowLazy(Cyclic.showTypeParameters) - implicit lazy val showTypeParameter: Show[TypeParameter] = Cyclic.showTypeParameter - - implicit lazy val showDefinition: Show[Definition] = new ShowLazy(Cyclic.showDefinition) - implicit lazy val showType: Show[Type] = new ShowLazy(Cyclic.showType) - implicit lazy val showStructure: Show[Structure] = new ShowLazy(Cyclic.showStructure) - - implicit lazy val showPath: Show[Path] = new 
ShowLazy(Cyclic.showPath) - implicit lazy val showPathComponent: Show[PathComponent] = Cyclic.showPathComponent + implicit lazy val showVal: Show[Val] = Cyclic.showVal + implicit lazy val showVar: Show[Var] = Cyclic.showVar + implicit lazy val showClassLike: Show[ClassLike] = Cyclic.showClassLike + implicit lazy val showTypeDeclaration: Show[TypeDeclaration] = Cyclic.showTypeDeclaration + implicit lazy val showTypeAlias: Show[TypeAlias] = Cyclic.showTypeAlias + implicit lazy val showDef: Show[Def] = Cyclic.showDef - private object Cyclic extends ShowTypes with ShowType with ShowPath with ShowDefinition with ShowDefinitions with ShowTypeParameters + implicit lazy val showProj: Show[Projection] = Cyclic.showProjection + implicit lazy val showPoly: Show[Polymorphic] = Cyclic.showPolymorphic + + implicit lazy val showSimple: Show[SimpleType] = new ShowLazy(Cyclic.showSimpleType) + implicit lazy val showAnnotated: Show[Annotated] = Cyclic.showAnnotated + implicit lazy val showExistential: Show[Existential] = Cyclic.showExistential + implicit lazy val showConstant: Show[Constant] = Cyclic.showConstant + implicit lazy val showParameterized: Show[Parameterized] = Cyclic.showParameterized + + implicit lazy val showTypeParameters: Show[Seq[TypeParameter]] = new ShowLazy(Cyclic.showTypeParameters) + implicit lazy val showTypeParameter: Show[TypeParameter] = Cyclic.showTypeParameter + + implicit lazy val showDefinition: Show[Definition] = new ShowLazy(Cyclic.showDefinition) + implicit lazy val showType: Show[Type] = new ShowLazy(Cyclic.showType) + implicit lazy val showStructure: Show[Structure] = new ShowLazy(Cyclic.showStructure) + + implicit lazy val showPath: Show[Path] = new ShowLazy(Cyclic.showPath) + implicit lazy val showPathComponent: Show[PathComponent] = Cyclic.showPathComponent + + private object Cyclic extends ShowTypes with ShowType with ShowPath with ShowDefinition with ShowDefinitions with ShowTypeParameters } \ No newline at end of file diff --git 
a/compile/api/src/main/scala/xsbt/api/Visit.scala b/compile/api/src/main/scala/xsbt/api/Visit.scala index 85dd46db1..9cbb44ea0 100644 --- a/compile/api/src/main/scala/xsbt/api/Visit.scala +++ b/compile/api/src/main/scala/xsbt/api/Visit.scala @@ -3,207 +3,183 @@ */ package xsbt.api - import xsbti.api._ - import scala.collection.mutable +import xsbti.api._ +import scala.collection.mutable -class Visit -{ - private[this] val visitedStructures = new mutable.HashSet[Structure] - private[this] val visitedClassLike = new mutable.HashSet[ClassLike] +class Visit { + private[this] val visitedStructures = new mutable.HashSet[Structure] + private[this] val visitedClassLike = new mutable.HashSet[ClassLike] - def visit(s: Source): Unit = visitAPI(s.api) - def visitAPI(s: SourceAPI): Unit = - { - s.packages foreach visitPackage - s.definitions foreach visitDefinition - } + def visit(s: Source): Unit = visitAPI(s.api) + def visitAPI(s: SourceAPI): Unit = + { + s.packages foreach visitPackage + s.definitions foreach visitDefinition + } - def visitPackage(p: Package) - { - visitString(p.name) - } + def visitPackage(p: Package) { + visitString(p.name) + } - def visitDefinitions(ds: Seq[Definition]) = ds foreach visitDefinition - def visitDefinition(d: Definition) - { - visitString(d.name) - visitAnnotations(d.annotations) - visitModifiers(d.modifiers) - visitAccess(d.access) - d match - { - case c: ClassLike => visitClass(c) - case f: FieldLike => visitField(f) - case d: Def => visitDef(d) - case t: TypeDeclaration => visitTypeDeclaration(t) - case t: TypeAlias => visitTypeAlias(t) - } - } - final def visitClass(c: ClassLike): Unit = if(visitedClassLike add c) visitClass0(c) - def visitClass0(c: ClassLike) - { - visitParameterizedDefinition(c) - visitType(c.selfType) - visitStructure(c.structure) - } - def visitField(f: FieldLike) - { - visitType(f.tpe) - f match - { - case v: Var => visitVar(v) - case v: Val => visitVal(v) - } - } - def visitVar(v: Var) {} - def visitVal(v: Val) {} 
- def visitDef(d: Def) - { - visitParameterizedDefinition(d) - visitValueParameters(d.valueParameters) - visitType(d.returnType) - } - def visitAccess(a: Access): Unit = - a match - { - case pub: Public => visitPublic(pub) - case qual: Qualified => visitQualified(qual) - } - def visitQualified(qual: Qualified): Unit = - qual match - { - case p: Protected => visitProtected(p) - case p: Private => visitPrivate(p) - } - def visitQualifier(qual: Qualifier): Unit = - qual match - { - case unq: Unqualified => visitUnqualified(unq) - case thisq: ThisQualifier => visitThisQualifier(thisq) - case id: IdQualifier => visitIdQualifier(id) - } - def visitIdQualifier(id: IdQualifier) - { - visitString(id.value) - } - def visitUnqualified(unq: Unqualified) {} - def visitThisQualifier(thisq: ThisQualifier) {} - def visitPublic(pub: Public) {} - def visitPrivate(p: Private) { visitQualifier(p.qualifier) } - def visitProtected(p: Protected) { visitQualifier(p.qualifier) } - def visitModifiers(m: Modifiers) {} - - def visitValueParameters(valueParameters: Seq[ParameterList]) = valueParameters foreach visitValueParameterList - def visitValueParameterList(list: ParameterList) = list.parameters foreach visitValueParameter - def visitValueParameter(parameter: MethodParameter) = - { - visitString(parameter.name) - visitType(parameter.tpe) - } - - def visitParameterizedDefinition[T <: ParameterizedDefinition](d: T) - { - visitTypeParameters(d.typeParameters) - } - def visitTypeDeclaration(d: TypeDeclaration) - { - visitParameterizedDefinition(d) - visitType(d.lowerBound) - visitType(d.upperBound) - } - def visitTypeAlias(d: TypeAlias) - { - visitParameterizedDefinition(d) - visitType(d.tpe) - } - - def visitTypeParameters(parameters: Seq[TypeParameter]) = parameters foreach visitTypeParameter - def visitTypeParameter(parameter: TypeParameter) - { - visitTypeParameters(parameter.typeParameters) - visitType(parameter.lowerBound) - visitType(parameter.upperBound) - 
visitAnnotations(parameter.annotations) - } - def visitAnnotations(annotations: Seq[Annotation]) = annotations foreach visitAnnotation - def visitAnnotation(annotation: Annotation) = - { - visitType(annotation.base) - visitAnnotationArguments(annotation.arguments) - } - def visitAnnotationArguments(args: Seq[AnnotationArgument]) = args foreach visitAnnotationArgument - def visitAnnotationArgument(arg: AnnotationArgument) - { - visitString(arg.name) - visitString(arg.value) - } - - def visitTypes(ts: Seq[Type]) = ts.foreach(visitType) - def visitType(t: Type) - { - t match - { - case s: Structure => visitStructure(s) - case e: Existential => visitExistential(e) - case c: Constant => visitConstant(c) - case p: Polymorphic => visitPolymorphic(p) - case a: Annotated => visitAnnotated(a) - case p: Parameterized => visitParameterized(p) - case p: Projection => visitProjection(p) - case _: EmptyType => visitEmptyType() - case s: Singleton => visitSingleton(s) - case pr: ParameterRef => visitParameterRef(pr) - } - } - - def visitEmptyType() {} - def visitParameterRef(p: ParameterRef) {} - def visitSingleton(s: Singleton) { visitPath(s.path) } - def visitPath(path: Path) = path.components foreach visitPathComponent - def visitPathComponent(pc: PathComponent) = pc match - { - case t: This => visitThisPath(t) - case s: Super => visitSuperPath(s) - case id: Id => visitIdPath(id) - } - def visitThisPath(t: This) {} - def visitSuperPath(s: Super) { visitPath(s.qualifier) } - def visitIdPath(id: Id) { visitString(id.id) } + def visitDefinitions(ds: Seq[Definition]) = ds foreach visitDefinition + def visitDefinition(d: Definition) { + visitString(d.name) + visitAnnotations(d.annotations) + visitModifiers(d.modifiers) + visitAccess(d.access) + d match { + case c: ClassLike => visitClass(c) + case f: FieldLike => visitField(f) + case d: Def => visitDef(d) + case t: TypeDeclaration => visitTypeDeclaration(t) + case t: TypeAlias => visitTypeAlias(t) + } + } + final def visitClass(c: 
ClassLike): Unit = if (visitedClassLike add c) visitClass0(c) + def visitClass0(c: ClassLike) { + visitParameterizedDefinition(c) + visitType(c.selfType) + visitStructure(c.structure) + } + def visitField(f: FieldLike) { + visitType(f.tpe) + f match { + case v: Var => visitVar(v) + case v: Val => visitVal(v) + } + } + def visitVar(v: Var) {} + def visitVal(v: Val) {} + def visitDef(d: Def) { + visitParameterizedDefinition(d) + visitValueParameters(d.valueParameters) + visitType(d.returnType) + } + def visitAccess(a: Access): Unit = + a match { + case pub: Public => visitPublic(pub) + case qual: Qualified => visitQualified(qual) + } + def visitQualified(qual: Qualified): Unit = + qual match { + case p: Protected => visitProtected(p) + case p: Private => visitPrivate(p) + } + def visitQualifier(qual: Qualifier): Unit = + qual match { + case unq: Unqualified => visitUnqualified(unq) + case thisq: ThisQualifier => visitThisQualifier(thisq) + case id: IdQualifier => visitIdQualifier(id) + } + def visitIdQualifier(id: IdQualifier) { + visitString(id.value) + } + def visitUnqualified(unq: Unqualified) {} + def visitThisQualifier(thisq: ThisQualifier) {} + def visitPublic(pub: Public) {} + def visitPrivate(p: Private) { visitQualifier(p.qualifier) } + def visitProtected(p: Protected) { visitQualifier(p.qualifier) } + def visitModifiers(m: Modifiers) {} + def visitValueParameters(valueParameters: Seq[ParameterList]) = valueParameters foreach visitValueParameterList + def visitValueParameterList(list: ParameterList) = list.parameters foreach visitValueParameter + def visitValueParameter(parameter: MethodParameter) = + { + visitString(parameter.name) + visitType(parameter.tpe) + } - def visitConstant(c: Constant) = - { - visitString(c.value) - visitType(c.baseType) - } - def visitExistential(e: Existential) = visitParameters(e.clause, e.baseType) - def visitPolymorphic(p: Polymorphic) = visitParameters(p.parameters, p.baseType) - def visitProjection(p: Projection) = - { - 
visitString(p.id) - visitType(p.prefix) - } - def visitParameterized(p: Parameterized) - { - visitType(p.baseType) - visitTypes(p.typeArguments) - } - def visitAnnotated(a: Annotated) - { - visitType(a.baseType) - visitAnnotations(a.annotations) - } - final def visitStructure(structure: Structure) = if(visitedStructures add structure) visitStructure0(structure) - def visitStructure0(structure: Structure) - { - visitTypes(structure.parents) - visitDefinitions(structure.declared) - visitDefinitions(structure.inherited) - } - def visitParameters(parameters: Seq[TypeParameter], base: Type): Unit = - { - visitTypeParameters(parameters) - visitType(base) - } - def visitString(s: String) {} + def visitParameterizedDefinition[T <: ParameterizedDefinition](d: T) { + visitTypeParameters(d.typeParameters) + } + def visitTypeDeclaration(d: TypeDeclaration) { + visitParameterizedDefinition(d) + visitType(d.lowerBound) + visitType(d.upperBound) + } + def visitTypeAlias(d: TypeAlias) { + visitParameterizedDefinition(d) + visitType(d.tpe) + } + + def visitTypeParameters(parameters: Seq[TypeParameter]) = parameters foreach visitTypeParameter + def visitTypeParameter(parameter: TypeParameter) { + visitTypeParameters(parameter.typeParameters) + visitType(parameter.lowerBound) + visitType(parameter.upperBound) + visitAnnotations(parameter.annotations) + } + def visitAnnotations(annotations: Seq[Annotation]) = annotations foreach visitAnnotation + def visitAnnotation(annotation: Annotation) = + { + visitType(annotation.base) + visitAnnotationArguments(annotation.arguments) + } + def visitAnnotationArguments(args: Seq[AnnotationArgument]) = args foreach visitAnnotationArgument + def visitAnnotationArgument(arg: AnnotationArgument) { + visitString(arg.name) + visitString(arg.value) + } + + def visitTypes(ts: Seq[Type]) = ts.foreach(visitType) + def visitType(t: Type) { + t match { + case s: Structure => visitStructure(s) + case e: Existential => visitExistential(e) + case c: Constant => 
visitConstant(c) + case p: Polymorphic => visitPolymorphic(p) + case a: Annotated => visitAnnotated(a) + case p: Parameterized => visitParameterized(p) + case p: Projection => visitProjection(p) + case _: EmptyType => visitEmptyType() + case s: Singleton => visitSingleton(s) + case pr: ParameterRef => visitParameterRef(pr) + } + } + + def visitEmptyType() {} + def visitParameterRef(p: ParameterRef) {} + def visitSingleton(s: Singleton) { visitPath(s.path) } + def visitPath(path: Path) = path.components foreach visitPathComponent + def visitPathComponent(pc: PathComponent) = pc match { + case t: This => visitThisPath(t) + case s: Super => visitSuperPath(s) + case id: Id => visitIdPath(id) + } + def visitThisPath(t: This) {} + def visitSuperPath(s: Super) { visitPath(s.qualifier) } + def visitIdPath(id: Id) { visitString(id.id) } + + def visitConstant(c: Constant) = + { + visitString(c.value) + visitType(c.baseType) + } + def visitExistential(e: Existential) = visitParameters(e.clause, e.baseType) + def visitPolymorphic(p: Polymorphic) = visitParameters(p.parameters, p.baseType) + def visitProjection(p: Projection) = + { + visitString(p.id) + visitType(p.prefix) + } + def visitParameterized(p: Parameterized) { + visitType(p.baseType) + visitTypes(p.typeArguments) + } + def visitAnnotated(a: Annotated) { + visitType(a.baseType) + visitAnnotations(a.annotations) + } + final def visitStructure(structure: Structure) = if (visitedStructures add structure) visitStructure0(structure) + def visitStructure0(structure: Structure) { + visitTypes(structure.parents) + visitDefinitions(structure.declared) + visitDefinitions(structure.inherited) + } + def visitParameters(parameters: Seq[TypeParameter], base: Type): Unit = + { + visitTypeParameters(parameters) + visitType(base) + } + def visitString(s: String) {} } \ No newline at end of file diff --git a/compile/api/src/main/scala/xsbti/SafeLazy.scala b/compile/api/src/main/scala/xsbti/SafeLazy.scala index 7ccda7e0e..6809e9402 
100644 --- a/compile/api/src/main/scala/xsbti/SafeLazy.scala +++ b/compile/api/src/main/scala/xsbti/SafeLazy.scala @@ -2,25 +2,23 @@ // and be accessible to the compiler-side interface package xsbti -object SafeLazy -{ - def apply[T <: AnyRef](eval: xsbti.F0[T]): xsbti.api.Lazy[T] = - apply( eval() ) - def apply[T <: AnyRef](eval: => T): xsbti.api.Lazy[T] = - fromFunction0( eval _ ) - def fromFunction0[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = - new Impl( eval ) +object SafeLazy { + def apply[T <: AnyRef](eval: xsbti.F0[T]): xsbti.api.Lazy[T] = + apply(eval()) + def apply[T <: AnyRef](eval: => T): xsbti.api.Lazy[T] = + fromFunction0(eval _) + def fromFunction0[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] = + new Impl(eval) - def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = apply(value) + def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] = apply(value) - private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] - { - private[this] lazy val _t = - { - val t = eval() - eval = null // clear the reference, ensuring the only memory we hold onto is the result - t - } - def get: T = _t - } + private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] { + private[this] lazy val _t = + { + val t = eval() + eval = null // clear the reference, ensuring the only memory we hold onto is the result + t + } + def get: T = _t + } } \ No newline at end of file diff --git a/compile/inc/src/main/scala/sbt/CompileSetup.scala b/compile/inc/src/main/scala/sbt/CompileSetup.scala index 11ecc6805..338f7d810 100644 --- a/compile/inc/src/main/scala/sbt/CompileSetup.scala +++ b/compile/inc/src/main/scala/sbt/CompileSetup.scala @@ -3,8 +3,8 @@ */ package sbt - import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput} - import java.io.File +import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput } +import 
java.io.File // this class exists because of Scala's restriction on implicit parameter search. // We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup] @@ -12,50 +12,49 @@ package sbt // (6 > 4) final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String, - val order: CompileOrder, val nameHashing: Boolean) { - @deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2") - def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = { - this(output, options, compilerVersion, order, false) - } + val order: CompileOrder, val nameHashing: Boolean) { + @deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2") + def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = { + this(output, options, compilerVersion, order, false) + } } -object CompileSetup -{ - // Equiv[CompileOrder.Value] dominates Equiv[CompileSetup] - implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String]/*, equivOrder: Equiv[CompileOrder]*/): Equiv[CompileSetup] = new Equiv[CompileSetup] { - def equiv(a: CompileSetup, b: CompileSetup) = - equivOutput.equiv(a.output, b.output) && - equivOpts.equiv(a.options, b.options) && - equivComp.equiv(a.compilerVersion, b.compilerVersion) && - a.order == b.order && // equivOrder.equiv(a.order, b.order) - a.nameHashing == b.nameHashing - } - implicit val equivFile: Equiv[File] = new Equiv[File] { - def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile - } - implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] { - implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => 
og.sourceDirectory) - def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match { - case (m1: MultipleOutput, m2: MultipleOutput) => - (m1.outputGroups.length == m2.outputGroups.length) && - (m1.outputGroups.sorted zip m2.outputGroups.sorted forall { - case (a,b) => - equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory) - }) - case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory) - case _ => false - } - } - implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] { - def equiv(a: CompileOptions, b: CompileOptions) = - (a.options sameElements b.options) && - (a.javacOptions sameElements b.javacOptions) - } - implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] { - def equiv(a: String, b: String) = a == b - } +object CompileSetup { + // Equiv[CompileOrder.Value] dominates Equiv[CompileSetup] + implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String] /*, equivOrder: Equiv[CompileOrder]*/ ): Equiv[CompileSetup] = new Equiv[CompileSetup] { + def equiv(a: CompileSetup, b: CompileSetup) = + equivOutput.equiv(a.output, b.output) && + equivOpts.equiv(a.options, b.options) && + equivComp.equiv(a.compilerVersion, b.compilerVersion) && + a.order == b.order && // equivOrder.equiv(a.order, b.order) + a.nameHashing == b.nameHashing + } + implicit val equivFile: Equiv[File] = new Equiv[File] { + def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile + } + implicit val equivOutput: Equiv[APIOutput] = new Equiv[APIOutput] { + implicit val outputGroupsOrdering = Ordering.by((og: MultipleOutput.OutputGroup) => og.sourceDirectory) + def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match { + case (m1: MultipleOutput, m2: MultipleOutput) => + (m1.outputGroups.length == m2.outputGroups.length) && + (m1.outputGroups.sorted zip m2.outputGroups.sorted 
forall { + case (a, b) => + equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory) + }) + case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory) + case _ => false + } + } + implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] { + def equiv(a: CompileOptions, b: CompileOptions) = + (a.options sameElements b.options) && + (a.javacOptions sameElements b.javacOptions) + } + implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] { + def equiv(a: String, b: String) = a == b + } - implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] { - def equiv(a: CompileOrder, b: CompileOrder) = a == b - } + implicit val equivOrder: Equiv[CompileOrder] = new Equiv[CompileOrder] { + def equiv(a: CompileOrder, b: CompileOrder) = a == b + } } diff --git a/compile/inc/src/main/scala/sbt/inc/APIDiff.scala b/compile/inc/src/main/scala/sbt/inc/APIDiff.scala index 30648d921..48fa0968f 100644 --- a/compile/inc/src/main/scala/sbt/inc/APIDiff.scala +++ b/compile/inc/src/main/scala/sbt/inc/APIDiff.scala @@ -4,7 +4,7 @@ import xsbti.api.SourceAPI import xsbt.api.ShowAPI import xsbt.api.DefaultShowAPI._ import java.lang.reflect.Method -import java.util.{List => JList} +import java.util.{ List => JList } /** * A class which computes diffs (unified diffs) between two textual representations of an API. 
@@ -21,47 +21,47 @@ import java.util.{List => JList} */ private[inc] class APIDiff { - import APIDiff._ + import APIDiff._ - private val diffUtilsClass = Class.forName(diffUtilsClassName) - // method signature: diff(List, List) - private val diffMethod: Method = - diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]]) + private val diffUtilsClass = Class.forName(diffUtilsClassName) + // method signature: diff(List, List) + private val diffMethod: Method = + diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]]) - private val generateUnifiedDiffMethod: Method = { - val patchClass = Class.forName(patchClassName) - // method signature: generateUnifiedDiff(String, String, List, Patch, int) - diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String], - classOf[String], classOf[JList[String]], patchClass, classOf[Int]) - } + private val generateUnifiedDiffMethod: Method = { + val patchClass = Class.forName(patchClassName) + // method signature: generateUnifiedDiff(String, String, List, Patch, int) + diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String], + classOf[String], classOf[JList[String]], patchClass, classOf[Int]) + } - /** - * Generates an unified diff between textual representations of `api1` and `api2`. - */ - def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = { - val api1Str = ShowAPI.show(api1) - val api2Str = ShowAPI.show(api2) - generateApiDiff(fileName, api1Str, api2Str, contextSize) - } + /** + * Generates a unified diff between textual representations of `api1` and `api2`. 
+ */ + def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = { + val api1Str = ShowAPI.show(api1) + val api2Str = ShowAPI.show(api2) + generateApiDiff(fileName, api1Str, api2Str, contextSize) + } - private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = { - assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.") - import scala.collection.JavaConverters._ - def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava - val f1Lines = asJavaList(f1.lines) - val f2Lines = asJavaList(f2.lines) - //val diff = DiffUtils.diff(f1Lines, f2Lines) - val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines) - val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff, - (contextSize: java.lang.Integer)).asInstanceOf[JList[String]] - unifiedPatch.asScala.mkString("\n") - } + private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = { + assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.") + import scala.collection.JavaConverters._ + def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava + val f1Lines = asJavaList(f1.lines) + val f2Lines = asJavaList(f2.lines) + //val diff = DiffUtils.diff(f1Lines, f2Lines) + val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines) + val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff, + (contextSize: java.lang.Integer)).asInstanceOf[JList[String]] + unifiedPatch.asScala.mkString("\n") + } } private[inc] object APIDiff { - private val diffUtilsClassName = "difflib.DiffUtils" - private val patchClassName = "difflib.Patch" - private val diffMethodName = "diff" - private val generateUnifiedDiffMethodName = "generateUnifiedDiff" + private val diffUtilsClassName = 
"difflib.DiffUtils" + private val patchClassName = "difflib.Patch" + private val diffMethodName = "diff" + private val generateUnifiedDiffMethodName = "generateUnifiedDiff" } diff --git a/compile/inc/src/main/scala/sbt/inc/APIs.scala b/compile/inc/src/main/scala/sbt/inc/APIs.scala index a09df5ef7..08de5b88d 100644 --- a/compile/inc/src/main/scala/sbt/inc/APIs.scala +++ b/compile/inc/src/main/scala/sbt/inc/APIs.scala @@ -11,81 +11,82 @@ import xsbti.api._internalOnly_NameHashes import scala.util.Sorting import xsbt.api.SameAPI -trait APIs -{ - /** The API for the source file `src` at the time represented by this instance. - * This method returns an empty API if the file had no API or is not known to this instance. */ - def internalAPI(src: File): Source - /** The API for the external class `ext` at the time represented by this instance. - * This method returns an empty API if the file had no API or is not known to this instance. */ - def externalAPI(ext: String): Source +trait APIs { + /** + * The API for the source file `src` at the time represented by this instance. + * This method returns an empty API if the file had no API or is not known to this instance. + */ + def internalAPI(src: File): Source + /** + * The API for the external class `ext` at the time represented by this instance. + * This method returns an empty API if the file had no API or is not known to this instance. 
+ */ + def externalAPI(ext: String): Source - def allExternals: collection.Set[String] - def allInternalSources: collection.Set[File] + def allExternals: collection.Set[String] + def allInternalSources: collection.Set[File] - def ++ (o: APIs): APIs + def ++(o: APIs): APIs - def markInternalSource(src: File, api: Source): APIs - def markExternalAPI(ext: String, api: Source): APIs + def markInternalSource(src: File, api: Source): APIs + def markExternalAPI(ext: String, api: Source): APIs - def removeInternal(remove: Iterable[File]): APIs - def filterExt(keep: String => Boolean): APIs - @deprecated("OK to remove in 0.14", "0.13.1") - def groupBy[K](internal: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] + def removeInternal(remove: Iterable[File]): APIs + def filterExt(keep: String => Boolean): APIs + @deprecated("OK to remove in 0.14", "0.13.1") + def groupBy[K](internal: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] - def internal: Map[File, Source] - def external: Map[String, Source] + def internal: Map[File, Source] + def external: Map[String, Source] } -object APIs -{ - def apply(internal: Map[File, Source], external: Map[String, Source]): APIs = new MAPIs(internal, external) - def empty: APIs = apply(Map.empty, Map.empty) +object APIs { + def apply(internal: Map[File, Source], external: Map[String, Source]): APIs = new MAPIs(internal, external) + def empty: APIs = apply(Map.empty, Map.empty) - val emptyAPI = new xsbti.api.SourceAPI(Array(), Array()) - val emptyCompilation = new xsbti.api.Compilation(-1, Array()) - val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) - val emptySource = new xsbti.api.Source(emptyCompilation, Array(), emptyAPI, 0, emptyNameHashes, false) - def getAPI[T](map: Map[T, Source], src: T): Source = map.getOrElse(src, emptySource) + val emptyAPI = new xsbti.api.SourceAPI(Array(), Array()) + val emptyCompilation = new xsbti.api.Compilation(-1, Array()) + val 
emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) + val emptySource = new xsbti.api.Source(emptyCompilation, Array(), emptyAPI, 0, emptyNameHashes, false) + def getAPI[T](map: Map[T, Source], src: T): Source = map.getOrElse(src, emptySource) } -private class MAPIs(val internal: Map[File, Source], val external: Map[String, Source]) extends APIs -{ - def allInternalSources: collection.Set[File] = internal.keySet - def allExternals: collection.Set[String] = external.keySet +private class MAPIs(val internal: Map[File, Source], val external: Map[String, Source]) extends APIs { + def allInternalSources: collection.Set[File] = internal.keySet + def allExternals: collection.Set[String] = external.keySet - def ++ (o: APIs): APIs = new MAPIs(internal ++ o.internal, external ++ o.external) + def ++(o: APIs): APIs = new MAPIs(internal ++ o.internal, external ++ o.external) - def markInternalSource(src: File, api: Source): APIs = - new MAPIs(internal.updated(src, api), external) + def markInternalSource(src: File, api: Source): APIs = + new MAPIs(internal.updated(src, api), external) - def markExternalAPI(ext: String, api: Source): APIs = - new MAPIs(internal, external.updated(ext, api)) + def markExternalAPI(ext: String, api: Source): APIs = + new MAPIs(internal, external.updated(ext, api)) - def removeInternal(remove: Iterable[File]): APIs = new MAPIs(internal -- remove, external) - def filterExt(keep: String => Boolean): APIs = new MAPIs(internal, external.filterKeys(keep)) - @deprecated("Broken implementation. 
OK to remove in 0.14", "0.13.1") - def groupBy[K](f: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] = - internal.groupBy(item => f(item._1)) map { group => (group._1, new MAPIs(group._2, external).filterExt(keepExternal.getOrElse(group._1, _ => false)))} + def removeInternal(remove: Iterable[File]): APIs = new MAPIs(internal -- remove, external) + def filterExt(keep: String => Boolean): APIs = new MAPIs(internal, external.filterKeys(keep)) + @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1") + def groupBy[K](f: (File) => K, keepExternal: Map[K, String => Boolean]): Map[K, APIs] = + internal.groupBy(item => f(item._1)) map { group => (group._1, new MAPIs(group._2, external).filterExt(keepExternal.getOrElse(group._1, _ => false))) } - def internalAPI(src: File) = getAPI(internal, src) - def externalAPI(ext: String) = getAPI(external, ext) + def internalAPI(src: File) = getAPI(internal, src) + def externalAPI(ext: String) = getAPI(external, ext) - override def equals(other: Any): Boolean = other match { - case o: MAPIs => { - def areEqual[T](x: Map[T, Source], y: Map[T, Source])(implicit ord: math.Ordering[T]) = { - x.size == y.size && (sorted(x) zip sorted(y) forall { z => z._1._1 == z._2._1 && SameAPI(z._1._2, z._2._2)}) - } - areEqual(internal, o.internal) && areEqual(external, o.external) - } - case _ => false - } + override def equals(other: Any): Boolean = other match { + case o: MAPIs => { + def areEqual[T](x: Map[T, Source], y: Map[T, Source])(implicit ord: math.Ordering[T]) = { + x.size == y.size && (sorted(x) zip sorted(y) forall { z => z._1._1 == z._2._1 && SameAPI(z._1._2, z._2._2) }) + } + areEqual(internal, o.internal) && areEqual(external, o.external) + } + case _ => false + } - override lazy val hashCode: Int = { - def hash[T](m: Map[T, Source])(implicit ord: math.Ordering[T]) = sorted(m).map(x => (x._1, x._2.apiHash).hashCode).hashCode - (hash(internal), hash(external)).hashCode - } + override lazy val hashCode: 
Int = { + def hash[T](m: Map[T, Source])(implicit ord: math.Ordering[T]) = sorted(m).map(x => (x._1, x._2.apiHash).hashCode).hashCode + (hash(internal), hash(external)).hashCode + } - override def toString: String = "API(internal: %d, external: %d)".format(internal.size, external.size) + override def toString: String = "API(internal: %d, external: %d)".format(internal.size, external.size) - private[this] def sorted[T](m: Map[T, Source])(implicit ord: math.Ordering[T]): Seq[(T, Source)] = m.toSeq.sortBy(_._1) + private[this] def sorted[T](m: Map[T, Source])(implicit ord: math.Ordering[T]): Seq[(T, Source)] = m.toSeq.sortBy(_._1) } diff --git a/compile/inc/src/main/scala/sbt/inc/Analysis.scala b/compile/inc/src/main/scala/sbt/inc/Analysis.scala index c2657aa0d..29855ee71 100644 --- a/compile/inc/src/main/scala/sbt/inc/Analysis.scala +++ b/compile/inc/src/main/scala/sbt/inc/Analysis.scala @@ -7,7 +7,6 @@ package inc import xsbti.api.Source import java.io.File - /** * The merge/groupBy functionality requires understanding of the concepts of internalizing/externalizing dependencies: * @@ -23,248 +22,243 @@ import java.io.File * These transformations are complicated by the fact that internal dependencies are expressed as source file -> source file, * but external dependencies are expressed as source file -> fully-qualified class name. */ -trait Analysis -{ - val stamps: Stamps - val apis: APIs - /** Mappings between sources, classes, and binaries. */ - val relations: Relations - val infos: SourceInfos - /** - * Information about compiler runs accumulated since `clean` command has been run. - * - * The main use-case for using `compilations` field is to determine how - * many iterations it took to compilen give code. The `Compilation` object - * are also stored in `Source` objects so there's an indirect way to recover - * information about files being recompiled in every iteration. 
- * - * The incremental compilation algorithm doesn't use information stored in - * `compilations`. It's safe to prune contents of that field without breaking - * internal consistency of the entire Analysis object. - */ - val compilations: Compilations +trait Analysis { + val stamps: Stamps + val apis: APIs + /** Mappings between sources, classes, and binaries. */ + val relations: Relations + val infos: SourceInfos + /** + * Information about compiler runs accumulated since `clean` command has been run. + * + * The main use-case for using `compilations` field is to determine how + * many iterations it took to compile given code. The `Compilation` objects + * are also stored in `Source` objects so there's an indirect way to recover + * information about files being recompiled in every iteration. + * + * The incremental compilation algorithm doesn't use information stored in + * `compilations`. It's safe to prune contents of that field without breaking + * internal consistency of the entire Analysis object. + */ + val compilations: Compilations - /** Concatenates Analysis objects naively, i.e., doesn't internalize external deps on added files. See `Analysis.merge`. */ - def ++ (other: Analysis): Analysis + /** Concatenates Analysis objects naively, i.e., doesn't internalize external deps on added files. See `Analysis.merge`. */ + def ++(other: Analysis): Analysis - /** Drops all analysis information for `sources` naively, i.e., doesn't externalize internal deps on removed files. */ - def -- (sources: Iterable[File]): Analysis + /** Drops all analysis information for `sources` naively, i.e., doesn't externalize internal deps on removed files. 
*/ + def --(sources: Iterable[File]): Analysis - def copy(stamps: Stamps = stamps, apis: APIs = apis, relations: Relations = relations, infos: SourceInfos = infos, - compilations: Compilations = compilations): Analysis + def copy(stamps: Stamps = stamps, apis: APIs = apis, relations: Relations = relations, infos: SourceInfos = infos, + compilations: Compilations = compilations): Analysis - def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis - def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis - def addExternalDep(src: File, dep: String, api: Source, inherited: Boolean): Analysis - def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis + def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis + def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis + def addExternalDep(src: File, dep: String, api: Source, inherited: Boolean): Analysis + def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis - /** Partitions this Analysis using the discriminator function. Externalizes internal deps that cross partitions. */ - def groupBy[K](discriminator: (File => K)): Map[K, Analysis] + /** Partitions this Analysis using the discriminator function. Externalizes internal deps that cross partitions. 
*/ + def groupBy[K](discriminator: (File => K)): Map[K, Analysis] - override lazy val toString = Analysis.summary(this) + override lazy val toString = Analysis.summary(this) } -object Analysis -{ - lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty) - private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty, - Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty) +object Analysis { + lazy val Empty: Analysis = new MAnalysis(Stamps.empty, APIs.empty, Relations.empty, SourceInfos.empty, Compilations.empty) + private[sbt] def empty(nameHashing: Boolean): Analysis = new MAnalysis(Stamps.empty, APIs.empty, + Relations.empty(nameHashing = nameHashing), SourceInfos.empty, Compilations.empty) - /** Merge multiple analysis objects into one. Deps will be internalized as needed. */ - def merge(analyses: Traversable[Analysis]): Analysis = { - if (analyses.exists(_.relations.nameHashing)) - throw new IllegalArgumentException("Merging of Analyses that have" + - "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.") + /** Merge multiple analysis objects into one. Deps will be internalized as needed. */ + def merge(analyses: Traversable[Analysis]): Analysis = { + if (analyses.exists(_.relations.nameHashing)) + throw new IllegalArgumentException("Merging of Analyses that have" + + "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.") - // Merge the Relations, internalizing deps as needed. - val mergedSrcProd = Relation.merge(analyses map { _.relations.srcProd }) - val mergedBinaryDep = Relation.merge(analyses map { _.relations.binaryDep }) - val mergedClasses = Relation.merge(analyses map { _.relations.classes }) + // Merge the Relations, internalizing deps as needed. 
+ val mergedSrcProd = Relation.merge(analyses map { _.relations.srcProd }) + val mergedBinaryDep = Relation.merge(analyses map { _.relations.binaryDep }) + val mergedClasses = Relation.merge(analyses map { _.relations.classes }) - val stillInternal = Relation.merge(analyses map { _.relations.direct.internal }) - val (internalized, stillExternal) = Relation.merge(analyses map { _.relations.direct.external }) partition { case (a, b) => mergedClasses._2s.contains(b) } - val internalizedFiles = Relation.reconstruct(internalized.forwardMap mapValues { _ flatMap mergedClasses.reverse }) - val mergedInternal = stillInternal ++ internalizedFiles + val stillInternal = Relation.merge(analyses map { _.relations.direct.internal }) + val (internalized, stillExternal) = Relation.merge(analyses map { _.relations.direct.external }) partition { case (a, b) => mergedClasses._2s.contains(b) } + val internalizedFiles = Relation.reconstruct(internalized.forwardMap mapValues { _ flatMap mergedClasses.reverse }) + val mergedInternal = stillInternal ++ internalizedFiles - val stillInternalPI = Relation.merge(analyses map { _.relations.publicInherited.internal }) - val (internalizedPI, stillExternalPI) = Relation.merge(analyses map { _.relations.publicInherited.external }) partition { case (a, b) => mergedClasses._2s.contains(b) } - val internalizedFilesPI = Relation.reconstruct(internalizedPI.forwardMap mapValues { _ flatMap mergedClasses.reverse }) - val mergedInternalPI = stillInternalPI ++ internalizedFilesPI + val stillInternalPI = Relation.merge(analyses map { _.relations.publicInherited.internal }) + val (internalizedPI, stillExternalPI) = Relation.merge(analyses map { _.relations.publicInherited.external }) partition { case (a, b) => mergedClasses._2s.contains(b) } + val internalizedFilesPI = Relation.reconstruct(internalizedPI.forwardMap mapValues { _ flatMap mergedClasses.reverse }) + val mergedInternalPI = stillInternalPI ++ internalizedFilesPI - val mergedRelations = 
Relations.make( - mergedSrcProd, - mergedBinaryDep, - Relations.makeSource(mergedInternal, stillExternal), - Relations.makeSource(mergedInternalPI, stillExternalPI), - mergedClasses - ) + val mergedRelations = Relations.make( + mergedSrcProd, + mergedBinaryDep, + Relations.makeSource(mergedInternal, stillExternal), + Relations.makeSource(mergedInternalPI, stillExternalPI), + mergedClasses + ) - // Merge the APIs, internalizing APIs for targets of dependencies we internalized above. - val concatenatedAPIs = (APIs.empty /: (analyses map {_.apis}))(_ ++ _) - val stillInternalAPIs = concatenatedAPIs.internal - val (internalizedAPIs, stillExternalAPIs) = concatenatedAPIs.external partition { x: (String, Source) => internalized._2s.contains(x._1) } - val internalizedFilesAPIs = internalizedAPIs flatMap { - case (cls: String, source: Source) => mergedRelations.definesClass(cls) map { file: File => (file, concatenatedAPIs.internalAPI(file)) } - } - val mergedAPIs = APIs(stillInternalAPIs ++ internalizedFilesAPIs, stillExternalAPIs) + // Merge the APIs, internalizing APIs for targets of dependencies we internalized above. 
+ val concatenatedAPIs = (APIs.empty /: (analyses map { _.apis }))(_ ++ _) + val stillInternalAPIs = concatenatedAPIs.internal + val (internalizedAPIs, stillExternalAPIs) = concatenatedAPIs.external partition { x: (String, Source) => internalized._2s.contains(x._1) } + val internalizedFilesAPIs = internalizedAPIs flatMap { + case (cls: String, source: Source) => mergedRelations.definesClass(cls) map { file: File => (file, concatenatedAPIs.internalAPI(file)) } + } + val mergedAPIs = APIs(stillInternalAPIs ++ internalizedFilesAPIs, stillExternalAPIs) - val mergedStamps = Stamps.merge(analyses map { _.stamps }) - val mergedInfos = SourceInfos.merge(analyses map { _.infos }) - val mergedCompilations = Compilations.merge(analyses map { _.compilations }) + val mergedStamps = Stamps.merge(analyses map { _.stamps }) + val mergedInfos = SourceInfos.merge(analyses map { _.infos }) + val mergedCompilations = Compilations.merge(analyses map { _.compilations }) - new MAnalysis(mergedStamps, mergedAPIs, mergedRelations, mergedInfos, mergedCompilations) - } + new MAnalysis(mergedStamps, mergedAPIs, mergedRelations, mergedInfos, mergedCompilations) + } - def summary(a: Analysis): String = - { - val (j, s) = a.apis.allInternalSources.partition(_.getName.endsWith(".java")) - val c = a.stamps.allProducts - val ext = a.apis.allExternals - val jars = a.relations.allBinaryDeps.filter(_.getName.endsWith(".jar")) - val unreportedCount = a.infos.allInfos.values.map(_.unreportedProblems.size).sum - val sections = - counted("Scala source", "", "s", s.size) ++ - counted("Java source", "", "s", j.size) ++ - counted("class", "", "es", c.size) ++ - counted("external source dependenc", "y", "ies", ext.size) ++ - counted("binary dependenc", "y", "ies", jars.size) ++ - counted("unreported warning", "", "s", unreportedCount) - sections.mkString("Analysis: ", ", ", "") - } + def summary(a: Analysis): String = + { + val (j, s) = a.apis.allInternalSources.partition(_.getName.endsWith(".java")) + val c 
= a.stamps.allProducts + val ext = a.apis.allExternals + val jars = a.relations.allBinaryDeps.filter(_.getName.endsWith(".jar")) + val unreportedCount = a.infos.allInfos.values.map(_.unreportedProblems.size).sum + val sections = + counted("Scala source", "", "s", s.size) ++ + counted("Java source", "", "s", j.size) ++ + counted("class", "", "es", c.size) ++ + counted("external source dependenc", "y", "ies", ext.size) ++ + counted("binary dependenc", "y", "ies", jars.size) ++ + counted("unreported warning", "", "s", unreportedCount) + sections.mkString("Analysis: ", ", ", "") + } - def counted(prefix: String, single: String, plural: String, count: Int): Option[String] = - count match - { - case 0 => None - case 1 => Some("1 " + prefix + single) - case x => Some(x.toString + " " + prefix + plural) - } + def counted(prefix: String, single: String, plural: String, count: Int): Option[String] = + count match { + case 0 => None + case 1 => Some("1 " + prefix + single) + case x => Some(x.toString + " " + prefix + plural) + } } -private class MAnalysis(val stamps: Stamps, val apis: APIs, val relations: Relations, val infos: SourceInfos, val compilations: Compilations) extends Analysis -{ - def ++ (o: Analysis): Analysis = new MAnalysis(stamps ++ o.stamps, apis ++ o.apis, relations ++ o.relations, - infos ++ o.infos, compilations ++ o.compilations) +private class MAnalysis(val stamps: Stamps, val apis: APIs, val relations: Relations, val infos: SourceInfos, val compilations: Compilations) extends Analysis { + def ++(o: Analysis): Analysis = new MAnalysis(stamps ++ o.stamps, apis ++ o.apis, relations ++ o.relations, + infos ++ o.infos, compilations ++ o.compilations) - def -- (sources: Iterable[File]): Analysis = - { - val newRelations = relations -- sources - def keep[T](f: (Relations, T) => Set[_]): T => Boolean = !f(newRelations, _).isEmpty + def --(sources: Iterable[File]): Analysis = + { + val newRelations = relations -- sources + def keep[T](f: (Relations, T) => 
Set[_]): T => Boolean = !f(newRelations, _).isEmpty - val newAPIs = apis.removeInternal(sources).filterExt( keep(_ usesExternal _) ) - val newStamps = stamps.filter( keep(_ produced _), sources, keep(_ usesBinary _)) - val newInfos = infos -- sources - new MAnalysis(newStamps, newAPIs, newRelations, newInfos, compilations) - } + val newAPIs = apis.removeInternal(sources).filterExt(keep(_ usesExternal _)) + val newStamps = stamps.filter(keep(_ produced _), sources, keep(_ usesBinary _)) + val newInfos = infos -- sources + new MAnalysis(newStamps, newAPIs, newRelations, newInfos, compilations) + } - def copy(stamps: Stamps, apis: APIs, relations: Relations, infos: SourceInfos, compilations: Compilations = compilations): Analysis = - new MAnalysis(stamps, apis, relations, infos, compilations) + def copy(stamps: Stamps, apis: APIs, relations: Relations, infos: SourceInfos, compilations: Compilations = compilations): Analysis = + new MAnalysis(stamps, apis, relations, infos, compilations) - def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis = - copy( stamps.markInternalSource(src, stamp), apis.markInternalSource(src, api), relations.addInternalSrcDeps(src, directInternal, inheritedInternal), infos.add(src, info) ) + def addSource(src: File, api: Source, stamp: Stamp, directInternal: Iterable[File], inheritedInternal: Iterable[File], info: SourceInfo): Analysis = + copy(stamps.markInternalSource(src, stamp), apis.markInternalSource(src, api), relations.addInternalSrcDeps(src, directInternal, inheritedInternal), infos.add(src, info)) - def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis = - copy( stamps.markBinary(dep, className, stamp), apis, relations.addBinaryDep(src, dep), infos ) + def addBinaryDep(src: File, dep: File, className: String, stamp: Stamp): Analysis = + copy(stamps.markBinary(dep, className, stamp), apis, 
relations.addBinaryDep(src, dep), infos) - def addExternalDep(src: File, dep: String, depAPI: Source, inherited: Boolean): Analysis = - copy( stamps, apis.markExternalAPI(dep, depAPI), relations.addExternalDep(src, dep, inherited), infos ) + def addExternalDep(src: File, dep: String, depAPI: Source, inherited: Boolean): Analysis = + copy(stamps, apis.markExternalAPI(dep, depAPI), relations.addExternalDep(src, dep, inherited), infos) - def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis = - copy( stamps.markProduct(product, stamp), apis, relations.addProduct(src, product, name), infos ) + def addProduct(src: File, product: File, stamp: Stamp, name: String): Analysis = + copy(stamps.markProduct(product, stamp), apis, relations.addProduct(src, product, name), infos) - def groupBy[K](discriminator: File => K): Map[K, Analysis] = { - if (relations.nameHashing) - throw new UnsupportedOperationException("Grouping of Analyses that have" + - "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.") + def groupBy[K](discriminator: File => K): Map[K, Analysis] = { + if (relations.nameHashing) + throw new UnsupportedOperationException("Grouping of Analyses that have" + + "`relations.memberRefAndInheritanceDeps` set to `true` is not supported.") - def discriminator1(x: (File, _)) = discriminator(x._1) // Apply the discriminator to the first coordinate. + def discriminator1(x: (File, _)) = discriminator(x._1) // Apply the discriminator to the first coordinate. 
- val kSrcProd = relations.srcProd.groupBy(discriminator1) - val kBinaryDep = relations.binaryDep.groupBy(discriminator1) - val kClasses = relations.classes.groupBy(discriminator1) - val kSourceInfos = infos.allInfos.groupBy(discriminator1) + val kSrcProd = relations.srcProd.groupBy(discriminator1) + val kBinaryDep = relations.binaryDep.groupBy(discriminator1) + val kClasses = relations.classes.groupBy(discriminator1) + val kSourceInfos = infos.allInfos.groupBy(discriminator1) - val (kStillInternal, kExternalized) = relations.direct.internal partition { case (a, b) => discriminator(a) == discriminator(b) } match { - case (i, e) => (i.groupBy(discriminator1), e.groupBy(discriminator1)) - } - val kStillExternal = relations.direct.external.groupBy(discriminator1) + val (kStillInternal, kExternalized) = relations.direct.internal partition { case (a, b) => discriminator(a) == discriminator(b) } match { + case (i, e) => (i.groupBy(discriminator1), e.groupBy(discriminator1)) + } + val kStillExternal = relations.direct.external.groupBy(discriminator1) + // Find all possible groups. + val allMaps = kSrcProd :: kBinaryDep :: kStillInternal :: kExternalized :: kStillExternal :: kClasses :: kSourceInfos :: Nil + val allKeys: Set[K] = (Set.empty[K] /: (allMaps map { _.keySet }))(_ ++ _) - // Find all possible groups. - val allMaps = kSrcProd :: kBinaryDep :: kStillInternal :: kExternalized :: kStillExternal :: kClasses :: kSourceInfos :: Nil - val allKeys: Set[K] = (Set.empty[K] /: (allMaps map { _.keySet }))(_ ++ _) + // Map from file to a single representative class defined in that file. + // This is correct (for now): currently all classes in an external dep share the same Source object, + // and a change to any of them will act like a change to all of them. + // We don't use all the top-level classes in source.api.definitions, even though that's more intuitively + // correct, because this can cause huge bloat of the analysis file. 
+ def getRepresentativeClass(file: File): Option[String] = apis.internalAPI(file).api.definitions.headOption map { _.name } - // Map from file to a single representative class defined in that file. - // This is correct (for now): currently all classes in an external dep share the same Source object, - // and a change to any of them will act like a change to all of them. - // We don't use all the top-level classes in source.api.definitions, even though that's more intuitively - // correct, because this can cause huge bloat of the analysis file. - def getRepresentativeClass(file: File): Option[String] = apis.internalAPI(file).api.definitions.headOption map { _.name } + // Create an Analysis for each group. + (for (k <- allKeys) yield { + def getFrom[A, B](m: Map[K, Relation[A, B]]): Relation[A, B] = m.getOrElse(k, Relation.empty) - // Create an Analysis for each group. - (for (k <- allKeys) yield { - def getFrom[A, B](m: Map[K, Relation[A, B]]): Relation[A, B] = m.getOrElse(k, Relation.empty) + // Products and binary deps. + val srcProd = getFrom(kSrcProd) + val binaryDep = getFrom(kBinaryDep) - // Products and binary deps. - val srcProd = getFrom(kSrcProd) - val binaryDep = getFrom(kBinaryDep) + // Direct Sources. + val stillInternal = getFrom(kStillInternal) + val stillExternal = getFrom(kStillExternal) + val externalized = getFrom(kExternalized) + val externalizedClasses = Relation.reconstruct(externalized.forwardMap mapValues { _ flatMap getRepresentativeClass }) + val newExternal = stillExternal ++ externalizedClasses - // Direct Sources. - val stillInternal = getFrom(kStillInternal) - val stillExternal = getFrom(kStillExternal) - val externalized = getFrom(kExternalized) - val externalizedClasses = Relation.reconstruct(externalized.forwardMap mapValues { _ flatMap getRepresentativeClass }) - val newExternal = stillExternal ++ externalizedClasses + // Public inherited sources. 
+ val stillInternalPI = stillInternal filter relations.publicInherited.internal.contains + val stillExternalPI = stillExternal filter relations.publicInherited.external.contains + val externalizedPI = externalized filter relations.publicInherited.internal.contains + val externalizedClassesPI = Relation.reconstruct(externalizedPI.forwardMap mapValues { _ flatMap getRepresentativeClass }) + val newExternalPI = stillExternalPI ++ externalizedClassesPI - // Public inherited sources. - val stillInternalPI = stillInternal filter relations.publicInherited.internal.contains - val stillExternalPI = stillExternal filter relations.publicInherited.external.contains - val externalizedPI = externalized filter relations.publicInherited.internal.contains - val externalizedClassesPI = Relation.reconstruct(externalizedPI.forwardMap mapValues { _ flatMap getRepresentativeClass }) - val newExternalPI = stillExternalPI ++ externalizedClassesPI + // Class names. + val classes = getFrom(kClasses) - // Class names. - val classes = getFrom(kClasses) + // Create new relations for this group. + val newRelations = Relations.make( + srcProd, + binaryDep, + Relations.makeSource(stillInternal, newExternal), + Relations.makeSource(stillInternalPI, newExternalPI), + classes + ) - // Create new relations for this group. - val newRelations = Relations.make( - srcProd, - binaryDep, - Relations.makeSource(stillInternal, newExternal), - Relations.makeSource(stillInternalPI, newExternalPI), - classes - ) + // Compute new API mappings. 
+ def apisFor[T](m: Map[T, Source], x: Traversable[T]): Map[T, Source] = + (x map { e: T => (e, m.get(e)) } collect { case (t, Some(source)) => (t, source) }).toMap + val stillInternalAPIs = apisFor(apis.internal, srcProd._1s) + val stillExternalAPIs = apisFor(apis.external, stillExternal._2s) + val externalizedAPIs = apisFor(apis.internal, externalized._2s) + val externalizedClassesAPIs = externalizedAPIs flatMap { + case (file: File, source: Source) => getRepresentativeClass(file) map { cls: String => (cls, source) } + } + val newAPIs = APIs(stillInternalAPIs, stillExternalAPIs ++ externalizedClassesAPIs) - // Compute new API mappings. - def apisFor[T](m: Map[T, Source], x: Traversable[T]): Map[T, Source] = - (x map { e: T => (e, m.get(e)) } collect { case (t, Some(source)) => (t, source)}).toMap - val stillInternalAPIs = apisFor(apis.internal, srcProd._1s) - val stillExternalAPIs = apisFor(apis.external, stillExternal._2s) - val externalizedAPIs = apisFor(apis.internal, externalized._2s) - val externalizedClassesAPIs = externalizedAPIs flatMap { - case (file: File, source: Source) => getRepresentativeClass(file) map { cls: String => (cls, source) } - } - val newAPIs = APIs(stillInternalAPIs, stillExternalAPIs ++ externalizedClassesAPIs) + // New stamps. + val newStamps = Stamps( + stamps.products.filterKeys(srcProd._2s.contains), + stamps.sources.filterKeys({ discriminator(_) == k }), + stamps.binaries.filterKeys(binaryDep._2s.contains), + stamps.classNames.filterKeys(binaryDep._2s.contains)) - // New stamps. - val newStamps = Stamps( - stamps.products.filterKeys(srcProd._2s.contains), - stamps.sources.filterKeys({ discriminator(_) == k }), - stamps.binaries.filterKeys(binaryDep._2s.contains), - stamps.classNames.filterKeys(binaryDep._2s.contains)) + // New infos. + val newSourceInfos = SourceInfos.make(kSourceInfos.getOrElse(k, Map.empty)) - // New infos. 
- val newSourceInfos = SourceInfos.make(kSourceInfos.getOrElse(k, Map.empty)) + (k, new MAnalysis(newStamps, newAPIs, newRelations, newSourceInfos, compilations)) + }).toMap + } - (k, new MAnalysis(newStamps, newAPIs, newRelations, newSourceInfos, compilations)) - }).toMap - } + override def equals(other: Any) = other match { + // Note: Equality doesn't consider source infos or compilations. + case o: MAnalysis => stamps == o.stamps && apis == o.apis && relations == o.relations + case _ => false + } - override def equals(other: Any) = other match { - // Note: Equality doesn't consider source infos or compilations. - case o: MAnalysis => stamps == o.stamps && apis == o.apis && relations == o.relations - case _ => false - } - - override lazy val hashCode = (stamps :: apis :: relations :: Nil).hashCode + override lazy val hashCode = (stamps :: apis :: relations :: Nil).hashCode } diff --git a/compile/inc/src/main/scala/sbt/inc/AnalysisStore.scala b/compile/inc/src/main/scala/sbt/inc/AnalysisStore.scala index f8dfe6f6b..b505e0ee3 100644 --- a/compile/inc/src/main/scala/sbt/inc/AnalysisStore.scala +++ b/compile/inc/src/main/scala/sbt/inc/AnalysisStore.scala @@ -4,30 +4,27 @@ package sbt package inc -trait AnalysisStore -{ - def set(analysis: Analysis, setup: CompileSetup): Unit - def get(): Option[(Analysis, CompileSetup)] +trait AnalysisStore { + def set(analysis: Analysis, setup: CompileSetup): Unit + def get(): Option[(Analysis, CompileSetup)] } -object AnalysisStore -{ - def cached(backing: AnalysisStore): AnalysisStore = new AnalysisStore { - private var last: Option[(Analysis, CompileSetup)] = None - def set(analysis: Analysis, setup: CompileSetup) - { - backing.set(analysis, setup) - last = Some( (analysis, setup) ) - } - def get(): Option[(Analysis, CompileSetup)] = - { - if(last.isEmpty) - last = backing.get() - last - } - } - def sync(backing: AnalysisStore): AnalysisStore = new AnalysisStore { - def set(analysis: Analysis, setup: CompileSetup): Unit = 
synchronized { backing.set(analysis, setup) } - def get(): Option[(Analysis, CompileSetup)] = synchronized { backing.get() } - } +object AnalysisStore { + def cached(backing: AnalysisStore): AnalysisStore = new AnalysisStore { + private var last: Option[(Analysis, CompileSetup)] = None + def set(analysis: Analysis, setup: CompileSetup) { + backing.set(analysis, setup) + last = Some((analysis, setup)) + } + def get(): Option[(Analysis, CompileSetup)] = + { + if (last.isEmpty) + last = backing.get() + last + } + } + def sync(backing: AnalysisStore): AnalysisStore = new AnalysisStore { + def set(analysis: Analysis, setup: CompileSetup): Unit = synchronized { backing.set(analysis, setup) } + def get(): Option[(Analysis, CompileSetup)] = synchronized { backing.get() } + } } \ No newline at end of file diff --git a/compile/inc/src/main/scala/sbt/inc/Changes.scala b/compile/inc/src/main/scala/sbt/inc/Changes.scala index 94bb1ec18..8e21f0614 100644 --- a/compile/inc/src/main/scala/sbt/inc/Changes.scala +++ b/compile/inc/src/main/scala/sbt/inc/Changes.scala @@ -6,14 +6,13 @@ package inc import xsbt.api.NameChanges import java.io.File -import xsbti.api.{_internalOnly_NameHashes => NameHashes} -import xsbti.api.{_internalOnly_NameHash => NameHash} +import xsbti.api.{ _internalOnly_NameHashes => NameHashes } +import xsbti.api.{ _internalOnly_NameHash => NameHash } final case class InitialChanges(internalSrc: Changes[File], removedProducts: Set[File], binaryDeps: Set[File], external: APIChanges[String]) -final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) -{ - override def toString = "API Changes: " + apiChanges - def allModified: Iterable[T] = apiChanges.map(_.modified) +final class APIChanges[T](val apiChanges: Iterable[APIChange[T]]) { + override def toString = "API Changes: " + apiChanges + def allModified: Iterable[T] = apiChanges.map(_.modified) } sealed abstract class APIChange[T](val modified: T) @@ -40,28 +39,26 @@ final case class 
NamesChange[T](modified0: T, modifiedNames: ModifiedNames) exte * due to difficulty of reasoning about the implicit scope. */ final case class ModifiedNames(regularNames: Set[String], implicitNames: Set[String]) { - override def toString: String = - s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})" + override def toString: String = + s"ModifiedNames(regularNames = ${regularNames mkString ", "}, implicitNames = ${implicitNames mkString ", "})" } object ModifiedNames { - def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = { - val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet) - val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet) - ModifiedNames(modifiedRegularNames, modifiedImplicitNames) - } - private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = { - val differentNameHashes = (xs union ys) diff (xs intersect ys) - differentNameHashes.map(_.name) - } + def compareTwoNameHashes(a: NameHashes, b: NameHashes): ModifiedNames = { + val modifiedRegularNames = calculateModifiedNames(a.regularMembers.toSet, b.regularMembers.toSet) + val modifiedImplicitNames = calculateModifiedNames(a.implicitMembers.toSet, b.implicitMembers.toSet) + ModifiedNames(modifiedRegularNames, modifiedImplicitNames) + } + private def calculateModifiedNames(xs: Set[NameHash], ys: Set[NameHash]): Set[String] = { + val differentNameHashes = (xs union ys) diff (xs intersect ys) + differentNameHashes.map(_.name) + } } - -trait Changes[A] -{ - def added: Set[A] - def removed: Set[A] - def changed: Set[A] - def unmodified: Set[A] +trait Changes[A] { + def added: Set[A] + def removed: Set[A] + def changed: Set[A] + def unmodified: Set[A] } sealed abstract class Change(val file: File) diff --git a/compile/inc/src/main/scala/sbt/inc/ClassfileManager.scala 
b/compile/inc/src/main/scala/sbt/inc/ClassfileManager.scala index e4a449a4e..6d94295e3 100644 --- a/compile/inc/src/main/scala/sbt/inc/ClassfileManager.scala +++ b/compile/inc/src/main/scala/sbt/inc/ClassfileManager.scala @@ -1,81 +1,79 @@ package sbt.inc - import sbt.IO - import java.io.File - import collection.mutable +import sbt.IO +import java.io.File +import collection.mutable -/** During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files. -* A ClassfileManager can be used only once.*/ -trait ClassfileManager -{ - /** Called once per compilation step with the class files to delete prior to that step's compilation. - * The files in `classes` must not exist if this method returns normally. - * Any empty ancestor directories of deleted files must not exist either.*/ - def delete(classes: Iterable[File]): Unit +/** + * During an incremental compilation run, a ClassfileManager deletes class files and is notified of generated class files. + * A ClassfileManager can be used only once. + */ +trait ClassfileManager { + /** + * Called once per compilation step with the class files to delete prior to that step's compilation. + * The files in `classes` must not exist if this method returns normally. + * Any empty ancestor directories of deleted files must not exist either. 
+ */ + def delete(classes: Iterable[File]): Unit - /** Called once per compilation step with the class files generated during that step.*/ - def generated(classes: Iterable[File]): Unit + /** Called once per compilation step with the class files generated during that step.*/ + def generated(classes: Iterable[File]): Unit - /** Called once at the end of the whole compilation run, with `success` indicating whether compilation succeeded (true) or not (false).*/ - def complete(success: Boolean): Unit + /** Called once at the end of the whole compilation run, with `success` indicating whether compilation succeeded (true) or not (false).*/ + def complete(success: Boolean): Unit } -object ClassfileManager -{ - /** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. */ - val deleteImmediately: () => ClassfileManager = () => new ClassfileManager - { - def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes) - def generated(classes: Iterable[File]) {} - def complete(success: Boolean) {} - } - @deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5") - def transactional(tempDir0: File): () => ClassfileManager = - transactional(tempDir0, sbt.Logger.Null) - /** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/ - def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager - { - val tempDir = tempDir0.getCanonicalFile - IO.delete(tempDir) - IO.createDirectory(tempDir) - logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir") +object ClassfileManager { + /** Constructs a minimal ClassfileManager implementation that immediately deletes class files when requested. 
*/ + val deleteImmediately: () => ClassfileManager = () => new ClassfileManager { + def delete(classes: Iterable[File]): Unit = IO.deleteFilesEmptyDirs(classes) + def generated(classes: Iterable[File]) {} + def complete(success: Boolean) {} + } + @deprecated("Use overloaded variant that takes additional logger argument, instead.", "0.13.5") + def transactional(tempDir0: File): () => ClassfileManager = + transactional(tempDir0, sbt.Logger.Null) + /** When compilation fails, this ClassfileManager restores class files to the way they were before compilation.*/ + def transactional(tempDir0: File, logger: sbt.Logger): () => ClassfileManager = () => new ClassfileManager { + val tempDir = tempDir0.getCanonicalFile + IO.delete(tempDir) + IO.createDirectory(tempDir) + logger.debug(s"Created transactional ClassfileManager with tempDir = $tempDir") - private[this] val generatedClasses = new mutable.HashSet[File] - private[this] val movedClasses = new mutable.HashMap[File, File] + private[this] val generatedClasses = new mutable.HashSet[File] + private[this] val movedClasses = new mutable.HashMap[File, File] - private def showFiles(files: Iterable[File]): String = files.map(f => s"\t$f").mkString("\n") - def delete(classes: Iterable[File]) - { - logger.debug(s"About to delete class files:\n${showFiles(classes)}") - val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c)) - logger.debug(s"We backup classs files:\n${showFiles(toBeBackedUp)}") - for(c <- toBeBackedUp) { - movedClasses.put(c, move(c)) - } - IO.deleteFilesEmptyDirs(classes) - } - def generated(classes: Iterable[File]): Unit = { - logger.debug(s"Registering generated classes:\n${showFiles(classes)}") - generatedClasses ++= classes - } - def complete(success: Boolean) - { - if(!success) { - logger.debug("Rolling back changes to class files.") - logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}") - IO.deleteFilesEmptyDirs(generatedClasses) - 
logger.debug(s"Restoring class files: \n${showFiles(movedClasses.map(_._1))}") - for( (orig, tmp) <- movedClasses ) IO.move(tmp, orig) - } - logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir") - IO.delete(tempDir) - } + private def showFiles(files: Iterable[File]): String = files.map(f => s"\t$f").mkString("\n") + def delete(classes: Iterable[File]) { + logger.debug(s"About to delete class files:\n${showFiles(classes)}") + val toBeBackedUp = classes.filter(c => c.exists && !movedClasses.contains(c) && !generatedClasses(c)) + logger.debug(s"We backup classs files:\n${showFiles(toBeBackedUp)}") + for (c <- toBeBackedUp) { + movedClasses.put(c, move(c)) + } + IO.deleteFilesEmptyDirs(classes) + } + def generated(classes: Iterable[File]): Unit = { + logger.debug(s"Registering generated classes:\n${showFiles(classes)}") + generatedClasses ++= classes + } + def complete(success: Boolean) { + if (!success) { + logger.debug("Rolling back changes to class files.") + logger.debug(s"Removing generated classes:\n${showFiles(generatedClasses)}") + IO.deleteFilesEmptyDirs(generatedClasses) + logger.debug(s"Restoring class files: \n${showFiles(movedClasses.map(_._1))}") + for ((orig, tmp) <- movedClasses) IO.move(tmp, orig) + } + logger.debug(s"Removing the temporary directory used for backing up class files: $tempDir") + IO.delete(tempDir) + } - def move(c: File): File = - { - val target = File.createTempFile("sbt", ".class", tempDir) - IO.move(c, target) - target - } - } + def move(c: File): File = + { + val target = File.createTempFile("sbt", ".class", tempDir) + IO.move(c, target) + target + } + } } \ No newline at end of file diff --git a/compile/inc/src/main/scala/sbt/inc/Compilations.scala b/compile/inc/src/main/scala/sbt/inc/Compilations.scala index 038984429..2f6c2f003 100644 --- a/compile/inc/src/main/scala/sbt/inc/Compilations.scala +++ b/compile/inc/src/main/scala/sbt/inc/Compilations.scala @@ -12,7 +12,7 @@ trait Compilations { 
object Compilations { val empty: Compilations = new MCompilations(Seq.empty) def make(s: Seq[Compilation]): Compilations = new MCompilations(s) - def merge(s: Traversable[Compilations]): Compilations = make((s flatMap { _.allCompilations }).toSeq.distinct) + def merge(s: Traversable[Compilations]): Compilations = make((s flatMap { _.allCompilations }).toSeq.distinct) } private final class MCompilations(val allCompilations: Seq[Compilation]) extends Compilations { diff --git a/compile/inc/src/main/scala/sbt/inc/Compile.scala b/compile/inc/src/main/scala/sbt/inc/Compile.scala index 95d9c31b1..f9b43edfa 100644 --- a/compile/inc/src/main/scala/sbt/inc/Compile.scala +++ b/compile/inc/src/main/scala/sbt/inc/Compile.scala @@ -4,194 +4,192 @@ package sbt package inc -import xsbti.api.{Source, SourceAPI, Compilation, OutputSetting, _internalOnly_NameHashes} -import xsbti.compile.{DependencyChanges, Output, SingleOutput, MultipleOutput} -import xsbti.{Position,Problem,Severity} -import Logger.{m2o, problem} +import xsbti.api.{ Source, SourceAPI, Compilation, OutputSetting, _internalOnly_NameHashes } +import xsbti.compile.{ DependencyChanges, Output, SingleOutput, MultipleOutput } +import xsbti.{ Position, Problem, Severity } +import Logger.{ m2o, problem } import java.io.File import xsbti.api.Definition -object IncrementalCompile -{ - def apply(sources: Set[File], entry: String => Option[File], - compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, - previous: Analysis, - forEntry: File => Option[Analysis], - output: Output, log: Logger, - options: IncOptions): (Boolean, Analysis) = - { - val current = Stamps.initial(Stamp.lastModified, Stamp.hash, Stamp.lastModified) - val internalMap = (f: File) => previous.relations.produced(f).headOption - val externalAPI = getExternalAPI(entry, forEntry) - try { - Incremental.compile(sources, entry, previous, current, forEntry, doCompile(compile, internalMap, externalAPI, current, output, options), log, options) - } 
catch { - case e: xsbti.CompileCancelled => - log.info("Compilation has been cancelled") - // in case compilation got cancelled potential partial compilation results (e.g. produced classs files) got rolled back - // and we can report back as there was no change (false) and return a previous Analysis which is still up-to-date - (false, previous) - } - } - def doCompile(compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) = - (srcs: Set[File], changes: DependencyChanges) => { - val callback = new AnalysisCallback(internalMap, externalAPI, current, output, options) - compile(srcs, changes, callback) - callback.get - } - def getExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): (File, String) => Option[Source] = - (file: File,className: String) => - entry(className) flatMap { defines => - if(file != Locate.resolve(defines, className) ) - None - else - forEntry(defines) flatMap { analysis => - analysis.relations.definesClass(className).headOption flatMap { src => - analysis.apis.internal get src - } - } - } +object IncrementalCompile { + def apply(sources: Set[File], entry: String => Option[File], + compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, + previous: Analysis, + forEntry: File => Option[Analysis], + output: Output, log: Logger, + options: IncOptions): (Boolean, Analysis) = + { + val current = Stamps.initial(Stamp.lastModified, Stamp.hash, Stamp.lastModified) + val internalMap = (f: File) => previous.relations.produced(f).headOption + val externalAPI = getExternalAPI(entry, forEntry) + try { + Incremental.compile(sources, entry, previous, current, forEntry, doCompile(compile, internalMap, externalAPI, current, output, options), log, options) + } catch { + case e: xsbti.CompileCancelled => + log.info("Compilation has been cancelled") + // in case 
compilation got cancelled potential partial compilation results (e.g. produced classs files) got rolled back + // and we can report back as there was no change (false) and return a previous Analysis which is still up-to-date + (false, previous) + } + } + def doCompile(compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit, internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) = + (srcs: Set[File], changes: DependencyChanges) => { + val callback = new AnalysisCallback(internalMap, externalAPI, current, output, options) + compile(srcs, changes, callback) + callback.get + } + def getExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): (File, String) => Option[Source] = + (file: File, className: String) => + entry(className) flatMap { defines => + if (file != Locate.resolve(defines, className)) + None + else + forEntry(defines) flatMap { analysis => + analysis.relations.definesClass(className).headOption flatMap { src => + analysis.apis.internal get src + } + } + } } -private final class AnalysisCallback(internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) extends xsbti.AnalysisCallback -{ - val compilation = { - val outputSettings = output match { - case single: SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath)) - case multi: MultipleOutput => - multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)).toArray - } - new Compilation(System.currentTimeMillis, outputSettings) - } +private final class AnalysisCallback(internalMap: File => Option[File], externalAPI: (File, String) => Option[Source], current: ReadStamps, output: Output, options: IncOptions) extends xsbti.AnalysisCallback { + val compilation = { + val outputSettings = output match { + case single: 
SingleOutput => Array(new OutputSetting("/", single.outputDirectory.getAbsolutePath)) + case multi: MultipleOutput => + multi.outputGroups.map(out => new OutputSetting(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath)).toArray + } + new Compilation(System.currentTimeMillis, outputSettings) + } - override def toString = ( List("APIs", "Binary deps", "Products", "Source deps") zip List(apis, binaryDeps, classes, sourceDeps)).map { case (label, map) => label + "\n\t" + map.mkString("\n\t") }.mkString("\n") + override def toString = (List("APIs", "Binary deps", "Products", "Source deps") zip List(apis, binaryDeps, classes, sourceDeps)).map { case (label, map) => label + "\n\t" + map.mkString("\n\t") }.mkString("\n") - import collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set} + import collection.mutable.{ HashMap, HashSet, ListBuffer, Map, Set } - private[this] val apis = new HashMap[File, (Int, SourceAPI)] - private[this] val usedNames = new HashMap[File, Set[String]] - private[this] val publicNameHashes = new HashMap[File, _internalOnly_NameHashes] - private[this] val unreporteds = new HashMap[File, ListBuffer[Problem]] - private[this] val reporteds = new HashMap[File, ListBuffer[Problem]] - private[this] val binaryDeps = new HashMap[File, Set[File]] - // source file to set of generated (class file, class name) - private[this] val classes = new HashMap[File, Set[(File, String)]] - // generated class file to its source file - private[this] val classToSource = new HashMap[File, File] - // all internal source depenencies, including direct and inherited - private[this] val sourceDeps = new HashMap[File, Set[File]] - // inherited internal source dependencies - private[this] val inheritedSourceDeps = new HashMap[File, Set[File]] - // external source dependencies: - // (internal source, external source depended on, API of external dependency, true if an inheritance dependency) - private[this] val extSrcDeps = new ListBuffer[(File, String, 
Source, Boolean)] - private[this] val binaryClassName = new HashMap[File, String] - // source files containing a macro def. - private[this] val macroSources = Set[File]() + private[this] val apis = new HashMap[File, (Int, SourceAPI)] + private[this] val usedNames = new HashMap[File, Set[String]] + private[this] val publicNameHashes = new HashMap[File, _internalOnly_NameHashes] + private[this] val unreporteds = new HashMap[File, ListBuffer[Problem]] + private[this] val reporteds = new HashMap[File, ListBuffer[Problem]] + private[this] val binaryDeps = new HashMap[File, Set[File]] + // source file to set of generated (class file, class name) + private[this] val classes = new HashMap[File, Set[(File, String)]] + // generated class file to its source file + private[this] val classToSource = new HashMap[File, File] + // all internal source depenencies, including direct and inherited + private[this] val sourceDeps = new HashMap[File, Set[File]] + // inherited internal source dependencies + private[this] val inheritedSourceDeps = new HashMap[File, Set[File]] + // external source dependencies: + // (internal source, external source depended on, API of external dependency, true if an inheritance dependency) + private[this] val extSrcDeps = new ListBuffer[(File, String, Source, Boolean)] + private[this] val binaryClassName = new HashMap[File, String] + // source files containing a macro def. 
+ private[this] val macroSources = Set[File]() - private def add[A,B](map: Map[A,Set[B]], a: A, b: B): Unit = - map.getOrElseUpdate(a, new HashSet[B]) += b + private def add[A, B](map: Map[A, Set[B]], a: A, b: B): Unit = + map.getOrElseUpdate(a, new HashSet[B]) += b - def problem(category: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit = - { - for(source <- m2o(pos.sourceFile)) { - val map = if(reported) reporteds else unreporteds - map.getOrElseUpdate(source, ListBuffer.empty) += Logger.problem(category, pos, msg, severity) - } - } + def problem(category: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit = + { + for (source <- m2o(pos.sourceFile)) { + val map = if (reported) reporteds else unreporteds + map.getOrElseUpdate(source, ListBuffer.empty) += Logger.problem(category, pos, msg, severity) + } + } - def sourceDependency(dependsOn: File, source: File, inherited: Boolean) = - { - add(sourceDeps, source, dependsOn) - if(inherited) add(inheritedSourceDeps, source, dependsOn) - } - def externalBinaryDependency(binary: File, className: String, source: File, inherited: Boolean) - { - binaryClassName.put(binary, className) - add(binaryDeps, source, binary) - } - def externalSourceDependency(t4: (File, String, Source, Boolean)) = extSrcDeps += t4 + def sourceDependency(dependsOn: File, source: File, inherited: Boolean) = + { + add(sourceDeps, source, dependsOn) + if (inherited) add(inheritedSourceDeps, source, dependsOn) + } + def externalBinaryDependency(binary: File, className: String, source: File, inherited: Boolean) { + binaryClassName.put(binary, className) + add(binaryDeps, source, binary) + } + def externalSourceDependency(t4: (File, String, Source, Boolean)) = extSrcDeps += t4 - def binaryDependency(classFile: File, name: String, source: File, inherited: Boolean) = - internalMap(classFile) match - { - case Some(dependsOn) => - // dependency is a product of a source not included in this 
compilation - sourceDependency(dependsOn, source, inherited) - case None => - classToSource.get(classFile) match - { - case Some(dependsOn) => - // dependency is a product of a source in this compilation step, - // but not in the same compiler run (as in javac v. scalac) - sourceDependency(dependsOn, source, inherited) - case None => - externalDependency(classFile, name, source, inherited) - } - } + def binaryDependency(classFile: File, name: String, source: File, inherited: Boolean) = + internalMap(classFile) match { + case Some(dependsOn) => + // dependency is a product of a source not included in this compilation + sourceDependency(dependsOn, source, inherited) + case None => + classToSource.get(classFile) match { + case Some(dependsOn) => + // dependency is a product of a source in this compilation step, + // but not in the same compiler run (as in javac v. scalac) + sourceDependency(dependsOn, source, inherited) + case None => + externalDependency(classFile, name, source, inherited) + } + } - private[this] def externalDependency(classFile: File, name: String, source: File, inherited: Boolean): Unit = - externalAPI(classFile, name) match - { - case Some(api) => - // dependency is a product of a source in another project - externalSourceDependency( (source, name, api, inherited) ) - case None => - // dependency is some other binary on the classpath - externalBinaryDependency(classFile, name, source, inherited) - } + private[this] def externalDependency(classFile: File, name: String, source: File, inherited: Boolean): Unit = + externalAPI(classFile, name) match { + case Some(api) => + // dependency is a product of a source in another project + externalSourceDependency((source, name, api, inherited)) + case None => + // dependency is some other binary on the classpath + externalBinaryDependency(classFile, name, source, inherited) + } - def generatedClass(source: File, module: File, name: String) = - { - add(classes, source, (module, name)) - 
classToSource.put(module, source) - } + def generatedClass(source: File, module: File, name: String) = + { + add(classes, source, (module, name)) + classToSource.put(module, source) + } - // empty value used when name hashing algorithm is disabled - private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) + // empty value used when name hashing algorithm is disabled + private val emptyNameHashes = new xsbti.api._internalOnly_NameHashes(Array.empty, Array.empty) - def api(sourceFile: File, source: SourceAPI) { - import xsbt.api.{APIUtil, HashAPI} - if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile - publicNameHashes(sourceFile) = { - if (nameHashing) - (new xsbt.api.NameHashing).nameHashes(source) - else - emptyNameHashes - } - val shouldMinimize = !Incremental.apiDebug(options) - val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source - apis(sourceFile) = (HashAPI(source), savedSource) - } + def api(sourceFile: File, source: SourceAPI) { + import xsbt.api.{ APIUtil, HashAPI } + if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(source)) macroSources += sourceFile + publicNameHashes(sourceFile) = { + if (nameHashing) + (new xsbt.api.NameHashing).nameHashes(source) + else + emptyNameHashes + } + val shouldMinimize = !Incremental.apiDebug(options) + val savedSource = if (shouldMinimize) APIUtil.minimize(source) else source + apis(sourceFile) = (HashAPI(source), savedSource) + } - def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name) + def usedName(sourceFile: File, name: String) = add(usedNames, sourceFile, name) - def nameHashing: Boolean = options.nameHashing + def nameHashing: Boolean = options.nameHashing - def get: Analysis = addUsedNames( addCompilation( addExternals( addBinaries( addProducts( addSources(Analysis.empty(nameHashing = nameHashing)) ) ) ) ) ) - def addProducts(base: Analysis): Analysis = 
addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name ) } - def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)( (a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin) ) - def addSources(base: Analysis): Analysis = - (base /: apis) { case (a, (src, api) ) => - val stamp = current.internalSource(src) - val hash = stamp match { case h: Hash => h.value; case _ => new Array[Byte](0) } - // TODO store this in Relations, rather than Source. - val hasMacro: Boolean = macroSources.contains(src) - val s = new xsbti.api.Source(compilation, hash, api._2, api._1, publicNameHashes(src), hasMacro) - val info = SourceInfos.makeInfo(getOrNil(reporteds, src), getOrNil(unreporteds, src)) - val direct = sourceDeps.getOrElse(src, Nil: Iterable[File]) - val publicInherited = inheritedSourceDeps.getOrElse(src, Nil: Iterable[File]) - a.addSource(src, s, stamp, direct, publicInherited, info) - } - def getOrNil[A,B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten - def addExternals(base: Analysis): Analysis = (base /: extSrcDeps) { case (a, (source, name, api, inherited)) => a.addExternalDep(source, name, api, inherited) } - def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation)) - def addUsedNames(base: Analysis): Analysis = (base /: usedNames) { case (a, (src, names)) => - (a /: names) { case (a, name) => a.copy(relations = a.relations.addUsedName(src, name)) } - } + def get: Analysis = addUsedNames(addCompilation(addExternals(addBinaries(addProducts(addSources(Analysis.empty(nameHashing = nameHashing))))))) + def addProducts(base: Analysis): Analysis = addAll(base, classes) { case (a, src, (prod, name)) => a.addProduct(src, prod, current product prod, name) } + def addBinaries(base: Analysis): Analysis = addAll(base, binaryDeps)((a, src, bin) => a.addBinaryDep(src, bin, binaryClassName(bin), current binary bin)) + def 
addSources(base: Analysis): Analysis = + (base /: apis) { + case (a, (src, api)) => + val stamp = current.internalSource(src) + val hash = stamp match { case h: Hash => h.value; case _ => new Array[Byte](0) } + // TODO store this in Relations, rather than Source. + val hasMacro: Boolean = macroSources.contains(src) + val s = new xsbti.api.Source(compilation, hash, api._2, api._1, publicNameHashes(src), hasMacro) + val info = SourceInfos.makeInfo(getOrNil(reporteds, src), getOrNil(unreporteds, src)) + val direct = sourceDeps.getOrElse(src, Nil: Iterable[File]) + val publicInherited = inheritedSourceDeps.getOrElse(src, Nil: Iterable[File]) + a.addSource(src, s, stamp, direct, publicInherited, info) + } + def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten + def addExternals(base: Analysis): Analysis = (base /: extSrcDeps) { case (a, (source, name, api, inherited)) => a.addExternalDep(source, name, api, inherited) } + def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation)) + def addUsedNames(base: Analysis): Analysis = (base /: usedNames) { + case (a, (src, names)) => + (a /: names) { case (a, name) => a.copy(relations = a.relations.addUsedName(src, name)) } + } - def addAll[A,B](base: Analysis, m: Map[A, Set[B]])( f: (Analysis, A, B) => Analysis): Analysis = - (base /: m) { case (outer, (a, bs)) => - (outer /: bs) { (inner, b) => - f(inner, a, b) - } } + def addAll[A, B](base: Analysis, m: Map[A, Set[B]])(f: (Analysis, A, B) => Analysis): Analysis = + (base /: m) { + case (outer, (a, bs)) => + (outer /: bs) { (inner, b) => + f(inner, a, b) + } + } } diff --git a/compile/inc/src/main/scala/sbt/inc/FileValueCache.scala b/compile/inc/src/main/scala/sbt/inc/FileValueCache.scala index 36742747e..bd236bf9d 100644 --- a/compile/inc/src/main/scala/sbt/inc/FileValueCache.scala +++ b/compile/inc/src/main/scala/sbt/inc/FileValueCache.scala @@ -1,49 +1,43 @@ package sbt package inc - import 
java.io.File - import java.util.concurrent.ConcurrentHashMap +import java.io.File +import java.util.concurrent.ConcurrentHashMap -sealed trait FileValueCache[T] -{ - def clear(): Unit - def get: File => T +sealed trait FileValueCache[T] { + def clear(): Unit + def get: File => T } -private[this] final class FileValueCache0[T](getStamp: File => Stamp, make: File => T)(implicit equiv: Equiv[Stamp]) extends FileValueCache[T] -{ - private[this] val backing = new ConcurrentHashMap[File, FileCache] +private[this] final class FileValueCache0[T](getStamp: File => Stamp, make: File => T)(implicit equiv: Equiv[Stamp]) extends FileValueCache[T] { + private[this] val backing = new ConcurrentHashMap[File, FileCache] - def clear(): Unit = backing.clear() - def get = file => { - val ifAbsent = new FileCache(file) - val cache = backing.putIfAbsent(file, ifAbsent) - (if(cache eq null) ifAbsent else cache).get() - } + def clear(): Unit = backing.clear() + def get = file => { + val ifAbsent = new FileCache(file) + val cache = backing.putIfAbsent(file, ifAbsent) + (if (cache eq null) ifAbsent else cache).get() + } - private[this] final class FileCache(file: File) - { - private[this] var stampedValue: Option[(Stamp,T)] = None - def get(): T = synchronized - { - val latest = getStamp(file) - stampedValue match - { - case Some( (stamp, value) ) if(equiv.equiv(latest, stamp)) => value - case _ => update(latest) - } - } + private[this] final class FileCache(file: File) { + private[this] var stampedValue: Option[(Stamp, T)] = None + def get(): T = synchronized { + val latest = getStamp(file) + stampedValue match { + case Some((stamp, value)) if (equiv.equiv(latest, stamp)) => value + case _ => update(latest) + } + } - private[this] def update(stamp: Stamp): T = - { - val value = make(file) - stampedValue = Some((stamp, value)) - value - } - } + private[this] def update(stamp: Stamp): T = + { + val value = make(file) + stampedValue = Some((stamp, value)) + value + } + } } -object 
FileValueCache -{ - def apply[T](f: File => T): FileValueCache[T] = make(Stamp.lastModified)(f) - def make[T](stamp: File => Stamp)(f: File => T): FileValueCache[T] = new FileValueCache0[T](stamp, f) +object FileValueCache { + def apply[T](f: File => T): FileValueCache[T] = make(Stamp.lastModified)(f) + def make[T](stamp: File => Stamp)(f: File => T): FileValueCache[T] = new FileValueCache0[T](stamp, f) } \ No newline at end of file diff --git a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala index 70add5183..e2a2cffb2 100644 --- a/compile/inc/src/main/scala/sbt/inc/IncOptions.scala +++ b/compile/inc/src/main/scala/sbt/inc/IncOptions.scala @@ -1,6 +1,6 @@ package sbt.inc - import java.io.File +import java.io.File /** * Represents all configuration options for the incremental compiler itself and @@ -12,295 +12,294 @@ package sbt.inc * defined explicitly. */ final class IncOptions( - /** After which step include whole transitive closure of invalidated source files. */ - val transitiveStep: Int, - /** - * What's the fraction of invalidated source files when we switch to recompiling - * all files and giving up incremental compilation altogether. That's useful in - * cases when probability that we end up recompiling most of source files but - * in multiple steps is high. Multi-step incremental recompilation is slower - * than recompiling everything in one step. - */ - val recompileAllFraction: Double, - /** Print very detailed information about relations, such as dependencies between source files. */ - val relationsDebug: Boolean, - /** - * Enable tools for debugging API changes. 
At the moment this option is unused but in the - * future it will enable for example: - * - disabling API hashing and API minimization (potentially very memory consuming) - * - diffing textual API representation which helps understanding what kind of changes - * to APIs are visible to the incremental compiler - */ - val apiDebug: Boolean, - /** - * Controls context size (in lines) displayed when diffs are produced for textual API - * representation. - * - * This option is used only when `apiDebug == true`. - */ - val apiDiffContextSize: Int, - /** - * The directory where we dump textual representation of APIs. This method might be called - * only if apiDebug returns true. This is unused option at the moment as the needed functionality - * is not implemented yet. - */ - val apiDumpDirectory: Option[java.io.File], - /** Creates a new ClassfileManager that will handle class file deletion and addition during a single incremental compilation run. */ - val newClassfileManager: () => ClassfileManager, - /** - * Determines whether incremental compiler should recompile all dependencies of a file - * that contains a macro definition. - */ - val recompileOnMacroDef: Boolean, - /** - * Determines whether incremental compiler uses the new algorithm known as name hashing. - * - * This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0. - * - * IMPLEMENTATION NOTE: - * Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm: - * - * 1. New dependency source tracking is used. See `sbt.inc.Relations` for details. - * 2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well. - * 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details. - * - */ - val nameHashing: Boolean -) extends Product with Serializable { + /** After which step include whole transitive closure of invalidated source files. 
*/ + val transitiveStep: Int, + /** + * What's the fraction of invalidated source files when we switch to recompiling + * all files and giving up incremental compilation altogether. That's useful in + * cases when probability that we end up recompiling most of source files but + * in multiple steps is high. Multi-step incremental recompilation is slower + * than recompiling everything in one step. + */ + val recompileAllFraction: Double, + /** Print very detailed information about relations, such as dependencies between source files. */ + val relationsDebug: Boolean, + /** + * Enable tools for debugging API changes. At the moment this option is unused but in the + * future it will enable for example: + * - disabling API hashing and API minimization (potentially very memory consuming) + * - diffing textual API representation which helps understanding what kind of changes + * to APIs are visible to the incremental compiler + */ + val apiDebug: Boolean, + /** + * Controls context size (in lines) displayed when diffs are produced for textual API + * representation. + * + * This option is used only when `apiDebug == true`. + */ + val apiDiffContextSize: Int, + /** + * The directory where we dump textual representation of APIs. This method might be called + * only if apiDebug returns true. This is unused option at the moment as the needed functionality + * is not implemented yet. + */ + val apiDumpDirectory: Option[java.io.File], + /** Creates a new ClassfileManager that will handle class file deletion and addition during a single incremental compilation run. */ + val newClassfileManager: () => ClassfileManager, + /** + * Determines whether incremental compiler should recompile all dependencies of a file + * that contains a macro definition. + */ + val recompileOnMacroDef: Boolean, + /** + * Determines whether incremental compiler uses the new algorithm known as name hashing. 
+ * + * This flag is disabled by default so incremental compiler's behavior is the same as in sbt 0.13.0. + * + * IMPLEMENTATION NOTE: + * Enabling this flag enables a few additional functionalities that are needed by the name hashing algorithm: + * + * 1. New dependency source tracking is used. See `sbt.inc.Relations` for details. + * 2. Used names extraction and tracking is enabled. See `sbt.inc.Relations` for details as well. + * 3. Hashing of public names is enabled. See `sbt.inc.AnalysisCallback` for details. + * + */ + val nameHashing: Boolean) extends Product with Serializable { - /** - * Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have - * `recompileOnMacroDef` and `nameHashing` fields defined. - */ - def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, - apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = { - this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault) - } + /** + * Secondary constructor introduced to make IncOptions to be binary compatible with version that didn't have + * `recompileOnMacroDef` and `nameHashing` fields defined. 
+ */ + def this(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, + apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], newClassfileManager: () => ClassfileManager) = { + this(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, IncOptions.recompileOnMacroDefDefault, IncOptions.nameHashingDefault) + } - def withTransitiveStep(transitiveStep: Int): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withTransitiveStep(transitiveStep: Int): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withRecompileAllFraction(recompileAllFraction: Double): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withRelationsDebug(relationsDebug: Boolean): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withRelationsDebug(relationsDebug: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withApiDebug(apiDebug: Boolean): IncOptions = { - new 
IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withApiDebug(apiDebug: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withApiDiffContextSize(apiDiffContextSize: Int): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withApiDumpDirectory(apiDumpDirectory: Option[File]): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withNewClassfileManager(newClassfileManager: () => ClassfileManager): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def 
withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withRecompileOnMacroDef(recompileOnMacroDef: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - def withNameHashing(nameHashing: Boolean): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + def withNameHashing(nameHashing: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - //- EXPANDED CASE CLASS METHOD BEGIN -// - @deprecated("Use `with$nameOfTheField` copying methods instead.", "0.13.2") - def copy(transitiveStep: Int = this.transitiveStep, recompileAllFraction: Double = this.recompileAllFraction, - relationsDebug: Boolean = this.relationsDebug, apiDebug: Boolean = this.apiDebug, - apiDiffContextSize: Int = this.apiDiffContextSize, - apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory, - newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } + //- EXPANDED CASE CLASS METHOD BEGIN -// + @deprecated("Use `with$nameOfTheField` copying methods instead.", "0.13.2") + def copy(transitiveStep: Int = this.transitiveStep, recompileAllFraction: Double = this.recompileAllFraction, + relationsDebug: Boolean = this.relationsDebug, apiDebug: Boolean = 
this.apiDebug, + apiDiffContextSize: Int = this.apiDiffContextSize, + apiDumpDirectory: Option[java.io.File] = this.apiDumpDirectory, + newClassfileManager: () => ClassfileManager = this.newClassfileManager): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - override def productPrefix: String = "IncOptions" + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + override def productPrefix: String = "IncOptions" - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def productArity: Int = 9 + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + def productArity: Int = 9 - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def productElement(x$1: Int): Any = x$1 match { - case 0 => IncOptions.this.transitiveStep - case 1 => IncOptions.this.recompileAllFraction - case 2 => IncOptions.this.relationsDebug - case 3 => IncOptions.this.apiDebug - case 4 => IncOptions.this.apiDiffContextSize - case 5 => IncOptions.this.apiDumpDirectory - case 6 => IncOptions.this.newClassfileManager - case 7 => IncOptions.this.recompileOnMacroDef - case 8 => IncOptions.this.nameHashing - case _ => throw new IndexOutOfBoundsException(x$1.toString()) - } + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + def productElement(x$1: Int): Any = x$1 match { + case 0 => IncOptions.this.transitiveStep + case 1 => IncOptions.this.recompileAllFraction + case 2 => IncOptions.this.relationsDebug + case 3 => IncOptions.this.apiDebug + case 4 => IncOptions.this.apiDiffContextSize + case 5 => IncOptions.this.apiDumpDirectory + case 6 => IncOptions.this.newClassfileManager + case 7 
=> IncOptions.this.recompileOnMacroDef + case 8 => IncOptions.this.nameHashing + case _ => throw new IndexOutOfBoundsException(x$1.toString()) + } - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](IncOptions.this) + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + override def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](IncOptions.this) - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def canEqual(x$1: Any): Boolean = x$1.isInstanceOf[IncOptions] + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + def canEqual(x$1: Any): Boolean = x$1.isInstanceOf[IncOptions] - override def hashCode(): Int = { - import scala.runtime.Statics - var acc: Int = -889275714 - acc = Statics.mix(acc, transitiveStep) - acc = Statics.mix(acc, Statics.doubleHash(recompileAllFraction)) - acc = Statics.mix(acc, if (relationsDebug) 1231 else 1237) - acc = Statics.mix(acc, if (apiDebug) 1231 else 1237) - acc = Statics.mix(acc, apiDiffContextSize) - acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory)) - acc = Statics.mix(acc, Statics.anyHash(newClassfileManager)) - acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237) - acc = Statics.mix(acc, if (nameHashing) 1231 else 1237) - Statics.finalizeHash(acc, 9) - } + override def hashCode(): Int = { + import scala.runtime.Statics + var acc: Int = -889275714 + acc = Statics.mix(acc, transitiveStep) + acc = Statics.mix(acc, Statics.doubleHash(recompileAllFraction)) + acc = Statics.mix(acc, if (relationsDebug) 1231 else 1237) + acc = Statics.mix(acc, if (apiDebug) 1231 else 1237) + acc = Statics.mix(acc, apiDiffContextSize) + acc = Statics.mix(acc, Statics.anyHash(apiDumpDirectory)) + acc = Statics.mix(acc, 
Statics.anyHash(newClassfileManager)) + acc = Statics.mix(acc, if (recompileOnMacroDef) 1231 else 1237) + acc = Statics.mix(acc, if (nameHashing) 1231 else 1237) + Statics.finalizeHash(acc, 9) + } - override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this) + override def toString(): String = scala.runtime.ScalaRunTime._toString(IncOptions.this) - override def equals(x$1: Any): Boolean = { - this.eq(x$1.asInstanceOf[Object]) || (x$1.isInstanceOf[IncOptions] && ({ - val IncOptions$1: IncOptions = x$1.asInstanceOf[IncOptions] - transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction && - relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug && + override def equals(x$1: Any): Boolean = { + this.eq(x$1.asInstanceOf[Object]) || (x$1.isInstanceOf[IncOptions] && ({ + val IncOptions$1: IncOptions = x$1.asInstanceOf[IncOptions] + transitiveStep == IncOptions$1.transitiveStep && recompileAllFraction == IncOptions$1.recompileAllFraction && + relationsDebug == IncOptions$1.relationsDebug && apiDebug == IncOptions$1.apiDebug && apiDiffContextSize == IncOptions$1.apiDiffContextSize && apiDumpDirectory == IncOptions$1.apiDumpDirectory && newClassfileManager == IncOptions$1.newClassfileManager && recompileOnMacroDef == IncOptions$1.recompileOnMacroDef && nameHashing == IncOptions$1.nameHashing - })) - } - //- EXPANDED CASE CLASS METHOD END -// + })) + } + //- EXPANDED CASE CLASS METHOD END -// } object IncOptions extends Serializable { - private val recompileOnMacroDefDefault: Boolean = true - private val nameHashingDefault: Boolean = false - val Default = IncOptions( - // 1. recompile changed sources - // 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2). - // 4. 
further changes invalidate all dependencies transitively to avoid too many steps - transitiveStep = 3, - recompileAllFraction = 0.5, - relationsDebug = false, - apiDebug = false, - apiDiffContextSize = 5, - apiDumpDirectory = None, - newClassfileManager = ClassfileManager.deleteImmediately, - recompileOnMacroDef = recompileOnMacroDefDefault, - nameHashing = nameHashingDefault - ) - //- EXPANDED CASE CLASS METHOD BEGIN -// - final override def toString(): String = "IncOptions" - @deprecated("Use overloaded variant of `apply` with complete list of arguments instead.", "0.13.2") - def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, - apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], - newClassfileManager: () => ClassfileManager): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager) - } - def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, - apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], - newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean, - nameHashing: Boolean): IncOptions = { - new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, - apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) - } - @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") - def unapply(x$0: IncOptions): Option[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = { - if (x$0 == null) None - else Some.apply[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)]( - Tuple7.apply[Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef]( - x$0.transitiveStep, x$0.recompileAllFraction, x$0.relationsDebug, x$0.apiDebug, x$0.apiDiffContextSize, - x$0.apiDumpDirectory, 
x$0.newClassfileManager)) - } - private def readResolve(): Object = IncOptions - //- EXPANDED CASE CLASS METHOD END -// + private val recompileOnMacroDefDefault: Boolean = true + private val nameHashingDefault: Boolean = false + val Default = IncOptions( + // 1. recompile changed sources + // 2(3). recompile direct dependencies and transitive public inheritance dependencies of sources with API changes in 1(2). + // 4. further changes invalidate all dependencies transitively to avoid too many steps + transitiveStep = 3, + recompileAllFraction = 0.5, + relationsDebug = false, + apiDebug = false, + apiDiffContextSize = 5, + apiDumpDirectory = None, + newClassfileManager = ClassfileManager.deleteImmediately, + recompileOnMacroDef = recompileOnMacroDefDefault, + nameHashing = nameHashingDefault + ) + //- EXPANDED CASE CLASS METHOD BEGIN -// + final override def toString(): String = "IncOptions" + @deprecated("Use overloaded variant of `apply` with complete list of arguments instead.", "0.13.2") + def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, + apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], + newClassfileManager: () => ClassfileManager): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager) + } + def apply(transitiveStep: Int, recompileAllFraction: Double, relationsDebug: Boolean, apiDebug: Boolean, + apiDiffContextSize: Int, apiDumpDirectory: Option[java.io.File], + newClassfileManager: () => ClassfileManager, recompileOnMacroDef: Boolean, + nameHashing: Boolean): IncOptions = { + new IncOptions(transitiveStep, recompileAllFraction, relationsDebug, apiDebug, apiDiffContextSize, + apiDumpDirectory, newClassfileManager, recompileOnMacroDef, nameHashing) + } + @deprecated("Methods generated for case class will be removed in the future.", "0.13.2") + def unapply(x$0: IncOptions): Option[(Int, 
Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)] = { + if (x$0 == null) None + else Some.apply[(Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef)]( + Tuple7.apply[Int, Double, Boolean, Boolean, Int, Option[java.io.File], () => AnyRef]( + x$0.transitiveStep, x$0.recompileAllFraction, x$0.relationsDebug, x$0.apiDebug, x$0.apiDiffContextSize, + x$0.apiDumpDirectory, x$0.newClassfileManager)) + } + private def readResolve(): Object = IncOptions + //- EXPANDED CASE CLASS METHOD END -// - @deprecated("Use IncOptions.Default.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5") - def defaultTransactional(tempDir: File): IncOptions = - setTransactional(Default, tempDir) - @deprecated("Use opts.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5") - def setTransactional(opts: IncOptions, tempDir: File): IncOptions = - opts.withNewClassfileManager(ClassfileManager.transactional(tempDir, sbt.Logger.Null)) + @deprecated("Use IncOptions.Default.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5") + def defaultTransactional(tempDir: File): IncOptions = + setTransactional(Default, tempDir) + @deprecated("Use opts.withNewClassfileManager(ClassfileManager.transactional(tempDir)), instead.", "0.13.5") + def setTransactional(opts: IncOptions, tempDir: File): IncOptions = + opts.withNewClassfileManager(ClassfileManager.transactional(tempDir, sbt.Logger.Null)) - private val transitiveStepKey = "transitiveStep" - private val recompileAllFractionKey = "recompileAllFraction" - private val relationsDebugKey = "relationsDebug" - private val apiDebugKey = "apiDebug" - private val apiDumpDirectoryKey = "apiDumpDirectory" - private val apiDiffContextSizeKey = "apiDiffContextSize" - private val recompileOnMacroDefKey = "recompileOnMacroDef" - private val nameHashingKey = "nameHashing" + private val transitiveStepKey = "transitiveStep" + private val 
recompileAllFractionKey = "recompileAllFraction" + private val relationsDebugKey = "relationsDebug" + private val apiDebugKey = "apiDebug" + private val apiDumpDirectoryKey = "apiDumpDirectory" + private val apiDiffContextSizeKey = "apiDiffContextSize" + private val recompileOnMacroDefKey = "recompileOnMacroDef" + private val nameHashingKey = "nameHashing" - def fromStringMap(m: java.util.Map[String, String]): IncOptions = { - // all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API - def getTransitiveStep: Int = { - val k = transitiveStepKey - if (m.containsKey(k)) m.get(k).toInt else Default.transitiveStep - } - def getRecompileAllFraction: Double = { - val k = recompileAllFractionKey - if (m.containsKey(k)) m.get(k).toDouble else Default.recompileAllFraction - } - def getRelationsDebug: Boolean = { - val k = relationsDebugKey - if (m.containsKey(k)) m.get(k).toBoolean else Default.relationsDebug - } - def getApiDebug: Boolean = { - val k = apiDebugKey - if (m.containsKey(k)) m.get(k).toBoolean else Default.apiDebug - } - def getApiDiffContextSize: Int = { - val k = apiDiffContextSizeKey - if (m.containsKey(k)) m.get(k).toInt else Default.apiDiffContextSize - } - def getApiDumpDirectory: Option[java.io.File] = { - val k = apiDumpDirectoryKey - if (m.containsKey(k)) - Some(new java.io.File(m.get(k))) - else None - } - def getRecompileOnMacroDef: Boolean = { - val k = recompileOnMacroDefKey - if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef - } - def getNameHashing: Boolean = { - val k = nameHashingKey - if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing - } + def fromStringMap(m: java.util.Map[String, String]): IncOptions = { + // all the code below doesn't look like idiomatic Scala for a good reason: we are working with Java API + def getTransitiveStep: Int = { + val k = transitiveStepKey + if (m.containsKey(k)) m.get(k).toInt else Default.transitiveStep + } + def 
getRecompileAllFraction: Double = { + val k = recompileAllFractionKey + if (m.containsKey(k)) m.get(k).toDouble else Default.recompileAllFraction + } + def getRelationsDebug: Boolean = { + val k = relationsDebugKey + if (m.containsKey(k)) m.get(k).toBoolean else Default.relationsDebug + } + def getApiDebug: Boolean = { + val k = apiDebugKey + if (m.containsKey(k)) m.get(k).toBoolean else Default.apiDebug + } + def getApiDiffContextSize: Int = { + val k = apiDiffContextSizeKey + if (m.containsKey(k)) m.get(k).toInt else Default.apiDiffContextSize + } + def getApiDumpDirectory: Option[java.io.File] = { + val k = apiDumpDirectoryKey + if (m.containsKey(k)) + Some(new java.io.File(m.get(k))) + else None + } + def getRecompileOnMacroDef: Boolean = { + val k = recompileOnMacroDefKey + if (m.containsKey(k)) m.get(k).toBoolean else Default.recompileOnMacroDef + } + def getNameHashing: Boolean = { + val k = nameHashingKey + if (m.containsKey(k)) m.get(k).toBoolean else Default.nameHashing + } - new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize, - getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing) - } + new IncOptions(getTransitiveStep, getRecompileAllFraction, getRelationsDebug, getApiDebug, getApiDiffContextSize, + getApiDumpDirectory, ClassfileManager.deleteImmediately, getRecompileOnMacroDef, getNameHashing) + } - def toStringMap(o: IncOptions): java.util.Map[String, String] = { - val m = new java.util.HashMap[String, String] - m.put(transitiveStepKey, o.transitiveStep.toString) - m.put(recompileAllFractionKey, o.recompileAllFraction.toString) - m.put(relationsDebugKey, o.relationsDebug.toString) - m.put(apiDebugKey, o.apiDebug.toString) - o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString)) - m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString) - m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString) - m.put(nameHashingKey, 
o.nameHashing.toString) - m - } + def toStringMap(o: IncOptions): java.util.Map[String, String] = { + val m = new java.util.HashMap[String, String] + m.put(transitiveStepKey, o.transitiveStep.toString) + m.put(recompileAllFractionKey, o.recompileAllFraction.toString) + m.put(relationsDebugKey, o.relationsDebug.toString) + m.put(apiDebugKey, o.apiDebug.toString) + o.apiDumpDirectory.foreach(f => m.put(apiDumpDirectoryKey, f.toString)) + m.put(apiDiffContextSizeKey, o.apiDiffContextSize.toString) + m.put(recompileOnMacroDefKey, o.recompileOnMacroDef.toString) + m.put(nameHashingKey, o.nameHashing.toString) + m + } } diff --git a/compile/inc/src/main/scala/sbt/inc/Incremental.scala b/compile/inc/src/main/scala/sbt/inc/Incremental.scala index f64c284c5..ccce0065e 100644 --- a/compile/inc/src/main/scala/sbt/inc/Incremental.scala +++ b/compile/inc/src/main/scala/sbt/inc/Incremental.scala @@ -4,406 +4,407 @@ package sbt package inc -import xsbt.api.{NameChanges, SameAPI, TopLevel} +import xsbt.api.{ NameChanges, SameAPI, TopLevel } import annotation.tailrec -import xsbti.api.{Compilation, Source} +import xsbti.api.{ Compilation, Source } import xsbti.compile.DependencyChanges import java.io.File -object Incremental -{ - def compile(sources: Set[File], - entry: String => Option[File], - previous: Analysis, - current: ReadStamps, - forEntry: File => Option[Analysis], - doCompile: (Set[File], DependencyChanges) => Analysis, - log: Logger, - options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = - { - val incremental: IncrementalCommon = - if (!options.nameHashing) - new IncrementalDefaultImpl(log, options) - else - new IncrementalNameHashing(log, options) - val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) - val binaryChanges = new DependencyChanges { - val modifiedBinaries = initialChanges.binaryDeps.toArray - val modifiedClasses = initialChanges.external.allModified.toArray - def isEmpty = 
modifiedBinaries.isEmpty && modifiedClasses.isEmpty - } - val initialInv = incremental.invalidateInitial(previous.relations, initialChanges) - log.debug("All initially invalidated sources: " + initialInv + "\n") - val analysis = manageClassfiles(options) { classfileManager => - incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1) - } - (!initialInv.isEmpty, analysis) - } +object Incremental { + def compile(sources: Set[File], + entry: String => Option[File], + previous: Analysis, + current: ReadStamps, + forEntry: File => Option[Analysis], + doCompile: (Set[File], DependencyChanges) => Analysis, + log: Logger, + options: IncOptions)(implicit equivS: Equiv[Stamp]): (Boolean, Analysis) = + { + val incremental: IncrementalCommon = + if (!options.nameHashing) + new IncrementalDefaultImpl(log, options) + else + new IncrementalNameHashing(log, options) + val initialChanges = incremental.changedInitial(entry, sources, previous, current, forEntry) + val binaryChanges = new DependencyChanges { + val modifiedBinaries = initialChanges.binaryDeps.toArray + val modifiedClasses = initialChanges.external.allModified.toArray + def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty + } + val initialInv = incremental.invalidateInitial(previous.relations, initialChanges) + log.debug("All initially invalidated sources: " + initialInv + "\n") + val analysis = manageClassfiles(options) { classfileManager => + incremental.cycle(initialInv, sources, binaryChanges, previous, doCompile, classfileManager, 1) + } + (!initialInv.isEmpty, analysis) + } - // the name of system property that was meant to enable debugging mode of incremental compiler but - // it ended up being used just to enable debugging of relations. That's why if you migrate to new - // API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug` - // flag to achieve the same effect as using `incDebugProp`. 
- @deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2") - val incDebugProp = "xsbt.inc.debug" + // the name of system property that was meant to enable debugging mode of incremental compiler but + // it ended up being used just to enable debugging of relations. That's why if you migrate to new + // API for configuring incremental compiler (IncOptions) it's enough to control value of `relationsDebug` + // flag to achieve the same effect as using `incDebugProp`. + @deprecated("Use `IncOptions.relationsDebug` flag to enable debugging of relations.", "0.13.2") + val incDebugProp = "xsbt.inc.debug" - private[inc] val apiDebugProp = "xsbt.api.debug" - private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) + private[inc] val apiDebugProp = "xsbt.api.debug" + private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) - private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = - prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis): Analysis = + prune(invalidatedSrcs, previous, ClassfileManager.deleteImmediately()) - private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = - { - classfileManager.delete( invalidatedSrcs.flatMap(previous.relations.products) ) - previous -- invalidatedSrcs - } + private[sbt] def prune(invalidatedSrcs: Set[File], previous: Analysis, classfileManager: ClassfileManager): Analysis = + { + classfileManager.delete(invalidatedSrcs.flatMap(previous.relations.products)) + previous -- invalidatedSrcs + } - private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T = - { - val classfileManager = options.newClassfileManager() - val result = try run(classfileManager) catch { case e: 
Exception => - classfileManager.complete(success = false) - throw e - } - classfileManager.complete(success = true) - result - } + private[this] def manageClassfiles[T](options: IncOptions)(run: ClassfileManager => T): T = + { + val classfileManager = options.newClassfileManager() + val result = try run(classfileManager) catch { + case e: Exception => + classfileManager.complete(success = false) + throw e + } + classfileManager.complete(success = true) + result + } } - private abstract class IncrementalCommon(log: Logger, options: IncOptions) { - private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp) + private def incDebug(options: IncOptions): Boolean = options.relationsDebug || java.lang.Boolean.getBoolean(Incremental.incDebugProp) - // setting the related system property to true will skip checking that the class name - // still comes from the same classpath entry. This can workaround bugs in classpath construction, - // such as the currently problematic -javabootclasspath. This is subject to removal at any time. - private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup") + // setting the related system property to true will skip checking that the class name + // still comes from the same classpath entry. This can workaround bugs in classpath construction, + // such as the currently problematic -javabootclasspath. This is subject to removal at any time. 
+ private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup") - // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success - // TODO: full external name changes, scopeInvalidations - @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, - doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis = - if(invalidatedRaw.isEmpty) - previous - else - { - def debug(s: => String) = if (incDebug(options)) log.debug(s) else () - val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations) - val invalidated = expand(withPackageObjects, allSources) - val pruned = Incremental.prune(invalidated, previous, classfileManager) - debug("********* Pruned: \n" + pruned.relations + "\n*********") + // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success + // TODO: full external name changes, scopeInvalidations + @tailrec final def cycle(invalidatedRaw: Set[File], allSources: Set[File], binaryChanges: DependencyChanges, previous: Analysis, + doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: ClassfileManager, cycleNum: Int): Analysis = + if (invalidatedRaw.isEmpty) + previous + else { + def debug(s: => String) = if (incDebug(options)) log.debug(s) else () + val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects(invalidatedRaw, previous.relations) + val invalidated = expand(withPackageObjects, allSources) + val pruned = Incremental.prune(invalidated, previous, classfileManager) + debug("********* Pruned: \n" + pruned.relations + "\n*********") - val fresh = doCompile(invalidated, binaryChanges) - classfileManager.generated(fresh.relations.allProducts) - debug("********* Fresh: \n" + fresh.relations + "\n*********") - val merged = pruned ++ 
fresh//.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis) - debug("********* Merged: \n" + merged.relations + "\n*********") + val fresh = doCompile(invalidated, binaryChanges) + classfileManager.generated(fresh.relations.allProducts) + debug("********* Fresh: \n" + fresh.relations + "\n*********") + val merged = pruned ++ fresh //.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis) + debug("********* Merged: \n" + merged.relations + "\n*********") - val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _) - debug("\nChanges:\n" + incChanges) - val transitiveStep = options.transitiveStep - val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep) - cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum+1) - } - private[this] def emptyChanges: DependencyChanges = new DependencyChanges { - val modifiedBinaries = new Array[File](0) - val modifiedClasses = new Array[String](0) - def isEmpty = true - } - private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = { - val recompileAllFraction = options.recompileAllFraction - if(invalidated.size > all.size * recompileAllFraction) { - log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction*100.0) + "% of all sources") - all ++ invalidated // need the union because all doesn't contain removed sources - } - else invalidated - } - - protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] - - /** - * Logs API changes using debug-level logging. The API are obtained using the APIDiff class. - * - * NOTE: This method creates a new APIDiff instance on every invocation. 
- */ - private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source, - newAPIMapping: T => Source): Unit = { - val contextSize = options.apiDiffContextSize - try { - val apiDiff = new APIDiff - apiChanges foreach { - case APIChangeDueToMacroDefinition(src) => - log.debug(s"Public API is considered to be changed because $src contains a macro definition.") - case apiChange@(_: SourceAPIChange[T] | _: NamesChange[T]) => - val src = apiChange.modified - val oldApi = oldAPIMapping(src) - val newApi = newAPIMapping(src) - val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize) - log.debug(s"Detected a change in a public API (${src.toString}):\n" - + apiUnifiedPatch) - } - } catch { - case e: ClassNotFoundException => - log.error("You have api debugging enabled but DiffUtils library cannot be found on sbt's classpath") - case e: LinkageError => - log.error("Encoutared linkage error while trying to load DiffUtils library.") - log.trace(e) - case e: Exception => - log.error("An exception has been thrown while trying to dump an api diff.") - log.trace(e) - } - } - - /** - * Accepts the sources that were recompiled during the last step and functions - * providing the API before and after the last step. The functions should return - * an empty API if the file did not/does not exist. 
- */ - def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] = - { - val oldApis = lastSources.toSeq map oldAPI - val newApis = lastSources.toSeq map newAPI - val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) } - - if (Incremental.apiDebug(options) && apiChanges.nonEmpty) { - logApiChanges(apiChanges, oldAPI, newAPI) - } - - new APIChanges(apiChanges) - } - def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { - // Clients of a modified source file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled. - val hasMacro = a.hasMacro || b.hasMacro - if (shortcutSameSource(a, b)) { - None - } else { - if (hasMacro && options.recompileOnMacroDef) { - Some(APIChangeDueToMacroDefinition(src)) - } else sameAPI(src, a, b) - } - } - - protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] - - def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep) - def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs){ - case (co1, co2) => co1.sourceDirectory == co2.sourceDirectory && co1.outputDirectory == co2.outputDirectory + val incChanges = changedIncremental(invalidated, previous.apis.internalAPI _, merged.apis.internalAPI _) + debug("\nChanges:\n" + incChanges) + val transitiveStep = options.transitiveStep + val incInv = invalidateIncremental(merged.relations, merged.apis, incChanges, invalidated, cycleNum >= transitiveStep) + cycle(incInv, allSources, emptyChanges, merged, doCompile, classfileManager, cycleNum + 1) + } + private[this] def emptyChanges: DependencyChanges = new DependencyChanges { + val modifiedBinaries = new Array[File](0) + val modifiedClasses = new Array[String](0) + def 
isEmpty = true + } + private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = { + val recompileAllFraction = options.recompileAllFraction + if (invalidated.size > all.size * recompileAllFraction) { + log.debug("Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction * 100.0) + "% of all sources") + all ++ invalidated // need the union because all doesn't contain removed sources + } else invalidated } - def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps, - forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges = - { - val previous = previousAnalysis.stamps - val previousAPIs = previousAnalysis.apis + protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] - val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv( previous.internalSource(f), current.internalSource(f) ) ) - val removedProducts = previous.allProducts.filter( p => !equivS.equiv( previous.product(p), current.product(p) ) ).toSet - val binaryDepChanges = previous.allBinaries.filter( externalBinaryModified(entry, forEntry, previous, current)).toSet - val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry)) + /** + * Logs API changes using debug-level logging. The API are obtained using the APIDiff class. + * + * NOTE: This method creates a new APIDiff instance on every invocation. 
+ */ + private def logApiChanges[T](apiChanges: Iterable[APIChange[T]], oldAPIMapping: T => Source, + newAPIMapping: T => Source): Unit = { + val contextSize = options.apiDiffContextSize + try { + val apiDiff = new APIDiff + apiChanges foreach { + case APIChangeDueToMacroDefinition(src) => + log.debug(s"Public API is considered to be changed because $src contains a macro definition.") + case apiChange @ (_: SourceAPIChange[T] | _: NamesChange[T]) => + val src = apiChange.modified + val oldApi = oldAPIMapping(src) + val newApi = newAPIMapping(src) + val apiUnifiedPatch = apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize) + log.debug(s"Detected a change in a public API (${src.toString}):\n" + + apiUnifiedPatch) + } + } catch { + case e: ClassNotFoundException => + log.error("You have api debugging enabled but DiffUtils library cannot be found on sbt's classpath") + case e: LinkageError => + log.error("Encountered linkage error while trying to load DiffUtils library.") + log.trace(e) + case e: Exception => + log.error("An exception has been thrown while trying to dump an api diff.") + log.trace(e) + } + }

 + /** + * Accepts the sources that were recompiled during the last step and functions + * providing the API before and after the last step. The functions should return + * an empty API if the file did not/does not exist. 
+ */ + def changedIncremental[T](lastSources: collection.Set[T], oldAPI: T => Source, newAPI: T => Source): APIChanges[T] = + { + val oldApis = lastSources.toSeq map oldAPI + val newApis = lastSources.toSeq map newAPI + val apiChanges = (lastSources, oldApis, newApis).zipped.flatMap { (src, oldApi, newApi) => sameSource(src, oldApi, newApi) } - def changes(previous: Set[File], current: Set[File], existingModified: File => Boolean): Changes[File] = - new Changes[File] - { - private val inBoth = previous & current - val removed = previous -- inBoth - val added = current -- inBoth - val (changed, unmodified) = inBoth.partition(existingModified) - } + if (Incremental.apiDebug(options) && apiChanges.nonEmpty) { + logApiChanges(apiChanges, oldAPI, newAPI) + } - def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] = - { - val dependsOnSrc = previous.usesInternalSrc _ - val propagated = - if(transitive) - transitiveDependencies(dependsOnSrc, changes.allModified.toSet) - else - invalidateIntermediate(previous, changes) + new APIChanges(apiChanges) + } + def sameSource[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { + // Clients of a modified source file (ie, one that doesn't satisfy `shortcutSameSource`) containing macros must be recompiled. 
+ val hasMacro = a.hasMacro || b.hasMacro + if (shortcutSameSource(a, b)) { + None + } else { + if (hasMacro && options.recompileOnMacroDef) { + Some(APIChangeDueToMacroDefinition(src)) + } else sameAPI(src, a, b) + } + } - val dups = invalidateDuplicates(previous) - if(dups.nonEmpty) - log.debug("Invalidated due to generated class file collision: " + dups) + protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] - val inv = propagated ++ dups // ++ scopeInvalidations(previous.extAPI _, changes.modified, changes.names) - val newlyInvalidated = inv -- recompiledSources - log.debug("All newly invalidated sources after taking into account (previously) recompiled sources:" + newlyInvalidated) - if(newlyInvalidated.isEmpty) Set.empty else inv - } + def shortcutSameSource(a: Source, b: Source): Boolean = !a.hash.isEmpty && !b.hash.isEmpty && sameCompilation(a.compilation, b.compilation) && (a.hash.deep equals b.hash.deep) + def sameCompilation(a: Compilation, b: Compilation): Boolean = a.startTime == b.startTime && a.outputs.corresponds(b.outputs) { + case (co1, co2) => co1.sourceDirectory == co2.sourceDirectory && co1.outputDirectory == co2.outputDirectory + } - /** Invalidate all sources that claim to produce the same class file as another source file. */ - def invalidateDuplicates(merged: Relations): Set[File] = - merged.srcProd.reverseMap.flatMap { case (classFile, sources) => - if(sources.size > 1) sources else Nil - } toSet; + def changedInitial(entry: String => Option[File], sources: Set[File], previousAnalysis: Analysis, current: ReadStamps, + forEntry: File => Option[Analysis])(implicit equivS: Equiv[Stamp]): InitialChanges = + { + val previous = previousAnalysis.stamps + val previousAPIs = previousAnalysis.apis - /** Returns the transitive source dependencies of `initial`. - * Because the intermediate steps do not pull in cycles, this result includes the initial files - * if they are part of a cycle containing newly invalidated files . 
*/ - def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] = - { - val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc) - val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc) - log.debug("Final step, transitive dependencies:\n\t" + transitivePartial) - transitivePartial - } + val srcChanges = changes(previous.allInternalSources.toSet, sources, f => !equivS.equiv(previous.internalSource(f), current.internalSource(f))) + val removedProducts = previous.allProducts.filter(p => !equivS.equiv(previous.product(p), current.product(p))).toSet + val binaryDepChanges = previous.allBinaries.filter(externalBinaryModified(entry, forEntry, previous, current)).toSet + val extChanges = changedIncremental(previousAPIs.allExternals, previousAPIs.externalAPI _, currentExternalAPI(entry, forEntry)) - /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ - def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] = - { - val srcChanges = changes.internalSrc - val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed - val byProduct = changes.removedProducts.flatMap(previous.produced) - val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary) - val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations - checkAbsolute(srcChanges.added.toList) - log.debug( - "\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: " + srcChanges.added + "\n\tmodified: " + srcChanges.changed + - "\nRemoved products: " + changes.removedProducts + - "\nExternal API changes: " + changes.external + - "\nModified binary dependencies: " + changes.binaryDeps + - "\nInitial directly invalidated sources: " + srcDirect + - "\n\nSources indirectly invalidated by:" 
+ - "\n\tproduct: " + byProduct + - "\n\tbinary dep: " + byBinaryDep + - "\n\texternal source: " + byExtSrcDep - ) + InitialChanges(srcChanges, removedProducts, binaryDepChanges, extChanges) + } - srcDirect ++ byProduct ++ byBinaryDep ++ byExtSrcDep - } - private[this] def checkAbsolute(addedSources: List[File]): Unit = - if(addedSources.nonEmpty) { - addedSources.filterNot(_.isAbsolute) match { - case first :: more => - val fileStrings = more match { - case Nil => first.toString - case x :: Nil => s"$first and $x" - case _ => s"$first and ${more.size} others" - } - sys.error(s"The incremental compiler requires absolute sources, but some were relative: $fileStrings") - case Nil => - } - } + def changes(previous: Set[File], current: Set[File], existingModified: File => Boolean): Changes[File] = + new Changes[File] { + private val inBoth = previous & current + val removed = previous -- inBoth + val added = current -- inBoth + val (changed, unmodified) = inBoth.partition(existingModified) + } - def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = { - (externalAPIChanges.apiChanges.flatMap { externalAPIChange => - invalidateByExternal(relations, externalAPIChange) - }).toSet - } + def invalidateIncremental(previous: Relations, apis: APIs, changes: APIChanges[File], recompiledSources: Set[File], transitive: Boolean): Set[File] = + { + val dependsOnSrc = previous.usesInternalSrc _ + val propagated = + if (transitive) + transitiveDependencies(dependsOnSrc, changes.allModified.toSet) + else + invalidateIntermediate(previous, changes) - /** Sources invalidated by `external` sources in other projects according to the previous `relations`. 
*/ - protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] + val dups = invalidateDuplicates(previous) + if (dups.nonEmpty) + log.debug("Invalidated due to generated class file collision: " + dups) - /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ - def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = - { - invalidateSources(relations, changes) - } - /** Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not - * included in a cycle with newly invalidated sources. */ - private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] = - { - val initial = changes.allModified.toSet - val all = (changes.apiChanges flatMap { change => - invalidateSource(relations, change) - }).toSet - includeInitialCond(initial, all, allDeps(relations)) - } + val inv = propagated ++ dups // ++ scopeInvalidations(previous.extAPI _, changes.modified, changes.names) + val newlyInvalidated = inv -- recompiledSources + log.debug("All newly invalidated sources after taking into account (previously) recompiled sources:" + newlyInvalidated) + if (newlyInvalidated.isEmpty) Set.empty else inv + } - protected def allDeps(relations: Relations): File => Set[File] + /** Invalidate all sources that claim to produce the same class file as another source file. */ + def invalidateDuplicates(merged: Relations): Set[File] = + merged.srcProd.reverseMap.flatMap { + case (classFile, sources) => + if (sources.size > 1) sources else Nil + } toSet; - protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] + /** + * Returns the transitive source dependencies of `initial`. 
+ * Because the intermediate steps do not pull in cycles, this result includes the initial files + * if they are part of a cycle containing newly invalidated files . + */ + def transitiveDependencies(dependsOnSrc: File => Set[File], initial: Set[File]): Set[File] = + { + val transitiveWithInitial = transitiveDeps(initial)(dependsOnSrc) + val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnSrc) + log.debug("Final step, transitive dependencies:\n\t" + transitivePartial) + transitivePartial + } - /** Conditionally include initial sources that are dependencies of newly invalidated sources. - ** Initial sources included in this step can be because of a cycle, but not always. */ - private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] = - { - val newInv = currentInvalidations -- initial - log.debug("New invalidations:\n\t" + newInv) - val transitiveOfNew = transitiveDeps(newInv)(allDeps) - val initialDependsOnNew = transitiveOfNew & initial - log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew) - newInv ++ initialDependsOnNew - } + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + def invalidateInitial(previous: Relations, changes: InitialChanges): Set[File] = + { + val srcChanges = changes.internalSrc + val srcDirect = srcChanges.removed ++ srcChanges.removed.flatMap(previous.usesInternalSrc) ++ srcChanges.added ++ srcChanges.changed + val byProduct = changes.removedProducts.flatMap(previous.produced) + val byBinaryDep = changes.binaryDeps.flatMap(previous.usesBinary) + val byExtSrcDep = invalidateByAllExternal(previous, changes.external) //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations + checkAbsolute(srcChanges.added.toList) + log.debug( + "\nInitial source changes: \n\tremoved:" + srcChanges.removed + "\n\tadded: 
" + srcChanges.added + "\n\tmodified: " + srcChanges.changed + + "\nRemoved products: " + changes.removedProducts + + "\nExternal API changes: " + changes.external + + "\nModified binary dependencies: " + changes.binaryDeps + + "\nInitial directly invalidated sources: " + srcDirect + + "\n\nSources indirectly invalidated by:" + + "\n\tproduct: " + byProduct + + "\n\tbinary dep: " + byBinaryDep + + "\n\texternal source: " + byExtSrcDep + ) - def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean = - dependsOn => - { - def inv(reason: String): Boolean = { - log.debug("Invalidating " + dependsOn + ": " + reason) - true - } - def entryModified(className: String, classpathEntry: File): Boolean = - { - val resolved = Locate.resolve(classpathEntry, className) - if(resolved.getCanonicalPath != dependsOn.getCanonicalPath) - inv("class " + className + " now provided by " + resolved.getCanonicalPath) - else - fileModified(dependsOn, resolved) - } - def fileModified(previousFile: File, currentFile: File): Boolean = - { - val previousStamp = previous.binary(previousFile) - val currentStamp = current.binary(currentFile) - if(equivS.equiv(previousStamp, currentStamp)) - false - else - inv("stamp changed from " + previousStamp + " to " + currentStamp) - } - def dependencyModified(file: File): Boolean = - previous.className(file) match { - case None => inv("no class name was mapped for it.") - case Some(name) => entry(name) match { - case None => inv("could not find class " + name + " on the classpath.") - case Some(e) => entryModified(name, e) - } - } + srcDirect ++ byProduct ++ byBinaryDep ++ byExtSrcDep + } + private[this] def checkAbsolute(addedSources: List[File]): Unit = + if (addedSources.nonEmpty) { + addedSources.filterNot(_.isAbsolute) match { + case first :: more => + val fileStrings = more match { + case Nil => first.toString + case x :: Nil => 
s"$first and $x" + case _ => s"$first and ${more.size} others" + } + sys.error(s"The incremental compiler requires absolute sources, but some were relative: $fileStrings") + case Nil => + } + } - analysis(dependsOn).isEmpty && - (if(skipClasspathLookup) fileModified(dependsOn, dependsOn) else dependencyModified(dependsOn)) + def invalidateByAllExternal(relations: Relations, externalAPIChanges: APIChanges[String]): Set[File] = { + (externalAPIChanges.apiChanges.flatMap { externalAPIChange => + invalidateByExternal(relations, externalAPIChange) + }).toSet + } - } + /** Sources invalidated by `external` sources in other projects according to the previous `relations`. */ + protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] - def currentExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): String => Source = - className => - orEmpty( - for { - e <- entry(className) - analysis <- forEntry(e) - src <- analysis.relations.definesClass(className).headOption - } yield - analysis.apis.internalAPI(src) - ) + /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ + def invalidateIntermediate(relations: Relations, changes: APIChanges[File]): Set[File] = + { + invalidateSources(relations, changes) + } + /** + * Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not + * included in a cycle with newly invalidated sources. 
+ */ + private[this] def invalidateSources(relations: Relations, changes: APIChanges[File]): Set[File] = + { + val initial = changes.allModified.toSet + val all = (changes.apiChanges flatMap { change => + invalidateSource(relations, change) + }).toSet + includeInitialCond(initial, all, allDeps(relations)) + } - def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource - def orTrue(o: Option[Boolean]): Boolean = o getOrElse true + protected def allDeps(relations: Relations): File => Set[File] - protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = - { - val xs = new collection.mutable.HashSet[T] - def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) - def visit(from: T, to: T): Unit = - if (!xs.contains(to)) { - log.debug(s"Including $to by $from") - xs += to - all(to, dependencies(to)) - } - log.debug("Initial set of included nodes: " + nodes) - nodes foreach { start => - xs += start - all(start, dependencies(start)) - } - xs.toSet - } + protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] + /** + * Conditionally include initial sources that are dependencies of newly invalidated sources. + * * Initial sources included in this step can be because of a cycle, but not always. 
+ */ + private[this] def includeInitialCond(initial: Set[File], currentInvalidations: Set[File], allDeps: File => Set[File]): Set[File] = + { + val newInv = currentInvalidations -- initial + log.debug("New invalidations:\n\t" + newInv) + val transitiveOfNew = transitiveDeps(newInv)(allDeps) + val initialDependsOnNew = transitiveOfNew & initial + log.debug("Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew) + newInv ++ initialDependsOnNew + } - // unmodifiedSources should not contain any sources in the previous compilation run - // (this may unnecessarily invalidate them otherwise) - /*def scopeInvalidation(previous: Analysis, otherSources: Set[File], names: NameChanges): Set[File] = + def externalBinaryModified(entry: String => Option[File], analysis: File => Option[Analysis], previous: Stamps, current: ReadStamps)(implicit equivS: Equiv[Stamp]): File => Boolean = + dependsOn => + { + def inv(reason: String): Boolean = { + log.debug("Invalidating " + dependsOn + ": " + reason) + true + } + def entryModified(className: String, classpathEntry: File): Boolean = + { + val resolved = Locate.resolve(classpathEntry, className) + if (resolved.getCanonicalPath != dependsOn.getCanonicalPath) + inv("class " + className + " now provided by " + resolved.getCanonicalPath) + else + fileModified(dependsOn, resolved) + } + def fileModified(previousFile: File, currentFile: File): Boolean = + { + val previousStamp = previous.binary(previousFile) + val currentStamp = current.binary(currentFile) + if (equivS.equiv(previousStamp, currentStamp)) + false + else + inv("stamp changed from " + previousStamp + " to " + currentStamp) + } + def dependencyModified(file: File): Boolean = + previous.className(file) match { + case None => inv("no class name was mapped for it.") + case Some(name) => entry(name) match { + case None => inv("could not find class " + name + " on the classpath.") + case Some(e) => entryModified(name, e) + } + } + + 
analysis(dependsOn).isEmpty && + (if (skipClasspathLookup) fileModified(dependsOn, dependsOn) else dependencyModified(dependsOn)) + + } + + def currentExternalAPI(entry: String => Option[File], forEntry: File => Option[Analysis]): String => Source = + className => + orEmpty( + for { + e <- entry(className) + analysis <- forEntry(e) + src <- analysis.relations.definesClass(className).headOption + } yield analysis.apis.internalAPI(src) + ) + + def orEmpty(o: Option[Source]): Source = o getOrElse APIs.emptySource + def orTrue(o: Option[Boolean]): Boolean = o getOrElse true + + protected def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = + { + val xs = new collection.mutable.HashSet[T] + def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) + def visit(from: T, to: T): Unit = + if (!xs.contains(to)) { + log.debug(s"Including $to by $from") + xs += to + all(to, dependencies(to)) + } + log.debug("Initial set of included nodes: " + nodes) + nodes foreach { start => + xs += start + all(start, dependencies(start)) + } + xs.toSet + } + + // unmodifiedSources should not contain any sources in the previous compilation run + // (this may unnecessarily invalidate them otherwise) + /*def scopeInvalidation(previous: Analysis, otherSources: Set[File], names: NameChanges): Set[File] = { val newNames = newTypes ++ names.newTerms val newMap = pkgNameMap(newNames) @@ -445,51 +446,51 @@ private abstract class IncrementalCommon(log: Logger, options: IncOptions) { private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { - // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error - // This might be too conservative: we probably only need package objects for packages of invalidated sources. 
- override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = - invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. + override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.publicInherited.internal.reverse filter { _.getName == "package.scala" } - override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { - if (SameAPI(a,b)) - None - else { - val sourceApiChange = SourceAPIChange(src) - Some(sourceApiChange) - } - } + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[SourceAPIChange[T]] = { + if (SameAPI(a, b)) + None + else { + val sourceApiChange = SourceAPIChange(src) + Some(sourceApiChange) + } + } - /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ - override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { - val modified = externalAPIChange.modified - // Propagate public inheritance dependencies transitively. - // This differs from normal because we need the initial crossing from externals to sources in this project. 
- val externalInheritedR = relations.publicInherited.external - val byExternalInherited = externalInheritedR.reverse(modified) - val internalInheritedR = relations.publicInherited.internal - val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + // Propagate public inheritance dependencies transitively. + // This differs from normal because we need the initial crossing from externals to sources in this project. + val externalInheritedR = relations.publicInherited.external + val byExternalInherited = externalInheritedR.reverse(modified) + val internalInheritedR = relations.publicInherited.internal + val transitiveInherited = transitiveDeps(byExternalInherited)(internalInheritedR.reverse _) - // Get the direct dependencies of all sources transitively invalidated by inheritance - val directA = transitiveInherited flatMap relations.direct.internal.reverse - // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. - val directB = relations.direct.external.reverse(modified) - transitiveInherited ++ directA ++ directB - } + // Get the direct dependencies of all sources transitively invalidated by inheritance + val directA = transitiveInherited flatMap relations.direct.internal.reverse + // Get the sources that directly depend on externals. This includes non-inheritance dependencies and is not transitive. 
+ val directB = relations.direct.external.reverse(modified) + transitiveInherited ++ directA ++ directB + } - override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { - def reverse(r: Relations.Source) = r.internal.reverse _ - val directDeps: File => Set[File] = reverse(relations.direct) - val publicInherited: File => Set[File] = reverse(relations.publicInherited) - log.debug("Invalidating by inheritance (transitively)...") - val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) - log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) - val direct = transitiveInherited flatMap directDeps - log.debug("Invalidated by direct dependency: " + direct) - transitiveInherited ++ direct - } + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + def reverse(r: Relations.Source) = r.internal.reverse _ + val directDeps: File => Set[File] = reverse(relations.direct) + val publicInherited: File => Set[File] = reverse(relations.publicInherited) + log.debug("Invalidating by inheritance (transitively)...") + val transitiveInherited = transitiveDeps(Set(change.modified))(publicInherited) + log.debug("Invalidated by transitive public inheritance: " + transitiveInherited) + val direct = transitiveInherited flatMap directDeps + log.debug("Invalidated by direct dependency: " + direct) + transitiveInherited ++ direct + } - override protected def allDeps(relations: Relations): File => Set[File] = - f => relations.direct.internal.reverse(f) + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.direct.internal.reverse(f) } @@ -501,74 +502,74 @@ private final class IncrementalDefaultImpl(log: Logger, options: IncOptions) ext */ private final class IncrementalNameHashing(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { - private val memberRefInvalidator = new 
MemberRefInvalidator(log) + private val memberRefInvalidator = new MemberRefInvalidator(log) - // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error - // This might be too conservative: we probably only need package objects for packages of invalidated sources. - override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = - invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" } + // Package objects are fragile: if they inherit from an invalidated source, get "class file needed by package is missing" error + // This might be too conservative: we probably only need package objects for packages of invalidated sources. + override protected def invalidatedPackageObjects(invalidated: Set[File], relations: Relations): Set[File] = + invalidated flatMap relations.inheritance.internal.reverse filter { _.getName == "package.scala" } - override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { - if (SameAPI(a,b)) - None - else { - val aNameHashes = a._internalOnly_nameHashes - val bNameHashes = b._internalOnly_nameHashes - val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes) - val apiChange = NamesChange(src, modifiedNames) - Some(apiChange) - } - } + override protected def sameAPI[T](src: T, a: Source, b: Source): Option[APIChange[T]] = { + if (SameAPI(a, b)) + None + else { + val aNameHashes = a._internalOnly_nameHashes + val bNameHashes = b._internalOnly_nameHashes + val modifiedNames = ModifiedNames.compareTwoNameHashes(aNameHashes, bNameHashes) + val apiChange = NamesChange(src, modifiedNames) + Some(apiChange) + } + } - /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ - override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { - val 
modified = externalAPIChange.modified - val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange) - log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.") - // Propagate inheritance dependencies transitively. - // This differs from normal because we need the initial crossing from externals to sources in this project. - val externalInheritanceR = relations.inheritance.external - val byExternalInheritance = externalInheritanceR.reverse(modified) - log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).") - val transitiveInheritance = byExternalInheritance flatMap { file => - invalidateByInheritance(relations, file) - } - val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal, - relations.names, externalAPIChange) - val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external, - relations.names, externalAPIChange) + /** Invalidates sources based on initially detected 'changes' to the sources, products, and dependencies.*/ + override protected def invalidateByExternal(relations: Relations, externalAPIChange: APIChange[String]): Set[File] = { + val modified = externalAPIChange.modified + val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange) + log.debug(s"$invalidationReason\nAll member reference dependencies will be considered within this context.") + // Propagate inheritance dependencies transitively. + // This differs from normal because we need the initial crossing from externals to sources in this project. 
+ val externalInheritanceR = relations.inheritance.external + val byExternalInheritance = externalInheritanceR.reverse(modified) + log.debug(s"Files invalidated by inheriting from (external) $modified: $byExternalInheritance; now invalidating by inheritance (internally).") + val transitiveInheritance = byExternalInheritance flatMap { file => + invalidateByInheritance(relations, file) + } + val memberRefInvalidationInternal = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, externalAPIChange) + val memberRefInvalidationExternal = memberRefInvalidator.get(relations.memberRef.external, + relations.names, externalAPIChange) - // Get the member reference dependencies of all sources transitively invalidated by inheritance - log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.") - val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal - // Get the sources that depend on externals by member reference. - // This includes non-inheritance dependencies and is not transitive. - log.debug(s"Getting sources that directly depend on (external) $modified.") - val memberRefB = memberRefInvalidationExternal(modified) - transitiveInheritance ++ memberRefA ++ memberRefB - } + // Get the member reference dependencies of all sources transitively invalidated by inheritance + log.debug("Getting direct dependencies of all sources transitively invalidated by inheritance.") + val memberRefA = transitiveInheritance flatMap memberRefInvalidationInternal + // Get the sources that depend on externals by member reference. + // This includes non-inheritance dependencies and is not transitive. 
+ log.debug(s"Getting sources that directly depend on (external) $modified.") + val memberRefB = memberRefInvalidationExternal(modified) + transitiveInheritance ++ memberRefA ++ memberRefB + } - private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = { - val inheritanceDeps = relations.inheritance.internal.reverse _ - log.debug(s"Invalidating (transitively) by inheritance from $modified...") - val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps) - log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance) - transitiveInheritance - } + private def invalidateByInheritance(relations: Relations, modified: File): Set[File] = { + val inheritanceDeps = relations.inheritance.internal.reverse _ + log.debug(s"Invalidating (transitively) by inheritance from $modified...") + val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps) + log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance) + transitiveInheritance + } - override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { - log.debug(s"Invalidating ${change.modified}...") - val transitiveInheritance = invalidateByInheritance(relations, change.modified) - val reasonForInvalidation = memberRefInvalidator.invalidationReason(change) - log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.") - val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal, - relations.names, change) - val memberRef = transitiveInheritance flatMap memberRefInvalidation - val all = transitiveInheritance ++ memberRef - all - } + override protected def invalidateSource(relations: Relations, change: APIChange[File]): Set[File] = { + log.debug(s"Invalidating ${change.modified}...") + val transitiveInheritance = invalidateByInheritance(relations, change.modified) + val reasonForInvalidation = 
memberRefInvalidator.invalidationReason(change) + log.debug(s"$reasonForInvalidation\nAll member reference dependencies will be considered within this context.") + val memberRefInvalidation = memberRefInvalidator.get(relations.memberRef.internal, + relations.names, change) + val memberRef = transitiveInheritance flatMap memberRefInvalidation + val all = transitiveInheritance ++ memberRef + all + } - override protected def allDeps(relations: Relations): File => Set[File] = - f => relations.memberRef.internal.reverse(f) + override protected def allDeps(relations: Relations): File => Set[File] = + f => relations.memberRef.internal.reverse(f) } diff --git a/compile/inc/src/main/scala/sbt/inc/Locate.scala b/compile/inc/src/main/scala/sbt/inc/Locate.scala index cb485f28a..c6762d8a0 100644 --- a/compile/inc/src/main/scala/sbt/inc/Locate.scala +++ b/compile/inc/src/main/scala/sbt/inc/Locate.scala @@ -5,89 +5,91 @@ package sbt package inc import java.io.File -import java.util.zip.{ZipException, ZipFile} +import java.util.zip.{ ZipException, ZipFile } import Function.const -object Locate -{ - type DefinesClass = File => String => Boolean +object Locate { + type DefinesClass = File => String => Boolean - /** Right(src) provides the value for the found class - * Left(true) means that the class was found, but it had no associated value - * Left(false) means that the class was not found */ - def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] = - { - val gets = classpath.toStream.map(getValue(get)) - className => find(className, gets) - } - - def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] = - if(gets.isEmpty) - Left(false) - else - gets.head(name) match - { - case Left(false) => find(name, gets.tail) - case x => x - } - - /** Returns a function that searches the provided class path for - * a class name and returns the entry that defines that class.*/ - def entry(classpath: Seq[File], f: 
DefinesClass): String => Option[File] = - { - val entries = classpath.toStream.map { entry => (entry, f(entry)) } - className => entries collect { case (entry, defines) if defines(className) => entry } headOption; - } - def resolve(f: File, className: String): File = if(f.isDirectory) classFile(f, className) else f - - def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] = - { - val defClass = definesClass(entry) - val getF = get(entry) - className => if(defClass(className)) getF(className).toRight(true) else Left(false) - } - - def definesClass(entry: File): String => Boolean = - if(entry.isDirectory) - directoryDefinesClass(entry) - else if(entry.exists && classpath.ClasspathUtilities.isArchive(entry, contentFallback=true)) - jarDefinesClass(entry) - else - const(false) - - def jarDefinesClass(entry: File): String => Boolean = - { - import collection.JavaConversions._ - val jar = try { new ZipFile(entry, ZipFile.OPEN_READ) } catch { - // ZipException doesn't include the file name :( - case e: ZipException => throw new RuntimeException("Error opening zip file: " + entry.getName, e) - } - val entries = try { jar.entries.map(e => toClassName(e.getName)).toSet } finally { jar.close() } - entries.contains _ - } - - def toClassName(entry: String): String = - entry.stripSuffix(ClassExt).replace('/', '.') - - val ClassExt = ".class" - - def directoryDefinesClass(entry: File): String => Boolean = - className => classFile(entry, className).isFile - - def classFile(baseDir: File, className: String): File = - { - val (pkg, name) = components(className) - val dir = subDirectory(baseDir, pkg) - new File(dir, name + ClassExt) - } - - def subDirectory(base: File, parts: Seq[String]): File = - (base /: parts) ( (b, p) => new File(b,p) ) - - def components(className: String): (Seq[String], String) = - { - assume(!className.isEmpty) - val parts = className.split("\\.") - if(parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last) - } 
+ /** + * Right(src) provides the value for the found class + * Left(true) means that the class was found, but it had no associated value + * Left(false) means that the class was not found + */ + def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] = + { + val gets = classpath.toStream.map(getValue(get)) + className => find(className, gets) + } + + def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] = + if (gets.isEmpty) + Left(false) + else + gets.head(name) match { + case Left(false) => find(name, gets.tail) + case x => x + } + + /** + * Returns a function that searches the provided class path for + * a class name and returns the entry that defines that class. + */ + def entry(classpath: Seq[File], f: DefinesClass): String => Option[File] = + { + val entries = classpath.toStream.map { entry => (entry, f(entry)) } + className => entries collect { case (entry, defines) if defines(className) => entry } headOption; + } + def resolve(f: File, className: String): File = if (f.isDirectory) classFile(f, className) else f + + def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] = + { + val defClass = definesClass(entry) + val getF = get(entry) + className => if (defClass(className)) getF(className).toRight(true) else Left(false) + } + + def definesClass(entry: File): String => Boolean = + if (entry.isDirectory) + directoryDefinesClass(entry) + else if (entry.exists && classpath.ClasspathUtilities.isArchive(entry, contentFallback = true)) + jarDefinesClass(entry) + else + const(false) + + def jarDefinesClass(entry: File): String => Boolean = + { + import collection.JavaConversions._ + val jar = try { new ZipFile(entry, ZipFile.OPEN_READ) } catch { + // ZipException doesn't include the file name :( + case e: ZipException => throw new RuntimeException("Error opening zip file: " + entry.getName, e) + } + val entries = try { jar.entries.map(e => 
toClassName(e.getName)).toSet } finally { jar.close() } + entries.contains _ + } + + def toClassName(entry: String): String = + entry.stripSuffix(ClassExt).replace('/', '.') + + val ClassExt = ".class" + + def directoryDefinesClass(entry: File): String => Boolean = + className => classFile(entry, className).isFile + + def classFile(baseDir: File, className: String): File = + { + val (pkg, name) = components(className) + val dir = subDirectory(baseDir, pkg) + new File(dir, name + ClassExt) + } + + def subDirectory(base: File, parts: Seq[String]): File = + (base /: parts)((b, p) => new File(b, p)) + + def components(className: String): (Seq[String], String) = + { + assume(!className.isEmpty) + val parts = className.split("\\.") + if (parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last) + } } \ No newline at end of file diff --git a/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala index 22537c78d..9c977cb42 100644 --- a/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala +++ b/compile/inc/src/main/scala/sbt/inc/MemberRefInvalidator.scala @@ -51,74 +51,73 @@ import xsbt.api.APIUtil * of regular members then we'll invalidate sources that use those names. 
*/ private[inc] class MemberRefInvalidator(log: Logger) { - def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): - T => Set[File] = apiChange match { - case _: APIChangeDueToMacroDefinition[_] => - new InvalidateUnconditionally(memberRef) - case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty => - new InvalidateUnconditionally(memberRef) - case NamesChange(modifiedSrcFile, modifiedNames) => - new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames) - case _: SourceAPIChange[_] => - sys.error(wrongAPIChangeMsg) - } + def get[T](memberRef: Relation[File, T], usedNames: Relation[File, String], apiChange: APIChange[_]): T => Set[File] = apiChange match { + case _: APIChangeDueToMacroDefinition[_] => + new InvalidateUnconditionally(memberRef) + case NamesChange(_, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + new InvalidateUnconditionally(memberRef) + case NamesChange(modifiedSrcFile, modifiedNames) => + new NameHashFilteredInvalidator[T](usedNames, memberRef, modifiedNames.regularNames) + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } - def invalidationReason(apiChange: APIChange[_]): String = apiChange match { - case APIChangeDueToMacroDefinition(modifiedSrcFile) => - s"The $modifiedSrcFile source file declares a macro." - case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty => - s"""|The $modifiedSrcFile source file has the following implicit definitions changed: + def invalidationReason(apiChange: APIChange[_]): String = apiChange match { + case APIChangeDueToMacroDefinition(modifiedSrcFile) => + s"The $modifiedSrcFile source file declares a macro." 
+ case NamesChange(modifiedSrcFile, modifiedNames) if !modifiedNames.implicitNames.isEmpty => + s"""|The $modifiedSrcFile source file has the following implicit definitions changed: |\t${modifiedNames.implicitNames.mkString(", ")}.""".stripMargin - case NamesChange(modifiedSrcFile, modifiedNames) => - s"""|The $modifiedSrcFile source file has the following regular definitions changed: + case NamesChange(modifiedSrcFile, modifiedNames) => + s"""|The $modifiedSrcFile source file has the following regular definitions changed: |\t${modifiedNames.regularNames.mkString(", ")}.""".stripMargin - case _: SourceAPIChange[_] => - sys.error(wrongAPIChangeMsg) - } + case _: SourceAPIChange[_] => + sys.error(wrongAPIChangeMsg) + } - private val wrongAPIChangeMsg = - "MemberReferenceInvalidator.get should be called when name hashing is enabled " + - "and in that case we shouldn't have SourceAPIChange as an api change." + private val wrongAPIChangeMsg = + "MemberReferenceInvalidator.get should be called when name hashing is enabled " + + "and in that case we shouldn't have SourceAPIChange as an api change." 
- private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) { - def apply(from: T): Set[File] = { - val invalidated = memberRef.reverse(from) - if (!invalidated.isEmpty) - log.debug(s"The following member ref dependencies of $from are invalidated:\n" + - formatInvalidated(invalidated)) - invalidated - } - private def formatInvalidated(invalidated: Set[File]): String = { - val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath) - sortedFiles.map(file => "\t"+file).mkString("\n") - } - } + private class InvalidateUnconditionally[T](memberRef: Relation[File, T]) extends (T => Set[File]) { + def apply(from: T): Set[File] = { + val invalidated = memberRef.reverse(from) + if (!invalidated.isEmpty) + log.debug(s"The following member ref dependencies of $from are invalidated:\n" + + formatInvalidated(invalidated)) + invalidated + } + private def formatInvalidated(invalidated: Set[File]): String = { + val sortedFiles = invalidated.toSeq.sortBy(_.getAbsolutePath) + sortedFiles.map(file => "\t" + file).mkString("\n") + } + } - private class NameHashFilteredInvalidator[T]( - usedNames: Relation[File, String], - memberRef: Relation[File, T], - modifiedNames: Set[String]) extends (T => Set[File]) { + private class NameHashFilteredInvalidator[T]( + usedNames: Relation[File, String], + memberRef: Relation[File, T], + modifiedNames: Set[String]) extends (T => Set[File]) { - def apply(to: T): Set[File] = { - val dependent = memberRef.reverse(to) - filteredDependencies(dependent) - } - private def filteredDependencies(dependent: Set[File]): Set[File] = { - dependent.filter { - case from if APIUtil.isScalaSourceName(from.getName) => - val usedNamesInDependent = usedNames.forward(from) - val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent - if (modifiedAndUsedNames.isEmpty) { - log.debug(s"None of the modified names appears in $from. 
This dependency is not being considered for invalidation.") - false - } else { - log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames") - true - } - case from => - log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from") - true - } - } - } + def apply(to: T): Set[File] = { + val dependent = memberRef.reverse(to) + filteredDependencies(dependent) + } + private def filteredDependencies(dependent: Set[File]): Set[File] = { + dependent.filter { + case from if APIUtil.isScalaSourceName(from.getName) => + val usedNamesInDependent = usedNames.forward(from) + val modifiedAndUsedNames = modifiedNames intersect usedNamesInDependent + if (modifiedAndUsedNames.isEmpty) { + log.debug(s"None of the modified names appears in $from. This dependency is not being considered for invalidation.") + false + } else { + log.debug(s"The following modified names cause invalidation of $from: $modifiedAndUsedNames") + true + } + case from => + log.debug(s"Name hashing optimization doesn't apply to non-Scala dependency: $from") + true + } + } + } } diff --git a/compile/inc/src/main/scala/sbt/inc/Relations.scala b/compile/inc/src/main/scala/sbt/inc/Relations.scala index 4fd420cc5..a42794eb6 100644 --- a/compile/inc/src/main/scala/sbt/inc/Relations.scala +++ b/compile/inc/src/main/scala/sbt/inc/Relations.scala @@ -8,244 +8,244 @@ import java.io.File import Relations.Source import Relations.SourceDependencies +/** + * Provides mappings between source files, generated classes (products), and binaries. 
+ * Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project), + * external: a dependency on a source in another compilation group (tracked as the name of the class), + * binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group, + * inherited: a dependency that resulted from a public template inheriting, + * direct: any type of dependency, including inheritance. + */ +trait Relations { + /** All sources _with at least one product_ . */ + def allSources: collection.Set[File] -/** Provides mappings between source files, generated classes (products), and binaries. -* Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project), -* external: a dependency on a source in another compilation group (tracked as the name of the class), -* binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group, -* inherited: a dependency that resulted from a public template inheriting, -* direct: any type of dependency, including inheritance. */ -trait Relations -{ - /** All sources _with at least one product_ . */ - def allSources: collection.Set[File] + /** All products associated with sources. */ + def allProducts: collection.Set[File] - /** All products associated with sources. 
*/ - def allProducts: collection.Set[File] + /** All files that are recorded as a binary dependency of a source file.*/ + def allBinaryDeps: collection.Set[File] - /** All files that are recorded as a binary dependency of a source file.*/ - def allBinaryDeps: collection.Set[File] + /** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/ + def allInternalSrcDeps: collection.Set[File] - /** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/ - def allInternalSrcDeps: collection.Set[File] + /** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/ + def allExternalDeps: collection.Set[String] - /** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/ - def allExternalDeps: collection.Set[String] + /** Fully qualified names of classes generated from source file `src`. */ + def classNames(src: File): Set[String] - /** Fully qualified names of classes generated from source file `src`. */ - def classNames(src: File): Set[String] + /** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */ + def definesClass(name: String): Set[File] - /** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */ - def definesClass(name: String): Set[File] + /** The classes that were generated for source file `src`. */ + def products(src: File): Set[File] + /** The source files that generated class file `prod`. This is typically a set containing a single file. */ + def produced(prod: File): Set[File] - /** The classes that were generated for source file `src`. */ - def products(src: File): Set[File] - /** The source files that generated class file `prod`. 
This is typically a set containing a single file. */ - def produced(prod: File): Set[File] + /** The binary dependencies for the source file `src`. */ + def binaryDeps(src: File): Set[File] + /** The source files that depend on binary file `dep`. */ + def usesBinary(dep: File): Set[File] - /** The binary dependencies for the source file `src`. */ - def binaryDeps(src: File): Set[File] - /** The source files that depend on binary file `dep`. */ - def usesBinary(dep: File): Set[File] + /** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */ + def internalSrcDeps(src: File): Set[File] + /** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */ + def usesInternalSrc(dep: File): Set[File] - /** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */ - def internalSrcDeps(src: File): Set[File] - /** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */ - def usesInternalSrc(dep: File): Set[File] + /** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */ + def externalDeps(src: File): Set[String] + /** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */ + def usesExternal(dep: String): Set[File] - /** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */ - def externalDeps(src: File): Set[String] - /** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. 
*/ - def usesExternal(dep: String): Set[File] + private[inc] def usedNames(src: File): Set[String] - private[inc] def usedNames(src: File): Set[String] + /** Records internal source file `src` as generating class file `prod` with top-level class `name`. */ + def addProduct(src: File, prod: File, name: String): Relations - /** Records internal source file `src` as generating class file `prod` with top-level class `name`. */ - def addProduct(src: File, prod: File, name: String): Relations + /** + * Records internal source file `src` as depending on class `dependsOn` in an external source file. + * If `inherited` is true, this dependency is recorded as coming from a public template in `src` extending something in `dependsOn` (an inheritance dependency). + * Whatever the value of `inherited`, the dependency is also recorded as a direct dependency. + */ + def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations - /** Records internal source file `src` as depending on class `dependsOn` in an external source file. - * If `inherited` is true, this dependency is recorded as coming from a public template in `src` extending something in `dependsOn` (an inheritance dependency). - * Whatever the value of `inherited`, the dependency is also recorded as a direct dependency. */ - def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations + /** Records internal source file `src` depending on a dependency binary dependency `dependsOn`.*/ + def addBinaryDep(src: File, dependsOn: File): Relations - /** Records internal source file `src` depending on a dependency binary dependency `dependsOn`.*/ - def addBinaryDep(src: File, dependsOn: File): Relations + /** + * Records internal source file `src` as having direct dependencies on internal source files `directDependsOn` + * and inheritance dependencies on `inheritedDependsOn`. 
Everything in `inheritedDependsOn` must be included in `directDependsOn`; + * this method does not automatically record direct dependencies like `addExternalDep` does. + */ + def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations - /** Records internal source file `src` as having direct dependencies on internal source files `directDependsOn` - * and inheritance dependencies on `inheritedDependsOn`. Everything in `inheritedDependsOn` must be included in `directDependsOn`; - * this method does not automatically record direct dependencies like `addExternalDep` does.*/ - def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations + private[inc] def addUsedName(src: File, name: String): Relations - private[inc] def addUsedName(src: File, name: String): Relations + /** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */ + def ++(o: Relations): Relations - /** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */ - def ++ (o: Relations): Relations + /** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */ + def --(sources: Iterable[File]): Relations - /** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. */ - def -- (sources: Iterable[File]): Relations + @deprecated("OK to remove in 0.14", "0.13.1") + def groupBy[K](f: (File => K)): Map[K, Relations] - @deprecated("OK to remove in 0.14", "0.13.1") - def groupBy[K](f: (File => K)): Map[K, Relations] + /** The relation between internal sources and generated class files. */ + def srcProd: Relation[File, File] - /** The relation between internal sources and generated class files. 
*/ - def srcProd: Relation[File, File] + /** The dependency relation between internal sources and binaries. */ + def binaryDep: Relation[File, File] - /** The dependency relation between internal sources and binaries. */ - def binaryDep: Relation[File, File] + /** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/ + def internalSrcDep: Relation[File, File] - /** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/ - def internalSrcDep: Relation[File, File] + /** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/ + def externalDep: Relation[File, String] - /** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/ - def externalDep: Relation[File, String] + /** + * The source dependency relation between source files introduced by member reference. + * + * NOTE: All inheritance dependencies are included in this relation because in order to + * inherit from a member you have to refer to it. If you check documentation of `inheritance` + * you'll see that there's small oddity related to traits being the first parent of a + * class/trait that results in additional parents being introduced due to normalization. + * This relation properly accounts for that so the invariant that `memberRef` is a superset + * of `inheritance` is preserved. + */ + private[inc] def memberRef: SourceDependencies - /** - * The source dependency relation between source files introduced by member reference. - * - * NOTE: All inheritance dependencies are included in this relation because in order to - * inherit from a member you have to refer to it. 
If you check documentation of `inheritance` - * you'll see that there's small oddity related to traits being the first parent of a - * class/trait that results in additional parents being introduced due to normalization. - * This relation properly accounts for that so the invariant that `memberRef` is a superset - * of `inheritance` is preserved. - */ - private[inc] def memberRef: SourceDependencies + /** + * The source dependency relation between source files introduced by inheritance. + * The dependency by inheritance is introduced when a template (class or trait) mentions + * a given type in a parent position. + * + * NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency + * on a class being introduced. An example illustrates the best the problem. Let's consider + * the following structure: + * + * trait A extends B + * trait B extends C + * trait C extends D + * class D + * + * We are interested in dependencies by inheritance of `A`. One would expect it to be just `B` + * but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain + * normalization so the first parent of a type is a class. Therefore the example above is normalized + * to the following form: + * + * trait A extends D with B + * trait B extends D with C + * trait C extends D + * class D + * + * Therefore if you inherit from a trait you'll get an additional dependency on a class that is + * resolved transitively. You should not rely on this behavior, though. + * + */ + private[inc] def inheritance: SourceDependencies - /** - * The source dependency relation between source files introduced by inheritance. - * The dependency by inheritance is introduced when a template (class or trait) mentions - * a given type in a parent position. - * - * NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency - * on a class being introduced. An example illustrates the best the problem. 
Let's consider - * the following structure: - * - * trait A extends B - * trait B extends C - * trait C extends D - * class D - * - * We are interested in dependencies by inheritance of `A`. One would expect it to be just `B` - * but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain - * normalization so the first parent of a type is a class. Therefore the example above is normalized - * to the following form: - * - * trait A extends D with B - * trait B extends D with C - * trait C extends D - * class D - * - * Therefore if you inherit from a trait you'll get an additional dependency on a class that is - * resolved transitively. You should not rely on this behavior, though. - * - */ - private[inc] def inheritance: SourceDependencies + /** The dependency relations between sources. These include both direct and inherited dependencies.*/ + def direct: Source - /** The dependency relations between sources. These include both direct and inherited dependencies.*/ - def direct: Source + /** The inheritance dependency relations between sources.*/ + def publicInherited: Source - /** The inheritance dependency relations between sources.*/ - def publicInherited: Source + /** The relation between a source file and the fully qualified names of classes generated from it.*/ + def classes: Relation[File, String] - /** The relation between a source file and the fully qualified names of classes generated from it.*/ - def classes: Relation[File, String] - - /** - * Flag which indicates whether given Relations object supports operations needed by name hashing algorithm. - * - * At the moment the list includes the following operations: - * - * - memberRef: SourceDependencies - * - inheritance: SourceDependencies - * - * The `memberRef` and `inheritance` implement a new style source dependency tracking. When this flag is - * enabled access to `direct` and `publicInherited` relations is illegal and will cause runtime exception - * being thrown. 
That is done as an optimization that prevents from storing two overlapping sets of - * dependencies. - * - * Conversely, when `nameHashing` flag is disabled access to `memberRef` and `inheritance` - * relations is illegal and will cause runtime exception being thrown. - */ - private[inc] def nameHashing: Boolean - /** - * Relation between source files and _unqualified_ term and type names used in given source file. - */ - private[inc] def names: Relation[File, String] + /** + * Flag which indicates whether given Relations object supports operations needed by name hashing algorithm. + * + * At the moment the list includes the following operations: + * + * - memberRef: SourceDependencies + * - inheritance: SourceDependencies + * + * The `memberRef` and `inheritance` implement a new style source dependency tracking. When this flag is + * enabled access to `direct` and `publicInherited` relations is illegal and will cause runtime exception + * being thrown. That is done as an optimization that prevents from storing two overlapping sets of + * dependencies. + * + * Conversely, when `nameHashing` flag is disabled access to `memberRef` and `inheritance` + * relations is illegal and will cause runtime exception being thrown. + */ + private[inc] def nameHashing: Boolean + /** + * Relation between source files and _unqualified_ term and type names used in given source file. + */ + private[inc] def names: Relation[File, String] } +object Relations { + /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ + final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) { + def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external) + def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn)) + /** Drops all dependency mappings from `sources`. 
Acts naively, i.e., doesn't externalize internal deps on removed files.*/ + def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources) + def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external) -object Relations -{ - /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ - final class Source private[sbt](val internal: Relation[File,File], val external: Relation[File,String]) { - def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external) - def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn)) - /** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/ - def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources) - def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external) + @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1") + def groupBySource[K](f: File => K): Map[K, Source] = { - @deprecated("Broken implementation. 
OK to remove in 0.14", "0.13.1") - def groupBySource[K](f: File => K): Map[K, Source] = { + val i = internal.groupBy { case (a, b) => f(a) } + val e = external.groupBy { case (a, b) => f(a) } + val pairs = for (k <- i.keySet ++ e.keySet) yield (k, new Source(getOrEmpty(i, k), getOrEmpty(e, k))) + pairs.toMap + } - val i = internal.groupBy { case (a,b) => f(a) } - val e = external.groupBy { case (a,b) => f(a) } - val pairs = for( k <- i.keySet ++ e.keySet ) yield - (k, new Source( getOrEmpty(i, k), getOrEmpty(e, k) )) - pairs.toMap - } + override def equals(other: Any) = other match { + case o: Source => internal == o.internal && external == o.external + case _ => false + } - override def equals(other: Any) = other match { - case o: Source => internal == o.internal && external == o.external - case _ => false - } + override def hashCode = (internal, external).hashCode + } - override def hashCode = (internal, external).hashCode - } + /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ + private[inc] final class SourceDependencies(val internal: Relation[File, File], val external: Relation[File, String]) { + def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external) + def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn)) + /** Drops all dependency mappings from `sources`. 
Acts naively, i.e., doesn't externalize internal deps on removed files.*/ + def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources) + def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external) - /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ - private[inc] final class SourceDependencies(val internal: Relation[File,File], val external: Relation[File,String]) { - def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external) - def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn)) - /** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/ - def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources) - def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external) + override def equals(other: Any) = other match { + case o: SourceDependencies => internal == o.internal && external == o.external + case _ => false + } - override def equals(other: Any) = other match { - case o: SourceDependencies => internal == o.internal && external == o.external - case _ => false - } + override def hashCode = (internal, external).hashCode + } - override def hashCode = (internal, external).hashCode - } + private[sbt] def getOrEmpty[A, B, K](m: Map[K, Relation[A, B]], k: K): Relation[A, B] = m.getOrElse(k, Relation.empty) - private[sbt] def getOrEmpty[A,B,K](m: Map[K, Relation[A,B]], k: K): Relation[A,B] = m.getOrElse(k, Relation.empty) + private[this] lazy val e = Relation.empty[File, File] + private[this] lazy val estr = Relation.empty[File, String] + 
private[this] lazy val es = new Source(e, estr) - private[this] lazy val e = Relation.empty[File, File] - private[this] lazy val estr = Relation.empty[File, String] - private[this] lazy val es = new Source(e, estr) + def emptySource: Source = es + private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr) + def empty: Relations = empty(nameHashing = false) + private[inc] def empty(nameHashing: Boolean): Relations = + if (nameHashing) + new MRelationsNameHashing(e, e, emptySourceDependencies, emptySourceDependencies, estr, estr) + else + new MRelationsDefaultImpl(e, e, es, es, estr) - def emptySource: Source = es - private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr) - def empty: Relations = empty(nameHashing = false) - private[inc] def empty(nameHashing: Boolean): Relations = - if (nameHashing) - new MRelationsNameHashing(e, e, emptySourceDependencies, emptySourceDependencies, estr, estr) - else - new MRelationsDefaultImpl(e, e, es, es, estr) + def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations = + new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes) - def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations = - new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes) - - private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], - memberRef: SourceDependencies, inheritance: SourceDependencies, classes: Relation[File, String], - names: Relation[File, String]): Relations = - new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, inheritance = inheritance, - classes, names) - def makeSource(internal: Relation[File,File], external: 
Relation[File,String]): Source = new Source(internal, external) - private[inc] def makeSourceDependencies(internal: Relation[File,File], external: Relation[File,String]): SourceDependencies = new SourceDependencies(internal, external) + private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], + memberRef: SourceDependencies, inheritance: SourceDependencies, classes: Relation[File, String], + names: Relation[File, String]): Relations = + new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, inheritance = inheritance, + classes, names) + def makeSource(internal: Relation[File, File], external: Relation[File, String]): Source = new Source(internal, external) + private[inc] def makeSourceDependencies(internal: Relation[File, File], external: Relation[File, String]): SourceDependencies = new SourceDependencies(internal, external) } - /** * An abstract class that contains common functionality inherited by two implementations of Relations trait. * @@ -267,43 +267,41 @@ object Relations * `classes` is a relation between a source file and its generated fully-qualified class names. 
*/ private abstract class MRelationsCommon(val srcProd: Relation[File, File], val binaryDep: Relation[File, File], - val classes: Relation[File, String]) extends Relations -{ - def allSources: collection.Set[File] = srcProd._1s + val classes: Relation[File, String]) extends Relations { + def allSources: collection.Set[File] = srcProd._1s - def allProducts: collection.Set[File] = srcProd._2s - def allBinaryDeps: collection.Set[File] = binaryDep._2s - def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s - def allExternalDeps: collection.Set[String] = externalDep._2s + def allProducts: collection.Set[File] = srcProd._2s + def allBinaryDeps: collection.Set[File] = binaryDep._2s + def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s + def allExternalDeps: collection.Set[String] = externalDep._2s - def classNames(src: File): Set[String] = classes.forward(src) - def definesClass(name: String): Set[File] = classes.reverse(name) + def classNames(src: File): Set[String] = classes.forward(src) + def definesClass(name: String): Set[File] = classes.reverse(name) - def products(src: File): Set[File] = srcProd.forward(src) - def produced(prod: File): Set[File] = srcProd.reverse(prod) + def products(src: File): Set[File] = srcProd.forward(src) + def produced(prod: File): Set[File] = srcProd.reverse(prod) - def binaryDeps(src: File): Set[File] = binaryDep.forward(src) - def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep) + def binaryDeps(src: File): Set[File] = binaryDep.forward(src) + def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep) - def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src) - def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep) + def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src) + def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep) - def externalDeps(src: File): Set[String] = externalDep.forward(src) - def usesExternal(dep: String): 
Set[File] = externalDep.reverse(dep) + def externalDeps(src: File): Set[String] = externalDep.forward(src) + def usesExternal(dep: String): Set[File] = externalDep.reverse(dep) - def usedNames(src: File): Set[String] = names.forward(src) + def usedNames(src: File): Set[String] = names.forward(src) /** Making large Relations a little readable. */ private val userDir = sys.props("user.dir").stripSuffix("/") + "/" - private def nocwd(s: String) = s stripPrefix userDir - private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n" + private def nocwd(s: String) = s stripPrefix userDir + private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n" protected def relation_s(r: Relation[_, _]) = ( if (r.forwardMap.isEmpty) "Relation [ ]" else (r.all.toSeq map line_s sorted) mkString ("Relation [\n", "", "]") ) } - /** * This class implements Relations trait with support for tracking of `direct` and `publicInherited` source * dependencies. 
Therefore this class preserves the "old" (from sbt 0.13.0) dependency tracking logic and it's @@ -317,107 +315,106 @@ private abstract class MRelationsCommon(val srcProd: Relation[File, File], val b * */ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Relation[File, File], - // direct should include everything in inherited - val direct: Source, val publicInherited: Source, - classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) -{ - def internalSrcDep: Relation[File, File] = direct.internal - def externalDep: Relation[File, String] = direct.external + // direct should include everything in inherited + val direct: Source, val publicInherited: Source, + classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) { + def internalSrcDep: Relation[File, File] = direct.internal + def externalDep: Relation[File, String] = direct.external - def nameHashing: Boolean = false + def nameHashing: Boolean = false - def memberRef: SourceDependencies = - throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + - "when `nameHashing` flag is disabled.") - def inheritance: SourceDependencies = - throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + - "when `nameHashing` flag is disabled.") + def memberRef: SourceDependencies = + throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + + "when `nameHashing` flag is disabled.") + def inheritance: SourceDependencies = + throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + + "when `nameHashing` flag is disabled.") - def addProduct(src: File, prod: File, name: String): Relations = - new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct, - publicInherited = publicInherited, classes + (src, name)) + def addProduct(src: File, 
prod: File, name: String): Relations = + new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct, + publicInherited = publicInherited, classes + (src, name)) - def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = { - val newI = if(inherited) publicInherited.addExternal(src, dependsOn) else publicInherited - val newD = direct.addExternal(src, dependsOn) - new MRelationsDefaultImpl( srcProd, binaryDep, direct = newD, publicInherited = newI, classes) - } + def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = { + val newI = if (inherited) publicInherited.addExternal(src, dependsOn) else publicInherited + val newD = direct.addExternal(src, dependsOn) + new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes) + } - def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = - { - val newI = publicInherited.addInternal(src, inherited) - val newD = direct.addInternal(src, dependsOn) - new MRelationsDefaultImpl( srcProd, binaryDep, direct = newD, publicInherited = newI, classes) - } - - def names: Relation[File, String] = - throw new UnsupportedOperationException("Tracking of used names is not supported " + - "when `nameHashing` is disabled.") - - def addUsedName(src: File, name: String): Relations = - throw new UnsupportedOperationException("Tracking of used names is not supported " + - "when `nameHashing` is disabled.") - - def addBinaryDep(src: File, dependsOn: File): Relations = - new MRelationsDefaultImpl( srcProd, binaryDep + (src, dependsOn), direct = direct, - publicInherited = publicInherited, classes) - - def ++ (o: Relations): Relations = { - if (nameHashing != o.nameHashing) - throw new UnsupportedOperationException("The `++` operation is not supported for relations " + - "with different values of `nameHashing` flag.") - new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ 
o.direct, - publicInherited ++ o.publicInherited, classes ++ o.classes) - } - def -- (sources: Iterable[File]) = - new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources, - publicInherited = publicInherited -- sources, classes -- sources) - - @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1") - def groupBy[K](f: File => K): Map[K, Relations] = - { - type MapRel[T] = Map[K, Relation[File, T]] - def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source], - inherited: Map[K, Source], classesMap: MapRel[String], - namesMap: MapRel[String]): Map[K, Relations] = - { - def kRelations(k: K): Relations = { - def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k) - def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource) - def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies = - m.getOrElse(k, Relations.emptySourceDependencies) - new MRelationsDefaultImpl( get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited), - get(classesMap)) - } - val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList - Map( keys.map( (k: K) => (k, kRelations(k)) ) : _*) - } - - def f1[B](item: (File, B)): K = f(item._1) - - outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f), - publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1)) + def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = + { + val newI = publicInherited.addInternal(src, inherited) + val newD = direct.addInternal(src, dependsOn) + new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes) } - override def equals(other: Any) = other match { - case o: MRelationsDefaultImpl => - srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct && - publicInherited == o.publicInherited && classes 
== o.classes - case _ => false - } + def names: Relation[File, String] = + throw new UnsupportedOperationException("Tracking of used names is not supported " + + "when `nameHashing` is disabled.") - override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode + def addUsedName(src: File, name: String): Relations = + throw new UnsupportedOperationException("Tracking of used names is not supported " + + "when `nameHashing` is disabled.") - override def toString = ( - """ + def addBinaryDep(src: File, dependsOn: File): Relations = + new MRelationsDefaultImpl(srcProd, binaryDep + (src, dependsOn), direct = direct, + publicInherited = publicInherited, classes) + + def ++(o: Relations): Relations = { + if (nameHashing != o.nameHashing) + throw new UnsupportedOperationException("The `++` operation is not supported for relations " + + "with different values of `nameHashing` flag.") + new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct, + publicInherited ++ o.publicInherited, classes ++ o.classes) + } + def --(sources: Iterable[File]) = + new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources, + publicInherited = publicInherited -- sources, classes -- sources) + + @deprecated("Broken implementation. 
OK to remove in 0.14", "0.13.1") + def groupBy[K](f: File => K): Map[K, Relations] = + { + type MapRel[T] = Map[K, Relation[File, T]] + def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source], + inherited: Map[K, Source], classesMap: MapRel[String], + namesMap: MapRel[String]): Map[K, Relations] = + { + def kRelations(k: K): Relations = { + def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k) + def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource) + def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies = + m.getOrElse(k, Relations.emptySourceDependencies) + new MRelationsDefaultImpl(get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited), + get(classesMap)) + } + val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList + Map(keys.map((k: K) => (k, kRelations(k))): _*) + } + + def f1[B](item: (File, B)): K = f(item._1) + + outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f), + publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1)) + } + + override def equals(other: Any) = other match { + case o: MRelationsDefaultImpl => + srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct && + publicInherited == o.publicInherited && classes == o.classes + case _ => false + } + + override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode + + override def toString = ( + """ |Relations: | products: %s | bin deps: %s | src deps: %s | ext deps: %s | class names: %s - """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s : _*) - ) + """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s: _*) + ) } /** @@ -426,78 +423,77 @@ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Re * needed by the 
name hashing invalidation algorithm. */ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Relation[File, File], - // memberRef should include everything in inherited - val memberRef: SourceDependencies, val inheritance: SourceDependencies, - classes: Relation[File, String], - val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) -{ - def direct: Source = - throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " + - "when `nameHashing` flag is disabled.") - def publicInherited: Source = - throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " + - "when `nameHashing` flag is disabled.") + // memberRef should include everything in inherited + val memberRef: SourceDependencies, val inheritance: SourceDependencies, + classes: Relation[File, String], + val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) { + def direct: Source = + throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " + + "when `nameHashing` flag is disabled.") + def publicInherited: Source = + throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " + + "when `nameHashing` flag is disabled.") - val nameHashing: Boolean = true + val nameHashing: Boolean = true - def internalSrcDep: Relation[File, File] = memberRef.internal - def externalDep: Relation[File, String] = memberRef.external + def internalSrcDep: Relation[File, File] = memberRef.internal + def externalDep: Relation[File, String] = memberRef.external - def addProduct(src: File, prod: File, name: String): Relations = - new MRelationsNameHashing(srcProd + (src, prod), binaryDep, memberRef = memberRef, - inheritance = inheritance, classes + (src, name), names = names) + def addProduct(src: File, prod: File, name: String): Relations = + new 
MRelationsNameHashing(srcProd + (src, prod), binaryDep, memberRef = memberRef, + inheritance = inheritance, classes + (src, name), names = names) - def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = { - val newIH = if(inherited) inheritance.addExternal(src, dependsOn) else inheritance - val newMR = memberRef.addExternal(src, dependsOn) - new MRelationsNameHashing( srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes, - names = names) - } + def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = { + val newIH = if (inherited) inheritance.addExternal(src, dependsOn) else inheritance + val newMR = memberRef.addExternal(src, dependsOn) + new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes, + names = names) + } - def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = { - val newIH = inheritance.addInternal(src, inherited) - val newMR = memberRef.addInternal(src, dependsOn) - new MRelationsNameHashing( srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes, - names = names) - } + def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = { + val newIH = inheritance.addInternal(src, inherited) + val newMR = memberRef.addInternal(src, dependsOn) + new MRelationsNameHashing(srcProd, binaryDep, memberRef = newMR, inheritance = newIH, classes, + names = names) + } - def addUsedName(src: File, name: String): Relations = - new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, - inheritance = inheritance, classes, names = names + (src, name)) + def addUsedName(src: File, name: String): Relations = + new MRelationsNameHashing(srcProd, binaryDep, memberRef = memberRef, + inheritance = inheritance, classes, names = names + (src, name)) - def addBinaryDep(src: File, dependsOn: File): Relations = - new MRelationsNameHashing(srcProd, binaryDep + (src, 
dependsOn), memberRef = memberRef, - inheritance = inheritance, classes, names = names) + def addBinaryDep(src: File, dependsOn: File): Relations = + new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), memberRef = memberRef, + inheritance = inheritance, classes, names = names) - def ++ (o: Relations): Relations = { - if (!o.nameHashing) - throw new UnsupportedOperationException("The `++` operation is not supported for relations " + - "with different values of `nameHashing` flag.") - new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, - memberRef = memberRef ++ o.memberRef, inheritance = inheritance ++ o.inheritance, - classes ++ o.classes, names = names ++ o.names) - } - def -- (sources: Iterable[File]) = - new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources, - memberRef = memberRef -- sources, inheritance = inheritance -- sources, classes -- sources, - names = names -- sources) + def ++(o: Relations): Relations = { + if (!o.nameHashing) + throw new UnsupportedOperationException("The `++` operation is not supported for relations " + + "with different values of `nameHashing` flag.") + new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, + memberRef = memberRef ++ o.memberRef, inheritance = inheritance ++ o.inheritance, + classes ++ o.classes, names = names ++ o.names) + } + def --(sources: Iterable[File]) = + new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources, + memberRef = memberRef -- sources, inheritance = inheritance -- sources, classes -- sources, + names = names -- sources) - def groupBy[K](f: File => K): Map[K, Relations] = { - throw new UnsupportedOperationException("Merging of Analyses that have" + - "`relations.nameHashing` set to `true` is not supported.") - } + def groupBy[K](f: File => K): Map[K, Relations] = { + throw new UnsupportedOperationException("Merging of Analyses that have" + + "`relations.nameHashing` set to `true` is not supported.") + } - override def 
equals(other: Any) = other match { - case o: MRelationsNameHashing => - srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef && - inheritance == o.inheritance && classes == o.classes - case _ => false - } + override def equals(other: Any) = other match { + case o: MRelationsNameHashing => + srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef && + inheritance == o.inheritance && classes == o.classes + case _ => false + } - override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode + override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode - override def toString = ( - """ + override def toString = ( + """ |Relations (with name hashing enabled): | products: %s | bin deps: %s @@ -505,7 +501,7 @@ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Re | ext deps: %s | class names: %s | used names: %s - """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s : _*) - ) + """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s: _*) + ) } diff --git a/compile/inc/src/main/scala/sbt/inc/SourceInfo.scala b/compile/inc/src/main/scala/sbt/inc/SourceInfo.scala index ff4727ae9..1b1ab4e4b 100644 --- a/compile/inc/src/main/scala/sbt/inc/SourceInfo.scala +++ b/compile/inc/src/main/scala/sbt/inc/SourceInfo.scala @@ -1,40 +1,36 @@ package sbt package inc - import xsbti.Problem +import xsbti.Problem - import java.io.File +import java.io.File -trait SourceInfo -{ - def reportedProblems: Seq[Problem] - def unreportedProblems: Seq[Problem] +trait SourceInfo { + def reportedProblems: Seq[Problem] + def unreportedProblems: Seq[Problem] } -trait SourceInfos -{ - def ++(o: SourceInfos): SourceInfos - def add(file: File, info: SourceInfo): SourceInfos - def --(files: Iterable[File]): SourceInfos - def groupBy[K](f: (File) 
=> K): Map[K, SourceInfos] - def get(file: File): SourceInfo - def allInfos: Map[File, SourceInfo] +trait SourceInfos { + def ++(o: SourceInfos): SourceInfos + def add(file: File, info: SourceInfo): SourceInfos + def --(files: Iterable[File]): SourceInfos + def groupBy[K](f: (File) => K): Map[K, SourceInfos] + def get(file: File): SourceInfo + def allInfos: Map[File, SourceInfo] } -object SourceInfos -{ - def empty: SourceInfos = make(Map.empty) - def make(m: Map[File, SourceInfo]): SourceInfos = new MSourceInfos(m) +object SourceInfos { + def empty: SourceInfos = make(Map.empty) + def make(m: Map[File, SourceInfo]): SourceInfos = new MSourceInfos(m) - val emptyInfo: SourceInfo = makeInfo(Nil, Nil) - def makeInfo(reported: Seq[Problem], unreported: Seq[Problem]): SourceInfo = - new MSourceInfo(reported, unreported) - def merge(infos: Traversable[SourceInfos]): SourceInfos = (SourceInfos.empty /: infos)(_ ++ _) + val emptyInfo: SourceInfo = makeInfo(Nil, Nil) + def makeInfo(reported: Seq[Problem], unreported: Seq[Problem]): SourceInfo = + new MSourceInfo(reported, unreported) + def merge(infos: Traversable[SourceInfos]): SourceInfos = (SourceInfos.empty /: infos)(_ ++ _) } -private final class MSourceInfos(val allInfos: Map[File, SourceInfo]) extends SourceInfos -{ - def ++(o: SourceInfos) = new MSourceInfos(allInfos ++ o.allInfos) - def --(sources: Iterable[File]) = new MSourceInfos(allInfos -- sources) - def groupBy[K](f: File => K): Map[K, SourceInfos] = allInfos groupBy (x => f(x._1)) map { x => (x._1, new MSourceInfos(x._2)) } - def add(file: File, info: SourceInfo) = new MSourceInfos(allInfos + ((file, info))) - def get(file:File) = allInfos.getOrElse(file, SourceInfos.emptyInfo) +private final class MSourceInfos(val allInfos: Map[File, SourceInfo]) extends SourceInfos { + def ++(o: SourceInfos) = new MSourceInfos(allInfos ++ o.allInfos) + def --(sources: Iterable[File]) = new MSourceInfos(allInfos -- sources) + def groupBy[K](f: File => K): Map[K, 
SourceInfos] = allInfos groupBy (x => f(x._1)) map { x => (x._1, new MSourceInfos(x._2)) } + def add(file: File, info: SourceInfo) = new MSourceInfos(allInfos + ((file, info))) + def get(file: File) = allInfos.getOrElse(file, SourceInfos.emptyInfo) } private final class MSourceInfo(val reportedProblems: Seq[Problem], val unreportedProblems: Seq[Problem]) extends SourceInfo diff --git a/compile/inc/src/main/scala/sbt/inc/Stamp.scala b/compile/inc/src/main/scala/sbt/inc/Stamp.scala index 98f216967..bb262d95c 100644 --- a/compile/inc/src/main/scala/sbt/inc/Stamp.scala +++ b/compile/inc/src/main/scala/sbt/inc/Stamp.scala @@ -4,191 +4,186 @@ package sbt package inc -import java.io.{File, IOException} +import java.io.{ File, IOException } import Stamp.getStamp import scala.util.matching.Regex -trait ReadStamps -{ - /** The Stamp for the given product at the time represented by this Stamps instance.*/ - def product(prod: File): Stamp - /** The Stamp for the given source file at the time represented by this Stamps instance.*/ - def internalSource(src: File): Stamp - /** The Stamp for the given binary dependency at the time represented by this Stamps instance.*/ - def binary(bin: File): Stamp +trait ReadStamps { + /** The Stamp for the given product at the time represented by this Stamps instance.*/ + def product(prod: File): Stamp + /** The Stamp for the given source file at the time represented by this Stamps instance.*/ + def internalSource(src: File): Stamp + /** The Stamp for the given binary dependency at the time represented by this Stamps instance.*/ + def binary(bin: File): Stamp } /** Provides information about files as they were at a specific time.*/ -trait Stamps extends ReadStamps -{ - def allInternalSources: collection.Set[File] - def allBinaries: collection.Set[File] - def allProducts: collection.Set[File] - - def sources: Map[File, Stamp] - def binaries: Map[File, Stamp] - def products: Map[File, Stamp] - def classNames: Map[File, String] - - def 
className(bin: File): Option[String] - - def markInternalSource(src: File, s: Stamp): Stamps - def markBinary(bin: File, className: String, s: Stamp): Stamps - def markProduct(prod: File, s: Stamp): Stamps - - def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps - - def ++ (o: Stamps): Stamps - def groupBy[K](prod: Map[K, File => Boolean], sourcesGrouping: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] +trait Stamps extends ReadStamps { + def allInternalSources: collection.Set[File] + def allBinaries: collection.Set[File] + def allProducts: collection.Set[File] + + def sources: Map[File, Stamp] + def binaries: Map[File, Stamp] + def products: Map[File, Stamp] + def classNames: Map[File, String] + + def className(bin: File): Option[String] + + def markInternalSource(src: File, s: Stamp): Stamps + def markBinary(bin: File, className: String, s: Stamp): Stamps + def markProduct(prod: File, s: Stamp): Stamps + + def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps + + def ++(o: Stamps): Stamps + def groupBy[K](prod: Map[K, File => Boolean], sourcesGrouping: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] } -sealed trait Stamp -{ - override def equals(other: Any): Boolean = other match { - case o: Stamp => Stamp.equivStamp.equiv(this, o) - case _ => false - } +sealed trait Stamp { + override def equals(other: Any): Boolean = other match { + case o: Stamp => Stamp.equivStamp.equiv(this, o) + case _ => false + } override def toString: String = Stamp.toString(this) } final class Hash(val value: Array[Byte]) extends Stamp { - override def hashCode: Int = java.util.Arrays.hashCode(value) + override def hashCode: Int = java.util.Arrays.hashCode(value) } final class LastModified(val value: Long) extends Stamp { - override def hashCode: Int = (value ^ (value >>> 32)).toInt + override def hashCode: Int = (value ^ (value >>> 32)).toInt } final class Exists(val value: Boolean) 
extends Stamp { - override def hashCode: Int = if(value) 0 else 1 + override def hashCode: Int = if (value) 0 else 1 } -object Stamp -{ - implicit val equivStamp: Equiv[Stamp] = new Equiv[Stamp] { - def equiv(a: Stamp, b: Stamp) = (a,b) match { - case (h1: Hash, h2: Hash) => h1.value sameElements h2.value - case (e1: Exists, e2: Exists) => e1.value == e2.value - case (lm1: LastModified, lm2: LastModified) => lm1.value == lm2.value - case _ => false - } - } +object Stamp { + implicit val equivStamp: Equiv[Stamp] = new Equiv[Stamp] { + def equiv(a: Stamp, b: Stamp) = (a, b) match { + case (h1: Hash, h2: Hash) => h1.value sameElements h2.value + case (e1: Exists, e2: Exists) => e1.value == e2.value + case (lm1: LastModified, lm2: LastModified) => lm1.value == lm2.value + case _ => false + } + } // NOTE: toString/fromString used for serialization, not just for debug prints. def toString(s: Stamp): String = s match { - case e: Exists => if(e.value) "exists" else "absent" - case h: Hash => "hash(" + Hash.toHex(h.value) + ")" + case e: Exists => if (e.value) "exists" else "absent" + case h: Hash => "hash(" + Hash.toHex(h.value) + ")" case lm: LastModified => "lastModified(" + lm.value + ")" } - private val hashPattern = """hash\((\w+)\)""".r - private val lastModifiedPattern = """lastModified\((\d+)\)""".r + private val hashPattern = """hash\((\w+)\)""".r + private val lastModifiedPattern = """lastModified\((\d+)\)""".r def fromString(s: String): Stamp = s match { - case "exists" => new Exists(true) - case "absent" => new Exists(false) - case hashPattern(value) => new Hash(Hash.fromHex(value)) - case lastModifiedPattern(value) => new LastModified(java.lang.Long.parseLong(value)) - case _ => throw new IllegalArgumentException("Unrecognized Stamp string representation: " + s) - } + case "exists" => new Exists(true) + case "absent" => new Exists(false) + case hashPattern(value) => new Hash(Hash.fromHex(value)) + case lastModifiedPattern(value) => new 
LastModified(java.lang.Long.parseLong(value)) + case _ => throw new IllegalArgumentException("Unrecognized Stamp string representation: " + s) + } - def show(s: Stamp): String = s match { - case h: Hash => "hash(" + Hash.toHex(h.value) + ")" - case e: Exists => if(e.value) "exists" else "does not exist" - case lm: LastModified => "last modified(" + lm.value + ")" - } - - val hash = (f: File) => tryStamp(new Hash(Hash(f))) - val lastModified = (f: File) => tryStamp(new LastModified(f.lastModified)) - val exists = (f: File) => tryStamp(if(f.exists) present else notPresent) - - def tryStamp(g: => Stamp): Stamp = try { g } catch { case i: IOException => notPresent } - - val notPresent = new Exists(false) - val present = new Exists(true) - - def getStamp(map: Map[File, Stamp], src: File): Stamp = map.getOrElse(src, notPresent) + def show(s: Stamp): String = s match { + case h: Hash => "hash(" + Hash.toHex(h.value) + ")" + case e: Exists => if (e.value) "exists" else "does not exist" + case lm: LastModified => "last modified(" + lm.value + ")" + } + + val hash = (f: File) => tryStamp(new Hash(Hash(f))) + val lastModified = (f: File) => tryStamp(new LastModified(f.lastModified)) + val exists = (f: File) => tryStamp(if (f.exists) present else notPresent) + + def tryStamp(g: => Stamp): Stamp = try { g } catch { case i: IOException => notPresent } + + val notPresent = new Exists(false) + val present = new Exists(true) + + def getStamp(map: Map[File, Stamp], src: File): Stamp = map.getOrElse(src, notPresent) } -object Stamps -{ - /** Creates a ReadStamps instance that will calculate and cache the stamp for sources and binaries - * on the first request according to the provided `srcStamp` and `binStamp` functions. Each - * stamp is calculated separately on demand. - * The stamp for a product is always recalculated. 
*/ - def initial(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp): ReadStamps = new InitialStamps(prodStamp, srcStamp, binStamp) - - def empty: Stamps = - { - val eSt = Map.empty[File, Stamp] - apply(eSt, eSt, eSt, Map.empty[File, String]) - } - def apply(products: Map[File, Stamp], sources: Map[File, Stamp], binaries: Map[File, Stamp], binaryClassNames: Map[File, String]): Stamps = - new MStamps(products, sources, binaries, binaryClassNames) +object Stamps { + /** + * Creates a ReadStamps instance that will calculate and cache the stamp for sources and binaries + * on the first request according to the provided `srcStamp` and `binStamp` functions. Each + * stamp is calculated separately on demand. + * The stamp for a product is always recalculated. + */ + def initial(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp): ReadStamps = new InitialStamps(prodStamp, srcStamp, binStamp) - def merge(stamps: Traversable[Stamps]): Stamps = (Stamps.empty /: stamps)(_ ++ _) + def empty: Stamps = + { + val eSt = Map.empty[File, Stamp] + apply(eSt, eSt, eSt, Map.empty[File, String]) + } + def apply(products: Map[File, Stamp], sources: Map[File, Stamp], binaries: Map[File, Stamp], binaryClassNames: Map[File, String]): Stamps = + new MStamps(products, sources, binaries, binaryClassNames) + + def merge(stamps: Traversable[Stamps]): Stamps = (Stamps.empty /: stamps)(_ ++ _) } -private class MStamps(val products: Map[File, Stamp], val sources: Map[File, Stamp], val binaries: Map[File, Stamp], val classNames: Map[File, String]) extends Stamps -{ - def allInternalSources: collection.Set[File] = sources.keySet - def allBinaries: collection.Set[File] = binaries.keySet - def allProducts: collection.Set[File] = products.keySet - - def ++ (o: Stamps): Stamps = - new MStamps(products ++ o.products, sources ++ o.sources, binaries ++ o.binaries, classNames ++ o.classNames) - - def markInternalSource(src: File, s: Stamp): Stamps = - new 
MStamps(products, sources.updated(src, s), binaries, classNames) +private class MStamps(val products: Map[File, Stamp], val sources: Map[File, Stamp], val binaries: Map[File, Stamp], val classNames: Map[File, String]) extends Stamps { + def allInternalSources: collection.Set[File] = sources.keySet + def allBinaries: collection.Set[File] = binaries.keySet + def allProducts: collection.Set[File] = products.keySet - def markBinary(bin: File, className: String, s: Stamp): Stamps = - new MStamps(products, sources, binaries.updated(bin, s), classNames.updated(bin, className)) + def ++(o: Stamps): Stamps = + new MStamps(products ++ o.products, sources ++ o.sources, binaries ++ o.binaries, classNames ++ o.classNames) - def markProduct(prod: File, s: Stamp): Stamps = - new MStamps(products.updated(prod, s), sources, binaries, classNames) - - def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps = - new MStamps(products.filterKeys(prod), sources -- removeSources, binaries.filterKeys(bin), classNames.filterKeys(bin)) - - def groupBy[K](prod: Map[K, File => Boolean], f: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] = - { - val sourcesMap: Map[K, Map[File, Stamp]] = sources.groupBy(x => f(x._1)) + def markInternalSource(src: File, s: Stamp): Stamps = + new MStamps(products, sources.updated(src, s), binaries, classNames) - val constFalse = (f: File) => false - def kStamps(k: K): Stamps = new MStamps( - products.filterKeys(prod.getOrElse(k, constFalse)), - sourcesMap.getOrElse(k, Map.empty[File,Stamp]), - binaries.filterKeys(bin.getOrElse(k, constFalse)), - classNames.filterKeys(bin.getOrElse(k, constFalse)) - ) + def markBinary(bin: File, className: String, s: Stamp): Stamps = + new MStamps(products, sources, binaries.updated(bin, s), classNames.updated(bin, className)) - (for (k <- prod.keySet ++ sourcesMap.keySet ++ bin.keySet) yield (k, kStamps(k))).toMap - } + def markProduct(prod: File, s: Stamp): Stamps = + new 
MStamps(products.updated(prod, s), sources, binaries, classNames) - def product(prod: File) = getStamp(products, prod) - def internalSource(src: File) = getStamp(sources, src) - def binary(bin: File) = getStamp(binaries, bin) - def className(bin: File) = classNames get bin + def filter(prod: File => Boolean, removeSources: Iterable[File], bin: File => Boolean): Stamps = + new MStamps(products.filterKeys(prod), sources -- removeSources, binaries.filterKeys(bin), classNames.filterKeys(bin)) - override def equals(other: Any): Boolean = other match { - case o: MStamps => products == o.products && sources == o.sources && binaries == o.binaries && classNames == o.classNames - case _ => false - } + def groupBy[K](prod: Map[K, File => Boolean], f: File => K, bin: Map[K, File => Boolean]): Map[K, Stamps] = + { + val sourcesMap: Map[K, Map[File, Stamp]] = sources.groupBy(x => f(x._1)) - override lazy val hashCode: Int = (products :: sources :: binaries :: classNames :: Nil).hashCode - - override def toString: String = - "Stamps for: %d products, %d sources, %d binaries, %d classNames".format(products.size, sources.size, binaries.size, classNames.size) + val constFalse = (f: File) => false + def kStamps(k: K): Stamps = new MStamps( + products.filterKeys(prod.getOrElse(k, constFalse)), + sourcesMap.getOrElse(k, Map.empty[File, Stamp]), + binaries.filterKeys(bin.getOrElse(k, constFalse)), + classNames.filterKeys(bin.getOrElse(k, constFalse)) + ) + + (for (k <- prod.keySet ++ sourcesMap.keySet ++ bin.keySet) yield (k, kStamps(k))).toMap + } + + def product(prod: File) = getStamp(products, prod) + def internalSource(src: File) = getStamp(sources, src) + def binary(bin: File) = getStamp(binaries, bin) + def className(bin: File) = classNames get bin + + override def equals(other: Any): Boolean = other match { + case o: MStamps => products == o.products && sources == o.sources && binaries == o.binaries && classNames == o.classNames + case _ => false + } + + override lazy val 
hashCode: Int = (products :: sources :: binaries :: classNames :: Nil).hashCode + + override def toString: String = + "Stamps for: %d products, %d sources, %d binaries, %d classNames".format(products.size, sources.size, binaries.size, classNames.size) } -private class InitialStamps(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp) extends ReadStamps -{ - import collection.mutable.{HashMap, Map} - // cached stamps for files that do not change during compilation - private val sources: Map[File, Stamp] = new HashMap - private val binaries: Map[File, Stamp] = new HashMap - - def product(prod: File): Stamp = prodStamp(prod) - def internalSource(src: File): Stamp = synchronized { sources.getOrElseUpdate(src, srcStamp(src)) } - def binary(bin: File): Stamp = synchronized { binaries.getOrElseUpdate(bin, binStamp(bin)) } +private class InitialStamps(prodStamp: File => Stamp, srcStamp: File => Stamp, binStamp: File => Stamp) extends ReadStamps { + import collection.mutable.{ HashMap, Map } + // cached stamps for files that do not change during compilation + private val sources: Map[File, Stamp] = new HashMap + private val binaries: Map[File, Stamp] = new HashMap + + def product(prod: File): Stamp = prodStamp(prod) + def internalSource(src: File): Stamp = synchronized { sources.getOrElseUpdate(src, srcStamp(src)) } + def binary(bin: File): Stamp = synchronized { binaries.getOrElseUpdate(bin, binStamp(bin)) } } \ No newline at end of file diff --git a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala index 2c711d14f..960c6e6be 100644 --- a/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala +++ b/compile/integration/src/main/scala/sbt/compiler/AggressiveCompile.scala @@ -6,230 +6,223 @@ package compiler import inc._ - import scala.annotation.tailrec - import java.io.File - import classpath.ClasspathUtilities - import classfile.Analyze - 
import inc.Locate.DefinesClass - import inc.IncOptions - import CompileSetup._ - import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat } +import scala.annotation.tailrec +import java.io.File +import classpath.ClasspathUtilities +import classfile.Analyze +import inc.Locate.DefinesClass +import inc.IncOptions +import CompileSetup._ +import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat } - import xsbti.{ Reporter, AnalysisCallback } - import xsbti.api.Source - import xsbti.compile.{CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress} - import CompileOrder.{JavaThenScala, Mixed, ScalaThenJava} +import xsbti.{ Reporter, AnalysisCallback } +import xsbti.api.Source +import xsbti.compile.{ CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress } +import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava } final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File], - val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val progress: Option[CompileProgress], val getAnalysis: File => Option[Analysis], val definesClass: DefinesClass, - val reporter: Reporter, val compiler: AnalyzingCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions) + val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val progress: Option[CompileProgress], val getAnalysis: File => Option[Analysis], val definesClass: DefinesClass, + val reporter: Reporter, val compiler: AnalyzingCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions) -class AggressiveCompile(cacheFile: File) -{ - def apply(compiler: AnalyzingCompiler, - javac: xsbti.compile.JavaCompiler, - sources: Seq[File], classpath: Seq[File], - output: Output, - cache: 
GlobalsCache, - progress: Option[CompileProgress] = None, - options: Seq[String] = Nil, - javacOptions: Seq[String] = Nil, - analysisMap: File => Option[Analysis] = { _ => None }, - definesClass: DefinesClass = Locate.definesClass _, - reporter: Reporter, - compileOrder: CompileOrder = Mixed, - skip: Boolean = false, - incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = - { - val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), - compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing) - compile1(sources, classpath, setup, progress, store, analysisMap, definesClass, - compiler, javac, reporter, skip, cache, incrementalCompilerOptions) - } +class AggressiveCompile(cacheFile: File) { + def apply(compiler: AnalyzingCompiler, + javac: xsbti.compile.JavaCompiler, + sources: Seq[File], classpath: Seq[File], + output: Output, + cache: GlobalsCache, + progress: Option[CompileProgress] = None, + options: Seq[String] = Nil, + javacOptions: Seq[String] = Nil, + analysisMap: File => Option[Analysis] = { _ => None }, + definesClass: DefinesClass = Locate.definesClass _, + reporter: Reporter, + compileOrder: CompileOrder = Mixed, + skip: Boolean = false, + incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = + { + val setup = new CompileSetup(output, new CompileOptions(options, javacOptions), + compiler.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing) + compile1(sources, classpath, setup, progress, store, analysisMap, definesClass, + compiler, javac, reporter, skip, cache, incrementalCompilerOptions) + } - def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] = - args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath) + def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] = + args.bootClasspathFor(classpath) ++ args.extClasspath ++ 
args.finishClasspath(classpath) - def compile1(sources: Seq[File], - classpath: Seq[File], - setup: CompileSetup, progress: Option[CompileProgress], - store: AnalysisStore, - analysis: File => Option[Analysis], - definesClass: DefinesClass, - compiler: AnalyzingCompiler, - javac: xsbti.compile.JavaCompiler, - reporter: Reporter, skip: Boolean, - cache: GlobalsCache, - incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = - { - val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions) - if(skip) - previousAnalysis - else { - val config = new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup, - progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions) - val (modified, result) = compile2(config) - if(modified) - store.set(result, setup) - result - } - } - def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Boolean, Analysis) = - { - import config._ - import currentSetup._ - val absClasspath = classpath.map(_.getAbsoluteFile) - val apiOption = (api: Either[Boolean, Source]) => api.right.toOption - val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp) - val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath) - val entry = Locate.entry(searchClasspath, definesClass) + def compile1(sources: Seq[File], + classpath: Seq[File], + setup: CompileSetup, progress: Option[CompileProgress], + store: AnalysisStore, + analysis: File => Option[Analysis], + definesClass: DefinesClass, + compiler: AnalyzingCompiler, + javac: xsbti.compile.JavaCompiler, + reporter: Reporter, skip: Boolean, + cache: GlobalsCache, + incrementalCompilerOptions: IncOptions)(implicit log: Logger): Analysis = + { + val (previousAnalysis, previousSetup) = extract(store.get(), incrementalCompilerOptions) + if (skip) + previousAnalysis + else { + val config = new 
CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup, + progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions) + val (modified, result) = compile2(config) + if (modified) + store.set(result, setup) + result + } + } + def compile2(config: CompileConfiguration)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Boolean, Analysis) = + { + import config._ + import currentSetup._ + val absClasspath = classpath.map(_.getAbsoluteFile) + val apiOption = (api: Either[Boolean, Source]) => api.right.toOption + val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp) + val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath) + val entry = Locate.entry(searchClasspath, definesClass) - val compile0 = (include: Set[File], changes: DependencyChanges, callback: AnalysisCallback) => { - val outputDirs = outputDirectories(output) - outputDirs foreach (IO.createDirectory) - val incSrc = sources.filter(include) - val (javaSrcs, scalaSrcs) = incSrc partition javaOnly - logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs) - def compileScala() = - if(!scalaSrcs.isEmpty) - { - val sources = if(order == Mixed) incSrc else scalaSrcs - val arguments = cArgs(Nil, absClasspath, None, options.options) - timed("Scala compilation", log) { - compiler.compile(sources, changes, arguments, output, callback, reporter, cache, log, progress) - } - } - def compileJava() = - if(!javaSrcs.isEmpty) - { - import Path._ - @tailrec def ancestor(f1: File, f2: File): Boolean = - if (f2 eq null) false else - if (f1 == f2) true else ancestor(f1, f2.getParentFile) + val compile0 = (include: Set[File], changes: DependencyChanges, callback: AnalysisCallback) => { + val outputDirs = outputDirectories(output) + outputDirs foreach (IO.createDirectory) + val incSrc = sources.filter(include) + val (javaSrcs, scalaSrcs) = incSrc partition javaOnly + logInputs(log, javaSrcs.size, 
scalaSrcs.size, outputDirs) + def compileScala() = + if (!scalaSrcs.isEmpty) { + val sources = if (order == Mixed) incSrc else scalaSrcs + val arguments = cArgs(Nil, absClasspath, None, options.options) + timed("Scala compilation", log) { + compiler.compile(sources, changes, arguments, output, callback, reporter, cache, log, progress) + } + } + def compileJava() = + if (!javaSrcs.isEmpty) { + import Path._ + @tailrec def ancestor(f1: File, f2: File): Boolean = + if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile) - val chunks: Map[Option[File], Seq[File]] = output match { - case single: SingleOutput => Map(Some(single.outputDirectory) -> javaSrcs) - case multi: MultipleOutput => - javaSrcs groupBy { src => - multi.outputGroups find {out => ancestor(out.sourceDirectory, src)} map (_.outputDirectory) - } - } - chunks.get(None) foreach { srcs => - log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(",")) - } - val memo = for ((Some(outputDirectory), srcs) <- chunks) yield { - val classesFinder = PathFinder(outputDirectory) ** "*.class" - (classesFinder, classesFinder.get, srcs) - } + val chunks: Map[Option[File], Seq[File]] = output match { + case single: SingleOutput => Map(Some(single.outputDirectory) -> javaSrcs) + case multi: MultipleOutput => + javaSrcs groupBy { src => + multi.outputGroups find { out => ancestor(out.sourceDirectory, src) } map (_.outputDirectory) + } + } + chunks.get(None) foreach { srcs => + log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(",")) + } + val memo = for ((Some(outputDirectory), srcs) <- chunks) yield { + val classesFinder = PathFinder(outputDirectory) ** "*.class" + (classesFinder, classesFinder.get, srcs) + } - val loader = ClasspathUtilities.toLoader(searchClasspath) - timed("Java compilation", log) { - javac.compile(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, log) - } + val loader = 
ClasspathUtilities.toLoader(searchClasspath) + timed("Java compilation", log) { + javac.compile(javaSrcs.toArray, absClasspath.toArray, output, options.javacOptions.toArray, log) + } - def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = { - val (api, inherits) = ClassToAPI.process(classes) - callback.api(source, api) - inherits.map(_.getName) - } + def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = { + val (api, inherits) = ClassToAPI.process(classes) + callback.api(source, api) + inherits.map(_.getName) + } - timed("Java analysis", log) { - for ((classesFinder, oldClasses, srcs) <- memo) { - val newClasses = Set(classesFinder.get: _*) -- oldClasses - Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI) - } - } - } - if(order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() } - } + timed("Java analysis", log) { + for ((classesFinder, oldClasses, srcs) <- memo) { + val newClasses = Set(classesFinder.get: _*) -- oldClasses + Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI) + } + } + } + if (order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() } + } - val sourcesSet = sources.toSet - val analysis = previousSetup match { - case Some(previous) if previous.nameHashing != currentSetup.nameHashing => - // if the value of `nameHashing` flag has changed we have to throw away - // previous Analysis completely and start with empty Analysis object - // that supports the particular value of the `nameHashing` flag. 
- // Otherwise we'll be getting UnsupportedOperationExceptions - Analysis.empty(currentSetup.nameHashing) - case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis - case _ => Incremental.prune(sourcesSet, previousAnalysis) - } - IncrementalCompile(sourcesSet, entry, compile0, analysis, getAnalysis, output, log, incOptions) - } - private[this] def outputDirectories(output: Output): Seq[File] = output match { - case single: SingleOutput => List(single.outputDirectory) - case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory) - } - private[this] def timed[T](label: String, log: Logger)(t: => T): T = - { - val start = System.nanoTime - val result = t - val elapsed = System.nanoTime - start - log.debug(label + " took " + (elapsed/1e9) + " s") - result - } - private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]) - { - val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount) - val javaMsg = Analysis.counted("Java source", "", "s", javaCount) - val combined = scalaMsg ++ javaMsg - if(!combined.isEmpty) - log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "...")) - } - private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) = - previous match - { - case Some((an, setup)) => (an, Some(setup)) - case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None) - } - def javaOnly(f: File) = f.getName.endsWith(".java") + val sourcesSet = sources.toSet + val analysis = previousSetup match { + case Some(previous) if previous.nameHashing != currentSetup.nameHashing => + // if the value of `nameHashing` flag has changed we have to throw away + // previous Analysis completely and start with empty Analysis object + // that supports the particular value of the `nameHashing` flag. 
+ // Otherwise we'll be getting UnsupportedOperationExceptions + Analysis.empty(currentSetup.nameHashing) + case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis + case _ => Incremental.prune(sourcesSet, previousAnalysis) + } + IncrementalCompile(sourcesSet, entry, compile0, analysis, getAnalysis, output, log, incOptions) + } + private[this] def outputDirectories(output: Output): Seq[File] = output match { + case single: SingleOutput => List(single.outputDirectory) + case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory) + } + private[this] def timed[T](label: String, log: Logger)(t: => T): T = + { + val start = System.nanoTime + val result = t + val elapsed = System.nanoTime - start + log.debug(label + " took " + (elapsed / 1e9) + " s") + result + } + private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]) { + val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount) + val javaMsg = Analysis.counted("Java source", "", "s", javaCount) + val combined = scalaMsg ++ javaMsg + if (!combined.isEmpty) + log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "...")) + } + private def extract(previous: Option[(Analysis, CompileSetup)], incOptions: IncOptions): (Analysis, Option[CompileSetup]) = + previous match { + case Some((an, setup)) => (an, Some(setup)) + case None => (Analysis.empty(nameHashing = incOptions.nameHashing), None) + } + def javaOnly(f: File) = f.getName.endsWith(".java") - private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] = - options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath) + private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] = + options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath) - val store = 
AggressiveCompile.staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile)))) + val store = AggressiveCompile.staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile)))) } -object AggressiveCompile -{ - import collection.mutable - import java.lang.ref.{Reference,SoftReference} - private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]] - private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore = - synchronized { - cache get file flatMap { ref => Option(ref.get) } getOrElse { - val b = backing - cache.put(file, new SoftReference(b)) - b - } - } +object AggressiveCompile { + import collection.mutable + import java.lang.ref.{ Reference, SoftReference } + private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]] + private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore = + synchronized { + cache get file flatMap { ref => Option(ref.get) } getOrElse { + val b = backing + cache.put(file, new SoftReference(b)) + b + } + } - def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool = - if(javaHome.isDefined) - JavaCompiler.fork(cpOptions, instance)(forkJavac(javaHome)) - else - JavaCompiler.directOrFork(cpOptions, instance)(forkJavac(None)) + def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool = + if (javaHome.isDefined) + JavaCompiler.fork(cpOptions, instance)(forkJavac(javaHome)) + else + JavaCompiler.directOrFork(cpOptions, instance)(forkJavac(None)) - def forkJavac(javaHome: Option[File]): JavaCompiler.Fork = - { - import Path._ - def exec(jc: JavacContract) = javaHome match { case None => jc.name; case Some(jh) => (jh / "bin" / jc.name).absolutePath } - (contract: JavacContract, args: Seq[String], log: Logger) => { - log.debug("Forking " + contract.name + ": " + exec(contract) + " " + args.mkString(" ")) 
- val javacLogger = new JavacLogger(log) - var exitCode = -1 - try { - exitCode = Process(exec(contract), args) ! javacLogger - } finally { - javacLogger.flush(exitCode) - } - exitCode - } - } + def forkJavac(javaHome: Option[File]): JavaCompiler.Fork = + { + import Path._ + def exec(jc: JavacContract) = javaHome match { case None => jc.name; case Some(jh) => (jh / "bin" / jc.name).absolutePath } + (contract: JavacContract, args: Seq[String], log: Logger) => { + log.debug("Forking " + contract.name + ": " + exec(contract) + " " + args.mkString(" ")) + val javacLogger = new JavacLogger(log) + var exitCode = -1 + try { + exitCode = Process(exec(contract), args) ! javacLogger + } finally { + javacLogger.flush(exitCode) + } + exitCode + } + } } private[sbt] class JavacLogger(log: Logger) extends ProcessLogger { import scala.collection.mutable.ListBuffer - import Level.{Info, Warn, Error, Value => LogLevel} + import Level.{ Info, Warn, Error, Value => LogLevel } private val msgs: ListBuffer[(LogLevel, String)] = new ListBuffer() @@ -242,7 +235,7 @@ private[sbt] class JavacLogger(log: Logger) extends ProcessLogger { def buffer[T](f: => T): T = f private def print(desiredLevel: LogLevel)(t: (LogLevel, String)) = t match { - case (Info, msg) => log.info(msg) + case (Info, msg) => log.info(msg) case (Error, msg) => log.log(desiredLevel, msg) } diff --git a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala index 5028c7996..b7a4b6c57 100644 --- a/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala +++ b/compile/integration/src/main/scala/sbt/compiler/IncrementalCompiler.scala @@ -1,57 +1,54 @@ package sbt.compiler - import java.io.File - import sbt.{CompileSetup, IO, Using} - import sbt.inc.{Analysis, IncOptions, TextAnalysisFormat} - import xsbti.{Logger, Maybe} - import xsbti.compile._ +import java.io.File +import sbt.{ CompileSetup, IO, Using } +import 
sbt.inc.{ Analysis, IncOptions, TextAnalysisFormat } +import xsbti.{ Logger, Maybe } +import xsbti.compile._ +object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] { + def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis = + { + val setup = in.setup; import setup._ + val options = in.options; import options.{ options => scalacOptions, _ } + val compilers = in.compilers; import compilers._ + val agg = new AggressiveCompile(setup.cacheFile) + val aMap = (f: File) => m2o(analysisMap(f)) + val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) } + val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions) + agg(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, aMap, + defClass, reporter, order, skip, incOptions)(log) + } -object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] -{ - def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis = - { - val setup = in.setup; import setup._ - val options = in.options; import options.{options => scalacOptions, _} - val compilers = in.compilers; import compilers._ - val agg = new AggressiveCompile(setup.cacheFile) - val aMap = (f: File) => m2o(analysisMap(f)) - val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) } - val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions) - agg(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, aMap, - defClass, reporter, order, skip, incOptions)(log) - } + private[this] def m2o[S](opt: Maybe[S]): Option[S] = if (opt.isEmpty) None else Some(opt.get) - private[this] def m2o[S](opt: Maybe[S]): Option[S] = if(opt.isEmpty) None else Some(opt.get) + def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler = + new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), 
options, log) - def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler = - new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options, log) + def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger) { + val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log) + AnalyzingCompiler.compileSources(sourceJar :: Nil, targetJar, interfaceJar :: Nil, label, raw, log) + } - def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger) - { - val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log) - AnalyzingCompiler.compileSources(sourceJar :: Nil, targetJar, interfaceJar :: Nil, label, raw, log) - } + def readCache(file: File): Maybe[(Analysis, CompileSetup)] = + try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() } - def readCache(file: File): Maybe[(Analysis, CompileSetup)] = - try { Maybe.just(readCacheUncaught(file)) } catch { case _: Exception => Maybe.nothing() } + @deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2") + def readAnalysis(file: File): Analysis = + try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty } - @deprecated("Use overloaded variant which takes `IncOptions` as parameter.", "0.13.2") - def readAnalysis(file: File): Analysis = - try { readCacheUncaught(file)._1 } catch { case _: Exception => Analysis.Empty } + def readAnalysis(file: File, incOptions: IncOptions): Analysis = + try { readCacheUncaught(file)._1 } catch { + case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing) + } - def readAnalysis(file: File, incOptions: IncOptions): Analysis = - try { readCacheUncaught(file)._1 } catch { - case _: Exception => Analysis.empty(nameHashing = incOptions.nameHashing) - } - - def 
readCacheUncaught(file: File): (Analysis, CompileSetup) = - Using.fileReader(IO.utf8)(file) { reader => - try { - TextAnalysisFormat.read(reader) - } catch { - case ex: sbt.inc.ReadException => - throw new java.io.IOException(s"Error while reading $file", ex) - } - } + def readCacheUncaught(file: File): (Analysis, CompileSetup) = + Using.fileReader(IO.utf8)(file) { reader => + try { + TextAnalysisFormat.read(reader) + } catch { + case ex: sbt.inc.ReadException => + throw new java.io.IOException(s"Error while reading $file", ex) + } + } } diff --git a/compile/interface/src/main/scala/xsbt/API.scala b/compile/interface/src/main/scala/xsbt/API.scala index c65bef3c0..9bd6ae2d7 100644 --- a/compile/interface/src/main/scala/xsbt/API.scala +++ b/compile/interface/src/main/scala/xsbt/API.scala @@ -4,99 +4,86 @@ package xsbt import java.io.File -import java.util.{Arrays,Comparator} -import scala.tools.nsc.{io, plugins, symtab, Global, Phase} -import io.{AbstractFile, PlainFile, ZipArchive} -import plugins.{Plugin, PluginComponent} +import java.util.{ Arrays, Comparator } +import scala.tools.nsc.{ io, plugins, symtab, Global, Phase } +import io.{ AbstractFile, PlainFile, ZipArchive } +import plugins.{ Plugin, PluginComponent } import symtab.Flags -import scala.collection.mutable.{HashMap, HashSet, ListBuffer} -import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType} +import scala.collection.mutable.{ HashMap, HashSet, ListBuffer } +import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType } -object API -{ - val name = "xsbt-api" +object API { + val name = "xsbt-api" } -final class API(val global: CallbackGlobal) extends Compat -{ - import global._ +final class API(val global: CallbackGlobal) extends Compat { + import global._ - @inline def debug(msg: => String) = if(settings.verbose.value) inform(msg) + @inline def debug(msg: => String) = if (settings.verbose.value) inform(msg) - def newPhase(prev: Phase) = new ApiPhase(prev) - class 
ApiPhase(prev: Phase) extends Phase(prev) - { - override def description = "Extracts the public API from source files." - def name = API.name - def run: Unit = - { - val start = System.currentTimeMillis - currentRun.units.foreach(processUnit) - val stop = System.currentTimeMillis - debug("API phase took : " + ((stop - start)/1000.0) + " s") - } - def processUnit(unit: CompilationUnit) = if(!unit.isJava) processScalaUnit(unit) - def processScalaUnit(unit: CompilationUnit) - { - val sourceFile = unit.source.file.file - debug("Traversing " + sourceFile) - val extractApi = new ExtractAPI[global.type](global, sourceFile) - val traverser = new TopLevelHandler(extractApi) - traverser.apply(unit.body) - if (global.callback.nameHashing) { - val extractUsedNames = new ExtractUsedNames[global.type](global) - val names = extractUsedNames.extract(unit) - debug("The " + sourceFile + " contains the following used names " + names) - names foreach { (name: String) => callback.usedName(sourceFile, name) } - } - val packages = traverser.packages.toArray[String].map(p => new xsbti.api.Package(p)) - val source = new xsbti.api.SourceAPI(packages, traverser.definitions.toArray[xsbti.api.Definition]) - extractApi.forceStructures() - callback.api(sourceFile, source) - } - } - - - private final class TopLevelHandler(extractApi: ExtractAPI[global.type]) extends TopLevelTraverser - { - val packages = new HashSet[String] - val definitions = new ListBuffer[xsbti.api.Definition] - def `class`(c: Symbol): Unit = { - definitions += extractApi.classLike(c.owner, c) - } - /** Record packages declared in the source file*/ - def `package`(p: Symbol) - { - if( (p eq null) || p == NoSymbol || p.isRoot || p.isRootPackage || p.isEmptyPackageClass || p.isEmptyPackage) - () - else - { - packages += p.fullName - `package`(p.enclosingPackage) - } - } - } - - private abstract class TopLevelTraverser extends Traverser - { - def `class`(s: Symbol) - def `package`(s: Symbol) - override def traverse(tree: Tree) - 
{ - tree match - { - case (_: ClassDef | _ : ModuleDef) if isTopLevel(tree.symbol) => `class`(tree.symbol) - case p: PackageDef => - `package`(p.symbol) - super.traverse(tree) - case _ => - } - } - def isTopLevel(sym: Symbol): Boolean = - (sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic && - !sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA) - } + def newPhase(prev: Phase) = new ApiPhase(prev) + class ApiPhase(prev: Phase) extends Phase(prev) { + override def description = "Extracts the public API from source files." + def name = API.name + def run: Unit = + { + val start = System.currentTimeMillis + currentRun.units.foreach(processUnit) + val stop = System.currentTimeMillis + debug("API phase took : " + ((stop - start) / 1000.0) + " s") + } + def processUnit(unit: CompilationUnit) = if (!unit.isJava) processScalaUnit(unit) + def processScalaUnit(unit: CompilationUnit) { + val sourceFile = unit.source.file.file + debug("Traversing " + sourceFile) + val extractApi = new ExtractAPI[global.type](global, sourceFile) + val traverser = new TopLevelHandler(extractApi) + traverser.apply(unit.body) + if (global.callback.nameHashing) { + val extractUsedNames = new ExtractUsedNames[global.type](global) + val names = extractUsedNames.extract(unit) + debug("The " + sourceFile + " contains the following used names " + names) + names foreach { (name: String) => callback.usedName(sourceFile, name) } + } + val packages = traverser.packages.toArray[String].map(p => new xsbti.api.Package(p)) + val source = new xsbti.api.SourceAPI(packages, traverser.definitions.toArray[xsbti.api.Definition]) + extractApi.forceStructures() + callback.api(sourceFile, source) + } + } + private final class TopLevelHandler(extractApi: ExtractAPI[global.type]) extends TopLevelTraverser { + val packages = new HashSet[String] + val definitions = new ListBuffer[xsbti.api.Definition] + def `class`(c: Symbol): Unit = { + definitions += 
extractApi.classLike(c.owner, c) + } + /** Record packages declared in the source file*/ + def `package`(p: Symbol) { + if ((p eq null) || p == NoSymbol || p.isRoot || p.isRootPackage || p.isEmptyPackageClass || p.isEmptyPackage) + () + else { + packages += p.fullName + `package`(p.enclosingPackage) + } + } + } + private abstract class TopLevelTraverser extends Traverser { + def `class`(s: Symbol) + def `package`(s: Symbol) + override def traverse(tree: Tree) { + tree match { + case (_: ClassDef | _: ModuleDef) if isTopLevel(tree.symbol) => `class`(tree.symbol) + case p: PackageDef => + `package`(p.symbol) + super.traverse(tree) + case _ => + } + } + def isTopLevel(sym: Symbol): Boolean = + (sym ne null) && (sym != NoSymbol) && !sym.isImplClass && !sym.isNestedClass && sym.isStatic && + !sym.hasFlag(Flags.SYNTHETIC) && !sym.hasFlag(Flags.JAVA) + } } diff --git a/compile/interface/src/main/scala/xsbt/Analyzer.scala b/compile/interface/src/main/scala/xsbt/Analyzer.scala index dd11fe0e0..549cd882a 100644 --- a/compile/interface/src/main/scala/xsbt/Analyzer.scala +++ b/compile/interface/src/main/scala/xsbt/Analyzer.scala @@ -3,53 +3,44 @@ */ package xsbt -import scala.tools.nsc.{io, plugins, symtab, Global, Phase} -import io.{AbstractFile, PlainFile, ZipArchive} -import plugins.{Plugin, PluginComponent} -import scala.collection.mutable.{HashMap, HashSet, Map, Set} +import scala.tools.nsc.{ io, plugins, symtab, Global, Phase } +import io.{ AbstractFile, PlainFile, ZipArchive } +import plugins.{ Plugin, PluginComponent } +import scala.collection.mutable.{ HashMap, HashSet, Map, Set } import java.io.File import java.util.zip.ZipFile import xsbti.AnalysisCallback -object Analyzer -{ - def name = "xsbt-analyzer" +object Analyzer { + def name = "xsbt-analyzer" } -final class Analyzer(val global: CallbackGlobal) extends LocateClassFile -{ - import global._ +final class Analyzer(val global: CallbackGlobal) extends LocateClassFile { + import global._ - def newPhase(prev: 
Phase): Phase = new AnalyzerPhase(prev) - private class AnalyzerPhase(prev: Phase) extends Phase(prev) - { - override def description = "Finds concrete instances of provided superclasses, and application entry points." - def name = Analyzer.name - def run - { - for(unit <- currentRun.units if !unit.isJava) - { - val sourceFile = unit.source.file.file - // build list of generated classes - for(iclass <- unit.icode) - { - val sym = iclass.symbol - def addGenerated(separatorRequired: Boolean) - { - for(classFile <- outputDirs map (fileForClass(_, sym, separatorRequired)) find (_.exists)) - callback.generatedClass(sourceFile, classFile, className(sym, '.', separatorRequired)) - } - if(sym.isModuleClass && !sym.isImplClass) - { - if(isTopLevelModule(sym) && sym.companionClass == NoSymbol) - addGenerated(false) - addGenerated(true) - } - else - addGenerated(false) - } - } - } - } + def newPhase(prev: Phase): Phase = new AnalyzerPhase(prev) + private class AnalyzerPhase(prev: Phase) extends Phase(prev) { + override def description = "Finds concrete instances of provided superclasses, and application entry points." 
+ def name = Analyzer.name + def run { + for (unit <- currentRun.units if !unit.isJava) { + val sourceFile = unit.source.file.file + // build list of generated classes + for (iclass <- unit.icode) { + val sym = iclass.symbol + def addGenerated(separatorRequired: Boolean) { + for (classFile <- outputDirs map (fileForClass(_, sym, separatorRequired)) find (_.exists)) + callback.generatedClass(sourceFile, classFile, className(sym, '.', separatorRequired)) + } + if (sym.isModuleClass && !sym.isImplClass) { + if (isTopLevelModule(sym) && sym.companionClass == NoSymbol) + addGenerated(false) + addGenerated(true) + } else + addGenerated(false) + } + } + } + } } diff --git a/compile/interface/src/main/scala/xsbt/Command.scala b/compile/interface/src/main/scala/xsbt/Command.scala index b54396718..4b127e5ff 100644 --- a/compile/interface/src/main/scala/xsbt/Command.scala +++ b/compile/interface/src/main/scala/xsbt/Command.scala @@ -3,27 +3,26 @@ */ package xsbt - import scala.tools.nsc.{CompilerCommand, Settings} +import scala.tools.nsc.{ CompilerCommand, Settings } -object Command -{ - /** - * Construct a CompilerCommand using reflection, to be compatible with Scalac before and after - * r21274 - */ - def apply(arguments: List[String], settings: Settings): CompilerCommand = { - def constr(params: Class[_]*) = classOf[CompilerCommand].getConstructor(params: _*) - try { - constr(classOf[List[_]], classOf[Settings]).newInstance(arguments, settings) - } catch { - case e: NoSuchMethodException => - constr(classOf[List[_]], classOf[Settings], classOf[Function1[_, _]], classOf[Boolean]).newInstance(arguments, settings, (s: String) => throw new RuntimeException(s), false.asInstanceOf[AnyRef]) - } - } - - def getWarnFatal(settings: Settings): Boolean = - settings.Xwarnfatal.value +object Command { + /** + * Construct a CompilerCommand using reflection, to be compatible with Scalac before and after + * r21274 + */ + def apply(arguments: List[String], settings: Settings): 
CompilerCommand = { + def constr(params: Class[_]*) = classOf[CompilerCommand].getConstructor(params: _*) + try { + constr(classOf[List[_]], classOf[Settings]).newInstance(arguments, settings) + } catch { + case e: NoSuchMethodException => + constr(classOf[List[_]], classOf[Settings], classOf[Function1[_, _]], classOf[Boolean]).newInstance(arguments, settings, (s: String) => throw new RuntimeException(s), false.asInstanceOf[AnyRef]) + } + } - def getNoWarn(settings: Settings): Boolean = - settings.nowarn.value + def getWarnFatal(settings: Settings): Boolean = + settings.Xwarnfatal.value + + def getNoWarn(settings: Settings): Boolean = + settings.nowarn.value } diff --git a/compile/interface/src/main/scala/xsbt/Compat.scala b/compile/interface/src/main/scala/xsbt/Compat.scala index d92ba6e73..74116c0af 100644 --- a/compile/interface/src/main/scala/xsbt/Compat.scala +++ b/compile/interface/src/main/scala/xsbt/Compat.scala @@ -38,95 +38,92 @@ import scala.tools.nsc.symtab.Flags * The technique described above is used in several places below. 
* */ -abstract class Compat -{ - val global: Global - import global._ - val LocalChild = global.tpnme.LOCAL_CHILD - val Nullary = global.NullaryMethodType - val ScalaObjectClass = definitions.ScalaObjectClass +abstract class Compat { + val global: Global + import global._ + val LocalChild = global.tpnme.LOCAL_CHILD + val Nullary = global.NullaryMethodType + val ScalaObjectClass = definitions.ScalaObjectClass - private[this] final class MiscCompat - { - // in 2.9, nme.LOCALCHILD was renamed to tpnme.LOCAL_CHILD - def tpnme = nme - def LOCAL_CHILD = nme.LOCALCHILD - def LOCALCHILD = sourceCompatibilityOnly + private[this] final class MiscCompat { + // in 2.9, nme.LOCALCHILD was renamed to tpnme.LOCAL_CHILD + def tpnme = nme + def LOCAL_CHILD = nme.LOCALCHILD + def LOCALCHILD = sourceCompatibilityOnly - // in 2.10, ScalaObject was removed - def ScalaObjectClass = definitions.ObjectClass + // in 2.10, ScalaObject was removed + def ScalaObjectClass = definitions.ObjectClass - def NullaryMethodType = NullaryMethodTpe + def NullaryMethodType = NullaryMethodTpe - def MACRO = DummyValue + def MACRO = DummyValue - // in 2.10, sym.moduleSuffix exists, but genJVM.moduleSuffix(Symbol) does not - def moduleSuffix(sym: Symbol): String = sourceCompatibilityOnly - // in 2.11 genJVM does not exist - def genJVM = this - } - // in 2.9, NullaryMethodType was added to Type - object NullaryMethodTpe { - def unapply(t: Type): Option[Type] = None - } + // in 2.10, sym.moduleSuffix exists, but genJVM.moduleSuffix(Symbol) does not + def moduleSuffix(sym: Symbol): String = sourceCompatibilityOnly + // in 2.11 genJVM does not exist + def genJVM = this + } + // in 2.9, NullaryMethodType was added to Type + object NullaryMethodTpe { + def unapply(t: Type): Option[Type] = None + } - protected implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym) - protected final class SymbolCompat(sym: Symbol) { - // before 2.10, sym.moduleSuffix doesn't exist, but genJVM.moduleSuffix does 
- def moduleSuffix = global.genJVM.moduleSuffix(sym) + protected implicit def symbolCompat(sym: Symbol): SymbolCompat = new SymbolCompat(sym) + protected final class SymbolCompat(sym: Symbol) { + // before 2.10, sym.moduleSuffix doesn't exist, but genJVM.moduleSuffix does + def moduleSuffix = global.genJVM.moduleSuffix(sym) - def enclosingTopLevelClass: Symbol = sym.toplevelClass - def toplevelClass: Symbol = sourceCompatibilityOnly - } + def enclosingTopLevelClass: Symbol = sym.toplevelClass + def toplevelClass: Symbol = sourceCompatibilityOnly + } + val DummyValue = 0 + def hasMacro(s: Symbol): Boolean = + { + val MACRO = Flags.MACRO // will be DummyValue for versions before 2.10 + MACRO != DummyValue && s.hasFlag(MACRO) + } + def moduleSuffix(s: Symbol): String = s.moduleSuffix - val DummyValue = 0 - def hasMacro(s: Symbol): Boolean = - { - val MACRO = Flags.MACRO // will be DummyValue for versions before 2.10 - MACRO != DummyValue && s.hasFlag(MACRO) - } - def moduleSuffix(s: Symbol): String = s.moduleSuffix + private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.") - private[this] def sourceCompatibilityOnly: Nothing = throw new RuntimeException("For source compatibility only: should not get here.") + private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat - private[this] final implicit def miscCompat(n: AnyRef): MiscCompat = new MiscCompat + object MacroExpansionOf { + def unapply(tree: Tree): Option[Tree] = { - object MacroExpansionOf { - def unapply(tree: Tree): Option[Tree] = { + // MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x + object Compat { + class MacroExpansionAttachment(val original: Tree) - // MacroExpansionAttachment (MEA) compatibility for 2.8.x and 2.9.x - object Compat { - class MacroExpansionAttachment(val original: Tree) + // Trees have no attachments in 2.8.x and 2.9.x + implicit def withAttachments(tree: Tree): 
WithAttachments = new WithAttachments(tree) + class WithAttachments(val tree: Tree) { + object EmptyAttachments { + def all = Set.empty[Any] + } + val attachments = EmptyAttachments + } + } + import Compat._ - // Trees have no attachments in 2.8.x and 2.9.x - implicit def withAttachments(tree: Tree): WithAttachments = new WithAttachments(tree) - class WithAttachments(val tree: Tree) { - object EmptyAttachments { - def all = Set.empty[Any] - } - val attachments = EmptyAttachments - } - } - import Compat._ + locally { + // Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all + import global._ // this is where MEA lives in 2.10.x - locally { - // Wildcard imports are necessary since 2.8.x and 2.9.x don't have `MacroExpansionAttachment` at all - import global._ // this is where MEA lives in 2.10.x + // `original` has been renamed to `expandee` in 2.11.x + implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att) + class WithExpandee(att: MacroExpansionAttachment) { + def expandee: Tree = att.original + } - // `original` has been renamed to `expandee` in 2.11.x - implicit def withExpandee(att: MacroExpansionAttachment): WithExpandee = new WithExpandee(att) - class WithExpandee(att: MacroExpansionAttachment) { - def expandee: Tree = att.original - } - - locally { - import analyzer._ // this is where MEA lives in 2.11.x - tree.attachments.all.collect { - case att: MacroExpansionAttachment => att.expandee - } headOption - } - } - } - } + locally { + import analyzer._ // this is where MEA lives in 2.11.x + tree.attachments.all.collect { + case att: MacroExpansionAttachment => att.expandee + } headOption + } + } + } + } } diff --git a/compile/interface/src/main/scala/xsbt/CompilerInterface.scala b/compile/interface/src/main/scala/xsbt/CompilerInterface.scala index 9d1285640..834a34ab1 100644 --- a/compile/interface/src/main/scala/xsbt/CompilerInterface.scala +++ 
b/compile/interface/src/main/scala/xsbt/CompilerInterface.scala @@ -3,273 +3,252 @@ */ package xsbt -import xsbti.{AnalysisCallback,Logger,Problem,Reporter,Severity} +import xsbti.{ AnalysisCallback, Logger, Problem, Reporter, Severity } import xsbti.compile._ -import scala.tools.nsc.{backend, io, reporters, symtab, util, Phase, Global, Settings, SubComponent} +import scala.tools.nsc.{ backend, io, reporters, symtab, util, Phase, Global, Settings, SubComponent } import scala.tools.nsc.interactive.RangePositions import backend.JavaPlatform import scala.tools.util.PathResolver import symtab.SymbolLoaders -import util.{ClassPath,DirectoryClassPath,MergedClassPath,JavaClassPath} -import ClassPath.{ClassPathContext,JavaContext} +import util.{ ClassPath, DirectoryClassPath, MergedClassPath, JavaClassPath } +import ClassPath.{ ClassPathContext, JavaContext } import io.AbstractFile import scala.annotation.tailrec import scala.collection.mutable import Log.debug import java.io.File -final class CompilerInterface -{ - def newCompiler(options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter, resident: Boolean): CachedCompiler = - new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate), resident) +final class CompilerInterface { + def newCompiler(options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter, resident: Boolean): CachedCompiler = + new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate), resident) - def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, cached: CachedCompiler): Unit = - cached.run(sources, changes, callback, log, delegate, progress) + def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, cached: CachedCompiler): Unit = + cached.run(sources, changes, callback, log, 
delegate, progress) } // for compatibility with Scala versions without Global.registerTopLevelSym (2.8.1 and earlier) sealed trait GlobalCompat { self: Global => - def registerTopLevelSym(sym: Symbol): Unit - sealed trait RunCompat { - def informUnitStarting(phase: Phase, unit: CompilationUnit) {} - } + def registerTopLevelSym(sym: Symbol): Unit + sealed trait RunCompat { + def informUnitStarting(phase: Phase, unit: CompilationUnit) {} + } } sealed abstract class CallbackGlobal(settings: Settings, reporter: reporters.Reporter, output: Output) extends Global(settings, reporter) with GlobalCompat { - def callback: AnalysisCallback - def findClass(name: String): Option[(AbstractFile,Boolean)] - lazy val outputDirs: Iterable[File] = { - output match { - case single: SingleOutput => List(single.outputDirectory) - case multi: MultipleOutput => multi.outputGroups.toStream map (_.outputDirectory) - } - } - // Map source files to public inherited dependencies. These dependencies are tracked as the symbol for the dealiased base class. - val inheritedDependencies = new mutable.HashMap[File, mutable.Set[Symbol]] - def addInheritedDependencies(file: File, deps: Iterable[Symbol]) { - inheritedDependencies.getOrElseUpdate(file, new mutable.HashSet) ++= deps - } + def callback: AnalysisCallback + def findClass(name: String): Option[(AbstractFile, Boolean)] + lazy val outputDirs: Iterable[File] = { + output match { + case single: SingleOutput => List(single.outputDirectory) + case multi: MultipleOutput => multi.outputGroups.toStream map (_.outputDirectory) + } + } + // Map source files to public inherited dependencies. These dependencies are tracked as the symbol for the dealiased base class. 
+ val inheritedDependencies = new mutable.HashMap[File, mutable.Set[Symbol]] + def addInheritedDependencies(file: File, deps: Iterable[Symbol]) { + inheritedDependencies.getOrElseUpdate(file, new mutable.HashSet) ++= deps + } } class InterfaceCompileFailed(val arguments: Array[String], val problems: Array[Problem], override val toString: String) extends xsbti.CompileFailed class InterfaceCompileCancelled(val arguments: Array[String], override val toString: String) extends xsbti.CompileCancelled -private final class WeakLog(private[this] var log: Logger, private[this] var delegate: Reporter) -{ - def apply(message: String) { - assert(log ne null, "Stale reference to logger") - log.error(Message(message)) - } - def logger: Logger = log - def reporter: Reporter = delegate - def clear() { - log = null - delegate = null - } +private final class WeakLog(private[this] var log: Logger, private[this] var delegate: Reporter) { + def apply(message: String) { + assert(log ne null, "Stale reference to logger") + log.error(Message(message)) + } + def logger: Logger = log + def reporter: Reporter = delegate + def clear() { + log = null + delegate = null + } } -private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog, resident: Boolean) extends CachedCompiler -{ - val settings = new Settings(s => initialLog(s)) - output match { - case multi: MultipleOutput => - for (out <- multi.outputGroups) - settings.outputDirs.add(out.sourceDirectory.getAbsolutePath, out.outputDirectory.getAbsolutePath) - case single: SingleOutput => - settings.outputDirs.setSingleOutput(single.outputDirectory.getAbsolutePath) - } +private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog, resident: Boolean) extends CachedCompiler { + val settings = new Settings(s => initialLog(s)) + output match { + case multi: MultipleOutput => + for (out <- multi.outputGroups) + settings.outputDirs.add(out.sourceDirectory.getAbsolutePath, 
out.outputDirectory.getAbsolutePath) + case single: SingleOutput => + settings.outputDirs.setSingleOutput(single.outputDirectory.getAbsolutePath) + } - val command = Command(args.toList, settings) - private[this] val dreporter = DelegatingReporter(settings, initialLog.reporter) - try { - if(!noErrors(dreporter)) { - dreporter.printSummary() - handleErrors(dreporter, initialLog.logger) - } - } finally - initialLog.clear() + val command = Command(args.toList, settings) + private[this] val dreporter = DelegatingReporter(settings, initialLog.reporter) + try { + if (!noErrors(dreporter)) { + dreporter.printSummary() + handleErrors(dreporter, initialLog.logger) + } + } finally + initialLog.clear() - def noErrors(dreporter: DelegatingReporter) = !dreporter.hasErrors && command.ok + def noErrors(dreporter: DelegatingReporter) = !dreporter.hasErrors && command.ok - def commandArguments(sources: Array[File]): Array[String] = - (command.settings.recreateArgs ++ sources.map(_.getAbsolutePath)).toArray[String] + def commandArguments(sources: Array[File]): Array[String] = + (command.settings.recreateArgs ++ sources.map(_.getAbsolutePath)).toArray[String] - def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress): Unit = synchronized - { - debug(log, "Running cached compiler " + hashCode.toHexString + ", interfacing (CompilerInterface) with Scala compiler " + scala.tools.nsc.Properties.versionString) - val dreporter = DelegatingReporter(settings, delegate) - try { run(sources.toList, changes, callback, log, dreporter, progress) } - finally { dreporter.dropDelegate() } - } - private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, dreporter: DelegatingReporter, compileProgress: CompileProgress) - { - if(command.shouldStopWithInfo) - { - dreporter.info(null, command.getInfoMessage(compiler), true) - throw new InterfaceCompileFailed(args, 
Array(), "Compiler option supplied that disabled actual compilation.") - } - if(noErrors(dreporter)) - { - debug(log, args.mkString("Calling Scala compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) - compiler.set(callback, dreporter) - val run = new compiler.Run with compiler.RunCompat { - override def informUnitStarting(phase: Phase, unit: compiler.CompilationUnit) { - compileProgress.startUnit(phase.name, unit.source.path) - } - override def progress(current: Int, total: Int) { - if (!compileProgress.advance(current, total)) - cancel - } - } - val sortedSourceFiles = sources.map(_.getAbsolutePath).sortWith(_ < _) - run compile sortedSourceFiles - processUnreportedWarnings(run) - dreporter.problems foreach { p => callback.problem(p.category, p.position, p.message, p.severity, true) } - } - dreporter.printSummary() - if(!noErrors(dreporter)) handleErrors(dreporter, log) - // the case where we cancelled compilation _after_ some compilation errors got reported - // will be handled by line above so errors still will be reported properly just potentially not - // all of them (because we cancelled the compilation) - if (dreporter.cancelled) handleCompilationCancellation(dreporter, log) - } - def handleErrors(dreporter: DelegatingReporter, log: Logger): Nothing = - { - debug(log, "Compilation failed (CompilerInterface)") - throw new InterfaceCompileFailed(args, dreporter.problems, "Compilation failed") - } - def handleCompilationCancellation(dreporter: DelegatingReporter, log: Logger): Nothing = { - assert(dreporter.cancelled, "We should get here only if when compilation got cancelled") - debug(log, "Compilation cancelled (CompilerInterface)") - throw new InterfaceCompileCancelled(args, "Compilation has been cancelled") - } - def processUnreportedWarnings(run: compiler.Run) - { - // allConditionalWarnings and the ConditionalWarning class are only in 2.10+ - final class CondWarnCompat(val what: String, val warnings: mutable.ListBuffer[(compiler.Position, 
String)]) - implicit def compat(run: AnyRef): Compat = new Compat - final class Compat { def allConditionalWarnings = List[CondWarnCompat]() } + def run(sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress): Unit = synchronized { + debug(log, "Running cached compiler " + hashCode.toHexString + ", interfacing (CompilerInterface) with Scala compiler " + scala.tools.nsc.Properties.versionString) + val dreporter = DelegatingReporter(settings, delegate) + try { run(sources.toList, changes, callback, log, dreporter, progress) } + finally { dreporter.dropDelegate() } + } + private[this] def run(sources: List[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, dreporter: DelegatingReporter, compileProgress: CompileProgress) { + if (command.shouldStopWithInfo) { + dreporter.info(null, command.getInfoMessage(compiler), true) + throw new InterfaceCompileFailed(args, Array(), "Compiler option supplied that disabled actual compilation.") + } + if (noErrors(dreporter)) { + debug(log, args.mkString("Calling Scala compiler with arguments (CompilerInterface):\n\t", "\n\t", "")) + compiler.set(callback, dreporter) + val run = new compiler.Run with compiler.RunCompat { + override def informUnitStarting(phase: Phase, unit: compiler.CompilationUnit) { + compileProgress.startUnit(phase.name, unit.source.path) + } + override def progress(current: Int, total: Int) { + if (!compileProgress.advance(current, total)) + cancel + } + } + val sortedSourceFiles = sources.map(_.getAbsolutePath).sortWith(_ < _) + run compile sortedSourceFiles + processUnreportedWarnings(run) + dreporter.problems foreach { p => callback.problem(p.category, p.position, p.message, p.severity, true) } + } + dreporter.printSummary() + if (!noErrors(dreporter)) handleErrors(dreporter, log) + // the case where we cancelled compilation _after_ some compilation errors got reported + // will be handled by line above so 
errors still will be reported properly just potentially not + // all of them (because we cancelled the compilation) + if (dreporter.cancelled) handleCompilationCancellation(dreporter, log) + } + def handleErrors(dreporter: DelegatingReporter, log: Logger): Nothing = + { + debug(log, "Compilation failed (CompilerInterface)") + throw new InterfaceCompileFailed(args, dreporter.problems, "Compilation failed") + } + def handleCompilationCancellation(dreporter: DelegatingReporter, log: Logger): Nothing = { + assert(dreporter.cancelled, "We should get here only if when compilation got cancelled") + debug(log, "Compilation cancelled (CompilerInterface)") + throw new InterfaceCompileCancelled(args, "Compilation has been cancelled") + } + def processUnreportedWarnings(run: compiler.Run) { + // allConditionalWarnings and the ConditionalWarning class are only in 2.10+ + final class CondWarnCompat(val what: String, val warnings: mutable.ListBuffer[(compiler.Position, String)]) + implicit def compat(run: AnyRef): Compat = new Compat + final class Compat { def allConditionalWarnings = List[CondWarnCompat]() } - val warnings = run.allConditionalWarnings - if(!warnings.isEmpty) - compiler.logUnreportedWarnings(warnings.map(cw => ("" /*cw.what*/, cw.warnings.toList))) - } + val warnings = run.allConditionalWarnings + if (!warnings.isEmpty) + compiler.logUnreportedWarnings(warnings.map(cw => ("" /*cw.what*/ , cw.warnings.toList))) + } - val compiler: Compiler = { - if (command.settings.Yrangepos.value) - new Compiler() with RangePositions // unnecessary in 2.11 - else - new Compiler() - } - class Compiler extends CallbackGlobal(command.settings, dreporter, output) - { - object dummy // temporary fix for #4426 - object sbtAnalyzer extends - { - val global: Compiler.this.type = Compiler.this - val phaseName = Analyzer.name - val runsAfter = List("jvm") - override val runsBefore = List("terminal") - val runsRightAfter = None - } - with SubComponent - { - val analyzer = new 
Analyzer(global) - def newPhase(prev: Phase) = analyzer.newPhase(prev) - def name = phaseName - } + val compiler: Compiler = { + if (command.settings.Yrangepos.value) + new Compiler() with RangePositions // unnecessary in 2.11 + else + new Compiler() + } + class Compiler extends CallbackGlobal(command.settings, dreporter, output) { + object dummy // temporary fix for #4426 + object sbtAnalyzer extends { + val global: Compiler.this.type = Compiler.this + val phaseName = Analyzer.name + val runsAfter = List("jvm") + override val runsBefore = List("terminal") + val runsRightAfter = None + } with SubComponent { + val analyzer = new Analyzer(global) + def newPhase(prev: Phase) = analyzer.newPhase(prev) + def name = phaseName + } - /** Phase that extracts dependency information */ - object sbtDependency extends - { - val global: Compiler.this.type = Compiler.this - val phaseName = Dependency.name - val runsAfter = List(API.name) - override val runsBefore = List("refchecks") - // keep API and dependency close to each other - // we might want to merge them in the future and even if don't - // do that then it makes sense to run those phases next to each other - val runsRightAfter = Some(API.name) - } - with SubComponent - { - val dependency = new Dependency(global) - def newPhase(prev: Phase) = dependency.newPhase(prev) - def name = phaseName - } + /** Phase that extracts dependency information */ + object sbtDependency extends { + val global: Compiler.this.type = Compiler.this + val phaseName = Dependency.name + val runsAfter = List(API.name) + override val runsBefore = List("refchecks") + // keep API and dependency close to each other + // we might want to merge them in the future and even if don't + // do that then it makes sense to run those phases next to each other + val runsRightAfter = Some(API.name) + } with SubComponent { + val dependency = new Dependency(global) + def newPhase(prev: Phase) = dependency.newPhase(prev) + def name = phaseName + } - /** This phase 
walks trees and constructs a representation of the public API, which is used for incremental recompilation. - * - * We extract the api after picklers, since that way we see the same symbol information/structure - * irrespective of whether we were typechecking from source / unpickling previously compiled classes. - */ - object apiExtractor extends - { - val global: Compiler.this.type = Compiler.this - val phaseName = API.name - val runsAfter = List("typer") - override val runsBefore = List("erasure") - // allow apiExtractor's phase to be overridden using the sbt.api.phase property - // (in case someone would like the old timing, which was right after typer) - // TODO: consider migrating to simply specifying "pickler" for `runsAfter` and "uncurry" for `runsBefore` - val runsRightAfter = Option(System.getProperty("sbt.api.phase")) orElse Some("pickler") - } - with SubComponent - { - val api = new API(global) - def newPhase(prev: Phase) = api.newPhase(prev) - def name = phaseName - } + /** + * This phase walks trees and constructs a representation of the public API, which is used for incremental recompilation. + * + * We extract the api after picklers, since that way we see the same symbol information/structure + * irrespective of whether we were typechecking from source / unpickling previously compiled classes. 
+ */ + object apiExtractor extends { + val global: Compiler.this.type = Compiler.this + val phaseName = API.name + val runsAfter = List("typer") + override val runsBefore = List("erasure") + // allow apiExtractor's phase to be overridden using the sbt.api.phase property + // (in case someone would like the old timing, which was right after typer) + // TODO: consider migrating to simply specifying "pickler" for `runsAfter` and "uncurry" for `runsBefore` + val runsRightAfter = Option(System.getProperty("sbt.api.phase")) orElse Some("pickler") + } with SubComponent { + val api = new API(global) + def newPhase(prev: Phase) = api.newPhase(prev) + def name = phaseName + } - override lazy val phaseDescriptors = - { - phasesSet += sbtAnalyzer - phasesSet += sbtDependency - phasesSet += apiExtractor - superComputePhaseDescriptors - } - // Required because computePhaseDescriptors is private in 2.8 (changed to protected sometime later). - private[this] def superComputePhaseDescriptors() = superCall("computePhaseDescriptors").asInstanceOf[List[SubComponent]] - private[this] def superDropRun(): Unit = - try { superCall("dropRun") } catch { case e: NoSuchMethodException => () } // dropRun not in 2.8.1 - private[this] def superCall(methodName: String): AnyRef = - { - val meth = classOf[Global].getDeclaredMethod(methodName) - meth.setAccessible(true) - meth.invoke(this) - } - def logUnreportedWarnings(seq: Seq[(String, List[(Position,String)])]): Unit = // Scala 2.10.x and later - { - val drep = reporter.asInstanceOf[DelegatingReporter] - for( (what, warnings) <- seq; (pos, msg) <- warnings) yield - callback.problem(what, drep.convert(pos), msg, Severity.Warn, false) - } + override lazy val phaseDescriptors = + { + phasesSet += sbtAnalyzer + phasesSet += sbtDependency + phasesSet += apiExtractor + superComputePhaseDescriptors + } + // Required because computePhaseDescriptors is private in 2.8 (changed to protected sometime later). 
+ private[this] def superComputePhaseDescriptors() = superCall("computePhaseDescriptors").asInstanceOf[List[SubComponent]] + private[this] def superDropRun(): Unit = + try { superCall("dropRun") } catch { case e: NoSuchMethodException => () } // dropRun not in 2.8.1 + private[this] def superCall(methodName: String): AnyRef = + { + val meth = classOf[Global].getDeclaredMethod(methodName) + meth.setAccessible(true) + meth.invoke(this) + } + def logUnreportedWarnings(seq: Seq[(String, List[(Position, String)])]): Unit = // Scala 2.10.x and later + { + val drep = reporter.asInstanceOf[DelegatingReporter] + for ((what, warnings) <- seq; (pos, msg) <- warnings) yield callback.problem(what, drep.convert(pos), msg, Severity.Warn, false) + } - def set(callback: AnalysisCallback, dreporter: DelegatingReporter) - { - this.callback0 = callback - reporter = dreporter - } - def clear() - { - callback0 = null - superDropRun() - reporter = null - } + def set(callback: AnalysisCallback, dreporter: DelegatingReporter) { + this.callback0 = callback + reporter = dreporter + } + def clear() { + callback0 = null + superDropRun() + reporter = null + } - def findClass(name: String): Option[(AbstractFile, Boolean)] = - getOutputClass(name).map(f => (f,true)) orElse findOnClassPath(name).map(f =>(f, false)) + def findClass(name: String): Option[(AbstractFile, Boolean)] = + getOutputClass(name).map(f => (f, true)) orElse findOnClassPath(name).map(f => (f, false)) - def getOutputClass(name: String): Option[AbstractFile] = - { - // This could be improved if a hint where to look is given. - val className = name.replace('.', '/') + ".class" - outputDirs map (new File(_, className)) find (_.exists) map (AbstractFile.getFile(_)) - } + def getOutputClass(name: String): Option[AbstractFile] = + { + // This could be improved if a hint where to look is given. 
+ val className = name.replace('.', '/') + ".class" + outputDirs map (new File(_, className)) find (_.exists) map (AbstractFile.getFile(_)) + } - def findOnClassPath(name: String): Option[AbstractFile] = - classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]]) + def findOnClassPath(name: String): Option[AbstractFile] = + classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]]) - - private[this] var callback0: AnalysisCallback = null - def callback: AnalysisCallback = callback0 - } + private[this] var callback0: AnalysisCallback = null + def callback: AnalysisCallback = callback0 + } } diff --git a/compile/interface/src/main/scala/xsbt/ConsoleInterface.scala b/compile/interface/src/main/scala/xsbt/ConsoleInterface.scala index 7aa637237..3819f746d 100644 --- a/compile/interface/src/main/scala/xsbt/ConsoleInterface.scala +++ b/compile/interface/src/main/scala/xsbt/ConsoleInterface.scala @@ -4,102 +4,94 @@ package xsbt import xsbti.Logger -import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings} +import scala.tools.nsc.{ GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings } import scala.tools.nsc.interpreter.InteractiveReader import scala.tools.nsc.reporters.Reporter import scala.tools.nsc.util.ClassPath -class ConsoleInterface -{ - def commandArguments(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Array[String] = - MakeSettings.sync(args, bootClasspathString, classpathString, log).recreateArgs.toArray[String] +class ConsoleInterface { + def commandArguments(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Array[String] = + MakeSettings.sync(args, bootClasspathString, classpathString, log).recreateArgs.toArray[String] - def run(args: Array[String], bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, 
bindNames: Array[String], bindValues: Array[Any], log: Logger) - { - lazy val interpreterSettings = MakeSettings.sync(args.toList, log) - val compilerSettings = MakeSettings.sync(args, bootClasspathString, classpathString, log) - - if(!bootClasspathString.isEmpty) - compilerSettings.bootclasspath.value = bootClasspathString - compilerSettings.classpath.value = classpathString - log.info(Message("Starting scala interpreter...")) - log.info(Message("")) - val loop = new InterpreterLoop { - - override def createInterpreter() = { - - if(loader ne null) - { - in = InteractiveReader.createDefault() - interpreter = new Interpreter(settings) - { - override protected def parentClassLoader = if(loader eq null) super.parentClassLoader else loader - override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter) - } - interpreter.setContextClassLoader() - } - else - super.createInterpreter() + def run(args: Array[String], bootClasspathString: String, classpathString: String, initialCommands: String, cleanupCommands: String, loader: ClassLoader, bindNames: Array[String], bindValues: Array[Any], log: Logger) { + lazy val interpreterSettings = MakeSettings.sync(args.toList, log) + val compilerSettings = MakeSettings.sync(args, bootClasspathString, classpathString, log) - def bind(values: Seq[(String,Any)]) - { - // for 2.8 compatibility - final class Compat { - def bindValue(id: String, value: Any) = - interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value) - } - implicit def compat(a: AnyRef): Compat = new Compat + if (!bootClasspathString.isEmpty) + compilerSettings.bootclasspath.value = bootClasspathString + compilerSettings.classpath.value = classpathString + log.info(Message("Starting scala interpreter...")) + log.info(Message("")) + val loop = new InterpreterLoop { - for( (id, value) <- values ) - interpreter.beQuietDuring(interpreter.bindValue(id, value)) - } + override def createInterpreter() = { 
- bind(bindNames zip bindValues) - - if(!initialCommands.isEmpty) - interpreter.interpret(initialCommands) - } - override def closeInterpreter() - { - if(!cleanupCommands.isEmpty) - interpreter.interpret(cleanupCommands) - super.closeInterpreter() - } - } - loop.main(if(loader eq null) compilerSettings else interpreterSettings) - } + if (loader ne null) { + in = InteractiveReader.createDefault() + interpreter = new Interpreter(settings) { + override protected def parentClassLoader = if (loader eq null) super.parentClassLoader else loader + override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter) + } + interpreter.setContextClassLoader() + } else + super.createInterpreter() + + def bind(values: Seq[(String, Any)]) { + // for 2.8 compatibility + final class Compat { + def bindValue(id: String, value: Any) = + interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value) + } + implicit def compat(a: AnyRef): Compat = new Compat + + for ((id, value) <- values) + interpreter.beQuietDuring(interpreter.bindValue(id, value)) + } + + bind(bindNames zip bindValues) + + if (!initialCommands.isEmpty) + interpreter.interpret(initialCommands) + } + override def closeInterpreter() { + if (!cleanupCommands.isEmpty) + interpreter.interpret(cleanupCommands) + super.closeInterpreter() + } + } + loop.main(if (loader eq null) compilerSettings else interpreterSettings) + } } -object MakeSettings -{ - def apply(args: List[String], log: Logger) = - { - val command = new GenericRunnerCommand(args, message => log.error(Message(message))) - if(command.ok) - command.settings - else - throw new InterfaceCompileFailed(Array(), Array(), command.usageMsg) - } +object MakeSettings { + def apply(args: List[String], log: Logger) = + { + val command = new GenericRunnerCommand(args, message => log.error(Message(message))) + if (command.ok) + command.settings + else + throw new InterfaceCompileFailed(Array(), Array(), 
command.usageMsg) + } - def sync(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Settings = - { - val compilerSettings = sync(args.toList, log) - if(!bootClasspathString.isEmpty) - compilerSettings.bootclasspath.value = bootClasspathString - compilerSettings.classpath.value = classpathString - compilerSettings - } + def sync(args: Array[String], bootClasspathString: String, classpathString: String, log: Logger): Settings = + { + val compilerSettings = sync(args.toList, log) + if (!bootClasspathString.isEmpty) + compilerSettings.bootclasspath.value = bootClasspathString + compilerSettings.classpath.value = classpathString + compilerSettings + } - def sync(options: List[String], log: Logger) = - { - val settings = apply(options, log) + def sync(options: List[String], log: Logger) = + { + val settings = apply(options, log) - // -Yrepl-sync is only in 2.9.1+ - final class Compat { - def Yreplsync = settings.BooleanSetting("-Yrepl-sync", "For compatibility only.") - } - implicit def compat(s: Settings): Compat = new Compat + // -Yrepl-sync is only in 2.9.1+ + final class Compat { + def Yreplsync = settings.BooleanSetting("-Yrepl-sync", "For compatibility only.") + } + implicit def compat(s: Settings): Compat = new Compat - settings.Yreplsync.value = true - settings - } + settings.Yreplsync.value = true + settings + } } diff --git a/compile/interface/src/main/scala/xsbt/DelegatingReporter.scala b/compile/interface/src/main/scala/xsbt/DelegatingReporter.scala index 35cc522df..732fafbb7 100644 --- a/compile/interface/src/main/scala/xsbt/DelegatingReporter.scala +++ b/compile/interface/src/main/scala/xsbt/DelegatingReporter.scala @@ -3,102 +3,95 @@ */ package xsbt - import xsbti.{F0,Logger,Maybe} - import java.io.File +import xsbti.{ F0, Logger, Maybe } +import java.io.File -private object DelegatingReporter -{ - def apply(settings: scala.tools.nsc.Settings, delegate: xsbti.Reporter): DelegatingReporter = - new 
DelegatingReporter(Command.getWarnFatal(settings), Command.getNoWarn(settings), delegate) +private object DelegatingReporter { + def apply(settings: scala.tools.nsc.Settings, delegate: xsbti.Reporter): DelegatingReporter = + new DelegatingReporter(Command.getWarnFatal(settings), Command.getNoWarn(settings), delegate) } // The following code is based on scala.tools.nsc.reporters.{AbstractReporter, ConsoleReporter} // Copyright 2002-2009 LAMP/EPFL // Original author: Martin Odersky -private final class DelegatingReporter(warnFatal: Boolean, noWarn: Boolean, private[this] var delegate: xsbti.Reporter) extends scala.tools.nsc.reporters.Reporter -{ - import scala.tools.nsc.util.{FakePos,NoPosition,Position} +private final class DelegatingReporter(warnFatal: Boolean, noWarn: Boolean, private[this] var delegate: xsbti.Reporter) extends scala.tools.nsc.reporters.Reporter { + import scala.tools.nsc.util.{ FakePos, NoPosition, Position } - def dropDelegate() { delegate = null } - def error(msg: String) { error(FakePos("scalac"), msg) } + def dropDelegate() { delegate = null } + def error(msg: String) { error(FakePos("scalac"), msg) } - def printSummary() = delegate.printSummary() + def printSummary() = delegate.printSummary() - override def hasErrors = delegate.hasErrors - override def hasWarnings = delegate.hasWarnings - def problems = delegate.problems - override def comment(pos: Position, msg: String) = delegate.comment(convert(pos), msg) + override def hasErrors = delegate.hasErrors + override def hasWarnings = delegate.hasWarnings + def problems = delegate.problems + override def comment(pos: Position, msg: String) = delegate.comment(convert(pos), msg) - override def reset = - { - super.reset - delegate.reset - } - protected def info0(pos: Position, msg: String, rawSeverity: Severity, force: Boolean) - { - val skip = rawSeverity == WARNING && noWarn - if (!skip) { - val severity = if(warnFatal && rawSeverity == WARNING) ERROR else rawSeverity - 
delegate.log(convert(pos), msg, convert(severity)) - } - } - def convert(posIn: Position): xsbti.Position = - { - val pos = - posIn match - { - case null | NoPosition => NoPosition - case x: FakePos => x - case x => - posIn.inUltimateSource(posIn.source) - } - pos match - { - case NoPosition | FakePos(_) => position(None, None, None, "", None, None, None) - case _ => makePosition(pos) - } - } - private[this] def makePosition(pos: Position): xsbti.Position = - { - val src = pos.source - val sourcePath = src.file.path - val sourceFile = src.file.file - val line = pos.line - val lineContent = pos.lineContent.stripLineEnd - val offset = getOffset(pos) - val pointer = offset - src.lineToOffset(src.offsetToLine(offset)) - val pointerSpace = ((lineContent: Seq[Char]).take(pointer).map { case '\t' => '\t'; case x => ' ' }).mkString - position(Some(sourcePath), Some(sourceFile), Some(line), lineContent, Some(offset), Some(pointer), Some(pointerSpace)) - } - private[this] def getOffset(pos: Position): Int = - { - // for compatibility with 2.8 - implicit def withPoint(p: Position): WithPoint = new WithPoint(pos) - final class WithPoint(val p: Position) { def point = p.offset.get } - pos.point - } - private[this] def position(sourcePath0: Option[String], sourceFile0: Option[File], line0: Option[Int], lineContent0: String, offset0: Option[Int], pointer0: Option[Int], pointerSpace0: Option[String]) = - new xsbti.Position - { - val line = o2mi(line0) - val lineContent = lineContent0 - val offset = o2mi(offset0) - val sourcePath = o2m(sourcePath0) - val sourceFile = o2m(sourceFile0) - val pointer = o2mi(pointer0) - val pointerSpace = o2m(pointerSpace0) - } + override def reset = + { + super.reset + delegate.reset + } + protected def info0(pos: Position, msg: String, rawSeverity: Severity, force: Boolean) { + val skip = rawSeverity == WARNING && noWarn + if (!skip) { + val severity = if (warnFatal && rawSeverity == WARNING) ERROR else rawSeverity + delegate.log(convert(pos), msg, 
convert(severity)) + } + } + def convert(posIn: Position): xsbti.Position = + { + val pos = + posIn match { + case null | NoPosition => NoPosition + case x: FakePos => x + case x => + posIn.inUltimateSource(posIn.source) + } + pos match { + case NoPosition | FakePos(_) => position(None, None, None, "", None, None, None) + case _ => makePosition(pos) + } + } + private[this] def makePosition(pos: Position): xsbti.Position = + { + val src = pos.source + val sourcePath = src.file.path + val sourceFile = src.file.file + val line = pos.line + val lineContent = pos.lineContent.stripLineEnd + val offset = getOffset(pos) + val pointer = offset - src.lineToOffset(src.offsetToLine(offset)) + val pointerSpace = ((lineContent: Seq[Char]).take(pointer).map { case '\t' => '\t'; case x => ' ' }).mkString + position(Some(sourcePath), Some(sourceFile), Some(line), lineContent, Some(offset), Some(pointer), Some(pointerSpace)) + } + private[this] def getOffset(pos: Position): Int = + { + // for compatibility with 2.8 + implicit def withPoint(p: Position): WithPoint = new WithPoint(pos) + final class WithPoint(val p: Position) { def point = p.offset.get } + pos.point + } + private[this] def position(sourcePath0: Option[String], sourceFile0: Option[File], line0: Option[Int], lineContent0: String, offset0: Option[Int], pointer0: Option[Int], pointerSpace0: Option[String]) = + new xsbti.Position { + val line = o2mi(line0) + val lineContent = lineContent0 + val offset = o2mi(offset0) + val sourcePath = o2m(sourcePath0) + val sourceFile = o2m(sourceFile0) + val pointer = o2mi(pointer0) + val pointerSpace = o2m(pointerSpace0) + } - import xsbti.Severity.{Info, Warn, Error} - private[this] def convert(sev: Severity): xsbti.Severity = - sev match - { - case INFO => Info - case WARNING => Warn - case ERROR => Error - } + import xsbti.Severity.{ Info, Warn, Error } + private[this] def convert(sev: Severity): xsbti.Severity = + sev match { + case INFO => Info + case WARNING => Warn + case ERROR 
=> Error + } - import java.lang.{Integer => I} - private[this] def o2mi(opt: Option[Int]): Maybe[I] = opt match { case None => Maybe.nothing[I]; case Some(s) => Maybe.just[I](s) } - private[this] def o2m[S](opt: Option[S]): Maybe[S] = opt match { case None => Maybe.nothing[S]; case Some(s) => Maybe.just(s) } + import java.lang.{ Integer => I } + private[this] def o2mi(opt: Option[Int]): Maybe[I] = opt match { case None => Maybe.nothing[I]; case Some(s) => Maybe.just[I](s) } + private[this] def o2m[S](opt: Option[S]): Maybe[S] = opt match { case None => Maybe.nothing[S]; case Some(s) => Maybe.just(s) } } diff --git a/compile/interface/src/main/scala/xsbt/Dependency.scala b/compile/interface/src/main/scala/xsbt/Dependency.scala index 1edae4ac0..b2b4e012d 100644 --- a/compile/interface/src/main/scala/xsbt/Dependency.scala +++ b/compile/interface/src/main/scala/xsbt/Dependency.scala @@ -3,15 +3,14 @@ */ package xsbt -import scala.tools.nsc.{io, symtab, Phase} -import io.{AbstractFile, PlainFile, ZipArchive} +import scala.tools.nsc.{ io, symtab, Phase } +import io.{ AbstractFile, PlainFile, ZipArchive } import symtab.Flags import java.io.File -object Dependency -{ - def name = "xsbt-dependency" +object Dependency { + def name = "xsbt-dependency" } /** * Extracts dependency information from each compilation unit. @@ -28,106 +27,97 @@ object Dependency * where it originates from. The Symbol->Classfile mapping is implemented by * LocateClassFile that we inherit from. 
*/ -final class Dependency(val global: CallbackGlobal) extends LocateClassFile -{ - import global._ +final class Dependency(val global: CallbackGlobal) extends LocateClassFile { + import global._ - def newPhase(prev: Phase): Phase = new DependencyPhase(prev) - private class DependencyPhase(prev: Phase) extends Phase(prev) - { - override def description = "Extracts dependency information" - def name = Dependency.name - def run - { - for(unit <- currentRun.units if !unit.isJava) - { - // build dependencies structure - val sourceFile = unit.source.file.file - if (global.callback.nameHashing) { - val dependenciesByMemberRef = extractDependenciesByMemberRef(unit) - for(on <- dependenciesByMemberRef) - processDependency(on, inherited=false) + def newPhase(prev: Phase): Phase = new DependencyPhase(prev) + private class DependencyPhase(prev: Phase) extends Phase(prev) { + override def description = "Extracts dependency information" + def name = Dependency.name + def run { + for (unit <- currentRun.units if !unit.isJava) { + // build dependencies structure + val sourceFile = unit.source.file.file + if (global.callback.nameHashing) { + val dependenciesByMemberRef = extractDependenciesByMemberRef(unit) + for (on <- dependenciesByMemberRef) + processDependency(on, inherited = false) - val dependenciesByInheritance = extractDependenciesByInheritance(unit) - for(on <- dependenciesByInheritance) - processDependency(on, inherited=true) - } else { - for(on <- unit.depends) processDependency(on, inherited=false) - for(on <- inheritedDependencies.getOrElse(sourceFile, Nil: Iterable[Symbol])) processDependency(on, inherited=true) - } - /** - * Handles dependency on given symbol by trying to figure out if represents a term - * that is coming from either source code (not necessarily compiled in this compilation - * run) or from class file and calls respective callback method. 
- */ - def processDependency(on: Symbol, inherited: Boolean) - { - def binaryDependency(file: File, className: String) = callback.binaryDependency(file, className, sourceFile, inherited) - val onSource = on.sourceFile - if(onSource == null) - { - classFile(on) match - { - case Some((f,className,inOutDir)) => - if(inOutDir && on.isJavaDefined) registerTopLevelSym(on) - f match - { - case ze: ZipArchive#Entry => for(zip <- ze.underlyingSource; zipFile <- Option(zip.file) ) binaryDependency(zipFile, className) - case pf: PlainFile => binaryDependency(pf.file, className) - case _ => () - } - case None => () - } - } - else if (onSource.file != sourceFile) - callback.sourceDependency(onSource.file, sourceFile, inherited) - } - } - } - } + val dependenciesByInheritance = extractDependenciesByInheritance(unit) + for (on <- dependenciesByInheritance) + processDependency(on, inherited = true) + } else { + for (on <- unit.depends) processDependency(on, inherited = false) + for (on <- inheritedDependencies.getOrElse(sourceFile, Nil: Iterable[Symbol])) processDependency(on, inherited = true) + } + /** + * Handles dependency on given symbol by trying to figure out if represents a term + * that is coming from either source code (not necessarily compiled in this compilation + * run) or from class file and calls respective callback method. 
+ */ + def processDependency(on: Symbol, inherited: Boolean) { + def binaryDependency(file: File, className: String) = callback.binaryDependency(file, className, sourceFile, inherited) + val onSource = on.sourceFile + if (onSource == null) { + classFile(on) match { + case Some((f, className, inOutDir)) => + if (inOutDir && on.isJavaDefined) registerTopLevelSym(on) + f match { + case ze: ZipArchive#Entry => for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) binaryDependency(zipFile, className) + case pf: PlainFile => binaryDependency(pf.file, className) + case _ => () + } + case None => () + } + } else if (onSource.file != sourceFile) + callback.sourceDependency(onSource.file, sourceFile, inherited) + } + } + } + } - /** - * Traverses given type and collects result of applying a partial function `pf`. - * - * NOTE: This class exists in Scala 2.10 as CollectTypeCollector but does not in earlier - * versions (like 2.9) of Scala compiler that incremental cmpiler supports so we had to - * reimplement that class here. - */ - private final class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser { - var collected: List[T] = Nil - def traverse(tpe: Type): Unit = { - if (pf.isDefinedAt(tpe)) - collected = pf(tpe) :: collected - mapOver(tpe) - } - } + /** + * Traverses given type and collects result of applying a partial function `pf`. + * + * NOTE: This class exists in Scala 2.10 as CollectTypeCollector but does not in earlier + * versions (like 2.9) of Scala compiler that incremental cmpiler supports so we had to + * reimplement that class here. 
+ */ + private final class CollectTypeTraverser[T](pf: PartialFunction[Type, T]) extends TypeTraverser { + var collected: List[T] = Nil + def traverse(tpe: Type): Unit = { + if (pf.isDefinedAt(tpe)) + collected = pf(tpe) :: collected + mapOver(tpe) + } + } - private abstract class ExtractDependenciesTraverser extends Traverser { - protected val depBuf = collection.mutable.ArrayBuffer.empty[Symbol] - protected def addDependency(dep: Symbol): Unit = depBuf += dep - def dependencies: collection.immutable.Set[Symbol] = { - // convert to immutable set and remove NoSymbol if we have one - depBuf.toSet - NoSymbol - } - } + private abstract class ExtractDependenciesTraverser extends Traverser { + protected val depBuf = collection.mutable.ArrayBuffer.empty[Symbol] + protected def addDependency(dep: Symbol): Unit = depBuf += dep + def dependencies: collection.immutable.Set[Symbol] = { + // convert to immutable set and remove NoSymbol if we have one + depBuf.toSet - NoSymbol + } + } - private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser { - override def traverse(tree: Tree): Unit = { - tree match { - case Import(expr, selectors) => - selectors.foreach { - case ImportSelector(nme.WILDCARD, _, null, _) => - // in case of wildcard import we do not rely on any particular name being defined - // on `expr`; all symbols that are being used will get caught through selections - case ImportSelector(name: Name, _, _, _) => - def lookupImported(name: Name) = expr.symbol.info.member(name) - // importing a name means importing both a term and a type (if they exist) - addDependency(lookupImported(name.toTermName)) - addDependency(lookupImported(name.toTypeName)) - } - case select: Select => - addDependency(select.symbol) - /* + private class ExtractDependenciesByMemberRefTraverser extends ExtractDependenciesTraverser { + override def traverse(tree: Tree): Unit = { + tree match { + case Import(expr, selectors) => + selectors.foreach { + case 
ImportSelector(nme.WILDCARD, _, null, _) => + // in case of wildcard import we do not rely on any particular name being defined + // on `expr`; all symbols that are being used will get caught through selections + case ImportSelector(name: Name, _, _, _) => + def lookupImported(name: Name) = expr.symbol.info.member(name) + // importing a name means importing both a term and a type (if they exist) + addDependency(lookupImported(name.toTermName)) + addDependency(lookupImported(name.toTypeName)) + } + case select: Select => + addDependency(select.symbol) + /* * Idents are used in number of situations: * - to refer to local variable * - to refer to a top-level package (other packages are nested selections) @@ -135,70 +125,70 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile * this looks fishy, see this thread: * https://groups.google.com/d/topic/scala-internals/Ms9WUAtokLo/discussion */ - case ident: Ident => - addDependency(ident.symbol) - case typeTree: TypeTree => - val typeSymbolCollector = new CollectTypeTraverser({ - case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol - }) - typeSymbolCollector.traverse(typeTree.tpe) - val deps = typeSymbolCollector.collected.toSet - deps.foreach(addDependency) - case Template(parents, self, body) => - traverseTrees(body) - /* + case ident: Ident => + addDependency(ident.symbol) + case typeTree: TypeTree => + val typeSymbolCollector = new CollectTypeTraverser({ + case tpe if !tpe.typeSymbol.isPackage => tpe.typeSymbol + }) + typeSymbolCollector.traverse(typeTree.tpe) + val deps = typeSymbolCollector.collected.toSet + deps.foreach(addDependency) + case Template(parents, self, body) => + traverseTrees(body) + /* * Some macros appear to contain themselves as original tree * In this case, we don't need to inspect the original tree because * we already inspected its expansion, which is equal. 
* See https://issues.scala-lang.org/browse/SI-8486 */ - case MacroExpansionOf(original) if original != tree => - this.traverse(original) - case other => () - } - super.traverse(tree) - } - } + case MacroExpansionOf(original) if original != tree => + this.traverse(original) + case other => () + } + super.traverse(tree) + } + } - private def extractDependenciesByMemberRef(unit: CompilationUnit): collection.immutable.Set[Symbol] = { - val traverser = new ExtractDependenciesByMemberRefTraverser - traverser.traverse(unit.body) - val dependencies = traverser.dependencies - dependencies.map(enclosingTopLevelClass) - } + private def extractDependenciesByMemberRef(unit: CompilationUnit): collection.immutable.Set[Symbol] = { + val traverser = new ExtractDependenciesByMemberRefTraverser + traverser.traverse(unit.body) + val dependencies = traverser.dependencies + dependencies.map(enclosingTopLevelClass) + } - /** Copied straight from Scala 2.10 as it does not exist in Scala 2.9 compiler */ - private final def debuglog(msg: => String) { - if (settings.debug.value) - log(msg) - } + /** Copied straight from Scala 2.10 as it does not exist in Scala 2.9 compiler */ + private final def debuglog(msg: => String) { + if (settings.debug.value) + log(msg) + } - private final class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser { - override def traverse(tree: Tree): Unit = tree match { - case Template(parents, self, body) => - // we are using typeSymbol and not typeSymbolDirect because we want - // type aliases to be expanded - val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet - debuglog("Parent type symbols for " + tree.pos + ": " + parentTypeSymbols.map(_.fullName)) - parentTypeSymbols.foreach(addDependency) - traverseTrees(body) - case tree => super.traverse(tree) - } - } + private final class ExtractDependenciesByInheritanceTraverser extends ExtractDependenciesTraverser { + override def traverse(tree: Tree): Unit = tree match { 
+ case Template(parents, self, body) => + // we are using typeSymbol and not typeSymbolDirect because we want + // type aliases to be expanded + val parentTypeSymbols = parents.map(parent => parent.tpe.typeSymbol).toSet + debuglog("Parent type symbols for " + tree.pos + ": " + parentTypeSymbols.map(_.fullName)) + parentTypeSymbols.foreach(addDependency) + traverseTrees(body) + case tree => super.traverse(tree) + } + } - private def extractDependenciesByInheritance(unit: CompilationUnit): collection.immutable.Set[Symbol] = { - val traverser = new ExtractDependenciesByInheritanceTraverser - traverser.traverse(unit.body) - val dependencies = traverser.dependencies - dependencies.map(enclosingTopLevelClass) - } + private def extractDependenciesByInheritance(unit: CompilationUnit): collection.immutable.Set[Symbol] = { + val traverser = new ExtractDependenciesByInheritanceTraverser + traverser.traverse(unit.body) + val dependencies = traverser.dependencies + dependencies.map(enclosingTopLevelClass) + } - /** - * We capture enclosing classes only because that's what CompilationUnit.depends does and we don't want - * to deviate from old behaviour too much for now. - */ - private def enclosingTopLevelClass(sym: Symbol): Symbol = - // for Scala 2.8 and 2.9 this method is provided through SymbolCompat - sym.enclosingTopLevelClass + /** + * We capture enclosing classes only because that's what CompilationUnit.depends does and we don't want + * to deviate from old behaviour too much for now. 
+ */ + private def enclosingTopLevelClass(sym: Symbol): Symbol = + // for Scala 2.8 and 2.9 this method is provided through SymbolCompat + sym.enclosingTopLevelClass } diff --git a/compile/interface/src/main/scala/xsbt/ExtractAPI.scala b/compile/interface/src/main/scala/xsbt/ExtractAPI.scala index acdc89e03..2b205398e 100644 --- a/compile/interface/src/main/scala/xsbt/ExtractAPI.scala +++ b/compile/interface/src/main/scala/xsbt/ExtractAPI.scala @@ -1,13 +1,13 @@ package xsbt import java.io.File -import java.util.{Arrays,Comparator} -import scala.tools.nsc.{io, plugins, symtab, Global, Phase} -import io.{AbstractFile, PlainFile, ZipArchive} -import plugins.{Plugin, PluginComponent} +import java.util.{ Arrays, Comparator } +import scala.tools.nsc.{ io, plugins, symtab, Global, Phase } +import io.{ AbstractFile, PlainFile, ZipArchive } +import plugins.{ Plugin, PluginComponent } import symtab.Flags -import scala.collection.mutable.{HashMap, HashSet, ListBuffer} -import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType} +import scala.collection.mutable.{ HashMap, HashSet, ListBuffer } +import xsbti.api.{ ClassLike, DefinitionType, PathComponent, SimpleType } /** * Extracts API representation out of Symbols and Types. @@ -20,365 +20,356 @@ import xsbti.api.{ClassLike, DefinitionType, PathComponent, SimpleType} * exposed to a client that can pass them to an instance of CallbackGlobal it holds. */ class ExtractAPI[GlobalType <: CallbackGlobal](val global: GlobalType, - // Tracks the source file associated with the CompilationUnit currently being processed by the API phase. - // This is used when recording inheritance dependencies. - sourceFile: File) extends Compat { + // Tracks the source file associated with the CompilationUnit currently being processed by the API phase. + // This is used when recording inheritance dependencies. 
+ sourceFile: File) extends Compat { - import global._ + import global._ - private def error(msg: String) = throw new RuntimeException(msg) + private def error(msg: String) = throw new RuntimeException(msg) - // this cache reduces duplicate work both here and when persisting - // caches on other structures had minimal effect on time and cache size - // (tried: Definition, Modifier, Path, Id, String) - private[this] val typeCache = new HashMap[(Symbol,Type), xsbti.api.Type] - // these caches are necessary for correctness - private[this] val structureCache = new HashMap[Symbol, xsbti.api.Structure] - private[this] val classLikeCache = new HashMap[(Symbol,Symbol), xsbti.api.ClassLike] - private[this] val pending = new HashSet[xsbti.api.Lazy[_]] + // this cache reduces duplicate work both here and when persisting + // caches on other structures had minimal effect on time and cache size + // (tried: Definition, Modifier, Path, Id, String) + private[this] val typeCache = new HashMap[(Symbol, Type), xsbti.api.Type] + // these caches are necessary for correctness + private[this] val structureCache = new HashMap[Symbol, xsbti.api.Structure] + private[this] val classLikeCache = new HashMap[(Symbol, Symbol), xsbti.api.ClassLike] + private[this] val pending = new HashSet[xsbti.api.Lazy[_]] - private[this] val emptyStringArray = new Array[String](0) + private[this] val emptyStringArray = new Array[String](0) - /** - * Implements a work-around for https://github.com/sbt/sbt/issues/823 - * - * The strategy is to rename all type variables bound by existential type to stable - * names by assigning to each type variable a De Bruijn-like index. As a result, each - * type variable gets name of this shape: - * - * "existential_${nestingLevel}_${i}" - * - * where `nestingLevel` indicates nesting level of existential types and `i` variable - * indicates position of type variable in given existential type. 
- * - * For example, let's assume we have the following classes declared: - * - * class A[T]; class B[T,U] - * - * and we have type A[_] that is expanded by Scala compiler into - * - * A[_$1] forSome { type _$1 } - * - * After applying our renaming strategy we get - * - * A[existential_0_0] forSome { type existential_0_0 } - * - * Let's consider a bit more complicated example which shows how our strategy deals with - * nested existential types: - * - * A[_ <: B[_, _]] - * - * which gets expanded into: - * - * A[_$1] forSome { - * type _$1 <: B[_$2, _$3] forSome { type _$2; type _$3 } - * } - * - * After applying our renaming strategy we get - * - * A[existential_0_0] forSome { - * type existential_0_0 <: B[existential_1_0, existential_1_1] forSome { - * type existential_1_0; type existential_1_1 - * } - * } - * - * Note how the first index (nesting level) is bumped for both existential types. - * - * This way, all names of existential type variables depend only on the structure of - * existential types and are kept stable. - * - * Both examples presented above used placeholder syntax for existential types but our - * strategy is applied uniformly to all existential types no matter if they are written - * using placeholder syntax or explicitly. - */ - private[this] object existentialRenamings { - private var nestingLevel: Int = 0 - import scala.collection.mutable.Map - private var renameTo: Map[Symbol, String] = Map.empty + /** + * Implements a work-around for https://github.com/sbt/sbt/issues/823 + * + * The strategy is to rename all type variables bound by existential type to stable + * names by assigning to each type variable a De Bruijn-like index. As a result, each + * type variable gets name of this shape: + * + * "existential_${nestingLevel}_${i}" + * + * where `nestingLevel` indicates nesting level of existential types and `i` variable + * indicates position of type variable in given existential type. 
+ * + * For example, let's assume we have the following classes declared: + * + * class A[T]; class B[T,U] + * + * and we have type A[_] that is expanded by Scala compiler into + * + * A[_$1] forSome { type _$1 } + * + * After applying our renaming strategy we get + * + * A[existential_0_0] forSome { type existential_0_0 } + * + * Let's consider a bit more complicated example which shows how our strategy deals with + * nested existential types: + * + * A[_ <: B[_, _]] + * + * which gets expanded into: + * + * A[_$1] forSome { + * type _$1 <: B[_$2, _$3] forSome { type _$2; type _$3 } + * } + * + * After applying our renaming strategy we get + * + * A[existential_0_0] forSome { + * type existential_0_0 <: B[existential_1_0, existential_1_1] forSome { + * type existential_1_0; type existential_1_1 + * } + * } + * + * Note how the first index (nesting level) is bumped for both existential types. + * + * This way, all names of existential type variables depend only on the structure of + * existential types and are kept stable. + * + * Both examples presented above used placeholder syntax for existential types but our + * strategy is applied uniformly to all existential types no matter if they are written + * using placeholder syntax or explicitly. 
+ */ + private[this] object existentialRenamings { + private var nestingLevel: Int = 0 + import scala.collection.mutable.Map + private var renameTo: Map[Symbol, String] = Map.empty - def leaveExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = { - nestingLevel -= 1 - assert(nestingLevel >= 0) - typeVariables.foreach(renameTo.remove) - } - def enterExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = { - nestingLevel += 1 - typeVariables.zipWithIndex foreach { case (tv, i) => - val newName = "existential_" + nestingLevel + "_" + i - renameTo(tv) = newName - } - } - def renaming(symbol: Symbol): Option[String] = renameTo.get(symbol) - } + def leaveExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = { + nestingLevel -= 1 + assert(nestingLevel >= 0) + typeVariables.foreach(renameTo.remove) + } + def enterExistentialTypeVariables(typeVariables: Seq[Symbol]): Unit = { + nestingLevel += 1 + typeVariables.zipWithIndex foreach { + case (tv, i) => + val newName = "existential_" + nestingLevel + "_" + i + renameTo(tv) = newName + } + } + def renaming(symbol: Symbol): Option[String] = renameTo.get(symbol) + } - // call back to the xsbti.SafeLazy class in main sbt code to construct a SafeLazy instance - // we pass a thunk, whose class is loaded by the interface class loader (this class's loader) - // SafeLazy ensures that once the value is forced, the thunk is nulled out and so - // references to the thunk's classes are not retained. Specifically, it allows the interface classes - // (those in this subproject) to be garbage collected after compilation. 
- private[this] val safeLazy = Class.forName("xsbti.SafeLazy").getMethod("apply", classOf[xsbti.F0[_]]) - private def lzy[S <: AnyRef](s: => S): xsbti.api.Lazy[S] = - { - val z = safeLazy.invoke(null, Message(s)).asInstanceOf[xsbti.api.Lazy[S]] - pending += z - z - } + // call back to the xsbti.SafeLazy class in main sbt code to construct a SafeLazy instance + // we pass a thunk, whose class is loaded by the interface class loader (this class's loader) + // SafeLazy ensures that once the value is forced, the thunk is nulled out and so + // references to the thunk's classes are not retained. Specifically, it allows the interface classes + // (those in this subproject) to be garbage collected after compilation. + private[this] val safeLazy = Class.forName("xsbti.SafeLazy").getMethod("apply", classOf[xsbti.F0[_]]) + private def lzy[S <: AnyRef](s: => S): xsbti.api.Lazy[S] = + { + val z = safeLazy.invoke(null, Message(s)).asInstanceOf[xsbti.api.Lazy[S]] + pending += z + z + } - /** - * Force all lazy structures. This is necessary so that we see the symbols/types at this phase and - * so that we don't hold on to compiler objects and classes - */ - def forceStructures(): Unit = - if(pending.isEmpty) - structureCache.clear() - else - { - val toProcess = pending.toList - pending.clear() - toProcess foreach { _.get() } - forceStructures() - } + /** + * Force all lazy structures. 
This is necessary so that we see the symbols/types at this phase and + * so that we don't hold on to compiler objects and classes + */ + def forceStructures(): Unit = + if (pending.isEmpty) + structureCache.clear() + else { + val toProcess = pending.toList + pending.clear() + toProcess foreach { _.get() } + forceStructures() + } - private def thisPath(sym: Symbol) = path(pathComponents(sym, Constants.thisPath :: Nil)) - private def path(components: List[PathComponent]) = new xsbti.api.Path(components.toArray[PathComponent]) - private def pathComponents(sym: Symbol, postfix: List[PathComponent]): List[PathComponent] = - { - if(sym == NoSymbol || sym.isRoot || sym.isEmptyPackageClass || sym.isRootPackage) postfix - else pathComponents(sym.owner, new xsbti.api.Id(simpleName(sym)) :: postfix) - } - private def simpleType(in: Symbol, t: Type): SimpleType = - processType(in, t) match - { - case s: SimpleType => s - case x => warning("Not a simple type:\n\tType: " + t + " (" + t.getClass + ")\n\tTransformed: " + x.getClass); Constants.emptyType - } - private def types(in: Symbol, t: List[Type]): Array[xsbti.api.Type] = t.toArray[Type].map(processType(in, _)) - private def projectionType(in: Symbol, pre: Type, sym: Symbol) = - { - if(pre == NoPrefix) - { - if(sym.isLocalClass || sym.isRoot || sym.isRootPackage) Constants.emptyType - else if(sym.isTypeParameterOrSkolem || sym.isExistentiallyBound) reference(sym) - else { - // this appears to come from an existential type in an inherited member- not sure why isExistential is false here - /*println("Warning: Unknown prefixless type: " + sym + " in " + sym.owner + " in " + sym.enclClass) + private def thisPath(sym: Symbol) = path(pathComponents(sym, Constants.thisPath :: Nil)) + private def path(components: List[PathComponent]) = new xsbti.api.Path(components.toArray[PathComponent]) + private def pathComponents(sym: Symbol, postfix: List[PathComponent]): List[PathComponent] = + { + if (sym == NoSymbol || sym.isRoot || 
sym.isEmptyPackageClass || sym.isRootPackage) postfix + else pathComponents(sym.owner, new xsbti.api.Id(simpleName(sym)) :: postfix) + } + private def simpleType(in: Symbol, t: Type): SimpleType = + processType(in, t) match { + case s: SimpleType => s + case x => warning("Not a simple type:\n\tType: " + t + " (" + t.getClass + ")\n\tTransformed: " + x.getClass); Constants.emptyType + } + private def types(in: Symbol, t: List[Type]): Array[xsbti.api.Type] = t.toArray[Type].map(processType(in, _)) + private def projectionType(in: Symbol, pre: Type, sym: Symbol) = + { + if (pre == NoPrefix) { + if (sym.isLocalClass || sym.isRoot || sym.isRootPackage) Constants.emptyType + else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound) reference(sym) + else { + // this appears to come from an existential type in an inherited member- not sure why isExistential is false here + /*println("Warning: Unknown prefixless type: " + sym + " in " + sym.owner + " in " + sym.enclClass) println("\tFlags: " + sym.flags + ", istype: " + sym.isType + ", absT: " + sym.isAbstractType + ", alias: " + sym.isAliasType + ", nonclass: " + isNonClassType(sym))*/ - reference(sym) - } - } - else if(sym.isRoot || sym.isRootPackage) Constants.emptyType - else new xsbti.api.Projection(simpleType(in, pre), simpleName(sym)) - } - private def reference(sym: Symbol): xsbti.api.ParameterRef = new xsbti.api.ParameterRef(tparamID(sym)) + reference(sym) + } + } else if (sym.isRoot || sym.isRootPackage) Constants.emptyType + else new xsbti.api.Projection(simpleType(in, pre), simpleName(sym)) + } + private def reference(sym: Symbol): xsbti.api.ParameterRef = new xsbti.api.ParameterRef(tparamID(sym)) - private def annotations(in: Symbol, as: List[AnnotationInfo]): Array[xsbti.api.Annotation] = as.toArray[AnnotationInfo].map(annotation(in,_)) - private def annotation(in: Symbol, a: AnnotationInfo) = - new xsbti.api.Annotation(processType(in, a.atp), - if(a.assocs.isEmpty) Array(new 
xsbti.api.AnnotationArgument("", a.args.mkString("(", ",", ")"))) // what else to do with a Tree? - else a.assocs.map { case (name, value) => new xsbti.api.AnnotationArgument(name.toString, value.toString) }.toArray[xsbti.api.AnnotationArgument] - ) - private def annotated(in: Symbol, as: List[AnnotationInfo], tpe: Type) = new xsbti.api.Annotated(processType(in, tpe), annotations(in, as)) + private def annotations(in: Symbol, as: List[AnnotationInfo]): Array[xsbti.api.Annotation] = as.toArray[AnnotationInfo].map(annotation(in, _)) + private def annotation(in: Symbol, a: AnnotationInfo) = + new xsbti.api.Annotation(processType(in, a.atp), + if (a.assocs.isEmpty) Array(new xsbti.api.AnnotationArgument("", a.args.mkString("(", ",", ")"))) // what else to do with a Tree? + else a.assocs.map { case (name, value) => new xsbti.api.AnnotationArgument(name.toString, value.toString) }.toArray[xsbti.api.AnnotationArgument] + ) + private def annotated(in: Symbol, as: List[AnnotationInfo], tpe: Type) = new xsbti.api.Annotated(processType(in, tpe), annotations(in, as)) - private def viewer(s: Symbol) = (if(s.isModule) s.moduleClass else s).thisType - private def printMember(label: String, in: Symbol, t: Type) = println(label + " in " + in + " : " + t + " (debug: " + debugString(t) + " )") - private def defDef(in: Symbol, s: Symbol) = - { - def build(t: Type, typeParams: Array[xsbti.api.TypeParameter], valueParameters: List[xsbti.api.ParameterList]): xsbti.api.Def = - { - def parameterList(syms: List[Symbol]): xsbti.api.ParameterList = - { - val isImplicitList = syms match { case head :: _ => isImplicit(head); case _ => false } - new xsbti.api.ParameterList(syms.map(parameterS).toArray, isImplicitList) - } - t match - { - case PolyType(typeParams0, base) => - assert(typeParams.isEmpty) - assert(valueParameters.isEmpty) - build(base, typeParameters(in, typeParams0), Nil) - case MethodType(params, resultType) => - build(resultType, typeParams, parameterList(params) :: 
valueParameters) - case Nullary(resultType) => // 2.9 and later - build(resultType, typeParams, valueParameters) - case returnType => - val t2 = processType(in, dropConst(returnType)) - new xsbti.api.Def(valueParameters.reverse.toArray, t2, typeParams, simpleName(s), getAccess(s), getModifiers(s), annotations(in,s)) - } - } - def parameterS(s: Symbol): xsbti.api.MethodParameter = - makeParameter(simpleName(s), s.info, s.info.typeSymbol, s) + private def viewer(s: Symbol) = (if (s.isModule) s.moduleClass else s).thisType + private def printMember(label: String, in: Symbol, t: Type) = println(label + " in " + in + " : " + t + " (debug: " + debugString(t) + " )") + private def defDef(in: Symbol, s: Symbol) = + { + def build(t: Type, typeParams: Array[xsbti.api.TypeParameter], valueParameters: List[xsbti.api.ParameterList]): xsbti.api.Def = + { + def parameterList(syms: List[Symbol]): xsbti.api.ParameterList = + { + val isImplicitList = syms match { case head :: _ => isImplicit(head); case _ => false } + new xsbti.api.ParameterList(syms.map(parameterS).toArray, isImplicitList) + } + t match { + case PolyType(typeParams0, base) => + assert(typeParams.isEmpty) + assert(valueParameters.isEmpty) + build(base, typeParameters(in, typeParams0), Nil) + case MethodType(params, resultType) => + build(resultType, typeParams, parameterList(params) :: valueParameters) + case Nullary(resultType) => // 2.9 and later + build(resultType, typeParams, valueParameters) + case returnType => + val t2 = processType(in, dropConst(returnType)) + new xsbti.api.Def(valueParameters.reverse.toArray, t2, typeParams, simpleName(s), getAccess(s), getModifiers(s), annotations(in, s)) + } + } + def parameterS(s: Symbol): xsbti.api.MethodParameter = + makeParameter(simpleName(s), s.info, s.info.typeSymbol, s) - // paramSym is only for 2.8 and is to determine if the parameter has a default - def makeParameter(name: String, tpe: Type, ts: Symbol, paramSym: Symbol): xsbti.api.MethodParameter = - { - import 
xsbti.api.ParameterModifier._ - val (t, special) = - if(ts == definitions.RepeatedParamClass)// || s == definitions.JavaRepeatedParamClass) - (tpe.typeArgs(0), Repeated) - else if(ts == definitions.ByNameParamClass) - (tpe.typeArgs(0), ByName) - else - (tpe, Plain) - new xsbti.api.MethodParameter(name, processType(in, t), hasDefault(paramSym), special) - } - val t = viewer(in).memberInfo(s) - build(t, Array(), Nil) - } - private def hasDefault(s: Symbol) = s != NoSymbol && s.hasFlag(Flags.DEFAULTPARAM) - private def fieldDef[T](in: Symbol, s: Symbol, keepConst: Boolean, create: (xsbti.api.Type, String, xsbti.api.Access, xsbti.api.Modifiers, Array[xsbti.api.Annotation]) => T): T = - { - val t = dropNullary(viewer(in).memberType(s)) - val t2 = if(keepConst) t else dropConst(t) - create(processType(in, t2), simpleName(s), getAccess(s), getModifiers(s), annotations(in, s)) - } - private def dropConst(t: Type): Type = t match { - case ConstantType(constant) => constant.tpe - case _ => t - } - private def dropNullary(t: Type): Type = t match { - case Nullary(un) => un - case _ => t - } + // paramSym is only for 2.8 and is to determine if the parameter has a default + def makeParameter(name: String, tpe: Type, ts: Symbol, paramSym: Symbol): xsbti.api.MethodParameter = + { + import xsbti.api.ParameterModifier._ + val (t, special) = + if (ts == definitions.RepeatedParamClass) // || s == definitions.JavaRepeatedParamClass) + (tpe.typeArgs(0), Repeated) + else if (ts == definitions.ByNameParamClass) + (tpe.typeArgs(0), ByName) + else + (tpe, Plain) + new xsbti.api.MethodParameter(name, processType(in, t), hasDefault(paramSym), special) + } + val t = viewer(in).memberInfo(s) + build(t, Array(), Nil) + } + private def hasDefault(s: Symbol) = s != NoSymbol && s.hasFlag(Flags.DEFAULTPARAM) + private def fieldDef[T](in: Symbol, s: Symbol, keepConst: Boolean, create: (xsbti.api.Type, String, xsbti.api.Access, xsbti.api.Modifiers, Array[xsbti.api.Annotation]) => T): T = + { + val t 
= dropNullary(viewer(in).memberType(s)) + val t2 = if (keepConst) t else dropConst(t) + create(processType(in, t2), simpleName(s), getAccess(s), getModifiers(s), annotations(in, s)) + } + private def dropConst(t: Type): Type = t match { + case ConstantType(constant) => constant.tpe + case _ => t + } + private def dropNullary(t: Type): Type = t match { + case Nullary(un) => un + case _ => t + } - private def typeDef(in: Symbol, s: Symbol): xsbti.api.TypeMember = - { - val (typeParams, tpe) = - viewer(in).memberInfo(s) match - { - case PolyType(typeParams0, base) => (typeParameters(in, typeParams0), base) - case t => (Array[xsbti.api.TypeParameter](), t) - } - val name = simpleName(s) - val access = getAccess(s) - val modifiers = getModifiers(s) - val as = annotations(in, s) + private def typeDef(in: Symbol, s: Symbol): xsbti.api.TypeMember = + { + val (typeParams, tpe) = + viewer(in).memberInfo(s) match { + case PolyType(typeParams0, base) => (typeParameters(in, typeParams0), base) + case t => (Array[xsbti.api.TypeParameter](), t) + } + val name = simpleName(s) + val access = getAccess(s) + val modifiers = getModifiers(s) + val as = annotations(in, s) - if(s.isAliasType) - new xsbti.api.TypeAlias(processType(in, tpe), typeParams, name, access, modifiers, as) - else if(s.isAbstractType) - { - val bounds = tpe.bounds - new xsbti.api.TypeDeclaration(processType(in, bounds.lo), processType(in, bounds.hi), typeParams, name, access, modifiers, as) - } - else - error("Unknown type member" + s) - } + if (s.isAliasType) + new xsbti.api.TypeAlias(processType(in, tpe), typeParams, name, access, modifiers, as) + else if (s.isAbstractType) { + val bounds = tpe.bounds + new xsbti.api.TypeDeclaration(processType(in, bounds.lo), processType(in, bounds.hi), typeParams, name, access, modifiers, as) + } else + error("Unknown type member" + s) + } - private def structure(in: Symbol, s: Symbol): xsbti.api.Structure = structure(viewer(in).memberInfo(s), s, true) - private def 
structure(info: Type): xsbti.api.Structure = structure(info, info.typeSymbol, false) - private def structure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure = - structureCache.getOrElseUpdate( s, mkStructure(info, s, inherit)) + private def structure(in: Symbol, s: Symbol): xsbti.api.Structure = structure(viewer(in).memberInfo(s), s, true) + private def structure(info: Type): xsbti.api.Structure = structure(info, info.typeSymbol, false) + private def structure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure = + structureCache.getOrElseUpdate(s, mkStructure(info, s, inherit)) - private def removeConstructors(ds: List[Symbol]): List[Symbol] = ds filter { !_.isConstructor} + private def removeConstructors(ds: List[Symbol]): List[Symbol] = ds filter { !_.isConstructor } - private def mkStructure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure = - { - val (declared, inherited) = info.members.reverse.partition(_.owner == s) - val baseTypes = info.baseClasses.tail.map(info.baseType) - val ds = if(s.isModuleClass) removeConstructors(declared) else declared - val is = if(inherit) removeConstructors(inherited) else Nil - mkStructure(s, baseTypes, ds, is) - } + private def mkStructure(info: Type, s: Symbol, inherit: Boolean): xsbti.api.Structure = + { + val (declared, inherited) = info.members.reverse.partition(_.owner == s) + val baseTypes = info.baseClasses.tail.map(info.baseType) + val ds = if (s.isModuleClass) removeConstructors(declared) else declared + val is = if (inherit) removeConstructors(inherited) else Nil + mkStructure(s, baseTypes, ds, is) + } - // If true, this template is publicly visible and should be processed as a public inheritance dependency. - // Local classes and local refinements will never be traversed by the api phase, so we don't need to check for that. 
- private[this] def isPublicStructure(s: Symbol): Boolean = - s.isStructuralRefinement || - // do not consider templates that are private[this] or private - !(s.isPrivate && (s.privateWithin == NoSymbol || s.isLocal)) + // If true, this template is publicly visible and should be processed as a public inheritance dependency. + // Local classes and local refinements will never be traversed by the api phase, so we don't need to check for that. + private[this] def isPublicStructure(s: Symbol): Boolean = + s.isStructuralRefinement || + // do not consider templates that are private[this] or private + !(s.isPrivate && (s.privateWithin == NoSymbol || s.isLocal)) - private def mkStructure(s: Symbol, bases: List[Type], declared: List[Symbol], inherited: List[Symbol]): xsbti.api.Structure = { - if(isPublicStructure(s)) - addInheritedDependencies(sourceFile, bases.map(_.dealias.typeSymbol)) - new xsbti.api.Structure(lzy(types(s, bases)), lzy(processDefinitions(s, declared)), lzy(processDefinitions(s, inherited))) - } - private def processDefinitions(in: Symbol, defs: List[Symbol]): Array[xsbti.api.Definition] = - sort(defs.toArray).flatMap( (d: Symbol) => definition(in, d)) - private[this] def sort(defs: Array[Symbol]): Array[Symbol] = { - Arrays.sort(defs, sortClasses) - defs - } + private def mkStructure(s: Symbol, bases: List[Type], declared: List[Symbol], inherited: List[Symbol]): xsbti.api.Structure = { + if (isPublicStructure(s)) + addInheritedDependencies(sourceFile, bases.map(_.dealias.typeSymbol)) + new xsbti.api.Structure(lzy(types(s, bases)), lzy(processDefinitions(s, declared)), lzy(processDefinitions(s, inherited))) + } + private def processDefinitions(in: Symbol, defs: List[Symbol]): Array[xsbti.api.Definition] = + sort(defs.toArray).flatMap((d: Symbol) => definition(in, d)) + private[this] def sort(defs: Array[Symbol]): Array[Symbol] = { + Arrays.sort(defs, sortClasses) + defs + } - private def definition(in: Symbol, sym: Symbol): Option[xsbti.api.Definition] = 
- { - def mkVar = Some(fieldDef(in, sym, false, new xsbti.api.Var(_,_,_,_,_))) - def mkVal = Some(fieldDef(in, sym, true, new xsbti.api.Val(_,_,_,_,_))) - if(isClass(sym)) - if(ignoreClass(sym)) None else Some(classLike(in, sym)) - else if(sym.isNonClassType) - Some(typeDef(in, sym)) - else if(sym.isVariable) - if(isSourceField(sym)) mkVar else None - else if(sym.isStable) - if(isSourceField(sym)) mkVal else None - else if(sym.isSourceMethod && !sym.isSetter) - if(sym.isGetter) mkVar else Some(defDef(in, sym)) - else - None - } - private def ignoreClass(sym: Symbol): Boolean = - sym.isLocalClass || sym.isAnonymousClass || sym.fullName.endsWith(LocalChild.toString) + private def definition(in: Symbol, sym: Symbol): Option[xsbti.api.Definition] = + { + def mkVar = Some(fieldDef(in, sym, false, new xsbti.api.Var(_, _, _, _, _))) + def mkVal = Some(fieldDef(in, sym, true, new xsbti.api.Val(_, _, _, _, _))) + if (isClass(sym)) + if (ignoreClass(sym)) None else Some(classLike(in, sym)) + else if (sym.isNonClassType) + Some(typeDef(in, sym)) + else if (sym.isVariable) + if (isSourceField(sym)) mkVar else None + else if (sym.isStable) + if (isSourceField(sym)) mkVal else None + else if (sym.isSourceMethod && !sym.isSetter) + if (sym.isGetter) mkVar else Some(defDef(in, sym)) + else + None + } + private def ignoreClass(sym: Symbol): Boolean = + sym.isLocalClass || sym.isAnonymousClass || sym.fullName.endsWith(LocalChild.toString) - // This filters private[this] vals/vars that were not in the original source. - // The getter will be used for processing instead. 
- private def isSourceField(sym: Symbol): Boolean = - { - val getter = sym.getter(sym.enclClass) - // the check `getter eq sym` is a precaution against infinite recursion - // `isParamAccessor` does not exist in all supported versions of Scala, so the flag check is done directly - (getter == NoSymbol && !sym.hasFlag(Flags.PARAMACCESSOR)) || (getter eq sym) - } - private def getModifiers(s: Symbol): xsbti.api.Modifiers = - { - import Flags._ - val absOver = s.hasFlag(ABSOVERRIDE) - val abs = s.hasFlag(ABSTRACT) || s.hasFlag(DEFERRED) || absOver - val over = s.hasFlag(OVERRIDE) || absOver - new xsbti.api.Modifiers(abs, over, s.isFinal, s.hasFlag(SEALED), isImplicit(s), s.hasFlag(LAZY), hasMacro(s)) - } + // This filters private[this] vals/vars that were not in the original source. + // The getter will be used for processing instead. + private def isSourceField(sym: Symbol): Boolean = + { + val getter = sym.getter(sym.enclClass) + // the check `getter eq sym` is a precaution against infinite recursion + // `isParamAccessor` does not exist in all supported versions of Scala, so the flag check is done directly + (getter == NoSymbol && !sym.hasFlag(Flags.PARAMACCESSOR)) || (getter eq sym) + } + private def getModifiers(s: Symbol): xsbti.api.Modifiers = + { + import Flags._ + val absOver = s.hasFlag(ABSOVERRIDE) + val abs = s.hasFlag(ABSTRACT) || s.hasFlag(DEFERRED) || absOver + val over = s.hasFlag(OVERRIDE) || absOver + new xsbti.api.Modifiers(abs, over, s.isFinal, s.hasFlag(SEALED), isImplicit(s), s.hasFlag(LAZY), hasMacro(s)) + } - private def isImplicit(s: Symbol) = s.hasFlag(Flags.IMPLICIT) - private def getAccess(c: Symbol): xsbti.api.Access = - { - if(c.isPublic) Constants.public - else if(c.isPrivateLocal) Constants.privateLocal - else if(c.isProtectedLocal) Constants.protectedLocal - else - { - val within = c.privateWithin - val qualifier = if(within == NoSymbol) Constants.unqualified else new xsbti.api.IdQualifier(within.fullName) - 
if(c.hasFlag(Flags.PROTECTED)) new xsbti.api.Protected(qualifier) - else new xsbti.api.Private(qualifier) - } - } + private def isImplicit(s: Symbol) = s.hasFlag(Flags.IMPLICIT) + private def getAccess(c: Symbol): xsbti.api.Access = + { + if (c.isPublic) Constants.public + else if (c.isPrivateLocal) Constants.privateLocal + else if (c.isProtectedLocal) Constants.protectedLocal + else { + val within = c.privateWithin + val qualifier = if (within == NoSymbol) Constants.unqualified else new xsbti.api.IdQualifier(within.fullName) + if (c.hasFlag(Flags.PROTECTED)) new xsbti.api.Protected(qualifier) + else new xsbti.api.Private(qualifier) + } + } - /** - * Replace all types that directly refer to the `forbidden` symbol by `NoType`. - * (a specialized version of substThisAndSym) - */ - class SuppressSymbolRef(forbidden: Symbol) extends TypeMap { - def apply(tp: Type) = - if (tp.typeSymbolDirect == forbidden) NoType - else mapOver(tp) - } + /** + * Replace all types that directly refer to the `forbidden` symbol by `NoType`. 
+ * (a specialized version of substThisAndSym) + */ + class SuppressSymbolRef(forbidden: Symbol) extends TypeMap { + def apply(tp: Type) = + if (tp.typeSymbolDirect == forbidden) NoType + else mapOver(tp) + } - private def processType(in: Symbol, t: Type): xsbti.api.Type = typeCache.getOrElseUpdate((in, t), makeType(in, t)) - private def makeType(in: Symbol, t: Type): xsbti.api.Type = - { + private def processType(in: Symbol, t: Type): xsbti.api.Type = typeCache.getOrElseUpdate((in, t), makeType(in, t)) + private def makeType(in: Symbol, t: Type): xsbti.api.Type = + { - val dealiased = t match { - case TypeRef(_, sym, _) if sym.isAliasType => t.dealias - case _ => t - } + val dealiased = t match { + case TypeRef(_, sym, _) if sym.isAliasType => t.dealias + case _ => t + } - dealiased match - { - case NoPrefix => Constants.emptyType - case ThisType(sym) => new xsbti.api.Singleton(thisPath(sym)) - case SingleType(pre, sym) => projectionType(in, pre, sym) - case ConstantType(constant) => new xsbti.api.Constant(processType(in, constant.tpe), constant.stringValue) + dealiased match { + case NoPrefix => Constants.emptyType + case ThisType(sym) => new xsbti.api.Singleton(thisPath(sym)) + case SingleType(pre, sym) => projectionType(in, pre, sym) + case ConstantType(constant) => new xsbti.api.Constant(processType(in, constant.tpe), constant.stringValue) - /* explaining the special-casing of references to refinement classes (https://support.typesafe.com/tickets/1882) + /* explaining the special-casing of references to refinement classes (https://support.typesafe.com/tickets/1882) * * goal: a representation of type references to refinement classes that's stable across compilation runs * (and thus insensitive to typing from source or unpickling from bytecode) @@ -393,152 +384,150 @@ class ExtractAPI[GlobalType <: CallbackGlobal](val global: GlobalType, * + expand the reference to the corresponding refinement type: doing that recursively may not terminate, but we can deal with 
that by approximating recursive references * (all we care about is being sound for recompilation: recompile iff a dependency changes, and this will happen as long as we have one unrolling of the reference to the refinement) */ - case TypeRef(pre, sym, Nil) if sym.isRefinementClass => - // Since we only care about detecting changes reliably, we unroll a reference to a refinement class once. - // Recursive references are simply replaced by NoType -- changes to the type will be seen in the first unrolling. - // The API need not be type correct, so this truncation is acceptable. Most of all, the API should be compact. - val unrolling = pre.memberInfo(sym) // this is a refinement type + case TypeRef(pre, sym, Nil) if sym.isRefinementClass => + // Since we only care about detecting changes reliably, we unroll a reference to a refinement class once. + // Recursive references are simply replaced by NoType -- changes to the type will be seen in the first unrolling. + // The API need not be type correct, so this truncation is acceptable. Most of all, the API should be compact. + val unrolling = pre.memberInfo(sym) // this is a refinement type - // in case there are recursive references, suppress them -- does this ever happen? - // we don't have a test case for this, so warn and hope we'll get a contribution for it :-) - val withoutRecursiveRefs = new SuppressSymbolRef(sym).mapOver(unrolling) - if (unrolling ne withoutRecursiveRefs) - reporter.warning(sym.pos, "sbt-api: approximated refinement ref"+ t +" (== "+ unrolling +") to "+ withoutRecursiveRefs +"\nThis is currently untested, please report the code you were compiling.") + // in case there are recursive references, suppress them -- does this ever happen? 
+ // we don't have a test case for this, so warn and hope we'll get a contribution for it :-) + val withoutRecursiveRefs = new SuppressSymbolRef(sym).mapOver(unrolling) + if (unrolling ne withoutRecursiveRefs) + reporter.warning(sym.pos, "sbt-api: approximated refinement ref" + t + " (== " + unrolling + ") to " + withoutRecursiveRefs + "\nThis is currently untested, please report the code you were compiling.") - structure(withoutRecursiveRefs) - case tr @ TypeRef(pre, sym, args) => - val base = projectionType(in, pre, sym) - if(args.isEmpty) - if(isRawType(tr)) - processType(in, rawToExistential(tr)) - else - base - else - new xsbti.api.Parameterized(base, types(in, args)) - case SuperType(thistpe: Type, supertpe: Type) => warning("sbt-api: Super type (not implemented): this=" + thistpe + ", super=" + supertpe); Constants.emptyType - case at: AnnotatedType => annotatedType(in, at) - case rt: CompoundType => structure(rt) - case t: ExistentialType => makeExistentialType(in, t) - case NoType => Constants.emptyType // this can happen when there is an error that will be reported by a later phase - case PolyType(typeParams, resultType) => new xsbti.api.Polymorphic(processType(in, resultType), typeParameters(in, typeParams)) - case Nullary(resultType) => warning("sbt-api: Unexpected nullary method type " + in + " in " + in.owner); Constants.emptyType - case _ => warning("sbt-api: Unhandled type " + t.getClass + " : " + t); Constants.emptyType - } - } - private def makeExistentialType(in: Symbol, t: ExistentialType): xsbti.api.Existential = { - val ExistentialType(typeVariables, qualified) = t - existentialRenamings.enterExistentialTypeVariables(typeVariables) - try { - val typeVariablesConverted = typeParameters(in, typeVariables) - val qualifiedConverted = processType(in, qualified) - new xsbti.api.Existential(qualifiedConverted, typeVariablesConverted) - } finally { - existentialRenamings.leaveExistentialTypeVariables(typeVariables) - } - } - private def 
typeParameters(in: Symbol, s: Symbol): Array[xsbti.api.TypeParameter] = typeParameters(in, s.typeParams) - private def typeParameters(in: Symbol, s: List[Symbol]): Array[xsbti.api.TypeParameter] = s.map(typeParameter(in,_)).toArray[xsbti.api.TypeParameter] - private def typeParameter(in: Symbol, s: Symbol): xsbti.api.TypeParameter = - { - val varianceInt = s.variance - import xsbti.api.Variance._ - val annots = annotations(in, s) - val variance = if(varianceInt < 0) Contravariant else if(varianceInt > 0) Covariant else Invariant - viewer(in).memberInfo(s) match - { - case TypeBounds(low, high) => new xsbti.api.TypeParameter( tparamID(s), annots, typeParameters(in, s), variance, processType(in, low), processType(in, high) ) - case PolyType(typeParams, base) => new xsbti.api.TypeParameter( tparamID(s), annots, typeParameters(in, typeParams), variance, processType(in, base.bounds.lo), processType(in, base.bounds.hi)) - case x => error("Unknown type parameter info: " + x.getClass) - } - } - private def tparamID(s: Symbol): String = { - val renameTo = existentialRenamings.renaming(s) - renameTo match { - case Some(rename) => - // can't use debuglog because it doesn't exist in Scala 2.9.x - if (settings.debug.value) - log("Renaming existential type variable " + s.fullName + " to " + rename) - rename - case None => - s.fullName - } - } - private def selfType(in: Symbol, s: Symbol): xsbti.api.Type = processType(in, s.thisSym.typeOfThis) + structure(withoutRecursiveRefs) + case tr @ TypeRef(pre, sym, args) => + val base = projectionType(in, pre, sym) + if (args.isEmpty) + if (isRawType(tr)) + processType(in, rawToExistential(tr)) + else + base + else + new xsbti.api.Parameterized(base, types(in, args)) + case SuperType(thistpe: Type, supertpe: Type) => + warning("sbt-api: Super type (not implemented): this=" + thistpe + ", super=" + supertpe); Constants.emptyType + case at: AnnotatedType => annotatedType(in, at) + case rt: CompoundType => structure(rt) + case t: 
ExistentialType => makeExistentialType(in, t) + case NoType => Constants.emptyType // this can happen when there is an error that will be reported by a later phase + case PolyType(typeParams, resultType) => new xsbti.api.Polymorphic(processType(in, resultType), typeParameters(in, typeParams)) + case Nullary(resultType) => + warning("sbt-api: Unexpected nullary method type " + in + " in " + in.owner); Constants.emptyType + case _ => warning("sbt-api: Unhandled type " + t.getClass + " : " + t); Constants.emptyType + } + } + private def makeExistentialType(in: Symbol, t: ExistentialType): xsbti.api.Existential = { + val ExistentialType(typeVariables, qualified) = t + existentialRenamings.enterExistentialTypeVariables(typeVariables) + try { + val typeVariablesConverted = typeParameters(in, typeVariables) + val qualifiedConverted = processType(in, qualified) + new xsbti.api.Existential(qualifiedConverted, typeVariablesConverted) + } finally { + existentialRenamings.leaveExistentialTypeVariables(typeVariables) + } + } + private def typeParameters(in: Symbol, s: Symbol): Array[xsbti.api.TypeParameter] = typeParameters(in, s.typeParams) + private def typeParameters(in: Symbol, s: List[Symbol]): Array[xsbti.api.TypeParameter] = s.map(typeParameter(in, _)).toArray[xsbti.api.TypeParameter] + private def typeParameter(in: Symbol, s: Symbol): xsbti.api.TypeParameter = + { + val varianceInt = s.variance + import xsbti.api.Variance._ + val annots = annotations(in, s) + val variance = if (varianceInt < 0) Contravariant else if (varianceInt > 0) Covariant else Invariant + viewer(in).memberInfo(s) match { + case TypeBounds(low, high) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, s), variance, processType(in, low), processType(in, high)) + case PolyType(typeParams, base) => new xsbti.api.TypeParameter(tparamID(s), annots, typeParameters(in, typeParams), variance, processType(in, base.bounds.lo), processType(in, base.bounds.hi)) + case x => error("Unknown type 
parameter info: " + x.getClass) + } + } + private def tparamID(s: Symbol): String = { + val renameTo = existentialRenamings.renaming(s) + renameTo match { + case Some(rename) => + // can't use debuglog because it doesn't exist in Scala 2.9.x + if (settings.debug.value) + log("Renaming existential type variable " + s.fullName + " to " + rename) + rename + case None => + s.fullName + } + } + private def selfType(in: Symbol, s: Symbol): xsbti.api.Type = processType(in, s.thisSym.typeOfThis) - def classLike(in: Symbol, c: Symbol): ClassLike = classLikeCache.getOrElseUpdate( (in,c), mkClassLike(in, c)) - private def mkClassLike(in: Symbol, c: Symbol): ClassLike = - { - val name = c.fullName - val isModule = c.isModuleClass || c.isModule - val struct = if(isModule) c.moduleClass else c - val defType = - if(c.isTrait) DefinitionType.Trait - else if(isModule) - { - if(c.isPackage) DefinitionType.PackageModule - else DefinitionType.Module - } - else DefinitionType.ClassDef - new xsbti.api.ClassLike(defType, lzy(selfType(in, c)), lzy(structure(in, struct)), emptyStringArray, typeParameters(in, c), name, getAccess(c), getModifiers(c), annotations(in, c)) - } + def classLike(in: Symbol, c: Symbol): ClassLike = classLikeCache.getOrElseUpdate((in, c), mkClassLike(in, c)) + private def mkClassLike(in: Symbol, c: Symbol): ClassLike = + { + val name = c.fullName + val isModule = c.isModuleClass || c.isModule + val struct = if (isModule) c.moduleClass else c + val defType = + if (c.isTrait) DefinitionType.Trait + else if (isModule) { + if (c.isPackage) DefinitionType.PackageModule + else DefinitionType.Module + } else DefinitionType.ClassDef + new xsbti.api.ClassLike(defType, lzy(selfType(in, c)), lzy(structure(in, struct)), emptyStringArray, typeParameters(in, c), name, getAccess(c), getModifiers(c), annotations(in, c)) + } - private[this] def isClass(s: Symbol) = s.isClass || s.isModule - // necessary to ensure a stable ordering of classes in the definitions list: - // modules and 
classes come first and are sorted by name - // all other definitions come later and are not sorted - private[this] val sortClasses = new Comparator[Symbol] { - def compare(a: Symbol, b: Symbol) = { - val aIsClass = isClass(a) - val bIsClass = isClass(b) - if(aIsClass == bIsClass) - if(aIsClass) - if(a.isModule == b.isModule) - a.fullName.compareTo(b.fullName) - else if(a.isModule) - -1 - else - 1 - else - 0 // substantial performance hit if fullNames are compared here - else if(aIsClass) - -1 - else - 1 - } - } - private object Constants - { - val local = new xsbti.api.ThisQualifier - val public = new xsbti.api.Public - val privateLocal = new xsbti.api.Private(local) - val protectedLocal = new xsbti.api.Protected(local) - val unqualified = new xsbti.api.Unqualified - val emptyPath = new xsbti.api.Path(Array()) - val thisPath = new xsbti.api.This - val emptyType = new xsbti.api.EmptyType - } + private[this] def isClass(s: Symbol) = s.isClass || s.isModule + // necessary to ensure a stable ordering of classes in the definitions list: + // modules and classes come first and are sorted by name + // all other definitions come later and are not sorted + private[this] val sortClasses = new Comparator[Symbol] { + def compare(a: Symbol, b: Symbol) = { + val aIsClass = isClass(a) + val bIsClass = isClass(b) + if (aIsClass == bIsClass) + if (aIsClass) + if (a.isModule == b.isModule) + a.fullName.compareTo(b.fullName) + else if (a.isModule) + -1 + else + 1 + else + 0 // substantial performance hit if fullNames are compared here + else if (aIsClass) + -1 + else + 1 + } + } + private object Constants { + val local = new xsbti.api.ThisQualifier + val public = new xsbti.api.Public + val privateLocal = new xsbti.api.Private(local) + val protectedLocal = new xsbti.api.Protected(local) + val unqualified = new xsbti.api.Unqualified + val emptyPath = new xsbti.api.Path(Array()) + val thisPath = new xsbti.api.This + val emptyType = new xsbti.api.EmptyType + } - private def simpleName(s: 
Symbol): String = - { - val n = s.originalName - val n2 = if(n.toString == "") n else n.decode - n2.toString.trim - } + private def simpleName(s: Symbol): String = + { + val n = s.originalName + val n2 = if (n.toString == "") n else n.decode + n2.toString.trim + } - private def annotations(in: Symbol, s: Symbol): Array[xsbti.api.Annotation] = - atPhase(currentRun.typerPhase) { - val base = if(s.hasFlag(Flags.ACCESSOR)) s.accessed else NoSymbol - val b = if(base == NoSymbol) s else base - // annotations from bean methods are not handled because: - // a) they are recorded as normal source methods anyway - // b) there is no way to distinguish them from user-defined methods - val associated = List(b, b.getter(b.enclClass), b.setter(b.enclClass)).filter(_ != NoSymbol) - associated.flatMap( ss => annotations(in, ss.annotations) ).distinct.toArray ; - } - private def annotatedType(in: Symbol, at: AnnotatedType): xsbti.api.Type = - { - val annots = at.annotations - if(annots.isEmpty) processType(in, at.underlying) else annotated(in, annots, at.underlying) - } + private def annotations(in: Symbol, s: Symbol): Array[xsbti.api.Annotation] = + atPhase(currentRun.typerPhase) { + val base = if (s.hasFlag(Flags.ACCESSOR)) s.accessed else NoSymbol + val b = if (base == NoSymbol) s else base + // annotations from bean methods are not handled because: + // a) they are recorded as normal source methods anyway + // b) there is no way to distinguish them from user-defined methods + val associated = List(b, b.getter(b.enclClass), b.setter(b.enclClass)).filter(_ != NoSymbol) + associated.flatMap(ss => annotations(in, ss.annotations)).distinct.toArray; + } + private def annotatedType(in: Symbol, at: AnnotatedType): xsbti.api.Type = + { + val annots = at.annotations + if (annots.isEmpty) processType(in, at.underlying) else annotated(in, annots, at.underlying) + } } \ No newline at end of file diff --git a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala 
b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala index ba8e87a1e..85b78e0d9 100644 --- a/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala +++ b/compile/interface/src/main/scala/xsbt/ExtractUsedNames.scala @@ -39,86 +39,85 @@ import scala.tools.nsc._ * */ class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) extends Compat { - import global._ + import global._ - def extract(unit: CompilationUnit): Set[String] = { - val tree = unit.body - val extractedByTreeWalk = extractByTreeWalk(tree) - extractedByTreeWalk - } + def extract(unit: CompilationUnit): Set[String] = { + val tree = unit.body + val extractedByTreeWalk = extractByTreeWalk(tree) + extractedByTreeWalk + } - private def extractByTreeWalk(tree: Tree): Set[String] = { - val namesBuffer = collection.mutable.ListBuffer.empty[String] - def addSymbol(symbol: Symbol): Unit = { - val symbolNameAsString = symbol.name.decode.trim - namesBuffer += symbolNameAsString - } + private def extractByTreeWalk(tree: Tree): Set[String] = { + val namesBuffer = collection.mutable.ListBuffer.empty[String] + def addSymbol(symbol: Symbol): Unit = { + val symbolNameAsString = symbol.name.decode.trim + namesBuffer += symbolNameAsString + } - def handleTreeNode(node: Tree): Unit = { - def handleMacroExpansion(original: Tree): Unit = { - // Some macros seem to have themselves registered as original tree. - // In this case, we only need to handle the children of the original tree, - // because we already handled the expanded tree. - // See https://issues.scala-lang.org/browse/SI-8486 - if(original == node) original.children.foreach(handleTreeNode) - else original.foreach(handleTreeNode) - } + def handleTreeNode(node: Tree): Unit = { + def handleMacroExpansion(original: Tree): Unit = { + // Some macros seem to have themselves registered as original tree. + // In this case, we only need to handle the children of the original tree, + // because we already handled the expanded tree. 
+ // See https://issues.scala-lang.org/browse/SI-8486 + if (original == node) original.children.foreach(handleTreeNode) + else original.foreach(handleTreeNode) + } - def handleClassicTreeNode(node: Tree): Unit = node match { - case _: DefTree | _: Template => () - // turns out that Import node has a TermSymbol associated with it - // I (Grzegorz) tried to understand why it's there and what does it represent but - // that logic was introduced in 2005 without any justification I'll just ignore the - // import node altogether and just process the selectors in the import node - case Import(_, selectors: List[ImportSelector]) => - def usedNameInImportSelector(name: Name): Unit = - if ((name != null) && (name != nme.WILDCARD)) namesBuffer += name.toString - selectors foreach { selector => - usedNameInImportSelector(selector.name) - usedNameInImportSelector(selector.rename) - } - // TODO: figure out whether we should process the original tree or walk the type - // the argument for processing the original tree: we process what user wrote - // the argument for processing the type: we catch all transformations that typer applies - // to types but that might be a bad thing because it might expand aliases eagerly which - // not what we need - case t: TypeTree if t.original != null => - t.original.foreach(handleTreeNode) - case t if t.hasSymbol && eligibleAsUsedName(t.symbol) => - addSymbol(t.symbol) - case _ => () - } + def handleClassicTreeNode(node: Tree): Unit = node match { + case _: DefTree | _: Template => () + // turns out that Import node has a TermSymbol associated with it + // I (Grzegorz) tried to understand why it's there and what does it represent but + // that logic was introduced in 2005 without any justification I'll just ignore the + // import node altogether and just process the selectors in the import node + case Import(_, selectors: List[ImportSelector]) => + def usedNameInImportSelector(name: Name): Unit = + if ((name != null) && (name != nme.WILDCARD)) 
namesBuffer += name.toString + selectors foreach { selector => + usedNameInImportSelector(selector.name) + usedNameInImportSelector(selector.rename) + } + // TODO: figure out whether we should process the original tree or walk the type + // the argument for processing the original tree: we process what user wrote + // the argument for processing the type: we catch all transformations that typer applies + // to types but that might be a bad thing because it might expand aliases eagerly which + // not what we need + case t: TypeTree if t.original != null => + t.original.foreach(handleTreeNode) + case t if t.hasSymbol && eligibleAsUsedName(t.symbol) => + addSymbol(t.symbol) + case _ => () + } - node match { - case MacroExpansionOf(original) => - handleClassicTreeNode(node) - handleMacroExpansion(original) - case _ => - handleClassicTreeNode(node) - } - } + node match { + case MacroExpansionOf(original) => + handleClassicTreeNode(node) + handleMacroExpansion(original) + case _ => + handleClassicTreeNode(node) + } + } - tree.foreach(handleTreeNode) - namesBuffer.toSet - } + tree.foreach(handleTreeNode) + namesBuffer.toSet + } + /** + * Needed for compatibility with Scala 2.8 which doesn't define `tpnme` + */ + private object tpnme { + val EMPTY = nme.EMPTY.toTypeName + val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName + } - /** - * Needed for compatibility with Scala 2.8 which doesn't define `tpnme` - */ - private object tpnme { - val EMPTY = nme.EMPTY.toTypeName - val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName - } + private def eligibleAsUsedName(symbol: Symbol): Boolean = { + def emptyName(name: Name): Boolean = name match { + case nme.EMPTY | nme.EMPTY_PACKAGE_NAME | tpnme.EMPTY | tpnme.EMPTY_PACKAGE_NAME => true + case _ => false + } - private def eligibleAsUsedName(symbol: Symbol): Boolean = { - def emptyName(name: Name): Boolean = name match { - case nme.EMPTY | nme.EMPTY_PACKAGE_NAME | tpnme.EMPTY | tpnme.EMPTY_PACKAGE_NAME => true - case _ 
=> false - } - - (symbol != NoSymbol) && - !symbol.isSynthetic && - !emptyName(symbol.name) - } + (symbol != NoSymbol) && + !symbol.isSynthetic && + !emptyName(symbol.name) + } } diff --git a/compile/interface/src/main/scala/xsbt/LocateClassFile.scala b/compile/interface/src/main/scala/xsbt/LocateClassFile.scala index 5fa889228..c2faf24fb 100644 --- a/compile/interface/src/main/scala/xsbt/LocateClassFile.scala +++ b/compile/interface/src/main/scala/xsbt/LocateClassFile.scala @@ -11,41 +11,37 @@ import java.io.File /** * Contains utility methods for looking up class files corresponding to Symbols. */ -abstract class LocateClassFile extends Compat -{ - val global: CallbackGlobal - import global._ +abstract class LocateClassFile extends Compat { + val global: CallbackGlobal + import global._ - private[this] final val classSeparator = '.' - protected def classFile(sym: Symbol): Option[(AbstractFile, String, Boolean)] = - // package can never have a corresponding class file; this test does not - // catch package objects (that do not have this flag set) - if (sym hasFlag scala.tools.nsc.symtab.Flags.PACKAGE) None else - { - import scala.tools.nsc.symtab.Flags - val name = flatname(sym, classSeparator) + moduleSuffix(sym) - findClass(name).map { case (file,inOut) => (file, name,inOut) } orElse { - if(isTopLevelModule(sym)) - { - val linked = sym.companionClass - if(linked == NoSymbol) - None - else - classFile(linked) - } - else - None - } - } - private def flatname(s: Symbol, separator: Char) = - atPhase(currentRun.flattenPhase.next) { s fullName separator } + private[this] final val classSeparator = '.' 
+ protected def classFile(sym: Symbol): Option[(AbstractFile, String, Boolean)] = + // package can never have a corresponding class file; this test does not + // catch package objects (that do not have this flag set) + if (sym hasFlag scala.tools.nsc.symtab.Flags.PACKAGE) None else { + import scala.tools.nsc.symtab.Flags + val name = flatname(sym, classSeparator) + moduleSuffix(sym) + findClass(name).map { case (file, inOut) => (file, name, inOut) } orElse { + if (isTopLevelModule(sym)) { + val linked = sym.companionClass + if (linked == NoSymbol) + None + else + classFile(linked) + } else + None + } + } + private def flatname(s: Symbol, separator: Char) = + atPhase(currentRun.flattenPhase.next) { s fullName separator } - protected def isTopLevelModule(sym: Symbol): Boolean = - atPhase (currentRun.picklerPhase.next) { - sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass - } - protected def className(s: Symbol, sep: Char, dollarRequired: Boolean): String = - flatname(s, sep) + (if(dollarRequired) "$" else "") - protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File = - new File(outputDirectory, className(s, File.separatorChar, separatorRequired) + ".class") + protected def isTopLevelModule(sym: Symbol): Boolean = + atPhase(currentRun.picklerPhase.next) { + sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass + } + protected def className(s: Symbol, sep: Char, dollarRequired: Boolean): String = + flatname(s, sep) + (if (dollarRequired) "$" else "") + protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File = + new File(outputDirectory, className(s, File.separatorChar, separatorRequired) + ".class") } diff --git a/compile/interface/src/main/scala/xsbt/Log.scala b/compile/interface/src/main/scala/xsbt/Log.scala index 8462fb20f..8b31bb9b2 100644 --- a/compile/interface/src/main/scala/xsbt/Log.scala +++ b/compile/interface/src/main/scala/xsbt/Log.scala @@ -3,9 +3,8 @@ */ package 
xsbt -object Log -{ - def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg)) - def settingsError(log: xsbti.Logger): String => Unit = - s => log.error(Message(s)) +object Log { + def debug(log: xsbti.Logger, msg: => String) = log.debug(Message(msg)) + def settingsError(log: xsbti.Logger): String => Unit = + s => log.error(Message(s)) } \ No newline at end of file diff --git a/compile/interface/src/main/scala/xsbt/Message.scala b/compile/interface/src/main/scala/xsbt/Message.scala index 3db251747..9ce888d58 100644 --- a/compile/interface/src/main/scala/xsbt/Message.scala +++ b/compile/interface/src/main/scala/xsbt/Message.scala @@ -3,7 +3,6 @@ */ package xsbt -object Message -{ - def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s } +object Message { + def apply[T](s: => T) = new xsbti.F0[T] { def apply() = s } } \ No newline at end of file diff --git a/compile/interface/src/main/scala/xsbt/ScaladocInterface.scala b/compile/interface/src/main/scala/xsbt/ScaladocInterface.scala index 3c77e263f..9c54631fa 100644 --- a/compile/interface/src/main/scala/xsbt/ScaladocInterface.scala +++ b/compile/interface/src/main/scala/xsbt/ScaladocInterface.scala @@ -3,75 +3,66 @@ */ package xsbt - import xsbti.Logger - import Log.debug +import xsbti.Logger +import Log.debug -class ScaladocInterface -{ - def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(args, log, delegate)).run +class ScaladocInterface { + def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(args, log, delegate)).run } -private class Runner(args: Array[String], log: Logger, delegate: xsbti.Reporter) -{ - import scala.tools.nsc.{doc, Global, reporters} - import reporters.Reporter - val docSettings: doc.Settings = new doc.Settings(Log.settingsError(log)) - val command = Command(args.toList, docSettings) - val reporter = DelegatingReporter(docSettings, delegate) - def noErrors = !reporter.hasErrors && command.ok +private class Runner(args: 
Array[String], log: Logger, delegate: xsbti.Reporter) { + import scala.tools.nsc.{ doc, Global, reporters } + import reporters.Reporter + val docSettings: doc.Settings = new doc.Settings(Log.settingsError(log)) + val command = Command(args.toList, docSettings) + val reporter = DelegatingReporter(docSettings, delegate) + def noErrors = !reporter.hasErrors && command.ok - import forScope._ - def run() - { - debug(log, "Calling Scaladoc with arguments:\n\t" + args.mkString("\n\t")) - if(noErrors) - { - import doc._ // 2.8 trunk and Beta1-RC4 have doc.DocFactory. For other Scala versions, the next line creates forScope.DocFactory - val processor = new DocFactory(reporter, docSettings) - processor.document(command.files) - } - reporter.printSummary() - if(!noErrors) throw new InterfaceCompileFailed(args, reporter.problems, "Scaladoc generation failed") - } + import forScope._ + def run() { + debug(log, "Calling Scaladoc with arguments:\n\t" + args.mkString("\n\t")) + if (noErrors) { + import doc._ // 2.8 trunk and Beta1-RC4 have doc.DocFactory. 
For other Scala versions, the next line creates forScope.DocFactory + val processor = new DocFactory(reporter, docSettings) + processor.document(command.files) + } + reporter.printSummary() + if (!noErrors) throw new InterfaceCompileFailed(args, reporter.problems, "Scaladoc generation failed") + } - object forScope - { - class DocFactory(reporter: Reporter, docSettings: doc.Settings) // 2.7 compatibility - { - // see https://github.com/paulp/scala-full/commit/649823703a574641407d75d5c073be325ea31307 - trait GlobalCompat - { - def onlyPresentation = false + object forScope { + class DocFactory(reporter: Reporter, docSettings: doc.Settings) // 2.7 compatibility + { + // see https://github.com/paulp/scala-full/commit/649823703a574641407d75d5c073be325ea31307 + trait GlobalCompat { + def onlyPresentation = false - def forScaladoc = false - } + def forScaladoc = false + } - object compiler extends Global(command.settings, reporter) with GlobalCompat - { - override def onlyPresentation = true - override def forScaladoc = true - class DefaultDocDriver // 2.8 source compatibility - { - assert(false) - def process(units: Iterator[CompilationUnit]) = error("for 2.8 compatibility only") - } - } - def document(ignore: Seq[String]) - { - import compiler._ - val run = new Run - run compile command.files + object compiler extends Global(command.settings, reporter) with GlobalCompat { + override def onlyPresentation = true + override def forScaladoc = true + class DefaultDocDriver // 2.8 source compatibility + { + assert(false) + def process(units: Iterator[CompilationUnit]) = error("for 2.8 compatibility only") + } + } + def document(ignore: Seq[String]) { + import compiler._ + val run = new Run + run compile command.files - val generator = - { - import doc._ - new DefaultDocDriver - { - lazy val global: compiler.type = compiler - lazy val settings = docSettings - } - } - generator.process(run.units) - } - } - } + val generator = + { + import doc._ + new DefaultDocDriver { + lazy 
val global: compiler.type = compiler + lazy val settings = docSettings + } + } + generator.process(run.units) + } + } + } } \ No newline at end of file diff --git a/compile/ivy/src/main/scala/sbt/compiler/ComponentCompiler.scala b/compile/ivy/src/main/scala/sbt/compiler/ComponentCompiler.scala index f4732aabf..24d4f0530 100644 --- a/compile/ivy/src/main/scala/sbt/compiler/ComponentCompiler.scala +++ b/compile/ivy/src/main/scala/sbt/compiler/ComponentCompiler.scala @@ -6,60 +6,62 @@ package compiler import java.io.File -object ComponentCompiler -{ - val xsbtiID = "xsbti" - val srcExtension = "-src" - val binSeparator = "-bin_" - val compilerInterfaceID = "compiler-interface" - val compilerInterfaceSrcID = compilerInterfaceID + srcExtension - val javaVersion = System.getProperty("java.class.version") +object ComponentCompiler { + val xsbtiID = "xsbti" + val srcExtension = "-src" + val binSeparator = "-bin_" + val compilerInterfaceID = "compiler-interface" + val compilerInterfaceSrcID = compilerInterfaceID + srcExtension + val javaVersion = System.getProperty("java.class.version") - def interfaceProvider(manager: ComponentManager): CompilerInterfaceProvider = new CompilerInterfaceProvider - { - def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = - { - // this is the instance used to compile the interface component - val componentCompiler = new ComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), manager) - log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version) - componentCompiler(compilerInterfaceID) - } - } + def interfaceProvider(manager: ComponentManager): CompilerInterfaceProvider = new CompilerInterfaceProvider { + def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = + { + // this is the instance used to compile the interface component + val componentCompiler = new ComponentCompiler(new RawCompiler(scalaInstance, ClasspathOptions.auto, log), 
manager) + log.debug("Getting " + compilerInterfaceID + " from component compiler for Scala " + scalaInstance.version) + componentCompiler(compilerInterfaceID) + } + } } -/** This class provides source components compiled with the provided RawCompiler. -* The compiled classes are cached using the provided component manager according -* to the actualVersion field of the RawCompiler.*/ -class ComponentCompiler(compiler: RawCompiler, manager: ComponentManager) -{ - import ComponentCompiler._ - def apply(id: String): File = - try { getPrecompiled(id) } - catch { case _: InvalidComponent => getLocallyCompiled(id) } +/** + * This class provides source components compiled with the provided RawCompiler. + * The compiled classes are cached using the provided component manager according + * to the actualVersion field of the RawCompiler. + */ +class ComponentCompiler(compiler: RawCompiler, manager: ComponentManager) { + import ComponentCompiler._ + def apply(id: String): File = + try { getPrecompiled(id) } + catch { case _: InvalidComponent => getLocallyCompiled(id) } - /** Gets the precompiled (distributed with sbt) component with the given 'id' - * If the component has not been precompiled, this throws InvalidComponent. */ - def getPrecompiled(id: String): File = manager.file( binaryID(id, false) )(IfMissing.Fail) - /** Get the locally compiled component with the given 'id' or compiles it if it has not been compiled yet. - * If the component does not exist, this throws InvalidComponent. 
*/ - def getLocallyCompiled(id: String): File = - { - val binID = binaryID(id, true) - manager.file(binID)( new IfMissing.Define(true, compileAndInstall(id, binID)) ) - } - def clearCache(id: String): Unit = manager.clearCache(binaryID(id, true)) - protected def binaryID(id: String, withJavaVersion: Boolean) = - { - val base = id + binSeparator + compiler.scalaInstance.actualVersion - if(withJavaVersion) base + "__" + javaVersion else base - } - protected def compileAndInstall(id: String, binID: String) - { - val srcID = id + srcExtension - IO.withTemporaryDirectory { binaryDirectory => - val targetJar = new File(binaryDirectory, id + ".jar") - val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail) - AnalyzingCompiler.compileSources(manager.files(srcID)(IfMissing.Fail), targetJar, xsbtiJars, id, compiler, manager.log) - manager.define(binID, Seq(targetJar)) - } - } + /** + * Gets the precompiled (distributed with sbt) component with the given 'id' + * If the component has not been precompiled, this throws InvalidComponent. + */ + def getPrecompiled(id: String): File = manager.file(binaryID(id, false))(IfMissing.Fail) + /** + * Get the locally compiled component with the given 'id' or compiles it if it has not been compiled yet. + * If the component does not exist, this throws InvalidComponent. 
+ */ + def getLocallyCompiled(id: String): File = + { + val binID = binaryID(id, true) + manager.file(binID)(new IfMissing.Define(true, compileAndInstall(id, binID))) + } + def clearCache(id: String): Unit = manager.clearCache(binaryID(id, true)) + protected def binaryID(id: String, withJavaVersion: Boolean) = + { + val base = id + binSeparator + compiler.scalaInstance.actualVersion + if (withJavaVersion) base + "__" + javaVersion else base + } + protected def compileAndInstall(id: String, binID: String) { + val srcID = id + srcExtension + IO.withTemporaryDirectory { binaryDirectory => + val targetJar = new File(binaryDirectory, id + ".jar") + val xsbtiJars = manager.files(xsbtiID)(IfMissing.Fail) + AnalyzingCompiler.compileSources(manager.files(srcID)(IfMissing.Fail), targetJar, xsbtiJars, id, compiler, manager.log) + manager.define(binID, Seq(targetJar)) + } + } } \ No newline at end of file diff --git a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala index 73b619e0f..53c7d8cdb 100644 --- a/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala +++ b/compile/persist/src/main/scala/sbt/inc/AnalysisFormats.scala @@ -4,147 +4,142 @@ package sbt package inc - import xsbti.api.{Source, Compilation} - import xsbti.{Position,Problem,Severity} - import xsbti.compile.{CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput} - import MultipleOutput.OutputGroup - import java.io.File - import sbinary._ - import DefaultProtocol._ - import DefaultProtocol.tuple2Format - import Logger.{m2o, position, problem} - import Relations.{Source => RSource, SourceDependencies} +import xsbti.api.{ Source, Compilation } +import xsbti.{ Position, Problem, Severity } +import xsbti.compile.{ CompileOrder, Output => APIOutput, SingleOutput, MultipleOutput } +import MultipleOutput.OutputGroup +import java.io.File +import sbinary._ +import DefaultProtocol._ +import DefaultProtocol.tuple2Format +import 
Logger.{ m2o, position, problem } +import Relations.{ Source => RSource, SourceDependencies } -@deprecated("Replaced by TextAnalysisFormat. OK to remove in 0.14.", since="0.13.1") -object AnalysisFormats -{ - type RFF = Relation[File, File] - type RFS = Relation[File, String] +@deprecated("Replaced by TextAnalysisFormat. OK to remove in 0.14.", since = "0.13.1") +object AnalysisFormats { + type RFF = Relation[File, File] + type RFS = Relation[File, String] + import System.{ currentTimeMillis => now } + val start = now + def time(label: String) = + { + val end = now + println(label + ": " + (end - start) + " ms") + } - import System.{currentTimeMillis => now} - val start = now - def time(label: String) = - { - val end = now - println(label + ": " + (end - start) + " ms") - } + def debug[T](label: String, f: Format[T]): Format[T] = new Format[T] { + def reads(in: Input): T = + { + time(label + ".read.start") + val r = f.reads(in) + time(label + ".read.end") + r + } + def writes(out: Output, t: T) { + time(label + ".write.start") + f.writes(out, t) + time(label + ".write.end") + } + } - def debug[T](label: String, f: Format[T]): Format[T] = new Format[T] - { - def reads(in: Input): T = - { - time(label + ".read.start") - val r = f.reads(in) - time(label + ".read.end") - r - } - def writes(out: Output, t: T) - { - time(label + ".write.start") - f.writes(out,t) - time(label + ".write.end") - } - } + implicit def analysisFormat(implicit stampsF: Format[Stamps], apisF: Format[APIs], relationsF: Format[Relations], + infosF: Format[SourceInfos], compilationsF: Format[Compilations]): Format[Analysis] = + asProduct5(Analysis.Empty.copy _)(a => (a.stamps, a.apis, a.relations, a.infos, a.compilations))(stampsF, apisF, relationsF, infosF, compilationsF) - implicit def analysisFormat(implicit stampsF: Format[Stamps], apisF: Format[APIs], relationsF: Format[Relations], - infosF: Format[SourceInfos], compilationsF: Format[Compilations]): Format[Analysis] = - asProduct5( 
Analysis.Empty.copy _)( a => (a.stamps, a.apis, a.relations, a.infos, a.compilations))(stampsF, apisF, relationsF, infosF, compilationsF) + implicit def infosFormat(implicit infoF: Format[Map[File, SourceInfo]]): Format[SourceInfos] = + wrap[SourceInfos, Map[File, SourceInfo]](_.allInfos, SourceInfos.make _) - implicit def infosFormat(implicit infoF: Format[Map[File, SourceInfo]]): Format[SourceInfos] = - wrap[SourceInfos, Map[File, SourceInfo]]( _.allInfos, SourceInfos.make _) + implicit def infoFormat: Format[SourceInfo] = + wrap[SourceInfo, (Seq[Problem], Seq[Problem])](si => (si.reportedProblems, si.unreportedProblems), { case (a, b) => SourceInfos.makeInfo(a, b) }) - implicit def infoFormat: Format[SourceInfo] = - wrap[SourceInfo, (Seq[Problem],Seq[Problem])](si => (si.reportedProblems, si.unreportedProblems), { case (a,b) => SourceInfos.makeInfo(a,b)}) + implicit def problemFormat: Format[Problem] = asProduct4(problem _)(p => (p.category, p.position, p.message, p.severity)) - implicit def problemFormat: Format[Problem] = asProduct4(problem _)( p => (p.category, p.position, p.message, p.severity)) + implicit def compilationsFormat: Format[Compilations] = { + implicit val compilationSeqF = seqFormat(xsbt.api.CompilationFormat) + wrap[Compilations, Seq[Compilation]](_.allCompilations, Compilations.make _) + } - implicit def compilationsFormat: Format[Compilations] = { - implicit val compilationSeqF = seqFormat(xsbt.api.CompilationFormat) - wrap[Compilations, Seq[Compilation]](_.allCompilations, Compilations.make _) - } + implicit def positionFormat: Format[Position] = + asProduct7(position _)(p => (m2o(p.line), p.lineContent, m2o(p.offset), m2o(p.pointer), m2o(p.pointerSpace), m2o(p.sourcePath), m2o(p.sourceFile))) - implicit def positionFormat: Format[Position] = - asProduct7( position _ )( p => (m2o(p.line), p.lineContent, m2o(p.offset), m2o(p.pointer), m2o(p.pointerSpace), m2o(p.sourcePath), m2o(p.sourceFile))) + implicit val fileOptionFormat: 
Format[Option[File]] = optionsAreFormat[File](fileFormat) + implicit val integerFormat: Format[Integer] = wrap[Integer, Int](_.toInt, Integer.valueOf) + implicit val severityFormat: Format[Severity] = + wrap[Severity, Byte](_.ordinal.toByte, b => Severity.values.apply(b.toInt)) - implicit val fileOptionFormat: Format[Option[File]] = optionsAreFormat[File](fileFormat) - implicit val integerFormat: Format[Integer] = wrap[Integer, Int](_.toInt, Integer.valueOf) - implicit val severityFormat: Format[Severity] = - wrap[Severity, Byte]( _.ordinal.toByte, b => Severity.values.apply(b.toInt) ) + implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): Format[CompileSetup] = + asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]((a, b, c, d, e) => new CompileSetup(a, b, c, d, e))(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF) + implicit val outputGroupFormat: Format[OutputGroup] = + asProduct2((a: File, b: File) => new OutputGroup { def sourceDirectory = a; def outputDirectory = b }) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat) + implicit val multipleOutputFormat: Format[MultipleOutput] = + wrap[MultipleOutput, Array[OutputGroup]]( + (_.outputGroups), + { groups => new MultipleOutput { def outputGroups = groups } } + ) + implicit val singleOutputFormat: Format[SingleOutput] = + wrap[SingleOutput, File]( + (_.outputDirectory), + { out => new SingleOutput { def outputDirectory = out } } + )(fileFormat) + implicit val outputFormat: Format[APIOutput] = asUnion(singleOutputFormat, multipleOutputFormat) - implicit def setupFormat(implicit outputF: Format[APIOutput], optionF: Format[CompileOptions], compilerVersion: Format[String], orderF: Format[CompileOrder], nameHashingF: Format[Boolean]): 
Format[CompileSetup] = - asProduct5[CompileSetup, APIOutput, CompileOptions, String, CompileOrder, Boolean]( (a,b,c,d,e) => new CompileSetup(a,b,c,d,e) )(s => (s.output, s.options, s.compilerVersion, s.order, s.nameHashing))(outputF, optionF, compilerVersion, orderF, nameHashingF) + implicit def stampsFormat(implicit prodF: Format[Map[File, Stamp]], srcF: Format[Map[File, Stamp]], binF: Format[Map[File, Stamp]], nameF: Format[Map[File, String]]): Format[Stamps] = + asProduct4(Stamps.apply _)(s => (s.products, s.sources, s.binaries, s.classNames))(prodF, srcF, binF, nameF) - implicit val outputGroupFormat: Format[OutputGroup] = - asProduct2((a: File,b: File) => new OutputGroup{def sourceDirectory = a; def outputDirectory = b}) { out => (out.sourceDirectory, out.outputDirectory) }(fileFormat, fileFormat) - implicit val multipleOutputFormat: Format[MultipleOutput] = - wrap[MultipleOutput, Array[OutputGroup]]( - (_.outputGroups), - { groups => new MultipleOutput { def outputGroups = groups } } - ) - implicit val singleOutputFormat: Format[SingleOutput] = - wrap[SingleOutput, File]( - (_.outputDirectory), - {out => new SingleOutput{def outputDirectory = out}} - )(fileFormat) - implicit val outputFormat: Format[APIOutput] = asUnion(singleOutputFormat, multipleOutputFormat) + implicit def stampFormat(implicit hashF: Format[Hash], modF: Format[LastModified], existsF: Format[Exists]): Format[Stamp] = + asUnion(hashF, modF, existsF) - implicit def stampsFormat(implicit prodF: Format[Map[File, Stamp]], srcF: Format[Map[File, Stamp]], binF: Format[Map[File, Stamp]], nameF: Format[Map[File, String]]): Format[Stamps] = - asProduct4( Stamps.apply _ )( s => (s.products, s.sources, s.binaries, s.classNames) )(prodF, srcF, binF, nameF) + implicit def apisFormat(implicit internalF: Format[Map[File, Source]], externalF: Format[Map[String, Source]]): Format[APIs] = + asProduct2(APIs.apply _)(as => (as.internal, as.external))(internalF, externalF) - implicit def stampFormat(implicit 
hashF: Format[Hash], modF: Format[LastModified], existsF: Format[Exists]): Format[Stamp] = - asUnion(hashF, modF, existsF) + implicit def relationsFormat(implicit prodF: Format[RFF], binF: Format[RFF], directF: Format[RSource], inheritedF: Format[RSource], memberRefF: Format[SourceDependencies], inheritanceF: Format[SourceDependencies], csF: Format[RFS], namesF: Format[RFS]): Format[Relations] = + { + def makeRelation(srcProd: RFF, binaryDep: RFF, direct: RSource, publicInherited: RSource, + memberRef: SourceDependencies, inheritance: SourceDependencies, classes: RFS, + nameHashing: Boolean, names: RFS): Relations = if (nameHashing) { + def isEmpty(sourceDependencies: RSource): Boolean = + sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty + // we check direct dependencies only because publicInherited dependencies are subset of direct + assert(isEmpty(direct), "Direct dependencies are not empty but `nameHashing` flag is enabled.") + Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names) + } else { + def isEmpty(sourceDependencies: SourceDependencies): Boolean = + sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty + // we check memberRef dependencies only because inheritance dependencies are subset of memberRef + assert(isEmpty(memberRef), "Direct dependencies are not empty but `nameHashing` flag is enabled.") + Relations.make(srcProd, binaryDep, direct, publicInherited, classes) + } + asProduct9[Relations, RFF, RFF, RSource, RSource, SourceDependencies, SourceDependencies, RFS, Boolean, RFS]((a, b, c, d, e, f, g, h, i) => makeRelation(a, b, c, d, e, f, g, h, i))( + rs => (rs.srcProd, rs.binaryDep, rs.direct, rs.publicInherited, rs.memberRef, rs.inheritance, rs.classes, rs.nameHashing, rs.names))( + prodF, binF, directF, inheritedF, memberRefF, inheritanceF, csF, implicitly[Format[Boolean]], namesF) + } - implicit def apisFormat(implicit internalF: Format[Map[File, Source]], 
externalF: Format[Map[String, Source]]): Format[APIs] = - asProduct2( APIs.apply _)( as => (as.internal, as.external) )(internalF, externalF) + implicit def relationsSourceFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[RSource] = + asProduct2[RSource, RFF, RFS]((a, b) => Relations.makeSource(a, b))(rs => (rs.internal, rs.external)) - implicit def relationsFormat(implicit prodF: Format[RFF], binF: Format[RFF], directF: Format[RSource], inheritedF: Format[RSource], memberRefF: Format[SourceDependencies], inheritanceF: Format[SourceDependencies], csF: Format[RFS], namesF: Format[RFS]): Format[Relations] = - { - def makeRelation(srcProd: RFF, binaryDep: RFF, direct: RSource, publicInherited: RSource, - memberRef: SourceDependencies, inheritance: SourceDependencies, classes: RFS, - nameHashing: Boolean, names: RFS): Relations = if (nameHashing) { - def isEmpty(sourceDependencies: RSource): Boolean = - sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty - // we check direct dependencies only because publicInherited dependencies are subset of direct - assert(isEmpty(direct), "Direct dependencies are not empty but `nameHashing` flag is enabled.") - Relations.make(srcProd, binaryDep, memberRef, inheritance, classes, names) - } else { - def isEmpty(sourceDependencies: SourceDependencies): Boolean = - sourceDependencies.internal.all.isEmpty && sourceDependencies.external.all.isEmpty - // we check memberRef dependencies only because inheritance dependencies are subset of memberRef - assert(isEmpty(memberRef), "Direct dependencies are not empty but `nameHashing` flag is enabled.") - Relations.make(srcProd, binaryDep, direct, publicInherited, classes) - } - asProduct9[Relations, RFF, RFF, RSource, RSource, SourceDependencies, SourceDependencies, RFS, Boolean, RFS]( (a,b,c,d,e,f,g,h,i) =>makeRelation(a,b,c,d,e,f,g,h,i) )( - rs => (rs.srcProd, rs.binaryDep, rs.direct, 
rs.publicInherited, rs.memberRef, rs.inheritance, rs.classes, rs.nameHashing, rs.names) )( - prodF, binF, directF, inheritedF, memberRefF, inheritanceF, csF, implicitly[Format[Boolean]], namesF) - } + implicit def relationsSourceDependenciesFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File, String]]): Format[SourceDependencies] = + asProduct2[SourceDependencies, RFF, RFS]((a, b) => Relations.makeSourceDependencies(a, b))(rs => (rs.internal, rs.external)) - implicit def relationsSourceFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File,String]]): Format[RSource] = - asProduct2[RSource, RFF, RFS]( (a, b) => Relations.makeSource(a,b))( rs => (rs.internal, rs.external)) + implicit def relationFormat[A, B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A, B]] = + asProduct2[Relation[A, B], Map[A, Set[B]], Map[B, Set[A]]](Relation.make _)(r => (r.forwardMap, r.reverseMap))(af, bf) - implicit def relationsSourceDependenciesFormat(implicit internalFormat: Format[Relation[File, File]], externalFormat: Format[Relation[File,String]]): Format[SourceDependencies] = - asProduct2[SourceDependencies, RFF, RFS]( (a, b) => Relations.makeSourceDependencies(a,b))( rs => (rs.internal, rs.external)) + implicit val sourceFormat: Format[Source] = xsbt.api.SourceFormat - implicit def relationFormat[A,B](implicit af: Format[Map[A, Set[B]]], bf: Format[Map[B, Set[A]]]): Format[Relation[A,B]] = - asProduct2[Relation[A,B], Map[A, Set[B]], Map[B, Set[A]]]( Relation.make _ )( r => (r.forwardMap, r.reverseMap) )(af, bf) + implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s)) + // can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions] + implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] = + wrap[CompileOptions, (Seq[String], Seq[String])](co => (co.options, 
co.javacOptions), os => new CompileOptions(os._1, os._2)) - implicit val sourceFormat: Format[Source] = xsbt.api.SourceFormat + implicit val orderFormat: Format[CompileOrder] = + { + val values = CompileOrder.values + wrap[CompileOrder, Int](_.ordinal, values) + } + implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x) - implicit def fileFormat: Format[File] = wrap[File, String](_.getAbsolutePath, s => new File(s)) - // can't require Format[Seq[String]] because its complexity is higher than Format[CompileOptions] - implicit def optsFormat(implicit strF: Format[String]): Format[CompileOptions] = - wrap[CompileOptions, (Seq[String],Seq[String])](co => (co.options, co.javacOptions), os => new CompileOptions(os._1, os._2)) - - implicit val orderFormat: Format[CompileOrder] = - { - val values = CompileOrder.values - wrap[CompileOrder, Int](_.ordinal, values) - } - implicit def seqFormat[T](implicit optionFormat: Format[T]): Format[Seq[T]] = viaSeq[Seq[T], T](x => x) - - implicit def hashStampFormat: Format[Hash] = wrap[Hash, Array[Byte]](_.value, new Hash(_)) - implicit def lastModFormat: Format[LastModified] = wrap[LastModified, Long](_.value, new LastModified(_)) - implicit def existsFormat: Format[Exists] = wrap[Exists, Boolean](_.value, new Exists(_)) + implicit def hashStampFormat: Format[Hash] = wrap[Hash, Array[Byte]](_.value, new Hash(_)) + implicit def lastModFormat: Format[LastModified] = wrap[LastModified, Long](_.value, new LastModified(_)) + implicit def existsFormat: Format[Exists] = wrap[Exists, Boolean](_.value, new Exists(_)) } diff --git a/compile/persist/src/main/scala/sbt/inc/FileBasedStore.scala b/compile/persist/src/main/scala/sbt/inc/FileBasedStore.scala index 49c7be0bc..2f12b327b 100644 --- a/compile/persist/src/main/scala/sbt/inc/FileBasedStore.scala +++ b/compile/persist/src/main/scala/sbt/inc/FileBasedStore.scala @@ -4,18 +4,17 @@ package sbt package inc - import java.io.File +import 
java.io.File -object FileBasedStore -{ - def apply(file: File): AnalysisStore = new AnalysisStore { - def set(analysis: Analysis, setup: CompileSetup) { - Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) } +object FileBasedStore { + def apply(file: File): AnalysisStore = new AnalysisStore { + def set(analysis: Analysis, setup: CompileSetup) { + Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) } } - def get(): Option[(Analysis, CompileSetup)] = - try { Some(getUncaught()) } catch { case _: Exception => None } - def getUncaught(): (Analysis, CompileSetup) = - Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) } - } + def get(): Option[(Analysis, CompileSetup)] = + try { Some(getUncaught()) } catch { case _: Exception => None } + def getUncaught(): (Analysis, CompileSetup) = + Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader) } + } } \ No newline at end of file diff --git a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala index f3e13d23a..724c81ade 100644 --- a/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala +++ b/compile/persist/src/main/scala/sbt/inc/TextAnalysisFormat.scala @@ -2,465 +2,469 @@ package sbt package inc import java.io._ -import sbt.{CompileSetup, Relation} -import xsbti.api.{Compilation, Source} -import xsbti.compile.{MultipleOutput, SingleOutput} +import sbt.{ CompileSetup, Relation } +import xsbti.api.{ Compilation, Source } +import xsbti.compile.{ MultipleOutput, SingleOutput } import javax.xml.bind.DatatypeConverter - // Very simple timer for timing repeated code sections. // TODO: Temporary. Remove once we've milked all available performance gains. 
private[inc] object FormatTimer { - private val timers = scala.collection.mutable.Map[String, Long]() - private val printTimings = "true" == System.getProperty("sbt.analysis.debug.timing") + private val timers = scala.collection.mutable.Map[String, Long]() + private val printTimings = "true" == System.getProperty("sbt.analysis.debug.timing") - def aggregate[T](key: String)(f: => T) = { - val start = System.nanoTime() - val ret = f - val elapsed = System.nanoTime() - start - timers.update(key, timers.getOrElseUpdate(key, 0) + elapsed) - ret - } + def aggregate[T](key: String)(f: => T) = { + val start = System.nanoTime() + val ret = f + val elapsed = System.nanoTime() - start + timers.update(key, timers.getOrElseUpdate(key, 0) + elapsed) + ret + } - def time[T](key: String)(f: => T) = { - val ret = aggregate(key)(f) - close(key) - ret - } + def time[T](key: String)(f: => T) = { + val ret = aggregate(key)(f) + close(key) + ret + } - def close(key: String) { - if (printTimings) { - println("[%s] %dms".format(key, timers.getOrElse(key, 0L) / 1000000)) - } - timers.remove(key) - } + def close(key: String) { + if (printTimings) { + println("[%s] %dms".format(key, timers.getOrElse(key, 0L) / 1000000)) + } + timers.remove(key) + } } - class ReadException(s: String) extends Exception(s) { - def this(expected: String, found: String) = this("Expected: %s. Found: %s.".format(expected, found)) + def this(expected: String, found: String) = this("Expected: %s. Found: %s.".format(expected, found)) } class EOFException extends ReadException("Unexpected EOF.") - // A text-based serialization format for Analysis objects. // This code has been tuned for high performance, and therefore has non-idiomatic areas. // Please refrain from making changes that significantly degrade read/write performance on large analysis files. object TextAnalysisFormat { - // Some types are not required for external inspection/manipulation of the analysis file, - // and are complex to serialize as text. 
So we serialize them as base64-encoded sbinary-serialized blobs. - // TODO: This is a big performance hit. Figure out a more efficient way to serialize API objects? - import sbinary.DefaultProtocol.{immutableMapFormat, immutableSetFormat, StringFormat, tuple2Format} - import AnalysisFormats._ - implicit val compilationF = xsbt.api.CompilationFormat + // Some types are not required for external inspection/manipulation of the analysis file, + // and are complex to serialize as text. So we serialize them as base64-encoded sbinary-serialized blobs. + // TODO: This is a big performance hit. Figure out a more efficient way to serialize API objects? + import sbinary.DefaultProtocol.{ immutableMapFormat, immutableSetFormat, StringFormat, tuple2Format } + import AnalysisFormats._ + implicit val compilationF = xsbt.api.CompilationFormat - def write(out: Writer, analysis: Analysis, setup: CompileSetup) { - VersionF.write(out) - // We start with writing compile setup which contains value of the `nameHashing` - // flag that is needed to properly deserialize relations - FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } - // Next we write relations because that's the part of greatest interest to external readers, - // who can abort reading early once they're read them. 
- FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) } - FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) } - FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) } - FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) } - FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) } - out.flush() - } - - def read(in: BufferedReader): (Analysis, CompileSetup) = { - VersionF.read(in) - val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } - val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) } - val stamps = FormatTimer.time("read stamps") { StampsF.read(in) } - val apis = FormatTimer.time("read apis") { APIsF.read(in) } - val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) } - val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) } - - (Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup) - } - - private[this] object VersionF { - val currentVersion = "5" - - def write(out: Writer) { - out.write("format version: %s\n".format(currentVersion)) - } - - private val versionPattern = """format version: (\w+)""".r - def read(in: BufferedReader) { - in.readLine() match { - case versionPattern(version) => validateVersion(version) - case s: String => throw new ReadException("\"format version: \"", s) - case null => throw new EOFException - } - } - - def validateVersion(version: String) { - // TODO: Support backwards compatibility? 
- if (version != currentVersion) { - throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) - } - } + def write(out: Writer, analysis: Analysis, setup: CompileSetup) { + VersionF.write(out) + // We start with writing compile setup which contains value of the `nameHashing` + // flag that is needed to properly deserialize relations + FormatTimer.time("write setup") { CompileSetupF.write(out, setup) } + // Next we write relations because that's the part of greatest interest to external readers, + // who can abort reading early once they're read them. + FormatTimer.time("write relations") { RelationsF.write(out, analysis.relations) } + FormatTimer.time("write stamps") { StampsF.write(out, analysis.stamps) } + FormatTimer.time("write apis") { APIsF.write(out, analysis.apis) } + FormatTimer.time("write sourceinfos") { SourceInfosF.write(out, analysis.infos) } + FormatTimer.time("write compilations") { CompilationsF.write(out, analysis.compilations) } + out.flush() } - private[this] object RelationsF { - object Headers { - val srcProd = "products" - val binaryDep = "binary dependencies" - val directSrcDep = "direct source dependencies" - val directExternalDep = "direct external dependencies" - val internalSrcDepPI = "public inherited source dependencies" - val externalDepPI = "public inherited external dependencies" - val classes = "class names" + def read(in: BufferedReader): (Analysis, CompileSetup) = { + VersionF.read(in) + val setup = FormatTimer.time("read setup") { CompileSetupF.read(in) } + val relations = FormatTimer.time("read relations") { RelationsF.read(in, setup.nameHashing) } + val stamps = FormatTimer.time("read stamps") { StampsF.read(in) } + val apis = FormatTimer.time("read apis") { APIsF.read(in) } + val infos = FormatTimer.time("read sourceinfos") { SourceInfosF.read(in) } + val compilations = FormatTimer.time("read compilations") { CompilationsF.read(in) } - val 
memberRefInternalDep = "member reference internal dependencies" - val memberRefExternalDep = "member reference external dependencies" - val inheritanceInternalDep = "inheritance internal dependencies" - val inheritanceExternalDep = "inheritance external dependencies" + (Analysis.Empty.copy(stamps, apis, relations, infos, compilations), setup) + } - val usedNames = "used names" - } + private[this] object VersionF { + val currentVersion = "5" - def write(out: Writer, relations: Relations) { - def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) { - writeHeader(out, header) - writeSize(out, rel.size) - // We sort for ease of debugging and for more efficient reconstruction when reading. - // Note that we don't share code with writeMap. Each is implemented more efficiently - // than the shared code would be, and the difference is measurable on large analyses. - rel.forwardMap.toSeq.sortBy(_._1).foreach { case (k, vs) => - val kStr = k.toString - vs.toSeq.sorted foreach { v => - out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") - } - } - } + def write(out: Writer) { + out.write("format version: %s\n".format(currentVersion)) + } - val nameHashing = relations.nameHashing - writeRelation(Headers.srcProd, relations.srcProd) - writeRelation(Headers.binaryDep, relations.binaryDep) + private val versionPattern = """format version: (\w+)""".r + def read(in: BufferedReader) { + in.readLine() match { + case versionPattern(version) => validateVersion(version) + case s: String => throw new ReadException("\"format version: \"", s) + case null => throw new EOFException + } + } - val direct = if (nameHashing) Relations.emptySource else relations.direct - val publicInherited = if (nameHashing) - Relations.emptySource else relations.publicInherited + def validateVersion(version: String) { + // TODO: Support backwards compatibility? 
+ if (version != currentVersion) { + throw new ReadException("File uses format version %s, but we are compatible with version %s only.".format(version, currentVersion)) + } + } + } - val memberRef = if (nameHashing) - relations.memberRef else Relations.emptySourceDependencies - val inheritance = if (nameHashing) - relations.inheritance else Relations.emptySourceDependencies - val names = if (nameHashing) relations.names else Relation.empty[File, String] + private[this] object RelationsF { + object Headers { + val srcProd = "products" + val binaryDep = "binary dependencies" + val directSrcDep = "direct source dependencies" + val directExternalDep = "direct external dependencies" + val internalSrcDepPI = "public inherited source dependencies" + val externalDepPI = "public inherited external dependencies" + val classes = "class names" - writeRelation(Headers.directSrcDep, direct.internal) - writeRelation(Headers.directExternalDep, direct.external) - writeRelation(Headers.internalSrcDepPI, publicInherited.internal) - writeRelation(Headers.externalDepPI, publicInherited.external) + val memberRefInternalDep = "member reference internal dependencies" + val memberRefExternalDep = "member reference external dependencies" + val inheritanceInternalDep = "inheritance internal dependencies" + val inheritanceExternalDep = "inheritance external dependencies" - writeRelation(Headers.memberRefInternalDep, memberRef.internal) - writeRelation(Headers.memberRefExternalDep, memberRef.external) - writeRelation(Headers.inheritanceInternalDep, inheritance.internal) - writeRelation(Headers.inheritanceExternalDep, inheritance.external) + val usedNames = "used names" + } - writeRelation(Headers.classes, relations.classes) - writeRelation(Headers.usedNames, names) - } + def write(out: Writer, relations: Relations) { + def writeRelation[T](header: String, rel: Relation[File, T])(implicit ord: Ordering[T]) { + writeHeader(out, header) + writeSize(out, rel.size) + // We sort for ease of 
debugging and for more efficient reconstruction when reading. + // Note that we don't share code with writeMap. Each is implemented more efficiently + // than the shared code would be, and the difference is measurable on large analyses. + rel.forwardMap.toSeq.sortBy(_._1).foreach { + case (k, vs) => + val kStr = k.toString + vs.toSeq.sorted foreach { v => + out.write(kStr); out.write(" -> "); out.write(v.toString); out.write("\n") + } + } + } - def read(in: BufferedReader, nameHashing: Boolean): Relations = { - def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = { - val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator - // Reconstruct the forward map. This is more efficient than Relation.empty ++ items. - var forward: List[(File, Set[T])] = Nil - var currentItem: (File, T) = null - var currentFile: File = null - var currentVals: List[T] = Nil - def closeEntry() { - if (currentFile != null) forward = (currentFile, currentVals.toSet) :: forward - currentFile = currentItem._1 - currentVals = currentItem._2 :: Nil - } - while (items.hasNext) { - currentItem = items.next() - if (currentItem._1 == currentFile) currentVals = currentItem._2 :: currentVals else closeEntry() - } - if (currentItem != null) closeEntry() - Relation.reconstruct(forward.toMap) - } + val nameHashing = relations.nameHashing + writeRelation(Headers.srcProd, relations.srcProd) + writeRelation(Headers.binaryDep, relations.binaryDep) - def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) - def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) + val direct = if (nameHashing) Relations.emptySource else relations.direct + val publicInherited = if (nameHashing) + Relations.emptySource else relations.publicInherited - val srcProd = readFileRelation(Headers.srcProd) - val binaryDep = readFileRelation(Headers.binaryDep) + val memberRef = if (nameHashing) + relations.memberRef else 
Relations.emptySourceDependencies + val inheritance = if (nameHashing) + relations.inheritance else Relations.emptySourceDependencies + val names = if (nameHashing) relations.names else Relation.empty[File, String] - import sbt.inc.Relations.{Source, SourceDependencies, makeSourceDependencies, emptySource, - makeSource, emptySourceDependencies} - val directSrcDeps: Source = { - val internalSrcDep = readFileRelation(Headers.directSrcDep) - val externalDep = readStringRelation(Headers.directExternalDep) - makeSource(internalSrcDep, externalDep) - } - val publicInheritedSrcDeps: Source = { - val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) - val externalDepPI = readStringRelation(Headers.externalDepPI) - makeSource(internalSrcDepPI, externalDepPI) - } - val memberRefSrcDeps: SourceDependencies = { - val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep) - val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep) - makeSourceDependencies(internalMemberRefDep, externalMemberRefDep) - } - val inheritanceSrcDeps: SourceDependencies = { - val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep) - val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep) - makeSourceDependencies(internalInheritanceDep, externalInheritanceDep) - } - // we don't check for emptiness of publicInherited/inheritance relations because - // we assume that invariant that says they are subsets of direct/memberRef holds - assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), - "When name hashing is disabled the `memberRef` relation should be empty.") - assert(!nameHashing || (directSrcDeps == emptySource), - "When name hashing is enabled the `direct` relation should be empty.") - val classes = readStringRelation(Headers.classes) - val names = readStringRelation(Headers.usedNames) + writeRelation(Headers.directSrcDep, direct.internal) + writeRelation(Headers.directExternalDep, 
direct.external) + writeRelation(Headers.internalSrcDepPI, publicInherited.internal) + writeRelation(Headers.externalDepPI, publicInherited.external) - if (nameHashing) - Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) - else { - assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " + - s"should be empty: $names") - Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) - } - } - } + writeRelation(Headers.memberRefInternalDep, memberRef.internal) + writeRelation(Headers.memberRefExternalDep, memberRef.external) + writeRelation(Headers.inheritanceInternalDep, inheritance.internal) + writeRelation(Headers.inheritanceExternalDep, inheritance.external) - private[this] object StampsF { - object Headers { - val products = "product stamps" - val sources = "source stamps" - val binaries = "binary stamps" - val classNames = "class names" - } + writeRelation(Headers.classes, relations.classes) + writeRelation(Headers.usedNames, names) + } - def write(out: Writer, stamps: Stamps) { - def doWriteMap[V](header: String, m: Map[File, V]) = writeMap(out)(header, m, { v: V => v.toString }) + def read(in: BufferedReader, nameHashing: Boolean): Relations = { + def readRelation[T](expectedHeader: String, s2t: String => T): Relation[File, T] = { + val items = readPairs(in)(expectedHeader, new File(_), s2t).toIterator + // Reconstruct the forward map. This is more efficient than Relation.empty ++ items. 
+ var forward: List[(File, Set[T])] = Nil + var currentItem: (File, T) = null + var currentFile: File = null + var currentVals: List[T] = Nil + def closeEntry() { + if (currentFile != null) forward = (currentFile, currentVals.toSet) :: forward + currentFile = currentItem._1 + currentVals = currentItem._2 :: Nil + } + while (items.hasNext) { + currentItem = items.next() + if (currentItem._1 == currentFile) currentVals = currentItem._2 :: currentVals else closeEntry() + } + if (currentItem != null) closeEntry() + Relation.reconstruct(forward.toMap) + } - doWriteMap(Headers.products, stamps.products) - doWriteMap(Headers.sources, stamps.sources) - doWriteMap(Headers.binaries, stamps.binaries) - doWriteMap(Headers.classNames, stamps.classNames) - } + def readFileRelation(expectedHeader: String) = readRelation(expectedHeader, { new File(_) }) + def readStringRelation(expectedHeader: String) = readRelation(expectedHeader, identity[String]) - def read(in: BufferedReader): Stamps = { - def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v) - val products = doReadMap(Headers.products, Stamp.fromString) - val sources = doReadMap(Headers.sources, Stamp.fromString) - val binaries = doReadMap(Headers.binaries, Stamp.fromString) - val classNames = doReadMap(Headers.classNames, identity[String]) + val srcProd = readFileRelation(Headers.srcProd) + val binaryDep = readFileRelation(Headers.binaryDep) - Stamps(products, sources, binaries, classNames) - } - } + import sbt.inc.Relations.{ + Source, + SourceDependencies, + makeSourceDependencies, + emptySource, + makeSource, + emptySourceDependencies + } + val directSrcDeps: Source = { + val internalSrcDep = readFileRelation(Headers.directSrcDep) + val externalDep = readStringRelation(Headers.directExternalDep) + makeSource(internalSrcDep, externalDep) + } + val publicInheritedSrcDeps: Source = { + val internalSrcDepPI = readFileRelation(Headers.internalSrcDepPI) + val externalDepPI = 
readStringRelation(Headers.externalDepPI) + makeSource(internalSrcDepPI, externalDepPI) + } + val memberRefSrcDeps: SourceDependencies = { + val internalMemberRefDep = readFileRelation(Headers.memberRefInternalDep) + val externalMemberRefDep = readStringRelation(Headers.memberRefExternalDep) + makeSourceDependencies(internalMemberRefDep, externalMemberRefDep) + } + val inheritanceSrcDeps: SourceDependencies = { + val internalInheritanceDep = readFileRelation(Headers.inheritanceInternalDep) + val externalInheritanceDep = readStringRelation(Headers.inheritanceExternalDep) + makeSourceDependencies(internalInheritanceDep, externalInheritanceDep) + } + // we don't check for emptiness of publicInherited/inheritance relations because + // we assume that invariant that says they are subsets of direct/memberRef holds + assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), + "When name hashing is disabled the `memberRef` relation should be empty.") + assert(!nameHashing || (directSrcDeps == emptySource), + "When name hashing is enabled the `direct` relation should be empty.") + val classes = readStringRelation(Headers.classes) + val names = readStringRelation(Headers.usedNames) - private[this] object APIsF { - object Headers { - val internal = "internal apis" - val external = "external apis" - } + if (nameHashing) + Relations.make(srcProd, binaryDep, memberRefSrcDeps, inheritanceSrcDeps, classes, names) + else { + assert(names.all.isEmpty, "When `nameHashing` is disabled `names` relation " + + s"should be empty: $names") + Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) + } + } + } - val stringToSource = ObjectStringifier.stringToObj[Source] _ - val sourceToString = ObjectStringifier.objToString[Source] _ + private[this] object StampsF { + object Headers { + val products = "product stamps" + val sources = "source stamps" + val binaries = "binary stamps" + val classNames = "class names" + } - def write(out: Writer, apis: APIs) 
{ - writeMap(out)(Headers.internal, apis.internal, sourceToString, inlineVals=false) - writeMap(out)(Headers.external, apis.external, sourceToString, inlineVals=false) - FormatTimer.close("bytes -> base64") - FormatTimer.close("byte copy") - FormatTimer.close("sbinary write") - } + def write(out: Writer, stamps: Stamps) { + def doWriteMap[V](header: String, m: Map[File, V]) = writeMap(out)(header, m, { v: V => v.toString }) - def read(in: BufferedReader): APIs = { - val internal = readMap(in)(Headers.internal, new File(_), stringToSource) - val external = readMap(in)(Headers.external, identity[String], stringToSource) - FormatTimer.close("base64 -> bytes") - FormatTimer.close("sbinary read") - APIs(internal, external) - } - } + doWriteMap(Headers.products, stamps.products) + doWriteMap(Headers.sources, stamps.sources) + doWriteMap(Headers.binaries, stamps.binaries) + doWriteMap(Headers.classNames, stamps.classNames) + } - private[this] object SourceInfosF { - object Headers { - val infos = "source infos" - } + def read(in: BufferedReader): Stamps = { + def doReadMap[V](expectedHeader: String, s2v: String => V) = readMap(in)(expectedHeader, new File(_), s2v) + val products = doReadMap(Headers.products, Stamp.fromString) + val sources = doReadMap(Headers.sources, Stamp.fromString) + val binaries = doReadMap(Headers.binaries, Stamp.fromString) + val classNames = doReadMap(Headers.classNames, identity[String]) - val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _ - val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _ + Stamps(products, sources, binaries, classNames) + } + } - def write(out: Writer, infos: SourceInfos) { writeMap(out)(Headers.infos, infos.allInfos, sourceInfoToString, inlineVals=false) } - def read(in: BufferedReader): SourceInfos = SourceInfos.make(readMap(in)(Headers.infos, new File(_), stringToSourceInfo)) - } + private[this] object APIsF { + object Headers { + val internal = "internal apis" + val external = "external 
apis" + } - private[this] object CompilationsF { - object Headers { - val compilations = "compilations" - } + val stringToSource = ObjectStringifier.stringToObj[Source] _ + val sourceToString = ObjectStringifier.objToString[Source] _ - val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _ - val compilationToString = ObjectStringifier.objToString[Compilation] _ + def write(out: Writer, apis: APIs) { + writeMap(out)(Headers.internal, apis.internal, sourceToString, inlineVals = false) + writeMap(out)(Headers.external, apis.external, sourceToString, inlineVals = false) + FormatTimer.close("bytes -> base64") + FormatTimer.close("byte copy") + FormatTimer.close("sbinary write") + } - def write(out: Writer, compilations: Compilations) { - writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString) - } + def read(in: BufferedReader): APIs = { + val internal = readMap(in)(Headers.internal, new File(_), stringToSource) + val external = readMap(in)(Headers.external, identity[String], stringToSource) + FormatTimer.close("base64 -> bytes") + FormatTimer.close("sbinary read") + APIs(internal, external) + } + } - def read(in: BufferedReader): Compilations = Compilations.make( - readSeq[Compilation](in)(Headers.compilations, stringToCompilation)) - } + private[this] object SourceInfosF { + object Headers { + val infos = "source infos" + } - private[this] object CompileSetupF { - object Headers { - val outputMode = "output mode" - val outputDir = "output directories" - val compileOptions = "compile options" - val javacOptions = "javac options" - val compilerVersion = "compiler version" - val compileOrder = "compile order" - val nameHashing = "name hashing" - } + val stringToSourceInfo = ObjectStringifier.stringToObj[SourceInfo] _ + val sourceInfoToString = ObjectStringifier.objToString[SourceInfo] _ - private[this] val singleOutputMode = "single" - private[this] val multipleOutputMode = "multiple" - private[this] val singleOutputKey = new 
File("output dir") + def write(out: Writer, infos: SourceInfos) { writeMap(out)(Headers.infos, infos.allInfos, sourceInfoToString, inlineVals = false) } + def read(in: BufferedReader): SourceInfos = SourceInfos.make(readMap(in)(Headers.infos, new File(_), stringToSourceInfo)) + } - def write(out: Writer, setup: CompileSetup) { - val (mode, outputAsMap) = setup.output match { - case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory)) - case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap) - } + private[this] object CompilationsF { + object Headers { + val compilations = "compilations" + } - writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String]) - writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath }) - writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String]) - writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String]) - writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String]) - writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String]) - writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString) - } + val stringToCompilation = ObjectStringifier.stringToObj[Compilation] _ + val compilationToString = ObjectStringifier.objToString[Compilation] _ - def read(in: BufferedReader): CompileSetup = { - def s2f(s: String) = new File(s) - def s2b(s: String): Boolean = s.toBoolean - val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption - val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f) - val compileOptions = readSeq(in)(Headers.compileOptions, identity[String]) - val javacOptions = readSeq(in)(Headers.javacOptions, identity[String]) - val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head - val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head - val 
nameHashing = readSeq(in)(Headers.nameHashing, s2b).head + def write(out: Writer, compilations: Compilations) { + writeSeq(out)(Headers.compilations, compilations.allCompilations, compilationToString) + } - val output = outputDirMode match { - case Some(s) => s match { - case `singleOutputMode` => new SingleOutput { - val outputDirectory = outputAsMap(singleOutputKey) - } - case `multipleOutputMode` => new MultipleOutput { - val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map { - case (src: File, out: File) => new MultipleOutput.OutputGroup { - val sourceDirectory = src - val outputDirectory = out - } - } - } - case str: String => throw new ReadException("Unrecognized output mode: " + str) - } - case None => throw new ReadException("No output mode specified") - } + def read(in: BufferedReader): Compilations = Compilations.make( + readSeq[Compilation](in)(Headers.compilations, stringToCompilation)) + } - new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion, - xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing) - } - } + private[this] object CompileSetupF { + object Headers { + val outputMode = "output mode" + val outputDir = "output directories" + val compileOptions = "compile options" + val javacOptions = "javac options" + val compilerVersion = "compiler version" + val compileOrder = "compile order" + val nameHashing = "name hashing" + } - private[this] object ObjectStringifier { - def objToString[T](o: T)(implicit fmt: sbinary.Format[T]) = { - val baos = new ByteArrayOutputStream() - val out = new sbinary.JavaOutput(baos) - FormatTimer.aggregate("sbinary write") { try { fmt.writes(out, o) } finally { baos.close() } } - val bytes = FormatTimer.aggregate("byte copy") { baos.toByteArray } - FormatTimer.aggregate("bytes -> base64") { DatatypeConverter.printBase64Binary(bytes) } - } + private[this] val singleOutputMode = "single" + private[this] val multipleOutputMode = "multiple" + 
private[this] val singleOutputKey = new File("output dir") - def stringToObj[T](s: String)(implicit fmt: sbinary.Format[T]) = { - val bytes = FormatTimer.aggregate("base64 -> bytes") { DatatypeConverter.parseBase64Binary(s) } - val in = new sbinary.JavaInput(new ByteArrayInputStream(bytes)) - FormatTimer.aggregate("sbinary read") { fmt.reads(in) } - } - } + def write(out: Writer, setup: CompileSetup) { + val (mode, outputAsMap) = setup.output match { + case s: SingleOutput => (singleOutputMode, Map(singleOutputKey -> s.outputDirectory)) + case m: MultipleOutput => (multipleOutputMode, m.outputGroups.map(x => x.sourceDirectory -> x.outputDirectory).toMap) + } - // Various helper functions. + writeSeq(out)(Headers.outputMode, mode :: Nil, identity[String]) + writeMap(out)(Headers.outputDir, outputAsMap, { f: File => f.getPath }) + writeSeq(out)(Headers.compileOptions, setup.options.options, identity[String]) + writeSeq(out)(Headers.javacOptions, setup.options.javacOptions, identity[String]) + writeSeq(out)(Headers.compilerVersion, setup.compilerVersion :: Nil, identity[String]) + writeSeq(out)(Headers.compileOrder, setup.order.name :: Nil, identity[String]) + writeSeq(out)(Headers.nameHashing, setup.nameHashing :: Nil, (b: Boolean) => b.toString) + } - private[this] def writeHeader(out: Writer, header: String) { - out.write(header + ":\n") - } + def read(in: BufferedReader): CompileSetup = { + def s2f(s: String) = new File(s) + def s2b(s: String): Boolean = s.toBoolean + val outputDirMode = readSeq(in)(Headers.outputMode, identity[String]).headOption + val outputAsMap = readMap(in)(Headers.outputDir, s2f, s2f) + val compileOptions = readSeq(in)(Headers.compileOptions, identity[String]) + val javacOptions = readSeq(in)(Headers.javacOptions, identity[String]) + val compilerVersion = readSeq(in)(Headers.compilerVersion, identity[String]).head + val compileOrder = readSeq(in)(Headers.compileOrder, identity[String]).head + val nameHashing = 
readSeq(in)(Headers.nameHashing, s2b).head - private[this] def expectHeader(in: BufferedReader, expectedHeader: String) { - val header = in.readLine() - if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) - } + val output = outputDirMode match { + case Some(s) => s match { + case `singleOutputMode` => new SingleOutput { + val outputDirectory = outputAsMap(singleOutputKey) + } + case `multipleOutputMode` => new MultipleOutput { + val outputGroups: Array[MultipleOutput.OutputGroup] = outputAsMap.toArray.map { + case (src: File, out: File) => new MultipleOutput.OutputGroup { + val sourceDirectory = src + val outputDirectory = out + } + } + } + case str: String => throw new ReadException("Unrecognized output mode: " + str) + } + case None => throw new ReadException("No output mode specified") + } - private[this] def writeSize(out: Writer, n: Int) { - out.write("%d items\n".format(n)) - } + new CompileSetup(output, new CompileOptions(compileOptions, javacOptions), compilerVersion, + xsbti.compile.CompileOrder.valueOf(compileOrder), nameHashing) + } + } - private val itemsPattern = """(\d+) items""".r - private[this] def readSize(in: BufferedReader): Int = { - in.readLine() match { - case itemsPattern(nStr) => Integer.parseInt(nStr) - case s: String => throw new ReadException("\" items\"", s) - case null => throw new EOFException - } - } + private[this] object ObjectStringifier { + def objToString[T](o: T)(implicit fmt: sbinary.Format[T]) = { + val baos = new ByteArrayOutputStream() + val out = new sbinary.JavaOutput(baos) + FormatTimer.aggregate("sbinary write") { try { fmt.writes(out, o) } finally { baos.close() } } + val bytes = FormatTimer.aggregate("byte copy") { baos.toByteArray } + FormatTimer.aggregate("bytes -> base64") { DatatypeConverter.printBase64Binary(bytes) } + } - private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) { - // We write sequences as idx -> 
element maps, for uniformity with maps/relations. - def n = s.length - val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1 - val fmtStr = "%%0%dd".format(numDigits) - // We only use this for relatively short seqs, so creating this extra map won't be a performance hit. - val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap - writeMap(out)(header, m, t2s) - } + def stringToObj[T](s: String)(implicit fmt: sbinary.Format[T]) = { + val bytes = FormatTimer.aggregate("base64 -> bytes") { DatatypeConverter.parseBase64Binary(s) } + val in = new sbinary.JavaInput(new ByteArrayInputStream(bytes)) + FormatTimer.aggregate("sbinary read") { fmt.reads(in) } + } + } - private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] = - (readPairs(in)(expectedHeader, identity[String], s2t) map(_._2)).toSeq + // Various helper functions. - private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean=true)(implicit ord: Ordering[K]) { - writeHeader(out, header) - writeSize(out, m.size) - m.keys.toSeq.sorted foreach { k => - out.write(k.toString) - out.write(" -> ") - if (!inlineVals) out.write("\n") // Put large vals on their own line, to save string munging on read. - out.write(v2s(m(k))) - out.write("\n") - } - } + private[this] def writeHeader(out: Writer, header: String) { + out.write(header + ":\n") + } - private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = { - def toPair(s: String): (K, V) = { - if (s == null) throw new EOFException - val p = s.indexOf(" -> ") - val k = s2k(s.substring(0, p)) - // Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob. 
- val v = s2v(if (p == s.length - 4) in.readLine() else s.substring(p + 4)) - (k, v) - } - expectHeader(in, expectedHeader) - val n = readSize(in) - for (i <- 0 until n) yield toPair(in.readLine()) - } + private[this] def expectHeader(in: BufferedReader, expectedHeader: String) { + val header = in.readLine() + if (header != expectedHeader + ":") throw new ReadException(expectedHeader, if (header == null) "EOF" else header) + } - private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = { - readPairs(in)(expectedHeader, s2k, s2v).toMap - } + private[this] def writeSize(out: Writer, n: Int) { + out.write("%d items\n".format(n)) + } + + private val itemsPattern = """(\d+) items""".r + private[this] def readSize(in: BufferedReader): Int = { + in.readLine() match { + case itemsPattern(nStr) => Integer.parseInt(nStr) + case s: String => throw new ReadException("\" items\"", s) + case null => throw new EOFException + } + } + + private[this] def writeSeq[T](out: Writer)(header: String, s: Seq[T], t2s: T => String) { + // We write sequences as idx -> element maps, for uniformity with maps/relations. + def n = s.length + val numDigits = if (n < 2) 1 else math.log10(n - 1).toInt + 1 + val fmtStr = "%%0%dd".format(numDigits) + // We only use this for relatively short seqs, so creating this extra map won't be a performance hit. 
+ val m: Map[String, T] = s.zipWithIndex.map(x => fmtStr.format(x._2) -> x._1).toMap + writeMap(out)(header, m, t2s) + } + + private[this] def readSeq[T](in: BufferedReader)(expectedHeader: String, s2t: String => T): Seq[T] = + (readPairs(in)(expectedHeader, identity[String], s2t) map (_._2)).toSeq + + private[this] def writeMap[K, V](out: Writer)(header: String, m: Map[K, V], v2s: V => String, inlineVals: Boolean = true)(implicit ord: Ordering[K]) { + writeHeader(out, header) + writeSize(out, m.size) + m.keys.toSeq.sorted foreach { k => + out.write(k.toString) + out.write(" -> ") + if (!inlineVals) out.write("\n") // Put large vals on their own line, to save string munging on read. + out.write(v2s(m(k))) + out.write("\n") + } + } + + private[this] def readPairs[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Traversable[(K, V)] = { + def toPair(s: String): (K, V) = { + if (s == null) throw new EOFException + val p = s.indexOf(" -> ") + val k = s2k(s.substring(0, p)) + // Pair is either "a -> b" or "a -> \nb". This saves us a lot of substring munging when b is a large blob. 
+ val v = s2v(if (p == s.length - 4) in.readLine() else s.substring(p + 4)) + (k, v) + } + expectHeader(in, expectedHeader) + val n = readSize(in) + for (i <- 0 until n) yield toPair(in.readLine()) + } + + private[this] def readMap[K, V](in: BufferedReader)(expectedHeader: String, s2k: String => K, s2v: String => V): Map[K, V] = { + readPairs(in)(expectedHeader, s2k, s2v).toMap + } } diff --git a/compile/persist/src/main/scala/xsbt/api/CompilationFormat.scala b/compile/persist/src/main/scala/xsbt/api/CompilationFormat.scala index ac469b470..6a1a164dd 100644 --- a/compile/persist/src/main/scala/xsbt/api/CompilationFormat.scala +++ b/compile/persist/src/main/scala/xsbt/api/CompilationFormat.scala @@ -6,11 +6,11 @@ import sbinary._ object CompilationFormat extends Format[Compilation] { import java.io._ def reads(in: Input): Compilation = { - val oin = new ObjectInputStream(new InputWrapperStream(in)) - try { oin.readObject.asInstanceOf[Compilation] } finally { oin.close() } + val oin = new ObjectInputStream(new InputWrapperStream(in)) + try { oin.readObject.asInstanceOf[Compilation] } finally { oin.close() } } def writes(out: Output, src: Compilation) { val oout = new ObjectOutputStream(new OutputWrapperStream(out)) - try { oout.writeObject(src) } finally { oout.close() } + try { oout.writeObject(src) } finally { oout.close() } } } diff --git a/compile/persist/src/main/scala/xsbt/api/SourceFormat.scala b/compile/persist/src/main/scala/xsbt/api/SourceFormat.scala index dca287cfa..34a4489e7 100644 --- a/compile/persist/src/main/scala/xsbt/api/SourceFormat.scala +++ b/compile/persist/src/main/scala/xsbt/api/SourceFormat.scala @@ -3,37 +3,33 @@ */ package xsbt.api - import xsbti.SafeLazy - import xsbti.api._ - import sbt.Using - import sbinary._ - import DefaultProtocol._ - import Operations.{read,write} - import java.io.File - import scala.collection.mutable +import xsbti.SafeLazy +import xsbti.api._ +import sbt.Using +import sbinary._ +import DefaultProtocol._ +import 
Operations.{ read, write } +import java.io.File +import scala.collection.mutable -object SourceFormat extends Format[Source] -{ - import java.io._ - def reads(in: Input): Source = - { - val oin = new ObjectInputStream(new InputWrapperStream(in)) - try { oin.readObject.asInstanceOf[Source] } finally { oin.close() } - } - def writes(out: Output, src: Source) - { - val oout = new ObjectOutputStream(new OutputWrapperStream(out)) - try { oout.writeObject(src) } finally { oout.close() } - } +object SourceFormat extends Format[Source] { + import java.io._ + def reads(in: Input): Source = + { + val oin = new ObjectInputStream(new InputWrapperStream(in)) + try { oin.readObject.asInstanceOf[Source] } finally { oin.close() } + } + def writes(out: Output, src: Source) { + val oout = new ObjectOutputStream(new OutputWrapperStream(out)) + try { oout.writeObject(src) } finally { oout.close() } + } } -final class InputWrapperStream(in: Input) extends java.io.InputStream -{ - def toInt(b: Byte) = if(b < 0) b + 256 else b.toInt - def read() = try { toInt(in.readByte) } catch { case e: sbinary.EOF => -1 } - override def read(b: Array[Byte], off: Int, len: Int) = in.readTo(b, off, len) +final class InputWrapperStream(in: Input) extends java.io.InputStream { + def toInt(b: Byte) = if (b < 0) b + 256 else b.toInt + def read() = try { toInt(in.readByte) } catch { case e: sbinary.EOF => -1 } + override def read(b: Array[Byte], off: Int, len: Int) = in.readTo(b, off, len) } -final class OutputWrapperStream(out: Output) extends java.io.OutputStream -{ - override def write(bs: Array[Byte], off: Int, len: Int) = out.writeAll(bs, off, len) - def write(b: Int) = out.writeByte(b.toByte) +final class OutputWrapperStream(out: Output) extends java.io.OutputStream { + override def write(bs: Array[Byte], off: Int, len: Int) = out.writeAll(bs, off, len) + def write(b: Int) = out.writeByte(b.toByte) } diff --git a/compile/src/main/scala/sbt/ClasspathOptions.scala 
b/compile/src/main/scala/sbt/ClasspathOptions.scala index 548ec888e..638f4d1fc 100644 --- a/compile/src/main/scala/sbt/ClasspathOptions.scala +++ b/compile/src/main/scala/sbt/ClasspathOptions.scala @@ -4,11 +4,10 @@ package sbt final case class ClasspathOptions(bootLibrary: Boolean, compiler: Boolean, extra: Boolean, autoBoot: Boolean, filterLibrary: Boolean) extends xsbti.compile.ClasspathOptions -object ClasspathOptions -{ - def manual = ClasspathOptions(false, false, false, true, false) - def boot = ClasspathOptions(true, false, false, true, true) - def repl = auto - def javac(compiler: Boolean) = new ClasspathOptions(false, compiler, false, false, false) - def auto = ClasspathOptions(true, true, true, true, true) +object ClasspathOptions { + def manual = ClasspathOptions(false, false, false, true, false) + def boot = ClasspathOptions(true, false, false, true, true) + def repl = auto + def javac(compiler: Boolean) = new ClasspathOptions(false, compiler, false, false, false) + def auto = ClasspathOptions(true, true, true, true, true) } \ No newline at end of file diff --git a/compile/src/main/scala/sbt/LoggerReporter.scala b/compile/src/main/scala/sbt/LoggerReporter.scala index 69d444e5f..8156e6343 100644 --- a/compile/src/main/scala/sbt/LoggerReporter.scala +++ b/compile/src/main/scala/sbt/LoggerReporter.scala @@ -8,141 +8,128 @@ package sbt // see licenses/LICENSE_Scala // Original author: Martin Odersky - import xsbti.{Maybe,Position,Problem,Reporter,Severity} - import java.io.File - import java.util.EnumMap - import scala.collection.mutable - import LoggerReporter._ - import Logger.{m2o,o2m,position,problem} - import Severity.{Error,Info => SInfo,Warn} +import xsbti.{ Maybe, Position, Problem, Reporter, Severity } +import java.io.File +import java.util.EnumMap +import scala.collection.mutable +import LoggerReporter._ +import Logger.{ m2o, o2m, position, problem } +import Severity.{ Error, Info => SInfo, Warn } -object LoggerReporter -{ - final class 
PositionKey(pos: Position) - { - def offset = pos.offset - def sourceFile = pos.sourceFile +object LoggerReporter { + final class PositionKey(pos: Position) { + def offset = pos.offset + def sourceFile = pos.sourceFile - override def equals(o: Any) = - o match { case pk: PositionKey => equalsKey(pk); case _ => false } + override def equals(o: Any) = + o match { case pk: PositionKey => equalsKey(pk); case _ => false } - def equalsKey(o: PositionKey) = - m2o(pos.offset) == m2o(o.offset) && - m2o(pos.sourceFile) == m2o(o.sourceFile) - override def hashCode = - m2o(pos.offset).hashCode * 31 - m2o(pos.sourceFile).hashCode - } + def equalsKey(o: PositionKey) = + m2o(pos.offset) == m2o(o.offset) && + m2o(pos.sourceFile) == m2o(o.sourceFile) + override def hashCode = + m2o(pos.offset).hashCode * 31 + m2o(pos.sourceFile).hashCode + } - def countElementsAsString(n: Int, elements: String): String = - n match { - case 0 => "no " + elements + "s" - case 1 => "one " + elements - case 2 => "two " + elements + "s" - case 3 => "three " + elements + "s" - case 4 => "four " + elements + "s" - case _ => "" + n + " " + elements + "s" - } + def countElementsAsString(n: Int, elements: String): String = + n match { + case 0 => "no " + elements + "s" + case 1 => "one " + elements + case 2 => "two " + elements + "s" + case 3 => "three " + elements + "s" + case 4 => "four " + elements + "s" + case _ => "" + n + " " + elements + "s" + } } - -class LoggerReporter(maximumErrors: Int, log: Logger, sourcePositionMapper: Position => Position = {p => p}) extends xsbti.Reporter -{ - val positions = new mutable.HashMap[PositionKey, Severity] - val count = new EnumMap[Severity, Int](classOf[Severity]) - private[this] val allProblems = new mutable.ListBuffer[Problem] - reset() - - def reset() - { - count.put(Warn, 0) - count.put(SInfo, 0) - count.put(Error, 0) - positions.clear() - allProblems.clear() - } - def hasWarnings = count.get(Warn) > 0 - def hasErrors = count.get(Error) > 0 - def problems: 
Array[Problem] = allProblems.toArray - def comment(pos: Position, msg: String) {} +class LoggerReporter(maximumErrors: Int, log: Logger, sourcePositionMapper: Position => Position = { p => p }) extends xsbti.Reporter { + val positions = new mutable.HashMap[PositionKey, Severity] + val count = new EnumMap[Severity, Int](classOf[Severity]) + private[this] val allProblems = new mutable.ListBuffer[Problem] - def printSummary() - { - val warnings = count.get(Severity.Warn) - if(warnings > 0) - log.warn(countElementsAsString(warnings, "warning") + " found") - val errors = count.get(Severity.Error) - if(errors > 0) - log.error(countElementsAsString(errors, "error") + " found") - } + reset() - def inc(sev: Severity) = count.put(sev, count.get(sev) + 1) + def reset() { + count.put(Warn, 0) + count.put(SInfo, 0) + count.put(Error, 0) + positions.clear() + allProblems.clear() + } + def hasWarnings = count.get(Warn) > 0 + def hasErrors = count.get(Error) > 0 + def problems: Array[Problem] = allProblems.toArray + def comment(pos: Position, msg: String) {} - def display(pos: Position, msg: String, severity: Severity) - { - inc(severity) - if(severity != Error || maximumErrors <= 0 || count.get(severity) <= maximumErrors) - print(severityLogger(severity), pos, msg) - } - def severityLogger(severity: Severity): (=> String) => Unit = - m => - { - (severity match - { - case Error => log.error(m) - case Warn => log.warn(m) - case SInfo => log.info(m) - }) - } + def printSummary() { + val warnings = count.get(Severity.Warn) + if (warnings > 0) + log.warn(countElementsAsString(warnings, "warning") + " found") + val errors = count.get(Severity.Error) + if (errors > 0) + log.error(countElementsAsString(errors, "error") + " found") + } - def print(log: (=> String) => Unit, pos: Position, msg: String) - { - if(pos.sourcePath.isEmpty && pos.line.isEmpty) - log(msg) - else - { - val sourcePrefix = m2o(pos.sourcePath).getOrElse("") - val lineNumberString = m2o(pos.line).map(":" + _ + 
":").getOrElse(":") + " " - log(sourcePrefix + lineNumberString + msg) - val lineContent = pos.lineContent - if(!lineContent.isEmpty) - { - log(lineContent) - for(space <- m2o(pos.pointerSpace)) - log(space + "^") // pointer to the column position of the error/warning - } - } - } - - def log(pos: Position, msg: String, severity: Severity): Unit = - { - val mappedPos = sourcePositionMapper(pos) - allProblems += problem("", mappedPos, msg, severity) - severity match - { - case Warn | Error => - { - if(!testAndLog(mappedPos, severity)) - display(mappedPos, msg, severity) - } - case _ => display(mappedPos, msg, severity) - } - } + def inc(sev: Severity) = count.put(sev, count.get(sev) + 1) - def testAndLog(pos: Position, severity: Severity): Boolean = - { - if(pos.offset.isEmpty || pos.sourceFile.isEmpty) - false - else - { - val key = new PositionKey(pos) - if(positions.get(key).map(_.ordinal >= severity.ordinal).getOrElse(false)) - true - else - { - positions(key) = severity - false - } - } - } + def display(pos: Position, msg: String, severity: Severity) { + inc(severity) + if (severity != Error || maximumErrors <= 0 || count.get(severity) <= maximumErrors) + print(severityLogger(severity), pos, msg) + } + def severityLogger(severity: Severity): (=> String) => Unit = + m => + { + (severity match { + case Error => log.error(m) + case Warn => log.warn(m) + case SInfo => log.info(m) + }) + } + + def print(log: (=> String) => Unit, pos: Position, msg: String) { + if (pos.sourcePath.isEmpty && pos.line.isEmpty) + log(msg) + else { + val sourcePrefix = m2o(pos.sourcePath).getOrElse("") + val lineNumberString = m2o(pos.line).map(":" + _ + ":").getOrElse(":") + " " + log(sourcePrefix + lineNumberString + msg) + val lineContent = pos.lineContent + if (!lineContent.isEmpty) { + log(lineContent) + for (space <- m2o(pos.pointerSpace)) + log(space + "^") // pointer to the column position of the error/warning + } + } + } + + def log(pos: Position, msg: String, severity: 
Severity): Unit = + { + val mappedPos = sourcePositionMapper(pos) + allProblems += problem("", mappedPos, msg, severity) + severity match { + case Warn | Error => + { + if (!testAndLog(mappedPos, severity)) + display(mappedPos, msg, severity) + } + case _ => display(mappedPos, msg, severity) + } + } + + def testAndLog(pos: Position, severity: Severity): Boolean = + { + if (pos.offset.isEmpty || pos.sourceFile.isEmpty) + false + else { + val key = new PositionKey(pos) + if (positions.get(key).map(_.ordinal >= severity.ordinal).getOrElse(false)) + true + else { + positions(key) = severity + false + } + } + } } \ No newline at end of file diff --git a/compile/src/main/scala/sbt/compiler/AnalyzingCompiler.scala b/compile/src/main/scala/sbt/compiler/AnalyzingCompiler.scala index 3c8ce50de..f970b98d4 100644 --- a/compile/src/main/scala/sbt/compiler/AnalyzingCompiler.scala +++ b/compile/src/main/scala/sbt/compiler/AnalyzingCompiler.scala @@ -4,160 +4,158 @@ package sbt package compiler - import xsbti.{AnalysisCallback, Logger => xLogger, Reporter} - import xsbti.compile.{CachedCompiler, CachedCompilerProvider, DependencyChanges, GlobalsCache, CompileProgress, Output} - import java.io.File - import java.net.{URL, URLClassLoader} +import xsbti.{ AnalysisCallback, Logger => xLogger, Reporter } +import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, DependencyChanges, GlobalsCache, CompileProgress, Output } +import java.io.File +import java.net.{ URL, URLClassLoader } -/** Interface to the Scala compiler that uses the dependency analysis plugin. This class uses the Scala library and compiler -* provided by scalaInstance. This class requires a ComponentManager in order to obtain the interface code to scalac and -* the analysis plugin. 
Because these call Scala code for a different Scala version than the one used for this class, they must -* be compiled for the version of Scala being used.*/ -final class AnalyzingCompiler private(val scalaInstance: xsbti.compile.ScalaInstance, val provider: CompilerInterfaceProvider, val cp: xsbti.compile.ClasspathOptions, onArgsF: Seq[String] => Unit) extends CachedCompilerProvider -{ - def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions) = - this(scalaInstance, provider, cp, _ => ()) - def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider) = this(scalaInstance, provider, ClasspathOptions.auto) +/** + * Interface to the Scala compiler that uses the dependency analysis plugin. This class uses the Scala library and compiler + * provided by scalaInstance. This class requires a ComponentManager in order to obtain the interface code to scalac and + * the analysis plugin. Because these call Scala code for a different Scala version than the one used for this class, they must + * be compiled for the version of Scala being used. 
+ */ +final class AnalyzingCompiler private (val scalaInstance: xsbti.compile.ScalaInstance, val provider: CompilerInterfaceProvider, val cp: xsbti.compile.ClasspathOptions, onArgsF: Seq[String] => Unit) extends CachedCompilerProvider { + def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions) = + this(scalaInstance, provider, cp, _ => ()) + def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider) = this(scalaInstance, provider, ClasspathOptions.auto) - @deprecated("A Logger is no longer needed.", "0.13.0") - def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider, log: Logger) = this(scalaInstance, provider) + @deprecated("A Logger is no longer needed.", "0.13.0") + def this(scalaInstance: ScalaInstance, provider: CompilerInterfaceProvider, log: Logger) = this(scalaInstance, provider) - @deprecated("A Logger is no longer needed.", "0.13.0") - def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions, log: Logger) = this(scalaInstance, provider, cp) + @deprecated("A Logger is no longer needed.", "0.13.0") + def this(scalaInstance: xsbti.compile.ScalaInstance, provider: CompilerInterfaceProvider, cp: xsbti.compile.ClasspathOptions, log: Logger) = this(scalaInstance, provider, cp) - def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, cp, f) + def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, cp, f) - def apply(sources: Seq[File], changes: DependencyChanges, classpath: Seq[File], singleOutput: File, options: Seq[String], callback: AnalysisCallback, maximumErrors: Int, cache: GlobalsCache, log: Logger) - { - val arguments = (new CompilerArguments(scalaInstance, cp))(Nil, classpath, None, options) - val output = CompileOutput(singleOutput) - compile(sources, changes, arguments, output, callback, 
new LoggerReporter(maximumErrors, log, p => p), cache, log, None) - } + def apply(sources: Seq[File], changes: DependencyChanges, classpath: Seq[File], singleOutput: File, options: Seq[String], callback: AnalysisCallback, maximumErrors: Int, cache: GlobalsCache, log: Logger) { + val arguments = (new CompilerArguments(scalaInstance, cp))(Nil, classpath, None, options) + val output = CompileOutput(singleOutput) + compile(sources, changes, arguments, output, callback, new LoggerReporter(maximumErrors, log, p => p), cache, log, None) + } - def compile(sources: Seq[File], changes: DependencyChanges, options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, log: Logger, progressOpt: Option[CompileProgress]): Unit = - { - val cached = cache(options.toArray, output, !changes.isEmpty, this, log, reporter) - val progress = progressOpt getOrElse IgnoreProgress - compile(sources, changes, callback, log, reporter, progress, cached) - } + def compile(sources: Seq[File], changes: DependencyChanges, options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, log: Logger, progressOpt: Option[CompileProgress]): Unit = + { + val cached = cache(options.toArray, output, !changes.isEmpty, this, log, reporter) + val progress = progressOpt getOrElse IgnoreProgress + compile(sources, changes, callback, log, reporter, progress, cached) + } - def compile(sources: Seq[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, reporter: Reporter, progress: CompileProgress, compiler: CachedCompiler) - { - onArgsF(compiler.commandArguments(sources.toArray)) - call("xsbt.CompilerInterface", "run", log)( - classOf[Array[File]], classOf[DependencyChanges], classOf[AnalysisCallback], classOf[xLogger], classOf[Reporter], classOf[CompileProgress], classOf[CachedCompiler]) ( - sources.toArray, changes, callback, log, reporter, progress, compiler ) - } - def newCachedCompiler(arguments: 
Array[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler = - newCachedCompiler(arguments: Seq[String], output, log, reporter, resident) + def compile(sources: Seq[File], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, reporter: Reporter, progress: CompileProgress, compiler: CachedCompiler) { + onArgsF(compiler.commandArguments(sources.toArray)) + call("xsbt.CompilerInterface", "run", log)( + classOf[Array[File]], classOf[DependencyChanges], classOf[AnalysisCallback], classOf[xLogger], classOf[Reporter], classOf[CompileProgress], classOf[CachedCompiler])( + sources.toArray, changes, callback, log, reporter, progress, compiler) + } + def newCachedCompiler(arguments: Array[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler = + newCachedCompiler(arguments: Seq[String], output, log, reporter, resident) - def newCachedCompiler(arguments: Seq[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler = - { - call("xsbt.CompilerInterface", "newCompiler", log)( - classOf[Array[String]], classOf[Output], classOf[xLogger], classOf[Reporter], classOf[Boolean] ) ( - arguments.toArray[String] : Array[String], output, log, reporter, resident: java.lang.Boolean ). - asInstanceOf[CachedCompiler] - } + def newCachedCompiler(arguments: Seq[String], output: Output, log: xLogger, reporter: Reporter, resident: Boolean): CachedCompiler = + { + call("xsbt.CompilerInterface", "newCompiler", log)( + classOf[Array[String]], classOf[Output], classOf[xLogger], classOf[Reporter], classOf[Boolean])( + arguments.toArray[String]: Array[String], output, log, reporter, resident: java.lang.Boolean). 
+ asInstanceOf[CachedCompiler] + } - def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit = - doc(sources, classpath, outputDirectory, options, log, new LoggerReporter(maximumErrors, log)) - def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger, reporter: Reporter): Unit = - { - val arguments = (new CompilerArguments(scalaInstance, cp))(sources, classpath, Some(outputDirectory), options) - onArgsF(arguments) - call("xsbt.ScaladocInterface", "run", log) (classOf[Array[String]], classOf[xLogger], classOf[Reporter]) ( - arguments.toArray[String] : Array[String], log, reporter) - } - def console(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit = - { - onArgsF(consoleCommandArguments(classpath, options, log)) - val (classpathString, bootClasspath) = consoleClasspaths(classpath) - val (names, values) = bindings.unzip - call("xsbt.ConsoleInterface", "run", log)( - classOf[Array[String]], classOf[String], classOf[String], classOf[String], classOf[String], classOf[ClassLoader], classOf[Array[String]], classOf[Array[Any]], classOf[xLogger])( - options.toArray[String]: Array[String], bootClasspath, classpathString, initialCommands, cleanupCommands, loader.orNull, names.toArray[String], values.toArray[Any], log) - } + def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger): Unit = + doc(sources, classpath, outputDirectory, options, log, new LoggerReporter(maximumErrors, log)) + def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger, reporter: Reporter): Unit = + { + val arguments = (new CompilerArguments(scalaInstance, cp))(sources, classpath, Some(outputDirectory), options) + 
onArgsF(arguments) + call("xsbt.ScaladocInterface", "run", log)(classOf[Array[String]], classOf[xLogger], classOf[Reporter])( + arguments.toArray[String]: Array[String], log, reporter) + } + def console(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit = + { + onArgsF(consoleCommandArguments(classpath, options, log)) + val (classpathString, bootClasspath) = consoleClasspaths(classpath) + val (names, values) = bindings.unzip + call("xsbt.ConsoleInterface", "run", log)( + classOf[Array[String]], classOf[String], classOf[String], classOf[String], classOf[String], classOf[ClassLoader], classOf[Array[String]], classOf[Array[Any]], classOf[xLogger])( + options.toArray[String]: Array[String], bootClasspath, classpathString, initialCommands, cleanupCommands, loader.orNull, names.toArray[String], values.toArray[Any], log) + } - private[this] def consoleClasspaths(classpath: Seq[File]): (String, String) = - { - val arguments = new CompilerArguments(scalaInstance, cp) - val classpathString = CompilerArguments.absString(arguments.finishClasspath(classpath)) - val bootClasspath = if(cp.autoBoot) arguments.createBootClasspathFor(classpath) else "" - (classpathString, bootClasspath) - } - def consoleCommandArguments(classpath: Seq[File], options: Seq[String], log: Logger): Seq[String] = - { - val (classpathString, bootClasspath) = consoleClasspaths(classpath) - val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)( - classOf[Array[String]], classOf[String], classOf[String], classOf[xLogger])( - options.toArray[String]: Array[String], bootClasspath, classpathString, log) - argsObj.asInstanceOf[Array[String]].toSeq - } - def force(log: Logger): Unit = provider(scalaInstance, log) - private def call(interfaceClassName: String, methodName: String, log: Logger)(argTypes: Class[_]*)(args: AnyRef*): AnyRef = - { - val interfaceClass = 
getInterfaceClass(interfaceClassName, log) - val interface = interfaceClass.newInstance.asInstanceOf[AnyRef] - val method = interfaceClass.getMethod(methodName, argTypes : _*) - try { method.invoke(interface, args: _*) } - catch { case e: java.lang.reflect.InvocationTargetException => - e.getCause match { - case c: xsbti.CompileFailed => throw new CompileFailed(c.arguments, c.toString, c.problems) - case t => throw t - } - } - } - private[this] def loader(log: Logger) = - { - val interfaceJar = provider(scalaInstance, log) - // this goes to scalaInstance.loader for scala classes and the loader of this class for xsbti classes - val dual = createDualLoader(scalaInstance.loader, getClass.getClassLoader) - new URLClassLoader(Array(interfaceJar.toURI.toURL), dual) - } - private[this] def getInterfaceClass(name: String, log: Logger) = Class.forName(name, true, loader(log)) - protected def createDualLoader(scalaLoader: ClassLoader, sbtLoader: ClassLoader): ClassLoader = - { - val xsbtiFilter = (name: String) => name.startsWith("xsbti.") - val notXsbtiFilter = (name: String) => !xsbtiFilter(name) - new classpath.DualLoader(scalaLoader, notXsbtiFilter, x => true, sbtLoader, xsbtiFilter, x => false) - } - override def toString = "Analyzing compiler (Scala " + scalaInstance.actualVersion + ")" + private[this] def consoleClasspaths(classpath: Seq[File]): (String, String) = + { + val arguments = new CompilerArguments(scalaInstance, cp) + val classpathString = CompilerArguments.absString(arguments.finishClasspath(classpath)) + val bootClasspath = if (cp.autoBoot) arguments.createBootClasspathFor(classpath) else "" + (classpathString, bootClasspath) + } + def consoleCommandArguments(classpath: Seq[File], options: Seq[String], log: Logger): Seq[String] = + { + val (classpathString, bootClasspath) = consoleClasspaths(classpath) + val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)( + classOf[Array[String]], classOf[String], classOf[String], classOf[xLogger])( + 
options.toArray[String]: Array[String], bootClasspath, classpathString, log) + argsObj.asInstanceOf[Array[String]].toSeq + } + def force(log: Logger): Unit = provider(scalaInstance, log) + private def call(interfaceClassName: String, methodName: String, log: Logger)(argTypes: Class[_]*)(args: AnyRef*): AnyRef = + { + val interfaceClass = getInterfaceClass(interfaceClassName, log) + val interface = interfaceClass.newInstance.asInstanceOf[AnyRef] + val method = interfaceClass.getMethod(methodName, argTypes: _*) + try { method.invoke(interface, args: _*) } + catch { + case e: java.lang.reflect.InvocationTargetException => + e.getCause match { + case c: xsbti.CompileFailed => throw new CompileFailed(c.arguments, c.toString, c.problems) + case t => throw t + } + } + } + private[this] def loader(log: Logger) = + { + val interfaceJar = provider(scalaInstance, log) + // this goes to scalaInstance.loader for scala classes and the loader of this class for xsbti classes + val dual = createDualLoader(scalaInstance.loader, getClass.getClassLoader) + new URLClassLoader(Array(interfaceJar.toURI.toURL), dual) + } + private[this] def getInterfaceClass(name: String, log: Logger) = Class.forName(name, true, loader(log)) + protected def createDualLoader(scalaLoader: ClassLoader, sbtLoader: ClassLoader): ClassLoader = + { + val xsbtiFilter = (name: String) => name.startsWith("xsbti.") + val notXsbtiFilter = (name: String) => !xsbtiFilter(name) + new classpath.DualLoader(scalaLoader, notXsbtiFilter, x => true, sbtLoader, xsbtiFilter, x => false) + } + override def toString = "Analyzing compiler (Scala " + scalaInstance.actualVersion + ")" } -object AnalyzingCompiler -{ - import sbt.IO.{copy, createDirectory, zip, jars, unzip, withTemporaryDirectory} +object AnalyzingCompiler { + import sbt.IO.{ copy, createDirectory, zip, jars, unzip, withTemporaryDirectory } - // Note: The Scala build now depends on some details of this method: - // 
https://github.com/jsuereth/scala/commit/3431860048df8d2a381fb85a526097e00154eae0 - /** Extract sources from source jars, compile them with the xsbti interfaces on the classpath, and package the compiled classes and - * any resources from the source jars into a final jar.*/ - def compileSources(sourceJars: Iterable[File], targetJar: File, xsbtiJars: Iterable[File], id: String, compiler: RawCompiler, log: Logger) - { - val isSource = (f: File) => isSourceName(f.getName) - def keepIfSource(files: Set[File]): Set[File] = if(files.exists(isSource)) files else Set() + // Note: The Scala build now depends on some details of this method: + // https://github.com/jsuereth/scala/commit/3431860048df8d2a381fb85a526097e00154eae0 + /** + * Extract sources from source jars, compile them with the xsbti interfaces on the classpath, and package the compiled classes and + * any resources from the source jars into a final jar. + */ + def compileSources(sourceJars: Iterable[File], targetJar: File, xsbtiJars: Iterable[File], id: String, compiler: RawCompiler, log: Logger) { + val isSource = (f: File) => isSourceName(f.getName) + def keepIfSource(files: Set[File]): Set[File] = if (files.exists(isSource)) files else Set() - withTemporaryDirectory { dir => - val extractedSources = (Set[File]() /: sourceJars) { (extracted, sourceJar)=> extracted ++ keepIfSource(unzip(sourceJar, dir)) } - val (sourceFiles, resources) = extractedSources.partition(isSource) - withTemporaryDirectory { outputDirectory => - log.info("'" + id + "' not yet compiled for Scala " + compiler.scalaInstance.actualVersion + ". 
Compiling...") - val start = System.currentTimeMillis - try - { - compiler(sourceFiles.toSeq, compiler.scalaInstance.libraryJar +: (xsbtiJars.toSeq ++ sourceJars), outputDirectory, "-nowarn" :: Nil) - log.info(" Compilation completed in " + (System.currentTimeMillis - start) / 1000.0 + " s") - } - catch { case e: xsbti.CompileFailed => throw new CompileFailed(e.arguments, "Error compiling sbt component '" + id + "'", e.problems) } - import sbt.Path._ - copy(resources x rebase(dir, outputDirectory)) - zip((outputDirectory ***) x_! relativeTo(outputDirectory), targetJar) - } - } - } - private def isSourceName(name: String): Boolean = name.endsWith(".scala") || name.endsWith(".java") + withTemporaryDirectory { dir => + val extractedSources = (Set[File]() /: sourceJars) { (extracted, sourceJar) => extracted ++ keepIfSource(unzip(sourceJar, dir)) } + val (sourceFiles, resources) = extractedSources.partition(isSource) + withTemporaryDirectory { outputDirectory => + log.info("'" + id + "' not yet compiled for Scala " + compiler.scalaInstance.actualVersion + ". Compiling...") + val start = System.currentTimeMillis + try { + compiler(sourceFiles.toSeq, compiler.scalaInstance.libraryJar +: (xsbtiJars.toSeq ++ sourceJars), outputDirectory, "-nowarn" :: Nil) + log.info(" Compilation completed in " + (System.currentTimeMillis - start) / 1000.0 + " s") + } catch { case e: xsbti.CompileFailed => throw new CompileFailed(e.arguments, "Error compiling sbt component '" + id + "'", e.problems) } + import sbt.Path._ + copy(resources x rebase(dir, outputDirectory)) + zip((outputDirectory ***) x_! 
relativeTo(outputDirectory), targetJar) + } + } + } + private def isSourceName(name: String): Boolean = name.endsWith(".scala") || name.endsWith(".java") } private[this] object IgnoreProgress extends CompileProgress { - def startUnit(phase: String, unitPath: String) {} - def advance(current: Int, total: Int) = true + def startUnit(phase: String, unitPath: String) {} + def advance(current: Int, total: Int) = true } diff --git a/compile/src/main/scala/sbt/compiler/CompilerArguments.scala b/compile/src/main/scala/sbt/compiler/CompilerArguments.scala index 85687d37d..a4fec2857 100644 --- a/compile/src/main/scala/sbt/compiler/CompilerArguments.scala +++ b/compile/src/main/scala/sbt/compiler/CompilerArguments.scala @@ -4,70 +4,67 @@ package sbt package compiler - import xsbti.ArtifactInfo - import scala.util - import java.io.File - import CompilerArguments.{abs, absString, BootClasspathOption} +import xsbti.ArtifactInfo +import scala.util +import java.io.File +import CompilerArguments.{ abs, absString, BootClasspathOption } -/** Forms the list of options that is passed to the compiler from the required inputs and other options. -* The directory containing scala-library.jar and scala-compiler.jar (scalaLibDirectory) is required in -* order to add these jars to the boot classpath. The 'scala.home' property must be unset because Scala -* puts jars in that directory on the bootclasspath. Because we use multiple Scala versions, -* this would lead to compiling against the wrong library jar.*/ -final class CompilerArguments(scalaInstance: xsbti.compile.ScalaInstance, cp: xsbti.compile.ClasspathOptions) -{ - def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: Option[File], options: Seq[String]): Seq[String] = - { - checkScalaHomeUnset() - val cpWithCompiler = finishClasspath(classpath) - // Scala compiler's treatment of empty classpath is troublesome (as of 2.9.1). - // We append a random dummy element as workaround. 
- val dummy = "dummy_" + Integer.toHexString(util.Random.nextInt) - val classpathOption = Seq("-classpath", if(cpWithCompiler.isEmpty) dummy else absString(cpWithCompiler)) - val outputOption = outputDirectory map {out => Seq("-d", out.getAbsolutePath)} getOrElse Seq() - options ++ outputOption ++ bootClasspathOption(hasLibrary(classpath)) ++ classpathOption ++ abs(sources) - } - def finishClasspath(classpath: Seq[File]): Seq[File] = - filterLibrary(classpath) ++ include(cp.compiler, scalaInstance.compilerJar) ++ include(cp.extra, scalaInstance.otherJars : _*) - private[this] def include(flag: Boolean, jars: File*) = if(flag) jars else Nil - private[this] def abs(files: Seq[File]) = files.map(_.getAbsolutePath).sortWith(_ < _) - private[this] def checkScalaHomeUnset() - { - val scalaHome = System.getProperty("scala.home") - assert((scalaHome eq null) || scalaHome.isEmpty, "'scala.home' should not be set (was " + scalaHome + ")") - } - def createBootClasspathFor(classpath: Seq[File]) = createBootClasspath(hasLibrary(classpath) || cp.compiler || cp.extra) +/** + * Forms the list of options that is passed to the compiler from the required inputs and other options. + * The directory containing scala-library.jar and scala-compiler.jar (scalaLibDirectory) is required in + * order to add these jars to the boot classpath. The 'scala.home' property must be unset because Scala + * puts jars in that directory on the bootclasspath. Because we use multiple Scala versions, + * this would lead to compiling against the wrong library jar. + */ +final class CompilerArguments(scalaInstance: xsbti.compile.ScalaInstance, cp: xsbti.compile.ClasspathOptions) { + def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: Option[File], options: Seq[String]): Seq[String] = + { + checkScalaHomeUnset() + val cpWithCompiler = finishClasspath(classpath) + // Scala compiler's treatment of empty classpath is troublesome (as of 2.9.1). + // We append a random dummy element as workaround. 
+ val dummy = "dummy_" + Integer.toHexString(util.Random.nextInt) + val classpathOption = Seq("-classpath", if (cpWithCompiler.isEmpty) dummy else absString(cpWithCompiler)) + val outputOption = outputDirectory map { out => Seq("-d", out.getAbsolutePath) } getOrElse Seq() + options ++ outputOption ++ bootClasspathOption(hasLibrary(classpath)) ++ classpathOption ++ abs(sources) + } + def finishClasspath(classpath: Seq[File]): Seq[File] = + filterLibrary(classpath) ++ include(cp.compiler, scalaInstance.compilerJar) ++ include(cp.extra, scalaInstance.otherJars: _*) + private[this] def include(flag: Boolean, jars: File*) = if (flag) jars else Nil + private[this] def abs(files: Seq[File]) = files.map(_.getAbsolutePath).sortWith(_ < _) + private[this] def checkScalaHomeUnset() { + val scalaHome = System.getProperty("scala.home") + assert((scalaHome eq null) || scalaHome.isEmpty, "'scala.home' should not be set (was " + scalaHome + ")") + } + def createBootClasspathFor(classpath: Seq[File]) = createBootClasspath(hasLibrary(classpath) || cp.compiler || cp.extra) - /** Add the correct Scala library jar to the boot classpath if `addLibrary` is true.*/ - def createBootClasspath(addLibrary: Boolean) = - { - val originalBoot = System.getProperty("sun.boot.class.path", "") - if(addLibrary) - { - val newBootPrefix = if(originalBoot.isEmpty) "" else originalBoot + File.pathSeparator - newBootPrefix + scalaInstance.libraryJar.getAbsolutePath - } - else - originalBoot - } - def filterLibrary(classpath: Seq[File]) = if(cp.filterLibrary) classpath filterNot isScalaLibrary else classpath - def hasLibrary(classpath: Seq[File]) = classpath exists isScalaLibrary - private[this] val isScalaLibrary: File => Boolean = file => { - val name = file.getName - (name contains ArtifactInfo.ScalaLibraryID) || file.getName == scalaInstance.libraryJar.getName - } - def bootClasspathOption(addLibrary: Boolean) = if(cp.autoBoot) Seq(BootClasspathOption, createBootClasspath(addLibrary)) else Nil - def 
bootClasspath(addLibrary: Boolean) = if(cp.autoBoot) IO.parseClasspath(createBootClasspath(addLibrary)) else Nil - def bootClasspathFor(classpath: Seq[File]) = bootClasspath(hasLibrary(classpath)) + /** Add the correct Scala library jar to the boot classpath if `addLibrary` is true.*/ + def createBootClasspath(addLibrary: Boolean) = + { + val originalBoot = System.getProperty("sun.boot.class.path", "") + if (addLibrary) { + val newBootPrefix = if (originalBoot.isEmpty) "" else originalBoot + File.pathSeparator + newBootPrefix + scalaInstance.libraryJar.getAbsolutePath + } else + originalBoot + } + def filterLibrary(classpath: Seq[File]) = if (cp.filterLibrary) classpath filterNot isScalaLibrary else classpath + def hasLibrary(classpath: Seq[File]) = classpath exists isScalaLibrary + private[this] val isScalaLibrary: File => Boolean = file => { + val name = file.getName + (name contains ArtifactInfo.ScalaLibraryID) || file.getName == scalaInstance.libraryJar.getName + } + def bootClasspathOption(addLibrary: Boolean) = if (cp.autoBoot) Seq(BootClasspathOption, createBootClasspath(addLibrary)) else Nil + def bootClasspath(addLibrary: Boolean) = if (cp.autoBoot) IO.parseClasspath(createBootClasspath(addLibrary)) else Nil + def bootClasspathFor(classpath: Seq[File]) = bootClasspath(hasLibrary(classpath)) - import Path._ - def extClasspath: Seq[File] = ( IO.parseClasspath(System.getProperty("java.ext.dirs")) * "*.jar" ).get + import Path._ + def extClasspath: Seq[File] = (IO.parseClasspath(System.getProperty("java.ext.dirs")) * "*.jar").get } -object CompilerArguments -{ - val BootClasspathOption = "-bootclasspath" - def abs(files: Seq[File]): Seq[String] = files.map(_.getAbsolutePath) - def abs(files: Set[File]): Seq[String] = abs(files.toSeq) - def absString(files: Seq[File]): String = abs(files).mkString(File.pathSeparator) - def absString(files: Set[File]): String = absString(files.toSeq) +object CompilerArguments { + val BootClasspathOption = "-bootclasspath" + def 
abs(files: Seq[File]): Seq[String] = files.map(_.getAbsolutePath) + def abs(files: Set[File]): Seq[String] = abs(files.toSeq) + def absString(files: Seq[File]): String = abs(files).mkString(File.pathSeparator) + def absString(files: Set[File]): String = absString(files.toSeq) } diff --git a/compile/src/main/scala/sbt/compiler/CompilerCache.scala b/compile/src/main/scala/sbt/compiler/CompilerCache.scala index 43d4d0b69..25391460a 100644 --- a/compile/src/main/scala/sbt/compiler/CompilerCache.scala +++ b/compile/src/main/scala/sbt/compiler/CompilerCache.scala @@ -1,52 +1,49 @@ package sbt package compiler - import xsbti.{Logger => xLogger, Reporter} - import xsbti.compile.{CachedCompiler, CachedCompilerProvider, GlobalsCache, Output} - import Logger.f0 - import java.io.File - import java.util.{LinkedHashMap,Map} +import xsbti.{ Logger => xLogger, Reporter } +import xsbti.compile.{ CachedCompiler, CachedCompilerProvider, GlobalsCache, Output } +import Logger.f0 +import java.io.File +import java.util.{ LinkedHashMap, Map } -private final class CompilerCache(val maxInstances: Int) extends GlobalsCache -{ - private[this] val cache = lru[CompilerKey, CachedCompiler](maxInstances) - private[this] def lru[A,B](max: Int) = new LinkedHashMap[A,B](8, 0.75f, true) { - override def removeEldestEntry(eldest: Map.Entry[A,B]): Boolean = size > max - } - def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = synchronized - { - val key = CompilerKey(dropSources(args.toList), c.scalaInstance.actualVersion) - if(forceNew) cache.remove(key) - cache.get(key) match { - case null => - log.debug(f0("Compiler cache miss. " + key.toString)) - put(key, c.newCachedCompiler(args, output, log, reporter, /* resident = */ !forceNew)) - case cc => - log.debug(f0("Compiler cache hit (" + cc.hashCode.toHexString + "). 
" + key.toString)) - cc - } - } - def clear(): Unit = synchronized { cache.clear() } +private final class CompilerCache(val maxInstances: Int) extends GlobalsCache { + private[this] val cache = lru[CompilerKey, CachedCompiler](maxInstances) + private[this] def lru[A, B](max: Int) = new LinkedHashMap[A, B](8, 0.75f, true) { + override def removeEldestEntry(eldest: Map.Entry[A, B]): Boolean = size > max + } + def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = synchronized { + val key = CompilerKey(dropSources(args.toList), c.scalaInstance.actualVersion) + if (forceNew) cache.remove(key) + cache.get(key) match { + case null => + log.debug(f0("Compiler cache miss. " + key.toString)) + put(key, c.newCachedCompiler(args, output, log, reporter, /* resident = */ !forceNew)) + case cc => + log.debug(f0("Compiler cache hit (" + cc.hashCode.toHexString + "). " + key.toString)) + cc + } + } + def clear(): Unit = synchronized { cache.clear() } - private[this] def dropSources(args: Seq[String]): Seq[String] = - args.filterNot(arg => arg.endsWith(".scala") || arg.endsWith(".java")) + private[this] def dropSources(args: Seq[String]): Seq[String] = + args.filterNot(arg => arg.endsWith(".scala") || arg.endsWith(".java")) - private[this] def put(key: CompilerKey, cc: CachedCompiler): CachedCompiler = - { - cache.put(key, cc) - cc - } - private[this] final case class CompilerKey(args: Seq[String], scalaVersion: String) { - override def toString = "scala " + scalaVersion + ", args: " + args.mkString(" ") - } + private[this] def put(key: CompilerKey, cc: CachedCompiler): CachedCompiler = + { + cache.put(key, cc) + cc + } + private[this] final case class CompilerKey(args: Seq[String], scalaVersion: String) { + override def toString = "scala " + scalaVersion + ", args: " + args.mkString(" ") + } } -object CompilerCache -{ - def apply(maxInstances: Int): GlobalsCache = new 
CompilerCache(maxInstances) +object CompilerCache { + def apply(maxInstances: Int): GlobalsCache = new CompilerCache(maxInstances) - val fresh: GlobalsCache = new GlobalsCache { - def clear() {} - def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = - c.newCachedCompiler(args, output, log, reporter, /*resident = */ false) - } + val fresh: GlobalsCache = new GlobalsCache { + def clear() {} + def apply(args: Array[String], output: Output, forceNew: Boolean, c: CachedCompilerProvider, log: xLogger, reporter: Reporter): CachedCompiler = + c.newCachedCompiler(args, output, log, reporter, /*resident = */ false) + } } diff --git a/compile/src/main/scala/sbt/compiler/CompilerInterfaceProvider.scala b/compile/src/main/scala/sbt/compiler/CompilerInterfaceProvider.scala index 02d0ccd65..1beee441c 100644 --- a/compile/src/main/scala/sbt/compiler/CompilerInterfaceProvider.scala +++ b/compile/src/main/scala/sbt/compiler/CompilerInterfaceProvider.scala @@ -1,15 +1,13 @@ package sbt package compiler - import java.io.File +import java.io.File -trait CompilerInterfaceProvider -{ - def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File +trait CompilerInterfaceProvider { + def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File } -object CompilerInterfaceProvider -{ - def constant(file: File): CompilerInterfaceProvider = new CompilerInterfaceProvider { - def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = file - } +object CompilerInterfaceProvider { + def constant(file: File): CompilerInterfaceProvider = new CompilerInterfaceProvider { + def apply(scalaInstance: xsbti.compile.ScalaInstance, log: Logger): File = file + } } \ No newline at end of file diff --git a/compile/src/main/scala/sbt/compiler/CompilerOutput.scala b/compile/src/main/scala/sbt/compiler/CompilerOutput.scala index daa8209d8..66aee7367 100755 --- 
a/compile/src/main/scala/sbt/compiler/CompilerOutput.scala +++ b/compile/src/main/scala/sbt/compiler/CompilerOutput.scala @@ -5,20 +5,20 @@ package sbt package compiler - import xsbti.compile.{Output, SingleOutput, MultipleOutput} - import java.io.File +import xsbti.compile.{ Output, SingleOutput, MultipleOutput } +import java.io.File object CompileOutput { - def apply(dir: File): Output = new SingleOutput { - def outputDirectory = dir - } + def apply(dir: File): Output = new SingleOutput { + def outputDirectory = dir + } - def apply(groups: (File, File)*): Output = new MultipleOutput { - def outputGroups = groups.toArray map { - case (src, out) => new MultipleOutput.OutputGroup { - def sourceDirectory = src - def outputDirectory = out - } - } - } + def apply(groups: (File, File)*): Output = new MultipleOutput { + def outputGroups = groups.toArray map { + case (src, out) => new MultipleOutput.OutputGroup { + def sourceDirectory = src + def outputDirectory = out + } + } + } } diff --git a/compile/src/main/scala/sbt/compiler/JavaCompiler.scala b/compile/src/main/scala/sbt/compiler/JavaCompiler.scala index 8e5b3bba2..60e9a48d5 100644 --- a/compile/src/main/scala/sbt/compiler/JavaCompiler.scala +++ b/compile/src/main/scala/sbt/compiler/JavaCompiler.scala @@ -4,124 +4,120 @@ package sbt package compiler -import java.io.{File, PrintWriter} +import java.io.{ File, PrintWriter } abstract class JavacContract(val name: String, val clazz: String) { - def exec(args: Array[String], writer: PrintWriter): Int + def exec(args: Array[String], writer: PrintWriter): Int } -trait JavaCompiler extends xsbti.compile.JavaCompiler -{ - def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) +trait JavaCompiler extends xsbti.compile.JavaCompiler { + def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) - def compile(sources: Array[File], classpath: Array[File], 
output: xsbti.compile.Output, options: Array[String], log: xsbti.Logger): Unit = { - val outputDirectory = output match { - case single: xsbti.compile.SingleOutput => single.outputDirectory - case _ => throw new RuntimeException("Javac doesn't support multiple output directories") - } - apply(sources, classpath, outputDirectory, options)(log) - } + def compile(sources: Array[File], classpath: Array[File], output: xsbti.compile.Output, options: Array[String], log: xsbti.Logger): Unit = { + val outputDirectory = output match { + case single: xsbti.compile.SingleOutput => single.outputDirectory + case _ => throw new RuntimeException("Javac doesn't support multiple output directories") + } + apply(sources, classpath, outputDirectory, options)(log) + } - def onArgs(f: Seq[String] => Unit): JavaCompiler + def onArgs(f: Seq[String] => Unit): JavaCompiler } -trait Javadoc -{ - def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) +trait Javadoc { + def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) - def onArgs(f: Seq[String] => Unit): Javadoc + def onArgs(f: Seq[String] => Unit): Javadoc } -trait JavaTool extends Javadoc with JavaCompiler -{ - def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) = - compile(JavaCompiler.javac, sources, classpath, outputDirectory, options)(log) +trait JavaTool extends Javadoc with JavaCompiler { + def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) = + compile(JavaCompiler.javac, sources, classpath, outputDirectory, options)(log) - def doc(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) = - compile(JavaCompiler.javadoc, sources, classpath, outputDirectory, options)(log) + def doc(sources: 
Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maximumErrors: Int, log: Logger) = + compile(JavaCompiler.javadoc, sources, classpath, outputDirectory, options)(log) - def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit + def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit - def onArgs(f: Seq[String] => Unit): JavaTool + def onArgs(f: Seq[String] => Unit): JavaTool } -object JavaCompiler -{ - type Fork = (JavacContract, Seq[String], Logger) => Int +object JavaCompiler { + type Fork = (JavacContract, Seq[String], Logger) => Int - val javac = new JavacContract("javac", "com.sun.tools.javac.Main") { - def exec(args: Array[String], writer: PrintWriter) = { - val m = Class.forName(clazz).getDeclaredMethod("compile", classOf[Array[String]], classOf[PrintWriter]) - m.invoke(null, args, writer).asInstanceOf[java.lang.Integer].intValue - } - } - val javadoc = new JavacContract("javadoc", "com.sun.tools.javadoc.Main") { - def exec(args: Array[String], writer: PrintWriter) = { - val m = Class.forName(clazz).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]]) - m.invoke(null, name, writer, writer, writer, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue - } - } + val javac = new JavacContract("javac", "com.sun.tools.javac.Main") { + def exec(args: Array[String], writer: PrintWriter) = { + val m = Class.forName(clazz).getDeclaredMethod("compile", classOf[Array[String]], classOf[PrintWriter]) + m.invoke(null, args, writer).asInstanceOf[java.lang.Integer].intValue + } + } + val javadoc = new JavacContract("javadoc", "com.sun.tools.javadoc.Main") { + def exec(args: Array[String], writer: PrintWriter) = { + val m = 
Class.forName(clazz).getDeclaredMethod("execute", classOf[String], classOf[PrintWriter], classOf[PrintWriter], classOf[PrintWriter], classOf[String], classOf[Array[String]]) + m.invoke(null, name, writer, writer, writer, "com.sun.tools.doclets.standard.Standard", args).asInstanceOf[java.lang.Integer].intValue + } + } - def construct(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = new JavaTool0(f, cp, scalaInstance, _ => ()) + def construct(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = new JavaTool0(f, cp, scalaInstance, _ => ()) - private[this] class JavaTool0(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance, onArgsF: Seq[String] => Unit) extends JavaTool - { - def onArgs(g: Seq[String] => Unit): JavaTool = new JavaTool0(f, cp, scalaInstance, g) - def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] = - { - val augmentedClasspath = if(cp.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath - val javaCp = ClasspathOptions.javac(cp.compiler) - (new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options) - } - def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) { - val arguments = commandArguments(sources, classpath, outputDirectory, options, log) - onArgsF(arguments) - val code: Int = f(contract, arguments, log) - log.debug(contract.name + " returned exit code: " + code) - if( code != 0 ) throw new CompileFailed(arguments.toArray, contract.name + " returned nonzero exit code", Array()) - } - } - def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool = - construct(directOrForkJavac, cp, scalaInstance) - - def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = - construct(directJavac, cp, scalaInstance) - - 
def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool = - construct(forkJavac, cp, scalaInstance) - - def directOrForkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) => - try { directJavac(contract, arguments, log) } - catch { case e @ (_: ClassNotFoundException | _: NoSuchMethodException) => - log.debug(contract.clazz + " not found with appropriate method signature; forking " + contract.name + " instead") - forkJavac(doFork)(contract, arguments, log) - } + private[this] class JavaTool0(f: Fork, cp: ClasspathOptions, scalaInstance: ScalaInstance, onArgsF: Seq[String] => Unit) extends JavaTool { + def onArgs(g: Seq[String] => Unit): JavaTool = new JavaTool0(f, cp, scalaInstance, g) + def commandArguments(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Seq[String] = + { + val augmentedClasspath = if (cp.autoBoot) classpath ++ Seq(scalaInstance.libraryJar) else classpath + val javaCp = ClasspathOptions.javac(cp.compiler) + (new CompilerArguments(scalaInstance, javaCp))(sources, augmentedClasspath, Some(outputDirectory), options) + } + def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger) { + val arguments = commandArguments(sources, classpath, outputDirectory, options, log) + onArgsF(arguments) + val code: Int = f(contract, arguments, log) + log.debug(contract.name + " returned exit code: " + code) + if (code != 0) throw new CompileFailed(arguments.toArray, contract.name + " returned nonzero exit code", Array()) + } + } + def directOrFork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool = + construct(directOrForkJavac, cp, scalaInstance) - /** `doFork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/ - def forkJavac(implicit doFork: Fork) = 
(contract: JavacContract, arguments: Seq[String], log: Logger) => - { - val (jArgs, nonJArgs) = arguments.partition(_.startsWith("-J")) - def externalJavac(argFile: File) = doFork(contract, jArgs :+ ("@" + normalizeSlash(argFile.getAbsolutePath)), log) - withArgumentFile(nonJArgs)(externalJavac) - } - val directJavac = (contract: JavacContract, arguments: Seq[String], log: Logger) => - { - val logger = new LoggerWriter(log) - val writer = new PrintWriter(logger) - val argsArray = arguments.toArray - log.debug("Attempting to call " + contract.name + " directly...") + def direct(cp: ClasspathOptions, scalaInstance: ScalaInstance): JavaTool = + construct(directJavac, cp, scalaInstance) - var exitCode = -1 - try { exitCode = contract.exec(argsArray, writer) } - finally { logger.flushLines( if(exitCode == 0) Level.Warn else Level.Error) } - exitCode - } - def withArgumentFile[T](args: Seq[String])(f: File => T): T = - { - import IO.{Newline, withTemporaryDirectory, write} - withTemporaryDirectory { tmp => - val argFile = new File(tmp, "argfile") - write(argFile, args.map(escapeSpaces).mkString(Newline)) - f(argFile) - } - } - // javac's argument file seems to allow naive space escaping with quotes. 
escaping a quote with a backslash does not work - def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"' - def normalizeSlash(s: String) = s.replace(File.separatorChar, '/') + def fork(cp: ClasspathOptions, scalaInstance: ScalaInstance)(implicit doFork: Fork): JavaTool = + construct(forkJavac, cp, scalaInstance) + + def directOrForkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) => + try { directJavac(contract, arguments, log) } + catch { + case e @ (_: ClassNotFoundException | _: NoSuchMethodException) => + log.debug(contract.clazz + " not found with appropriate method signature; forking " + contract.name + " instead") + forkJavac(doFork)(contract, arguments, log) + } + + /** `doFork` should be a function that forks javac with the provided arguments and sends output to the given Logger.*/ + def forkJavac(implicit doFork: Fork) = (contract: JavacContract, arguments: Seq[String], log: Logger) => + { + val (jArgs, nonJArgs) = arguments.partition(_.startsWith("-J")) + def externalJavac(argFile: File) = doFork(contract, jArgs :+ ("@" + normalizeSlash(argFile.getAbsolutePath)), log) + withArgumentFile(nonJArgs)(externalJavac) + } + val directJavac = (contract: JavacContract, arguments: Seq[String], log: Logger) => + { + val logger = new LoggerWriter(log) + val writer = new PrintWriter(logger) + val argsArray = arguments.toArray + log.debug("Attempting to call " + contract.name + " directly...") + + var exitCode = -1 + try { exitCode = contract.exec(argsArray, writer) } + finally { logger.flushLines(if (exitCode == 0) Level.Warn else Level.Error) } + exitCode + } + def withArgumentFile[T](args: Seq[String])(f: File => T): T = + { + import IO.{ Newline, withTemporaryDirectory, write } + withTemporaryDirectory { tmp => + val argFile = new File(tmp, "argfile") + write(argFile, args.map(escapeSpaces).mkString(Newline)) + f(argFile) + } + } + // javac's argument file seems to allow naive space escaping with quotes. 
escaping a quote with a backslash does not work + def escapeSpaces(s: String): String = '\"' + normalizeSlash(s) + '\"' + def normalizeSlash(s: String) = s.replace(File.separatorChar, '/') } diff --git a/compile/src/main/scala/sbt/compiler/RawCompiler.scala b/compile/src/main/scala/sbt/compiler/RawCompiler.scala index f7ae16468..558d318cf 100644 --- a/compile/src/main/scala/sbt/compiler/RawCompiler.scala +++ b/compile/src/main/scala/sbt/compiler/RawCompiler.scala @@ -4,34 +4,33 @@ package sbt package compiler - import java.io.File +import java.io.File -/** A basic interface to the compiler. It is called in the same virtual machine, but no dependency analysis is done. This -* is used, for example, to compile the interface/plugin code. -* If `explicitClasspath` is true, the bootclasspath and classpath are not augmented. If it is false, -* the scala-library.jar from `scalaInstance` is put on bootclasspath and the scala-compiler jar goes on the classpath.*/ -class RawCompiler(val scalaInstance: xsbti.compile.ScalaInstance, cp: ClasspathOptions, log: Logger) -{ - def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String]) - { - // reflection is required for binary compatibility - // The following import ensures there is a compile error if the identifiers change, - // but should not be otherwise directly referenced - import scala.tools.nsc.Main.{process => _} +/** + * A basic interface to the compiler. It is called in the same virtual machine, but no dependency analysis is done. This + * is used, for example, to compile the interface/plugin code. + * If `explicitClasspath` is true, the bootclasspath and classpath are not augmented. If it is false, + * the scala-library.jar from `scalaInstance` is put on bootclasspath and the scala-compiler jar goes on the classpath. 
+ */ +class RawCompiler(val scalaInstance: xsbti.compile.ScalaInstance, cp: ClasspathOptions, log: Logger) { + def apply(sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String]) { + // reflection is required for binary compatibility + // The following import ensures there is a compile error if the identifiers change, + // but should not be otherwise directly referenced + import scala.tools.nsc.Main.{ process => _ } - val arguments = compilerArguments(sources, classpath, Some(outputDirectory), options) - log.debug("Plain interface to Scala compiler " + scalaInstance.actualVersion + " with arguments: " + arguments.mkString("\n\t", "\n\t", "")) - val mainClass = Class.forName("scala.tools.nsc.Main", true, scalaInstance.loader) - val process = mainClass.getMethod("process", classOf[Array[String]]) - process.invoke(null, arguments.toArray) - checkForFailure(mainClass, arguments.toArray) - } - def compilerArguments = new CompilerArguments(scalaInstance, cp) - protected def checkForFailure(mainClass: Class[_], args: Array[String]) - { - val reporter = mainClass.getMethod("reporter").invoke(null) - val failed = reporter.getClass.getMethod("hasErrors").invoke(reporter).asInstanceOf[Boolean] - if(failed) throw new CompileFailed(args, "Plain compile failed", Array()) - } + val arguments = compilerArguments(sources, classpath, Some(outputDirectory), options) + log.debug("Plain interface to Scala compiler " + scalaInstance.actualVersion + " with arguments: " + arguments.mkString("\n\t", "\n\t", "")) + val mainClass = Class.forName("scala.tools.nsc.Main", true, scalaInstance.loader) + val process = mainClass.getMethod("process", classOf[Array[String]]) + process.invoke(null, arguments.toArray) + checkForFailure(mainClass, arguments.toArray) + } + def compilerArguments = new CompilerArguments(scalaInstance, cp) + protected def checkForFailure(mainClass: Class[_], args: Array[String]) { + val reporter = mainClass.getMethod("reporter").invoke(null) + 
val failed = reporter.getClass.getMethod("hasErrors").invoke(reporter).asInstanceOf[Boolean] + if (failed) throw new CompileFailed(args, "Plain compile failed", Array()) + } } class CompileFailed(val arguments: Array[String], override val toString: String, val problems: Array[xsbti.Problem]) extends xsbti.CompileFailed with FeedbackProvidedException diff --git a/ivy/src/main/scala/sbt/Artifact.scala b/ivy/src/main/scala/sbt/Artifact.scala index 5bd17ef0d..325ef7b6b 100644 --- a/ivy/src/main/scala/sbt/Artifact.scala +++ b/ivy/src/main/scala/sbt/Artifact.scala @@ -6,71 +6,69 @@ package sbt import java.io.File import java.net.URL -final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL], extraAttributes: Map[String,String]) -{ - def extra(attributes: (String,String)*) = Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes ++ ModuleID.checkE(attributes)) +final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL], extraAttributes: Map[String, String]) { + def extra(attributes: (String, String)*) = Artifact(name, `type`, extension, classifier, configurations, url, extraAttributes ++ ModuleID.checkE(attributes)) } - import Configurations.{config, Docs, Optional, Pom, Sources, Test} +import Configurations.{ config, Docs, Optional, Pom, Sources, Test } -object Artifact -{ - def apply(name: String): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None) - def apply(name: String, extra: Map[String,String]): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None, extra) - def apply(name: String, classifier: String): Artifact = Artifact(name, DefaultType, DefaultExtension, Some(classifier), Nil, None) - def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, 
None, Nil, None) - def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = Artifact(name, `type`, extension, Some(classifier), Nil, None) - def apply(name: String, url: URL): Artifact =Artifact(name, extract(url, DefaultType), extract(url, DefaultExtension), None, Nil, Some(url)) - def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]): Artifact = - Artifact(name, `type`, extension, classifier, configurations, url, Map.empty) +object Artifact { + def apply(name: String): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None) + def apply(name: String, extra: Map[String, String]): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None, extra) + def apply(name: String, classifier: String): Artifact = Artifact(name, DefaultType, DefaultExtension, Some(classifier), Nil, None) + def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, None, Nil, None) + def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = Artifact(name, `type`, extension, Some(classifier), Nil, None) + def apply(name: String, url: URL): Artifact = Artifact(name, extract(url, DefaultType), extract(url, DefaultExtension), None, Nil, Some(url)) + def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]): Artifact = + Artifact(name, `type`, extension, classifier, configurations, url, Map.empty) - val DefaultExtension = "jar" - val DefaultType = "jar" + val DefaultExtension = "jar" + val DefaultType = "jar" - def sources(name: String) = classified(name, SourceClassifier) - def javadoc(name: String) = classified(name, DocClassifier) - def pom(name: String) = Artifact(name, PomType, PomType, None, Pom :: Nil, None) + def sources(name: String) = classified(name, SourceClassifier) + 
def javadoc(name: String) = classified(name, DocClassifier) + def pom(name: String) = Artifact(name, PomType, PomType, None, Pom :: Nil, None) - val DocClassifier = "javadoc" - val SourceClassifier = "sources" - val DocType = "doc" - val SourceType = "src" - val PomType = "pom" - val TestsClassifier = "tests" + val DocClassifier = "javadoc" + val SourceClassifier = "sources" + val DocType = "doc" + val SourceType = "src" + val PomType = "pom" + val TestsClassifier = "tests" - def extract(url: URL, default: String): String = extract(url.toString, default) - def extract(name: String, default: String): String = - { - val i = name.lastIndexOf('.') - if(i >= 0) - name.substring(i+1) - else - default - } - def defaultArtifact(file: File) = - { - val name = file.getName - val i = name.lastIndexOf('.') - val base = if(i >= 0) name.substring(0, i) else name - Artifact(base, extract(name, DefaultType), extract(name, DefaultExtension), None, Nil, Some(file.toURI.toURL)) - } - def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String = - { - import artifact._ - val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c } - val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary) - val base = CrossVersion.applyCross(artifact.name, cross) - base + "-" + module.revision + classifierStr + "." 
+ artifact.extension - } + def extract(url: URL, default: String): String = extract(url.toString, default) + def extract(name: String, default: String): String = + { + val i = name.lastIndexOf('.') + if (i >= 0) + name.substring(i + 1) + else + default + } + def defaultArtifact(file: File) = + { + val name = file.getName + val i = name.lastIndexOf('.') + val base = if (i >= 0) name.substring(0, i) else name + Artifact(base, extract(name, DefaultType), extract(name, DefaultExtension), None, Nil, Some(file.toURI.toURL)) + } + def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String = + { + import artifact._ + val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c } + val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary) + val base = CrossVersion.applyCross(artifact.name, cross) + base + "-" + module.revision + classifierStr + "." + artifact.extension + } - val classifierConfMap = Map(SourceClassifier -> Sources, DocClassifier -> Docs) - val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType) - def classifierConf(classifier: String): Configuration = - if(classifier.startsWith(TestsClassifier)) - Test - else - classifierConfMap.getOrElse(classifier, Optional) - def classifierType(classifier: String): String = classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType) - def classified(name: String, classifier: String): Artifact = - Artifact(name, classifierType(classifier), DefaultExtension, Some(classifier), classifierConf(classifier) :: Nil, None) + val classifierConfMap = Map(SourceClassifier -> Sources, DocClassifier -> Docs) + val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType) + def classifierConf(classifier: String): Configuration = + if (classifier.startsWith(TestsClassifier)) + Test + else + classifierConfMap.getOrElse(classifier, Optional) + def classifierType(classifier: 
String): String = classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType) + def classified(name: String, classifier: String): Artifact = + Artifact(name, classifierType(classifier), DefaultExtension, Some(classifier), classifierConf(classifier) :: Nil, None) } diff --git a/ivy/src/main/scala/sbt/ComponentManager.scala b/ivy/src/main/scala/sbt/ComponentManager.scala index de87a9890..2d6c5a447 100644 --- a/ivy/src/main/scala/sbt/ComponentManager.scala +++ b/ivy/src/main/scala/sbt/ComponentManager.scala @@ -3,90 +3,87 @@ */ package sbt -import java.io.{File,FileOutputStream} +import java.io.{ File, FileOutputStream } import java.util.concurrent.Callable -/** A component manager provides access to the pieces of xsbt that are distributed as components. -* There are two types of components. The first type is compiled subproject jars with their dependencies. -* The second type is a subproject distributed as a source jar so that it can be compiled against a specific -* version of Scala. -* -* The component manager provides services to install and retrieve components to the local repository. -* This is used for compiled source jars so that the compilation need not be repeated for other projects on the same -* machine. -*/ -class ComponentManager(globalLock: xsbti.GlobalLock, provider: xsbti.ComponentProvider, ivyHome: Option[File], val log: Logger) -{ - private[this] val ivyCache = new IvyCache(ivyHome) - /** Get all of the files for component 'id', throwing an exception if no files exist for the component. 
*/ - def files(id: String)(ifMissing: IfMissing): Iterable[File] = - { - def fromGlobal = - lockGlobalCache { - try { update(id); getOrElse(createAndCache) } - catch { case e: NotInCache => createAndCache } - } - def getOrElse(orElse: => Iterable[File]): Iterable[File] = - { - val existing = provider.component(id) - if(existing.isEmpty) orElse else existing - } - def notFound = invalid("Could not find required component '" + id + "'") - def createAndCache = - ifMissing match { - case IfMissing.Fail => notFound - case d: IfMissing.Define => - d() - if(d.cache) cache(id) - getOrElse(notFound) - } +/** + * A component manager provides access to the pieces of xsbt that are distributed as components. + * There are two types of components. The first type is compiled subproject jars with their dependencies. + * The second type is a subproject distributed as a source jar so that it can be compiled against a specific + * version of Scala. + * + * The component manager provides services to install and retrieve components to the local repository. + * This is used for compiled source jars so that the compilation need not be repeated for other projects on the same + * machine. + */ +class ComponentManager(globalLock: xsbti.GlobalLock, provider: xsbti.ComponentProvider, ivyHome: Option[File], val log: Logger) { + private[this] val ivyCache = new IvyCache(ivyHome) + /** Get all of the files for component 'id', throwing an exception if no files exist for the component. 
*/ + def files(id: String)(ifMissing: IfMissing): Iterable[File] = + { + def fromGlobal = + lockGlobalCache { + try { update(id); getOrElse(createAndCache) } + catch { case e: NotInCache => createAndCache } + } + def getOrElse(orElse: => Iterable[File]): Iterable[File] = + { + val existing = provider.component(id) + if (existing.isEmpty) orElse else existing + } + def notFound = invalid("Could not find required component '" + id + "'") + def createAndCache = + ifMissing match { + case IfMissing.Fail => notFound + case d: IfMissing.Define => + d() + if (d.cache) cache(id) + getOrElse(notFound) + } - lockLocalCache { getOrElse(fromGlobal) } - } - /** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. */ - private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)( action ) - /** This is used to ensure atomic access to components in the global Ivy cache.*/ - private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)( action ) - private def lock[T](file: File)(action: => T): T = globalLock(file, new Callable[T] { def call = action }) - /** Get the file for component 'id', throwing an exception if no files or multiple files exist for the component. */ - def file(id: String)(ifMissing: IfMissing): File = - files(id)(ifMissing).toList match { - case x :: Nil => x - case xs => invalid("Expected single file for component '" + id + "', found: " + xs.mkString(", ")) - } - private def invalid(msg: String) = throw new InvalidComponent(msg) - private def invalid(e: NotInCache) = throw new InvalidComponent(e.getMessage, e) + lockLocalCache { getOrElse(fromGlobal) } + } + /** This is used to lock the local cache in project/boot/. By checking the local cache first, we can avoid grabbing a global lock. 
*/ + private def lockLocalCache[T](action: => T): T = lock(provider.lockFile)(action) + /** This is used to ensure atomic access to components in the global Ivy cache.*/ + private def lockGlobalCache[T](action: => T): T = lock(ivyCache.lockFile)(action) + private def lock[T](file: File)(action: => T): T = globalLock(file, new Callable[T] { def call = action }) + /** Get the file for component 'id', throwing an exception if no files or multiple files exist for the component. */ + def file(id: String)(ifMissing: IfMissing): File = + files(id)(ifMissing).toList match { + case x :: Nil => x + case xs => invalid("Expected single file for component '" + id + "', found: " + xs.mkString(", ")) + } + private def invalid(msg: String) = throw new InvalidComponent(msg) + private def invalid(e: NotInCache) = throw new InvalidComponent(e.getMessage, e) - def define(id: String, files: Iterable[File]) = lockLocalCache { provider.defineComponent(id, files.toSeq.toArray) } - /** Retrieve the file for component 'id' from the local repository. */ - private def update(id: String): Unit = ivyCache.withCachedJar(sbtModuleID(id), Some(globalLock), log)(jar => define(id, Seq(jar)) ) + def define(id: String, files: Iterable[File]) = lockLocalCache { provider.defineComponent(id, files.toSeq.toArray) } + /** Retrieve the file for component 'id' from the local repository. */ + private def update(id: String): Unit = ivyCache.withCachedJar(sbtModuleID(id), Some(globalLock), log)(jar => define(id, Seq(jar))) - private def sbtModuleID(id: String) = ModuleID(SbtArtifacts.Organization, id, ComponentManager.stampedVersion) - /** Install the files for component 'id' to the local repository. This is usually used after writing files to the directory returned by 'location'. 
*/ - def cache(id: String): Unit = ivyCache.cacheJar(sbtModuleID(id), file(id)(IfMissing.Fail), Some(globalLock), log) - def clearCache(id: String): Unit = lockGlobalCache { ivyCache.clearCachedJar(sbtModuleID(id), Some(globalLock), log) } + private def sbtModuleID(id: String) = ModuleID(SbtArtifacts.Organization, id, ComponentManager.stampedVersion) + /** Install the files for component 'id' to the local repository. This is usually used after writing files to the directory returned by 'location'. */ + def cache(id: String): Unit = ivyCache.cacheJar(sbtModuleID(id), file(id)(IfMissing.Fail), Some(globalLock), log) + def clearCache(id: String): Unit = lockGlobalCache { ivyCache.clearCachedJar(sbtModuleID(id), Some(globalLock), log) } } -class InvalidComponent(msg: String, cause: Throwable) extends RuntimeException(msg, cause) -{ - def this(msg: String) = this(msg, null) +class InvalidComponent(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { + def this(msg: String) = this(msg, null) } sealed trait IfMissing extends NotNull -object IfMissing -{ - object Fail extends IfMissing - final class Define(val cache: Boolean, define: => Unit) extends IfMissing { def apply() = define } +object IfMissing { + object Fail extends IfMissing + final class Define(val cache: Boolean, define: => Unit) extends IfMissing { def apply() = define } } -object ComponentManager -{ - lazy val (version, timestamp) = - { - val properties = new java.util.Properties - val propertiesStream = versionResource.openStream - try { properties.load(propertiesStream) } finally { propertiesStream.close() } - (properties.getProperty("version"), properties.getProperty("timestamp")) - } - lazy val stampedVersion = version + "_" + timestamp +object ComponentManager { + lazy val (version, timestamp) = + { + val properties = new java.util.Properties + val propertiesStream = versionResource.openStream + try { properties.load(propertiesStream) } finally { propertiesStream.close() } + 
(properties.getProperty("version"), properties.getProperty("timestamp")) + } + lazy val stampedVersion = version + "_" + timestamp - import java.net.URL - private def versionResource: URL = getClass.getResource("/xsbt.version.properties") + import java.net.URL + private def versionResource: URL = getClass.getResource("/xsbt.version.properties") } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/Configuration.scala b/ivy/src/main/scala/sbt/Configuration.scala index a259f99d7..db4ada450 100644 --- a/ivy/src/main/scala/sbt/Configuration.scala +++ b/ivy/src/main/scala/sbt/Configuration.scala @@ -3,63 +3,61 @@ */ package sbt -object Configurations -{ - def config(name: String) = new Configuration(name) - def default: Seq[Configuration] = defaultMavenConfigurations - def defaultMavenConfigurations: Seq[Configuration] = Seq(Compile, Runtime, Test, Provided, Optional) - def defaultInternal: Seq[Configuration] = Seq(CompileInternal, RuntimeInternal, TestInternal) - def auxiliary: Seq[Configuration] = Seq(Sources, Docs, Pom) - def names(cs: Seq[Configuration]) = cs.map(_.name) +object Configurations { + def config(name: String) = new Configuration(name) + def default: Seq[Configuration] = defaultMavenConfigurations + def defaultMavenConfigurations: Seq[Configuration] = Seq(Compile, Runtime, Test, Provided, Optional) + def defaultInternal: Seq[Configuration] = Seq(CompileInternal, RuntimeInternal, TestInternal) + def auxiliary: Seq[Configuration] = Seq(Sources, Docs, Pom) + def names(cs: Seq[Configuration]) = cs.map(_.name) - lazy val RuntimeInternal = optionalInternal(Runtime) - lazy val TestInternal = fullInternal(Test) - lazy val IntegrationTestInternal = fullInternal(IntegrationTest) - lazy val CompileInternal = fullInternal(Compile) + lazy val RuntimeInternal = optionalInternal(Runtime) + lazy val TestInternal = fullInternal(Test) + lazy val IntegrationTestInternal = fullInternal(IntegrationTest) + lazy val CompileInternal = fullInternal(Compile) - def 
internalMap(c: Configuration) = c match { - case Compile => CompileInternal - case Test => TestInternal - case Runtime => RuntimeInternal - case IntegrationTest => IntegrationTestInternal - case _ => c - } + def internalMap(c: Configuration) = c match { + case Compile => CompileInternal + case Test => TestInternal + case Runtime => RuntimeInternal + case IntegrationTest => IntegrationTestInternal + case _ => c + } - def internal(base: Configuration, ext: Configuration*) = config(base.name + "-internal") extend(ext : _*) hide; - def fullInternal(base: Configuration): Configuration = internal(base, base, Optional, Provided) - def optionalInternal(base: Configuration): Configuration = internal(base, base, Optional) + def internal(base: Configuration, ext: Configuration*) = config(base.name + "-internal") extend (ext: _*) hide; + def fullInternal(base: Configuration): Configuration = internal(base, base, Optional, Provided) + def optionalInternal(base: Configuration): Configuration = internal(base, base, Optional) - lazy val Default = config("default") - lazy val Compile = config("compile") - lazy val IntegrationTest = config("it") extend(Runtime) - lazy val Provided = config("provided") ; - lazy val Docs = config("docs") - lazy val Runtime = config("runtime") extend(Compile) - lazy val Test = config("test") extend(Runtime) - lazy val Sources = config("sources") - lazy val System = config("system") - lazy val Optional = config("optional") - lazy val Pom = config("pom") + lazy val Default = config("default") + lazy val Compile = config("compile") + lazy val IntegrationTest = config("it") extend (Runtime) + lazy val Provided = config("provided"); + lazy val Docs = config("docs") + lazy val Runtime = config("runtime") extend (Compile) + lazy val Test = config("test") extend (Runtime) + lazy val Sources = config("sources") + lazy val System = config("system") + lazy val Optional = config("optional") + lazy val Pom = config("pom") - lazy val ScalaTool = config("scala-tool") 
hide - lazy val CompilerPlugin = config("plugin") hide + lazy val ScalaTool = config("scala-tool") hide + lazy val CompilerPlugin = config("plugin") hide - private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true) - private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false) - private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration - private[sbt] def defaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) Configurations.Compile else Configurations.Default - private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*) + private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true) + private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false) + private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration + private[sbt] def defaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) Configurations.Compile else Configurations.Default + private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*) } /** Represents an Ivy configuration. 
*/ -final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) -{ - require(name != null && !name.isEmpty) - require(description != null) - def this(name: String) = this(name, "", true, Nil, true) - def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive) - def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive) - def notTransitive = intransitive - def intransitive = Configuration(name, description, isPublic, extendsConfigs, false) - def hide = Configuration(name, description, false, extendsConfigs, transitive) - override def toString = name +final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) { + require(name != null && !name.isEmpty) + require(description != null) + def this(name: String) = this(name, "", true, Nil, true) + def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive) + def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive) + def notTransitive = intransitive + def intransitive = Configuration(name, description, isPublic, extendsConfigs, false) + def hide = Configuration(name, description, false, extendsConfigs, transitive) + override def toString = name } diff --git a/ivy/src/main/scala/sbt/ConflictWarning.scala b/ivy/src/main/scala/sbt/ConflictWarning.scala index 51c69e988..7c5c9a918 100644 --- a/ivy/src/main/scala/sbt/ConflictWarning.scala +++ b/ivy/src/main/scala/sbt/ConflictWarning.scala @@ -1,80 +1,74 @@ package sbt - import DependencyFilter._ +import DependencyFilter._ -final case class ConflictWarning(label: String, level: Level.Value, failOnConflict: Boolean) -{ - @deprecated("`filter` is no longer used", 
"0.13.0") - val filter: ModuleFilter = (_: ModuleID) => false - @deprecated("`group` is no longer used", "0.13.0") - val group: ModuleID => String = ConflictWarning.org +final case class ConflictWarning(label: String, level: Level.Value, failOnConflict: Boolean) { + @deprecated("`filter` is no longer used", "0.13.0") + val filter: ModuleFilter = (_: ModuleID) => false + @deprecated("`group` is no longer used", "0.13.0") + val group: ModuleID => String = ConflictWarning.org } -object ConflictWarning -{ - @deprecated("`group` and `filter` are no longer used. Use a standard Ivy conflict manager.", "0.13.0") - def apply(label: String, filter: ModuleFilter, group: ModuleID => String, level: Level.Value, failOnConflict: Boolean): ConflictWarning = - ConflictWarning(label, level, failOnConflict) +object ConflictWarning { + @deprecated("`group` and `filter` are no longer used. Use a standard Ivy conflict manager.", "0.13.0") + def apply(label: String, filter: ModuleFilter, group: ModuleID => String, level: Level.Value, failOnConflict: Boolean): ConflictWarning = + ConflictWarning(label, level, failOnConflict) - def disable: ConflictWarning = ConflictWarning("", Level.Debug, false) + def disable: ConflictWarning = ConflictWarning("", Level.Debug, false) - private def org = (_: ModuleID).organization - private[this] def idString(org: String, name: String) = s"$org:$name" + private def org = (_: ModuleID).organization + private[this] def idString(org: String, name: String) = s"$org:$name" - def default(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true) + def default(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true) - @deprecated("Warning on evicted modules is no longer done, so this is the same as `default`. 
Use a standard Ivy conflict manager.", "0.13.0") - def strict(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true) + @deprecated("Warning on evicted modules is no longer done, so this is the same as `default`. Use a standard Ivy conflict manager.", "0.13.0") + def strict(label: String): ConflictWarning = ConflictWarning(label, Level.Error, true) - def apply(config: ConflictWarning, report: UpdateReport, log: Logger) - { - processCrossVersioned(config, report, log) - } - private[this] def processCrossVersioned(config: ConflictWarning, report: UpdateReport, log: Logger) - { - val crossMismatches = crossVersionMismatches(report) - if(!crossMismatches.isEmpty) - { - val pre = s"Modules were resolved with conflicting cross-version suffixes in ${config.label}:\n " - val conflictMsgs = - for( ((org,rawName), fullNames) <- crossMismatches ) yield - { - val suffixes = fullNames.map(getCrossSuffix).mkString(", ") - s"${idString(org,rawName)} $suffixes" - } - log.log(config.level, conflictMsgs.mkString(pre, "\n ", "")) - if(config.failOnConflict) { - val summary = crossMismatches.map{ case ((org,raw),_) => idString(org,raw)}.mkString(", ") - sys.error("Conflicting cross-version suffixes in: " + summary) - } - } - } + def apply(config: ConflictWarning, report: UpdateReport, log: Logger) { + processCrossVersioned(config, report, log) + } + private[this] def processCrossVersioned(config: ConflictWarning, report: UpdateReport, log: Logger) { + val crossMismatches = crossVersionMismatches(report) + if (!crossMismatches.isEmpty) { + val pre = s"Modules were resolved with conflicting cross-version suffixes in ${config.label}:\n " + val conflictMsgs = + for (((org, rawName), fullNames) <- crossMismatches) yield { + val suffixes = fullNames.map(getCrossSuffix).mkString(", ") + s"${idString(org, rawName)} $suffixes" + } + log.log(config.level, conflictMsgs.mkString(pre, "\n ", "")) + if (config.failOnConflict) { + val summary = crossMismatches.map { case ((org, 
raw), _) => idString(org, raw) }.mkString(", ") + sys.error("Conflicting cross-version suffixes in: " + summary) + } + } + } - /** Map from (organization, rawName) to set of multiple full names. */ - def crossVersionMismatches(report: UpdateReport): Map[(String,String), Set[String]] = - { - val mismatches = report.configurations.flatMap { confReport => - groupByRawName(confReport.allModules).mapValues { modules => - val differentFullNames = modules.map(_.name).toSet - if(differentFullNames.size > 1) differentFullNames else Set.empty[String] - } - } - (Map.empty[(String,String),Set[String]] /: mismatches)(merge) - } - private[this] def merge[A,B](m: Map[A, Set[B]], b: (A, Set[B])): Map[A, Set[B]] = - if(b._2.isEmpty) m else - m.updated(b._1, m.getOrElse(b._1, Set.empty) ++ b._2) + /** Map from (organization, rawName) to set of multiple full names. */ + def crossVersionMismatches(report: UpdateReport): Map[(String, String), Set[String]] = + { + val mismatches = report.configurations.flatMap { confReport => + groupByRawName(confReport.allModules).mapValues { modules => + val differentFullNames = modules.map(_.name).toSet + if (differentFullNames.size > 1) differentFullNames else Set.empty[String] + } + } + (Map.empty[(String, String), Set[String]] /: mismatches)(merge) + } + private[this] def merge[A, B](m: Map[A, Set[B]], b: (A, Set[B])): Map[A, Set[B]] = + if (b._2.isEmpty) m else + m.updated(b._1, m.getOrElse(b._1, Set.empty) ++ b._2) - private[this] def groupByRawName(ms: Seq[ModuleID]): Map[(String,String), Seq[ModuleID]] = - ms.groupBy(m => (m.organization, dropCrossSuffix(m.name))) + private[this] def groupByRawName(ms: Seq[ModuleID]): Map[(String, String), Seq[ModuleID]] = + ms.groupBy(m => (m.organization, dropCrossSuffix(m.name))) - private[this] val CrossSuffixPattern = """(.+)_(\d+\.\d+(?:\.\d+)?(?:-.+)?)""".r - private[this] def dropCrossSuffix(s: String): String = s match { - case CrossSuffixPattern(raw, _) => raw - case _ => s - } - private[this] def 
getCrossSuffix(s: String): String = s match { - case CrossSuffixPattern(_, v) => "_" + v - case _ => "" - } + private[this] val CrossSuffixPattern = """(.+)_(\d+\.\d+(?:\.\d+)?(?:-.+)?)""".r + private[this] def dropCrossSuffix(s: String): String = s match { + case CrossSuffixPattern(raw, _) => raw + case _ => s + } + private[this] def getCrossSuffix(s: String): String = s match { + case CrossSuffixPattern(_, v) => "_" + v + case _ => "" + } } diff --git a/ivy/src/main/scala/sbt/ConvertResolver.scala b/ivy/src/main/scala/sbt/ConvertResolver.scala index 74c5c119c..613de970e 100644 --- a/ivy/src/main/scala/sbt/ConvertResolver.scala +++ b/ivy/src/main/scala/sbt/ConvertResolver.scala @@ -5,239 +5,234 @@ package sbt import java.net.URL import java.util.Collections -import org.apache.ivy.{core,plugins} +import org.apache.ivy.{ core, plugins } import core.module.id.ModuleRevisionId import core.module.descriptor.DependencyDescriptor import core.resolve.ResolveData import core.settings.IvySettings -import plugins.resolver.{BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver} -import plugins.resolver.{AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver} -import plugins.repository.url.{URLRepository => URLRepo} -import plugins.repository.file.{FileRepository => FileRepo, FileResource} +import plugins.resolver.{ BasicResolver, DependencyResolver, IBiblioResolver, RepositoryResolver } +import plugins.resolver.{ AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver } +import plugins.repository.url.{ URLRepository => URLRepo } +import plugins.repository.file.{ FileRepository => FileRepo, FileResource } import java.io.File import org.apache.ivy.util.ChecksumHelper -import org.apache.ivy.core.module.descriptor.{Artifact=>IArtifact} +import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact } - -private object ConvertResolver -{ - 
/** This class contains all the reflective lookups used in the - * checksum-friendly URL publishing shim. - */ - private object ChecksumFriendlyURLResolver { - // TODO - When we dump JDK6 support we can remove this hackery - // import java.lang.reflect.AccessibleObject - type AccessibleObject = { - def setAccessible(value: Boolean): Unit - } - private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = - try { - val cls = classOf[RepositoryResolver] - val thing = f(cls) - import scala.language.reflectiveCalls - thing.setAccessible(true) - Some(thing) - } catch { - case (_: java.lang.NoSuchFieldException) | - (_: java.lang.SecurityException) | - (_: java.lang.NoSuchMethodException) => None - } - private val signerNameField: Option[java.lang.reflect.Field] = - reflectiveLookup(_.getDeclaredField("signerName")) - private val putChecksumMethod: Option[java.lang.reflect.Method] = - reflectiveLookup(_.getDeclaredMethod("putChecksum", - classOf[IArtifact], classOf[File], classOf[String], - classOf[Boolean], classOf[String])) - private val putSignatureMethod: Option[java.lang.reflect.Method] = - reflectiveLookup(_.getDeclaredMethod("putSignature", - classOf[IArtifact], classOf[File], classOf[String], - classOf[Boolean])) - } - /** - * The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories - * will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore - * if we succeed in publishing an artifact, we need to just blast the checksums in place. - * This acts as a "shim" on RepositoryResolvers so that we can hook our methods into - * both the IBiblioResolver + URLResolver without having to duplicate the code in two - * places. However, this does mean our use of reflection is awesome. - * - * TODO - See about contributing back to ivy. 
- */ - private trait ChecksumFriendlyURLResolver extends RepositoryResolver { - import ChecksumFriendlyURLResolver._ - private def signerName: String = signerNameField match { - case Some(field) => field.get(this).asInstanceOf[String] - case None => null - } - override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = { - // verify the checksum algorithms before uploading artifacts! - val checksums = getChecksumAlgorithms() - val repository = getRepository() - for { - checksum <- checksums - if !ChecksumHelper.isKnownAlgorithm(checksum) - } throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum) - repository.put(artifact, src, dest, overwrite); - // Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so - // we need to overwrite what it has. - for (checksum <- checksums) { - putChecksumMethod match { - case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum) - case None => // TODO - issue warning? - } - } - if (signerName != null) { - putSignatureMethod match { - case None => () - case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean) - } - } +private object ConvertResolver { + /** + * This class contains all the reflective lookups used in the + * checksum-friendly URL publishing shim. 
+ */ + private object ChecksumFriendlyURLResolver { + // TODO - When we dump JDK6 support we can remove this hackery + // import java.lang.reflect.AccessibleObject + type AccessibleObject = { + def setAccessible(value: Boolean): Unit } - } + private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] = + try { + val cls = classOf[RepositoryResolver] + val thing = f(cls) + import scala.language.reflectiveCalls + thing.setAccessible(true) + Some(thing) + } catch { + case (_: java.lang.NoSuchFieldException) | + (_: java.lang.SecurityException) | + (_: java.lang.NoSuchMethodException) => None + } + private val signerNameField: Option[java.lang.reflect.Field] = + reflectiveLookup(_.getDeclaredField("signerName")) + private val putChecksumMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putChecksum", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean], classOf[String])) + private val putSignatureMethod: Option[java.lang.reflect.Method] = + reflectiveLookup(_.getDeclaredMethod("putSignature", + classOf[IArtifact], classOf[File], classOf[String], + classOf[Boolean])) + } + /** + * The default behavior of ivy's overwrite flags ignores the fact that a lot of repositories + * will autogenerate checksums *for* an artifact if it doesn't already exist. Therefore + * if we succeed in publishing an artifact, we need to just blast the checksums in place. + * This acts as a "shim" on RepositoryResolvers so that we can hook our methods into + * both the IBiblioResolver + URLResolver without having to duplicate the code in two + * places. However, this does mean our use of reflection is awesome. + * + * TODO - See about contributing back to ivy. 
+ */ + private trait ChecksumFriendlyURLResolver extends RepositoryResolver { + import ChecksumFriendlyURLResolver._ + private def signerName: String = signerNameField match { + case Some(field) => field.get(this).asInstanceOf[String] + case None => null + } + override protected def put(artifact: IArtifact, src: File, dest: String, overwrite: Boolean): Unit = { + // verify the checksum algorithms before uploading artifacts! + val checksums = getChecksumAlgorithms() + val repository = getRepository() + for { + checksum <- checksums + if !ChecksumHelper.isKnownAlgorithm(checksum) + } throw new IllegalArgumentException("Unknown checksum algorithm: " + checksum) + repository.put(artifact, src, dest, overwrite); + // Fix for sbt#1156 - Artifactory will auto-generate MD5/sha1 files, so + // we need to overwrite what it has. + for (checksum <- checksums) { + putChecksumMethod match { + case Some(method) => method.invoke(this, artifact, src, dest, true: java.lang.Boolean, checksum) + case None => // TODO - issue warning? + } + } + if (signerName != null) { + putSignatureMethod match { + case None => () + case Some(method) => method.invoke(artifact, src, dest, true: java.lang.Boolean) + } + } + } + } - /** Converts the given sbt resolver into an Ivy resolver..*/ - def apply(r: Resolver, settings: IvySettings, log: Logger) = - { - r match - { - case repo: MavenRepository => - { - val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)) - final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired { - def setPatterns() { // done this way for access to protected methods. 
- setArtifactPatterns(pattern) - setIvyPatterns(pattern) - } - } - val resolver = new PluginCapableResolver - resolver.setRepository(new LocalIfFileRepo) - initializeMavenStyle(resolver, repo.name, repo.root) - resolver.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns - resolver - } - case r: JavaNet1Repository => - { - // Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy: - // http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html - val resolver = new IBiblioResolver with DescriptorRequired { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid } - initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/") - resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]") - resolver - } - case repo: SshRepository => - { - val resolver = new SshResolver with DescriptorRequired - initializeSSHResolver(resolver, repo, settings) - repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm)) - resolver - } - case repo: SftpRepository => - { - val resolver = new SFTPResolver - initializeSSHResolver(resolver, repo, settings) - resolver - } - case repo: FileRepository => - { - val resolver = new FileSystemResolver with DescriptorRequired { - // Workaround for #1156 - // Temporarily in sbt 0.13.x we deprecate overwriting - // in local files for non-changing revisions. - // This will be fully enforced in sbt 1.0. 
- setRepository(new WarnOnOverwriteFileRepo()) - } - resolver.setName(repo.name) - initializePatterns(resolver, repo.patterns, settings) - import repo.configuration.{isLocal, isTransactional} - resolver.setLocal(isLocal) - isTransactional.foreach(value => resolver.setTransactional(value.toString)) - resolver - } - case repo: URLRepository => - { - val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired - resolver.setName(repo.name) - initializePatterns(resolver, repo.patterns, settings) - resolver - } - case repo: ChainedResolver => IvySbt.resolverChain(repo.name, repo.resolvers, false, settings, log) - case repo: RawRepository => repo.resolver - } - } - - private sealed trait DescriptorRequired extends BasicResolver - { - override def getDependency(dd: DependencyDescriptor, data: ResolveData) = - { - val prev = descriptorString(isAllownomd) - setDescriptor(descriptorString(hasExplicitURL(dd))) - try super.getDependency(dd, data) finally setDescriptor(prev) - } - def descriptorString(optional: Boolean) = - if(optional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED - def hasExplicitURL(dd: DependencyDescriptor): Boolean = - dd.getAllDependencyArtifacts.exists(_.getUrl != null) - } - private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String) - { - resolver.setName(name) - resolver.setM2compatible(true) - resolver.setRoot(root) - } - private def initializeSSHResolver(resolver: AbstractSshBasedResolver, repo: SshBasedRepository, settings: IvySettings) - { - resolver.setName(repo.name) - resolver.setPassfile(null) - initializePatterns(resolver, repo.patterns, settings) - initializeConnection(resolver, repo.connection) - } - private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection) - { - import resolver._ - import connection._ - hostname.foreach(setHost) - port.foreach(setPort) - authentication foreach - { - case 
RepositoryHelpers.PasswordAuthentication(user, password) => - setUser(user) - password.foreach(setUserPassword) - case RepositoryHelpers.KeyFileAuthentication(user, file, password) => - setKeyFile(file) - password.foreach(setKeyFilePassword) - setUser(user) - } - } - private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: Patterns, settings: IvySettings) - { - resolver.setM2compatible(patterns.isMavenCompatible) - resolver.setDescriptor(if (patterns.descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED) - resolver.setCheckconsistency(!patterns.skipConsistencyCheck) - patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p)) - patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p)) - } - /** A custom Ivy URLRepository that returns FileResources for file URLs. - * This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. */ - private[this] final class LocalIfFileRepo extends URLRepo { - private[this] val repo = new WarnOnOverwriteFileRepo() - override def getResource(source: String) = { - val url = new URL(source) - if(url.getProtocol == IO.FileScheme) - new FileResource(repo, IO.toFile(url)) - else - super.getResource(source) - } - } + /** Converts the given sbt resolver into an Ivy resolver..*/ + def apply(r: Resolver, settings: IvySettings, log: Logger) = + { + r match { + case repo: MavenRepository => + { + val pattern = Collections.singletonList(Resolver.resolvePattern(repo.root, Resolver.mavenStyleBasePattern)) + final class PluginCapableResolver extends IBiblioResolver with ChecksumFriendlyURLResolver with DescriptorRequired { + def setPatterns() { // done this way for access to protected methods. 
+ setArtifactPatterns(pattern) + setIvyPatterns(pattern) + } + } + val resolver = new PluginCapableResolver + resolver.setRepository(new LocalIfFileRepo) + initializeMavenStyle(resolver, repo.name, repo.root) + resolver.setPatterns() // has to be done after initializeMavenStyle, which calls methods that overwrite the patterns + resolver + } + case r: JavaNet1Repository => + { + // Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy: + // http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html + val resolver = new IBiblioResolver with DescriptorRequired { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid } + initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/") + resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]") + resolver + } + case repo: SshRepository => + { + val resolver = new SshResolver with DescriptorRequired + initializeSSHResolver(resolver, repo, settings) + repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm)) + resolver + } + case repo: SftpRepository => + { + val resolver = new SFTPResolver + initializeSSHResolver(resolver, repo, settings) + resolver + } + case repo: FileRepository => + { + val resolver = new FileSystemResolver with DescriptorRequired { + // Workaround for #1156 + // Temporarily in sbt 0.13.x we deprecate overwriting + // in local files for non-changing revisions. + // This will be fully enforced in sbt 1.0. 
+ setRepository(new WarnOnOverwriteFileRepo()) + } + resolver.setName(repo.name) + initializePatterns(resolver, repo.patterns, settings) + import repo.configuration.{ isLocal, isTransactional } + resolver.setLocal(isLocal) + isTransactional.foreach(value => resolver.setTransactional(value.toString)) + resolver + } + case repo: URLRepository => + { + val resolver = new URLResolver with ChecksumFriendlyURLResolver with DescriptorRequired + resolver.setName(repo.name) + initializePatterns(resolver, repo.patterns, settings) + resolver + } + case repo: ChainedResolver => IvySbt.resolverChain(repo.name, repo.resolvers, false, settings, log) + case repo: RawRepository => repo.resolver + } + } - private[this] final class WarnOnOverwriteFileRepo extends FileRepo() { - override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = { - try super.put(source, destination, overwrite) - catch { - case e: java.io.IOException if e.getMessage.contains("destination already exists") => - import org.apache.ivy.util.Message - Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.") - super.put(source, destination, true) - } - } - } + private sealed trait DescriptorRequired extends BasicResolver { + override def getDependency(dd: DependencyDescriptor, data: ResolveData) = + { + val prev = descriptorString(isAllownomd) + setDescriptor(descriptorString(hasExplicitURL(dd))) + try super.getDependency(dd, data) finally setDescriptor(prev) + } + def descriptorString(optional: Boolean) = + if (optional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED + def hasExplicitURL(dd: DependencyDescriptor): Boolean = + dd.getAllDependencyArtifacts.exists(_.getUrl != null) + } + private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String) { + resolver.setName(name) + resolver.setM2compatible(true) + resolver.setRoot(root) + } + private def initializeSSHResolver(resolver: 
AbstractSshBasedResolver, repo: SshBasedRepository, settings: IvySettings) { + resolver.setName(repo.name) + resolver.setPassfile(null) + initializePatterns(resolver, repo.patterns, settings) + initializeConnection(resolver, repo.connection) + } + private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection) { + import resolver._ + import connection._ + hostname.foreach(setHost) + port.foreach(setPort) + authentication foreach + { + case RepositoryHelpers.PasswordAuthentication(user, password) => + setUser(user) + password.foreach(setUserPassword) + case RepositoryHelpers.KeyFileAuthentication(user, file, password) => + setKeyFile(file) + password.foreach(setKeyFilePassword) + setUser(user) + } + } + private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: Patterns, settings: IvySettings) { + resolver.setM2compatible(patterns.isMavenCompatible) + resolver.setDescriptor(if (patterns.descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED) + resolver.setCheckconsistency(!patterns.skipConsistencyCheck) + patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p)) + patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p)) + } + /** + * A custom Ivy URLRepository that returns FileResources for file URLs. + * This allows using the artifacts from the Maven local repository instead of copying them to the Ivy cache. 
+ */ + private[this] final class LocalIfFileRepo extends URLRepo { + private[this] val repo = new WarnOnOverwriteFileRepo() + override def getResource(source: String) = { + val url = new URL(source) + if (url.getProtocol == IO.FileScheme) + new FileResource(repo, IO.toFile(url)) + else + super.getResource(source) + } + } + + private[this] final class WarnOnOverwriteFileRepo extends FileRepo() { + override def put(source: java.io.File, destination: String, overwrite: Boolean): Unit = { + try super.put(source, destination, overwrite) + catch { + case e: java.io.IOException if e.getMessage.contains("destination already exists") => + import org.apache.ivy.util.Message + Message.warn(s"Attempting to overwrite $destination\n\tThis usage is deprecated and will be removed in sbt 1.0.") + super.put(source, destination, true) + } + } + } } diff --git a/ivy/src/main/scala/sbt/Credentials.scala b/ivy/src/main/scala/sbt/Credentials.scala index 882e1c22b..fd6a85538 100644 --- a/ivy/src/main/scala/sbt/Credentials.scala +++ b/ivy/src/main/scala/sbt/Credentials.scala @@ -6,71 +6,66 @@ package sbt import java.io.File import org.apache.ivy.util.url.CredentialsStore -object Credentials -{ - def apply(realm: String, host: String, userName: String, passwd: String): Credentials = - new DirectCredentials(realm, host, userName, passwd) - def apply(file: File): Credentials = - new FileCredentials(file) +object Credentials { + def apply(realm: String, host: String, userName: String, passwd: String): Credentials = + new DirectCredentials(realm, host, userName, passwd) + def apply(file: File): Credentials = + new FileCredentials(file) - /** Add the provided credentials to Ivy's credentials cache.*/ - def add(realm: String, host: String, userName: String, passwd: String): Unit = - CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd) - /** Load credentials from the given file into Ivy's credentials cache.*/ - def add(path: File, log: Logger): Unit = - loadCredentials(path) 
match - { - case Left(err) => log.warn(err) - case Right(dc) => add(dc.realm, dc.host, dc.userName, dc.passwd) - } + /** Add the provided credentials to Ivy's credentials cache.*/ + def add(realm: String, host: String, userName: String, passwd: String): Unit = + CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd) + /** Load credentials from the given file into Ivy's credentials cache.*/ + def add(path: File, log: Logger): Unit = + loadCredentials(path) match { + case Left(err) => log.warn(err) + case Right(dc) => add(dc.realm, dc.host, dc.userName, dc.passwd) + } - def forHost(sc: Seq[Credentials], host: String) = allDirect(sc) find { _.host == host } - def allDirect(sc: Seq[Credentials]): Seq[DirectCredentials] = sc map toDirect - def toDirect(c: Credentials): DirectCredentials = c match { - case dc: DirectCredentials => dc - case fc: FileCredentials => loadCredentials(fc.path) match { - case Left(err) => error(err) - case Right(dc) => dc - } - } + def forHost(sc: Seq[Credentials], host: String) = allDirect(sc) find { _.host == host } + def allDirect(sc: Seq[Credentials]): Seq[DirectCredentials] = sc map toDirect + def toDirect(c: Credentials): DirectCredentials = c match { + case dc: DirectCredentials => dc + case fc: FileCredentials => loadCredentials(fc.path) match { + case Left(err) => error(err) + case Right(dc) => dc + } + } - def loadCredentials(path: File): Either[String, DirectCredentials] = - if(path.exists) - { - val properties = read(path) - def get(keys: List[String]) = keys.flatMap(properties.get).headOption.toRight(keys.head + " not specified in credentials file: " + path) + def loadCredentials(path: File): Either[String, DirectCredentials] = + if (path.exists) { + val properties = read(path) + def get(keys: List[String]) = keys.flatMap(properties.get).headOption.toRight(keys.head + " not specified in credentials file: " + path) - IvyUtil.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match - { - case 
(Nil, List(realm, host, user, pass)) => Right( new DirectCredentials(realm, host, user, pass) ) - case (errors, _) => Left(errors.mkString("\n")) - } - } - else - Left("Credentials file " + path + " does not exist") + IvyUtil.separate(List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get)) match { + case (Nil, List(realm, host, user, pass)) => Right(new DirectCredentials(realm, host, user, pass)) + case (errors, _) => Left(errors.mkString("\n")) + } + } else + Left("Credentials file " + path + " does not exist") - def register(cs: Seq[Credentials], log: Logger): Unit = - cs foreach { - case f: FileCredentials => add(f.path, log) - case d: DirectCredentials => add(d.realm, d.host, d.userName, d.passwd) - } + def register(cs: Seq[Credentials], log: Logger): Unit = + cs foreach { + case f: FileCredentials => add(f.path, log) + case d: DirectCredentials => add(d.realm, d.host, d.userName, d.passwd) + } - private[this] val RealmKeys = List("realm") - private[this] val HostKeys = List("host", "hostname") - private[this] val UserKeys = List("user", "user.name", "username") - private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd") + private[this] val RealmKeys = List("realm") + private[this] val HostKeys = List("host", "hostname") + private[this] val UserKeys = List("user", "user.name", "username") + private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd") - import collection.JavaConversions._ - private[this] def read(from: File): Map[String,String] = - { - val properties = new java.util.Properties - IO.load(properties, from) - properties map { case (k,v) => (k.toString, v.toString.trim) } toMap; - } + import collection.JavaConversions._ + private[this] def read(from: File): Map[String, String] = + { + val properties = new java.util.Properties + IO.load(properties, from) + properties map { case (k, v) => (k.toString, v.toString.trim) } toMap; + } } sealed trait Credentials final class FileCredentials(val path: File) extends 
Credentials { - override def toString = "FileCredentials('" + path + "')" + override def toString = "FileCredentials('" + path + "')" } final class DirectCredentials(val realm: String, val host: String, val userName: String, val passwd: String) extends Credentials diff --git a/ivy/src/main/scala/sbt/CrossVersion.scala b/ivy/src/main/scala/sbt/CrossVersion.scala index 3425bb1f5..79dfe091e 100644 --- a/ivy/src/main/scala/sbt/CrossVersion.scala +++ b/ivy/src/main/scala/sbt/CrossVersion.scala @@ -7,142 +7,159 @@ final case class ScalaVersion(full: String, binary: String) /** Configures how a module will be cross-versioned. */ sealed trait CrossVersion -object CrossVersion -{ - /** The first `major.minor` Scala version that the Scala binary version should be used for cross-versioning instead of the full version. */ - val TransitionScalaVersion = CrossVersionUtil.TransitionScalaVersion +object CrossVersion { + /** The first `major.minor` Scala version that the Scala binary version should be used for cross-versioning instead of the full version. */ + val TransitionScalaVersion = CrossVersionUtil.TransitionScalaVersion - /** The first `major.minor` sbt version that the sbt binary version should be used for cross-versioning instead of the full version. */ - val TransitionSbtVersion = CrossVersionUtil.TransitionSbtVersion + /** The first `major.minor` sbt version that the sbt binary version should be used for cross-versioning instead of the full version. */ + val TransitionSbtVersion = CrossVersionUtil.TransitionSbtVersion - /** Disables cross versioning for a module.*/ - object Disabled extends CrossVersion { override def toString = "disabled" } + /** Disables cross versioning for a module.*/ + object Disabled extends CrossVersion { override def toString = "disabled" } - /** Cross-versions a module using the result of applying `remapVersion` to the binary version. 
- * For example, if `remapVersion = v => "2.10"` and the binary version is "2.9.2" or "2.10", - * the module is cross-versioned with "2.10". */ - final class Binary(val remapVersion: String => String) extends CrossVersion { - override def toString = "Binary" - } + /** + * Cross-versions a module using the result of applying `remapVersion` to the binary version. + * For example, if `remapVersion = v => "2.10"` and the binary version is "2.9.2" or "2.10", + * the module is cross-versioned with "2.10". + */ + final class Binary(val remapVersion: String => String) extends CrossVersion { + override def toString = "Binary" + } - /** Cross-versions a module with the result of applying `remapVersion` to the full version. - * For example, if `remapVersion = v => "2.10"` and the full version is "2.9.2" or "2.10.3", - * the module is cross-versioned with "2.10". */ - final class Full(val remapVersion: String => String) extends CrossVersion { - override def toString = "Full" - } + /** + * Cross-versions a module with the result of applying `remapVersion` to the full version. + * For example, if `remapVersion = v => "2.10"` and the full version is "2.9.2" or "2.10.3", + * the module is cross-versioned with "2.10". + */ + final class Full(val remapVersion: String => String) extends CrossVersion { + override def toString = "Full" + } - /** Cross-versions a module with the full version (typically the full Scala version). */ - def full: CrossVersion = new Full(idFun) + /** Cross-versions a module with the full version (typically the full Scala version). */ + def full: CrossVersion = new Full(idFun) - /** Cross-versions a module with the result of applying `remapVersion` to the full version - * (typically the full Scala version). See also [[sbt.CrossVersion.Full]]. 
*/ - def fullMapped(remapVersion: String => String): CrossVersion = new Full(remapVersion) + /** + * Cross-versions a module with the result of applying `remapVersion` to the full version + * (typically the full Scala version). See also [[sbt.CrossVersion.Full]]. + */ + def fullMapped(remapVersion: String => String): CrossVersion = new Full(remapVersion) - /** Cross-versions a module with the binary version (typically the binary Scala version). */ - def binary: CrossVersion = new Binary(idFun) + /** Cross-versions a module with the binary version (typically the binary Scala version). */ + def binary: CrossVersion = new Binary(idFun) - /** Cross-versions a module with the result of applying `remapVersion` to the binary version - * (typically the binary Scala version). See also [[sbt.CrossVersion.Binary]]. */ - def binaryMapped(remapVersion: String => String): CrossVersion = new Binary(remapVersion) + /** + * Cross-versions a module with the result of applying `remapVersion` to the binary version + * (typically the binary Scala version). See also [[sbt.CrossVersion.Binary]]. + */ + def binaryMapped(remapVersion: String => String): CrossVersion = new Binary(remapVersion) - private[this] def idFun[T]: T => T = x => x + private[this] def idFun[T]: T => T = x => x - @deprecated("Will be made private.", "0.13.1") - def append(s: String): Option[String => String] = Some(x => crossName(x, s)) + @deprecated("Will be made private.", "0.13.1") + def append(s: String): Option[String => String] = Some(x => crossName(x, s)) - /** Construct a cross-versioning function given cross-versioning configuration `cross`, - * full version `fullVersion` and binary version `binaryVersion`. The behavior of the - * constructed function is as documented for the [[sbt.CrossVersion]] datatypes. 
*/ - def apply(cross: CrossVersion, fullVersion: String, binaryVersion: String): Option[String => String] = - cross match - { - case Disabled => None - case b: Binary => append(b.remapVersion(binaryVersion)) - case f: Full => append(f.remapVersion(fullVersion)) - } + /** + * Construct a cross-versioning function given cross-versioning configuration `cross`, + * full version `fullVersion` and binary version `binaryVersion`. The behavior of the + * constructed function is as documented for the [[sbt.CrossVersion]] datatypes. + */ + def apply(cross: CrossVersion, fullVersion: String, binaryVersion: String): Option[String => String] = + cross match { + case Disabled => None + case b: Binary => append(b.remapVersion(binaryVersion)) + case f: Full => append(f.remapVersion(fullVersion)) + } - /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ - def apply(module: ModuleID, is: IvyScala): Option[String => String] = - CrossVersion(module.crossVersion, is.scalaFullVersion, is.scalaBinaryVersion) + /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ + def apply(module: ModuleID, is: IvyScala): Option[String => String] = + CrossVersion(module.crossVersion, is.scalaFullVersion, is.scalaBinaryVersion) - /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ - def apply(module: ModuleID, is: Option[IvyScala]): Option[String => String] = - is flatMap { i => apply(module, i) } + /** Constructs the cross-version function defined by `module` and `is`, if one is configured. */ + def apply(module: ModuleID, is: Option[IvyScala]): Option[String => String] = + is flatMap { i => apply(module, i) } - /** Cross-version each `Artifact` in `artifacts` according to cross-version function `cross`. 
*/ - def substituteCross(artifacts: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] = - cross match { - case None => artifacts - case Some(is) => substituteCrossA(artifacts, cross) - } + /** Cross-version each `Artifact` in `artifacts` according to cross-version function `cross`. */ + def substituteCross(artifacts: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] = + cross match { + case None => artifacts + case Some(is) => substituteCrossA(artifacts, cross) + } - @deprecated("Will be made private.", "0.13.1") - def applyCross(s: String, fopt: Option[String => String]): String = - fopt match { - case None => s - case Some(fopt) => fopt(s) - } + @deprecated("Will be made private.", "0.13.1") + def applyCross(s: String, fopt: Option[String => String]): String = + fopt match { + case None => s + case Some(fopt) => fopt(s) + } - @deprecated("Will be made private.", "0.13.1") - def crossName(name: String, cross: String): String = - name + "_" + cross + @deprecated("Will be made private.", "0.13.1") + def crossName(name: String, cross: String): String = + name + "_" + cross - /** Cross-versions `a` according to cross-version function `cross`. */ - def substituteCross(a: Artifact, cross: Option[String => String]): Artifact = - a.copy(name = applyCross(a.name, cross)) + /** Cross-versions `a` according to cross-version function `cross`. 
*/ + def substituteCross(a: Artifact, cross: Option[String => String]): Artifact = + a.copy(name = applyCross(a.name, cross)) - @deprecated("Will be made private.", "0.13.1") - def substituteCrossA(as: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] = - as.map(art => substituteCross(art, cross)) + @deprecated("Will be made private.", "0.13.1") + def substituteCrossA(as: Seq[Artifact], cross: Option[String => String]): Seq[Artifact] = + as.map(art => substituteCross(art, cross)) - /** Constructs a function that will cross-version a ModuleID - * for the given full and binary Scala versions `scalaFullVersion` and `scalaBinaryVersion` - * according to the ModuleID's cross-versioning setting. */ - def apply(scalaFullVersion: String, scalaBinaryVersion: String): ModuleID => ModuleID = m => - { - val cross = apply(m.crossVersion, scalaFullVersion, scalaBinaryVersion) - if(cross.isDefined) - m.copy(name = applyCross(m.name, cross), explicitArtifacts = substituteCrossA(m.explicitArtifacts, cross)) - else - m - } + /** + * Constructs a function that will cross-version a ModuleID + * for the given full and binary Scala versions `scalaFullVersion` and `scalaBinaryVersion` + * according to the ModuleID's cross-versioning setting. 
+ */ + def apply(scalaFullVersion: String, scalaBinaryVersion: String): ModuleID => ModuleID = m => + { + val cross = apply(m.crossVersion, scalaFullVersion, scalaBinaryVersion) + if (cross.isDefined) + m.copy(name = applyCross(m.name, cross), explicitArtifacts = substituteCrossA(m.explicitArtifacts, cross)) + else + m + } - @deprecated("Use CrossVersion.isScalaApiCompatible or CrossVersion.isSbtApiCompatible", "0.13.0") - def isStable(v: String): Boolean = isScalaApiCompatible(v) + @deprecated("Use CrossVersion.isScalaApiCompatible or CrossVersion.isSbtApiCompatible", "0.13.0") + def isStable(v: String): Boolean = isScalaApiCompatible(v) - @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0") - def selectVersion(full: String, binary: String): String = if(isStable(full)) binary else full + @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0") + def selectVersion(full: String, binary: String): String = if (isStable(full)) binary else full - def isSbtApiCompatible(v: String): Boolean = CrossVersionUtil.isSbtApiCompatible(v) + def isSbtApiCompatible(v: String): Boolean = CrossVersionUtil.isSbtApiCompatible(v) - /** Returns sbt binary interface x.y API compatible with the given version string v. - * RCs for x.y.0 are considered API compatible. - * Compatibile versions include 0.12.0-1 and 0.12.0-RC1 for Some(0, 12). - */ - def sbtApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.sbtApiVersion(v) + /** + * Returns sbt binary interface x.y API compatible with the given version string v. + * RCs for x.y.0 are considered API compatible. + * Compatibile versions include 0.12.0-1 and 0.12.0-RC1 for Some(0, 12). 
+ */ + def sbtApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.sbtApiVersion(v) - def isScalaApiCompatible(v: String): Boolean = CrossVersionUtil.isScalaApiCompatible(v) + def isScalaApiCompatible(v: String): Boolean = CrossVersionUtil.isScalaApiCompatible(v) - /** Returns Scala binary interface x.y API compatible with the given version string v. - * Compatibile versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1. */ - def scalaApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.scalaApiVersion(v) + /** + * Returns Scala binary interface x.y API compatible with the given version string v. + * Compatibile versions include 2.10.0-1 and 2.10.1-M1 for Some(2, 10), but not 2.10.0-RC1. + */ + def scalaApiVersion(v: String): Option[(Int, Int)] = CrossVersionUtil.scalaApiVersion(v) - /** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2.*/ - val PartialVersion = CrossVersionUtil.PartialVersion + /** Regular expression that extracts the major and minor components of a version into matched groups 1 and 2.*/ + val PartialVersion = CrossVersionUtil.PartialVersion - /** Extracts the major and minor components of a version string `s` or returns `None` if the version is improperly formatted. */ - def partialVersion(s: String): Option[(Int,Int)] = CrossVersionUtil.partialVersion(s) + /** Extracts the major and minor components of a version string `s` or returns `None` if the version is improperly formatted. */ + def partialVersion(s: String): Option[(Int, Int)] = CrossVersionUtil.partialVersion(s) - /** Computes the binary Scala version from the `full` version. - * Full Scala versions earlier than [[sbt.CrossVersion.TransitionScalaVersion]] are returned as is. */ - def binaryScalaVersion(full: String): String = CrossVersionUtil.binaryScalaVersion(full) + /** + * Computes the binary Scala version from the `full` version. 
+ * Full Scala versions earlier than [[sbt.CrossVersion.TransitionScalaVersion]] are returned as is. + */ + def binaryScalaVersion(full: String): String = CrossVersionUtil.binaryScalaVersion(full) - /** Computes the binary sbt version from the `full` version. - * Full sbt versions earlier than [[sbt.CrossVersion.TransitionSbtVersion]] are returned as is. */ - def binarySbtVersion(full: String): String = CrossVersionUtil.binarySbtVersion(full) + /** + * Computes the binary sbt version from the `full` version. + * Full sbt versions earlier than [[sbt.CrossVersion.TransitionSbtVersion]] are returned as is. + */ + def binarySbtVersion(full: String): String = CrossVersionUtil.binarySbtVersion(full) - @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0") - def binaryVersion(full: String, cutoff: String): String = CrossVersionUtil.binaryVersion(full, cutoff) + @deprecated("Use CrossVersion.scalaApiVersion or CrossVersion.sbtApiVersion", "0.13.0") + def binaryVersion(full: String, cutoff: String): String = CrossVersionUtil.binaryVersion(full, cutoff) } diff --git a/ivy/src/main/scala/sbt/CustomPomParser.scala b/ivy/src/main/scala/sbt/CustomPomParser.scala index 871c1f07c..12ae48675 100644 --- a/ivy/src/main/scala/sbt/CustomPomParser.scala +++ b/ivy/src/main/scala/sbt/CustomPomParser.scala @@ -1,224 +1,222 @@ package sbt - import org.apache.ivy.{core, plugins, util} - import core.module.id.ModuleRevisionId - import core.module.descriptor.{DefaultArtifact, DefaultExtendsDescriptor, DefaultModuleDescriptor, ModuleDescriptor} - import core.module.descriptor.{DefaultDependencyDescriptor, DependencyDescriptor} - import plugins.parser.{m2, ModuleDescriptorParser, ModuleDescriptorParserRegistry, ParserSettings} - import m2.{PomModuleDescriptorBuilder, PomModuleDescriptorParser} - import plugins.repository.Resource - import plugins.namespace.NamespaceTransformer - import util.extendable.ExtendableItem +import org.apache.ivy.{ core, plugins, util } 
+import core.module.id.ModuleRevisionId +import core.module.descriptor.{ DefaultArtifact, DefaultExtendsDescriptor, DefaultModuleDescriptor, ModuleDescriptor } +import core.module.descriptor.{ DefaultDependencyDescriptor, DependencyDescriptor } +import plugins.parser.{ m2, ModuleDescriptorParser, ModuleDescriptorParserRegistry, ParserSettings } +import m2.{ PomModuleDescriptorBuilder, PomModuleDescriptorParser } +import plugins.repository.Resource +import plugins.namespace.NamespaceTransformer +import util.extendable.ExtendableItem - import java.io.{File, InputStream} - import java.net.URL - import java.util.regex.Pattern +import java.io.{ File, InputStream } +import java.net.URL +import java.util.regex.Pattern -final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (ModuleDescriptorParser, ModuleDescriptor) => ModuleDescriptor) extends ModuleDescriptorParser -{ - override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, validate: Boolean) = - transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, validate)) - - override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, res: Resource, validate: Boolean) = - transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, res, validate)) - - override def toIvyFile(is: InputStream, res: Resource, destFile: File, md: ModuleDescriptor) = delegate.toIvyFile(is, res, destFile, md) +final class CustomPomParser(delegate: ModuleDescriptorParser, transform: (ModuleDescriptorParser, ModuleDescriptor) => ModuleDescriptor) extends ModuleDescriptorParser { + override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, validate: Boolean) = + transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, validate)) - override def accept(res: Resource) = delegate.accept(res) - override def getType() = delegate.getType() - override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = delegate.getMetadataArtifact(mrid, res) + 
override def parseDescriptor(ivySettings: ParserSettings, descriptorURL: URL, res: Resource, validate: Boolean) = + transform(this, delegate.parseDescriptor(ivySettings, descriptorURL, res, validate)) + + override def toIvyFile(is: InputStream, res: Resource, destFile: File, md: ModuleDescriptor) = delegate.toIvyFile(is, res, destFile, md) + + override def accept(res: Resource) = delegate.accept(res) + override def getType() = delegate.getType() + override def getMetadataArtifact(mrid: ModuleRevisionId, res: Resource) = delegate.getMetadataArtifact(mrid, res) } -object CustomPomParser -{ - /** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/ - val InfoKeyPrefix = "info." - val ApiURLKey = "info.apiURL" +object CustomPomParser { + /** The key prefix that indicates that this is used only to store extra information and is not intended for dependency resolution.*/ + val InfoKeyPrefix = "info." + val ApiURLKey = "info.apiURL" - val SbtVersionKey = "sbtVersion" - val ScalaVersionKey = "scalaVersion" - val ExtraAttributesKey = "extraDependencyAttributes" - private[this] val unqualifiedKeys = Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey) + val SbtVersionKey = "sbtVersion" + val ScalaVersionKey = "scalaVersion" + val ExtraAttributesKey = "extraDependencyAttributes" + private[this] val unqualifiedKeys = Set(SbtVersionKey, ScalaVersionKey, ExtraAttributesKey, ApiURLKey) - // packagings that should be jars, but that Ivy doesn't handle as jars - val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit") - val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform) + // packagings that should be jars, but that Ivy doesn't handle as jars + val JarPackagings = Set("eclipse-plugin", "hk2-jar", "orbit") + val default = new CustomPomParser(PomModuleDescriptorParser.getInstance, defaultTransform) - private[this] val TransformedHashKey = 
"e:sbtTransformHash" - // A hash of the parameters transformation is based on. - // If a descriptor has a different hash, we need to retransform it. - private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted) - private[this] def hash(ss: Seq[String]): String = Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray)) + private[this] val TransformedHashKey = "e:sbtTransformHash" + // A hash of the parameters transformation is based on. + // If a descriptor has a different hash, we need to retransform it. + private[this] val TransformHash: String = hash((unqualifiedKeys ++ JarPackagings).toSeq.sorted) + private[this] def hash(ss: Seq[String]): String = Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray)) - // Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance. - lazy val registerDefault: Unit = ModuleDescriptorParserRegistry.getInstance.addParser(default) + // Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance. 
+ lazy val registerDefault: Unit = ModuleDescriptorParserRegistry.getInstance.addParser(default) - def defaultTransform(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = - if(transformedByThisVersion(md)) md else defaultTransformImpl(parser, md) + def defaultTransform(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = + if (transformedByThisVersion(md)) md else defaultTransformImpl(parser, md) - private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = - { - val oldTransformedHashKey = "sbtTransformHash" - val extraInfo = md.getExtraInfo - // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both - Option(extraInfo).isDefined && - ((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match { - case Some(TransformHash) => true - case _ => false - }) - } + private[this] def transformedByThisVersion(md: ModuleDescriptor): Boolean = + { + val oldTransformedHashKey = "sbtTransformHash" + val extraInfo = md.getExtraInfo + // sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both + Option(extraInfo).isDefined && + ((Option(extraInfo get TransformedHashKey) orElse Option(extraInfo get oldTransformedHashKey)) match { + case Some(TransformHash) => true + case _ => false + }) + } - private[this] def defaultTransformImpl(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = - { - val properties = getPomProperties(md) + private[this] def defaultTransformImpl(parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = + { + val properties = getPomProperties(md) - // Extracts extra attributes (currently, sbt and Scala versions) stored in the element of the pom. - // These are attached to the module itself. - val filtered = shouldBeUnqualified(properties) + // Extracts extra attributes (currently, sbt and Scala versions) stored in the element of the pom. 
+ // These are attached to the module itself. + val filtered = shouldBeUnqualified(properties) - // Extracts extra attributes for the dependencies. - // Because the tag in pom.xml cannot include additional metadata, - // sbt includes extra attributes in a 'extraDependencyAttributes' property. - // This is read/written from/to a pure string (no element structure) because Ivy only - // parses the immediate text nodes of the property. - val extraDepAttributes = getDependencyExtra(filtered) + // Extracts extra attributes for the dependencies. + // Because the tag in pom.xml cannot include additional metadata, + // sbt includes extra attributes in a 'extraDependencyAttributes' property. + // This is read/written from/to a pure string (no element structure) because Ivy only + // parses the immediate text nodes of the property. + val extraDepAttributes = getDependencyExtra(filtered) - // Fixes up the detected extension in some cases missed by Ivy. - val convertArtifacts = artifactExtIncorrect(md) + // Fixes up the detected extension in some cases missed by Ivy. + val convertArtifacts = artifactExtIncorrect(md) - // Merges artifact sections for duplicate dependency definitions - val mergeDuplicates = IvySbt.hasDuplicateDependencies(md.getDependencies) + // Merges artifact sections for duplicate dependency definitions + val mergeDuplicates = IvySbt.hasDuplicateDependencies(md.getDependencies) - val unqualify = toUnqualify(filtered) - if(unqualify.isEmpty && extraDepAttributes.isEmpty && !convertArtifacts && !mergeDuplicates) - md - else - addExtra(unqualify, extraDepAttributes, parser, md) - } - // The element of the pom is used to store additional metadata, such as for sbt plugins or for the base URL for API docs. - // This is done because the pom XSD does not appear to allow extra metadata anywhere else. - // The extra sbt plugin metadata in pom.xml does not need to be readable by maven, but the other information may be. 
- // However, the pom.xml needs to be valid in all cases because other tools like repository managers may read the pom.xml. - private[sbt] def getPomProperties(md: ModuleDescriptor): Map[String,String] = - { - import collection.JavaConverters._ - PomModuleDescriptorBuilder.extractPomProperties(md.getExtraInfo).asInstanceOf[java.util.Map[String,String]].asScala.toMap - } - private[sbt] def toUnqualify(propertyAttributes: Map[String, String]): Map[String, String] = - (propertyAttributes - ExtraAttributesKey) map { case (k,v) => ("e:" + k, v) } + val unqualify = toUnqualify(filtered) + if (unqualify.isEmpty && extraDepAttributes.isEmpty && !convertArtifacts && !mergeDuplicates) + md + else + addExtra(unqualify, extraDepAttributes, parser, md) + } + // The element of the pom is used to store additional metadata, such as for sbt plugins or for the base URL for API docs. + // This is done because the pom XSD does not appear to allow extra metadata anywhere else. + // The extra sbt plugin metadata in pom.xml does not need to be readable by maven, but the other information may be. + // However, the pom.xml needs to be valid in all cases because other tools like repository managers may read the pom.xml. 
+ private[sbt] def getPomProperties(md: ModuleDescriptor): Map[String, String] = + { + import collection.JavaConverters._ + PomModuleDescriptorBuilder.extractPomProperties(md.getExtraInfo).asInstanceOf[java.util.Map[String, String]].asScala.toMap + } + private[sbt] def toUnqualify(propertyAttributes: Map[String, String]): Map[String, String] = + (propertyAttributes - ExtraAttributesKey) map { case (k, v) => ("e:" + k, v) } - private[this] def artifactExtIncorrect(md: ModuleDescriptor): Boolean = - md.getConfigurations.exists(conf => md.getArtifacts(conf.getName).exists(art => JarPackagings(art.getExt))) - private[this] def shouldBeUnqualified(m: Map[String, String]): Map[String, String] = m.filterKeys(unqualifiedKeys) - - private[this] def condAddExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId = - if(properties.isEmpty) id else addExtra(properties, id) - private[this] def addExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId = - { - import collection.JavaConverters._ - val oldExtra = qualifiedExtra(id) - val newExtra = (oldExtra ++ properties).asJava - ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, newExtra) - } + private[this] def artifactExtIncorrect(md: ModuleDescriptor): Boolean = + md.getConfigurations.exists(conf => md.getArtifacts(conf.getName).exists(art => JarPackagings(art.getExt))) + private[this] def shouldBeUnqualified(m: Map[String, String]): Map[String, String] = m.filterKeys(unqualifiedKeys) - private[this] def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String,String]] = - (m get ExtraAttributesKey) match { - case None => Map.empty - case Some(str) => - def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include=true)) - readDependencyExtra(str).map(processDep).toMap - } + private[this] def condAddExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId = + if 
(properties.isEmpty) id else addExtra(properties, id) + private[this] def addExtra(properties: Map[String, String], id: ModuleRevisionId): ModuleRevisionId = + { + import collection.JavaConverters._ + val oldExtra = qualifiedExtra(id) + val newExtra = (oldExtra ++ properties).asJava + ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, newExtra) + } - def qualifiedExtra(item: ExtendableItem): Map[String,String] = - { - import collection.JavaConverters._ - item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String,String]].asScala.toMap - } - def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String,String] = - (qualifiedExtra(item) filterKeys { k => qualifiedIsExtra(k) == include }) + private[this] def getDependencyExtra(m: Map[String, String]): Map[ModuleRevisionId, Map[String, String]] = + (m get ExtraAttributesKey) match { + case None => Map.empty + case Some(str) => + def processDep(m: ModuleRevisionId) = (simplify(m), filterCustomExtra(m, include = true)) + readDependencyExtra(str).map(processDep).toMap + } - def writeDependencyExtra(s: Seq[DependencyDescriptor]): Seq[String] = - s.flatMap { dd => - val revId = dd.getDependencyRevisionId - if(filterCustomExtra(revId, include=true).isEmpty) - Nil - else - revId.encodeToString :: Nil - } + def qualifiedExtra(item: ExtendableItem): Map[String, String] = + { + import collection.JavaConverters._ + item.getQualifiedExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap + } + def filterCustomExtra(item: ExtendableItem, include: Boolean): Map[String, String] = + (qualifiedExtra(item) filterKeys { k => qualifiedIsExtra(k) == include }) - // parses the sequence of dependencies with extra attribute information, with one dependency per line - def readDependencyExtra(s: String): Seq[ModuleRevisionId] = - LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode) + def writeDependencyExtra(s: Seq[DependencyDescriptor]): 
Seq[String] = + s.flatMap { dd => + val revId = dd.getDependencyRevisionId + if (filterCustomExtra(revId, include = true).isEmpty) + Nil + else + revId.encodeToString :: Nil + } - private[this] val LinesP = Pattern.compile("(?m)^") + // parses the sequence of dependencies with extra attribute information, with one dependency per line + def readDependencyExtra(s: String): Seq[ModuleRevisionId] = + LinesP.split(s).map(_.trim).filter(!_.isEmpty).map(ModuleRevisionId.decode) - def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey) + private[this] val LinesP = Pattern.compile("(?m)^") - // Reduces the id to exclude custom extra attributes - // This makes the id suitable as a key to associate a dependency parsed from a element - // with the extra attributes from the section - def simplify(id: ModuleRevisionId): ModuleRevisionId = - { - import collection.JavaConverters._ - ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include=false).asJava) - } + def qualifiedIsExtra(k: String): Boolean = k.endsWith(ScalaVersionKey) || k.endsWith(SbtVersionKey) - private[this] def addExtra(dep: DependencyDescriptor, extra: Map[ModuleRevisionId, Map[String, String]]): DependencyDescriptor = - { - val extras = if(extra.isEmpty) None else extra get simplify(dep.getDependencyRevisionId) - extras match { - case None => dep - case Some(extraAttrs) => transform(dep, revId => addExtra(extraAttrs, revId)) - } - } - private[this] def transform(dep: DependencyDescriptor, f: ModuleRevisionId => ModuleRevisionId): DependencyDescriptor = - DefaultDependencyDescriptor.transformInstance(dep, namespaceTransformer(dep.getDependencyRevisionId, f), false) - private[this] def extraTransformer(txId: ModuleRevisionId, extra: Map[String, String]): NamespaceTransformer = - namespaceTransformer(txId, revId => addExtra(extra, revId) ) + // Reduces the id to exclude custom extra attributes + // This makes 
the id suitable as a key to associate a dependency parsed from a element + // with the extra attributes from the section + def simplify(id: ModuleRevisionId): ModuleRevisionId = + { + import collection.JavaConverters._ + ModuleRevisionId.newInstance(id.getOrganisation, id.getName, id.getBranch, id.getRevision, filterCustomExtra(id, include = false).asJava) + } - private[this] def namespaceTransformer(txId: ModuleRevisionId, f: ModuleRevisionId => ModuleRevisionId): NamespaceTransformer = - new NamespaceTransformer { - def transform(revId: ModuleRevisionId): ModuleRevisionId = if(revId == txId) f(revId) else revId - def isIdentity = false - } + private[this] def addExtra(dep: DependencyDescriptor, extra: Map[ModuleRevisionId, Map[String, String]]): DependencyDescriptor = + { + val extras = if (extra.isEmpty) None else extra get simplify(dep.getDependencyRevisionId) + extras match { + case None => dep + case Some(extraAttrs) => transform(dep, revId => addExtra(extraAttrs, revId)) + } + } + private[this] def transform(dep: DependencyDescriptor, f: ModuleRevisionId => ModuleRevisionId): DependencyDescriptor = + DefaultDependencyDescriptor.transformInstance(dep, namespaceTransformer(dep.getDependencyRevisionId, f), false) + private[this] def extraTransformer(txId: ModuleRevisionId, extra: Map[String, String]): NamespaceTransformer = + namespaceTransformer(txId, revId => addExtra(extra, revId)) - import collection.JavaConverters._ - def addExtra(properties: Map[String, String], dependencyExtra: Map[ModuleRevisionId, Map[String,String]], parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = - { - val dmd = new DefaultModuleDescriptor(parser, md.getResource) + private[this] def namespaceTransformer(txId: ModuleRevisionId, f: ModuleRevisionId => ModuleRevisionId): NamespaceTransformer = + new NamespaceTransformer { + def transform(revId: ModuleRevisionId): ModuleRevisionId = if (revId == txId) f(revId) else revId + def isIdentity = false + } - val mrid = 
addExtra(properties, md.getModuleRevisionId) - val resolvedMrid = addExtra(properties, md.getResolvedModuleRevisionId) - dmd.setModuleRevisionId(mrid) - dmd.setResolvedModuleRevisionId(resolvedMrid) + import collection.JavaConverters._ + def addExtra(properties: Map[String, String], dependencyExtra: Map[ModuleRevisionId, Map[String, String]], parser: ModuleDescriptorParser, md: ModuleDescriptor): ModuleDescriptor = + { + val dmd = new DefaultModuleDescriptor(parser, md.getResource) - dmd.setDefault(md.isDefault) - dmd.setHomePage(md.getHomePage) - dmd.setDescription(md.getDescription) - dmd.setLastModified(md.getLastModified) - dmd.setStatus(md.getStatus()) - dmd.setPublicationDate(md.getPublicationDate()) - dmd.setResolvedPublicationDate(md.getResolvedPublicationDate()) + val mrid = addExtra(properties, md.getModuleRevisionId) + val resolvedMrid = addExtra(properties, md.getResolvedModuleRevisionId) + dmd.setModuleRevisionId(mrid) + dmd.setResolvedModuleRevisionId(resolvedMrid) - for(l <- md.getLicenses) dmd.addLicense(l) - for( (key,value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String,String]].asScala ) dmd.addExtraInfo(key, value) - dmd.addExtraInfo(TransformedHashKey, TransformHash) // mark as transformed by this version, so we don't need to do it again - for( (key, value) <- md.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String,String]].asScala ) dmd.addExtraAttributeNamespace(key, value) - IvySbt.addExtraNamespace(dmd) + dmd.setDefault(md.isDefault) + dmd.setHomePage(md.getHomePage) + dmd.setDescription(md.getDescription) + dmd.setLastModified(md.getLastModified) + dmd.setStatus(md.getStatus()) + dmd.setPublicationDate(md.getPublicationDate()) + dmd.setResolvedPublicationDate(md.getResolvedPublicationDate()) - val withExtra = md.getDependencies map { dd => addExtra(dd, dependencyExtra) } - val unique = IvySbt.mergeDuplicateDefinitions(withExtra) - unique foreach dmd.addDependency + for (l <- md.getLicenses) dmd.addLicense(l) + for ((key, 
value) <- md.getExtraInfo.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraInfo(key, value) + dmd.addExtraInfo(TransformedHashKey, TransformHash) // mark as transformed by this version, so we don't need to do it again + for ((key, value) <- md.getExtraAttributesNamespaces.asInstanceOf[java.util.Map[String, String]].asScala) dmd.addExtraAttributeNamespace(key, value) + IvySbt.addExtraNamespace(dmd) - for( ed <- md.getInheritedDescriptors) dmd.addInheritedDescriptor( new DefaultExtendsDescriptor( md, ed.getLocation, ed.getExtendsTypes) ) - for( conf <- md.getConfigurations) { - dmd.addConfiguration(conf) - for(art <- md.getArtifacts(conf.getName)) { - val ext = art.getExt - val newExt = if( JarPackagings(ext) ) "jar" else ext - val nart = new DefaultArtifact(mrid, art.getPublicationDate, art.getName, art.getType, newExt, art.getUrl, art.getQualifiedExtraAttributes) - dmd.addArtifact(conf.getName, nart) - } - } - dmd - } + val withExtra = md.getDependencies map { dd => addExtra(dd, dependencyExtra) } + val unique = IvySbt.mergeDuplicateDefinitions(withExtra) + unique foreach dmd.addDependency + + for (ed <- md.getInheritedDescriptors) dmd.addInheritedDescriptor(new DefaultExtendsDescriptor(md, ed.getLocation, ed.getExtendsTypes)) + for (conf <- md.getConfigurations) { + dmd.addConfiguration(conf) + for (art <- md.getArtifacts(conf.getName)) { + val ext = art.getExt + val newExt = if (JarPackagings(ext)) "jar" else ext + val nart = new DefaultArtifact(mrid, art.getPublicationDate, art.getName, art.getType, newExt, art.getUrl, art.getQualifiedExtraAttributes) + dmd.addArtifact(conf.getName, nart) + } + } + dmd + } } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/CustomXmlParser.scala b/ivy/src/main/scala/sbt/CustomXmlParser.scala index da44743f6..0afa87fbd 100644 --- a/ivy/src/main/scala/sbt/CustomXmlParser.scala +++ b/ivy/src/main/scala/sbt/CustomXmlParser.scala @@ -6,33 +6,31 @@ package sbt import java.io.ByteArrayInputStream import 
java.net.URL -import org.apache.ivy.{core, plugins} -import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor} +import org.apache.ivy.{ core, plugins } +import core.module.descriptor.{ DefaultDependencyDescriptor, DefaultModuleDescriptor } import core.settings.IvySettings import plugins.parser.xml.XmlModuleDescriptorParser import plugins.repository.Resource import plugins.repository.url.URLResource /** Subclasses the default Ivy file parser in order to provide access to protected methods.*/ -private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser -{ - import XmlModuleDescriptorParser.Parser - class CustomParser(settings: IvySettings, defaultConfig: Option[String]) extends Parser(CustomXmlParser, settings) - { - def setSource(url: URL) = - { - super.setResource(new URLResource(url)) - super.setInput(url) - } - def setInput(bytes: Array[Byte]) { setInput(new ByteArrayInputStream(bytes)) } - /** Overridden because the super implementation overwrites the module descriptor.*/ - override def setResource(res: Resource) {} - override def setMd(md: DefaultModuleDescriptor) = - { - super.setMd(md) - if(defaultConfig.isDefined) setDefaultConfMapping("*->default(compile)") - } - override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd) - override def getDefaultConf = defaultConfig.getOrElse(super.getDefaultConf) - } +private[sbt] object CustomXmlParser extends XmlModuleDescriptorParser { + import XmlModuleDescriptorParser.Parser + class CustomParser(settings: IvySettings, defaultConfig: Option[String]) extends Parser(CustomXmlParser, settings) { + def setSource(url: URL) = + { + super.setResource(new URLResource(url)) + super.setInput(url) + } + def setInput(bytes: Array[Byte]) { setInput(new ByteArrayInputStream(bytes)) } + /** Overridden because the super implementation overwrites the module descriptor.*/ + override def setResource(res: Resource) {} + override def setMd(md: 
DefaultModuleDescriptor) = + { + super.setMd(md) + if (defaultConfig.isDefined) setDefaultConfMapping("*->default(compile)") + } + override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd) + override def getDefaultConf = defaultConfig.getOrElse(super.getDefaultConf) + } } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/DependencyFilter.scala b/ivy/src/main/scala/sbt/DependencyFilter.scala index e31cd4e24..fee8da46c 100644 --- a/ivy/src/main/scala/sbt/DependencyFilter.scala +++ b/ivy/src/main/scala/sbt/DependencyFilter.scala @@ -3,65 +3,58 @@ */ package sbt -trait DependencyFilterExtra -{ - def moduleFilter(organization: NameFilter = AllPassFilter, name: NameFilter = AllPassFilter, revision: NameFilter = AllPassFilter): ModuleFilter = - new ModuleFilter { - def apply(m: ModuleID): Boolean = organization.accept(m.organization) && name.accept(m.name) && revision.accept(m.revision) - } - def artifactFilter(name: NameFilter = AllPassFilter, `type`: NameFilter = AllPassFilter, extension: NameFilter = AllPassFilter, classifier: NameFilter = AllPassFilter): ArtifactFilter = - new ArtifactFilter { - def apply(a: Artifact): Boolean = name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(a.extension) && classifier.accept(a.classifier getOrElse "") - } - def configurationFilter(name: NameFilter = AllPassFilter): ConfigurationFilter = - new ConfigurationFilter { - def apply(c: String): Boolean = name.accept(c) - } +trait DependencyFilterExtra { + def moduleFilter(organization: NameFilter = AllPassFilter, name: NameFilter = AllPassFilter, revision: NameFilter = AllPassFilter): ModuleFilter = + new ModuleFilter { + def apply(m: ModuleID): Boolean = organization.accept(m.organization) && name.accept(m.name) && revision.accept(m.revision) + } + def artifactFilter(name: NameFilter = AllPassFilter, `type`: NameFilter = AllPassFilter, extension: NameFilter = AllPassFilter, classifier: NameFilter = 
AllPassFilter): ArtifactFilter = + new ArtifactFilter { + def apply(a: Artifact): Boolean = name.accept(a.name) && `type`.accept(a.`type`) && extension.accept(a.extension) && classifier.accept(a.classifier getOrElse "") + } + def configurationFilter(name: NameFilter = AllPassFilter): ConfigurationFilter = + new ConfigurationFilter { + def apply(c: String): Boolean = name.accept(c) + } } -object DependencyFilter extends DependencyFilterExtra -{ - def make(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): DependencyFilter = - new DependencyFilter { - def apply(c: String, m: ModuleID, a: Artifact): Boolean = configuration(c) && module(m) && artifact(a) - } - def apply(x: DependencyFilter, y: DependencyFilter, combine: (Boolean, Boolean) => Boolean): DependencyFilter = - new DependencyFilter { - def apply(c: String, m: ModuleID, a: Artifact): Boolean = combine(x(c, m, a), y(c, m, a)) - } - def allPass: DependencyFilter = configurationFilter() - implicit def fnToModuleFilter(f: ModuleID => Boolean): ModuleFilter = new ModuleFilter { def apply(m: ModuleID) = f(m) } - implicit def fnToArtifactFilter(f: Artifact => Boolean): ArtifactFilter = new ArtifactFilter { def apply(m: Artifact) = f(m) } - implicit def fnToConfigurationFilter(f: String => Boolean): ConfigurationFilter = new ConfigurationFilter { def apply(c: String) = f(c) } - implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _ +object DependencyFilter extends DependencyFilterExtra { + def make(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): DependencyFilter = + new DependencyFilter { + def apply(c: String, m: ModuleID, a: Artifact): Boolean = configuration(c) && module(m) && artifact(a) + } + def apply(x: DependencyFilter, y: DependencyFilter, combine: (Boolean, Boolean) => Boolean): 
DependencyFilter = + new DependencyFilter { + def apply(c: String, m: ModuleID, a: Artifact): Boolean = combine(x(c, m, a), y(c, m, a)) + } + def allPass: DependencyFilter = configurationFilter() + implicit def fnToModuleFilter(f: ModuleID => Boolean): ModuleFilter = new ModuleFilter { def apply(m: ModuleID) = f(m) } + implicit def fnToArtifactFilter(f: Artifact => Boolean): ArtifactFilter = new ArtifactFilter { def apply(m: Artifact) = f(m) } + implicit def fnToConfigurationFilter(f: String => Boolean): ConfigurationFilter = new ConfigurationFilter { def apply(c: String) = f(c) } + implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _ } -trait DependencyFilter -{ - def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean - final def &&(o: DependencyFilter) = DependencyFilter(this, o, _ && _) - final def ||(o: DependencyFilter) = DependencyFilter(this, o, _ || _) - final def -- (o: DependencyFilter) = DependencyFilter(this, o, _ && !_) +trait DependencyFilter { + def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean + final def &&(o: DependencyFilter) = DependencyFilter(this, o, _ && _) + final def ||(o: DependencyFilter) = DependencyFilter(this, o, _ || _) + final def --(o: DependencyFilter) = DependencyFilter(this, o, _ && !_) } -sealed trait SubDepFilter[Arg, Self <: SubDepFilter[Arg, Self]] extends DependencyFilter -{ self: Self => - def apply(a: Arg): Boolean - protected def make(f: Arg => Boolean): Self - final def &(o: Self): Self = combine(o, _ && _) - final def |(o: Self): Self = combine(o, _ || _) - final def -(o: Self): Self = combine(o, _ && !_) - private[this] def combine(o: Self, f: (Boolean, Boolean) => Boolean): Self = make( (m: Arg) => f(this(m), o(m)) ) +sealed trait SubDepFilter[Arg, Self <: SubDepFilter[Arg, Self]] extends DependencyFilter { self: Self => + def apply(a: Arg): Boolean + protected def make(f: Arg => Boolean): Self + final def &(o: Self): Self = 
combine(o, _ && _) + final def |(o: Self): Self = combine(o, _ || _) + final def -(o: Self): Self = combine(o, _ && !_) + private[this] def combine(o: Self, f: (Boolean, Boolean) => Boolean): Self = make((m: Arg) => f(this(m), o(m))) } -trait ModuleFilter extends SubDepFilter[ModuleID, ModuleFilter] -{ - protected final def make(f: ModuleID => Boolean) = new ModuleFilter { def apply(m: ModuleID) = f(m) } - final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(module) +trait ModuleFilter extends SubDepFilter[ModuleID, ModuleFilter] { + protected final def make(f: ModuleID => Boolean) = new ModuleFilter { def apply(m: ModuleID) = f(m) } + final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(module) } -trait ArtifactFilter extends SubDepFilter[Artifact, ArtifactFilter] -{ - protected final def make(f: Artifact => Boolean) = new ArtifactFilter { def apply(m: Artifact) = f(m) } - final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(artifact) +trait ArtifactFilter extends SubDepFilter[Artifact, ArtifactFilter] { + protected final def make(f: Artifact => Boolean) = new ArtifactFilter { def apply(m: Artifact) = f(m) } + final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(artifact) } -trait ConfigurationFilter extends SubDepFilter[String, ConfigurationFilter] -{ - protected final def make(f: String => Boolean) = new ConfigurationFilter { def apply(m: String) = f(m) } - final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(configuration) +trait ConfigurationFilter extends SubDepFilter[String, ConfigurationFilter] { + protected final def make(f: String => Boolean) = new ConfigurationFilter { def apply(m: String) = f(m) } + final def apply(configuration: String, module: ModuleID, artifact: Artifact): Boolean = apply(configuration) } \ No newline at end of file diff --git 
a/ivy/src/main/scala/sbt/Ivy.scala b/ivy/src/main/scala/sbt/Ivy.scala index a6519c6bc..be029c422 100644 --- a/ivy/src/main/scala/sbt/Ivy.scala +++ b/ivy/src/main/scala/sbt/Ivy.scala @@ -8,636 +8,617 @@ import Resolver.PluginPattern import java.io.File import java.net.URI import java.util.concurrent.Callable -import java.util.{Collection, Collections => CS} +import java.util.{ Collection, Collections => CS } import CS.singleton -import org.apache.ivy.{core, plugins, util, Ivy} -import core.{IvyPatternHelper, LogOptions} -import core.cache.{CacheMetadataOptions, DefaultRepositoryCacheManager, ModuleDescriptorWriter} -import core.module.descriptor.{Artifact => IArtifact, DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact} -import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor, DependencyDescriptor, ModuleDescriptor, License} -import core.module.descriptor.{OverrideDependencyDescriptorMediator} -import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId} -import core.resolve.{IvyNode, ResolveData, ResolvedModuleRevision} +import org.apache.ivy.{ core, plugins, util, Ivy } +import core.{ IvyPatternHelper, LogOptions } +import core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager, ModuleDescriptorWriter } +import core.module.descriptor.{ Artifact => IArtifact, DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact } +import core.module.descriptor.{ DefaultDependencyDescriptor, DefaultModuleDescriptor, DependencyDescriptor, ModuleDescriptor, License } +import core.module.descriptor.{ OverrideDependencyDescriptorMediator } +import core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId } +import core.resolve.{ IvyNode, ResolveData, ResolvedModuleRevision } import core.settings.IvySettings import plugins.latest.LatestRevisionStrategy import plugins.matcher.PatternMatcher import plugins.parser.m2.PomModuleDescriptorParser -import plugins.resolver.{ChainResolver, DependencyResolver} -import util.{Message, 
MessageLogger} +import plugins.resolver.{ ChainResolver, DependencyResolver } +import util.{ Message, MessageLogger } import util.extendable.ExtendableItem -import scala.xml.{NodeSeq, Text} +import scala.xml.{ NodeSeq, Text } -final class IvySbt(val configuration: IvyConfiguration) -{ - import configuration.baseDirectory +final class IvySbt(val configuration: IvyConfiguration) { + import configuration.baseDirectory - /** ========== Configuration/Setup ============ - * This part configures the Ivy instance by first creating the logger interface to ivy, then IvySettings, and then the Ivy instance. - * These are lazy so that they are loaded within the right context. This is important so that no Ivy XML configuration needs to be loaded, - * saving some time. This is necessary because Ivy has global state (IvyContext, Message, DocumentBuilder, ...). - */ - private def withDefaultLogger[T](logger: MessageLogger)(f: => T): T = - { - def action() = - IvySbt.synchronized - { - val originalLogger = Message.getDefaultLogger - Message.setDefaultLogger(logger) - try { f } - finally { Message.setDefaultLogger(originalLogger) } - } - // Ivy is not thread-safe nor can the cache be used concurrently. - // If provided a GlobalLock, we can use that to ensure safe access to the cache. - // Otherwise, we can at least synchronize within the JVM. - // For thread-safety in particular, Ivy uses a static DocumentBuilder, which is not thread-safe. 
- configuration.lock match - { - case Some(lock) => lock(ivyLockFile, new Callable[T] { def call = action() }) - case None => action() - } - } - private lazy val settings: IvySettings = - { - val is = new IvySettings - is.setBaseDir(baseDirectory) - CustomPomParser.registerDefault - configuration match - { - case e: ExternalIvyConfiguration => - IvySbt.addResolvers(e.extraResolvers, is, configuration.log) - IvySbt.loadURI(is, e.uri) - case i: InlineIvyConfiguration => - is.setVariable("ivy.checksums", i.checksums mkString ",") - i.paths.ivyHome foreach is.setDefaultIvyUserDir - IvySbt.configureCache(is, i.localOnly, i.resolutionCacheDir) - IvySbt.setResolvers(is, i.resolvers, i.otherResolvers, i.localOnly, configuration.log) - IvySbt.setModuleConfigurations(is, i.moduleConfigurations, configuration.log) - } - is - } - private lazy val ivy: Ivy = - { - val i = new Ivy() { private val loggerEngine = new SbtMessageLoggerEngine; override def getLoggerEngine = loggerEngine } - i.setSettings(settings) - i.bind() - i.getLoggerEngine.pushLogger(new IvyLoggerInterface(configuration.log)) - i - } - // Must be the same file as is used in Update in the launcher - private lazy val ivyLockFile = new File(settings.getDefaultIvyUserDir, ".sbt.ivy.lock") - /** ========== End Configuration/Setup ============*/ + /** + * ========== Configuration/Setup ============ + * This part configures the Ivy instance by first creating the logger interface to ivy, then IvySettings, and then the Ivy instance. + * These are lazy so that they are loaded within the right context. This is important so that no Ivy XML configuration needs to be loaded, + * saving some time. This is necessary because Ivy has global state (IvyContext, Message, DocumentBuilder, ...). 
+ */ + private def withDefaultLogger[T](logger: MessageLogger)(f: => T): T = + { + def action() = + IvySbt.synchronized { + val originalLogger = Message.getDefaultLogger + Message.setDefaultLogger(logger) + try { f } + finally { Message.setDefaultLogger(originalLogger) } + } + // Ivy is not thread-safe nor can the cache be used concurrently. + // If provided a GlobalLock, we can use that to ensure safe access to the cache. + // Otherwise, we can at least synchronize within the JVM. + // For thread-safety in particular, Ivy uses a static DocumentBuilder, which is not thread-safe. + configuration.lock match { + case Some(lock) => lock(ivyLockFile, new Callable[T] { def call = action() }) + case None => action() + } + } + private lazy val settings: IvySettings = + { + val is = new IvySettings + is.setBaseDir(baseDirectory) + CustomPomParser.registerDefault + configuration match { + case e: ExternalIvyConfiguration => + IvySbt.addResolvers(e.extraResolvers, is, configuration.log) + IvySbt.loadURI(is, e.uri) + case i: InlineIvyConfiguration => + is.setVariable("ivy.checksums", i.checksums mkString ",") + i.paths.ivyHome foreach is.setDefaultIvyUserDir + IvySbt.configureCache(is, i.localOnly, i.resolutionCacheDir) + IvySbt.setResolvers(is, i.resolvers, i.otherResolvers, i.localOnly, configuration.log) + IvySbt.setModuleConfigurations(is, i.moduleConfigurations, configuration.log) + } + is + } + private lazy val ivy: Ivy = + { + val i = new Ivy() { private val loggerEngine = new SbtMessageLoggerEngine; override def getLoggerEngine = loggerEngine } + i.setSettings(settings) + i.bind() + i.getLoggerEngine.pushLogger(new IvyLoggerInterface(configuration.log)) + i + } + // Must be the same file as is used in Update in the launcher + private lazy val ivyLockFile = new File(settings.getDefaultIvyUserDir, ".sbt.ivy.lock") + /** ========== End Configuration/Setup ============*/ - /** Uses the configured Ivy instance within a safe context.*/ - def withIvy[T](log: Logger)(f: Ivy => 
T): T = - withIvy(new IvyLoggerInterface(log))(f) + /** Uses the configured Ivy instance within a safe context.*/ + def withIvy[T](log: Logger)(f: Ivy => T): T = + withIvy(new IvyLoggerInterface(log))(f) - def withIvy[T](log: MessageLogger)(f: Ivy => T): T = - withDefaultLogger(log) - { - // See #429 - We always insert a helper authenticator here which lets us get more useful authentication errors. - ivyint.ErrorMessageAuthenticator.install() - ivy.pushContext() - ivy.getLoggerEngine.pushLogger(log) - try { f(ivy) } - finally { - ivy.getLoggerEngine.popLogger() - ivy.popContext() - } - } + def withIvy[T](log: MessageLogger)(f: Ivy => T): T = + withDefaultLogger(log) { + // See #429 - We always insert a helper authenticator here which lets us get more useful authentication errors. + ivyint.ErrorMessageAuthenticator.install() + ivy.pushContext() + ivy.getLoggerEngine.pushLogger(log) + try { f(ivy) } + finally { + ivy.getLoggerEngine.popLogger() + ivy.popContext() + } + } - final class Module(rawModuleSettings: ModuleSettings) - { - val moduleSettings: ModuleSettings = IvySbt.substituteCross(rawModuleSettings) - def owner = IvySbt.this - def withModule[T](log: Logger)(f: (Ivy,DefaultModuleDescriptor,String) => T): T = - withIvy[T](log) { ivy => f(ivy, moduleDescriptor0, defaultConfig0) } + final class Module(rawModuleSettings: ModuleSettings) { + val moduleSettings: ModuleSettings = IvySbt.substituteCross(rawModuleSettings) + def owner = IvySbt.this + def withModule[T](log: Logger)(f: (Ivy, DefaultModuleDescriptor, String) => T): T = + withIvy[T](log) { ivy => f(ivy, moduleDescriptor0, defaultConfig0) } - def moduleDescriptor(log: Logger): DefaultModuleDescriptor = withModule(log)((_,md,_) => md) - def dependencyMapping(log: Logger): (ModuleRevisionId, ModuleDescriptor) = - { - val md = moduleDescriptor(log) - (md.getModuleRevisionId, md) - } - def defaultConfig(log: Logger): String = withModule(log)( (_,_,dc) => dc) - // these should only be referenced by withModule 
because lazy vals synchronize on this object - // withIvy explicitly locks the IvySbt object, so they have to be done in the right order to avoid deadlock - private[this] lazy val (moduleDescriptor0: DefaultModuleDescriptor, defaultConfig0: String) = - { - val (baseModule, baseConfiguration) = - moduleSettings match - { - case ic: InlineConfiguration => configureInline(ic, configuration.log) - case ec: EmptyConfiguration => configureEmpty(ec) - case pc: PomConfiguration => configurePom(pc) - case ifc: IvyFileConfiguration => configureIvyFile(ifc) - } - moduleSettings.ivyScala.foreach(IvyScala.checkModule(baseModule, baseConfiguration, configuration.log)) - IvySbt.addExtraNamespace(baseModule) - (baseModule, baseConfiguration) - } - private def configureInline(ic: InlineConfiguration, log: Logger) = - { - import ic._ - val moduleID = newConfiguredModuleID(module, moduleInfo, configurations) - IvySbt.setConflictManager(moduleID, conflictManager, ivy.getSettings) - val defaultConf = defaultConfiguration getOrElse Configurations.config(ModuleDescriptor.DEFAULT_CONFIGURATION) - log.debug("Using inline dependencies specified in Scala" + (if(ivyXML.isEmpty) "." 
else " and XML.")) + def moduleDescriptor(log: Logger): DefaultModuleDescriptor = withModule(log)((_, md, _) => md) + def dependencyMapping(log: Logger): (ModuleRevisionId, ModuleDescriptor) = + { + val md = moduleDescriptor(log) + (md.getModuleRevisionId, md) + } + def defaultConfig(log: Logger): String = withModule(log)((_, _, dc) => dc) + // these should only be referenced by withModule because lazy vals synchronize on this object + // withIvy explicitly locks the IvySbt object, so they have to be done in the right order to avoid deadlock + private[this] lazy val (moduleDescriptor0: DefaultModuleDescriptor, defaultConfig0: String) = + { + val (baseModule, baseConfiguration) = + moduleSettings match { + case ic: InlineConfiguration => configureInline(ic, configuration.log) + case ec: EmptyConfiguration => configureEmpty(ec) + case pc: PomConfiguration => configurePom(pc) + case ifc: IvyFileConfiguration => configureIvyFile(ifc) + } + moduleSettings.ivyScala.foreach(IvyScala.checkModule(baseModule, baseConfiguration, configuration.log)) + IvySbt.addExtraNamespace(baseModule) + (baseModule, baseConfiguration) + } + private def configureInline(ic: InlineConfiguration, log: Logger) = + { + import ic._ + val moduleID = newConfiguredModuleID(module, moduleInfo, configurations) + IvySbt.setConflictManager(moduleID, conflictManager, ivy.getSettings) + val defaultConf = defaultConfiguration getOrElse Configurations.config(ModuleDescriptor.DEFAULT_CONFIGURATION) + log.debug("Using inline dependencies specified in Scala" + (if (ivyXML.isEmpty) "." 
else " and XML.")) - val parser = IvySbt.parseIvyXML(ivy.getSettings, IvySbt.wrapped(module, ivyXML), moduleID, defaultConf.name, validate) - IvySbt.addMainArtifact(moduleID) - IvySbt.addOverrides(moduleID, overrides, ivy.getSettings.getMatcher(PatternMatcher.EXACT)) - val transformedDeps = IvySbt.overrideDirect(dependencies, overrides) - IvySbt.addDependencies(moduleID, transformedDeps, parser) - (moduleID, parser.getDefaultConf) - } - private def newConfiguredModuleID(module: ModuleID, moduleInfo: ModuleInfo, configurations: Iterable[Configuration]) = - { - val mod = new DefaultModuleDescriptor(IvySbt.toID(module), "release", null, false) - mod.setLastModified(System.currentTimeMillis) - mod.setDescription(moduleInfo.description) - moduleInfo.homepage foreach { h => mod.setHomePage(h.toString) } - moduleInfo.licenses foreach { l => mod.addLicense(new License(l._1, l._2.toString)) } - IvySbt.addConfigurations(mod, configurations) - IvySbt.addArtifacts(mod, module.explicitArtifacts) - mod - } + val parser = IvySbt.parseIvyXML(ivy.getSettings, IvySbt.wrapped(module, ivyXML), moduleID, defaultConf.name, validate) + IvySbt.addMainArtifact(moduleID) + IvySbt.addOverrides(moduleID, overrides, ivy.getSettings.getMatcher(PatternMatcher.EXACT)) + val transformedDeps = IvySbt.overrideDirect(dependencies, overrides) + IvySbt.addDependencies(moduleID, transformedDeps, parser) + (moduleID, parser.getDefaultConf) + } + private def newConfiguredModuleID(module: ModuleID, moduleInfo: ModuleInfo, configurations: Iterable[Configuration]) = + { + val mod = new DefaultModuleDescriptor(IvySbt.toID(module), "release", null, false) + mod.setLastModified(System.currentTimeMillis) + mod.setDescription(moduleInfo.description) + moduleInfo.homepage foreach { h => mod.setHomePage(h.toString) } + moduleInfo.licenses foreach { l => mod.addLicense(new License(l._1, l._2.toString)) } + IvySbt.addConfigurations(mod, configurations) + IvySbt.addArtifacts(mod, module.explicitArtifacts) + mod + } - 
/** Parses the Maven pom 'pomFile' from the given `PomConfiguration`.*/ - private def configurePom(pc: PomConfiguration) = - { - val md = CustomPomParser.default.parseDescriptor(settings, toURL(pc.file), pc.validate) - val dmd = IvySbt.toDefaultModuleDescriptor(md) - IvySbt.addConfigurations(dmd, Configurations.defaultInternal) - val defaultConf = Configurations.DefaultMavenConfiguration.name - for( is <- pc.ivyScala) if(pc.autoScalaTools) { - val confParser = new CustomXmlParser.CustomParser(settings, Some(defaultConf)) - confParser.setMd(dmd) - addScalaToolDependencies(dmd, confParser, is) - } - (dmd, defaultConf) - } - /** Parses the Ivy file 'ivyFile' from the given `IvyFileConfiguration`.*/ - private def configureIvyFile(ifc: IvyFileConfiguration) = - { - val parser = new CustomXmlParser.CustomParser(settings, None) - parser.setValidate(ifc.validate) - parser.setSource(toURL(ifc.file)) - parser.parse() - val dmd = IvySbt.toDefaultModuleDescriptor(parser.getModuleDescriptor()) - for( is <- ifc.ivyScala ) if(ifc.autoScalaTools) - addScalaToolDependencies(dmd, parser, is) - (dmd, parser.getDefaultConf) - } - private def addScalaToolDependencies(dmd: DefaultModuleDescriptor, parser: CustomXmlParser.CustomParser, is: IvyScala) { - IvySbt.addConfigurations(dmd, Configurations.ScalaTool :: Nil) - IvySbt.addDependencies(dmd, ScalaArtifacts.toolDependencies(is.scalaOrganization, is.scalaFullVersion), parser) - } - private def toURL(file: File) = file.toURI.toURL - private def configureEmpty(ec: EmptyConfiguration) = - { - val defaultConf = ModuleDescriptor.DEFAULT_CONFIGURATION - val mod = newConfiguredModuleID(ec.module, ec.moduleInfo, Seq(Configurations.Default)) - IvySbt.addMainArtifact(mod) - (mod, defaultConf) - } - } + /** Parses the Maven pom 'pomFile' from the given `PomConfiguration`.*/ + private def configurePom(pc: PomConfiguration) = + { + val md = CustomPomParser.default.parseDescriptor(settings, toURL(pc.file), pc.validate) + val dmd = 
IvySbt.toDefaultModuleDescriptor(md) + IvySbt.addConfigurations(dmd, Configurations.defaultInternal) + val defaultConf = Configurations.DefaultMavenConfiguration.name + for (is <- pc.ivyScala) if (pc.autoScalaTools) { + val confParser = new CustomXmlParser.CustomParser(settings, Some(defaultConf)) + confParser.setMd(dmd) + addScalaToolDependencies(dmd, confParser, is) + } + (dmd, defaultConf) + } + /** Parses the Ivy file 'ivyFile' from the given `IvyFileConfiguration`.*/ + private def configureIvyFile(ifc: IvyFileConfiguration) = + { + val parser = new CustomXmlParser.CustomParser(settings, None) + parser.setValidate(ifc.validate) + parser.setSource(toURL(ifc.file)) + parser.parse() + val dmd = IvySbt.toDefaultModuleDescriptor(parser.getModuleDescriptor()) + for (is <- ifc.ivyScala) if (ifc.autoScalaTools) + addScalaToolDependencies(dmd, parser, is) + (dmd, parser.getDefaultConf) + } + private def addScalaToolDependencies(dmd: DefaultModuleDescriptor, parser: CustomXmlParser.CustomParser, is: IvyScala) { + IvySbt.addConfigurations(dmd, Configurations.ScalaTool :: Nil) + IvySbt.addDependencies(dmd, ScalaArtifacts.toolDependencies(is.scalaOrganization, is.scalaFullVersion), parser) + } + private def toURL(file: File) = file.toURI.toURL + private def configureEmpty(ec: EmptyConfiguration) = + { + val defaultConf = ModuleDescriptor.DEFAULT_CONFIGURATION + val mod = newConfiguredModuleID(ec.module, ec.moduleInfo, Seq(Configurations.Default)) + IvySbt.addMainArtifact(mod) + (mod, defaultConf) + } + } } -private object IvySbt -{ - val DefaultIvyConfigFilename = "ivysettings.xml" - val DefaultIvyFilename = "ivy.xml" - val DefaultMavenFilename = "pom.xml" - val DefaultChecksums = Seq("sha1", "md5") +private object IvySbt { + val DefaultIvyConfigFilename = "ivysettings.xml" + val DefaultIvyFilename = "ivy.xml" + val DefaultMavenFilename = "pom.xml" + val DefaultChecksums = Seq("sha1", "md5") - def defaultIvyFile(project: File) = new File(project, DefaultIvyFilename) - def 
defaultIvyConfiguration(project: File) = new File(project, DefaultIvyConfigFilename) - def defaultPOM(project: File) = new File(project, DefaultMavenFilename) + def defaultIvyFile(project: File) = new File(project, DefaultIvyFilename) + def defaultIvyConfiguration(project: File) = new File(project, DefaultIvyConfigFilename) + def defaultPOM(project: File) = new File(project, DefaultMavenFilename) - def loadURI(is: IvySettings, uri: URI) - { - if(uri.getScheme == "file") - is.load(new File(uri)) // IVY-1114 - else - is.load(uri.toURL) - } + def loadURI(is: IvySettings, uri: URI) { + if (uri.getScheme == "file") + is.load(new File(uri)) // IVY-1114 + else + is.load(uri.toURL) + } - /** Sets the resolvers for 'settings' to 'resolvers'. This is done by creating a new chain and making it the default. - * 'other' is for resolvers that should be in a different chain. These are typically used for publishing or other actions. */ - private def setResolvers(settings: IvySettings, resolvers: Seq[Resolver], other: Seq[Resolver], localOnly: Boolean, log: Logger) - { - def makeChain(label: String, name: String, rs: Seq[Resolver]) = { - log.debug(label + " repositories:") - val chain = resolverChain(name, rs, localOnly, settings, log) - settings.addResolver(chain) - chain - } - val otherChain = makeChain("Other", "sbt-other", other) - val mainChain = makeChain("Default", "sbt-chain", resolvers) - settings.setDefaultResolver(mainChain.getName) - } - def resolverChain(name: String, resolvers: Seq[Resolver], localOnly: Boolean, settings: IvySettings, log: Logger): DependencyResolver = - { - val newDefault = new ChainResolver { - // Technically, this should be applied to module configurations. - // That would require custom subclasses of all resolver types in ConvertResolver (a delegation approach does not work). - // It would be better to get proper support into Ivy. - // A workaround is to configure the ModuleConfiguration resolver to be a ChainResolver. 
- // - // This method is only used by the pom parsing code in Ivy to find artifacts it doesn't know about. - // In particular, a) it looks up source and javadoc classifiers b) it looks up a main artifact for packaging="pom" - // sbt now provides the update-classifiers or requires explicitly specifying classifiers explicitly - // Providing a main artifact for packaging="pom" does not seem to be correct and the lookup can be expensive. - // - // Ideally this could just skip the lookup, but unfortunately several artifacts in practice do not follow the - // correct behavior for packaging="pom" and so it is only skipped for source/javadoc classifiers. - override def locate(artifact: IArtifact) = if(hasImplicitClassifier(artifact)) null else super.locate(artifact) + /** + * Sets the resolvers for 'settings' to 'resolvers'. This is done by creating a new chain and making it the default. + * 'other' is for resolvers that should be in a different chain. These are typically used for publishing or other actions. + */ + private def setResolvers(settings: IvySettings, resolvers: Seq[Resolver], other: Seq[Resolver], localOnly: Boolean, log: Logger) { + def makeChain(label: String, name: String, rs: Seq[Resolver]) = { + log.debug(label + " repositories:") + val chain = resolverChain(name, rs, localOnly, settings, log) + settings.addResolver(chain) + chain + } + val otherChain = makeChain("Other", "sbt-other", other) + val mainChain = makeChain("Default", "sbt-chain", resolvers) + settings.setDefaultResolver(mainChain.getName) + } + def resolverChain(name: String, resolvers: Seq[Resolver], localOnly: Boolean, settings: IvySettings, log: Logger): DependencyResolver = + { + val newDefault = new ChainResolver { + // Technically, this should be applied to module configurations. + // That would require custom subclasses of all resolver types in ConvertResolver (a delegation approach does not work). + // It would be better to get proper support into Ivy. 
+ // A workaround is to configure the ModuleConfiguration resolver to be a ChainResolver. + // + // This method is only used by the pom parsing code in Ivy to find artifacts it doesn't know about. + // In particular, a) it looks up source and javadoc classifiers b) it looks up a main artifact for packaging="pom" + // sbt now provides the update-classifiers or requires explicitly specifying classifiers explicitly + // Providing a main artifact for packaging="pom" does not seem to be correct and the lookup can be expensive. + // + // Ideally this could just skip the lookup, but unfortunately several artifacts in practice do not follow the + // correct behavior for packaging="pom" and so it is only skipped for source/javadoc classifiers. + override def locate(artifact: IArtifact) = if (hasImplicitClassifier(artifact)) null else super.locate(artifact) - override def getDependency(dd: DependencyDescriptor, data: ResolveData) = - { - if(data.getOptions.getLog != LogOptions.LOG_QUIET) - Message.info("Resolving " + dd.getDependencyRevisionId + " ...") - val gd = super.getDependency(dd, data) - resetArtifactResolver(gd) - } - } - newDefault.setName(name) - newDefault.setReturnFirst(true) - newDefault.setCheckmodified(false) - for(sbtResolver <- resolvers) { - log.debug("\t" + sbtResolver) - newDefault.add(ConvertResolver(sbtResolver, settings, log)) - } - newDefault - } - def addResolvers(resolvers: Seq[Resolver], settings: IvySettings, log: Logger) - { - for(r <- resolvers) { - log.debug("\t" + r) - settings.addResolver(ConvertResolver(r, settings, log)) - } - } - /** A hack to detect if the given artifact is an automatically generated request for a classifier, - * as opposed to a user-initiated declaration. It relies on Ivy prefixing classifier with m:, while sbt uses e:. 
- * Clearly, it would be better to have an explicit option in Ivy to control this.*/ - def hasImplicitClassifier(artifact: IArtifact): Boolean = - { - import collection.JavaConversions._ - artifact.getQualifiedExtraAttributes.keys.exists(_.asInstanceOf[String] startsWith "m:") - } - private def setModuleConfigurations(settings: IvySettings, moduleConfigurations: Seq[ModuleConfiguration], log: Logger) - { - val existing = settings.getResolverNames - for(moduleConf <- moduleConfigurations) - { - import moduleConf._ - import IvyPatternHelper._ - import PatternMatcher._ - if(!existing.contains(resolver.name)) - settings.addResolver(ConvertResolver(resolver, settings, log)) - val attributes = javaMap(Map(MODULE_KEY -> name, ORGANISATION_KEY -> organization, REVISION_KEY -> revision)) - settings.addModuleConfiguration(attributes, settings.getMatcher(EXACT_OR_REGEXP), resolver.name, null, null, null) - } - } - private def configureCache(settings: IvySettings, localOnly: Boolean, resCacheDir: Option[File]) - { - configureResolutionCache(settings, localOnly, resCacheDir) - configureRepositoryCache(settings, localOnly) - } - private[this] def configureResolutionCache(settings: IvySettings, localOnly: Boolean, resCacheDir: Option[File]) - { - val base = resCacheDir getOrElse settings.getDefaultResolutionCacheBasedir - settings.setResolutionCacheManager(new ResolutionCache(base, settings)) - } - // set the artifact resolver to be the main resolver. 
- // this is because sometimes the artifact resolver saved in the cache is not correct - // the common case is for resolved.getArtifactResolver to be inter-project from a different project's publish-local - // if there are problems with this, a less aggressive fix might be to only reset the artifact resolver when it is a ProjectResolver - // a possible problem is that fetching artifacts is slower, due to the full chain being the artifact resolver instead of the specific resolver - // This also fixes #760, which occurs when metadata exists in a repository, but the artifact doesn't. - private[this] def resetArtifactResolver(resolved: ResolvedModuleRevision): ResolvedModuleRevision = - if(resolved eq null) - null - else { - val desc = resolved.getDescriptor - val updatedDescriptor = CustomPomParser.defaultTransform(desc.getParser, desc) - new ResolvedModuleRevision(resolved.getResolver, resolved.getResolver, updatedDescriptor, resolved.getReport, resolved.isForce) - } + override def getDependency(dd: DependencyDescriptor, data: ResolveData) = + { + if (data.getOptions.getLog != LogOptions.LOG_QUIET) + Message.info("Resolving " + dd.getDependencyRevisionId + " ...") + val gd = super.getDependency(dd, data) + resetArtifactResolver(gd) + } + } + newDefault.setName(name) + newDefault.setReturnFirst(true) + newDefault.setCheckmodified(false) + for (sbtResolver <- resolvers) { + log.debug("\t" + sbtResolver) + newDefault.add(ConvertResolver(sbtResolver, settings, log)) + } + newDefault + } + def addResolvers(resolvers: Seq[Resolver], settings: IvySettings, log: Logger) { + for (r <- resolvers) { + log.debug("\t" + r) + settings.addResolver(ConvertResolver(r, settings, log)) + } + } + /** + * A hack to detect if the given artifact is an automatically generated request for a classifier, + * as opposed to a user-initiated declaration. It relies on Ivy prefixing classifier with m:, while sbt uses e:. 
+ * Clearly, it would be better to have an explicit option in Ivy to control this. + */ + def hasImplicitClassifier(artifact: IArtifact): Boolean = + { + import collection.JavaConversions._ + artifact.getQualifiedExtraAttributes.keys.exists(_.asInstanceOf[String] startsWith "m:") + } + private def setModuleConfigurations(settings: IvySettings, moduleConfigurations: Seq[ModuleConfiguration], log: Logger) { + val existing = settings.getResolverNames + for (moduleConf <- moduleConfigurations) { + import moduleConf._ + import IvyPatternHelper._ + import PatternMatcher._ + if (!existing.contains(resolver.name)) + settings.addResolver(ConvertResolver(resolver, settings, log)) + val attributes = javaMap(Map(MODULE_KEY -> name, ORGANISATION_KEY -> organization, REVISION_KEY -> revision)) + settings.addModuleConfiguration(attributes, settings.getMatcher(EXACT_OR_REGEXP), resolver.name, null, null, null) + } + } + private def configureCache(settings: IvySettings, localOnly: Boolean, resCacheDir: Option[File]) { + configureResolutionCache(settings, localOnly, resCacheDir) + configureRepositoryCache(settings, localOnly) + } + private[this] def configureResolutionCache(settings: IvySettings, localOnly: Boolean, resCacheDir: Option[File]) { + val base = resCacheDir getOrElse settings.getDefaultResolutionCacheBasedir + settings.setResolutionCacheManager(new ResolutionCache(base, settings)) + } + // set the artifact resolver to be the main resolver. 
+ // this is because sometimes the artifact resolver saved in the cache is not correct + // the common case is for resolved.getArtifactResolver to be inter-project from a different project's publish-local + // if there are problems with this, a less aggressive fix might be to only reset the artifact resolver when it is a ProjectResolver + // a possible problem is that fetching artifacts is slower, due to the full chain being the artifact resolver instead of the specific resolver + // This also fixes #760, which occurs when metadata exists in a repository, but the artifact doesn't. + private[this] def resetArtifactResolver(resolved: ResolvedModuleRevision): ResolvedModuleRevision = + if (resolved eq null) + null + else { + val desc = resolved.getDescriptor + val updatedDescriptor = CustomPomParser.defaultTransform(desc.getParser, desc) + new ResolvedModuleRevision(resolved.getResolver, resolved.getResolver, updatedDescriptor, resolved.getReport, resolved.isForce) + } - private[this] def configureRepositoryCache(settings: IvySettings, localOnly: Boolean) //, artifactResolver: DependencyResolver) - { - val cacheDir = settings.getDefaultRepositoryCacheBasedir() - val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir) { - override def findModuleInCache(dd: DependencyDescriptor, revId: ModuleRevisionId, options: CacheMetadataOptions, r: String) = { - // ignore and reset the resolver- not ideal, but avoids thrashing. 
- val resolved = resetArtifactResolver(super.findModuleInCache(dd,revId,options,null)) - // invalidate the cache if the artifact was removed from the local repository - if(resolved == null) null - else if(isProjectResolver(resolved.getResolver)) { - resolved.getReport.getLocalFile.delete() - null - } else { - val origin = resolved.getReport.getArtifactOrigin - if(!origin.isLocal) resolved - else { - val file = new File(origin.getLocation) - if(file == null || file.exists) resolved - else { - resolved.getReport.getLocalFile.delete() - null - } - } - } - } - private[this] def isProjectResolver(r: DependencyResolver): Boolean = r match { - case pr: ProjectResolver => true - case _ => false - } - // ignore the original resolver wherever possible to avoid issues like #704 - override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} - } - manager.setArtifactPattern(PluginPattern + manager.getArtifactPattern) - manager.setDataFilePattern(PluginPattern + manager.getDataFilePattern) - manager.setIvyPattern(PluginPattern + manager.getIvyPattern) - manager.setUseOrigin(true) - if(localOnly) - manager.setDefaultTTL(java.lang.Long.MAX_VALUE) - else - { - manager.setChangingMatcher(PatternMatcher.REGEXP) - manager.setChangingPattern(".*-SNAPSHOT") - } - settings.addRepositoryCacheManager(manager) - settings.setDefaultRepositoryCacheManager(manager) - } - def toIvyConfiguration(configuration: Configuration) = - { - import org.apache.ivy.core.module.descriptor.{Configuration => IvyConfig} - import IvyConfig.Visibility._ - import configuration._ - new IvyConfig(name, if(isPublic) PUBLIC else PRIVATE, description, extendsConfigs.map(_.name).toArray, transitive, null) - } - def addExtraNamespace(dmd: DefaultModuleDescriptor): Unit = - dmd.addExtraAttributeNamespace("e", "http://ant.apache.org/ivy/extra") + private[this] def configureRepositoryCache(settings: IvySettings, localOnly: Boolean) //, artifactResolver: 
DependencyResolver) + { + val cacheDir = settings.getDefaultRepositoryCacheBasedir() + val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir) { + override def findModuleInCache(dd: DependencyDescriptor, revId: ModuleRevisionId, options: CacheMetadataOptions, r: String) = { + // ignore and reset the resolver- not ideal, but avoids thrashing. + val resolved = resetArtifactResolver(super.findModuleInCache(dd, revId, options, null)) + // invalidate the cache if the artifact was removed from the local repository + if (resolved == null) null + else if (isProjectResolver(resolved.getResolver)) { + resolved.getReport.getLocalFile.delete() + null + } else { + val origin = resolved.getReport.getArtifactOrigin + if (!origin.isLocal) resolved + else { + val file = new File(origin.getLocation) + if (file == null || file.exists) resolved + else { + resolved.getReport.getLocalFile.delete() + null + } + } + } + } + private[this] def isProjectResolver(r: DependencyResolver): Boolean = r match { + case pr: ProjectResolver => true + case _ => false + } + // ignore the original resolver wherever possible to avoid issues like #704 + override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} + } + manager.setArtifactPattern(PluginPattern + manager.getArtifactPattern) + manager.setDataFilePattern(PluginPattern + manager.getDataFilePattern) + manager.setIvyPattern(PluginPattern + manager.getIvyPattern) + manager.setUseOrigin(true) + if (localOnly) + manager.setDefaultTTL(java.lang.Long.MAX_VALUE) + else { + manager.setChangingMatcher(PatternMatcher.REGEXP) + manager.setChangingPattern(".*-SNAPSHOT") + } + settings.addRepositoryCacheManager(manager) + settings.setDefaultRepositoryCacheManager(manager) + } + def toIvyConfiguration(configuration: Configuration) = + { + import org.apache.ivy.core.module.descriptor.{ Configuration => IvyConfig } + import IvyConfig.Visibility._ + import configuration._ + 
new IvyConfig(name, if (isPublic) PUBLIC else PRIVATE, description, extendsConfigs.map(_.name).toArray, transitive, null) + } + def addExtraNamespace(dmd: DefaultModuleDescriptor): Unit = + dmd.addExtraAttributeNamespace("e", "http://ant.apache.org/ivy/extra") - /** Adds the ivy.xml main artifact. */ - private def addMainArtifact(moduleID: DefaultModuleDescriptor) - { - val artifact = DefaultArtifact.newIvyArtifact(moduleID.getResolvedModuleRevisionId, moduleID.getPublicationDate) - moduleID.setModuleArtifact(artifact) - moduleID.check() - } - private def setConflictManager(moduleID: DefaultModuleDescriptor, conflict: ConflictManager, is: IvySettings) - { - val mid = ModuleId.newInstance(conflict.organization, conflict.module) - val matcher = is.getMatcher(PatternMatcher.EXACT_OR_REGEXP) - val manager = is.getConflictManager(conflict.name) - moduleID.addConflictManager(mid, matcher, manager) - } + /** Adds the ivy.xml main artifact. */ + private def addMainArtifact(moduleID: DefaultModuleDescriptor) { + val artifact = DefaultArtifact.newIvyArtifact(moduleID.getResolvedModuleRevisionId, moduleID.getPublicationDate) + moduleID.setModuleArtifact(artifact) + moduleID.check() + } + private def setConflictManager(moduleID: DefaultModuleDescriptor, conflict: ConflictManager, is: IvySettings) { + val mid = ModuleId.newInstance(conflict.organization, conflict.module) + val matcher = is.getMatcher(PatternMatcher.EXACT_OR_REGEXP) + val manager = is.getConflictManager(conflict.name) + moduleID.addConflictManager(mid, matcher, manager) + } - /** Converts the given sbt module id into an Ivy ModuleRevisionId.*/ - def toID(m: ModuleID) = - { - import m._ - ModuleRevisionId.newInstance(organization, name, revision, javaMap(extraAttributes)) - } + /** Converts the given sbt module id into an Ivy ModuleRevisionId.*/ + def toID(m: ModuleID) = + { + import m._ + ModuleRevisionId.newInstance(organization, name, revision, javaMap(extraAttributes)) + } - private def substituteCross(m: 
ModuleSettings): ModuleSettings = - m.ivyScala match { - case None => m - case Some(is) => substituteCross(m, is.scalaFullVersion, is.scalaBinaryVersion) - } - private def substituteCross(m: ModuleSettings, scalaFullVersion: String, scalaBinaryVersion: String): ModuleSettings = - { - val sub = CrossVersion(scalaFullVersion, scalaBinaryVersion) - m match { - case ec: EmptyConfiguration => ec.copy(module = sub(ec.module)) - case ic: InlineConfiguration => ic.copy(module = sub(ic.module), dependencies = ic.dependencies map sub, overrides = ic.overrides map sub) - case _ => m - } - } + private def substituteCross(m: ModuleSettings): ModuleSettings = + m.ivyScala match { + case None => m + case Some(is) => substituteCross(m, is.scalaFullVersion, is.scalaBinaryVersion) + } + private def substituteCross(m: ModuleSettings, scalaFullVersion: String, scalaBinaryVersion: String): ModuleSettings = + { + val sub = CrossVersion(scalaFullVersion, scalaBinaryVersion) + m match { + case ec: EmptyConfiguration => ec.copy(module = sub(ec.module)) + case ic: InlineConfiguration => ic.copy(module = sub(ic.module), dependencies = ic.dependencies map sub, overrides = ic.overrides map sub) + case _ => m + } + } - private def toIvyArtifact(moduleID: ModuleDescriptor, a: Artifact, allConfigurations: Iterable[String]): MDArtifact = - { - val artifact = new MDArtifact(moduleID, a.name, a.`type`, a.extension, null, extra(a, false)) - copyConfigurations(a, artifact.addConfiguration, allConfigurations) - artifact - } - def getExtraAttributes(revID: ExtendableItem): Map[String,String] = - { - import collection.JavaConverters._ - revID.getExtraAttributes.asInstanceOf[java.util.Map[String,String]].asScala.toMap - } - private[sbt] def extra(artifact: Artifact, unqualify: Boolean = false): java.util.Map[String, String] = - { - val ea = artifact.classifier match { case Some(c) => artifact.extra("e:classifier" -> c); case None => artifact } - javaMap(ea.extraAttributes, unqualify) - } - private[sbt] 
def javaMap(m: Map[String,String], unqualify: Boolean = false) = - { - val map = if(unqualify) m map { case (k, v) => (k.stripPrefix("e:"), v) } else m - if(map.isEmpty) null else scala.collection.JavaConversions.mapAsJavaMap(map) - } + private def toIvyArtifact(moduleID: ModuleDescriptor, a: Artifact, allConfigurations: Iterable[String]): MDArtifact = + { + val artifact = new MDArtifact(moduleID, a.name, a.`type`, a.extension, null, extra(a, false)) + copyConfigurations(a, artifact.addConfiguration, allConfigurations) + artifact + } + def getExtraAttributes(revID: ExtendableItem): Map[String, String] = + { + import collection.JavaConverters._ + revID.getExtraAttributes.asInstanceOf[java.util.Map[String, String]].asScala.toMap + } + private[sbt] def extra(artifact: Artifact, unqualify: Boolean = false): java.util.Map[String, String] = + { + val ea = artifact.classifier match { case Some(c) => artifact.extra("e:classifier" -> c); case None => artifact } + javaMap(ea.extraAttributes, unqualify) + } + private[sbt] def javaMap(m: Map[String, String], unqualify: Boolean = false) = + { + val map = if (unqualify) m map { case (k, v) => (k.stripPrefix("e:"), v) } else m + if (map.isEmpty) null else scala.collection.JavaConversions.mapAsJavaMap(map) + } - private object javaMap - { - import java.util.{HashMap, Map} - def apply[K,V](pairs: (K,V)*): Map[K,V] = - { - val map = new HashMap[K,V] - pairs.foreach { case (key, value) => map.put(key, value) } - map - } - } - /** Creates a full ivy file for 'module' using the 'dependencies' XML as the part after the <info>...</info> section. */ - private def wrapped(module: ModuleID, dependencies: NodeSeq) = - { - - { if(hasInfo(module, dependencies)) - NodeSeq.Empty - else - addExtraAttributes(defaultInfo(module), module.extraAttributes) - } - {dependencies} - { - // this is because Ivy adds a default artifact if none are specified. 
- if(dependencies \\ "publications" isEmpty) else NodeSeq.Empty - } - - } - private[this] def defaultInfo(module: ModuleID): scala.xml.Elem = { - import module._ - - } - private[this] def addExtraAttributes(elem: scala.xml.Elem, extra: Map[String, String]): scala.xml.Elem = - (elem /: extra) { case (e, (key,value) ) => e % new scala.xml.UnprefixedAttribute(key, value, scala.xml.Null) } - private def hasInfo(module: ModuleID, x: scala.xml.NodeSeq) = - { - val info = {x} \ "info" - if(!info.isEmpty) - { - def check(found: NodeSeq, expected: String, label: String) = - if(found.isEmpty) - sys.error("Missing " + label + " in inline Ivy XML.") - else { - val str = found.text - if(str != expected) sys.error("Inconsistent " + label + " in inline Ivy XML. Expected '" + expected + "', got '" + str + "'") - } - check(info \ "@organisation", module.organization, "organisation") - check(info \ "@module", module.name, "name") - check(info \ "@revision", module.revision, "version") - } - !info.isEmpty - } - /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */ - private def parseIvyXML(settings: IvySettings, xml: scala.xml.NodeSeq, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser = - parseIvyXML(settings, xml.toString, moduleID, defaultConfiguration, validate) - /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. 
*/ - private def parseIvyXML(settings: IvySettings, xml: String, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser = - { - val parser = new CustomXmlParser.CustomParser(settings, Some(defaultConfiguration)) - parser.setMd(moduleID) - parser.setValidate(validate) - parser.setInput(xml.getBytes) - parser.parse() - parser - } + private object javaMap { + import java.util.{ HashMap, Map } + def apply[K, V](pairs: (K, V)*): Map[K, V] = + { + val map = new HashMap[K, V] + pairs.foreach { case (key, value) => map.put(key, value) } + map + } + } + /** Creates a full ivy file for 'module' using the 'dependencies' XML as the part after the <info>...</info> section. */ + private def wrapped(module: ModuleID, dependencies: NodeSeq) = + { + + { + if (hasInfo(module, dependencies)) + NodeSeq.Empty + else + addExtraAttributes(defaultInfo(module), module.extraAttributes) + } + { dependencies } + { + // this is because Ivy adds a default artifact if none are specified. + if (dependencies \\ "publications" isEmpty) else NodeSeq.Empty + } + + } + private[this] def defaultInfo(module: ModuleID): scala.xml.Elem = { + import module._ + + } + private[this] def addExtraAttributes(elem: scala.xml.Elem, extra: Map[String, String]): scala.xml.Elem = + (elem /: extra) { case (e, (key, value)) => e % new scala.xml.UnprefixedAttribute(key, value, scala.xml.Null) } + private def hasInfo(module: ModuleID, x: scala.xml.NodeSeq) = + { + val info = { x } \ "info" + if (!info.isEmpty) { + def check(found: NodeSeq, expected: String, label: String) = + if (found.isEmpty) + sys.error("Missing " + label + " in inline Ivy XML.") + else { + val str = found.text + if (str != expected) sys.error("Inconsistent " + label + " in inline Ivy XML. 
Expected '" + expected + "', got '" + str + "'") + } + check(info \ "@organisation", module.organization, "organisation") + check(info \ "@module", module.name, "name") + check(info \ "@revision", module.revision, "version") + } + !info.isEmpty + } + /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */ + private def parseIvyXML(settings: IvySettings, xml: scala.xml.NodeSeq, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser = + parseIvyXML(settings, xml.toString, moduleID, defaultConfiguration, validate) + /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */ + private def parseIvyXML(settings: IvySettings, xml: String, moduleID: DefaultModuleDescriptor, defaultConfiguration: String, validate: Boolean): CustomXmlParser.CustomParser = + { + val parser = new CustomXmlParser.CustomParser(settings, Some(defaultConfiguration)) + parser.setMd(moduleID) + parser.setValidate(validate) + parser.setInput(xml.getBytes) + parser.parse() + parser + } - /** This method is used to add inline dependencies to the provided module. */ - def addDependencies(moduleID: DefaultModuleDescriptor, dependencies: Seq[ModuleID], parser: CustomXmlParser.CustomParser) - { - val converted = dependencies map { dependency => convertDependency(moduleID, dependency, parser) } - val unique = if(hasDuplicateDependencies(converted)) mergeDuplicateDefinitions(converted) else converted - unique foreach moduleID.addDependency - } - /** Determines if there are multiple dependency definitions for the same dependency ID. */ - def hasDuplicateDependencies(dependencies: Seq[DependencyDescriptor]): Boolean = - { - val ids = dependencies.map(_.getDependencyRevisionId) - ids.toSet.size != ids.size - } + /** This method is used to add inline dependencies to the provided module. 
*/ + def addDependencies(moduleID: DefaultModuleDescriptor, dependencies: Seq[ModuleID], parser: CustomXmlParser.CustomParser) { + val converted = dependencies map { dependency => convertDependency(moduleID, dependency, parser) } + val unique = if (hasDuplicateDependencies(converted)) mergeDuplicateDefinitions(converted) else converted + unique foreach moduleID.addDependency + } + /** Determines if there are multiple dependency definitions for the same dependency ID. */ + def hasDuplicateDependencies(dependencies: Seq[DependencyDescriptor]): Boolean = + { + val ids = dependencies.map(_.getDependencyRevisionId) + ids.toSet.size != ids.size + } - /** Combines the artifacts, includes, and excludes of duplicate dependency definitions. - * This is somewhat fragile and is only intended to workaround Ivy (or sbt's use of Ivy) not handling this case properly. - * In particular, Ivy will create multiple dependency entries when converting a pom with a dependency on a classified artifact and a non-classified artifact: - * https://github.com/sbt/sbt/issues/468 - * It will also allow users to declare dependencies on classified modules in different configurations: - * https://groups.google.com/d/topic/simple-build-tool/H2MdAARz6e0/discussion - * as well as basic multi-classifier handling: #285, #419, #480. - * Multiple dependency definitions should otherwise be avoided as much as possible. - */ - def mergeDuplicateDefinitions(dependencies: Seq[DependencyDescriptor]): Seq[DependencyDescriptor] = - { - // need to preserve basic order of dependencies: can't use dependencies.groupBy - val deps = new java.util.LinkedHashMap[ModuleRevisionId, List[DependencyDescriptor]] - for( dd <- dependencies ) - { - val id = dd.getDependencyRevisionId - val updated = deps get id match { - case null => dd :: Nil - case v => dd :: v - } - deps.put(id, updated) - } + /** + * Combines the artifacts, includes, and excludes of duplicate dependency definitions. 
+ * This is somewhat fragile and is only intended to workaround Ivy (or sbt's use of Ivy) not handling this case properly. + * In particular, Ivy will create multiple dependency entries when converting a pom with a dependency on a classified artifact and a non-classified artifact: + * https://github.com/sbt/sbt/issues/468 + * It will also allow users to declare dependencies on classified modules in different configurations: + * https://groups.google.com/d/topic/simple-build-tool/H2MdAARz6e0/discussion + * as well as basic multi-classifier handling: #285, #419, #480. + * Multiple dependency definitions should otherwise be avoided as much as possible. + */ + def mergeDuplicateDefinitions(dependencies: Seq[DependencyDescriptor]): Seq[DependencyDescriptor] = + { + // need to preserve basic order of dependencies: can't use dependencies.groupBy + val deps = new java.util.LinkedHashMap[ModuleRevisionId, List[DependencyDescriptor]] + for (dd <- dependencies) { + val id = dd.getDependencyRevisionId + val updated = deps get id match { + case null => dd :: Nil + case v => dd :: v + } + deps.put(id, updated) + } - import collection.JavaConverters._ - deps.values.asScala.toSeq.flatMap { dds => - val mergeable = (dds, dds.tail).zipped.forall( ivyint.MergeDescriptors.mergeable _) - if(mergeable) dds.reverse.reduceLeft(ivyint.MergeDescriptors.apply _) :: Nil else dds - } - } + import collection.JavaConverters._ + deps.values.asScala.toSeq.flatMap { dds => + val mergeable = (dds, dds.tail).zipped.forall(ivyint.MergeDescriptors.mergeable _) + if (mergeable) dds.reverse.reduceLeft(ivyint.MergeDescriptors.apply _) :: Nil else dds + } + } - /** Transforms an sbt ModuleID into an Ivy DefaultDependencyDescriptor.*/ - def convertDependency(moduleID: DefaultModuleDescriptor, dependency: ModuleID, parser: CustomXmlParser.CustomParser): DefaultDependencyDescriptor = - { - val dependencyDescriptor = new DefaultDependencyDescriptor(moduleID, toID(dependency), dependency.isForce, 
dependency.isChanging, dependency.isTransitive) - dependency.configurations match - { - case None => // The configuration for this dependency was not explicitly specified, so use the default - parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor) - case Some(confs) => // The configuration mapping (looks like: test->default) was specified for this dependency - parser.parseDepsConfs(confs, dependencyDescriptor) - } - for(artifact <- dependency.explicitArtifacts) - { - import artifact.{name, classifier, `type`, extension, url} - val extraMap = extra(artifact) - val ivyArtifact = new DefaultDependencyArtifactDescriptor(dependencyDescriptor, name, `type`, extension, url.getOrElse(null), extraMap) - copyConfigurations(artifact, ivyArtifact.addConfiguration) - for(conf <- dependencyDescriptor.getModuleConfigurations) - dependencyDescriptor.addDependencyArtifact(conf, ivyArtifact) - } - for(excls <- dependency.exclusions) - { - for(conf <- dependencyDescriptor.getModuleConfigurations) - { - dependencyDescriptor.addExcludeRule(conf, IvyScala.excludeRule(excls.organization, excls.name, excls.configurations, excls.artifact)) - } - } - dependencyDescriptor - } - def copyConfigurations(artifact: Artifact, addConfiguration: String => Unit): Unit = - copyConfigurations(artifact, addConfiguration, "*" :: Nil) - - private[this] def copyConfigurations(artifact: Artifact, addConfiguration: String => Unit, allConfigurations: Iterable[String]): Unit = - { - val confs = if(artifact.configurations.isEmpty) allConfigurations else artifact.configurations.map(_.name) - confs foreach addConfiguration - } + /** Transforms an sbt ModuleID into an Ivy DefaultDependencyDescriptor.*/ + def convertDependency(moduleID: DefaultModuleDescriptor, dependency: ModuleID, parser: CustomXmlParser.CustomParser): DefaultDependencyDescriptor = + { + val dependencyDescriptor = new DefaultDependencyDescriptor(moduleID, toID(dependency), dependency.isForce, dependency.isChanging, 
dependency.isTransitive) + dependency.configurations match { + case None => // The configuration for this dependency was not explicitly specified, so use the default + parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor) + case Some(confs) => // The configuration mapping (looks like: test->default) was specified for this dependency + parser.parseDepsConfs(confs, dependencyDescriptor) + } + for (artifact <- dependency.explicitArtifacts) { + import artifact.{ name, classifier, `type`, extension, url } + val extraMap = extra(artifact) + val ivyArtifact = new DefaultDependencyArtifactDescriptor(dependencyDescriptor, name, `type`, extension, url.getOrElse(null), extraMap) + copyConfigurations(artifact, ivyArtifact.addConfiguration) + for (conf <- dependencyDescriptor.getModuleConfigurations) + dependencyDescriptor.addDependencyArtifact(conf, ivyArtifact) + } + for (excls <- dependency.exclusions) { + for (conf <- dependencyDescriptor.getModuleConfigurations) { + dependencyDescriptor.addExcludeRule(conf, IvyScala.excludeRule(excls.organization, excls.name, excls.configurations, excls.artifact)) + } + } + dependencyDescriptor + } + def copyConfigurations(artifact: Artifact, addConfiguration: String => Unit): Unit = + copyConfigurations(artifact, addConfiguration, "*" :: Nil) - def addOverrides(moduleID: DefaultModuleDescriptor, overrides: Set[ModuleID], matcher: PatternMatcher): Unit = - overrides foreach addOverride(moduleID, matcher) - def addOverride(moduleID: DefaultModuleDescriptor, matcher: PatternMatcher)(overrideDef: ModuleID): Unit = - { - val overrideID = new ModuleId(overrideDef.organization, overrideDef.name) - val overrideWith = new OverrideDependencyDescriptorMediator(null, overrideDef.revision) - moduleID.addDependencyDescriptorMediator(overrideID, matcher, overrideWith) - } - /** It is necessary to explicitly modify direct dependencies because Ivy gives - * "IllegalStateException: impossible to get artifacts when data has not been loaded." 
- * when a direct dependency is overridden with a newer version."*/ - def overrideDirect(dependencies: Seq[ModuleID], overrides: Set[ModuleID]): Seq[ModuleID] = - { - def key(id: ModuleID) = (id.organization, id.name) - val overridden = overrides.map(id => (key(id), id.revision)).toMap - dependencies map { dep => - overridden get key(dep) match { - case Some(rev) => dep.copy(revision = rev) - case None => dep - } - } - } + private[this] def copyConfigurations(artifact: Artifact, addConfiguration: String => Unit, allConfigurations: Iterable[String]): Unit = + { + val confs = if (artifact.configurations.isEmpty) allConfigurations else artifact.configurations.map(_.name) + confs foreach addConfiguration + } - /** This method is used to add inline artifacts to the provided module. */ - def addArtifacts(moduleID: DefaultModuleDescriptor, artifacts: Iterable[Artifact]): Unit = - for(art <- mapArtifacts(moduleID, artifacts.toSeq); c <- art.getConfigurations) - moduleID.addArtifact(c, art) + def addOverrides(moduleID: DefaultModuleDescriptor, overrides: Set[ModuleID], matcher: PatternMatcher): Unit = + overrides foreach addOverride(moduleID, matcher) + def addOverride(moduleID: DefaultModuleDescriptor, matcher: PatternMatcher)(overrideDef: ModuleID): Unit = + { + val overrideID = new ModuleId(overrideDef.organization, overrideDef.name) + val overrideWith = new OverrideDependencyDescriptorMediator(null, overrideDef.revision) + moduleID.addDependencyDescriptorMediator(overrideID, matcher, overrideWith) + } + /** + * It is necessary to explicitly modify direct dependencies because Ivy gives + * "IllegalStateException: impossible to get artifacts when data has not been loaded." + * when a direct dependency is overridden with a newer version." 
+ */ + def overrideDirect(dependencies: Seq[ModuleID], overrides: Set[ModuleID]): Seq[ModuleID] = + { + def key(id: ModuleID) = (id.organization, id.name) + val overridden = overrides.map(id => (key(id), id.revision)).toMap + dependencies map { dep => + overridden get key(dep) match { + case Some(rev) => dep.copy(revision = rev) + case None => dep + } + } + } - def addConfigurations(mod: DefaultModuleDescriptor, configurations: Iterable[Configuration]): Unit = - configurations.foreach(config => mod.addConfiguration(toIvyConfiguration(config))) + /** This method is used to add inline artifacts to the provided module. */ + def addArtifacts(moduleID: DefaultModuleDescriptor, artifacts: Iterable[Artifact]): Unit = + for (art <- mapArtifacts(moduleID, artifacts.toSeq); c <- art.getConfigurations) + moduleID.addArtifact(c, art) - def mapArtifacts(moduleID: ModuleDescriptor, artifacts: Seq[Artifact]): Seq[IArtifact] = - { - lazy val allConfigurations = moduleID.getPublicConfigurationsNames - for(artifact <- artifacts) yield - toIvyArtifact(moduleID, artifact, allConfigurations) - } + def addConfigurations(mod: DefaultModuleDescriptor, configurations: Iterable[Configuration]): Unit = + configurations.foreach(config => mod.addConfiguration(toIvyConfiguration(config))) + def mapArtifacts(moduleID: ModuleDescriptor, artifacts: Seq[Artifact]): Seq[IArtifact] = + { + lazy val allConfigurations = moduleID.getPublicConfigurationsNames + for (artifact <- artifacts) yield toIvyArtifact(moduleID, artifact, allConfigurations) + } - /** This code converts the given ModuleDescriptor to a DefaultModuleDescriptor by casting or generating an error. - * Ivy 2.0.0 always produces a DefaultModuleDescriptor. 
*/ - private def toDefaultModuleDescriptor(md: ModuleDescriptor) = - md match - { - case dmd: DefaultModuleDescriptor => dmd - case _ => sys.error("Unknown ModuleDescriptor type.") - } - def getConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]) = - configurations match - { - case Some(confs) => confs.map(_.name).toList.toArray - case None => module.getPublicConfigurationsNames - } + /** + * This code converts the given ModuleDescriptor to a DefaultModuleDescriptor by casting or generating an error. + * Ivy 2.0.0 always produces a DefaultModuleDescriptor. + */ + private def toDefaultModuleDescriptor(md: ModuleDescriptor) = + md match { + case dmd: DefaultModuleDescriptor => dmd + case _ => sys.error("Unknown ModuleDescriptor type.") + } + def getConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]) = + configurations match { + case Some(confs) => confs.map(_.name).toList.toArray + case None => module.getPublicConfigurationsNames + } } diff --git a/ivy/src/main/scala/sbt/IvyActions.scala b/ivy/src/main/scala/sbt/IvyActions.scala index 716579e0f..5e2d0359c 100644 --- a/ivy/src/main/scala/sbt/IvyActions.scala +++ b/ivy/src/main/scala/sbt/IvyActions.scala @@ -4,276 +4,269 @@ package sbt import java.io.File -import scala.xml.{Node => XNode, NodeSeq} +import scala.xml.{ Node => XNode, NodeSeq } -import org.apache.ivy.{core, plugins, Ivy} -import core.{IvyPatternHelper, LogOptions} +import org.apache.ivy.{ core, plugins, Ivy } +import core.{ IvyPatternHelper, LogOptions } import core.deliver.DeliverOptions import core.install.InstallOptions -import core.module.descriptor.{Artifact => IArtifact, MDArtifact, ModuleDescriptor, DefaultModuleDescriptor} +import core.module.descriptor.{ Artifact => IArtifact, MDArtifact, ModuleDescriptor, DefaultModuleDescriptor } import core.report.ResolveReport import core.resolve.ResolveOptions -import plugins.resolver.{BasicResolver, DependencyResolver} +import 
plugins.resolver.{ BasicResolver, DependencyResolver } final class DeliverConfiguration(val deliverIvyPattern: String, val status: String, val configurations: Option[Seq[Configuration]], val logging: UpdateLogging.Value) final class PublishConfiguration(val ivyFile: Option[File], val resolverName: String, val artifacts: Map[Artifact, File], val checksums: Seq[String], val logging: UpdateLogging.Value, - val overwrite: Boolean) { - def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) = - this(ivyFile, resolverName, artifacts, checksums, logging, false) + val overwrite: Boolean) { + def this(ivyFile: Option[File], resolverName: String, artifacts: Map[Artifact, File], checksums: Seq[String], logging: UpdateLogging.Value) = + this(ivyFile, resolverName, artifacts, checksums, logging, false) } final class UpdateConfiguration(val retrieve: Option[RetrieveConfiguration], val missingOk: Boolean, val logging: UpdateLogging.Value) final class RetrieveConfiguration(val retrieveDirectory: File, val outputPattern: String) final case class MakePomConfiguration(file: File, moduleInfo: ModuleInfo, configurations: Option[Seq[Configuration]] = None, extra: NodeSeq = NodeSeq.Empty, process: XNode => XNode = n => n, filterRepositories: MavenRepository => Boolean = _ => true, allRepositories: Boolean, includeTypes: Set[String] = Set(Artifact.DefaultType, Artifact.PomType)) - // exclude is a map on a restricted ModuleID +// exclude is a map on a restricted ModuleID final case class GetClassifiersConfiguration(module: GetClassifiersModule, exclude: Map[ModuleID, Set[String]], configuration: UpdateConfiguration, ivyScala: Option[IvyScala]) final case class GetClassifiersModule(id: ModuleID, modules: Seq[ModuleID], configurations: Seq[Configuration], classifiers: Seq[String]) -/** Configures logging during an 'update'. `level` determines the amount of other information logged. 
-* `Full` is the default and logs the most. -* `DownloadOnly` only logs what is downloaded. -* `Quiet` only displays errors.*/ -object UpdateLogging extends Enumeration -{ - val Full, DownloadOnly, Quiet = Value +/** + * Configures logging during an 'update'. `level` determines the amount of other information logged. + * `Full` is the default and logs the most. + * `DownloadOnly` only logs what is downloaded. + * `Quiet` only displays errors. + */ +object UpdateLogging extends Enumeration { + val Full, DownloadOnly, Quiet = Value } -object IvyActions -{ - /** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'.*/ - def install(module: IvySbt#Module, from: String, to: String, log: Logger) - { - module.withModule(log) { (ivy, md, default) => - for(dependency <- md.getDependencies) - { - log.info("Installing " + dependency) - val options = new InstallOptions - options.setValidate(module.moduleSettings.validate) - options.setTransitive(dependency.isTransitive) - ivy.install(dependency.getDependencyRevisionId, from, to, options) - } - } - } +object IvyActions { + /** Installs the dependencies of the given 'module' from the resolver named 'from' to the resolver named 'to'.*/ + def install(module: IvySbt#Module, from: String, to: String, log: Logger) { + module.withModule(log) { (ivy, md, default) => + for (dependency <- md.getDependencies) { + log.info("Installing " + dependency) + val options = new InstallOptions + options.setValidate(module.moduleSettings.validate) + options.setTransitive(dependency.isTransitive) + ivy.install(dependency.getDependencyRevisionId, from, to, options) + } + } + } - /** Clears the Ivy cache, as configured by 'config'. */ - def cleanCache(ivy: IvySbt, log: Logger) = ivy.withIvy(log) { iv => - iv.getSettings.getResolutionCacheManager.clean() - iv.getSettings.getRepositoryCacheManagers.foreach(_.clean()) - } + /** Clears the Ivy cache, as configured by 'config'. 
*/ + def cleanCache(ivy: IvySbt, log: Logger) = ivy.withIvy(log) { iv => + iv.getSettings.getResolutionCacheManager.clean() + iv.getSettings.getRepositoryCacheManagers.foreach(_.clean()) + } - /** Creates a Maven pom from the given Ivy configuration*/ - def makePom(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger) - { - import configuration.{allRepositories, moduleInfo, configurations, extra, file, filterRepositories, process, includeTypes} - module.withModule(log) { (ivy, md, default) => - (new MakePom(log)).write(ivy, md, moduleInfo, configurations, includeTypes, extra, process, filterRepositories, allRepositories, file) - log.info("Wrote " + file.getAbsolutePath) - } - } + /** Creates a Maven pom from the given Ivy configuration*/ + def makePom(module: IvySbt#Module, configuration: MakePomConfiguration, log: Logger) { + import configuration.{ allRepositories, moduleInfo, configurations, extra, file, filterRepositories, process, includeTypes } + module.withModule(log) { (ivy, md, default) => + (new MakePom(log)).write(ivy, md, moduleInfo, configurations, includeTypes, extra, process, filterRepositories, allRepositories, file) + log.info("Wrote " + file.getAbsolutePath) + } + } - def deliver(module: IvySbt#Module, configuration: DeliverConfiguration, log: Logger): File = - { - import configuration._ - module.withModule(log) { case (ivy, md, default) => - val revID = md.getModuleRevisionId - val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status) - options.setConfs(IvySbt.getConfigurations(md, configurations)) - ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options) - deliveredFile(ivy, deliverIvyPattern, md) - } - } - def deliveredFile(ivy: Ivy, pattern: String, md: ModuleDescriptor): File = - ivy.getSettings.resolveFile(IvyPatternHelper.substitute(pattern, md.getResolvedModuleRevisionId)) + def deliver(module: IvySbt#Module, configuration: DeliverConfiguration, log: Logger): File = + { + import configuration._ 
+ module.withModule(log) { + case (ivy, md, default) => + val revID = md.getModuleRevisionId + val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status) + options.setConfs(IvySbt.getConfigurations(md, configurations)) + ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options) + deliveredFile(ivy, deliverIvyPattern, md) + } + } + def deliveredFile(ivy: Ivy, pattern: String, md: ModuleDescriptor): File = + ivy.getSettings.resolveFile(IvyPatternHelper.substitute(pattern, md.getResolvedModuleRevisionId)) - def publish(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger) - { - import configuration._ - module.withModule(log) { case (ivy, md, default) => - val resolver = ivy.getSettings.getResolver(resolverName) - if(resolver eq null) sys.error("Undefined resolver '" + resolverName + "'") - val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) } - val cross = crossVersionMap(module.moduleSettings) - val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq - withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) } - } - } - private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T = - resolver match { case br: BasicResolver => withChecksums(br, checksums)(act); case _ => act } - private[this] def withChecksums[T](resolver: BasicResolver, checksums: Seq[String])(act: => T): T = - { - val previous = resolver.getChecksumAlgorithms - resolver.setChecksums(checksums mkString ",") - try { act } - finally { resolver.setChecksums(previous mkString ",") } - } - private def crossVersionMap(moduleSettings: ModuleSettings): Option[String => String] = - moduleSettings match { - case i: InlineConfiguration => CrossVersion(i.module, i.ivyScala) - case e: EmptyConfiguration => CrossVersion(e.module, e.ivyScala) - case _ => None - } - def mapArtifacts(module: ModuleDescriptor, cross: Option[String => String], artifacts: Map[Artifact, 
File]): Seq[(IArtifact, File)] = - { - val rawa = artifacts.keys.toSeq - val seqa = CrossVersion.substituteCross(rawa, cross) - val zipped = rawa zip IvySbt.mapArtifacts(module, seqa) - zipped map { case (a, ivyA) => (ivyA, artifacts(a)) } - } - /** Resolves and retrieves dependencies. 'ivyConfig' is used to produce an Ivy file and configuration. - * 'updateConfig' configures the actual resolution and retrieval process. */ - def update(module: IvySbt#Module, configuration: UpdateConfiguration, log: Logger): UpdateReport = - module.withModule(log) { case (ivy, md, default) => - val (report, err) = resolve(configuration.logging)(ivy, md, default) - err match - { - case Some(x) if !configuration.missingOk => - processUnresolved(x, log) - throw x - case _ => - val cachedDescriptor = ivy.getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md.getModuleRevisionId) - val uReport = IvyRetrieve.updateReport(report, cachedDescriptor) - configuration.retrieve match - { - case Some(rConf) => retrieve(ivy, uReport, rConf) - case None => uReport - } - } - } + def publish(module: IvySbt#Module, configuration: PublishConfiguration, log: Logger) { + import configuration._ + module.withModule(log) { + case (ivy, md, default) => + val resolver = ivy.getSettings.getResolver(resolverName) + if (resolver eq null) sys.error("Undefined resolver '" + resolverName + "'") + val ivyArtifact = ivyFile map { file => (MDArtifact.newIvyArtifact(md), file) } + val cross = crossVersionMap(module.moduleSettings) + val as = mapArtifacts(md, cross, artifacts) ++ ivyArtifact.toSeq + withChecksums(resolver, checksums) { publish(md, as, resolver, overwrite = overwrite) } + } + } + private[this] def withChecksums[T](resolver: DependencyResolver, checksums: Seq[String])(act: => T): T = + resolver match { case br: BasicResolver => withChecksums(br, checksums)(act); case _ => act } + private[this] def withChecksums[T](resolver: BasicResolver, checksums: Seq[String])(act: => T): T = + { + val 
previous = resolver.getChecksumAlgorithms + resolver.setChecksums(checksums mkString ",") + try { act } + finally { resolver.setChecksums(previous mkString ",") } + } + private def crossVersionMap(moduleSettings: ModuleSettings): Option[String => String] = + moduleSettings match { + case i: InlineConfiguration => CrossVersion(i.module, i.ivyScala) + case e: EmptyConfiguration => CrossVersion(e.module, e.ivyScala) + case _ => None + } + def mapArtifacts(module: ModuleDescriptor, cross: Option[String => String], artifacts: Map[Artifact, File]): Seq[(IArtifact, File)] = + { + val rawa = artifacts.keys.toSeq + val seqa = CrossVersion.substituteCross(rawa, cross) + val zipped = rawa zip IvySbt.mapArtifacts(module, seqa) + zipped map { case (a, ivyA) => (ivyA, artifacts(a)) } + } + /** + * Resolves and retrieves dependencies. 'ivyConfig' is used to produce an Ivy file and configuration. + * 'updateConfig' configures the actual resolution and retrieval process. + */ + def update(module: IvySbt#Module, configuration: UpdateConfiguration, log: Logger): UpdateReport = + module.withModule(log) { + case (ivy, md, default) => + val (report, err) = resolve(configuration.logging)(ivy, md, default) + err match { + case Some(x) if !configuration.missingOk => + processUnresolved(x, log) + throw x + case _ => + val cachedDescriptor = ivy.getSettings.getResolutionCacheManager.getResolvedIvyFileInCache(md.getModuleRevisionId) + val uReport = IvyRetrieve.updateReport(report, cachedDescriptor) + configuration.retrieve match { + case Some(rConf) => retrieve(ivy, uReport, rConf) + case None => uReport + } + } + } - def processUnresolved(err: ResolveException, log: Logger) - { - val withExtra = err.failed.filter(!_.extraDependencyAttributes.isEmpty) - if(!withExtra.isEmpty) - { - log.warn("\n\tNote: Some unresolved dependencies have extra attributes. 
Check that these dependencies exist with the requested attributes.") - withExtra foreach { id => log.warn("\t\t" + id) } - log.warn("") - } - } - def groupedConflicts[T](moduleFilter: ModuleFilter, grouping: ModuleID => T)(report: UpdateReport): Map[T, Set[String]] = - report.configurations.flatMap { confReport => - val evicted = confReport.evicted.filter(moduleFilter) - val evictedSet = evicted.map( m => (m.organization, m.name) ).toSet - val conflicted = confReport.allModules.filter( mod => evictedSet( (mod.organization, mod.name) ) ) - grouped(grouping)(conflicted ++ evicted) - } toMap; + def processUnresolved(err: ResolveException, log: Logger) { + val withExtra = err.failed.filter(!_.extraDependencyAttributes.isEmpty) + if (!withExtra.isEmpty) { + log.warn("\n\tNote: Some unresolved dependencies have extra attributes. Check that these dependencies exist with the requested attributes.") + withExtra foreach { id => log.warn("\t\t" + id) } + log.warn("") + } + } + def groupedConflicts[T](moduleFilter: ModuleFilter, grouping: ModuleID => T)(report: UpdateReport): Map[T, Set[String]] = + report.configurations.flatMap { confReport => + val evicted = confReport.evicted.filter(moduleFilter) + val evictedSet = evicted.map(m => (m.organization, m.name)).toSet + val conflicted = confReport.allModules.filter(mod => evictedSet((mod.organization, mod.name))) + grouped(grouping)(conflicted ++ evicted) + } toMap; - def grouped[T](grouping: ModuleID => T)(mods: Seq[ModuleID]): Map[T, Set[String]] = - mods groupBy(grouping) mapValues(_.map(_.revision).toSet) + def grouped[T](grouping: ModuleID => T)(mods: Seq[ModuleID]): Map[T, Set[String]] = + mods groupBy (grouping) mapValues (_.map(_.revision).toSet) - def transitiveScratch(ivySbt: IvySbt, label: String, config: GetClassifiersConfiguration, log: Logger): UpdateReport = - { - import config.{configuration => c, ivyScala, module => mod} - import mod.{id, modules => deps} - val base = restrictedCopy(id, true).copy(name = id.name 
+ "$" + label) - val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala)) - val report = update(module, c, log) - val newConfig = config.copy(module = mod.copy(modules = report.allModules)) - updateClassifiers(ivySbt, newConfig, log) - } - def updateClassifiers(ivySbt: IvySbt, config: GetClassifiersConfiguration, log: Logger): UpdateReport = - { - import config.{configuration => c, module => mod, _} - import mod.{configurations => confs, _} - assert(!classifiers.isEmpty, "classifiers cannot be empty") - val baseModules = modules map { m => restrictedCopy(m, true) } - val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exclude) - val base = restrictedCopy(id, true).copy(name = id.name + classifiers.mkString("$","_","")) - val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala, configurations = confs)) - val upConf = new UpdateConfiguration(c.retrieve, true, c.logging) - update(module, upConf, log) - } - def classifiedArtifacts(classifiers: Seq[String], exclude: Map[ModuleID, Set[String]])(m: ModuleID): Option[ModuleID] = - { - val excluded = exclude getOrElse(restrictedCopy(m, false), Set.empty) - val included = classifiers filterNot excluded - if(included.isEmpty) None else Some(m.copy(isTransitive = false, explicitArtifacts = classifiedArtifacts(m.name, included) )) - } - def addExcluded(report: UpdateReport, classifiers: Seq[String], exclude: Map[ModuleID, Set[String]]): UpdateReport = - report.addMissing { id => classifiedArtifacts(id.name, classifiers filter getExcluded(id, exclude)) } - def classifiedArtifacts(name: String, classifiers: Seq[String]): Seq[Artifact] = - classifiers map { c => Artifact.classified(name, c) } - private[this] def getExcluded(id: ModuleID, exclude: Map[ModuleID, Set[String]]): Set[String] = - exclude.getOrElse(restrictedCopy(id, false), Set.empty[String]) + def transitiveScratch(ivySbt: IvySbt, label: 
String, config: GetClassifiersConfiguration, log: Logger): UpdateReport = + { + import config.{ configuration => c, ivyScala, module => mod } + import mod.{ id, modules => deps } + val base = restrictedCopy(id, true).copy(name = id.name + "$" + label) + val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala)) + val report = update(module, c, log) + val newConfig = config.copy(module = mod.copy(modules = report.allModules)) + updateClassifiers(ivySbt, newConfig, log) + } + def updateClassifiers(ivySbt: IvySbt, config: GetClassifiersConfiguration, log: Logger): UpdateReport = + { + import config.{ configuration => c, module => mod, _ } + import mod.{ configurations => confs, _ } + assert(!classifiers.isEmpty, "classifiers cannot be empty") + val baseModules = modules map { m => restrictedCopy(m, true) } + val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exclude) + val base = restrictedCopy(id, true).copy(name = id.name + classifiers.mkString("$", "_", "")) + val module = new ivySbt.Module(InlineConfiguration(base, ModuleInfo(base.name), deps).copy(ivyScala = ivyScala, configurations = confs)) + val upConf = new UpdateConfiguration(c.retrieve, true, c.logging) + update(module, upConf, log) + } + def classifiedArtifacts(classifiers: Seq[String], exclude: Map[ModuleID, Set[String]])(m: ModuleID): Option[ModuleID] = + { + val excluded = exclude getOrElse (restrictedCopy(m, false), Set.empty) + val included = classifiers filterNot excluded + if (included.isEmpty) None else Some(m.copy(isTransitive = false, explicitArtifacts = classifiedArtifacts(m.name, included))) + } + def addExcluded(report: UpdateReport, classifiers: Seq[String], exclude: Map[ModuleID, Set[String]]): UpdateReport = + report.addMissing { id => classifiedArtifacts(id.name, classifiers filter getExcluded(id, exclude)) } + def classifiedArtifacts(name: String, classifiers: Seq[String]): Seq[Artifact] = + classifiers map { c => 
Artifact.classified(name, c) } + private[this] def getExcluded(id: ModuleID, exclude: Map[ModuleID, Set[String]]): Set[String] = + exclude.getOrElse(restrictedCopy(id, false), Set.empty[String]) - def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] = - report.allMissing flatMap { case (_, mod, art) => art.classifier.map { c => (restrictedCopy(mod, false), c) } } groupBy(_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) } + def extractExcludes(report: UpdateReport): Map[ModuleID, Set[String]] = + report.allMissing flatMap { case (_, mod, art) => art.classifier.map { c => (restrictedCopy(mod, false), c) } } groupBy (_._1) map { case (mod, pairs) => (mod, pairs.map(_._2).toSet) } - private[this] def restrictedCopy(m: ModuleID, confs: Boolean) = - ModuleID(m.organization, m.name, m.revision, crossVersion = m.crossVersion, extraAttributes = m.extraAttributes, configurations = if(confs) m.configurations else None) - private[this] def resolve(logging: UpdateLogging.Value)(ivy: Ivy, module: DefaultModuleDescriptor, defaultConf: String): (ResolveReport, Option[ResolveException]) = - { - val resolveOptions = new ResolveOptions - val resolveId = ResolveOptions.getDefaultResolveId(module) - resolveOptions.setResolveId(resolveId) - resolveOptions.setLog(ivyLogLevel(logging)) - ResolutionCache.cleanModule(module.getModuleRevisionId, resolveId, ivy.getSettings.getResolutionCacheManager) - val resolveReport = ivy.resolve(module, resolveOptions) - val err = - if(resolveReport.hasError) - { - val messages = resolveReport.getAllProblemMessages.toArray.map(_.toString).distinct - val failed = resolveReport.getUnresolvedDependencies.map(node => IvyRetrieve.toModuleID(node.getId)) - Some(new ResolveException(messages, failed)) - } - else None - (resolveReport, err) - } - private def retrieve(ivy: Ivy, report: UpdateReport, config: RetrieveConfiguration): UpdateReport = - retrieve(ivy, report, config.retrieveDirectory, config.outputPattern) + private[this] 
def restrictedCopy(m: ModuleID, confs: Boolean) = + ModuleID(m.organization, m.name, m.revision, crossVersion = m.crossVersion, extraAttributes = m.extraAttributes, configurations = if (confs) m.configurations else None) + private[this] def resolve(logging: UpdateLogging.Value)(ivy: Ivy, module: DefaultModuleDescriptor, defaultConf: String): (ResolveReport, Option[ResolveException]) = + { + val resolveOptions = new ResolveOptions + val resolveId = ResolveOptions.getDefaultResolveId(module) + resolveOptions.setResolveId(resolveId) + resolveOptions.setLog(ivyLogLevel(logging)) + ResolutionCache.cleanModule(module.getModuleRevisionId, resolveId, ivy.getSettings.getResolutionCacheManager) + val resolveReport = ivy.resolve(module, resolveOptions) + val err = + if (resolveReport.hasError) { + val messages = resolveReport.getAllProblemMessages.toArray.map(_.toString).distinct + val failed = resolveReport.getUnresolvedDependencies.map(node => IvyRetrieve.toModuleID(node.getId)) + Some(new ResolveException(messages, failed)) + } else None + (resolveReport, err) + } + private def retrieve(ivy: Ivy, report: UpdateReport, config: RetrieveConfiguration): UpdateReport = + retrieve(ivy, report, config.retrieveDirectory, config.outputPattern) - private def retrieve(ivy: Ivy, report: UpdateReport, base: File, pattern: String): UpdateReport = - { - val toCopy = new collection.mutable.HashSet[(File,File)] - val retReport = report retrieve { (conf, mid, art, cached) => - val to = retrieveTarget(conf, mid, art, base, pattern) - toCopy += ((cached, to)) - to - } - IO.copy( toCopy ) - retReport - } - private def retrieveTarget(conf: String, mid: ModuleID, art: Artifact, base: File, pattern: String): File = - new File(base, substitute(conf, mid, art, pattern)) + private def retrieve(ivy: Ivy, report: UpdateReport, base: File, pattern: String): UpdateReport = + { + val toCopy = new collection.mutable.HashSet[(File, File)] + val retReport = report retrieve { (conf, mid, art, cached) => + 
val to = retrieveTarget(conf, mid, art, base, pattern) + toCopy += ((cached, to)) + to + } + IO.copy(toCopy) + retReport + } + private def retrieveTarget(conf: String, mid: ModuleID, art: Artifact, base: File, pattern: String): File = + new File(base, substitute(conf, mid, art, pattern)) - private def substitute(conf: String, mid: ModuleID, art: Artifact, pattern: String): String = - { - val mextra = IvySbt.javaMap(mid.extraAttributes, true) - val aextra = IvySbt.extra(art, true) - IvyPatternHelper.substitute(pattern, mid.organization, mid.name, mid.revision, art.name, art.`type`, art.extension, conf, mextra, aextra) - } + private def substitute(conf: String, mid: ModuleID, art: Artifact, pattern: String): String = + { + val mextra = IvySbt.javaMap(mid.extraAttributes, true) + val aextra = IvySbt.extra(art, true) + IvyPatternHelper.substitute(pattern, mid.organization, mid.name, mid.revision, art.name, art.`type`, art.extension, conf, mextra, aextra) + } - import UpdateLogging.{Quiet, Full, DownloadOnly} - import LogOptions.{LOG_QUIET, LOG_DEFAULT, LOG_DOWNLOAD_ONLY} - private def ivyLogLevel(level: UpdateLogging.Value) = - level match - { - case Quiet => LOG_QUIET - case DownloadOnly => LOG_DOWNLOAD_ONLY - case Full => LOG_DEFAULT - } + import UpdateLogging.{ Quiet, Full, DownloadOnly } + import LogOptions.{ LOG_QUIET, LOG_DEFAULT, LOG_DOWNLOAD_ONLY } + private def ivyLogLevel(level: UpdateLogging.Value) = + level match { + case Quiet => LOG_QUIET + case DownloadOnly => LOG_DOWNLOAD_ONLY + case Full => LOG_DEFAULT + } - def publish(module: ModuleDescriptor, artifacts: Seq[(IArtifact, File)], resolver: DependencyResolver, overwrite: Boolean): Unit = - { - if (artifacts.nonEmpty) { - checkFilesPresent(artifacts) - try { - resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite); - for( (artifact, file) <- artifacts) - resolver.publish(artifact, file, overwrite) - resolver.commitPublishTransaction() - } catch { - case e: Throwable => - try { 
resolver.abortPublishTransaction() } - finally { throw e } - } - } - } - private[this] def checkFilesPresent(artifacts: Seq[(IArtifact, File)]) - { - val missing = artifacts filter { case (a, file) => !file.exists } - if(missing.nonEmpty) - error("Missing files for publishing:\n\t" + missing.map(_._2.getAbsolutePath).mkString("\n\t")) - } + def publish(module: ModuleDescriptor, artifacts: Seq[(IArtifact, File)], resolver: DependencyResolver, overwrite: Boolean): Unit = + { + if (artifacts.nonEmpty) { + checkFilesPresent(artifacts) + try { + resolver.beginPublishTransaction(module.getModuleRevisionId(), overwrite); + for ((artifact, file) <- artifacts) + resolver.publish(artifact, file, overwrite) + resolver.commitPublishTransaction() + } catch { + case e: Throwable => + try { resolver.abortPublishTransaction() } + finally { throw e } + } + } + } + private[this] def checkFilesPresent(artifacts: Seq[(IArtifact, File)]) { + val missing = artifacts filter { case (a, file) => !file.exists } + if (missing.nonEmpty) + error("Missing files for publishing:\n\t" + missing.map(_._2.getAbsolutePath).mkString("\n\t")) + } } final class ResolveException(val messages: Seq[String], val failed: Seq[ModuleID]) extends RuntimeException(messages.mkString("\n")) diff --git a/ivy/src/main/scala/sbt/IvyCache.scala b/ivy/src/main/scala/sbt/IvyCache.scala index 3ff8432f0..fe520d96c 100644 --- a/ivy/src/main/scala/sbt/IvyCache.scala +++ b/ivy/src/main/scala/sbt/IvyCache.scala @@ -6,102 +6,93 @@ package sbt import java.io.File import java.net.URL -import org.apache.ivy.{core, plugins, util} -import core.cache.{ArtifactOrigin, CacheDownloadOptions, DefaultRepositoryCacheManager} -import core.module.descriptor.{Artifact => IvyArtifact, DefaultArtifact} -import plugins.repository.file.{FileRepository=>IvyFileRepository, FileResource} -import plugins.repository.{ArtifactResourceResolver, Resource, ResourceDownloader} +import org.apache.ivy.{ core, plugins, util } +import core.cache.{ 
ArtifactOrigin, CacheDownloadOptions, DefaultRepositoryCacheManager } +import core.module.descriptor.{ Artifact => IvyArtifact, DefaultArtifact } +import plugins.repository.file.{ FileRepository => IvyFileRepository, FileResource } +import plugins.repository.{ ArtifactResourceResolver, Resource, ResourceDownloader } import plugins.resolver.util.ResolvedResource import util.FileUtil class NotInCache(val id: ModuleID, cause: Throwable) - extends RuntimeException(NotInCache(id, cause), cause) -{ - def this(id: ModuleID) = this(id, null) + extends RuntimeException(NotInCache(id, cause), cause) { + def this(id: ModuleID) = this(id, null) } -private object NotInCache -{ - def apply(id: ModuleID, cause: Throwable) = - { - val postfix = if(cause == null) "" else (": " +cause.toString) - "File for " + id + " not in cache" + postfix - } +private object NotInCache { + def apply(id: ModuleID, cause: Throwable) = + { + val postfix = if (cause == null) "" else (": " + cause.toString) + "File for " + id + " not in cache" + postfix + } } /** Provides methods for working at the level of a single jar file with the default Ivy cache.*/ -class IvyCache(val ivyHome: Option[File]) -{ - def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock") - /** Caches the given 'file' with the given ID. 
It may be retrieved or cleared using this ID.*/ - def cacheJar(moduleID: ModuleID, file: File, lock: Option[xsbti.GlobalLock], log: Logger) - { - val artifact = defaultArtifact(moduleID) - val resolved = new ResolvedResource(new FileResource(new IvyFileRepository, file), moduleID.revision) - withDefaultCache(lock, log) { cache => - val resolver = new ArtifactResourceResolver { def resolve(artifact: IvyArtifact) = resolved } - cache.download(artifact, resolver, new FileDownloader, new CacheDownloadOptions) - } - } - /** Clears the cache of the jar for the given ID.*/ - def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger) - { - try { withCachedJar(id, lock, log)(_.delete) } - catch { case e: Exception => log.debug("Error cleaning cached jar: " + e.toString) } - } - /** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown.*/ - def retrieveCachedJar(id: ModuleID, toDirectory: File, lock: Option[xsbti.GlobalLock], log: Logger) = - withCachedJar(id, lock, log) { cachedFile => - val copyTo = new File(toDirectory, cachedFile.getName) - FileUtil.copy(cachedFile, copyTo, null) - copyTo - } +class IvyCache(val ivyHome: Option[File]) { + def lockFile = new File(ivyHome getOrElse Path.userHome, ".sbt.cache.lock") + /** Caches the given 'file' with the given ID. 
It may be retrieved or cleared using this ID.*/ + def cacheJar(moduleID: ModuleID, file: File, lock: Option[xsbti.GlobalLock], log: Logger) { + val artifact = defaultArtifact(moduleID) + val resolved = new ResolvedResource(new FileResource(new IvyFileRepository, file), moduleID.revision) + withDefaultCache(lock, log) { cache => + val resolver = new ArtifactResourceResolver { def resolve(artifact: IvyArtifact) = resolved } + cache.download(artifact, resolver, new FileDownloader, new CacheDownloadOptions) + } + } + /** Clears the cache of the jar for the given ID.*/ + def clearCachedJar(id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger) { + try { withCachedJar(id, lock, log)(_.delete) } + catch { case e: Exception => log.debug("Error cleaning cached jar: " + e.toString) } + } + /** Copies the cached jar for the given ID to the directory 'toDirectory'. If the jar is not in the cache, NotInCache is thrown.*/ + def retrieveCachedJar(id: ModuleID, toDirectory: File, lock: Option[xsbti.GlobalLock], log: Logger) = + withCachedJar(id, lock, log) { cachedFile => + val copyTo = new File(toDirectory, cachedFile.getName) + FileUtil.copy(cachedFile, copyTo, null) + copyTo + } - /** Get the location of the cached jar for the given ID in the Ivy cache. If the jar is not in the cache, NotInCache is thrown .*/ - def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)(f: File => T): T = - { - val cachedFile = - try - { - withDefaultCache(lock, log) { cache => - val artifact = defaultArtifact(id) - cache.getArchiveFileInCache(artifact, unknownOrigin(artifact)) - } - } - catch { case e: Exception => throw new NotInCache(id, e) } + /** Get the location of the cached jar for the given ID in the Ivy cache. 
If the jar is not in the cache, NotInCache is thrown .*/ + def withCachedJar[T](id: ModuleID, lock: Option[xsbti.GlobalLock], log: Logger)(f: File => T): T = + { + val cachedFile = + try { + withDefaultCache(lock, log) { cache => + val artifact = defaultArtifact(id) + cache.getArchiveFileInCache(artifact, unknownOrigin(artifact)) + } + } catch { case e: Exception => throw new NotInCache(id, e) } - if(cachedFile.exists) f(cachedFile) else throw new NotInCache(id) - } - /** Calls the given function with the default Ivy cache.*/ - def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)(f: DefaultRepositoryCacheManager => T): T = - { - val (ivy, local) = basicLocalIvy(lock, log) - ivy.withIvy(log) { ivy => - val cache = ivy.getSettings.getDefaultRepositoryCacheManager.asInstanceOf[DefaultRepositoryCacheManager] - cache.setUseOrigin(false) - f(cache) - } - } - private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact) - /** A minimal Ivy setup with only a local resolver and the current directory as the base directory.*/ - private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) = - { - val local = Resolver.defaultLocal - val paths = new IvyPaths(new File("."), ivyHome) - val conf = new InlineIvyConfiguration(paths, Seq(local), Nil, Nil, false, lock, IvySbt.DefaultChecksums, None, log) - (new IvySbt(conf), local) - } - /** Creates a default jar artifact based on the given ID.*/ - private def defaultArtifact(moduleID: ModuleID): IvyArtifact = - new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar") + if (cachedFile.exists) f(cachedFile) else throw new NotInCache(id) + } + /** Calls the given function with the default Ivy cache.*/ + def withDefaultCache[T](lock: Option[xsbti.GlobalLock], log: Logger)(f: DefaultRepositoryCacheManager => T): T = + { + val (ivy, local) = basicLocalIvy(lock, log) + ivy.withIvy(log) { ivy => + val cache = 
ivy.getSettings.getDefaultRepositoryCacheManager.asInstanceOf[DefaultRepositoryCacheManager] + cache.setUseOrigin(false) + f(cache) + } + } + private def unknownOrigin(artifact: IvyArtifact) = ArtifactOrigin.unkwnown(artifact) + /** A minimal Ivy setup with only a local resolver and the current directory as the base directory.*/ + private def basicLocalIvy(lock: Option[xsbti.GlobalLock], log: Logger) = + { + val local = Resolver.defaultLocal + val paths = new IvyPaths(new File("."), ivyHome) + val conf = new InlineIvyConfiguration(paths, Seq(local), Nil, Nil, false, lock, IvySbt.DefaultChecksums, None, log) + (new IvySbt(conf), local) + } + /** Creates a default jar artifact based on the given ID.*/ + private def defaultArtifact(moduleID: ModuleID): IvyArtifact = + new DefaultArtifact(IvySbt.toID(moduleID), null, moduleID.name, "jar", "jar") } /** Required by Ivy for copying to the cache.*/ -private class FileDownloader extends ResourceDownloader with NotNull -{ - def download(artifact: IvyArtifact, resource: Resource, dest: File) - { - if(dest.exists()) dest.delete() - val part = new File(dest.getAbsolutePath + ".part") - FileUtil.copy(resource.openStream, part, null) - if(!part.renameTo(dest)) - sys.error("Could not move temporary file " + part + " to final location " + dest) - } +private class FileDownloader extends ResourceDownloader with NotNull { + def download(artifact: IvyArtifact, resource: Resource, dest: File) { + if (dest.exists()) dest.delete() + val part = new File(dest.getAbsolutePath + ".part") + FileUtil.copy(resource.openStream, part, null) + if (!part.renameTo(dest)) + sys.error("Could not move temporary file " + part + " to final location " + dest) + } } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/IvyConfigurations.scala b/ivy/src/main/scala/sbt/IvyConfigurations.scala index 256af5f2e..93025e2d7 100644 --- a/ivy/src/main/scala/sbt/IvyConfigurations.scala +++ b/ivy/src/main/scala/sbt/IvyConfigurations.scala @@ -4,120 +4,104 @@ 
package sbt import java.io.File -import java.net.{URI,URL} +import java.net.{ URI, URL } import scala.xml.NodeSeq -final class IvyPaths(val baseDirectory: File, val ivyHome: Option[File]) -{ - def withBase(newBaseDirectory: File) = new IvyPaths(newBaseDirectory, ivyHome) +final class IvyPaths(val baseDirectory: File, val ivyHome: Option[File]) { + def withBase(newBaseDirectory: File) = new IvyPaths(newBaseDirectory, ivyHome) } -sealed trait IvyConfiguration -{ - type This <: IvyConfiguration - def lock: Option[xsbti.GlobalLock] - def baseDirectory: File - def log: Logger - def withBase(newBaseDirectory: File): This +sealed trait IvyConfiguration { + type This <: IvyConfiguration + def lock: Option[xsbti.GlobalLock] + def baseDirectory: File + def log: Logger + def withBase(newBaseDirectory: File): This } final class InlineIvyConfiguration(val paths: IvyPaths, val resolvers: Seq[Resolver], val otherResolvers: Seq[Resolver], - val moduleConfigurations: Seq[ModuleConfiguration], val localOnly: Boolean, val lock: Option[xsbti.GlobalLock], - val checksums: Seq[String], val resolutionCacheDir: Option[File], val log: Logger) extends IvyConfiguration -{ - @deprecated("Use the variant that accepts the resolution cache location.", "0.13.0") - def this(paths: IvyPaths, resolvers: Seq[Resolver], otherResolvers: Seq[Resolver], - moduleConfigurations: Seq[ModuleConfiguration], localOnly: Boolean, lock: Option[xsbti.GlobalLock], - checksums: Seq[String], log: Logger) = - this(paths, resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, None, log) + val moduleConfigurations: Seq[ModuleConfiguration], val localOnly: Boolean, val lock: Option[xsbti.GlobalLock], + val checksums: Seq[String], val resolutionCacheDir: Option[File], val log: Logger) extends IvyConfiguration { + @deprecated("Use the variant that accepts the resolution cache location.", "0.13.0") + def this(paths: IvyPaths, resolvers: Seq[Resolver], otherResolvers: Seq[Resolver], + 
moduleConfigurations: Seq[ModuleConfiguration], localOnly: Boolean, lock: Option[xsbti.GlobalLock], + checksums: Seq[String], log: Logger) = + this(paths, resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, None, log) - type This = InlineIvyConfiguration - def baseDirectory = paths.baseDirectory - def withBase(newBase: File) = new InlineIvyConfiguration(paths.withBase(newBase), resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log) - def changeResolvers(newResolvers: Seq[Resolver]) = new InlineIvyConfiguration(paths, newResolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log) + type This = InlineIvyConfiguration + def baseDirectory = paths.baseDirectory + def withBase(newBase: File) = new InlineIvyConfiguration(paths.withBase(newBase), resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log) + def changeResolvers(newResolvers: Seq[Resolver]) = new InlineIvyConfiguration(paths, newResolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, resolutionCacheDir, log) } -final class ExternalIvyConfiguration(val baseDirectory: File, val uri: URI, val lock: Option[xsbti.GlobalLock], val extraResolvers: Seq[Resolver], val log: Logger) extends IvyConfiguration -{ - type This = ExternalIvyConfiguration - def withBase(newBase: File) = new ExternalIvyConfiguration(newBase, uri, lock, extraResolvers, log) +final class ExternalIvyConfiguration(val baseDirectory: File, val uri: URI, val lock: Option[xsbti.GlobalLock], val extraResolvers: Seq[Resolver], val log: Logger) extends IvyConfiguration { + type This = ExternalIvyConfiguration + def withBase(newBase: File) = new ExternalIvyConfiguration(newBase, uri, lock, extraResolvers, log) } -object ExternalIvyConfiguration -{ - def apply(baseDirectory: File, file: File, lock: Option[xsbti.GlobalLock], log: Logger) = new ExternalIvyConfiguration(baseDirectory, 
file.toURI, lock, Nil, log) +object ExternalIvyConfiguration { + def apply(baseDirectory: File, file: File, lock: Option[xsbti.GlobalLock], log: Logger) = new ExternalIvyConfiguration(baseDirectory, file.toURI, lock, Nil, log) } -object IvyConfiguration -{ - /** Called to configure Ivy when inline resolvers are not specified. - * This will configure Ivy with an 'ivy-settings.xml' file if there is one or else use default resolvers.*/ - @deprecated("Explicitly use either external or inline configuration.", "0.12.0") - def apply(paths: IvyPaths, lock: Option[xsbti.GlobalLock], localOnly: Boolean, checksums: Seq[String], log: Logger): IvyConfiguration = - { - log.debug("Autodetecting configuration.") - val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory) - if(defaultIvyConfigFile.canRead) - ExternalIvyConfiguration(paths.baseDirectory, defaultIvyConfigFile, lock, log) - else - new InlineIvyConfiguration(paths, Resolver.withDefaultResolvers(Nil), Nil, Nil, localOnly, lock, checksums, None, log) - } +object IvyConfiguration { + /** + * Called to configure Ivy when inline resolvers are not specified. + * This will configure Ivy with an 'ivy-settings.xml' file if there is one or else use default resolvers. 
+ */ + @deprecated("Explicitly use either external or inline configuration.", "0.12.0") + def apply(paths: IvyPaths, lock: Option[xsbti.GlobalLock], localOnly: Boolean, checksums: Seq[String], log: Logger): IvyConfiguration = + { + log.debug("Autodetecting configuration.") + val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory) + if (defaultIvyConfigFile.canRead) + ExternalIvyConfiguration(paths.baseDirectory, defaultIvyConfigFile, lock, log) + else + new InlineIvyConfiguration(paths, Resolver.withDefaultResolvers(Nil), Nil, Nil, localOnly, lock, checksums, None, log) + } } -sealed trait ModuleSettings -{ - def validate: Boolean - def ivyScala: Option[IvyScala] - def noScala: ModuleSettings +sealed trait ModuleSettings { + def validate: Boolean + def ivyScala: Option[IvyScala] + def noScala: ModuleSettings } -final case class IvyFileConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings -{ - def noScala = copy(ivyScala = None) +final case class IvyFileConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings { + def noScala = copy(ivyScala = None) } -final case class PomConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings -{ - def noScala = copy(ivyScala = None) +final case class PomConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean, autoScalaTools: Boolean = true) extends ModuleSettings { + def noScala = copy(ivyScala = None) } -final case class InlineConfiguration(module: ModuleID, moduleInfo: ModuleInfo, dependencies: Seq[ModuleID], overrides: Set[ModuleID] = Set.empty, ivyXML: NodeSeq = NodeSeq.Empty, configurations: Seq[Configuration] = Nil, defaultConfiguration: Option[Configuration] = None, ivyScala: Option[IvyScala] = None, validate: Boolean = false, conflictManager: ConflictManager = 
ConflictManager.default) extends ModuleSettings -{ - def withConfigurations(configurations: Seq[Configuration]) = copy(configurations = configurations) - def noScala = copy(ivyScala = None) +final case class InlineConfiguration(module: ModuleID, moduleInfo: ModuleInfo, dependencies: Seq[ModuleID], overrides: Set[ModuleID] = Set.empty, ivyXML: NodeSeq = NodeSeq.Empty, configurations: Seq[Configuration] = Nil, defaultConfiguration: Option[Configuration] = None, ivyScala: Option[IvyScala] = None, validate: Boolean = false, conflictManager: ConflictManager = ConflictManager.default) extends ModuleSettings { + def withConfigurations(configurations: Seq[Configuration]) = copy(configurations = configurations) + def noScala = copy(ivyScala = None) } @deprecated("Define a module using inline Scala (InlineConfiguration), a pom.xml (PomConfiguration), or an ivy.xml (IvyFileConfiguration).", "0.13.0") -final case class EmptyConfiguration(module: ModuleID, moduleInfo: ModuleInfo, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings -{ - def noScala = copy(ivyScala = None) +final case class EmptyConfiguration(module: ModuleID, moduleInfo: ModuleInfo, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings { + def noScala = copy(ivyScala = None) } -object InlineConfiguration -{ - def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) = - if(explicitConfigurations.isEmpty) - { - defaultConfiguration match - { - case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil - case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations - case _ => Nil - } - } - else - explicitConfigurations +object InlineConfiguration { + def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) = + if (explicitConfigurations.isEmpty) { + defaultConfiguration match { + case 
Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil + case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations + case _ => Nil + } + } else + explicitConfigurations } -object ModuleSettings -{ - @deprecated("Explicitly select configuration from pom.xml, ivy.xml, or inline Scala.", "0.13.0") - def apply(ivyScala: Option[IvyScala], validate: Boolean, module: => ModuleID, moduleInfo: => ModuleInfo)(baseDirectory: File, log: Logger): ModuleSettings = - { - log.debug("Autodetecting dependencies.") - val defaultPOMFile = IvySbt.defaultPOM(baseDirectory) - if(defaultPOMFile.canRead) - new PomConfiguration(defaultPOMFile, ivyScala, validate, true) - else - { - val defaultIvy = IvySbt.defaultIvyFile(baseDirectory) - if(defaultIvy.canRead) - new IvyFileConfiguration(defaultIvy, ivyScala, validate, true) - else - { - log.warn("No dependency configuration found, using defaults.") - new EmptyConfiguration(module, moduleInfo, ivyScala, validate) - } - } - } +object ModuleSettings { + @deprecated("Explicitly select configuration from pom.xml, ivy.xml, or inline Scala.", "0.13.0") + def apply(ivyScala: Option[IvyScala], validate: Boolean, module: => ModuleID, moduleInfo: => ModuleInfo)(baseDirectory: File, log: Logger): ModuleSettings = + { + log.debug("Autodetecting dependencies.") + val defaultPOMFile = IvySbt.defaultPOM(baseDirectory) + if (defaultPOMFile.canRead) + new PomConfiguration(defaultPOMFile, ivyScala, validate, true) + else { + val defaultIvy = IvySbt.defaultIvyFile(baseDirectory) + if (defaultIvy.canRead) + new IvyFileConfiguration(defaultIvy, ivyScala, validate, true) + else { + log.warn("No dependency configuration found, using defaults.") + new EmptyConfiguration(module, moduleInfo, ivyScala, validate) + } + } + } } diff --git a/ivy/src/main/scala/sbt/IvyInterface.scala b/ivy/src/main/scala/sbt/IvyInterface.scala index 2772b98c3..05720e6aa 100644 --- a/ivy/src/main/scala/sbt/IvyInterface.scala +++ 
b/ivy/src/main/scala/sbt/IvyInterface.scala @@ -4,18 +4,17 @@ package sbt import java.io.File -import java.net.{URI, URL} +import java.net.{ URI, URL } import scala.xml.NodeSeq -import org.apache.ivy.plugins.resolver.{DependencyResolver, IBiblioResolver} +import org.apache.ivy.plugins.resolver.{ DependencyResolver, IBiblioResolver } import org.apache.ivy.util.url.CredentialsStore /** Additional information about a project module */ -final case class ModuleInfo(nameFormal: String, description: String = "", homepage: Option[URL] = None, startYear: Option[Int] = None, licenses: Seq[(String, URL)] = Nil, organizationName: String = "", organizationHomepage: Option[URL] = None, scmInfo: Option[ScmInfo] = None) -{ - def formally(name: String) = copy(nameFormal = name) - def describing(desc: String, home: Option[URL]) = copy(description = desc, homepage = home) - def licensed(lics: (String, URL)*) = copy(licenses = lics) - def organization(name: String, home: Option[URL]) = copy(organizationName = name, organizationHomepage = home) +final case class ModuleInfo(nameFormal: String, description: String = "", homepage: Option[URL] = None, startYear: Option[Int] = None, licenses: Seq[(String, URL)] = Nil, organizationName: String = "", organizationHomepage: Option[URL] = None, scmInfo: Option[ScmInfo] = None) { + def formally(name: String) = copy(nameFormal = name) + def describing(desc: String, home: Option[URL]) = copy(description = desc, homepage = home) + def licensed(lics: (String, URL)*) = copy(licenses = lics) + def organization(name: String, home: Option[URL]) = copy(organizationName = name, organizationHomepage = home) } /** Basic SCM information for a project module */ @@ -25,20 +24,19 @@ final case class ScmInfo(browseUrl: URL, connection: String, devConnection: Opti final case class ExclusionRule(organization: String = "*", name: String = "*", artifact: String = "*", configurations: Seq[String] = Nil) final case class ModuleConfiguration(organization: String, name: 
String, revision: String, resolver: Resolver) -object ModuleConfiguration -{ - def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver) - def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver) +object ModuleConfiguration { + def apply(org: String, resolver: Resolver): ModuleConfiguration = apply(org, "*", "*", resolver) + def apply(org: String, name: String, resolver: Resolver): ModuleConfiguration = ModuleConfiguration(org, name, "*", resolver) } final case class ConflictManager(name: String, organization: String = "*", module: String = "*") /** See http://ant.apache.org/ivy/history/latest-milestone/settings/conflict-managers.html for details of the different conflict managers.*/ object ConflictManager { - val all = ConflictManager("all") - val latestTime = ConflictManager("latest-time") - val latestRevision = ConflictManager("latest-revision") - val latestCompatible = ConflictManager("latest-compatible") - val strict = ConflictManager("strict") - val default = latestRevision + val all = ConflictManager("all") + val latestTime = ConflictManager("latest-time") + val latestRevision = ConflictManager("latest-revision") + val latestCompatible = ConflictManager("latest-compatible") + val strict = ConflictManager("strict") + val default = latestRevision } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/IvyLogger.scala b/ivy/src/main/scala/sbt/IvyLogger.scala index 6e1b136fe..42131a174 100644 --- a/ivy/src/main/scala/sbt/IvyLogger.scala +++ b/ivy/src/main/scala/sbt/IvyLogger.scala @@ -3,56 +3,51 @@ */ package sbt -import org.apache.ivy.util.{Message, MessageLogger, MessageLoggerEngine} +import org.apache.ivy.util.{ Message, MessageLogger, MessageLoggerEngine } /** Interface to Ivy logging. 
*/ -private final class IvyLoggerInterface(logger: Logger) extends MessageLogger -{ - def rawlog(msg: String, level: Int) = log(msg, level) - def log(msg: String, level: Int) - { - import Message.{MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR} - level match - { - case MSG_DEBUG => debug(msg) - case MSG_VERBOSE => verbose(msg) - case MSG_INFO => info(msg) - case MSG_WARN => warn(msg) - case MSG_ERR => error(msg) - } - } - //DEBUG level messages are very verbose and rarely useful to users. - // TODO: provide access to this information some other way - def debug(msg: String) {} - def verbose(msg: String) = logger.verbose(msg) - def deprecated(msg: String) = warn(msg) - def info(msg: String) = logger.info(msg) - def rawinfo(msg: String) = info(msg) - def warn(msg: String) = logger.warn(msg) - def error(msg: String) = if(SbtIvyLogger.acceptError(msg)) logger.error(msg) - - private def emptyList = java.util.Collections.emptyList[String] - def getProblems = emptyList - def getWarns = emptyList - def getErrors = emptyList +private final class IvyLoggerInterface(logger: Logger) extends MessageLogger { + def rawlog(msg: String, level: Int) = log(msg, level) + def log(msg: String, level: Int) { + import Message.{ MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR } + level match { + case MSG_DEBUG => debug(msg) + case MSG_VERBOSE => verbose(msg) + case MSG_INFO => info(msg) + case MSG_WARN => warn(msg) + case MSG_ERR => error(msg) + } + } + //DEBUG level messages are very verbose and rarely useful to users. 
+ // TODO: provide access to this information some other way + def debug(msg: String) {} + def verbose(msg: String) = logger.verbose(msg) + def deprecated(msg: String) = warn(msg) + def info(msg: String) = logger.info(msg) + def rawinfo(msg: String) = info(msg) + def warn(msg: String) = logger.warn(msg) + def error(msg: String) = if (SbtIvyLogger.acceptError(msg)) logger.error(msg) - def clearProblems = () - def sumupProblems = clearProblems() - def progress = () - def endProgress = () + private def emptyList = java.util.Collections.emptyList[String] + def getProblems = emptyList + def getWarns = emptyList + def getErrors = emptyList - def endProgress(msg: String) = info(msg) - def isShowProgress = false - def setShowProgress(progress: Boolean) {} + def clearProblems = () + def sumupProblems = clearProblems() + def progress = () + def endProgress = () + + def endProgress(msg: String) = info(msg) + def isShowProgress = false + def setShowProgress(progress: Boolean) {} } -private final class SbtMessageLoggerEngine extends MessageLoggerEngine -{ - /** This is a hack to filter error messages about 'unknown resolver ...'. */ - override def error(msg: String) = if(SbtIvyLogger.acceptError(msg)) super.error(msg) - override def sumupProblems = clearProblems() +private final class SbtMessageLoggerEngine extends MessageLoggerEngine { + /** This is a hack to filter error messages about 'unknown resolver ...'. 
*/ + override def error(msg: String) = if (SbtIvyLogger.acceptError(msg)) super.error(msg) + override def sumupProblems = clearProblems() } -private object SbtIvyLogger -{ - val UnknownResolver = "unknown resolver" - def acceptError(msg: String) = (msg ne null) && !msg.startsWith(UnknownResolver) +private object SbtIvyLogger { + val UnknownResolver = "unknown resolver" + def acceptError(msg: String) = (msg ne null) && !msg.startsWith(UnknownResolver) } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/IvyRetrieve.scala b/ivy/src/main/scala/sbt/IvyRetrieve.scala index dd0ffe910..5d0f3f6ec 100644 --- a/ivy/src/main/scala/sbt/IvyRetrieve.scala +++ b/ivy/src/main/scala/sbt/IvyRetrieve.scala @@ -6,51 +6,49 @@ package sbt import java.io.File import collection.mutable -import org.apache.ivy.core.{module, report} -import module.descriptor.{Artifact => IvyArtifact} +import org.apache.ivy.core.{ module, report } +import module.descriptor.{ Artifact => IvyArtifact } import module.id.ModuleRevisionId -import report.{ArtifactDownloadReport, ConfigurationResolveReport, ResolveReport} +import report.{ ArtifactDownloadReport, ConfigurationResolveReport, ResolveReport } -object IvyRetrieve -{ - def reports(report: ResolveReport): Seq[ConfigurationResolveReport] = - report.getConfigurations map report.getConfigurationReport +object IvyRetrieve { + def reports(report: ResolveReport): Seq[ConfigurationResolveReport] = + report.getConfigurations map report.getConfigurationReport - def moduleReports(confReport: ConfigurationResolveReport): Seq[ModuleReport] = - for( revId <- confReport.getModuleRevisionIds.toArray collect { case revId: ModuleRevisionId => revId }) yield - artifactReports(toModuleID(revId), confReport getDownloadReports revId) + def moduleReports(confReport: ConfigurationResolveReport): Seq[ModuleReport] = + for (revId <- confReport.getModuleRevisionIds.toArray collect { case revId: ModuleRevisionId => revId }) yield artifactReports(toModuleID(revId), 
confReport getDownloadReports revId) - def artifactReports(mid: ModuleID, artReport: Seq[ArtifactDownloadReport]): ModuleReport = - { - val missing = new mutable.ListBuffer[Artifact] - val resolved = new mutable.ListBuffer[(Artifact,File)] - for(r <- artReport) { - val file = r.getLocalFile - val art = toArtifact(r.getArtifact) - if(file eq null) - missing += art - else - resolved += ((art,file)) - } - new ModuleReport(mid, resolved.toSeq, missing.toSeq) - } + def artifactReports(mid: ModuleID, artReport: Seq[ArtifactDownloadReport]): ModuleReport = + { + val missing = new mutable.ListBuffer[Artifact] + val resolved = new mutable.ListBuffer[(Artifact, File)] + for (r <- artReport) { + val file = r.getLocalFile + val art = toArtifact(r.getArtifact) + if (file eq null) + missing += art + else + resolved += ((art, file)) + } + new ModuleReport(mid, resolved.toSeq, missing.toSeq) + } - def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] = - confReport.getEvictedNodes.map(node => toModuleID(node.getId)) - - def toModuleID(revID: ModuleRevisionId): ModuleID = - ModuleID(revID.getOrganisation, revID.getName, revID.getRevision, extraAttributes = IvySbt.getExtraAttributes(revID)) - - def toArtifact(art: IvyArtifact): Artifact = - { - import art._ - Artifact(getName, getType, getExt, Option(getExtraAttribute("classifier")), getConfigurations map Configurations.config, Option(getUrl)) - } + def evicted(confReport: ConfigurationResolveReport): Seq[ModuleID] = + confReport.getEvictedNodes.map(node => toModuleID(node.getId)) - def updateReport(report: ResolveReport, cachedDescriptor: File): UpdateReport = - new UpdateReport(cachedDescriptor, reports(report) map configurationReport, updateStats(report), Map.empty) recomputeStamps() - def updateStats(report: ResolveReport): UpdateStats = - new UpdateStats(report.getResolveTime, report.getDownloadTime, report.getDownloadSize, false) - def configurationReport(confReport: ConfigurationResolveReport): 
ConfigurationReport = - new ConfigurationReport(confReport.getConfiguration, moduleReports(confReport), evicted(confReport)) + def toModuleID(revID: ModuleRevisionId): ModuleID = + ModuleID(revID.getOrganisation, revID.getName, revID.getRevision, extraAttributes = IvySbt.getExtraAttributes(revID)) + + def toArtifact(art: IvyArtifact): Artifact = + { + import art._ + Artifact(getName, getType, getExt, Option(getExtraAttribute("classifier")), getConfigurations map Configurations.config, Option(getUrl)) + } + + def updateReport(report: ResolveReport, cachedDescriptor: File): UpdateReport = + new UpdateReport(cachedDescriptor, reports(report) map configurationReport, updateStats(report), Map.empty) recomputeStamps () + def updateStats(report: ResolveReport): UpdateStats = + new UpdateStats(report.getResolveTime, report.getDownloadTime, report.getDownloadSize, false) + def configurationReport(confReport: ConfigurationResolveReport): ConfigurationReport = + new ConfigurationReport(confReport.getConfiguration, moduleReports(confReport), evicted(confReport)) } diff --git a/ivy/src/main/scala/sbt/IvyScala.scala b/ivy/src/main/scala/sbt/IvyScala.scala index 23b7597ff..5cc3bdd48 100644 --- a/ivy/src/main/scala/sbt/IvyScala.scala +++ b/ivy/src/main/scala/sbt/IvyScala.scala @@ -6,111 +6,108 @@ package sbt import java.util.Collections.emptyMap import scala.collection.mutable.HashSet -import org.apache.ivy.{core, plugins} -import core.module.descriptor.{DefaultExcludeRule, ExcludeRule} -import core.module.descriptor.{DependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor, OverrideDependencyDescriptorMediator} -import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId} +import org.apache.ivy.{ core, plugins } +import core.module.descriptor.{ DefaultExcludeRule, ExcludeRule } +import core.module.descriptor.{ DependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor, OverrideDependencyDescriptorMediator } +import core.module.id.{ ArtifactId, ModuleId, 
ModuleRevisionId } import plugins.matcher.ExactPatternMatcher -object ScalaArtifacts -{ - import xsbti.ArtifactInfo._ - val Organization = ScalaOrganization - val LibraryID = ScalaLibraryID - val CompilerID = ScalaCompilerID - def libraryDependency(version: String): ModuleID = ModuleID(Organization, LibraryID, version) +object ScalaArtifacts { + import xsbti.ArtifactInfo._ + val Organization = ScalaOrganization + val LibraryID = ScalaLibraryID + val CompilerID = ScalaCompilerID + def libraryDependency(version: String): ModuleID = ModuleID(Organization, LibraryID, version) - private[sbt] def toolDependencies(org: String, version: String): Seq[ModuleID] = Seq( - scalaToolDependency(org, ScalaArtifacts.CompilerID, version), - scalaToolDependency(org, ScalaArtifacts.LibraryID, version) - ) - private[this] def scalaToolDependency(org: String, id: String, version: String): ModuleID = - ModuleID(org, id, version, Some(Configurations.ScalaTool.name + "->default,optional(default)") ) + private[sbt] def toolDependencies(org: String, version: String): Seq[ModuleID] = Seq( + scalaToolDependency(org, ScalaArtifacts.CompilerID, version), + scalaToolDependency(org, ScalaArtifacts.LibraryID, version) + ) + private[this] def scalaToolDependency(org: String, id: String, version: String): ModuleID = + ModuleID(org, id, version, Some(Configurations.ScalaTool.name + "->default,optional(default)")) } -object SbtArtifacts -{ - import xsbti.ArtifactInfo._ - val Organization = SbtOrganization +object SbtArtifacts { + import xsbti.ArtifactInfo._ + val Organization = SbtOrganization } import ScalaArtifacts._ final case class IvyScala(scalaFullVersion: String, scalaBinaryVersion: String, configurations: Iterable[Configuration], checkExplicit: Boolean, filterImplicit: Boolean, overrideScalaVersion: Boolean, scalaOrganization: String = ScalaArtifacts.Organization) -private object IvyScala -{ - /** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). 
*/ - def checkModule(module: DefaultModuleDescriptor, conf: String, log: Logger)(check: IvyScala) - { - if(check.checkExplicit) - checkDependencies(module, check.scalaBinaryVersion, check.configurations, log) - if(check.filterImplicit) - excludeScalaJars(module, check.configurations) - if(check.overrideScalaVersion) - overrideScalaVersion(module, check.scalaFullVersion) - } - def overrideScalaVersion(module: DefaultModuleDescriptor, version: String) - { - overrideVersion(module, Organization, LibraryID, version) - overrideVersion(module, Organization, CompilerID, version) - } - def overrideVersion(module: DefaultModuleDescriptor, org: String, name: String, version: String) - { - val id = new ModuleId(org, name) - val over = new OverrideDependencyDescriptorMediator(null, version) - module.addDependencyDescriptorMediator(id, ExactPatternMatcher.INSTANCE, over) - } - - /** Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the - * dependencies matches scalaVersion. 
*/ - private def checkDependencies(module: ModuleDescriptor, scalaBinaryVersion: String, configurations: Iterable[Configuration], log: Logger) - { - val configSet = if(configurations.isEmpty) (c: String) => true else configurationSet(configurations) - def binaryScalaWarning(dep: DependencyDescriptor): Option[String] = - { - val id = dep.getDependencyRevisionId - val depBinaryVersion = CrossVersion.binaryScalaVersion(id.getRevision) - val mismatched = id.getOrganisation == Organization && depBinaryVersion != scalaBinaryVersion && dep.getModuleConfigurations.exists(configSet) - if(mismatched) - Some("Binary version (" + depBinaryVersion + ") for dependency " + id + - "\n\tin " + module.getModuleRevisionId + - " differs from Scala binary version in project (" + scalaBinaryVersion + ").") - else - None - } - module.getDependencies.toList.flatMap(binaryScalaWarning).toSet foreach { (s: String) => log.warn(s) } - } - private def configurationSet(configurations: Iterable[Configuration]) = configurations.map(_.toString).toSet +private object IvyScala { + /** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). 
*/ + def checkModule(module: DefaultModuleDescriptor, conf: String, log: Logger)(check: IvyScala) { + if (check.checkExplicit) + checkDependencies(module, check.scalaBinaryVersion, check.configurations, log) + if (check.filterImplicit) + excludeScalaJars(module, check.configurations) + if (check.overrideScalaVersion) + overrideScalaVersion(module, check.scalaFullVersion) + } + def overrideScalaVersion(module: DefaultModuleDescriptor, version: String) { + overrideVersion(module, Organization, LibraryID, version) + overrideVersion(module, Organization, CompilerID, version) + } + def overrideVersion(module: DefaultModuleDescriptor, org: String, name: String, version: String) { + val id = new ModuleId(org, name) + val over = new OverrideDependencyDescriptorMediator(null, version) + module.addDependencyDescriptorMediator(id, ExactPatternMatcher.INSTANCE, over) + } - /** Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is - * done because these jars are provided by the ScalaInstance of the project. The version of Scala to use - * is done by setting scalaVersion in the project definition. */ - private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration]) - { - val configurationNames = - { - val names = module.getConfigurationsNames - if(configurations.isEmpty) - names - else - { - val configSet = configurationSet(configurations) - configSet.intersect(HashSet(names : _*)) - configSet.toArray - } - } - def excludeScalaJar(name: String): Unit = - module.addExcludeRule(excludeRule(Organization, name, configurationNames, "jar")) - excludeScalaJar(LibraryID) - excludeScalaJar(CompilerID) - } - /** Creates an ExcludeRule that excludes artifacts with the given module organization and name for - * the given configurations. 
*/ - private[sbt] def excludeRule(organization: String, name: String, configurationNames: Iterable[String], excludeTypePattern: String): ExcludeRule = - { - val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", excludeTypePattern, "*") - val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef,AnyRef]) - configurationNames.foreach(rule.addConfiguration) - rule - } + /** + * Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the + * dependencies matches scalaVersion. + */ + private def checkDependencies(module: ModuleDescriptor, scalaBinaryVersion: String, configurations: Iterable[Configuration], log: Logger) { + val configSet = if (configurations.isEmpty) (c: String) => true else configurationSet(configurations) + def binaryScalaWarning(dep: DependencyDescriptor): Option[String] = + { + val id = dep.getDependencyRevisionId + val depBinaryVersion = CrossVersion.binaryScalaVersion(id.getRevision) + val mismatched = id.getOrganisation == Organization && depBinaryVersion != scalaBinaryVersion && dep.getModuleConfigurations.exists(configSet) + if (mismatched) + Some("Binary version (" + depBinaryVersion + ") for dependency " + id + + "\n\tin " + module.getModuleRevisionId + + " differs from Scala binary version in project (" + scalaBinaryVersion + ").") + else + None + } + module.getDependencies.toList.flatMap(binaryScalaWarning).toSet foreach { (s: String) => log.warn(s) } + } + private def configurationSet(configurations: Iterable[Configuration]) = configurations.map(_.toString).toSet + + /** + * Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is + * done because these jars are provided by the ScalaInstance of the project. The version of Scala to use + * is done by setting scalaVersion in the project definition. 
+ */ + private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration]) { + val configurationNames = + { + val names = module.getConfigurationsNames + if (configurations.isEmpty) + names + else { + val configSet = configurationSet(configurations) + configSet.intersect(HashSet(names: _*)) + configSet.toArray + } + } + def excludeScalaJar(name: String): Unit = + module.addExcludeRule(excludeRule(Organization, name, configurationNames, "jar")) + excludeScalaJar(LibraryID) + excludeScalaJar(CompilerID) + } + /** + * Creates an ExcludeRule that excludes artifacts with the given module organization and name for + * the given configurations. + */ + private[sbt] def excludeRule(organization: String, name: String, configurationNames: Iterable[String], excludeTypePattern: String): ExcludeRule = + { + val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", excludeTypePattern, "*") + val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, emptyMap[AnyRef, AnyRef]) + configurationNames.foreach(rule.addConfiguration) + rule + } } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/IvyUtil.scala b/ivy/src/main/scala/sbt/IvyUtil.scala index 1c99bb5d3..0f06b6b84 100644 --- a/ivy/src/main/scala/sbt/IvyUtil.scala +++ b/ivy/src/main/scala/sbt/IvyUtil.scala @@ -1,7 +1,6 @@ package sbt -private[sbt] object IvyUtil -{ - def separate[A,B](l: Seq[Either[A,B]]): (Seq[A], Seq[B]) = - (l.flatMap(_.left.toOption), l.flatMap(_.right.toOption)) +private[sbt] object IvyUtil { + def separate[A, B](l: Seq[Either[A, B]]): (Seq[A], Seq[B]) = + (l.flatMap(_.left.toOption), l.flatMap(_.right.toOption)) } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/MakePom.scala b/ivy/src/main/scala/sbt/MakePom.scala index 16ec28333..6e51befca 100644 --- a/ivy/src/main/scala/sbt/MakePom.scala +++ b/ivy/src/main/scala/sbt/MakePom.scala @@ -10,349 +10,348 @@ package sbt import java.io.File // Node needs to be 
renamed to XNode because the task subproject contains a Node type that will shadow // scala.xml.Node when generating aggregated API documentation -import scala.xml.{Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute} +import scala.xml.{ Elem, Node => XNode, NodeSeq, PrettyPrinter, PrefixedAttribute } import Configurations.Optional -import org.apache.ivy.{core, plugins, Ivy} +import org.apache.ivy.{ core, plugins, Ivy } import core.settings.IvySettings -import core.module.descriptor.{DependencyArtifactDescriptor, DependencyDescriptor, License, ModuleDescriptor, ExcludeRule} -import plugins.resolver.{ChainResolver, DependencyResolver, IBiblioResolver} +import core.module.descriptor.{ DependencyArtifactDescriptor, DependencyDescriptor, License, ModuleDescriptor, ExcludeRule } +import plugins.resolver.{ ChainResolver, DependencyResolver, IBiblioResolver } -class MakePom(val log: Logger) -{ - @deprecated("Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", "0.11.2") - def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit = - write(ivy, module, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], Set(Artifact.DefaultType), extra, process, filterRepositories, allRepositories, output) - def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit = - write(process(toPom(ivy, module, moduleInfo, configurations, includeTypes, extra, filterRepositories, allRepositories)), output) - // use \n as newline because 
toString uses PrettyPrinter, which hard codes line endings to be \n - def write(node: XNode, output: File): Unit = write(toString(node), output, "\n") - def write(xmlString: String, output: File, newline: String) - { - IO.write(output, "" + newline + xmlString) - } +class MakePom(val log: Logger) { + @deprecated("Use `write(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, XNode => XNode, MavenRepository => Boolean, Boolean, File)` instead", "0.11.2") + def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit = + write(ivy, module, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], Set(Artifact.DefaultType), extra, process, filterRepositories, allRepositories, output) + def write(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, process: XNode => XNode, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean, output: File): Unit = + write(process(toPom(ivy, module, moduleInfo, configurations, includeTypes, extra, filterRepositories, allRepositories)), output) + // use \n as newline because toString uses PrettyPrinter, which hard codes line endings to be \n + def write(node: XNode, output: File): Unit = write(toString(node), output, "\n") + def write(xmlString: String, output: File, newline: String) { + IO.write(output, "" + newline + xmlString) + } - def toString(node: XNode): String = new PrettyPrinter(1000, 4).format(node) - @deprecated("Use `toPom(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, MavenRepository => Boolean, Boolean)` instead", "0.11.2") - def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: 
Option[Iterable[Configuration]], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode = - toPom(ivy, module, moduleInfo, configurations, Set(Artifact.DefaultType), extra, filterRepositories, allRepositories) - def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode = - ( - 4.0.0 - { makeModuleID(module) } - {moduleInfo.nameFormal} - { makeStartYear(moduleInfo) } - { makeOrganization(moduleInfo) } - { makeScmInfo(moduleInfo) } - { extra } - { - val deps = depsInConfs(module, configurations) - makeProperties(module, deps) ++ - makeDependencies(deps, includeTypes) - } - { makeRepositories(ivy.getSettings, allRepositories, filterRepositories) } - ) + def toString(node: XNode): String = new PrettyPrinter(1000, 4).format(node) + @deprecated("Use `toPom(Ivy, ModuleDescriptor, ModuleInfo, Option[Iterable[Configuration]], Set[String], NodeSeq, MavenRepository => Boolean, Boolean)` instead", "0.11.2") + def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode = + toPom(ivy, module, moduleInfo, configurations, Set(Artifact.DefaultType), extra, filterRepositories, allRepositories) + def toPom(ivy: Ivy, module: ModuleDescriptor, moduleInfo: ModuleInfo, configurations: Option[Iterable[Configuration]], includeTypes: Set[String], extra: NodeSeq, filterRepositories: MavenRepository => Boolean, allRepositories: Boolean): XNode = + ( + 4.0.0 + { makeModuleID(module) } + { moduleInfo.nameFormal } + { makeStartYear(moduleInfo) } + { makeOrganization(moduleInfo) } + { makeScmInfo(moduleInfo) } + { extra } + { + val deps = depsInConfs(module, configurations) + makeProperties(module, deps) ++ + 
makeDependencies(deps, includeTypes) + } + { makeRepositories(ivy.getSettings, allRepositories, filterRepositories) } + ) - def makeModuleID(module: ModuleDescriptor): NodeSeq = - { - val mrid = moduleDescriptor(module) - val a: NodeSeq = - ({ mrid.getOrganisation } - { mrid.getName } - { packaging(module) }) - val b: NodeSeq = - ( (description(module.getDescription) ++ - homePage(module.getHomePage) ++ - revision(mrid.getRevision) ++ - licenses(module.getLicenses)) : NodeSeq ) - a ++ b - } + def makeModuleID(module: ModuleDescriptor): NodeSeq = + { + val mrid = moduleDescriptor(module) + val a: NodeSeq = + ({ mrid.getOrganisation } + { mrid.getName } + { packaging(module) }) + val b: NodeSeq = + ((description(module.getDescription) ++ + homePage(module.getHomePage) ++ + revision(mrid.getRevision) ++ + licenses(module.getLicenses)): NodeSeq) + a ++ b + } - def makeStartYear(moduleInfo: ModuleInfo): NodeSeq = - moduleInfo.startYear match { - case Some(y) => {y} - case _ => NodeSeq.Empty - } - def makeOrganization(moduleInfo: ModuleInfo): NodeSeq = - { - - {moduleInfo.organizationName} - { moduleInfo.organizationHomepage match { - case Some(h) => {h} - case _ => NodeSeq.Empty - }} - - } - def makeScmInfo(moduleInfo: ModuleInfo): NodeSeq = - { - moduleInfo.scmInfo match { - case Some(s) => - - {s.browseUrl} - {s.connection} - {s.devConnection match { - case Some(d) => {d} - case _ => NodeSeq.Empty - }} - - case _ => NodeSeq.Empty - } - } - def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq = - { - val extra = IvySbt.getExtraAttributes(module) - val depExtra = CustomPomParser.writeDependencyExtra(dependencies).mkString("\n") - val allExtra = if(depExtra.isEmpty) extra else extra.updated(CustomPomParser.ExtraAttributesKey, depExtra) - if(allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra) - } - def makeProperties(extra: Map[String,String]): NodeSeq = { - def _extraAttributes(k: String) = if (k == 
CustomPomParser.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null - { - for( (key,value) <- extra ) yield - ({value}).copy(label = key, attributes = _extraAttributes(key)) - } - } + def makeStartYear(moduleInfo: ModuleInfo): NodeSeq = + moduleInfo.startYear match { + case Some(y) => { y } + case _ => NodeSeq.Empty + } + def makeOrganization(moduleInfo: ModuleInfo): NodeSeq = + { + + { moduleInfo.organizationName } + { + moduleInfo.organizationHomepage match { + case Some(h)=> { h } + case _ => NodeSeq.Empty + } + } + + } + def makeScmInfo(moduleInfo: ModuleInfo): NodeSeq = + { + moduleInfo.scmInfo match { + case Some(s) => + + { s.browseUrl } + { s.connection } + { + s.devConnection match { + case Some(d)=> { d } + case _=> NodeSeq.Empty + } + } + + case _ => NodeSeq.Empty + } + } + def makeProperties(module: ModuleDescriptor, dependencies: Seq[DependencyDescriptor]): NodeSeq = + { + val extra = IvySbt.getExtraAttributes(module) + val depExtra = CustomPomParser.writeDependencyExtra(dependencies).mkString("\n") + val allExtra = if (depExtra.isEmpty) extra else extra.updated(CustomPomParser.ExtraAttributesKey, depExtra) + if (allExtra.isEmpty) NodeSeq.Empty else makeProperties(allExtra) + } + def makeProperties(extra: Map[String, String]): NodeSeq = { + def _extraAttributes(k: String) = if (k == CustomPomParser.ExtraAttributesKey) xmlSpacePreserve else scala.xml.Null + { + for ((key, value) <- extra) yield ({ value }).copy(label = key, attributes = _extraAttributes(key)) + } + } - /** - * Attribute tag that PrettyPrinter won't ignore, saying "don't mess with my spaces" - * Without this, PrettyPrinter will flatten multiple entries for ExtraDependencyAttributes and make them - * unparseable. (e.g. 
a plugin that depends on multiple plugins will fail) - */ - def xmlSpacePreserve = new PrefixedAttribute("xml", "space", "preserve", scala.xml.Null) + /** + * Attribute tag that PrettyPrinter won't ignore, saying "don't mess with my spaces" + * Without this, PrettyPrinter will flatten multiple entries for ExtraDependencyAttributes and make them + * unparseable. (e.g. a plugin that depends on multiple plugins will fail) + */ + def xmlSpacePreserve = new PrefixedAttribute("xml", "space", "preserve", scala.xml.Null) - def description(d: String) = if((d eq null) || d.isEmpty) NodeSeq.Empty else {d} - def licenses(ls: Array[License]) = if(ls == null || ls.isEmpty) NodeSeq.Empty else {ls.map(license)} - def license(l: License) = - - {l.getName} - {l.getUrl} - repo - - def homePage(homePage: String) = if(homePage eq null) NodeSeq.Empty else {homePage} - def revision(version: String) = if(version ne null) {version} else NodeSeq.Empty - def packaging(module: ModuleDescriptor) = - module.getAllArtifacts match - { - case Array() => "pom" - case Array(x) => x.getType - case xs => - val types = xs.map(_.getType).toList.filterNot(IgnoreTypes) - types match { - case Nil => Artifact.PomType - case xs if xs.contains(Artifact.DefaultType) => Artifact.DefaultType - case x :: xs => x - } - } - val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType) + def description(d: String) = if ((d eq null) || d.isEmpty) NodeSeq.Empty else { d } + def licenses(ls: Array[License]) = if (ls == null || ls.isEmpty) NodeSeq.Empty else { ls.map(license) } + def license(l: License) = + + { l.getName } + { l.getUrl } + repo + + def homePage(homePage: String) = if (homePage eq null) NodeSeq.Empty else { homePage } + def revision(version: String) = if (version ne null) { version } else NodeSeq.Empty + def packaging(module: ModuleDescriptor) = + module.getAllArtifacts match { + case Array() => "pom" + case Array(x) => x.getType + case xs => + val types = 
xs.map(_.getType).toList.filterNot(IgnoreTypes) + types match { + case Nil => Artifact.PomType + case xs if xs.contains(Artifact.DefaultType) => Artifact.DefaultType + case x :: xs => x + } + } + val IgnoreTypes: Set[String] = Set(Artifact.SourceType, Artifact.DocType, Artifact.PomType) - def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String]): NodeSeq = - if(dependencies.isEmpty) - NodeSeq.Empty - else - - { dependencies.map(makeDependency(_, includeTypes)) } - + def makeDependencies(dependencies: Seq[DependencyDescriptor], includeTypes: Set[String]): NodeSeq = + if (dependencies.isEmpty) + NodeSeq.Empty + else + + { dependencies.map(makeDependency(_, includeTypes)) } + - def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = - { - val artifacts = dependency.getAllDependencyArtifacts - val includeArtifacts = artifacts.filter(d => includeTypes(d.getType)) - if(artifacts.isEmpty) { - val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations) - makeDependencyElem(dependency, scope, optional, None, None) - } - else if(includeArtifacts.isEmpty) - NodeSeq.Empty - else - NodeSeq.fromSeq(artifacts.map( a => makeDependencyElem(dependency, a) )) - } + def makeDependency(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = + { + val artifacts = dependency.getAllDependencyArtifacts + val includeArtifacts = artifacts.filter(d => includeTypes(d.getType)) + if (artifacts.isEmpty) { + val (scope, optional) = getScopeAndOptional(dependency.getModuleConfigurations) + makeDependencyElem(dependency, scope, optional, None, None) + } else if (includeArtifacts.isEmpty) + NodeSeq.Empty + else + NodeSeq.fromSeq(artifacts.map(a => makeDependencyElem(dependency, a))) + } - def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Elem = - { - val configs = artifact.getConfigurations.toList match { - case Nil | "*" :: Nil => 
dependency.getModuleConfigurations - case x => x.toArray - } - val (scope, optional) = getScopeAndOptional(configs) - val classifier = artifactClassifier(artifact) - val baseType = artifactType(artifact) - val tpe = (classifier, baseType) match { - case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None - case _ => baseType - } - makeDependencyElem(dependency, scope, optional, classifier, tpe) - } - def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String]): Elem = - { - val mrid = dependency.getDependencyRevisionId - - {mrid.getOrganisation} - {mrid.getName} - {makeDependencyVersion(mrid.getRevision)} - { scopeElem(scope) } - { optionalElem(optional) } - { classifierElem(classifier) } - { typeElem(tpe) } - { exclusions(dependency) } - - } + def makeDependencyElem(dependency: DependencyDescriptor, artifact: DependencyArtifactDescriptor): Elem = + { + val configs = artifact.getConfigurations.toList match { + case Nil | "*" :: Nil => dependency.getModuleConfigurations + case x => x.toArray + } + val (scope, optional) = getScopeAndOptional(configs) + val classifier = artifactClassifier(artifact) + val baseType = artifactType(artifact) + val tpe = (classifier, baseType) match { + case (Some(c), Some(tpe)) if Artifact.classifierType(c) == tpe => None + case _ => baseType + } + makeDependencyElem(dependency, scope, optional, classifier, tpe) + } + def makeDependencyElem(dependency: DependencyDescriptor, scope: Option[String], optional: Boolean, classifier: Option[String], tpe: Option[String]): Elem = + { + val mrid = dependency.getDependencyRevisionId + + { mrid.getOrganisation } + { mrid.getName } + { makeDependencyVersion(mrid.getRevision) } + { scopeElem(scope) } + { optionalElem(optional) } + { classifierElem(classifier) } + { typeElem(tpe) } + { exclusions(dependency) } + + } + def makeDependencyVersion(revision: String): String = { + def plusRange(s: String, 
shift: Int = 0) = { + def pow(i: Int): Int = if (i > 0) 10 * pow(i - 1) else 1 + val (prefixVersion, lastVersion) = (s + "0" * shift).reverse.split("\\.", 2) match { + case Array(revLast, revRest) => + (revRest.reverse + ".", revLast.reverse) + case Array(revLast) => ("", revLast.reverse) + } + val lastVersionInt = lastVersion.toInt + s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt + pow(shift)})" + } + val startSym = Set(']', '[', '(') + val stopSym = Set(']', '[', ')') + try { + if (revision endsWith ".+") { + plusRange(revision.substring(0, revision.length - 2)) + } else if (revision endsWith "+") { + val base = revision.take(revision.length - 1) + // This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so + // we assume version ranges never go beyond 5 siginificant digits. + (0 to 5).map(plusRange(base, _)).mkString(",") + } else if (startSym(revision(0)) && stopSym(revision(revision.length - 1))) { + val start = revision(0) + val stop = revision(revision.length - 1) + val mid = revision.substring(1, revision.length - 1) + (if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop) + } else revision + } catch { + case e: NumberFormatException => + // TODO - if the version doesn't meet our expectations, maybe we just issue a hard + // error instead of softly ignoring the attempt to rewrite. 
+ //sys.error(s"Could not fix version [$revision] into maven style version") + revision + } + } + @deprecated("No longer used and will be removed.", "0.12.1") + def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = + { + val jarDep = dependency.getAllDependencyArtifacts.filter(d => includeTypes(d.getType)).headOption + jarDep match { + case Some(a) => classifierElem(artifactClassifier(a)) + case None => NodeSeq.Empty + } + } + def artifactType(artifact: DependencyArtifactDescriptor): Option[String] = + Option(artifact.getType).flatMap { tpe => if (tpe == "jar") None else Some(tpe) } + def typeElem(tpe: Option[String]): NodeSeq = + tpe match { + case Some(t) => { t } + case None => NodeSeq.Empty + } - def makeDependencyVersion(revision: String): String = { - def plusRange(s:String, shift:Int = 0) = { - def pow(i:Int):Int = if (i>0) 10 * pow(i-1) else 1 - val (prefixVersion, lastVersion) = (s+"0"*shift).reverse.split("\\.",2) match { - case Array(revLast,revRest) => - ( revRest.reverse + ".", revLast.reverse ) - case Array(revLast) => ("", revLast.reverse) - } - val lastVersionInt = lastVersion.toInt - s"[${prefixVersion}${lastVersion},${prefixVersion}${lastVersionInt+pow(shift)})" - } - val startSym=Set(']','[','(') - val stopSym=Set(']','[',')') - try { - if (revision endsWith ".+") { - plusRange(revision.substring(0,revision.length-2)) - } else if (revision endsWith "+") { - val base = revision.take(revision.length-1) - // This is a heuristic. Maven just doesn't support Ivy's notions of 1+, so - // we assume version ranges never go beyond 5 siginificant digits. 
- (0 to 5).map(plusRange(base,_)).mkString(",") - } else if (startSym(revision(0)) && stopSym(revision(revision.length-1))) { - val start = revision(0) - val stop = revision(revision.length-1) - val mid = revision.substring(1,revision.length-1) - (if (start == ']') "(" else start) + mid + (if (stop == '[') ")" else stop) - } else revision - } catch { - case e: NumberFormatException => - // TODO - if the version doesn't meet our expectations, maybe we just issue a hard - // error instead of softly ignoring the attempt to rewrite. - //sys.error(s"Could not fix version [$revision] into maven style version") - revision - } - } + def artifactClassifier(artifact: DependencyArtifactDescriptor): Option[String] = + Option(artifact.getExtraAttribute("classifier")) + def classifierElem(classifier: Option[String]): NodeSeq = + classifier match { + case Some(c) => { c } + case None => NodeSeq.Empty + } - @deprecated("No longer used and will be removed.", "0.12.1") - def classifier(dependency: DependencyDescriptor, includeTypes: Set[String]): NodeSeq = - { - val jarDep = dependency.getAllDependencyArtifacts.filter(d => includeTypes(d.getType)).headOption - jarDep match { - case Some(a) => classifierElem(artifactClassifier(a)) - case None => NodeSeq.Empty - } - } - def artifactType(artifact: DependencyArtifactDescriptor): Option[String] = - Option(artifact.getType).flatMap { tpe => if(tpe == "jar") None else Some(tpe) } - def typeElem(tpe: Option[String]): NodeSeq = - tpe match { - case Some(t) => {t} - case None => NodeSeq.Empty - } - - def artifactClassifier(artifact: DependencyArtifactDescriptor): Option[String] = - Option(artifact.getExtraAttribute("classifier")) - def classifierElem(classifier: Option[String]): NodeSeq = - classifier match { - case Some(c) => {c} - case None => NodeSeq.Empty - } + @deprecated("No longer used and will be removed.", "0.12.1") + def scopeAndOptional(dependency: DependencyDescriptor): NodeSeq = + { + val (scope, opt) = 
getScopeAndOptional(dependency.getModuleConfigurations) + scopeElem(scope) ++ optionalElem(opt) + } + def scopeElem(scope: Option[String]): NodeSeq = scope match { + case None | Some(Configurations.Compile.name) => NodeSeq.Empty + case Some(s) => { s } + } + def optionalElem(opt: Boolean) = if (opt) true else NodeSeq.Empty + def moduleDescriptor(module: ModuleDescriptor) = module.getModuleRevisionId - @deprecated("No longer used and will be removed.", "0.12.1") - def scopeAndOptional(dependency: DependencyDescriptor): NodeSeq = - { - val (scope, opt) = getScopeAndOptional(dependency.getModuleConfigurations) - scopeElem(scope) ++ optionalElem(opt) - } - def scopeElem(scope: Option[String]): NodeSeq = scope match { - case None | Some(Configurations.Compile.name) => NodeSeq.Empty - case Some(s) => {s} - } - def optionalElem(opt: Boolean) = if(opt) true else NodeSeq.Empty - def moduleDescriptor(module: ModuleDescriptor) = module.getModuleRevisionId + def getScopeAndOptional(confs: Array[String]): (Option[String], Boolean) = + { + val (opt, notOptional) = confs.partition(_ == Optional.name) + val defaultNotOptional = Configurations.defaultMavenConfigurations.find(notOptional contains _.name) + val scope = defaultNotOptional.map(_.name) + (scope, !opt.isEmpty) + } - def getScopeAndOptional(confs: Array[String]): (Option[String], Boolean) = - { - val (opt, notOptional) = confs.partition(_ == Optional.name) - val defaultNotOptional = Configurations.defaultMavenConfigurations.find(notOptional contains _.name) - val scope = defaultNotOptional.map(_.name) - (scope, !opt.isEmpty) - } + def exclusions(dependency: DependencyDescriptor): NodeSeq = + { + val excl = dependency.getExcludeRules(dependency.getModuleConfigurations) + val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion)) + if (!warns.isEmpty) log.warn(warns.mkString(IO.Newline)) + if (!excls.isEmpty) { excls } + else NodeSeq.Empty + } + def makeExclusion(exclRule: ExcludeRule): Either[String, NodeSeq] = + { + 
val m = exclRule.getId.getModuleId + val (g, a) = (m.getOrganisation, m.getName) + if (g == null || g.isEmpty || g == "*" || a.isEmpty || a == "*") + Left("Skipped generating '' for %s. Dependency exclusion should have both 'org' and 'module' to comply with Maven POM's schema.".format(m)) + else + Right( + + { g } + { a } + + ) + } - def exclusions(dependency: DependencyDescriptor): NodeSeq = - { - val excl = dependency.getExcludeRules(dependency.getModuleConfigurations) - val (warns, excls) = IvyUtil.separate(excl.map(makeExclusion)) - if(!warns.isEmpty) log.warn(warns.mkString(IO.Newline)) - if(!excls.isEmpty) {excls} - else NodeSeq.Empty - } - def makeExclusion(exclRule: ExcludeRule): Either[String, NodeSeq] = - { - val m = exclRule.getId.getModuleId - val (g, a) = (m.getOrganisation, m.getName) - if(g == null || g.isEmpty || g == "*" || a.isEmpty || a == "*") - Left("Skipped generating '' for %s. Dependency exclusion should have both 'org' and 'module' to comply with Maven POM's schema.".format(m)) - else - Right( - - {g} - {a} - - ) - } + def makeRepositories(settings: IvySettings, includeAll: Boolean, filterRepositories: MavenRepository => Boolean) = + { + class MavenRepo(name: String, snapshots: Boolean, releases: Boolean) + val repositories = if (includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver) + val mavenRepositories = + repositories.flatMap { + case m: IBiblioResolver if m.isM2compatible && m.getRoot != IBiblioResolver.DEFAULT_M2_ROOT => + MavenRepository(m.getName, m.getRoot) :: Nil + case _ => Nil + } + val repositoryElements = mavenRepositories.filter(filterRepositories).map(mavenRepository) + if (repositoryElements.isEmpty) repositoryElements else { repositoryElements } + } + def allResolvers(settings: IvySettings): Seq[DependencyResolver] = flatten(castResolvers(settings.getResolvers)).distinct + def flatten(rs: Seq[DependencyResolver]): Seq[DependencyResolver] = if (rs eq null) Nil else rs.flatMap(resolvers) + def 
resolvers(r: DependencyResolver): Seq[DependencyResolver] = + r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil } - def makeRepositories(settings: IvySettings, includeAll: Boolean, filterRepositories: MavenRepository => Boolean) = - { - class MavenRepo(name: String, snapshots: Boolean, releases: Boolean) - val repositories = if(includeAll) allResolvers(settings) else resolvers(settings.getDefaultResolver) - val mavenRepositories = - repositories.flatMap { - case m: IBiblioResolver if m.isM2compatible && m.getRoot != IBiblioResolver.DEFAULT_M2_ROOT => - MavenRepository(m.getName, m.getRoot) :: Nil - case _ => Nil - } - val repositoryElements = mavenRepositories.filter(filterRepositories).map(mavenRepository) - if(repositoryElements.isEmpty) repositoryElements else {repositoryElements} - } - def allResolvers(settings: IvySettings): Seq[DependencyResolver] = flatten(castResolvers(settings.getResolvers)).distinct - def flatten(rs: Seq[DependencyResolver]): Seq[DependencyResolver] = if(rs eq null) Nil else rs.flatMap(resolvers) - def resolvers(r: DependencyResolver): Seq[DependencyResolver] = - r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil } + // cast the contents of a pre-generics collection + private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] = + s.toArray.map(_.asInstanceOf[DependencyResolver]) - // cast the contents of a pre-generics collection - private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] = - s.toArray.map(_.asInstanceOf[DependencyResolver]) + def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name) + def isValidIDCharacter(c: Char) = c.isLetterOrDigit + private def checkID(id: String, name: String) = if (id.isEmpty) sys.error("Could not convert '" + name + "' to an ID") else id + def mavenRepository(repo: MavenRepository): XNode = + mavenRepository(toID(repo.name), repo.name, repo.root) + 
def mavenRepository(id: String, name: String, root: String): XNode = + + { id } + { name } + { root } + { if (name == JavaNet1Repository.name) "legacy" else "default" } + - def toID(name: String) = checkID(name.filter(isValidIDCharacter).mkString, name) - def isValidIDCharacter(c: Char) = c.isLetterOrDigit - private def checkID(id: String, name: String) = if(id.isEmpty) sys.error("Could not convert '" + name + "' to an ID") else id - def mavenRepository(repo: MavenRepository): XNode = - mavenRepository(toID(repo.name), repo.name, repo.root) - def mavenRepository(id: String, name: String, root: String): XNode = - - {id} - {name} - {root} - { if(name == JavaNet1Repository.name) "legacy" else "default" } - - - /** Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None. - * This currently only preserves the information required by makePom*/ - private def depsInConfs(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): Seq[DependencyDescriptor] = - { - val keepConfigurations = IvySbt.getConfigurations(module, configurations) - val keepSet = Set(keepConfigurations.toSeq : _*) - def translate(dependency: DependencyDescriptor) = - { - val keep = dependency.getModuleConfigurations.filter(keepSet.contains) - if(keep.isEmpty) - None - else // TODO: translate the dependency to contain only configurations to keep - Some(dependency) - } - module.getDependencies flatMap translate - } + /** + * Retain dependencies only with the configurations given, or all public configurations of `module` if `configurations` is None. 
+ * This currently only preserves the information required by makePom + */ + private def depsInConfs(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): Seq[DependencyDescriptor] = + { + val keepConfigurations = IvySbt.getConfigurations(module, configurations) + val keepSet = Set(keepConfigurations.toSeq: _*) + def translate(dependency: DependencyDescriptor) = + { + val keep = dependency.getModuleConfigurations.filter(keepSet.contains) + if (keep.isEmpty) + None + else // TODO: translate the dependency to contain only configurations to keep + Some(dependency) + } + module.getDependencies flatMap translate + } } diff --git a/ivy/src/main/scala/sbt/ModuleID.scala b/ivy/src/main/scala/sbt/ModuleID.scala index e37c84da2..178940666 100644 --- a/ivy/src/main/scala/sbt/ModuleID.scala +++ b/ivy/src/main/scala/sbt/ModuleID.scala @@ -3,103 +3,121 @@ */ package sbt - import java.net.URL +import java.net.URL -final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String] = None, isChanging: Boolean = false, isTransitive: Boolean = true, isForce: Boolean = false, explicitArtifacts: Seq[Artifact] = Nil, exclusions: Seq[ExclusionRule] = Nil, extraAttributes: Map[String,String] = Map.empty, crossVersion: CrossVersion = CrossVersion.Disabled) -{ - override def toString: String = - organization + ":" + name + ":" + revision + - (configurations match { case Some(s) => ":" + s; case None => "" }) + - (if(extraAttributes.isEmpty) "" else " " + extraString) +final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String] = None, isChanging: Boolean = false, isTransitive: Boolean = true, isForce: Boolean = false, explicitArtifacts: Seq[Artifact] = Nil, exclusions: Seq[ExclusionRule] = Nil, extraAttributes: Map[String, String] = Map.empty, crossVersion: CrossVersion = CrossVersion.Disabled) { + override def toString: String = + organization + ":" + name + ":" + revision + 
+ (configurations match { case Some(s) => ":" + s; case None => "" }) + + (if (extraAttributes.isEmpty) "" else " " + extraString) - /** String representation of the extra attributes, excluding any information only attributes. */ - def extraString: String = extraDependencyAttributes.map { case (k,v) => k + "=" + v } mkString("(",", ",")") + /** String representation of the extra attributes, excluding any information only attributes. */ + def extraString: String = extraDependencyAttributes.map { case (k, v) => k + "=" + v } mkString ("(", ", ", ")") - /** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */ - def extraDependencyAttributes: Map[String,String] = extraAttributes.filterKeys(!_.startsWith(CustomPomParser.InfoKeyPrefix)) + /** Returns the extra attributes except for ones marked as information only (ones that typically would not be used for dependency resolution). */ + def extraDependencyAttributes: Map[String, String] = extraAttributes.filterKeys(!_.startsWith(CustomPomParser.InfoKeyPrefix)) - @deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0") - def cross(v: Boolean): ModuleID = cross(if(v) CrossVersion.binary else CrossVersion.Disabled) + @deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0") + def cross(v: Boolean): ModuleID = cross(if (v) CrossVersion.binary else CrossVersion.Disabled) - @deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value constructed by a member of the CrossVersion object instead.", "0.12.0") - def cross(v: Boolean, verRemap: String => String): ModuleID = cross(if(v) CrossVersion.binaryMapped(verRemap) else CrossVersion.Disabled) + @deprecated("Use `cross(CrossVersion)`, the variant accepting a CrossVersion value 
constructed by a member of the CrossVersion object instead.", "0.12.0") + def cross(v: Boolean, verRemap: String => String): ModuleID = cross(if (v) CrossVersion.binaryMapped(verRemap) else CrossVersion.Disabled) - /** Specifies the cross-version behavior for this module. See [CrossVersion] for details.*/ - def cross(v: CrossVersion): ModuleID = copy(crossVersion = v) + /** Specifies the cross-version behavior for this module. See [CrossVersion] for details.*/ + def cross(v: CrossVersion): ModuleID = copy(crossVersion = v) - // () required for chaining - /** Do not follow dependencies of this module. Synonym for `intransitive`.*/ - def notTransitive() = intransitive() + // () required for chaining + /** Do not follow dependencies of this module. Synonym for `intransitive`.*/ + def notTransitive() = intransitive() - /** Do not follow dependencies of this module. Synonym for `notTransitive`.*/ - def intransitive() = copy(isTransitive = false) + /** Do not follow dependencies of this module. Synonym for `notTransitive`.*/ + def intransitive() = copy(isTransitive = false) - /** Marks this dependency as "changing". Ivy will always check if the metadata has changed and then if the artifact has changed, - * redownload it. sbt configures all -SNAPSHOT dependencies to be changing. - * - * See the "Changes in artifacts" section of https://ant.apache.org/ivy/history/trunk/concept.html for full details. - * */ - def changing() = copy(isChanging = true) + /** + * Marks this dependency as "changing". Ivy will always check if the metadata has changed and then if the artifact has changed, + * redownload it. sbt configures all -SNAPSHOT dependencies to be changing. + * + * See the "Changes in artifacts" section of https://ant.apache.org/ivy/history/trunk/concept.html for full details. + */ + def changing() = copy(isChanging = true) - /** Indicates that conflict resolution should only select this module's revision. 
- * This prevents a newer revision from being pulled in by a transitive dependency, for example.*/ - def force() = copy(isForce = true) + /** + * Indicates that conflict resolution should only select this module's revision. + * This prevents a newer revision from being pulled in by a transitive dependency, for example. + */ + def force() = copy(isForce = true) - /** Specifies a URL from which the main artifact for this dependency can be downloaded. - * This value is only consulted if the module is not found in a repository. - * It is not included in published metadata.*/ - def from(url: String) = artifacts(Artifact(name, new URL(url))) + /** + * Specifies a URL from which the main artifact for this dependency can be downloaded. + * This value is only consulted if the module is not found in a repository. + * It is not included in published metadata. + */ + def from(url: String) = artifacts(Artifact(name, new URL(url))) - /** Adds a dependency on the artifact for this module with classifier `c`. */ - def classifier(c: String) = artifacts(Artifact(name, c)) + /** Adds a dependency on the artifact for this module with classifier `c`. */ + def classifier(c: String) = artifacts(Artifact(name, c)) - /** Declares the explicit artifacts for this module. If this ModuleID represents a dependency, - * these artifact definitions override the information in the dependency's published metadata. */ - def artifacts(newArtifacts: Artifact*) = copy(explicitArtifacts = newArtifacts ++ this.explicitArtifacts) + /** + * Declares the explicit artifacts for this module. If this ModuleID represents a dependency, + * these artifact definitions override the information in the dependency's published metadata. + */ + def artifacts(newArtifacts: Artifact*) = copy(explicitArtifacts = newArtifacts ++ this.explicitArtifacts) - /** Applies the provided exclusions to dependencies of this module. 
Note that only exclusions that specify - * both the exact organization and name and nothing else will be included in a pom.xml.*/ - def excludeAll(rules: ExclusionRule*) = copy(exclusions = this.exclusions ++ rules) + /** + * Applies the provided exclusions to dependencies of this module. Note that only exclusions that specify + * both the exact organization and name and nothing else will be included in a pom.xml. + */ + def excludeAll(rules: ExclusionRule*) = copy(exclusions = this.exclusions ++ rules) - /** Excludes the dependency with organization `org` and `name` from being introduced by this dependency during resolution. */ - def exclude(org: String, name: String) = excludeAll(ExclusionRule(org, name)) + /** Excludes the dependency with organization `org` and `name` from being introduced by this dependency during resolution. */ + def exclude(org: String, name: String) = excludeAll(ExclusionRule(org, name)) - /** Adds extra attributes for this module. All keys are prefixed with `e:` if they are not already so prefixed. - * This information will only be published in an ivy.xml and not in a pom.xml. */ - def extra(attributes: (String,String)*) = copy(extraAttributes = this.extraAttributes ++ ModuleID.checkE(attributes)) + /** + * Adds extra attributes for this module. All keys are prefixed with `e:` if they are not already so prefixed. + * This information will only be published in an ivy.xml and not in a pom.xml. + */ + def extra(attributes: (String, String)*) = copy(extraAttributes = this.extraAttributes ++ ModuleID.checkE(attributes)) - /** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred - * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" - * classifier. 
If you want to also depend on the main artifact, be sure to also call `jar()` or use `withSources()` instead.*/ - def sources() = artifacts(Artifact.sources(name)) + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" + * classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withSources()` instead. + */ + def sources() = artifacts(Artifact.sources(name)) - /** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred - * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" - * classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withJavadoc()` instead.*/ - def javadoc() = artifacts(Artifact.javadoc(name)) + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" + * classifier. If you want to also depend on the main artifact, be sure to also call `jar()` or use `withJavadoc()` instead. + */ + def javadoc() = artifacts(Artifact.javadoc(name)) - def pomOnly() = artifacts(Artifact.pom(name)) + def pomOnly() = artifacts(Artifact.pom(name)) - /** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred - * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" - * classifier. If there is not already an explicit dependency on the main artifact, this adds one.*/ - def withSources() = jarIfEmpty.sources() + /** + * Not recommended for new use. 
This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "sources" + * classifier. If there is not already an explicit dependency on the main artifact, this adds one. + */ + def withSources() = jarIfEmpty.sources() - /** Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred - * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" - * classifier. If there is not already an explicit dependency on the main artifact, this adds one.*/ - def withJavadoc() = jarIfEmpty.javadoc() + /** + * Not recommended for new use. This method is not deprecated, but the `update-classifiers` task is preferred + * for performance and correctness. This method adds a dependency on this module's artifact with the "javadoc" + * classifier. If there is not already an explicit dependency on the main artifact, this adds one. + */ + def withJavadoc() = jarIfEmpty.javadoc() - private def jarIfEmpty = if(explicitArtifacts.isEmpty) jar() else this + private def jarIfEmpty = if (explicitArtifacts.isEmpty) jar() else this - /** Declares a dependency on the main artifact. This is implied by default unless artifacts are explicitly declared, such - * as when adding a dependency on an artifact with a classifier.*/ - def jar() = artifacts(Artifact(name)) + /** + * Declares a dependency on the main artifact. This is implied by default unless artifacts are explicitly declared, such + * as when adding a dependency on an artifact with a classifier. + */ + def jar() = artifacts(Artifact(name)) } -object ModuleID -{ - /** Prefixes all keys with `e:` if they are not already so prefixed. 
*/ - def checkE(attributes: Seq[(String, String)]) = - for ( (key, value) <- attributes) yield - if(key.startsWith("e:")) (key, value) else ("e:" + key, value) +object ModuleID { + /** Prefixes all keys with `e:` if they are not already so prefixed. */ + def checkE(attributes: Seq[(String, String)]) = + for ((key, value) <- attributes) yield if (key.startsWith("e:")) (key, value) else ("e:" + key, value) } diff --git a/ivy/src/main/scala/sbt/ProjectResolver.scala b/ivy/src/main/scala/sbt/ProjectResolver.scala index 917889db8..caff3d496 100644 --- a/ivy/src/main/scala/sbt/ProjectResolver.scala +++ b/ivy/src/main/scala/sbt/ProjectResolver.scala @@ -3,87 +3,88 @@ */ package sbt - import java.io.File - import java.util.Date +import java.io.File +import java.util.Date - import org.apache.ivy.{core,plugins} - import core.{cache,module, report, resolve,search} - import cache.{ArtifactOrigin,RepositoryCacheManager} - import search.{ModuleEntry, OrganisationEntry, RevisionEntry} - import module.id.ModuleRevisionId - import module.descriptor.{Artifact => IArtifact, DefaultArtifact, DependencyDescriptor, ModuleDescriptor} - import plugins.namespace.Namespace - import plugins.resolver.{DependencyResolver,ResolverSettings} - import report.{ArtifactDownloadReport, DownloadReport, DownloadStatus, MetadataArtifactDownloadReport} - import resolve.{DownloadOptions, ResolveData, ResolvedModuleRevision} +import org.apache.ivy.{ core, plugins } +import core.{ cache, module, report, resolve, search } +import cache.{ ArtifactOrigin, RepositoryCacheManager } +import search.{ ModuleEntry, OrganisationEntry, RevisionEntry } +import module.id.ModuleRevisionId +import module.descriptor.{ Artifact => IArtifact, DefaultArtifact, DependencyDescriptor, ModuleDescriptor } +import plugins.namespace.Namespace +import plugins.resolver.{ DependencyResolver, ResolverSettings } +import report.{ ArtifactDownloadReport, DownloadReport, DownloadStatus, MetadataArtifactDownloadReport } +import resolve.{ 
DownloadOptions, ResolveData, ResolvedModuleRevision } -/**A Resolver that uses a predefined mapping from module ids to in-memory descriptors. -* It does not handle artifacts.*/ -class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]) extends ResolverAdapter -{ - def getName = name - def setName(name: String) = sys.error("Setting name not supported by ProjectResolver") - override def toString = "ProjectResolver(" + name + ", mapped: " + map.keys.mkString(", ") + ")" +/** + * A Resolver that uses a predefined mapping from module ids to in-memory descriptors. + * It does not handle artifacts. + */ +class ProjectResolver(name: String, map: Map[ModuleRevisionId, ModuleDescriptor]) extends ResolverAdapter { + def getName = name + def setName(name: String) = sys.error("Setting name not supported by ProjectResolver") + override def toString = "ProjectResolver(" + name + ", mapped: " + map.keys.mkString(", ") + ")" - def getDependency(dd: DependencyDescriptor, data: ResolveData): ResolvedModuleRevision = - getDependency(dd.getDependencyRevisionId).orNull + def getDependency(dd: DependencyDescriptor, data: ResolveData): ResolvedModuleRevision = + getDependency(dd.getDependencyRevisionId).orNull - private[this] def getDependency(revisionId: ModuleRevisionId): Option[ResolvedModuleRevision] = - { - def constructResult(descriptor: ModuleDescriptor) = new ResolvedModuleRevision(this, this, descriptor, report(revisionId), true) - map get revisionId map constructResult - } + private[this] def getDependency(revisionId: ModuleRevisionId): Option[ResolvedModuleRevision] = + { + def constructResult(descriptor: ModuleDescriptor) = new ResolvedModuleRevision(this, this, descriptor, report(revisionId), true) + map get revisionId map constructResult + } - def report(revisionId: ModuleRevisionId): MetadataArtifactDownloadReport = - { - val artifact = DefaultArtifact.newIvyArtifact(revisionId, new Date) - val r = new MetadataArtifactDownloadReport(artifact) - 
r.setSearched(false) - r.setDownloadStatus(DownloadStatus.FAILED) - r - } + def report(revisionId: ModuleRevisionId): MetadataArtifactDownloadReport = + { + val artifact = DefaultArtifact.newIvyArtifact(revisionId, new Date) + val r = new MetadataArtifactDownloadReport(artifact) + r.setSearched(false) + r.setDownloadStatus(DownloadStatus.FAILED) + r + } - // this resolver nevers locates artifacts, only resolves dependencies - def exists(artifact: IArtifact) = false - def locate(artifact: IArtifact) = null - def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport = - { - val r = new DownloadReport - for(artifact <- artifacts) - if(getDependency(artifact.getModuleRevisionId).isEmpty) - r.addArtifactReport(notDownloaded(artifact)) - r - } + // this resolver nevers locates artifacts, only resolves dependencies + def exists(artifact: IArtifact) = false + def locate(artifact: IArtifact) = null + def download(artifacts: Array[IArtifact], options: DownloadOptions): DownloadReport = + { + val r = new DownloadReport + for (artifact <- artifacts) + if (getDependency(artifact.getModuleRevisionId).isEmpty) + r.addArtifactReport(notDownloaded(artifact)) + r + } - def download(artifact: ArtifactOrigin, options: DownloadOptions): ArtifactDownloadReport = - notDownloaded(artifact.getArtifact) - def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData) = null + def download(artifact: ArtifactOrigin, options: DownloadOptions): ArtifactDownloadReport = + notDownloaded(artifact.getArtifact) + def findIvyFileRef(dd: DependencyDescriptor, data: ResolveData) = null - def notDownloaded(artifact: IArtifact): ArtifactDownloadReport= - { - val r = new ArtifactDownloadReport(artifact) - r.setDownloadStatus(DownloadStatus.FAILED) - r - } + def notDownloaded(artifact: IArtifact): ArtifactDownloadReport = + { + val r = new ArtifactDownloadReport(artifact) + r.setDownloadStatus(DownloadStatus.FAILED) + r + } - // doesn't support publishing - def publish(artifact: 
IArtifact, src: File, overwrite: Boolean) = sys.error("Publish not supported by ProjectResolver") - def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean) {} - def abortPublishTransaction() {} - def commitPublishTransaction() {} + // doesn't support publishing + def publish(artifact: IArtifact, src: File, overwrite: Boolean) = sys.error("Publish not supported by ProjectResolver") + def beginPublishTransaction(module: ModuleRevisionId, overwrite: Boolean) {} + def abortPublishTransaction() {} + def commitPublishTransaction() {} - def reportFailure() {} - def reportFailure(art: IArtifact) {} + def reportFailure() {} + def reportFailure(art: IArtifact) {} - def listOrganisations() = new Array[OrganisationEntry](0) - def listModules(org: OrganisationEntry) = new Array[ModuleEntry](0) - def listRevisions(module: ModuleEntry) = new Array[RevisionEntry](0) + def listOrganisations() = new Array[OrganisationEntry](0) + def listModules(org: OrganisationEntry) = new Array[ModuleEntry](0) + def listRevisions(module: ModuleEntry) = new Array[RevisionEntry](0) - def getNamespace = Namespace.SYSTEM_NAMESPACE + def getNamespace = Namespace.SYSTEM_NAMESPACE - private[this] var settings: Option[ResolverSettings] = None + private[this] var settings: Option[ResolverSettings] = None - def dumpSettings() {} - def setSettings(settings: ResolverSettings) { this.settings = Some(settings) } - def getRepositoryCacheManager = settings match { case Some(s) => s.getDefaultRepositoryCacheManager; case None => sys.error("No settings defined for ProjectResolver") } + def dumpSettings() {} + def setSettings(settings: ResolverSettings) { this.settings = Some(settings) } + def getRepositoryCacheManager = settings match { case Some(s) => s.getDefaultRepositoryCacheManager; case None => sys.error("No settings defined for ProjectResolver") } } diff --git a/ivy/src/main/scala/sbt/ResolutionCache.scala b/ivy/src/main/scala/sbt/ResolutionCache.scala index d79f25a4c..41721a784 100644 --- 
a/ivy/src/main/scala/sbt/ResolutionCache.scala +++ b/ivy/src/main/scala/sbt/ResolutionCache.scala @@ -7,84 +7,84 @@ import org.apache.ivy.core import org.apache.ivy.plugins.parser import core.IvyPatternHelper import core.settings.IvySettings -import core.cache.{CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager, ResolutionCacheManager} +import core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager, ResolutionCacheManager } import core.module.id.ModuleRevisionId import core.module.descriptor.ModuleDescriptor -import ResolutionCache.{Name, ReportDirectory, ResolvedName, ResolvedPattern} +import ResolutionCache.{ Name, ReportDirectory, ResolvedName, ResolvedPattern } import parser.xml.XmlModuleDescriptorParser -/** Replaces the standard Ivy resolution cache in order to: -* 1. Separate cached resolved Ivy files from resolution reports, making the resolution reports easier to find. -* 2. Have them per-project for easier cleaning (possible with standard cache, but central to this custom one). -* 3. Cache location includes extra attributes so that cross builds of a plugin do not overwrite each other. -*/ -private[sbt] final class ResolutionCache(base: File, settings: IvySettings) extends ResolutionCacheManager -{ - private[this] def resolvedFileInCache(m: ModuleRevisionId, name: String, ext: String): File = { - val p = ResolvedPattern - val f = IvyPatternHelper.substitute(p, m.getOrganisation, m.getName, m.getBranch, m.getRevision, name, name, ext, null, null, m.getAttributes, null) - new File(base, f) - } - private[this] val reportBase: File = new File(base, ReportDirectory) +/** + * Replaces the standard Ivy resolution cache in order to: + * 1. Separate cached resolved Ivy files from resolution reports, making the resolution reports easier to find. + * 2. Have them per-project for easier cleaning (possible with standard cache, but central to this custom one). + * 3. 
Cache location includes extra attributes so that cross builds of a plugin do not overwrite each other. + */ +private[sbt] final class ResolutionCache(base: File, settings: IvySettings) extends ResolutionCacheManager { + private[this] def resolvedFileInCache(m: ModuleRevisionId, name: String, ext: String): File = { + val p = ResolvedPattern + val f = IvyPatternHelper.substitute(p, m.getOrganisation, m.getName, m.getBranch, m.getRevision, name, name, ext, null, null, m.getAttributes, null) + new File(base, f) + } + private[this] val reportBase: File = new File(base, ReportDirectory) - def getResolutionCacheRoot: File = base - def clean() { IO.delete(base) } - override def toString = Name + def getResolutionCacheRoot: File = base + def clean() { IO.delete(base) } + override def toString = Name - def getResolvedIvyFileInCache(mrid: ModuleRevisionId): File = - resolvedFileInCache(mrid, ResolvedName, "xml") - def getResolvedIvyPropertiesInCache(mrid: ModuleRevisionId): File = - resolvedFileInCache(mrid, ResolvedName, "properties") - // name needs to be the same as Ivy's default because the ivy-report.xsl stylesheet assumes this - // when making links to reports for other configurations - def getConfigurationResolveReportInCache(resolveId: String, conf: String): File = - new File(reportBase, resolveId + "-" + conf + ".xml") - def getConfigurationResolveReportsInCache(resolveId: String): Array[File] = - IO.listFiles(reportBase).filter(_.getName.startsWith(resolveId + "-")) - - // XXX: this method is required by ResolutionCacheManager in Ivy 2.3.0 final, - // but it is apparently unused by Ivy as sbt uses Ivy. Therefore, it is - // unexercised in tests. 
Note that the implementation of this method in Ivy 2.3.0's - // DefaultResolutionCache also resolves parent properties for a given mrid - def getResolvedModuleDescriptor(mrid: ModuleRevisionId): ModuleDescriptor = { - val ivyFile = getResolvedIvyFileInCache(mrid) - if (!ivyFile.exists()) { - throw new IllegalStateException("Ivy file not found in cache for " + mrid + "!") - } - - return XmlModuleDescriptorParser.getInstance().parseDescriptor(settings, ivyFile.toURI().toURL(), false) - } - - def saveResolvedModuleDescriptor(md: ModuleDescriptor): Unit = { - val mrid = md.getResolvedModuleRevisionId - val cachedIvyFile = getResolvedIvyFileInCache(mrid) - md.toIvyFile(cachedIvyFile) - } + def getResolvedIvyFileInCache(mrid: ModuleRevisionId): File = + resolvedFileInCache(mrid, ResolvedName, "xml") + def getResolvedIvyPropertiesInCache(mrid: ModuleRevisionId): File = + resolvedFileInCache(mrid, ResolvedName, "properties") + // name needs to be the same as Ivy's default because the ivy-report.xsl stylesheet assumes this + // when making links to reports for other configurations + def getConfigurationResolveReportInCache(resolveId: String, conf: String): File = + new File(reportBase, resolveId + "-" + conf + ".xml") + def getConfigurationResolveReportsInCache(resolveId: String): Array[File] = + IO.listFiles(reportBase).filter(_.getName.startsWith(resolveId + "-")) + + // XXX: this method is required by ResolutionCacheManager in Ivy 2.3.0 final, + // but it is apparently unused by Ivy as sbt uses Ivy. Therefore, it is + // unexercised in tests. 
Note that the implementation of this method in Ivy 2.3.0's + // DefaultResolutionCache also resolves parent properties for a given mrid + def getResolvedModuleDescriptor(mrid: ModuleRevisionId): ModuleDescriptor = { + val ivyFile = getResolvedIvyFileInCache(mrid) + if (!ivyFile.exists()) { + throw new IllegalStateException("Ivy file not found in cache for " + mrid + "!") + } + + return XmlModuleDescriptorParser.getInstance().parseDescriptor(settings, ivyFile.toURI().toURL(), false) + } + + def saveResolvedModuleDescriptor(md: ModuleDescriptor): Unit = { + val mrid = md.getResolvedModuleRevisionId + val cachedIvyFile = getResolvedIvyFileInCache(mrid) + md.toIvyFile(cachedIvyFile) + } } -private[sbt] object ResolutionCache -{ - /** Removes cached files from the resolution cache for the module with ID `mrid` - * and the resolveId (as set on `ResolveOptions`). */ - private[sbt] def cleanModule(mrid: ModuleRevisionId, resolveId: String, manager: ResolutionCacheManager) - { - val files = - Option(manager.getResolvedIvyFileInCache(mrid)).toList ::: - Option(manager.getResolvedIvyPropertiesInCache(mrid)).toList ::: - Option(manager.getConfigurationResolveReportsInCache(resolveId)).toList.flatten - IO.delete(files) - } +private[sbt] object ResolutionCache { + /** + * Removes cached files from the resolution cache for the module with ID `mrid` + * and the resolveId (as set on `ResolveOptions`). 
+ */ + private[sbt] def cleanModule(mrid: ModuleRevisionId, resolveId: String, manager: ResolutionCacheManager) { + val files = + Option(manager.getResolvedIvyFileInCache(mrid)).toList ::: + Option(manager.getResolvedIvyPropertiesInCache(mrid)).toList ::: + Option(manager.getConfigurationResolveReportsInCache(resolveId)).toList.flatten + IO.delete(files) + } - private val ReportDirectory = "reports" + private val ReportDirectory = "reports" - // name of the file providing a dependency resolution report for a configuration - private val ReportFileName = "report.xml" + // name of the file providing a dependency resolution report for a configuration + private val ReportFileName = "report.xml" - // base name (name except for extension) of resolution report file - private val ResolvedName = "resolved.xml" + // base name (name except for extension) of resolution report file + private val ResolvedName = "resolved.xml" - // Cache name - private val Name = "sbt-resolution-cache" + // Cache name + private val Name = "sbt-resolution-cache" - // use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions - private val ResolvedPattern = "[organisation]/[module]/" + Resolver.PluginPattern + "[revision]/[artifact].[ext]" + // use sbt-specific extra attributes so that resolved xml files do not get overwritten when using different Scala/sbt versions + private val ResolvedPattern = "[organisation]/[module]/" + Resolver.PluginPattern + "[revision]/[artifact].[ext]" } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/Resolver.scala b/ivy/src/main/scala/sbt/Resolver.scala index e72db1e46..f6250705e 100644 --- a/ivy/src/main/scala/sbt/Resolver.scala +++ b/ivy/src/main/scala/sbt/Resolver.scala @@ -6,145 +6,131 @@ package sbt import java.io.File import java.net.URL import scala.xml.NodeSeq -import org.apache.ivy.plugins.resolver.{DependencyResolver, IBiblioResolver} +import org.apache.ivy.plugins.resolver.{ 
DependencyResolver, IBiblioResolver } -sealed trait Resolver -{ - def name: String +sealed trait Resolver { + def name: String } -final class RawRepository(val resolver: DependencyResolver) extends Resolver -{ - def name = resolver.getName - override def toString = "Raw(" + resolver.toString + ")" +final class RawRepository(val resolver: DependencyResolver) extends Resolver { + def name = resolver.getName + override def toString = "Raw(" + resolver.toString + ")" } sealed case class ChainedResolver(name: String, resolvers: Seq[Resolver]) extends Resolver -sealed case class MavenRepository(name: String, root: String) extends Resolver -{ - override def toString = name + ": " + root +sealed case class MavenRepository(name: String, root: String) extends Resolver { + override def toString = name + ": " + root } -final class Patterns(val ivyPatterns: Seq[String], val artifactPatterns: Seq[String], val isMavenCompatible: Boolean, val descriptorOptional: Boolean, val skipConsistencyCheck: Boolean) -{ - private[sbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true) - private[sbt] def withDescriptorOptional(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, true, skipConsistencyCheck) - private[sbt] def withoutConsistencyCheck(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, true) - private[sbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible) - private[sbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible) - override def toString = "Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)".format(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) - override def equals(obj: Any): Boolean = { - obj match { - case other: Patterns => - ivyPatterns 
== other.ivyPatterns && artifactPatterns == other.artifactPatterns && isMavenCompatible == other.isMavenCompatible && descriptorOptional == other.descriptorOptional && skipConsistencyCheck == other.skipConsistencyCheck - case _ => false - } - } - override def hashCode: Int = (ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck).hashCode +final class Patterns(val ivyPatterns: Seq[String], val artifactPatterns: Seq[String], val isMavenCompatible: Boolean, val descriptorOptional: Boolean, val skipConsistencyCheck: Boolean) { + private[sbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true) + private[sbt] def withDescriptorOptional(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, true, skipConsistencyCheck) + private[sbt] def withoutConsistencyCheck(): Patterns = Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, true) + private[sbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible) + private[sbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible) + override def toString = "Patterns(ivyPatterns=%s, artifactPatterns=%s, isMavenCompatible=%s, descriptorOptional=%s, skipConsistencyCheck=%s)".format(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) + override def equals(obj: Any): Boolean = { + obj match { + case other: Patterns => + ivyPatterns == other.ivyPatterns && artifactPatterns == other.artifactPatterns && isMavenCompatible == other.isMavenCompatible && descriptorOptional == other.descriptorOptional && skipConsistencyCheck == other.skipConsistencyCheck + case _ => false + } + } + override def hashCode: Int = (ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck).hashCode - @deprecated - def this(ivyPatterns: Seq[String], 
artifactPatterns: Seq[String], isMavenCompatible: Boolean) = this(ivyPatterns, artifactPatterns, isMavenCompatible, false, false) + @deprecated + def this(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean) = this(ivyPatterns, artifactPatterns, isMavenCompatible, false, false) } -object Patterns -{ - implicit def defaultPatterns: Patterns = Resolver.defaultPatterns +object Patterns { + implicit def defaultPatterns: Patterns = Resolver.defaultPatterns - def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns : _*) - def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = Patterns(artifactPatterns, artifactPatterns, isMavenCompatible) - def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean): Patterns = apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, false, false) - def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean): Patterns = new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) + def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns: _*) + def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = Patterns(artifactPatterns, artifactPatterns, isMavenCompatible) + def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean): Patterns = apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, false, false) + def apply(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean): Patterns = new Patterns(ivyPatterns, artifactPatterns, isMavenCompatible, descriptorOptional, skipConsistencyCheck) } -object RepositoryHelpers -{ - final case class 
SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int]) - { - def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port) - } - /** Configuration specific to an Ivy filesystem resolver. */ - final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean]) - { - def transactional() = FileConfiguration(isLocal, Some(true)) - def nontransactional() = FileConfiguration(isLocal, Some(false)) - def nonlocal() = FileConfiguration(false, isTransactional) - } - sealed trait SshAuthentication extends NotNull - final case class PasswordAuthentication(user: String, password: Option[String]) extends SshAuthentication - final case class KeyFileAuthentication(user: String, keyfile: File, password: Option[String]) extends SshAuthentication +object RepositoryHelpers { + final case class SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int]) { + def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port) + } + /** Configuration specific to an Ivy filesystem resolver. 
*/ + final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean]) { + def transactional() = FileConfiguration(isLocal, Some(true)) + def nontransactional() = FileConfiguration(isLocal, Some(false)) + def nonlocal() = FileConfiguration(false, isTransactional) + } + sealed trait SshAuthentication extends NotNull + final case class PasswordAuthentication(user: String, password: Option[String]) extends SshAuthentication + final case class KeyFileAuthentication(user: String, keyfile: File, password: Option[String]) extends SshAuthentication } -import RepositoryHelpers.{SshConnection, FileConfiguration} -import RepositoryHelpers.{KeyFileAuthentication, PasswordAuthentication, SshAuthentication} +import RepositoryHelpers.{ SshConnection, FileConfiguration } +import RepositoryHelpers.{ KeyFileAuthentication, PasswordAuthentication, SshAuthentication } /** sbt interface to an Ivy repository based on patterns, which is most Ivy repositories.*/ -sealed abstract class PatternsBasedRepository extends Resolver -{ - type RepositoryType <: PatternsBasedRepository - /** Should be implemented to create a new copy of this repository but with `patterns` as given.*/ - protected def copy(patterns: Patterns): RepositoryType +sealed abstract class PatternsBasedRepository extends Resolver { + type RepositoryType <: PatternsBasedRepository + /** Should be implemented to create a new copy of this repository but with `patterns` as given.*/ + protected def copy(patterns: Patterns): RepositoryType - /** The object representing the configured patterns for this repository. */ - def patterns: Patterns + /** The object representing the configured patterns for this repository. */ + def patterns: Patterns - /** Enables maven 2 compatibility for this repository. */ - def mavenStyle() = copy(patterns.mavenStyle()) + /** Enables maven 2 compatibility for this repository. 
*/ + def mavenStyle() = copy(patterns.mavenStyle()) - /** Makes descriptor metadata optional for this repository. */ - def descriptorOptional() = copy(patterns.withDescriptorOptional()) + /** Makes descriptor metadata optional for this repository. */ + def descriptorOptional() = copy(patterns.withDescriptorOptional()) - /** Disables consistency checking for this repository. */ - def skipConsistencyCheck() = copy(patterns.withoutConsistencyCheck()) + /** Disables consistency checking for this repository. */ + def skipConsistencyCheck() = copy(patterns.withoutConsistencyCheck()) - /** Adds the given patterns for resolving/publishing Ivy files.*/ - def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns)) - /** Adds the given patterns for resolving/publishing artifacts.*/ - def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns)) + /** Adds the given patterns for resolving/publishing Ivy files.*/ + def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns)) + /** Adds the given patterns for resolving/publishing artifacts.*/ + def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns)) } /** sbt interface for an Ivy filesystem repository. More convenient construction is done using Resolver.file. 
*/ -final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository -{ - type RepositoryType = FileRepository - protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns) - private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns) - def transactional() = copy(configuration.transactional()) - def nonlocal() = copy(configuration.nonlocal()) +final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository { + type RepositoryType = FileRepository + protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns) + private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns) + def transactional() = copy(configuration.transactional()) + def nonlocal() = copy(configuration.nonlocal()) } -final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository -{ - type RepositoryType = URLRepository - protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns) +final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository { + type RepositoryType = URLRepository + protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns) } /** sbt interface for an Ivy ssh-based repository (ssh and sftp). Requires the Jsch library.. 
*/ -sealed abstract class SshBasedRepository extends PatternsBasedRepository -{ - type RepositoryType <: SshBasedRepository - protected def copy(connection: SshConnection): RepositoryType - private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication))) +sealed abstract class SshBasedRepository extends PatternsBasedRepository { + type RepositoryType <: SshBasedRepository + protected def copy(connection: SshConnection): RepositoryType + private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication))) - /** The object representing the configured ssh connection for this repository. */ - def connection: SshConnection + /** The object representing the configured ssh connection for this repository. */ + def connection: SshConnection - /** Configures this to use the specified user name and password when connecting to the remote repository. */ - def as(user: String, password: String): RepositoryType = as(user, Some(password)) - def as(user: String): RepositoryType = as(user, None) - def as(user: String, password: Option[String]) = copy(new PasswordAuthentication(user, password)) - /** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */ - def as(user: String, keyfile: File): RepositoryType = as(user, keyfile, None) - def as(user: String, keyfile: File, password: String): RepositoryType = as(user, keyfile, Some(password)) - def as(user: String, keyfile: File, password: Option[String]): RepositoryType = copy(new KeyFileAuthentication(user, keyfile, password)) + /** Configures this to use the specified user name and password when connecting to the remote repository. 
*/ + def as(user: String, password: String): RepositoryType = as(user, Some(password)) + def as(user: String): RepositoryType = as(user, None) + def as(user: String, password: Option[String]) = copy(new PasswordAuthentication(user, password)) + /** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */ + def as(user: String, keyfile: File): RepositoryType = as(user, keyfile, None) + def as(user: String, keyfile: File, password: String): RepositoryType = as(user, keyfile, Some(password)) + def as(user: String, keyfile: File, password: Option[String]): RepositoryType = copy(new KeyFileAuthentication(user, keyfile, password)) } /** sbt interface for an Ivy repository over ssh. More convenient construction is done using Resolver.ssh. */ -final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository -{ - type RepositoryType = SshRepository - protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions) - protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions) - /** Defines the permissions to set when publishing to this repository. 
*/ - def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions)) - def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions) +final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository { + type RepositoryType = SshRepository + protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions) + protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions) + /** Defines the permissions to set when publishing to this repository. */ + def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions)) + def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions) } /** sbt interface for an Ivy repository over sftp. More convenient construction is done using Resolver.sftp. 
*/ -final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository -{ - type RepositoryType = SftpRepository - protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns) - protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns) +final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository { + type RepositoryType = SftpRepository + protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns) + protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns) } import Resolver._ @@ -152,151 +138,163 @@ import Resolver._ object DefaultMavenRepository extends MavenRepository("public", IBiblioResolver.DEFAULT_M2_ROOT) object JavaNet2Repository extends MavenRepository(JavaNet2RepositoryName, JavaNet2RepositoryRoot) object JavaNet1Repository extends JavaNet1Repository -sealed trait JavaNet1Repository extends Resolver -{ - def name = "java.net Maven1 Repository" +sealed trait JavaNet1Repository extends Resolver { + def name = "java.net Maven1 Repository" } -object Resolver -{ - val TypesafeRepositoryRoot = "http://repo.typesafe.com/typesafe" - val SbtPluginRepositoryRoot = "http://repo.scala-sbt.org/scalasbt" - val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories" +object Resolver { + val TypesafeRepositoryRoot = "http://repo.typesafe.com/typesafe" + val SbtPluginRepositoryRoot = "http://repo.scala-sbt.org/scalasbt" + val SonatypeRepositoryRoot = "https://oss.sonatype.org/content/repositories" - // obsolete: kept only for launcher compatibility - private[sbt] val ScalaToolsReleasesName = "Sonatype OSS Releases" - private[sbt] val ScalaToolsSnapshotsName = "Sonatype OSS Snapshots" - private[sbt] val ScalaToolsReleasesRoot = SonatypeRepositoryRoot + "/releases" - 
private[sbt] val ScalaToolsSnapshotsRoot = SonatypeRepositoryRoot + "/snapshots" - private[sbt] val ScalaToolsReleases = new MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot) - private[sbt] val ScalaToolsSnapshots = new MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot) + // obsolete: kept only for launcher compatibility + private[sbt] val ScalaToolsReleasesName = "Sonatype OSS Releases" + private[sbt] val ScalaToolsSnapshotsName = "Sonatype OSS Snapshots" + private[sbt] val ScalaToolsReleasesRoot = SonatypeRepositoryRoot + "/releases" + private[sbt] val ScalaToolsSnapshotsRoot = SonatypeRepositoryRoot + "/snapshots" + private[sbt] val ScalaToolsReleases = new MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot) + private[sbt] val ScalaToolsSnapshots = new MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot) - val JavaNet2RepositoryName = "java.net Maven2 Repository" - val JavaNet2RepositoryRoot = "http://download.java.net/maven/2" + val JavaNet2RepositoryName = "java.net Maven2 Repository" + val JavaNet2RepositoryRoot = "http://download.java.net/maven/2" - def typesafeRepo(status: String) = new MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status) - def typesafeIvyRepo(status: String) = url("typesafe-ivy-" + status, new URL(TypesafeRepositoryRoot + "/ivy-" + status + "/"))(ivyStylePatterns) - def sbtPluginRepo(status: String) = url("sbt-plugin-" + status, new URL(SbtPluginRepositoryRoot + "/sbt-plugin-" + status + "/"))(ivyStylePatterns) - def sonatypeRepo(status: String) = new MavenRepository("sonatype-" + status, SonatypeRepositoryRoot + "/" + status) + def typesafeRepo(status: String) = new MavenRepository("typesafe-" + status, TypesafeRepositoryRoot + "/" + status) + def typesafeIvyRepo(status: String) = url("typesafe-ivy-" + status, new URL(TypesafeRepositoryRoot + "/ivy-" + status + "/"))(ivyStylePatterns) + def sbtPluginRepo(status: String) = url("sbt-plugin-" + status, new 
URL(SbtPluginRepositoryRoot + "/sbt-plugin-" + status + "/"))(ivyStylePatterns) + def sonatypeRepo(status: String) = new MavenRepository("sonatype-" + status, SonatypeRepositoryRoot + "/" + status) - /** Add the local and Maven Central repositories to the user repositories. */ - def withDefaultResolvers(userResolvers: Seq[Resolver]): Seq[Resolver] = - withDefaultResolvers(userResolvers, true) - /** Add the local Ivy repository to the user repositories. - * If `mavenCentral` is true, add the Maven Central repository. */ - def withDefaultResolvers(userResolvers: Seq[Resolver], mavenCentral: Boolean): Seq[Resolver] = - Seq(Resolver.defaultLocal) ++ - userResolvers ++ - single(DefaultMavenRepository, mavenCentral) - private def single[T](value: T, nonEmpty: Boolean): Seq[T] = if(nonEmpty) Seq(value) else Nil + /** Add the local and Maven Central repositories to the user repositories. */ + def withDefaultResolvers(userResolvers: Seq[Resolver]): Seq[Resolver] = + withDefaultResolvers(userResolvers, true) + /** + * Add the local Ivy repository to the user repositories. + * If `mavenCentral` is true, add the Maven Central repository. + */ + def withDefaultResolvers(userResolvers: Seq[Resolver], mavenCentral: Boolean): Seq[Resolver] = + Seq(Resolver.defaultLocal) ++ + userResolvers ++ + single(DefaultMavenRepository, mavenCentral) + private def single[T](value: T, nonEmpty: Boolean): Seq[T] = if (nonEmpty) Seq(value) else Nil - /** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */ - sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull - { - /** Subclasses should implement this method to */ - protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType - /** Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. 
A ManagedProject - * has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String)(implicit basePatterns: Patterns): RepositoryType = - apply(name, None, None, None) - /** Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use. - * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType = - apply(name, Some(hostname), None, None) - /** Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial - * patterns will be resolved. `basePatterns` are the initial patterns to use. - * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType = - apply(name, Some(hostname), None, Some(basePath)) - /** Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use. - * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType = - apply(name, Some(hostname), Some(port), None) - /** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial - * patterns will be resolved. `basePatterns` are the initial patterns to use. 
- * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType = - apply(name, Some(hostname), Some(port), Some(basePath)) - /** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial - * patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None). - * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/ - def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType = - construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns)) - } - /** A factory to construct an interface to an Ivy SSH resolver.*/ - object ssh extends Define[SshRepository] - { - protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SshRepository(name, connection, patterns, None) - } - /** A factory to construct an interface to an Ivy SFTP resolver.*/ - object sftp extends Define[SftpRepository] - { - protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SftpRepository(name, connection, patterns) - } - /** A factory to construct an interface to an Ivy filesytem resolver. */ - object file - { - /** Constructs a file resolver with the given name. The patterns to use must be explicitly specified - * using the `ivys` or `artifacts` methods on the constructed resolver object.*/ - def apply(name: String): FileRepository = FileRepository(name, defaultFileConfiguration, Patterns(false)) - /** Constructs a file resolver with the given name and base directory. 
*/ - def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository = - baseRepository(new File(baseDirectory.toURI.normalize) getAbsolutePath)(FileRepository(name, defaultFileConfiguration, _)) - } - object url - { - /** Constructs a URL resolver with the given name. The patterns to use must be explicitly specified - * using the `ivys` or `artifacts` methods on the constructed resolver object.*/ - def apply(name: String): URLRepository = URLRepository(name, Patterns(false)) - /** Constructs a file resolver with the given name and base directory. */ - def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository = - baseRepository(baseURL.toURI.normalize.toString)(URLRepository(name, _)) - } - private def baseRepository[T](base: String)(construct: Patterns => T)(implicit basePatterns: Patterns): T = - construct(resolvePatterns(base, basePatterns)) + /** A base class for defining factories for interfaces to Ivy repositories that require a hostname , port, and patterns. */ + sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull { + /** Subclasses should implement this method to */ + protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType + /** + * Constructs this repository type with the given `name`. `basePatterns` are the initial patterns to use. A ManagedProject + * has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, None, None, None) + /** + * Constructs this repository type with the given `name` and `hostname`. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. 
+ */ + def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, Some(hostname), None, None) + /** + * Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, Some(hostname), None, Some(basePath)) + /** + * Constructs this repository type with the given `name`, `hostname`, and `port`. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType = + apply(name, Some(hostname), Some(port), None) + /** + * Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. + */ + def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType = + apply(name, Some(hostname), Some(port), Some(basePath)) + /** + * Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial + * patterns will be resolved. `basePatterns` are the initial patterns to use. All but the `name` are optional (use None). + * A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns. 
+ */ + def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType = + construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns)) + } + /** A factory to construct an interface to an Ivy SSH resolver.*/ + object ssh extends Define[SshRepository] { + protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SshRepository(name, connection, patterns, None) + } + /** A factory to construct an interface to an Ivy SFTP resolver.*/ + object sftp extends Define[SftpRepository] { + protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SftpRepository(name, connection, patterns) + } + /** A factory to construct an interface to an Ivy filesytem resolver. */ + object file { + /** + * Constructs a file resolver with the given name. The patterns to use must be explicitly specified + * using the `ivys` or `artifacts` methods on the constructed resolver object. + */ + def apply(name: String): FileRepository = FileRepository(name, defaultFileConfiguration, Patterns(false)) + /** Constructs a file resolver with the given name and base directory. */ + def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository = + baseRepository(new File(baseDirectory.toURI.normalize) getAbsolutePath)(FileRepository(name, defaultFileConfiguration, _)) + } + object url { + /** + * Constructs a URL resolver with the given name. The patterns to use must be explicitly specified + * using the `ivys` or `artifacts` methods on the constructed resolver object. + */ + def apply(name: String): URLRepository = URLRepository(name, Patterns(false)) + /** Constructs a file resolver with the given name and base directory. 
*/ + def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository = + baseRepository(baseURL.toURI.normalize.toString)(URLRepository(name, _)) + } + private def baseRepository[T](base: String)(construct: Patterns => T)(implicit basePatterns: Patterns): T = + construct(resolvePatterns(base, basePatterns)) - /** If `base` is None, `patterns` is returned unchanged. - * Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base. */ - private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns = - base match - { - case Some(path) => resolvePatterns(path, patterns) - case None => patterns - } - /** Resolves the ivy file and artifact patterns in `patterns` against the given base. */ - private def resolvePatterns(base: String, basePatterns: Patterns): Patterns = - { - def resolveAll(patterns: Seq[String]) = patterns.map(p => resolvePattern(base, p)) - Patterns(resolveAll(basePatterns.ivyPatterns), resolveAll(basePatterns.artifactPatterns), basePatterns.isMavenCompatible) - } - private[sbt] def resolvePattern(base: String, pattern: String): String = - { - val normBase = base.replace('\\', '/') - if(normBase.endsWith("/") || pattern.startsWith("/")) normBase + pattern else normBase + "/" + pattern - } - def defaultFileConfiguration = FileConfiguration(true, None) - def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true) - def ivyStylePatterns = defaultIvyPatterns//Patterns(Nil, Nil, false) + /** + * If `base` is None, `patterns` is returned unchanged. + * Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base. + */ + private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns = + base match { + case Some(path) => resolvePatterns(path, patterns) + case None => patterns + } + /** Resolves the ivy file and artifact patterns in `patterns` against the given base. 
*/ + private def resolvePatterns(base: String, basePatterns: Patterns): Patterns = + { + def resolveAll(patterns: Seq[String]) = patterns.map(p => resolvePattern(base, p)) + Patterns(resolveAll(basePatterns.ivyPatterns), resolveAll(basePatterns.artifactPatterns), basePatterns.isMavenCompatible) + } + private[sbt] def resolvePattern(base: String, pattern: String): String = + { + val normBase = base.replace('\\', '/') + if (normBase.endsWith("/") || pattern.startsWith("/")) normBase + pattern else normBase + "/" + pattern + } + def defaultFileConfiguration = FileConfiguration(true, None) + def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true) + def ivyStylePatterns = defaultIvyPatterns //Patterns(Nil, Nil, false) - def defaultPatterns = mavenStylePatterns - def mavenStyleBasePattern = "[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext]" - def localBasePattern = "[organisation]/[module]/" + PluginPattern + "[revision]/[type]s/[artifact](-[classifier]).[ext]" - def defaultRetrievePattern = "[type]s/[organisation]/[module]/" + PluginPattern + "[artifact](-[revision])(-[classifier]).[ext]" - final val PluginPattern = "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)" + def defaultPatterns = mavenStylePatterns + def mavenStyleBasePattern = "[organisation]/[module](_[scalaVersion])(_[sbtVersion])/[revision]/[artifact]-[revision](-[classifier]).[ext]" + def localBasePattern = "[organisation]/[module]/" + PluginPattern + "[revision]/[type]s/[artifact](-[classifier]).[ext]" + def defaultRetrievePattern = "[type]s/[organisation]/[module]/" + PluginPattern + "[artifact](-[revision])(-[classifier]).[ext]" + final val PluginPattern = "(scala_[scalaVersion]/)(sbt_[sbtVersion]/)" - private[this] def mavenLocalDir = new File(Path.userHome, ".m2/repository/") - def publishMavenLocal = Resolver.file("publish-m2-local", mavenLocalDir) - def mavenLocal = MavenRepository("Maven2 Local", 
mavenLocalDir.toURI.toString) - def defaultLocal = defaultUserFileRepository("local") - def defaultShared = defaultUserFileRepository("shared") - def defaultUserFileRepository(id: String) = - { - val pList = ("${ivy.home}/" + id + "/" + localBasePattern) :: Nil - FileRepository(id, defaultFileConfiguration, Patterns(pList, pList, false)) - } - def defaultIvyPatterns = - { - val pList = List(localBasePattern) - Patterns(pList, pList, false) - } + private[this] def mavenLocalDir = new File(Path.userHome, ".m2/repository/") + def publishMavenLocal = Resolver.file("publish-m2-local", mavenLocalDir) + def mavenLocal = MavenRepository("Maven2 Local", mavenLocalDir.toURI.toString) + def defaultLocal = defaultUserFileRepository("local") + def defaultShared = defaultUserFileRepository("shared") + def defaultUserFileRepository(id: String) = + { + val pList = ("${ivy.home}/" + id + "/" + localBasePattern) :: Nil + FileRepository(id, defaultFileConfiguration, Patterns(pList, pList, false)) + } + def defaultIvyPatterns = + { + val pList = List(localBasePattern) + Patterns(pList, pList, false) + } } diff --git a/ivy/src/main/scala/sbt/StringUtilities.scala b/ivy/src/main/scala/sbt/StringUtilities.scala index caf373d3b..fc8327304 100644 --- a/ivy/src/main/scala/sbt/StringUtilities.scala +++ b/ivy/src/main/scala/sbt/StringUtilities.scala @@ -5,13 +5,11 @@ package sbt import java.util.Locale -object StringUtilities -{ - @deprecated("Different use cases require different normalization. Use Project.normalizeModuleID or normalizeProjectID instead.", "0.13.0") - def normalize(s: String) = s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-") - def nonEmpty(s: String, label: String) - { - require(s.trim.length > 0, label + " cannot be empty.") - } - def appendable(s: String) = if(s.isEmpty) "" else "_" + s +object StringUtilities { + @deprecated("Different use cases require different normalization. 
Use Project.normalizeModuleID or normalizeProjectID instead.", "0.13.0") + def normalize(s: String) = s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-") + def nonEmpty(s: String, label: String) { + require(s.trim.length > 0, label + " cannot be empty.") + } + def appendable(s: String) = if (s.isEmpty) "" else "_" + s } diff --git a/ivy/src/main/scala/sbt/UpdateReport.scala b/ivy/src/main/scala/sbt/UpdateReport.scala index 071643eb2..71d58390b 100644 --- a/ivy/src/main/scala/sbt/UpdateReport.scala +++ b/ivy/src/main/scala/sbt/UpdateReport.scala @@ -3,142 +3,138 @@ */ package sbt - import java.io.File +import java.io.File -/** Provides information about dependency resolution. -* It does not include information about evicted modules, only about the modules ultimately selected by the conflict manager. -* This means that for a given configuration, there should only be one revision for a given organization and module name. -* @param cachedDescriptor the location of the resolved module descriptor in the cache -* @param configurations a sequence containing one report for each configuration resolved. -* @param stats information about the update that produced this report -* @see sbt.RichUpdateReport -*/ -final class UpdateReport(val cachedDescriptor: File, val configurations: Seq[ConfigurationReport], val stats: UpdateStats, private[sbt] val stamps: Map[File,Long]) -{ - @deprecated("Use the variant that provides timestamps of files.", "0.13.0") - def this(cachedDescriptor: File, configurations: Seq[ConfigurationReport], stats: UpdateStats) = - this(cachedDescriptor, configurations, stats, Map.empty) +/** + * Provides information about dependency resolution. + * It does not include information about evicted modules, only about the modules ultimately selected by the conflict manager. + * This means that for a given configuration, there should only be one revision for a given organization and module name. 
+ * @param cachedDescriptor the location of the resolved module descriptor in the cache + * @param configurations a sequence containing one report for each configuration resolved. + * @param stats information about the update that produced this report + * @see sbt.RichUpdateReport + */ +final class UpdateReport(val cachedDescriptor: File, val configurations: Seq[ConfigurationReport], val stats: UpdateStats, private[sbt] val stamps: Map[File, Long]) { + @deprecated("Use the variant that provides timestamps of files.", "0.13.0") + def this(cachedDescriptor: File, configurations: Seq[ConfigurationReport], stats: UpdateStats) = + this(cachedDescriptor, configurations, stats, Map.empty) - override def toString = "Update report:\n\t" + stats + "\n" + configurations.mkString + override def toString = "Update report:\n\t" + stats + "\n" + configurations.mkString - /** All resolved modules in all configurations. */ - def allModules: Seq[ModuleID] = configurations.flatMap(_.allModules).distinct + /** All resolved modules in all configurations. */ + def allModules: Seq[ModuleID] = configurations.flatMap(_.allModules).distinct - def retrieve(f: (String, ModuleID, Artifact, File) => File): UpdateReport = - new UpdateReport(cachedDescriptor, configurations map { _ retrieve f}, stats, stamps ) + def retrieve(f: (String, ModuleID, Artifact, File) => File): UpdateReport = + new UpdateReport(cachedDescriptor, configurations map { _ retrieve f }, stats, stamps) - /** Gets the report for the given configuration, or `None` if the configuration was not resolved.*/ - def configuration(s: String) = configurations.find(_.configuration == s) + /** Gets the report for the given configuration, or `None` if the configuration was not resolved.*/ + def configuration(s: String) = configurations.find(_.configuration == s) - /** Gets the names of all resolved configurations. This `UpdateReport` contains one `ConfigurationReport` for each configuration in this list. 
*/ - def allConfigurations: Seq[String] = configurations.map(_.configuration) + /** Gets the names of all resolved configurations. This `UpdateReport` contains one `ConfigurationReport` for each configuration in this list. */ + def allConfigurations: Seq[String] = configurations.map(_.configuration) } -/** Provides information about resolution of a single configuration. -* @param configuration the configuration this report is for. -* @param modules a seqeuence containing one report for each module resolved for this configuration. -*/ -final class ConfigurationReport(val configuration: String, val modules: Seq[ModuleReport], val evicted: Seq[ModuleID]) -{ - override def toString = "\t" + configuration + ":\n" + modules.mkString + evicted.map("\t\t(EVICTED) " + _ + "\n").mkString +/** + * Provides information about resolution of a single configuration. + * @param configuration the configuration this report is for. + * @param modules a seqeuence containing one report for each module resolved for this configuration. + */ +final class ConfigurationReport(val configuration: String, val modules: Seq[ModuleReport], val evicted: Seq[ModuleID]) { + override def toString = "\t" + configuration + ":\n" + modules.mkString + evicted.map("\t\t(EVICTED) " + _ + "\n").mkString - /** All resolved modules for this configuration. - * For a given organization and module name, there is only one revision/`ModuleID` in this sequence. - */ - def allModules: Seq[ModuleID] = modules.map(mr => addConfiguration(mr.module)) - private[this] def addConfiguration(mod: ModuleID): ModuleID = if(mod.configurations.isEmpty) mod.copy(configurations = Some(configuration)) else mod - - def retrieve(f: (String, ModuleID, Artifact, File) => File): ConfigurationReport = - new ConfigurationReport(configuration, modules map { _.retrieve( (mid,art,file) => f(configuration, mid, art, file)) }, evicted) + /** + * All resolved modules for this configuration. 
+ * For a given organization and module name, there is only one revision/`ModuleID` in this sequence. + */ + def allModules: Seq[ModuleID] = modules.map(mr => addConfiguration(mr.module)) + private[this] def addConfiguration(mod: ModuleID): ModuleID = if (mod.configurations.isEmpty) mod.copy(configurations = Some(configuration)) else mod + + def retrieve(f: (String, ModuleID, Artifact, File) => File): ConfigurationReport = + new ConfigurationReport(configuration, modules map { _.retrieve((mid, art, file) => f(configuration, mid, art, file)) }, evicted) } -/** Provides information about the resolution of a module. -* This information is in the context of a specific configuration. -* @param module the `ModuleID` this report is for. -* @param artifacts the resolved artifacts for this module, paired with the File the artifact was retrieved to. This File may be in the -*/ -final class ModuleReport(val module: ModuleID, val artifacts: Seq[(Artifact, File)], val missingArtifacts: Seq[Artifact]) -{ - override def toString = - { - val arts = artifacts.map(_.toString) ++ missingArtifacts.map(art => "(MISSING) " + art) - "\t\t" + module + ": " + - (if(arts.size <= 1) "" else "\n\t\t\t") + arts.mkString("\n\t\t\t") + "\n" - } - def retrieve(f: (ModuleID, Artifact, File) => File): ModuleReport = - new ModuleReport(module, artifacts.map { case (art,file) => (art, f(module, art, file)) }, missingArtifacts) +/** + * Provides information about the resolution of a module. + * This information is in the context of a specific configuration. + * @param module the `ModuleID` this report is for. + * @param artifacts the resolved artifacts for this module, paired with the File the artifact was retrieved to. 
This File may be in the + */ +final class ModuleReport(val module: ModuleID, val artifacts: Seq[(Artifact, File)], val missingArtifacts: Seq[Artifact]) { + override def toString = + { + val arts = artifacts.map(_.toString) ++ missingArtifacts.map(art => "(MISSING) " + art) + "\t\t" + module + ": " + + (if (arts.size <= 1) "" else "\n\t\t\t") + arts.mkString("\n\t\t\t") + "\n" + } + def retrieve(f: (ModuleID, Artifact, File) => File): ModuleReport = + new ModuleReport(module, artifacts.map { case (art, file) => (art, f(module, art, file)) }, missingArtifacts) } -object UpdateReport -{ - implicit def richUpdateReport(report: UpdateReport): RichUpdateReport = new RichUpdateReport(report) +object UpdateReport { + implicit def richUpdateReport(report: UpdateReport): RichUpdateReport = new RichUpdateReport(report) - /** Provides extra methods for filtering the contents of an `UpdateReport` and for obtaining references to a selected subset of the underlying files. */ - final class RichUpdateReport(report: UpdateReport) - { - def recomputeStamps(): UpdateReport = - { - val files = report.cachedDescriptor +: allFiles - val stamps = files.map(f => (f, f.lastModified)).toMap - new UpdateReport(report.cachedDescriptor, report.configurations, report.stats, stamps) - } + /** Provides extra methods for filtering the contents of an `UpdateReport` and for obtaining references to a selected subset of the underlying files. */ + final class RichUpdateReport(report: UpdateReport) { + def recomputeStamps(): UpdateReport = + { + val files = report.cachedDescriptor +: allFiles + val stamps = files.map(f => (f, f.lastModified)).toMap + new UpdateReport(report.cachedDescriptor, report.configurations, report.stats, stamps) + } - import DependencyFilter._ - /** Obtains all successfully retrieved files in all configurations and modules. 
*/ - def allFiles: Seq[File] = matching(DependencyFilter.allPass) + import DependencyFilter._ + /** Obtains all successfully retrieved files in all configurations and modules. */ + def allFiles: Seq[File] = matching(DependencyFilter.allPass) - /** Obtains all successfully retrieved files in configurations, modules, and artifacts matching the specified filter. */ - def matching(f: DependencyFilter): Seq[File] = select0(f).distinct + /** Obtains all successfully retrieved files in configurations, modules, and artifacts matching the specified filter. */ + def matching(f: DependencyFilter): Seq[File] = select0(f).distinct - /** Obtains all successfully retrieved files matching all provided filters. An unspecified argument matches all files. */ - def select(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): Seq[File] = - matching(DependencyFilter.make(configuration, module, artifact)) + /** Obtains all successfully retrieved files matching all provided filters. An unspecified argument matches all files. 
*/ + def select(configuration: ConfigurationFilter = configurationFilter(), module: ModuleFilter = moduleFilter(), artifact: ArtifactFilter = artifactFilter()): Seq[File] = + matching(DependencyFilter.make(configuration, module, artifact)) - private[this] def select0(f: DependencyFilter): Seq[File] = - for(cReport <- report.configurations; mReport <- cReport.modules; (artifact, file) <- mReport.artifacts if f(cReport.configuration, mReport.module, artifact)) yield { - if(file == null) error("Null file: conf=" + cReport.configuration + ", module=" + mReport.module + ", art: " + artifact) - file - } - - /** Constructs a new report that only contains files matching the specified filter.*/ - def filter(f: DependencyFilter): UpdateReport = - moduleReportMap { (configuration, modReport) => - import modReport._ - val newArtifacts = artifacts filter { case (art, file) => f(configuration, module, art) } - val newMissing = missingArtifacts filter { art => f(configuration, module, art) } - new ModuleReport(module, newArtifacts, newMissing) - } - def substitute(f: (String, ModuleID, Seq[(Artifact, File)]) => Seq[(Artifact, File)]): UpdateReport = - moduleReportMap { (configuration, modReport) => - val newArtifacts = f(configuration, modReport.module, modReport.artifacts) - new ModuleReport(modReport.module, newArtifacts, Nil) - } + private[this] def select0(f: DependencyFilter): Seq[File] = + for (cReport <- report.configurations; mReport <- cReport.modules; (artifact, file) <- mReport.artifacts if f(cReport.configuration, mReport.module, artifact)) yield { + if (file == null) error("Null file: conf=" + cReport.configuration + ", module=" + mReport.module + ", art: " + artifact) + file + } - def toSeq: Seq[(String, ModuleID, Artifact, File)] = - for(confReport <- report.configurations; modReport <- confReport.modules; (artifact, file) <- modReport.artifacts) yield - (confReport.configuration, modReport.module, artifact, file) + /** Constructs a new report that only contains 
files matching the specified filter.*/ + def filter(f: DependencyFilter): UpdateReport = + moduleReportMap { (configuration, modReport) => + import modReport._ + val newArtifacts = artifacts filter { case (art, file) => f(configuration, module, art) } + val newMissing = missingArtifacts filter { art => f(configuration, module, art) } + new ModuleReport(module, newArtifacts, newMissing) + } + def substitute(f: (String, ModuleID, Seq[(Artifact, File)]) => Seq[(Artifact, File)]): UpdateReport = + moduleReportMap { (configuration, modReport) => + val newArtifacts = f(configuration, modReport.module, modReport.artifacts) + new ModuleReport(modReport.module, newArtifacts, Nil) + } - def allMissing: Seq[(String, ModuleID, Artifact)] = - for(confReport <- report.configurations; modReport <- confReport.modules; artifact <- modReport.missingArtifacts) yield - (confReport.configuration, modReport.module, artifact) + def toSeq: Seq[(String, ModuleID, Artifact, File)] = + for (confReport <- report.configurations; modReport <- confReport.modules; (artifact, file) <- modReport.artifacts) yield (confReport.configuration, modReport.module, artifact, file) - def addMissing(f: ModuleID => Seq[Artifact]): UpdateReport = - moduleReportMap { (configuration, modReport) => - import modReport._ - new ModuleReport(module, artifacts, (missingArtifacts ++ f(module)).distinct) - } + def allMissing: Seq[(String, ModuleID, Artifact)] = + for (confReport <- report.configurations; modReport <- confReport.modules; artifact <- modReport.missingArtifacts) yield (confReport.configuration, modReport.module, artifact) - def moduleReportMap(f: (String, ModuleReport) => ModuleReport): UpdateReport = - { - val newConfigurations = report.configurations.map { confReport => - import confReport._ - val newModules = modules map { modReport => f(configuration, modReport) } - new ConfigurationReport(configuration, newModules, evicted) - } - new UpdateReport(report.cachedDescriptor, newConfigurations, 
report.stats, report.stamps) - } - } + def addMissing(f: ModuleID => Seq[Artifact]): UpdateReport = + moduleReportMap { (configuration, modReport) => + import modReport._ + new ModuleReport(module, artifacts, (missingArtifacts ++ f(module)).distinct) + } + + def moduleReportMap(f: (String, ModuleReport) => ModuleReport): UpdateReport = + { + val newConfigurations = report.configurations.map { confReport => + import confReport._ + val newModules = modules map { modReport => f(configuration, modReport) } + new ConfigurationReport(configuration, newModules, evicted) + } + new UpdateReport(report.cachedDescriptor, newConfigurations, report.stats, report.stamps) + } + } } -final class UpdateStats(val resolveTime: Long, val downloadTime: Long, val downloadSize: Long, val cached: Boolean) -{ - override def toString = Seq("Resolve time: " + resolveTime + " ms", "Download time: " + downloadTime + " ms", "Download size: " + downloadSize + " bytes").mkString(", ") +final class UpdateStats(val resolveTime: Long, val downloadTime: Long, val downloadSize: Long, val cached: Boolean) { + override def toString = Seq("Resolve time: " + resolveTime + " ms", "Download time: " + downloadTime + " ms", "Download size: " + downloadSize + " bytes").mkString(", ") } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/impl/DependencyBuilders.scala b/ivy/src/main/scala/sbt/impl/DependencyBuilders.scala index 7d5812aaf..16a1fa504 100755 --- a/ivy/src/main/scala/sbt/impl/DependencyBuilders.scala +++ b/ivy/src/main/scala/sbt/impl/DependencyBuilders.scala @@ -6,67 +6,62 @@ package impl import StringUtilities.nonEmpty -trait DependencyBuilders -{ - final implicit def toGroupID(groupID: String): GroupID = - { - nonEmpty(groupID, "Group ID") - new GroupID(groupID) - } - final implicit def toRepositoryName(name: String): RepositoryName = - { - nonEmpty(name, "Repository name") - new RepositoryName(name) - } - final implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable = - { 
- require(m.configurations.isEmpty, "Configurations already specified for module " + m) - new ModuleIDConfigurable(m) - } +trait DependencyBuilders { + final implicit def toGroupID(groupID: String): GroupID = + { + nonEmpty(groupID, "Group ID") + new GroupID(groupID) + } + final implicit def toRepositoryName(name: String): RepositoryName = + { + nonEmpty(name, "Repository name") + new RepositoryName(name) + } + final implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable = + { + require(m.configurations.isEmpty, "Configurations already specified for module " + m) + new ModuleIDConfigurable(m) + } } -final class GroupID private[sbt] (groupID: String) -{ - def % (artifactID: String) = groupArtifact(artifactID, CrossVersion.Disabled) - def %% (artifactID: String): GroupArtifactID = groupArtifact(artifactID, CrossVersion.binary) +final class GroupID private[sbt] (groupID: String) { + def %(artifactID: String) = groupArtifact(artifactID, CrossVersion.Disabled) + def %%(artifactID: String): GroupArtifactID = groupArtifact(artifactID, CrossVersion.binary) - @deprecated(deprecationMessage, "0.12.0") - def %% (artifactID: String, crossVersion: String => String) = groupArtifact(artifactID, CrossVersion.binaryMapped(crossVersion)) - @deprecated(deprecationMessage, "0.12.0") - def %% (artifactID: String, alternatives: (String, String)*) = groupArtifact(artifactID, CrossVersion.binaryMapped(Map(alternatives: _*) orElse { case s => s })) + @deprecated(deprecationMessage, "0.12.0") + def %%(artifactID: String, crossVersion: String => String) = groupArtifact(artifactID, CrossVersion.binaryMapped(crossVersion)) + @deprecated(deprecationMessage, "0.12.0") + def %%(artifactID: String, alternatives: (String, String)*) = groupArtifact(artifactID, CrossVersion.binaryMapped(Map(alternatives: _*) orElse { case s => s })) - private def groupArtifact(artifactID: String, cross: CrossVersion) = - { - nonEmpty(artifactID, "Artifact ID") - new GroupArtifactID(groupID, artifactID, 
cross) - } + private def groupArtifact(artifactID: String, cross: CrossVersion) = + { + nonEmpty(artifactID, "Artifact ID") + new GroupArtifactID(groupID, artifactID, cross) + } - private[this] def deprecationMessage = """Use the cross method on the constructed ModuleID. For example: ("a" % "b" % "1").cross(...)""" + private[this] def deprecationMessage = """Use the cross method on the constructed ModuleID. For example: ("a" % "b" % "1").cross(...)""" } -final class GroupArtifactID private[sbt] (groupID: String, artifactID: String, crossVersion: CrossVersion) -{ - def % (revision: String): ModuleID = - { - nonEmpty(revision, "Revision") - ModuleID(groupID, artifactID, revision).cross(crossVersion) - } +final class GroupArtifactID private[sbt] (groupID: String, artifactID: String, crossVersion: CrossVersion) { + def %(revision: String): ModuleID = + { + nonEmpty(revision, "Revision") + ModuleID(groupID, artifactID, revision).cross(crossVersion) + } } -final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) -{ - def % (configuration: Configuration): ModuleID = %(configuration.name) +final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) { + def %(configuration: Configuration): ModuleID = %(configuration.name) - def % (configurations: String): ModuleID = - { - nonEmpty(configurations, "Configurations") - val c = configurations - moduleID.copy(configurations = Some(c)) - } + def %(configurations: String): ModuleID = + { + nonEmpty(configurations, "Configurations") + val c = configurations + moduleID.copy(configurations = Some(c)) + } } -final class RepositoryName private[sbt] (name: String) -{ - def at (location: String) = - { - nonEmpty(location, "Repository location") - new MavenRepository(name, location) - } +final class RepositoryName private[sbt] (name: String) { + def at(location: String) = + { + nonEmpty(location, "Repository location") + new MavenRepository(name, location) + } } diff --git 
a/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala index 3d0d174d8..e59e36e34 100644 --- a/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala +++ b/ivy/src/main/scala/sbt/ivyint/ErrorMessageAuthenticator.scala @@ -16,7 +16,7 @@ import org.apache.ivy.util.url.CredentialsStore */ object ErrorMessageAuthenticator { private var securityWarningLogged = false - + private def originalAuthenticator: Option[Authenticator] = { try { val f = classOf[Authenticator].getDeclaredField("theAuthenticator"); @@ -29,7 +29,7 @@ object ErrorMessageAuthenticator { None } } - + private lazy val ivyOriginalField = { val field = classOf[IvyAuthenticator].getDeclaredField("original") field.setAccessible(true) @@ -42,17 +42,17 @@ object ErrorMessageAuthenticator { val newOriginal = new ErrorMessageAuthenticator(original) ivyOriginalField.set(ivy, newOriginal) } - + try Option(ivyOriginalField.get(ivy).asInstanceOf[Authenticator]) match { case Some(alreadyThere: ErrorMessageAuthenticator) => // We're already installed, no need to do the work again. - case originalOpt => installIntoIvyImpl(originalOpt) + case originalOpt => installIntoIvyImpl(originalOpt) } catch { - case t: Throwable => + case t: Throwable => Message.debug("Error occurred will trying to install debug messages into Ivy Authentication" + t.getMessage) } Some(ivy) } - + /** Installs the error message authenticator so we have nicer error messages when using java's URL for downloading. */ def install() { // Actually installs the error message authenticator. @@ -62,67 +62,68 @@ object ErrorMessageAuthenticator { case e: SecurityException if !securityWarningLogged => securityWarningLogged = true; Message.warn("Not enough permissions to set the ErorrMessageAuthenticator. " - + "Helpful debug messages disabled!"); - } - // We will try to use the original authenticator as backup authenticator. 
- // Since there is no getter available, so try to use some reflection to - // obtain it. If that doesn't work, assume there is no original authenticator - def doInstallIfIvy(original: Option[Authenticator]): Unit = - original match { - case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed - case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy) - case original => doInstall(original) - } - doInstallIfIvy(originalAuthenticator) + + "Helpful debug messages disabled!"); + } + // We will try to use the original authenticator as backup authenticator. + // Since there is no getter available, so try to use some reflection to + // obtain it. If that doesn't work, assume there is no original authenticator + def doInstallIfIvy(original: Option[Authenticator]): Unit = + original match { + case Some(installed: ErrorMessageAuthenticator) => // Ignore, we're already installed + case Some(ivy: IvyAuthenticator) => installIntoIvy(ivy) + case original => doInstall(original) + } + doInstallIfIvy(originalAuthenticator) } } /** - * An authenticator which just delegates to a previous authenticator and issues *nice* - * error messages on failure to find credentials. - * - * Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to + * An authenticator which just delegates to a previous authenticator and issues *nice* + * error messages on failure to find credentials. + * + * Since ivy installs its own credentials handler EVERY TIME it resolves or publishes, we want to * install this one at some point and eventually ivy will capture it and use it. 
*/ private[sbt] final class ErrorMessageAuthenticator(original: Option[Authenticator]) extends Authenticator { - protected override def getPasswordAuthentication(): PasswordAuthentication = { - // We're guaranteed to only get here if Ivy's authentication fails - if (!isProxyAuthentication) { - val host = getRequestingHost - // TODO - levenshtein distance "did you mean" message. - Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].") - val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty) - if(!configuredRealms.isEmpty) { - Message.error(s" Is one of these realms mispelled for host [${host}]:") - configuredRealms foreach { realm => - Message.error(s" * ${realm}") - } - } + protected override def getPasswordAuthentication(): PasswordAuthentication = { + // We're guaranteed to only get here if Ivy's authentication fails + if (!isProxyAuthentication) { + val host = getRequestingHost + // TODO - levenshtein distance "did you mean" message. + Message.error(s"Unable to find credentials for [${getRequestingPrompt} @ ${host}].") + val configuredRealms = IvyCredentialsLookup.realmsForHost.getOrElse(host, Set.empty) + if (!configuredRealms.isEmpty) { + Message.error(s" Is one of these realms mispelled for host [${host}]:") + configuredRealms foreach { realm => + Message.error(s" * ${realm}") } - // TODO - Maybe we should work on a helpful proxy message... - - // TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it. - // or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be). - - // Grabs the authentication that would have been provided had we not been installed... 
- def originalAuthentication: Option[PasswordAuthentication] = { - Authenticator.setDefault(original.getOrElse(null)) - try Option(Authenticator.requestPasswordAuthentication( - getRequestingHost, - getRequestingSite, - getRequestingPort, - getRequestingProtocol, - getRequestingPrompt, - getRequestingScheme)) - finally Authenticator.setDefault(this) - } - originalAuthentication.getOrElse(null) + } } + // TODO - Maybe we should work on a helpful proxy message... - /** Returns true if this authentication if for a proxy and not for an HTTP server. - * We want to display different error messages, depending. - */ - private def isProxyAuthentication: Boolean = - getRequestorType == Authenticator.RequestorType.PROXY + // TODO - To be more maven friendly, we may want to also try to grab the "first" authentication that shows up for a server and try it. + // or maybe allow that behavior to be configured, since maven users aren't used to realms (which they should be). + + // Grabs the authentication that would have been provided had we not been installed... + def originalAuthentication: Option[PasswordAuthentication] = { + Authenticator.setDefault(original.getOrElse(null)) + try Option(Authenticator.requestPasswordAuthentication( + getRequestingHost, + getRequestingSite, + getRequestingPort, + getRequestingProtocol, + getRequestingPrompt, + getRequestingScheme)) + finally Authenticator.setDefault(this) + } + originalAuthentication.getOrElse(null) + } + + /** + * Returns true if this authentication if for a proxy and not for an HTTP server. + * We want to display different error messages, depending. 
+ */ + private def isProxyAuthentication: Boolean = + getRequestorType == Authenticator.RequestorType.PROXY } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala index 365ffe698..aeef4d428 100644 --- a/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala +++ b/ivy/src/main/scala/sbt/ivyint/IvyCredentialsLookup.scala @@ -13,7 +13,7 @@ private[sbt] case class Realm(host: String, realm: String) extends CredentialKey /** * Helper mechanism to improve credential related error messages. - * + * * This evil class exposes to us the necessary information to warn on credential failure and offer * spelling/typo suggestions. */ @@ -21,17 +21,18 @@ private[sbt] object IvyCredentialsLookup { /** Helper extractor for Ivy's key-value store of credentials. */ private object KeySplit { - def unapply(key: String): Option[(String,String)] = { + def unapply(key: String): Option[(String, String)] = { key.indexOf('@') match { case -1 => None - case n => Some(key.take(n) -> key.drop(n+1)) + case n => Some(key.take(n) -> key.drop(n + 1)) } } } - /** Here we cheat runtime private so we can look in the credentials store. + /** + * Here we cheat runtime private so we can look in the credentials store. * - * TODO - Don't bomb at class load time... + * TODO - Don't bomb at class load time... */ private val credKeyringField = { val tmp = classOf[CredentialsStore].getDeclaredField("KEYRING") @@ -45,10 +46,10 @@ private[sbt] object IvyCredentialsLookup { // make a clone of the set... (map.keySet.asScala.map { case KeySplit(realm, host) => Realm(host, realm) - case host => Host(host) + case host => Host(host) })(collection.breakOut) } - + /** * A mapping of host -> realms in the ivy credentials store. 
*/ @@ -58,6 +59,6 @@ private[sbt] object IvyCredentialsLookup { } groupBy { realm => realm.host } mapValues { realms => - realms map (_.realm) + realms map (_.realm) } } \ No newline at end of file diff --git a/ivy/src/main/scala/sbt/ivyint/MergeDescriptors.scala b/ivy/src/main/scala/sbt/ivyint/MergeDescriptors.scala index e1c07c780..486d8b7bd 100644 --- a/ivy/src/main/scala/sbt/ivyint/MergeDescriptors.scala +++ b/ivy/src/main/scala/sbt/ivyint/MergeDescriptors.scala @@ -3,134 +3,132 @@ package ivyint import java.io.File import java.net.URI -import java.util.{Collection, Collections => CS} +import java.util.{ Collection, Collections => CS } import CS.singleton -import org.apache.ivy.{core, plugins, util, Ivy} -import core.module.descriptor.{DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor} -import core.module.descriptor.{DefaultDependencyDescriptor => DDD, DependencyDescriptor} -import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId} +import org.apache.ivy.{ core, plugins, util, Ivy } +import core.module.descriptor.{ DependencyArtifactDescriptor, DefaultDependencyArtifactDescriptor } +import core.module.descriptor.{ DefaultDependencyDescriptor => DDD, DependencyDescriptor } +import core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId } import plugins.namespace.Namespace import util.extendable.ExtendableItem -private[sbt] object MergeDescriptors -{ - def mergeable(a: DependencyDescriptor, b: DependencyDescriptor): Boolean = - a.isForce == b.isForce && - a.isChanging == b.isChanging && - a.isTransitive == b.isTransitive && - a.getParentRevisionId == b.getParentRevisionId && - a.getNamespace == b.getNamespace && { - val amrid = a.getDependencyRevisionId - val bmrid = b.getDependencyRevisionId - amrid == bmrid - } && { - val adyn = a.getDynamicConstraintDependencyRevisionId - val bdyn = b.getDynamicConstraintDependencyRevisionId - adyn == bdyn - } +private[sbt] object MergeDescriptors { + def mergeable(a: DependencyDescriptor, b: 
DependencyDescriptor): Boolean = + a.isForce == b.isForce && + a.isChanging == b.isChanging && + a.isTransitive == b.isTransitive && + a.getParentRevisionId == b.getParentRevisionId && + a.getNamespace == b.getNamespace && { + val amrid = a.getDependencyRevisionId + val bmrid = b.getDependencyRevisionId + amrid == bmrid + } && { + val adyn = a.getDynamicConstraintDependencyRevisionId + val bdyn = b.getDynamicConstraintDependencyRevisionId + adyn == bdyn + } - def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor = - { - assert(mergeable(a,b)) - new MergedDescriptors(a,b) - } + def apply(a: DependencyDescriptor, b: DependencyDescriptor): DependencyDescriptor = + { + assert(mergeable(a, b)) + new MergedDescriptors(a, b) + } } // combines the artifacts, configurations, includes, and excludes for DependencyDescriptors `a` and `b` // that otherwise have equal IDs -private final class MergedDescriptors(a: DependencyDescriptor, b: DependencyDescriptor) extends DependencyDescriptor -{ - def getDependencyId = a.getDependencyId - def isForce = a.isForce - def isChanging = a.isChanging - def isTransitive = a.isTransitive - def getNamespace = a.getNamespace - def getParentRevisionId = a.getParentRevisionId - def getDependencyRevisionId = a.getDependencyRevisionId - def getDynamicConstraintDependencyRevisionId = a.getDynamicConstraintDependencyRevisionId +private final class MergedDescriptors(a: DependencyDescriptor, b: DependencyDescriptor) extends DependencyDescriptor { + def getDependencyId = a.getDependencyId + def isForce = a.isForce + def isChanging = a.isChanging + def isTransitive = a.isTransitive + def getNamespace = a.getNamespace + def getParentRevisionId = a.getParentRevisionId + def getDependencyRevisionId = a.getDependencyRevisionId + def getDynamicConstraintDependencyRevisionId = a.getDynamicConstraintDependencyRevisionId - def getModuleConfigurations = concat(a.getModuleConfigurations, b.getModuleConfigurations) + def 
getModuleConfigurations = concat(a.getModuleConfigurations, b.getModuleConfigurations) - def getDependencyConfigurations(moduleConfiguration: String, requestedConfiguration: String) = - concat(a.getDependencyConfigurations(moduleConfiguration, requestedConfiguration), b.getDependencyConfigurations(moduleConfiguration)) + def getDependencyConfigurations(moduleConfiguration: String, requestedConfiguration: String) = + concat(a.getDependencyConfigurations(moduleConfiguration, requestedConfiguration), b.getDependencyConfigurations(moduleConfiguration)) - def getDependencyConfigurations(moduleConfiguration: String) = - concat(a.getDependencyConfigurations(moduleConfiguration), b.getDependencyConfigurations(moduleConfiguration)) + def getDependencyConfigurations(moduleConfiguration: String) = + concat(a.getDependencyConfigurations(moduleConfiguration), b.getDependencyConfigurations(moduleConfiguration)) - def getDependencyConfigurations(moduleConfigurations: Array[String]) = - concat(a.getDependencyConfigurations(moduleConfigurations), b.getDependencyConfigurations(moduleConfigurations)) + def getDependencyConfigurations(moduleConfigurations: Array[String]) = + concat(a.getDependencyConfigurations(moduleConfigurations), b.getDependencyConfigurations(moduleConfigurations)) - def getAllDependencyArtifacts = concatArtifacts(a, a.getAllDependencyArtifacts, b, b.getAllDependencyArtifacts) + def getAllDependencyArtifacts = concatArtifacts(a, a.getAllDependencyArtifacts, b, b.getAllDependencyArtifacts) - def getDependencyArtifacts(moduleConfigurations: String) = - concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations)) + def getDependencyArtifacts(moduleConfigurations: String) = + concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations)) - def getDependencyArtifacts(moduleConfigurations: Array[String]) = - concatArtifacts(a, 
a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations)) + def getDependencyArtifacts(moduleConfigurations: Array[String]) = + concatArtifacts(a, a.getDependencyArtifacts(moduleConfigurations), b, b.getDependencyArtifacts(moduleConfigurations)) - def getAllIncludeRules = concat(a.getAllIncludeRules, b.getAllIncludeRules) + def getAllIncludeRules = concat(a.getAllIncludeRules, b.getAllIncludeRules) - def getIncludeRules(moduleConfigurations: String) = - concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) + def getIncludeRules(moduleConfigurations: String) = + concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) - def getIncludeRules(moduleConfigurations: Array[String]) = - concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) + def getIncludeRules(moduleConfigurations: Array[String]) = + concat(a.getIncludeRules(moduleConfigurations), b.getIncludeRules(moduleConfigurations)) - private[this] def concatArtifacts(a: DependencyDescriptor, as: Array[DependencyArtifactDescriptor], b: DependencyDescriptor, bs: Array[DependencyArtifactDescriptor]) = - { - if(as.isEmpty) - if(bs.isEmpty) as - else defaultArtifact(a) +: explicitConfigurations(b, bs) - else if(bs.isEmpty) explicitConfigurations(a, as) :+ defaultArtifact(b) - else concat(explicitConfigurations(a, as), explicitConfigurations(b, bs)) - } - private[this] def explicitConfigurations(base: DependencyDescriptor, arts: Array[DependencyArtifactDescriptor]): Array[DependencyArtifactDescriptor] = - arts map { art => explicitConfigurations(base, art) } - private[this] def explicitConfigurations(base: DependencyDescriptor, art: DependencyArtifactDescriptor): DependencyArtifactDescriptor = - { - val aConfs = art.getConfigurations - if(aConfs == null || aConfs.isEmpty) - copyWithConfigurations(art, base.getModuleConfigurations) - else - art - } - private[this] def 
defaultArtifact(a: DependencyDescriptor): DependencyArtifactDescriptor = - { - val dd = new DefaultDependencyArtifactDescriptor(a, a.getDependencyRevisionId.getName, "jar", "jar", null, null) - addConfigurations(dd, a.getModuleConfigurations) - dd - } - private[this] def copyWithConfigurations(dd: DependencyArtifactDescriptor, confs: Seq[String]): DependencyArtifactDescriptor = - { - val dextra = dd.getQualifiedExtraAttributes - val newd = new DefaultDependencyArtifactDescriptor(dd.getDependencyDescriptor, dd.getName, dd.getType, dd.getExt, dd.getUrl, dextra) - addConfigurations(newd, confs) - newd - } - private[this] def addConfigurations(dd: DefaultDependencyArtifactDescriptor, confs: Seq[String]): Unit = - confs foreach dd.addConfiguration + private[this] def concatArtifacts(a: DependencyDescriptor, as: Array[DependencyArtifactDescriptor], b: DependencyDescriptor, bs: Array[DependencyArtifactDescriptor]) = + { + if (as.isEmpty) + if (bs.isEmpty) as + else defaultArtifact(a) +: explicitConfigurations(b, bs) + else if (bs.isEmpty) explicitConfigurations(a, as) :+ defaultArtifact(b) + else concat(explicitConfigurations(a, as), explicitConfigurations(b, bs)) + } + private[this] def explicitConfigurations(base: DependencyDescriptor, arts: Array[DependencyArtifactDescriptor]): Array[DependencyArtifactDescriptor] = + arts map { art => explicitConfigurations(base, art) } + private[this] def explicitConfigurations(base: DependencyDescriptor, art: DependencyArtifactDescriptor): DependencyArtifactDescriptor = + { + val aConfs = art.getConfigurations + if (aConfs == null || aConfs.isEmpty) + copyWithConfigurations(art, base.getModuleConfigurations) + else + art + } + private[this] def defaultArtifact(a: DependencyDescriptor): DependencyArtifactDescriptor = + { + val dd = new DefaultDependencyArtifactDescriptor(a, a.getDependencyRevisionId.getName, "jar", "jar", null, null) + addConfigurations(dd, a.getModuleConfigurations) + dd + } + private[this] def 
copyWithConfigurations(dd: DependencyArtifactDescriptor, confs: Seq[String]): DependencyArtifactDescriptor = + { + val dextra = dd.getQualifiedExtraAttributes + val newd = new DefaultDependencyArtifactDescriptor(dd.getDependencyDescriptor, dd.getName, dd.getType, dd.getExt, dd.getUrl, dextra) + addConfigurations(newd, confs) + newd + } + private[this] def addConfigurations(dd: DefaultDependencyArtifactDescriptor, confs: Seq[String]): Unit = + confs foreach dd.addConfiguration - private[this] def concat[T: ClassManifest](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct.toArray + private[this] def concat[T: ClassManifest](a: Array[T], b: Array[T]): Array[T] = (a ++ b).distinct.toArray - def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules) + def getAllExcludeRules = concat(a.getAllExcludeRules, b.getAllExcludeRules) - def getExcludeRules(moduleConfigurations: String) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) + def getExcludeRules(moduleConfigurations: String) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) - def getExcludeRules(moduleConfigurations: Array[String]) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) + def getExcludeRules(moduleConfigurations: Array[String]) = concat(a.getExcludeRules(moduleConfigurations), b.getExcludeRules(moduleConfigurations)) - def doesExclude(moduleConfigurations: Array[String], artifactId: ArtifactId) = a.doesExclude(moduleConfigurations, artifactId) || b.doesExclude(moduleConfigurations, artifactId) + def doesExclude(moduleConfigurations: Array[String], artifactId: ArtifactId) = a.doesExclude(moduleConfigurations, artifactId) || b.doesExclude(moduleConfigurations, artifactId) - def canExclude = a.canExclude || b.canExclude + def canExclude = a.canExclude || b.canExclude - def asSystem = this + def asSystem = this - def clone(revision: ModuleRevisionId) = new 
MergedDescriptors(a.clone(revision), b.clone(revision)) + def clone(revision: ModuleRevisionId) = new MergedDescriptors(a.clone(revision), b.clone(revision)) - def getAttribute(name: String): String = a.getAttribute(name) - def getAttributes = a.getAttributes - def getExtraAttribute(name: String) = a.getExtraAttribute(name) - def getExtraAttributes = a.getExtraAttributes - def getQualifiedExtraAttributes = a.getQualifiedExtraAttributes - def getSourceModule = a.getSourceModule + def getAttribute(name: String): String = a.getAttribute(name) + def getAttributes = a.getAttributes + def getExtraAttribute(name: String) = a.getExtraAttribute(name) + def getExtraAttributes = a.getExtraAttributes + def getQualifiedExtraAttributes = a.getQualifiedExtraAttributes + def getSourceModule = a.getSourceModule } diff --git a/launch/src/main/scala/xsbt/boot/Boot.scala b/launch/src/main/scala/xsbt/boot/Boot.scala index 665407cff..c28e2e4fd 100644 --- a/launch/src/main/scala/xsbt/boot/Boot.scala +++ b/launch/src/main/scala/xsbt/boot/Boot.scala @@ -1,62 +1,58 @@ /* sbt -- Simple Build Tool * Copyright 2009, 2010 Mark Harrah */ - package xsbt.boot +package xsbt.boot import java.io.File - // The entry point to the launcher -object Boot -{ - def main(args: Array[String]) - { - val config = parseArgs(args) - // If we havne't exited, we set up some hooks and launch - System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM - System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks - System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise - CheckProxy() - run(config) - } - def parseArgs(args: Array[String]): LauncherArguments = { - @annotation.tailrec - def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments = - args match { - case "--version" :: rest => - println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) - exit(1) - 
case "--locate" :: rest => parse(rest, true, remaining) - case next :: rest => parse(rest, isLocate, next :: remaining) - case Nil => new LauncherArguments(remaining.reverse, isLocate) - } - parse(args.toList, false, Nil) - } - - // this arrangement is because Scala does not always properly optimize away - // the tail recursion in a catch statement - final def run(args: LauncherArguments): Unit = runImpl(args) match { - case Some(newArgs) => run(newArgs) - case None => () - } - private def runImpl(args: LauncherArguments): Option[LauncherArguments] = - try - Launch(args) map exit - catch - { - case b: BootException => errorAndExit(b.toString) - case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage) - case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false)) - case e: Throwable => - e.printStackTrace - errorAndExit(Pre.prefixError(e.toString)) - } +object Boot { + def main(args: Array[String]) { + val config = parseArgs(args) + // If we havne't exited, we set up some hooks and launch + System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM + System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks + System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise + CheckProxy() + run(config) + } + def parseArgs(args: Array[String]): LauncherArguments = { + @annotation.tailrec + def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments = + args match { + case "--version" :: rest => + println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion) + exit(1) + case "--locate" :: rest => parse(rest, true, remaining) + case next :: rest => parse(rest, isLocate, next :: remaining) + case Nil => new LauncherArguments(remaining.reverse, isLocate) + } + parse(args.toList, false, Nil) + } - private def errorAndExit(msg: String): Nothing = - { - System.out.println(msg) - exit(1) - } - 
private def exit(code: Int): Nothing = - System.exit(code).asInstanceOf[Nothing] + // this arrangement is because Scala does not always properly optimize away + // the tail recursion in a catch statement + final def run(args: LauncherArguments): Unit = runImpl(args) match { + case Some(newArgs) => run(newArgs) + case None => () + } + private def runImpl(args: LauncherArguments): Option[LauncherArguments] = + try + Launch(args) map exit + catch { + case b: BootException => errorAndExit(b.toString) + case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage) + case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false)) + case e: Throwable => + e.printStackTrace + errorAndExit(Pre.prefixError(e.toString)) + } + + private def errorAndExit(msg: String): Nothing = + { + System.out.println(msg) + exit(1) + } + private def exit(code: Int): Nothing = + System.exit(code).asInstanceOf[Nothing] } diff --git a/launch/src/main/scala/xsbt/boot/BootConfiguration.scala b/launch/src/main/scala/xsbt/boot/BootConfiguration.scala index 288aadc83..084a44015 100644 --- a/launch/src/main/scala/xsbt/boot/BootConfiguration.scala +++ b/launch/src/main/scala/xsbt/boot/BootConfiguration.scala @@ -1,9 +1,9 @@ /* sbt -- Simple Build Tool * Copyright 2009, 2010 Mark Harrah */ - package xsbt.boot +package xsbt.boot - import java.io.File +import java.io.File // // [.]scala-/ [baseDirectoryName] @@ -12,107 +12,111 @@ // // see also ProjectProperties for the set of constants that apply to the build.properties file in a project // The scala organization is used as a prefix in baseDirectoryName when a non-standard organization is used. 
-private object BootConfiguration -{ - // these are the Scala module identifiers to resolve/retrieve - val ScalaOrg = "org.scala-lang" - val CompilerModuleName = "scala-compiler" - val LibraryModuleName = "scala-library" +private object BootConfiguration { + // these are the Scala module identifiers to resolve/retrieve + val ScalaOrg = "org.scala-lang" + val CompilerModuleName = "scala-compiler" + val LibraryModuleName = "scala-library" - val JUnitName = "junit" - val JAnsiVersion = "1.11" + val JUnitName = "junit" + val JAnsiVersion = "1.11" - val SbtOrg = "org.scala-sbt" + val SbtOrg = "org.scala-sbt" - /** The Ivy conflict manager to use for updating.*/ - val ConflictManagerName = "latest-revision" - /** The name of the local Ivy repository, which is used when compiling sbt from source.*/ - val LocalIvyName = "local" - /** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/ - val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]" - /** The artifact pattern used for the local Ivy repository.*/ - def LocalArtifactPattern = LocalPattern - /** The Ivy pattern used for the local Ivy repository.*/ - def LocalIvyPattern = LocalPattern + /** The Ivy conflict manager to use for updating.*/ + val ConflictManagerName = "latest-revision" + /** The name of the local Ivy repository, which is used when compiling sbt from source.*/ + val LocalIvyName = "local" + /** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/ + val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]" + /** The artifact pattern used for the local Ivy repository.*/ + def LocalArtifactPattern = LocalPattern + /** The Ivy pattern used for the local Ivy repository.*/ + def LocalIvyPattern = LocalPattern - final val FjbgPackage = "ch.epfl.lamp.fjbg." 
- /** The class name prefix used to hide the Scala classes used by this loader from the application */ - final val ScalaPackage = "scala." - /** The class name prefix used to hide the Ivy classes used by this loader from the application*/ - final val IvyPackage = "org.apache.ivy." - /** The class name prefix used to hide the launcher classes from the application. - * Note that access to xsbti classes are allowed.*/ - final val SbtBootPackage = "xsbt.boot." - /** The loader will check that these classes can be loaded and will assume that their presence indicates - * the Scala compiler and library have been downloaded.*/ - val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil + final val FjbgPackage = "ch.epfl.lamp.fjbg." + /** The class name prefix used to hide the Scala classes used by this loader from the application */ + final val ScalaPackage = "scala." + /** The class name prefix used to hide the Ivy classes used by this loader from the application*/ + final val IvyPackage = "org.apache.ivy." + /** + * The class name prefix used to hide the launcher classes from the application. + * Note that access to xsbti classes are allowed. + */ + final val SbtBootPackage = "xsbt.boot." + /** + * The loader will check that these classes can be loaded and will assume that their presence indicates + * the Scala compiler and library have been downloaded. + */ + val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil - val ScalaHomeProperty = "scala.home" - val UpdateLogName = "update.log" - val DefaultChecksums = "sha1" :: "md5" :: Nil + val ScalaHomeProperty = "scala.home" + val UpdateLogName = "update.log" + val DefaultChecksums = "sha1" :: "md5" :: Nil - val DefaultIvyConfiguration = "default" + val DefaultIvyConfiguration = "default" - /** The name of the directory within the boot directory to retrieve scala to. */ - val ScalaDirectoryName = "lib" + /** The name of the directory within the boot directory to retrieve scala to. 
*/ + val ScalaDirectoryName = "lib" - /** The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory - * containing all jars for the requested version of scala. */ - val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]" - - def artifactType(classifier: String) = - classifier match - { - case "sources" => "src" - case "javadoc" => "doc" - case _ => "jar" - } - - /** The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory - * containing all jars for the requested version of scala. */ - def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]" + /** + * The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory + * containing all jars for the requested version of scala. + */ + val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]" - val ScalaVersionPrefix = "scala-" + def artifactType(classifier: String) = + classifier match { + case "sources" => "src" + case "javadoc" => "doc" + case _ => "jar" + } - /** The name of the directory to retrieve the application and its dependencies to.*/ - def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version - /** The name of the directory in the boot directory to put all jars for the given version of scala in.*/ - def baseDirectoryName(scalaOrg: String, scalaVersion: Option[String]) = scalaVersion match { - case None => "other" - case Some(sv) => (if (scalaOrg == ScalaOrg) "" else scalaOrg + ".") + ScalaVersionPrefix + sv - } + /** + * The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory + * containing all jars for the requested version of scala. 
+ */ + def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]" - def extractScalaVersion(dir: File): Option[String] = - { - val name = dir.getName - if(name.contains(ScalaVersionPrefix)) - Some(name.substring(name.lastIndexOf(ScalaVersionPrefix) + ScalaVersionPrefix.length)) - else - None - } + val ScalaVersionPrefix = "scala-" + + /** The name of the directory to retrieve the application and its dependencies to.*/ + def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version + /** The name of the directory in the boot directory to put all jars for the given version of scala in.*/ + def baseDirectoryName(scalaOrg: String, scalaVersion: Option[String]) = scalaVersion match { + case None => "other" + case Some(sv) => (if (scalaOrg == ScalaOrg) "" else scalaOrg + ".") + ScalaVersionPrefix + sv + } + + def extractScalaVersion(dir: File): Option[String] = + { + val name = dir.getName + if (name.contains(ScalaVersionPrefix)) + Some(name.substring(name.lastIndexOf(ScalaVersionPrefix) + ScalaVersionPrefix.length)) + else + None + } } private final class ProxyProperties( - val envURL: String, - val envUser: String, - val envPassword: String, - val sysHost: String, - val sysPort: String, - val sysUser: String, - val sysPassword: String -) -private object ProxyProperties -{ - val http = apply("http") - val https = apply("https") - val ftp = apply("ftp") + val envURL: String, + val envUser: String, + val envPassword: String, + val sysHost: String, + val sysPort: String, + val sysUser: String, + val sysPassword: String) +private object ProxyProperties { + val http = apply("http") + val https = apply("https") + val ftp = apply("ftp") - def apply(pre: String) = new ProxyProperties( - pre+"_proxy", - pre+"_proxy_user", - pre+"_proxy_pass", - pre+".proxyHost", - pre+".proxyPort", - pre+".proxyUser", - pre+".proxyPassword" - ) + def 
apply(pre: String) = new ProxyProperties( + pre + "_proxy", + pre + "_proxy_user", + pre + "_proxy_pass", + pre + ".proxyHost", + pre + ".proxyPort", + pre + ".proxyUser", + pre + ".proxyPassword" + ) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Cache.scala b/launch/src/main/scala/xsbt/boot/Cache.scala index 669ea3819..7034f6a22 100644 --- a/launch/src/main/scala/xsbt/boot/Cache.scala +++ b/launch/src/main/scala/xsbt/boot/Cache.scala @@ -3,20 +3,19 @@ */ package xsbt.boot -import java.lang.ref.{Reference, SoftReference} +import java.lang.ref.{ Reference, SoftReference } import java.util.HashMap -final class Cache[K,X,V](create: (K,X) => V) -{ - private[this] val delegate = new HashMap[K,Reference[V]] - def apply(k: K, x: X): V = synchronized { getFromReference(k, x, delegate.get(k)) } - private[this] def getFromReference(k: K, x: X, existingRef: Reference[V]) = if(existingRef eq null) newEntry(k, x) else get(k, x, existingRef.get) - private[this] def get(k: K, x: X, existing: V) = if(existing == null) newEntry(k, x) else existing - private[this] def newEntry(k: K, x: X): V = - { - val v = create(k, x) - Pre.assert(v != null, "Value for key " + k + " was null") - delegate.put(k, new SoftReference(v)) - v - } +final class Cache[K, X, V](create: (K, X) => V) { + private[this] val delegate = new HashMap[K, Reference[V]] + def apply(k: K, x: X): V = synchronized { getFromReference(k, x, delegate.get(k)) } + private[this] def getFromReference(k: K, x: X, existingRef: Reference[V]) = if (existingRef eq null) newEntry(k, x) else get(k, x, existingRef.get) + private[this] def get(k: K, x: X, existing: V) = if (existing == null) newEntry(k, x) else existing + private[this] def newEntry(k: K, x: X): V = + { + val v = create(k, x) + Pre.assert(v != null, "Value for key " + k + " was null") + delegate.put(k, new SoftReference(v)) + v + } } diff --git a/launch/src/main/scala/xsbt/boot/CheckProxy.scala 
b/launch/src/main/scala/xsbt/boot/CheckProxy.scala index af3cdc651..d82409e06 100644 --- a/launch/src/main/scala/xsbt/boot/CheckProxy.scala +++ b/launch/src/main/scala/xsbt/boot/CheckProxy.scala @@ -4,43 +4,36 @@ package xsbt.boot import Pre._ -import java.net.{MalformedURLException, URL} +import java.net.{ MalformedURLException, URL } -object CheckProxy -{ - def apply() - { - import ProxyProperties._ - for( pp <- Seq(http, https, ftp)) - setFromEnv(pp) - } +object CheckProxy { + def apply() { + import ProxyProperties._ + for (pp <- Seq(http, https, ftp)) + setFromEnv(pp) + } - private[this] def setFromEnv(conf: ProxyProperties) - { - import conf._ - val proxyURL = System.getenv(envURL) - if(isDefined(proxyURL) && !isPropertyDefined(sysHost) && !isPropertyDefined(sysPort)) - { - try - { - val proxy = new URL(proxyURL) - setProperty(sysHost, proxy.getHost) - val port = proxy.getPort - if(port >= 0) - System.setProperty(sysPort, port.toString) - copyEnv(envUser, sysUser) - copyEnv(envPassword, sysPassword) - } - catch - { - case e: MalformedURLException => - System.out.println(s"Warning: could not parse $envURL setting: ${e.toString}") - } - } - } + private[this] def setFromEnv(conf: ProxyProperties) { + import conf._ + val proxyURL = System.getenv(envURL) + if (isDefined(proxyURL) && !isPropertyDefined(sysHost) && !isPropertyDefined(sysPort)) { + try { + val proxy = new URL(proxyURL) + setProperty(sysHost, proxy.getHost) + val port = proxy.getPort + if (port >= 0) + System.setProperty(sysPort, port.toString) + copyEnv(envUser, sysUser) + copyEnv(envPassword, sysPassword) + } catch { + case e: MalformedURLException => + System.out.println(s"Warning: could not parse $envURL setting: ${e.toString}") + } + } + } - private def copyEnv(envKey: String, sysKey: String) { setProperty(sysKey, System.getenv(envKey)) } - private def setProperty(key: String, value: String) { if(value != null) System.setProperty(key, value) } - private def isPropertyDefined(k: String) = 
isDefined(System.getProperty(k)) - private def isDefined(s: String) = s != null && isNonEmpty(s) + private def copyEnv(envKey: String, sysKey: String) { setProperty(sysKey, System.getenv(envKey)) } + private def setProperty(key: String, value: String) { if (value != null) System.setProperty(key, value) } + private def isPropertyDefined(k: String) = isDefined(System.getProperty(k)) + private def isDefined(s: String) = s != null && isNonEmpty(s) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Configuration.scala b/launch/src/main/scala/xsbt/boot/Configuration.scala index 4028e89cf..cf987004a 100644 --- a/launch/src/main/scala/xsbt/boot/Configuration.scala +++ b/launch/src/main/scala/xsbt/boot/Configuration.scala @@ -4,8 +4,8 @@ package xsbt.boot import Pre._ -import java.io.{File, FileInputStream, InputStreamReader} -import java.net.{MalformedURLException, URI, URL} +import java.io.{ File, FileInputStream, InputStreamReader } +import java.net.{ MalformedURLException, URI, URL } import java.util.regex.Pattern import scala.collection.immutable.List import annotation.tailrec @@ -15,152 +15,147 @@ object ConfigurationStorageState extends Enumeration { val SerializedFile = value("serialized-file") } -object Configuration -{ - import ConfigurationStorageState._ - final val SysPropPrefix = "-D" - def parse(file: URL, baseDirectory: File) = Using( new InputStreamReader(file.openStream, "utf8") )( (new ConfigurationParser).apply ) - - /** - * Finds the configuration location. - * - * Note: Configuration may be previously serialized by a launcher. 
- */ - @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) = - args match - { - case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile) - case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile) - case head :: tail if head.startsWith(SysPropPrefix) => - setProperty(head stripPrefix SysPropPrefix) - find(tail, baseDirectory) - case _ => - val propertyConfigured = System.getProperty("sbt.boot.properties") - val url = if(propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory) - (url, args, PropertiesFile) - } - def setProperty(head: String) - { - val keyValue = head.split("=",2) - if(keyValue.length != 2) - System.err.println("Warning: invalid system property '" + head + "'") - else - System.setProperty(keyValue(0), keyValue(1)) - } - def configurationOnClasspath: URL = - { - val paths = resourcePaths(guessSbtVersion) - paths.iterator.map(getClass.getResource).find(neNull) getOrElse - ( multiPartError("Could not finder sbt launch configuration. 
Searched classpath for:", paths)) - } - def directConfiguration(path: String, baseDirectory: File): URL = - { - try { new URL(path) } - catch { case _: MalformedURLException => configurationFromFile(path, baseDirectory) } - } - def configurationFromFile(path: String, baseDirectory: File): URL = - { - val pathURI = filePathURI(path) - def resolve(against: URI): Option[URL] = - { - val resolved = against.resolve(pathURI) // variant that accepts String doesn't properly escape (#725) - val exists = try { (new File(resolved)).exists } catch { case _: IllegalArgumentException => false } - if(exists) Some(resolved.toURL) else None - } - val against = resolveAgainst(baseDirectory) - // use Iterators so that resolution occurs lazily, for performance - val resolving = against.iterator.flatMap(e => resolve(e).toList.iterator) - if(!resolving.hasNext) multiPartError("Could not find configuration file '" + path + "'. Searched:", against) - resolving.next() - } - def multiPartError[T](firstLine: String, lines: List[T]) = error( (firstLine :: lines).mkString("\n\t") ) +object Configuration { + import ConfigurationStorageState._ + final val SysPropPrefix = "-D" + def parse(file: URL, baseDirectory: File) = Using(new InputStreamReader(file.openStream, "utf8"))((new ConfigurationParser).apply) - def UnspecifiedVersionPart = "Unspecified" - def DefaultVersionPart = "Default" - def DefaultBuildProperties = "project/build.properties" - def SbtVersionProperty = "sbt.version" - val ConfigurationName = "sbt.boot.properties" - val JarBasePath = "/sbt/" - def userConfigurationPath = "/" + ConfigurationName - def defaultConfigurationPath = JarBasePath + ConfigurationName - val baseResourcePaths: List[String] = userConfigurationPath :: defaultConfigurationPath :: Nil - def resourcePaths(sbtVersion: Option[String]): List[String] = - versionParts(sbtVersion) flatMap { part => - baseResourcePaths map { base => - base + part - } - } - def fallbackParts: List[String] = "" :: Nil - def 
versionParts(version: Option[String]): List[String] = - version match { - case None => UnspecifiedVersionPart :: fallbackParts - case Some(v) => versionParts(v) - } - def versionParts(version: String): List[String] = - { - val pattern = Pattern.compile("""(\d+)(\.\d+)(\.\d+)(-.*)?""") - val m = pattern.matcher(version) - if(m.matches()) - subPartsIndices flatMap { is => fullMatchOnly(is.map(m.group)) } - else - noMatchParts - } - def noMatchParts: List[String] = DefaultVersionPart :: fallbackParts - private[this] def fullMatchOnly(groups: List[String]): Option[String] = - if(groups.forall(neNull)) Some(groups.mkString) else None + /** + * Finds the configuration location. + * + * Note: Configuration may be previously serialized by a launcher. + */ + @tailrec def find(args: List[String], baseDirectory: File): (URL, List[String], ConfigurationStorageState.Value) = + args match { + case head :: tail if head.startsWith("@load:") => (directConfiguration(head.substring(6), baseDirectory), tail, SerializedFile) + case head :: tail if head.startsWith("@") => (directConfiguration(head.substring(1), baseDirectory), tail, PropertiesFile) + case head :: tail if head.startsWith(SysPropPrefix) => + setProperty(head stripPrefix SysPropPrefix) + find(tail, baseDirectory) + case _ => + val propertyConfigured = System.getProperty("sbt.boot.properties") + val url = if (propertyConfigured == null) configurationOnClasspath else configurationFromFile(propertyConfigured, baseDirectory) + (url, args, PropertiesFile) + } + def setProperty(head: String) { + val keyValue = head.split("=", 2) + if (keyValue.length != 2) + System.err.println("Warning: invalid system property '" + head + "'") + else + System.setProperty(keyValue(0), keyValue(1)) + } + def configurationOnClasspath: URL = + { + val paths = resourcePaths(guessSbtVersion) + paths.iterator.map(getClass.getResource).find(neNull) getOrElse + (multiPartError("Could not finder sbt launch configuration. 
Searched classpath for:", paths)) + } + def directConfiguration(path: String, baseDirectory: File): URL = + { + try { new URL(path) } + catch { case _: MalformedURLException => configurationFromFile(path, baseDirectory) } + } + def configurationFromFile(path: String, baseDirectory: File): URL = + { + val pathURI = filePathURI(path) + def resolve(against: URI): Option[URL] = + { + val resolved = against.resolve(pathURI) // variant that accepts String doesn't properly escape (#725) + val exists = try { (new File(resolved)).exists } catch { case _: IllegalArgumentException => false } + if (exists) Some(resolved.toURL) else None + } + val against = resolveAgainst(baseDirectory) + // use Iterators so that resolution occurs lazily, for performance + val resolving = against.iterator.flatMap(e => resolve(e).toList.iterator) + if (!resolving.hasNext) multiPartError("Could not find configuration file '" + path + "'. Searched:", against) + resolving.next() + } + def multiPartError[T](firstLine: String, lines: List[T]) = error((firstLine :: lines).mkString("\n\t")) - private[this] def subPartsIndices = - (1 :: 2 :: 3 :: 4 :: Nil) :: - (1 :: 2 :: 3 :: Nil) :: - (1 :: 2 :: Nil) :: - (Nil) :: - Nil + def UnspecifiedVersionPart = "Unspecified" + def DefaultVersionPart = "Default" + def DefaultBuildProperties = "project/build.properties" + def SbtVersionProperty = "sbt.version" + val ConfigurationName = "sbt.boot.properties" + val JarBasePath = "/sbt/" + def userConfigurationPath = "/" + ConfigurationName + def defaultConfigurationPath = JarBasePath + ConfigurationName + val baseResourcePaths: List[String] = userConfigurationPath :: defaultConfigurationPath :: Nil + def resourcePaths(sbtVersion: Option[String]): List[String] = + versionParts(sbtVersion) flatMap { part => + baseResourcePaths map { base => + base + part + } + } + def fallbackParts: List[String] = "" :: Nil + def versionParts(version: Option[String]): List[String] = + version match { + case None => 
UnspecifiedVersionPart :: fallbackParts + case Some(v) => versionParts(v) + } + def versionParts(version: String): List[String] = + { + val pattern = Pattern.compile("""(\d+)(\.\d+)(\.\d+)(-.*)?""") + val m = pattern.matcher(version) + if (m.matches()) + subPartsIndices flatMap { is => fullMatchOnly(is.map(m.group)) } + else + noMatchParts + } + def noMatchParts: List[String] = DefaultVersionPart :: fallbackParts + private[this] def fullMatchOnly(groups: List[String]): Option[String] = + if (groups.forall(neNull)) Some(groups.mkString) else None - // the location of project/build.properties and the name of the property within that file - // that configures the sbt version is configured in sbt.boot.properties. - // We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself - def guessSbtVersion: Option[String] = - { - val props = Pre.readProperties(new File(DefaultBuildProperties)) - Option(props.getProperty(SbtVersionProperty)) - } + private[this] def subPartsIndices = + (1 :: 2 :: 3 :: 4 :: Nil) :: + (1 :: 2 :: 3 :: Nil) :: + (1 :: 2 :: Nil) :: + (Nil) :: + Nil - def resolveAgainst(baseDirectory: File): List[URI] = - directoryURI(baseDirectory) :: - directoryURI(new File(System.getProperty("user.home"))) :: - toDirectory(classLocation(getClass).toURI) :: - Nil + // the location of project/build.properties and the name of the property within that file + // that configures the sbt version is configured in sbt.boot.properties. 
+ // We have to hard code them here in order to use them to determine the location of sbt.boot.properties itself + def guessSbtVersion: Option[String] = + { + val props = Pre.readProperties(new File(DefaultBuildProperties)) + Option(props.getProperty(SbtVersionProperty)) + } - def classLocation(cl: Class[_]): URL = - { - val codeSource = cl.getProtectionDomain.getCodeSource - if(codeSource == null) error("No class location for " + cl) - else codeSource.getLocation - } - // single-arg constructor doesn't properly escape - def filePathURI(path: String): URI = { - if(path.startsWith("file:")) new URI(path) - else { - val f = new File(path) - new URI(if(f.isAbsolute) "file" else null, path, null) - } - } - def directoryURI(dir: File): URI = directoryURI(dir.toURI) - def directoryURI(uri: URI): URI = - { - assert(uri.isAbsolute) - val str = uri.toASCIIString - val dirStr = if(str.endsWith("/")) str else str + "/" - (new URI(dirStr)).normalize - } + def resolveAgainst(baseDirectory: File): List[URI] = + directoryURI(baseDirectory) :: + directoryURI(new File(System.getProperty("user.home"))) :: + toDirectory(classLocation(getClass).toURI) :: + Nil - def toDirectory(uri: URI): URI = - try - { - val file = new File(uri) - val newFile = if(file.isFile) file.getParentFile else file - directoryURI(newFile) - } - catch { case _: Exception => uri } - private[this] def neNull: AnyRef => Boolean = _ ne null + def classLocation(cl: Class[_]): URL = + { + val codeSource = cl.getProtectionDomain.getCodeSource + if (codeSource == null) error("No class location for " + cl) + else codeSource.getLocation + } + // single-arg constructor doesn't properly escape + def filePathURI(path: String): URI = { + if (path.startsWith("file:")) new URI(path) + else { + val f = new File(path) + new URI(if (f.isAbsolute) "file" else null, path, null) + } + } + def directoryURI(dir: File): URI = directoryURI(dir.toURI) + def directoryURI(uri: URI): URI = + { + assert(uri.isAbsolute) + val str = 
uri.toASCIIString + val dirStr = if (str.endsWith("/")) str else str + "/" + (new URI(dirStr)).normalize + } + + def toDirectory(uri: URI): URI = + try { + val file = new File(uri) + val newFile = if (file.isFile) file.getParentFile else file + directoryURI(newFile) + } catch { case _: Exception => uri } + private[this] def neNull: AnyRef => Boolean = _ ne null } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala index 659573550..791d73d72 100644 --- a/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala +++ b/launch/src/main/scala/xsbt/boot/ConfigurationParser.scala @@ -3,263 +3,255 @@ */ package xsbt.boot - import Pre._ import ConfigurationParser._ import java.lang.Character.isWhitespace -import java.io.{BufferedReader, File, FileInputStream, InputStreamReader, Reader, StringReader} -import java.net.{MalformedURLException, URL} -import java.util.regex.{Matcher,Pattern} +import java.io.{ BufferedReader, File, FileInputStream, InputStreamReader, Reader, StringReader } +import java.net.{ MalformedURLException, URL } +import java.util.regex.{ Matcher, Pattern } import Matcher.quoteReplacement import scala.collection.immutable.List -object ConfigurationParser -{ - def trim(s: Array[String]) = s.map(_.trim).toList - def ids(value: String) = trim(substituteVariables(value).split(",")).filter(isNonEmpty) +object ConfigurationParser { + def trim(s: Array[String]) = s.map(_.trim).toList + def ids(value: String) = trim(substituteVariables(value).split(",")).filter(isNonEmpty) - private[this] lazy val VarPattern = Pattern.compile("""\$\{([\w.]+)(\-(.*))?\}""") - def substituteVariables(s: String): String = if(s.indexOf('$') >= 0) substituteVariables0(s) else s - // scala.util.Regex brought in 30kB, so we code it explicitly - def substituteVariables0(s: String): String = - { - val m = VarPattern.matcher(s) - val b = new StringBuffer - while(m.find()) - { - val key 
= m.group(1) - val defined = System.getProperty(key) - val value = - if(defined ne null) - defined - else - { - val default = m.group(3) - if(default eq null) m.group() else substituteVariables(default) - } - m.appendReplacement(b, quoteReplacement(value)) - } - m.appendTail(b) - b.toString - } - - implicit val readIDs = ids _ + private[this] lazy val VarPattern = Pattern.compile("""\$\{([\w.]+)(\-(.*))?\}""") + def substituteVariables(s: String): String = if (s.indexOf('$') >= 0) substituteVariables0(s) else s + // scala.util.Regex brought in 30kB, so we code it explicitly + def substituteVariables0(s: String): String = + { + val m = VarPattern.matcher(s) + val b = new StringBuffer + while (m.find()) { + val key = m.group(1) + val defined = System.getProperty(key) + val value = + if (defined ne null) + defined + else { + val default = m.group(3) + if (default eq null) m.group() else substituteVariables(default) + } + m.appendReplacement(b, quoteReplacement(value)) + } + m.appendTail(b) + b.toString + } + + implicit val readIDs = ids _ } -class ConfigurationParser -{ - def apply(file: File): LaunchConfiguration = Using(newReader(file))(apply) - def apply(s: String): LaunchConfiguration = Using(new StringReader(s))(apply) - def apply(reader: Reader): LaunchConfiguration = Using(new BufferedReader(reader))(apply) - private def apply(in: BufferedReader): LaunchConfiguration = - processSections(processLines(readLine(in, Nil, 0))) - private final def readLine(in: BufferedReader, accum: List[Line], index: Int): List[Line] = - in.readLine match { - case null => accum.reverse - case line => readLine(in, ParseLine(line,index) ::: accum, index+1) - } - private def newReader(file: File) = new InputStreamReader(new FileInputStream(file), "UTF-8") - def readRepositoriesConfig(file: File): List[Repository.Repository] = - Using(newReader(file))(readRepositoriesConfig) - def readRepositoriesConfig(reader: Reader): List[Repository.Repository] = - Using(new 
BufferedReader(reader))(readRepositoriesConfig) - def readRepositoriesConfig(s: String): List[Repository.Repository] = - Using(new StringReader(s))(readRepositoriesConfig) - private def readRepositoriesConfig(in: BufferedReader): List[Repository.Repository] = - processRepositoriesConfig(processLines(readLine(in, Nil, 0))) - def processRepositoriesConfig(sections: SectionMap): List[Repository.Repository] = - processSection(sections, "repositories", getRepositories)._1 - // section -> configuration instance processing - def processSections(sections: SectionMap): LaunchConfiguration = - { - val ((scalaVersion, scalaClassifiers), m1) = processSection(sections, "scala", getScala) - val ((app, appClassifiers), m2) = processSection(m1, "app", getApplication) - val (defaultRepositories, m3) = processSection(m2, "repositories", getRepositories) - val (boot, m4) = processSection(m3, "boot", getBoot) - val (logging, m5) = processSection(m4, "log", getLogging) - val (properties, m6) = processSection(m5, "app-properties", getAppProperties) - val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy) - val (serverOptions, m8) = processSection(m7, "server", getServer) - check(m8, "section") - val classifiers = Classifiers(scalaClassifiers, appClassifiers) - val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories - val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos) - - // TODO - Read server properties... 
- new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions) - } - def getScala(m: LabelMap) = - { - val (scalaVersion, m1) = getVersion(m, "Scala version", "scala.version") - val (scalaClassifiers, m2) = getClassifiers(m1, "Scala classifiers") - check(m2, "label") - (scalaVersion, scalaClassifiers) - } - def getClassifiers(m: LabelMap, label: String): (Value[List[String]], LabelMap) = - process(m, "classifiers", processClassifiers(label)) - def processClassifiers(label: String)(value: Option[String]): Value[List[String]] = - value.map(readValue[List[String]](label)) getOrElse new Explicit(Nil) - - def getVersion(m: LabelMap, label: String, defaultName: String): (Value[String], LabelMap) = process(m, "version", processVersion(label, defaultName)) - def processVersion(label: String, defaultName: String)(value: Option[String]): Value[String] = - value.map(readValue[String](label)).getOrElse(new Implicit(defaultName, None)) - - def readValue[T](label: String)(implicit read: String => T): String => Value[T] = value0 => - { - val value = substituteVariables(value0) - if(isEmpty(value)) error(label + " cannot be empty (omit declaration to use the default)") - try { parsePropertyValue(label, value)(Value.readImplied[T]) } - catch { case e: BootException => new Explicit(read(value)) } - } - def processSection[T](sections: SectionMap, name: String, f: LabelMap => T) = - process[String,LabelMap,T](sections, name, m => f(m default(x => None))) - def process[K,V,T](sections: ListMap[K,V], name: K, f: V => T): (T, ListMap[K,V]) = ( f(sections(name)), sections - name) - def check(map: ListMap[String, _], label: String): Unit = if(map.isEmpty) () else error(map.keys.mkString("Invalid " + label + "(s): ", ",","")) - def check[T](label: String, pair: (T, ListMap[String, _])): T = { check(pair._2, label); pair._1 } - def id(map: LabelMap, name: String, default: String): (String, LabelMap) = - (substituteVariables(orElse(getOrNone(map, name), 
default)), map - name) - def getOrNone[K,V](map: ListMap[K,Option[V]], k: K) = orElse(map.get(k), None) - def ids(map: LabelMap, name: String, default: List[String]) = - { - val result = map(name) map ConfigurationParser.ids - (orElse(result, default), map - name) - } - def bool(map: LabelMap, name: String, default: Boolean): (Boolean, LabelMap) = - { - val (b, m) = id(map, name, default.toString) - (toBoolean(b), m) - } - - def toFiles(paths: List[String]): List[File] = paths.map(toFile) - def toFile(path: String): File = new File(substituteVariables(path).replace('/', File.separatorChar))// if the path is relative, it will be resolved by Launch later - def file(map: LabelMap, name: String, default: File): (File, LabelMap) = - (orElse(getOrNone(map, name).map(toFile), default), map - name) - def optfile(map: LabelMap, name: String): (Option[File], LabelMap) = - (getOrNone(map, name).map(toFile), map - name) - def getIvy(m: LabelMap): (Option[File], List[String], Boolean, Option[File]) = - { - val (ivyHome, m1) = optfile(m, "ivy-home") - val (checksums, m2) = ids(m1, "checksums", BootConfiguration.DefaultChecksums) - val (overrideRepos, m3) = bool(m2, "override-build-repos", false) - val (repoConfig, m4) = optfile(m3, "repository-config") - check(m4, "label") - (ivyHome, checksums, overrideRepos, repoConfig filter (_.exists)) - } - def getBoot(m: LabelMap): BootSetup = - { - val (dir, m1) = file(m, "directory", toFile("project/boot")) - val (props, m2) = file(m1, "properties", toFile("project/build.properties")) - val (search, m3) = getSearch(m2, props) - val (enableQuick, m4) = bool(m3, "quick-option", false) - val (promptFill, m5) = bool(m4, "prompt-fill", false) - val (promptCreate, m6) = id(m5, "prompt-create", "") - val (lock, m7) = bool(m6, "lock", true) - check(m7, "label") - BootSetup(dir, lock, props, search, promptCreate, enableQuick, promptFill) - } - def getLogging(m: LabelMap): Logging = check("label", process(m, "level", getLevel)) - def getLevel(m: 
Option[String]) = m.map(LogLevel.apply).getOrElse(new Logging(LogLevel.Info)) - def getSearch(m: LabelMap, defaultPath: File): (Search, LabelMap) = - ids(m, "search", Nil) match - { - case (Nil, newM) => (Search.none, newM) - case (tpe :: Nil, newM) => (Search(tpe, List(defaultPath)), newM) - case (tpe :: paths, newM) => (Search(tpe, toFiles(paths)), newM) - } +class ConfigurationParser { + def apply(file: File): LaunchConfiguration = Using(newReader(file))(apply) + def apply(s: String): LaunchConfiguration = Using(new StringReader(s))(apply) + def apply(reader: Reader): LaunchConfiguration = Using(new BufferedReader(reader))(apply) + private def apply(in: BufferedReader): LaunchConfiguration = + processSections(processLines(readLine(in, Nil, 0))) + private final def readLine(in: BufferedReader, accum: List[Line], index: Int): List[Line] = + in.readLine match { + case null => accum.reverse + case line => readLine(in, ParseLine(line, index) ::: accum, index + 1) + } + private def newReader(file: File) = new InputStreamReader(new FileInputStream(file), "UTF-8") + def readRepositoriesConfig(file: File): List[Repository.Repository] = + Using(newReader(file))(readRepositoriesConfig) + def readRepositoriesConfig(reader: Reader): List[Repository.Repository] = + Using(new BufferedReader(reader))(readRepositoriesConfig) + def readRepositoriesConfig(s: String): List[Repository.Repository] = + Using(new StringReader(s))(readRepositoriesConfig) + private def readRepositoriesConfig(in: BufferedReader): List[Repository.Repository] = + processRepositoriesConfig(processLines(readLine(in, Nil, 0))) + def processRepositoriesConfig(sections: SectionMap): List[Repository.Repository] = + processSection(sections, "repositories", getRepositories)._1 + // section -> configuration instance processing + def processSections(sections: SectionMap): LaunchConfiguration = + { + val ((scalaVersion, scalaClassifiers), m1) = processSection(sections, "scala", getScala) + val ((app, appClassifiers), 
m2) = processSection(m1, "app", getApplication) + val (defaultRepositories, m3) = processSection(m2, "repositories", getRepositories) + val (boot, m4) = processSection(m3, "boot", getBoot) + val (logging, m5) = processSection(m4, "log", getLogging) + val (properties, m6) = processSection(m5, "app-properties", getAppProperties) + val ((ivyHome, checksums, isOverrideRepos, rConfigFile), m7) = processSection(m6, "ivy", getIvy) + val (serverOptions, m8) = processSection(m7, "server", getServer) + check(m8, "section") + val classifiers = Classifiers(scalaClassifiers, appClassifiers) + val repositories = rConfigFile map readRepositoriesConfig getOrElse defaultRepositories + val ivyOptions = IvyOptions(ivyHome, classifiers, repositories, checksums, isOverrideRepos) - def getApplication(m: LabelMap): (Application, Value[List[String]]) = - { - val (org, m1) = id(m, "org", BootConfiguration.SbtOrg) - val (name, m2) = id(m1, "name", "sbt") - val (rev, m3) = getVersion(m2, name + " version", name + ".version") - val (main, m4) = id(m3, "class", "xsbt.Main") - val (components, m5) = ids(m4, "components", List("default")) - val (crossVersioned, m6) = id(m5, "cross-versioned", CrossVersionUtil.binaryString) - val (resources, m7) = ids(m6, "resources", Nil) - val (classifiers, m8) = getClassifiers(m7, "Application classifiers") - check(m8, "label") - val classpathExtra = toArray(toFiles(resources)) - val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra) - (app, classifiers) - } - def getServer(m: LabelMap): (Option[ServerConfiguration]) = - { - val (lock, m1) = optfile(m, "lock") - // TODO - JVM args - val (args, m2) = optfile(m1, "jvmargs") - val (props, m3) = optfile(m2, "jvmprops") - lock map { file => - ServerConfiguration(file, args, props) - } - } - def getRepositories(m: LabelMap): List[Repository.Repository] = - { - import Repository.{Ivy, Maven, Predefined} - val BootOnly = "bootOnly" - val MvnComp = 
"mavenCompatible" - val DescriptorOptional = "descriptorOptional" - val DontCheckConsistency = "skipConsistencyCheck" - val OptSet = Set(BootOnly, MvnComp, DescriptorOptional, DontCheckConsistency) - m.toList.map { - case (key, None) => Predefined(key) - case (key, Some(BootOnly)) => Predefined(key, true) - case (key, Some(value)) => - val r = trim(substituteVariables(value).split(",",7)) - val url = try { new URL(r(0)) } catch { case e: MalformedURLException => error("Invalid URL specified for '" + key + "': " + e.getMessage) } - val (optionPart, patterns) = r.tail.partition (OptSet.contains(_)) - val options = (optionPart.contains(BootOnly), optionPart.contains(MvnComp), optionPart.contains(DescriptorOptional), optionPart.contains(DontCheckConsistency)) - (patterns, options) match { - case (both :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, both, both, mavenCompatible=mc, bootOnly=bo, descriptorOptional=dso, skipConsistencyCheck=cc) - case (ivy :: art :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, ivy, art, mavenCompatible=mc, bootOnly=bo, descriptorOptional=dso, skipConsistencyCheck=cc) - case (Nil, (true, false, false, cc)) => Maven(key, url, bootOnly=true) - case (Nil, (false, false, false, false)) => Maven(key, url) - case _ => error("Could not parse %s: %s".format(key, value)) - } - } - } - def getAppProperties(m: LabelMap): List[AppProperty] = - for((name, Some(value)) <- m.toList) yield - { - val map = ListMap( trim(value.split(",")).map(parsePropertyDefinition(name)) : _*) - AppProperty(name)(map.get("quick"), map.get("new"), map.get("fill")) - } - def parsePropertyDefinition(name: String)(value: String) = value.split("=",2) match { - case Array(mode,value) => (mode, parsePropertyValue(name, value)(defineProperty(name))) - case x => error("Invalid property definition '" + x + "' for property '" + name + "'") - } - def defineProperty(name: String)(action: String, requiredArg: String, optionalArg: Option[String]) = - action match - { - case "prompt" => new 
PromptProperty(requiredArg, optionalArg) - case "set" => new SetProperty(requiredArg) - case _ => error("Unknown action '" + action + "' for property '" + name + "'") - } - private[this] lazy val propertyPattern = Pattern.compile("""(.+)\((.*)\)(?:\[(.*)\])?""") // examples: prompt(Version)[1.0] or set(1.0) - def parsePropertyValue[T](name: String, definition: String)(f: (String, String, Option[String]) => T): T = - { - val m = propertyPattern.matcher(definition) - if(!m.matches()) error("Invalid property definition '" + definition + "' for property '" + name + "'") - val optionalArg = m.group(3) - f(m.group(1), m.group(2), if(optionalArg eq null) None else Some(optionalArg)) - } + // TODO - Read server properties... + new LaunchConfiguration(scalaVersion, ivyOptions, app, boot, logging, properties, serverOptions) + } + def getScala(m: LabelMap) = + { + val (scalaVersion, m1) = getVersion(m, "Scala version", "scala.version") + val (scalaClassifiers, m2) = getClassifiers(m1, "Scala classifiers") + check(m2, "label") + (scalaVersion, scalaClassifiers) + } + def getClassifiers(m: LabelMap, label: String): (Value[List[String]], LabelMap) = + process(m, "classifiers", processClassifiers(label)) + def processClassifiers(label: String)(value: Option[String]): Value[List[String]] = + value.map(readValue[List[String]](label)) getOrElse new Explicit(Nil) - type LabelMap = ListMap[String, Option[String]] - // section-name -> label -> value - type SectionMap = ListMap[String, LabelMap] - def processLines(lines: List[Line]): SectionMap = - { - type State = (SectionMap, Option[String]) - val s: State = - ( ( (ListMap.empty.default(x => ListMap.empty[String,Option[String]]), None): State) /: lines ) { - case (x, Comment) => x - case ( (map, _), s: Section ) => (map, Some(s.name)) - case ( (_, None), l: Labeled ) => error("Label " + l.label + " is not in a section") - case ( (map, s @ Some(section)), l: Labeled ) => - val sMap = map(section) - if( sMap.contains(l.label) ) 
error("Duplicate label '" + l.label + "' in section '" + section + "'") - else ( map(section) = (sMap(l.label) = l.value), s ) - } - s._1 - } + def getVersion(m: LabelMap, label: String, defaultName: String): (Value[String], LabelMap) = process(m, "version", processVersion(label, defaultName)) + def processVersion(label: String, defaultName: String)(value: Option[String]): Value[String] = + value.map(readValue[String](label)).getOrElse(new Implicit(defaultName, None)) + + def readValue[T](label: String)(implicit read: String => T): String => Value[T] = value0 => + { + val value = substituteVariables(value0) + if (isEmpty(value)) error(label + " cannot be empty (omit declaration to use the default)") + try { parsePropertyValue(label, value)(Value.readImplied[T]) } + catch { case e: BootException => new Explicit(read(value)) } + } + def processSection[T](sections: SectionMap, name: String, f: LabelMap => T) = + process[String, LabelMap, T](sections, name, m => f(m default (x => None))) + def process[K, V, T](sections: ListMap[K, V], name: K, f: V => T): (T, ListMap[K, V]) = (f(sections(name)), sections - name) + def check(map: ListMap[String, _], label: String): Unit = if (map.isEmpty) () else error(map.keys.mkString("Invalid " + label + "(s): ", ",", "")) + def check[T](label: String, pair: (T, ListMap[String, _])): T = { check(pair._2, label); pair._1 } + def id(map: LabelMap, name: String, default: String): (String, LabelMap) = + (substituteVariables(orElse(getOrNone(map, name), default)), map - name) + def getOrNone[K, V](map: ListMap[K, Option[V]], k: K) = orElse(map.get(k), None) + def ids(map: LabelMap, name: String, default: List[String]) = + { + val result = map(name) map ConfigurationParser.ids + (orElse(result, default), map - name) + } + def bool(map: LabelMap, name: String, default: Boolean): (Boolean, LabelMap) = + { + val (b, m) = id(map, name, default.toString) + (toBoolean(b), m) + } + + def toFiles(paths: List[String]): List[File] = 
paths.map(toFile) + def toFile(path: String): File = new File(substituteVariables(path).replace('/', File.separatorChar)) // if the path is relative, it will be resolved by Launch later + def file(map: LabelMap, name: String, default: File): (File, LabelMap) = + (orElse(getOrNone(map, name).map(toFile), default), map - name) + def optfile(map: LabelMap, name: String): (Option[File], LabelMap) = + (getOrNone(map, name).map(toFile), map - name) + def getIvy(m: LabelMap): (Option[File], List[String], Boolean, Option[File]) = + { + val (ivyHome, m1) = optfile(m, "ivy-home") + val (checksums, m2) = ids(m1, "checksums", BootConfiguration.DefaultChecksums) + val (overrideRepos, m3) = bool(m2, "override-build-repos", false) + val (repoConfig, m4) = optfile(m3, "repository-config") + check(m4, "label") + (ivyHome, checksums, overrideRepos, repoConfig filter (_.exists)) + } + def getBoot(m: LabelMap): BootSetup = + { + val (dir, m1) = file(m, "directory", toFile("project/boot")) + val (props, m2) = file(m1, "properties", toFile("project/build.properties")) + val (search, m3) = getSearch(m2, props) + val (enableQuick, m4) = bool(m3, "quick-option", false) + val (promptFill, m5) = bool(m4, "prompt-fill", false) + val (promptCreate, m6) = id(m5, "prompt-create", "") + val (lock, m7) = bool(m6, "lock", true) + check(m7, "label") + BootSetup(dir, lock, props, search, promptCreate, enableQuick, promptFill) + } + def getLogging(m: LabelMap): Logging = check("label", process(m, "level", getLevel)) + def getLevel(m: Option[String]) = m.map(LogLevel.apply).getOrElse(new Logging(LogLevel.Info)) + def getSearch(m: LabelMap, defaultPath: File): (Search, LabelMap) = + ids(m, "search", Nil) match { + case (Nil, newM) => (Search.none, newM) + case (tpe :: Nil, newM) => (Search(tpe, List(defaultPath)), newM) + case (tpe :: paths, newM) => (Search(tpe, toFiles(paths)), newM) + } + + def getApplication(m: LabelMap): (Application, Value[List[String]]) = + { + val (org, m1) = id(m, "org", 
BootConfiguration.SbtOrg) + val (name, m2) = id(m1, "name", "sbt") + val (rev, m3) = getVersion(m2, name + " version", name + ".version") + val (main, m4) = id(m3, "class", "xsbt.Main") + val (components, m5) = ids(m4, "components", List("default")) + val (crossVersioned, m6) = id(m5, "cross-versioned", CrossVersionUtil.binaryString) + val (resources, m7) = ids(m6, "resources", Nil) + val (classifiers, m8) = getClassifiers(m7, "Application classifiers") + check(m8, "label") + val classpathExtra = toArray(toFiles(resources)) + val app = new Application(org, name, rev, main, components, LaunchCrossVersion(crossVersioned), classpathExtra) + (app, classifiers) + } + def getServer(m: LabelMap): (Option[ServerConfiguration]) = + { + val (lock, m1) = optfile(m, "lock") + // TODO - JVM args + val (args, m2) = optfile(m1, "jvmargs") + val (props, m3) = optfile(m2, "jvmprops") + lock map { file => + ServerConfiguration(file, args, props) + } + } + def getRepositories(m: LabelMap): List[Repository.Repository] = + { + import Repository.{ Ivy, Maven, Predefined } + val BootOnly = "bootOnly" + val MvnComp = "mavenCompatible" + val DescriptorOptional = "descriptorOptional" + val DontCheckConsistency = "skipConsistencyCheck" + val OptSet = Set(BootOnly, MvnComp, DescriptorOptional, DontCheckConsistency) + m.toList.map { + case (key, None) => Predefined(key) + case (key, Some(BootOnly)) => Predefined(key, true) + case (key, Some(value)) => + val r = trim(substituteVariables(value).split(",", 7)) + val url = try { new URL(r(0)) } catch { case e: MalformedURLException => error("Invalid URL specified for '" + key + "': " + e.getMessage) } + val (optionPart, patterns) = r.tail.partition(OptSet.contains(_)) + val options = (optionPart.contains(BootOnly), optionPart.contains(MvnComp), optionPart.contains(DescriptorOptional), optionPart.contains(DontCheckConsistency)) + (patterns, options) match { + case (both :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, both, both, mavenCompatible = mc, 
bootOnly = bo, descriptorOptional = dso, skipConsistencyCheck = cc) + case (ivy :: art :: Nil, (bo, mc, dso, cc)) => Ivy(key, url, ivy, art, mavenCompatible = mc, bootOnly = bo, descriptorOptional = dso, skipConsistencyCheck = cc) + case (Nil, (true, false, false, cc)) => Maven(key, url, bootOnly = true) + case (Nil, (false, false, false, false)) => Maven(key, url) + case _ => error("Could not parse %s: %s".format(key, value)) + } + } + } + def getAppProperties(m: LabelMap): List[AppProperty] = + for ((name, Some(value)) <- m.toList) yield { + val map = ListMap(trim(value.split(",")).map(parsePropertyDefinition(name)): _*) + AppProperty(name)(map.get("quick"), map.get("new"), map.get("fill")) + } + def parsePropertyDefinition(name: String)(value: String) = value.split("=", 2) match { + case Array(mode, value) => (mode, parsePropertyValue(name, value)(defineProperty(name))) + case x => error("Invalid property definition '" + x + "' for property '" + name + "'") + } + def defineProperty(name: String)(action: String, requiredArg: String, optionalArg: Option[String]) = + action match { + case "prompt" => new PromptProperty(requiredArg, optionalArg) + case "set" => new SetProperty(requiredArg) + case _ => error("Unknown action '" + action + "' for property '" + name + "'") + } + private[this] lazy val propertyPattern = Pattern.compile("""(.+)\((.*)\)(?:\[(.*)\])?""") // examples: prompt(Version)[1.0] or set(1.0) + def parsePropertyValue[T](name: String, definition: String)(f: (String, String, Option[String]) => T): T = + { + val m = propertyPattern.matcher(definition) + if (!m.matches()) error("Invalid property definition '" + definition + "' for property '" + name + "'") + val optionalArg = m.group(3) + f(m.group(1), m.group(2), if (optionalArg eq null) None else Some(optionalArg)) + } + + type LabelMap = ListMap[String, Option[String]] + // section-name -> label -> value + type SectionMap = ListMap[String, LabelMap] + def processLines(lines: List[Line]): SectionMap = 
+ { + type State = (SectionMap, Option[String]) + val s: State = + (((ListMap.empty.default(x => ListMap.empty[String, Option[String]]), None): State) /: lines) { + case (x, Comment) => x + case ((map, _), s: Section) => (map, Some(s.name)) + case ((_, None), l: Labeled) => error("Label " + l.label + " is not in a section") + case ((map, s @ Some(section)), l: Labeled) => + val sMap = map(section) + if (sMap.contains(l.label)) error("Duplicate label '" + l.label + "' in section '" + section + "'") + else (map(section) = (sMap(l.label) = l.value), s) + } + s._1 + } } @@ -269,49 +261,46 @@ final class Section(val name: String) extends Line object Comment extends Line class ParseException(val content: String, val line: Int, val col: Int, val msg: String) - extends BootException( "[" + (line+1) + ", " + (col+1) + "]" + msg + "\n" + content + "\n" + List.fill(col)(" ").mkString + "^" ) + extends BootException("[" + (line + 1) + ", " + (col + 1) + "]" + msg + "\n" + content + "\n" + List.fill(col)(" ").mkString + "^") -object ParseLine -{ - def apply(content: String, line: Int) = - { - def error(col: Int, msg: String) = throw new ParseException(content, line, col, msg) - def check(condition: Boolean)(col: Int, msg: String) = if(condition) () else error(col, msg) +object ParseLine { + def apply(content: String, line: Int) = + { + def error(col: Int, msg: String) = throw new ParseException(content, line, col, msg) + def check(condition: Boolean)(col: Int, msg: String) = if (condition) () else error(col, msg) - val trimmed = trimLeading(content) - val offset = content.length - trimmed.length + val trimmed = trimLeading(content) + val offset = content.length - trimmed.length - def section = - { - val closing = trimmed.indexOf(']', 1) - check(closing > 0)(content.length, "Expected ']', found end of line") - val extra = trimmed.substring(closing+1) - val trimmedExtra = trimLeading(extra) - check(isEmpty(trimmedExtra))(content.length - trimmedExtra.length, "Expected end of 
line, found '" + extra + "'") - new Section(trimmed.substring(1,closing).trim) - } - def labeled = - { - trimmed.split(":",2) match { - case Array(label, value) => - val trimmedValue = value.trim - check(isNonEmpty(trimmedValue))(content.indexOf(':'), "Value for '" + label + "' was empty") - new Labeled(label, Some(trimmedValue)) - case x => new Labeled(x.mkString, None) - } - } - - if(isEmpty(trimmed)) Nil - else - { - val processed = - trimmed.charAt(0) match - { - case '#' => Comment - case '[' => section - case _ => labeled - } - processed :: Nil - } - } + def section = + { + val closing = trimmed.indexOf(']', 1) + check(closing > 0)(content.length, "Expected ']', found end of line") + val extra = trimmed.substring(closing + 1) + val trimmedExtra = trimLeading(extra) + check(isEmpty(trimmedExtra))(content.length - trimmedExtra.length, "Expected end of line, found '" + extra + "'") + new Section(trimmed.substring(1, closing).trim) + } + def labeled = + { + trimmed.split(":", 2) match { + case Array(label, value) => + val trimmedValue = value.trim + check(isNonEmpty(trimmedValue))(content.indexOf(':'), "Value for '" + label + "' was empty") + new Labeled(label, Some(trimmedValue)) + case x => new Labeled(x.mkString, None) + } + } + + if (isEmpty(trimmed)) Nil + else { + val processed = + trimmed.charAt(0) match { + case '#' => Comment + case '[' => section + case _ => labeled + } + processed :: Nil + } + } } diff --git a/launch/src/main/scala/xsbt/boot/Create.scala b/launch/src/main/scala/xsbt/boot/Create.scala index 17e549781..8e77dc93d 100644 --- a/launch/src/main/scala/xsbt/boot/Create.scala +++ b/launch/src/main/scala/xsbt/boot/Create.scala @@ -4,55 +4,46 @@ package xsbt.boot import Pre._ -import java.io.{File, FileInputStream, FileOutputStream} -import java.util.{Locale, Properties} +import java.io.{ File, FileInputStream, FileOutputStream } +import java.util.{ Locale, Properties } import scala.collection.immutable.List -object Initialize -{ - lazy val 
selectCreate = (_: AppProperty).create - lazy val selectQuick = (_: AppProperty).quick - lazy val selectFill = (_: AppProperty).fill - def create(file: File, promptCreate: String, enableQuick: Boolean, spec: List[AppProperty]) - { - readLine(promptCreate + " (y/N" + (if(enableQuick) "/s" else "") + ") ") match - { - case None => declined("") - case Some(line) => - line.toLowerCase(Locale.ENGLISH) match - { - case "y" | "yes" => process(file, spec, selectCreate) - case "s" => process(file, spec, selectQuick) - case "n" | "no" | "" => declined("") - case x => - System.out.println(" '" + x + "' not understood.") - create(file, promptCreate, enableQuick, spec) - } - } - } - def fill(file: File, spec: List[AppProperty]): Unit = process(file, spec, selectFill) - def process(file: File, appProperties: List[AppProperty], select: AppProperty => Option[PropertyInit]) - { - val properties = readProperties(file) - val uninitialized = - for(property <- appProperties; init <- select(property) if properties.getProperty(property.name) == null) yield - initialize(properties, property.name, init) - if(!uninitialized.isEmpty) writeProperties(properties, file, "") - } - def initialize(properties: Properties, name: String, init: PropertyInit) - { - init match - { - case set: SetProperty => properties.setProperty(name, set.value) - case prompt: PromptProperty => - def noValue = declined("No value provided for " + prompt.label) - readLine(prompt.label + prompt.default.toList.map(" [" + _ + "]").mkString + ": ") match - { - case None => noValue - case Some(line) => - val value = if(isEmpty(line)) orElse(prompt.default, noValue) else line - properties.setProperty(name, value) - } - } - } +object Initialize { + lazy val selectCreate = (_: AppProperty).create + lazy val selectQuick = (_: AppProperty).quick + lazy val selectFill = (_: AppProperty).fill + def create(file: File, promptCreate: String, enableQuick: Boolean, spec: List[AppProperty]) { + readLine(promptCreate + " (y/N" + (if 
(enableQuick) "/s" else "") + ") ") match { + case None => declined("") + case Some(line) => + line.toLowerCase(Locale.ENGLISH) match { + case "y" | "yes" => process(file, spec, selectCreate) + case "s" => process(file, spec, selectQuick) + case "n" | "no" | "" => declined("") + case x => + System.out.println(" '" + x + "' not understood.") + create(file, promptCreate, enableQuick, spec) + } + } + } + def fill(file: File, spec: List[AppProperty]): Unit = process(file, spec, selectFill) + def process(file: File, appProperties: List[AppProperty], select: AppProperty => Option[PropertyInit]) { + val properties = readProperties(file) + val uninitialized = + for (property <- appProperties; init <- select(property) if properties.getProperty(property.name) == null) yield initialize(properties, property.name, init) + if (!uninitialized.isEmpty) writeProperties(properties, file, "") + } + def initialize(properties: Properties, name: String, init: PropertyInit) { + init match { + case set: SetProperty => properties.setProperty(name, set.value) + case prompt: PromptProperty => + def noValue = declined("No value provided for " + prompt.label) + readLine(prompt.label + prompt.default.toList.map(" [" + _ + "]").mkString + ": ") match { + case None => noValue + case Some(line) => + val value = if (isEmpty(line)) orElse(prompt.default, noValue) else line + properties.setProperty(name, value) + } + } + } } diff --git a/launch/src/main/scala/xsbt/boot/Enumeration.scala b/launch/src/main/scala/xsbt/boot/Enumeration.scala index e65309f2a..49e90ea1d 100644 --- a/launch/src/main/scala/xsbt/boot/Enumeration.scala +++ b/launch/src/main/scala/xsbt/boot/Enumeration.scala @@ -6,25 +6,21 @@ package xsbt.boot import Pre._ import scala.collection.immutable.List -class Enumeration extends Serializable -{ - def elements: List[Value] = members - private lazy val members: List[Value] = - { - val c = getClass - val correspondingFields = ListMap( c.getDeclaredFields.map(f => (f.getName, f)) : _*) - 
c.getMethods.toList flatMap { method => - if(method.getParameterTypes.length == 0 && classOf[Value].isAssignableFrom(method.getReturnType)) - { - for(field <- correspondingFields.get(method.getName) if field.getType == method.getReturnType) yield - method.invoke(this).asInstanceOf[Value] - } - else - Nil - } - } - def value(s: String) = new Value(s, 0) - def value(s: String, i: Int) = new Value(s, i) - final class Value(override val toString: String, val id: Int) extends Serializable - def toValue(s: String): Value = elements.find(_.toString == s).getOrElse(error("Expected one of " + elements.mkString(",") + " (got: " + s + ")")) +class Enumeration extends Serializable { + def elements: List[Value] = members + private lazy val members: List[Value] = + { + val c = getClass + val correspondingFields = ListMap(c.getDeclaredFields.map(f => (f.getName, f)): _*) + c.getMethods.toList flatMap { method => + if (method.getParameterTypes.length == 0 && classOf[Value].isAssignableFrom(method.getReturnType)) { + for (field <- correspondingFields.get(method.getName) if field.getType == method.getReturnType) yield method.invoke(this).asInstanceOf[Value] + } else + Nil + } + } + def value(s: String) = new Value(s, 0) + def value(s: String, i: Int) = new Value(s, i) + final class Value(override val toString: String, val id: Int) extends Serializable + def toValue(s: String): Value = elements.find(_.toString == s).getOrElse(error("Expected one of " + elements.mkString(",") + " (got: " + s + ")")) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/FilteredLoader.scala b/launch/src/main/scala/xsbt/boot/FilteredLoader.scala index fbc46bb54..860a488e3 100644 --- a/launch/src/main/scala/xsbt/boot/FilteredLoader.scala +++ b/launch/src/main/scala/xsbt/boot/FilteredLoader.scala @@ -3,37 +3,37 @@ */ package xsbt.boot -import BootConfiguration.{FjbgPackage, IvyPackage, SbtBootPackage, ScalaPackage} +import BootConfiguration.{ FjbgPackage, IvyPackage, SbtBootPackage, 
ScalaPackage } import scala.collection.immutable.Stream -/** A custom class loader to ensure the main part of sbt doesn't load any Scala or -* Ivy classes from the jar containing the loader. */ -private[boot] final class BootFilteredLoader(parent: ClassLoader) extends ClassLoader(parent) -{ - @throws(classOf[ClassNotFoundException]) - override final def loadClass(className: String, resolve: Boolean): Class[_] = - { - // note that we allow xsbti.* - if(className.startsWith(ScalaPackage) || className.startsWith(IvyPackage) || className.startsWith(SbtBootPackage) || className.startsWith(FjbgPackage)) - throw new ClassNotFoundException(className) - else - super.loadClass(className, resolve) - } - override def getResources(name: String) = excludedLoader.getResources(name) - override def getResource(name: String) = excludedLoader.getResource(name) +/** + * A custom class loader to ensure the main part of sbt doesn't load any Scala or + * Ivy classes from the jar containing the loader. + */ +private[boot] final class BootFilteredLoader(parent: ClassLoader) extends ClassLoader(parent) { + @throws(classOf[ClassNotFoundException]) + override final def loadClass(className: String, resolve: Boolean): Class[_] = + { + // note that we allow xsbti.* + if (className.startsWith(ScalaPackage) || className.startsWith(IvyPackage) || className.startsWith(SbtBootPackage) || className.startsWith(FjbgPackage)) + throw new ClassNotFoundException(className) + else + super.loadClass(className, resolve) + } + override def getResources(name: String) = excludedLoader.getResources(name) + override def getResource(name: String) = excludedLoader.getResource(name) - // the loader to use when a resource is excluded. This needs to be at least parent.getParent so that it skips parent. parent contains - // resources included in the launcher, which need to be ignored. 
Now that the launcher can be unrooted (not the application entry point), - // this needs to be the Java extension loader (the loader with getParent == null) - private val excludedLoader = Loaders(parent.getParent).head + // the loader to use when a resource is excluded. This needs to be at least parent.getParent so that it skips parent. parent contains + // resources included in the launcher, which need to be ignored. Now that the launcher can be unrooted (not the application entry point), + // this needs to be the Java extension loader (the loader with getParent == null) + private val excludedLoader = Loaders(parent.getParent).head } -object Loaders -{ - def apply(loader: ClassLoader): Stream[ClassLoader] = - { - def loaders(loader: ClassLoader, accum: Stream[ClassLoader]): Stream[ClassLoader] = - if(loader eq null) accum else loaders(loader.getParent, Stream.cons(loader, accum)) - loaders(loader, Stream.empty) - } +object Loaders { + def apply(loader: ClassLoader): Stream[ClassLoader] = + { + def loaders(loader: ClassLoader, accum: Stream[ClassLoader]): Stream[ClassLoader] = + if (loader eq null) accum else loaders(loader.getParent, Stream.cons(loader, accum)) + loaders(loader, Stream.empty) + } } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Find.scala b/launch/src/main/scala/xsbt/boot/Find.scala index 7ee823431..fddd451c4 100644 --- a/launch/src/main/scala/xsbt/boot/Find.scala +++ b/launch/src/main/scala/xsbt/boot/Find.scala @@ -9,52 +9,47 @@ import java.net.URI import scala.collection.immutable.List object Find { def apply(config: LaunchConfiguration, currentDirectory: File) = (new Find(config))(currentDirectory) } -class Find(config: LaunchConfiguration) -{ - import config.boot.search - def apply(currentDirectory: File) = - { - val current = currentDirectory.getCanonicalFile - assert(current.isDirectory) +class Find(config: LaunchConfiguration) { + import config.boot.search + def apply(currentDirectory: File) = + { + val current = 
currentDirectory.getCanonicalFile + assert(current.isDirectory) - lazy val fromRoot = path(current, Nil).filter(hasProject).map(_.getCanonicalFile) - val found: Option[File] = - search.tpe match - { - case Search.RootFirst => fromRoot.headOption - case Search.Nearest => fromRoot.lastOption - case Search.Only => - if(hasProject(current)) - Some(current) - else - fromRoot match - { - case Nil => Some(current) - case head :: Nil => Some(head) - case xs => - System.err.println("Search method is 'only' and multiple ancestor directories match:\n\t" + fromRoot.mkString("\n\t")) - System.exit(1) - None - } - case _ => Some(current) - } - val baseDirectory = orElse(found, current) - System.setProperty("user.dir", baseDirectory.getAbsolutePath) - (ResolvePaths(config, baseDirectory), baseDirectory) - } - private def hasProject(f: File) = f.isDirectory && search.paths.forall(p => ResolvePaths(f, p).exists) - private def path(f: File, acc: List[File]): List[File] = if(f eq null) acc else path(f.getParentFile, f :: acc) + lazy val fromRoot = path(current, Nil).filter(hasProject).map(_.getCanonicalFile) + val found: Option[File] = + search.tpe match { + case Search.RootFirst => fromRoot.headOption + case Search.Nearest => fromRoot.lastOption + case Search.Only => + if (hasProject(current)) + Some(current) + else + fromRoot match { + case Nil => Some(current) + case head :: Nil => Some(head) + case xs => + System.err.println("Search method is 'only' and multiple ancestor directories match:\n\t" + fromRoot.mkString("\n\t")) + System.exit(1) + None + } + case _ => Some(current) + } + val baseDirectory = orElse(found, current) + System.setProperty("user.dir", baseDirectory.getAbsolutePath) + (ResolvePaths(config, baseDirectory), baseDirectory) + } + private def hasProject(f: File) = f.isDirectory && search.paths.forall(p => ResolvePaths(f, p).exists) + private def path(f: File, acc: List[File]): List[File] = if (f eq null) acc else path(f.getParentFile, f :: acc) } -object 
ResolvePaths -{ - def apply(config: LaunchConfiguration, baseDirectory: File): LaunchConfiguration = - config.map(f => apply(baseDirectory, f)) - def apply(baseDirectory: File, f: File): File = - if (f.isAbsolute) f - else - { - assert(baseDirectory.isDirectory) // if base directory is not a directory, URI.resolve will not work properly - val uri = new URI(null, null, f.getPath, null) - new File(baseDirectory.toURI.resolve(uri)) - } +object ResolvePaths { + def apply(config: LaunchConfiguration, baseDirectory: File): LaunchConfiguration = + config.map(f => apply(baseDirectory, f)) + def apply(baseDirectory: File, f: File): File = + if (f.isAbsolute) f + else { + assert(baseDirectory.isDirectory) // if base directory is not a directory, URI.resolve will not work properly + val uri = new URI(null, null, f.getPath, null) + new File(baseDirectory.toURI.resolve(uri)) + } } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/JAnsi.scala b/launch/src/main/scala/xsbt/boot/JAnsi.scala index e9c6a5ff0..70f160341 100644 --- a/launch/src/main/scala/xsbt/boot/JAnsi.scala +++ b/launch/src/main/scala/xsbt/boot/JAnsi.scala @@ -1,24 +1,23 @@ package xsbt.boot - import Pre._ +import Pre._ -object JAnsi -{ - def uninstall(loader: ClassLoader): Unit = callJAnsi("systemUninstall", loader) - def install(loader: ClassLoader): Unit = callJAnsi("systemInstall", loader) +object JAnsi { + def uninstall(loader: ClassLoader): Unit = callJAnsi("systemUninstall", loader) + def install(loader: ClassLoader): Unit = callJAnsi("systemInstall", loader) - private[this] def callJAnsi(methodName: String, loader: ClassLoader): Unit = if(isWindows && !isCygwin) callJAnsiMethod(methodName, loader) - private[this] def callJAnsiMethod(methodName: String, loader: ClassLoader): Unit = - try { - val c = Class.forName("org.fusesource.jansi.AnsiConsole", true, loader) - c.getMethod(methodName).invoke(null) - } catch { - case ignore: ClassNotFoundException => - /* The below code intentionally 
traps everything. It technically shouldn't trap the + private[this] def callJAnsi(methodName: String, loader: ClassLoader): Unit = if (isWindows && !isCygwin) callJAnsiMethod(methodName, loader) + private[this] def callJAnsiMethod(methodName: String, loader: ClassLoader): Unit = + try { + val c = Class.forName("org.fusesource.jansi.AnsiConsole", true, loader) + c.getMethod(methodName).invoke(null) + } catch { + case ignore: ClassNotFoundException => + /* The below code intentionally traps everything. It technically shouldn't trap the * non-StackOverflowError VirtualMachineErrors and AWTError would be weird, but this is PermGen * mitigation code that should not render sbt completely unusable if jansi initialization fails. * [From Mark Harrah, https://github.com/sbt/sbt/pull/633#issuecomment-11957578]. */ - case ex: Throwable => println("Jansi found on class path but initialization failed: " + ex) - } + case ex: Throwable => println("Jansi found on class path but initialization failed: " + ex) + } } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/Launch.scala b/launch/src/main/scala/xsbt/boot/Launch.scala index 688a769ee..afe4b285b 100644 --- a/launch/src/main/scala/xsbt/boot/Launch.scala +++ b/launch/src/main/scala/xsbt/boot/Launch.scala @@ -4,9 +4,9 @@ package xsbt.boot import Pre._ -import BootConfiguration.{CompilerModuleName, JAnsiVersion, LibraryModuleName} +import BootConfiguration.{ CompilerModuleName, JAnsiVersion, LibraryModuleName } import java.io.File -import java.net.{URL, URLClassLoader, URI} +import java.net.{ URL, URLClassLoader, URI } import java.util.concurrent.Callable import scala.collection.immutable.List import scala.annotation.tailrec @@ -14,391 +14,385 @@ import ConfigurationStorageState._ class LauncherArguments(val args: List[String], val isLocate: Boolean) -object Launch -{ - def apply(arguments: LauncherArguments): Option[Int] = apply( (new File("")).getAbsoluteFile , arguments ) +object Launch { + def 
apply(arguments: LauncherArguments): Option[Int] = apply((new File("")).getAbsoluteFile, arguments) - def apply(currentDirectory: File, arguments: LauncherArguments): Option[Int] = { - val (configLocation, newArgs2, state) = Configuration.find(arguments.args, currentDirectory) - val config = state match { - case SerializedFile => LaunchConfiguration.restore(configLocation) - case PropertiesFile => parseAndInitializeConfig(configLocation, currentDirectory) - } - if(arguments.isLocate) { - if(!newArgs2.isEmpty) { - // TODO - Print the arguments without exploding proguard size. - System.err.println("Warning: --locate option ignores arguments.") - } - locate(currentDirectory, config) - } else { - // First check to see if there are java system properties we need to set. Then launch the application. - updateProperties(config) - launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArgs2)) - } - } - /** Locate a server, print where it is, and exit. */ - def locate(currentDirectory: File, config: LaunchConfiguration): Option[Int] = { - config.serverConfig match { - case Some(_) => - val uri = ServerLocator.locate(currentDirectory, config) - System.out.println(uri.toASCIIString) - Some(0) - case None => sys.error(s"${config.app.groupID}-${config.app.main} is not configured as a server.") - } - } - /** Some hackery to allow sys.props to be configured via a file. If this launch config has - * a valid file configured, we load the properties and and apply them to this jvm. 
- */ - def updateProperties(config: LaunchConfiguration): Unit = { - config.serverConfig match { - case Some(config) => - config.jvmPropsFile match { - case Some(file) if file.exists => - try setSystemProperties(readProperties(file)) - catch { - case e: Exception => throw new RuntimeException(s"Unable to load server properties file: ${file}", e) - } - case _ => - } - case None => - } - } - - /** Parses the configuration *and* runs the initialization code that will remove variable references. */ - def parseAndInitializeConfig(configLocation: URL, currentDirectory: File): LaunchConfiguration = - { - val (parsed, bd) = parseConfiguration(configLocation, currentDirectory) - resolveConfig(parsed) - } - /** Parse configuration and return it and the baseDirectory of the launch. */ - def parseConfiguration(configLocation: URL, currentDirectory: File): (LaunchConfiguration, File) = - Find(Configuration.parse(configLocation, currentDirectory), currentDirectory) - - /** Setups the Initialize object so we can fill in system properties in the configuration */ - def resolveConfig(parsed: LaunchConfiguration): LaunchConfiguration = - { - // Set up initialize. - val propertiesFile = parsed.boot.properties - import parsed.boot.{enableQuick, promptCreate, promptFill} - if(isNonEmpty(promptCreate) && !propertiesFile.exists) - Initialize.create(propertiesFile, promptCreate, enableQuick, parsed.appProperties) - else if(promptFill) - Initialize.fill(propertiesFile, parsed.appProperties) + def apply(currentDirectory: File, arguments: LauncherArguments): Option[Int] = { + val (configLocation, newArgs2, state) = Configuration.find(arguments.args, currentDirectory) + val config = state match { + case SerializedFile => LaunchConfiguration.restore(configLocation) + case PropertiesFile => parseAndInitializeConfig(configLocation, currentDirectory) + } + if (arguments.isLocate) { + if (!newArgs2.isEmpty) { + // TODO - Print the arguments without exploding proguard size. 
+ System.err.println("Warning: --locate option ignores arguments.") + } + locate(currentDirectory, config) + } else { + // First check to see if there are java system properties we need to set. Then launch the application. + updateProperties(config) + launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArgs2)) + } + } + /** Locate a server, print where it is, and exit. */ + def locate(currentDirectory: File, config: LaunchConfiguration): Option[Int] = { + config.serverConfig match { + case Some(_) => + val uri = ServerLocator.locate(currentDirectory, config) + System.out.println(uri.toASCIIString) + Some(0) + case None => sys.error(s"${config.app.groupID}-${config.app.main} is not configured as a server.") + } + } + /** + * Some hackery to allow sys.props to be configured via a file. If this launch config has + * a valid file configured, we load the properties and and apply them to this jvm. + */ + def updateProperties(config: LaunchConfiguration): Unit = { + config.serverConfig match { + case Some(config) => + config.jvmPropsFile match { + case Some(file) if file.exists => + try setSystemProperties(readProperties(file)) + catch { + case e: Exception => throw new RuntimeException(s"Unable to load server properties file: ${file}", e) + } + case _ => + } + case None => + } + } - parsed.logging.debug("Parsed configuration: " + parsed) - val resolved = ResolveValues(parsed) - resolved.logging.debug("Resolved configuration: " + resolved) - resolved - } + /** Parses the configuration *and* runs the initialization code that will remove variable references. */ + def parseAndInitializeConfig(configLocation: URL, currentDirectory: File): LaunchConfiguration = + { + val (parsed, bd) = parseConfiguration(configLocation, currentDirectory) + resolveConfig(parsed) + } + /** Parse configuration and return it and the baseDirectory of the launch. 
*/ + def parseConfiguration(configLocation: URL, currentDirectory: File): (LaunchConfiguration, File) = + Find(Configuration.parse(configLocation, currentDirectory), currentDirectory) - /** Create run configuration we'll use to launch the app. */ - def makeRunConfig(currentDirectory: File, config: LaunchConfiguration, arguments: List[String]): RunConfiguration = - new RunConfiguration(config.getScalaVersion, config.app.toID, currentDirectory, arguments) + /** Setups the Initialize object so we can fill in system properties in the configuration */ + def resolveConfig(parsed: LaunchConfiguration): LaunchConfiguration = + { + // Set up initialize. + val propertiesFile = parsed.boot.properties + import parsed.boot.{ enableQuick, promptCreate, promptFill } + if (isNonEmpty(promptCreate) && !propertiesFile.exists) + Initialize.create(propertiesFile, promptCreate, enableQuick, parsed.appProperties) + else if (promptFill) + Initialize.fill(propertiesFile, parsed.appProperties) - /** The actual mechanism used to run a launched application. */ - def run(launcher: xsbti.Launcher)(config: RunConfiguration): xsbti.MainResult = - { - import config._ - val appProvider: xsbti.AppProvider = launcher.app(app, orNull(scalaVersion)) // takes ~40 ms when no update is required - val appConfig: xsbti.AppConfiguration = new AppConfiguration(toArray(arguments), workingDirectory, appProvider) + parsed.logging.debug("Parsed configuration: " + parsed) + val resolved = ResolveValues(parsed) + resolved.logging.debug("Resolved configuration: " + resolved) + resolved + } - // TODO - Jansi probably should be configurable via some other mechanism... 
- JAnsi.install(launcher.topLoader) - try { - val main = appProvider.newMain() - try { withContextLoader(appProvider.loader)(main.run(appConfig)) } - catch { case e: xsbti.FullReload => if(e.clean) delete(launcher.bootDirectory); throw e } - } finally { - JAnsi.uninstall(launcher.topLoader) - } - } - final def launch(run: RunConfiguration => xsbti.MainResult)(config: RunConfiguration): Option[Int] = - { - run(config) match - { - case e: xsbti.Exit => Some(e.code) - case c: xsbti.Continue => None - case r: xsbti.Reboot => launch(run)(new RunConfiguration(Option(r.scalaVersion), r.app, r.baseDirectory, r.arguments.toList)) - case x => throw new BootException("Invalid main result: " + x + (if(x eq null) "" else " (class: " + x.getClass + ")")) - } - } - private[this] def withContextLoader[T](loader: ClassLoader)(eval: => T): T = - { - val oldLoader = Thread.currentThread.getContextClassLoader - Thread.currentThread.setContextClassLoader(loader) - try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) } - } - - // Cache of classes for lookup later. - val ServerMainClass = classOf[xsbti.ServerMain] - val AppMainClass = classOf[xsbti.AppMain] + /** Create run configuration we'll use to launch the app. */ + def makeRunConfig(currentDirectory: File, config: LaunchConfiguration, arguments: List[String]): RunConfiguration = + new RunConfiguration(config.getScalaVersion, config.app.toID, currentDirectory, arguments) + + /** The actual mechanism used to run a launched application. */ + def run(launcher: xsbti.Launcher)(config: RunConfiguration): xsbti.MainResult = + { + import config._ + val appProvider: xsbti.AppProvider = launcher.app(app, orNull(scalaVersion)) // takes ~40 ms when no update is required + val appConfig: xsbti.AppConfiguration = new AppConfiguration(toArray(arguments), workingDirectory, appProvider) + + // TODO - Jansi probably should be configurable via some other mechanism... 
+ JAnsi.install(launcher.topLoader) + try { + val main = appProvider.newMain() + try { withContextLoader(appProvider.loader)(main.run(appConfig)) } + catch { case e: xsbti.FullReload => if (e.clean) delete(launcher.bootDirectory); throw e } + } finally { + JAnsi.uninstall(launcher.topLoader) + } + } + final def launch(run: RunConfiguration => xsbti.MainResult)(config: RunConfiguration): Option[Int] = + { + run(config) match { + case e: xsbti.Exit => Some(e.code) + case c: xsbti.Continue => None + case r: xsbti.Reboot => launch(run)(new RunConfiguration(Option(r.scalaVersion), r.app, r.baseDirectory, r.arguments.toList)) + case x => throw new BootException("Invalid main result: " + x + (if (x eq null) "" else " (class: " + x.getClass + ")")) + } + } + private[this] def withContextLoader[T](loader: ClassLoader)(eval: => T): T = + { + val oldLoader = Thread.currentThread.getContextClassLoader + Thread.currentThread.setContextClassLoader(loader) + try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) } + } + + // Cache of classes for lookup later. 
+ val ServerMainClass = classOf[xsbti.ServerMain] + val AppMainClass = classOf[xsbti.AppMain] } final class RunConfiguration(val scalaVersion: Option[String], val app: xsbti.ApplicationID, val workingDirectory: File, val arguments: List[String]) -import BootConfiguration.{appDirectoryName, baseDirectoryName, extractScalaVersion, ScalaDirectoryName, TestLoadScalaClasses, ScalaOrg} -class Launch private[xsbt](val bootDirectory: File, val lockBoot: Boolean, val ivyOptions: IvyOptions) extends xsbti.Launcher -{ - import ivyOptions.{checksums => checksumsList, classifiers, repositories} - bootDirectory.mkdirs - private val scalaProviders = new Cache[(String, String), String, xsbti.ScalaProvider]((x, y) => getScalaProvider(x._1, x._2, y)) - def getScala(version: String): xsbti.ScalaProvider = getScala(version, "") - def getScala(version: String, reason: String): xsbti.ScalaProvider = getScala(version, reason, ScalaOrg) - def getScala(version: String, reason: String, scalaOrg: String) = scalaProviders((scalaOrg, version), reason) - def app(id: xsbti.ApplicationID, version: String): xsbti.AppProvider = app(id, Option(version)) - def app(id: xsbti.ApplicationID, scalaVersion: Option[String]): xsbti.AppProvider = - getAppProvider(id, scalaVersion, false) +import BootConfiguration.{ appDirectoryName, baseDirectoryName, extractScalaVersion, ScalaDirectoryName, TestLoadScalaClasses, ScalaOrg } +class Launch private[xsbt] (val bootDirectory: File, val lockBoot: Boolean, val ivyOptions: IvyOptions) extends xsbti.Launcher { + import ivyOptions.{ checksums => checksumsList, classifiers, repositories } + bootDirectory.mkdirs + private val scalaProviders = new Cache[(String, String), String, xsbti.ScalaProvider]((x, y) => getScalaProvider(x._1, x._2, y)) + def getScala(version: String): xsbti.ScalaProvider = getScala(version, "") + def getScala(version: String, reason: String): xsbti.ScalaProvider = getScala(version, reason, ScalaOrg) + def getScala(version: String, reason: String, 
scalaOrg: String) = scalaProviders((scalaOrg, version), reason) + def app(id: xsbti.ApplicationID, version: String): xsbti.AppProvider = app(id, Option(version)) + def app(id: xsbti.ApplicationID, scalaVersion: Option[String]): xsbti.AppProvider = + getAppProvider(id, scalaVersion, false) - val bootLoader = new BootFilteredLoader(getClass.getClassLoader) - val topLoader = if(isWindows && !isCygwin) jansiLoader(bootLoader) else bootLoader + val bootLoader = new BootFilteredLoader(getClass.getClassLoader) + val topLoader = if (isWindows && !isCygwin) jansiLoader(bootLoader) else bootLoader - val updateLockFile = if(lockBoot) Some(new File(bootDirectory, "sbt.boot.lock")) else None + val updateLockFile = if (lockBoot) Some(new File(bootDirectory, "sbt.boot.lock")) else None - def globalLock: xsbti.GlobalLock = Locks - def ivyHome = orNull(ivyOptions.ivyHome) - def ivyRepositories = (repositories: List[xsbti.Repository]).toArray - def appRepositories = ((repositories filterNot (_.bootOnly)): List[xsbti.Repository]).toArray - def isOverrideRepositories: Boolean = ivyOptions.isOverrideRepositories - def checksums = checksumsList.toArray[String] + def globalLock: xsbti.GlobalLock = Locks + def ivyHome = orNull(ivyOptions.ivyHome) + def ivyRepositories = (repositories: List[xsbti.Repository]).toArray + def appRepositories = ((repositories filterNot (_.bootOnly)): List[xsbti.Repository]).toArray + def isOverrideRepositories: Boolean = ivyOptions.isOverrideRepositories + def checksums = checksumsList.toArray[String] - // JAnsi needs to be shared between Scala and the application so there aren't two competing versions - def jansiLoader(parent: ClassLoader): ClassLoader = - { - val id = AppID("org.fusesource.jansi", "jansi", JAnsiVersion, "", toArray(Nil), xsbti.CrossValue.Disabled, array()) - val configuration = makeConfiguration(ScalaOrg, None) - val jansiHome = appDirectory(new File(bootDirectory, baseDirectoryName(ScalaOrg, None)), id) - val module = appModule(id, None, 
false, "jansi") - def makeLoader(): ClassLoader = { - val urls = toURLs(wrapNull(jansiHome.listFiles(JarFilter))) - val loader = new URLClassLoader(urls, bootLoader) - checkLoader(loader, module, "org.fusesource.jansi.internal.WindowsSupport" :: Nil, loader) - } - val existingLoader = - if(jansiHome.exists) - try Some(makeLoader()) catch { case e: Exception => None } - else - None - existingLoader getOrElse { - update(module, "") - makeLoader() - } - } - def checkLoader[T](loader: ClassLoader, module: ModuleDefinition, testClasses: Seq[String], ifValid: T): T = - { - val missing = getMissing(loader, testClasses) - if(missing.isEmpty) - ifValid - else - module.retrieveCorrupt(missing) - } + // JAnsi needs to be shared between Scala and the application so there aren't two competing versions + def jansiLoader(parent: ClassLoader): ClassLoader = + { + val id = AppID("org.fusesource.jansi", "jansi", JAnsiVersion, "", toArray(Nil), xsbti.CrossValue.Disabled, array()) + val configuration = makeConfiguration(ScalaOrg, None) + val jansiHome = appDirectory(new File(bootDirectory, baseDirectoryName(ScalaOrg, None)), id) + val module = appModule(id, None, false, "jansi") + def makeLoader(): ClassLoader = { + val urls = toURLs(wrapNull(jansiHome.listFiles(JarFilter))) + val loader = new URLClassLoader(urls, bootLoader) + checkLoader(loader, module, "org.fusesource.jansi.internal.WindowsSupport" :: Nil, loader) + } + val existingLoader = + if (jansiHome.exists) + try Some(makeLoader()) catch { case e: Exception => None } + else + None + existingLoader getOrElse { + update(module, "") + makeLoader() + } + } + def checkLoader[T](loader: ClassLoader, module: ModuleDefinition, testClasses: Seq[String], ifValid: T): T = + { + val missing = getMissing(loader, testClasses) + if (missing.isEmpty) + ifValid + else + module.retrieveCorrupt(missing) + } - private[this] def makeConfiguration(scalaOrg: String, version: Option[String]): UpdateConfiguration = - new 
UpdateConfiguration(bootDirectory, ivyOptions.ivyHome, scalaOrg, version, repositories, checksumsList) + private[this] def makeConfiguration(scalaOrg: String, version: Option[String]): UpdateConfiguration = + new UpdateConfiguration(bootDirectory, ivyOptions.ivyHome, scalaOrg, version, repositories, checksumsList) - final def getAppProvider(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = - locked(new Callable[xsbti.AppProvider] { def call = getAppProvider0(id, explicitScalaVersion, forceAppUpdate) }) + final def getAppProvider(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = + locked(new Callable[xsbti.AppProvider] { def call = getAppProvider0(id, explicitScalaVersion, forceAppUpdate) }) - @tailrec private[this] final def getAppProvider0(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = - { - val app = appModule(id, explicitScalaVersion, true, "app") - /** Replace the version of an ApplicationID with the given one, if set. 
*/ - def resolveId(appVersion: Option[String], id: xsbti.ApplicationID) = appVersion map { v => - import id._ - AppID(groupID(), name(), v, mainClass(), mainComponents(), crossVersionedValue(), classpathExtra()) - } getOrElse id - val baseDirs = (resolvedVersion: Option[String]) => (base: File) => appBaseDirs(base, resolveId(resolvedVersion, id)) - def retrieve() = { - val (appv, sv) = update(app, "") - val scalaVersion = strictOr(explicitScalaVersion, sv) - new RetrievedModule(true, app, sv, appv, baseDirs(appv)(scalaHome(ScalaOrg, scalaVersion))) - } - val retrievedApp = - if(forceAppUpdate) - retrieve() - else - existing(app, ScalaOrg, explicitScalaVersion, baseDirs(None)) getOrElse retrieve() + @tailrec private[this] final def getAppProvider0(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider = + { + val app = appModule(id, explicitScalaVersion, true, "app") + /** Replace the version of an ApplicationID with the given one, if set. 
*/ + def resolveId(appVersion: Option[String], id: xsbti.ApplicationID) = appVersion map { v => + import id._ + AppID(groupID(), name(), v, mainClass(), mainComponents(), crossVersionedValue(), classpathExtra()) + } getOrElse id + val baseDirs = (resolvedVersion: Option[String]) => (base: File) => appBaseDirs(base, resolveId(resolvedVersion, id)) + def retrieve() = { + val (appv, sv) = update(app, "") + val scalaVersion = strictOr(explicitScalaVersion, sv) + new RetrievedModule(true, app, sv, appv, baseDirs(appv)(scalaHome(ScalaOrg, scalaVersion))) + } + val retrievedApp = + if (forceAppUpdate) + retrieve() + else + existing(app, ScalaOrg, explicitScalaVersion, baseDirs(None)) getOrElse retrieve() - val scalaVersion = getOrError(strictOr(explicitScalaVersion, retrievedApp.detectedScalaVersion), "No Scala version specified or detected") - val scalaProvider = getScala(scalaVersion, "(for " + id.name + ")") - val resolvedId = resolveId(retrievedApp.resolvedAppVersion, id) + val scalaVersion = getOrError(strictOr(explicitScalaVersion, retrievedApp.detectedScalaVersion), "No Scala version specified or detected") + val scalaProvider = getScala(scalaVersion, "(for " + id.name + ")") + val resolvedId = resolveId(retrievedApp.resolvedAppVersion, id) - val (missing, appProvider) = checkedAppProvider(resolvedId, retrievedApp, scalaProvider) - if(missing.isEmpty) - appProvider - else if(retrievedApp.fresh) - app.retrieveCorrupt(missing) - else - getAppProvider0(resolvedId, explicitScalaVersion, true) - } - def scalaHome(scalaOrg: String, scalaVersion: Option[String]): File = new File(bootDirectory, baseDirectoryName(scalaOrg, scalaVersion)) - def appHome(id: xsbti.ApplicationID, scalaVersion: Option[String]): File = appDirectory(scalaHome(ScalaOrg, scalaVersion), id) - def checkedAppProvider(id: xsbti.ApplicationID, module: RetrievedModule, scalaProvider: xsbti.ScalaProvider): (Iterable[String], xsbti.AppProvider) = - { - val p = appProvider(id, module, scalaProvider, 
appHome(id, Some(scalaProvider.version))) - val missing = getMissing(p.loader, id.mainClass :: Nil) - (missing, p) - } - private[this] def locked[T](c: Callable[T]): T = Locks(orNull(updateLockFile), c) - def getScalaProvider(scalaOrg: String, scalaVersion: String, reason: String): xsbti.ScalaProvider = - locked(new Callable[xsbti.ScalaProvider] { def call = getScalaProvider0(scalaOrg, scalaVersion, reason) }) + val (missing, appProvider) = checkedAppProvider(resolvedId, retrievedApp, scalaProvider) + if (missing.isEmpty) + appProvider + else if (retrievedApp.fresh) + app.retrieveCorrupt(missing) + else + getAppProvider0(resolvedId, explicitScalaVersion, true) + } + def scalaHome(scalaOrg: String, scalaVersion: Option[String]): File = new File(bootDirectory, baseDirectoryName(scalaOrg, scalaVersion)) + def appHome(id: xsbti.ApplicationID, scalaVersion: Option[String]): File = appDirectory(scalaHome(ScalaOrg, scalaVersion), id) + def checkedAppProvider(id: xsbti.ApplicationID, module: RetrievedModule, scalaProvider: xsbti.ScalaProvider): (Iterable[String], xsbti.AppProvider) = + { + val p = appProvider(id, module, scalaProvider, appHome(id, Some(scalaProvider.version))) + val missing = getMissing(p.loader, id.mainClass :: Nil) + (missing, p) + } + private[this] def locked[T](c: Callable[T]): T = Locks(orNull(updateLockFile), c) + def getScalaProvider(scalaOrg: String, scalaVersion: String, reason: String): xsbti.ScalaProvider = + locked(new Callable[xsbti.ScalaProvider] { def call = getScalaProvider0(scalaOrg, scalaVersion, reason) }) - private[this] final def getScalaProvider0(scalaOrg: String, scalaVersion: String, reason: String) = - { - val scalaM = scalaModule(scalaOrg, scalaVersion) - val (scalaHome, lib) = scalaDirs(scalaM, scalaOrg, scalaVersion) - val baseDirs = lib :: Nil - def provider(retrieved: RetrievedModule): xsbti.ScalaProvider = { - val p = scalaProvider(scalaVersion, retrieved, topLoader, lib) - checkLoader(p.loader, retrieved.definition, 
TestLoadScalaClasses, p) - } - existing(scalaM, scalaOrg, Some(scalaVersion), _ => baseDirs) flatMap { mod => - try Some(provider(mod)) - catch { case e: Exception => None } - } getOrElse { - val (_, scalaVersion) = update(scalaM, reason) - provider( new RetrievedModule(true, scalaM, scalaVersion, baseDirs) ) - } - } + private[this] final def getScalaProvider0(scalaOrg: String, scalaVersion: String, reason: String) = + { + val scalaM = scalaModule(scalaOrg, scalaVersion) + val (scalaHome, lib) = scalaDirs(scalaM, scalaOrg, scalaVersion) + val baseDirs = lib :: Nil + def provider(retrieved: RetrievedModule): xsbti.ScalaProvider = { + val p = scalaProvider(scalaVersion, retrieved, topLoader, lib) + checkLoader(p.loader, retrieved.definition, TestLoadScalaClasses, p) + } + existing(scalaM, scalaOrg, Some(scalaVersion), _ => baseDirs) flatMap { mod => + try Some(provider(mod)) + catch { case e: Exception => None } + } getOrElse { + val (_, scalaVersion) = update(scalaM, reason) + provider(new RetrievedModule(true, scalaM, scalaVersion, baseDirs)) + } + } - def existing(module: ModuleDefinition, scalaOrg: String, explicitScalaVersion: Option[String], baseDirs: File => List[File]): Option[RetrievedModule] = - { - val filter = new java.io.FileFilter { - val explicitName = explicitScalaVersion.map(sv => baseDirectoryName(scalaOrg, Some(sv))) - def accept(file: File) = file.isDirectory && explicitName.forall(_ == file.getName) - } - val retrieved = wrapNull(bootDirectory.listFiles(filter)) flatMap { scalaDir => - val appDir = directory(scalaDir, module.target) - if(appDir.exists) - new RetrievedModule(false, module, extractScalaVersion(scalaDir), baseDirs(scalaDir)) :: Nil - else - Nil - } - retrieved.headOption - } - def directory(scalaDir: File, target: UpdateTarget): File = target match { - case _: UpdateScala => scalaDir - case ua: UpdateApp => appDirectory(scalaDir, ua.id.toID) - } - def appBaseDirs(scalaHome: File, id: xsbti.ApplicationID): List[File] = - { - val 
appHome = appDirectory(scalaHome, id) - val components = componentProvider(appHome) - appHome :: id.mainComponents.map(components.componentLocation).toList - } - def appDirectory(base: File, id: xsbti.ApplicationID): File = - new File(base, appDirectoryName(id, File.separator)) + def existing(module: ModuleDefinition, scalaOrg: String, explicitScalaVersion: Option[String], baseDirs: File => List[File]): Option[RetrievedModule] = + { + val filter = new java.io.FileFilter { + val explicitName = explicitScalaVersion.map(sv => baseDirectoryName(scalaOrg, Some(sv))) + def accept(file: File) = file.isDirectory && explicitName.forall(_ == file.getName) + } + val retrieved = wrapNull(bootDirectory.listFiles(filter)) flatMap { scalaDir => + val appDir = directory(scalaDir, module.target) + if (appDir.exists) + new RetrievedModule(false, module, extractScalaVersion(scalaDir), baseDirs(scalaDir)) :: Nil + else + Nil + } + retrieved.headOption + } + def directory(scalaDir: File, target: UpdateTarget): File = target match { + case _: UpdateScala => scalaDir + case ua: UpdateApp => appDirectory(scalaDir, ua.id.toID) + } + def appBaseDirs(scalaHome: File, id: xsbti.ApplicationID): List[File] = + { + val appHome = appDirectory(scalaHome, id) + val components = componentProvider(appHome) + appHome :: id.mainComponents.map(components.componentLocation).toList + } + def appDirectory(base: File, id: xsbti.ApplicationID): File = + new File(base, appDirectoryName(id, File.separator)) - def scalaDirs(module: ModuleDefinition, scalaOrg: String, scalaVersion: String): (File, File) = - { - val scalaHome = new File(bootDirectory, baseDirectoryName(scalaOrg, Some(scalaVersion))) - val libDirectory = new File(scalaHome, ScalaDirectoryName) - (scalaHome, libDirectory) - } + def scalaDirs(module: ModuleDefinition, scalaOrg: String, scalaVersion: String): (File, File) = + { + val scalaHome = new File(bootDirectory, baseDirectoryName(scalaOrg, Some(scalaVersion))) + val libDirectory = new 
File(scalaHome, ScalaDirectoryName) + (scalaHome, libDirectory) + } - def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = - new xsbti.AppProvider { - import Launch.{ServerMainClass,AppMainClass} - val scalaProvider = scalaProvider0 - val id = appID - def mainClasspath = app.fullClasspath - lazy val loader = app.createLoader(scalaProvider.loader) - // TODO - For some reason we can't call this from vanilla scala. We get a - // no such method exception UNLESS we're in the same project. - lazy val entryPoint: Class[T] forSome { type T } = - { - val c = Class.forName(id.mainClass, true, loader) - if(classOf[xsbti.AppMain].isAssignableFrom(c)) c - else if(PlainApplication.isPlainApplication(c)) c - else if(ServerApplication.isServerApplication(c)) c - else sys.error(s"${c} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have one of these static methods:\n"+ - " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)\n") - } - // Deprecated API. Remove when we can. 
- def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(AppMainClass) - def newMain(): xsbti.AppMain = { - if(ServerApplication.isServerApplication(entryPoint)) ServerApplication(this) - else if(PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) - else if(AppMainClass.isAssignableFrom(entryPoint)) mainClass.newInstance - else throw new IncompatibleClassChangeError(s"Main class ${entryPoint.getName} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have a valid `main` method.") - } - lazy val components = componentProvider(appHome) - } - def componentProvider(appHome: File) = new ComponentProvider(appHome, lockBoot) + def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider = + new xsbti.AppProvider { + import Launch.{ ServerMainClass, AppMainClass } + val scalaProvider = scalaProvider0 + val id = appID + def mainClasspath = app.fullClasspath + lazy val loader = app.createLoader(scalaProvider.loader) + // TODO - For some reason we can't call this from vanilla scala. We get a + // no such method exception UNLESS we're in the same project. + lazy val entryPoint: Class[T] forSome { type T } = + { + val c = Class.forName(id.mainClass, true, loader) + if (classOf[xsbti.AppMain].isAssignableFrom(c)) c + else if (PlainApplication.isPlainApplication(c)) c + else if (ServerApplication.isServerApplication(c)) c + else sys.error(s"${c} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have one of these static methods:\n" + + " * void main(String[] args)\n * int main(String[] args)\n * xsbti.Exit main(String[] args)\n") + } + // Deprecated API. Remove when we can. 
+ def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(AppMainClass) + def newMain(): xsbti.AppMain = { + if (ServerApplication.isServerApplication(entryPoint)) ServerApplication(this) + else if (PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint) + else if (AppMainClass.isAssignableFrom(entryPoint)) mainClass.newInstance + else throw new IncompatibleClassChangeError(s"Main class ${entryPoint.getName} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have a valid `main` method.") + } + lazy val components = componentProvider(appHome) + } + def componentProvider(appHome: File) = new ComponentProvider(appHome, lockBoot) - def scalaProvider(scalaVersion: String, module: RetrievedModule, parentLoader: ClassLoader, scalaLibDir: File): xsbti.ScalaProvider = new xsbti.ScalaProvider - { - def launcher = Launch.this - def version = scalaVersion - lazy val loader = module.createLoader(parentLoader) + def scalaProvider(scalaVersion: String, module: RetrievedModule, parentLoader: ClassLoader, scalaLibDir: File): xsbti.ScalaProvider = new xsbti.ScalaProvider { + def launcher = Launch.this + def version = scalaVersion + lazy val loader = module.createLoader(parentLoader) - def compilerJar = new File(scalaLibDir, CompilerModuleName + ".jar") - def libraryJar = new File(scalaLibDir, LibraryModuleName + ".jar") - def jars = module.fullClasspath - - def app(id: xsbti.ApplicationID) = Launch.this.app(id, Some(scalaVersion)) - } + def compilerJar = new File(scalaLibDir, CompilerModuleName + ".jar") + def libraryJar = new File(scalaLibDir, LibraryModuleName + ".jar") + def jars = module.fullClasspath - def appModule(id: xsbti.ApplicationID, scalaVersion: Option[String], getClassifiers: Boolean, tpe: String): ModuleDefinition = new ModuleDefinition( - configuration = makeConfiguration(ScalaOrg, scalaVersion), - target = new UpdateApp(Application(id), if(getClassifiers) Value.get(classifiers.app) else Nil, tpe), - 
failLabel = id.name + " " + id.version, - extraClasspath = id.classpathExtra - ) - def scalaModule(org: String, version: String): ModuleDefinition = new ModuleDefinition( - configuration = makeConfiguration(org, Some(version)), - target = new UpdateScala(Value.get(classifiers.forScala)), - failLabel = "Scala " + version, - extraClasspath = array() - ) - /** Returns the resolved appVersion (if this was an App), as well as the scalaVersion. */ - def update(mm: ModuleDefinition, reason: String): (Option[String], Option[String]) = - { - val result = ( new Update(mm.configuration) )(mm.target, reason) - if(result.success) result.appVersion -> result.scalaVersion else mm.retrieveFailed - } + def app(id: xsbti.ApplicationID) = Launch.this.app(id, Some(scalaVersion)) + } + + def appModule(id: xsbti.ApplicationID, scalaVersion: Option[String], getClassifiers: Boolean, tpe: String): ModuleDefinition = new ModuleDefinition( + configuration = makeConfiguration(ScalaOrg, scalaVersion), + target = new UpdateApp(Application(id), if (getClassifiers) Value.get(classifiers.app) else Nil, tpe), + failLabel = id.name + " " + id.version, + extraClasspath = id.classpathExtra + ) + def scalaModule(org: String, version: String): ModuleDefinition = new ModuleDefinition( + configuration = makeConfiguration(org, Some(version)), + target = new UpdateScala(Value.get(classifiers.forScala)), + failLabel = "Scala " + version, + extraClasspath = array() + ) + /** Returns the resolved appVersion (if this was an App), as well as the scalaVersion. 
*/ + def update(mm: ModuleDefinition, reason: String): (Option[String], Option[String]) = + { + val result = (new Update(mm.configuration))(mm.target, reason) + if (result.success) result.appVersion -> result.scalaVersion else mm.retrieveFailed + } } -object Launcher -{ - def apply(bootDirectory: File, repositories: List[Repository.Repository]): xsbti.Launcher = - apply(bootDirectory, IvyOptions(None, Classifiers(Nil, Nil), repositories, BootConfiguration.DefaultChecksums, false)) - def apply(bootDirectory: File, ivyOptions: IvyOptions): xsbti.Launcher = - apply(bootDirectory, ivyOptions, GetLocks.find) - def apply(bootDirectory: File, ivyOptions: IvyOptions, locks: xsbti.GlobalLock): xsbti.Launcher = - new Launch(bootDirectory, true, ivyOptions) { - override def globalLock = locks - } - def apply(explicit: LaunchConfiguration): xsbti.Launcher = - new Launch(explicit.boot.directory, explicit.boot.lock, explicit.ivyConfiguration) - def defaultAppProvider(baseDirectory: File): xsbti.AppProvider = getAppProvider(baseDirectory, Configuration.configurationOnClasspath) - def getAppProvider(baseDirectory: File, configLocation: URL): xsbti.AppProvider = - { - val parsed = ResolvePaths(Configuration.parse(configLocation, baseDirectory), baseDirectory) - Initialize.process(parsed.boot.properties, parsed.appProperties, Initialize.selectQuick) - val config = ResolveValues(parsed) - val launcher = apply(config) - launcher.app(config.app.toID, orNull(config.getScalaVersion)) - } +object Launcher { + def apply(bootDirectory: File, repositories: List[Repository.Repository]): xsbti.Launcher = + apply(bootDirectory, IvyOptions(None, Classifiers(Nil, Nil), repositories, BootConfiguration.DefaultChecksums, false)) + def apply(bootDirectory: File, ivyOptions: IvyOptions): xsbti.Launcher = + apply(bootDirectory, ivyOptions, GetLocks.find) + def apply(bootDirectory: File, ivyOptions: IvyOptions, locks: xsbti.GlobalLock): xsbti.Launcher = + new Launch(bootDirectory, true, ivyOptions) { + 
override def globalLock = locks + } + def apply(explicit: LaunchConfiguration): xsbti.Launcher = + new Launch(explicit.boot.directory, explicit.boot.lock, explicit.ivyConfiguration) + def defaultAppProvider(baseDirectory: File): xsbti.AppProvider = getAppProvider(baseDirectory, Configuration.configurationOnClasspath) + def getAppProvider(baseDirectory: File, configLocation: URL): xsbti.AppProvider = + { + val parsed = ResolvePaths(Configuration.parse(configLocation, baseDirectory), baseDirectory) + Initialize.process(parsed.boot.properties, parsed.appProperties, Initialize.selectQuick) + val config = ResolveValues(parsed) + val launcher = apply(config) + launcher.app(config.app.toID, orNull(config.getScalaVersion)) + } } -class ComponentProvider(baseDirectory: File, lockBoot: Boolean) extends xsbti.ComponentProvider -{ - def componentLocation(id: String): File = new File(baseDirectory, id) - def component(id: String) = wrapNull(componentLocation(id).listFiles).filter(_.isFile) - def defineComponent(id: String, files: Array[File]) = - { - val location = componentLocation(id) - if(location.exists) - throw new BootException("Cannot redefine component. ID: " + id + ", files: " + files.mkString(",")) - else - Copy(files.toList, location) - } - def addToComponent(id: String, files: Array[File]): Boolean = - Copy(files.toList, componentLocation(id)) - def lockFile = if(lockBoot) ComponentProvider.lockFile(baseDirectory) else null // null for the Java interface +class ComponentProvider(baseDirectory: File, lockBoot: Boolean) extends xsbti.ComponentProvider { + def componentLocation(id: String): File = new File(baseDirectory, id) + def component(id: String) = wrapNull(componentLocation(id).listFiles).filter(_.isFile) + def defineComponent(id: String, files: Array[File]) = + { + val location = componentLocation(id) + if (location.exists) + throw new BootException("Cannot redefine component. 
ID: " + id + ", files: " + files.mkString(",")) + else + Copy(files.toList, location) + } + def addToComponent(id: String, files: Array[File]): Boolean = + Copy(files.toList, componentLocation(id)) + def lockFile = if (lockBoot) ComponentProvider.lockFile(baseDirectory) else null // null for the Java interface } -object ComponentProvider -{ - def lockFile(baseDirectory: File) = - { - baseDirectory.mkdirs() - new File(baseDirectory, "sbt.components.lock") - } +object ComponentProvider { + def lockFile(baseDirectory: File) = + { + baseDirectory.mkdirs() + new File(baseDirectory, "sbt.components.lock") + } } diff --git a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala index be1f0fc4a..0b16327ba 100644 --- a/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala +++ b/launch/src/main/scala/xsbt/boot/LaunchConfiguration.scala @@ -9,22 +9,21 @@ import java.net.URL import scala.collection.immutable.List //TODO: use copy constructor, check size change -final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty], serverConfig: Option[ServerConfiguration]) -{ - def isServer: Boolean = serverConfig.isDefined - def getScalaVersion = { - val sv = Value.get(scalaVersion) - if(sv == "auto") None else Some(sv) - } +final case class LaunchConfiguration(scalaVersion: Value[String], ivyConfiguration: IvyOptions, app: Application, boot: BootSetup, logging: Logging, appProperties: List[AppProperty], serverConfig: Option[ServerConfiguration]) { + def isServer: Boolean = serverConfig.isDefined + def getScalaVersion = { + val sv = Value.get(scalaVersion) + if (sv == "auto") None else Some(sv) + } - def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration, app, boot, logging, appProperties, serverConfig) - def withApp(app: Application) = 
LaunchConfiguration(scalaVersion, ivyConfiguration, app, boot, logging, appProperties, serverConfig) - def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) - // TODO: withExplicit - def withVersions(newScalaVersion: String, newAppVersion: String, classifiers0: Classifiers) = - LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) + def withScalaVersion(newScalaVersion: String) = LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withApp(app: Application) = LaunchConfiguration(scalaVersion, ivyConfiguration, app, boot, logging, appProperties, serverConfig) + def withAppVersion(newAppVersion: String) = LaunchConfiguration(scalaVersion, ivyConfiguration, app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) + // TODO: withExplicit + def withVersions(newScalaVersion: String, newAppVersion: String, classifiers0: Classifiers) = + LaunchConfiguration(new Explicit(newScalaVersion), ivyConfiguration.copy(classifiers = classifiers0), app.withVersion(new Explicit(newAppVersion)), boot, logging, appProperties, serverConfig) - def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties, serverConfig.map(_ map f)) + def map(f: File => File) = LaunchConfiguration(scalaVersion, ivyConfiguration.map(f), app.map(f), boot.map(f), logging, appProperties, serverConfig.map(_ map f)) } object LaunchConfiguration { // Saves a launch configuration into a file. This is only safe if it is loaded by the *same* launcher version. 
@@ -44,104 +43,94 @@ final case class ServerConfiguration(lockFile: File, jvmArgs: Option[File], jvmP def map(f: File => File) = ServerConfiguration(f(lockFile), jvmArgs map f, jvmPropsFile map f) } -final case class IvyOptions(ivyHome: Option[File], classifiers: Classifiers, repositories: List[Repository.Repository], checksums: List[String], isOverrideRepositories: Boolean) -{ - def map(f: File => File) = IvyOptions(ivyHome.map(f), classifiers, repositories, checksums, isOverrideRepositories) +final case class IvyOptions(ivyHome: Option[File], classifiers: Classifiers, repositories: List[Repository.Repository], checksums: List[String], isOverrideRepositories: Boolean) { + def map(f: File => File) = IvyOptions(ivyHome.map(f), classifiers, repositories, checksums, isOverrideRepositories) } sealed trait Value[T] extends Serializable final class Explicit[T](val value: T) extends Value[T] { - override def toString = value.toString + override def toString = value.toString } -final class Implicit[T](val name: String, val default: Option[T]) extends Value[T] -{ - require(isNonEmpty(name), "Name cannot be empty") - override def toString = name + (default match { case Some(d) => "[" + d + "]"; case None => "" }) +final class Implicit[T](val name: String, val default: Option[T]) extends Value[T] { + require(isNonEmpty(name), "Name cannot be empty") + override def toString = name + (default match { case Some(d) => "[" + d + "]"; case None => "" }) } -object Value -{ - def get[T](v: Value[T]): T = v match { case e: Explicit[T] => e.value; case _ => throw new BootException("Unresolved version: " + v) } - def readImplied[T](s: String, name: String, default: Option[String])(implicit read: String => T): Value[T] = - if(s == "read") new Implicit(name, default map read) else error("Expected 'read', got '" + s +"'") +object Value { + def get[T](v: Value[T]): T = v match { case e: Explicit[T] => e.value; case _ => throw new BootException("Unresolved version: " + v) } + def 
readImplied[T](s: String, name: String, default: Option[String])(implicit read: String => T): Value[T] = + if (s == "read") new Implicit(name, default map read) else error("Expected 'read', got '" + s + "'") } final case class Classifiers(forScala: Value[List[String]], app: Value[List[String]]) object Classifiers { - def apply(forScala: List[String], app: List[String]):Classifiers = Classifiers(new Explicit(forScala), new Explicit(app)) + def apply(forScala: List[String], app: List[String]): Classifiers = Classifiers(new Explicit(forScala), new Explicit(app)) } -object LaunchCrossVersion -{ - def apply(s: String): xsbti.CrossValue = - s match { - case x if CrossVersionUtil.isFull(s) => xsbti.CrossValue.Full - case x if CrossVersionUtil.isBinary(s) => xsbti.CrossValue.Binary - case x if CrossVersionUtil.isDisabled(s) => xsbti.CrossValue.Disabled - case x => error("Unknown value '" + x + "' for property 'cross-versioned'") - } +object LaunchCrossVersion { + def apply(s: String): xsbti.CrossValue = + s match { + case x if CrossVersionUtil.isFull(s) => xsbti.CrossValue.Full + case x if CrossVersionUtil.isBinary(s) => xsbti.CrossValue.Binary + case x if CrossVersionUtil.isDisabled(s) => xsbti.CrossValue.Disabled + case x => error("Unknown value '" + x + "' for property 'cross-versioned'") + } } -final case class Application(groupID: String, name: String, version: Value[String], main: String, components: List[String], crossVersioned: xsbti.CrossValue, classpathExtra: Array[File]) -{ - def getVersion = Value.get(version) - def withVersion(newVersion: Value[String]) = Application(groupID, name, newVersion, main, components, crossVersioned, classpathExtra) - def toID = AppID(groupID, name, getVersion, main, toArray(components), crossVersioned, classpathExtra) - def map(f: File => File) = Application(groupID, name, version, main, components, crossVersioned, classpathExtra.map(f)) +final case class Application(groupID: String, name: String, version: Value[String], main: 
String, components: List[String], crossVersioned: xsbti.CrossValue, classpathExtra: Array[File]) { + def getVersion = Value.get(version) + def withVersion(newVersion: Value[String]) = Application(groupID, name, newVersion, main, components, crossVersioned, classpathExtra) + def toID = AppID(groupID, name, getVersion, main, toArray(components), crossVersioned, classpathExtra) + def map(f: File => File) = Application(groupID, name, version, main, components, crossVersioned, classpathExtra.map(f)) } -final case class AppID(groupID: String, name: String, version: String, mainClass: String, mainComponents: Array[String], crossVersionedValue: xsbti.CrossValue, classpathExtra: Array[File]) extends xsbti.ApplicationID -{ - def crossVersioned: Boolean = crossVersionedValue != xsbti.CrossValue.Disabled +final case class AppID(groupID: String, name: String, version: String, mainClass: String, mainComponents: Array[String], crossVersionedValue: xsbti.CrossValue, classpathExtra: Array[File]) extends xsbti.ApplicationID { + def crossVersioned: Boolean = crossVersionedValue != xsbti.CrossValue.Disabled } -object Application -{ - def apply(id: xsbti.ApplicationID): Application = - { - import id._ - Application(groupID, name, new Explicit(version), mainClass, mainComponents.toList, safeCrossVersionedValue(id), classpathExtra) - } +object Application { + def apply(id: xsbti.ApplicationID): Application = + { + import id._ + Application(groupID, name, new Explicit(version), mainClass, mainComponents.toList, safeCrossVersionedValue(id), classpathExtra) + } - private def safeCrossVersionedValue(id: xsbti.ApplicationID): xsbti.CrossValue = - try id.crossVersionedValue - catch { - case _: AbstractMethodError => - // Before 0.13 this method did not exist on application, so we need to provide a default value - //in the event we're dealing with an older Application. 
- if(id.crossVersioned) xsbti.CrossValue.Binary - else xsbti.CrossValue.Disabled - } + private def safeCrossVersionedValue(id: xsbti.ApplicationID): xsbti.CrossValue = + try id.crossVersionedValue + catch { + case _: AbstractMethodError => + // Before 0.13 this method did not exist on application, so we need to provide a default value + //in the event we're dealing with an older Application. + if (id.crossVersioned) xsbti.CrossValue.Binary + else xsbti.CrossValue.Disabled + } } -object Repository -{ - trait Repository extends xsbti.Repository { - def bootOnly: Boolean - } - final case class Maven(id: String, url: URL, bootOnly: Boolean = false) extends xsbti.MavenRepository with Repository - final case class Ivy(id: String, url: URL, ivyPattern: String, artifactPattern: String, mavenCompatible: Boolean, bootOnly: Boolean = false, descriptorOptional: Boolean = false, skipConsistencyCheck: Boolean = false) extends xsbti.IvyRepository with Repository - final case class Predefined(id: xsbti.Predefined, bootOnly: Boolean = false) extends xsbti.PredefinedRepository with Repository - object Predefined { - def apply(s: String): Predefined = new Predefined(xsbti.Predefined.toValue(s), false) - def apply(s: String, bootOnly: Boolean): Predefined = new Predefined(xsbti.Predefined.toValue(s), bootOnly) - } +object Repository { + trait Repository extends xsbti.Repository { + def bootOnly: Boolean + } + final case class Maven(id: String, url: URL, bootOnly: Boolean = false) extends xsbti.MavenRepository with Repository + final case class Ivy(id: String, url: URL, ivyPattern: String, artifactPattern: String, mavenCompatible: Boolean, bootOnly: Boolean = false, descriptorOptional: Boolean = false, skipConsistencyCheck: Boolean = false) extends xsbti.IvyRepository with Repository + final case class Predefined(id: xsbti.Predefined, bootOnly: Boolean = false) extends xsbti.PredefinedRepository with Repository + object Predefined { + def apply(s: String): Predefined = new 
Predefined(xsbti.Predefined.toValue(s), false) + def apply(s: String, bootOnly: Boolean): Predefined = new Predefined(xsbti.Predefined.toValue(s), bootOnly) + } - def isMavenLocal(repo: xsbti.Repository) = repo match { case p: xsbti.PredefinedRepository => p.id == xsbti.Predefined.MavenLocal; case _ => false } - def defaults: List[xsbti.Repository] = xsbti.Predefined.values.map(x => Predefined(x, false)).toList + def isMavenLocal(repo: xsbti.Repository) = repo match { case p: xsbti.PredefinedRepository => p.id == xsbti.Predefined.MavenLocal; case _ => false } + def defaults: List[xsbti.Repository] = xsbti.Predefined.values.map(x => Predefined(x, false)).toList } final case class Search(tpe: Search.Value, paths: List[File]) -object Search extends Enumeration -{ - def none = Search(Current, Nil) - val Only = value("only") - val RootFirst = value("root-first") - val Nearest = value("nearest") - val Current = value("none") - def apply(s: String, paths: List[File]): Search = Search(toValue(s), paths) +object Search extends Enumeration { + def none = Search(Current, Nil) + val Only = value("only") + val RootFirst = value("root-first") + val Nearest = value("nearest") + val Current = value("none") + def apply(s: String, paths: List[File]): Search = Search(toValue(s), paths) } -final case class BootSetup(directory: File, lock: Boolean, properties: File, search: Search, promptCreate: String, enableQuick: Boolean, promptFill: Boolean) -{ - def map(f: File => File) = BootSetup(f(directory), lock, f(properties), search, promptCreate, enableQuick, promptFill) +final case class BootSetup(directory: File, lock: Boolean, properties: File, search: Search, promptCreate: String, enableQuick: Boolean, promptFill: Boolean) { + def map(f: File => File) = BootSetup(f(directory), lock, f(properties), search, promptCreate, enableQuick, promptFill) } final case class AppProperty(name: String)(val quick: Option[PropertyInit], val create: Option[PropertyInit], val fill: Option[PropertyInit]) 
@@ -149,19 +138,17 @@ sealed trait PropertyInit final class SetProperty(val value: String) extends PropertyInit final class PromptProperty(val label: String, val default: Option[String]) extends PropertyInit -final class Logging(level: LogLevel.Value) extends Serializable -{ - def log(s: => String, at: LogLevel.Value) = if(level.id <= at.id) stream(at).println("[" + at + "] " + s) - def debug(s: => String) = log(s, LogLevel.Debug) - private def stream(at: LogLevel.Value) = if(at == LogLevel.Error) System.err else System.out +final class Logging(level: LogLevel.Value) extends Serializable { + def log(s: => String, at: LogLevel.Value) = if (level.id <= at.id) stream(at).println("[" + at + "] " + s) + def debug(s: => String) = log(s, LogLevel.Debug) + private def stream(at: LogLevel.Value) = if (at == LogLevel.Error) System.err else System.out } -object LogLevel extends Enumeration -{ - val Debug = value("debug", 0) - val Info = value("info", 1) - val Warn = value("warn", 2) - val Error = value("error", 3) - def apply(s: String): Logging = new Logging(toValue(s)) +object LogLevel extends Enumeration { + val Debug = value("debug", 0) + val Info = value("info", 1) + val Warn = value("warn", 2) + val Error = value("error", 3) + def apply(s: String): Logging = new Logging(toValue(s)) } final class AppConfiguration(val arguments: Array[String], val baseDirectory: File, val provider: xsbti.AppProvider) extends xsbti.AppConfiguration diff --git a/launch/src/main/scala/xsbt/boot/ListMap.scala b/launch/src/main/scala/xsbt/boot/ListMap.scala index 588f9bfaf..6dcd679d0 100644 --- a/launch/src/main/scala/xsbt/boot/ListMap.scala +++ b/launch/src/main/scala/xsbt/boot/ListMap.scala @@ -4,35 +4,34 @@ package xsbt.boot import Pre._ -import scala.collection.{Iterable, Iterator} +import scala.collection.{ Iterable, Iterator } import scala.collection.immutable.List // preserves iteration order -sealed class ListMap[K,V] private(backing: List[(K,V)]) extends Iterable[(K,V)] // use 
Iterable because Traversable.toStream loops +sealed class ListMap[K, V] private (backing: List[(K, V)]) extends Iterable[(K, V)] // use Iterable because Traversable.toStream loops { - import ListMap.remove - def update(k: K, v: V) = this.+( (k,v) ) - def +(pair: (K,V)) = copy(pair :: remove(backing,pair._1)) - def -(k: K) = copy(remove(backing,k)) - def get(k: K): Option[V] = backing.find(_._1 == k).map(_._2) - def keys: List[K] = backing.reverse.map(_._1) - def apply(k: K): V = getOrError(get(k), "Key " + k + " not found") - def contains(k: K): Boolean = get(k).isDefined - def iterator = backing.reverse.iterator - override def isEmpty: Boolean = backing.isEmpty - override def toList = backing.reverse - override def toSeq = toList - protected def copy(newBacking: List[(K,V)]): ListMap[K,V] = new ListMap(newBacking) - def default(defaultF: K => V): ListMap[K,V] = - new ListMap[K,V](backing) { - override def apply(k: K) = super.get(k).getOrElse(defaultF(k)) - override def copy(newBacking: List[(K,V)]) = super.copy(newBacking).default(defaultF) - } - override def toString = backing.mkString("ListMap(",",",")") + import ListMap.remove + def update(k: K, v: V) = this.+((k, v)) + def +(pair: (K, V)) = copy(pair :: remove(backing, pair._1)) + def -(k: K) = copy(remove(backing, k)) + def get(k: K): Option[V] = backing.find(_._1 == k).map(_._2) + def keys: List[K] = backing.reverse.map(_._1) + def apply(k: K): V = getOrError(get(k), "Key " + k + " not found") + def contains(k: K): Boolean = get(k).isDefined + def iterator = backing.reverse.iterator + override def isEmpty: Boolean = backing.isEmpty + override def toList = backing.reverse + override def toSeq = toList + protected def copy(newBacking: List[(K, V)]): ListMap[K, V] = new ListMap(newBacking) + def default(defaultF: K => V): ListMap[K, V] = + new ListMap[K, V](backing) { + override def apply(k: K) = super.get(k).getOrElse(defaultF(k)) + override def copy(newBacking: List[(K, V)]) = 
super.copy(newBacking).default(defaultF) + } + override def toString = backing.mkString("ListMap(", ",", ")") } -object ListMap -{ - def apply[K,V](pairs: (K,V)*) = new ListMap[K,V](pairs.toList.distinct) - def empty[K,V] = new ListMap[K,V](Nil) - private def remove[K,V](backing: List[(K,V)], k: K) = backing.filter(_._1 != k) +object ListMap { + def apply[K, V](pairs: (K, V)*) = new ListMap[K, V](pairs.toList.distinct) + def empty[K, V] = new ListMap[K, V](Nil) + private def remove[K, V](backing: List[(K, V)], k: K) = backing.filter(_._1 != k) } diff --git a/launch/src/main/scala/xsbt/boot/Locks.scala b/launch/src/main/scala/xsbt/boot/Locks.scala index d8619fa6c..f53abf721 100644 --- a/launch/src/main/scala/xsbt/boot/Locks.scala +++ b/launch/src/main/scala/xsbt/boot/Locks.scala @@ -3,104 +3,99 @@ */ package xsbt.boot -import java.io.{File, FileOutputStream, IOException} +import java.io.{ File, FileOutputStream, IOException } import java.nio.channels.FileChannel import java.util.concurrent.Callable import scala.collection.immutable.List import scala.annotation.tailrec -object GetLocks -{ - /** Searches for Locks in parent class loaders before returning Locks from this class loader. - * Normal class loading doesn't work because the launcher class loader hides xsbt classes.*/ - def find: xsbti.GlobalLock = - Loaders(getClass.getClassLoader.getParent).flatMap(tryGet).headOption.getOrElse(Locks) - private[this] def tryGet(loader: ClassLoader): List[xsbti.GlobalLock] = - try { getLocks0(loader) :: Nil } catch { case e: ClassNotFoundException => Nil } - private[this] def getLocks0(loader: ClassLoader) = - Class.forName("xsbt.boot.Locks$", true, loader).getField("MODULE$").get(null).asInstanceOf[xsbti.GlobalLock] +object GetLocks { + /** + * Searches for Locks in parent class loaders before returning Locks from this class loader. + * Normal class loading doesn't work because the launcher class loader hides xsbt classes. 
+ */ + def find: xsbti.GlobalLock = + Loaders(getClass.getClassLoader.getParent).flatMap(tryGet).headOption.getOrElse(Locks) + private[this] def tryGet(loader: ClassLoader): List[xsbti.GlobalLock] = + try { getLocks0(loader) :: Nil } catch { case e: ClassNotFoundException => Nil } + private[this] def getLocks0(loader: ClassLoader) = + Class.forName("xsbt.boot.Locks$", true, loader).getField("MODULE$").get(null).asInstanceOf[xsbti.GlobalLock] } // gets a file lock by first getting a JVM-wide lock. -object Locks extends xsbti.GlobalLock -{ - private[this] val locks = new Cache[File, Unit, GlobalLock]( (f, _) => new GlobalLock(f)) - def apply[T](file: File, action: Callable[T]): T = if(file eq null) action.call else apply0(file, action) - private[this] def apply0[T](file: File, action: Callable[T]): T = - { - val lock = - synchronized - { - file.getParentFile.mkdirs() - file.createNewFile() - locks(file.getCanonicalFile, ()) - } - lock.withLock(action) - } +object Locks extends xsbti.GlobalLock { + private[this] val locks = new Cache[File, Unit, GlobalLock]((f, _) => new GlobalLock(f)) + def apply[T](file: File, action: Callable[T]): T = if (file eq null) action.call else apply0(file, action) + private[this] def apply0[T](file: File, action: Callable[T]): T = + { + val lock = + synchronized { + file.getParentFile.mkdirs() + file.createNewFile() + locks(file.getCanonicalFile, ()) + } + lock.withLock(action) + } - private[this] class GlobalLock(file: File) - { - private[this] var fileLocked = false - def withLock[T](run: Callable[T]): T = - synchronized - { - if(fileLocked) - run.call - else - { - fileLocked = true - try { ignoringDeadlockAvoided(run) } - finally { fileLocked = false } - } - } + private[this] class GlobalLock(file: File) { + private[this] var fileLocked = false + def withLock[T](run: Callable[T]): T = + synchronized { + if (fileLocked) + run.call + else { + fileLocked = true + try { ignoringDeadlockAvoided(run) } + finally { fileLocked = false } + } + } 
- // https://github.com/sbt/sbt/issues/650 - // This approach means a real deadlock won't be detected - @tailrec private[this] def ignoringDeadlockAvoided[T](run: Callable[T]): T = - { - val result = - try { Some(withFileLock(run)) } - catch { case i: IOException if isDeadlockAvoided(i) => - // there should be a timeout to the deadlock avoidance, so this is just a backup - Thread.sleep(200) - None - } - result match { // workaround for no tailrec optimization in the above try/catch - case Some(t) => t - case None => ignoringDeadlockAvoided(run) - } - } + // https://github.com/sbt/sbt/issues/650 + // This approach means a real deadlock won't be detected + @tailrec private[this] def ignoringDeadlockAvoided[T](run: Callable[T]): T = + { + val result = + try { Some(withFileLock(run)) } + catch { + case i: IOException if isDeadlockAvoided(i) => + // there should be a timeout to the deadlock avoidance, so this is just a backup + Thread.sleep(200) + None + } + result match { // workaround for no tailrec optimization in the above try/catch + case Some(t) => t + case None => ignoringDeadlockAvoided(run) + } + } - // The actual message is not specified by FileChannel.lock, so this may need to be adjusted for different JVMs - private[this] def isDeadlockAvoided(i: IOException): Boolean = - i.getMessage == "Resource deadlock avoided" + // The actual message is not specified by FileChannel.lock, so this may need to be adjusted for different JVMs + private[this] def isDeadlockAvoided(i: IOException): Boolean = + i.getMessage == "Resource deadlock avoided" - private[this] def withFileLock[T](run: Callable[T]): T = - { - def withChannelRetries(retries: Int)(channel: FileChannel): T = - try { withChannel(channel) } - catch { case i: InternalLockNPE => - if(retries > 0) withChannelRetries(retries - 1)(channel) else throw i - } - - def withChannel(channel: FileChannel) = - { - val freeLock = try { channel.tryLock } catch { case e: NullPointerException => throw new InternalLockNPE(e) 
} - if(freeLock eq null) - { - System.out.println("Waiting for lock on " + file + " to be available..."); - val lock = try { channel.lock } catch { case e: NullPointerException => throw new InternalLockNPE(e) } - try { run.call } - finally { lock.release() } - } - else - { - try { run.call } - finally { freeLock.release() } - } - } - Using(new FileOutputStream(file).getChannel)(withChannelRetries(5)) - } - } - private[this] final class InternalLockNPE(cause: Exception) extends RuntimeException(cause) + private[this] def withFileLock[T](run: Callable[T]): T = + { + def withChannelRetries(retries: Int)(channel: FileChannel): T = + try { withChannel(channel) } + catch { + case i: InternalLockNPE => + if (retries > 0) withChannelRetries(retries - 1)(channel) else throw i + } + + def withChannel(channel: FileChannel) = + { + val freeLock = try { channel.tryLock } catch { case e: NullPointerException => throw new InternalLockNPE(e) } + if (freeLock eq null) { + System.out.println("Waiting for lock on " + file + " to be available..."); + val lock = try { channel.lock } catch { case e: NullPointerException => throw new InternalLockNPE(e) } + try { run.call } + finally { lock.release() } + } else { + try { run.call } + finally { freeLock.release() } + } + } + Using(new FileOutputStream(file).getChannel)(withChannelRetries(5)) + } + } + private[this] final class InternalLockNPE(cause: Exception) extends RuntimeException(cause) } diff --git a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala index 800247743..bf0489fd4 100644 --- a/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala +++ b/launch/src/main/scala/xsbt/boot/ModuleDefinition.scala @@ -4,24 +4,22 @@ import Pre._ import java.io.File import java.net.URLClassLoader -final class ModuleDefinition(val configuration: UpdateConfiguration, val extraClasspath: Array[File], val target: UpdateTarget, val failLabel: String) -{ - def retrieveFailed: Nothing = 
fail("") - def retrieveCorrupt(missing: Iterable[String]): Nothing = fail(": missing " + missing.mkString(", ")) - private def fail(extra: String) = - throw new xsbti.RetrieveException(versionString, "Could not retrieve " + failLabel + extra) - private def versionString: String = target match { case _: UpdateScala => configuration.getScalaVersion; case a: UpdateApp => Value.get(a.id.version) } +final class ModuleDefinition(val configuration: UpdateConfiguration, val extraClasspath: Array[File], val target: UpdateTarget, val failLabel: String) { + def retrieveFailed: Nothing = fail("") + def retrieveCorrupt(missing: Iterable[String]): Nothing = fail(": missing " + missing.mkString(", ")) + private def fail(extra: String) = + throw new xsbti.RetrieveException(versionString, "Could not retrieve " + failLabel + extra) + private def versionString: String = target match { case _: UpdateScala => configuration.getScalaVersion; case a: UpdateApp => Value.get(a.id.version) } } -final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: Option[String], val resolvedAppVersion: Option[String], val baseDirectories: List[File]) -{ - /** Use this constructor only when the module exists already, or when its version is not dynamic (so its resolved version would be the same) */ - def this(fresh: Boolean, definition: ModuleDefinition, detectedScalaVersion: Option[String], baseDirectories: List[File]) = - this(fresh, definition, detectedScalaVersion, None, baseDirectories) +final class RetrievedModule(val fresh: Boolean, val definition: ModuleDefinition, val detectedScalaVersion: Option[String], val resolvedAppVersion: Option[String], val baseDirectories: List[File]) { + /** Use this constructor only when the module exists already, or when its version is not dynamic (so its resolved version would be the same) */ + def this(fresh: Boolean, definition: ModuleDefinition, detectedScalaVersion: Option[String], baseDirectories: List[File]) = + 
this(fresh, definition, detectedScalaVersion, None, baseDirectories) - lazy val classpath: Array[File] = getJars(baseDirectories) - lazy val fullClasspath: Array[File] = concat(classpath, definition.extraClasspath) + lazy val classpath: Array[File] = getJars(baseDirectories) + lazy val fullClasspath: Array[File] = concat(classpath, definition.extraClasspath) - def createLoader(parentLoader: ClassLoader): ClassLoader = - new URLClassLoader(toURLs(fullClasspath), parentLoader) + def createLoader(parentLoader: ClassLoader): ClassLoader = + new URLClassLoader(toURLs(fullClasspath), parentLoader) } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/PlainApplication.scala b/launch/src/main/scala/xsbt/boot/PlainApplication.scala index 65e2bd56b..93658e098 100644 --- a/launch/src/main/scala/xsbt/boot/PlainApplication.scala +++ b/launch/src/main/scala/xsbt/boot/PlainApplication.scala @@ -9,36 +9,36 @@ class PlainApplication private (mainMethod: java.lang.reflect.Method) extends xs val ExitClass = classOf[xsbti.Exit] // It seems we may need to wrap exceptions here... try mainMethod.getReturnType match { - case ExitClass => - mainMethod.invoke(null, configuration.arguments).asInstanceOf[xsbti.Exit] - case IntClass => - PlainApplication.Exit(mainMethod.invoke(null, configuration.arguments).asInstanceOf[Int]) - case _ => - // Here we still invoke, but return 0 if sucessful (no exceptions). - mainMethod.invoke(null, configuration.arguments) - PlainApplication.Exit(0) + case ExitClass => + mainMethod.invoke(null, configuration.arguments).asInstanceOf[xsbti.Exit] + case IntClass => + PlainApplication.Exit(mainMethod.invoke(null, configuration.arguments).asInstanceOf[Int]) + case _ => + // Here we still invoke, but return 0 if sucessful (no exceptions). + mainMethod.invoke(null, configuration.arguments) + PlainApplication.Exit(0) } catch { // This is only thrown if the underlying reflective call throws. // Let's expose the underlying error. 
case e: java.lang.reflect.InvocationTargetException if e.getCause != null => throw e.getCause } - + } } /** An object that lets us detect compatible "plain" applications and launch them reflectively. */ object PlainApplication { def isPlainApplication(clazz: Class[_]): Boolean = findMainMethod(clazz).isDefined def apply(clazz: Class[_]): xsbti.AppMain = - findMainMethod(clazz) match { - case Some(method) => new PlainApplication(method) - case None => sys.error("Class: " + clazz + " does not have a main method!") - } + findMainMethod(clazz) match { + case Some(method) => new PlainApplication(method) + case None => sys.error("Class: " + clazz + " does not have a main method!") + } private def findMainMethod(clazz: Class[_]): Option[java.lang.reflect.Method] = try { - val method = + val method = clazz.getMethod("main", classOf[Array[String]]) - if(java.lang.reflect.Modifier.isStatic(method.getModifiers)) Some(method) + if (java.lang.reflect.Modifier.isStatic(method.getModifiers)) Some(method) else None } catch { case n: NoSuchMethodException => None diff --git a/launch/src/main/scala/xsbt/boot/Pre.scala b/launch/src/main/scala/xsbt/boot/Pre.scala index 26b83aee9..cc5abbad3 100644 --- a/launch/src/main/scala/xsbt/boot/Pre.scala +++ b/launch/src/main/scala/xsbt/boot/Pre.scala @@ -3,108 +3,102 @@ */ package xsbt.boot - import scala.collection.immutable.List - import java.io.{File, FileFilter} - import java.net.{URL, URLClassLoader} - import java.util.Locale +import scala.collection.immutable.List +import java.io.{ File, FileFilter } +import java.net.{ URL, URLClassLoader } +import java.util.Locale -object Pre -{ - def readLine(prompt: String): Option[String] = { - val c = System.console() - if(c eq null) None else Option(c.readLine(prompt)) - } - def trimLeading(line: String) = - { - def newStart(i: Int): Int = if(i >= line.length || !Character.isWhitespace(line.charAt(i))) i else newStart(i+1) - line.substring(newStart(0)) - } - def isEmpty(line: String) = line.length 
== 0 - def isNonEmpty(line: String) = line.length > 0 - def assert(condition: Boolean, msg: => String): Unit = if (!condition) throw new AssertionError(msg) - def assert(condition: Boolean): Unit = assert(condition, "Assertion failed") - def require(condition: Boolean, msg: => String): Unit = if (!condition) throw new IllegalArgumentException(msg) - def error(msg: String): Nothing = throw new BootException(prefixError(msg)) - def declined(msg: String): Nothing = throw new BootException(msg) - def prefixError(msg: String): String = "Error during sbt execution: " + msg - def toBoolean(s: String) = java.lang.Boolean.parseBoolean(s) - def toArray[T : ClassManifest](list: List[T]) = - { - val arr = new Array[T](list.length) - def copy(i: Int, rem: List[T]): Unit = - if(i < arr.length) - { - arr(i) = rem.head - copy(i+1, rem.tail) - } - copy(0, list) - arr - } - /* These exist in order to avoid bringing in dependencies on RichInt and ArrayBuffer, among others. */ - def concat(a: Array[File], b: Array[File]): Array[File] = - { - val n = new Array[File](a.length + b.length) - java.lang.System.arraycopy(a, 0, n, 0, a.length) - java.lang.System.arraycopy(b, 0, n, a.length, b.length) - n - } - def array(files: File*): Array[File] = toArray(files.toList) - /* Saves creating a closure for default if it has already been evaluated*/ - def orElse[T](opt: Option[T], default: T) = if(opt.isDefined) opt.get else default +object Pre { + def readLine(prompt: String): Option[String] = { + val c = System.console() + if (c eq null) None else Option(c.readLine(prompt)) + } + def trimLeading(line: String) = + { + def newStart(i: Int): Int = if (i >= line.length || !Character.isWhitespace(line.charAt(i))) i else newStart(i + 1) + line.substring(newStart(0)) + } + def isEmpty(line: String) = line.length == 0 + def isNonEmpty(line: String) = line.length > 0 + def assert(condition: Boolean, msg: => String): Unit = if (!condition) throw new AssertionError(msg) + def assert(condition: Boolean): 
Unit = assert(condition, "Assertion failed") + def require(condition: Boolean, msg: => String): Unit = if (!condition) throw new IllegalArgumentException(msg) + def error(msg: String): Nothing = throw new BootException(prefixError(msg)) + def declined(msg: String): Nothing = throw new BootException(msg) + def prefixError(msg: String): String = "Error during sbt execution: " + msg + def toBoolean(s: String) = java.lang.Boolean.parseBoolean(s) + def toArray[T: ClassManifest](list: List[T]) = + { + val arr = new Array[T](list.length) + def copy(i: Int, rem: List[T]): Unit = + if (i < arr.length) { + arr(i) = rem.head + copy(i + 1, rem.tail) + } + copy(0, list) + arr + } + /* These exist in order to avoid bringing in dependencies on RichInt and ArrayBuffer, among others. */ + def concat(a: Array[File], b: Array[File]): Array[File] = + { + val n = new Array[File](a.length + b.length) + java.lang.System.arraycopy(a, 0, n, 0, a.length) + java.lang.System.arraycopy(b, 0, n, a.length, b.length) + n + } + def array(files: File*): Array[File] = toArray(files.toList) + /* Saves creating a closure for default if it has already been evaluated*/ + def orElse[T](opt: Option[T], default: T) = if (opt.isDefined) opt.get else default - def wrapNull(a: Array[File]): Array[File] = if(a == null) new Array[File](0) else a - def const[B](b: B): Any => B = _ => b - def strictOr[T](a: Option[T], b: Option[T]): Option[T] = a match { case None => b; case _ => a } - def getOrError[T](a: Option[T], msg: String): T = a match { case None => error(msg); case Some(x) => x } - def orNull[T >: Null](t: Option[T]): T = t match { case None => null; case Some(x) => x } + def wrapNull(a: Array[File]): Array[File] = if (a == null) new Array[File](0) else a + def const[B](b: B): Any => B = _ => b + def strictOr[T](a: Option[T], b: Option[T]): Option[T] = a match { case None => b; case _ => a } + def getOrError[T](a: Option[T], msg: String): T = a match { case None => error(msg); case Some(x) => x } + def 
orNull[T >: Null](t: Option[T]): T = t match { case None => null; case Some(x) => x } - def getJars(directories: List[File]): Array[File] = toArray(directories.flatMap(directory => wrapNull(directory.listFiles(JarFilter)))) + def getJars(directories: List[File]): Array[File] = toArray(directories.flatMap(directory => wrapNull(directory.listFiles(JarFilter)))) - object JarFilter extends FileFilter - { - def accept(file: File) = !file.isDirectory && file.getName.endsWith(".jar") - } - def getMissing(loader: ClassLoader, classes: Iterable[String]): Iterable[String] = - { - def classMissing(c: String) = try { Class.forName(c, false, loader); false } catch { case e: ClassNotFoundException => true } - classes.toList.filter(classMissing) - } - def toURLs(files: Array[File]): Array[URL] = files.map(_.toURI.toURL) - def toFile(url: URL): File = - try { new File(url.toURI) } - catch { case _: java.net.URISyntaxException => new File(url.getPath) } + object JarFilter extends FileFilter { + def accept(file: File) = !file.isDirectory && file.getName.endsWith(".jar") + } + def getMissing(loader: ClassLoader, classes: Iterable[String]): Iterable[String] = + { + def classMissing(c: String) = try { Class.forName(c, false, loader); false } catch { case e: ClassNotFoundException => true } + classes.toList.filter(classMissing) + } + def toURLs(files: Array[File]): Array[URL] = files.map(_.toURI.toURL) + def toFile(url: URL): File = + try { new File(url.toURI) } + catch { case _: java.net.URISyntaxException => new File(url.getPath) } + def delete(f: File) { + if (f.isDirectory) { + val fs = f.listFiles() + if (fs ne null) fs foreach delete + } + if (f.exists) f.delete() + } + final val isWindows: Boolean = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") + final val isCygwin: Boolean = isWindows && java.lang.Boolean.getBoolean("sbt.cygwin") - def delete(f: File) - { - if(f.isDirectory) - { - val fs = f.listFiles() - if(fs ne null) fs foreach delete - } - 
if(f.exists) f.delete() - } - final val isWindows: Boolean = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") - final val isCygwin: Boolean = isWindows && java.lang.Boolean.getBoolean("sbt.cygwin") - - import java.util.Properties - import java.io.{FileInputStream,FileOutputStream} - private[boot] def readProperties(propertiesFile: File) = - { - val properties = new Properties - if(propertiesFile.exists) - Using( new FileInputStream(propertiesFile) )( properties.load ) - properties - } - private[boot] def writeProperties(properties: Properties, file: File, msg: String): Unit = { - file.getParentFile.mkdirs() - Using(new FileOutputStream(file))( out => properties.store(out, msg) ) - } - private[boot] def setSystemProperties(properties: Properties): Unit = { - val nameItr = properties.stringPropertyNames.iterator - while(nameItr.hasNext) { - val propName = nameItr.next - System.setProperty(propName, properties.getProperty(propName)) - } - } + import java.util.Properties + import java.io.{ FileInputStream, FileOutputStream } + private[boot] def readProperties(propertiesFile: File) = + { + val properties = new Properties + if (propertiesFile.exists) + Using(new FileInputStream(propertiesFile))(properties.load) + properties + } + private[boot] def writeProperties(properties: Properties, file: File, msg: String): Unit = { + file.getParentFile.mkdirs() + Using(new FileOutputStream(file))(out => properties.store(out, msg)) + } + private[boot] def setSystemProperties(properties: Properties): Unit = { + val nameItr = properties.stringPropertyNames.iterator + while (nameItr.hasNext) { + val propName = nameItr.next + System.setProperty(propName, properties.getProperty(propName)) + } + } } diff --git a/launch/src/main/scala/xsbt/boot/ResolveValues.scala b/launch/src/main/scala/xsbt/boot/ResolveValues.scala index 952d9d970..5e57c1243 100644 --- a/launch/src/main/scala/xsbt/boot/ResolveValues.scala +++ 
b/launch/src/main/scala/xsbt/boot/ResolveValues.scala @@ -4,43 +4,40 @@ package xsbt.boot import Pre._ -import java.io.{File, FileInputStream} +import java.io.{ File, FileInputStream } import java.util.Properties -object ResolveValues -{ - def apply(conf: LaunchConfiguration): LaunchConfiguration = (new ResolveValues(conf))() - private def trim(s: String) = if(s eq null) None else notEmpty(s.trim) - private def notEmpty(s: String) = if(isEmpty(s)) None else Some(s) +object ResolveValues { + def apply(conf: LaunchConfiguration): LaunchConfiguration = (new ResolveValues(conf))() + private def trim(s: String) = if (s eq null) None else notEmpty(s.trim) + private def notEmpty(s: String) = if (isEmpty(s)) None else Some(s) } -import ResolveValues.{trim} -final class ResolveValues(conf: LaunchConfiguration) -{ - private def propertiesFile = conf.boot.properties - private lazy val properties = readProperties(propertiesFile) - def apply(): LaunchConfiguration = - { - import conf._ - val scalaVersion = resolve(conf.scalaVersion) - val appVersion = resolve(app.version) - val classifiers = resolveClassifiers(ivyConfiguration.classifiers) - withVersions(scalaVersion, appVersion, classifiers) - } - def resolveClassifiers(classifiers: Classifiers): Classifiers = - { - import ConfigurationParser.readIDs - // the added "" ensures that the main jars are retrieved - val scalaClassifiers = "" :: resolve(classifiers.forScala) - val appClassifiers = "" :: resolve(classifiers.app) - Classifiers(new Explicit(scalaClassifiers), new Explicit(appClassifiers)) - } - def resolve[T](v: Value[T])(implicit read: String => T): T = - v match - { - case e: Explicit[t] => e.value - case i: Implicit[t] => - trim(properties.getProperty(i.name)) map read orElse - i.default getOrElse ("No " + i.name + " specified in " + propertiesFile) - } +import ResolveValues.{ trim } +final class ResolveValues(conf: LaunchConfiguration) { + private def propertiesFile = conf.boot.properties + private lazy val 
properties = readProperties(propertiesFile) + def apply(): LaunchConfiguration = + { + import conf._ + val scalaVersion = resolve(conf.scalaVersion) + val appVersion = resolve(app.version) + val classifiers = resolveClassifiers(ivyConfiguration.classifiers) + withVersions(scalaVersion, appVersion, classifiers) + } + def resolveClassifiers(classifiers: Classifiers): Classifiers = + { + import ConfigurationParser.readIDs + // the added "" ensures that the main jars are retrieved + val scalaClassifiers = "" :: resolve(classifiers.forScala) + val appClassifiers = "" :: resolve(classifiers.app) + Classifiers(new Explicit(scalaClassifiers), new Explicit(appClassifiers)) + } + def resolve[T](v: Value[T])(implicit read: String => T): T = + v match { + case e: Explicit[t] => e.value + case i: Implicit[t] => + trim(properties.getProperty(i.name)) map read orElse + i.default getOrElse ("No " + i.name + " specified in " + propertiesFile) + } } \ No newline at end of file diff --git a/launch/src/main/scala/xsbt/boot/ServerApplication.scala b/launch/src/main/scala/xsbt/boot/ServerApplication.scala index 0b7600b83..3080f1226 100644 --- a/launch/src/main/scala/xsbt/boot/ServerApplication.scala +++ b/launch/src/main/scala/xsbt/boot/ServerApplication.scala @@ -11,7 +11,7 @@ import scala.annotation.tailrec /** A wrapper around 'raw' static methods to meet the sbt application interface. */ class ServerApplication private (provider: xsbti.AppProvider) extends xsbti.AppMain { import ServerApplication._ - + override def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = { val serverMain = provider.entryPoint.asSubclass(ServerMainClass).newInstance val server = serverMain.start(configuration) @@ -27,9 +27,9 @@ object ServerApplication { // We could even structurally type things that have a uri + awaitTermination method... 
def isServerApplication(clazz: Class[_]): Boolean = ServerMainClass.isAssignableFrom(clazz) - def apply(provider: xsbti.AppProvider): xsbti.AppMain = - new ServerApplication(provider) - + def apply(provider: xsbti.AppProvider): xsbti.AppMain = + new ServerApplication(provider) + } object ServerLocator { // TODO - Probably want to drop this to reduce classfile size @@ -45,20 +45,20 @@ object ServerLocator { def makeLockFile(f: File): File = new File(f.getParentFile, s"${f.getName}.lock") // Launch the process and read the port... - def locate(currentDirectory: File, config: LaunchConfiguration): URI = + def locate(currentDirectory: File, config: LaunchConfiguration): URI = config.serverConfig match { case None => sys.error("No server lock file configured. Cannot locate server.") case Some(sc) => locked(makeLockFile(sc.lockFile)) { readProperties(sc.lockFile) match { case Some(uri) if isReachable(uri) => uri - case _ => + case _ => val uri = ServerLauncher.startServer(currentDirectory, config) writeProperties(sc.lockFile, uri) uri } } } - + private val SERVER_URI_PROPERTY = "server.uri" def readProperties(f: File): Option[java.net.URI] = { try { @@ -79,8 +79,8 @@ object ServerLocator { df.setTimeZone(java.util.TimeZone.getTimeZone("UTC")) Pre.writeProperties(props, f, s"Server Startup at ${df.format(new java.util.Date)}") } - - def isReachable(uri: java.net.URI): Boolean = + + def isReachable(uri: java.net.URI): Boolean = try { // TODO - For now we assume if we can connect, it means // that the server is working... 
@@ -97,16 +97,16 @@ class StreamDumper(in: java.io.BufferedReader, out: java.io.PrintStream) extends setDaemon(true) private val running = new java.util.concurrent.atomic.AtomicBoolean(true) override def run(): Unit = { - def read(): Unit = if(running.get) in.readLine match { + def read(): Unit = if (running.get) in.readLine match { case null => () - case line => + case line => out.println(line) read() } read() out.close() } - + def close(): Unit = running.set(false) } object ServerLauncher { @@ -114,26 +114,26 @@ object ServerLauncher { def startServer(currentDirectory: File, config: LaunchConfiguration): URI = { val serverConfig = config.serverConfig match { case Some(c) => c - case None => throw new RuntimeException("Logic Failure: Attempting to start a server that isn't configured to be a server. Please report a bug.") + case None => throw new RuntimeException("Logic Failure: Attempting to start a server that isn't configured to be a server. Please report a bug.") } val launchConfig = java.io.File.createTempFile("sbtlaunch", "config") launchConfig.deleteOnExit() LaunchConfiguration.save(config, launchConfig) val jvmArgs: List[String] = serverConfig.jvmArgs map readLines match { case Some(args) => args - case None => Nil + case None => Nil } - val cmd: List[String] = + val cmd: List[String] = ("java" :: jvmArgs) ++ - ("-jar" :: defaultLauncherLookup.getCanonicalPath :: s"@load:${launchConfig.toURI.toURL.toString}" :: Nil) + ("-jar" :: defaultLauncherLookup.getCanonicalPath :: s"@load:${launchConfig.toURI.toURL.toString}" :: Nil) launchProcessAndGetUri(cmd, currentDirectory) } - + // Here we try to isolate all the stupidity of dealing with Java processes. def launchProcessAndGetUri(cmd: List[String], cwd: File): URI = { // TODO - Handle windows path stupidity in arguments. 
val pb = new java.lang.ProcessBuilder() - pb.command(cmd:_*) + pb.command(cmd: _*) pb.directory(cwd) val process = pb.start() // First we need to grab all the input streams, and close the ones we don't care about. @@ -146,7 +146,7 @@ object ServerLauncher { // Now we look for the URI synch value, and then make sure we close the output files. try readUntilSynch(new java.io.BufferedReader(new java.io.InputStreamReader(stdout))) match { case Some(uri) => uri - case _ => sys.error("Failed to start server!") + case _ => sys.error("Failed to start server!") } finally { errorDumper.close() stdout.close() @@ -158,10 +158,10 @@ object ServerLauncher { //stderr.close() } } - + object ServerUriLine { def unapply(in: String): Option[URI] = - if(in startsWith SERVER_SYNCH_TEXT) { + if (in startsWith SERVER_SYNCH_TEXT) { Some(new URI(in.substring(SERVER_SYNCH_TEXT.size))) } else None } @@ -169,19 +169,19 @@ object ServerLauncher { def readUntilSynch(in: java.io.BufferedReader): Option[URI] = { @tailrec def read(): Option[URI] = in.readLine match { - case null => None + case null => None case ServerUriLine(uri) => Some(uri) - case line => read() + case line => read() } try read() finally in.close() } /** Reads all the lines in a file. If it doesn't exist, returns an empty list. Forces UTF-8 strings. 
*/ - def readLines(f: File): List[String] = - if(!f.exists) Nil else { + def readLines(f: File): List[String] = + if (!f.exists) Nil else { val reader = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(f), "UTF-8")) @tailrec - def read(current: List[String]): List[String] = + def read(current: List[String]): List[String] = reader.readLine match { case null => current.reverse case line => read(line :: current) @@ -189,7 +189,7 @@ object ServerLauncher { try read(Nil) finally reader.close() } - + def defaultLauncherLookup: File = try { val classInLauncher = classOf[AppConfiguration] diff --git a/launch/src/main/scala/xsbt/boot/Update.scala b/launch/src/main/scala/xsbt/boot/Update.scala index cbf0fb020..fb9f99b4b 100644 --- a/launch/src/main/scala/xsbt/boot/Update.scala +++ b/launch/src/main/scala/xsbt/boot/Update.scala @@ -4,26 +4,26 @@ package xsbt.boot import Pre._ -import java.io.{File, FileWriter, PrintWriter, Writer} +import java.io.{ File, FileWriter, PrintWriter, Writer } import java.util.concurrent.Callable import java.util.regex.Pattern import java.util.Properties -import org.apache.ivy.{core, plugins, util, Ivy} +import org.apache.ivy.{ core, plugins, util, Ivy } import core.LogOptions -import core.cache.{CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager} +import core.cache.{ CacheMetadataOptions, DefaultRepositoryCacheManager, DefaultResolutionCacheManager } import core.event.EventManager -import core.module.id.{ArtifactId, ModuleId, ModuleRevisionId} -import core.module.descriptor.{Configuration => IvyConfiguration, DefaultDependencyArtifactDescriptor, DefaultDependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor} -import core.module.descriptor.{Artifact => IArtifact, DefaultExcludeRule, DependencyDescriptor, ExcludeRule} +import core.module.id.{ ArtifactId, ModuleId, ModuleRevisionId } +import core.module.descriptor.{ Configuration => IvyConfiguration, 
DefaultDependencyArtifactDescriptor, DefaultDependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor } +import core.module.descriptor.{ Artifact => IArtifact, DefaultExcludeRule, DependencyDescriptor, ExcludeRule } import core.report.ResolveReport -import core.resolve.{ResolveEngine, ResolveOptions} -import core.retrieve.{RetrieveEngine, RetrieveOptions} +import core.resolve.{ ResolveEngine, ResolveOptions } +import core.retrieve.{ RetrieveEngine, RetrieveOptions } import core.sort.SortEngine import core.settings.IvySettings -import plugins.matcher.{ExactPatternMatcher, PatternMatcher} -import plugins.resolver.{BasicResolver, ChainResolver, FileSystemResolver, IBiblioResolver, URLResolver} -import util.{DefaultMessageLogger, filter, Message, MessageLoggerEngine, url} +import plugins.matcher.{ ExactPatternMatcher, PatternMatcher } +import plugins.resolver.{ BasicResolver, ChainResolver, FileSystemResolver, IBiblioResolver, URLResolver } +import util.{ DefaultMessageLogger, filter, Message, MessageLoggerEngine, url } import filter.Filter import url.CredentialsStore @@ -34,410 +34,388 @@ final class UpdateScala(val classifiers: List[String]) extends UpdateTarget { de final class UpdateApp(val id: Application, val classifiers: List[String], val tpe: String) extends UpdateTarget final class UpdateConfiguration(val bootDirectory: File, val ivyHome: Option[File], val scalaOrg: String, - val scalaVersion: Option[String], val repositories: List[xsbti.Repository], val checksums: List[String]) { - val resolutionCacheBase = new File(bootDirectory, "resolution-cache") - def getScalaVersion = scalaVersion match { case Some(sv) => sv; case None => "" } + val scalaVersion: Option[String], val repositories: List[xsbti.Repository], val checksums: List[String]) { + val resolutionCacheBase = new File(bootDirectory, "resolution-cache") + def getScalaVersion = scalaVersion match { case Some(sv) => sv; case None => "" } } final class UpdateResult(val success: Boolean, val 
scalaVersion: Option[String], val appVersion: Option[String]) { - @deprecated("0.13.2", "Please use the other constructor providing appVersion.") - def this(success: Boolean, scalaVersion: Option[String]) = this(success, scalaVersion, None) + @deprecated("0.13.2", "Please use the other constructor providing appVersion.") + def this(success: Boolean, scalaVersion: Option[String]) = this(success, scalaVersion, None) } /** Ensures that the Scala and application jars exist for the given versions or else downloads them.*/ -final class Update(config: UpdateConfiguration) -{ - import config.{bootDirectory, checksums, getScalaVersion, ivyHome, repositories, resolutionCacheBase, scalaVersion, scalaOrg} - bootDirectory.mkdirs +final class Update(config: UpdateConfiguration) { + import config.{ bootDirectory, checksums, getScalaVersion, ivyHome, repositories, resolutionCacheBase, scalaVersion, scalaOrg } + bootDirectory.mkdirs - private def logFile = new File(bootDirectory, UpdateLogName) - private val logWriter = new PrintWriter(new FileWriter(logFile)) + private def logFile = new File(bootDirectory, UpdateLogName) + private val logWriter = new PrintWriter(new FileWriter(logFile)) - private def addCredentials() - { - val optionProps = - Option(System.getProperty("sbt.boot.credentials")) orElse - Option(System.getenv("SBT_CREDENTIALS")) map ( path => - Pre.readProperties(new File(path)) - ) - optionProps match { - case Some(props) => extractCredentials("realm","host","user","password")(props) - case None => () - } - extractCredentials("sbt.boot.realm","sbt.boot.host","sbt.boot.user","sbt.boot.password")(System.getProperties) - } - private def extractCredentials(keys: (String,String,String,String))(props: Properties) { - val List(realm, host, user, password) = keys.productIterator.map(key => props.getProperty(key.toString)).toList - if (realm != null && host != null && user != null && password != null) - CredentialsStore.INSTANCE.addCredentials(realm, host, user, password) - } 
- private lazy val settings = - { - addCredentials() - val settings = new IvySettings - ivyHome match { case Some(dir) => settings.setDefaultIvyUserDir(dir); case None => } - addResolvers(settings) - settings.setVariable("ivy.checksums", checksums mkString ",") - settings.setDefaultConflictManager(settings.getConflictManager(ConflictManagerName)) - settings.setBaseDir(bootDirectory) - setScalaVariable(settings, scalaVersion) - settings - } - private[this] def setScalaVariable(settings: IvySettings, scalaVersion: Option[String]): Unit = - scalaVersion match { case Some(sv) => settings.setVariable("scala", sv); case None => } - private lazy val ivy = - { - val ivy = new Ivy() { private val loggerEngine = new SbtMessageLoggerEngine; override def getLoggerEngine = loggerEngine } - ivy.setSettings(settings) - ivy.bind() - ivy - } - // should be the same file as is used in the Ivy module - private lazy val ivyLockFile = new File(settings.getDefaultIvyUserDir, ".sbt.ivy.lock") + private def addCredentials() { + val optionProps = + Option(System.getProperty("sbt.boot.credentials")) orElse + Option(System.getenv("SBT_CREDENTIALS")) map (path => + Pre.readProperties(new File(path)) + ) + optionProps match { + case Some(props) => extractCredentials("realm", "host", "user", "password")(props) + case None => () + } + extractCredentials("sbt.boot.realm", "sbt.boot.host", "sbt.boot.user", "sbt.boot.password")(System.getProperties) + } + private def extractCredentials(keys: (String, String, String, String))(props: Properties) { + val List(realm, host, user, password) = keys.productIterator.map(key => props.getProperty(key.toString)).toList + if (realm != null && host != null && user != null && password != null) + CredentialsStore.INSTANCE.addCredentials(realm, host, user, password) + } + private lazy val settings = + { + addCredentials() + val settings = new IvySettings + ivyHome match { case Some(dir) => settings.setDefaultIvyUserDir(dir); case None => } + addResolvers(settings) 
+ settings.setVariable("ivy.checksums", checksums mkString ",") + settings.setDefaultConflictManager(settings.getConflictManager(ConflictManagerName)) + settings.setBaseDir(bootDirectory) + setScalaVariable(settings, scalaVersion) + settings + } + private[this] def setScalaVariable(settings: IvySettings, scalaVersion: Option[String]): Unit = + scalaVersion match { case Some(sv) => settings.setVariable("scala", sv); case None => } + private lazy val ivy = + { + val ivy = new Ivy() { private val loggerEngine = new SbtMessageLoggerEngine; override def getLoggerEngine = loggerEngine } + ivy.setSettings(settings) + ivy.bind() + ivy + } + // should be the same file as is used in the Ivy module + private lazy val ivyLockFile = new File(settings.getDefaultIvyUserDir, ".sbt.ivy.lock") - /** The main entry point of this class for use by the Update module. It runs Ivy */ - def apply(target: UpdateTarget, reason: String): UpdateResult = - { - Message.setDefaultLogger(new SbtIvyLogger(logWriter)) - val action = new Callable[UpdateResult] { def call = lockedApply(target, reason) } - Locks(ivyLockFile, action) - } - private def lockedApply(target: UpdateTarget, reason: String): UpdateResult = - { - ivy.pushContext() - try { update(target, reason) } - catch - { - case e: Exception => - e.printStackTrace(logWriter) - log(e.toString) - System.out.println(" (see " + logFile + " for complete log)") - new UpdateResult(false, None, None) - } - finally - { - logWriter.close() - ivy.popContext() - delete(resolutionCacheBase) - } - } - /** Runs update for the specified target (updates either the scala or appliciation jars for building the project) */ - private def update(target: UpdateTarget, reason: String): UpdateResult = - { - import IvyConfiguration.Visibility.PUBLIC - // the actual module id here is not that important - val moduleID = new DefaultModuleDescriptor(createID(SbtOrg, "boot-" + target.tpe, "1.0"), "release", null, false) - moduleID.setLastModified(System.currentTimeMillis) 
- moduleID.addConfiguration(new IvyConfiguration(DefaultIvyConfiguration, PUBLIC, "", new Array(0), true, null)) - // add dependencies based on which target needs updating - val dep = target match - { - case u: UpdateScala => - val scalaVersion = getScalaVersion - addDependency(moduleID, scalaOrg, CompilerModuleName, scalaVersion, "default;optional(default)", u.classifiers) - val ddesc = addDependency(moduleID, scalaOrg, LibraryModuleName, scalaVersion, "default", u.classifiers) - excludeJUnit(moduleID) - val scalaOrgString = if (scalaOrg != ScalaOrg) " " + scalaOrg else "" - System.out.println("Getting" + scalaOrgString + " Scala " + scalaVersion + " " + reason + "...") - ddesc.getDependencyId - case u: UpdateApp => - val app = u.id - val resolvedName = (app.crossVersioned, scalaVersion) match { - case (xsbti.CrossValue.Full, Some(sv)) => app.name + "_" + sv - case (xsbti.CrossValue.Binary, Some(sv)) => app.name + "_" + CrossVersionUtil.binaryScalaVersion(sv) - case _ => app.name - } - val ddesc = addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) - System.out.println("Getting " + app.groupID + " " + resolvedName + " " + app.getVersion + " " + reason + "...") - ddesc.getDependencyId - } - update(moduleID, target, dep) - } - /** Runs the resolve and retrieve for the given moduleID, which has had its dependencies added already. */ - private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget, dep: ModuleId): UpdateResult = - { - val eventManager = new EventManager - val (autoScalaVersion, depVersion) = resolve(eventManager, moduleID, dep) - // Fix up target.id with the depVersion that we know for sure is resolved (not dynamic) -- this way, `retrieve` - // will put them in the right version directory. 
- val target1 = (depVersion, target) match { - case (Some(dv), u: UpdateApp) => import u._; new UpdateApp(id.copy(version = new Explicit(dv)), classifiers, tpe) - case _ => target - } - setScalaVariable(settings, autoScalaVersion) - retrieve(eventManager, moduleID, target1, autoScalaVersion) - new UpdateResult(true, autoScalaVersion, depVersion) - } - private def createID(organization: String, name: String, revision: String) = - ModuleRevisionId.newInstance(organization, name, revision) - /** Adds the given dependency to the default configuration of 'moduleID'. */ - private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) = - { - val dep = new DefaultDependencyDescriptor(moduleID, createID(organization, name, revision), false, false, true) - for(c <- conf.split(";")) - dep.addDependencyConfiguration(DefaultIvyConfiguration, c) - for(classifier <- classifiers) - addClassifier(dep, name, classifier) - moduleID.addDependency(dep) - dep - } - private def addClassifier(dep: DefaultDependencyDescriptor, name: String, classifier: String) - { - val extraMap = new java.util.HashMap[String,String] - if(!isEmpty(classifier)) - extraMap.put("e:classifier", classifier) - val ivyArtifact = new DefaultDependencyArtifactDescriptor(dep, name, artifactType(classifier), "jar", null, extraMap) - for(conf <- dep.getModuleConfigurations) - dep.addDependencyArtifact(conf, ivyArtifact) - } - private def excludeJUnit(module: DefaultModuleDescriptor): Unit = module.addExcludeRule(excludeRule(JUnitName, JUnitName)) - private def excludeRule(organization: String, name: String): ExcludeRule = - { - val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", "*", "*") - val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, java.util.Collections.emptyMap[AnyRef,AnyRef]) - rule.addConfiguration(DefaultIvyConfiguration) - rule - } - val scalaLibraryId = 
ModuleId.newInstance(ScalaOrg, LibraryModuleName) - // Returns the version of the scala library, as well as `dep` (a dependency of `module`) after it's been resolved - private def resolve(eventManager: EventManager, module: ModuleDescriptor, dep: ModuleId): (Option[String], Option[String]) = - { - val resolveOptions = new ResolveOptions - // this reduces the substantial logging done by Ivy, including the progress dots when downloading artifacts - resolveOptions.setLog(LogOptions.LOG_DOWNLOAD_ONLY) - resolveOptions.setCheckIfChanged(false) - val resolveEngine = new ResolveEngine(settings, eventManager, new SortEngine(settings)) - val resolveReport = resolveEngine.resolve(module, resolveOptions) - if(resolveReport.hasError) - { - logExceptions(resolveReport) - val seen = new java.util.LinkedHashSet[Any] - seen.addAll(resolveReport.getAllProblemMessages) - System.out.println(seen.toArray.mkString(System.getProperty("line.separator"))) - error("Error retrieving required libraries") - } - val modules = moduleRevisionIDs(resolveReport) - extractVersion(modules, scalaLibraryId) -> extractVersion(modules, dep) - } - private[this] def extractVersion(modules: Seq[ModuleRevisionId], dep: ModuleId): Option[String] = - { - modules collectFirst { case m if m.getModuleId.equals(dep) => m.getRevision } - } - private[this] def moduleRevisionIDs(report: ResolveReport): Seq[ModuleRevisionId] = - { - import collection.JavaConverters._ - import org.apache.ivy.core.resolve.IvyNode - report.getDependencies.asInstanceOf[java.util.List[IvyNode]].asScala map (_.getResolvedId) - } + /** The main entry point of this class for use by the Update module. 
It runs Ivy */ + def apply(target: UpdateTarget, reason: String): UpdateResult = + { + Message.setDefaultLogger(new SbtIvyLogger(logWriter)) + val action = new Callable[UpdateResult] { def call = lockedApply(target, reason) } + Locks(ivyLockFile, action) + } + private def lockedApply(target: UpdateTarget, reason: String): UpdateResult = + { + ivy.pushContext() + try { update(target, reason) } + catch { + case e: Exception => + e.printStackTrace(logWriter) + log(e.toString) + System.out.println(" (see " + logFile + " for complete log)") + new UpdateResult(false, None, None) + } finally { + logWriter.close() + ivy.popContext() + delete(resolutionCacheBase) + } + } + /** Runs update for the specified target (updates either the scala or appliciation jars for building the project) */ + private def update(target: UpdateTarget, reason: String): UpdateResult = + { + import IvyConfiguration.Visibility.PUBLIC + // the actual module id here is not that important + val moduleID = new DefaultModuleDescriptor(createID(SbtOrg, "boot-" + target.tpe, "1.0"), "release", null, false) + moduleID.setLastModified(System.currentTimeMillis) + moduleID.addConfiguration(new IvyConfiguration(DefaultIvyConfiguration, PUBLIC, "", new Array(0), true, null)) + // add dependencies based on which target needs updating + val dep = target match { + case u: UpdateScala => + val scalaVersion = getScalaVersion + addDependency(moduleID, scalaOrg, CompilerModuleName, scalaVersion, "default;optional(default)", u.classifiers) + val ddesc = addDependency(moduleID, scalaOrg, LibraryModuleName, scalaVersion, "default", u.classifiers) + excludeJUnit(moduleID) + val scalaOrgString = if (scalaOrg != ScalaOrg) " " + scalaOrg else "" + System.out.println("Getting" + scalaOrgString + " Scala " + scalaVersion + " " + reason + "...") + ddesc.getDependencyId + case u: UpdateApp => + val app = u.id + val resolvedName = (app.crossVersioned, scalaVersion) match { + case (xsbti.CrossValue.Full, Some(sv)) => app.name + "_" 
+ sv + case (xsbti.CrossValue.Binary, Some(sv)) => app.name + "_" + CrossVersionUtil.binaryScalaVersion(sv) + case _ => app.name + } + val ddesc = addDependency(moduleID, app.groupID, resolvedName, app.getVersion, "default(compile)", u.classifiers) + System.out.println("Getting " + app.groupID + " " + resolvedName + " " + app.getVersion + " " + reason + "...") + ddesc.getDependencyId + } + update(moduleID, target, dep) + } + /** Runs the resolve and retrieve for the given moduleID, which has had its dependencies added already. */ + private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget, dep: ModuleId): UpdateResult = + { + val eventManager = new EventManager + val (autoScalaVersion, depVersion) = resolve(eventManager, moduleID, dep) + // Fix up target.id with the depVersion that we know for sure is resolved (not dynamic) -- this way, `retrieve` + // will put them in the right version directory. + val target1 = (depVersion, target) match { + case (Some(dv), u: UpdateApp) => + import u._; new UpdateApp(id.copy(version = new Explicit(dv)), classifiers, tpe) + case _ => target + } + setScalaVariable(settings, autoScalaVersion) + retrieve(eventManager, moduleID, target1, autoScalaVersion) + new UpdateResult(true, autoScalaVersion, depVersion) + } + private def createID(organization: String, name: String, revision: String) = + ModuleRevisionId.newInstance(organization, name, revision) + /** Adds the given dependency to the default configuration of 'moduleID'. 
*/ + private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String, classifiers: List[String]) = + { + val dep = new DefaultDependencyDescriptor(moduleID, createID(organization, name, revision), false, false, true) + for (c <- conf.split(";")) + dep.addDependencyConfiguration(DefaultIvyConfiguration, c) + for (classifier <- classifiers) + addClassifier(dep, name, classifier) + moduleID.addDependency(dep) + dep + } + private def addClassifier(dep: DefaultDependencyDescriptor, name: String, classifier: String) { + val extraMap = new java.util.HashMap[String, String] + if (!isEmpty(classifier)) + extraMap.put("e:classifier", classifier) + val ivyArtifact = new DefaultDependencyArtifactDescriptor(dep, name, artifactType(classifier), "jar", null, extraMap) + for (conf <- dep.getModuleConfigurations) + dep.addDependencyArtifact(conf, ivyArtifact) + } + private def excludeJUnit(module: DefaultModuleDescriptor): Unit = module.addExcludeRule(excludeRule(JUnitName, JUnitName)) + private def excludeRule(organization: String, name: String): ExcludeRule = + { + val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", "*", "*") + val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, java.util.Collections.emptyMap[AnyRef, AnyRef]) + rule.addConfiguration(DefaultIvyConfiguration) + rule + } + val scalaLibraryId = ModuleId.newInstance(ScalaOrg, LibraryModuleName) + // Returns the version of the scala library, as well as `dep` (a dependency of `module`) after it's been resolved + private def resolve(eventManager: EventManager, module: ModuleDescriptor, dep: ModuleId): (Option[String], Option[String]) = + { + val resolveOptions = new ResolveOptions + // this reduces the substantial logging done by Ivy, including the progress dots when downloading artifacts + resolveOptions.setLog(LogOptions.LOG_DOWNLOAD_ONLY) + resolveOptions.setCheckIfChanged(false) + val resolveEngine = new 
ResolveEngine(settings, eventManager, new SortEngine(settings)) + val resolveReport = resolveEngine.resolve(module, resolveOptions) + if (resolveReport.hasError) { + logExceptions(resolveReport) + val seen = new java.util.LinkedHashSet[Any] + seen.addAll(resolveReport.getAllProblemMessages) + System.out.println(seen.toArray.mkString(System.getProperty("line.separator"))) + error("Error retrieving required libraries") + } + val modules = moduleRevisionIDs(resolveReport) + extractVersion(modules, scalaLibraryId) -> extractVersion(modules, dep) + } + private[this] def extractVersion(modules: Seq[ModuleRevisionId], dep: ModuleId): Option[String] = + { + modules collectFirst { case m if m.getModuleId.equals(dep) => m.getRevision } + } + private[this] def moduleRevisionIDs(report: ResolveReport): Seq[ModuleRevisionId] = + { + import collection.JavaConverters._ + import org.apache.ivy.core.resolve.IvyNode + report.getDependencies.asInstanceOf[java.util.List[IvyNode]].asScala map (_.getResolvedId) + } - /** Exceptions are logged to the update log file. */ - private def logExceptions(report: ResolveReport) - { - for(unresolved <- report.getUnresolvedDependencies) - { - val problem = unresolved.getProblem - if(problem != null) - problem.printStackTrace(logWriter) + /** Exceptions are logged to the update log file. */ + private def logExceptions(report: ResolveReport) { + for (unresolved <- report.getUnresolvedDependencies) { + val problem = unresolved.getProblem + if (problem != null) + problem.printStackTrace(logWriter) + } + } + private final class ArtifactFilter(f: IArtifact => Boolean) extends Filter { + def accept(o: Any) = o match { case a: IArtifact => f(a); case _ => false } + } + /** Retrieves resolved dependencies using the given target to determine the location to retrieve to. 
*/ + private def retrieve(eventManager: EventManager, module: ModuleDescriptor, target: UpdateTarget, autoScalaVersion: Option[String]) { + val retrieveOptions = new RetrieveOptions + val retrieveEngine = new RetrieveEngine(settings, eventManager) + val (pattern, extraFilter) = + target match { + case _: UpdateScala => (scalaRetrievePattern, const(true)) + case u: UpdateApp => (appRetrievePattern(u.id.toID), notCoreScala _) + } + val filter = (a: IArtifact) => retrieveType(a.getType) && a.getExtraAttribute("classifier") == null && extraFilter(a) + retrieveOptions.setArtifactFilter(new ArtifactFilter(filter)) + val scalaV = strictOr(scalaVersion, autoScalaVersion) + retrieveOptions.setDestArtifactPattern(baseDirectoryName(scalaOrg, scalaV) + "/" + pattern) + retrieveEngine.retrieve(module.getModuleRevisionId, retrieveOptions) + } + private[this] def notCoreScala(a: IArtifact) = a.getName match { + case LibraryModuleName | CompilerModuleName => false + case _ => true + } + private def retrieveType(tpe: String): Boolean = tpe == "jar" || tpe == "bundle" + /** Add the Sonatype OSS repositories */ + private def addResolvers(settings: IvySettings) { + val newDefault = new ChainResolver { + override def locate(artifact: IArtifact) = + if (hasImplicitClassifier(artifact)) null else super.locate(artifact) + } + newDefault.setName("redefined-public") + if (repositories.isEmpty) error("No repositories defined.") + for (repo <- repositories if includeRepo(repo)) + newDefault.add(toIvyRepository(settings, repo)) + configureCache(settings) + settings.addResolver(newDefault) + settings.setDefaultResolver(newDefault.getName) + } + // infrastructure is needed to avoid duplication between this class and the ivy/ subproject + private def hasImplicitClassifier(artifact: IArtifact): Boolean = + { + import collection.JavaConversions._ + artifact.getQualifiedExtraAttributes.keys.exists(_.asInstanceOf[String] startsWith "m:") + } + // exclude the local Maven repository for Scala 
-SNAPSHOTs + private def includeRepo(repo: xsbti.Repository) = !(Repository.isMavenLocal(repo) && isSnapshot(getScalaVersion)) + private def isSnapshot(scalaVersion: String) = scalaVersion.endsWith(Snapshot) + private[this] val Snapshot = "-SNAPSHOT" + private[this] val ChangingPattern = ".*" + Snapshot + private[this] val ChangingMatcher = PatternMatcher.REGEXP + private[this] def configureCache(settings: IvySettings) { + configureResolutionCache(settings) + configureRepositoryCache(settings) + } + private[this] def configureResolutionCache(settings: IvySettings) { + resolutionCacheBase.mkdirs() + val drcm = new DefaultResolutionCacheManager(resolutionCacheBase) + drcm.setSettings(settings) + settings.setResolutionCacheManager(drcm) + } + private[this] def configureRepositoryCache(settings: IvySettings) { + val cacheDir = settings.getDefaultRepositoryCacheBasedir() + val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir) { + // ignore resolvers wherever possible- not ideal, but avoids issues like #704 + override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} + override def findModuleInCache(dd: DependencyDescriptor, revId: ModuleRevisionId, options: CacheMetadataOptions, r: String) = { + super.findModuleInCache(dd, revId, options, null) + } + } + manager.setUseOrigin(true) + manager.setChangingMatcher(ChangingMatcher) + manager.setChangingPattern(ChangingPattern) + settings.addRepositoryCacheManager(manager) + settings.setDefaultRepositoryCacheManager(manager) + } + private def toIvyRepository(settings: IvySettings, repo: xsbti.Repository) = + { + import xsbti.Predefined._ + repo match { + case m: xsbti.MavenRepository => mavenResolver(m.id, m.url.toString) + case i: xsbti.IvyRepository => urlResolver(i.id, i.url.toString, i.ivyPattern, i.artifactPattern, i.mavenCompatible, i.descriptorOptional, i.skipConsistencyCheck) + case p: xsbti.PredefinedRepository => p.id match { 
+ case Local => localResolver(settings.getDefaultIvyUserDir.getAbsolutePath) + case MavenLocal => mavenLocal + case MavenCentral => mavenMainResolver + case ScalaToolsReleases | SonatypeOSSReleases => mavenResolver("Sonatype Releases Repository", "https://oss.sonatype.org/content/repositories/releases") + case ScalaToolsSnapshots | SonatypeOSSSnapshots => scalaSnapshots(getScalaVersion) } - } - private final class ArtifactFilter(f: IArtifact => Boolean) extends Filter { - def accept(o: Any) = o match { case a: IArtifact => f(a); case _ => false } - } - /** Retrieves resolved dependencies using the given target to determine the location to retrieve to. */ - private def retrieve(eventManager: EventManager, module: ModuleDescriptor, target: UpdateTarget, autoScalaVersion: Option[String]) - { - val retrieveOptions = new RetrieveOptions - val retrieveEngine = new RetrieveEngine(settings, eventManager) - val (pattern, extraFilter) = - target match - { - case _: UpdateScala => (scalaRetrievePattern, const(true)) - case u: UpdateApp => (appRetrievePattern(u.id.toID), notCoreScala _) - } - val filter = (a: IArtifact) => retrieveType(a.getType) && a.getExtraAttribute("classifier") == null && extraFilter(a) - retrieveOptions.setArtifactFilter(new ArtifactFilter(filter)) - val scalaV = strictOr(scalaVersion, autoScalaVersion) - retrieveOptions.setDestArtifactPattern(baseDirectoryName(scalaOrg, scalaV) + "/" + pattern) - retrieveEngine.retrieve(module.getModuleRevisionId, retrieveOptions) - } - private[this] def notCoreScala(a: IArtifact) = a.getName match { - case LibraryModuleName | CompilerModuleName => false - case _ => true - } - private def retrieveType(tpe: String): Boolean = tpe == "jar" || tpe == "bundle" - /** Add the Sonatype OSS repositories */ - private def addResolvers(settings: IvySettings) - { - val newDefault = new ChainResolver { - override def locate(artifact: IArtifact) = - if(hasImplicitClassifier(artifact)) null else super.locate(artifact) - } - 
newDefault.setName("redefined-public") - if(repositories.isEmpty) error("No repositories defined.") - for(repo <- repositories if includeRepo(repo)) - newDefault.add(toIvyRepository(settings, repo)) - configureCache(settings) - settings.addResolver(newDefault) - settings.setDefaultResolver(newDefault.getName) - } - // infrastructure is needed to avoid duplication between this class and the ivy/ subproject - private def hasImplicitClassifier(artifact: IArtifact): Boolean = - { - import collection.JavaConversions._ - artifact.getQualifiedExtraAttributes.keys.exists(_.asInstanceOf[String] startsWith "m:") - } - // exclude the local Maven repository for Scala -SNAPSHOTs - private def includeRepo(repo: xsbti.Repository) = !(Repository.isMavenLocal(repo) && isSnapshot(getScalaVersion) ) - private def isSnapshot(scalaVersion: String) = scalaVersion.endsWith(Snapshot) - private[this] val Snapshot = "-SNAPSHOT" - private[this] val ChangingPattern = ".*" + Snapshot - private[this] val ChangingMatcher = PatternMatcher.REGEXP - private[this] def configureCache(settings: IvySettings) - { - configureResolutionCache(settings) - configureRepositoryCache(settings) - } - private[this] def configureResolutionCache(settings: IvySettings) - { - resolutionCacheBase.mkdirs() - val drcm = new DefaultResolutionCacheManager(resolutionCacheBase) - drcm.setSettings(settings) - settings.setResolutionCacheManager(drcm) - } - private[this] def configureRepositoryCache(settings: IvySettings) - { - val cacheDir = settings.getDefaultRepositoryCacheBasedir() - val manager = new DefaultRepositoryCacheManager("default-cache", settings, cacheDir) { - // ignore resolvers wherever possible- not ideal, but avoids issues like #704 - override def saveResolvers(descriptor: ModuleDescriptor, metadataResolverName: String, artifactResolverName: String) {} - override def findModuleInCache(dd: DependencyDescriptor, revId: ModuleRevisionId, options: CacheMetadataOptions, r: String) = { - 
super.findModuleInCache(dd,revId,options,null) - } - } - manager.setUseOrigin(true) - manager.setChangingMatcher(ChangingMatcher) - manager.setChangingPattern(ChangingPattern) - settings.addRepositoryCacheManager(manager) - settings.setDefaultRepositoryCacheManager(manager) - } - private def toIvyRepository(settings: IvySettings, repo: xsbti.Repository) = - { - import xsbti.Predefined._ - repo match - { - case m: xsbti.MavenRepository => mavenResolver(m.id, m.url.toString) - case i: xsbti.IvyRepository => urlResolver(i.id, i.url.toString, i.ivyPattern, i.artifactPattern, i.mavenCompatible, i.descriptorOptional, i.skipConsistencyCheck) - case p: xsbti.PredefinedRepository => p.id match { - case Local => localResolver(settings.getDefaultIvyUserDir.getAbsolutePath) - case MavenLocal => mavenLocal - case MavenCentral => mavenMainResolver - case ScalaToolsReleases | SonatypeOSSReleases => mavenResolver("Sonatype Releases Repository", "https://oss.sonatype.org/content/repositories/releases") - case ScalaToolsSnapshots | SonatypeOSSSnapshots => scalaSnapshots(getScalaVersion) - } - } - } - private def onDefaultRepositoryCacheManager(settings: IvySettings)(f: DefaultRepositoryCacheManager => Unit) - { - settings.getDefaultRepositoryCacheManager match - { - case manager: DefaultRepositoryCacheManager => f(manager) - case _ => () - } - } - /** Uses the pattern defined in BuildConfiguration to download sbt from Google code.*/ - private def urlResolver(id: String, base: String, ivyPattern: String, artifactPattern: String, mavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean) = - { - val resolver = new URLResolver - resolver.setName(id) - resolver.addIvyPattern(adjustPattern(base, ivyPattern)) - resolver.addArtifactPattern(adjustPattern(base, artifactPattern)) - resolver.setM2compatible(mavenCompatible) - resolver.setDescriptor(if (descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED) - 
resolver.setCheckconsistency(!skipConsistencyCheck) - resolver - } - private def adjustPattern(base: String, pattern: String): String = - (if(base.endsWith("/") || isEmpty(base)) base else (base + "/") ) + pattern - private def mavenLocal = mavenResolver("Maven2 Local", "file://" + System.getProperty("user.home") + "/.m2/repository/") - /** Creates a maven-style resolver.*/ - private def mavenResolver(name: String, root: String) = - { - val resolver = defaultMavenResolver(name) - resolver.setRoot(root) - resolver - } - /** Creates a resolver for Maven Central.*/ - private def mavenMainResolver = defaultMavenResolver("Maven Central") - /** Creates a maven-style resolver with the default root.*/ - private def defaultMavenResolver(name: String) = - { - val resolver = new IBiblioResolver - resolver.setName(name) - resolver.setM2compatible(true) - resolver - } - private def localResolver(ivyUserDirectory: String) = - { - val localIvyRoot = ivyUserDirectory + "/local" - val resolver = new FileSystemResolver - resolver.setName(LocalIvyName) - resolver.addIvyPattern(localIvyRoot + "/" + LocalIvyPattern) - resolver.addArtifactPattern(localIvyRoot + "/" + LocalArtifactPattern) - resolver - } - private val SnapshotPattern = Pattern.compile("""(\d+).(\d+).(\d+)-(\d{8})\.(\d{6})-(\d+|\+)""") - private def scalaSnapshots(scalaVersion: String) = - { - val m = SnapshotPattern.matcher(scalaVersion) - if(m.matches) - { - val base = List(1,2,3).map(m.group).mkString(".") - val pattern = "https://oss.sonatype.org/content/repositories/snapshots/[organization]/[module]/" + base + "-SNAPSHOT/[artifact]-[revision](-[classifier]).[ext]" + } + } + private def onDefaultRepositoryCacheManager(settings: IvySettings)(f: DefaultRepositoryCacheManager => Unit) { + settings.getDefaultRepositoryCacheManager match { + case manager: DefaultRepositoryCacheManager => f(manager) + case _ => () + } + } + /** Uses the pattern defined in BuildConfiguration to download sbt from Google code.*/ + private def 
urlResolver(id: String, base: String, ivyPattern: String, artifactPattern: String, mavenCompatible: Boolean, descriptorOptional: Boolean, skipConsistencyCheck: Boolean) = + { + val resolver = new URLResolver + resolver.setName(id) + resolver.addIvyPattern(adjustPattern(base, ivyPattern)) + resolver.addArtifactPattern(adjustPattern(base, artifactPattern)) + resolver.setM2compatible(mavenCompatible) + resolver.setDescriptor(if (descriptorOptional) BasicResolver.DESCRIPTOR_OPTIONAL else BasicResolver.DESCRIPTOR_REQUIRED) + resolver.setCheckconsistency(!skipConsistencyCheck) + resolver + } + private def adjustPattern(base: String, pattern: String): String = + (if (base.endsWith("/") || isEmpty(base)) base else (base + "/")) + pattern + private def mavenLocal = mavenResolver("Maven2 Local", "file://" + System.getProperty("user.home") + "/.m2/repository/") + /** Creates a maven-style resolver.*/ + private def mavenResolver(name: String, root: String) = + { + val resolver = defaultMavenResolver(name) + resolver.setRoot(root) + resolver + } + /** Creates a resolver for Maven Central.*/ + private def mavenMainResolver = defaultMavenResolver("Maven Central") + /** Creates a maven-style resolver with the default root.*/ + private def defaultMavenResolver(name: String) = + { + val resolver = new IBiblioResolver + resolver.setName(name) + resolver.setM2compatible(true) + resolver + } + private def localResolver(ivyUserDirectory: String) = + { + val localIvyRoot = ivyUserDirectory + "/local" + val resolver = new FileSystemResolver + resolver.setName(LocalIvyName) + resolver.addIvyPattern(localIvyRoot + "/" + LocalIvyPattern) + resolver.addArtifactPattern(localIvyRoot + "/" + LocalArtifactPattern) + resolver + } + private val SnapshotPattern = Pattern.compile("""(\d+).(\d+).(\d+)-(\d{8})\.(\d{6})-(\d+|\+)""") + private def scalaSnapshots(scalaVersion: String) = + { + val m = SnapshotPattern.matcher(scalaVersion) + if (m.matches) { + val base = List(1, 2, 
3).map(m.group).mkString(".") + val pattern = "https://oss.sonatype.org/content/repositories/snapshots/[organization]/[module]/" + base + "-SNAPSHOT/[artifact]-[revision](-[classifier]).[ext]" - val resolver = new URLResolver - resolver.setName("Sonatype OSS Snapshots") - resolver.setM2compatible(true) - resolver.addArtifactPattern(pattern) - resolver - } - else - mavenResolver("Sonatype Snapshots Repository", "https://oss.sonatype.org/content/repositories/snapshots") - } + val resolver = new URLResolver + resolver.setName("Sonatype OSS Snapshots") + resolver.setM2compatible(true) + resolver.addArtifactPattern(pattern) + resolver + } else + mavenResolver("Sonatype Snapshots Repository", "https://oss.sonatype.org/content/repositories/snapshots") + } - /** Logs the given message to a file and to the console. */ - private def log(msg: String) = - { - try { logWriter.println(msg) } - catch { case e: Exception => System.err.println("Error writing to update log file: " + e.toString) } - System.out.println(msg) - } + /** Logs the given message to a file and to the console. */ + private def log(msg: String) = + { + try { logWriter.println(msg) } + catch { case e: Exception => System.err.println("Error writing to update log file: " + e.toString) } + System.out.println(msg) + } } -import SbtIvyLogger.{acceptError, acceptMessage} +import SbtIvyLogger.{ acceptError, acceptMessage } -/** A custom logger for Ivy to ignore the messages about not finding classes -* intentionally filtered using proguard and about 'unknown resolver'. */ -private final class SbtIvyLogger(logWriter: PrintWriter) extends DefaultMessageLogger(Message.MSG_INFO) -{ - override def log(msg: String, level: Int) - { - logWriter.println(msg) - if(level <= getLevel && acceptMessage(msg)) - System.out.println(msg) - } - override def rawlog(msg: String, level: Int) { log(msg, level) } - /** This is a hack to filter error messages about 'unknown resolver ...'. 
*/ - override def error(msg: String) = if(acceptError(msg)) super.error(msg) +/** + * A custom logger for Ivy to ignore the messages about not finding classes + * intentionally filtered using proguard and about 'unknown resolver'. + */ +private final class SbtIvyLogger(logWriter: PrintWriter) extends DefaultMessageLogger(Message.MSG_INFO) { + override def log(msg: String, level: Int) { + logWriter.println(msg) + if (level <= getLevel && acceptMessage(msg)) + System.out.println(msg) + } + override def rawlog(msg: String, level: Int) { log(msg, level) } + /** This is a hack to filter error messages about 'unknown resolver ...'. */ + override def error(msg: String) = if (acceptError(msg)) super.error(msg) } -private final class SbtMessageLoggerEngine extends MessageLoggerEngine -{ - /** This is a hack to filter error messages about 'unknown resolver ...'. */ - override def error(msg: String) = if(acceptError(msg)) super.error(msg) +private final class SbtMessageLoggerEngine extends MessageLoggerEngine { + /** This is a hack to filter error messages about 'unknown resolver ...'. 
*/ + override def error(msg: String) = if (acceptError(msg)) super.error(msg) } -private object SbtIvyLogger -{ - val IgnorePrefix = "impossible to define" - val UnknownResolver = "unknown resolver" - def acceptError(msg: String) = acceptMessage(msg) && !msg.startsWith(UnknownResolver) - def acceptMessage(msg: String) = (msg ne null) && !msg.startsWith(IgnorePrefix) +private object SbtIvyLogger { + val IgnorePrefix = "impossible to define" + val UnknownResolver = "unknown resolver" + def acceptError(msg: String) = acceptMessage(msg) && !msg.startsWith(UnknownResolver) + def acceptMessage(msg: String) = (msg ne null) && !msg.startsWith(IgnorePrefix) } diff --git a/launch/src/main/scala/xsbt/boot/Using.scala b/launch/src/main/scala/xsbt/boot/Using.scala index 9e93b96c1..cf03a3067 100644 --- a/launch/src/main/scala/xsbt/boot/Using.scala +++ b/launch/src/main/scala/xsbt/boot/Using.scala @@ -3,44 +3,38 @@ */ package xsbt.boot -import java.io.{Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream} +import java.io.{ Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream } -object Using -{ - def apply[R <: Closeable,T](create: R)(f: R => T): T = withResource(create)(f) - def withResource[R <: Closeable,T](r: R)(f: R => T): T = try { f(r) } finally { r.close() } +object Using { + def apply[R <: Closeable, T](create: R)(f: R => T): T = withResource(create)(f) + def withResource[R <: Closeable, T](r: R)(f: R => T): T = try { f(r) } finally { r.close() } } -object Copy -{ - def apply(files: List[File], toDirectory: File): Boolean = files.map(file => apply(file, toDirectory)).contains(true) - def apply(file: File, toDirectory: File): Boolean = - { - toDirectory.mkdirs() - val to = new File(toDirectory, file.getName) - val missing = !to.exists - if(missing) - { - Using(new FileInputStream(file)) { in => - Using(new FileOutputStream(to)) { out => - transfer(in, out) - } - } - } - missing - } - def transfer(in: InputStream, out: 
OutputStream) - { - val buffer = new Array[Byte](8192) - def next() - { - val read = in.read(buffer) - if(read > 0) - { - out.write(buffer, 0, read) - next() - } - } - next() - } +object Copy { + def apply(files: List[File], toDirectory: File): Boolean = files.map(file => apply(file, toDirectory)).contains(true) + def apply(file: File, toDirectory: File): Boolean = + { + toDirectory.mkdirs() + val to = new File(toDirectory, file.getName) + val missing = !to.exists + if (missing) { + Using(new FileInputStream(file)) { in => + Using(new FileOutputStream(to)) { out => + transfer(in, out) + } + } + } + missing + } + def transfer(in: InputStream, out: OutputStream) { + val buffer = new Array[Byte](8192) + def next() { + val read = in.read(buffer) + if (read > 0) { + out.write(buffer, 0, read) + next() + } + } + next() + } } \ No newline at end of file diff --git a/launch/test-sample/src/main/scala/xsbt/boot/test/Apps.scala b/launch/test-sample/src/main/scala/xsbt/boot/test/Apps.scala index 217e7b1ef..ba377d61b 100644 --- a/launch/test-sample/src/main/scala/xsbt/boot/test/Apps.scala +++ b/launch/test-sample/src/main/scala/xsbt/boot/test/Apps.scala @@ -3,43 +3,40 @@ package xsbt.boot.test class Exit(val code: Int) extends xsbti.Exit final class MainException(message: String) extends RuntimeException(message) -final class ArgumentTest extends xsbti.AppMain -{ - def run(configuration: xsbti.AppConfiguration) = - if(configuration.arguments.length == 0) - throw new MainException("Arguments were empty") - else - new Exit(0) +final class ArgumentTest extends xsbti.AppMain { + def run(configuration: xsbti.AppConfiguration) = + if (configuration.arguments.length == 0) + throw new MainException("Arguments were empty") + else + new Exit(0) } -class AppVersionTest extends xsbti.AppMain -{ - def run(configuration: xsbti.AppConfiguration) = - { - val expected = configuration.arguments.headOption.getOrElse("") - if(configuration.provider.id.version == expected) - new Exit(0) - else - 
throw new MainException("app version was " + configuration.provider.id.version + ", expected: " + expected) - } +class AppVersionTest extends xsbti.AppMain { + def run(configuration: xsbti.AppConfiguration) = + { + val expected = configuration.arguments.headOption.getOrElse("") + if (configuration.provider.id.version == expected) + new Exit(0) + else + throw new MainException("app version was " + configuration.provider.id.version + ", expected: " + expected) + } } -class ExtraTest extends xsbti.AppMain -{ - def run(configuration: xsbti.AppConfiguration) = - { - configuration.arguments.foreach { arg => - if(getClass.getClassLoader.getResource(arg) eq null) - throw new MainException("Could not find '" + arg + "'") - } - new Exit(0) - } +class ExtraTest extends xsbti.AppMain { + def run(configuration: xsbti.AppConfiguration) = + { + configuration.arguments.foreach { arg => + if (getClass.getClassLoader.getResource(arg) eq null) + throw new MainException("Could not find '" + arg + "'") + } + new Exit(0) + } } object PlainArgumentTestWithReturn { def main(args: Array[String]): Int = - if(args.length == 0) 1 + if (args.length == 0) 1 else 0 } object PlainArgumentTest { def main(args: Array[String]): Unit = - if(args.length == 0) throw new MainException("Arguments were empty") + if (args.length == 0) throw new MainException("Arguments were empty") else () } \ No newline at end of file diff --git a/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala index 930e565a9..bd1fff114 100644 --- a/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala +++ b/launch/test-sample/src/main/scala/xsbt/boot/test/Servers.scala @@ -4,71 +4,69 @@ package xsbt.boot.test import java.net.Socket import java.net.SocketTimeoutException -class EchoServer extends xsbti.ServerMain -{ - def start(configuration: xsbti.AppConfiguration): xsbti.Server = - { - object server extends xsbti.Server { - // TODO - Start a 
server. - val serverSocket = new java.net.ServerSocket(0) - val port = serverSocket.getLocalPort - val addr = serverSocket.getInetAddress.getHostAddress - override val uri =new java.net.URI(s"http://${addr}:${port}") - // Check for stop every second. - serverSocket.setSoTimeout(1000) - object serverThread extends Thread { - private val running = new java.util.concurrent.atomic.AtomicBoolean(true) - override def run(): Unit = { - while(running.get) try { - val clientSocket = serverSocket.accept() - // Handle client connections - object clientSocketThread extends Thread { - override def run(): Unit = { - echoTo(clientSocket) - } - } - clientSocketThread.start() - } catch { - case e: SocketTimeoutException => // Ignore - } - } - // Simple mechanism to dump input to output. - private def echoTo(socket: Socket): Unit = { - val input = new java.io.BufferedReader(new java.io.InputStreamReader(socket.getInputStream)) - val output = new java.io.BufferedWriter(new java.io.OutputStreamWriter(socket.getOutputStream)) - import scala.util.control.Breaks._ - try { - // Lame way to break out. - breakable { - def read(): Unit = input.readLine match { - case null => () - case "kill" => - running.set(false) - serverSocket.close() - break() - case line => - output.write(line) - output.flush() - read() - } - read() - } - } finally { - output.close() - input.close() - socket.close() - } - } - } - // Start the thread immediately - serverThread.start() - override def awaitTermination(): xsbti.MainResult = { - serverThread.join() - new Exit(0) - } - } - server - } - - +class EchoServer extends xsbti.ServerMain { + def start(configuration: xsbti.AppConfiguration): xsbti.Server = + { + object server extends xsbti.Server { + // TODO - Start a server. + val serverSocket = new java.net.ServerSocket(0) + val port = serverSocket.getLocalPort + val addr = serverSocket.getInetAddress.getHostAddress + override val uri = new java.net.URI(s"http://${addr}:${port}") + // Check for stop every second. 
+ serverSocket.setSoTimeout(1000) + object serverThread extends Thread { + private val running = new java.util.concurrent.atomic.AtomicBoolean(true) + override def run(): Unit = { + while (running.get) try { + val clientSocket = serverSocket.accept() + // Handle client connections + object clientSocketThread extends Thread { + override def run(): Unit = { + echoTo(clientSocket) + } + } + clientSocketThread.start() + } catch { + case e: SocketTimeoutException => // Ignore + } + } + // Simple mechanism to dump input to output. + private def echoTo(socket: Socket): Unit = { + val input = new java.io.BufferedReader(new java.io.InputStreamReader(socket.getInputStream)) + val output = new java.io.BufferedWriter(new java.io.OutputStreamWriter(socket.getOutputStream)) + import scala.util.control.Breaks._ + try { + // Lame way to break out. + breakable { + def read(): Unit = input.readLine match { + case null => () + case "kill" => + running.set(false) + serverSocket.close() + break() + case line => + output.write(line) + output.flush() + read() + } + read() + } + } finally { + output.close() + input.close() + socket.close() + } + } + } + // Start the thread immediately + serverThread.start() + override def awaitTermination(): xsbti.MainResult = { + serverThread.join() + new Exit(0) + } + } + server + } + } \ No newline at end of file diff --git a/main/actions/src/main/scala/sbt/CacheIvy.scala b/main/actions/src/main/scala/sbt/CacheIvy.scala index b09ce7f2b..3b50cadb9 100644 --- a/main/actions/src/main/scala/sbt/CacheIvy.scala +++ b/main/actions/src/main/scala/sbt/CacheIvy.scala @@ -3,197 +3,196 @@ */ package sbt - import Predef.{Map, Set, implicitly} // excludes *both 2.10.x conforms and 2.11.x $conforms in source compatible manner. +import Predef.{ Map, Set, implicitly } // excludes *both 2.10.x conforms and 2.11.x $conforms in source compatible manner. 
- import FileInfo.{exists, hash} - import java.io.File - import java.net.URL - import Types.{:+:, idFun} - import scala.xml.NodeSeq - import sbinary.{DefaultProtocol, Format} - import RepositoryHelpers._ - import Ordering._ +import FileInfo.{ exists, hash } +import java.io.File +import java.net.URL +import Types.{ :+:, idFun } +import scala.xml.NodeSeq +import sbinary.{ DefaultProtocol, Format } +import RepositoryHelpers._ +import Ordering._ +/** + * InputCaches for IvyConfiguration, ModuleSettings, and UpdateConfiguration + * The InputCaches for a basic data structure is built in two parts. + * Given the data structure: + * Data[A,B,C, ...] + * 1) Define a conversion from Data to the HList A :+: B :+: C :+: ... :+: HNil, + * excluding any members that should not be considered for caching + * 2) In theory, 1) would be enough and wrapHL would generate InputCache[Data] as long + * as all of InputCache[A], InputCache[B], ... exist. However, if any of these child + * InputCaches are constructed using wrapHL, you get a diverging implicit error. (I + * believe scalac is generating this error as specified, but that the implicits would + * be valid and not be infinite. This might take some effort to come up with a new rule + * that allows this) + * 3) So, we need to explicitly define the intermediate implicits. The general approach is: + * {{{ + * object LN { + * ... Data => HList conversions ... + * } + * import LN._ + * implicit dataCache: InputCache[Data] = wrapHL + * + * object L(N-1) ... + * }}} + * Each Data in LN only uses implicits from L(N-1). + * This way, higher levels (higher N) cannot see the HList conversions of subcomponents but can + * use the explicitly defined subcomponent implicits and there is no divergence. + * 4) Ideally, diverging implicits could be relaxed so that the ... = wrapIn lines could be removed. 
+ */ +object CacheIvy { + def password(s: Option[String]) = new Array[Byte](0) + def names(s: Iterable[Configuration]): Set[String] = s.map(_.name).toSet -/** InputCaches for IvyConfiguration, ModuleSettings, and UpdateConfiguration -* The InputCaches for a basic data structure is built in two parts. -* Given the data structure: -* Data[A,B,C, ...] -* 1) Define a conversion from Data to the HList A :+: B :+: C :+: ... :+: HNil, -* excluding any members that should not be considered for caching -* 2) In theory, 1) would be enough and wrapHL would generate InputCache[Data] as long -* as all of InputCache[A], InputCache[B], ... exist. However, if any of these child -* InputCaches are constructed using wrapHL, you get a diverging implicit error. (I -* believe scalac is generating this error as specified, but that the implicits would -* be valid and not be infinite. This might take some effort to come up with a new rule -* that allows this) -* 3) So, we need to explicitly define the intermediate implicits. The general approach is: -* {{{ -* object LN { -* ... Data => HList conversions ... -* } -* import LN._ -* implicit dataCache: InputCache[Data] = wrapHL -* -* object L(N-1) ... -* }}} -* Each Data in LN only uses implicits from L(N-1). -* This way, higher levels (higher N) cannot see the HList conversions of subcomponents but can -* use the explicitly defined subcomponent implicits and there is no divergence. -* 4) Ideally, diverging implicits could be relaxed so that the ... = wrapIn lines could be removed. 
-*/ -object CacheIvy -{ - def password(s: Option[String]) = new Array[Byte](0) - def names(s: Iterable[Configuration]): Set[String] = s.map(_.name).toSet + import Cache._ + implicit def wrapHL[W, H, T <: HList](implicit f: W => H :+: T, cache: InputCache[H :+: T]): InputCache[W] = + Cache.wrapIn(f, cache) - import Cache._ - implicit def wrapHL[W, H, T <: HList](implicit f: W => H :+: T, cache: InputCache[H :+: T]): InputCache[W] = - Cache.wrapIn(f, cache) - - lazy val excludeMap: Format[Map[ModuleID, Set[String]]] = implicitly - lazy val updateIC: InputCache[IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil] = implicitly -/* def deliverIC: InputCache[IvyConfiguration :+: ModuleSettings :+: DeliverConfiguration :+: HNil] = implicitly + lazy val excludeMap: Format[Map[ModuleID, Set[String]]] = implicitly + lazy val updateIC: InputCache[IvyConfiguration :+: ModuleSettings :+: UpdateConfiguration :+: HNil] = implicitly + /* def deliverIC: InputCache[IvyConfiguration :+: ModuleSettings :+: DeliverConfiguration :+: HNil] = implicitly def publishIC: InputCache[IvyConfiguration :+: ModuleSettings :+: PublishConfiguration :+: HNil] = implicitly*/ - implicit lazy val updateReportFormat: Format[UpdateReport] = - { - import DefaultProtocol.{StringFormat, FileFormat} - wrap[UpdateReport, (File, Seq[ConfigurationReport], UpdateStats, Map[File,Long])](rep => (rep.cachedDescriptor, rep.configurations, rep.stats, rep.stamps), { case (cd, cs, stats, stamps) => new UpdateReport(cd, cs, stats, stamps) }) - } - implicit def updateStatsFormat: Format[UpdateStats] = - wrap[UpdateStats, (Long,Long,Long)]( us => (us.resolveTime, us.downloadTime, us.downloadSize), { case (rt, dt, ds) => new UpdateStats(rt, dt, ds, true) }) - implicit def confReportFormat(implicit m: Format[String], mr: Format[Seq[ModuleReport]], mi: Format[Seq[ModuleID]]): Format[ConfigurationReport] = - wrap[ConfigurationReport, (String,Seq[ModuleReport],Seq[ModuleID])]( r => (r.configuration, r.modules, 
r.evicted), { case (c,m,v) => new ConfigurationReport(c,m,v) }) - implicit def moduleReportFormat(implicit ff: Format[File]): Format[ModuleReport] = - wrap[ModuleReport, (ModuleID, Seq[(Artifact, File)], Seq[Artifact])]( m => (m.module, m.artifacts, m.missingArtifacts), { case (m, as, ms) => new ModuleReport(m, as,ms) }) - implicit def artifactFormat(implicit sf: Format[String], uf: Format[Option[URL]]): Format[Artifact] = { - wrap[Artifact, (String,String,String,Option[String],Seq[Configuration],Option[URL],Map[String,String])]( - a => (a.name, a.`type`, a.extension, a.classifier, a.configurations.toSeq, a.url, a.extraAttributes), - { case (n,t,x,c,cs,u,e) => Artifact(n,t,x,c,cs,u,e) } - ) - } - implicit def exclusionRuleFormat(implicit sf: Format[String]): Format[ExclusionRule] = - wrap[ExclusionRule, (String, String, String, Seq[String])]( e => (e.organization, e.name, e.artifact, e.configurations), { case (o,n,a,cs) => ExclusionRule(o,n,a,cs) }) - implicit def crossVersionFormat: Format[CrossVersion] = wrap(crossToInt, crossFromInt) + implicit lazy val updateReportFormat: Format[UpdateReport] = + { + import DefaultProtocol.{ StringFormat, FileFormat } + wrap[UpdateReport, (File, Seq[ConfigurationReport], UpdateStats, Map[File, Long])](rep => (rep.cachedDescriptor, rep.configurations, rep.stats, rep.stamps), { case (cd, cs, stats, stamps) => new UpdateReport(cd, cs, stats, stamps) }) + } + implicit def updateStatsFormat: Format[UpdateStats] = + wrap[UpdateStats, (Long, Long, Long)](us => (us.resolveTime, us.downloadTime, us.downloadSize), { case (rt, dt, ds) => new UpdateStats(rt, dt, ds, true) }) + implicit def confReportFormat(implicit m: Format[String], mr: Format[Seq[ModuleReport]], mi: Format[Seq[ModuleID]]): Format[ConfigurationReport] = + wrap[ConfigurationReport, (String, Seq[ModuleReport], Seq[ModuleID])](r => (r.configuration, r.modules, r.evicted), { case (c, m, v) => new ConfigurationReport(c, m, v) }) + implicit def moduleReportFormat(implicit ff: 
Format[File]): Format[ModuleReport] = + wrap[ModuleReport, (ModuleID, Seq[(Artifact, File)], Seq[Artifact])](m => (m.module, m.artifacts, m.missingArtifacts), { case (m, as, ms) => new ModuleReport(m, as, ms) }) + implicit def artifactFormat(implicit sf: Format[String], uf: Format[Option[URL]]): Format[Artifact] = { + wrap[Artifact, (String, String, String, Option[String], Seq[Configuration], Option[URL], Map[String, String])]( + a => (a.name, a.`type`, a.extension, a.classifier, a.configurations.toSeq, a.url, a.extraAttributes), + { case (n, t, x, c, cs, u, e) => Artifact(n, t, x, c, cs, u, e) } + ) + } + implicit def exclusionRuleFormat(implicit sf: Format[String]): Format[ExclusionRule] = + wrap[ExclusionRule, (String, String, String, Seq[String])](e => (e.organization, e.name, e.artifact, e.configurations), { case (o, n, a, cs) => ExclusionRule(o, n, a, cs) }) + implicit def crossVersionFormat: Format[CrossVersion] = wrap(crossToInt, crossFromInt) - private[this] final val DisabledValue = 0 - private[this] final val BinaryValue = 1 - private[this] final val FullValue = 2 + private[this] final val DisabledValue = 0 + private[this] final val BinaryValue = 1 + private[this] final val FullValue = 2 - import CrossVersion.{Binary, Disabled, Full} - private[this] val crossFromInt = (i: Int) => i match { case BinaryValue => new Binary(idFun); case FullValue => new Full(idFun); case _ => Disabled } - private[this] val crossToInt = (c: CrossVersion) => c match { case Disabled => 0; case b: Binary => BinaryValue; case f: Full => FullValue } + import CrossVersion.{ Binary, Disabled, Full } + private[this] val crossFromInt = (i: Int) => i match { case BinaryValue => new Binary(idFun); case FullValue => new Full(idFun); case _ => Disabled } + private[this] val crossToInt = (c: CrossVersion) => c match { case Disabled => 0; case b: Binary => BinaryValue; case f: Full => FullValue } - implicit def moduleIDFormat(implicit sf: Format[String], bf: Format[Boolean]): 
Format[ModuleID] = - wrap[ModuleID, ((String,String,String,Option[String]),(Boolean,Boolean,Boolean,Seq[Artifact],Seq[ExclusionRule],Map[String,String],CrossVersion))]( - m => ((m.organization,m.name,m.revision,m.configurations), (m.isChanging, m.isTransitive, m.isForce, m.explicitArtifacts, m.exclusions, m.extraAttributes, m.crossVersion)), - { case ((o,n,r,cs),(ch,t,f,as,excl,x,cv)) => ModuleID(o,n,r,cs,ch,t,f,as,excl,x,cv) } - ) - implicit def moduleSetIC: InputCache[Set[ModuleID]] = basicInput(defaultEquiv, immutableSetFormat) + implicit def moduleIDFormat(implicit sf: Format[String], bf: Format[Boolean]): Format[ModuleID] = + wrap[ModuleID, ((String, String, String, Option[String]), (Boolean, Boolean, Boolean, Seq[Artifact], Seq[ExclusionRule], Map[String, String], CrossVersion))]( + m => ((m.organization, m.name, m.revision, m.configurations), (m.isChanging, m.isTransitive, m.isForce, m.explicitArtifacts, m.exclusions, m.extraAttributes, m.crossVersion)), + { case ((o, n, r, cs), (ch, t, f, as, excl, x, cv)) => ModuleID(o, n, r, cs, ch, t, f, as, excl, x, cv) } + ) + implicit def moduleSetIC: InputCache[Set[ModuleID]] = basicInput(defaultEquiv, immutableSetFormat) - implicit def configurationFormat(implicit sf: Format[String]): Format[Configuration] = - wrap[Configuration, String](_.name, s => new Configuration(s)) + implicit def configurationFormat(implicit sf: Format[String]): Format[Configuration] = + wrap[Configuration, String](_.name, s => new Configuration(s)) - implicit def classpathFormat = - { - import DefaultProtocol.FileFormat - implicitly[Format[Map[String, Seq[File]]]] - } + implicit def classpathFormat = + { + import DefaultProtocol.FileFormat + implicitly[Format[Map[String, Seq[File]]]] + } - object L5 { - implicit def inlineIvyToHL = (i: InlineIvyConfiguration) => i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+: i.localOnly :+: i.checksums :+: HNil - } - import L5._ + object L5 { + implicit def inlineIvyToHL = (i: 
InlineIvyConfiguration) => i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+: i.localOnly :+: i.checksums :+: HNil + } + import L5._ - implicit def inlineIvyIC: InputCache[InlineIvyConfiguration] = wrapIn - implicit def moduleSettingsIC: InputCache[ModuleSettings] = - unionInputCache[ModuleSettings, PomConfiguration :+: InlineConfiguration :+: EmptyConfiguration :+: IvyFileConfiguration :+: HNil] - - implicit def ivyConfigurationIC: InputCache[IvyConfiguration] = - unionInputCache[IvyConfiguration, InlineIvyConfiguration :+: ExternalIvyConfiguration :+: HNil] + implicit def inlineIvyIC: InputCache[InlineIvyConfiguration] = wrapIn + implicit def moduleSettingsIC: InputCache[ModuleSettings] = + unionInputCache[ModuleSettings, PomConfiguration :+: InlineConfiguration :+: EmptyConfiguration :+: IvyFileConfiguration :+: HNil] - object L4 { - implicit def moduleConfToHL = (m: ModuleConfiguration) => m.organization :+: m.name :+: m.revision :+: m.resolver :+: HNil - implicit def emptyToHL = (e: EmptyConfiguration) => e.module :+: e.ivyScala :+: e.validate :+: HNil - implicit def inlineToHL = (c: InlineConfiguration) => c.module :+: c.dependencies :+: c.ivyXML :+: c.configurations :+: c.defaultConfiguration.map(_.name) :+: c.ivyScala :+: c.validate :+: c.overrides :+: HNil - } - import L4._ + implicit def ivyConfigurationIC: InputCache[IvyConfiguration] = + unionInputCache[IvyConfiguration, InlineIvyConfiguration :+: ExternalIvyConfiguration :+: HNil] - implicit def emptyIC: InputCache[EmptyConfiguration] = wrapIn - implicit def inlineIC: InputCache[InlineConfiguration] = wrapIn - implicit def moduleConfIC: InputCache[ModuleConfiguration] = wrapIn + object L4 { + implicit def moduleConfToHL = (m: ModuleConfiguration) => m.organization :+: m.name :+: m.revision :+: m.resolver :+: HNil + implicit def emptyToHL = (e: EmptyConfiguration) => e.module :+: e.ivyScala :+: e.validate :+: HNil + implicit def inlineToHL = (c: InlineConfiguration) => c.module 
:+: c.dependencies :+: c.ivyXML :+: c.configurations :+: c.defaultConfiguration.map(_.name) :+: c.ivyScala :+: c.validate :+: c.overrides :+: HNil + } + import L4._ - object L3 { - implicit def mavenRToHL = (m: MavenRepository) => m.name :+: m.root :+: HNil - implicit def fileRToHL = (r: FileRepository) => r.name :+: r.configuration :+: r.patterns :+: HNil - implicit def urlRToHL = (u: URLRepository) => u.name :+: u.patterns :+: HNil - implicit def sshRToHL = (s: SshRepository) => s.name :+: s.connection :+: s.patterns :+: s.publishPermissions :+: HNil - implicit def sftpRToHL = (s: SftpRepository) => s.name :+: s.connection :+: s.patterns :+: HNil - implicit def rawRToHL = (r: RawRepository) => r.name :+: r.resolver.getClass.getName :+: HNil - implicit def chainRToHL = (c: ChainedResolver) => c.name :+: c.resolvers :+: HNil - implicit def moduleToHL = (m: ModuleID) => m.organization :+: m.name :+: m.revision :+: m.configurations :+: m.isChanging :+: m.isTransitive :+: m.explicitArtifacts :+: m.exclusions :+: m.extraAttributes :+: m.crossVersion :+: HNil - } - import L3._ + implicit def emptyIC: InputCache[EmptyConfiguration] = wrapIn + implicit def inlineIC: InputCache[InlineConfiguration] = wrapIn + implicit def moduleConfIC: InputCache[ModuleConfiguration] = wrapIn - implicit lazy val chainedIC: InputCache[ChainedResolver] = InputCache.lzy(wrapIn) - implicit lazy val resolverIC: InputCache[Resolver] = - unionInputCache[Resolver, ChainedResolver :+: JavaNet1Repository :+: MavenRepository :+: FileRepository :+: URLRepository :+: SshRepository :+: SftpRepository :+: RawRepository :+: HNil] - implicit def moduleIC: InputCache[ModuleID] = wrapIn - implicitly[InputCache[Seq[Configuration]]] + object L3 { + implicit def mavenRToHL = (m: MavenRepository) => m.name :+: m.root :+: HNil + implicit def fileRToHL = (r: FileRepository) => r.name :+: r.configuration :+: r.patterns :+: HNil + implicit def urlRToHL = (u: URLRepository) => u.name :+: u.patterns :+: HNil + 
implicit def sshRToHL = (s: SshRepository) => s.name :+: s.connection :+: s.patterns :+: s.publishPermissions :+: HNil + implicit def sftpRToHL = (s: SftpRepository) => s.name :+: s.connection :+: s.patterns :+: HNil + implicit def rawRToHL = (r: RawRepository) => r.name :+: r.resolver.getClass.getName :+: HNil + implicit def chainRToHL = (c: ChainedResolver) => c.name :+: c.resolvers :+: HNil + implicit def moduleToHL = (m: ModuleID) => m.organization :+: m.name :+: m.revision :+: m.configurations :+: m.isChanging :+: m.isTransitive :+: m.explicitArtifacts :+: m.exclusions :+: m.extraAttributes :+: m.crossVersion :+: HNil + } + import L3._ - object L2 { - implicit def updateConfToHL = (u: UpdateConfiguration) => u.retrieve :+: u.missingOk :+: HNil - implicit def pomConfigurationHL = (c: PomConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil - implicit def ivyFileConfigurationHL = (c: IvyFileConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil - implicit def sshConnectionToHL = (s: SshConnection) => s.authentication :+: s.hostname :+: s.port :+: HNil + implicit lazy val chainedIC: InputCache[ChainedResolver] = InputCache.lzy(wrapIn) + implicit lazy val resolverIC: InputCache[Resolver] = + unionInputCache[Resolver, ChainedResolver :+: JavaNet1Repository :+: MavenRepository :+: FileRepository :+: URLRepository :+: SshRepository :+: SftpRepository :+: RawRepository :+: HNil] + implicit def moduleIC: InputCache[ModuleID] = wrapIn + implicitly[InputCache[Seq[Configuration]]] - implicit def artifactToHL = (a: Artifact) => a.name :+: a.`type` :+: a.extension :+: a.classifier :+: names(a.configurations) :+: a.url :+: a.extraAttributes :+: HNil - implicit def exclusionToHL = (e: ExclusionRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: HNil - implicit def crossToHL = (c: CrossVersion) => crossToInt(c) :+: HNil + object L2 { + implicit def updateConfToHL = (u: UpdateConfiguration) => u.retrieve :+: u.missingOk :+: 
HNil + implicit def pomConfigurationHL = (c: PomConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil + implicit def ivyFileConfigurationHL = (c: IvyFileConfiguration) => hash(c.file) :+: c.ivyScala :+: c.validate :+: HNil + implicit def sshConnectionToHL = (s: SshConnection) => s.authentication :+: s.hostname :+: s.port :+: HNil -/* implicit def deliverConfToHL = (p: DeliverConfiguration) => p.deliverIvyPattern :+: p.status :+: p.configurations :+: HNil + implicit def artifactToHL = (a: Artifact) => a.name :+: a.`type` :+: a.extension :+: a.classifier :+: names(a.configurations) :+: a.url :+: a.extraAttributes :+: HNil + implicit def exclusionToHL = (e: ExclusionRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: HNil + implicit def crossToHL = (c: CrossVersion) => crossToInt(c) :+: HNil + + /* implicit def deliverConfToHL = (p: DeliverConfiguration) => p.deliverIvyPattern :+: p.status :+: p.configurations :+: HNil implicit def publishConfToHL = (p: PublishConfiguration) => p.ivyFile :+: p.resolverName :+: p.artifacts :+: HNil*/ - } - import L2._ + } + import L2._ - implicit def updateConfIC: InputCache[UpdateConfiguration] = wrapIn - implicit def pomIC: InputCache[PomConfiguration] = wrapIn - implicit def ivyFileIC: InputCache[IvyFileConfiguration] = wrapIn - implicit def connectionIC: InputCache[SshConnection] = wrapIn - implicit def artifactIC: InputCache[Artifact] = wrapIn - implicit def exclusionIC: InputCache[ExclusionRule] = wrapIn - implicit def crossVersionIC: InputCache[CrossVersion] = wrapIn -/* implicit def publishConfIC: InputCache[PublishConfiguration] = wrapIn + implicit def updateConfIC: InputCache[UpdateConfiguration] = wrapIn + implicit def pomIC: InputCache[PomConfiguration] = wrapIn + implicit def ivyFileIC: InputCache[IvyFileConfiguration] = wrapIn + implicit def connectionIC: InputCache[SshConnection] = wrapIn + implicit def artifactIC: InputCache[Artifact] = wrapIn + implicit def exclusionIC: 
InputCache[ExclusionRule] = wrapIn + implicit def crossVersionIC: InputCache[CrossVersion] = wrapIn + /* implicit def publishConfIC: InputCache[PublishConfiguration] = wrapIn implicit def deliverConfIC: InputCache[DeliverConfiguration] = wrapIn*/ - object L1 { - implicit def retrieveToHL = (r: RetrieveConfiguration) => exists(r.retrieveDirectory) :+: r.outputPattern :+: HNil - implicit def ivyPathsToHL = (p: IvyPaths) => exists(p.baseDirectory) :+: p.ivyHome.map(exists.apply) :+: HNil - implicit def ivyScalaHL = (i: IvyScala) => i.scalaFullVersion :+: i.scalaBinaryVersion :+: names(i.configurations) :+: i.checkExplicit :+: i.filterImplicit :+: HNil - implicit def configurationToHL = (c: Configuration) => c.name :+: c.description :+: c.isPublic :+: names(c.extendsConfigs) :+: c.transitive :+: HNil + object L1 { + implicit def retrieveToHL = (r: RetrieveConfiguration) => exists(r.retrieveDirectory) :+: r.outputPattern :+: HNil + implicit def ivyPathsToHL = (p: IvyPaths) => exists(p.baseDirectory) :+: p.ivyHome.map(exists.apply) :+: HNil + implicit def ivyScalaHL = (i: IvyScala) => i.scalaFullVersion :+: i.scalaBinaryVersion :+: names(i.configurations) :+: i.checkExplicit :+: i.filterImplicit :+: HNil + implicit def configurationToHL = (c: Configuration) => c.name :+: c.description :+: c.isPublic :+: names(c.extendsConfigs) :+: c.transitive :+: HNil - implicit def passwordToHL = (s: PasswordAuthentication) => Hash(s.user) :+: password(s.password) :+: HNil - implicit def keyFileToHL = (s: KeyFileAuthentication) => Hash(s.user) :+: hash(s.keyfile) :+: password(s.password) :+: HNil + implicit def passwordToHL = (s: PasswordAuthentication) => Hash(s.user) :+: password(s.password) :+: HNil + implicit def keyFileToHL = (s: KeyFileAuthentication) => Hash(s.user) :+: hash(s.keyfile) :+: password(s.password) :+: HNil - implicit def patternsToHL = (p: Patterns) => p.ivyPatterns :+: p.artifactPatterns :+: p.isMavenCompatible :+: HNil - implicit def fileConfToHL = (f: 
FileConfiguration) => f.isLocal :+: f.isTransactional :+: HNil + implicit def patternsToHL = (p: Patterns) => p.ivyPatterns :+: p.artifactPatterns :+: p.isMavenCompatible :+: HNil + implicit def fileConfToHL = (f: FileConfiguration) => f.isLocal :+: f.isTransactional :+: HNil - implicit def externalIvyConfigurationToHL = (e: ExternalIvyConfiguration) => - exists(e.baseDirectory) :+: Hash.contentsIfLocal(e.uri) :+: HNil - } - import L1._ + implicit def externalIvyConfigurationToHL = (e: ExternalIvyConfiguration) => + exists(e.baseDirectory) :+: Hash.contentsIfLocal(e.uri) :+: HNil + } + import L1._ - implicit def ivyScalaIC: InputCache[IvyScala] = wrapIn - implicit def ivyPathsIC: InputCache[IvyPaths] = wrapIn - implicit def retrieveIC: InputCache[RetrieveConfiguration] = wrapIn - implicit def patternsIC: InputCache[Patterns] = wrapIn - implicit def fileConfIC: InputCache[FileConfiguration] = wrapIn - implicit def extIvyIC: InputCache[ExternalIvyConfiguration] = wrapIn - implicit def confIC: InputCache[Configuration] = wrapIn + implicit def ivyScalaIC: InputCache[IvyScala] = wrapIn + implicit def ivyPathsIC: InputCache[IvyPaths] = wrapIn + implicit def retrieveIC: InputCache[RetrieveConfiguration] = wrapIn + implicit def patternsIC: InputCache[Patterns] = wrapIn + implicit def fileConfIC: InputCache[FileConfiguration] = wrapIn + implicit def extIvyIC: InputCache[ExternalIvyConfiguration] = wrapIn + implicit def confIC: InputCache[Configuration] = wrapIn - implicit def authIC: InputCache[SshAuthentication] = - unionInputCache[SshAuthentication, PasswordAuthentication :+: KeyFileAuthentication :+: HNil] + implicit def authIC: InputCache[SshAuthentication] = + unionInputCache[SshAuthentication, PasswordAuthentication :+: KeyFileAuthentication :+: HNil] - implicit def javaNet1IC: InputCache[JavaNet1Repository] = singleton(JavaNet1Repository) + implicit def javaNet1IC: InputCache[JavaNet1Repository] = singleton(JavaNet1Repository) } diff --git 
a/main/actions/src/main/scala/sbt/Compiler.scala b/main/actions/src/main/scala/sbt/Compiler.scala index 459636f8e..cb075553c 100644 --- a/main/actions/src/main/scala/sbt/Compiler.scala +++ b/main/actions/src/main/scala/sbt/Compiler.scala @@ -3,78 +3,77 @@ */ package sbt - import xsbti.{Logger => _,_} - import xsbti.compile.{CompileOrder,GlobalsCache} - import CompileOrder.{JavaThenScala, Mixed, ScalaThenJava} - import compiler._ - import inc._ - import Locate.DefinesClass - import java.io.File +import xsbti.{ Logger => _, _ } +import xsbti.compile.{ CompileOrder, GlobalsCache } +import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava } +import compiler._ +import inc._ +import Locate.DefinesClass +import java.io.File -object Compiler -{ - val DefaultMaxErrors = 100 +object Compiler { + val DefaultMaxErrors = 100 - final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup) - final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder) - final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions) - final case class Compilers(scalac: AnalyzingCompiler, javac: JavaTool) + final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup) + final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder) + final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions) + final case class Compilers(scalac: AnalyzingCompiler, javac: JavaTool) - def inputs(classpath: Seq[File], sources: Seq[File], 
classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]], order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs = - new Inputs( - compilers, - new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order), - incSetup - ) + def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]], order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs = + new Inputs( + compilers, + new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order), + incSetup + ) - def compilers(cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers = - { - val scalaProvider = app.provider.scalaProvider - compilers(ScalaInstance(scalaProvider.version, scalaProvider.launcher), cpOptions) - } + def compilers(cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers = + { + val scalaProvider = app.provider.scalaProvider + compilers(ScalaInstance(scalaProvider.version, scalaProvider.launcher), cpOptions) + } - def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers = - compilers(instance, cpOptions, None) + def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers = + compilers(instance, cpOptions, None) - def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File])(implicit app: AppConfiguration, log: Logger): Compilers = - { - val javac = AggressiveCompile.directOrFork(instance, cpOptions, javaHome) - compilers(instance, cpOptions, javac) - } - def 
compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaCompiler.Fork)(implicit app: AppConfiguration, log: Logger): Compilers = - { - val javaCompiler = JavaCompiler.fork(cpOptions, instance)(javac) - compilers(instance, cpOptions, javaCompiler) - } - def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaTool)(implicit app: AppConfiguration, log: Logger): Compilers = - { - val scalac = scalaCompiler(instance, cpOptions) - new Compilers(scalac, javac) - } - def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = - { - val launcher = app.provider.scalaProvider.launcher - val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log) - val provider = ComponentCompiler.interfaceProvider(componentManager) - new AnalyzingCompiler(instance, provider, cpOptions, log) - } - def apply(in: Inputs, log: Logger): Analysis = - { - import in.compilers._ - import in.config._ - import in.incSetup._ - apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper)) - } - def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis = - { - import in.compilers._ - import in.config._ - import in.incSetup._ - val agg = new AggressiveCompile(cacheFile) - agg(scalac, javac, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions, - analysisMap, definesClass, reporter, order, skip, incOptions)(log) - } + def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File])(implicit app: AppConfiguration, log: Logger): Compilers = + { + val javac = AggressiveCompile.directOrFork(instance, cpOptions, javaHome) + compilers(instance, cpOptions, javac) + } + def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaCompiler.Fork)(implicit app: AppConfiguration, log: Logger): Compilers = + { + val javaCompiler = 
JavaCompiler.fork(cpOptions, instance)(javac) + compilers(instance, cpOptions, javaCompiler) + } + def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaTool)(implicit app: AppConfiguration, log: Logger): Compilers = + { + val scalac = scalaCompiler(instance, cpOptions) + new Compilers(scalac, javac) + } + def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = + { + val launcher = app.provider.scalaProvider.launcher + val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log) + val provider = ComponentCompiler.interfaceProvider(componentManager) + new AnalyzingCompiler(instance, provider, cpOptions, log) + } + def apply(in: Inputs, log: Logger): Analysis = + { + import in.compilers._ + import in.config._ + import in.incSetup._ + apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper)) + } + def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis = + { + import in.compilers._ + import in.config._ + import in.incSetup._ + val agg = new AggressiveCompile(cacheFile) + agg(scalac, javac, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions, + analysisMap, definesClass, reporter, order, skip, incOptions)(log) + } - private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) = - mappers.foldRight({p: A => p}) { (mapper, mappers) => {p: A => mapper(p).getOrElse(mappers(p))}} + private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) = + mappers.foldRight({ p: A => p }) { (mapper, mappers) => { p: A => mapper(p).getOrElse(mappers(p)) } } } diff --git a/main/actions/src/main/scala/sbt/Console.scala b/main/actions/src/main/scala/sbt/Console.scala index 38d2c7477..21f6df18a 100644 --- a/main/actions/src/main/scala/sbt/Console.scala +++ b/main/actions/src/main/scala/sbt/Console.scala @@ -3,28 +3,26 @@ */ package sbt - import java.io.File - 
import compiler.AnalyzingCompiler +import java.io.File +import compiler.AnalyzingCompiler -final class Console(compiler: AnalyzingCompiler) -{ - /** Starts an interactive scala interpreter session with the given classpath.*/ - def apply(classpath: Seq[File], log: Logger): Option[String] = - apply(classpath, Nil, "", "", log) +final class Console(compiler: AnalyzingCompiler) { + /** Starts an interactive scala interpreter session with the given classpath.*/ + def apply(classpath: Seq[File], log: Logger): Option[String] = + apply(classpath, Nil, "", "", log) - def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger): Option[String] = - apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log) - - def apply(classpath: Seq[File], options: Seq[String], loader: ClassLoader, initialCommands: String, cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Option[String] = - apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings) - - def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Option[String] = - { - def console0() = compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings) - JLine.withJLine( Run.executeTrapExit(console0, log) ) - } + def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger): Option[String] = + apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log) + + def apply(classpath: Seq[File], options: Seq[String], loader: ClassLoader, initialCommands: String, cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Option[String] = + apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings) + + def apply(classpath: Seq[File], options: Seq[String], 
initialCommands: String, cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Option[String] = + { + def console0() = compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings) + JLine.withJLine(Run.executeTrapExit(console0, log)) + } } -object Console -{ - def apply(conf: Compiler.Inputs): Console = new Console( conf.compilers.scalac ) +object Console { + def apply(conf: Compiler.Inputs): Console = new Console(conf.compilers.scalac) } \ No newline at end of file diff --git a/main/actions/src/main/scala/sbt/Doc.scala b/main/actions/src/main/scala/sbt/Doc.scala index 5e0a2937e..822b14565 100644 --- a/main/actions/src/main/scala/sbt/Doc.scala +++ b/main/actions/src/main/scala/sbt/Doc.scala @@ -3,89 +3,82 @@ */ package sbt - import java.io.{File, PrintWriter} - import compiler.{AnalyzingCompiler, JavaCompiler} +import java.io.{ File, PrintWriter } +import compiler.{ AnalyzingCompiler, JavaCompiler } - import Predef.{conforms => _, _} - import Types.:+: - import Path._ +import Predef.{ conforms => _, _ } +import Types.:+: +import Path._ - import sbinary.DefaultProtocol.FileFormat - import Cache.{defaultEquiv, hConsCache, hNilCache, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn} - import Tracked.{inputChanged, outputChanged} - import FilesInfo.{exists, hash, lastModified} +import sbinary.DefaultProtocol.FileFormat +import Cache.{ defaultEquiv, hConsCache, hNilCache, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn } +import Tracked.{ inputChanged, outputChanged } +import FilesInfo.{ exists, hash, lastModified } -object Doc -{ - import RawCompileLike._ - def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler): Gen = - scaladoc(label, cache, compiler, Seq()) - def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler, fileInputOptions: Seq[String]): Gen = - cached(cache, fileInputOptions, prepare(label + " Scala API 
documentation", compiler.doc)) - def javadoc(label: String, cache: File, doc: sbt.compiler.Javadoc): Gen = - javadoc(label, cache, doc, Seq()) - def javadoc(label: String, cache: File, doc: sbt.compiler.Javadoc, fileInputOptions: Seq[String]): Gen = - cached(cache, fileInputOptions, prepare(label + " Java API documentation", filterSources(javaSourcesOnly, doc.doc))) +object Doc { + import RawCompileLike._ + def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler): Gen = + scaladoc(label, cache, compiler, Seq()) + def scaladoc(label: String, cache: File, compiler: AnalyzingCompiler, fileInputOptions: Seq[String]): Gen = + cached(cache, fileInputOptions, prepare(label + " Scala API documentation", compiler.doc)) + def javadoc(label: String, cache: File, doc: sbt.compiler.Javadoc): Gen = + javadoc(label, cache, doc, Seq()) + def javadoc(label: String, cache: File, doc: sbt.compiler.Javadoc, fileInputOptions: Seq[String]): Gen = + cached(cache, fileInputOptions, prepare(label + " Java API documentation", filterSources(javaSourcesOnly, doc.doc))) - val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java") + val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java") - @deprecated("Use `scaladoc`", "0.13.0") - def apply(maximumErrors: Int, compiler: AnalyzingCompiler) = new Scaladoc(maximumErrors, compiler) + @deprecated("Use `scaladoc`", "0.13.0") + def apply(maximumErrors: Int, compiler: AnalyzingCompiler) = new Scaladoc(maximumErrors, compiler) - @deprecated("Use `javadoc`", "0.13.0") - def apply(maximumErrors: Int, compiler: sbt.compiler.Javadoc) = new Javadoc(maximumErrors, compiler) + @deprecated("Use `javadoc`", "0.13.0") + def apply(maximumErrors: Int, compiler: sbt.compiler.Javadoc) = new Javadoc(maximumErrors, compiler) } @deprecated("No longer used. 
See `Doc.javadoc` or `Doc.scaladoc`", "0.13.0") sealed trait Doc { - type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit + type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit - def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Unit + def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Unit - final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger) { - val logSnip = variant + " API documentation" - if(sources.isEmpty) - log.info("No sources available, skipping " + logSnip + "...") - else - { - log.info("Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...") - IO.delete(outputDirectory) - IO.createDirectory(outputDirectory) - docf(sources, classpath, outputDirectory, options, maxErrors, log) - log.info(logSnip + " generation successful.") - } - } + final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger) { + val logSnip = variant + " API documentation" + if (sources.isEmpty) + log.info("No sources available, skipping " + logSnip + "...") + else { + log.info("Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...") + IO.delete(outputDirectory) + IO.createDirectory(outputDirectory) + docf(sources, classpath, outputDirectory, options, maxErrors, log) + log.info(logSnip + " generation successful.") + } + } - def cached(cache: File, label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) - { - type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: String :+: File :+: Seq[String] :+: 
HNil - val inputs: Inputs = hash(sources.toSet) :+: lastModified(classpath.toSet) :+: classpath.absString :+: outputDirectory :+: options :+: HNil - implicit val stringEquiv: Equiv[String] = defaultEquiv - implicit val fileEquiv: Equiv[File] = defaultEquiv - val cachedDoc = inputChanged(cache / "inputs") { (inChanged, in: Inputs) => - outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => - if(inChanged || outChanged) - apply(label, sources, classpath, outputDirectory, options, log) - else - log.debug("Doc uptodate: " + outputDirectory.getAbsolutePath) - } - } - cachedDoc(inputs)(() => exists(outputDirectory.***.get.toSet)) - } + def cached(cache: File, label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) { + type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: String :+: File :+: Seq[String] :+: HNil + val inputs: Inputs = hash(sources.toSet) :+: lastModified(classpath.toSet) :+: classpath.absString :+: outputDirectory :+: options :+: HNil + implicit val stringEquiv: Equiv[String] = defaultEquiv + implicit val fileEquiv: Equiv[File] = defaultEquiv + val cachedDoc = inputChanged(cache / "inputs") { (inChanged, in: Inputs) => + outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => + if (inChanged || outChanged) + apply(label, sources, classpath, outputDirectory, options, log) + else + log.debug("Doc uptodate: " + outputDirectory.getAbsolutePath) + } + } + cachedDoc(inputs)(() => exists(outputDirectory.***.get.toSet)) + } } @deprecated("No longer used. 
See `Doc.scaladoc`", "0.13.0") -final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc -{ - def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) - { - generate("Scala", label, compiler.doc, sources, classpath, outputDirectory, options, maximumErrors, log) - } +final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc { + def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) { + generate("Scala", label, compiler.doc, sources, classpath, outputDirectory, options, maximumErrors, log) + } } @deprecated("No longer used. See `Doc.javadoc`", "0.13.0") -final class Javadoc(maximumErrors: Int, doc: sbt.compiler.Javadoc) extends Doc -{ - def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) - { - // javadoc doesn't handle *.scala properly, so we evict them from javadoc sources list. - generate("Java", label, doc.doc, sources.filterNot(_.name.endsWith(".scala")), classpath, outputDirectory, options, maximumErrors, log) - } +final class Javadoc(maximumErrors: Int, doc: sbt.compiler.Javadoc) extends Doc { + def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger) { + // javadoc doesn't handle *.scala properly, so we evict them from javadoc sources list. 
+ generate("Java", label, doc.doc, sources.filterNot(_.name.endsWith(".scala")), classpath, outputDirectory, options, maximumErrors, log) + } } diff --git a/main/actions/src/main/scala/sbt/DotGraph.scala b/main/actions/src/main/scala/sbt/DotGraph.scala index bbd7f2446..90fd9fe25 100644 --- a/main/actions/src/main/scala/sbt/DotGraph.scala +++ b/main/actions/src/main/scala/sbt/DotGraph.scala @@ -3,67 +3,61 @@ */ package sbt - import java.io.{File, Writer} - import inc.Relations +import java.io.{ File, Writer } +import inc.Relations -object DotGraph -{ - private def fToString(roots: Iterable[File]): (File => String) = - (x: File) => sourceToString(roots, x) - def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]) - { - val toString = fToString(sourceRoots) - apply(relations, outputDirectory, toString, toString) - } - def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]) - { - val packageOnly = (path: String) => - { - val last = path.lastIndexOf(File.separatorChar) - val packagePath = (if(last > 0) path.substring(0, last) else path).trim - if(packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.') - } - val toString = packageOnly compose fToString(sourceRoots) - apply(relations, outputDirectory, toString, toString) - } - def apply(relations: Relations, outputDir: File, sourceToString: File => String, externalToString: File => String) - { - def file(name: String) = new File(outputDir, name) - IO.createDirectory(outputDir) - generateGraph(file("int-source-deps"), "dependencies", relations.internalSrcDep, sourceToString, sourceToString) - generateGraph(file("binary-dependencies"), "externalDependencies", relations.binaryDep, externalToString, sourceToString) - } +object DotGraph { + private def fToString(roots: Iterable[File]): (File => String) = + (x: File) => sourceToString(roots, x) + def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]) { + val toString = 
fToString(sourceRoots) + apply(relations, outputDirectory, toString, toString) + } + def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]) { + val packageOnly = (path: String) => + { + val last = path.lastIndexOf(File.separatorChar) + val packagePath = (if (last > 0) path.substring(0, last) else path).trim + if (packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.') + } + val toString = packageOnly compose fToString(sourceRoots) + apply(relations, outputDirectory, toString, toString) + } + def apply(relations: Relations, outputDir: File, sourceToString: File => String, externalToString: File => String) { + def file(name: String) = new File(outputDir, name) + IO.createDirectory(outputDir) + generateGraph(file("int-source-deps"), "dependencies", relations.internalSrcDep, sourceToString, sourceToString) + generateGraph(file("binary-dependencies"), "externalDependencies", relations.binaryDep, externalToString, sourceToString) + } - def generateGraph[Key, Value](file: File, graphName: String, relation: Relation[Key, Value], - keyToString: Key => String, valueToString: Value => String) - { - import scala.collection.mutable.{HashMap, HashSet} - val mappedGraph = new HashMap[String, HashSet[String]] - for( (key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values) - mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString(value) + def generateGraph[Key, Value](file: File, graphName: String, relation: Relation[Key, Value], + keyToString: Key => String, valueToString: Value => String) { + import scala.collection.mutable.{ HashMap, HashSet } + val mappedGraph = new HashMap[String, HashSet[String]] + for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values) + mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString(value) - val mappings = - for { - (dependsOn, dependants) <- mappedGraph.toSeq - dependant <- dependants - if 
dependant != dependsOn && !dependsOn.isEmpty && !dependant.isEmpty - } - yield "\"" + dependant + "\" -> \"" + dependsOn + "\"" + val mappings = + for { + (dependsOn, dependants) <- mappedGraph.toSeq + dependant <- dependants + if dependant != dependsOn && !dependsOn.isEmpty && !dependant.isEmpty + } yield "\"" + dependant + "\" -> \"" + dependsOn + "\"" - val lines = - ("digraph " + graphName + " {") +: - mappings :+ - "}" - - IO.writeLines(file, lines) - } - def sourceToString(roots: Iterable[File], source: File) = - relativized(roots, source).trim.stripSuffix(".scala").stripSuffix(".java") + val lines = + ("digraph " + graphName + " {") +: + mappings :+ + "}" - private def relativized(roots: Iterable[File], path: File): String = - { - val relativized = roots.flatMap(root => IO.relativize(root, path)) - val shortest = (Int.MaxValue /: relativized)(_ min _.length) - relativized.find(_.length == shortest).getOrElse(path.getName) - } + IO.writeLines(file, lines) + } + def sourceToString(roots: Iterable[File], source: File) = + relativized(roots, source).trim.stripSuffix(".scala").stripSuffix(".java") + + private def relativized(roots: Iterable[File], path: File): String = + { + val relativized = roots.flatMap(root => IO.relativize(root, path)) + val shortest = (Int.MaxValue /: relativized)(_ min _.length) + relativized.find(_.length == shortest).getOrElse(path.getName) + } } \ No newline at end of file diff --git a/main/actions/src/main/scala/sbt/ForkTests.scala b/main/actions/src/main/scala/sbt/ForkTests.scala index 63b8da1d4..c13ce1a2d 100755 --- a/main/actions/src/main/scala/sbt/ForkTests.scala +++ b/main/actions/src/main/scala/sbt/ForkTests.scala @@ -7,127 +7,130 @@ import scala.collection.mutable import testing._ import java.net.ServerSocket import java.io._ -import Tests.{Output => TestOutput, _} +import Tests.{ Output => TestOutput, _ } import ForkMain._ -private[sbt] object ForkTests -{ - def apply(runners: Map[TestFramework, Runner], tests: 
List[TestDefinition], config: Execution, classpath: Seq[File], fork: ForkOptions, log: Logger): Task[TestOutput] = { - val opts = processOptions(config, tests, log) +private[sbt] object ForkTests { + def apply(runners: Map[TestFramework, Runner], tests: List[TestDefinition], config: Execution, classpath: Seq[File], fork: ForkOptions, log: Logger): Task[TestOutput] = { + val opts = processOptions(config, tests, log) - import std.TaskExtra._ - val dummyLoader = this.getClass.getClassLoader // can't provide the loader for test classes, which is in another jvm - def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader)) + import std.TaskExtra._ + val dummyLoader = this.getClass.getClassLoader // can't provide the loader for test classes, which is in another jvm + def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader)) - val main = - if(opts.tests.isEmpty) - constant( TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty) ) - else - mainTestTask(runners, opts, classpath, fork, log, config.parallel).tagw(config.tags: _*) - main.dependsOn( all(opts.setup) : _*) flatMap { results => - all(opts.cleanup).join.map( _ => results) - } - } + val main = + if (opts.tests.isEmpty) + constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty)) + else + mainTestTask(runners, opts, classpath, fork, log, config.parallel).tagw(config.tags: _*) + main.dependsOn(all(opts.setup): _*) flatMap { results => + all(opts.cleanup).join.map(_ => results) + } + } - private[this] def mainTestTask(runners: Map[TestFramework, Runner], opts: ProcessedOptions, classpath: Seq[File], fork: ForkOptions, log: Logger, parallel: Boolean): Task[TestOutput] = - std.TaskExtra.task - { - val server = new ServerSocket(0) - val testListeners = opts.testListeners flatMap { - case tl: TestsListener => Some(tl) - case _ => None - } + private[this] def mainTestTask(runners: Map[TestFramework, Runner], opts: ProcessedOptions, classpath: 
Seq[File], fork: ForkOptions, log: Logger, parallel: Boolean): Task[TestOutput] = + std.TaskExtra.task { + val server = new ServerSocket(0) + val testListeners = opts.testListeners flatMap { + case tl: TestsListener => Some(tl) + case _ => None + } - object Acceptor extends Runnable { - val resultsAcc = mutable.Map.empty[String, SuiteResult] - lazy val result = TestOutput(overall(resultsAcc.values.map(_.result)), resultsAcc.toMap, Iterable.empty) + object Acceptor extends Runnable { + val resultsAcc = mutable.Map.empty[String, SuiteResult] + lazy val result = TestOutput(overall(resultsAcc.values.map(_.result)), resultsAcc.toMap, Iterable.empty) - def run() { - val socket = - try { - server.accept() - } catch { - case e: java.net.SocketException => - log.error("Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage) - log.trace(e) - server.close() - return - } - val os = new ObjectOutputStream(socket.getOutputStream) - // Must flush the header that the constructor writes, otherwise the ObjectInputStream on the other end may block indefinitely - os.flush() - val is = new ObjectInputStream(socket.getInputStream) + def run() { + val socket = + try { + server.accept() + } catch { + case e: java.net.SocketException => + log.error("Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage) + log.trace(e) + server.close() + return + } + val os = new ObjectOutputStream(socket.getOutputStream) + // Must flush the header that the constructor writes, otherwise the ObjectInputStream on the other end may block indefinitely + os.flush() + val is = new ObjectInputStream(socket.getInputStream) - try { - val config = new ForkConfiguration(log.ansiCodesSupported, parallel) - os.writeObject(config) + try { + val config = new ForkConfiguration(log.ansiCodesSupported, parallel) + os.writeObject(config) - val taskdefs = opts.tests.map(t => new TaskDef(t.name, forkFingerprint(t.fingerprint), t.explicitlySpecified, t.selectors)) - 
os.writeObject(taskdefs.toArray) + val taskdefs = opts.tests.map(t => new TaskDef(t.name, forkFingerprint(t.fingerprint), t.explicitlySpecified, t.selectors)) + os.writeObject(taskdefs.toArray) - os.writeInt(runners.size) - for ((testFramework, mainRunner) <- runners) { - os.writeObject(testFramework.implClassNames.toArray) - os.writeObject(mainRunner.args) - os.writeObject(mainRunner.remoteArgs) - } - os.flush() + os.writeInt(runners.size) + for ((testFramework, mainRunner) <- runners) { + os.writeObject(testFramework.implClassNames.toArray) + os.writeObject(mainRunner.args) + os.writeObject(mainRunner.remoteArgs) + } + os.flush() - new React(is, os, log, opts.testListeners, resultsAcc).react() - } finally { - is.close(); os.close(); socket.close() - } - } - } + new React(is, os, log, opts.testListeners, resultsAcc).react() + } finally { + is.close(); os.close(); socket.close() + } + } + } - try { - testListeners.foreach(_.doInit()) - val acceptorThread = new Thread(Acceptor) - acceptorThread.start() + try { + testListeners.foreach(_.doInit()) + val acceptorThread = new Thread(Acceptor) + acceptorThread.start() - val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain], IO.classLocationFile[Framework]) - val options = Seq("-classpath", fullCp mkString File.pathSeparator, classOf[ForkMain].getCanonicalName, server.getLocalPort.toString) - val ec = Fork.java(fork, options) - val result = - if (ec != 0) - TestOutput(TestResult.Error, Map("Running java with options " + options.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error), Iterable.empty) - else { - // Need to wait acceptor thread to finish its business - acceptorThread.join() - Acceptor.result - } + val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain], IO.classLocationFile[Framework]) + val options = Seq("-classpath", fullCp mkString File.pathSeparator, classOf[ForkMain].getCanonicalName, server.getLocalPort.toString) + val ec = Fork.java(fork, options) + val result = + if (ec != 
0) + TestOutput(TestResult.Error, Map("Running java with options " + options.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error), Iterable.empty) + else { + // Need to wait acceptor thread to finish its business + acceptorThread.join() + Acceptor.result + } - testListeners.foreach(_.doComplete(result.overall)) - result - } finally { - server.close() - } - } + testListeners.foreach(_.doComplete(result.overall)) + result + } finally { + server.close() + } + } - private[this] def forkFingerprint(f: Fingerprint): Fingerprint with Serializable = - f match { - case s: SubclassFingerprint => new ForkMain.SubclassFingerscan(s) - case a: AnnotatedFingerprint => new ForkMain.AnnotatedFingerscan(a) - case _ => error("Unknown fingerprint type: " + f.getClass) - } + private[this] def forkFingerprint(f: Fingerprint): Fingerprint with Serializable = + f match { + case s: SubclassFingerprint => new ForkMain.SubclassFingerscan(s) + case a: AnnotatedFingerprint => new ForkMain.AnnotatedFingerscan(a) + case _ => error("Unknown fingerprint type: " + f.getClass) + } } -private final class React(is: ObjectInputStream, os: ObjectOutputStream, log: Logger, listeners: Seq[TestReportListener], results: mutable.Map[String, SuiteResult]) -{ - import ForkTags._ - @annotation.tailrec def react(): Unit = is.readObject match { - case `Done` => os.writeObject(Done); os.flush() - case Array(`Error`, s: String) => log.error(s); react() - case Array(`Warn`, s: String) => log.warn(s); react() - case Array(`Info`, s: String) => log.info(s); react() - case Array(`Debug`, s: String) => log.debug(s); react() - case t: Throwable => log.trace(t); react() - case Array(group: String, tEvents: Array[Event]) => - listeners.foreach(_ startGroup group) - val event = TestEvent(tEvents) - listeners.foreach(_ testEvent event) - val suiteResult = SuiteResult(tEvents) - results += group -> suiteResult - listeners.foreach(_ endGroup (group, suiteResult.result)) - react() - } +private final class 
React(is: ObjectInputStream, os: ObjectOutputStream, log: Logger, listeners: Seq[TestReportListener], results: mutable.Map[String, SuiteResult]) { + import ForkTags._ + @annotation.tailrec def react(): Unit = is.readObject match { + case `Done` => + os.writeObject(Done); os.flush() + case Array(`Error`, s: String) => + log.error(s); react() + case Array(`Warn`, s: String) => + log.warn(s); react() + case Array(`Info`, s: String) => + log.info(s); react() + case Array(`Debug`, s: String) => + log.debug(s); react() + case t: Throwable => + log.trace(t); react() + case Array(group: String, tEvents: Array[Event]) => + listeners.foreach(_ startGroup group) + val event = TestEvent(tEvents) + listeners.foreach(_ testEvent event) + val suiteResult = SuiteResult(tEvents) + results += group -> suiteResult + listeners.foreach(_ endGroup (group, suiteResult.result)) + react() + } } diff --git a/main/actions/src/main/scala/sbt/Package.scala b/main/actions/src/main/scala/sbt/Package.scala index 047c53c36..67a32fc82 100644 --- a/main/actions/src/main/scala/sbt/Package.scala +++ b/main/actions/src/main/scala/sbt/Package.scala @@ -3,115 +3,105 @@ */ package sbt - import Predef.{conforms => _, _} - import java.io.File - import java.util.jar.{Attributes, Manifest} - import collection.JavaConversions._ - import Types.:+: - import Path._ +import Predef.{ conforms => _, _ } +import java.io.File +import java.util.jar.{ Attributes, Manifest } +import collection.JavaConversions._ +import Types.:+: +import Path._ - import sbinary.{DefaultProtocol,Format} - import DefaultProtocol.{FileFormat, immutableMapFormat, StringFormat, UnitFormat} - import Cache.{defaultEquiv, hConsCache, hNilCache, streamFormat, wrapIn} - import Tracked.{inputChanged, outputChanged} - import FileInfo.exists - import FilesInfo.lastModified +import sbinary.{ DefaultProtocol, Format } +import DefaultProtocol.{ FileFormat, immutableMapFormat, StringFormat, UnitFormat } +import Cache.{ defaultEquiv, hConsCache, hNilCache, 
streamFormat, wrapIn } +import Tracked.{ inputChanged, outputChanged } +import FileInfo.exists +import FilesInfo.lastModified sealed trait PackageOption -object Package -{ - final case class JarManifest(m: Manifest) extends PackageOption - { - assert(m != null) - } - final case class MainClass(mainClassName: String) extends PackageOption - final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption - def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = - { - val converted = for( (name,value) <- attributes ) yield (new Attributes.Name(name), value) - new ManifestAttributes(converted : _*) - } +object Package { + final case class JarManifest(m: Manifest) extends PackageOption { + assert(m != null) + } + final case class MainClass(mainClassName: String) extends PackageOption + final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption + def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = + { + val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value) + new ManifestAttributes(converted: _*) + } - def mergeAttributes(a1: Attributes, a2: Attributes) = a1 ++= a2 - // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) - def mergeManifests(manifest: Manifest, mergeManifest: Manifest) - { - mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes) - val entryMap = mapAsScalaMap(manifest.getEntries) - for((key, value) <- mergeManifest.getEntries) - { - entryMap.get(key) match - { - case Some(attributes) => mergeAttributes(attributes, value) - case None => entryMap put (key, value) - } - } - } + def mergeAttributes(a1: Attributes, a2: Attributes) = a1 ++= a2 + // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) + def mergeManifests(manifest: Manifest, mergeManifest: Manifest) { + mergeAttributes(manifest.getMainAttributes, 
mergeManifest.getMainAttributes) + val entryMap = mapAsScalaMap(manifest.getEntries) + for ((key, value) <- mergeManifest.getEntries) { + entryMap.get(key) match { + case Some(attributes) => mergeAttributes(attributes, value) + case None => entryMap put (key, value) + } + } + } - final class Configuration(val sources: Seq[(File, String)], val jar: File, val options: Seq[PackageOption]) - def apply(conf: Configuration, cacheFile: File, log: Logger) - { - val manifest = new Manifest - val main = manifest.getMainAttributes - for(option <- conf.options) - { - option match - { - case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest) - case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName) - case ManifestAttributes(attributes @ _*) => main ++= attributes - case _ => log.warn("Ignored unknown package option " + option) - } - } - setVersion(main) + final class Configuration(val sources: Seq[(File, String)], val jar: File, val options: Seq[PackageOption]) + def apply(conf: Configuration, cacheFile: File, log: Logger) { + val manifest = new Manifest + val main = manifest.getMainAttributes + for (option <- conf.options) { + option match { + case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest) + case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName) + case ManifestAttributes(attributes @ _*) => main ++= attributes + case _ => log.warn("Ignored unknown package option " + option) + } + } + setVersion(main) - val cachedMakeJar = inputChanged(cacheFile / "inputs") { (inChanged, inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) => - val sources :+: _ :+: manifest :+: HNil = inputs - outputChanged(cacheFile / "output") { (outChanged, jar: PlainFileInfo) => - if(inChanged || outChanged) - makeJar(sources.toSeq, jar.file, manifest, log) - else - log.debug("Jar uptodate: " + jar.file) - } - } + val cachedMakeJar = inputChanged(cacheFile / "inputs") { 
(inChanged, inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) => + val sources :+: _ :+: manifest :+: HNil = inputs + outputChanged(cacheFile / "output") { (outChanged, jar: PlainFileInfo) => + if (inChanged || outChanged) + makeJar(sources.toSeq, jar.file, manifest, log) + else + log.debug("Jar uptodate: " + jar.file) + } + } - val map = conf.sources.toMap - val inputs = map :+: lastModified(map.keySet.toSet) :+: manifest :+: HNil - cachedMakeJar(inputs)(() => exists(conf.jar)) - } - def setVersion(main: Attributes) - { - val version = Attributes.Name.MANIFEST_VERSION - if(main.getValue(version) eq null) - main.put(version, "1.0") - } - def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = - { - import Attributes.Name._ - val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) - val attribVals = Seq(name, version, orgName) - ManifestAttributes(attribKeys zip attribVals : _*) - } - def addImplManifestAttributes(name: String, version: String, homepage: Option[java.net.URL], org: String, orgName: String): PackageOption = - { - import Attributes.Name._ - val attribKeys = Seq(IMPLEMENTATION_TITLE, IMPLEMENTATION_VERSION, IMPLEMENTATION_VENDOR, IMPLEMENTATION_VENDOR_ID) - val attribVals = Seq(name, version, orgName, org) - ManifestAttributes((attribKeys zip attribVals) ++ { homepage map(h => (IMPLEMENTATION_URL, h.toString)) } : _*) - } - def makeJar(sources: Seq[(File, String)], jar: File, manifest: Manifest, log: Logger) - { - log.info("Packaging " + jar.getAbsolutePath + " ...") - IO.delete(jar) - log.debug(sourcesDebugString(sources)) - IO.jar(sources, jar, manifest) - log.info("Done packaging.") - } - def sourcesDebugString(sources: Seq[(File, String)]): String = - "Input file mappings:\n\t" + (sources map { case (f,s) => s + "\n\t " + f} mkString("\n\t") ) + val map = conf.sources.toMap + val inputs = map :+: lastModified(map.keySet.toSet) :+: manifest :+: HNil + 
cachedMakeJar(inputs)(() => exists(conf.jar)) + } + def setVersion(main: Attributes) { + val version = Attributes.Name.MANIFEST_VERSION + if (main.getValue(version) eq null) + main.put(version, "1.0") + } + def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = + { + import Attributes.Name._ + val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) + val attribVals = Seq(name, version, orgName) + ManifestAttributes(attribKeys zip attribVals: _*) + } + def addImplManifestAttributes(name: String, version: String, homepage: Option[java.net.URL], org: String, orgName: String): PackageOption = + { + import Attributes.Name._ + val attribKeys = Seq(IMPLEMENTATION_TITLE, IMPLEMENTATION_VERSION, IMPLEMENTATION_VENDOR, IMPLEMENTATION_VENDOR_ID) + val attribVals = Seq(name, version, orgName, org) + ManifestAttributes((attribKeys zip attribVals) ++ { homepage map (h => (IMPLEMENTATION_URL, h.toString)) }: _*) + } + def makeJar(sources: Seq[(File, String)], jar: File, manifest: Manifest, log: Logger) { + log.info("Packaging " + jar.getAbsolutePath + " ...") + IO.delete(jar) + log.debug(sourcesDebugString(sources)) + IO.jar(sources, jar, manifest) + log.info("Done packaging.") + } + def sourcesDebugString(sources: Seq[(File, String)]): String = + "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t")) - implicit def manifestEquiv: Equiv[Manifest] = defaultEquiv - implicit def manifestFormat: Format[Manifest] = streamFormat( _ write _, in => new Manifest(in)) - - implicit def stringMapEquiv: Equiv[Map[File, String]] = defaultEquiv + implicit def manifestEquiv: Equiv[Manifest] = defaultEquiv + implicit def manifestFormat: Format[Manifest] = streamFormat(_ write _, in => new Manifest(in)) + + implicit def stringMapEquiv: Equiv[Map[File, String]] = defaultEquiv } \ No newline at end of file diff --git a/main/actions/src/main/scala/sbt/RawCompileLike.scala 
b/main/actions/src/main/scala/sbt/RawCompileLike.scala index 6cddd4eb5..0a4f7e0db 100644 --- a/main/actions/src/main/scala/sbt/RawCompileLike.scala +++ b/main/actions/src/main/scala/sbt/RawCompileLike.scala @@ -3,80 +3,78 @@ */ package sbt - import java.io.File - import compiler.{AnalyzingCompiler, JavaCompiler} +import java.io.File +import compiler.{ AnalyzingCompiler, JavaCompiler } - import Predef.{conforms => _, _} - import Types.:+: - import Path._ +import Predef.{ conforms => _, _ } +import Types.:+: +import Path._ - import sbinary.DefaultProtocol.FileFormat - import Cache.{defaultEquiv, hConsCache, hNilCache, IntFormat, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn} - import Tracked.{inputChanged, outputChanged} - import FilesInfo.{exists, hash, lastModified} +import sbinary.DefaultProtocol.FileFormat +import Cache.{ defaultEquiv, hConsCache, hNilCache, IntFormat, seqCache, seqFormat, streamFormat, StringFormat, UnitFormat, wrapIn } +import Tracked.{ inputChanged, outputChanged } +import FilesInfo.{ exists, hash, lastModified } -object RawCompileLike -{ - type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit +object RawCompileLike { + type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit - private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = - { - @annotation.tailrec - def loop(opt: List[String], result: List[File]): List[File] = { - opt.dropWhile(! 
fileInputOpts.contains(_)) match { - case List(_, fileOpt, tail @ _*) => - { - val file = new File(fileOpt) - if(file.isFile) loop(tail.toList, file :: result) - else loop(tail.toList, result) - } - case Nil | List(_) => result - } - } - loop(options.toList, Nil) - } + private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = + { + @annotation.tailrec + def loop(opt: List[String], result: List[File]): List[File] = { + opt.dropWhile(!fileInputOpts.contains(_)) match { + case List(_, fileOpt, tail @ _*) => + { + val file = new File(fileOpt) + if (file.isFile) loop(tail.toList, file :: result) + else loop(tail.toList, result) + } + case Nil | List(_) => result + } + } + loop(options.toList, Nil) + } - def cached(cache: File, doCompile: Gen): Gen = cached(cache, Seq(), doCompile) - def cached(cache: File, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => - { - type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: String :+: File :+: Seq[String] :+: Int :+: HNil - val inputs: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(classpath.toSet) :+: classpath.absString :+: outputDirectory :+: options :+: maxErrors :+: HNil - implicit val stringEquiv: Equiv[String] = defaultEquiv - implicit val fileEquiv: Equiv[File] = defaultEquiv - implicit val intEquiv: Equiv[Int] = defaultEquiv - val cachedComp = inputChanged(cache / "inputs") { (inChanged, in: Inputs) => - outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => - if(inChanged || outChanged) - doCompile(sources, classpath, outputDirectory, options, maxErrors, log) - else - log.debug("Uptodate: " + outputDirectory.getAbsolutePath) - } - } - cachedComp(inputs)(() => exists(outputDirectory.***.get.toSet)) - } - def prepare(description: String, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => - { - 
if(sources.isEmpty) - log.info("No sources available, skipping " + description + "...") - else - { - log.info(description.capitalize + " to " + outputDirectory.absolutePath + "...") - IO.delete(outputDirectory) - IO.createDirectory(outputDirectory) - doCompile(sources, classpath, outputDirectory, options, maxErrors, log) - log.info(description.capitalize + " successful.") - } - } - def filterSources(f: File => Boolean, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => - doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log) + def cached(cache: File, doCompile: Gen): Gen = cached(cache, Seq(), doCompile) + def cached(cache: File, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => + { + type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: String :+: File :+: Seq[String] :+: Int :+: HNil + val inputs: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(classpath.toSet) :+: classpath.absString :+: outputDirectory :+: options :+: maxErrors :+: HNil + implicit val stringEquiv: Equiv[String] = defaultEquiv + implicit val fileEquiv: Equiv[File] = defaultEquiv + implicit val intEquiv: Equiv[Int] = defaultEquiv + val cachedComp = inputChanged(cache / "inputs") { (inChanged, in: Inputs) => + outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => + if (inChanged || outChanged) + doCompile(sources, classpath, outputDirectory, options, maxErrors, log) + else + log.debug("Uptodate: " + outputDirectory.getAbsolutePath) + } + } + cachedComp(inputs)(() => exists(outputDirectory.***.get.toSet)) + } + def prepare(description: String, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => + { + if (sources.isEmpty) + log.info("No sources available, skipping " + description + "...") + else { + log.info(description.capitalize + " to " + 
outputDirectory.absolutePath + "...") + IO.delete(outputDirectory) + IO.createDirectory(outputDirectory) + doCompile(sources, classpath, outputDirectory, options, maxErrors, log) + log.info(description.capitalize + " successful.") + } + } + def filterSources(f: File => Boolean, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => + doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log) - def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => - { - val compiler = new sbt.compiler.RawCompiler(instance, cpOptions, log) - compiler(sources, classpath, outputDirectory, options) - } - def compile(label: String, cache: File, instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = - cached(cache, prepare(label + " sources", rawCompile(instance, cpOptions))) + def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => + { + val compiler = new sbt.compiler.RawCompiler(instance, cpOptions, log) + compiler(sources, classpath, outputDirectory, options) + } + def compile(label: String, cache: File, instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = + cached(cache, prepare(label + " sources", rawCompile(instance, cpOptions))) - val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => () + val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => () } diff --git a/main/actions/src/main/scala/sbt/Sync.scala b/main/actions/src/main/scala/sbt/Sync.scala index 16698653b..e69fea0c4 100644 --- a/main/actions/src/main/scala/sbt/Sync.scala +++ b/main/actions/src/main/scala/sbt/Sync.scala @@ -3,89 +3,89 @@ */ package sbt - import java.io.File +import java.io.File -/** -Maintains a set of mappings so that they are uptodate. 
-Specifically, 'apply' applies the mappings by creating target directories and copying source files to their destination. -For each mapping no longer present, the old target is removed. -Caution: Existing files are overwritten. -Caution: The removal of old targets assumes that nothing else has written to or modified those files. - It tries not to obliterate large amounts of data by only removing previously tracked files and empty directories. - That is, it won't remove a directory with unknown (untracked) files in it. -Warning: It is therefore inappropriate to use this with anything other than an automatically managed destination or a dedicated target directory. -Warning: Specifically, don't mix this with a directory containing manually created files, like sources. -It is safe to use for its intended purpose: copying resources to a class output directory. -*/ -object Sync -{ - def apply(cacheFile: File, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists): Traversable[(File,File)] => Relation[File,File] = - mappings => - { - val relation = Relation.empty ++ mappings - noDuplicateTargets(relation) - val currentInfo = relation._1s.map(s => (s, inStyle(s)) ).toMap +/** + * Maintains a set of mappings so that they are uptodate. + * Specifically, 'apply' applies the mappings by creating target directories and copying source files to their destination. + * For each mapping no longer present, the old target is removed. + * Caution: Existing files are overwritten. + * Caution: The removal of old targets assumes that nothing else has written to or modified those files. + * It tries not to obliterate large amounts of data by only removing previously tracked files and empty directories. + * That is, it won't remove a directory with unknown (untracked) files in it. + * Warning: It is therefore inappropriate to use this with anything other than an automatically managed destination or a dedicated target directory. 
+ * Warning: Specifically, don't mix this with a directory containing manually created files, like sources. + * It is safe to use for its intended purpose: copying resources to a class output directory. + */ +object Sync { + def apply(cacheFile: File, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists): Traversable[(File, File)] => Relation[File, File] = + mappings => + { + val relation = Relation.empty ++ mappings + noDuplicateTargets(relation) + val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap - val (previousRelation, previousInfo) = readInfo(cacheFile)(inStyle.format) - val removeTargets = previousRelation._2s -- relation._2s + val (previousRelation, previousInfo) = readInfo(cacheFile)(inStyle.format) + val removeTargets = previousRelation._2s -- relation._2s - def outofdate(source: File, target: File): Boolean = - !previousRelation.contains(source, target) || - (previousInfo get source) != (currentInfo get source) || - !target.exists || - target.isDirectory != source.isDirectory + def outofdate(source: File, target: File): Boolean = + !previousRelation.contains(source, target) || + (previousInfo get source) != (currentInfo get source) || + !target.exists || + target.isDirectory != source.isDirectory - val updates = relation filter outofdate + val updates = relation filter outofdate - val (cleanDirs, cleanFiles) = (updates._2s ++ removeTargets).partition(_.isDirectory) + val (cleanDirs, cleanFiles) = (updates._2s ++ removeTargets).partition(_.isDirectory) - IO.delete(cleanFiles) - IO.deleteIfEmpty(cleanDirs) - updates.all.foreach((copy _).tupled) + IO.delete(cleanFiles) + IO.deleteIfEmpty(cleanDirs) + updates.all.foreach((copy _).tupled) - writeInfo(cacheFile, relation, currentInfo)(inStyle.format) - relation - } - - def copy(source: File, target: File): Unit = - if(source.isFile) - IO.copyFile(source, target, true) - else if(!target.exists) // we don't want to update the last modified time of an existing 
directory - { - IO.createDirectory(target) - IO.copyLastModified(source, target) - } + writeInfo(cacheFile, relation, currentInfo)(inStyle.format) + relation + } - def noDuplicateTargets(relation: Relation[File, File]) - { - val dups = relation.reverseMap.filter { case (target, srcs) => - srcs.size >= 2 && srcs.exists(!_.isDirectory) - } map { case (target, srcs) => - "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") - } - if(!dups.isEmpty) - sys.error("Duplicate mappings:" + dups.mkString) - } - - import java.io.{File, IOException} - import sbinary._ - import Operations.{read, write} - import DefaultProtocol.{FileFormat => _, _} - import sbt.inc.AnalysisFormats.{fileFormat, relationFormat} + def copy(source: File, target: File): Unit = + if (source.isFile) + IO.copyFile(source, target, true) + else if (!target.exists) // we don't want to update the last modified time of an existing directory + { + IO.createDirectory(target) + IO.copyLastModified(source, target) + } - def writeInfo[F <: FileInfo](file: File, relation: Relation[File, File], info: Map[File, F])(implicit infoFormat: Format[F]): Unit = - IO.gzipFileOut(file) { out => - write(out, (relation, info) ) - } + def noDuplicateTargets(relation: Relation[File, File]) { + val dups = relation.reverseMap.filter { + case (target, srcs) => + srcs.size >= 2 && srcs.exists(!_.isDirectory) + } map { + case (target, srcs) => + "\n\t" + target + "\nfrom\n\t" + srcs.mkString("\n\t\t") + } + if (!dups.isEmpty) + sys.error("Duplicate mappings:" + dups.mkString) + } - type RelationInfo[F] = (Relation[File,File], Map[File, F]) + import java.io.{ File, IOException } + import sbinary._ + import Operations.{ read, write } + import DefaultProtocol.{ FileFormat => _, _ } + import sbt.inc.AnalysisFormats.{ fileFormat, relationFormat } - def readInfo[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] = - try { readUncaught(file)(infoFormat) } - catch { case e: IOException => (Relation.empty, 
Map.empty) } + def writeInfo[F <: FileInfo](file: File, relation: Relation[File, File], info: Map[File, F])(implicit infoFormat: Format[F]): Unit = + IO.gzipFileOut(file) { out => + write(out, (relation, info)) + } - def readUncaught[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] = - IO.gzipFileIn(file) { in => - read[RelationInfo[F]](in) - } + type RelationInfo[F] = (Relation[File, File], Map[File, F]) + + def readInfo[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] = + try { readUncaught(file)(infoFormat) } + catch { case e: IOException => (Relation.empty, Map.empty) } + + def readUncaught[F <: FileInfo](file: File)(implicit infoFormat: Format[F]): RelationInfo[F] = + IO.gzipFileIn(file) { in => + read[RelationInfo[F]](in) + } } diff --git a/main/actions/src/main/scala/sbt/TestResultLogger.scala b/main/actions/src/main/scala/sbt/TestResultLogger.scala index c211035d2..3f79510a9 100644 --- a/main/actions/src/main/scala/sbt/TestResultLogger.scala +++ b/main/actions/src/main/scala/sbt/TestResultLogger.scala @@ -1,6 +1,6 @@ package sbt -import sbt.Tests.{Output, Summary} +import sbt.Tests.{ Output, Summary } /** * Logs information about tests after they finish. @@ -49,7 +49,7 @@ object TestResultLogger { } /** Creates a `TestResultLogger` that ignores its input and always performs the same logging. */ - def const(f: Logger => Unit) = apply((l,_,_) => f(l)) + def const(f: Logger => Unit) = apply((l, _, _) => f(l)) /** * Selects a `TestResultLogger` based on a given predicate. @@ -65,7 +65,7 @@ object TestResultLogger { def silenceWhenNoTests(d: Defaults.Main) = d.copy( printStandard = d.printStandard.unless((results, _) => results.events.isEmpty), - printNoTests = Null + printNoTests = Null ) object Defaults { @@ -73,11 +73,10 @@ object TestResultLogger { /** SBT's default `TestResultLogger`. Use `copy()` to change selective portions. */ case class Main( printStandard_? 
: Output => Boolean = Defaults.printStandard_?, - printSummary : TestResultLogger = Defaults.printSummary, - printStandard : TestResultLogger = Defaults.printStandard, - printFailures : TestResultLogger = Defaults.printFailures, - printNoTests : TestResultLogger = Defaults.printNoTests - ) extends TestResultLogger { + printSummary: TestResultLogger = Defaults.printSummary, + printStandard: TestResultLogger = Defaults.printStandard, + printFailures: TestResultLogger = Defaults.printFailures, + printNoTests: TestResultLogger = Defaults.printNoTests) extends TestResultLogger { override def run(log: Logger, results: Output, taskName: String): Unit = { def run(r: TestResultLogger): Unit = r.run(log, results, taskName) @@ -94,7 +93,7 @@ object TestResultLogger { results.overall match { case TestResult.Error | TestResult.Failed => throw new TestsFailedException - case TestResult.Passed => + case TestResult.Passed => } } } @@ -102,10 +101,10 @@ object TestResultLogger { val printSummary = TestResultLogger((log, results, _) => { val multipleFrameworks = results.summaries.size > 1 for (Summary(name, message) <- results.summaries) - if(message.isEmpty) + if (message.isEmpty) log.debug("Summary for " + name + " not available.") else { - if(multipleFrameworks) log.info(name) + if (multipleFrameworks) log.info(name) log.info(message) } }) @@ -117,19 +116,20 @@ object TestResultLogger { val printStandard = TestResultLogger((log, results, _) => { val (skippedCount, errorsCount, passedCount, failuresCount, ignoredCount, canceledCount, pendingCount) = - results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) { case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc), (name, testEvent)) => - (skippedAcc + testEvent.skippedCount, errorAcc + testEvent.errorCount, passedAcc + testEvent.passedCount, failureAcc + testEvent.failureCount, - ignoredAcc + testEvent.ignoredCount, canceledAcc + testEvent.canceledCount, pendingAcc + testEvent.pendingCount) + 
results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) { + case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc), (name, testEvent)) => + (skippedAcc + testEvent.skippedCount, errorAcc + testEvent.errorCount, passedAcc + testEvent.passedCount, failureAcc + testEvent.failureCount, + ignoredAcc + testEvent.ignoredCount, canceledAcc + testEvent.canceledCount, pendingAcc + testEvent.pendingCount) } val totalCount = failuresCount + errorsCount + skippedCount + passedCount val base = s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount" val otherCounts = Seq("Skipped" -> skippedCount, "Ignored" -> ignoredCount, "Canceled" -> canceledCount, "Pending" -> pendingCount) - val extra = otherCounts.filter(_._2 > 0).map{case(label,count) => s", $label $count" } + val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" } val postfix = base + extra.mkString results.overall match { - case TestResult.Error => log.error("Error: " + postfix) + case TestResult.Error => log.error("Error: " + postfix) case TestResult.Passed => log.info("Passed: " + postfix) case TestResult.Failed => log.error("Failed: " + postfix) } diff --git a/main/actions/src/main/scala/sbt/Tests.scala b/main/actions/src/main/scala/sbt/Tests.scala index c75521ced..486361565 100644 --- a/main/actions/src/main/scala/sbt/Tests.scala +++ b/main/actions/src/main/scala/sbt/Tests.scala @@ -3,269 +3,279 @@ */ package sbt - import std._ - import xsbt.api.{Discovered,Discovery} - import inc.Analysis - import TaskExtra._ - import Types._ - import xsbti.api.Definition - import ConcurrentRestrictions.Tag +import std._ +import xsbt.api.{ Discovered, Discovery } +import inc.Analysis +import TaskExtra._ +import Types._ +import xsbti.api.Definition +import ConcurrentRestrictions.Tag - import testing.{AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask} - import 
scala.annotation.tailrec +import testing.{ AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask } +import scala.annotation.tailrec - import java.io.File +import java.io.File sealed trait TestOption -object Tests -{ - /** The result of a test run. - * - * @param overall The overall result of execution across all tests for all test frameworks in this test run. - * @param events The result of each test group (suite) executed during this test run. - * @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated. - */ - final case class Output(overall: TestResult.Value, events: Map[String,SuiteResult], summaries: Iterable[Summary]) +object Tests { + /** + * The result of a test run. + * + * @param overall The overall result of execution across all tests for all test frameworks in this test run. + * @param events The result of each test group (suite) executed during this test run. + * @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated. + */ + final case class Output(overall: TestResult.Value, events: Map[String, SuiteResult], summaries: Iterable[Summary]) - /** Summarizes a test run. - * - * @param name The name of the test framework providing this summary. - * @param summaryText The summary message for tests run by the test framework. - */ - final case class Summary(name: String, summaryText: String) - - /** Defines a TestOption that will evaluate `setup` before any tests execute. - * The ClassLoader provided to `setup` is the loader containing the test classes that will be run. - * Setup is not currently performed for forked tests. */ - final case class Setup(setup: ClassLoader => Unit) extends TestOption + /** + * Summarizes a test run. + * + * @param name The name of the test framework providing this summary. 
+ * @param summaryText The summary message for tests run by the test framework. + */ + final case class Summary(name: String, summaryText: String) - /** Defines a TestOption that will evaluate `setup` before any tests execute. - * Setup is not currently performed for forked tests. */ - def Setup(setup: () => Unit) = new Setup(_ => setup()) + /** + * Defines a TestOption that will evaluate `setup` before any tests execute. + * The ClassLoader provided to `setup` is the loader containing the test classes that will be run. + * Setup is not currently performed for forked tests. + */ + final case class Setup(setup: ClassLoader => Unit) extends TestOption - /** Defines a TestOption that will evaluate `cleanup` after all tests execute. - * The ClassLoader provided to `cleanup` is the loader containing the test classes that ran. - * Cleanup is not currently performed for forked tests. */ - final case class Cleanup(cleanup: ClassLoader => Unit) extends TestOption + /** + * Defines a TestOption that will evaluate `setup` before any tests execute. + * Setup is not currently performed for forked tests. + */ + def Setup(setup: () => Unit) = new Setup(_ => setup()) - /** Defines a TestOption that will evaluate `cleanup` after all tests execute. - * Cleanup is not currently performed for forked tests. */ - def Cleanup(cleanup: () => Unit) = new Cleanup(_ => cleanup()) + /** + * Defines a TestOption that will evaluate `cleanup` after all tests execute. + * The ClassLoader provided to `cleanup` is the loader containing the test classes that ran. + * Cleanup is not currently performed for forked tests. + */ + final case class Cleanup(cleanup: ClassLoader => Unit) extends TestOption - /** The names of tests to explicitly exclude from execution. */ - final case class Exclude(tests: Iterable[String]) extends TestOption + /** + * Defines a TestOption that will evaluate `cleanup` after all tests execute. + * Cleanup is not currently performed for forked tests. 
+ */ + def Cleanup(cleanup: () => Unit) = new Cleanup(_ => cleanup()) - final case class Listeners(listeners: Iterable[TestReportListener]) extends TestOption + /** The names of tests to explicitly exclude from execution. */ + final case class Exclude(tests: Iterable[String]) extends TestOption - /** Selects tests by name to run. Only tests for which `filterTest` returns true will be run. */ - final case class Filter(filterTest: String => Boolean) extends TestOption + final case class Listeners(listeners: Iterable[TestReportListener]) extends TestOption - /** Test execution will be ordered by the position of the matching filter. */ - final case class Filters(filterTest: Seq[String => Boolean]) extends TestOption + /** Selects tests by name to run. Only tests for which `filterTest` returns true will be run. */ + final case class Filter(filterTest: String => Boolean) extends TestOption - /** Defines a TestOption that passes arguments `args` to all test frameworks. */ - def Argument(args: String*): Argument = Argument(None, args.toList) + /** Test execution will be ordered by the position of the matching filter. */ + final case class Filters(filterTest: Seq[String => Boolean]) extends TestOption - /** Defines a TestOption that passes arguments `args` to only the test framework `tf`. */ - def Argument(tf: TestFramework, args: String*): Argument = Argument(Some(tf), args.toList) + /** Defines a TestOption that passes arguments `args` to all test frameworks. */ + def Argument(args: String*): Argument = Argument(None, args.toList) - /** Defines arguments to pass to test frameworks. - * - * @param framework The test framework the arguments apply to if one is specified in Some. - * If None, the arguments will apply to all test frameworks. - * @param args The list of arguments to pass to the selected framework(s). 
- */ - final case class Argument(framework: Option[TestFramework], args: List[String]) extends TestOption + /** Defines a TestOption that passes arguments `args` to only the test framework `tf`. */ + def Argument(tf: TestFramework, args: String*): Argument = Argument(Some(tf), args.toList) - /** Configures test execution. - * - * @param options The options to apply to this execution, including test framework arguments, filters, - * and setup and cleanup work. - * @param parallel If true, execute each unit of work returned by the test frameworks in separate sbt.Tasks. - * If false, execute all work in a single sbt.Task. - * @param tags The tags that should be added to each test task. These can be used to apply restrictions on - * concurrent execution. - */ - final case class Execution(options: Seq[TestOption], parallel: Boolean, tags: Seq[(Tag, Int)]) + /** + * Defines arguments to pass to test frameworks. + * + * @param framework The test framework the arguments apply to if one is specified in Some. + * If None, the arguments will apply to all test frameworks. + * @param args The list of arguments to pass to the selected framework(s). + */ + final case class Argument(framework: Option[TestFramework], args: List[String]) extends TestOption + /** + * Configures test execution. + * + * @param options The options to apply to this execution, including test framework arguments, filters, + * and setup and cleanup work. + * @param parallel If true, execute each unit of work returned by the test frameworks in separate sbt.Tasks. + * If false, execute all work in a single sbt.Task. + * @param tags The tags that should be added to each test task. These can be used to apply restrictions on + * concurrent execution. + */ + final case class Execution(options: Seq[TestOption], parallel: Boolean, tags: Seq[(Tag, Int)]) - /** Configures whether a group of tests runs in the same JVM or are forked. 
*/ - sealed trait TestRunPolicy + /** Configures whether a group of tests runs in the same JVM or are forked. */ + sealed trait TestRunPolicy - /** Configures a group of tests to run in the same JVM. */ - case object InProcess extends TestRunPolicy + /** Configures a group of tests to run in the same JVM. */ + case object InProcess extends TestRunPolicy - /** Configures a group of tests to be forked in a new JVM with forking options specified by `config`. */ - final case class SubProcess(config: ForkOptions) extends TestRunPolicy - object SubProcess { - @deprecated("Construct SubProcess with a ForkOptions argument.", "0.13.0") - def apply(javaOptions: Seq[String]): SubProcess = SubProcess(ForkOptions(runJVMOptions = javaOptions)) - } + /** Configures a group of tests to be forked in a new JVM with forking options specified by `config`. */ + final case class SubProcess(config: ForkOptions) extends TestRunPolicy + object SubProcess { + @deprecated("Construct SubProcess with a ForkOptions argument.", "0.13.0") + def apply(javaOptions: Seq[String]): SubProcess = SubProcess(ForkOptions(runJVMOptions = javaOptions)) + } - /** A named group of tests configured to run in the same JVM or be forked. */ - final case class Group(name: String, tests: Seq[TestDefinition], runPolicy: TestRunPolicy) + /** A named group of tests configured to run in the same JVM or be forked. 
*/ + final case class Group(name: String, tests: Seq[TestDefinition], runPolicy: TestRunPolicy) - private[sbt] final class ProcessedOptions( - val tests: Seq[TestDefinition], - val setup: Seq[ClassLoader => Unit], - val cleanup: Seq[ClassLoader => Unit], - val testListeners: Seq[TestReportListener] - ) - private[sbt] def processOptions(config: Execution, discovered: Seq[TestDefinition], log: Logger): ProcessedOptions = - { - import collection.mutable.{HashSet, ListBuffer, Map, Set} - val testFilters = new ListBuffer[String => Boolean] - var orderedFilters = Seq[String => Boolean]() - val excludeTestsSet = new HashSet[String] - val setup, cleanup = new ListBuffer[ClassLoader => Unit] - val testListeners = new ListBuffer[TestReportListener] - val undefinedFrameworks = new ListBuffer[String] + private[sbt] final class ProcessedOptions( + val tests: Seq[TestDefinition], + val setup: Seq[ClassLoader => Unit], + val cleanup: Seq[ClassLoader => Unit], + val testListeners: Seq[TestReportListener]) + private[sbt] def processOptions(config: Execution, discovered: Seq[TestDefinition], log: Logger): ProcessedOptions = + { + import collection.mutable.{ HashSet, ListBuffer, Map, Set } + val testFilters = new ListBuffer[String => Boolean] + var orderedFilters = Seq[String => Boolean]() + val excludeTestsSet = new HashSet[String] + val setup, cleanup = new ListBuffer[ClassLoader => Unit] + val testListeners = new ListBuffer[TestReportListener] + val undefinedFrameworks = new ListBuffer[String] - for(option <- config.options) - { - option match - { - case Filter(include) => testFilters += include - case Filters(includes) => if(!orderedFilters.isEmpty) sys.error("Cannot define multiple ordered test filters.") else orderedFilters = includes - case Exclude(exclude) => excludeTestsSet ++= exclude - case Listeners(listeners) => testListeners ++= listeners - case Setup(setupFunction) => setup += setupFunction - case Cleanup(cleanupFunction) => cleanup += cleanupFunction - case a: 
Argument => // now handled by whatever constructs `runners` - } - } + for (option <- config.options) { + option match { + case Filter(include) => testFilters += include + case Filters(includes) => if (!orderedFilters.isEmpty) sys.error("Cannot define multiple ordered test filters.") else orderedFilters = includes + case Exclude(exclude) => excludeTestsSet ++= exclude + case Listeners(listeners) => testListeners ++= listeners + case Setup(setupFunction) => setup += setupFunction + case Cleanup(cleanupFunction) => cleanup += cleanupFunction + case a: Argument => // now handled by whatever constructs `runners` + } + } - if(excludeTestsSet.size > 0) - log.debug(excludeTestsSet.mkString("Excluding tests: \n\t", "\n\t", "")) - if(undefinedFrameworks.size > 0) - log.warn("Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks.mkString("\n\t")) + if (excludeTestsSet.size > 0) + log.debug(excludeTestsSet.mkString("Excluding tests: \n\t", "\n\t", "")) + if (undefinedFrameworks.size > 0) + log.warn("Arguments defined for test frameworks that are not present:\n\t" + undefinedFrameworks.mkString("\n\t")) - def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name)) - val filtered0 = discovered.filter(includeTest).toList.distinct - val tests = if(orderedFilters.isEmpty) filtered0 else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct - new ProcessedOptions(tests, setup.toList, cleanup.toList, testListeners.toList) - } + def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name)) + val filtered0 = discovered.filter(includeTest).toList.distinct + val tests = if (orderedFilters.isEmpty) filtered0 else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct + new ProcessedOptions(tests, setup.toList, cleanup.toList, testListeners.toList) + } - def apply(frameworks: 
Map[TestFramework, Framework], testLoader: ClassLoader, runners: Map[TestFramework, Runner], discovered: Seq[TestDefinition], config: Execution, log: Logger): Task[Output] = - { - val o = processOptions(config, discovered, log) - testTask(testLoader, frameworks, runners, o.tests, o.setup, o.cleanup, log, o.testListeners, config) - } + def apply(frameworks: Map[TestFramework, Framework], testLoader: ClassLoader, runners: Map[TestFramework, Runner], discovered: Seq[TestDefinition], config: Execution, log: Logger): Task[Output] = + { + val o = processOptions(config, discovered, log) + testTask(testLoader, frameworks, runners, o.tests, o.setup, o.cleanup, log, o.testListeners, config) + } - def testTask(loader: ClassLoader, frameworks: Map[TestFramework, Framework], runners: Map[TestFramework, Runner], tests: Seq[TestDefinition], - userSetup: Iterable[ClassLoader => Unit], userCleanup: Iterable[ClassLoader => Unit], - log: Logger, testListeners: Seq[TestReportListener], config: Execution): Task[Output] = - { - def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn( actions.toSeq.fork( _() ) : _*) - def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map {a => () => a(loader) } + def testTask(loader: ClassLoader, frameworks: Map[TestFramework, Framework], runners: Map[TestFramework, Runner], tests: Seq[TestDefinition], + userSetup: Iterable[ClassLoader => Unit], userCleanup: Iterable[ClassLoader => Unit], + log: Logger, testListeners: Seq[TestReportListener], config: Execution): Task[Output] = + { + def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*) + def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () => a(loader) } - val (frameworkSetup, runnables, frameworkCleanup) = - TestFramework.testTasks(frameworks, runners, loader, tests, log, testListeners) + val (frameworkSetup, runnables, frameworkCleanup) = + TestFramework.testTasks(frameworks, runners, loader, tests, log, 
testListeners) - val setupTasks = fj(partApp(userSetup) :+ frameworkSetup) - val mainTasks = - if(config.parallel) - makeParallel(loader, runnables, setupTasks, config.tags)//.toSeq.join - else - makeSerial(loader, runnables, setupTasks, config.tags) - val taggedMainTasks = mainTasks.tagw(config.tags : _*) - taggedMainTasks map processResults flatMap { results => - val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall)) - cleanupTasks map { _ => results } - } - } - type TestRunnable = (String, TestFunction) - - private def createNestedRunnables(loader: ClassLoader, testFun: TestFunction, nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] = - nestedTasks.view.zipWithIndex map { case (nt, idx) => - val testFunDef = testFun.taskDef - (testFunDef.fullyQualifiedName, TestFramework.createTestFunction(loader, new TaskDef(testFunDef.fullyQualifiedName + "-" + idx, testFunDef.fingerprint, testFunDef.explicitlySpecified, testFunDef.selectors), testFun.runner, nt)) - } + val setupTasks = fj(partApp(userSetup) :+ frameworkSetup) + val mainTasks = + if (config.parallel) + makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join + else + makeSerial(loader, runnables, setupTasks, config.tags) + val taggedMainTasks = mainTasks.tagw(config.tags: _*) + taggedMainTasks map processResults flatMap { results => + val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall)) + cleanupTasks map { _ => results } + } + } + type TestRunnable = (String, TestFunction) - def makeParallel(loader: ClassLoader, runnables: Iterable[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag,Int)]): Task[Map[String,SuiteResult]] = - toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks) + private def createNestedRunnables(loader: ClassLoader, testFun: TestFunction, nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] = + nestedTasks.view.zipWithIndex map { + case (nt, idx) => + val testFunDef = testFun.taskDef + 
(testFunDef.fullyQualifiedName, TestFramework.createTestFunction(loader, new TaskDef(testFunDef.fullyQualifiedName + "-" + idx, testFunDef.fingerprint, testFunDef.explicitlySpecified, testFunDef.selectors), testFun.runner, nt)) + } - def toTasks(loader: ClassLoader, runnables: Seq[TestRunnable], tags: Seq[(Tag,Int)]): Task[Map[String, SuiteResult]] = { - val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) } - tasks.join.map( _.foldLeft(Map.empty[String, SuiteResult]) { case (sum, e) => - sum ++ e - } ) - } + def makeParallel(loader: ClassLoader, runnables: Iterable[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = + toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks) - def toTask(loader: ClassLoader, name: String, fun: TestFunction, tags: Seq[(Tag,Int)]): Task[Map[String, SuiteResult]] = { - val base = task { (name, fun.apply()) } - val taggedBase = base.tagw(tags : _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)) : _*) - taggedBase flatMap { case (name, (result, nested)) => - val nestedRunnables = createNestedRunnables(loader, fun, nested) - toTasks(loader, nestedRunnables, tags).map( _.updated(name, result) ) - } - } + def toTasks(loader: ClassLoader, runnables: Seq[TestRunnable], tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = { + val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) } + tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) { + case (sum, e) => + sum ++ e + }) + } - def makeSerial(loader: ClassLoader, runnables: Seq[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag,Int)]): Task[List[(String, SuiteResult)]] = - { - @tailrec - def processRunnable(runnableList: List[TestRunnable], acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] = - runnableList match { - case hd :: rst => - val testFun = hd._2 - val (result, nestedTasks) = testFun.apply() - val nestedRunnables = createNestedRunnables(loader, testFun, 
nestedTasks) - processRunnable(nestedRunnables.toList ::: rst, (hd._1, result) :: acc) - case Nil => acc - } + def toTask(loader: ClassLoader, name: String, fun: TestFunction, tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = { + val base = task { (name, fun.apply()) } + val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*) + taggedBase flatMap { + case (name, (result, nested)) => + val nestedRunnables = createNestedRunnables(loader, fun, nested) + toTasks(loader, nestedRunnables, tags).map(_.updated(name, result)) + } + } - task { processRunnable(runnables.toList, List.empty) } dependsOn(setupTasks) - } + def makeSerial(loader: ClassLoader, runnables: Seq[TestRunnable], setupTasks: Task[Unit], tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] = + { + @tailrec + def processRunnable(runnableList: List[TestRunnable], acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] = + runnableList match { + case hd :: rst => + val testFun = hd._2 + val (result, nestedTasks) = testFun.apply() + val nestedRunnables = createNestedRunnables(loader, testFun, nestedTasks) + processRunnable(nestedRunnables.toList ::: rst, (hd._1, result) :: acc) + case Nil => acc + } - def processResults(results: Iterable[(String, SuiteResult)]): Output = - Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty) - def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] = - if (results.isEmpty) - task { Output(TestResult.Passed, Map.empty, Nil) } - else if (parallel) - reduced(results.toIndexedSeq, { - case (Output(v1, m1, _), Output(v2, m2, _)) => Output(if (v1.id < v2.id) v2 else v1, m1 ++ m2, Iterable.empty) - }) - else { - def sequence(tasks: List[Task[Output]], acc: List[Output]): Task[List[Output]] = tasks match { - case Nil => task(acc.reverse) - case hd::tl => hd flatMap { out => sequence(tl, out::acc) } - } - sequence(results.toList, List()) map { ress => - val (rs, ms) = ress.unzip { e => 
(e.overall, e.events) } - Output(overall(rs), ms reduce (_ ++ _), Iterable.empty) - } - } - def overall(results: Iterable[TestResult.Value]): TestResult.Value = - (TestResult.Passed /: results) { (acc, result) => if(acc.id < result.id) result else acc } - def discover(frameworks: Seq[Framework], analysis: Analysis, log: Logger): (Seq[TestDefinition], Set[String]) = - discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log) + task { processRunnable(runnables.toList, List.empty) } dependsOn (setupTasks) + } - def allDefs(analysis: Analysis) = analysis.apis.internal.values.flatMap(_.api.definitions).toSeq - def discover(fingerprints: Seq[Fingerprint], definitions: Seq[Definition], log: Logger): (Seq[TestDefinition], Set[String]) = - { - val subclasses = fingerprints collect { case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub) }; - val annotations = fingerprints collect { case ann: AnnotatedFingerprint => (ann.annotationName, ann.isModule, ann) }; - log.debug("Subclass fingerprints: " + subclasses) - log.debug("Annotation fingerprints: " + annotations) + def processResults(results: Iterable[(String, SuiteResult)]): Output = + Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty) + def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] = + if (results.isEmpty) + task { Output(TestResult.Passed, Map.empty, Nil) } + else if (parallel) + reduced(results.toIndexedSeq, { + case (Output(v1, m1, _), Output(v2, m2, _)) => Output(if (v1.id < v2.id) v2 else v1, m1 ++ m2, Iterable.empty) + }) + else { + def sequence(tasks: List[Task[Output]], acc: List[Output]): Task[List[Output]] = tasks match { + case Nil => task(acc.reverse) + case hd :: tl => hd flatMap { out => sequence(tl, out :: acc) } + } + sequence(results.toList, List()) map { ress => + val (rs, ms) = ress.unzip { e => (e.overall, e.events) } + Output(overall(rs), ms reduce (_ ++ _), Iterable.empty) + } + } + def overall(results: 
Iterable[TestResult.Value]): TestResult.Value = + (TestResult.Passed /: results) { (acc, result) => if (acc.id < result.id) result else acc } + def discover(frameworks: Seq[Framework], analysis: Analysis, log: Logger): (Seq[TestDefinition], Set[String]) = + discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log) - def firsts[A,B,C](s: Seq[(A,B,C)]): Set[A] = s.map(_._1).toSet - def defined(in: Seq[(String,Boolean,Fingerprint)], names: Set[String], IsModule: Boolean): Seq[Fingerprint] = - in collect { case (name, IsModule, print) if names(name) => print } + def allDefs(analysis: Analysis) = analysis.apis.internal.values.flatMap(_.api.definitions).toSeq + def discover(fingerprints: Seq[Fingerprint], definitions: Seq[Definition], log: Logger): (Seq[TestDefinition], Set[String]) = + { + val subclasses = fingerprints collect { case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub) }; + val annotations = fingerprints collect { case ann: AnnotatedFingerprint => (ann.annotationName, ann.isModule, ann) }; + log.debug("Subclass fingerprints: " + subclasses) + log.debug("Annotation fingerprints: " + annotations) - def toFingerprints(d: Discovered): Seq[Fingerprint] = - defined(subclasses, d.baseClasses, d.isModule) ++ - defined(annotations, d.annotations, d.isModule) + def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet + def defined(in: Seq[(String, Boolean, Fingerprint)], names: Set[String], IsModule: Boolean): Seq[Fingerprint] = + in collect { case (name, IsModule, print) if names(name) => print } - val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions) - // TODO: To pass in correct explicitlySpecified and selectors - val tests = for( (df, di) <- discovered; fingerprint <- toFingerprints(di) ) yield new TestDefinition(df.name, fingerprint, false, Array(new SuiteSelector)) - val mains = discovered collect { case (df, di) if di.hasMain => df.name } - (tests, mains.toSet) - } + def 
toFingerprints(d: Discovered): Seq[Fingerprint] = + defined(subclasses, d.baseClasses, d.isModule) ++ + defined(annotations, d.annotations, d.isModule) - @deprecated("Tests.showResults() has been superseded with TestResultLogger and setting 'testResultLogger'.", "0.13.5") - def showResults(log: Logger, results: Output, noTestsMessage: =>String): Unit = + val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions) + // TODO: To pass in correct explicitlySpecified and selectors + val tests = for ((df, di) <- discovered; fingerprint <- toFingerprints(di)) yield new TestDefinition(df.name, fingerprint, false, Array(new SuiteSelector)) + val mains = discovered collect { case (df, di) if di.hasMain => df.name } + (tests, mains.toSet) + } + + @deprecated("Tests.showResults() has been superseded with TestResultLogger and setting 'testResultLogger'.", "0.13.5") + def showResults(log: Logger, results: Output, noTestsMessage: => String): Unit = TestResultLogger.Default.copy(printNoTests = TestResultLogger.const(_ info noTestsMessage)) .run(log, results, "") } diff --git a/main/actions/src/main/scala/sbt/compiler/Eval.scala b/main/actions/src/main/scala/sbt/compiler/Eval.scala index 1fb4f6bb4..b92207551 100644 --- a/main/actions/src/main/scala/sbt/compiler/Eval.scala +++ b/main/actions/src/main/scala/sbt/compiler/Eval.scala @@ -2,431 +2,440 @@ package sbt package compiler import scala.reflect.Manifest -import scala.tools.nsc.{ast, interpreter, io, reporters, util, CompilerCommand, Global, Phase, Settings} +import scala.tools.nsc.{ ast, interpreter, io, reporters, util, CompilerCommand, Global, Phase, Settings } import interpreter.AbstractFileClassLoader -import io.{AbstractFile, PlainFile, VirtualDirectory} +import io.{ AbstractFile, PlainFile, VirtualDirectory } import ast.parser.Tokens -import reporters.{ConsoleReporter, Reporter} +import reporters.{ ConsoleReporter, Reporter } import scala.reflect.internal.util.BatchSourceFile -import Tokens.{EOF, 
NEWLINE, NEWLINES, SEMI} +import Tokens.{ EOF, NEWLINE, NEWLINES, SEMI } import java.io.File import java.nio.ByteBuffer import java.net.URLClassLoader -import Eval.{getModule, getValue, WrapValName} +import Eval.{ getModule, getValue, WrapValName } // TODO: provide a way to cleanup backing directory -final class EvalImports(val strings: Seq[(String,Int)], val srcName: String) +final class EvalImports(val strings: Seq[(String, Int)], val srcName: String) -/** The result of evaluating a Scala expression. The inferred type of the expression is given by `tpe`. -* The value may be obtained from `getValue` by providing a parent class loader that provides the classes from the classpath -* this expression was compiled against. Each call to `getValue` constructs a new class loader and loads -* the module from that class loader. `generated` contains the compiled classes and cache files related -* to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`. */ +/** + * The result of evaluating a Scala expression. The inferred type of the expression is given by `tpe`. + * The value may be obtained from `getValue` by providing a parent class loader that provides the classes from the classpath + * this expression was compiled against. Each call to `getValue` constructs a new class loader and loads + * the module from that class loader. `generated` contains the compiled classes and cache files related + * to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`. + */ final class EvalResult(val tpe: String, val getValue: ClassLoader => Any, val generated: Seq[File], val enclosingModule: String) -/** The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated, -* top-level module named `enclosingModule`. `generated` contains the compiled classes and cache files related to the definitions. 
-* A new class loader containing the module may be obtained from `loader` by passing the parent class loader providing the classes -* from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`. -* The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`.*/ -final class EvalDefinitions(val loader: ClassLoader => ClassLoader, val generated: Seq[File], val enclosingModule: String, val valNames: Seq[String]) -{ - def values(parent: ClassLoader): Seq[Any] = { - val module = getModule(enclosingModule, loader(parent)) - for(n <- valNames) yield - module.getClass.getMethod(n).invoke(module) - } +/** + * The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated, + * top-level module named `enclosingModule`. `generated` contains the compiled classes and cache files related to the definitions. + * A new class loader containing the module may be obtained from `loader` by passing the parent class loader providing the classes + * from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`. + * The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`. 
+ */ +final class EvalDefinitions(val loader: ClassLoader => ClassLoader, val generated: Seq[File], val enclosingModule: String, val valNames: Seq[String]) { + def values(parent: ClassLoader): Seq[Any] = { + val module = getModule(enclosingModule, loader(parent)) + for (n <- valNames) yield module.getClass.getMethod(n).invoke(module) + } } final class EvalException(msg: String) extends RuntimeException(msg) // not thread safe, since it reuses a Global instance -final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Settings => Reporter, backing: Option[File]) -{ - def this(mkReporter: Settings => Reporter, backing: Option[File]) = this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing) - def this() = this(s => new ConsoleReporter(s), None) +final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Settings => Reporter, backing: Option[File]) { + def this(mkReporter: Settings => Reporter, backing: Option[File]) = this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing) + def this() = this(s => new ConsoleReporter(s), None) - backing.foreach(IO.createDirectory) - val classpathString = Path.makeString(classpath ++ backing.toList) - val options = "-cp" +: classpathString +: optionsNoncp + backing.foreach(IO.createDirectory) + val classpathString = Path.makeString(classpath ++ backing.toList) + val options = "-cp" +: classpathString +: optionsNoncp - lazy val settings = - { - val s = new Settings(println) - val command = new CompilerCommand(options.toList, s) - s - } - lazy val reporter = mkReporter(settings) - lazy val global: Global = new Global(settings, reporter) - import global._ - import definitions._ + lazy val settings = + { + val s = new Settings(println) + val command = new CompilerCommand(options.toList, s) + s + } + lazy val reporter = mkReporter(settings) + lazy val global: Global = new Global(settings, reporter) + import global._ + import definitions._ - private[sbt] def unlinkDeferred() { 
- toUnlinkLater foreach unlink - toUnlinkLater = Nil - } + private[sbt] def unlinkDeferred() { + toUnlinkLater foreach unlink + toUnlinkLater = Nil + } - private[this] var toUnlinkLater = List[Symbol]() - private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym) + private[this] var toUnlinkLater = List[Symbol]() + private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym) - def eval(expression: String, imports: EvalImports = noImports, tpeName: Option[String] = None, srcName: String = "", line: Int = DefaultStartLine): EvalResult = - { - val ev = new EvalType[String] { - def makeUnit = mkUnit(srcName, line, expression) - def unlink = true - def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = { - val (parser, tree) = parse(unit, settingErrorStrings, _.expr()) - val tpt: Tree = expectedType(tpeName) - augment(parser, importTrees, tree, tpt, moduleName) - } - def extra(run: Run, unit: CompilationUnit) = atPhase(run.typerPhase.next) { (new TypeExtractor).getType(unit.body) } - def read(file: File) = IO.read(file) - def write(value: String, f: File) = IO.write(f, value) - } - val i = evalCommon(expression :: Nil, imports, tpeName, ev) - val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl)) - new EvalResult(i.extra, value, i.generated, i.enclosingModule) - } - def evalDefinitions(definitions: Seq[(String,scala.Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = - { - require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.") - val ev = new EvalType[Seq[String]] { - lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions) - def makeUnit = fullUnit - def unlink = false - def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = { - val fullParser = new syntaxAnalyzer.UnitParser(unit) - val trees = defUnits flatMap parseDefinitions - syntheticModule(fullParser, importTrees, trees.toList, 
moduleName) - } - def extra(run: Run, unit: CompilationUnit) = { - val tpes = valTypes.map(tpe => rootMirror.getRequiredClass(tpe).tpe) - atPhase(run.typerPhase.next) { (new ValExtractor(tpes)).getVals(unit.body) } - } - def read(file: File) = IO.readLines(file) - def write(value: Seq[String], file: File) = IO.writeLines(file, value) - } - val i = evalCommon(definitions.map(_._1), imports, Some(""), ev) - new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra) - } + def eval(expression: String, imports: EvalImports = noImports, tpeName: Option[String] = None, srcName: String = "", line: Int = DefaultStartLine): EvalResult = + { + val ev = new EvalType[String] { + def makeUnit = mkUnit(srcName, line, expression) + def unlink = true + def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = { + val (parser, tree) = parse(unit, settingErrorStrings, _.expr()) + val tpt: Tree = expectedType(tpeName) + augment(parser, importTrees, tree, tpt, moduleName) + } + def extra(run: Run, unit: CompilationUnit) = atPhase(run.typerPhase.next) { (new TypeExtractor).getType(unit.body) } + def read(file: File) = IO.read(file) + def write(value: String, f: File) = IO.write(f, value) + } + val i = evalCommon(expression :: Nil, imports, tpeName, ev) + val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl)) + new EvalResult(i.extra, value, i.generated, i.enclosingModule) + } + def evalDefinitions(definitions: Seq[(String, scala.Range)], imports: EvalImports, srcName: String, valTypes: Seq[String]): EvalDefinitions = + { + require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.") + val ev = new EvalType[Seq[String]] { + lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions) + def makeUnit = fullUnit + def unlink = false + def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = { + val fullParser = new syntaxAnalyzer.UnitParser(unit) + val trees = defUnits flatMap 
parseDefinitions + syntheticModule(fullParser, importTrees, trees.toList, moduleName) + } + def extra(run: Run, unit: CompilationUnit) = { + val tpes = valTypes.map(tpe => rootMirror.getRequiredClass(tpe).tpe) + atPhase(run.typerPhase.next) { (new ValExtractor(tpes)).getVals(unit.body) } + } + def read(file: File) = IO.readLines(file) + def write(value: Seq[String], file: File) = IO.writeLines(file, value) + } + val i = evalCommon(definitions.map(_._1), imports, Some(""), ev) + new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra) + } - private[this] def evalCommon[T](content: Seq[String], imports: EvalImports, tpeName: Option[String], ev: EvalType[T]): EvalIntermediate[T] = - { - import Eval._ - val hash = Hash.toHex(Hash(bytes( stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) :: - seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(tpeName)(bytes) :: Nil))) - val moduleName = makeModuleName(hash) - - lazy val unit = { - reporter.reset - ev.makeUnit - } - lazy val run = new Run { - override def units = (unit :: Nil).iterator - } - def unlinkAll(): Unit = for( (sym, _) <- run.symSource ) if(ev.unlink) unlink(sym) else toUnlinkLater ::= sym + private[this] def evalCommon[T](content: Seq[String], imports: EvalImports, tpeName: Option[String], ev: EvalType[T]): EvalIntermediate[T] = + { + import Eval._ + val hash = Hash.toHex(Hash(bytes(stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) :: + seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) :: optBytes(tpeName)(bytes) :: Nil))) + val moduleName = makeModuleName(hash) - val (extra, loader) = backing match { - case Some(back) if classExists(back, moduleName) => - val loader = (parent: ClassLoader) => new URLClassLoader(Array(back.toURI.toURL), parent) - val extra = ev.read(cacheFile(back,moduleName)) - (extra, loader) - case _ => - try { 
compileAndLoad(run, unit, imports, backing, moduleName, ev) } - finally { unlinkAll() } - } + lazy val unit = { + reporter.reset + ev.makeUnit + } + lazy val run = new Run { + override def units = (unit :: Nil).iterator + } + def unlinkAll(): Unit = for ((sym, _) <- run.symSource) if (ev.unlink) unlink(sym) else toUnlinkLater ::= sym - val classFiles = getClassFiles(backing, moduleName) - new EvalIntermediate(extra, loader, classFiles, moduleName) - } - // location of the cached type or definition information - private[this] def cacheFile(base: File, moduleName: String): File = new File(base, moduleName + ".cache") - private[this] def compileAndLoad[T](run: Run, unit: CompilationUnit, imports: EvalImports, backing: Option[File], moduleName: String, ev: EvalType[T]): (T, ClassLoader => ClassLoader) = - { - val dir = outputDirectory(backing) - settings.outputDirs setSingleOutput dir + val (extra, loader) = backing match { + case Some(back) if classExists(back, moduleName) => + val loader = (parent: ClassLoader) => new URLClassLoader(Array(back.toURI.toURL), parent) + val extra = ev.read(cacheFile(back, moduleName)) + (extra, loader) + case _ => + try { compileAndLoad(run, unit, imports, backing, moduleName, ev) } + finally { unlinkAll() } + } - val importTrees = parseImports(imports) - unit.body = ev.unitBody(unit, importTrees, moduleName) + val classFiles = getClassFiles(backing, moduleName) + new EvalIntermediate(extra, loader, classFiles, moduleName) + } + // location of the cached type or definition information + private[this] def cacheFile(base: File, moduleName: String): File = new File(base, moduleName + ".cache") + private[this] def compileAndLoad[T](run: Run, unit: CompilationUnit, imports: EvalImports, backing: Option[File], moduleName: String, ev: EvalType[T]): (T, ClassLoader => ClassLoader) = + { + val dir = outputDirectory(backing) + settings.outputDirs setSingleOutput dir - def compile(phase: Phase): Unit = - { - globalPhase = phase - if(phase == null 
|| phase == phase.next || reporter.hasErrors) - () - else - { - atPhase(phase) { phase.run } - compile(phase.next) - } - } + val importTrees = parseImports(imports) + unit.body = ev.unitBody(unit, importTrees, moduleName) - compile(run.namerPhase) - checkError("Type error in expression") + def compile(phase: Phase): Unit = + { + globalPhase = phase + if (phase == null || phase == phase.next || reporter.hasErrors) + () + else { + atPhase(phase) { phase.run } + compile(phase.next) + } + } - val extra = ev.extra(run, unit) - for(f <- backing) ev.write(extra, cacheFile(f, moduleName)) - val loader = (parent: ClassLoader) => new AbstractFileClassLoader(dir, parent) - (extra, loader) - } + compile(run.namerPhase) + checkError("Type error in expression") - private[this] def expectedType(tpeName: Option[String]): Tree = - tpeName match { - case Some(tpe) => parseType(tpe) - case None => TypeTree(NoType) - } + val extra = ev.extra(run, unit) + for (f <- backing) ev.write(extra, cacheFile(f, moduleName)) + val loader = (parent: ClassLoader) => new AbstractFileClassLoader(dir, parent) + (extra, loader) + } - private[this] def outputDirectory(backing: Option[File]): AbstractFile = - backing match { case None => new VirtualDirectory("", None); case Some(dir) => new PlainFile(dir) } + private[this] def expectedType(tpeName: Option[String]): Tree = + tpeName match { + case Some(tpe) => parseType(tpe) + case None => TypeTree(NoType) + } - def load(dir: AbstractFile, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new AbstractFileClassLoader(dir, parent)) - def loadPlain(dir: File, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent)) + private[this] def outputDirectory(backing: Option[File]): AbstractFile = + backing match { case None => new VirtualDirectory("", None); case Some(dir) => new PlainFile(dir) } - //wrap tree in object objectName { def WrapValName = } - def 
augment(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], tree: Tree, tpt: Tree, objectName: String): Tree = - { - val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree) - syntheticModule(parser, imports, method :: Nil, objectName) - } - private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], definitions: List[Tree], objectName: String): Tree = - { - val emptyTypeName = nme.EMPTY.toTypeName - def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } - def emptyInit = DefDef( - NoMods, - nme.CONSTRUCTOR, - Nil, - List(Nil), - TypeTree(), - Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)), Literal(Constant(()))) - ) + def load(dir: AbstractFile, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new AbstractFileClassLoader(dir, parent)) + def loadPlain(dir: File, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent)) - def moduleBody = Template(List(gen.scalaAnyRefConstr), emptyValDef, emptyInit :: definitions) - def moduleDef = ModuleDef(NoMods, newTermName(objectName), moduleBody) - parser.makePackaging(0, emptyPkg, (imports :+ moduleDef).toList) - } + //wrap tree in object objectName { def WrapValName = } + def augment(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], tree: Tree, tpt: Tree, objectName: String): Tree = + { + val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree) + syntheticModule(parser, imports, method :: Nil, objectName) + } + private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], definitions: List[Tree], objectName: String): Tree = + { + val emptyTypeName = nme.EMPTY.toTypeName + def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } + def emptyInit = DefDef( + NoMods, + nme.CONSTRUCTOR, + Nil, + List(Nil), + TypeTree(), + 
Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)), Literal(Constant(()))) + ) - private[this] final class TypeExtractor extends Traverser { - private[this] var result = "" - def getType(t: Tree) = { result = ""; traverse(t); result } - override def traverse(tree: Tree): Unit = tree match { - case d: DefDef if d.symbol.nameString == WrapValName => result = d.symbol.tpe.finalResultType.toString - case _ => super.traverse(tree) - } - } - /** Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of one of `types`.*/ - private[this] final class ValExtractor(types: Seq[Type]) extends Traverser { - private[this] var vals = List[String]() - def getVals(t: Tree): List[String] = { vals = Nil; traverse(t); vals } - override def traverse(tree: Tree): Unit = tree match { - case ValDef(_, n, actualTpe, _) if isTopLevelModule(tree.symbol.owner) && types.exists(_ <:< actualTpe.tpe) => - vals ::= nme.localToGetter(n).encoded - case _ => super.traverse(tree) - } - } - // inlined implemented of Symbol.isTopLevelModule that was removed in e5b050814deb2e7e1d6d05511d3a6cb6b013b549 - private[this] def isTopLevelModule(s: Symbol): Boolean = s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass + def moduleBody = Template(List(gen.scalaAnyRefConstr), emptyValDef, emptyInit :: definitions) + def moduleDef = ModuleDef(NoMods, newTermName(objectName), moduleBody) + parser.makePackaging(0, emptyPkg, (imports :+ moduleDef).toList) + } - private[this] final class EvalIntermediate[T](val extra: T, val loader: ClassLoader => ClassLoader, val generated: Seq[File], val enclosingModule: String) + private[this] final class TypeExtractor extends Traverser { + private[this] var result = "" + def getType(t: Tree) = { result = ""; traverse(t); result } + override def traverse(tree: Tree): Unit = tree match { + case d: DefDef if d.symbol.nameString == WrapValName => result = d.symbol.tpe.finalResultType.toString + 
case _ => super.traverse(tree) + } + } + /** Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of one of `types`.*/ + private[this] final class ValExtractor(types: Seq[Type]) extends Traverser { + private[this] var vals = List[String]() + def getVals(t: Tree): List[String] = { vals = Nil; traverse(t); vals } + override def traverse(tree: Tree): Unit = tree match { + case ValDef(_, n, actualTpe, _) if isTopLevelModule(tree.symbol.owner) && types.exists(_ <:< actualTpe.tpe) => + vals ::= nme.localToGetter(n).encoded + case _ => super.traverse(tree) + } + } + // inlined implemented of Symbol.isTopLevelModule that was removed in e5b050814deb2e7e1d6d05511d3a6cb6b013b549 + private[this] def isTopLevelModule(s: Symbol): Boolean = s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass - private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists - // TODO: use the code from Analyzer - private[this] def getClassFiles(backing: Option[File], moduleName: String): Seq[File] = - backing match { - case None => Nil - case Some(dir) => dir listFiles moduleClassFilter(moduleName) - } - private[this] def moduleClassFilter(moduleName: String) = new java.io.FilenameFilter { def accept(dir: File, s: String) = - (s contains moduleName) && (s endsWith ".class") - } + private[this] final class EvalIntermediate[T](val extra: T, val loader: ClassLoader => ClassLoader, val generated: Seq[File], val enclosingModule: String) - private[this] class ParseErrorStrings(val base: String, val extraBlank: String, val missingBlank: String, val extraSemi: String) - private[this] def definitionErrorStrings = new ParseErrorStrings( - base = "Error parsing definition.", - extraBlank = " Ensure that there are no blank lines within a definition.", - missingBlank = " Ensure that definitions are separated by blank lines.", - extraSemi = " A trailing semicolon is not permitted for standalone definitions." 
- ) - private[this] def settingErrorStrings = new ParseErrorStrings( - base = "Error parsing expression.", - extraBlank = " Ensure that there are no blank lines within a setting.", - missingBlank = " Ensure that settings are separated by blank lines.", - extraSemi = " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)" - ) + private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists + // TODO: use the code from Analyzer + private[this] def getClassFiles(backing: Option[File], moduleName: String): Seq[File] = + backing match { + case None => Nil + case Some(dir) => dir listFiles moduleClassFilter(moduleName) + } + private[this] def moduleClassFilter(moduleName: String) = new java.io.FilenameFilter { + def accept(dir: File, s: String) = + (s contains moduleName) && (s endsWith ".class") + } - /** Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state - * to catch errors that are common when the content is embedded in a blank-line-delimited format. */ - private[this] def parse[T](unit: CompilationUnit, errors: ParseErrorStrings, f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = - { - val parser = new syntaxAnalyzer.UnitParser(unit) + private[this] class ParseErrorStrings(val base: String, val extraBlank: String, val missingBlank: String, val extraSemi: String) + private[this] def definitionErrorStrings = new ParseErrorStrings( + base = "Error parsing definition.", + extraBlank = " Ensure that there are no blank lines within a definition.", + missingBlank = " Ensure that definitions are separated by blank lines.", + extraSemi = " A trailing semicolon is not permitted for standalone definitions." 
+ ) + private[this] def settingErrorStrings = new ParseErrorStrings( + base = "Error parsing expression.", + extraBlank = " Ensure that there are no blank lines within a setting.", + missingBlank = " Ensure that settings are separated by blank lines.", + extraSemi = " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)" + ) - val tree = f(parser) - val extra = parser.in.token match { - case EOF => errors.extraBlank - case _ => "" - } - checkError(errors.base + extra) + /** + * Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state + * to catch errors that are common when the content is embedded in a blank-line-delimited format. + */ + private[this] def parse[T](unit: CompilationUnit, errors: ParseErrorStrings, f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = + { + val parser = new syntaxAnalyzer.UnitParser(unit) - parser.accept(EOF) - val extra2 = parser.in.token match { - case SEMI => errors.extraSemi - case NEWLINE | NEWLINES => errors.missingBlank - case _ => "" - } - checkError(errors.base + extra2) + val tree = f(parser) + val extra = parser.in.token match { + case EOF => errors.extraBlank + case _ => "" + } + checkError(errors.base + extra) - (parser, tree) - } - private[this] def parseType(tpe: String): Tree = - { - val tpeParser = new syntaxAnalyzer.UnitParser(mkUnit("", DefaultStartLine, tpe)) - val tpt0: Tree = tpeParser.typ() - tpeParser.accept(EOF) - checkError("Error parsing expression type.") - tpt0 - } - private[this] def parseImports(imports: EvalImports): Seq[Tree] = - imports.strings flatMap { case (s, line) => parseImport(mkUnit(imports.srcName, line, s)) } - private[this] def parseImport(importUnit: CompilationUnit): Seq[Tree] = - { - val parser = new syntaxAnalyzer.UnitParser(importUnit) - val trees: Seq[Tree] = parser.importClause() - parser.accept(EOF) - checkError("Error parsing imports for expression.") - 
trees - } - private[this] def parseDefinitions(du: CompilationUnit): Seq[Tree] = - parse(du, definitionErrorStrings, parseDefinitions)._2 + parser.accept(EOF) + val extra2 = parser.in.token match { + case SEMI => errors.extraSemi + case NEWLINE | NEWLINES => errors.missingBlank + case _ => "" + } + checkError(errors.base + extra2) - /** Parses one or more definitions (defs, vals, lazy vals, classes, traits, modules). */ - private[this] def parseDefinitions(parser: syntaxAnalyzer.UnitParser): Seq[Tree] = - { - var defs = parser.nonLocalDefOrDcl - parser.acceptStatSepOpt() - while(!parser.isStatSeqEnd) { - val next = parser.nonLocalDefOrDcl - defs ++= next - parser.acceptStatSepOpt() - } - defs - } + (parser, tree) + } + private[this] def parseType(tpe: String): Tree = + { + val tpeParser = new syntaxAnalyzer.UnitParser(mkUnit("", DefaultStartLine, tpe)) + val tpt0: Tree = tpeParser.typ() + tpeParser.accept(EOF) + checkError("Error parsing expression type.") + tpt0 + } + private[this] def parseImports(imports: EvalImports): Seq[Tree] = + imports.strings flatMap { case (s, line) => parseImport(mkUnit(imports.srcName, line, s)) } + private[this] def parseImport(importUnit: CompilationUnit): Seq[Tree] = + { + val parser = new syntaxAnalyzer.UnitParser(importUnit) + val trees: Seq[Tree] = parser.importClause() + parser.accept(EOF) + checkError("Error parsing imports for expression.") + trees + } + private[this] def parseDefinitions(du: CompilationUnit): Seq[Tree] = + parse(du, definitionErrorStrings, parseDefinitions)._2 - private[this] trait EvalType[T] - { - /** Extracts additional information after the compilation unit is evaluated.*/ - def extra(run: Run, unit: CompilationUnit): T + /** Parses one or more definitions (defs, vals, lazy vals, classes, traits, modules). 
*/ + private[this] def parseDefinitions(parser: syntaxAnalyzer.UnitParser): Seq[Tree] = + { + var defs = parser.nonLocalDefOrDcl + parser.acceptStatSepOpt() + while (!parser.isStatSeqEnd) { + val next = parser.nonLocalDefOrDcl + defs ++= next + parser.acceptStatSepOpt() + } + defs + } - /** Deserializes the extra information for unchanged inputs from a cache file.*/ - def read(file: File): T + private[this] trait EvalType[T] { + /** Extracts additional information after the compilation unit is evaluated.*/ + def extra(run: Run, unit: CompilationUnit): T - /** Serializes the extra information to a cache file, where it can be `read` back if inputs haven't changed.*/ - def write(value: T, file: File): Unit - - /** Constructs the full compilation unit for this evaluation. - * This is used for error reporting during compilation. - * The `unitBody` method actually does the parsing and may parse the Tree from another source. */ - def makeUnit: CompilationUnit - - /** If true, all top-level symbols from this evaluation will be unlinked.*/ - def unlink: Boolean + /** Deserializes the extra information for unchanged inputs from a cache file.*/ + def read(file: File): T - /** Constructs the Tree to be compiled. The full compilation `unit` from `makeUnit` is provided along with the - * parsed imports `importTrees` to be used. `moduleName` should be name of the enclosing module. - * The Tree doesn't need to be parsed from the contents of `unit`. 
*/ - def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree - } + /** Serializes the extra information to a cache file, where it can be `read` back if inputs haven't changed.*/ + def write(value: T, file: File): Unit - val DefaultStartLine = 0 - private[this] def makeModuleName(hash: String): String = "$" + Hash.halve(hash) - private[this] def noImports = new EvalImports(Nil, "") - private[this] def mkUnit(srcName: String, firstLine: Int, s: String) = new CompilationUnit(new EvalSourceFile(srcName, firstLine, s)) - private[this] def checkError(label: String) = if(reporter.hasErrors) throw new EvalException(label) + /** + * Constructs the full compilation unit for this evaluation. + * This is used for error reporting during compilation. + * The `unitBody` method actually does the parsing and may parse the Tree from another source. + */ + def makeUnit: CompilationUnit - private[this] final class EvalSourceFile(name: String, startLine: Int, contents: String) extends BatchSourceFile(name, contents) - { - override def lineToOffset(line: Int): Int = super.lineToOffset((line - startLine) max 0) - override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) + startLine - } - /** Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree. - * Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing. 
*/ - private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String,scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = - { - def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap)) + /** If true, all top-level symbols from this evaluation will be unlinked.*/ + def unlink: Boolean - import collection.mutable.ListBuffer - val lines = new ListBuffer[Int]() - val defs = new ListBuffer[CompilationUnit]() - val fullContent = new java.lang.StringBuilder() - for( (defString, range) <- definitions ) - { - defs += fragmentUnit(defString, range.toArray) - fullContent.append(defString) - lines ++= range - fullContent.append("\n\n") - lines ++= (range.end :: range.end :: Nil) - } - val fullUnit = fragmentUnit(fullContent.toString, lines.toArray) - (fullUnit, defs.toSeq) - } + /** + * Constructs the Tree to be compiled. The full compilation `unit` from `makeUnit` is provided along with the + * parsed imports `importTrees` to be used. `moduleName` should be name of the enclosing module. + * The Tree doesn't need to be parsed from the contents of `unit`. + */ + def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree + } - /** Source file that can map the offset in the file to and from line numbers that may discontinuous. - * The values in `lineMap` must be ordered, but need not be consecutive. 
*/ - private[this] def fragmentSourceFile(srcName: String, content: String, lineMap: Array[Int]) = new BatchSourceFile(srcName, content) { - override def lineToOffset(line: Int): Int = super.lineToOffset(lineMap.indexWhere(_ == line) max 0) - override def offsetToLine(offset: Int): Int = index(lineMap, super.offsetToLine(offset)) - // the SourceFile attribute is populated from this method, so we are required to only return the name - override def toString = new File(srcName).getName - private[this] def index(a: Array[Int], i: Int): Int = if(i < 0 || i >= a.length) 0 else a(i) - } + val DefaultStartLine = 0 + private[this] def makeModuleName(hash: String): String = "$" + Hash.halve(hash) + private[this] def noImports = new EvalImports(Nil, "") + private[this] def mkUnit(srcName: String, firstLine: Int, s: String) = new CompilationUnit(new EvalSourceFile(srcName, firstLine, s)) + private[this] def checkError(label: String) = if (reporter.hasErrors) throw new EvalException(label) + + private[this] final class EvalSourceFile(name: String, startLine: Int, contents: String) extends BatchSourceFile(name, contents) { + override def lineToOffset(line: Int): Int = super.lineToOffset((line - startLine) max 0) + override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) + startLine + } + /** + * Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree. + * Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing. 
+ */ + private[this] def mkDefsUnit(srcName: String, definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = + { + def fragmentUnit(content: String, lineMap: Array[Int]) = new CompilationUnit(fragmentSourceFile(srcName, content, lineMap)) + + import collection.mutable.ListBuffer + val lines = new ListBuffer[Int]() + val defs = new ListBuffer[CompilationUnit]() + val fullContent = new java.lang.StringBuilder() + for ((defString, range) <- definitions) { + defs += fragmentUnit(defString, range.toArray) + fullContent.append(defString) + lines ++= range + fullContent.append("\n\n") + lines ++= (range.end :: range.end :: Nil) + } + val fullUnit = fragmentUnit(fullContent.toString, lines.toArray) + (fullUnit, defs.toSeq) + } + + /** + * Source file that can map the offset in the file to and from line numbers that may discontinuous. + * The values in `lineMap` must be ordered, but need not be consecutive. + */ + private[this] def fragmentSourceFile(srcName: String, content: String, lineMap: Array[Int]) = new BatchSourceFile(srcName, content) { + override def lineToOffset(line: Int): Int = super.lineToOffset(lineMap.indexWhere(_ == line) max 0) + override def offsetToLine(offset: Int): Int = index(lineMap, super.offsetToLine(offset)) + // the SourceFile attribute is populated from this method, so we are required to only return the name + override def toString = new File(srcName).getName + private[this] def index(a: Array[Int], i: Int): Int = if (i < 0 || i >= a.length) 0 else a(i) + } } -private object Eval -{ - def optBytes[T](o: Option[T])(f: T => Array[Byte]): Array[Byte] = seqBytes(o.toSeq)(f) - def stringSeqBytes(s: Seq[String]): Array[Byte] = seqBytes(s)(bytes) - def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f) - def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte] - def bytes(b: Boolean): Array[Byte] = Array[Byte](if(b) 1 else 0) - def filesModifiedBytes(fs: Array[File]): 
Array[Byte] = if(fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes) - def fileModifiedBytes(f: File): Array[Byte] = - (if(f.isDirectory) filesModifiedBytes(f listFiles classDirFilter) else bytes(f.lastModified)) ++ - bytes(f.getAbsolutePath) - def fileExistsBytes(f: File): Array[Byte] = - bytes(f.exists) ++ - bytes(f.getAbsolutePath) - - def bytes(s: String): Array[Byte] = s getBytes "UTF-8" - def bytes(l: Long): Array[Byte] = - { - val buffer = ByteBuffer.allocate(8) - buffer.putLong(l) - buffer.array - } - def bytes(i: Int): Array[Byte] = - { - val buffer = ByteBuffer.allocate(4) - buffer.putInt(i) - buffer.array - } +private object Eval { + def optBytes[T](o: Option[T])(f: T => Array[Byte]): Array[Byte] = seqBytes(o.toSeq)(f) + def stringSeqBytes(s: Seq[String]): Array[Byte] = seqBytes(s)(bytes) + def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f) + def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte] + def bytes(b: Boolean): Array[Byte] = Array[Byte](if (b) 1 else 0) + def filesModifiedBytes(fs: Array[File]): Array[Byte] = if (fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes) + def fileModifiedBytes(f: File): Array[Byte] = + (if (f.isDirectory) filesModifiedBytes(f listFiles classDirFilter) else bytes(f.lastModified)) ++ + bytes(f.getAbsolutePath) + def fileExistsBytes(f: File): Array[Byte] = + bytes(f.exists) ++ + bytes(f.getAbsolutePath) - /** The name of the synthetic val in the synthetic module that an expression is assigned to. 
*/ - final val WrapValName = "$sbtdef" + def bytes(s: String): Array[Byte] = s getBytes "UTF-8" + def bytes(l: Long): Array[Byte] = + { + val buffer = ByteBuffer.allocate(8) + buffer.putLong(l) + buffer.array + } + def bytes(i: Int): Array[Byte] = + { + val buffer = ByteBuffer.allocate(4) + buffer.putInt(i) + buffer.array + } - /** Gets the value of the expression wrapped in module `objectName`, which is accessible via `loader`. - * The module name should not include the trailing `$`. */ - def getValue[T](objectName: String, loader: ClassLoader): T = - { - val module = getModule(objectName, loader) - val accessor = module.getClass.getMethod(WrapValName) - val value = accessor.invoke(module) - value.asInstanceOf[T] - } + /** The name of the synthetic val in the synthetic module that an expression is assigned to. */ + final val WrapValName = "$sbtdef" - /** Gets the top-level module `moduleName` from the provided class `loader`. The module name should not include the trailing `$`.*/ - def getModule(moduleName: String, loader: ClassLoader): Any = - { - val clazz = Class.forName(moduleName + "$", true, loader) - clazz.getField("MODULE$").get(null) - } + /** + * Gets the value of the expression wrapped in module `objectName`, which is accessible via `loader`. + * The module name should not include the trailing `$`. + */ + def getValue[T](objectName: String, loader: ClassLoader): T = + { + val module = getModule(objectName, loader) + val accessor = module.getClass.getMethod(WrapValName) + val value = accessor.invoke(module) + value.asInstanceOf[T] + } - private val classDirFilter: FileFilter = DirectoryFilter || GlobFilter("*.class") + /** Gets the top-level module `moduleName` from the provided class `loader`. 
The module name should not include the trailing `$`.*/ + def getModule(moduleName: String, loader: ClassLoader): Any = + { + val clazz = Class.forName(moduleName + "$", true, loader) + clazz.getField("MODULE$").get(null) + } + + private val classDirFilter: FileFilter = DirectoryFilter || GlobFilter("*.class") } diff --git a/main/command/src/main/scala/sbt/BasicCommandStrings.scala b/main/command/src/main/scala/sbt/BasicCommandStrings.scala index 237db8d7d..76f79e623 100644 --- a/main/command/src/main/scala/sbt/BasicCommandStrings.scala +++ b/main/command/src/main/scala/sbt/BasicCommandStrings.scala @@ -9,18 +9,17 @@ import scala.annotation.tailrec import java.io.File import Path._ -object BasicCommandStrings -{ - val HelpCommand = "help" - val CompletionsCommand = "completions" - val Exit = "exit" - val Quit = "quit" +object BasicCommandStrings { + val HelpCommand = "help" + val CompletionsCommand = "completions" + val Exit = "exit" + val Quit = "quit" - /** The command name to terminate the program.*/ - val TerminateAction: String = Exit + /** The command name to terminate the program.*/ + val TerminateAction: String = Exit - def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ').") - def helpDetailed = HelpCommand + """ + def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand ').") + def helpDetailed = HelpCommand + """ Prints a help summary. @@ -33,24 +32,24 @@ object BasicCommandStrings Searches the help according to the provided regular expression. """ - def CompletionsDetailed = "Displays a list of completions for the given argument string (run 'completions ')." - def CompletionsBrief = (CompletionsCommand, CompletionsDetailed) + def CompletionsDetailed = "Displays a list of completions for the given argument string (run 'completions ')." 
+ def CompletionsBrief = (CompletionsCommand, CompletionsDetailed) - def HistoryHelpBrief = (HistoryCommands.Start -> "History command help. Lists and describes all history commands.") - def historyHelp = Help(Nil, (HistoryHelpBrief +: HistoryCommands.descriptions).toMap, Set(HistoryCommands.Start)) + def HistoryHelpBrief = (HistoryCommands.Start -> "History command help. Lists and describes all history commands.") + def historyHelp = Help(Nil, (HistoryHelpBrief +: HistoryCommands.descriptions).toMap, Set(HistoryCommands.Start)) - def exitBrief = "Terminates the build." + def exitBrief = "Terminates the build." - def logLevelHelp = - { - val levels = Level.values.toSeq - val levelList = levels.mkString(", ") - val brief = ("", "Sets the logging level to 'log-level'. Valid levels: " + levelList) - val detailed = levels.map(l => (l.toString, logLevelDetail(l))).toMap - Help(brief, detailed) - } - private[this] def logLevelDetail(level: Level.Value): String = -s"""$level + def logLevelHelp = + { + val levels = Level.values.toSeq + val levelList = levels.mkString(", ") + val brief = ("", "Sets the logging level to 'log-level'. Valid levels: " + levelList) + val detailed = levels.map(l => (l.toString, logLevelDetail(l))).toMap + Help(brief, detailed) + } + private[this] def logLevelDetail(level: Level.Value): String = + s"""$level Sets the global logging level to $level. This will be used as the default level for logging from commands, settings, and tasks. 
@@ -64,27 +63,27 @@ ${runEarly(level.toString)} * if no other commands are passed, interactive mode is still entered """ - def runEarly(command: String) = { - val sep = if(command.isEmpty || Character.isLetter(command.charAt(0))) "" else " " - s"$EarlyCommand$sep$command" - } - private[sbt] def isEarlyCommand(s: String): Boolean = { - s.startsWith(EarlyCommand) && s != Compat.FailureWall && s != Compat.ClearOnFailure - } + def runEarly(command: String) = { + val sep = if (command.isEmpty || Character.isLetter(command.charAt(0))) "" else " " + s"$EarlyCommand$sep$command" + } + private[sbt] def isEarlyCommand(s: String): Boolean = { + s.startsWith(EarlyCommand) && s != Compat.FailureWall && s != Compat.ClearOnFailure + } - val EarlyCommand = "--" - val EarlyCommandBrief = (s"$EarlyCommand", "Schedules a command to run before other commands on startup.") - val EarlyCommandDetailed = -s"""$EarlyCommand + val EarlyCommand = "--" + val EarlyCommandBrief = (s"$EarlyCommand", "Schedules a command to run before other commands on startup.") + val EarlyCommandDetailed = + s"""$EarlyCommand Schedules an early command, which will be run before other commands on the command line. The order is preserved between all early commands, so `sbt --a --b` executes `a` and `b` in order. """ - def ReadCommand = "<" - def ReadFiles = " file1 file2 ..." - def ReadDetailed = -ReadCommand + ReadFiles + """ + def ReadCommand = "<" + def ReadFiles = " file1 file2 ..." + def ReadDetailed = + ReadCommand + ReadFiles + """ Reads the lines from the given files and inserts them as commands. All empty lines and lines that start with '#' are ignored. 
@@ -96,17 +95,17 @@ ReadCommand + ReadFiles + """ You probably need to escape this command if entering it at your shell.""" - def ApplyCommand = "apply" - def ApplyDetailed = -ApplyCommand + """ [-cp|-classpath ] * + def ApplyCommand = "apply" + def ApplyDetailed = + ApplyCommand + """ [-cp|-classpath ] * Transforms the current State by calling .apply(currentState) for each listed module name. Here, currentState is of type sbt.State. If a classpath is provided, modules are loaded from a new class loader for this classpath. """ - def RebootCommand = "reboot" - def RebootDetailed = -RebootCommand + """ [full] + def RebootCommand = "reboot" + def RebootDetailed = + RebootCommand + """ [full] This command is equivalent to exiting sbt, restarting, and running the remaining commands with the exception that the JVM is not shut down. @@ -115,67 +114,67 @@ RebootCommand + """ [full] is deleted before restarting. This forces an update of sbt and Scala and is useful when working with development versions of sbt or Scala.""" - def Multi = ";" - def MultiBrief = (Multi + " (" + Multi + " )*", "Runs the provided semicolon-separated commands.") - def MultiDetailed = -Multi + " command1 " + Multi + """ command2 ... + def Multi = ";" + def MultiBrief = (Multi + " (" + Multi + " )*", "Runs the provided semicolon-separated commands.") + def MultiDetailed = + Multi + " command1 " + Multi + """ command2 ... Runs the specified commands.""" - def AppendCommand = "append" - def AppendLastDetailed = -AppendCommand + """ + def AppendCommand = "append" + def AppendLastDetailed = + AppendCommand + """ Appends 'command' to list of commands to run. """ - val AliasCommand = "alias" - def AliasDetailed = -AliasCommand + """ + val AliasCommand = "alias" + def AliasDetailed = + AliasCommand + """ Prints a list of defined aliases. """ + -AliasCommand + """ name + AliasCommand + """ name Prints the alias defined for `name`. 
""" + -AliasCommand + """ name=value + AliasCommand + """ name=value Sets the alias `name` to `value`, replacing any existing alias with that name. Whenever `name` is entered, the corresponding `value` is run. If any argument is provided to `name`, it is appended as argument to `value`. """ + -AliasCommand + """ name= + AliasCommand + """ name= Removes the alias for `name`.""" - def Shell = "shell" - def ShellDetailed = "Provides an interactive prompt from which commands can be run." + def Shell = "shell" + def ShellDetailed = "Provides an interactive prompt from which commands can be run." - def StashOnFailure = "sbtStashOnFailure" - def PopOnFailure = "sbtPopOnFailure" + def StashOnFailure = "sbtStashOnFailure" + def PopOnFailure = "sbtPopOnFailure" - // commands with poor choices for names since they clash with the usual conventions for command line options - // these are not documented and are mainly internal commands and can be removed without a full deprecation cycle - object Compat { - def OnFailure = "-" - def ClearOnFailure = "--" - def FailureWall = "---" - def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure) - def ClearOnFailureDeprecated = deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure) - def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall) - private[this] def deprecatedAlias(oldName: String, newName: String): String = - s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in 0.14.0" - } + // commands with poor choices for names since they clash with the usual conventions for command line options + // these are not documented and are mainly internal commands and can be removed without a full deprecation cycle + object Compat { + def OnFailure = "-" + def ClearOnFailure = "--" + def FailureWall = "---" + def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure) + def ClearOnFailureDeprecated = 
deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure) + def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall) + private[this] def deprecatedAlias(oldName: String, newName: String): String = + s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in 0.14.0" + } - def FailureWall = "resumeFromFailure" + def FailureWall = "resumeFromFailure" - def ClearOnFailure = "sbtClearOnFailure" - def OnFailure = "onFailure" - def OnFailureDetailed = -OnFailure + """ command + def ClearOnFailure = "sbtClearOnFailure" + def OnFailure = "onFailure" + def OnFailureDetailed = + OnFailure + """ command Registers 'command' to run when a command fails to complete normally. @@ -185,14 +184,14 @@ OnFailure + """ command The failure command resets when it runs once, so it must be added again if desired.""" - def IfLast = "iflast" - def IfLastCommon = "If there are no more commands after this one, 'command' is run." - def IfLastDetailed = -IfLast + """ + def IfLast = "iflast" + def IfLastCommon = "If there are no more commands after this one, 'command' is run." + def IfLastDetailed = + IfLast + """ """ + IfLastCommon - val ContinuousExecutePrefix = "~" - def continuousDetail = "Executes the specified command whenever source files change." - def continuousBriefHelp = (ContinuousExecutePrefix + " ", continuousDetail) + val ContinuousExecutePrefix = "~" + def continuousDetail = "Executes the specified command whenever source files change." 
+ def continuousBriefHelp = (ContinuousExecutePrefix + " ", continuousDetail) } diff --git a/main/command/src/main/scala/sbt/BasicCommands.scala b/main/command/src/main/scala/sbt/BasicCommands.scala index 14c2dfded..9744d15b8 100644 --- a/main/command/src/main/scala/sbt/BasicCommands.scala +++ b/main/command/src/main/scala/sbt/BasicCommands.scala @@ -1,284 +1,279 @@ package sbt - import complete.{Completion, Completions, DefaultParsers, HistoryCommands, Parser, TokenCompletions} - import classpath.ClasspathUtilities.toLoader - import DefaultParsers._ - import Types.{const,idFun} - import Function.tupled - import Command.applyEffect - import HistoryCommands.{Start => HistoryPrefix} - import BasicCommandStrings._ - import CommandUtil._ - import BasicKeys._ +import complete.{ Completion, Completions, DefaultParsers, HistoryCommands, Parser, TokenCompletions } +import classpath.ClasspathUtilities.toLoader +import DefaultParsers._ +import Types.{ const, idFun } +import Function.tupled +import Command.applyEffect +import HistoryCommands.{ Start => HistoryPrefix } +import BasicCommandStrings._ +import CommandUtil._ +import BasicKeys._ - import java.io.File +import java.io.File -object BasicCommands -{ - lazy val allBasicCommands = Seq(nop, ignore, help, completionsCommand, multi, ifLast, append, setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, reboot, call, early, exit, continuous, history, shell, read, alias) ++ compatCommands +object BasicCommands { + lazy val allBasicCommands = Seq(nop, ignore, help, completionsCommand, multi, ifLast, append, setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, reboot, call, early, exit, continuous, history, shell, read, alias) ++ compatCommands - def nop = Command.custom(s => success(() => s)) - def ignore = Command.command(FailureWall)(idFun) + def nop = Command.custom(s => success(() => s)) + def ignore = Command.command(FailureWall)(idFun) - def early = Command.arb(earlyParser, earlyHelp) { (s, other) => other :: 
s } - private[this] def earlyParser = (s: State) => token(EarlyCommand).flatMap(_ => otherCommandParser(s)) - private[this] def earlyHelp = Help(EarlyCommand, EarlyCommandBrief, EarlyCommandDetailed) + def early = Command.arb(earlyParser, earlyHelp) { (s, other) => other :: s } + private[this] def earlyParser = (s: State) => token(EarlyCommand).flatMap(_ => otherCommandParser(s)) + private[this] def earlyHelp = Help(EarlyCommand, EarlyCommandBrief, EarlyCommandDetailed) - def help = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser) + def help = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser) - def helpParser(s: State) = - { - val h = (Help.empty /: s.definedCommands)(_ ++ _.help(s)) - val helpCommands = h.detail.keySet - val spacedArg = singleArgument(helpCommands).? - applyEffect(spacedArg)(runHelp(s, h)) - } + def helpParser(s: State) = + { + val h = (Help.empty /: s.definedCommands)(_ ++ _.help(s)) + val helpCommands = h.detail.keySet + val spacedArg = singleArgument(helpCommands).? 
+ applyEffect(spacedArg)(runHelp(s, h)) + } - def runHelp(s: State, h: Help)(arg: Option[String]): State = - { - val message = Help.message(h, arg) - System.out.println(message) - s - } - @deprecated("Use Help.moreMessage", "0.13.0") - def moreHelp(more: Seq[String]): String = Help.moreMessage(more) + def runHelp(s: State, h: Help)(arg: Option[String]): State = + { + val message = Help.message(h, arg) + System.out.println(message) + s + } + @deprecated("Use Help.moreMessage", "0.13.0") + def moreHelp(more: Seq[String]): String = Help.moreMessage(more) - def completionsCommand = Command.make(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser) - def completionsParser(state: State) = - { - val notQuoted = (NotQuoted ~ any.*) map {case (nq, s) => (nq +: s).mkString} - val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted) + def completionsCommand = Command.make(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser) + def completionsParser(state: State) = + { + val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => (nq +: s).mkString } + val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted) - applyEffect(token(quotedOrUnquotedSingleArgument ?? "" examples("", " ")))(runCompletions(state)) - } - def runCompletions(state: State)(input: String): State = { - Parser.completions(state.combinedParser, input, 9).get map { - c => if (c.isEmpty) input else input + c.append - } foreach { c => - System.out.println("[completions] " + c.replaceAll("\n", " ")) - } - state - } + applyEffect(token(quotedOrUnquotedSingleArgument ?? 
"" examples ("", " ")))(runCompletions(state)) + } + def runCompletions(state: State)(input: String): State = { + Parser.completions(state.combinedParser, input, 9).get map { + c => if (c.isEmpty) input else input + c.append + } foreach { c => + System.out.println("[completions] " + c.replaceAll("\n", " ")) + } + state + } + def multiParser(s: State): Parser[Seq[String]] = + { + val nonSemi = token(charClass(_ != ';').+, hide = const(true)) + (token(';' ~> OptSpace) flatMap { _ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace) } map (_.trim)).+ + } - def multiParser(s: State): Parser[Seq[String]] = - { - val nonSemi = token(charClass(_ != ';').+, hide= const(true)) - ( token(';' ~> OptSpace) flatMap { _ => matched((s.combinedParser&nonSemi) | nonSemi) <~ token(OptSpace) } map (_.trim) ).+ - } + def multiApplied(s: State) = + Command.applyEffect(multiParser(s))(_ ::: s) - def multiApplied(s: State) = - Command.applyEffect( multiParser(s) )( _ ::: s ) + def multi = Command.custom(multiApplied, Help(Multi, MultiBrief, MultiDetailed)) - def multi = Command.custom(multiApplied, Help(Multi, MultiBrief, MultiDetailed) ) + lazy val otherCommandParser = (s: State) => token(OptSpace ~> combinedLax(s, NotSpaceClass ~ any.*)) + def combinedLax(s: State, any: Parser[_]): Parser[String] = + matched(s.combinedParser | token(any, hide = const(true))) - lazy val otherCommandParser = (s: State) => token(OptSpace ~> combinedLax(s, NotSpaceClass ~ any.*) ) - def combinedLax(s: State, any: Parser[_]): Parser[String] = - matched(s.combinedParser | token(any, hide= const(true))) + def ifLast = Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser) { (s, arg) => + if (s.remainingCommands.isEmpty) arg :: s else s + } + def append = Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser) { (s, arg) => + s.copy(remainingCommands = s.remainingCommands :+ arg) + } - def ifLast = Command(IfLast, Help.more(IfLast, 
IfLastDetailed))(otherCommandParser) { (s, arg) => - if(s.remainingCommands.isEmpty) arg :: s else s - } - def append = Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser) { (s, arg) => - s.copy(remainingCommands = s.remainingCommands :+ arg) - } + def setOnFailure = Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser) { (s, arg) => + s.copy(onFailure = Some(arg)) + } + private[sbt] def compatCommands = Seq( + Command.command(Compat.ClearOnFailure) { s => + s.log.warn(Compat.ClearOnFailureDeprecated) + s.copy(onFailure = None) + }, + Command.arb(s => token(Compat.OnFailure, hide = const(true)).flatMap(x => otherCommandParser(s))) { (s, arg) => + s.log.warn(Compat.OnFailureDeprecated) + s.copy(onFailure = Some(arg)) + }, + Command.command(Compat.FailureWall) { s => + s.log.warn(Compat.FailureWallDeprecated) + s + } + ) - def setOnFailure = Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser) { (s, arg) => - s.copy(onFailure = Some(arg)) - } - private[sbt] def compatCommands = Seq( - Command.command(Compat.ClearOnFailure) { s => - s.log.warn(Compat.ClearOnFailureDeprecated) - s.copy(onFailure = None) - }, - Command.arb(s => token(Compat.OnFailure, hide = const(true)).flatMap(x => otherCommandParser(s)) ){ (s, arg) => - s.log.warn(Compat.OnFailureDeprecated) - s.copy(onFailure = Some(arg)) - }, - Command.command(Compat.FailureWall) { s => - s.log.warn(Compat.FailureWallDeprecated) - s - } - ) + def clearOnFailure = Command.command(ClearOnFailure)(s => s.copy(onFailure = None)) + def stashOnFailure = Command.command(StashOnFailure)(s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)) + def popOnFailure = Command.command(PopOnFailure) { s => + val stack = s.get(OnFailureStack).getOrElse(Nil) + val updated = if (stack.isEmpty) s.remove(OnFailureStack) else s.put(OnFailureStack, stack.tail) + updated.copy(onFailure = stack.headOption.flatten) + } - 
def clearOnFailure = Command.command(ClearOnFailure)(s => s.copy(onFailure = None)) - def stashOnFailure = Command.command(StashOnFailure)(s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten)) - def popOnFailure = Command.command(PopOnFailure) { s => - val stack = s.get(OnFailureStack).getOrElse(Nil) - val updated = if(stack.isEmpty) s.remove(OnFailureStack) else s.put(OnFailureStack, stack.tail) - updated.copy(onFailure = stack.headOption.flatten) - } + def reboot = Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootParser) { (s, full) => + s.reboot(full) + } + def rebootParser(s: State) = token(Space ~> "full" ^^^ true) ?? false - def reboot = Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootParser) { (s, full) => - s.reboot(full) - } - def rebootParser(s: State) = token(Space ~> "full" ^^^ true) ?? false + def call = Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) { + case (state, (cp, args)) => + val parentLoader = getClass.getClassLoader + state.log.info("Applying State transformations " + args.mkString(", ") + (if (cp.isEmpty) "" else " from " + cp.mkString(File.pathSeparator))) + val loader = if (cp.isEmpty) parentLoader else toLoader(cp.map(f => new File(f)), parentLoader) + val loaded = args.map(arg => ModuleUtilities.getObject(arg, loader).asInstanceOf[State => State]) + (state /: loaded)((s, obj) => obj(s)) + } + def callParser: Parser[(Seq[String], Seq[String])] = token(Space) ~> ((classpathOptionParser ?? 
Nil) ~ rep1sep(className, token(Space))) + private[this] def className: Parser[String] = + { + val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.") + def single(s: String) = Completions.single(Completion.displayStrict(s)) + val compl = TokenCompletions.fixed((seen, level) => if (seen.startsWith("-")) Completions.nil else single("")) + token(base, compl) + } + private[this] def classpathOptionParser: Parser[Seq[String]] = + token(("-cp" | "-classpath") ~> Space) ~> classpathStrings <~ token(Space) + private[this] def classpathStrings: Parser[Seq[String]] = + token(StringBasic.map(s => IO.pathSplit(s).toSeq), "") - def call = Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) { case (state,(cp,args)) => - val parentLoader = getClass.getClassLoader - state.log.info("Applying State transformations " + args.mkString(", ") + (if(cp.isEmpty) "" else " from " + cp.mkString(File.pathSeparator))) - val loader = if(cp.isEmpty) parentLoader else toLoader(cp.map(f => new File(f)), parentLoader) - val loaded = args.map(arg => ModuleUtilities.getObject(arg, loader).asInstanceOf[State => State]) - (state /: loaded)((s, obj) => obj(s)) - } - def callParser: Parser[(Seq[String], Seq[String])] = token(Space) ~> ((classpathOptionParser ?? 
Nil) ~ rep1sep(className, token(Space))) - private[this] def className: Parser[String] = - { - val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.") - def single(s: String) = Completions.single(Completion.displayStrict(s)) - val compl = TokenCompletions.fixed( (seen,level) => if(seen.startsWith("-")) Completions.nil else single("")) - token(base, compl) - } - private[this] def classpathOptionParser: Parser[Seq[String]] = - token( ("-cp" | "-classpath") ~> Space ) ~> classpathStrings <~ token(Space) - private[this] def classpathStrings: Parser[Seq[String]] = - token(StringBasic.map(s => IO.pathSplit(s).toSeq), "") + def exit = Command.command(TerminateAction, exitBrief, exitBrief)(_ exit true) - def exit = Command.command(TerminateAction, exitBrief, exitBrief ) ( _ exit true ) + def continuous = + Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(otherCommandParser) { (s, arg) => + withAttribute(s, Watched.Configuration, "Continuous execution not configured.") { w => + val repeat = ContinuousExecutePrefix + (if (arg.startsWith(" ")) arg else " " + arg) + Watched.executeContinuously(w, s, arg, repeat) + } + } + def history = Command.custom(historyParser, BasicCommandStrings.historyHelp) + def historyParser(s: State): Parser[() => State] = + Command.applyEffect(HistoryCommands.actionParser) { histFun => + val logError = (msg: String) => s.log.error(msg) + val hp = s get historyPath getOrElse None + val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq + histFun(complete.History(lines, hp, logError)) match { + case Some(commands) => + commands foreach println //printing is more appropriate than logging + (commands ::: s).continue + case None => s.fail + } + } - def continuous = - Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(otherCommandParser) { (s, arg) => - withAttribute(s, Watched.Configuration, "Continuous execution not configured.") { w => - val repeat = ContinuousExecutePrefix + 
(if(arg.startsWith(" ")) arg else " " + arg) - Watched.executeContinuously(w, s, arg, repeat) - } - } + def shell = Command.command(Shell, Help.more(Shell, ShellDetailed)) { s => + val history = (s get historyPath) getOrElse Some(new File(s.baseDir, ".history")) + val prompt = (s get shellPrompt) match { case Some(pf) => pf(s); case None => "> " } + val reader = new FullReader(history, s.combinedParser) + val line = reader.readLine(prompt) + line match { + case Some(line) => + val newState = s.copy(onFailure = Some(Shell), remainingCommands = line +: Shell +: s.remainingCommands).setInteractive(true) + if (line.trim.isEmpty) newState else newState.clearGlobalLog + case None => s.setInteractive(false) + } + } - def history = Command.custom(historyParser, BasicCommandStrings.historyHelp) - def historyParser(s: State): Parser[() => State] = - Command.applyEffect(HistoryCommands.actionParser) { histFun => - val logError = (msg: String) => s.log.error(msg) - val hp = s get historyPath getOrElse None - val lines = hp.toList.flatMap( p => IO.readLines(p) ).toIndexedSeq - histFun( complete.History(lines, hp, logError) ) match - { - case Some(commands) => - commands foreach println //printing is more appropriate than logging - (commands ::: s).continue - case None => s.fail - } - } + def read = Command.make(ReadCommand, Help.more(ReadCommand, ReadDetailed))(s => applyEffect(readParser(s))(doRead(s))) + def readParser(s: State) = + { + val files = (token(Space) ~> fileParser(s.baseDir)).+ + val portAndSuccess = token(OptSpace) ~> Port + portAndSuccess || files + } + def doRead(s: State)(arg: Either[Int, Seq[File]]): State = + arg match { + case Left(portAndSuccess) => + val port = math.abs(portAndSuccess) + val previousSuccess = portAndSuccess >= 0 + readMessage(port, previousSuccess) match { + case Some(message) => (message :: (ReadCommand + " " + port) :: s).copy(onFailure = Some(ReadCommand + " " + (-port))) + case None => + System.err.println("Connection closed.") + 
s.fail + } + case Right(from) => + val notFound = notReadable(from) + if (notFound.isEmpty) + readLines(from) ::: s // this means that all commands from all files are loaded, parsed, and inserted before any are executed + else { + s.log.error("Command file(s) not readable: \n\t" + notFound.mkString("\n\t")) + s + } + } + private def readMessage(port: Int, previousSuccess: Boolean): Option[String] = + { + // split into two connections because this first connection ends the previous communication + xsbt.IPC.client(port) { _.send(previousSuccess.toString) } + // and this second connection starts the next communication + xsbt.IPC.client(port) { ipc => + val message = ipc.receive + if (message eq null) None else Some(message) + } + } - def shell = Command.command(Shell, Help.more(Shell, ShellDetailed)) { s => - val history = (s get historyPath) getOrElse Some(new File(s.baseDir, ".history")) - val prompt = (s get shellPrompt) match { case Some(pf) => pf(s); case None => "> " } - val reader = new FullReader(history, s.combinedParser) - val line = reader.readLine(prompt) - line match { - case Some(line) => - val newState = s.copy(onFailure = Some(Shell), remainingCommands = line +: Shell +: s.remainingCommands).setInteractive(true) - if(line.trim.isEmpty) newState else newState.clearGlobalLog - case None => s.setInteractive(false) - } - } + def alias = Command.make(AliasCommand, Help.more(AliasCommand, AliasDetailed)) { s => + val name = token(OpOrID.examples(aliasNames(s): _*)) + val assign = token(OptSpace ~ '=' ~ OptSpace) + val sfree = removeAliases(s) + val to = matched(sfree.combinedParser, partial = true).failOnException | any.+.string + val base = (OptSpace ~> (name ~ (assign ~> to.?).?).?) 
+ applyEffect(base)(t => runAlias(s, t)) + } - def read = Command.make(ReadCommand, Help.more(ReadCommand, ReadDetailed))(s => applyEffect(readParser(s))(doRead(s)) ) - def readParser(s: State) = - { - val files = (token(Space) ~> fileParser(s.baseDir)).+ - val portAndSuccess = token(OptSpace) ~> Port - portAndSuccess || files - } - def doRead(s: State)(arg: Either[Int, Seq[File]]): State = - arg match - { - case Left(portAndSuccess) => - val port = math.abs(portAndSuccess) - val previousSuccess = portAndSuccess >= 0 - readMessage(port, previousSuccess) match - { - case Some(message) => (message :: (ReadCommand + " " + port) :: s).copy(onFailure = Some(ReadCommand + " " + (-port))) - case None => - System.err.println("Connection closed.") - s.fail - } - case Right(from) => - val notFound = notReadable(from) - if(notFound.isEmpty) - readLines(from) ::: s // this means that all commands from all files are loaded, parsed, and inserted before any are executed - else { - s.log.error("Command file(s) not readable: \n\t" + notFound.mkString("\n\t")) - s - } - } - private def readMessage(port: Int, previousSuccess: Boolean): Option[String] = - { - // split into two connections because this first connection ends the previous communication - xsbt.IPC.client(port) { _.send(previousSuccess.toString) } - // and this second connection starts the next communication - xsbt.IPC.client(port) { ipc => - val message = ipc.receive - if(message eq null) None else Some(message) - } - } + def runAlias(s: State, args: Option[(String, Option[Option[String]])]): State = + args match { + case None => + printAliases(s); s + case Some(x ~ None) if !x.isEmpty => + printAlias(s, x.trim); s + case Some(name ~ Some(None)) => removeAlias(s, name.trim) + case Some(name ~ Some(Some(value))) => addAlias(s, name.trim, value.trim) + } + def addAlias(s: State, name: String, value: String): State = + if (Command validID name) { + val removed = removeAlias(s, name) + if (value.isEmpty) removed else 
addAlias0(removed, name, value) + } else { + System.err.println("Invalid alias name '" + name + "'.") + s.fail + } + private[this] def addAlias0(s: State, name: String, value: String): State = + s.copy(definedCommands = newAlias(name, value) +: s.definedCommands) + def removeAliases(s: State): State = removeTagged(s, CommandAliasKey) + def removeAlias(s: State, name: String): State = s.copy(definedCommands = s.definedCommands.filter(c => !isAliasNamed(name, c))) - def alias = Command.make(AliasCommand, Help.more(AliasCommand, AliasDetailed)) { s => - val name = token(OpOrID.examples( aliasNames(s) : _*) ) - val assign = token(OptSpace ~ '=' ~ OptSpace) - val sfree = removeAliases(s) - val to = matched(sfree.combinedParser, partial = true).failOnException | any.+.string - val base = (OptSpace ~> (name ~ (assign ~> to.?).?).?) - applyEffect(base)(t => runAlias(s, t) ) - } + def removeTagged(s: State, tag: AttributeKey[_]): State = s.copy(definedCommands = removeTagged(s.definedCommands, tag)) + def removeTagged(as: Seq[Command], tag: AttributeKey[_]): Seq[Command] = as.filter(c => !(c.tags contains tag)) - def runAlias(s: State, args: Option[(String, Option[Option[String]])]): State = - args match - { - case None => printAliases(s); s - case Some(x ~ None) if !x.isEmpty => printAlias(s, x.trim); s - case Some(name ~ Some(None)) => removeAlias(s, name.trim) - case Some(name ~ Some(Some(value))) => addAlias(s, name.trim, value.trim) - } - def addAlias(s: State, name: String, value: String): State = - if(Command validID name) { - val removed = removeAlias(s, name) - if(value.isEmpty) removed else addAlias0(removed, name, value) - } else { - System.err.println("Invalid alias name '" + name + "'.") - s.fail - } - private[this] def addAlias0(s: State, name: String, value: String): State = - s.copy(definedCommands = newAlias(name, value) +: s.definedCommands) + def isAliasNamed(name: String, c: Command): Boolean = isNamed(name, getAlias(c)) + def isNamed(name: String, 
alias: Option[(String, String)]): Boolean = alias match { case None => false; case Some((n, _)) => name == n } - def removeAliases(s: State): State = removeTagged(s, CommandAliasKey) - def removeAlias(s: State, name: String): State = s.copy(definedCommands = s.definedCommands.filter(c => !isAliasNamed(name, c)) ) + def getAlias(c: Command): Option[(String, String)] = c.tags get CommandAliasKey + def printAlias(s: State, name: String): Unit = printAliases(aliases(s, (n, v) => n == name)) + def printAliases(s: State): Unit = printAliases(allAliases(s)) + def printAliases(as: Seq[(String, String)]): Unit = + for ((name, value) <- as) + println("\t" + name + " = " + value) - def removeTagged(s: State, tag: AttributeKey[_]): State = s.copy(definedCommands = removeTagged(s.definedCommands, tag)) - def removeTagged(as: Seq[Command], tag: AttributeKey[_]): Seq[Command] = as.filter(c => ! (c.tags contains tag)) + def aliasNames(s: State): Seq[String] = allAliases(s).map(_._1) + def allAliases(s: State): Seq[(String, String)] = aliases(s, (n, v) => true) + def aliases(s: State, pred: (String, String) => Boolean): Seq[(String, String)] = + s.definedCommands.flatMap(c => getAlias(c).filter(tupled(pred))) - def isAliasNamed(name: String, c: Command): Boolean = isNamed(name, getAlias(c)) - def isNamed(name: String, alias: Option[(String,String)]): Boolean = alias match { case None => false; case Some((n,_)) => name == n } + def newAlias(name: String, value: String): Command = + Command.make(name, (name, "'" + value + "'"), "Alias of '" + value + "'")(aliasBody(name, value)).tag(CommandAliasKey, (name, value)) + def aliasBody(name: String, value: String)(state: State): Parser[() => State] = { + val aliasRemoved = removeAlias(state, name) + // apply the alias value to the commands of `state` except for the alias to avoid recursion (#933) + val partiallyApplied = Parser(Command.combine(aliasRemoved.definedCommands)(aliasRemoved))(value) + val arg = matched(partiallyApplied & 
(success() | (SpaceClass ~ any.*))) + // by scheduling the expanded alias instead of directly executing, we get errors on the expanded string (#598) + arg.map(str => () => (value + str) :: state) + } - def getAlias(c: Command): Option[(String,String)] = c.tags get CommandAliasKey - def printAlias(s: State, name: String): Unit = printAliases(aliases(s,(n,v) => n == name) ) - def printAliases(s: State): Unit = printAliases(allAliases(s)) - def printAliases(as: Seq[(String,String)]): Unit = - for( (name,value) <- as) - println("\t" + name + " = " + value) + def delegateToAlias(name: String, orElse: Parser[() => State])(state: State): Parser[() => State] = + aliases(state, (nme, _) => nme == name).headOption match { + case None => orElse + case Some((n, v)) => aliasBody(n, v)(state) + } - def aliasNames(s: State): Seq[String] = allAliases(s).map(_._1) - def allAliases(s: State): Seq[(String,String)] = aliases(s, (n,v) => true) - def aliases(s: State, pred: (String,String) => Boolean): Seq[(String,String)] = - s.definedCommands.flatMap(c => getAlias(c).filter(tupled(pred))) - - def newAlias(name: String, value: String): Command = - Command.make(name, (name, "'" + value + "'"), "Alias of '" + value + "'")(aliasBody(name, value)).tag(CommandAliasKey, (name, value)) - def aliasBody(name: String, value: String)(state: State): Parser[() => State] = { - val aliasRemoved = removeAlias(state,name) - // apply the alias value to the commands of `state` except for the alias to avoid recursion (#933) - val partiallyApplied = Parser(Command.combine(aliasRemoved.definedCommands)(aliasRemoved))(value) - val arg = matched( partiallyApplied & (success() | (SpaceClass ~ any.*)) ) - // by scheduling the expanded alias instead of directly executing, we get errors on the expanded string (#598) - arg.map( str => () => (value + str) :: state) - } - - def delegateToAlias(name: String, orElse: Parser[() => State])(state: State): Parser[() => State] = - aliases(state, (nme,_) => nme == 
name).headOption match { - case None => orElse - case Some((n,v)) => aliasBody(n,v)(state) - } - - val CommandAliasKey = AttributeKey[(String,String)]("is-command-alias", "Internal: marker for Commands created as aliases for another command.") + val CommandAliasKey = AttributeKey[(String, String)]("is-command-alias", "Internal: marker for Commands created as aliases for another command.") } diff --git a/main/command/src/main/scala/sbt/BasicKeys.scala b/main/command/src/main/scala/sbt/BasicKeys.scala index 0c6f3b64e..6e7779558 100644 --- a/main/command/src/main/scala/sbt/BasicKeys.scala +++ b/main/command/src/main/scala/sbt/BasicKeys.scala @@ -1,14 +1,13 @@ package sbt - import java.io.File +import java.io.File -object BasicKeys -{ - val historyPath = AttributeKey[Option[File]]("history", "The location where command line history is persisted.", 40) - val shellPrompt = AttributeKey[State => String]("shell-prompt", "The function that constructs the command prompt from the current build state.", 10000) - val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000) - private[sbt] val interactive = AttributeKey[Boolean]("interactive", "True if commands are currently being entered from an interactive environment.", 10) - private[sbt] val classLoaderCache = AttributeKey[classpath.ClassLoaderCache]("class-loader-cache", "Caches class loaders based on the classpath entries and last modified times.", 10) - private[sbt] val OnFailureStack = AttributeKey[List[Option[String]]]("on-failure-stack", "Stack that remembers on-failure handlers.", 10) - private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean]("explicit-global-log-levels", "True if the global logging levels were explicitly set by the user.", 10) +object BasicKeys { + val historyPath = AttributeKey[Option[File]]("history", "The location where command line history is persisted.", 40) + val shellPrompt = AttributeKey[State => String]("shell-prompt", "The function that constructs the 
command prompt from the current build state.", 10000) + val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000) + private[sbt] val interactive = AttributeKey[Boolean]("interactive", "True if commands are currently being entered from an interactive environment.", 10) + private[sbt] val classLoaderCache = AttributeKey[classpath.ClassLoaderCache]("class-loader-cache", "Caches class loaders based on the classpath entries and last modified times.", 10) + private[sbt] val OnFailureStack = AttributeKey[List[Option[String]]]("on-failure-stack", "Stack that remembers on-failure handlers.", 10) + private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean]("explicit-global-log-levels", "True if the global logging levels were explicitly set by the user.", 10) } diff --git a/main/command/src/main/scala/sbt/Command.scala b/main/command/src/main/scala/sbt/Command.scala index 1a5e9355a..7c98bb6e8 100644 --- a/main/command/src/main/scala/sbt/Command.scala +++ b/main/command/src/main/scala/sbt/Command.scala @@ -3,166 +3,159 @@ */ package sbt - import java.io.File - import complete.{DefaultParsers, EditDistance, Parser} - import Types.const +import java.io.File +import complete.{ DefaultParsers, EditDistance, Parser } +import Types.const sealed trait Command { - def help: State => Help - def parser: State => Parser[() => State] - def tags: AttributeMap - def tag[T](key: AttributeKey[T], value: T): Command + def help: State => Help + def parser: State => Parser[() => State] + def tags: AttributeMap + def tag[T](key: AttributeKey[T], value: T): Command } private[sbt] final class SimpleCommand(val name: String, private[sbt] val help0: Help, val parser: State => Parser[() => State], val tags: AttributeMap) extends Command { - assert(Command validID name, "'" + name + "' is not a valid command name." 
) - def tag[T](key: AttributeKey[T], value: T): SimpleCommand = new SimpleCommand(name, help0, parser, tags.put(key, value)) - def help = const(help0) + assert(Command validID name, "'" + name + "' is not a valid command name.") + def tag[T](key: AttributeKey[T], value: T): SimpleCommand = new SimpleCommand(name, help0, parser, tags.put(key, value)) + def help = const(help0) } -private[sbt] final class ArbitraryCommand(val parser: State => Parser[() => State], val help: State => Help, val tags: AttributeMap) extends Command -{ - def tag[T](key: AttributeKey[T], value: T): ArbitraryCommand = new ArbitraryCommand(parser, help, tags.put(key, value)) +private[sbt] final class ArbitraryCommand(val parser: State => Parser[() => State], val help: State => Help, val tags: AttributeMap) extends Command { + def tag[T](key: AttributeKey[T], value: T): ArbitraryCommand = new ArbitraryCommand(parser, help, tags.put(key, value)) } -object Command -{ - import DefaultParsers._ +object Command { + import DefaultParsers._ - def command(name: String, briefHelp: String, detail: String)(f: State => State): Command = command(name, Help(name, (name, briefHelp), detail))(f) - def command(name: String, help: Help = Help.empty)(f: State => State): Command = make(name, help)(state => success(() => f(state))) + def command(name: String, briefHelp: String, detail: String)(f: State => State): Command = command(name, Help(name, (name, briefHelp), detail))(f) + def command(name: String, help: Help = Help.empty)(f: State => State): Command = make(name, help)(state => success(() => f(state))) - def make(name: String, briefHelp: (String, String), detail: String)(parser: State => Parser[() => State]): Command = - make(name, Help(name, briefHelp, detail) )(parser) - def make(name: String, help: Help = Help.empty)(parser: State => Parser[() => State]): Command = new SimpleCommand(name, help, parser, AttributeMap.empty) + def make(name: String, briefHelp: (String, String), detail: String)(parser: State 
=> Parser[() => State]): Command = + make(name, Help(name, briefHelp, detail))(parser) + def make(name: String, help: Help = Help.empty)(parser: State => Parser[() => State]): Command = new SimpleCommand(name, help, parser, AttributeMap.empty) - def apply[T](name: String, briefHelp: (String, String), detail: String)(parser: State => Parser[T])(effect: (State,T) => State): Command = - apply(name, Help(name, briefHelp, detail) )(parser)(effect) - def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])(effect: (State,T) => State): Command = - make(name, help)(applyEffect(parser)(effect) ) + def apply[T](name: String, briefHelp: (String, String), detail: String)(parser: State => Parser[T])(effect: (State, T) => State): Command = + apply(name, Help(name, briefHelp, detail))(parser)(effect) + def apply[T](name: String, help: Help = Help.empty)(parser: State => Parser[T])(effect: (State, T) => State): Command = + make(name, help)(applyEffect(parser)(effect)) - def args(name: String, briefHelp: (String, String), detail: String, display: String)(f: (State, Seq[String]) => State): Command = - args(name, display, Help(name, briefHelp, detail) )(f) - - def args(name: String, display: String, help: Help = Help.empty)(f: (State, Seq[String]) => State): Command = - make(name, help)( state => spaceDelimited(display) map apply1(f, state) ) + def args(name: String, briefHelp: (String, String), detail: String, display: String)(f: (State, Seq[String]) => State): Command = + args(name, display, Help(name, briefHelp, detail))(f) - def single(name: String, briefHelp: (String, String), detail: String)(f: (State, String) => State): Command = - single(name, Help(name, briefHelp, detail) )(f) - def single(name: String, help: Help = Help.empty)(f: (State, String) => State): Command = - make(name, help)( state => token(trimmed(spacedAny(name)) map apply1(f, state)) ) + def args(name: String, display: String, help: Help = Help.empty)(f: (State, Seq[String]) => State): 
Command = + make(name, help)(state => spaceDelimited(display) map apply1(f, state)) - def custom(parser: State => Parser[() => State], help: Help = Help.empty): Command = customHelp(parser, const(help)) - def customHelp(parser: State => Parser[() => State], help: State => Help): Command = new ArbitraryCommand(parser, help, AttributeMap.empty) - def arb[T](parser: State => Parser[T], help: Help = Help.empty)(effect: (State, T) => State): Command = custom(applyEffect(parser)(effect), help) + def single(name: String, briefHelp: (String, String), detail: String)(f: (State, String) => State): Command = + single(name, Help(name, briefHelp, detail))(f) + def single(name: String, help: Help = Help.empty)(f: (State, String) => State): Command = + make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state))) - def validID(name: String) = DefaultParsers.matches(OpOrID, name) + def custom(parser: State => Parser[() => State], help: Help = Help.empty): Command = customHelp(parser, const(help)) + def customHelp(parser: State => Parser[() => State], help: State => Help): Command = new ArbitraryCommand(parser, help, AttributeMap.empty) + def arb[T](parser: State => Parser[T], help: Help = Help.empty)(effect: (State, T) => State): Command = custom(applyEffect(parser)(effect), help) - def applyEffect[T](parser: State => Parser[T])(effect: (State, T) => State): State => Parser[() => State] = - s => applyEffect(parser(s))(t => effect(s,t)) - def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = - p map { t => () => f(t) } + def validID(name: String) = DefaultParsers.matches(OpOrID, name) - def combine(cmds: Seq[Command]): State => Parser[() => State] = - { - val (simple, arbs) = separateCommands(cmds) - state => (simpleParser(simple)(state) /: arbs.map(_ parser state) ){ _ | _ } - } - private[this] def separateCommands(cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) = - Util.separate(cmds){ case s: SimpleCommand => Left(s); case a: 
ArbitraryCommand => Right(a) } - private[this] def apply1[A,B,C](f: (A,B) => C, a: A): B => () => C = - b => () => f(a,b) + def applyEffect[T](parser: State => Parser[T])(effect: (State, T) => State): State => Parser[() => State] = + s => applyEffect(parser(s))(t => effect(s, t)) + def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = + p map { t => () => f(t) } - def simpleParser(cmds: Seq[SimpleCommand]): State => Parser[() => State] = - simpleParser(cmds.map(sc => (sc.name, argParser(sc) )).toMap ) - private[this] def argParser(sc: SimpleCommand): State => Parser[() => State] = - { - def usageError = s"${sc.name} usage:" + Help.message(sc.help0, None) - s => (Parser.softFailure(usageError, definitive = true): Parser[() => State]) | sc.parser(s) - } + def combine(cmds: Seq[Command]): State => Parser[() => State] = + { + val (simple, arbs) = separateCommands(cmds) + state => (simpleParser(simple)(state) /: arbs.map(_ parser state)) { _ | _ } + } + private[this] def separateCommands(cmds: Seq[Command]): (Seq[SimpleCommand], Seq[ArbitraryCommand]) = + Util.separate(cmds) { case s: SimpleCommand => Left(s); case a: ArbitraryCommand => Right(a) } + private[this] def apply1[A, B, C](f: (A, B) => C, a: A): B => () => C = + b => () => f(a, b) - def simpleParser(commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] = - (state: State) => token(OpOrID examples commandMap.keys.toSet) flatMap { id => - (commandMap get id) match { - case None => failure(invalidValue("command", commandMap.keys)(id)) - case Some(c) => c(state) - } - } - - def process(command: String, state: State): State = - { - val parser = combine(state.definedCommands) - parse(command, parser(state)) match - { - case Right(s) => s() // apply command. 
command side effects happen here - case Left(errMsg) => - state.log.error(errMsg) - state.fail - } - } - def invalidValue(label: String, allowed: Iterable[String])(value: String): String = - "Not a valid " + label + ": " + value + similar(value, allowed) - def similar(value: String, allowed: Iterable[String]): String = - { - val suggested = if(value.length > 2) suggestions(value, allowed.toSeq) else Nil - if(suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")") - } - def suggestions(a: String, bs: Seq[String], maxDistance: Int = 3, maxSuggestions: Int = 3): Seq[String] = - bs.map { b => (b, distance(a, b) ) } filter (_._2 <= maxDistance) sortBy(_._2) take(maxSuggestions) map(_._1) - def distance(a: String, b: String): Int = - EditDistance.levenshtein(a, b, insertCost = 1, deleteCost = 1, subCost = 2, transposeCost = 1, matchCost = -1, caseCost = 1, true) + def simpleParser(cmds: Seq[SimpleCommand]): State => Parser[() => State] = + simpleParser(cmds.map(sc => (sc.name, argParser(sc))).toMap) + private[this] def argParser(sc: SimpleCommand): State => Parser[() => State] = + { + def usageError = s"${sc.name} usage:" + Help.message(sc.help0, None) + s => (Parser.softFailure(usageError, definitive = true): Parser[() => State]) | sc.parser(s) + } - def spacedAny(name: String): Parser[String] = spacedC(name, any) - def spacedC(name: String, c: Parser[Char]): Parser[String] = - ( (c & opOrIDSpaced(name)) ~ c.+) map { case (f, rem) => (f +: rem).mkString } + def simpleParser(commandMap: Map[String, State => Parser[() => State]]): State => Parser[() => State] = + (state: State) => token(OpOrID examples commandMap.keys.toSet) flatMap { id => + (commandMap get id) match { + case None => failure(invalidValue("command", commandMap.keys)(id)) + case Some(c) => c(state) + } + } + + def process(command: String, state: State): State = + { + val parser = combine(state.definedCommands) + parse(command, parser(state)) match { + case Right(s) => s() // apply command. 
command side effects happen here + case Left(errMsg) => + state.log.error(errMsg) + state.fail + } + } + def invalidValue(label: String, allowed: Iterable[String])(value: String): String = + "Not a valid " + label + ": " + value + similar(value, allowed) + def similar(value: String, allowed: Iterable[String]): String = + { + val suggested = if (value.length > 2) suggestions(value, allowed.toSeq) else Nil + if (suggested.isEmpty) "" else suggested.mkString(" (similar: ", ", ", ")") + } + def suggestions(a: String, bs: Seq[String], maxDistance: Int = 3, maxSuggestions: Int = 3): Seq[String] = + bs.map { b => (b, distance(a, b)) } filter (_._2 <= maxDistance) sortBy (_._2) take (maxSuggestions) map (_._1) + def distance(a: String, b: String): Int = + EditDistance.levenshtein(a, b, insertCost = 1, deleteCost = 1, subCost = 2, transposeCost = 1, matchCost = -1, caseCost = 1, true) + + def spacedAny(name: String): Parser[String] = spacedC(name, any) + def spacedC(name: String, c: Parser[Char]): Parser[String] = + ((c & opOrIDSpaced(name)) ~ c.+) map { case (f, rem) => (f +: rem).mkString } } -trait Help -{ - def detail: Map[String, String] - def brief: Seq[(String, String)] - def more: Set[String] - def ++(o: Help): Help +trait Help { + def detail: Map[String, String] + def brief: Seq[(String, String)] + def more: Set[String] + def ++(o: Help): Help } -private final class Help0(val brief: Seq[(String,String)], val detail: Map[String,String], val more: Set[String]) extends Help -{ - def ++(h: Help): Help = new Help0(Help0.this.brief ++ h.brief, Help0.this.detail ++ h.detail, more ++ h.more) +private final class Help0(val brief: Seq[(String, String)], val detail: Map[String, String], val more: Set[String]) extends Help { + def ++(h: Help): Help = new Help0(Help0.this.brief ++ h.brief, Help0.this.detail ++ h.detail, more ++ h.more) } -object Help -{ - val empty: Help = briefDetail(Nil) +object Help { + val empty: Help = briefDetail(Nil) - def apply(name: String, briefHelp: 
(String, String), detail: String): Help = apply(briefHelp, Map( (name, detail) ) ) + def apply(name: String, briefHelp: (String, String), detail: String): Help = apply(briefHelp, Map((name, detail))) - def apply(briefHelp: (String, String), detailedHelp: Map[String, String] = Map.empty ): Help = - apply(briefHelp :: Nil, detailedHelp) + def apply(briefHelp: (String, String), detailedHelp: Map[String, String] = Map.empty): Help = + apply(briefHelp :: Nil, detailedHelp) - def apply(briefHelp: Seq[(String,String)], detailedHelp: Map[String,String]): Help = - apply(briefHelp, detailedHelp, Set.empty[String]) - def apply(briefHelp: Seq[(String,String)], detailedHelp: Map[String,String], more: Set[String]): Help = - new Help0(briefHelp, detailedHelp, more) + def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String]): Help = + apply(briefHelp, detailedHelp, Set.empty[String]) + def apply(briefHelp: Seq[(String, String)], detailedHelp: Map[String, String], more: Set[String]): Help = + new Help0(briefHelp, detailedHelp, more) - def more(name: String, detailedHelp: String): Help = apply(Nil, Map(name -> detailedHelp), Set(name)) - def briefDetail(help: Seq[(String, String)]): Help = apply(help, help.toMap) - def briefOnly(help: Seq[(String, String)]): Help = apply(help, Map.empty[String,String]) - def detailOnly(help: Seq[(String, String)]): Help = apply(Nil, help.toMap) + def more(name: String, detailedHelp: String): Help = apply(Nil, Map(name -> detailedHelp), Set(name)) + def briefDetail(help: Seq[(String, String)]): Help = apply(help, help.toMap) + def briefOnly(help: Seq[(String, String)]): Help = apply(help, Map.empty[String, String]) + def detailOnly(help: Seq[(String, String)]): Help = apply(Nil, help.toMap) - import CommandUtil._ + import CommandUtil._ - def message(h: Help, arg: Option[String]): String = - arg match { - case Some(x) => detail(x, h.detail) - case None => - val brief = aligned(" ", " ", h.brief).mkString("\n", "\n", "\n") - val 
more = h.more.toSeq.sorted - if(more.isEmpty) - brief - else - brief + "\n" + moreMessage(more) - } - def moreMessage(more: Seq[String]): String = - more.mkString("More command help available using 'help ' for:\n ", ", ", "\n") + def message(h: Help, arg: Option[String]): String = + arg match { + case Some(x) => detail(x, h.detail) + case None => + val brief = aligned(" ", " ", h.brief).mkString("\n", "\n", "\n") + val more = h.more.toSeq.sorted + if (more.isEmpty) + brief + else + brief + "\n" + moreMessage(more) + } + def moreMessage(more: Seq[String]): String = + more.mkString("More command help available using 'help ' for:\n ", ", ", "\n") } -trait CommandDefinitions extends (State => State) -{ - def commands: Seq[Command] = ReflectUtilities.allVals[Command](this).values.toSeq - def apply(s: State): State = s ++ commands +trait CommandDefinitions extends (State => State) { + def commands: Seq[Command] = ReflectUtilities.allVals[Command](this).values.toSeq + def apply(s: State): State = s ++ commands } diff --git a/main/command/src/main/scala/sbt/CommandUtil.scala b/main/command/src/main/scala/sbt/CommandUtil.scala index 467da6a33..59d125267 100644 --- a/main/command/src/main/scala/sbt/CommandUtil.scala +++ b/main/command/src/main/scala/sbt/CommandUtil.scala @@ -1,75 +1,74 @@ package sbt - import java.io.File - import java.util.regex.{Pattern, PatternSyntaxException} +import java.io.File +import java.util.regex.{ Pattern, PatternSyntaxException } - import complete.Parser - import complete.DefaultParsers._ +import complete.Parser +import complete.DefaultParsers._ -object CommandUtil -{ - def readLines(files: Seq[File]): Seq[String] = files flatMap (line => IO.readLines(line)) flatMap processLine - def processLine(s: String) = { val trimmed = s.trim; if(ignoreLine(trimmed)) None else Some(trimmed) } - def ignoreLine(s: String) = s.isEmpty || s.startsWith("#") +object CommandUtil { + def readLines(files: Seq[File]): Seq[String] = files flatMap (line => 
IO.readLines(line)) flatMap processLine + def processLine(s: String) = { val trimmed = s.trim; if (ignoreLine(trimmed)) None else Some(trimmed) } + def ignoreLine(s: String) = s.isEmpty || s.startsWith("#") - private def canRead = (_: File).canRead - def notReadable(files: Seq[File]): Seq[File] = files filterNot canRead - def readable(files: Seq[File]): Seq[File] = files filter canRead + private def canRead = (_: File).canRead + def notReadable(files: Seq[File]): Seq[File] = files filterNot canRead + def readable(files: Seq[File]): Seq[File] = files filter canRead - // slightly better fallback in case of older launcher - def bootDirectory(state: State): File = - try { state.configuration.provider.scalaProvider.launcher.bootDirectory } - catch { case e: NoSuchMethodError => new File(".").getAbsoluteFile } + // slightly better fallback in case of older launcher + def bootDirectory(state: State): File = + try { state.configuration.provider.scalaProvider.launcher.bootDirectory } + catch { case e: NoSuchMethodError => new File(".").getAbsoluteFile } - def aligned(pre: String, sep: String, in: Seq[(String, String)]): Seq[String] = if(in.isEmpty) Nil else - { - val width = in.map(_._1.length).max - in.map { case (a, b) => (pre + fill(a, width) + sep + b) } - } - def fill(s: String, size: Int) = s + " " * math.max(size - s.length, 0) + def aligned(pre: String, sep: String, in: Seq[(String, String)]): Seq[String] = if (in.isEmpty) Nil else { + val width = in.map(_._1.length).max + in.map { case (a, b) => (pre + fill(a, width) + sep + b) } + } + def fill(s: String, size: Int) = s + " " * math.max(size - s.length, 0) - def withAttribute[T](s: State, key: AttributeKey[T], ifMissing: String)(f: T => State): State = - (s get key) match { - case None => s.log.error(ifMissing); s.fail - case Some(nav) => f(nav) - } + def withAttribute[T](s: State, key: AttributeKey[T], ifMissing: String)(f: T => State): State = + (s get key) match { + case None => + s.log.error(ifMissing); s.fail 
+ case Some(nav) => f(nav) + } - def singleArgument(exampleStrings: Set[String]): Parser[String] = - { - val arg = (NotSpaceClass ~ any.*) map { case (ns, s) => (ns +: s).mkString } - token(Space) ~> token( arg examples exampleStrings ) - } - def detail(selected: String, detailMap: Map[String, String]): String = - detailMap.get(selected) match - { - case Some(exactDetail) => exactDetail - case None => try { - val details = searchHelp(selected, detailMap) - if(details.isEmpty) - "No matches for regular expression '" + selected + "'." - else - layoutDetails(details) - } catch { - case pse: PatternSyntaxException => sys.error("Invalid regular expression (java.util.regex syntax).\n" + pse.getMessage) - } - } - def searchHelp(selected: String, detailMap: Map[String, String]): Map[String, String] = - { - val pattern = Pattern.compile(selected, HelpPatternFlags) - detailMap flatMap { case (k,v) => - val contentMatches = Highlight.showMatches(pattern)(v) - val keyMatches = Highlight.showMatches(pattern)(k) - val keyString = Highlight.bold(keyMatches getOrElse k) - val contentString = contentMatches getOrElse v - if(keyMatches.isDefined || contentMatches.isDefined) - (keyString, contentString) :: Nil - else - Nil - } - } - def layoutDetails(details: Map[String,String]): String = - details.map { case (k,v) => k + "\n\n " + v } mkString("\n", "\n\n", "\n") + def singleArgument(exampleStrings: Set[String]): Parser[String] = + { + val arg = (NotSpaceClass ~ any.*) map { case (ns, s) => (ns +: s).mkString } + token(Space) ~> token(arg examples exampleStrings) + } + def detail(selected: String, detailMap: Map[String, String]): String = + detailMap.get(selected) match { + case Some(exactDetail) => exactDetail + case None => try { + val details = searchHelp(selected, detailMap) + if (details.isEmpty) + "No matches for regular expression '" + selected + "'." 
+ else + layoutDetails(details) + } catch { + case pse: PatternSyntaxException => sys.error("Invalid regular expression (java.util.regex syntax).\n" + pse.getMessage) + } + } + def searchHelp(selected: String, detailMap: Map[String, String]): Map[String, String] = + { + val pattern = Pattern.compile(selected, HelpPatternFlags) + detailMap flatMap { + case (k, v) => + val contentMatches = Highlight.showMatches(pattern)(v) + val keyMatches = Highlight.showMatches(pattern)(k) + val keyString = Highlight.bold(keyMatches getOrElse k) + val contentString = contentMatches getOrElse v + if (keyMatches.isDefined || contentMatches.isDefined) + (keyString, contentString) :: Nil + else + Nil + } + } + def layoutDetails(details: Map[String, String]): String = + details.map { case (k, v) => k + "\n\n " + v } mkString ("\n", "\n\n", "\n") - final val HelpPatternFlags = Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE + final val HelpPatternFlags = Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE } \ No newline at end of file diff --git a/main/command/src/main/scala/sbt/ExceptionCategory.scala b/main/command/src/main/scala/sbt/ExceptionCategory.scala index 92e34496d..699f247f2 100644 --- a/main/command/src/main/scala/sbt/ExceptionCategory.scala +++ b/main/command/src/main/scala/sbt/ExceptionCategory.scala @@ -1,25 +1,25 @@ package sbt - import java.lang.reflect.InvocationTargetException - import scala.annotation.tailrec +import java.lang.reflect.InvocationTargetException +import scala.annotation.tailrec private[sbt] sealed abstract class ExceptionCategory { - def isFull: Boolean = false + def isFull: Boolean = false } private[sbt] object ExceptionCategory { - @tailrec def apply(t: Throwable): ExceptionCategory = t match { - case _: AlreadyHandledException | _: UnprintableException => AlreadyHandled - case ite: InvocationTargetException => - val cause = ite.getCause - if(cause == null || cause == ite) new Full(ite) else apply(cause) - case _: MessageOnlyException => new 
MessageOnly(t.toString) - case _ => new Full(t) - } + @tailrec def apply(t: Throwable): ExceptionCategory = t match { + case _: AlreadyHandledException | _: UnprintableException => AlreadyHandled + case ite: InvocationTargetException => + val cause = ite.getCause + if (cause == null || cause == ite) new Full(ite) else apply(cause) + case _: MessageOnlyException => new MessageOnly(t.toString) + case _ => new Full(t) + } - object AlreadyHandled extends ExceptionCategory - final class MessageOnly(val message: String) extends ExceptionCategory - final class Full(val exception: Throwable) extends ExceptionCategory { - override def isFull = true - } + object AlreadyHandled extends ExceptionCategory + final class MessageOnly(val message: String) extends ExceptionCategory + final class Full(val exception: Throwable) extends ExceptionCategory { + override def isFull = true + } } diff --git a/main/command/src/main/scala/sbt/Highlight.scala b/main/command/src/main/scala/sbt/Highlight.scala index 0713017e0..ae58566ea 100644 --- a/main/command/src/main/scala/sbt/Highlight.scala +++ b/main/command/src/main/scala/sbt/Highlight.scala @@ -1,25 +1,22 @@ package sbt - import java.util.regex.Pattern - import scala.Console.{BOLD, RESET} +import java.util.regex.Pattern +import scala.Console.{ BOLD, RESET } -object Highlight -{ - final val NormalIntensity = "\033[22m" - final val NormalTextColor = "\033[39m" +object Highlight { + final val NormalIntensity = "\033[22m" + final val NormalTextColor = "\033[39m" - def showMatches(pattern: Pattern)(line: String): Option[String] = - { - val matcher = pattern.matcher(line) - if(ConsoleLogger.formatEnabled) - { - val highlighted = matcher.replaceAll(scala.Console.RED + "$0" + NormalTextColor) - if(highlighted == line) None else Some(highlighted) - } - else if(matcher.find) - Some(line) - else - None - } - def bold(s: String) = if(ConsoleLogger.formatEnabled) BOLD + s + NormalIntensity else s + def showMatches(pattern: Pattern)(line: String): 
Option[String] = + { + val matcher = pattern.matcher(line) + if (ConsoleLogger.formatEnabled) { + val highlighted = matcher.replaceAll(scala.Console.RED + "$0" + NormalTextColor) + if (highlighted == line) None else Some(highlighted) + } else if (matcher.find) + Some(line) + else + None + } + def bold(s: String) = if (ConsoleLogger.formatEnabled) BOLD + s + NormalIntensity else s } \ No newline at end of file diff --git a/main/command/src/main/scala/sbt/MainControl.scala b/main/command/src/main/scala/sbt/MainControl.scala index 87612ef73..7ffb65f57 100644 --- a/main/command/src/main/scala/sbt/MainControl.scala +++ b/main/command/src/main/scala/sbt/MainControl.scala @@ -5,24 +5,20 @@ package sbt import java.io.File -final case class Exit(code: Int) extends xsbti.Exit -{ - require(code >= 0) +final case class Exit(code: Int) extends xsbti.Exit { + require(code >= 0) } -final case class Reboot(scalaVersion: String, argsList: Seq[String], app: xsbti.ApplicationID, baseDirectory: File) extends xsbti.Reboot -{ - def arguments = argsList.toArray +final case class Reboot(scalaVersion: String, argsList: Seq[String], app: xsbti.ApplicationID, baseDirectory: File) extends xsbti.Reboot { + def arguments = argsList.toArray } -final case class ApplicationID(groupID: String, name: String, version: String, mainClass: String, components: Seq[String], crossVersionedValue: xsbti.CrossValue, extra: Seq[File]) extends xsbti.ApplicationID -{ - def mainComponents = components.toArray - def classpathExtra = extra.toArray - def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled +final case class ApplicationID(groupID: String, name: String, version: String, mainClass: String, components: Seq[String], crossVersionedValue: xsbti.CrossValue, extra: Seq[File]) extends xsbti.ApplicationID { + def mainComponents = components.toArray + def classpathExtra = extra.toArray + def crossVersioned = crossVersionedValue != xsbti.CrossValue.Disabled } -object ApplicationID -{ - def 
apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID = - apply(delegate).copy(version = newVersion) - def apply(delegate: xsbti.ApplicationID): ApplicationID = - ApplicationID(delegate.groupID, delegate.name, delegate.version, delegate.mainClass, delegate.mainComponents, delegate.crossVersionedValue, delegate.classpathExtra) +object ApplicationID { + def apply(delegate: xsbti.ApplicationID, newVersion: String): ApplicationID = + apply(delegate).copy(version = newVersion) + def apply(delegate: xsbti.ApplicationID): ApplicationID = + ApplicationID(delegate.groupID, delegate.name, delegate.version, delegate.mainClass, delegate.mainComponents, delegate.crossVersionedValue, delegate.classpathExtra) } \ No newline at end of file diff --git a/main/command/src/main/scala/sbt/MainLoop.scala b/main/command/src/main/scala/sbt/MainLoop.scala index c81283678..dac718ef2 100644 --- a/main/command/src/main/scala/sbt/MainLoop.scala +++ b/main/command/src/main/scala/sbt/MainLoop.scala @@ -3,14 +3,13 @@ */ package sbt - import scala.annotation.tailrec - import java.io.{File, PrintWriter} - import jline.TerminalFactory +import scala.annotation.tailrec +import java.io.{ File, PrintWriter } +import jline.TerminalFactory -object MainLoop -{ - /** Entry point to run the remaining commands in State with managed global logging.*/ - def runLogged(state: State): xsbti.MainResult = { +object MainLoop { + /** Entry point to run the remaining commands in State with managed global logging.*/ + def runLogged(state: State): xsbti.MainResult = { // We've disabled jline shutdown hooks to prevent classloader leaks, and have been careful to always restore // the jline terminal in finally blocks, but hitting ctrl+c prevents finally blocks from being executed, in that // case the only way to restore the terminal is in a shutdown hook. 
@@ -28,87 +27,85 @@ object MainLoop } } - /** Run loop that evaluates remaining commands and manages changes to global logging configuration.*/ - @tailrec def runLoggedLoop(state: State, logBacking: GlobalLogBacking): xsbti.MainResult = - runAndClearLast(state, logBacking) match { - case ret: Return => // delete current and last log files when exiting normally - logBacking.file.delete() - deleteLastLog(logBacking) - ret.result - case clear: ClearGlobalLog => // delete previous log file, move current to previous, and start writing to a new file - deleteLastLog(logBacking) - runLoggedLoop(clear.state, logBacking.shiftNew()) - case keep: KeepGlobalLog => // make previous log file the current log file - logBacking.file.delete - runLoggedLoop(keep.state, logBacking.unshift) - } - - /** Runs the next sequence of commands, cleaning up global logging after any exceptions. */ - def runAndClearLast(state: State, logBacking: GlobalLogBacking): RunNext = - try - runWithNewLog(state, logBacking) - catch { - case e: xsbti.FullReload => - deleteLastLog(logBacking) - throw e // pass along a reboot request - case e: Throwable => - System.err.println("sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file) - deleteLastLog(logBacking) - throw e - } + /** Run loop that evaluates remaining commands and manages changes to global logging configuration.*/ + @tailrec def runLoggedLoop(state: State, logBacking: GlobalLogBacking): xsbti.MainResult = + runAndClearLast(state, logBacking) match { + case ret: Return => // delete current and last log files when exiting normally + logBacking.file.delete() + deleteLastLog(logBacking) + ret.result + case clear: ClearGlobalLog => // delete previous log file, move current to previous, and start writing to a new file + deleteLastLog(logBacking) + runLoggedLoop(clear.state, logBacking.shiftNew()) + case keep: KeepGlobalLog => // make previous log file the current log file + logBacking.file.delete + 
runLoggedLoop(keep.state, logBacking.unshift) + } - /** Deletes the previous global log file. */ - def deleteLastLog(logBacking: GlobalLogBacking): Unit = - logBacking.last.foreach(_.delete()) + /** Runs the next sequence of commands, cleaning up global logging after any exceptions. */ + def runAndClearLast(state: State, logBacking: GlobalLogBacking): RunNext = + try + runWithNewLog(state, logBacking) + catch { + case e: xsbti.FullReload => + deleteLastLog(logBacking) + throw e // pass along a reboot request + case e: Throwable => + System.err.println("sbt appears to be exiting abnormally.\n The log file for this session is at " + logBacking.file) + deleteLastLog(logBacking) + throw e + } - /** Runs the next sequence of commands with global logging in place. */ - def runWithNewLog(state: State, logBacking: GlobalLogBacking): RunNext = - Using.fileWriter(append = true)(logBacking.file) { writer => - val out = new java.io.PrintWriter(writer) - val newLogging = state.globalLogging.newLogger(out, logBacking) - transferLevels(state, newLogging) - val loggedState = state.copy(globalLogging = newLogging) - try run(loggedState) finally out.close() - } + /** Deletes the previous global log file. */ + def deleteLastLog(logBacking: GlobalLogBacking): Unit = + logBacking.last.foreach(_.delete()) - /** Transfers logging and trace levels from the old global loggers to the new ones. */ - private[this] def transferLevels(state: State, logging: GlobalLogging) { - val old = state.globalLogging - Logger.transferLevels(old.backed, logging.backed) - (old.full, logging.full) match { // well, this is a hack - case (oldLog: AbstractLogger, newLog: AbstractLogger) => Logger.transferLevels(oldLog, newLog) - case _ => () - } - } + /** Runs the next sequence of commands with global logging in place. 
*/ + def runWithNewLog(state: State, logBacking: GlobalLogBacking): RunNext = + Using.fileWriter(append = true)(logBacking.file) { writer => + val out = new java.io.PrintWriter(writer) + val newLogging = state.globalLogging.newLogger(out, logBacking) + transferLevels(state, newLogging) + val loggedState = state.copy(globalLogging = newLogging) + try run(loggedState) finally out.close() + } - sealed trait RunNext - final class ClearGlobalLog(val state: State) extends RunNext - final class KeepGlobalLog(val state: State) extends RunNext - final class Return(val result: xsbti.MainResult) extends RunNext + /** Transfers logging and trace levels from the old global loggers to the new ones. */ + private[this] def transferLevels(state: State, logging: GlobalLogging) { + val old = state.globalLogging + Logger.transferLevels(old.backed, logging.backed) + (old.full, logging.full) match { // well, this is a hack + case (oldLog: AbstractLogger, newLog: AbstractLogger) => Logger.transferLevels(oldLog, newLog) + case _ => () + } + } - /** Runs the next sequence of commands that doesn't require global logging changes.*/ - @tailrec def run(state: State): RunNext = - state.next match - { - case State.Continue => run(next(state)) - case State.ClearGlobalLog => new ClearGlobalLog(state.continue) - case State.KeepLastLog => new KeepGlobalLog(state.continue) - case ret: State.Return => new Return(ret.result) - } + sealed trait RunNext + final class ClearGlobalLog(val state: State) extends RunNext + final class KeepGlobalLog(val state: State) extends RunNext + final class Return(val result: xsbti.MainResult) extends RunNext - def next(state: State): State = - ErrorHandling.wideConvert { state.process(Command.process) } match - { - case Right(s) => s - case Left(t: xsbti.FullReload) => throw t - case Left(t) => handleException(t, state) - } + /** Runs the next sequence of commands that doesn't require global logging changes.*/ + @tailrec def run(state: State): RunNext = + state.next 
match { + case State.Continue => run(next(state)) + case State.ClearGlobalLog => new ClearGlobalLog(state.continue) + case State.KeepLastLog => new KeepGlobalLog(state.continue) + case ret: State.Return => new Return(ret.result) + } - @deprecated("Use State.handleError", "0.13.0") - def handleException(e: Throwable, s: State): State = s.handleError(e) + def next(state: State): State = + ErrorHandling.wideConvert { state.process(Command.process) } match { + case Right(s) => s + case Left(t: xsbti.FullReload) => throw t + case Left(t) => handleException(t, state) + } - @deprecated("Use State.handleError", "0.13.0") - def handleException(t: Throwable, s: State, log: Logger): State = State.handleException(t, s, log) + @deprecated("Use State.handleError", "0.13.0") + def handleException(e: Throwable, s: State): State = s.handleError(e) - def logFullException(e: Throwable, log: Logger): Unit = State.logFullException(e, log) + @deprecated("Use State.handleError", "0.13.0") + def handleException(t: Throwable, s: State, log: Logger): State = State.handleException(t, s, log) + + def logFullException(e: Throwable, log: Logger): Unit = State.logFullException(e, log) } \ No newline at end of file diff --git a/main/command/src/main/scala/sbt/State.scala b/main/command/src/main/scala/sbt/State.scala index 16ebefdf0..0d23776ae 100644 --- a/main/command/src/main/scala/sbt/State.scala +++ b/main/command/src/main/scala/sbt/State.scala @@ -3,253 +3,255 @@ */ package sbt - import java.io.File - import java.util.concurrent.Callable +import java.io.File +import java.util.concurrent.Callable /** -Data structure representing all command execution information. - -@param configuration provides access to the launcher environment, including the application configuration, Scala versions, jvm/filesystem wide locking, and the launcher itself -@param definedCommands the list of command definitions that evaluate command strings. These may be modified to change the available commands. 
-@param onFailure the command to execute when another command fails. `onFailure` is cleared before the failure handling command is executed. -@param remainingCommands the sequence of commands to execute. This sequence may be modified to change the commands to be executed. Typically, the `::` and `:::` methods are used to prepend new commands to run. -@param exitHooks code to run before sbt exits, usually to ensure resources are cleaned up. -@param history tracks the recently executed commands -@param attributes custom command state. It is important to clean up attributes when no longer needed to avoid memory leaks and class loader leaks. -@param next the next action for the command processor to take. This may be to continue with the next command, adjust global logging, or exit. -*/ + * Data structure representing all command execution information. + * + * @param configuration provides access to the launcher environment, including the application configuration, Scala versions, jvm/filesystem wide locking, and the launcher itself + * @param definedCommands the list of command definitions that evaluate command strings. These may be modified to change the available commands. + * @param onFailure the command to execute when another command fails. `onFailure` is cleared before the failure handling command is executed. + * @param remainingCommands the sequence of commands to execute. This sequence may be modified to change the commands to be executed. Typically, the `::` and `:::` methods are used to prepend new commands to run. + * @param exitHooks code to run before sbt exits, usually to ensure resources are cleaned up. + * @param history tracks the recently executed commands + * @param attributes custom command state. It is important to clean up attributes when no longer needed to avoid memory leaks and class loader leaks. + * @param next the next action for the command processor to take. This may be to continue with the next command, adjust global logging, or exit. 
+ */ final case class State( - configuration: xsbti.AppConfiguration, - definedCommands: Seq[Command], - exitHooks: Set[ExitHook], - onFailure: Option[String], - remainingCommands: Seq[String], - history: State.History, - attributes: AttributeMap, - globalLogging: GlobalLogging, - next: State.Next -) extends Identity { - lazy val combinedParser = Command.combine(definedCommands)(this) + configuration: xsbti.AppConfiguration, + definedCommands: Seq[Command], + exitHooks: Set[ExitHook], + onFailure: Option[String], + remainingCommands: Seq[String], + history: State.History, + attributes: AttributeMap, + globalLogging: GlobalLogging, + next: State.Next) extends Identity { + lazy val combinedParser = Command.combine(definedCommands)(this) } trait Identity { - override final def hashCode = super.hashCode - override final def equals(a: Any) = super.equals(a) - override final def toString = super.toString + override final def hashCode = super.hashCode + override final def equals(a: Any) = super.equals(a) + override final def toString = super.toString } /** Convenience methods for State transformations and operations. */ trait StateOps { - def process(f: (String, State) => State): State + def process(f: (String, State) => State): State - /** Schedules `commands` to be run before any remaining commands.*/ - def ::: (commands: Seq[String]): State + /** Schedules `commands` to be run before any remaining commands.*/ + def :::(commands: Seq[String]): State - /** Schedules `command` to be run before any remaining commands.*/ - def :: (command: String): State + /** Schedules `command` to be run before any remaining commands.*/ + def ::(command: String): State - /** Sets the next command processing action to be to continue processing the next command.*/ - def continue: State + /** Sets the next command processing action to be to continue processing the next command.*/ + def continue: State - /** Reboots sbt. A reboot restarts execution from the entry point of the launcher. 
- * A reboot is designed to be as close as possible to actually restarting the JVM without actually doing so. - * Because the JVM is not restarted, JVM exit hooks are not run. - * State.exitHooks should be used instead and those will be run before rebooting. - * If `full` is true, the boot directory is deleted before starting again. - * This command is currently implemented to not return, but may be implemented in the future to only reboot at the next command processing step. */ - def reboot(full: Boolean): State + /** + * Reboots sbt. A reboot restarts execution from the entry point of the launcher. + * A reboot is designed to be as close as possible to actually restarting the JVM without actually doing so. + * Because the JVM is not restarted, JVM exit hooks are not run. + * State.exitHooks should be used instead and those will be run before rebooting. + * If `full` is true, the boot directory is deleted before starting again. + * This command is currently implemented to not return, but may be implemented in the future to only reboot at the next command processing step. + */ + def reboot(full: Boolean): State - /** Sets the next command processing action to do.*/ - def setNext(n: State.Next): State + /** Sets the next command processing action to do.*/ + def setNext(n: State.Next): State - @deprecated("Use setNext", "0.11.0") def setResult(ro: Option[xsbti.MainResult]): State + @deprecated("Use setNext", "0.11.0") def setResult(ro: Option[xsbti.MainResult]): State - /** Restarts sbt without dropping loaded Scala classes. It is a shallower restart than `reboot`. - * This method takes a snapshot of the remaining commands and will resume executing those commands after reload. - * This means that any commands added to this State will be dropped.*/ - def reload: State + /** + * Restarts sbt without dropping loaded Scala classes. It is a shallower restart than `reboot`. 
+ * This method takes a snapshot of the remaining commands and will resume executing those commands after reload. + * This means that any commands added to this State will be dropped. + */ + def reload: State - /** Sets the next command processing action to be to rotate the global log and continue executing commands.*/ - def clearGlobalLog: State - /** Sets the next command processing action to be to keep the previous log and continue executing commands. */ - def keepLastLog: State + /** Sets the next command processing action to be to rotate the global log and continue executing commands.*/ + def clearGlobalLog: State + /** Sets the next command processing action to be to keep the previous log and continue executing commands. */ + def keepLastLog: State - /** Sets the next command processing action to be to exit with a zero exit code if `ok` is true and a nonzero exit code if `ok` if false.*/ - def exit(ok: Boolean): State - /** Marks the currently executing command as failing. This triggers failure handling by the command processor. See also `State.onFailure`*/ - def fail: State + /** Sets the next command processing action to be to exit with a zero exit code if `ok` is true and a nonzero exit code if `ok` if false.*/ + def exit(ok: Boolean): State + /** Marks the currently executing command as failing. This triggers failure handling by the command processor. See also `State.onFailure`*/ + def fail: State - /** Marks the currently executing command as failing due to the given exception. - * This displays the error appropriately and triggers failure handling by the command processor. - * Note that this does not throw an exception and returns normally. - * It is only once control is returned to the command processor that failure handling at the command level occurs. */ - def handleError(t: Throwable): State + /** + * Marks the currently executing command as failing due to the given exception. 
+ * This displays the error appropriately and triggers failure handling by the command processor. + * Note that this does not throw an exception and returns normally. + * It is only once control is returned to the command processor that failure handling at the command level occurs. + */ + def handleError(t: Throwable): State - /** Registers `newCommands` as available commands. */ - def ++ (newCommands: Seq[Command]): State - /** Registers `newCommand` as an available command. */ - def + (newCommand: Command): State + /** Registers `newCommands` as available commands. */ + def ++(newCommands: Seq[Command]): State + /** Registers `newCommand` as an available command. */ + def +(newCommand: Command): State - /** Gets the value associated with `key` from the custom attributes map.*/ - def get[T](key: AttributeKey[T]): Option[T] - /** Sets the value associated with `key` in the custom attributes map.*/ - def put[T](key: AttributeKey[T], value: T): State - /** Removes the `key` and any associated value from the custom attributes map.*/ - def remove(key: AttributeKey[_]): State - /** Sets the value associated with `key` in the custom attributes map by transforming the current value.*/ - def update[T](key: AttributeKey[T])(f: Option[T] => T): State - /** Returns true if `key` exists in the custom attributes map, false if it does not exist.*/ - def has(key: AttributeKey[_]): Boolean + /** Gets the value associated with `key` from the custom attributes map.*/ + def get[T](key: AttributeKey[T]): Option[T] + /** Sets the value associated with `key` in the custom attributes map.*/ + def put[T](key: AttributeKey[T], value: T): State + /** Removes the `key` and any associated value from the custom attributes map.*/ + def remove(key: AttributeKey[_]): State + /** Sets the value associated with `key` in the custom attributes map by transforming the current value.*/ + def update[T](key: AttributeKey[T])(f: Option[T] => T): State + /** Returns true if `key` exists in the custom 
attributes map, false if it does not exist.*/ + def has(key: AttributeKey[_]): Boolean - /** The application base directory, which is not necessarily the current working directory.*/ - def baseDir: File + /** The application base directory, which is not necessarily the current working directory.*/ + def baseDir: File - /** The Logger used for general command logging.*/ - def log: Logger + /** The Logger used for general command logging.*/ + def log: Logger - /** Evaluates the provided expression with a JVM-wide and machine-wide lock on `file`.*/ - def locked[T](file: File)(t: => T): T + /** Evaluates the provided expression with a JVM-wide and machine-wide lock on `file`.*/ + def locked[T](file: File)(t: => T): T - /** Runs any defined exitHooks and then clears them.*/ - def runExitHooks(): State - /** Registers a new exit hook, which will run when sbt exits or restarts.*/ - def addExitHook(f: => Unit): State + /** Runs any defined exitHooks and then clears them.*/ + def runExitHooks(): State + /** Registers a new exit hook, which will run when sbt exits or restarts.*/ + def addExitHook(f: => Unit): State - /** An advisory flag that is `true` if this application will execute commands based on user input.*/ - def interactive: Boolean - /** Changes the advisory `interactive` flag. */ - def setInteractive(flag: Boolean): State + /** An advisory flag that is `true` if this application will execute commands based on user input.*/ + def interactive: Boolean + /** Changes the advisory `interactive` flag. */ + def setInteractive(flag: Boolean): State - /** Get the class loader cache for the application.*/ - def classLoaderCache: classpath.ClassLoaderCache + /** Get the class loader cache for the application.*/ + def classLoaderCache: classpath.ClassLoaderCache - /** Create and register a class loader cache. This should be called once at the application entry-point.*/ - def initializeClassLoaderCache: State + /** Create and register a class loader cache. 
This should be called once at the application entry-point.*/ + def initializeClassLoaderCache: State } -object State -{ - /** Indicates where command execution should resume after a failure.*/ - val FailureWall = BasicCommandStrings.FailureWall +object State { + /** Indicates where command execution should resume after a failure.*/ + val FailureWall = BasicCommandStrings.FailureWall - /** Represents the next action for the command processor.*/ - sealed trait Next - /** Indicates that the command processor should process the next command.*/ - object Continue extends Next - /** Indicates that the application should exit with the given result.*/ - final class Return(val result: xsbti.MainResult) extends Next - /** Indicates that global logging should be rotated.*/ - final object ClearGlobalLog extends Next - /** Indicates that the previous log file should be preserved instead of discarded.*/ - final object KeepLastLog extends Next + /** Represents the next action for the command processor.*/ + sealed trait Next + /** Indicates that the command processor should process the next command.*/ + object Continue extends Next + /** Indicates that the application should exit with the given result.*/ + final class Return(val result: xsbti.MainResult) extends Next + /** Indicates that global logging should be rotated.*/ + final object ClearGlobalLog extends Next + /** Indicates that the previous log file should be preserved instead of discarded.*/ + final object KeepLastLog extends Next - /** Provides a list of recently executed commands. The commands are stored as processed instead of as entered by the user. - * @param executed the list of the most recently executed commands, with the most recent command first. - * @param maxSize the maximum number of commands to keep, or 0 to keep an unlimited number. 
*/ - final class History private[State](val executed: Seq[String], val maxSize: Int) - { - /** Adds `command` as the most recently executed command.*/ - def :: (command: String): History = - { - val prependTo = if(maxSize > 0 && executed.size >= maxSize) executed.take(maxSize - 1) else executed - new History(command +: prependTo, maxSize) - } - /** Changes the maximum number of commands kept, adjusting the current history if necessary.*/ - def setMaxSize(size: Int): History = - new History(if(size <= 0) executed else executed.take(size), size) - def current: String = executed.head - def previous: Option[String] = executed.drop(1).headOption - } - /** Constructs an empty command History with a default, finite command limit.*/ - def newHistory = new History(Vector.empty, complete.HistoryCommands.MaxLines) + /** + * Provides a list of recently executed commands. The commands are stored as processed instead of as entered by the user. + * @param executed the list of the most recently executed commands, with the most recent command first. + * @param maxSize the maximum number of commands to keep, or 0 to keep an unlimited number. 
+ */ + final class History private[State] (val executed: Seq[String], val maxSize: Int) { + /** Adds `command` as the most recently executed command.*/ + def ::(command: String): History = + { + val prependTo = if (maxSize > 0 && executed.size >= maxSize) executed.take(maxSize - 1) else executed + new History(command +: prependTo, maxSize) + } + /** Changes the maximum number of commands kept, adjusting the current history if necessary.*/ + def setMaxSize(size: Int): History = + new History(if (size <= 0) executed else executed.take(size), size) + def current: String = executed.head + def previous: Option[String] = executed.drop(1).headOption + } + /** Constructs an empty command History with a default, finite command limit.*/ + def newHistory = new History(Vector.empty, complete.HistoryCommands.MaxLines) - def defaultReload(state: State): Reboot = - { - val app = state.configuration.provider - new Reboot(app.scalaProvider.version, state.remainingCommands, app.id, state.configuration.baseDirectory) - } + def defaultReload(state: State): Reboot = + { + val app = state.configuration.provider + new Reboot(app.scalaProvider.version, state.remainingCommands, app.id, state.configuration.baseDirectory) + } - /** Provides operations and transformations on State. 
*/ - implicit def stateOps(s: State): StateOps = new StateOps { - def process(f: (String, State) => State): State = - s.remainingCommands match { - case Seq() => exit(true) - case Seq(x, xs @ _*) => - log.debug(s"> $x") - f(x, s.copy(remainingCommands = xs, history = x :: s.history)) - } - s.copy(remainingCommands = s.remainingCommands.drop(1)) - def ::: (newCommands: Seq[String]): State = s.copy(remainingCommands = newCommands ++ s.remainingCommands) - def :: (command: String): State = (command :: Nil) ::: this - def ++ (newCommands: Seq[Command]): State = s.copy(definedCommands = (s.definedCommands ++ newCommands).distinct) - def + (newCommand: Command): State = this ++ (newCommand :: Nil) - def baseDir: File = s.configuration.baseDirectory - def setNext(n: Next) = s.copy(next = n) - def setResult(ro: Option[xsbti.MainResult]) = ro match { case None => continue; case Some(r) => setNext(new Return(r)) } - def continue = setNext(Continue) - def reboot(full: Boolean) ={ runExitHooks(); throw new xsbti.FullReload(s.remainingCommands.toArray, full) } - def reload = runExitHooks().setNext(new Return(defaultReload(s))) - def clearGlobalLog = setNext(ClearGlobalLog) - def keepLastLog = setNext(KeepLastLog) - def exit(ok: Boolean) = runExitHooks().setNext(new Return(Exit(if(ok) 0 else 1))) - def get[T](key: AttributeKey[T]) = s.attributes get key - def put[T](key: AttributeKey[T], value: T) = s.copy(attributes = s.attributes.put(key, value)) - def update[T](key: AttributeKey[T])(f: Option[T] => T): State = put(key, f(get(key))) - def has(key: AttributeKey[_]) = s.attributes contains key - def remove(key: AttributeKey[_]) = s.copy(attributes = s.attributes remove key) - def log = s.globalLogging.full - def handleError(t: Throwable): State = handleException(t, s, log) - def fail = - { - import BasicCommandStrings.Compat.{FailureWall => CompatFailureWall} - val remaining = s.remainingCommands.dropWhile(c => c != FailureWall && c != CompatFailureWall) - if(remaining.isEmpty) 
- applyOnFailure(s, Nil, exit(ok = false)) - else - applyOnFailure(s, remaining, s.copy(remainingCommands = remaining)) - } - private[this] def applyOnFailure(s: State, remaining: Seq[String], noHandler: => State): State = - s.onFailure match - { - case Some(c) => s.copy(remainingCommands = c +: remaining, onFailure = None) - case None => noHandler - } + /** Provides operations and transformations on State. */ + implicit def stateOps(s: State): StateOps = new StateOps { + def process(f: (String, State) => State): State = + s.remainingCommands match { + case Seq() => exit(true) + case Seq(x, xs @ _*) => + log.debug(s"> $x") + f(x, s.copy(remainingCommands = xs, history = x :: s.history)) + } + s.copy(remainingCommands = s.remainingCommands.drop(1)) + def :::(newCommands: Seq[String]): State = s.copy(remainingCommands = newCommands ++ s.remainingCommands) + def ::(command: String): State = (command :: Nil) ::: this + def ++(newCommands: Seq[Command]): State = s.copy(definedCommands = (s.definedCommands ++ newCommands).distinct) + def +(newCommand: Command): State = this ++ (newCommand :: Nil) + def baseDir: File = s.configuration.baseDirectory + def setNext(n: Next) = s.copy(next = n) + def setResult(ro: Option[xsbti.MainResult]) = ro match { case None => continue; case Some(r) => setNext(new Return(r)) } + def continue = setNext(Continue) + def reboot(full: Boolean) = { runExitHooks(); throw new xsbti.FullReload(s.remainingCommands.toArray, full) } + def reload = runExitHooks().setNext(new Return(defaultReload(s))) + def clearGlobalLog = setNext(ClearGlobalLog) + def keepLastLog = setNext(KeepLastLog) + def exit(ok: Boolean) = runExitHooks().setNext(new Return(Exit(if (ok) 0 else 1))) + def get[T](key: AttributeKey[T]) = s.attributes get key + def put[T](key: AttributeKey[T], value: T) = s.copy(attributes = s.attributes.put(key, value)) + def update[T](key: AttributeKey[T])(f: Option[T] => T): State = put(key, f(get(key))) + def has(key: AttributeKey[_]) = 
s.attributes contains key + def remove(key: AttributeKey[_]) = s.copy(attributes = s.attributes remove key) + def log = s.globalLogging.full + def handleError(t: Throwable): State = handleException(t, s, log) + def fail = + { + import BasicCommandStrings.Compat.{ FailureWall => CompatFailureWall } + val remaining = s.remainingCommands.dropWhile(c => c != FailureWall && c != CompatFailureWall) + if (remaining.isEmpty) + applyOnFailure(s, Nil, exit(ok = false)) + else + applyOnFailure(s, remaining, s.copy(remainingCommands = remaining)) + } + private[this] def applyOnFailure(s: State, remaining: Seq[String], noHandler: => State): State = + s.onFailure match { + case Some(c) => s.copy(remainingCommands = c +: remaining, onFailure = None) + case None => noHandler + } + def addExitHook(act: => Unit): State = + s.copy(exitHooks = s.exitHooks + ExitHook(act)) + def runExitHooks(): State = { + ExitHooks.runExitHooks(s.exitHooks.toSeq) + s.copy(exitHooks = Set.empty) + } + def locked[T](file: File)(t: => T): T = + s.configuration.provider.scalaProvider.launcher.globalLock.apply(file, new Callable[T] { def call = t }) - def addExitHook(act: => Unit): State = - s.copy(exitHooks = s.exitHooks + ExitHook(act)) - def runExitHooks(): State = { - ExitHooks.runExitHooks(s.exitHooks.toSeq) - s.copy(exitHooks = Set.empty) - } - def locked[T](file: File)(t: => T): T = - s.configuration.provider.scalaProvider.launcher.globalLock.apply(file, new Callable[T] { def call = t }) + def interactive = getBoolean(s, BasicKeys.interactive, false) + def setInteractive(i: Boolean) = s.put(BasicKeys.interactive, i) - def interactive = getBoolean(s, BasicKeys.interactive, false) - def setInteractive(i: Boolean) = s.put(BasicKeys.interactive, i) + def classLoaderCache: classpath.ClassLoaderCache = s get BasicKeys.classLoaderCache getOrElse newClassLoaderCache + def initializeClassLoaderCache = s.put(BasicKeys.classLoaderCache, newClassLoaderCache) + private[this] def newClassLoaderCache = new 
classpath.ClassLoaderCache(s.configuration.provider.scalaProvider.launcher.topLoader) + } - def classLoaderCache: classpath.ClassLoaderCache = s get BasicKeys.classLoaderCache getOrElse newClassLoaderCache - def initializeClassLoaderCache = s.put(BasicKeys.classLoaderCache, newClassLoaderCache) - private[this] def newClassLoaderCache = new classpath.ClassLoaderCache(s.configuration.provider.scalaProvider.launcher.topLoader) - } + import ExceptionCategory._ - import ExceptionCategory._ - - private[sbt] def handleException(t: Throwable, s: State, log: Logger): State = - { - ExceptionCategory(t) match { - case AlreadyHandled => () - case m: MessageOnly => log.error(m.message) - case f: Full => logFullException(f.exception, log) - } - s.fail - } - private[sbt] def logFullException(e: Throwable, log: Logger) - { - log.trace(e) - log.error(ErrorHandling reducedToString e) - log.error("Use 'last' for the full log.") - } - private[sbt] def getBoolean(s: State, key: AttributeKey[Boolean], default: Boolean): Boolean = - s.get(key) getOrElse default + private[sbt] def handleException(t: Throwable, s: State, log: Logger): State = + { + ExceptionCategory(t) match { + case AlreadyHandled => () + case m: MessageOnly => log.error(m.message) + case f: Full => logFullException(f.exception, log) + } + s.fail + } + private[sbt] def logFullException(e: Throwable, log: Logger) { + log.trace(e) + log.error(ErrorHandling reducedToString e) + log.error("Use 'last' for the full log.") + } + private[sbt] def getBoolean(s: State, key: AttributeKey[Boolean], default: Boolean): Boolean = + s.get(key) getOrElse default } \ No newline at end of file diff --git a/main/command/src/main/scala/sbt/Watched.scala b/main/command/src/main/scala/sbt/Watched.scala index dc3d90c50..3dc1bd6e0 100644 --- a/main/command/src/main/scala/sbt/Watched.scala +++ b/main/command/src/main/scala/sbt/Watched.scala @@ -3,81 +3,78 @@ */ package sbt - import BasicCommandStrings.ClearOnFailure - import State.FailureWall - 
import annotation.tailrec - import java.io.File - import Types.const +import BasicCommandStrings.ClearOnFailure +import State.FailureWall +import annotation.tailrec +import java.io.File +import Types.const -trait Watched -{ - /** The files watched when an action is run with a preceeding ~ */ - def watchPaths(s: State): Seq[File] = Nil - def terminateWatch(key: Int): Boolean = Watched.isEnter(key) - /** The time in milliseconds between checking for changes. The actual time between the last change made to a file and the - * execution time is between `pollInterval` and `pollInterval*2`.*/ - def pollInterval: Int = Watched.PollDelayMillis - /** The message to show when triggered execution waits for sources to change.*/ - def watchingMessage(s: WatchState): String = Watched.defaultWatchingMessage(s) - /** The message to show before an action is run. */ - def triggeredMessage(s: WatchState): String = Watched.defaultTriggeredMessage(s) +trait Watched { + /** The files watched when an action is run with a preceding ~ */ + def watchPaths(s: State): Seq[File] = Nil + def terminateWatch(key: Int): Boolean = Watched.isEnter(key) + /** + * The time in milliseconds between checking for changes. The actual time between the last change made to a file and the + * execution time is between `pollInterval` and `pollInterval*2`. + */ + def pollInterval: Int = Watched.PollDelayMillis + /** The message to show when triggered execution waits for sources to change.*/ + def watchingMessage(s: WatchState): String = Watched.defaultWatchingMessage(s) + /** The message to show before an action is run. */ + def triggeredMessage(s: WatchState): String = Watched.defaultTriggeredMessage(s) } -object Watched -{ - val defaultWatchingMessage: WatchState => String = _.count + ". Waiting for source changes... 
(press enter to interrupt)" - val defaultTriggeredMessage: WatchState => String = const("") - val clearWhenTriggered: WatchState => String = const(clearScreen) - def clearScreen: String = "\033[2J\033[0;0H" +object Watched { + val defaultWatchingMessage: WatchState => String = _.count + ". Waiting for source changes... (press enter to interrupt)" + val defaultTriggeredMessage: WatchState => String = const("") + val clearWhenTriggered: WatchState => String = const(clearScreen) + def clearScreen: String = "\033[2J\033[0;0H" - private[this] class AWatched extends Watched - - def multi(base: Watched, paths: Seq[Watched]): Watched = - new AWatched - { - override def watchPaths(s: State) = (base.watchPaths(s) /: paths)(_ ++ _.watchPaths(s)) - override def terminateWatch(key: Int): Boolean = base.terminateWatch(key) - override val pollInterval = (base +: paths).map(_.pollInterval).min - override def watchingMessage(s: WatchState) = base.watchingMessage(s) - override def triggeredMessage(s: WatchState) = base.triggeredMessage(s) - } - def empty: Watched = new AWatched - - val PollDelayMillis = 500 - def isEnter(key: Int): Boolean = key == 10 || key == 13 - def printIfDefined(msg: String) = if(!msg.isEmpty) System.out.println(msg) + private[this] class AWatched extends Watched - def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State = - { - @tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate) - val sourcesFinder = PathFinder { watched watchPaths s } - val watchState = s get ContinuousState getOrElse WatchState.empty + def multi(base: Watched, paths: Seq[Watched]): Watched = + new AWatched { + override def watchPaths(s: State) = (base.watchPaths(s) /: paths)(_ ++ _.watchPaths(s)) + override def terminateWatch(key: Int): Boolean = base.terminateWatch(key) + override val pollInterval = (base +: paths).map(_.pollInterval).min + override def watchingMessage(s: 
WatchState) = base.watchingMessage(s) + override def triggeredMessage(s: WatchState) = base.triggeredMessage(s) + } + def empty: Watched = new AWatched - if(watchState.count > 0) - printIfDefined(watched watchingMessage watchState) + val PollDelayMillis = 500 + def isEnter(key: Int): Boolean = key == 10 || key == 13 + def printIfDefined(msg: String) = if (!msg.isEmpty) System.out.println(msg) - val (triggered, newWatchState, newState) = - try { - val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate) - (triggered, newWatchState, s) - } - catch { case e: Exception => - val log = s.log - log.error("Error occurred obtaining files to watch. Terminating continuous execution...") - MainLoop.handleException(e, s, log) - (false, watchState, s.fail) - } + def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State = + { + @tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate) + val sourcesFinder = PathFinder { watched watchPaths s } + val watchState = s get ContinuousState getOrElse WatchState.empty - if(triggered) { - printIfDefined(watched triggeredMessage newWatchState) - (ClearOnFailure :: next :: FailureWall :: repeat :: s).put(ContinuousState, newWatchState) - } - else - { - while (System.in.available() > 0) System.in.read() - s.put(ContinuousState, WatchState.empty) - } - } - val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.") - val Configuration = AttributeKey[Watched]("watched-configuration", "Configures continuous execution.") + if (watchState.count > 0) + printIfDefined(watched watchingMessage watchState) + + val (triggered, newWatchState, newState) = + try { + val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate) + (triggered, newWatchState, s) + } 
catch { + case e: Exception => + val log = s.log + log.error("Error occurred obtaining files to watch. Terminating continuous execution...") + MainLoop.handleException(e, s, log) + (false, watchState, s.fail) + } + + if (triggered) { + printIfDefined(watched triggeredMessage newWatchState) + (ClearOnFailure :: next :: FailureWall :: repeat :: s).put(ContinuousState, newWatchState) + } else { + while (System.in.available() > 0) System.in.read() + s.put(ContinuousState, WatchState.empty) + } + } + val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.") + val Configuration = AttributeKey[Watched]("watched-configuration", "Configures continuous execution.") } \ No newline at end of file diff --git a/main/settings/src/main/scala/sbt/Append.scala b/main/settings/src/main/scala/sbt/Append.scala index 92ccb9404..cd3f33c59 100644 --- a/main/settings/src/main/scala/sbt/Append.scala +++ b/main/settings/src/main/scala/sbt/Append.scala @@ -1,49 +1,46 @@ package sbt - import java.io.File - import Def.Classpath - import scala.annotation.implicitNotFound +import java.io.File +import Def.Classpath +import scala.annotation.implicitNotFound -object Append -{ - @implicitNotFound(msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") - sealed trait Value[A,B] - { - def appendValue(a: A, b: B): A - } - @implicitNotFound(msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") - sealed trait Values[A,-B] - { - def appendValues(a: A, b: B): A - } - sealed trait Sequence[A,-B,T] extends Value[A,T] with Values[A,B] +object Append { + @implicitNotFound(msg = "No implicit for Append.Value[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") + sealed trait Value[A, B] { + def appendValue(a: A, b: B): A + } + @implicitNotFound(msg = "No implicit for Append.Values[${A}, ${B}] found,\n so ${B} cannot be appended to ${A}") + sealed trait Values[A, -B] { + def 
appendValues(a: A, b: B): A + } + sealed trait Sequence[A, -B, T] extends Value[A, T] with Values[A, B] - implicit def appendSeq[T, V <: T]: Sequence[Seq[T], Seq[V], V] = new Sequence[Seq[T], Seq[V], V] { - def appendValues(a: Seq[T], b: Seq[V]): Seq[T] = a ++ b - def appendValue(a: Seq[T], b: V): Seq[T] = a :+ b - } - implicit def appendString: Value[String, String] = new Value[String, String] { - def appendValue(a: String, b: String) = a + b - } - implicit def appendInt = new Value[Int, Int] { - def appendValue(a: Int, b: Int) = a + b - } - implicit def appendLong = new Value[Long, Long] { - def appendValue(a: Long, b: Long) = a + b - } - implicit def appendDouble = new Value[Double, Double] { - def appendValue(a: Double, b: Double) = a + b - } - implicit def appendClasspath: Sequence[Classpath, Seq[File], File] = new Sequence[Classpath, Seq[File], File] { - def appendValues(a: Classpath, b: Seq[File]): Classpath = a ++ Attributed.blankSeq(b) - def appendValue(a: Classpath, b: File): Classpath = a :+ Attributed.blank(b) - } - implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] = new Sequence[Set[T], Set[V], V] { - def appendValues(a: Set[T], b: Set[V]): Set[T] = a ++ b - def appendValue(a: Set[T], b: V): Set[T] = a + b - } - implicit def appendMap[A,B, X <: A, Y <: B]: Sequence[Map[A,B], Map[X,Y], (X,Y)] = new Sequence[Map[A,B], Map[X,Y], (X,Y)] { - def appendValues(a: Map[A,B], b: Map[X,Y]): Map[A,B] = a ++ b - def appendValue(a: Map[A,B], b: (X,Y)): Map[A,B] = a + b - } + implicit def appendSeq[T, V <: T]: Sequence[Seq[T], Seq[V], V] = new Sequence[Seq[T], Seq[V], V] { + def appendValues(a: Seq[T], b: Seq[V]): Seq[T] = a ++ b + def appendValue(a: Seq[T], b: V): Seq[T] = a :+ b + } + implicit def appendString: Value[String, String] = new Value[String, String] { + def appendValue(a: String, b: String) = a + b + } + implicit def appendInt = new Value[Int, Int] { + def appendValue(a: Int, b: Int) = a + b + } + implicit def appendLong = new Value[Long, 
Long] { + def appendValue(a: Long, b: Long) = a + b + } + implicit def appendDouble = new Value[Double, Double] { + def appendValue(a: Double, b: Double) = a + b + } + implicit def appendClasspath: Sequence[Classpath, Seq[File], File] = new Sequence[Classpath, Seq[File], File] { + def appendValues(a: Classpath, b: Seq[File]): Classpath = a ++ Attributed.blankSeq(b) + def appendValue(a: Classpath, b: File): Classpath = a :+ Attributed.blank(b) + } + implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] = new Sequence[Set[T], Set[V], V] { + def appendValues(a: Set[T], b: Set[V]): Set[T] = a ++ b + def appendValue(a: Set[T], b: V): Set[T] = a + b + } + implicit def appendMap[A, B, X <: A, Y <: B]: Sequence[Map[A, B], Map[X, Y], (X, Y)] = new Sequence[Map[A, B], Map[X, Y], (X, Y)] { + def appendValues(a: Map[A, B], b: Map[X, Y]): Map[A, B] = a ++ b + def appendValue(a: Map[A, B], b: (X, Y)): Map[A, B] = a + b + } } \ No newline at end of file diff --git a/main/settings/src/main/scala/sbt/ConfigKey.scala b/main/settings/src/main/scala/sbt/ConfigKey.scala index 1398c86c9..8acbb92ad 100644 --- a/main/settings/src/main/scala/sbt/ConfigKey.scala +++ b/main/settings/src/main/scala/sbt/ConfigKey.scala @@ -1,7 +1,6 @@ package sbt final case class ConfigKey(name: String) -object ConfigKey -{ - implicit def configurationToKey(c: Configuration): ConfigKey = ConfigKey(c.name) +object ConfigKey { + implicit def configurationToKey(c: Configuration): ConfigKey = ConfigKey(c.name) } diff --git a/main/settings/src/main/scala/sbt/Def.scala b/main/settings/src/main/scala/sbt/Def.scala index 702ca6919..073505b6e 100644 --- a/main/settings/src/main/scala/sbt/Def.scala +++ b/main/settings/src/main/scala/sbt/Def.scala @@ -1,120 +1,118 @@ package sbt - import Types.const - import complete.Parser - import java.io.File - import Scope.{ThisScope,GlobalScope} - import KeyRanks.{DTask, Invisible} +import Types.const +import complete.Parser +import java.io.File +import Scope.{ ThisScope, 
GlobalScope } +import KeyRanks.{ DTask, Invisible } /** A concrete settings system that uses `sbt.Scope` for the scope type. */ -object Def extends Init[Scope] with TaskMacroExtra -{ - type Classpath = Seq[Attributed[File]] +object Def extends Init[Scope] with TaskMacroExtra { + type Classpath = Seq[Attributed[File]] - val triggeredBy = AttributeKey[Seq[Task[_]]]("triggered-by") - val runBefore = AttributeKey[Seq[Task[_]]]("run-before") - val resolvedScoped = SettingKey[ScopedKey[_]]("resolved-scoped", "The ScopedKey for the referencing setting or task.", KeyRanks.DSetting) - private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]]("task-definition-key", "Internal: used to map a task back to its ScopedKey.", Invisible) + val triggeredBy = AttributeKey[Seq[Task[_]]]("triggered-by") + val runBefore = AttributeKey[Seq[Task[_]]]("run-before") + val resolvedScoped = SettingKey[ScopedKey[_]]("resolved-scoped", "The ScopedKey for the referencing setting or task.", KeyRanks.DSetting) + private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]]("task-definition-key", "Internal: used to map a task back to its ScopedKey.", Invisible) - lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None) - def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] = - new Show[ScopedKey[_]] { def apply(key: ScopedKey[_]) = displayFull(key, keyNameColor) } + lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None) + def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] = + new Show[ScopedKey[_]] { def apply(key: ScopedKey[_]) = displayFull(key, keyNameColor) } - def showRelativeKey(current: ProjectRef, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] = new Show[ScopedKey[_]] { - def apply(key: ScopedKey[_]) = - Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayRelative(current, multi, ref)) - } - def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String = project match { - 
case BuildRef(current.build) => "{.}/" - case `current` => if(multi) current.project + "/" else "" - case ProjectRef(current.build, x) => x + "/" - case _ => Reference.display(project) + "/" - } - def displayFull(scoped: ScopedKey[_]): String = displayFull(scoped, None) - def displayFull(scoped: ScopedKey[_], keyNameColor: Option[String]): String = Scope.display(scoped.scope, colored(scoped.key.label, keyNameColor)) - def displayMasked(scoped: ScopedKey[_], mask: ScopeMask): String = Scope.displayMasked(scoped.scope, scoped.key.label, mask) + def showRelativeKey(current: ProjectRef, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] = new Show[ScopedKey[_]] { + def apply(key: ScopedKey[_]) = + Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayRelative(current, multi, ref)) + } + def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String = project match { + case BuildRef(current.build) => "{.}/" + case `current` => if (multi) current.project + "/" else "" + case ProjectRef(current.build, x) => x + "/" + case _ => Reference.display(project) + "/" + } + def displayFull(scoped: ScopedKey[_]): String = displayFull(scoped, None) + def displayFull(scoped: ScopedKey[_], keyNameColor: Option[String]): String = Scope.display(scoped.scope, colored(scoped.key.label, keyNameColor)) + def displayMasked(scoped: ScopedKey[_], mask: ScopeMask): String = Scope.displayMasked(scoped.scope, scoped.key.label, mask) - def colored(s: String, color: Option[String]): String = color match { - case Some(c) => c + s + scala.Console.RESET - case None => s - } + def colored(s: String, color: Option[String]): String = color match { + case Some(c) => c + s + scala.Console.RESET + case None => s + } - override def deriveAllowed[T](s: Setting[T], allowDynamic: Boolean): Option[String] = - super.deriveAllowed(s, allowDynamic) orElse - (if(s.key.scope != ThisScope) Some(s"Scope cannot be defined for ${definedSettingString(s)}") 
else None ) orElse - s.dependencies.find(k => k.scope != ThisScope).map(k => s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}") + override def deriveAllowed[T](s: Setting[T], allowDynamic: Boolean): Option[String] = + super.deriveAllowed(s, allowDynamic) orElse + (if (s.key.scope != ThisScope) Some(s"Scope cannot be defined for ${definedSettingString(s)}") else None) orElse + s.dependencies.find(k => k.scope != ThisScope).map(k => s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}") - override def intersect(s1: Scope, s2: Scope)(implicit delegates: Scope => Seq[Scope]): Option[Scope] = - if (s2 == GlobalScope) Some(s1) // s1 is more specific - else if (s1 == GlobalScope) Some(s2) // s2 is more specific - else super.intersect(s1, s2) + override def intersect(s1: Scope, s2: Scope)(implicit delegates: Scope => Seq[Scope]): Option[Scope] = + if (s2 == GlobalScope) Some(s1) // s1 is more specific + else if (s1 == GlobalScope) Some(s2) // s2 is more specific + else super.intersect(s1, s2) + private[this] def definedSettingString(s: Setting[_]): String = + s"derived setting ${s.key.key.label}${positionString(s)}" + private[this] def positionString(s: Setting[_]): String = + s.positionString match { case None => ""; case Some(pos) => s" defined at $pos" } - private[this] def definedSettingString(s: Setting[_]): String = - s"derived setting ${s.key.key.label}${positionString(s)}" - private[this] def positionString(s: Setting[_]): String = - s.positionString match { case None => ""; case Some(pos) => s" defined at $pos" } + /** + * A default Parser for splitting input into space-separated arguments. + * `argLabel` is an optional, fixed label shown for an argument during tab completion. + */ + def spaceDelimited(argLabel: String = ""): Parser[Seq[String]] = complete.Parsers.spaceDelimited(argLabel) + /** Lifts the result of a setting initialization into a Task. 
*/ + def toITask[T](i: Initialize[T]): Initialize[Task[T]] = map(i)(std.TaskExtra.inlineTask) - /** A default Parser for splitting input into space-separated arguments. - * `argLabel` is an optional, fixed label shown for an argument during tab completion.*/ - def spaceDelimited(argLabel: String = ""): Parser[Seq[String]] = complete.Parsers.spaceDelimited(argLabel) + def toSParser[T](p: Parser[T]): State => Parser[T] = const(p) + def toISParser[T](p: Initialize[Parser[T]]): Initialize[State => Parser[T]] = p(toSParser) + def toIParser[T](p: Initialize[InputTask[T]]): Initialize[State => Parser[Task[T]]] = p(_.parser) - /** Lifts the result of a setting initialization into a Task. */ - def toITask[T](i: Initialize[T]): Initialize[Task[T]] = map(i)(std.TaskExtra.inlineTask) + import language.experimental.macros + import std.TaskMacro.{ inputTaskMacroImpl, inputTaskDynMacroImpl, taskDynMacroImpl, taskMacroImpl } + import std.SettingMacro.{ settingDynMacroImpl, settingMacroImpl } + import std.{ InputEvaluated, MacroPrevious, MacroValue, MacroTaskValue, ParserInput } - def toSParser[T](p: Parser[T]): State => Parser[T] = const(p) - def toISParser[T](p: Initialize[Parser[T]]): Initialize[State => Parser[T]] = p(toSParser) - def toIParser[T](p: Initialize[InputTask[T]]): Initialize[State => Parser[Task[T]]] = p(_.parser) + def task[T](t: T): Def.Initialize[Task[T]] = macro taskMacroImpl[T] + def taskDyn[T](t: Def.Initialize[Task[T]]): Def.Initialize[Task[T]] = macro taskDynMacroImpl[T] + def setting[T](t: T): Def.Initialize[T] = macro settingMacroImpl[T] + def settingDyn[T](t: Def.Initialize[T]): Def.Initialize[T] = macro settingDynMacroImpl[T] + def inputTask[T](t: T): Def.Initialize[InputTask[T]] = macro inputTaskMacroImpl[T] + def inputTaskDyn[T](t: Def.Initialize[Task[T]]): Def.Initialize[InputTask[T]] = macro inputTaskDynMacroImpl[T] - import language.experimental.macros - import std.TaskMacro.{inputTaskMacroImpl, inputTaskDynMacroImpl, taskDynMacroImpl, 
taskMacroImpl} - import std.SettingMacro.{settingDynMacroImpl,settingMacroImpl} - import std.{InputEvaluated, MacroPrevious, MacroValue, MacroTaskValue, ParserInput} + // The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to + // be used in task and setting macros as inputs with an ultimate result of type T - def task[T](t: T): Def.Initialize[Task[T]] = macro taskMacroImpl[T] - def taskDyn[T](t: Def.Initialize[Task[T]]): Def.Initialize[Task[T]] = macro taskDynMacroImpl[T] - def setting[T](t: T): Def.Initialize[T] = macro settingMacroImpl[T] - def settingDyn[T](t: Def.Initialize[T]): Def.Initialize[T] = macro settingDynMacroImpl[T] - def inputTask[T](t: T): Def.Initialize[InputTask[T]] = macro inputTaskMacroImpl[T] - def inputTaskDyn[T](t: Def.Initialize[Task[T]]): Def.Initialize[InputTask[T]] = macro inputTaskDynMacroImpl[T] + implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ??? + implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ??? + implicit def macroValueIInT[T](in: Initialize[InputTask[T]]): InputEvaluated[T] = ??? + implicit def taskMacroValueIT[T](in: Initialize[Task[T]]): MacroTaskValue[T] = ??? + implicit def macroPrevious[T](in: TaskKey[T]): MacroPrevious[T] = ??? - // The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to - // be used in task and setting macros as inputs with an ultimate result of type T + // The following conversions enable the types Parser[T], Initialize[Parser[T]], and Initialize[State => Parser[T]] to + // be used in the inputTask macro as an input with an ultimate result of type T + implicit def parserInitToInput[T](p: Initialize[Parser[T]]): ParserInput[T] = ??? + implicit def parserInitStateToInput[T](p: Initialize[State => Parser[T]]): ParserInput[T] = ??? - implicit def macroValueI[T](in: Initialize[T]): MacroValue[T] = ??? - implicit def macroValueIT[T](in: Initialize[Task[T]]): MacroValue[T] = ??? 
- implicit def macroValueIInT[T](in: Initialize[InputTask[T]]): InputEvaluated[T] = ??? - implicit def taskMacroValueIT[T](in: Initialize[Task[T]]): MacroTaskValue[T] = ??? - implicit def macroPrevious[T](in: TaskKey[T]): MacroPrevious[T] = ??? + import language.experimental.macros + def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T] + def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T] + def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T] - // The following conversions enable the types Parser[T], Initialize[Parser[T]], and Initialize[State => Parser[T]] to - // be used in the inputTask macro as an input with an ultimate result of type T - implicit def parserInitToInput[T](p: Initialize[Parser[T]]): ParserInput[T] = ??? - implicit def parserInitStateToInput[T](p: Initialize[State => Parser[T]]): ParserInput[T] = ??? - - import language.experimental.macros - def settingKey[T](description: String): SettingKey[T] = macro std.KeyMacro.settingKeyImpl[T] - def taskKey[T](description: String): TaskKey[T] = macro std.KeyMacro.taskKeyImpl[T] - def inputKey[T](description: String): InputKey[T] = macro std.KeyMacro.inputKeyImpl[T] - - private[sbt] def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) = (TaskKey[T](name, description, DTask), dummyTask(name)) - private[sbt] def dummyTask[T](name: String): Task[T] = - { - import std.TaskExtra.{task => newTask, _} - val base: Task[T] = newTask( sys.error("Dummy task '" + name + "' did not get converted to a full task.") ) named name - base.copy(info = base.info.set(isDummyTask, true)) - } - private[sbt] def isDummy(t: Task[_]): Boolean = t.info.attributes.get(isDummyTask) getOrElse false - private[sbt] val isDummyTask = AttributeKey[Boolean]("is-dummy-task", "Internal: used to identify dummy tasks. 
sbt injects values for these tasks at the start of task execution.", Invisible) - private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.") - private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]]("streams-manager", "Streams manager, which provides streams for different contexts.") + private[sbt] def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) = (TaskKey[T](name, description, DTask), dummyTask(name)) + private[sbt] def dummyTask[T](name: String): Task[T] = + { + import std.TaskExtra.{ task => newTask, _ } + val base: Task[T] = newTask(sys.error("Dummy task '" + name + "' did not get converted to a full task.")) named name + base.copy(info = base.info.set(isDummyTask, true)) + } + private[sbt] def isDummy(t: Task[_]): Boolean = t.info.attributes.get(isDummyTask) getOrElse false + private[sbt] val isDummyTask = AttributeKey[Boolean]("is-dummy-task", "Internal: used to identify dummy tasks. sbt injects values for these tasks at the start of task execution.", Invisible) + private[sbt] val (stateKey, dummyState) = dummy[State]("state", "Current build state.") + private[sbt] val (streamsManagerKey, dummyStreamsManager) = Def.dummy[std.Streams[ScopedKey[_]]]("streams-manager", "Streams manager, which provides streams for different contexts.") } // these need to be mixed into the sbt package object because the target doesn't involve Initialize or anything in Def -trait TaskMacroExtra -{ - implicit def macroValueT[T](in: Task[T]): std.MacroValue[T] = ??? - implicit def macroValueIn[T](in: InputTask[T]): std.InputEvaluated[T] = ??? - implicit def parserToInput[T](in: Parser[T]): std.ParserInput[T] = ??? - implicit def stateParserToInput[T](in: State => Parser[T]): std.ParserInput[T] = ??? +trait TaskMacroExtra { + implicit def macroValueT[T](in: Task[T]): std.MacroValue[T] = ??? + implicit def macroValueIn[T](in: InputTask[T]): std.InputEvaluated[T] = ??? 
+ implicit def parserToInput[T](in: Parser[T]): std.ParserInput[T] = ??? + implicit def stateParserToInput[T](in: State => Parser[T]): std.ParserInput[T] = ??? } \ No newline at end of file diff --git a/main/settings/src/main/scala/sbt/DelegateIndex.scala b/main/settings/src/main/scala/sbt/DelegateIndex.scala index 422146c46..66df26abe 100644 --- a/main/settings/src/main/scala/sbt/DelegateIndex.scala +++ b/main/settings/src/main/scala/sbt/DelegateIndex.scala @@ -1,19 +1,17 @@ package sbt -sealed trait DelegateIndex -{ - def project(ref: ProjectRef): Seq[ScopeAxis[ResolvedReference]] - def config(ref: ProjectRef, conf: ConfigKey): Seq[ScopeAxis[ConfigKey]] -// def task(ref: ProjectRef, task: ScopedKey[_]): Seq[ScopeAxis[ScopedKey[_]]] -// def extra(ref: ProjectRef, e: AttributeMap): Seq[ScopeAxis[AttributeMap]] +sealed trait DelegateIndex { + def project(ref: ProjectRef): Seq[ScopeAxis[ResolvedReference]] + def config(ref: ProjectRef, conf: ConfigKey): Seq[ScopeAxis[ConfigKey]] + // def task(ref: ProjectRef, task: ScopedKey[_]): Seq[ScopeAxis[ScopedKey[_]]] + // def extra(ref: ProjectRef, e: AttributeMap): Seq[ScopeAxis[AttributeMap]] } -private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) extends DelegateIndex -{ - def project(ref: ProjectRef): Seq[ScopeAxis[ResolvedReference]] = refs.get(ref) match { case Some(pd) => pd.refs; case None => Nil } - def config(ref: ProjectRef, conf: ConfigKey): Seq[ScopeAxis[ConfigKey]] = - refs.get(ref) match { - case Some(pd) => pd.confs.get(conf) match { case Some(cs) => cs; case None => Select(conf) :: Global :: Nil } - case None => Select(conf) :: Global :: Nil - } +private final class DelegateIndex0(refs: Map[ProjectRef, ProjectDelegates]) extends DelegateIndex { + def project(ref: ProjectRef): Seq[ScopeAxis[ResolvedReference]] = refs.get(ref) match { case Some(pd) => pd.refs; case None => Nil } + def config(ref: ProjectRef, conf: ConfigKey): Seq[ScopeAxis[ConfigKey]] = + refs.get(ref) match { + case 
Some(pd) => pd.confs.get(conf) match { case Some(cs) => cs; case None => Select(conf) :: Global :: Nil } + case None => Select(conf) :: Global :: Nil + } } private final class ProjectDelegates(val ref: ProjectRef, val refs: Seq[ScopeAxis[ResolvedReference]], val confs: Map[ConfigKey, Seq[ScopeAxis[ConfigKey]]]) \ No newline at end of file diff --git a/main/settings/src/main/scala/sbt/InputTask.scala b/main/settings/src/main/scala/sbt/InputTask.scala index b802eaf24..4ccf76612 100644 --- a/main/settings/src/main/scala/sbt/InputTask.scala +++ b/main/settings/src/main/scala/sbt/InputTask.scala @@ -1,146 +1,149 @@ package sbt - import complete.Parser - import Def.{Initialize, ScopedKey} - import std.TaskExtra.{task => mktask, _} - import Task._ - import Types._ +import complete.Parser +import Def.{ Initialize, ScopedKey } +import std.TaskExtra.{ task => mktask, _ } +import Task._ +import Types._ /** Parses input and produces a task to run. Constructed using the companion object. */ -final class InputTask[T] private(val parser: State => Parser[Task[T]]) -{ - def mapTask[S](f: Task[T] => Task[S]): InputTask[S] = - new InputTask[S](s => parser(s) map f) +final class InputTask[T] private (val parser: State => Parser[Task[T]]) { + def mapTask[S](f: Task[T] => Task[S]): InputTask[S] = + new InputTask[S](s => parser(s) map f) - def partialInput(in: String): InputTask[T] = - new InputTask[T](s => Parser(parser(s))(in) ) + def partialInput(in: String): InputTask[T] = + new InputTask[T](s => Parser(parser(s))(in)) - def fullInput(in: String): InputTask[T] = new InputTask[T]( s => - Parser.parse(in, parser(s)) match { - case Right(v) => Parser.success(v) - case Left(msg) => - val indented = msg.lines.map(" " + _).mkString("\n") - Parser.failure(s"Invalid programmatic input:\n$indented") - } - ) + def fullInput(in: String): InputTask[T] = new InputTask[T](s => + Parser.parse(in, parser(s)) match { + case Right(v) => Parser.success(v) + case Left(msg) => + val indented = 
msg.lines.map(" " + _).mkString("\n") + Parser.failure(s"Invalid programmatic input:\n$indented") + } + ) } -object InputTask -{ - implicit class InitializeInput[T](i: Initialize[InputTask[T]]) { - def partialInput(in: String): Initialize[InputTask[T]] = i(_ partialInput in) - def fullInput(in: String): Initialize[InputTask[T]] = i(_ fullInput in) +object InputTask { + implicit class InitializeInput[T](i: Initialize[InputTask[T]]) { + def partialInput(in: String): Initialize[InputTask[T]] = i(_ partialInput in) + def fullInput(in: String): Initialize[InputTask[T]] = i(_ fullInput in) - import std.FullInstance._ - def toTask(in: String): Initialize[Task[T]] = flatten( - (Def.stateKey zipWith i) ( (sTask, it) => - sTask map ( s => - Parser.parse(in, it.parser(s)) match { - case Right(t) => Def.value(t) - case Left(msg) => - val indented = msg.lines.map(" " + _).mkString("\n") - sys.error(s"Invalid programmatic input:\n$indented") - } - ) - ) - ) - } + import std.FullInstance._ + def toTask(in: String): Initialize[Task[T]] = flatten( + (Def.stateKey zipWith i)((sTask, it) => + sTask map (s => + Parser.parse(in, it.parser(s)) match { + case Right(t) => Def.value(t) + case Left(msg) => + val indented = msg.lines.map(" " + _).mkString("\n") + sys.error(s"Invalid programmatic input:\n$indented") + } + ) + ) + ) + } - implicit def inputTaskParsed[T](in: InputTask[T]): std.ParserInputTask[T] = ??? - implicit def inputTaskInitParsed[T](in: Initialize[InputTask[T]]): std.ParserInputTask[T] = ??? + implicit def inputTaskParsed[T](in: InputTask[T]): std.ParserInputTask[T] = ??? + implicit def inputTaskInitParsed[T](in: Initialize[InputTask[T]]): std.ParserInputTask[T] = ??? 
- def make[T](p: State => Parser[Task[T]]): InputTask[T] = new InputTask[T](p) + def make[T](p: State => Parser[Task[T]]): InputTask[T] = new InputTask[T](p) - def static[T](p: Parser[Task[T]]): InputTask[T] = free(_ => p) + def static[T](p: Parser[Task[T]]): InputTask[T] = free(_ => p) - def static[I,T](p: Parser[I])(c: I => Task[T]): InputTask[T] = static(p map c) + def static[I, T](p: Parser[I])(c: I => Task[T]): InputTask[T] = static(p map c) - def free[T](p: State => Parser[Task[T]]): InputTask[T] = make(p) + def free[T](p: State => Parser[Task[T]]): InputTask[T] = make(p) - def free[I,T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c) + def free[I, T](p: State => Parser[I])(c: I => Task[T]): InputTask[T] = free(s => p(s) map c) - def separate[I,T](p: State => Parser[I])(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = - separate(Def value p)(action) + def separate[I, T](p: State => Parser[I])(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = + separate(Def value p)(action) - def separate[I,T](p: Initialize[State => Parser[I]])(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = - p.zipWith(action)((parser, act) => free(parser)(act)) + def separate[I, T](p: Initialize[State => Parser[I]])(action: Initialize[I => Task[T]]): Initialize[InputTask[T]] = + p.zipWith(action)((parser, act) => free(parser)(act)) - /** Constructs an InputTask that accepts no user input. */ - def createFree[T](action: Initialize[Task[T]]): Initialize[InputTask[T]] = - action { tsk => free(emptyParser)( const(tsk) ) } + /** Constructs an InputTask that accepts no user input. */ + def createFree[T](action: Initialize[Task[T]]): Initialize[InputTask[T]] = + action { tsk => free(emptyParser)(const(tsk)) } - /** Constructs an InputTask from: - * a) a Parser constructed using other Settings, but not Tasks - * b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing. 
*/ - def createDyn[I,T](p: Initialize[State => Parser[I]])(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] = - separate(p)(std.FullInstance.flattenFun[I,T](action)) + /** + * Constructs an InputTask from: + * a) a Parser constructed using other Settings, but not Tasks + * b) a dynamically constructed Task that uses Settings, Tasks, and the result of parsing. + */ + def createDyn[I, T](p: Initialize[State => Parser[I]])(action: Initialize[Task[I => Initialize[Task[T]]]]): Initialize[InputTask[T]] = + separate(p)(std.FullInstance.flattenFun[I, T](action)) - /** A dummy parser that consumes no input and produces nothing useful (unit).*/ - def emptyParser: State => Parser[Unit] = Types.const(complete.DefaultParsers.success(())) + /** A dummy parser that consumes no input and produces nothing useful (unit).*/ + def emptyParser: State => Parser[Unit] = Types.const(complete.DefaultParsers.success(())) - /** Implementation detail that is public because it is used by a macro.*/ - def parserAsInput[T](p: Parser[T]): Initialize[State => Parser[T]] = Def.valueStrict(Types.const(p)) + /** Implementation detail that is public because it is used by a macro.*/ + def parserAsInput[T](p: Parser[T]): Initialize[State => Parser[T]] = Def.valueStrict(Types.const(p)) - /** Implementation detail that is public because it is used y a macro.*/ - def initParserAsInput[T](i: Initialize[Parser[T]]): Initialize[State => Parser[T]] = i(Types.const) + /** Implementation detail that is public because it is used y a macro.*/ + def initParserAsInput[T](i: Initialize[Parser[T]]): Initialize[State => Parser[T]] = i(Types.const) + @deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0") + def apply[I, T](p: Initialize[State => Parser[I]])(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = + { + val dummyKey = localKey[Task[I]] + val (marker, dummy) = dummyTask[I] + val it = action(TaskKey(dummyKey)) mapConstant 
subResultForDummy(dummyKey, dummy) + val act = it { tsk => (value: I) => subForDummy(marker, value, tsk) } + separate(p)(act) + } + @deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0") + def apply[I, T](p: State => Parser[I])(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = + apply(Def.value(p))(action) - @deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0") - def apply[I,T](p: Initialize[State => Parser[I]])(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = - { - val dummyKey = localKey[Task[I]] - val (marker, dummy) = dummyTask[I] - val it = action(TaskKey(dummyKey)) mapConstant subResultForDummy(dummyKey, dummy) - val act = it { tsk => (value: I) => subForDummy(marker, value, tsk) } - separate(p)(act) - } + /** + * The proper solution is to have a Manifest context bound and accept slight source incompatibility, + * The affected InputTask construction methods are all deprecated and so it is better to keep complete + * compatibility. Because the AttributeKey is local, it uses object equality and the manifest is not used. 
+ */ + private[this] def localKey[T]: AttributeKey[T] = AttributeKey.local[Unit].asInstanceOf[AttributeKey[T]] - @deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0") - def apply[I,T](p: State => Parser[I])(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] = - apply(Def.value(p))(action) + private[this] def subResultForDummy[I](dummyKey: AttributeKey[Task[I]], dummyTask: Task[I]) = + new (ScopedKey ~> Option) { + def apply[T](sk: ScopedKey[T]) = + if (sk.key eq dummyKey) { + // sk.key: AttributeKey[T], dummy.key: AttributeKey[Task[I]] + // (sk.key eq dummy.key) ==> T == Task[I] because AttributeKey is invariant + Some(dummyTask.asInstanceOf[T]) + } else + None + } - /** The proper solution is to have a Manifest context bound and accept slight source incompatibility, - * The affected InputTask construction methods are all deprecated and so it is better to keep complete - * compatibility. Because the AttributeKey is local, it uses object equality and the manifest is not used. 
*/ - private[this] def localKey[T]: AttributeKey[T] = AttributeKey.local[Unit].asInstanceOf[AttributeKey[T]] - - private[this] def subResultForDummy[I](dummyKey: AttributeKey[Task[I]], dummyTask: Task[I]) = - new (ScopedKey ~> Option) { def apply[T](sk: ScopedKey[T]) = - if(sk.key eq dummyKey) { - // sk.key: AttributeKey[T], dummy.key: AttributeKey[Task[I]] - // (sk.key eq dummy.key) ==> T == Task[I] because AttributeKey is invariant - Some(dummyTask.asInstanceOf[T]) - } else - None - } - - private[this] def dummyTask[I]: (AttributeKey[Option[I]], Task[I]) = - { - val key = localKey[Option[I]] - val f: () => I = () => sys.error(s"Internal sbt error: InputTask stub was not substituted properly.") - val t: Task[I] = Task(Info[I]().set(key, None), Pure(f, false)) - (key, t) - } - private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]], value: I, task: Task[T]): Task[T] = - { - val seen = new java.util.IdentityHashMap[Task[_], Task[_]] - lazy val f: Task ~> Task = new (Task ~> Task) { def apply[T](t: Task[T]): Task[T] = - { - val t0 = seen.get(t) - if(t0 == null) { - val newAction = - if(t.info.get(marker).isDefined) - Pure(() => value.asInstanceOf[T], inline = true) - else - t.work.mapTask(f) - val newTask = Task(t.info, newAction) - seen.put(t, newTask) - newTask - } else - t0.asInstanceOf[Task[T]] - }} - f(task) - } + private[this] def dummyTask[I]: (AttributeKey[Option[I]], Task[I]) = + { + val key = localKey[Option[I]] + val f: () => I = () => sys.error(s"Internal sbt error: InputTask stub was not substituted properly.") + val t: Task[I] = Task(Info[I]().set(key, None), Pure(f, false)) + (key, t) + } + private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]], value: I, task: Task[T]): Task[T] = + { + val seen = new java.util.IdentityHashMap[Task[_], Task[_]] + lazy val f: Task ~> Task = new (Task ~> Task) { + def apply[T](t: Task[T]): Task[T] = + { + val t0 = seen.get(t) + if (t0 == null) { + val newAction = + if 
(t.info.get(marker).isDefined) + Pure(() => value.asInstanceOf[T], inline = true) + else + t.work.mapTask(f) + val newTask = Task(t.info, newAction) + seen.put(t, newTask) + newTask + } else + t0.asInstanceOf[Task[T]] + } + } + f(task) + } } diff --git a/main/settings/src/main/scala/sbt/KeyRanks.scala b/main/settings/src/main/scala/sbt/KeyRanks.scala index 3daeb4975..327fd2bc6 100644 --- a/main/settings/src/main/scala/sbt/KeyRanks.scala +++ b/main/settings/src/main/scala/sbt/KeyRanks.scala @@ -1,45 +1,44 @@ package sbt -object KeyRanks -{ - // task and setting ranks, used to prioritize displaying information - // main tasks - final val APlusTask = 4 - final val ATask = 5 - final val AMinusTask = 6 +object KeyRanks { + // task and setting ranks, used to prioritize displaying information + // main tasks + final val APlusTask = 4 + final val ATask = 5 + final val AMinusTask = 6 - // main settings - final val APlusSetting = 9 - final val ASetting = 10 - final val AMinusSetting = 11 + // main settings + final val APlusSetting = 9 + final val ASetting = 10 + final val AMinusSetting = 11 - // less major tasks or tasks that print useful information - final val BPlusTask = 29 - final val BTask = 30 - final val BMinusTask = 31 + // less major tasks or tasks that print useful information + final val BPlusTask = 29 + final val BTask = 30 + final val BMinusTask = 31 - // secondary settings - final val BPlusSetting = 39 - final val BSetting = 40 - final val BMinusSetting = 41 + // secondary settings + final val BPlusSetting = 39 + final val BSetting = 40 + final val BMinusSetting = 41 - // advanced settings - final val CSetting = 100 - // advanced tasks - final val CTask = 200 - // explicit settings - final val DSetting = 10000 - // explicit tasks - final val DTask = 20000 + // advanced settings + final val CSetting = 100 + // advanced tasks + final val CTask = 200 + // explicit settings + final val DSetting = 10000 + // explicit tasks + final val DTask = 20000 - final val 
MainTaskCutoff = AMinusTask - final val MainSettingCutoff = AMinusSetting - final val MainCutoff = math.max(AMinusTask, AMinusSetting) + final val MainTaskCutoff = AMinusTask + final val MainSettingCutoff = AMinusSetting + final val MainCutoff = math.max(AMinusTask, AMinusSetting) - final val DefaultTaskRank = (ATask + BTask)/2 - final val DefaultInputRank = ATask // input tasks are likely a main task - final val DefaultSettingRank = (ASetting + BSetting) / 2 + final val DefaultTaskRank = (ATask + BTask) / 2 + final val DefaultInputRank = ATask // input tasks are likely a main task + final val DefaultSettingRank = (ASetting + BSetting) / 2 - // implementation details - val Invisible = Int.MaxValue + // implementation details + val Invisible = Int.MaxValue } diff --git a/main/settings/src/main/scala/sbt/Previous.scala b/main/settings/src/main/scala/sbt/Previous.scala index 1784464f4..eb0376c61 100644 --- a/main/settings/src/main/scala/sbt/Previous.scala +++ b/main/settings/src/main/scala/sbt/Previous.scala @@ -1,99 +1,97 @@ package sbt - import Def.{Initialize, resolvedScoped, ScopedKey, Setting, streamsManagerKey} - import Previous._ - import Types._ +import Def.{ Initialize, resolvedScoped, ScopedKey, Setting, streamsManagerKey } +import Previous._ +import Types._ - import java.io.{InputStream, OutputStream} - import sbinary.{DefaultProtocol,Format} - import DefaultProtocol.{StringFormat, withStamp} +import java.io.{ InputStream, OutputStream } +import sbinary.{ DefaultProtocol, Format } +import DefaultProtocol.{ StringFormat, withStamp } -/** Reads the previous value of tasks on-demand. The read values are cached so that they are only read once per task execution. -* `referenced` provides the `Format` to use for each key. 
*/ -private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskKey, Referenced]) -{ - private[this] val map = referenced.mapValues(toValue) - private[this] def toValue = new (Referenced ~> ReferencedValue) { def apply[T](x: Referenced[T]) = new ReferencedValue(x) } +/** + * Reads the previous value of tasks on-demand. The read values are cached so that they are only read once per task execution. + * `referenced` provides the `Format` to use for each key. + */ +private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskKey, Referenced]) { + private[this] val map = referenced.mapValues(toValue) + private[this] def toValue = new (Referenced ~> ReferencedValue) { def apply[T](x: Referenced[T]) = new ReferencedValue(x) } - private[this] final class ReferencedValue[T](referenced: Referenced[T]) - { - import referenced.{stamped, task} - lazy val previousValue: Option[T] = { - val in = streams(task).readBinary(task, StreamName) - try read(in, stamped) finally in.close() - } - } + private[this] final class ReferencedValue[T](referenced: Referenced[T]) { + import referenced.{ stamped, task } + lazy val previousValue: Option[T] = { + val in = streams(task).readBinary(task, StreamName) + try read(in, stamped) finally in.close() + } + } - /** Used by the .previous runtime implemention to get the previous value for task `key`. */ - private def get[T](key: ScopedKey[Task[T]]): Option[T] = - map.get(key).flatMap(_.previousValue) + /** Used by the .previous runtime implemention to get the previous value for task `key`. */ + private def get[T](key: ScopedKey[Task[T]]): Option[T] = + map.get(key).flatMap(_.previousValue) } -object Previous -{ - private[sbt] type ScopedTaskKey[T] = ScopedKey[Task[T]] - private type Streams = sbt.std.Streams[ScopedKey[_]] +object Previous { + private[sbt]type ScopedTaskKey[T] = ScopedKey[Task[T]] + private type Streams = sbt.std.Streams[ScopedKey[_]] - /** The stream where the task value is persisted. 
*/ - private final val StreamName = "previous" + /** The stream where the task value is persisted. */ + private final val StreamName = "previous" - /** Represents a reference task.previous*/ - private[sbt] final class Referenced[T](val task: ScopedKey[Task[T]], val format: Format[T]) { - lazy val stamped = withStamp(task.key.manifest.toString)(format) - def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format) - } + /** Represents a reference task.previous*/ + private[sbt] final class Referenced[T](val task: ScopedKey[Task[T]], val format: Format[T]) { + lazy val stamped = withStamp(task.key.manifest.toString)(format) + def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format) + } - private[sbt] val references = SettingKey[References]("previous-references", "Collects all static references to previous values of tasks.", KeyRanks.Invisible) - private[sbt] val cache = TaskKey[Previous]("previous-cache", "Caches previous values of tasks read from disk for the duration of a task execution.", KeyRanks.Invisible) - private[this] val previousReferenced = AttributeKey[Referenced[_]]("previous-referenced") + private[sbt] val references = SettingKey[References]("previous-references", "Collects all static references to previous values of tasks.", KeyRanks.Invisible) + private[sbt] val cache = TaskKey[Previous]("previous-cache", "Caches previous values of tasks read from disk for the duration of a task execution.", KeyRanks.Invisible) + private[this] val previousReferenced = AttributeKey[Referenced[_]]("previous-referenced") - /** Records references to previous task value. This should be completely populated after settings finish loading. */ - private[sbt] final class References - { - private[this] var map = IMap.empty[ScopedTaskKey, Referenced] + /** Records references to previous task value. This should be completely populated after settings finish loading. 
*/ + private[sbt] final class References { + private[this] var map = IMap.empty[ScopedTaskKey, Referenced] - // TODO: this arbitrarily chooses a Format. - // The need to choose is a fundamental problem with this approach, but this should at least make a stable choice. - def recordReference[T](key: ScopedKey[Task[T]], format: Format[T]): Unit = synchronized { - map = map.put(key, new Referenced(key, format)) - } - def getReferences: IMap[ScopedTaskKey, Referenced] = synchronized { map } - } + // TODO: this arbitrarily chooses a Format. + // The need to choose is a fundamental problem with this approach, but this should at least make a stable choice. + def recordReference[T](key: ScopedKey[Task[T]], format: Format[T]): Unit = synchronized { + map = map.put(key, new Referenced(key, format)) + } + def getReferences: IMap[ScopedTaskKey, Referenced] = synchronized { map } + } - /** Persists values of tasks t where there is some task referencing it via t.previous. */ - private[sbt] def complete(referenced: References, results: RMap[Task,Result], streams: Streams): Unit = - { - val map = referenced.getReferences - def impl[T](key: ScopedKey[_], result: T): Unit = - for(i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) { - val out = streams.apply(i.task).binary(StreamName) - try write(out, i.stamped, result ) finally out.close() - } + /** Persists values of tasks t where there is some task referencing it via t.previous. 
*/ + private[sbt] def complete(referenced: References, results: RMap[Task, Result], streams: Streams): Unit = + { + val map = referenced.getReferences + def impl[T](key: ScopedKey[_], result: T): Unit = + for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) { + val out = streams.apply(i.task).binary(StreamName) + try write(out, i.stamped, result) finally out.close() + } - for { - results.TPair(Task(info, _), Value(result)) <- results.toTypedSeq - key <- info.attributes get Def.taskDefinitionKey - } - impl(key, result) - } + for { + results.TPair(Task(info, _), Value(result)) <- results.toTypedSeq + key <- info.attributes get Def.taskDefinitionKey + } impl(key, result) + } - private def read[T](stream: InputStream, format: Format[T]): Option[T] = - try Some(format.reads(stream)) - catch { case e: Exception => None } + private def read[T](stream: InputStream, format: Format[T]): Option[T] = + try Some(format.reads(stream)) + catch { case e: Exception => None } - private def write[T](stream: OutputStream, format: Format[T], value: T): Unit = - try format.writes(stream, value) - catch { case e: Exception => () } + private def write[T](stream: OutputStream, format: Format[T], value: T): Unit = + try format.writes(stream, value) + catch { case e: Exception => () } - /** Public as a macro implementation detail. Do not call directly. */ - def runtime[T](skey: TaskKey[T])(implicit format: Format[T]): Initialize[Task[Option[T]]] = - { - val inputs = (cache in Global) zip Def.validated(skey, selfRefOk=true) zip (references in Global) - inputs { case ( (prevTask, resolved), refs ) => - refs.recordReference(resolved, format) // always evaluated on project load - import std.TaskExtra._ - prevTask.map(_ get resolved) // evaluated if this task is evaluated - } - } + /** Public as a macro implementation detail. Do not call directly. 
*/ + def runtime[T](skey: TaskKey[T])(implicit format: Format[T]): Initialize[Task[Option[T]]] = + { + val inputs = (cache in Global) zip Def.validated(skey, selfRefOk = true) zip (references in Global) + inputs { + case ((prevTask, resolved), refs) => + refs.recordReference(resolved, format) // always evaluated on project load + import std.TaskExtra._ + prevTask.map(_ get resolved) // evaluated if this task is evaluated + } + } - private[sbt] def cacheSetting = (streamsManagerKey, references) map { (s, refs) => new Previous(s, refs.getReferences) } + private[sbt] def cacheSetting = (streamsManagerKey, references) map { (s, refs) => new Previous(s, refs.getReferences) } } diff --git a/main/settings/src/main/scala/sbt/Reference.scala b/main/settings/src/main/scala/sbt/Reference.scala index f970d5eb2..296cc22cc 100644 --- a/main/settings/src/main/scala/sbt/Reference.scala +++ b/main/settings/src/main/scala/sbt/Reference.scala @@ -3,8 +3,8 @@ */ package sbt - import java.io.File - import java.net.URI +import java.io.File +import java.net.URI // in all of these, the URI must be resolved and normalized before it is definitive @@ -34,77 +34,71 @@ final case object LocalRootProject extends ProjectReference /** Identifies the project for the current context. 
*/ final case object ThisProject extends ProjectReference -object ProjectRef -{ - def apply(base: File, id: String): ProjectRef = ProjectRef(IO toURI base, id) +object ProjectRef { + def apply(base: File, id: String): ProjectRef = ProjectRef(IO toURI base, id) } -object RootProject -{ - /** Reference to the root project at 'base'.*/ - def apply(base: File): RootProject = RootProject(IO toURI base) +object RootProject { + /** Reference to the root project at 'base'.*/ + def apply(base: File): RootProject = RootProject(IO toURI base) } -object Reference -{ - implicit val resolvedReferenceOrdering: Ordering[ResolvedReference] = new Ordering[ResolvedReference] { - def compare(a: ResolvedReference, b: ResolvedReference): Int = (a, b) match { - case (ba: BuildRef, bb: BuildRef) => buildRefOrdering.compare(ba, bb) - case (pa: ProjectRef, pb: ProjectRef) => projectRefOrdering.compare(pa, pb) - case (_: BuildRef, _: ProjectRef) => -1 - case (_: ProjectRef, _: BuildRef) => 1 - } - } - implicit val buildRefOrdering: Ordering[BuildRef] = new Ordering[BuildRef] { - def compare(a: BuildRef, b: BuildRef): Int = a.build compareTo b.build - } +object Reference { + implicit val resolvedReferenceOrdering: Ordering[ResolvedReference] = new Ordering[ResolvedReference] { + def compare(a: ResolvedReference, b: ResolvedReference): Int = (a, b) match { + case (ba: BuildRef, bb: BuildRef) => buildRefOrdering.compare(ba, bb) + case (pa: ProjectRef, pb: ProjectRef) => projectRefOrdering.compare(pa, pb) + case (_: BuildRef, _: ProjectRef) => -1 + case (_: ProjectRef, _: BuildRef) => 1 + } + } + implicit val buildRefOrdering: Ordering[BuildRef] = new Ordering[BuildRef] { + def compare(a: BuildRef, b: BuildRef): Int = a.build compareTo b.build + } - implicit val projectRefOrdering: Ordering[ProjectRef] = new Ordering[ProjectRef] { - def compare(a: ProjectRef, b: ProjectRef): Int = { - val bc = a.build compareTo b.build - if(bc == 0) a.project compareTo b.project else bc - } - } + implicit val 
projectRefOrdering: Ordering[ProjectRef] = new Ordering[ProjectRef] { + def compare(a: ProjectRef, b: ProjectRef): Int = { + val bc = a.build compareTo b.build + if (bc == 0) a.project compareTo b.project else bc + } + } - def display(ref: Reference): String = - ref match - { - case pr: ProjectReference => display(pr) - case br: BuildReference => display(br) - } + def display(ref: Reference): String = + ref match { + case pr: ProjectReference => display(pr) + case br: BuildReference => display(br) + } - def display(ref: BuildReference): String = - ref match - { - case ThisBuild => "{}" - case BuildRef(uri) => "{" + uri + "}" - } - def display(ref: ProjectReference): String = - ref match - { - case ThisProject => "{}" - case LocalRootProject => "{}" - case LocalProject(id) => "{}" + id - case RootProject(uri) => "{" + uri + " }" - case ProjectRef(uri, id) => "{" + uri + "}" + id - } + def display(ref: BuildReference): String = + ref match { + case ThisBuild => "{}" + case BuildRef(uri) => "{" + uri + "}" + } + def display(ref: ProjectReference): String = + ref match { + case ThisProject => "{}" + case LocalRootProject => "{}" + case LocalProject(id) => "{}" + id + case RootProject(uri) => "{" + uri + " }" + case ProjectRef(uri, id) => "{" + uri + "}" + id + } - def buildURI(ref: ResolvedReference): URI = ref match { - case BuildRef(b) => b - case ProjectRef(b, _) => b - } - /** Extracts the build URI from a Reference if one has been explicitly defined.*/ - def uri(ref: Reference): Option[URI] = ref match { - case RootProject(b) => Some(b) - case ProjectRef(b, _) => Some(b) - case BuildRef(b) => Some(b) - case _ => None - } + def buildURI(ref: ResolvedReference): URI = ref match { + case BuildRef(b) => b + case ProjectRef(b, _) => b + } + /** Extracts the build URI from a Reference if one has been explicitly defined.*/ + def uri(ref: Reference): Option[URI] = ref match { + case RootProject(b) => Some(b) + case ProjectRef(b, _) => Some(b) + case BuildRef(b) => Some(b) 
+ case _ => None + } - @deprecated("Explicitly wrap the URI in a call to RootProject.", "0.13.0") - implicit def uriToRef(u: URI): ProjectReference = RootProject(u) + @deprecated("Explicitly wrap the URI in a call to RootProject.", "0.13.0") + implicit def uriToRef(u: URI): ProjectReference = RootProject(u) - @deprecated("Explicitly wrap the File in a call to RootProject.", "0.13.0") - implicit def fileToRef(f: File): ProjectReference = RootProject(f) + @deprecated("Explicitly wrap the File in a call to RootProject.", "0.13.0") + implicit def fileToRef(f: File): ProjectReference = RootProject(f) - @deprecated("Explicitly wrap the String in a call to LocalProject.", "0.13.0") - implicit def stringToReference(s: String): ProjectReference = LocalProject(s) + @deprecated("Explicitly wrap the String in a call to LocalProject.", "0.13.0") + implicit def stringToReference(s: String): ProjectReference = LocalProject(s) } \ No newline at end of file diff --git a/main/settings/src/main/scala/sbt/Scope.scala b/main/settings/src/main/scala/sbt/Scope.scala index 2d8741a59..772fff7f3 100644 --- a/main/settings/src/main/scala/sbt/Scope.scala +++ b/main/settings/src/main/scala/sbt/Scope.scala @@ -3,230 +3,220 @@ */ package sbt - import java.io.File - import java.net.URI +import java.io.File +import java.net.URI -final case class Scope(project: ScopeAxis[Reference], config: ScopeAxis[ConfigKey], task: ScopeAxis[AttributeKey[_]], extra: ScopeAxis[AttributeMap]) -{ - def in(project: Reference, config: ConfigKey): Scope = copy(project = Select(project), config = Select(config)) - def in(config: ConfigKey, task: AttributeKey[_]): Scope = copy(config = Select(config), task = Select(task)) - def in(project: Reference, task: AttributeKey[_]): Scope = copy(project = Select(project), task = Select(task)) - def in(project: Reference, config: ConfigKey, task: AttributeKey[_]): Scope = copy(project = Select(project), config = Select(config), task = Select(task)) - def in(project: Reference): 
Scope = copy(project = Select(project)) - def in(config: ConfigKey): Scope = copy(config = Select(config)) - def in(task: AttributeKey[_]): Scope = copy(task = Select(task)) +final case class Scope(project: ScopeAxis[Reference], config: ScopeAxis[ConfigKey], task: ScopeAxis[AttributeKey[_]], extra: ScopeAxis[AttributeMap]) { + def in(project: Reference, config: ConfigKey): Scope = copy(project = Select(project), config = Select(config)) + def in(config: ConfigKey, task: AttributeKey[_]): Scope = copy(config = Select(config), task = Select(task)) + def in(project: Reference, task: AttributeKey[_]): Scope = copy(project = Select(project), task = Select(task)) + def in(project: Reference, config: ConfigKey, task: AttributeKey[_]): Scope = copy(project = Select(project), config = Select(config), task = Select(task)) + def in(project: Reference): Scope = copy(project = Select(project)) + def in(config: ConfigKey): Scope = copy(config = Select(config)) + def in(task: AttributeKey[_]): Scope = copy(task = Select(task)) } -object Scope -{ - val ThisScope = Scope(This, This, This, This) - val GlobalScope = Scope(Global, Global, Global, Global) +object Scope { + val ThisScope = Scope(This, This, This, This) + val GlobalScope = Scope(Global, Global, Global, Global) - def resolveScope(thisScope: Scope, current: URI, rootProject: URI => String): Scope => Scope = - resolveProject(current, rootProject) compose replaceThis(thisScope) + def resolveScope(thisScope: Scope, current: URI, rootProject: URI => String): Scope => Scope = + resolveProject(current, rootProject) compose replaceThis(thisScope) - def resolveBuildScope(thisScope: Scope, current: URI): Scope => Scope = - buildResolve(current) compose replaceThis(thisScope) + def resolveBuildScope(thisScope: Scope, current: URI): Scope => Scope = + buildResolve(current) compose replaceThis(thisScope) - def replaceThis(thisScope: Scope): Scope => Scope = (scope: Scope) => - Scope(subThis(thisScope.project, scope.project), 
subThis(thisScope.config, scope.config), subThis(thisScope.task, scope.task), subThis(thisScope.extra, scope.extra)) - - def subThis[T](sub: ScopeAxis[T], into: ScopeAxis[T]): ScopeAxis[T] = - if(into == This) sub else into + def replaceThis(thisScope: Scope): Scope => Scope = (scope: Scope) => + Scope(subThis(thisScope.project, scope.project), subThis(thisScope.config, scope.config), subThis(thisScope.task, scope.task), subThis(thisScope.extra, scope.extra)) - def fillTaskAxis(scope: Scope, key: AttributeKey[_]): Scope = - scope.task match - { - case _: Select[_] => scope - case _ => scope.copy(task = Select(key)) - } - - def mapReference(f: Reference => Reference): Scope => Scope = - { - case Scope(Select(ref), a,b,c) => Scope(Select(f(ref)), a,b,c) - case x => x - } - def resolveProject(uri: URI, rootProject: URI => String): Scope => Scope = - mapReference(ref => resolveReference(uri, rootProject, ref)) - def buildResolve(uri: URI): Scope => Scope = - mapReference(ref => resolveBuildOnly(uri, ref)) + def subThis[T](sub: ScopeAxis[T], into: ScopeAxis[T]): ScopeAxis[T] = + if (into == This) sub else into - def resolveBuildOnly(current: URI, ref: Reference): Reference = - ref match - { - case br: BuildReference => resolveBuild(current, br) - case pr: ProjectReference => resolveProjectBuild(current, pr) - } - def resolveBuild(current: URI, ref: BuildReference): BuildReference = - ref match - { - case ThisBuild => BuildRef(current) - case BuildRef(uri) => BuildRef(resolveBuild(current, uri)) - } - def resolveProjectBuild(current: URI, ref: ProjectReference): ProjectReference = - ref match - { - case ThisProject => RootProject(current) - case LocalRootProject => RootProject(current) - case LocalProject(id) => ProjectRef(current, id) - case RootProject(uri) => RootProject(resolveBuild(current, uri)) - case ProjectRef(uri, id) => ProjectRef(resolveBuild(current, uri), id) - } - def resolveBuild(current: URI, uri: URI): URI = - if(!uri.isAbsolute && current.isOpaque && 
uri.getSchemeSpecificPart == ".") - current // this handles the shortcut of referring to the current build using "." - else - IO.directoryURI(current resolve uri) + def fillTaskAxis(scope: Scope, key: AttributeKey[_]): Scope = + scope.task match { + case _: Select[_] => scope + case _ => scope.copy(task = Select(key)) + } - def resolveReference(current: URI, rootProject: URI => String, ref: Reference): ResolvedReference = - ref match - { - case br: BuildReference => resolveBuildRef(current, br) - case pr: ProjectReference => resolveProjectRef(current, rootProject, pr) - } - - def resolveProjectRef(current: URI, rootProject: URI => String, ref: ProjectReference): ProjectRef = - ref match - { - case ThisProject | LocalRootProject => ProjectRef(current, rootProject(current)) - case LocalProject(id) => ProjectRef(current, id) - case RootProject(uri) => val res = resolveBuild(current, uri); ProjectRef(res, rootProject(res)) - case ProjectRef(uri, id) => ProjectRef(resolveBuild(current, uri), id) - } - def resolveBuildRef(current: URI, ref: BuildReference): BuildRef = - ref match - { - case ThisBuild => BuildRef(current) - case BuildRef(uri) => BuildRef(resolveBuild(current, uri)) - } + def mapReference(f: Reference => Reference): Scope => Scope = + { + case Scope(Select(ref), a, b, c) => Scope(Select(f(ref)), a, b, c) + case x => x + } + def resolveProject(uri: URI, rootProject: URI => String): Scope => Scope = + mapReference(ref => resolveReference(uri, rootProject, ref)) + def buildResolve(uri: URI): Scope => Scope = + mapReference(ref => resolveBuildOnly(uri, ref)) - def display(config: ConfigKey): String = config.name + ":" - def display(scope: Scope, sep: String): String = displayMasked(scope, sep, showProject, ScopeMask()) - def displayMasked(scope: Scope, sep: String, mask: ScopeMask): String = displayMasked(scope, sep, showProject, mask) - def display(scope: Scope, sep: String, showProject: Reference => String): String = displayMasked(scope, sep, showProject, 
ScopeMask()) - def displayMasked(scope: Scope, sep: String, showProject: Reference => String, mask: ScopeMask): String = - { - import scope.{project, config, task, extra} - val configPrefix = config.foldStrict(display, "*:", ".:") - val taskPrefix = task.foldStrict(_.label + "::", "", ".::") - val extras = extra.foldStrict(_.entries.map( _.toString ).toList, Nil, Nil) - val postfix = if(extras.isEmpty) "" else extras.mkString("(", ", ", ")") - mask.concatShow(projectPrefix(project, showProject), configPrefix, taskPrefix, sep, postfix) - } + def resolveBuildOnly(current: URI, ref: Reference): Reference = + ref match { + case br: BuildReference => resolveBuild(current, br) + case pr: ProjectReference => resolveProjectBuild(current, pr) + } + def resolveBuild(current: URI, ref: BuildReference): BuildReference = + ref match { + case ThisBuild => BuildRef(current) + case BuildRef(uri) => BuildRef(resolveBuild(current, uri)) + } + def resolveProjectBuild(current: URI, ref: ProjectReference): ProjectReference = + ref match { + case ThisProject => RootProject(current) + case LocalRootProject => RootProject(current) + case LocalProject(id) => ProjectRef(current, id) + case RootProject(uri) => RootProject(resolveBuild(current, uri)) + case ProjectRef(uri, id) => ProjectRef(resolveBuild(current, uri), id) + } + def resolveBuild(current: URI, uri: URI): URI = + if (!uri.isAbsolute && current.isOpaque && uri.getSchemeSpecificPart == ".") + current // this handles the shortcut of referring to the current build using "." 
+ else + IO.directoryURI(current resolve uri) - def equal(a: Scope, b: Scope, mask: ScopeMask): Boolean = - (!mask.project || a.project == b.project) && - (!mask.config || a.config == b.config) && - (!mask.task || a.task == b.task) && - (!mask.extra || a.extra == b.extra) + def resolveReference(current: URI, rootProject: URI => String, ref: Reference): ResolvedReference = + ref match { + case br: BuildReference => resolveBuildRef(current, br) + case pr: ProjectReference => resolveProjectRef(current, rootProject, pr) + } - def projectPrefix(project: ScopeAxis[Reference], show: Reference => String = showProject): String = project.foldStrict(show, "*/", "./") - def showProject = (ref: Reference) => Reference.display(ref) + "/" + def resolveProjectRef(current: URI, rootProject: URI => String, ref: ProjectReference): ProjectRef = + ref match { + case ThisProject | LocalRootProject => ProjectRef(current, rootProject(current)) + case LocalProject(id) => ProjectRef(current, id) + case RootProject(uri) => + val res = resolveBuild(current, uri); ProjectRef(res, rootProject(res)) + case ProjectRef(uri, id) => ProjectRef(resolveBuild(current, uri), id) + } + def resolveBuildRef(current: URI, ref: BuildReference): BuildRef = + ref match { + case ThisBuild => BuildRef(current) + case BuildRef(uri) => BuildRef(resolveBuild(current, uri)) + } - def parseScopedKey(command: String): (Scope, String) = - { - val ScopedKeyRegex(_, projectID, _, config, key) = command - val pref = if(projectID eq null) This else Select(LocalProject(projectID)) - val conf = if(config eq null) This else Select(ConfigKey(config)) - (Scope(pref, conf, This, This), transformTaskName(key)) - } - val ScopedKeyRegex = """((\w+)\/)?((\w+)\:)?([\w\-]+)""".r - - def transformTaskName(s: String) = - { - val parts = s.split("-+") - (parts.take(1) ++ parts.drop(1).map(_.capitalize)).mkString - } + def display(config: ConfigKey): String = config.name + ":" + def display(scope: Scope, sep: String): String = 
displayMasked(scope, sep, showProject, ScopeMask()) + def displayMasked(scope: Scope, sep: String, mask: ScopeMask): String = displayMasked(scope, sep, showProject, mask) + def display(scope: Scope, sep: String, showProject: Reference => String): String = displayMasked(scope, sep, showProject, ScopeMask()) + def displayMasked(scope: Scope, sep: String, showProject: Reference => String, mask: ScopeMask): String = + { + import scope.{ project, config, task, extra } + val configPrefix = config.foldStrict(display, "*:", ".:") + val taskPrefix = task.foldStrict(_.label + "::", "", ".::") + val extras = extra.foldStrict(_.entries.map(_.toString).toList, Nil, Nil) + val postfix = if (extras.isEmpty) "" else extras.mkString("(", ", ", ")") + mask.concatShow(projectPrefix(project, showProject), configPrefix, taskPrefix, sep, postfix) + } - // *Inherit functions should be immediate delegates and not include argument itself. Transitivity will be provided by this method - def delegates[Proj]( - refs: Seq[(ProjectRef, Proj)], - configurations: Proj => Seq[ConfigKey], - resolve: Reference => ResolvedReference, - rootProject: URI => String, - projectInherit: ProjectRef => Seq[ProjectRef], - configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey], - taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], - extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]): Scope => Seq[Scope] = - { - val index = delegates(refs, configurations, projectInherit, configInherit) - scope => indexedDelegates(resolve, index, rootProject, taskInherit, extraInherit)(scope) - } + def equal(a: Scope, b: Scope, mask: ScopeMask): Boolean = + (!mask.project || a.project == b.project) && + (!mask.config || a.config == b.config) && + (!mask.task || a.task == b.task) && + (!mask.extra || a.extra == b.extra) - def indexedDelegates( - resolve: Reference => ResolvedReference, - index: DelegateIndex, - rootProject: URI => String, - taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], - 
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap])(rawScope: Scope): Seq[Scope] = - { - val scope = Scope.replaceThis(GlobalScope)(rawScope) - - def nonProjectScopes(resolvedProj: ResolvedReference)(px: ScopeAxis[ResolvedReference]) = - { - val p = px.toOption getOrElse resolvedProj - val configProj = p match { case pr: ProjectRef => pr; case br: BuildRef => ProjectRef(br.build, rootProject(br.build)) } - val cLin = scope.config match { case Select(conf) => index.config(configProj, conf); case _ => withGlobalAxis(scope.config) } - val tLin = scope.task match { case t @ Select(task) => linearize(t)(taskInherit); case _ => withGlobalAxis(scope.task) } - val eLin = withGlobalAxis(scope.extra) - for(c <- cLin; t <- tLin; e <- eLin) yield Scope(px, c, t, e) - } - scope.project match - { - case Global | This => globalProjectDelegates(scope) - case Select(proj) => - val resolvedProj = resolve(proj) - val projAxes: Seq[ScopeAxis[ResolvedReference]] = - resolvedProj match - { - case pr: ProjectRef => index.project(pr) - case br: BuildRef => Select(br) :: Global :: Nil - } - projAxes flatMap nonProjectScopes(resolvedProj) - } - } + def projectPrefix(project: ScopeAxis[Reference], show: Reference => String = showProject): String = project.foldStrict(show, "*/", "./") + def showProject = (ref: Reference) => Reference.display(ref) + "/" - def withGlobalAxis[T](base: ScopeAxis[T]): Seq[ScopeAxis[T]] = if(base.isSelect) base :: Global :: Nil else Global :: Nil - def withGlobalScope(base: Scope): Seq[Scope] = if(base == GlobalScope) GlobalScope :: Nil else base :: GlobalScope :: Nil - def withRawBuilds(ps: Seq[ScopeAxis[ProjectRef]]): Seq[ScopeAxis[ResolvedReference]] = - ps ++ (ps flatMap rawBuild).distinct :+ Global + def parseScopedKey(command: String): (Scope, String) = + { + val ScopedKeyRegex(_, projectID, _, config, key) = command + val pref = if (projectID eq null) This else Select(LocalProject(projectID)) + val conf = if (config eq null) This else 
Select(ConfigKey(config)) + (Scope(pref, conf, This, This), transformTaskName(key)) + } + val ScopedKeyRegex = """((\w+)\/)?((\w+)\:)?([\w\-]+)""".r - def rawBuild(ps: ScopeAxis[ProjectRef]): Seq[ScopeAxis[BuildRef]] = ps match { case Select(ref) => Select(BuildRef(ref.build)) :: Nil; case _ => Nil } + def transformTaskName(s: String) = + { + val parts = s.split("-+") + (parts.take(1) ++ parts.drop(1).map(_.capitalize)).mkString + } - def delegates[Proj]( - refs: Seq[(ProjectRef, Proj)], - configurations: Proj => Seq[ConfigKey], - projectInherit: ProjectRef => Seq[ProjectRef], - configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): DelegateIndex = - { - val pDelegates = refs map { case (ref, project) => - (ref, delegateIndex(ref, configurations(project))(projectInherit, configInherit) ) - } toMap ; - new DelegateIndex0(pDelegates) - } - private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])(projectInherit: ProjectRef => Seq[ProjectRef], configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = - { - val refDelegates = withRawBuilds(linearize(Select(ref), false)(projectInherit)) - val configs = confs map { c => axisDelegates(configInherit, ref, c) } - new ProjectDelegates(ref, refDelegates, configs.toMap) - } - def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T], ref: ResolvedReference, init: T): (T, Seq[ScopeAxis[T]]) = - ( init, linearize(Select(init))(direct(ref, _)) ) + // *Inherit functions should be immediate delegates and not include argument itself. 
Transitivity will be provided by this method + def delegates[Proj]( + refs: Seq[(ProjectRef, Proj)], + configurations: Proj => Seq[ConfigKey], + resolve: Reference => ResolvedReference, + rootProject: URI => String, + projectInherit: ProjectRef => Seq[ProjectRef], + configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey], + taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], + extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]): Scope => Seq[Scope] = + { + val index = delegates(refs, configurations, projectInherit, configInherit) + scope => indexedDelegates(resolve, index, rootProject, taskInherit, extraInherit)(scope) + } - def linearize[T](axis: ScopeAxis[T], appendGlobal: Boolean = true)(inherit: T => Seq[T]): Seq[ScopeAxis[T]] = - axis match - { - case Select(x) => topologicalSort[T](x, appendGlobal)(inherit) - case Global | This => if(appendGlobal) Global :: Nil else Nil - } + def indexedDelegates( + resolve: Reference => ResolvedReference, + index: DelegateIndex, + rootProject: URI => String, + taskInherit: AttributeKey[_] => Seq[AttributeKey[_]], + extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap])(rawScope: Scope): Seq[Scope] = + { + val scope = Scope.replaceThis(GlobalScope)(rawScope) - def topologicalSort[T](node: T, appendGlobal: Boolean)(dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = - { - val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply) - if(appendGlobal) o ::: Global :: Nil else o - } - def globalProjectDelegates(scope: Scope): Seq[Scope] = - if(scope == GlobalScope) - GlobalScope :: Nil - else - for( c <- withGlobalAxis(scope.config); t <- withGlobalAxis(scope.task); e <- withGlobalAxis(scope.extra) ) yield Scope(Global, c, t, e) + def nonProjectScopes(resolvedProj: ResolvedReference)(px: ScopeAxis[ResolvedReference]) = + { + val p = px.toOption getOrElse resolvedProj + val configProj = p match { case pr: ProjectRef => pr; case br: BuildRef => ProjectRef(br.build, 
rootProject(br.build)) } + val cLin = scope.config match { case Select(conf) => index.config(configProj, conf); case _ => withGlobalAxis(scope.config) } + val tLin = scope.task match { case t @ Select(task) => linearize(t)(taskInherit); case _ => withGlobalAxis(scope.task) } + val eLin = withGlobalAxis(scope.extra) + for (c <- cLin; t <- tLin; e <- eLin) yield Scope(px, c, t, e) + } + scope.project match { + case Global | This => globalProjectDelegates(scope) + case Select(proj) => + val resolvedProj = resolve(proj) + val projAxes: Seq[ScopeAxis[ResolvedReference]] = + resolvedProj match { + case pr: ProjectRef => index.project(pr) + case br: BuildRef => Select(br) :: Global :: Nil + } + projAxes flatMap nonProjectScopes(resolvedProj) + } + } + + def withGlobalAxis[T](base: ScopeAxis[T]): Seq[ScopeAxis[T]] = if (base.isSelect) base :: Global :: Nil else Global :: Nil + def withGlobalScope(base: Scope): Seq[Scope] = if (base == GlobalScope) GlobalScope :: Nil else base :: GlobalScope :: Nil + def withRawBuilds(ps: Seq[ScopeAxis[ProjectRef]]): Seq[ScopeAxis[ResolvedReference]] = + ps ++ (ps flatMap rawBuild).distinct :+ Global + + def rawBuild(ps: ScopeAxis[ProjectRef]): Seq[ScopeAxis[BuildRef]] = ps match { case Select(ref) => Select(BuildRef(ref.build)) :: Nil; case _ => Nil } + + def delegates[Proj]( + refs: Seq[(ProjectRef, Proj)], + configurations: Proj => Seq[ConfigKey], + projectInherit: ProjectRef => Seq[ProjectRef], + configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): DelegateIndex = + { + val pDelegates = refs map { + case (ref, project) => + (ref, delegateIndex(ref, configurations(project))(projectInherit, configInherit)) + } toMap; + new DelegateIndex0(pDelegates) + } + private[this] def delegateIndex(ref: ProjectRef, confs: Seq[ConfigKey])(projectInherit: ProjectRef => Seq[ProjectRef], configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey]): ProjectDelegates = + { + val refDelegates = withRawBuilds(linearize(Select(ref), 
false)(projectInherit)) + val configs = confs map { c => axisDelegates(configInherit, ref, c) } + new ProjectDelegates(ref, refDelegates, configs.toMap) + } + def axisDelegates[T](direct: (ResolvedReference, T) => Seq[T], ref: ResolvedReference, init: T): (T, Seq[ScopeAxis[T]]) = + (init, linearize(Select(init))(direct(ref, _))) + + def linearize[T](axis: ScopeAxis[T], appendGlobal: Boolean = true)(inherit: T => Seq[T]): Seq[ScopeAxis[T]] = + axis match { + case Select(x) => topologicalSort[T](x, appendGlobal)(inherit) + case Global | This => if (appendGlobal) Global :: Nil else Nil + } + + def topologicalSort[T](node: T, appendGlobal: Boolean)(dependencies: T => Seq[T]): Seq[ScopeAxis[T]] = + { + val o = Dag.topologicalSortUnchecked(node)(dependencies).map(Select.apply) + if (appendGlobal) o ::: Global :: Nil else o + } + def globalProjectDelegates(scope: Scope): Seq[Scope] = + if (scope == GlobalScope) + GlobalScope :: Nil + else + for (c <- withGlobalAxis(scope.config); t <- withGlobalAxis(scope.task); e <- withGlobalAxis(scope.extra)) yield Scope(Global, c, t, e) } diff --git a/main/settings/src/main/scala/sbt/ScopeAxis.scala b/main/settings/src/main/scala/sbt/ScopeAxis.scala index 3706f02f7..2a17d2496 100644 --- a/main/settings/src/main/scala/sbt/ScopeAxis.scala +++ b/main/settings/src/main/scala/sbt/ScopeAxis.scala @@ -1,29 +1,28 @@ package sbt - import Types.some +import Types.some sealed trait ScopeAxis[+S] { - def foldStrict[T](f: S => T, ifGlobal: T, ifThis: T): T = fold(f, ifGlobal, ifThis) - def fold[T](f: S => T, ifGlobal: => T, ifThis: => T): T = this match { - case This => ifThis - case Global => ifGlobal - case Select(s) => f(s) - } - def toOption: Option[S] = foldStrict(some.fn, None, None) - def map[T](f: S => T): ScopeAxis[T] = foldStrict(s => Select(f(s)), Global, This) - def isSelect: Boolean = false + def foldStrict[T](f: S => T, ifGlobal: T, ifThis: T): T = fold(f, ifGlobal, ifThis) + def fold[T](f: S => T, ifGlobal: => T, ifThis: => T): T = 
this match { + case This => ifThis + case Global => ifGlobal + case Select(s) => f(s) + } + def toOption: Option[S] = foldStrict(some.fn, None, None) + def map[T](f: S => T): ScopeAxis[T] = foldStrict(s => Select(f(s)), Global, This) + def isSelect: Boolean = false } case object This extends ScopeAxis[Nothing] case object Global extends ScopeAxis[Nothing] final case class Select[S](s: S) extends ScopeAxis[S] { - override def isSelect = true + override def isSelect = true } -object ScopeAxis -{ - implicit def scopeAxisToScope(axis: ScopeAxis[Nothing]): Scope = - Scope(axis, axis, axis, axis) - def fromOption[T](o: Option[T]): ScopeAxis[T] = o match { - case Some(v) => Select(v) - case None => Global - } +object ScopeAxis { + implicit def scopeAxisToScope(axis: ScopeAxis[Nothing]): Scope = + Scope(axis, axis, axis, axis) + def fromOption[T](o: Option[T]): ScopeAxis[T] = o match { + case Some(v) => Select(v) + case None => Global + } } diff --git a/main/settings/src/main/scala/sbt/ScopeMask.scala b/main/settings/src/main/scala/sbt/ScopeMask.scala index 6b75513ec..0f1a018e1 100644 --- a/main/settings/src/main/scala/sbt/ScopeMask.scala +++ b/main/settings/src/main/scala/sbt/ScopeMask.scala @@ -1,16 +1,15 @@ package sbt /** Specifies the Scope axes that should be used for an operation. `true` indicates an axis should be used. 
*/ -final case class ScopeMask(project: Boolean = true, config: Boolean = true, task: Boolean = true, extra: Boolean = true) -{ - def concatShow(p: String, c: String, t: String, sep: String, x: String): String = - { - val sb = new StringBuilder - if(project) sb.append(p) - if(config) sb.append(c) - if(task) sb.append(t) - sb.append(sep) - if(extra) sb.append(x) - sb.toString - } +final case class ScopeMask(project: Boolean = true, config: Boolean = true, task: Boolean = true, extra: Boolean = true) { + def concatShow(p: String, c: String, t: String, sep: String, x: String): String = + { + val sb = new StringBuilder + if (project) sb.append(p) + if (config) sb.append(c) + if (task) sb.append(t) + sb.append(sep) + if (extra) sb.append(x) + sb.toString + } } diff --git a/main/settings/src/main/scala/sbt/Structure.scala b/main/settings/src/main/scala/sbt/Structure.scala index bd23f3f4d..153f48e7c 100644 --- a/main/settings/src/main/scala/sbt/Structure.scala +++ b/main/settings/src/main/scala/sbt/Structure.scala @@ -5,382 +5,365 @@ package sbt /** An abstraction on top of Settings for build configuration and task definition. 
*/ - import java.io.File - import java.net.URI +import java.io.File +import java.net.URI - import ConcurrentRestrictions.Tag - import Def.{Initialize, KeyedInitialize, ScopedKey, Setting, setting} - import Path._ - import std.TaskExtra.{task => mktask, _} - import Task._ - import Types._ +import ConcurrentRestrictions.Tag +import Def.{ Initialize, KeyedInitialize, ScopedKey, Setting, setting } +import Path._ +import std.TaskExtra.{ task => mktask, _ } +import Task._ +import Types._ - import language.experimental.macros - import reflect.internal.annotations.compileTimeOnly +import language.experimental.macros +import reflect.internal.annotations.compileTimeOnly sealed trait Scoped { def scope: Scope; val key: AttributeKey[_] } /** A common type for SettingKey and TaskKey so that both can be used as inputs to tasks.*/ sealed trait ScopedTaskable[T] extends Scoped { - def toTask: Initialize[Task[T]] + def toTask: Initialize[Task[T]] } -/** Identifies a setting. It consists of three parts: the scope, the name, and the type of a value associated with this key. -* The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type `AttributeKey[T]`. -* Instances are constructed using the companion object. */ -sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] -{ - val key: AttributeKey[T] - final def toTask: Initialize[Task[T]] = this apply inlineTask - final def scopedKey: ScopedKey[T] = ScopedKey(scope, key) - final def in(scope: Scope): SettingKey[T] = Scoped.scopedSetting(Scope.replaceThis(this.scope)(scope), this.key) +/** + * Identifies a setting. It consists of three parts: the scope, the name, and the type of a value associated with this key. + * The scope is represented by a value of type Scope. + * The name and the type are represented by a value of type `AttributeKey[T]`. 
+ * Instances are constructed using the companion object. + */ +sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitialize[T] with Scoped.ScopingSetting[SettingKey[T]] with Scoped.DefinableSetting[T] { + val key: AttributeKey[T] + final def toTask: Initialize[Task[T]] = this apply inlineTask + final def scopedKey: ScopedKey[T] = ScopedKey(scope, key) + final def in(scope: Scope): SettingKey[T] = Scoped.scopedSetting(Scope.replaceThis(this.scope)(scope), this.key) - final def := (v: T): Setting[T] = macro std.TaskMacro.settingAssignMacroImpl[T] - final def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[T] = macro std.TaskMacro.settingAppend1Impl[T,U] - final def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[T] = macro std.TaskMacro.settingAppendNImpl[T,U] - final def <+= [V](v: Initialize[V])(implicit a: Append.Value[T, V]): Setting[T] = macro std.TaskMacro.settingAppend1Position[T,V] - final def <++= [V](vs: Initialize[V])(implicit a: Append.Values[T, V]): Setting[T] = macro std.TaskMacro.settingAppendNPosition[T,V] - final def ~= (f: T => T): Setting[T] = macro std.TaskMacro.settingTransformPosition[T] - final def transform(f: T => T, source: SourcePosition): Setting[T] = set( scopedKey(f), source ) + final def :=(v: T): Setting[T] = macro std.TaskMacro.settingAssignMacroImpl[T] + final def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[T] = macro std.TaskMacro.settingAppend1Impl[T, U] + final def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[T] = macro std.TaskMacro.settingAppendNImpl[T, U] + final def <+=[V](v: Initialize[V])(implicit a: Append.Value[T, V]): Setting[T] = macro std.TaskMacro.settingAppend1Position[T, V] + final def <++=[V](vs: Initialize[V])(implicit a: Append.Values[T, V]): Setting[T] = macro std.TaskMacro.settingAppendNPosition[T, V] + final def ~=(f: T => T): Setting[T] = macro std.TaskMacro.settingTransformPosition[T] + final def transform(f: T => T, source: SourcePosition): Setting[T] = 
set(scopedKey(f), source) - final def append1[V](v: Initialize[V], source: SourcePosition)(implicit a: Append.Value[T, V]): Setting[T] = make(v, source)(a.appendValue) - final def appendN[V](vs: Initialize[V], source: SourcePosition)(implicit a: Append.Values[T, V]): Setting[T] = make(vs, source)(a.appendValues) + final def append1[V](v: Initialize[V], source: SourcePosition)(implicit a: Append.Value[T, V]): Setting[T] = make(v, source)(a.appendValue) + final def appendN[V](vs: Initialize[V], source: SourcePosition)(implicit a: Append.Values[T, V]): Setting[T] = make(vs, source)(a.appendValues) - protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] = this.set( (this, other)(f), source) + protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] = this.set((this, other)(f), source) } -/** Identifies a task. It consists of three parts: the scope, the name, and the type of the value computed by a task associated with this key. -* The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type `AttributeKey[Task[T]]`. -* Instances are constructed using the companion object. */ -sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.DefinableTask[T] -{ - val key: AttributeKey[Task[T]] - def toTask: Initialize[Task[T]] = this - def scopedKey: ScopedKey[Task[T]] = ScopedKey(scope, key) - def in(scope: Scope): TaskKey[T] = Scoped.scopedTask(Scope.replaceThis(this.scope)(scope), this.key) +/** + * Identifies a task. It consists of three parts: the scope, the name, and the type of the value computed by a task associated with this key. + * The scope is represented by a value of type Scope. + * The name and the type are represented by a value of type `AttributeKey[Task[T]]`. + * Instances are constructed using the companion object. 
+ */ +sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[Task[T]] with Scoped.ScopingSetting[TaskKey[T]] with Scoped.DefinableTask[T] { + val key: AttributeKey[Task[T]] + def toTask: Initialize[Task[T]] = this + def scopedKey: ScopedKey[Task[T]] = ScopedKey(scope, key) + def in(scope: Scope): TaskKey[T] = Scoped.scopedTask(Scope.replaceThis(this.scope)(scope), this.key) - def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppend1Impl[T,U] - def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppendNImpl[T,U] - def <+= [V](v: Initialize[Task[V]])(implicit a: Append.Value[T, V]): Setting[Task[T]] = macro std.TaskMacro.taskAppend1Position[T,V] - def <++= [V](vs: Initialize[Task[V]])(implicit a: Append.Values[T, V]): Setting[Task[T]] = macro std.TaskMacro.taskAppendNPosition[T,V] + def +=[U](v: U)(implicit a: Append.Value[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppend1Impl[T, U] + def ++=[U](vs: U)(implicit a: Append.Values[T, U]): Setting[Task[T]] = macro std.TaskMacro.taskAppendNImpl[T, U] + def <+=[V](v: Initialize[Task[V]])(implicit a: Append.Value[T, V]): Setting[Task[T]] = macro std.TaskMacro.taskAppend1Position[T, V] + def <++=[V](vs: Initialize[Task[V]])(implicit a: Append.Values[T, V]): Setting[Task[T]] = macro std.TaskMacro.taskAppendNPosition[T, V] - def append1[V](v: Initialize[Task[V]], source: SourcePosition)(implicit a: Append.Value[T, V]): Setting[Task[T]] = make(v, source)(a.appendValue) - def appendN[V](vs: Initialize[Task[V]], source: SourcePosition)(implicit a: Append.Values[T, V]): Setting[Task[T]] = make(vs, source)(a.appendValues) + def append1[V](v: Initialize[Task[V]], source: SourcePosition)(implicit a: Append.Value[T, V]): Setting[Task[T]] = make(v, source)(a.appendValue) + def appendN[V](vs: Initialize[Task[V]], source: SourcePosition)(implicit a: Append.Values[T, V]): Setting[Task[T]] = make(vs, source)(a.appendValues) 
- private[this] def make[S](other: Initialize[Task[S]], source: SourcePosition)(f: (T, S) => T): Setting[Task[T]] = - set( (this, other) { (a,b) => (a,b) map f.tupled }, source) + private[this] def make[S](other: Initialize[Task[S]], source: SourcePosition)(f: (T, S) => T): Setting[Task[T]] = + set((this, other) { (a, b) => (a, b) map f.tupled }, source) } -/** Identifies an input task. An input task parses input and produces a task to run. -* It consists of three parts: the scope, the name, and the type of the value produced by an input task associated with this key. -* The scope is represented by a value of type Scope. -* The name and the type are represented by a value of type `AttributeKey[InputTask[T]]`. -* Instances are constructed using the companion object. */ -sealed trait InputKey[T] extends Scoped with KeyedInitialize[InputTask[T]] with Scoped.ScopingSetting[InputKey[T]] with Scoped.DefinableSetting[InputTask[T]] -{ - val key: AttributeKey[InputTask[T]] - def scopedKey: ScopedKey[InputTask[T]] = ScopedKey(scope, key) - def in(scope: Scope): InputKey[T] = Scoped.scopedInput(Scope.replaceThis(this.scope)(scope), this.key) +/** + * Identifies an input task. An input task parses input and produces a task to run. + * It consists of three parts: the scope, the name, and the type of the value produced by an input task associated with this key. + * The scope is represented by a value of type Scope. + * The name and the type are represented by a value of type `AttributeKey[InputTask[T]]`. + * Instances are constructed using the companion object. 
+ */ +sealed trait InputKey[T] extends Scoped with KeyedInitialize[InputTask[T]] with Scoped.ScopingSetting[InputKey[T]] with Scoped.DefinableSetting[InputTask[T]] { + val key: AttributeKey[InputTask[T]] + def scopedKey: ScopedKey[InputTask[T]] = ScopedKey(scope, key) + def in(scope: Scope): InputKey[T] = Scoped.scopedInput(Scope.replaceThis(this.scope)(scope), this.key) - final def :=(v: T): Setting[InputTask[T]] = macro std.TaskMacro.inputTaskAssignMacroImpl[T] - final def ~= (f: T => T): Setting[InputTask[T]] = macro std.TaskMacro.itaskTransformPosition[T] - final def transform(f: T => T, source: SourcePosition): Setting[InputTask[T]] = set( scopedKey(_ mapTask { _ map f} ), source ) + final def :=(v: T): Setting[InputTask[T]] = macro std.TaskMacro.inputTaskAssignMacroImpl[T] + final def ~=(f: T => T): Setting[InputTask[T]] = macro std.TaskMacro.itaskTransformPosition[T] + final def transform(f: T => T, source: SourcePosition): Setting[InputTask[T]] = set(scopedKey(_ mapTask { _ map f }), source) } /** Methods and types related to constructing settings, including keys, scopes, and initializations. */ -object Scoped -{ - implicit def taskScopedToKey[T](s: TaskKey[T]): ScopedKey[Task[T]] = ScopedKey(s.scope, s.key) - implicit def inputScopedToKey[T](s: InputKey[T]): ScopedKey[InputTask[T]] = ScopedKey(s.scope, s.key) +object Scoped { + implicit def taskScopedToKey[T](s: TaskKey[T]): ScopedKey[Task[T]] = ScopedKey(s.scope, s.key) + implicit def inputScopedToKey[T](s: InputKey[T]): ScopedKey[InputTask[T]] = ScopedKey(s.scope, s.key) - /** - * Mixin trait for adding convenience vocabulary associated with specifiying the [[Scope]] of a setting. - * Allows specification of the Scope or part of the [[Scope]] of a setting being referenced. 
- * @example - * {{{ - * name in Global := "hello Global scope" - * - * name in (Compile, packageBin) := "hello Compile scope packageBin" - * - * name in Compile := "hello Compile scope" + /** + * Mixin trait for adding convenience vocabulary associated with specifiying the [[Scope]] of a setting. + * Allows specification of the Scope or part of the [[Scope]] of a setting being referenced. + * @example + * {{{ + * name in Global := "hello Global scope" + * + * name in (Compile, packageBin) := "hello Compile scope packageBin" + * + * name in Compile := "hello Compile scope" + * + * name.in(Compile).:=("hello ugly syntax") + * }}} + * + */ + sealed trait ScopingSetting[Result] { + def in(s: Scope): Result - * name.in(Compile).:=("hello ugly syntax") - * }}} - * - */ - sealed trait ScopingSetting[Result] - { - def in(s: Scope): Result + def in(p: Reference): Result = in(Select(p), This, This) + def in(t: Scoped): Result = in(This, This, Select(t.key)) + def in(c: ConfigKey): Result = in(This, Select(c), This) + def in(c: ConfigKey, t: Scoped): Result = in(This, Select(c), Select(t.key)) + def in(p: Reference, c: ConfigKey): Result = in(Select(p), Select(c), This) + def in(p: Reference, t: Scoped): Result = in(Select(p), This, Select(t.key)) + def in(p: Reference, c: ConfigKey, t: Scoped): Result = in(Select(p), Select(c), Select(t.key)) + def in(p: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]]): Result = in(Scope(p, c, t, This)) + } - def in(p: Reference): Result = in(Select(p), This, This) - def in(t: Scoped): Result = in(This, This, Select(t.key)) - def in(c: ConfigKey): Result = in(This, Select(c), This) - def in(c: ConfigKey, t: Scoped): Result = in(This, Select(c), Select(t.key)) - def in(p: Reference, c: ConfigKey): Result = in(Select(p), Select(c), This) - def in(p: Reference, t: Scoped): Result = in(Select(p), This, Select(t.key)) - def in(p: Reference, c: ConfigKey, t: Scoped): Result = in(Select(p), Select(c), Select(t.key)) - 
def in(p: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]]): Result = in( Scope(p, c, t, This) ) - } + def scopedSetting[T](s: Scope, k: AttributeKey[T]): SettingKey[T] = new SettingKey[T] { val scope = s; val key = k } + def scopedInput[T](s: Scope, k: AttributeKey[InputTask[T]]): InputKey[T] = new InputKey[T] { val scope = s; val key = k } + def scopedTask[T](s: Scope, k: AttributeKey[Task[T]]): TaskKey[T] = new TaskKey[T] { val scope = s; val key = k } - def scopedSetting[T](s: Scope, k: AttributeKey[T]): SettingKey[T] = new SettingKey[T] { val scope = s; val key = k} - def scopedInput[T](s: Scope, k: AttributeKey[InputTask[T]]): InputKey[T] = new InputKey[T] { val scope = s; val key = k } - def scopedTask[T](s: Scope, k: AttributeKey[Task[T]]): TaskKey[T] = new TaskKey[T] { val scope = s; val key = k } + /** + * Mixin trait for adding convenience vocabulary associated with applying a setting to a configuration item. + */ + sealed trait DefinableSetting[S] { + def scopedKey: ScopedKey[S] - /** - * Mixin trait for adding convenience vocabulary associated with applying a setting to a configuration item. - */ - sealed trait DefinableSetting[S] - { - def scopedKey: ScopedKey[S] - - private[sbt] final def :==(app: S): Setting[S] = macro std.TaskMacro.settingAssignPure[S] - - /** Binds a single value to this. A new [Def.Setting] is defined using the value(s) of `app`. - * @param app value to bind to this key - * @return setting binding this key to the given value. - */ - final def <<= (app: Initialize[S]): Setting[S] = macro std.TaskMacro.settingAssignPosition[S] - - /** Internally used function for setting a value along with the `.sbt` file location where it is defined. */ - final def set (app: Initialize[S], source: SourcePosition): Setting[S] = setting(scopedKey, app, source) - - /** From the given [[Settings]], extract the value bound to this key. 
*/ - final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) - - /** Creates an [[Def.Initialize]] with value [[scala.None]] if there was no previous definition of this key, - * and `[[scala.Some]](value)` if a definition exists. Useful for when you want to use the ''existence'' of - * one setting in order to define another setting. - * @return currently bound value wrapped in `Initialize[Some[T]]`, or `Initialize[None]` if unbound. */ - final def ? : Initialize[Option[S]] = Def.optional(scopedKey)(idFun) - - /** Creates an [[Def.Initialize]] with value bound to this key, or returns `i` parameter if unbound. - * @param i value to return if this setting doesn't have a value. - * @return currently bound setting value, or `i` if unbound. - */ - final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _ ) - - /** Like [[?]], but with a call-by-name parameter rather than an existing [[Def.Initialize]]. - * Useful when you want to have a value computed when no value is bound to this key. - * @param or by-name expression evaluated when a value is needed. - * @return currently bound setting value, or the result of `or` if unbound. - */ - final def ??[T >: S](or: => T): Initialize[T] = Def.optional(scopedKey)(_ getOrElse or ) - } + private[sbt] final def :==(app: S): Setting[S] = macro std.TaskMacro.settingAssignPure[S] /** - * Wraps an [[sbt.Def.Initialize]] instance to provide `map` and `flatMap` symantics. - */ - final class RichInitialize[S](init: Initialize[S]) - { - def map[T](f: S => T): Initialize[Task[T]] = init(s => mktask(f(s)) ) - def flatMap[T](f: S => Task[T]): Initialize[Task[T]] = init(f) - } - sealed trait DefinableTask[S] - { self: TaskKey[S] => + * Binds a single value to this. A new [Def.Setting] is defined using the value(s) of `app`. + * @param app value to bind to this key + * @return setting binding this key to the given value. 
+ */ + final def <<=(app: Initialize[S]): Setting[S] = macro std.TaskMacro.settingAssignPosition[S] - private[sbt] def :==(app: S): Setting[Task[S]] = macro std.TaskMacro.taskAssignPositionPure[S] - private[sbt] def ::=(app: Task[S]): Setting[Task[S]] = macro std.TaskMacro.taskAssignPositionT[S] - def := (v: S): Setting[Task[S]] = macro std.TaskMacro.taskAssignMacroImpl[S] - def ~= (f: S => S): Setting[Task[S]] = macro std.TaskMacro.taskTransformPosition[S] + /** Internally used function for setting a value along with the `.sbt` file location where it is defined. */ + final def set(app: Initialize[S], source: SourcePosition): Setting[S] = setting(scopedKey, app, source) - def <<= (app: Initialize[Task[S]]): Setting[Task[S]] = macro std.TaskMacro.itaskAssignPosition[S] - def set(app: Initialize[Task[S]], source: SourcePosition): Setting[Task[S]] = Def.setting(scopedKey, app, source) - def transform(f: S => S, source: SourcePosition): Setting[Task[S]] = set( scopedKey(_ map f), source) + /** From the given [[Settings]], extract the value bound to this key. */ + final def get(settings: Settings[Scope]): Option[S] = settings.get(scopedKey.scope, scopedKey.key) - @deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.", "0.13.2") - def task: SettingKey[Task[S]] = scopedSetting(scope, key) - def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key) + /** + * Creates an [[Def.Initialize]] with value [[scala.None]] if there was no previous definition of this key, + * and `[[scala.Some]](value)` if a definition exists. Useful for when you want to use the ''existence'' of + * one setting in order to define another setting. + * @return currently bound value wrapped in `Initialize[Some[T]]`, or `Initialize[None]` if unbound. + */ + final def ? : Initialize[Option[S]] = Def.optional(scopedKey)(idFun) - def ? 
: Initialize[Task[Option[S]]] = Def.optional(scopedKey) { case None => mktask { None }; case Some(t) => t map some.fn } - def ??[T >: S](or: => T): Initialize[Task[T]] = Def.optional(scopedKey)( _ getOrElse mktask(or) ) - def or[T >: S](i: Initialize[Task[T]]): Initialize[Task[T]] = (this.? zipWith i)( (x,y) => (x, y) map { case (a,b) => a getOrElse b}) - } - final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] - { - protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f + /** + * Creates an [[Def.Initialize]] with value bound to this key, or returns `i` parameter if unbound. + * @param i value to return if this setting doesn't have a value. + * @return currently bound setting value, or `i` if unbound. + */ + final def or[T >: S](i: Initialize[T]): Initialize[T] = (this.?, i)(_ getOrElse _) - def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.dependsOn(deps : _*) } + /** + * Like [[?]], but with a call-by-name parameter rather than an existing [[Def.Initialize]]. + * Useful when you want to have a value computed when no value is bound to this key. + * @param or by-name expression evaluated when a value is needed. + * @return currently bound setting value, or the result of `or` if unbound. + */ + final def ??[T >: S](or: => T): Initialize[T] = Def.optional(scopedKey)(_ getOrElse or) + } - def failure: Initialize[Task[Incomplete]] = i(_.failure) - def result: Initialize[Task[Result[S]]] = i(_.result) + /** + * Wraps an [[sbt.Def.Initialize]] instance to provide `map` and `flatMap` symantics. 
+ */ + final class RichInitialize[S](init: Initialize[S]) { + def map[T](f: S => T): Initialize[Task[T]] = init(s => mktask(f(s))) + def flatMap[T](f: S => Task[T]): Initialize[Task[T]] = init(f) + } + sealed trait DefinableTask[S] { self: TaskKey[S] => - def triggeredBy(tasks: AnyInitTask*): Initialize[Task[S]] = nonLocal(tasks, Def.triggeredBy) - def runBefore(tasks: AnyInitTask*): Initialize[Task[S]] = nonLocal(tasks, Def.runBefore) - private[this] def nonLocal(tasks: Seq[AnyInitTask], key: AttributeKey[Seq[Task[_]]]): Initialize[Task[S]] = - (Initialize.joinAny[Task](tasks), i) { (ts, i) => i.copy(info = i.info.set(key, ts)) } - } - final class RichInitializeInputTask[S](i: Initialize[InputTask[S]]) extends RichInitTaskBase[S,InputTask] - { - protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f) - def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.mapTask(_.dependsOn(deps : _*)) } - } + private[sbt] def :==(app: S): Setting[Task[S]] = macro std.TaskMacro.taskAssignPositionPure[S] + private[sbt] def ::=(app: Task[S]): Setting[Task[S]] = macro std.TaskMacro.taskAssignPositionT[S] + def :=(v: S): Setting[Task[S]] = macro std.TaskMacro.taskAssignMacroImpl[S] + def ~=(f: S => S): Setting[Task[S]] = macro std.TaskMacro.taskTransformPosition[S] - sealed abstract class RichInitTaskBase[S, R[_]] - { - protected def onTask[T](f: Task[S] => Task[T]): Initialize[R[T]] + def <<=(app: Initialize[Task[S]]): Setting[Task[S]] = macro std.TaskMacro.itaskAssignPosition[S] + def set(app: Initialize[Task[S]], source: SourcePosition): Setting[Task[S]] = Def.setting(scopedKey, app, source) + def transform(f: S => S, source: SourcePosition): Setting[Task[S]] = set(scopedKey(_ map f), source) - def flatMap[T](f: S => Task[T]): Initialize[R[T]] = flatMapR(f compose successM) - def map[T](f: S => T): Initialize[R[T]] = mapR(f compose successM) - def andFinally(fin: => 
Unit): Initialize[R[S]] = onTask(_ andFinally fin) - def doFinally(t: Task[Unit]): Initialize[R[S]] = onTask(_ doFinally t) + @deprecated("No longer needed with new task syntax and SettingKey inheriting from Initialize.", "0.13.2") + def task: SettingKey[Task[S]] = scopedSetting(scope, key) + def get(settings: Settings[Scope]): Option[Task[S]] = settings.get(scope, key) - def || [T >: S](alt: Task[T]): Initialize[R[T]] = onTask(_ || alt) - def && [T](alt: Task[T]): Initialize[R[T]] = onTask(_ && alt) + def ? : Initialize[Task[Option[S]]] = Def.optional(scopedKey) { case None => mktask { None }; case Some(t) => t map some.fn } + def ??[T >: S](or: => T): Initialize[Task[T]] = Def.optional(scopedKey)(_ getOrElse mktask(or)) + def or[T >: S](i: Initialize[Task[T]]): Initialize[Task[T]] = (this.? zipWith i)((x, y) => (x, y) map { case (a, b) => a getOrElse b }) + } + final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] { + protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f - def tag(tags: Tag*): Initialize[R[S]] = onTask(_.tag(tags: _*)) - def tagw(tags: (Tag, Int)*): Initialize[R[S]] = onTask(_.tagw(tags : _*)) + def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.dependsOn(deps: _*) } - @deprecated("Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.", "0.13.0") - def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_ flatMapR f) + def failure: Initialize[Task[Incomplete]] = i(_.failure) + def result: Initialize[Task[Result[S]]] = i(_.result) - @deprecated("Use the `result` method to create a task that returns the full Result of this task. 
Then, call `map` on the new task.", "0.13.0") - def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_ mapR f) + def triggeredBy(tasks: AnyInitTask*): Initialize[Task[S]] = nonLocal(tasks, Def.triggeredBy) + def runBefore(tasks: AnyInitTask*): Initialize[Task[S]] = nonLocal(tasks, Def.runBefore) + private[this] def nonLocal(tasks: Seq[AnyInitTask], key: AttributeKey[Seq[Task[_]]]): Initialize[Task[S]] = + (Initialize.joinAny[Task](tasks), i) { (ts, i) => i.copy(info = i.info.set(key, ts)) } + } + final class RichInitializeInputTask[S](i: Initialize[InputTask[S]]) extends RichInitTaskBase[S, InputTask] { + protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f) + def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)) } + } - @deprecated("Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.", "0.13.0") - def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = flatMapR(f compose failM) + sealed abstract class RichInitTaskBase[S, R[_]] { + protected def onTask[T](f: Task[S] => Task[T]): Initialize[R[T]] - @deprecated("Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.", "0.13.0") - def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = mapR(f compose failM) - } + def flatMap[T](f: S => Task[T]): Initialize[R[T]] = flatMapR(f compose successM) + def map[T](f: S => T): Initialize[R[T]] = mapR(f compose successM) + def andFinally(fin: => Unit): Initialize[R[S]] = onTask(_ andFinally fin) + def doFinally(t: Task[Unit]): Initialize[R[S]] = onTask(_ doFinally t) - type AnyInitTask = Initialize[Task[T]] forSome { type T } + def ||[T >: S](alt: Task[T]): Initialize[R[T]] = onTask(_ || alt) + def &&[T](alt: Task[T]): Initialize[R[T]] = onTask(_ && alt) - implicit def 
richTaskSeq[T](in: Seq[Initialize[Task[T]]]): RichTaskSeq[T] = new RichTaskSeq(in) - final class RichTaskSeq[T](keys: Seq[Initialize[Task[T]]]) - { - def join: Initialize[Task[Seq[T]]] = tasks(_.join) - def tasks: Initialize[Seq[Task[T]]] = Initialize.join(keys) - } - implicit def richAnyTaskSeq(in: Seq[AnyInitTask]): RichAnyTaskSeq = new RichAnyTaskSeq(in) - final class RichAnyTaskSeq(keys: Seq[AnyInitTask]) - { - def dependOn: Initialize[Task[Unit]] = Initialize.joinAny[Task](keys).apply(deps => nop.dependsOn(deps : _*) ) - } + def tag(tags: Tag*): Initialize[R[S]] = onTask(_.tag(tags: _*)) + def tagw(tags: (Tag, Int)*): Initialize[R[S]] = onTask(_.tagw(tags: _*)) + @deprecated("Use the `result` method to create a task that returns the full Result of this task. Then, call `flatMap` on the new task.", "0.13.0") + def flatMapR[T](f: Result[S] => Task[T]): Initialize[R[T]] = onTask(_ flatMapR f) - implicit def richFileSetting(s: SettingKey[File]): RichFileSetting = new RichFileSetting(s) - implicit def richFilesSetting(s: SettingKey[Seq[File]]): RichFilesSetting = new RichFilesSetting(s) + @deprecated("Use the `result` method to create a task that returns the full Result of this task. 
Then, call `map` on the new task.", "0.13.0") + def mapR[T](f: Result[S] => T): Initialize[R[T]] = onTask(_ mapR f) - final class RichFileSetting(s: SettingKey[File]) extends RichFileBase - { - @deprecated("Use a standard setting definition.", "0.13.0") - def /(c: String): Initialize[File] = s { _ / c } - protected[this] def map0(f: PathFinder => PathFinder) = s(file => finder(f)(file :: Nil)) - } - final class RichFilesSetting(s: SettingKey[Seq[File]]) extends RichFileBase - { - @deprecated("Use a standard setting definition.", "0.13.0") - def /(s: String): Initialize[Seq[File]] = map0 { _ / s } - protected[this] def map0(f: PathFinder => PathFinder) = s(finder(f)) - } - sealed abstract class RichFileBase - { - @deprecated("Use a standard setting definition.", "0.13.0") - def *(filter: FileFilter): Initialize[Seq[File]] = map0 { _ * filter } - @deprecated("Use a standard setting definition.", "0.13.0") - def **(filter: FileFilter): Initialize[Seq[File]] = map0 { _ ** filter } - protected[this] def map0(f: PathFinder => PathFinder): Initialize[Seq[File]] - protected[this] def finder(f: PathFinder => PathFinder): Seq[File] => Seq[File] = - in => f(in).get - } + @deprecated("Use the `failure` method to create a task that returns Incomplete when this task fails and then call `flatMap` on the new task.", "0.13.0") + def flatFailure[T](f: Incomplete => Task[T]): Initialize[R[T]] = flatMapR(f compose failM) - // this is the least painful arrangement I came up with - implicit def t2ToTable2[A,B](t2: (ScopedTaskable[A], ScopedTaskable[B]) ): RichTaskable2[A,B] = new RichTaskable2(t2) - implicit def t3ToTable3[A,B,C](t3: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C]) ): RichTaskable3[A,B,C] = new RichTaskable3(t3) - implicit def t4ToTable4[A,B,C,D](t4: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D]) ): RichTaskable4[A,B,C,D] = new RichTaskable4(t4) - implicit def t5ToTable5[A,B,C,D,E](t5: (ScopedTaskable[A], ScopedTaskable[B], 
ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E]) ): RichTaskable5[A,B,C,D,E] = new RichTaskable5(t5) - implicit def t6ToTable6[A,B,C,D,E,F](t6: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F]) ): RichTaskable6[A,B,C,D,E,F] = new RichTaskable6(t6) - implicit def t7ToTable7[A,B,C,D,E,F,G](t7: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G]) ): RichTaskable7[A,B,C,D,E,F,G] = new RichTaskable7(t7) - implicit def t8ToTable8[A,B,C,D,E,F,G,H](t8: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H]) ): RichTaskable8[A,B,C,D,E,F,G,H] = new RichTaskable8(t8) - implicit def t9ToTable9[A,B,C,D,E,F,G,H,I](t9: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I]) ): RichTaskable9[A,B,C,D,E,F,G,H,I] = new RichTaskable9(t9) - implicit def t10ToTable10[A,B,C,D,E,F,G,H,I,J](t10: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J]) ): RichTaskable10[A,B,C,D,E,F,G,H,I,J] = new RichTaskable10(t10) - implicit def t11ToTable11[A,B,C,D,E,F,G,H,I,J,K](t11: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], ScopedTaskable[K]) ): RichTaskable11[A,B,C,D,E,F,G,H,I,J,K] = new RichTaskable11(t11) -/* implicit def t12ToTable12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], 
ScopedTaskable[K], ScopedTaskable[L]) ): RichTaskable12[A,B,C,D,E,F,G,H,I,J,K,L] = new RichTaskable12(t12) + @deprecated("Use the `failure` method to create a task that returns Incomplete when this task fails and then call `map` on the new task.", "0.13.0") + def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = mapR(f compose failM) + } + + type AnyInitTask = Initialize[Task[T]] forSome { type T } + + implicit def richTaskSeq[T](in: Seq[Initialize[Task[T]]]): RichTaskSeq[T] = new RichTaskSeq(in) + final class RichTaskSeq[T](keys: Seq[Initialize[Task[T]]]) { + def join: Initialize[Task[Seq[T]]] = tasks(_.join) + def tasks: Initialize[Seq[Task[T]]] = Initialize.join(keys) + } + implicit def richAnyTaskSeq(in: Seq[AnyInitTask]): RichAnyTaskSeq = new RichAnyTaskSeq(in) + final class RichAnyTaskSeq(keys: Seq[AnyInitTask]) { + def dependOn: Initialize[Task[Unit]] = Initialize.joinAny[Task](keys).apply(deps => nop.dependsOn(deps: _*)) + } + + implicit def richFileSetting(s: SettingKey[File]): RichFileSetting = new RichFileSetting(s) + implicit def richFilesSetting(s: SettingKey[Seq[File]]): RichFilesSetting = new RichFilesSetting(s) + + final class RichFileSetting(s: SettingKey[File]) extends RichFileBase { + @deprecated("Use a standard setting definition.", "0.13.0") + def /(c: String): Initialize[File] = s { _ / c } + protected[this] def map0(f: PathFinder => PathFinder) = s(file => finder(f)(file :: Nil)) + } + final class RichFilesSetting(s: SettingKey[Seq[File]]) extends RichFileBase { + @deprecated("Use a standard setting definition.", "0.13.0") + def /(s: String): Initialize[Seq[File]] = map0 { _ / s } + protected[this] def map0(f: PathFinder => PathFinder) = s(finder(f)) + } + sealed abstract class RichFileBase { + @deprecated("Use a standard setting definition.", "0.13.0") + def *(filter: FileFilter): Initialize[Seq[File]] = map0 { _ * filter } + @deprecated("Use a standard setting definition.", "0.13.0") + def **(filter: FileFilter): Initialize[Seq[File]] = 
map0 { _ ** filter } + protected[this] def map0(f: PathFinder => PathFinder): Initialize[Seq[File]] + protected[this] def finder(f: PathFinder => PathFinder): Seq[File] => Seq[File] = + in => f(in).get + } + + // this is the least painful arrangement I came up with + implicit def t2ToTable2[A, B](t2: (ScopedTaskable[A], ScopedTaskable[B])): RichTaskable2[A, B] = new RichTaskable2(t2) + implicit def t3ToTable3[A, B, C](t3: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C])): RichTaskable3[A, B, C] = new RichTaskable3(t3) + implicit def t4ToTable4[A, B, C, D](t4: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D])): RichTaskable4[A, B, C, D] = new RichTaskable4(t4) + implicit def t5ToTable5[A, B, C, D, E](t5: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E])): RichTaskable5[A, B, C, D, E] = new RichTaskable5(t5) + implicit def t6ToTable6[A, B, C, D, E, F](t6: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F])): RichTaskable6[A, B, C, D, E, F] = new RichTaskable6(t6) + implicit def t7ToTable7[A, B, C, D, E, F, G](t7: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G])): RichTaskable7[A, B, C, D, E, F, G] = new RichTaskable7(t7) + implicit def t8ToTable8[A, B, C, D, E, F, G, H](t8: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H])): RichTaskable8[A, B, C, D, E, F, G, H] = new RichTaskable8(t8) + implicit def t9ToTable9[A, B, C, D, E, F, G, H, I](t9: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I])): RichTaskable9[A, B, C, D, E, F, G, H, I] = new RichTaskable9(t9) + implicit def t10ToTable10[A, B, C, D, E, F, G, H, I, J](t10: 
(ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J])): RichTaskable10[A, B, C, D, E, F, G, H, I, J] = new RichTaskable10(t10) + implicit def t11ToTable11[A, B, C, D, E, F, G, H, I, J, K](t11: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], ScopedTaskable[K])): RichTaskable11[A, B, C, D, E, F, G, H, I, J, K] = new RichTaskable11(t11) + /* implicit def t12ToTable12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], ScopedTaskable[K], ScopedTaskable[L]) ): RichTaskable12[A,B,C,D,E,F,G,H,I,J,K,L] = new RichTaskable12(t12) implicit def t13ToTable13[A,B,C,D,E,F,G,H,I,J,K,L,N](t13: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], ScopedTaskable[K], ScopedTaskable[L], ScopedTaskable[N]) ): RichTaskable13[A,B,C,D,E,F,G,H,I,J,K,L,N] = new RichTaskable13(t13) implicit def t14ToTable14[A,B,C,D,E,F,G,H,I,J,K,L,N,O](t14: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], ScopedTaskable[J], ScopedTaskable[K], ScopedTaskable[L], ScopedTaskable[N], ScopedTaskable[O]) ): RichTaskable14[A,B,C,D,E,F,G,H,I,J,K,L,N,O] = new RichTaskable14(t14) implicit def t15ToTable15[A,B,C,D,E,F,G,H,I,J,K,L,N,O,P](t15: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C], ScopedTaskable[D], ScopedTaskable[E], ScopedTaskable[F], ScopedTaskable[G], ScopedTaskable[H], ScopedTaskable[I], 
ScopedTaskable[J], ScopedTaskable[K], ScopedTaskable[L], ScopedTaskable[N], ScopedTaskable[O], ScopedTaskable[P]) ): RichTaskable15[A,B,C,D,E,F,G,H,I,J,K,L,N,O,P] = new RichTaskable15(t15)*/ - sealed abstract class RichTaskables[K[L[x]]](final val keys: K[ScopedTaskable])(implicit a: AList[K]) - { - type App[T] = Initialize[Task[T]] - type Fun[M[_],Ret] - protected def convert[M[_],Ret](f: Fun[M,Ret]): K[M] => Ret - private[this] val inputs: K[App] = a.transform(keys, new (ScopedTaskable ~> App) { def apply[T](in: ScopedTaskable[T]): App[T] = in.toTask }) - private[this] def onTasks[T](f: K[Task] => Task[T]): App[T] = Def.app[({ type l[L[x]] = K[ (L ∙ Task)#l] })#l,Task[T]](inputs)(f)(AList.asplit[K,Task](a)) + sealed abstract class RichTaskables[K[L[x]]]( final val keys: K[ScopedTaskable])(implicit a: AList[K]) { + type App[T] = Initialize[Task[T]] + type Fun[M[_], Ret] + protected def convert[M[_], Ret](f: Fun[M, Ret]): K[M] => Ret + private[this] val inputs: K[App] = a.transform(keys, new (ScopedTaskable ~> App) { def apply[T](in: ScopedTaskable[T]): App[T] = in.toTask }) + private[this] def onTasks[T](f: K[Task] => Task[T]): App[T] = Def.app[({ type l[L[x]] = K[(L ∙ Task)#l] })#l, Task[T]](inputs)(f)(AList.asplit[K, Task](a)) - def flatMap[T](f: Fun[Id,Task[T]]): App[T] = onTasks(_.flatMap(convert(f))) - def flatMapR[T](f: Fun[Result,Task[T]]): App[T] = onTasks(_.flatMapR(convert(f))) - def map[T](f: Fun[Id, T]): App[T] = onTasks(_.mapR( convert(f) compose allM)) - def mapR[T](f: Fun[Result,T]): App[T] = onTasks(_.mapR(convert(f))) - def flatFailure[T](f: Seq[Incomplete] => Task[T]): App[T] = onTasks(_ flatFailure f) - def mapFailure[T](f: Seq[Incomplete] => T): App[T] = onTasks(_ mapFailure f) - } - type ST[X] = ScopedTaskable[X] - final class RichTaskable2[A,B](t2: (ST[A], ST[B])) extends RichTaskables[ AList.T2K[A,B]#l ](t2)(AList.tuple2[A,B]) - { - type Fun[M[_],Ret] = (M[A],M[B]) => Ret - def identityMap = map(mkTuple2) - protected def convert[M[_],R](f: 
(M[A],M[B]) => R) = f.tupled - } - final class RichTaskable3[A,B,C](t3: (ST[A], ST[B], ST[C])) extends RichTaskables[ AList.T3K[A,B,C]#l](t3)(AList.tuple3[A,B,C]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C]) => Ret - def identityMap = map(mkTuple3) - protected def convert[M[_],R](f: Fun[M,R]) = f.tupled - } - final class RichTaskable4[A,B,C,D](t4: (ST[A], ST[B], ST[C], ST[D])) extends RichTaskables[ AList.T4K[A,B,C,D]#l](t4)(AList.tuple4[A,B,C,D]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D]) => Ret - def identityMap = map(mkTuple4) - protected def convert[M[_],R](f: Fun[M,R]) = f.tupled - } - final class RichTaskable5[A,B,C,D,E](t5: (ST[A], ST[B], ST[C], ST[D], ST[E])) extends RichTaskables[AList.T5K[A,B,C,D,E]#l](t5)(AList.tuple5[A,B,C,D,E]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D],M[E]) => Ret - def identityMap = map(mkTuple5) - protected def convert[M[_],R](f: Fun[M,R]) = f.tupled - } - final class RichTaskable6[A,B,C,D,E,F](t6: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F])) extends RichTaskables[AList.T6K[A,B,C,D,E,F]#l](t6)(AList.tuple6[A,B,C,D,E,F]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D],M[E],M[F]) => Ret - def identityMap = map(mkTuple6) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } - final class RichTaskable7[A,B,C,D,E,F,G](t7: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G])) extends RichTaskables[AList.T7K[A,B,C,D,E,F,G]#l](t7)(AList.tuple7[A,B,C,D,E,F,G]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D],M[E],M[F],M[G]) => Ret - def identityMap = map(mkTuple7) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } - final class RichTaskable8[A,B,C,D,E,F,G,H](t8: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H])) extends RichTaskables[AList.T8K[A,B,C,D,E,F,G,H]#l](t8)(AList.tuple8[A,B,C,D,E,F,G,H]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D],M[E],M[F],M[G],M[H]) => Ret - def identityMap = map(mkTuple8) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } - final class RichTaskable9[A,B,C,D,E,F,G,H,I](t9: 
(ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I])) extends RichTaskables[AList.T9K[A,B,C,D,E,F,G,H,I]#l](t9)(AList.tuple9[A,B,C,D,E,F,G,H,I]) - { - type Fun[M[_],Ret] = (M[A],M[B],M[C],M[D],M[E],M[F],M[G],M[H],M[I]) => Ret - def identityMap = map(mkTuple9) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } - final class RichTaskable10[A,B,C,D,E,F,G,H,I,J](t10: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J]))) extends RichTaskables[AList.T10K[A,B,C,D,E,F,G,H,I,J]#l](t10)(AList.tuple10[A,B,C,D,E,F,G,H,I,J]) - { - type Fun[M[_],Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) => Ret - def identityMap = map(mkTuple10) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } + def flatMap[T](f: Fun[Id, Task[T]]): App[T] = onTasks(_.flatMap(convert(f))) + def flatMapR[T](f: Fun[Result, Task[T]]): App[T] = onTasks(_.flatMapR(convert(f))) + def map[T](f: Fun[Id, T]): App[T] = onTasks(_.mapR(convert(f) compose allM)) + def mapR[T](f: Fun[Result, T]): App[T] = onTasks(_.mapR(convert(f))) + def flatFailure[T](f: Seq[Incomplete] => Task[T]): App[T] = onTasks(_ flatFailure f) + def mapFailure[T](f: Seq[Incomplete] => T): App[T] = onTasks(_ mapFailure f) + } + type ST[X] = ScopedTaskable[X] + final class RichTaskable2[A, B](t2: (ST[A], ST[B])) extends RichTaskables[AList.T2K[A, B]#l](t2)(AList.tuple2[A, B]) { + type Fun[M[_], Ret] = (M[A], M[B]) => Ret + def identityMap = map(mkTuple2) + protected def convert[M[_], R](f: (M[A], M[B]) => R) = f.tupled + } + final class RichTaskable3[A, B, C](t3: (ST[A], ST[B], ST[C])) extends RichTaskables[AList.T3K[A, B, C]#l](t3)(AList.tuple3[A, B, C]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C]) => Ret + def identityMap = map(mkTuple3) + protected def convert[M[_], R](f: Fun[M, R]) = f.tupled + } + final class RichTaskable4[A, B, C, D](t4: (ST[A], ST[B], ST[C], ST[D])) extends RichTaskables[AList.T4K[A, B, C, D]#l](t4)(AList.tuple4[A, B, C, D]) { + type Fun[M[_], Ret] = 
(M[A], M[B], M[C], M[D]) => Ret + def identityMap = map(mkTuple4) + protected def convert[M[_], R](f: Fun[M, R]) = f.tupled + } + final class RichTaskable5[A, B, C, D, E](t5: (ST[A], ST[B], ST[C], ST[D], ST[E])) extends RichTaskables[AList.T5K[A, B, C, D, E]#l](t5)(AList.tuple5[A, B, C, D, E]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E]) => Ret + def identityMap = map(mkTuple5) + protected def convert[M[_], R](f: Fun[M, R]) = f.tupled + } + final class RichTaskable6[A, B, C, D, E, F](t6: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F])) extends RichTaskables[AList.T6K[A, B, C, D, E, F]#l](t6)(AList.tuple6[A, B, C, D, E, F]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F]) => Ret + def identityMap = map(mkTuple6) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } + final class RichTaskable7[A, B, C, D, E, F, G](t7: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G])) extends RichTaskables[AList.T7K[A, B, C, D, E, F, G]#l](t7)(AList.tuple7[A, B, C, D, E, F, G]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G]) => Ret + def identityMap = map(mkTuple7) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } + final class RichTaskable8[A, B, C, D, E, F, G, H](t8: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H])) extends RichTaskables[AList.T8K[A, B, C, D, E, F, G, H]#l](t8)(AList.tuple8[A, B, C, D, E, F, G, H]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H]) => Ret + def identityMap = map(mkTuple8) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } + final class RichTaskable9[A, B, C, D, E, F, G, H, I](t9: (ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I])) extends RichTaskables[AList.T9K[A, B, C, D, E, F, G, H, I]#l](t9)(AList.tuple9[A, B, C, D, E, F, G, H, I]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I]) => Ret + def identityMap = map(mkTuple9) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } + final class 
RichTaskable10[A, B, C, D, E, F, G, H, I, J](t10: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J]))) extends RichTaskables[AList.T10K[A, B, C, D, E, F, G, H, I, J]#l](t10)(AList.tuple10[A, B, C, D, E, F, G, H, I, J]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) => Ret + def identityMap = map(mkTuple10) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } - final class RichTaskable11[A,B,C,D,E,F,G,H,I,J,K](t11: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J], ST[K]))) extends RichTaskables[AList.T11K[A,B,C,D,E,F,G,H,I,J,K]#l](t11)(AList.tuple11[A,B,C,D,E,F,G,H,I,J,K]) - { - type Fun[M[_],Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) => Ret - def identityMap = map(mkTuple11) - protected def convert[M[_],R](z: Fun[M,R]) = z.tupled - } + final class RichTaskable11[A, B, C, D, E, F, G, H, I, J, K](t11: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J], ST[K]))) extends RichTaskables[AList.T11K[A, B, C, D, E, F, G, H, I, J, K]#l](t11)(AList.tuple11[A, B, C, D, E, F, G, H, I, J, K]) { + type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) => Ret + def identityMap = map(mkTuple11) + protected def convert[M[_], R](z: Fun[M, R]) = z.tupled + } -/* final class RichTaskable12[A,B,C,D,E,F,G,H,I,J,K,L](t12: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J], ST[K], ST[L]))) extends RichTaskables(k12(t12)) + /* final class RichTaskable12[A,B,C,D,E,F,G,H,I,J,K,L](t12: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J], ST[K], ST[L]))) extends RichTaskables(k12(t12)) { type Fun[M[_],Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K], M[L]) => Ret def identityMap = map(mkTuple12) @@ -408,77 +391,77 @@ object Scoped protected def convert[M[_],R](z: Fun[M,R]) = z.tupled }*/ - implicit def t2ToApp2[A,B](t2: (Initialize[A], Initialize[B]) ): 
Apply2[A,B] = new Apply2(t2) - implicit def t3ToApp3[A,B,C](t3: (Initialize[A], Initialize[B], Initialize[C]) ): Apply3[A,B,C] = new Apply3(t3) - implicit def t4ToApp4[A,B,C,D](t4: (Initialize[A], Initialize[B], Initialize[C], Initialize[D]) ): Apply4[A,B,C,D] = new Apply4(t4) - implicit def t5ToApp5[A,B,C,D,E](t5: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E]) ): Apply5[A,B,C,D,E] = new Apply5(t5) - implicit def t6ToApp6[A,B,C,D,E,F](t6: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F]) ): Apply6[A,B,C,D,E,F] = new Apply6(t6) - implicit def t7ToApp7[A,B,C,D,E,F,G](t7: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G]) ): Apply7[A,B,C,D,E,F,G] = new Apply7(t7) - implicit def t8ToApp8[A,B,C,D,E,F,G,H](t8: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H]) ): Apply8[A,B,C,D,E,F,G,H] = new Apply8(t8) - implicit def t9ToApp9[A,B,C,D,E,F,G,H,I](t9: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I]) ): Apply9[A,B,C,D,E,F,G,H,I] = new Apply9(t9) - implicit def t10ToApp10[A,B,C,D,E,F,G,H,I,J](t10: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J]) ): Apply10[A,B,C,D,E,F,G,H,I,J] = new Apply10(t10) - implicit def t11ToApp11[A,B,C,D,E,F,G,H,I,J,K](t11: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K]) ): Apply11[A,B,C,D,E,F,G,H,I,J,K] = new Apply11(t11) -/* implicit def t12ToApp12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L]) 
): Apply12[A,B,C,D,E,F,G,H,I,J,K,L] = new Apply12(t12) + implicit def t2ToApp2[A, B](t2: (Initialize[A], Initialize[B])): Apply2[A, B] = new Apply2(t2) + implicit def t3ToApp3[A, B, C](t3: (Initialize[A], Initialize[B], Initialize[C])): Apply3[A, B, C] = new Apply3(t3) + implicit def t4ToApp4[A, B, C, D](t4: (Initialize[A], Initialize[B], Initialize[C], Initialize[D])): Apply4[A, B, C, D] = new Apply4(t4) + implicit def t5ToApp5[A, B, C, D, E](t5: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E])): Apply5[A, B, C, D, E] = new Apply5(t5) + implicit def t6ToApp6[A, B, C, D, E, F](t6: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F])): Apply6[A, B, C, D, E, F] = new Apply6(t6) + implicit def t7ToApp7[A, B, C, D, E, F, G](t7: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G])): Apply7[A, B, C, D, E, F, G] = new Apply7(t7) + implicit def t8ToApp8[A, B, C, D, E, F, G, H](t8: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H])): Apply8[A, B, C, D, E, F, G, H] = new Apply8(t8) + implicit def t9ToApp9[A, B, C, D, E, F, G, H, I](t9: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I])): Apply9[A, B, C, D, E, F, G, H, I] = new Apply9(t9) + implicit def t10ToApp10[A, B, C, D, E, F, G, H, I, J](t10: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J])): Apply10[A, B, C, D, E, F, G, H, I, J] = new Apply10(t10) + implicit def t11ToApp11[A, B, C, D, E, F, G, H, I, J, K](t11: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K])): Apply11[A, B, C, D, E, F, G, H, I, J, K] = new Apply11(t11) + /* implicit def 
t12ToApp12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L]) ): Apply12[A,B,C,D,E,F,G,H,I,J,K,L] = new Apply12(t12) implicit def t13ToApp13[A,B,C,D,E,F,G,H,I,J,K,L,N](t13: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L], Initialize[N]) ): Apply13[A,B,C,D,E,F,G,H,I,J,K,L,N] = new Apply13(t13) implicit def t14ToApp14[A,B,C,D,E,F,G,H,I,J,K,L,N,O](t14: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L], Initialize[N], Initialize[O]) ): Apply14[A,B,C,D,E,F,G,H,I,J,K,L,N,O] = new Apply14(t14) implicit def t15ToApp15[A,B,C,D,E,F,G,H,I,J,K,L,N,O,P](t15: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L], Initialize[N], Initialize[O], Initialize[P]) ): Apply15[A,B,C,D,E,F,G,H,I,J,K,L,N,O,P] = new Apply15(t15) */ - def mkTuple2[A,B] = (a:A,b:B) => (a,b) - def mkTuple3[A,B,C] = (a:A,b:B,c:C) => (a,b,c) - def mkTuple4[A,B,C,D] = (a:A,b:B,c:C,d:D) => (a,b,c,d) - def mkTuple5[A,B,C,D,E] = (a:A,b:B,c:C,d:D,e:E) => (a,b,c,d,e) - def mkTuple6[A,B,C,D,E,F] = (a:A,b:B,c:C,d:D,e:E,f:F) => (a,b,c,d,e,f) - def mkTuple7[A,B,C,D,E,F,G] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G) => (a,b,c,d,e,f,g) - def mkTuple8[A,B,C,D,E,F,G,H] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H) => (a,b,c,d,e,f,g,h) - def mkTuple9[A,B,C,D,E,F,G,H,I] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I) => (a,b,c,d,e,f,g,h,i) - def mkTuple10[A,B,C,D,E,F,G,H,I,J] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J) => (a,b,c,d,e,f,g,h,i,j) - def mkTuple11[A,B,C,D,E,F,G,H,I,J,K] = 
(a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J,k:K) => (a,b,c,d,e,f,g,h,i,j,k) - def mkTuple12[A,B,C,D,E,F,G,H,I,J,K,L] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J,k:K,l:L) => (a,b,c,d,e,f,g,h,i,j,k,l) - def mkTuple13[A,B,C,D,E,F,G,H,I,J,K,L,N] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J,k:K,l:L,n:N) => (a,b,c,d,e,f,g,h,i,j,k,l,n) - def mkTuple14[A,B,C,D,E,F,G,H,I,J,K,L,N,O] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J,k:K,l:L,n:N,o:O) => (a,b,c,d,e,f,g,h,i,j,k,l,n,o) - def mkTuple15[A,B,C,D,E,F,G,H,I,J,K,L,N,O,P] = (a:A,b:B,c:C,d:D,e:E,f:F,g:G,h:H,i:I,j:J,k:K,l:L,n:N,o:O,p:P) => (a,b,c,d,e,f,g,h,i,j,k,l,n,o,p) + def mkTuple2[A, B] = (a: A, b: B) => (a, b) + def mkTuple3[A, B, C] = (a: A, b: B, c: C) => (a, b, c) + def mkTuple4[A, B, C, D] = (a: A, b: B, c: C, d: D) => (a, b, c, d) + def mkTuple5[A, B, C, D, E] = (a: A, b: B, c: C, d: D, e: E) => (a, b, c, d, e) + def mkTuple6[A, B, C, D, E, F] = (a: A, b: B, c: C, d: D, e: E, f: F) => (a, b, c, d, e, f) + def mkTuple7[A, B, C, D, E, F, G] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G) => (a, b, c, d, e, f, g) + def mkTuple8[A, B, C, D, E, F, G, H] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H) => (a, b, c, d, e, f, g, h) + def mkTuple9[A, B, C, D, E, F, G, H, I] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I) => (a, b, c, d, e, f, g, h, i) + def mkTuple10[A, B, C, D, E, F, G, H, I, J] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J) => (a, b, c, d, e, f, g, h, i, j) + def mkTuple11[A, B, C, D, E, F, G, H, I, J, K] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K) => (a, b, c, d, e, f, g, h, i, j, k) + def mkTuple12[A, B, C, D, E, F, G, H, I, J, K, L] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L) => (a, b, c, d, e, f, g, h, i, j, k, l) + def mkTuple13[A, B, C, D, E, F, G, H, I, J, K, L, N] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, n: N) => (a, b, c, d, e, f, g, h, i, j, k, l, n) + def mkTuple14[A, B, C, D, E, F, G, H, I, 
J, K, L, N, O] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, n: N, o: O) => (a, b, c, d, e, f, g, h, i, j, k, l, n, o) + def mkTuple15[A, B, C, D, E, F, G, H, I, J, K, L, N, O, P] = (a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, n: N, o: O, p: P) => (a, b, c, d, e, f, g, h, i, j, k, l, n, o, p) - final class Apply2[A,B](t2: (Initialize[A], Initialize[B])) { - def apply[T](z: (A,B) => T) = Def.app[AList.T2K[A,B]#l, T]( t2 )( z.tupled )(AList.tuple2[A,B]) - def identity = apply(mkTuple2) - } - final class Apply3[A,B,C](t3: (Initialize[A], Initialize[B], Initialize[C])) { - def apply[T](z: (A,B,C) => T) = Def.app[AList.T3K[A,B,C]#l, T]( t3 )( z.tupled )(AList.tuple3[A,B,C]) - def identity = apply(mkTuple3) - } - final class Apply4[A,B,C,D](t4: (Initialize[A], Initialize[B], Initialize[C], Initialize[D])) { - def apply[T](z: (A,B,C,D) => T) = Def.app[AList.T4K[A,B,C,D]#l, T]( t4 )( z.tupled )(AList.tuple4[A,B,C,D]) - def identity = apply(mkTuple4) - } - final class Apply5[A,B,C,D,E](t5: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E])) { - def apply[T](z: (A,B,C,D,E) => T) = Def.app[AList.T5K[A,B,C,D,E]#l, T]( t5 )( z.tupled )( AList.tuple5[A,B,C,D,E] ) - def identity = apply(mkTuple5) - } - final class Apply6[A,B,C,D,E,F](t6: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F])) { - def apply[T](z: (A,B,C,D,E,F) => T) = Def.app[AList.T6K[A,B,C,D,E,F]#l, T]( t6 )( z.tupled )( AList.tuple6[A,B,C,D,E,F] ) - def identity = apply(mkTuple6) - } - final class Apply7[A,B,C,D,E,F,G](t7: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G])) { - def apply[T](z: (A,B,C,D,E,F,G) => T) = Def.app[AList.T7K[A,B,C,D,E,F,G]#l, T]( t7 )( z.tupled )( AList.tuple7[A,B,C,D,E,F,G] ) - def identity = apply(mkTuple7) - } - final class Apply8[A,B,C,D,E,F,G,H](t8: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], 
Initialize[E], Initialize[F], Initialize[G], Initialize[H])) { - def apply[T](z: (A,B,C,D,E,F,G,H) => T) = Def.app[AList.T8K[A,B,C,D,E,F,G,H]#l, T]( t8 )( z.tupled )( AList.tuple8[A,B,C,D,E,F,G,H] ) - def identity = apply(mkTuple8) - } - final class Apply9[A,B,C,D,E,F,G,H,I](t9: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I])) { - def apply[T](z: (A,B,C,D,E,F,G,H,I) => T) = Def.app[AList.T9K[A,B,C,D,E,F,G,H,I]#l, T]( t9 )( z.tupled )( AList.tuple9[A,B,C,D,E,F,G,H,I] ) - def identity = apply(mkTuple9) - } - final class Apply10[A,B,C,D,E,F,G,H,I,J](t10: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J])) { - def apply[T](z: (A,B,C,D,E,F,G,H,I,J) => T) = Def.app[AList.T10K[A,B,C,D,E,F,G,H,I,J]#l, T]( t10 )( z.tupled )( AList.tuple10[A,B,C,D,E,F,G,H,I,J] ) - def identity = apply(mkTuple10) - } - final class Apply11[A,B,C,D,E,F,G,H,I,J,K](t11: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K])) { - def apply[T](z: (A,B,C,D,E,F,G,H,I,J,K) => T) = Def.app[AList.T11K[A,B,C,D,E,F,G,H,I,J,K]#l, T]( t11 )( z.tupled )( AList.tuple11[A,B,C,D,E,F,G,H,I,J,K] ) - def identity = apply(mkTuple11) - } -/* final class Apply12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L])) { + final class Apply2[A, B](t2: (Initialize[A], Initialize[B])) { + def apply[T](z: (A, B) => T) = Def.app[AList.T2K[A, B]#l, T](t2)(z.tupled)(AList.tuple2[A, B]) + def identity = apply(mkTuple2) + } + final class Apply3[A, B, C](t3: (Initialize[A], Initialize[B], Initialize[C])) { + def apply[T](z: (A, B, C) => T) = Def.app[AList.T3K[A, B, C]#l, 
T](t3)(z.tupled)(AList.tuple3[A, B, C]) + def identity = apply(mkTuple3) + } + final class Apply4[A, B, C, D](t4: (Initialize[A], Initialize[B], Initialize[C], Initialize[D])) { + def apply[T](z: (A, B, C, D) => T) = Def.app[AList.T4K[A, B, C, D]#l, T](t4)(z.tupled)(AList.tuple4[A, B, C, D]) + def identity = apply(mkTuple4) + } + final class Apply5[A, B, C, D, E](t5: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E])) { + def apply[T](z: (A, B, C, D, E) => T) = Def.app[AList.T5K[A, B, C, D, E]#l, T](t5)(z.tupled)(AList.tuple5[A, B, C, D, E]) + def identity = apply(mkTuple5) + } + final class Apply6[A, B, C, D, E, F](t6: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F])) { + def apply[T](z: (A, B, C, D, E, F) => T) = Def.app[AList.T6K[A, B, C, D, E, F]#l, T](t6)(z.tupled)(AList.tuple6[A, B, C, D, E, F]) + def identity = apply(mkTuple6) + } + final class Apply7[A, B, C, D, E, F, G](t7: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G])) { + def apply[T](z: (A, B, C, D, E, F, G) => T) = Def.app[AList.T7K[A, B, C, D, E, F, G]#l, T](t7)(z.tupled)(AList.tuple7[A, B, C, D, E, F, G]) + def identity = apply(mkTuple7) + } + final class Apply8[A, B, C, D, E, F, G, H](t8: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H])) { + def apply[T](z: (A, B, C, D, E, F, G, H) => T) = Def.app[AList.T8K[A, B, C, D, E, F, G, H]#l, T](t8)(z.tupled)(AList.tuple8[A, B, C, D, E, F, G, H]) + def identity = apply(mkTuple8) + } + final class Apply9[A, B, C, D, E, F, G, H, I](t9: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I])) { + def apply[T](z: (A, B, C, D, E, F, G, H, I) => T) = Def.app[AList.T9K[A, B, C, D, E, F, G, H, I]#l, T](t9)(z.tupled)(AList.tuple9[A, B, C, D, E, F, G, H, I]) + def identity = apply(mkTuple9) + } 
+ final class Apply10[A, B, C, D, E, F, G, H, I, J](t10: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J])) { + def apply[T](z: (A, B, C, D, E, F, G, H, I, J) => T) = Def.app[AList.T10K[A, B, C, D, E, F, G, H, I, J]#l, T](t10)(z.tupled)(AList.tuple10[A, B, C, D, E, F, G, H, I, J]) + def identity = apply(mkTuple10) + } + final class Apply11[A, B, C, D, E, F, G, H, I, J, K](t11: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K])) { + def apply[T](z: (A, B, C, D, E, F, G, H, I, J, K) => T) = Def.app[AList.T11K[A, B, C, D, E, F, G, H, I, J, K]#l, T](t11)(z.tupled)(AList.tuple11[A, B, C, D, E, F, G, H, I, J, K]) + def identity = apply(mkTuple11) + } + /* final class Apply12[A,B,C,D,E,F,G,H,I,J,K,L](t12: (Initialize[A], Initialize[B], Initialize[C], Initialize[D], Initialize[E], Initialize[F], Initialize[G], Initialize[H], Initialize[I], Initialize[J], Initialize[K], Initialize[L])) { def apply[T](z: (A,B,C,D,E,F,G,H,I,J,K,L) => T) = Def.app( k12(t12) )( hf12(z) ) def identity = apply(mkTuple12) } @@ -495,59 +478,56 @@ object Scoped def identity = apply(mkTuple15) } */ - private[sbt] def extendScoped(s1: Scoped, ss: Seq[Scoped]): Seq[AttributeKey[_]] = s1.key +: ss.map(_.key) + private[sbt] def extendScoped(s1: Scoped, ss: Seq[Scoped]): Seq[AttributeKey[_]] = s1.key +: ss.map(_.key) } - import Scoped.extendScoped +import Scoped.extendScoped /** Constructs InputKeys, which are associated with input tasks to define a setting.*/ -object InputKey -{ - def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultInputRank): InputKey[T] = - apply( AttributeKey[InputTask[T]](label, description, rank) ) +object InputKey { + def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultInputRank): 
InputKey[T] = + apply(AttributeKey[InputTask[T]](label, description, rank)) - def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): InputKey[T] = - apply(label, description, KeyRanks.DefaultInputRank, extend1, extendN : _*) + def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): InputKey[T] = + apply(label, description, KeyRanks.DefaultInputRank, extend1, extendN: _*) - def apply[T: Manifest](label: String, description: String, rank: Int, extend1: Scoped, extendN: Scoped*): InputKey[T] = - apply( AttributeKey[InputTask[T]](label, description, extendScoped(extend1, extendN), rank) ) + def apply[T: Manifest](label: String, description: String, rank: Int, extend1: Scoped, extendN: Scoped*): InputKey[T] = + apply(AttributeKey[InputTask[T]](label, description, extendScoped(extend1, extendN), rank)) - def apply[T](akey: AttributeKey[InputTask[T]]): InputKey[T] = - new InputKey[T] { val key = akey; def scope = Scope.ThisScope } + def apply[T](akey: AttributeKey[InputTask[T]]): InputKey[T] = + new InputKey[T] { val key = akey; def scope = Scope.ThisScope } } /** Constructs TaskKeys, which are associated with tasks to define a setting.*/ -object TaskKey -{ - def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultTaskRank): TaskKey[T] = - apply( AttributeKey[Task[T]](label, description, rank) ) +object TaskKey { + def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultTaskRank): TaskKey[T] = + apply(AttributeKey[Task[T]](label, description, rank)) - def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): TaskKey[T] = - apply( AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN)) ) + def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): TaskKey[T] = + apply(AttributeKey[Task[T]](label, description, extendScoped(extend1, 
extendN))) - def apply[T: Manifest](label: String, description: String, rank: Int, extend1: Scoped, extendN: Scoped*): TaskKey[T] = - apply( AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN), rank) ) + def apply[T: Manifest](label: String, description: String, rank: Int, extend1: Scoped, extendN: Scoped*): TaskKey[T] = + apply(AttributeKey[Task[T]](label, description, extendScoped(extend1, extendN), rank)) - def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = - new TaskKey[T] { val key = akey; def scope = Scope.ThisScope } + def apply[T](akey: AttributeKey[Task[T]]): TaskKey[T] = + new TaskKey[T] { val key = akey; def scope = Scope.ThisScope } - def local[T: Manifest]: TaskKey[T] = apply[T](AttributeKey.local[Task[T]]) + def local[T: Manifest]: TaskKey[T] = apply[T](AttributeKey.local[Task[T]]) } /** Constructs SettingKeys, which are associated with a value to define a basic setting.*/ -object SettingKey -{ - def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultSettingRank): SettingKey[T] = - apply( AttributeKey[T](label, description, rank) ) +object SettingKey { + def apply[T: Manifest](label: String, description: String = "", rank: Int = KeyRanks.DefaultSettingRank): SettingKey[T] = + apply(AttributeKey[T](label, description, rank)) - def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): SettingKey[T] = - apply( AttributeKey[T](label, description, extendScoped(extend1, extendN)) ) + def apply[T: Manifest](label: String, description: String, extend1: Scoped, extendN: Scoped*): SettingKey[T] = + apply(AttributeKey[T](label, description, extendScoped(extend1, extendN))) - def apply[T: Manifest](label: String, description: String, rank: Int, extend1: Scoped, extendN: Scoped*): SettingKey[T] = - apply( AttributeKey[T](label, description, extendScoped(extend1, extendN), rank) ) + def apply[T: Manifest](label: String, description: String, rank: Int, extend1: 
Scoped, extendN: Scoped*): SettingKey[T] = + apply(AttributeKey[T](label, description, extendScoped(extend1, extendN), rank)) - def apply[T](akey: AttributeKey[T]): SettingKey[T] = - new SettingKey[T] { val key = akey; def scope = Scope.ThisScope } + def apply[T](akey: AttributeKey[T]): SettingKey[T] = + new SettingKey[T] { val key = akey; def scope = Scope.ThisScope } - def local[T: Manifest]: SettingKey[T] = apply[T](AttributeKey.local[T]) + def local[T: Manifest]: SettingKey[T] = apply[T](AttributeKey.local[T]) } diff --git a/main/settings/src/main/scala/sbt/std/InputConvert.scala b/main/settings/src/main/scala/sbt/std/InputConvert.scala index 2f25ee216..e0b2ab23e 100644 --- a/main/settings/src/main/scala/sbt/std/InputConvert.scala +++ b/main/settings/src/main/scala/sbt/std/InputConvert.scala @@ -1,85 +1,76 @@ package sbt package std - import language.experimental.macros - import scala.reflect._ - import reflect.macros._ +import language.experimental.macros +import scala.reflect._ +import reflect.macros._ - import Def.Initialize - import complete.Parser - import appmacro.{Convert, Converted} +import Def.Initialize +import complete.Parser +import appmacro.{ Convert, Converted } - -object InputInitConvert extends Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - if(nme == InputWrapper.WrapInitName) - Converted.Success(in) - else if(nme == InputWrapper.WrapInitTaskName) - Converted.Failure(in.pos, "Internal sbt error: initialize+task wrapper not split") - else - Converted.NotApplicable +object InputInitConvert extends Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + if (nme == InputWrapper.WrapInitName) + Converted.Success(in) + else if (nme == InputWrapper.WrapInitTaskName) + Converted.Failure(in.pos, "Internal sbt error: initialize+task wrapper not split") + else + Converted.NotApplicable } /** Converts an input `Tree` of type `Parser[T]` or `State => Parser[T]` 
into a `Tree` of type `State => Parser[T]`.*/ -object ParserConvert extends Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - { - if(nme == ParserInput.WrapName) - Converted.Success(in) - else if(nme == ParserInput.WrapInitName) - Converted.Failure(in.pos, "Internal sbt error: initialize+parser wrapper not split") - else - Converted.NotApplicable - } +object ParserConvert extends Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + { + if (nme == ParserInput.WrapName) + Converted.Success(in) + else if (nme == ParserInput.WrapInitName) + Converted.Failure(in.pos, "Internal sbt error: initialize+parser wrapper not split") + else + Converted.NotApplicable + } } /** Convert instance for plain `Task`s not within the settings system. */ -object TaskConvert extends Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - if(nme == InputWrapper.WrapTaskName) - Converted.Success(in) - else - Converted.NotApplicable +object TaskConvert extends Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + if (nme == InputWrapper.WrapTaskName) + Converted.Success(in) + else + Converted.NotApplicable } /** Converts an input `Tree` of type `Initialize[T]`, `Initialize[Task[T]]`, or `Task[T]` into a `Tree` of type `Initialize[Task[T]]`.*/ -object FullConvert extends Convert -{ - import InputWrapper._ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - if(nme == WrapInitTaskName || nme == WrapPreviousName) - Converted.Success(in) - else if(nme == WrapInitName) - { - val i = c.Expr[Initialize[T]](in) - val t = c.universe.reify( Def.toITask(i.splice) ).tree - Converted.Success(t) - } - else if(nme == WrapTaskName) - { - val i = c.Expr[Task[T]](in) - val t = c.universe.reify( Def.valueStrict[Task[T]](i.splice) ).tree - Converted.Success(t) - } - else - 
Converted.NotApplicable +object FullConvert extends Convert { + import InputWrapper._ + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + if (nme == WrapInitTaskName || nme == WrapPreviousName) + Converted.Success(in) + else if (nme == WrapInitName) { + val i = c.Expr[Initialize[T]](in) + val t = c.universe.reify(Def.toITask(i.splice)).tree + Converted.Success(t) + } else if (nme == WrapTaskName) { + val i = c.Expr[Task[T]](in) + val t = c.universe.reify(Def.valueStrict[Task[T]](i.splice)).tree + Converted.Success(t) + } else + Converted.NotApplicable } -/** Converts an input `Tree` of type `State => Parser[T]` or `Initialize[State => Parser[T]]` -* into a `Tree` of type `Initialize[State => Parser[T]]`.*/ -object InitParserConvert extends Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - if(nme == ParserInput.WrapName) { - val e = c.Expr[State => Parser[T]](in) - val t = c.universe.reify { Def.valueStrict[State => Parser[T]](e.splice) } - Converted.Success(t.tree) - } - else if(nme == ParserInput.WrapInitName) - Converted.Success(in) - else - Converted.NotApplicable +/** + * Converts an input `Tree` of type `State => Parser[T]` or `Initialize[State => Parser[T]]` + * into a `Tree` of type `Initialize[State => Parser[T]]`. 
+ */ +object InitParserConvert extends Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + if (nme == ParserInput.WrapName) { + val e = c.Expr[State => Parser[T]](in) + val t = c.universe.reify { Def.valueStrict[State => Parser[T]](e.splice) } + Converted.Success(t.tree) + } else if (nme == ParserInput.WrapInitName) + Converted.Success(in) + else + Converted.NotApplicable } diff --git a/main/settings/src/main/scala/sbt/std/InputWrapper.scala b/main/settings/src/main/scala/sbt/std/InputWrapper.scala index 8d8fe6182..8b9160458 100644 --- a/main/settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main/settings/src/main/scala/sbt/std/InputWrapper.scala @@ -1,219 +1,214 @@ package sbt package std - import language.experimental.macros - import scala.reflect._ - import reflect.macros._ - import reflect.internal.annotations.compileTimeOnly +import language.experimental.macros +import scala.reflect._ +import reflect.macros._ +import reflect.internal.annotations.compileTimeOnly - import Def.{Initialize, ScopedKey} - import appmacro.ContextUtil - import complete.Parser +import Def.{ Initialize, ScopedKey } +import appmacro.ContextUtil +import complete.Parser /** Implementation detail. The wrap methods temporarily hold inputs (as a Tree, at compile time) until a task or setting macro processes it. */ -object InputWrapper -{ - /* The names of the wrapper methods should be obscure. +object InputWrapper { + /* The names of the wrapper methods should be obscure. * Wrapper checking is based solely on this name, so it must not conflict with a user method name. 
* The user should never see this method because it is compile-time only and only used internally by the task macro system.*/ - private[std] final val WrapTaskName = "wrapTask_\u2603\u2603" - private[std] final val WrapInitName = "wrapInit_\u2603\u2603" - private[std] final val WrapInitTaskName = "wrapInitTask_\u2603\u2603" - private[std] final val WrapInitInputName = "wrapInitInputTask_\u2603\u2603" - private[std] final val WrapInputName = "wrapInputTask_\u2603\u2603" - private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603" + private[std] final val WrapTaskName = "wrapTask_\u2603\u2603" + private[std] final val WrapInitName = "wrapInit_\u2603\u2603" + private[std] final val WrapInitTaskName = "wrapInitTask_\u2603\u2603" + private[std] final val WrapInitInputName = "wrapInitInputTask_\u2603\u2603" + private[std] final val WrapInputName = "wrapInputTask_\u2603\u2603" + private[std] final val WrapPreviousName = "wrapPrevious_\u2603\u2603" - @compileTimeOnly("`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.") - def wrapTask_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.") + def wrapTask_\u2603\u2603[T](in: Any): T = implDetailError - @compileTimeOnly("`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") - def wrapInit_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") + def wrapInit_\u2603\u2603[T](in: Any): T = implDetailError - @compileTimeOnly("`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task.") - def wrapInitTask_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`value` can only be called on a task within a task definition macro, such as 
:=, +=, ++=, or Def.task.") + def wrapInitTask_\u2603\u2603[T](in: Any): T = implDetailError - @compileTimeOnly("`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") - def wrapInputTask_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") + def wrapInputTask_\u2603\u2603[T](in: Any): T = implDetailError - @compileTimeOnly("`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") - def wrapInitInputTask_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`value` can only be called on an input task within a task definition macro, such as := or Def.inputTask.") + def wrapInitInputTask_\u2603\u2603[T](in: Any): T = implDetailError - @compileTimeOnly("`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.") - def wrapPrevious_\u2603\u2603[T](in: Any): T = implDetailError + @compileTimeOnly("`previous` can only be called on a task within a task or input task definition macro, such as :=, +=, ++=, Def.task, or Def.inputTask.") + def wrapPrevious_\u2603\u2603[T](in: Any): T = implDetailError - private[this] def implDetailError = error("This method is an implementation detail and should not be referenced.") + private[this] def implDetailError = error("This method is an implementation detail and should not be referenced.") - private[std] def wrapTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - wrapImpl[T,InputWrapper.type](c, InputWrapper, WrapTaskName)(ts, pos) - private[std] def wrapInit[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - wrapImpl[T,InputWrapper.type](c, InputWrapper, WrapInitName)(ts, pos) - private[std] def wrapInitTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: 
c.Position): c.Expr[T] = - wrapImpl[T,InputWrapper.type](c, InputWrapper, WrapInitTaskName)(ts, pos) + private[std] def wrapTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapTaskName)(ts, pos) + private[std] def wrapInit[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitName)(ts, pos) + private[std] def wrapInitTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitTaskName)(ts, pos) - private[std] def wrapInitInputTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - wrapImpl[T,InputWrapper.type](c, InputWrapper, WrapInitInputName)(ts, pos) - private[std] def wrapInputTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - wrapImpl[T,InputWrapper.type](c, InputWrapper, WrapInputName)(ts, pos) + private[std] def wrapInitInputTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInitInputName)(ts, pos) + private[std] def wrapInputTask[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + wrapImpl[T, InputWrapper.type](c, InputWrapper, WrapInputName)(ts, pos) - private[std] def wrapPrevious[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[Option[T]] = - wrapImpl[Option[T],InputWrapper.type](c, InputWrapper, WrapPreviousName)(ts, pos) + private[std] def wrapPrevious[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[Option[T]] = + wrapImpl[Option[T], InputWrapper.type](c, InputWrapper, WrapPreviousName)(ts, pos) - // TODO 2.11 Remove this after dropping 2.10.x support. - private object HasCompat { val compat = ??? }; import HasCompat._ + // TODO 2.11 Remove this after dropping 2.10.x support. 
+ private object HasCompat { val compat = ??? }; import HasCompat._ - /** Wraps an arbitrary Tree in a call to the `.` method of this module for later processing by an enclosing macro. - * The resulting Tree is the manually constructed version of: - * - * `c.universe.reify { .[T](ts.splice) }` - */ - def wrapImpl[T: c.WeakTypeTag, S <: AnyRef with Singleton](c: Context, s: S, wrapName: String)(ts: c.Expr[Any], pos: c.Position)(implicit it: c.TypeTag[s.type]): c.Expr[T] = - { - import c.universe.{Apply=>ApplyTree,_} - import compat._ - val util = new ContextUtil[c.type](c) - val iw = util.singleton(s) - val tpe = c.weakTypeOf[T] - val nme = newTermName(wrapName).encoded - val sel = Select(Ident(iw), nme) - sel.setPos(pos) // need to set the position on Select, because that is where the compileTimeOnly check looks - val tree = ApplyTree(TypeApply(sel, TypeTree(tpe) :: Nil), ts.tree :: Nil) - tree.setPos(ts.tree.pos) - // JZ: I'm not sure why we need to do this. Presumably a caller is wrapping this tree in a - // typed tree *before* handing the whole thing back to the macro engine. One must never splice - // untyped trees under typed trees, as the type checker doesn't descend if `tree.tpe == null`. - // - // #1031 The previous attempt to fix this just set the type on `tree`, which worked in cases when the - // call to `.value` was inside a the task macro and eliminated before the end of the typer phase. - // But, if a "naked" call to `.value` left the typer, the superaccessors phase would freak out when - // if hit the untyped trees, before we could get to refchecks and the desired @compileTimeOnly warning. - val typedTree = c.typeCheck(tree) - c.Expr[T](typedTree) - } + /** + * Wraps an arbitrary Tree in a call to the `.` method of this module for later processing by an enclosing macro. 
+ * The resulting Tree is the manually constructed version of: + * + * `c.universe.reify { .[T](ts.splice) }` + */ + def wrapImpl[T: c.WeakTypeTag, S <: AnyRef with Singleton](c: Context, s: S, wrapName: String)(ts: c.Expr[Any], pos: c.Position)(implicit it: c.TypeTag[s.type]): c.Expr[T] = + { + import c.universe.{ Apply => ApplyTree, _ } + import compat._ + val util = new ContextUtil[c.type](c) + val iw = util.singleton(s) + val tpe = c.weakTypeOf[T] + val nme = newTermName(wrapName).encoded + val sel = Select(Ident(iw), nme) + sel.setPos(pos) // need to set the position on Select, because that is where the compileTimeOnly check looks + val tree = ApplyTree(TypeApply(sel, TypeTree(tpe) :: Nil), ts.tree :: Nil) + tree.setPos(ts.tree.pos) + // JZ: I'm not sure why we need to do this. Presumably a caller is wrapping this tree in a + // typed tree *before* handing the whole thing back to the macro engine. One must never splice + // untyped trees under typed trees, as the type checker doesn't descend if `tree.tpe == null`. + // + // #1031 The previous attempt to fix this just set the type on `tree`, which worked in cases when the + // call to `.value` was inside a the task macro and eliminated before the end of the typer phase. + // But, if a "naked" call to `.value` left the typer, the superaccessors phase would freak out when + // if hit the untyped trees, before we could get to refchecks and the desired @compileTimeOnly warning. 
+ val typedTree = c.typeCheck(tree) + c.Expr[T](typedTree) + } - def valueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = - ContextUtil.selectMacroImpl[T](c) { (ts,pos) => - val tpe = ts.tree.tpe - if(tpe <:< c.weakTypeOf[Initialize[Task[T]]]) - InputWrapper.wrapInitTask[T](c)(ts,pos) - else if(tpe <:< c.weakTypeOf[Initialize[T]]) - InputWrapper.wrapInit[T](c)(ts,pos) - else if(tpe <:< c.weakTypeOf[Task[T]]) - InputWrapper.wrapTask[T](c)(ts,pos) - else if(tpe <:< c.weakTypeOf[InputTask[T]]) - InputWrapper.wrapInputTask[T](c)(ts,pos) - else if(tpe <:< c.weakTypeOf[Initialize[InputTask[T]]]) - InputWrapper.wrapInitInputTask[T](c)(ts,pos) - else - c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.widen}") - } - def taskValueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[Task[T]] = - ContextUtil.selectMacroImpl[Task[T]](c) { (ts, pos) => - val tpe = ts.tree.tpe - if(tpe <:< c.weakTypeOf[Initialize[Task[T]]]) - InputWrapper.wrapInit[Task[T]](c)(ts,pos) - else - c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.widen}") - } - /** Translates .previous(format) to Previous.runtime()(format).value*/ - def previousMacroImpl[T: c.WeakTypeTag](c: Context)(format: c.Expr[sbinary.Format[T]]): c.Expr[Option[T]] = - { - import c.universe._ - c.macroApplication match { - case a @ Apply(Select(Apply(_, t :: Nil), tp), fmt) => - if(t.tpe <:< c.weakTypeOf[TaskKey[T]]) { - val tsTyped = c.Expr[TaskKey[T]](t) - val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) } - wrapPrevious[T](c)(newTree, a.pos) - } - else - c.abort(a.pos, s"Internal sbt error. 
Unexpected type ${t.tpe.widen}") - case x => ContextUtil.unexpectedTree(x) - } - } + def valueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = + ContextUtil.selectMacroImpl[T](c) { (ts, pos) => + val tpe = ts.tree.tpe + if (tpe <:< c.weakTypeOf[Initialize[Task[T]]]) + InputWrapper.wrapInitTask[T](c)(ts, pos) + else if (tpe <:< c.weakTypeOf[Initialize[T]]) + InputWrapper.wrapInit[T](c)(ts, pos) + else if (tpe <:< c.weakTypeOf[Task[T]]) + InputWrapper.wrapTask[T](c)(ts, pos) + else if (tpe <:< c.weakTypeOf[InputTask[T]]) + InputWrapper.wrapInputTask[T](c)(ts, pos) + else if (tpe <:< c.weakTypeOf[Initialize[InputTask[T]]]) + InputWrapper.wrapInitInputTask[T](c)(ts, pos) + else + c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.widen}") + } + def taskValueMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[Task[T]] = + ContextUtil.selectMacroImpl[Task[T]](c) { (ts, pos) => + val tpe = ts.tree.tpe + if (tpe <:< c.weakTypeOf[Initialize[Task[T]]]) + InputWrapper.wrapInit[Task[T]](c)(ts, pos) + else + c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.widen}") + } + /** Translates .previous(format) to Previous.runtime()(format).value*/ + def previousMacroImpl[T: c.WeakTypeTag](c: Context)(format: c.Expr[sbinary.Format[T]]): c.Expr[Option[T]] = + { + import c.universe._ + c.macroApplication match { + case a @ Apply(Select(Apply(_, t :: Nil), tp), fmt) => + if (t.tpe <:< c.weakTypeOf[TaskKey[T]]) { + val tsTyped = c.Expr[TaskKey[T]](t) + val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) } + wrapPrevious[T](c)(newTree, a.pos) + } else + c.abort(a.pos, s"Internal sbt error. 
Unexpected type ${t.tpe.widen}") + case x => ContextUtil.unexpectedTree(x) + } + } } sealed abstract class MacroTaskValue[T] { - @compileTimeOnly("`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.") - def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T] + @compileTimeOnly("`taskValue` can only be used within a setting macro, such as :=, +=, ++=, or Def.setting.") + def taskValue: Task[T] = macro InputWrapper.taskValueMacroImpl[T] } sealed abstract class MacroValue[T] { - @compileTimeOnly("`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") - def value: T = macro InputWrapper.valueMacroImpl[T] + @compileTimeOnly("`value` can only be used within a task or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.") + def value: T = macro InputWrapper.valueMacroImpl[T] } sealed abstract class ParserInput[T] { - @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def parsed: T = macro ParserInput.parsedMacroImpl[T] + @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") + def parsed: T = macro ParserInput.parsedMacroImpl[T] } sealed abstract class InputEvaluated[T] { - @compileTimeOnly("`evaluated` can only be used within an input task macro, such as := or Def.inputTask.") - def evaluated: T = macro InputWrapper.valueMacroImpl[T] + @compileTimeOnly("`evaluated` can only be used within an input task macro, such as := or Def.inputTask.") + def evaluated: T = macro InputWrapper.valueMacroImpl[T] } sealed abstract class ParserInputTask[T] { - @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def parsed: Task[T] = macro ParserInput.parsedInputMacroImpl[T] + @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") + def parsed: Task[T] = macro 
ParserInput.parsedInputMacroImpl[T] } sealed abstract class MacroPrevious[T] { - @compileTimeOnly("`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.") - def previous(implicit format: sbinary.Format[T]): Option[T] = macro InputWrapper.previousMacroImpl[T] + @compileTimeOnly("`previous` can only be used within a task macro, such as :=, +=, ++=, or Def.task.") + def previous(implicit format: sbinary.Format[T]): Option[T] = macro InputWrapper.previousMacroImpl[T] } /** Implementation detail. The wrap method temporarily holds the input parser (as a Tree, at compile time) until the input task macro processes it. */ object ParserInput { - /* The name of the wrapper method should be obscure. + /* The name of the wrapper method should be obscure. * Wrapper checking is based solely on this name, so it must not conflict with a user method name. * The user should never see this method because it is compile-time only and only used internally by the task macros.*/ - private[std] val WrapName = "parser_\u2603\u2603" - private[std] val WrapInitName = "initParser_\u2603\u2603" + private[std] val WrapName = "parser_\u2603\u2603" + private[std] val WrapInitName = "initParser_\u2603\u2603" - @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def parser_\u2603\u2603[T](i: Any): T = error("This method is an implementation detail and should not be referenced.") + @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") + def parser_\u2603\u2603[T](i: Any): T = error("This method is an implementation detail and should not be referenced.") - @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or Def.inputTask.") - def initParser_\u2603\u2603[T](i: Any): T = error("This method is an implementation detail and should not be referenced.") + @compileTimeOnly("`parsed` can only be used within an input task macro, such as := or 
Def.inputTask.") + def initParser_\u2603\u2603[T](i: Any): T = error("This method is an implementation detail and should not be referenced.") - private[std] def wrap[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - InputWrapper.wrapImpl[T,ParserInput.type](c, ParserInput, WrapName)(ts, pos) - private[std] def wrapInit[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = - InputWrapper.wrapImpl[T,ParserInput.type](c, ParserInput, WrapInitName)(ts, pos) + private[std] def wrap[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapName)(ts, pos) + private[std] def wrapInit[T: c.WeakTypeTag](c: Context)(ts: c.Expr[Any], pos: c.Position): c.Expr[T] = + InputWrapper.wrapImpl[T, ParserInput.type](c, ParserInput, WrapInitName)(ts, pos) - private[std] def inputParser[T: c.WeakTypeTag](c: Context)(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] = c.universe.reify(t.splice.parser) + private[std] def inputParser[T: c.WeakTypeTag](c: Context)(t: c.Expr[InputTask[T]]): c.Expr[State => Parser[Task[T]]] = c.universe.reify(t.splice.parser) - def parsedInputMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[Task[T]] = - ContextUtil.selectMacroImpl[Task[T]](c) { (p,pos) => - import c.universe.reify - val tpe = p.tree.tpe - if(tpe <:< c.weakTypeOf[InputTask[T]]) { - val e = c.Expr[InputTask[T]](p.tree) - wrap[Task[T]](c)( inputParser(c)(e), pos ) - } - else if(tpe <:< c.weakTypeOf[Initialize[InputTask[T]]]) { - val e = c.Expr[Initialize[InputTask[T]]](p.tree) - wrapInit[Task[T]](c)( reify { Def.toIParser(e.splice) }, pos ) - } - else - c.abort(pos, s"Internal sbt error. 
Unexpected type ${tpe.normalize} in parsedInputMacroImpl.") - } + def parsedInputMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[Task[T]] = + ContextUtil.selectMacroImpl[Task[T]](c) { (p, pos) => + import c.universe.reify + val tpe = p.tree.tpe + if (tpe <:< c.weakTypeOf[InputTask[T]]) { + val e = c.Expr[InputTask[T]](p.tree) + wrap[Task[T]](c)(inputParser(c)(e), pos) + } else if (tpe <:< c.weakTypeOf[Initialize[InputTask[T]]]) { + val e = c.Expr[Initialize[InputTask[T]]](p.tree) + wrapInit[Task[T]](c)(reify { Def.toIParser(e.splice) }, pos) + } else + c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.normalize} in parsedInputMacroImpl.") + } - /** Implements `Parser[T].parsed` by wrapping the Parser with the ParserInput wrapper.*/ - def parsedMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = - ContextUtil.selectMacroImpl[T](c) { (p,pos) => - import c.universe.reify - val tpe = p.tree.tpe - if(tpe <:< c.weakTypeOf[Parser[T]]) { - val e = c.Expr[Parser[T]](p.tree) - wrap[T](c)( reify { Def.toSParser(e.splice) }, pos) - } - else if(tpe <:< c.weakTypeOf[State => Parser[T]]) - wrap[T](c)( p, pos) - else if(tpe <:< c.weakTypeOf[Initialize[Parser[T]]]) { - val e = c.Expr[Initialize[Parser[T]]](p.tree) - val es = reify { Def.toISParser(e.splice) } - wrapInit[T](c)(es, pos) - } - else if(tpe <:< c.weakTypeOf[Initialize[State => Parser[T]]]) - wrapInit[T](c)(p,pos) - else - c.abort(pos, s"Internal sbt error. 
Unexpected type ${tpe.normalize} in parsedMacroImpl") - } + /** Implements `Parser[T].parsed` by wrapping the Parser with the ParserInput wrapper.*/ + def parsedMacroImpl[T: c.WeakTypeTag](c: Context): c.Expr[T] = + ContextUtil.selectMacroImpl[T](c) { (p, pos) => + import c.universe.reify + val tpe = p.tree.tpe + if (tpe <:< c.weakTypeOf[Parser[T]]) { + val e = c.Expr[Parser[T]](p.tree) + wrap[T](c)(reify { Def.toSParser(e.splice) }, pos) + } else if (tpe <:< c.weakTypeOf[State => Parser[T]]) + wrap[T](c)(p, pos) + else if (tpe <:< c.weakTypeOf[Initialize[Parser[T]]]) { + val e = c.Expr[Initialize[Parser[T]]](p.tree) + val es = reify { Def.toISParser(e.splice) } + wrapInit[T](c)(es, pos) + } else if (tpe <:< c.weakTypeOf[Initialize[State => Parser[T]]]) + wrapInit[T](c)(p, pos) + else + c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.normalize} in parsedMacroImpl") + } } diff --git a/main/settings/src/main/scala/sbt/std/KeyMacro.scala b/main/settings/src/main/scala/sbt/std/KeyMacro.scala index c31169b07..6cebe7d5c 100644 --- a/main/settings/src/main/scala/sbt/std/KeyMacro.scala +++ b/main/settings/src/main/scala/sbt/std/KeyMacro.scala @@ -1,52 +1,51 @@ package sbt package std - import language.experimental.macros - import scala.reflect._ - import reflect.macros._ +import language.experimental.macros +import scala.reflect._ +import reflect.macros._ -private[sbt] object KeyMacro -{ - def settingKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[SettingKey[T]] = - keyImpl[T, SettingKey[T]](c) { (name, mf) => - c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice) } - } - def taskKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[TaskKey[T]] = - keyImpl[T, TaskKey[T]](c) { (name, mf) => - c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) } - } - def inputKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[InputKey[T]] = - keyImpl[T, 
InputKey[T]](c) { (name, mf) => - c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) } - } +private[sbt] object KeyMacro { + def settingKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[SettingKey[T]] = + keyImpl[T, SettingKey[T]](c) { (name, mf) => + c.universe.reify { SettingKey[T](name.splice, description.splice)(mf.splice) } + } + def taskKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[TaskKey[T]] = + keyImpl[T, TaskKey[T]](c) { (name, mf) => + c.universe.reify { TaskKey[T](name.splice, description.splice)(mf.splice) } + } + def inputKeyImpl[T: c.WeakTypeTag](c: Context)(description: c.Expr[String]): c.Expr[InputKey[T]] = + keyImpl[T, InputKey[T]](c) { (name, mf) => + c.universe.reify { InputKey[T](name.splice, description.splice)(mf.splice) } + } - def keyImpl[T: c.WeakTypeTag, S: c.WeakTypeTag](c: Context)(f: (c.Expr[String], c.Expr[Manifest[T]]) => c.Expr[S]): c.Expr[S] = - { - import c.universe.{Apply=>ApplyTree,_} - val enclosingValName = definingValName(c, methodName => s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""") - val name = c.Expr[String]( Literal(Constant(enclosingValName)) ) - val mf = c.Expr[Manifest[T]](c.inferImplicitValue( weakTypeOf[Manifest[T]] ) ) - f(name, mf) - } - def definingValName(c: Context, invalidEnclosingTree: String => String): String = - { - import c.universe.{Apply=>ApplyTree,_} - val methodName = c.macroApplication.symbol.name - def processName(n: Name): String = n.decoded.trim // trim is not strictly correct, but macros don't expose the API necessary - def enclosingVal(trees: List[c.Tree]): String = - { - trees match { - case vd @ ValDef(_, name, _, _) :: ts => processName(name) - case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs) - // lazy val x: X = has this form for some reason (only when the explicit type is present, though) - case Block(_, _) :: DefDef(mods, name, _, 
_, _, _) :: xs if mods.hasFlag(Flag.LAZY) => processName(name) - case _ => - c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decoded)) - "" - } - } - enclosingVal(enclosingTrees(c).toList) - } - def enclosingTrees(c: Context): Seq[c.Tree] = - c.asInstanceOf[reflect.macros.runtime.Context].callsiteTyper.context.enclosingContextChain.map(_.tree.asInstanceOf[c.Tree]) + def keyImpl[T: c.WeakTypeTag, S: c.WeakTypeTag](c: Context)(f: (c.Expr[String], c.Expr[Manifest[T]]) => c.Expr[S]): c.Expr[S] = + { + import c.universe.{ Apply => ApplyTree, _ } + val enclosingValName = definingValName(c, methodName => s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""") + val name = c.Expr[String](Literal(Constant(enclosingValName))) + val mf = c.Expr[Manifest[T]](c.inferImplicitValue(weakTypeOf[Manifest[T]])) + f(name, mf) + } + def definingValName(c: Context, invalidEnclosingTree: String => String): String = + { + import c.universe.{ Apply => ApplyTree, _ } + val methodName = c.macroApplication.symbol.name + def processName(n: Name): String = n.decoded.trim // trim is not strictly correct, but macros don't expose the API necessary + def enclosingVal(trees: List[c.Tree]): String = + { + trees match { + case vd @ ValDef(_, name, _, _) :: ts => processName(name) + case (_: ApplyTree | _: Select | _: TypeApply) :: xs => enclosingVal(xs) + // lazy val x: X = has this form for some reason (only when the explicit type is present, though) + case Block(_, _) :: DefDef(mods, name, _, _, _, _) :: xs if mods.hasFlag(Flag.LAZY) => processName(name) + case _ => + c.error(c.enclosingPosition, invalidEnclosingTree(methodName.decoded)) + "" + } + } + enclosingVal(enclosingTrees(c).toList) + } + def enclosingTrees(c: Context): Seq[c.Tree] = + c.asInstanceOf[reflect.macros.runtime.Context].callsiteTyper.context.enclosingContextChain.map(_.tree.asInstanceOf[c.Tree]) } \ No newline at end of file diff --git 
a/main/settings/src/main/scala/sbt/std/SettingMacro.scala b/main/settings/src/main/scala/sbt/std/SettingMacro.scala index 813cea7a7..d720adbf1 100644 --- a/main/settings/src/main/scala/sbt/std/SettingMacro.scala +++ b/main/settings/src/main/scala/sbt/std/SettingMacro.scala @@ -1,45 +1,40 @@ package sbt package std - import Def.{Initialize,Setting} - import Types.{idFun,Id} - import appmacro.{Convert, Converted, Instance, MixedBuilder, MonadInstance} +import Def.{ Initialize, Setting } +import Types.{ idFun, Id } +import appmacro.{ Convert, Converted, Instance, MixedBuilder, MonadInstance } -object InitializeInstance extends MonadInstance -{ - type M[x] = Initialize[x] - def app[K[L[x]], Z](in: K[Initialize], f: K[Id] => Z)(implicit a: AList[K]): Initialize[Z] = Def.app[K,Z](in)(f)(a) - def map[S,T](in: Initialize[S], f: S => T): Initialize[T] = Def.map(in)(f) - def flatten[T](in: Initialize[Initialize[T]]): Initialize[T] = Def.bind(in)(idFun[Initialize[T]]) - def pure[T](t: () => T): Initialize[T] = Def.pure(t) +object InitializeInstance extends MonadInstance { + type M[x] = Initialize[x] + def app[K[L[x]], Z](in: K[Initialize], f: K[Id] => Z)(implicit a: AList[K]): Initialize[Z] = Def.app[K, Z](in)(f)(a) + def map[S, T](in: Initialize[S], f: S => T): Initialize[T] = Def.map(in)(f) + def flatten[T](in: Initialize[Initialize[T]]): Initialize[T] = Def.bind(in)(idFun[Initialize[T]]) + def pure[T](t: () => T): Initialize[T] = Def.pure(t) } - import language.experimental.macros - import scala.reflect._ - import reflect.macros._ +import language.experimental.macros +import scala.reflect._ +import reflect.macros._ -object InitializeConvert extends Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = - if(nme == InputWrapper.WrapInitName) - { - val i = c.Expr[Initialize[T]](in) - val t = c.universe.reify( i.splice ).tree - Converted.Success(t) - } - else if(nme == InputWrapper.WrapTaskName || nme == 
InputWrapper.WrapInitTaskName) - Converted.Failure(in.pos, "A setting cannot depend on a task") - else if(nme == InputWrapper.WrapPreviousName) - Converted.Failure(in.pos, "A setting cannot depend on a task's previous value.") - else - Converted.NotApplicable +object InitializeConvert extends Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] = + if (nme == InputWrapper.WrapInitName) { + val i = c.Expr[Initialize[T]](in) + val t = c.universe.reify(i.splice).tree + Converted.Success(t) + } else if (nme == InputWrapper.WrapTaskName || nme == InputWrapper.WrapInitTaskName) + Converted.Failure(in.pos, "A setting cannot depend on a task") + else if (nme == InputWrapper.WrapPreviousName) + Converted.Failure(in.pos, "A setting cannot depend on a task's previous value.") + else + Converted.NotApplicable } -object SettingMacro -{ - def settingMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[T]] = - Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) +object SettingMacro { + def settingMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[T]] = + Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) - def settingDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] = - Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) + def settingDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] = + Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) } diff --git a/main/settings/src/main/scala/sbt/std/TaskMacro.scala b/main/settings/src/main/scala/sbt/std/TaskMacro.scala index a15f8f5d6..51f6ed028 100644 --- 
a/main/settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main/settings/src/main/scala/sbt/std/TaskMacro.scala @@ -1,379 +1,369 @@ package sbt package std - import Def.{Initialize,Setting} - import Types.{const, idFun,Id} - import TaskExtra.allM - import appmacro.{ContextUtil, Convert, Converted, Instance, MixedBuilder, MonadInstance} - import Instance.Transform - import complete.{DefaultParsers,Parser} +import Def.{ Initialize, Setting } +import Types.{ const, idFun, Id } +import TaskExtra.allM +import appmacro.{ ContextUtil, Convert, Converted, Instance, MixedBuilder, MonadInstance } +import Instance.Transform +import complete.{ DefaultParsers, Parser } - import language.experimental.macros - import scala.reflect._ - import reflect.macros._ - import reflect.internal.annotations.compileTimeOnly +import language.experimental.macros +import scala.reflect._ +import reflect.macros._ +import reflect.internal.annotations.compileTimeOnly /** Instance for the monad/applicative functor for plain Tasks. 
*/ -object TaskInstance extends MonadInstance -{ - import TaskExtra._ +object TaskInstance extends MonadInstance { + import TaskExtra._ - final type M[x] = Task[x] - def app[K[L[x]], Z](in: K[Task], f: K[Id] => Z)(implicit a: AList[K]): Task[Z] = Task(Info(), new Mapped[Z,K](in, f compose allM, a)) - def map[S,T](in: Task[S], f: S => T): Task[T] = in map f - def flatten[T](in: Task[Task[T]]): Task[T] = in flatMap idFun[Task[T]] - def pure[T](t: () => T): Task[T] = toTask(t) + final type M[x] = Task[x] + def app[K[L[x]], Z](in: K[Task], f: K[Id] => Z)(implicit a: AList[K]): Task[Z] = Task(Info(), new Mapped[Z, K](in, f compose allM, a)) + def map[S, T](in: Task[S], f: S => T): Task[T] = in map f + def flatten[T](in: Task[Task[T]]): Task[T] = in flatMap idFun[Task[T]] + def pure[T](t: () => T): Task[T] = toTask(t) } -object ParserInstance extends Instance -{ - import sbt.Classes.Applicative - private[this] implicit val parserApplicative: Applicative[M] = new Applicative[M]{ - def apply[S,T](f: M[S => T], v: M[S]): M[T] = s => (f(s) ~ v(s)) map { case (a,b) => a(b) } - def pure[S](s: => S) = const(Parser.success(s)) - def map[S, T](f: S => T, v: M[S]) = s => v(s).map(f) - } +object ParserInstance extends Instance { + import sbt.Classes.Applicative + private[this] implicit val parserApplicative: Applicative[M] = new Applicative[M] { + def apply[S, T](f: M[S => T], v: M[S]): M[T] = s => (f(s) ~ v(s)) map { case (a, b) => a(b) } + def pure[S](s: => S) = const(Parser.success(s)) + def map[S, T](f: S => T, v: M[S]) = s => v(s).map(f) + } - final type M[x] = State => Parser[x] - def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z] = a.apply(in,f) - def map[S,T](in: M[S], f: S => T): M[T] = s => in(s) map f - def pure[T](t: () => T): State => Parser[T] = const(DefaultParsers.success(t())) + final type M[x] = State => Parser[x] + def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z] = a.apply(in, f) + def map[S, T](in: M[S], f: S => T): 
M[T] = s => in(s) map f + def pure[T](t: () => T): State => Parser[T] = const(DefaultParsers.success(t())) } /** Composes the Task and Initialize Instances to provide an Instance for [T] Initialize[Task[T]].*/ -object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance) with MonadInstance -{ - type SS = sbt.Settings[Scope] - val settingsData = TaskKey[SS]("settings-data", "Provides access to the project data for the build.", KeyRanks.DTask) +object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstance, TaskInstance) with MonadInstance { + type SS = sbt.Settings[Scope] + val settingsData = TaskKey[SS]("settings-data", "Provides access to the project data for the build.", KeyRanks.DTask) - def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = - { - import Scoped._ - (in,settingsData, Def.capturedTransformations) apply{ - (a: Task[Initialize[Task[T]]], data: Task[SS], f) => - import TaskExtra.multT2Task - (a, data) flatMap { case (a,d) => f(a) evaluate d } - } - } - def flattenFun[S,T](in: Initialize[Task[ S => Initialize[Task[T]] ]]): Initialize[S => Task[T]] = - { - import Scoped._ - (in,settingsData, Def.capturedTransformations) apply{ - (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => (s: S) => - import TaskExtra.multT2Task - (a, data) flatMap { case (af,d) => f(af(s)) evaluate d } - } - } + def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = + { + import Scoped._ + (in, settingsData, Def.capturedTransformations) apply { + (a: Task[Initialize[Task[T]]], data: Task[SS], f) => + import TaskExtra.multT2Task + (a, data) flatMap { case (a, d) => f(a) evaluate d } + } + } + def flattenFun[S, T](in: Initialize[Task[S => Initialize[Task[T]]]]): Initialize[S => Task[T]] = + { + import Scoped._ + (in, settingsData, Def.capturedTransformations) apply { + (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => + (s: S) => + import TaskExtra.multT2Task 
+ (a, data) flatMap { case (af, d) => f(af(s)) evaluate d } + } + } } -object TaskMacro -{ - final val AssignInitName = "set" - final val Append1InitName = "append1" - final val AppendNInitName = "appendN" - final val TransformInitName = "transform" - final val InputTaskCreateDynName = "createDyn" - final val InputTaskCreateFreeName = "createFree" +object TaskMacro { + final val AssignInitName = "set" + final val Append1InitName = "append1" + final val AppendNInitName = "appendN" + final val TransformInitName = "transform" + final val InputTaskCreateDynName = "createDyn" + final val InputTaskCreateFreeName = "createFree" - def taskMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] = - Instance.contImpl[T,Id](c, FullInstance, FullConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) + def taskMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[Task[T]]] = + Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) - def taskDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] = - Instance.contImpl[T,Id](c, FullInstance, FullConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) + def taskDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[Task[T]]] = + Instance.contImpl[T, Id](c, FullInstance, FullConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) - /** Implementation of := macro for settings. */ - def settingAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[T]] = - { - val init = SettingMacro.settingMacroImpl[T](c)(v) - val assign = transformMacroImpl(c)( init.tree )( AssignInitName ) - c.Expr[Setting[T]]( assign ) - } - /** Implementation of := macro for tasks. 
*/ - def taskAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = - { - val init = taskMacroImpl[T](c)(v) - val assign = transformMacroImpl(c)( init.tree )( AssignInitName ) - c.Expr[Setting[Task[T]]]( assign ) - } + /** Implementation of := macro for settings. */ + def settingAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[T](c)(v) + val assign = transformMacroImpl(c)(init.tree)(AssignInitName) + c.Expr[Setting[T]](assign) + } + /** Implementation of := macro for tasks. */ + def taskAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[T](c)(v) + val assign = transformMacroImpl(c)(init.tree)(AssignInitName) + c.Expr[Setting[Task[T]]](assign) + } - /* Implementations of <<= macro variations for tasks and settings. These just get the source position of the call site.*/ + /* Implementations of <<= macro variations for tasks and settings. 
These just get the source position of the call site.*/ - def itaskAssignPosition[T: c.WeakTypeTag](c: Context)(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] = - settingAssignPosition(c)(app) - def taskAssignPositionT[T: c.WeakTypeTag](c: Context)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] = - itaskAssignPosition(c)( c.universe.reify { Def.valueStrict(app.splice) }) - def taskAssignPositionPure[T: c.WeakTypeTag](c: Context)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] = - taskAssignPositionT(c)( c.universe.reify { TaskExtra.constant(app.splice) }) + def itaskAssignPosition[T: c.WeakTypeTag](c: Context)(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] = + settingAssignPosition(c)(app) + def taskAssignPositionT[T: c.WeakTypeTag](c: Context)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] = + itaskAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) }) + def taskAssignPositionPure[T: c.WeakTypeTag](c: Context)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] = + taskAssignPositionT(c)(c.universe.reify { TaskExtra.constant(app.splice) }) - def taskTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] = - c.Expr[Setting[Task[S]]]( transformMacroImpl(c)( f.tree )( TransformInitName ) ) - def settingTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] = - c.Expr[Setting[S]]( transformMacroImpl(c)( f.tree )( TransformInitName ) ) - def itaskTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] = - c.Expr[Setting[S]]( transformMacroImpl(c)( f.tree )( TransformInitName ) ) + def taskTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] = + c.Expr[Setting[Task[S]]](transformMacroImpl(c)(f.tree)(TransformInitName)) + def settingTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] = + c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName)) + def 
itaskTransformPosition[S: c.WeakTypeTag](c: Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] = + c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName)) + def taskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(vs: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[Task[S]]] = + c.Expr[Setting[Task[S]]](appendMacroImpl(c)(vs.tree, a.tree)(AppendNInitName)) - def taskAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(vs: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[Task[S]]] = - c.Expr[Setting[Task[S]]]( appendMacroImpl(c)( vs.tree, a.tree )( AppendNInitName ) ) + def settingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(vs: c.Expr[Initialize[V]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[S]] = + c.Expr[Setting[S]](appendMacroImpl(c)(vs.tree, a.tree)(AppendNInitName)) - def settingAppendNPosition[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(vs: c.Expr[Initialize[V]])(a: c.Expr[Append.Values[S, V]]): c.Expr[Setting[S]] = - c.Expr[Setting[S]]( appendMacroImpl(c)( vs.tree, a.tree )( AppendNInitName ) ) + def taskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(v: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[Task[S]]] = + c.Expr[Setting[Task[S]]](appendMacroImpl(c)(v.tree, a.tree)(Append1InitName)) - def taskAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(v: c.Expr[Initialize[Task[V]]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[Task[S]]] = - c.Expr[Setting[Task[S]]]( appendMacroImpl(c)( v.tree, a.tree )( Append1InitName ) ) + def settingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(v: c.Expr[Initialize[V]])(a: c.Expr[Append.Value[S, V]]): c.Expr[Setting[S]] = + c.Expr[Setting[S]](appendMacroImpl(c)(v.tree, a.tree)(Append1InitName)) - def settingAppend1Position[S: c.WeakTypeTag, V: c.WeakTypeTag](c: Context)(v: c.Expr[Initialize[V]])(a: c.Expr[Append.Value[S, V]]): 
c.Expr[Setting[S]] = - c.Expr[Setting[S]]( appendMacroImpl(c)( v.tree, a.tree )( Append1InitName ) ) + def settingAssignPure[T: c.WeakTypeTag](c: Context)(app: c.Expr[T]): c.Expr[Setting[T]] = + settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) }) + def settingAssignPosition[T: c.WeakTypeTag](c: Context)(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] = + c.Expr[Setting[T]](transformMacroImpl(c)(app.tree)(AssignInitName)) - def settingAssignPure[T: c.WeakTypeTag](c: Context)(app: c.Expr[T]): c.Expr[Setting[T]] = - settingAssignPosition(c)( c.universe.reify { Def.valueStrict(app.splice) }) - def settingAssignPosition[T: c.WeakTypeTag](c: Context)(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] = - c.Expr[Setting[T]]( transformMacroImpl(c)( app.tree )( AssignInitName ) ) + /** Implementation of := macro for tasks. */ + def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = + { + val init = inputTaskMacroImpl[T](c)(v) + val assign = transformMacroImpl(c)(init.tree)(AssignInitName) + c.Expr[Setting[InputTask[T]]](assign) + } + /** Implementation of += macro for tasks. */ + def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[U](c)(v) + val assign = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName) + c.Expr[Setting[Task[T]]](assign) + } + /** Implementation of += macro for settings. */ + def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[U](c)(v) + val assign = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName) + c.Expr[Setting[T]](assign) + } + /** Implementation of ++= macro for tasks. 
*/ + def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = + { + val init = taskMacroImpl[U](c)(vs) + val assign = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName) + c.Expr[Setting[Task[T]]](assign) + } + /** Implementation of ++= macro for settings. */ + def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = + { + val init = SettingMacro.settingMacroImpl[U](c)(vs) + val assign = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName) + c.Expr[Setting[T]](assign) + } - /** Implementation of := macro for tasks. */ - def inputTaskAssignMacroImpl[T: c.WeakTypeTag](c: Context)(v: c.Expr[T]): c.Expr[Setting[InputTask[T]]] = - { - val init = inputTaskMacroImpl[T](c)(v) - val assign = transformMacroImpl(c)( init.tree )( AssignInitName ) - c.Expr[Setting[InputTask[T]]]( assign ) - } - /** Implementation of += macro for tasks. */ - def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] = - { - val init = taskMacroImpl[U](c)(v) - val assign = appendMacroImpl(c)( init.tree, a.tree )( Append1InitName ) - c.Expr[Setting[Task[T]]]( assign ) - } - /** Implementation of += macro for settings. */ - def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] = - { - val init = SettingMacro.settingMacroImpl[U](c)(v) - val assign = appendMacroImpl(c)( init.tree, a.tree )( Append1InitName ) - c.Expr[Setting[T]]( assign ) - } - /** Implementation of ++= macro for tasks. 
*/ - def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] = - { - val init = taskMacroImpl[U](c)(vs) - val assign = appendMacroImpl(c)( init.tree, a.tree )( AppendNInitName ) - c.Expr[Setting[Task[T]]]( assign ) - } - /** Implementation of ++= macro for settings. */ - def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] = - { - val init = SettingMacro.settingMacroImpl[U](c)(vs) - val assign = appendMacroImpl(c)( init.tree, a.tree )( AppendNInitName ) - c.Expr[Setting[T]]( assign ) - } + private[this] def appendMacroImpl(c: Context)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = + { + import c.universe.{ Apply, ApplyTag, newTermName, Select, SelectTag, TypeApply, TypeApplyTag } + c.macroApplication match { + case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), a) => + Apply(Apply(TypeApply(Select(preT, newTermName(newName).encodedName), targs), init :: sourcePosition(c).tree :: Nil), a) + case x => ContextUtil.unexpectedTree(x) + } + } + private[this] def transformMacroImpl(c: Context)(init: c.Tree)(newName: String): c.Tree = + { + import c.universe.{ Apply, ApplyTag, newTermName, Select, SelectTag } + val target = + c.macroApplication match { + case Apply(Select(prefix, _), _) => prefix + case x => ContextUtil.unexpectedTree(x) + } + Apply.apply(Select(target, newTermName(newName).encodedName), init :: sourcePosition(c).tree :: Nil) + } + private[this] def sourcePosition(c: Context): c.Expr[SourcePosition] = + { + import c.universe._ + val pos = c.enclosingPosition + if (pos.isDefined && pos.line >= 0 && pos.source != null) { + val f = pos.source.file + val name = constant[String](c, settingSource(c, f.path, f.name)) + val line = constant[Int](c, pos.line) + reify { sbt.LinePosition(name.splice, line.splice) } + } else + reify { sbt.NoPosition } + } + private[this] def 
settingSource(c: Context, path: String, name: String): String = + { + val ec = c.enclosingClass.symbol + def inEmptyPackage(s: c.Symbol): Boolean = + s != c.universe.NoSymbol && (s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage(s.owner)) + if (!ec.isStatic) + name + else if (inEmptyPackage(ec)) + path + else + s"(${ec.fullName}) $name" + } - private[this] def appendMacroImpl(c: Context)(init: c.Tree, append: c.Tree)(newName: String): c.Tree = - { - import c.universe.{Apply,ApplyTag,newTermName,Select,SelectTag,TypeApply,TypeApplyTag} - c.macroApplication match { - case Apply(Apply(TypeApply(Select(preT, nmeT), targs), _), a) => - Apply(Apply(TypeApply(Select(preT, newTermName(newName).encodedName), targs), init :: sourcePosition(c).tree :: Nil), a) - case x => ContextUtil.unexpectedTree(x) - } - } - private[this] def transformMacroImpl(c: Context)(init: c.Tree)(newName: String): c.Tree = - { - import c.universe.{Apply,ApplyTag,newTermName,Select,SelectTag} - val target = - c.macroApplication match { - case Apply(Select(prefix, _), _) => prefix - case x => ContextUtil.unexpectedTree(x) - } - Apply.apply(Select(target, newTermName(newName).encodedName), init :: sourcePosition(c).tree :: Nil) - } - private[this] def sourcePosition(c: Context): c.Expr[SourcePosition] = - { - import c.universe._ - val pos = c.enclosingPosition - if(pos.isDefined && pos.line >= 0 && pos.source != null) { - val f = pos.source.file - val name = constant[String](c, settingSource(c, f.path, f.name)) - val line = constant[Int](c, pos.line) - reify { sbt.LinePosition(name.splice, line.splice) } - } - else - reify{ sbt.NoPosition } - } - private[this] def settingSource(c: Context, path: String, name: String): String = - { - val ec = c.enclosingClass.symbol - def inEmptyPackage(s: c.Symbol): Boolean = - s != c.universe.NoSymbol && (s.owner == c.mirror.EmptyPackage || s.owner == c.mirror.EmptyPackageClass || inEmptyPackage(s.owner)) - if(!ec.isStatic) - 
name - else if(inEmptyPackage(ec)) - path - else - s"(${ec.fullName}) $name" - } + private[this] def constant[T: c.TypeTag](c: Context, t: T): c.Expr[T] = { + import c.universe._ + c.Expr[T](Literal(Constant(t))) + } - private[this] def constant[T: c.TypeTag](c: Context, t: T): c.Expr[T] = { - import c.universe._ - c.Expr[T](Literal(Constant(t))) - } + def inputTaskMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = + inputTaskMacro0[T](c)(t) + def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = + inputTaskDynMacro0[T](c)(t) + private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = + iInitializeMacro(c)(t) { et => + val pt = iParserMacro(c)(et) { pt => + iTaskMacro(c)(pt) + } + c.universe.reify { InputTask.make(pt.splice) } + } - def inputTaskMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = - inputTaskMacro0[T](c)(t) - def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = - inputTaskDynMacro0[T](c)(t) + private[this] def iInitializeMacro[M[_], T](c: Context)(t: c.Expr[T])(f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = + { + val inner: Transform[c.type, M] = new Transform[c.type, M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } + val cond = c.Expr[T](conditionInputTaskTree(c)(t.tree)) + Instance.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder)(Left(cond), inner) + } + private[this] def conditionInputTaskTree(c: Context)(t: c.Tree): c.Tree = + { + import c.universe._ + import InputWrapper._ + def wrapInitTask[T: c.WeakTypeTag](tree: Tree) = + { + val e = c.Expr[Initialize[Task[T]]](tree) + wrapTask[T](c)(wrapInit[Task[T]](c)(e, tree.pos), tree.pos).tree + } + def wrapInitParser[T: 
c.WeakTypeTag](tree: Tree) = + { + val e = c.Expr[Initialize[State => Parser[T]]](tree) + ParserInput.wrap[T](c)(wrapInit[State => Parser[T]](c)(e, tree.pos), tree.pos).tree + } + def wrapInitInput[T: c.WeakTypeTag](tree: Tree) = + { + val e = c.Expr[Initialize[InputTask[T]]](tree) + wrapInput[T](wrapInit[InputTask[T]](c)(e, tree.pos).tree) + } + def wrapInput[T: c.WeakTypeTag](tree: Tree) = + { + val e = c.Expr[InputTask[T]](tree) + val p = ParserInput.wrap[Task[T]](c)(ParserInput.inputParser(c)(e), tree.pos) + wrapTask[T](c)(p, tree.pos).tree + } - private[this] def inputTaskMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] = - iInitializeMacro(c)(t) { et => - val pt = iParserMacro(c)(et) { pt => - iTaskMacro(c)(pt) - } - c.universe.reify { InputTask.make(pt.splice) } - } + def expand(nme: String, tpe: Type, tree: Tree): Converted[c.type] = nme match { + case WrapInitTaskName => Converted.Success(wrapInitTask(tree)(c.WeakTypeTag(tpe))) + case ParserInput.WrapInitName => Converted.Success(wrapInitParser(tree)(c.WeakTypeTag(tpe))) + case WrapInitInputName => Converted.Success(wrapInitInput(tree)(c.WeakTypeTag(tpe))) + case WrapInputName => Converted.Success(wrapInput(tree)(c.WeakTypeTag(tpe))) + case _ => Converted.NotApplicable + } + val util = ContextUtil[c.type](c) + util.transformWrappers(t, (nme, tpe, tree, original) => expand(nme, tpe, tree)) + } - private[this] def iInitializeMacro[M[_], T](c: Context)(t: c.Expr[T])(f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[Initialize[M[T]]] = - { - val inner: Transform[c.type,M] = new Transform[c.type,M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } - val cond = c.Expr[T](conditionInputTaskTree(c)(t.tree)) - Instance.contImpl[T,M](c, InitializeInstance, InputInitConvert, MixedBuilder)(Left(cond), inner) - } - private[this] def conditionInputTaskTree(c: Context)(t: c.Tree): c.Tree = - { - import c.universe._ - import 
InputWrapper._ - def wrapInitTask[T: c.WeakTypeTag](tree: Tree) = - { - val e = c.Expr[Initialize[Task[T]]](tree) - wrapTask[T](c)( wrapInit[Task[T]](c)(e, tree.pos), tree.pos).tree - } - def wrapInitParser[T: c.WeakTypeTag](tree: Tree) = - { - val e = c.Expr[Initialize[State => Parser[T]]](tree) - ParserInput.wrap[T](c)( wrapInit[State => Parser[T]](c)(e, tree.pos), tree.pos).tree - } - def wrapInitInput[T: c.WeakTypeTag](tree: Tree) = - { - val e = c.Expr[Initialize[InputTask[T]]](tree) - wrapInput[T]( wrapInit[InputTask[T]](c)(e, tree.pos).tree ) - } - def wrapInput[T: c.WeakTypeTag](tree: Tree) = - { - val e = c.Expr[InputTask[T]](tree) - val p = ParserInput.wrap[Task[T]](c)( ParserInput.inputParser(c)(e), tree.pos ) - wrapTask[T](c)(p, tree.pos).tree - } + private[this] def iParserMacro[M[_], T](c: Context)(t: c.Expr[T])(f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = + { + val inner: Transform[c.type, M] = new Transform[c.type, M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } + Instance.contImpl[T, M](c, ParserInstance, ParserConvert, MixedBuilder)(Left(t), inner) + } - def expand(nme: String, tpe: Type, tree: Tree): Converted[c.type] = nme match { - case WrapInitTaskName => Converted.Success(wrapInitTask(tree)(c.WeakTypeTag(tpe))) - case ParserInput.WrapInitName => Converted.Success(wrapInitParser(tree)(c.WeakTypeTag(tpe))) - case WrapInitInputName => Converted.Success(wrapInitInput(tree)(c.WeakTypeTag(tpe))) - case WrapInputName => Converted.Success(wrapInput(tree)(c.WeakTypeTag(tpe))) - case _ => Converted.NotApplicable - } - val util = ContextUtil[c.type](c) - util.transformWrappers(t, (nme,tpe,tree,original) => expand(nme,tpe,tree)) - } + private[this] def iTaskMacro[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = + Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform) - private[this] def iParserMacro[M[_], T](c: 
Context)(t: c.Expr[T])(f: c.Expr[T] => c.Expr[M[T]])(implicit tt: c.WeakTypeTag[T], mt: c.WeakTypeTag[M[T]]): c.Expr[State => Parser[M[T]]] = - { - val inner: Transform[c.type,M] = new Transform[c.type,M] { def apply(in: c.Tree): c.Tree = f(c.Expr[T](in)).tree } - Instance.contImpl[T,M](c, ParserInstance, ParserConvert, MixedBuilder)(Left(t), inner) - } + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ - private[this] def iTaskMacro[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = - Instance.contImpl[T,Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform) + private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = + { + import c.universe.{ Apply => ApplyTree, _ } + import compat._ - // TODO 2.11 Remove this after dropping 2.10.x support. - private object HasCompat { val compat = ??? }; import HasCompat._ + val tag = implicitly[c.WeakTypeTag[T]] + val util = ContextUtil[c.type](c) + val it = Ident(util.singleton(InputTask)) + val isParserWrapper = InitParserConvert.asPredicate(c) + val isTaskWrapper = FullConvert.asPredicate(c) + val isAnyWrapper = (n: String, tpe: Type, tr: Tree) => isParserWrapper(n, tpe, tr) || isTaskWrapper(n, tpe, tr) + val ttree = t.tree + val defs = util.collectDefs(ttree, isAnyWrapper) + val checkQual = util.checkReferences(defs, isAnyWrapper) - private[this] def inputTaskDynMacro0[T: c.WeakTypeTag](c: Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] = - { - import c.universe.{Apply=>ApplyTree,_} - import compat._ + // the Symbol for the anonymous function passed to the appropriate Instance.map/flatMap/pure method + // this Symbol needs to be known up front so that it can be used as the owner of synthetic vals + val functionSym = util.functionSymbol(ttree.pos) + var result: Option[(Tree, Type, ValDef)] = None - val tag = 
implicitly[c.WeakTypeTag[T]] - val util = ContextUtil[c.type](c) - val it = Ident(util.singleton(InputTask)) - val isParserWrapper = InitParserConvert.asPredicate(c) - val isTaskWrapper = FullConvert.asPredicate(c) - val isAnyWrapper = (n: String, tpe: Type, tr: Tree) => isParserWrapper(n,tpe,tr) || isTaskWrapper(n,tpe,tr) - val ttree = t.tree - val defs = util.collectDefs(ttree, isAnyWrapper) - val checkQual = util.checkReferences(defs, isAnyWrapper) + // original is the Tree being replaced. It is needed for preserving attributes. + def subWrapper(tpe: Type, qual: Tree, original: Tree): Tree = + if (result.isDefined) { + c.error(qual.pos, "Implementation restriction: a dynamic InputTask can only have a single input parser.") + EmptyTree + } else { + qual.foreach(checkQual) + val vd = util.freshValDef(tpe, qual.symbol.pos, functionSym) // val $x: + result = Some((qual, tpe, vd)) + val tree = util.refVal(original, vd) // $x + tree.setPos(qual.pos) // position needs to be set so that wrapKey passes the position onto the wrapper + assert(tree.tpe != null, "Null type: " + tree) + tree.setType(tpe) + tree + } + // Tree for InputTask.[, ](arg1)(arg2) + def inputTaskCreate(name: String, tpeA: Type, tpeB: Type, arg1: Tree, arg2: Tree) = + { + val typedApp = TypeApply(util.select(it, name), TypeTree(tpeA) :: TypeTree(tpeB) :: Nil) + val app = ApplyTree(ApplyTree(typedApp, arg1 :: Nil), arg2 :: Nil) + c.Expr[Initialize[InputTask[T]]](app) + } + // Tree for InputTask.createFree[](arg1) + def inputTaskCreateFree(tpe: Type, arg: Tree) = + { + val typedApp = TypeApply(util.select(it, InputTaskCreateFreeName), TypeTree(tpe) :: Nil) + val app = ApplyTree(typedApp, arg :: Nil) + c.Expr[Initialize[InputTask[T]]](app) + } + def expandTask[I: WeakTypeTag](dyn: Boolean, tx: Tree): c.Expr[Initialize[Task[I]]] = + if (dyn) + taskDynMacroImpl[I](c)(c.Expr[Initialize[Task[I]]](tx)) + else + taskMacroImpl[I](c)(c.Expr[I](tx)) + def wrapTag[I: WeakTypeTag]: WeakTypeTag[Initialize[Task[I]]] = 
weakTypeTag - // the Symbol for the anonymous function passed to the appropriate Instance.map/flatMap/pure method - // this Symbol needs to be known up front so that it can be used as the owner of synthetic vals - val functionSym = util.functionSymbol(ttree.pos) - var result: Option[(Tree, Type, ValDef)] = None + def sub(name: String, tpe: Type, qual: Tree, selection: Tree): Converted[c.type] = + { + val tag = c.WeakTypeTag[T](tpe) + InitParserConvert(c)(name, qual)(tag) transform { tree => + subWrapper(tpe, tree, selection) + } + } - // original is the Tree being replaced. It is needed for preserving attributes. - def subWrapper(tpe: Type, qual: Tree, original: Tree): Tree = - if(result.isDefined) - { - c.error(qual.pos, "Implementation restriction: a dynamic InputTask can only have a single input parser.") - EmptyTree - } - else - { - qual.foreach(checkQual) - val vd = util.freshValDef(tpe, qual.symbol.pos, functionSym) // val $x: - result = Some( (qual, tpe, vd) ) - val tree = util.refVal(original, vd) // $x - tree.setPos(qual.pos) // position needs to be set so that wrapKey passes the position onto the wrapper - assert(tree.tpe != null, "Null type: " + tree) - tree.setType(tpe) - tree - } - // Tree for InputTask.[, ](arg1)(arg2) - def inputTaskCreate(name: String, tpeA: Type, tpeB: Type, arg1: Tree, arg2: Tree) = - { - val typedApp = TypeApply(util.select(it, name), TypeTree(tpeA) :: TypeTree(tpeB) :: Nil) - val app = ApplyTree( ApplyTree(typedApp, arg1 :: Nil), arg2 :: Nil) - c.Expr[Initialize[InputTask[T]]](app) - } - // Tree for InputTask.createFree[](arg1) - def inputTaskCreateFree(tpe: Type, arg: Tree) = - { - val typedApp = TypeApply(util.select(it, InputTaskCreateFreeName), TypeTree(tpe) :: Nil) - val app = ApplyTree(typedApp, arg :: Nil) - c.Expr[Initialize[InputTask[T]]](app) - } - def expandTask[I: WeakTypeTag](dyn: Boolean, tx: Tree): c.Expr[Initialize[Task[I]]] = - if(dyn) - taskDynMacroImpl[I](c)( c.Expr[Initialize[Task[I]]](tx) ) - else - 
taskMacroImpl[I](c)( c.Expr[I](tx) ) - def wrapTag[I: WeakTypeTag]: WeakTypeTag[Initialize[Task[I]]] = weakTypeTag - - def sub(name: String, tpe: Type, qual: Tree, selection: Tree): Converted[c.type] = - { - val tag = c.WeakTypeTag[T](tpe) - InitParserConvert(c)(name, qual)(tag) transform { tree => - subWrapper(tpe, tree, selection) - } - } - - val tx = util.transformWrappers(ttree, (n,tpe,tree,replace) => sub(n,tpe,tree,replace)) - result match { - case Some((p, tpe, param)) => - val fCore = util.createFunction(param :: Nil, tx, functionSym) - val bodyTpe = wrapTag(tag).tpe - val fTpe = util.functionType(tpe :: Nil, bodyTpe) - val fTag = c.WeakTypeTag[Any](fTpe) // don't know the actual type yet, so use Any - val fInit = expandTask(false, fCore)(fTag).tree - inputTaskCreate(InputTaskCreateDynName, tpe, tag.tpe, p, fInit) - case None => - val init = expandTask[T](true, tx).tree - inputTaskCreateFree(tag.tpe, init) - } - } + val tx = util.transformWrappers(ttree, (n, tpe, tree, replace) => sub(n, tpe, tree, replace)) + result match { + case Some((p, tpe, param)) => + val fCore = util.createFunction(param :: Nil, tx, functionSym) + val bodyTpe = wrapTag(tag).tpe + val fTpe = util.functionType(tpe :: Nil, bodyTpe) + val fTag = c.WeakTypeTag[Any](fTpe) // don't know the actual type yet, so use Any + val fInit = expandTask(false, fCore)(fTag).tree + inputTaskCreate(InputTaskCreateDynName, tpe, tag.tpe, p, fInit) + case None => + val init = expandTask[T](true, tx).tree + inputTaskCreateFree(tag.tpe, init) + } + } } -object PlainTaskMacro -{ - def task[T](t: T): Task[T] = macro taskImpl[T] - def taskImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = - Instance.contImpl[T,Id](c, TaskInstance, TaskConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) +object PlainTaskMacro { + def task[T](t: T): Task[T] = macro taskImpl[T] + def taskImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[Task[T]] = + Instance.contImpl[T, Id](c, TaskInstance, 
TaskConvert, MixedBuilder)(Left(t), Instance.idTransform[c.type]) - def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T] - def taskDynImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] = - Instance.contImpl[T,Id](c, TaskInstance, TaskConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) + def taskDyn[T](t: Task[T]): Task[T] = macro taskDynImpl[T] + def taskDynImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[Task[T]]): c.Expr[Task[T]] = + Instance.contImpl[T, Id](c, TaskInstance, TaskConvert, MixedBuilder)(Right(t), Instance.idTransform[c.type]) } diff --git a/main/src/main/scala/sbt/APIMappings.scala b/main/src/main/scala/sbt/APIMappings.scala index 1f7e4e9cc..648ed6d4f 100644 --- a/main/src/main/scala/sbt/APIMappings.scala +++ b/main/src/main/scala/sbt/APIMappings.scala @@ -1,33 +1,33 @@ package sbt - import java.io.File - import java.net.{MalformedURLException,URL} +import java.io.File +import java.net.{ MalformedURLException, URL } -private[sbt] object APIMappings -{ - def extract(cp: Seq[Attributed[File]], log: Logger): Seq[(File,URL)] = - cp.flatMap(entry => extractFromEntry(entry, log)) +private[sbt] object APIMappings { + def extract(cp: Seq[Attributed[File]], log: Logger): Seq[(File, URL)] = + cp.flatMap(entry => extractFromEntry(entry, log)) - def extractFromEntry(entry: Attributed[File], log: Logger): Option[(File,URL)] = - entry.get(Keys.entryApiURL) match { - case Some(u) => Some( (entry.data, u) ) - case None => entry.get(Keys.moduleID.key).flatMap { mid => extractFromID(entry.data, mid, log) } - } + def extractFromEntry(entry: Attributed[File], log: Logger): Option[(File, URL)] = + entry.get(Keys.entryApiURL) match { + case Some(u) => Some((entry.data, u)) + case None => entry.get(Keys.moduleID.key).flatMap { mid => extractFromID(entry.data, mid, log) } + } - private[this] def extractFromID(entry: File, mid: ModuleID, log: Logger): Option[(File,URL)] = - for { - urlString <- 
mid.extraAttributes.get(CustomPomParser.ApiURLKey) - u <- parseURL(urlString, entry, log) - } yield (entry, u) + private[this] def extractFromID(entry: File, mid: ModuleID, log: Logger): Option[(File, URL)] = + for { + urlString <- mid.extraAttributes.get(CustomPomParser.ApiURLKey) + u <- parseURL(urlString, entry, log) + } yield (entry, u) - private[this] def parseURL(s: String, forEntry: File, log: Logger): Option[URL] = - try Some(new URL(s)) catch { case e: MalformedURLException => - log.warn(s"Invalid API base URL '$s' for classpath entry '$forEntry': ${e.toString}") - None - } + private[this] def parseURL(s: String, forEntry: File, log: Logger): Option[URL] = + try Some(new URL(s)) catch { + case e: MalformedURLException => + log.warn(s"Invalid API base URL '$s' for classpath entry '$forEntry': ${e.toString}") + None + } - def store[T](attr: Attributed[T], entryAPI: Option[URL]): Attributed[T] = entryAPI match { - case None => attr - case Some(u) => attr.put(Keys.entryApiURL, u) - } + def store[T](attr: Attributed[T], entryAPI: Option[URL]): Attributed[T] = entryAPI match { + case None => attr + case Some(u) => attr.put(Keys.entryApiURL, u) + } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/Act.scala b/main/src/main/scala/sbt/Act.scala index 0679754c1..e65205754 100644 --- a/main/src/main/scala/sbt/Act.scala +++ b/main/src/main/scala/sbt/Act.scala @@ -3,304 +3,296 @@ */ package sbt - import Def.{showRelativeKey, ScopedKey} - import Project.showContextKey - import Keys.{sessionSettings, thisProject} - import complete.{DefaultParsers, Parser} - import Aggregation.{KeyValue,Values} - import DefaultParsers._ - import Types.idFun - import java.net.URI - import CommandStrings.{MultiTaskCommand, ShowCommand} +import Def.{ showRelativeKey, ScopedKey } +import Project.showContextKey +import Keys.{ sessionSettings, thisProject } +import complete.{ DefaultParsers, Parser } +import Aggregation.{ KeyValue, Values } +import DefaultParsers._ +import 
Types.idFun +import java.net.URI +import CommandStrings.{ MultiTaskCommand, ShowCommand } final class ParsedKey(val key: ScopedKey[_], val mask: ScopeMask) -object Act -{ - val GlobalString = "*" +object Act { + val GlobalString = "*" - // this does not take aggregation into account - def scopedKey(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], - keyMap: Map[String, AttributeKey[_]], data: Settings[Scope]): Parser[ScopedKey[_]] = - scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key) + // this does not take aggregation into account + def scopedKey(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], + keyMap: Map[String, AttributeKey[_]], data: Settings[Scope]): Parser[ScopedKey[_]] = + scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key) - // the index should be an aggregated index for proper tab completion - def scopedKeyAggregated(current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], structure: BuildStructure): KeysParser = - for(selected <- scopedKeySelected(structure.index.aggregateKeyIndex, current, defaultConfigs, structure.index.keyMap, structure.data) ) yield - Aggregation.aggregate(selected.key, selected.mask, structure.extra) + // the index should be an aggregated index for proper tab completion + def scopedKeyAggregated(current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], structure: BuildStructure): KeysParser = + for (selected <- scopedKeySelected(structure.index.aggregateKeyIndex, current, defaultConfigs, structure.index.keyMap, structure.data)) yield Aggregation.aggregate(selected.key, selected.mask, structure.extra) - def scopedKeySelected(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], - keyMap: Map[String, AttributeKey[_]], data: Settings[Scope]): Parser[ParsedKey] = - scopedKeyFull(index, current, defaultConfigs, 
keyMap) flatMap { choices => - select(choices, data)( showRelativeKey(current, index.buildURIs.size > 1) ) - } + def scopedKeySelected(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], + keyMap: Map[String, AttributeKey[_]], data: Settings[Scope]): Parser[ParsedKey] = + scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices => + select(choices, data)(showRelativeKey(current, index.buildURIs.size > 1)) + } - def scopedKeyFull(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], keyMap: Map[String, AttributeKey[_]]): Parser[Seq[Parser[ParsedKey]]] = - { - def taskKeyExtra(proj: Option[ResolvedReference], confAmb: ParsedAxis[String], baseMask: ScopeMask): Seq[Parser[ParsedKey]] = - for { - conf <- configs(confAmb, defaultConfigs, proj, index) - } yield for { - taskAmb <- taskAxis(conf, index.tasks(proj, conf), keyMap) - task = resolveTask(taskAmb) - key <- key(index, proj, conf, task, keyMap) - extra <- extraAxis(keyMap, IMap.empty) - } yield { - val mask = baseMask.copy(task = taskAmb.isExplicit, extra = true) - new ParsedKey( makeScopedKey( proj, conf, task, extra, key ), mask) - } + def scopedKeyFull(index: KeyIndex, current: ProjectRef, defaultConfigs: Option[ResolvedReference] => Seq[String], keyMap: Map[String, AttributeKey[_]]): Parser[Seq[Parser[ParsedKey]]] = + { + def taskKeyExtra(proj: Option[ResolvedReference], confAmb: ParsedAxis[String], baseMask: ScopeMask): Seq[Parser[ParsedKey]] = + for { + conf <- configs(confAmb, defaultConfigs, proj, index) + } yield for { + taskAmb <- taskAxis(conf, index.tasks(proj, conf), keyMap) + task = resolveTask(taskAmb) + key <- key(index, proj, conf, task, keyMap) + extra <- extraAxis(keyMap, IMap.empty) + } yield { + val mask = baseMask.copy(task = taskAmb.isExplicit, extra = true) + new ParsedKey(makeScopedKey(proj, conf, task, extra, key), mask) + } - for { - rawProject <- optProjectRef(index, current) - proj = 
resolveProject(rawProject, current) - confAmb <- config( index configs proj ) - partialMask = ScopeMask(rawProject.isExplicit, confAmb.isExplicit, false, false) - } yield - taskKeyExtra(proj, confAmb, partialMask) - } - def makeScopedKey(proj: Option[ResolvedReference], conf: Option[String], task: Option[AttributeKey[_]], extra: ScopeAxis[AttributeMap], key: AttributeKey[_]): ScopedKey[_] = - ScopedKey( Scope( toAxis(proj, Global), toAxis(conf map ConfigKey.apply, Global), toAxis(task, Global), extra), key ) + for { + rawProject <- optProjectRef(index, current) + proj = resolveProject(rawProject, current) + confAmb <- config(index configs proj) + partialMask = ScopeMask(rawProject.isExplicit, confAmb.isExplicit, false, false) + } yield taskKeyExtra(proj, confAmb, partialMask) + } + def makeScopedKey(proj: Option[ResolvedReference], conf: Option[String], task: Option[AttributeKey[_]], extra: ScopeAxis[AttributeMap], key: AttributeKey[_]): ScopedKey[_] = + ScopedKey(Scope(toAxis(proj, Global), toAxis(conf map ConfigKey.apply, Global), toAxis(task, Global), extra), key) - def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = - seq(allKeys) flatMap { ss => - val default = ss.headOption match { - case None => noValidKeys - case Some(x) => success(x) - } - selectFromValid(ss filter isValid(data), default) - } - def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = - selectByTask(selectByConfig(ss)) match - { - case Seq() => default - case Seq(single) => success(single) - case multi => failure("Ambiguous keys: " + showAmbiguous(keys(multi))) - } - private[this] def keys(ss: Seq[ParsedKey]): Seq[ScopedKey[_]] = ss.map(_.key) - def selectByConfig(ss: Seq[ParsedKey]): Seq[ParsedKey] = - ss match - { - case Seq() => Nil - case Seq(x, tail @ _*) => // select the first configuration containing a valid key - tail.takeWhile(_.key.scope.config 
== x.key.scope.config) match - { - case Seq() => x :: Nil - case xs => x +: xs - } - } - def selectByTask(ss: Seq[ParsedKey]): Seq[ParsedKey] = - { - val (selects, globals) = ss.partition(_.key.scope.task.isSelect) - if(globals.nonEmpty) globals else selects - } + def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = + seq(allKeys) flatMap { ss => + val default = ss.headOption match { + case None => noValidKeys + case Some(x) => success(x) + } + selectFromValid(ss filter isValid(data), default) + } + def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(implicit show: Show[ScopedKey[_]]): Parser[ParsedKey] = + selectByTask(selectByConfig(ss)) match { + case Seq() => default + case Seq(single) => success(single) + case multi => failure("Ambiguous keys: " + showAmbiguous(keys(multi))) + } + private[this] def keys(ss: Seq[ParsedKey]): Seq[ScopedKey[_]] = ss.map(_.key) + def selectByConfig(ss: Seq[ParsedKey]): Seq[ParsedKey] = + ss match { + case Seq() => Nil + case Seq(x, tail @ _*) => // select the first configuration containing a valid key + tail.takeWhile(_.key.scope.config == x.key.scope.config) match { + case Seq() => x :: Nil + case xs => x +: xs + } + } + def selectByTask(ss: Seq[ParsedKey]): Seq[ParsedKey] = + { + val (selects, globals) = ss.partition(_.key.scope.task.isSelect) + if (globals.nonEmpty) globals else selects + } - def noValidKeys = failure("No such key.") + def noValidKeys = failure("No such key.") - def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String = - keys.take(3).map(x => show(x)).mkString("", ", ", if(keys.size > 3) ", ..." else "") + def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String = + keys.take(3).map(x => show(x)).mkString("", ", ", if (keys.size > 3) ", ..." 
else "") - def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = - { - val key = parsed.key - data.definingScope(key.scope, key.key) == Some(key.scope) - } + def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = + { + val key = parsed.key + data.definingScope(key.scope, key.key) == Some(key.scope) + } - def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] = - p !!! ("Expected " + label) examples exs - def examplesStrict(p: Parser[String], exs: Set[String], label: String): Parser[String] = - filterStrings(examples(p, exs, label), exs, label) + def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] = + p !!! ("Expected " + label) examples exs + def examplesStrict(p: Parser[String], exs: Set[String], label: String): Parser[String] = + filterStrings(examples(p, exs, label), exs, label) - def optionalAxis[T](p: Parser[T], ifNone: ScopeAxis[T]): Parser[ScopeAxis[T]] = - p.? map { opt => toAxis(opt, ifNone) } - def toAxis[T](opt: Option[T], ifNone: ScopeAxis[T]): ScopeAxis[T] = - opt match { case Some(t) => Select(t); case None => ifNone } + def optionalAxis[T](p: Parser[T], ifNone: ScopeAxis[T]): Parser[ScopeAxis[T]] = + p.? map { opt => toAxis(opt, ifNone) } + def toAxis[T](opt: Option[T], ifNone: ScopeAxis[T]): ScopeAxis[T] = + opt match { case Some(t) => Select(t); case None => ifNone } - def config(confs: Set[String]): Parser[ParsedAxis[String]] = - { - val sep = ':' !!! "Expected ':' (if selecting a configuration)" - token( (GlobalString ^^^ ParsedGlobal | value(examples(ID, confs, "configuration")) ) <~ sep ) ?? Omitted - } + def config(confs: Set[String]): Parser[ParsedAxis[String]] = + { + val sep = ':' !!! "Expected ':' (if selecting a configuration)" + token((GlobalString ^^^ ParsedGlobal | value(examples(ID, confs, "configuration"))) <~ sep) ?? 
Omitted + } - def configs(explicit: ParsedAxis[String], defaultConfigs: Option[ResolvedReference] => Seq[String], proj: Option[ResolvedReference], index: KeyIndex): Seq[Option[String]] = - explicit match - { - case Omitted => None +: defaultConfigurations(proj, index, defaultConfigs).flatMap(nonEmptyConfig(index, proj)) - case ParsedGlobal => None :: Nil - case pv: ParsedValue[x] => Some(pv.value) :: Nil - } - def defaultConfigurations(proj: Option[ResolvedReference], index: KeyIndex, defaultConfigs: Option[ResolvedReference] => Seq[String]): Seq[String] = - if(index exists proj) defaultConfigs(proj) else Nil - def nonEmptyConfig(index: KeyIndex, proj: Option[ResolvedReference]): String => Seq[Option[String]] = config => - if(index.isEmpty(proj, Some(config))) Nil else Some(config) :: Nil + def configs(explicit: ParsedAxis[String], defaultConfigs: Option[ResolvedReference] => Seq[String], proj: Option[ResolvedReference], index: KeyIndex): Seq[Option[String]] = + explicit match { + case Omitted => None +: defaultConfigurations(proj, index, defaultConfigs).flatMap(nonEmptyConfig(index, proj)) + case ParsedGlobal => None :: Nil + case pv: ParsedValue[x] => Some(pv.value) :: Nil + } + def defaultConfigurations(proj: Option[ResolvedReference], index: KeyIndex, defaultConfigs: Option[ResolvedReference] => Seq[String]): Seq[String] = + if (index exists proj) defaultConfigs(proj) else Nil + def nonEmptyConfig(index: KeyIndex, proj: Option[ResolvedReference]): String => Seq[Option[String]] = config => + if (index.isEmpty(proj, Some(config))) Nil else Some(config) :: Nil - def key(index: KeyIndex, proj: Option[ResolvedReference], conf: Option[String], task: Option[AttributeKey[_]], keyMap: Map[String,AttributeKey[_]]): Parser[AttributeKey[_]] = - { - def dropHyphenated(keys: Set[String]): Set[String] = keys.filterNot(Util.hasHyphen) - def keyParser(keys: Set[String]): Parser[AttributeKey[_]] = - token(ID !!! 
"Expected key" examples dropHyphenated(keys)) flatMap { keyString=> - getKey(keyMap, keyString, idFun) - } - keyParser(index.keys(proj, conf, task)) - } + def key(index: KeyIndex, proj: Option[ResolvedReference], conf: Option[String], task: Option[AttributeKey[_]], keyMap: Map[String, AttributeKey[_]]): Parser[AttributeKey[_]] = + { + def dropHyphenated(keys: Set[String]): Set[String] = keys.filterNot(Util.hasHyphen) + def keyParser(keys: Set[String]): Parser[AttributeKey[_]] = + token(ID !!! "Expected key" examples dropHyphenated(keys)) flatMap { keyString => + getKey(keyMap, keyString, idFun) + } + keyParser(index.keys(proj, conf, task)) + } - def getKey[T](keyMap: Map[String,AttributeKey[_]], keyString: String, f: AttributeKey[_] => T): Parser[T] = - keyMap.get(keyString) match { - case Some(k) => success(f(k)) - case None => failure(Command.invalidValue("key", keyMap.keys)(keyString)) - } + def getKey[T](keyMap: Map[String, AttributeKey[_]], keyString: String, f: AttributeKey[_] => T): Parser[T] = + keyMap.get(keyString) match { + case Some(k) => success(f(k)) + case None => failure(Command.invalidValue("key", keyMap.keys)(keyString)) + } - val spacedComma = token(OptSpace ~ ',' ~ OptSpace) + val spacedComma = token(OptSpace ~ ',' ~ OptSpace) - def extraAxis(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[ScopeAxis[AttributeMap]] = - { - val extrasP = extrasParser(knownKeys, knownValues) - val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')') - optionalAxis(extras, Global) - } + def extraAxis(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[ScopeAxis[AttributeMap]] = + { + val extrasP = extrasParser(knownKeys, knownValues) + val extras = token('(', hide = _ == 1 && knownValues.isEmpty) ~> extrasP <~ token(')') + optionalAxis(extras, Global) + } - def taskAxis(d: Option[String], tasks: Set[AttributeKey[_]], allKnown: Map[String, AttributeKey[_]]): 
Parser[ParsedAxis[AttributeKey[_]]] = - { - val taskSeq = tasks.toSeq - def taskKeys(f: AttributeKey[_] => String): Seq[(String, AttributeKey[_])] = taskSeq.map(key => (f(key), key)) - val normKeys = taskKeys(_.label) - val valid = allKnown ++ normKeys ++ taskKeys(_.rawLabel) - val suggested = normKeys.map(_._1).toSet - val keyP = filterStrings(examples(ID, suggested, "key"), valid.keySet, "key") map valid - (token(value(keyP) | GlobalString ^^^ ParsedGlobal ) <~ token("::".id) ) ?? Omitted - } - def resolveTask(task: ParsedAxis[AttributeKey[_]]): Option[AttributeKey[_]] = - task match - { - case ParsedGlobal | Omitted => None - case t: ParsedValue[AttributeKey[_]] => Some(t.value) - } + def taskAxis(d: Option[String], tasks: Set[AttributeKey[_]], allKnown: Map[String, AttributeKey[_]]): Parser[ParsedAxis[AttributeKey[_]]] = + { + val taskSeq = tasks.toSeq + def taskKeys(f: AttributeKey[_] => String): Seq[(String, AttributeKey[_])] = taskSeq.map(key => (f(key), key)) + val normKeys = taskKeys(_.label) + val valid = allKnown ++ normKeys ++ taskKeys(_.rawLabel) + val suggested = normKeys.map(_._1).toSet + val keyP = filterStrings(examples(ID, suggested, "key"), valid.keySet, "key") map valid + (token(value(keyP) | GlobalString ^^^ ParsedGlobal) <~ token("::".id)) ?? 
Omitted + } + def resolveTask(task: ParsedAxis[AttributeKey[_]]): Option[AttributeKey[_]] = + task match { + case ParsedGlobal | Omitted => None + case t: ParsedValue[AttributeKey[_]] => Some(t.value) + } - def filterStrings(base: Parser[String], valid: Set[String], label: String): Parser[String] = - base.filter(valid, Command.invalidValue(label, valid)) + def filterStrings(base: Parser[String], valid: Set[String], label: String): Parser[String] = + base.filter(valid, Command.invalidValue(label, valid)) - def extrasParser(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[AttributeMap] = - { - val validKeys = knownKeys.filter { case (_, key) => knownValues get key exists(!_.isEmpty) } - if(validKeys.isEmpty) - failure("No valid extra keys.") - else - rep1sep( extraParser(validKeys, knownValues), spacedComma) map AttributeMap.apply - } + def extrasParser(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[AttributeMap] = + { + val validKeys = knownKeys.filter { case (_, key) => knownValues get key exists (!_.isEmpty) } + if (validKeys.isEmpty) + failure("No valid extra keys.") + else + rep1sep(extraParser(validKeys, knownValues), spacedComma) map AttributeMap.apply + } - def extraParser(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[AttributeEntry[_]] = - { - val keyp = knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace) - keyp flatMap { case key: AttributeKey[t] => - val valueMap: Map[String,t] = knownValues(key).map( v => (v.toString, v)).toMap - knownIDParser(valueMap, "extra value") map { value => AttributeEntry(key, value) } - } - } - def knownIDParser[T](knownKeys: Map[String, T], label: String): Parser[T] = - token(examplesStrict(ID, knownKeys.keys.toSet, label)) map knownKeys + def extraParser(knownKeys: Map[String, AttributeKey[_]], knownValues: IMap[AttributeKey, Set]): Parser[AttributeEntry[_]] = + { + val keyp = 
knownIDParser(knownKeys, "Not a valid extra key") <~ token(':' ~ OptSpace) + keyp flatMap { + case key: AttributeKey[t] => + val valueMap: Map[String, t] = knownValues(key).map(v => (v.toString, v)).toMap + knownIDParser(valueMap, "extra value") map { value => AttributeEntry(key, value) } + } + } + def knownIDParser[T](knownKeys: Map[String, T], label: String): Parser[T] = + token(examplesStrict(ID, knownKeys.keys.toSet, label)) map knownKeys - def projectRef(index: KeyIndex, currentBuild: URI): Parser[ParsedAxis[ResolvedReference]] = - { - val global = token(GlobalString ~ '/') ^^^ ParsedGlobal - val trailing = '/' !!! "Expected '/' (if selecting a project)" - global | value(resolvedReference(index, currentBuild, trailing)) - } - def resolvedReference(index: KeyIndex, currentBuild: URI, trailing: Parser[_]): Parser[ResolvedReference] = - { - def projectID(uri: URI) = token( examplesStrict(ID, index projects uri, "project ID") <~ trailing ) - def projectRef(uri: URI) = projectID(uri) map { id => ProjectRef(uri, id) } + def projectRef(index: KeyIndex, currentBuild: URI): Parser[ParsedAxis[ResolvedReference]] = + { + val global = token(GlobalString ~ '/') ^^^ ParsedGlobal + val trailing = '/' !!! "Expected '/' (if selecting a project)" + global | value(resolvedReference(index, currentBuild, trailing)) + } + def resolvedReference(index: KeyIndex, currentBuild: URI, trailing: Parser[_]): Parser[ResolvedReference] = + { + def projectID(uri: URI) = token(examplesStrict(ID, index projects uri, "project ID") <~ trailing) + def projectRef(uri: URI) = projectID(uri) map { id => ProjectRef(uri, id) } - val uris = index.buildURIs - val resolvedURI = Uri(uris).map(uri => Scope.resolveBuild(currentBuild, uri)) - val buildRef = token( '{' ~> resolvedURI <~ '}' ).? + val uris = index.buildURIs + val resolvedURI = Uri(uris).map(uri => Scope.resolveBuild(currentBuild, uri)) + val buildRef = token('{' ~> resolvedURI <~ '}').? 
- buildRef flatMap { - case None => projectRef(currentBuild) - case Some(uri) => projectRef(uri) | token(trailing ^^^ BuildRef(uri)) - } - } - def optProjectRef(index: KeyIndex, current: ProjectRef): Parser[ParsedAxis[ResolvedReference]] = - projectRef(index, current.build) ?? Omitted - def resolveProject(parsed: ParsedAxis[ResolvedReference], current: ProjectRef): Option[ResolvedReference] = - parsed match - { - case Omitted => Some(current) - case ParsedGlobal => None - case pv: ParsedValue[rr] => Some(pv.value) - } + buildRef flatMap { + case None => projectRef(currentBuild) + case Some(uri) => projectRef(uri) | token(trailing ^^^ BuildRef(uri)) + } + } + def optProjectRef(index: KeyIndex, current: ProjectRef): Parser[ParsedAxis[ResolvedReference]] = + projectRef(index, current.build) ?? Omitted + def resolveProject(parsed: ParsedAxis[ResolvedReference], current: ProjectRef): Option[ResolvedReference] = + parsed match { + case Omitted => Some(current) + case ParsedGlobal => None + case pv: ParsedValue[rr] => Some(pv.value) + } - def actParser(s: State): Parser[() => State] = requireSession(s, actParser0(s)) + def actParser(s: State): Parser[() => State] = requireSession(s, actParser0(s)) - private[this] def actParser0(state: State): Parser[() => State] = - { - val extracted = Project extract state - import extracted.{showKey, structure} - import Aggregation.evaluatingParser - actionParser.flatMap { action => - val akp = aggregatedKeyParser(extracted) - def evaluate(kvs: Seq[ScopedKey[_]]): Parser[() => State] = { - val preparedPairs = anyKeyValues(structure, kvs) - val showConfig = Aggregation.defaultShow(state, showTasks = action == ShowAction) - evaluatingParser(state, structure, showConfig)(preparedPairs) map { evaluate => - () => { - val keyStrings = preparedPairs.map(pp => showKey(pp.key)).mkString(", ") - state.log.debug("Evaluating tasks: " + keyStrings) - evaluate() - } - } - } - action match { - case SingleAction => akp flatMap evaluate - case 
ShowAction | MultiAction => - rep1sep(akp, token(Space)).flatMap( kvss => evaluate(kvss.flatten) ) - } - } - } + private[this] def actParser0(state: State): Parser[() => State] = + { + val extracted = Project extract state + import extracted.{ showKey, structure } + import Aggregation.evaluatingParser + actionParser.flatMap { action => + val akp = aggregatedKeyParser(extracted) + def evaluate(kvs: Seq[ScopedKey[_]]): Parser[() => State] = { + val preparedPairs = anyKeyValues(structure, kvs) + val showConfig = Aggregation.defaultShow(state, showTasks = action == ShowAction) + evaluatingParser(state, structure, showConfig)(preparedPairs) map { evaluate => + () => { + val keyStrings = preparedPairs.map(pp => showKey(pp.key)).mkString(", ") + state.log.debug("Evaluating tasks: " + keyStrings) + evaluate() + } + } + } + action match { + case SingleAction => akp flatMap evaluate + case ShowAction | MultiAction => + rep1sep(akp, token(Space)).flatMap(kvss => evaluate(kvss.flatten)) + } + } + } - private[this] final class ActAction - private[this] final val ShowAction, MultiAction, SingleAction = new ActAction + private[this] final class ActAction + private[this] final val ShowAction, MultiAction, SingleAction = new ActAction - private[this] def actionParser: Parser[ActAction] = - token( - ((ShowCommand ^^^ ShowAction) | - (MultiTaskCommand ^^^ MultiAction) ) <~ Space - ) ?? SingleAction + private[this] def actionParser: Parser[ActAction] = + token( + ((ShowCommand ^^^ ShowAction) | + (MultiTaskCommand ^^^ MultiAction)) <~ Space + ) ?? SingleAction - @deprecated("No longer used.", "0.13.2") - def showParser = token( (ShowCommand ~ Space) ^^^ true) ?? false + @deprecated("No longer used.", "0.13.2") + def showParser = token((ShowCommand ~ Space) ^^^ true) ?? 
false - def scopedKeyParser(state: State): Parser[ScopedKey[_]] = scopedKeyParser(Project extract state) - def scopedKeyParser(extracted: Extracted): Parser[ScopedKey[_]] = scopedKeyParser(extracted.structure, extracted.currentRef) - def scopedKeyParser(structure: BuildStructure, currentRef: ProjectRef): Parser[ScopedKey[_]] = - scopedKey(structure.index.keyIndex, currentRef, structure.extra.configurationsForAxis, structure.index.keyMap, structure.data) + def scopedKeyParser(state: State): Parser[ScopedKey[_]] = scopedKeyParser(Project extract state) + def scopedKeyParser(extracted: Extracted): Parser[ScopedKey[_]] = scopedKeyParser(extracted.structure, extracted.currentRef) + def scopedKeyParser(structure: BuildStructure, currentRef: ProjectRef): Parser[ScopedKey[_]] = + scopedKey(structure.index.keyIndex, currentRef, structure.extra.configurationsForAxis, structure.index.keyMap, structure.data) - type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T}] - def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state) - def aggregatedKeyParser(extracted: Extracted): KeysParser = aggregatedKeyParser(extracted.structure, extracted.currentRef) - def aggregatedKeyParser(structure: BuildStructure, currentRef: ProjectRef): KeysParser = - scopedKeyAggregated(currentRef, structure.extra.configurationsForAxis, structure) + type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }] + def aggregatedKeyParser(state: State): KeysParser = aggregatedKeyParser(Project extract state) + def aggregatedKeyParser(extracted: Extracted): KeysParser = aggregatedKeyParser(extracted.structure, extracted.currentRef) + def aggregatedKeyParser(structure: BuildStructure, currentRef: ProjectRef): KeysParser = + scopedKeyAggregated(currentRef, structure.extra.configurationsForAxis, structure) - def keyValues[T](state: State)(keys: Seq[ScopedKey[T]]): Values[T] = keyValues(Project extract state)(keys) - def keyValues[T](extracted: Extracted)(keys: 
Seq[ScopedKey[T]]): Values[T] = keyValues(extracted.structure)(keys) - def keyValues[T](structure: BuildStructure)(keys: Seq[ScopedKey[T]]): Values[T] = - keys.flatMap { key => - getValue(structure.data, key.scope, key.key) map { value => KeyValue(key, value) } - } - private[this] def anyKeyValues(structure: BuildStructure, keys: Seq[ScopedKey[_]]): Seq[KeyValue[_]] = - keys.flatMap { key => - getValue(structure.data, key.scope, key.key) map { value => KeyValue(key, value) } - } + def keyValues[T](state: State)(keys: Seq[ScopedKey[T]]): Values[T] = keyValues(Project extract state)(keys) + def keyValues[T](extracted: Extracted)(keys: Seq[ScopedKey[T]]): Values[T] = keyValues(extracted.structure)(keys) + def keyValues[T](structure: BuildStructure)(keys: Seq[ScopedKey[T]]): Values[T] = + keys.flatMap { key => + getValue(structure.data, key.scope, key.key) map { value => KeyValue(key, value) } + } + private[this] def anyKeyValues(structure: BuildStructure, keys: Seq[ScopedKey[_]]): Seq[KeyValue[_]] = + keys.flatMap { key => + getValue(structure.data, key.scope, key.key) map { value => KeyValue(key, value) } + } - private[this] def getValue[T](data: Settings[Scope], scope: Scope, key: AttributeKey[T]): Option[T] = - if(java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key) else data.get(scope, key) + private[this] def getValue[T](data: Settings[Scope], scope: Scope, key: AttributeKey[T]): Option[T] = + if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key) else data.get(scope, key) - def requireSession[T](s: State, p: => Parser[T]): Parser[T] = - if(s get sessionSettings isEmpty) failure("No project loaded") else p + def requireSession[T](s: State, p: => Parser[T]): Parser[T] = + if (s get sessionSettings isEmpty) failure("No project loaded") else p - sealed trait ParsedAxis[+T] { - final def isExplicit = this != Omitted - } - final object ParsedGlobal extends ParsedAxis[Nothing] - final object Omitted extends 
ParsedAxis[Nothing] - final class ParsedValue[T](val value: T) extends ParsedAxis[T] - def value[T](t: Parser[T]): Parser[ParsedAxis[T]] = t map { v => new ParsedValue(v) } + sealed trait ParsedAxis[+T] { + final def isExplicit = this != Omitted + } + final object ParsedGlobal extends ParsedAxis[Nothing] + final object Omitted extends ParsedAxis[Nothing] + final class ParsedValue[T](val value: T) extends ParsedAxis[T] + def value[T](t: Parser[T]): Parser[ParsedAxis[T]] = t map { v => new ParsedValue(v) } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/AddSettings.scala b/main/src/main/scala/sbt/AddSettings.scala index 9d1a2ac80..5ecb34195 100644 --- a/main/src/main/scala/sbt/AddSettings.scala +++ b/main/src/main/scala/sbt/AddSettings.scala @@ -1,78 +1,79 @@ package sbt - import Types.const - import java.io.File +import Types.const +import java.io.File -/** Represents how settings from various sources are automatically merged into a Project's settings. -* This only configures per-project settings and not global or per-build settings. */ +/** + * Represents how settings from various sources are automatically merged into a Project's settings. + * This only configures per-project settings and not global or per-build settings. + */ sealed abstract class AddSettings - -object AddSettings -{ - private[sbt] final class Sequence(val sequence: Seq[AddSettings]) extends AddSettings - private[sbt] final object User extends AddSettings - private[sbt] final class Plugins(val include: Plugin => Boolean) extends AddSettings - private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings - private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings - private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings - private[sbt] final object BuildScalaFiles extends AddSettings - /** Adds all settings from autoplugins. 
*/ - val autoPlugins: AddSettings = new AutoPlugins(const(true)) // Note: We do not expose fine-grained autoplugins because - // it's dangerous to control at that level right now. - // Leaving the hook in place in case we need to expose - // it, but most likely it will remain locked out - // for users with an alternative ordering feature - // in place. +object AddSettings { + private[sbt] final class Sequence(val sequence: Seq[AddSettings]) extends AddSettings + private[sbt] final object User extends AddSettings + private[sbt] final class Plugins(val include: Plugin => Boolean) extends AddSettings + private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings + private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings + private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings + private[sbt] final object BuildScalaFiles extends AddSettings - /** Settings specified in Build.scala `Project` constructors. */ - val buildScalaFiles: AddSettings = BuildScalaFiles + /** Adds all settings from autoplugins. */ + val autoPlugins: AddSettings = new AutoPlugins(const(true)) // Note: We do not expose fine-grained autoplugins because + // it's dangerous to control at that level right now. + // Leaving the hook in place in case we need to expose + // it, but most likely it will remain locked out + // for users with an alternative ordering feature + // in place. - /** All plugins that aren't auto plugins. */ - val nonAutoPlugins: AddSettings = plugins(const(true)) + /** Settings specified in Build.scala `Project` constructors. */ + val buildScalaFiles: AddSettings = BuildScalaFiles - /** Adds all settings from a plugin to a project. */ - val allPlugins: AddSettings = seq(autoPlugins, nonAutoPlugins) + /** All plugins that aren't auto plugins. 
*/ + val nonAutoPlugins: AddSettings = plugins(const(true)) - /** Allows the plugins whose names match the `names` filter to automatically add settings to a project. */ - def plugins(include: Plugin => Boolean): AddSettings = new Plugins(include) + /** Adds all settings from a plugin to a project. */ + val allPlugins: AddSettings = seq(autoPlugins, nonAutoPlugins) - /** Includes user settings in the project. */ - val userSettings: AddSettings = User + /** Allows the plugins whose names match the `names` filter to automatically add settings to a project. */ + def plugins(include: Plugin => Boolean): AddSettings = new Plugins(include) - /** Includes the settings from all .sbt files in the project's base directory. */ - val defaultSbtFiles: AddSettings = new DefaultSbtFiles(const(true)) + /** Includes user settings in the project. */ + val userSettings: AddSettings = User - /** Includes the settings from the .sbt files given by `files`. */ - def sbtFiles(files: File*): AddSettings = new SbtFiles(files) + /** Includes the settings from all .sbt files in the project's base directory. */ + val defaultSbtFiles: AddSettings = new DefaultSbtFiles(const(true)) - /** Includes settings automatically*/ - def seq(autos: AddSettings*): AddSettings = new Sequence(autos) + /** Includes the settings from the .sbt files given by `files`. */ + def sbtFiles(files: File*): AddSettings = new SbtFiles(files) - /** The default inclusion of settings. */ - val allDefaults: AddSettings = seq(autoPlugins, buildScalaFiles, userSettings, nonAutoPlugins, defaultSbtFiles) + /** Includes settings automatically*/ + def seq(autos: AddSettings*): AddSettings = new Sequence(autos) - /** Combines two automatic setting configurations. 
*/ - def append(a: AddSettings, b: AddSettings): AddSettings = (a,b) match { - case (sa: Sequence, sb: Sequence) => seq(sa.sequence ++ sb.sequence : _*) - case (sa: Sequence, _) => seq(sa.sequence :+ b : _*) - case (_, sb: Sequence) => seq(a +: sb.sequence : _*) - case _ => seq(a,b) - } + /** The default inclusion of settings. */ + val allDefaults: AddSettings = seq(autoPlugins, buildScalaFiles, userSettings, nonAutoPlugins, defaultSbtFiles) - def clearSbtFiles(a: AddSettings): AddSettings = tx(a) { - case _: DefaultSbtFiles | _: SbtFiles => None - case x => Some(x) - } getOrElse seq() + /** Combines two automatic setting configurations. */ + def append(a: AddSettings, b: AddSettings): AddSettings = (a, b) match { + case (sa: Sequence, sb: Sequence) => seq(sa.sequence ++ sb.sequence: _*) + case (sa: Sequence, _) => seq(sa.sequence :+ b: _*) + case (_, sb: Sequence) => seq(a +: sb.sequence: _*) + case _ => seq(a, b) + } - private[sbt] def tx(a: AddSettings)(f: AddSettings => Option[AddSettings]): Option[AddSettings] = a match { - case s: Sequence => - s.sequence.flatMap { b => tx(b)(f) } match { - case Seq() => None - case Seq(x) => Some(x) - case ss => Some(new Sequence(ss)) - } - case x => f(x) - } + def clearSbtFiles(a: AddSettings): AddSettings = tx(a) { + case _: DefaultSbtFiles | _: SbtFiles => None + case x => Some(x) + } getOrElse seq() + + private[sbt] def tx(a: AddSettings)(f: AddSettings => Option[AddSettings]): Option[AddSettings] = a match { + case s: Sequence => + s.sequence.flatMap { b => tx(b)(f) } match { + case Seq() => None + case Seq(x) => Some(x) + case ss => Some(new Sequence(ss)) + } + case x => f(x) + } } diff --git a/main/src/main/scala/sbt/Aggregation.scala b/main/src/main/scala/sbt/Aggregation.scala index 60e9b64bb..76c48b596 100644 --- a/main/src/main/scala/sbt/Aggregation.scala +++ b/main/src/main/scala/sbt/Aggregation.scala @@ -3,212 +3,205 @@ */ package sbt - import Def.ScopedKey - import Keys.{aggregate, showSuccess, showTiming, 
timingFormat} - import sbt.complete.Parser - import java.net.URI - import Parser._ - import collection.mutable - import std.Transform.{DummyTaskMap, TaskAndValue} +import Def.ScopedKey +import Keys.{ aggregate, showSuccess, showTiming, timingFormat } +import sbt.complete.Parser +import java.net.URI +import Parser._ +import collection.mutable +import std.Transform.{ DummyTaskMap, TaskAndValue } sealed trait Aggregation -final object Aggregation -{ - final case class ShowConfig(settingValues: Boolean, taskValues: Boolean, print: String => Unit, success: Boolean) - final case class Complete[T](start: Long, stop: Long, results: Result[Seq[KeyValue[T]]], state: State) - final case class KeyValue[+T](key: ScopedKey[_], value: T) +final object Aggregation { + final case class ShowConfig(settingValues: Boolean, taskValues: Boolean, print: String => Unit, success: Boolean) + final case class Complete[T](start: Long, stop: Long, results: Result[Seq[KeyValue[T]]], state: State) + final case class KeyValue[+T](key: ScopedKey[_], value: T) - def defaultShow(state: State, showTasks: Boolean): ShowConfig = ShowConfig(settingValues = true, taskValues = showTasks, s => state.log.info(s), success = true) - def printSettings(xs: Seq[KeyValue[_]], print: String => Unit)(implicit display: Show[ScopedKey[_]]) = - xs match - { - case KeyValue(_,x) :: Nil => print(x.toString) - case _ => xs foreach { case KeyValue(key, value) => print(display(key) + "\n\t" + value.toString) } - } - type Values[T] = Seq[KeyValue[T]] - type AnyKeys = Values[_] - def seqParser[T](ps: Values[Parser[T]]): Parser[Seq[KeyValue[T]]] = seq(ps.map { case KeyValue(k,p) => p.map(v => KeyValue(k,v) ) }) + def defaultShow(state: State, showTasks: Boolean): ShowConfig = ShowConfig(settingValues = true, taskValues = showTasks, s => state.log.info(s), success = true) + def printSettings(xs: Seq[KeyValue[_]], print: String => Unit)(implicit display: Show[ScopedKey[_]]) = + xs match { + case KeyValue(_, x) :: Nil => 
print(x.toString) + case _ => xs foreach { case KeyValue(key, value) => print(display(key) + "\n\t" + value.toString) } + } + type Values[T] = Seq[KeyValue[T]] + type AnyKeys = Values[_] + def seqParser[T](ps: Values[Parser[T]]): Parser[Seq[KeyValue[T]]] = seq(ps.map { case KeyValue(k, p) => p.map(v => KeyValue(k, v)) }) - def applyTasks[T](s: State, structure: BuildStructure, ps: Values[Parser[Task[T]]], show: ShowConfig)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = - Command.applyEffect(seqParser(ps)) { ts => - runTasks(s, structure, ts, DummyTaskMap(Nil), show) - } + def applyTasks[T](s: State, structure: BuildStructure, ps: Values[Parser[Task[T]]], show: ShowConfig)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = + Command.applyEffect(seqParser(ps)) { ts => + runTasks(s, structure, ts, DummyTaskMap(Nil), show) + } - @deprecated("Use `timedRun` and `showRun` directly or use `runTasks`.", "0.13.0") - def runTasksWithResult[T](s: State, structure: BuildStructure, ts: Values[Task[T]], extra: DummyTaskMap, show: ShowConfig)(implicit display: Show[ScopedKey[_]]): (State, Result[Seq[KeyValue[T]]]) = - { - val complete = timedRun[T](s, ts, extra) - showRun(complete, show) - (complete.state, complete.results) - } - def showRun[T](complete: Complete[T], show: ShowConfig)(implicit display: Show[ScopedKey[_]]) - { - import complete._ - val log = state.log - val extracted = Project extract state - val success = results match { case Value(_) => true; case Inc(_) => false } - results.toEither.right.foreach { r => if(show.taskValues) printSettings(r, show.print) } - if(show.success) printSuccess(start, stop, extracted, success, log) - } - def timedRun[T](s: State, ts: Values[Task[T]], extra: DummyTaskMap): Complete[T] = - { - import EvaluateTask._ - import std.TaskExtra._ + @deprecated("Use `timedRun` and `showRun` directly or use `runTasks`.", "0.13.0") + def runTasksWithResult[T](s: State, structure: BuildStructure, ts: Values[Task[T]], extra: 
DummyTaskMap, show: ShowConfig)(implicit display: Show[ScopedKey[_]]): (State, Result[Seq[KeyValue[T]]]) = + { + val complete = timedRun[T](s, ts, extra) + showRun(complete, show) + (complete.state, complete.results) + } + def showRun[T](complete: Complete[T], show: ShowConfig)(implicit display: Show[ScopedKey[_]]) { + import complete._ + val log = state.log + val extracted = Project extract state + val success = results match { case Value(_) => true; case Inc(_) => false } + results.toEither.right.foreach { r => if (show.taskValues) printSettings(r, show.print) } + if (show.success) printSuccess(start, stop, extracted, success, log) + } + def timedRun[T](s: State, ts: Values[Task[T]], extra: DummyTaskMap): Complete[T] = + { + import EvaluateTask._ + import std.TaskExtra._ - val extracted = Project extract s - import extracted.structure - val toRun = ts map { case KeyValue(k,t) => t.map(v => KeyValue(k,v)) } join; - val roots = ts map { case KeyValue(k,_) => k } - val config = extractedTaskConfig(extracted, structure, s) + val extracted = Project extract s + import extracted.structure + val toRun = ts map { case KeyValue(k, t) => t.map(v => KeyValue(k, v)) } join; + val roots = ts map { case KeyValue(k, _) => k } + val config = extractedTaskConfig(extracted, structure, s) - val start = System.currentTimeMillis - val (newS, result) = withStreams(structure, s){ str => - val transform = nodeView(s, str, roots, extra) - runTask(toRun, s,str, structure.index.triggers, config)(transform) - } - val stop = System.currentTimeMillis - Complete(start, stop, result, newS) - } + val start = System.currentTimeMillis + val (newS, result) = withStreams(structure, s) { str => + val transform = nodeView(s, str, roots, extra) + runTask(toRun, s, str, structure.index.triggers, config)(transform) + } + val stop = System.currentTimeMillis + Complete(start, stop, result, newS) + } - def runTasks[HL <: HList, T](s: State, structure: BuildStructure, ts: Values[Task[T]], extra: 
DummyTaskMap, show: ShowConfig)(implicit display: Show[ScopedKey[_]]): State = { - val complete = timedRun[T](s, ts, extra) - showRun(complete, show) - complete.results match { - case Inc(i) => complete.state.handleError(i) - case Value(_) => complete.state - } - } + def runTasks[HL <: HList, T](s: State, structure: BuildStructure, ts: Values[Task[T]], extra: DummyTaskMap, show: ShowConfig)(implicit display: Show[ScopedKey[_]]): State = { + val complete = timedRun[T](s, ts, extra) + showRun(complete, show) + complete.results match { + case Inc(i) => complete.state.handleError(i) + case Value(_) => complete.state + } + } - def printSuccess(start: Long, stop: Long, extracted: Extracted, success: Boolean, log: Logger) - { - import extracted._ - def get(key: SettingKey[Boolean]): Boolean = key in currentRef get structure.data getOrElse true - if(get(showSuccess)) - { - if(get(showTiming)) - { - val msg = timingString(start, stop, "", structure.data, currentRef, log) - if(success) log.success(msg) else log.error(msg) - } - else if(success) - log.success("") - } - } - private def timingString(startTime: Long, endTime: Long, s: String, data: Settings[Scope], currentRef: ProjectRef, log: Logger): String = - { - val format = timingFormat in currentRef get data getOrElse defaultFormat - timing(format, startTime, endTime, "", log) - } - def timing(format: java.text.DateFormat, startTime: Long, endTime: Long, s: String, log: Logger): String = - { - val ss = if(s.isEmpty) "" else s + " " - val nowString = format.format(new java.util.Date(endTime)) - "Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString - } - def defaultFormat = - { - import java.text.DateFormat - DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM) - } + def printSuccess(start: Long, stop: Long, extracted: Extracted, success: Boolean, log: Logger) { + import extracted._ + def get(key: SettingKey[Boolean]): Boolean = key in currentRef get structure.data 
getOrElse true + if (get(showSuccess)) { + if (get(showTiming)) { + val msg = timingString(start, stop, "", structure.data, currentRef, log) + if (success) log.success(msg) else log.error(msg) + } else if (success) + log.success("") + } + } + private def timingString(startTime: Long, endTime: Long, s: String, data: Settings[Scope], currentRef: ProjectRef, log: Logger): String = + { + val format = timingFormat in currentRef get data getOrElse defaultFormat + timing(format, startTime, endTime, "", log) + } + def timing(format: java.text.DateFormat, startTime: Long, endTime: Long, s: String, log: Logger): String = + { + val ss = if (s.isEmpty) "" else s + " " + val nowString = format.format(new java.util.Date(endTime)) + "Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s, completed " + nowString + } + def defaultFormat = + { + import java.text.DateFormat + DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM) + } - def applyDynamicTasks[I](s: State, structure: BuildStructure, inputs: Values[InputTask[I]], show: ShowConfig)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = - { - val parsers = for(KeyValue(k,it) <- inputs) yield it.parser(s).map(v => KeyValue(k,v)) - Command.applyEffect(seq(parsers)) { roots => - import EvaluateTask._ - runTasks(s, structure, roots, DummyTaskMap(Nil), show) - } - } + def applyDynamicTasks[I](s: State, structure: BuildStructure, inputs: Values[InputTask[I]], show: ShowConfig)(implicit display: Show[ScopedKey[_]]): Parser[() => State] = + { + val parsers = for (KeyValue(k, it) <- inputs) yield it.parser(s).map(v => KeyValue(k, v)) + Command.applyEffect(seq(parsers)) { roots => + import EvaluateTask._ + runTasks(s, structure, roots, DummyTaskMap(Nil), show) + } + } - def evaluatingParser(s: State, structure: BuildStructure, show: ShowConfig)(keys: Seq[KeyValue[_]])(implicit display: Show[ScopedKey[_]]): Parser[() => State] = - { - // to make the call sites clearer - def separate[L](in: 
Seq[KeyValue[_]])(f: KeyValue[_] => Either[KeyValue[L], KeyValue[_]]): (Seq[KeyValue[L]], Seq[KeyValue[_]]) = - Util.separate(in)(f) + def evaluatingParser(s: State, structure: BuildStructure, show: ShowConfig)(keys: Seq[KeyValue[_]])(implicit display: Show[ScopedKey[_]]): Parser[() => State] = + { + // to make the call sites clearer + def separate[L](in: Seq[KeyValue[_]])(f: KeyValue[_] => Either[KeyValue[L], KeyValue[_]]): (Seq[KeyValue[L]], Seq[KeyValue[_]]) = + Util.separate(in)(f) - val kvs = keys.toList - if(kvs.isEmpty) - failure("No such setting/task") - else { - val (inputTasks, other) = separate[InputTask[_]](kvs) { - case KeyValue(k,v: InputTask[_]) => Left(KeyValue(k,v)) - case kv => Right(kv) - } - val (tasks, settings) = separate[Task[_]](other) { - case KeyValue(k, v: Task[_]) => Left(KeyValue(k,v)) - case kv => Right(kv) - } - // currently, disallow input tasks to be mixed with normal tasks. - // This occurs in `all` or `show`, which support multiple tasks. - // Previously, multiple tasks could be run in one execution, but they were all for the same key, just in different scopes. - // When `all` was added, it allowed different keys and thus opened the possibility for mixing settings, - // tasks, and input tasks in the same call. The code below allows settings and tasks to be mixed, but not input tasks. - // One problem with input tasks in `all` is that many input tasks consume all input and would need syntactic delimiters. - // Once that is addressed, the tasks constructed by the input tasks would need to be combined with the explicit tasks. - if(inputTasks.size > 0) { - if(other.size > 0) { - val inputStrings = inputTasks.map(_.key).mkString("Input task(s):\n\t", "\n\t", "\n") - val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n") - failure(s"Cannot mix input tasks with plain tasks/settings. 
$inputStrings $otherStrings") - } else - applyDynamicTasks(s, structure, maps(inputTasks)(castToAny), show) - } else { - val base = if(tasks.isEmpty) success( () => s ) else - applyTasks(s, structure, maps(tasks)(x => success( castToAny(x))), show) - base.map { res => () => - val newState = res() - if(show.settingValues && !settings.isEmpty) printSettings(settings, show.print) - newState - } - } - } - } - // this is a hack to avoid duplicating method implementations - private[this] def castToAny[T[_]](t: T[_]): T[Any] = t.asInstanceOf[T[Any]] + val kvs = keys.toList + if (kvs.isEmpty) + failure("No such setting/task") + else { + val (inputTasks, other) = separate[InputTask[_]](kvs) { + case KeyValue(k, v: InputTask[_]) => Left(KeyValue(k, v)) + case kv => Right(kv) + } + val (tasks, settings) = separate[Task[_]](other) { + case KeyValue(k, v: Task[_]) => Left(KeyValue(k, v)) + case kv => Right(kv) + } + // currently, disallow input tasks to be mixed with normal tasks. + // This occurs in `all` or `show`, which support multiple tasks. + // Previously, multiple tasks could be run in one execution, but they were all for the same key, just in different scopes. + // When `all` was added, it allowed different keys and thus opened the possibility for mixing settings, + // tasks, and input tasks in the same call. The code below allows settings and tasks to be mixed, but not input tasks. + // One problem with input tasks in `all` is that many input tasks consume all input and would need syntactic delimiters. + // Once that is addressed, the tasks constructed by the input tasks would need to be combined with the explicit tasks. + if (inputTasks.size > 0) { + if (other.size > 0) { + val inputStrings = inputTasks.map(_.key).mkString("Input task(s):\n\t", "\n\t", "\n") + val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n") + failure(s"Cannot mix input tasks with plain tasks/settings. 
$inputStrings $otherStrings") + } else + applyDynamicTasks(s, structure, maps(inputTasks)(castToAny), show) + } else { + val base = if (tasks.isEmpty) success(() => s) else + applyTasks(s, structure, maps(tasks)(x => success(castToAny(x))), show) + base.map { res => + () => + val newState = res() + if (show.settingValues && !settings.isEmpty) printSettings(settings, show.print) + newState + } + } + } + } + // this is a hack to avoid duplicating method implementations + private[this] def castToAny[T[_]](t: T[_]): T[Any] = t.asInstanceOf[T[Any]] - private[this] def maps[T, S](vs: Values[T])(f: T => S): Values[S] = - vs map { case KeyValue(k,v) => KeyValue(k, f(v)) } + private[this] def maps[T, S](vs: Values[T])(f: T => S): Values[S] = + vs map { case KeyValue(k, v) => KeyValue(k, f(v)) } + def projectAggregates[Proj](proj: Option[Reference], extra: BuildUtil[Proj], reverse: Boolean): Seq[ProjectRef] = + { + val resRef = proj.map(p => extra.projectRefFor(extra.resolveRef(p))) + resRef.toList.flatMap(ref => + if (reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref) + ) + } - def projectAggregates[Proj](proj: Option[Reference], extra: BuildUtil[Proj], reverse: Boolean): Seq[ProjectRef] = - { - val resRef = proj.map(p => extra.projectRefFor(extra.resolveRef(p))) - resRef.toList.flatMap(ref => - if(reverse) extra.aggregates.reverse(ref) else extra.aggregates.forward(ref) - ) - } + def aggregate[T, Proj](key: ScopedKey[T], rawMask: ScopeMask, extra: BuildUtil[Proj], reverse: Boolean = false): Seq[ScopedKey[T]] = + { + val mask = rawMask.copy(project = true) + Dag.topologicalSort(key) { k => + if (reverse) + reverseAggregatedKeys(k, extra, mask) + else if (aggregationEnabled(k, extra.data)) + aggregatedKeys(k, extra, mask) + else + Nil + } + } + def reverseAggregatedKeys[T](key: ScopedKey[T], extra: BuildUtil[_], mask: ScopeMask): Seq[ScopedKey[T]] = + projectAggregates(key.scope.project.toOption, extra, reverse = true) flatMap { ref => + val toResolve 
= key.scope.copy(project = Select(ref)) + val resolved = Resolve(extra, Global, key.key, mask)(toResolve) + val skey = ScopedKey(resolved, key.key) + if (aggregationEnabled(skey, extra.data)) skey :: Nil else Nil + } - def aggregate[T, Proj](key: ScopedKey[T], rawMask: ScopeMask, extra: BuildUtil[Proj], reverse: Boolean = false): Seq[ScopedKey[T]] = - { - val mask = rawMask.copy(project = true) - Dag.topologicalSort(key) { k => - if(reverse) - reverseAggregatedKeys(k, extra, mask) - else if(aggregationEnabled(k, extra.data)) - aggregatedKeys(k, extra, mask) - else - Nil - } - } - def reverseAggregatedKeys[T](key: ScopedKey[T], extra: BuildUtil[_], mask: ScopeMask): Seq[ScopedKey[T]] = - projectAggregates(key.scope.project.toOption, extra, reverse = true) flatMap { ref => - val toResolve = key.scope.copy(project = Select(ref)) - val resolved = Resolve(extra, Global, key.key, mask)(toResolve) - val skey = ScopedKey(resolved, key.key) - if( aggregationEnabled(skey, extra.data) ) skey :: Nil else Nil - } + def aggregatedKeys[T](key: ScopedKey[T], extra: BuildUtil[_], mask: ScopeMask): Seq[ScopedKey[T]] = + projectAggregates(key.scope.project.toOption, extra, reverse = false) map { ref => + val toResolve = key.scope.copy(project = Select(ref)) + val resolved = Resolve(extra, Global, key.key, mask)(toResolve) + ScopedKey(resolved, key.key) + } - def aggregatedKeys[T](key: ScopedKey[T], extra: BuildUtil[_], mask: ScopeMask): Seq[ScopedKey[T]] = - projectAggregates(key.scope.project.toOption, extra, reverse = false) map { ref => - val toResolve = key.scope.copy(project = Select(ref)) - val resolved = Resolve(extra, Global, key.key, mask)(toResolve) - ScopedKey(resolved, key.key) - } + def aggregationEnabled(key: ScopedKey[_], data: Settings[Scope]): Boolean = + Keys.aggregate in Scope.fillTaskAxis(key.scope, key.key) get data getOrElse true - def aggregationEnabled(key: ScopedKey[_], data: Settings[Scope]): Boolean = - Keys.aggregate in Scope.fillTaskAxis(key.scope, 
key.key) get data getOrElse true - - @deprecated("Use BuildUtil.aggregationRelation", "0.13.0") - def relation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = - BuildUtil.aggregationRelation(units) + @deprecated("Use BuildUtil.aggregationRelation", "0.13.0") + def relation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = + BuildUtil.aggregationRelation(units) } diff --git a/main/src/main/scala/sbt/Build.scala b/main/src/main/scala/sbt/Build.scala index 030e54dfb..d36ccbb97 100644 --- a/main/src/main/scala/sbt/Build.scala +++ b/main/src/main/scala/sbt/Build.scala @@ -3,65 +3,64 @@ */ package sbt - import java.io.File - import Keys.{name, organization, thisProject} - import Def.{ScopedKey, Setting} +import java.io.File +import Keys.{ name, organization, thisProject } +import Def.{ ScopedKey, Setting } // name is more like BuildDefinition, but that is too long -trait Build -{ - def projectDefinitions(baseDirectory: File): Seq[Project] = projects - def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq - // TODO: Should we grab the build core setting shere or in a plugin? - def settings: Seq[Setting[_]] = Defaults.buildCore - def buildLoaders: Seq[BuildLoader.Components] = Nil - /** Explicitly defines the root project. - * If None, the root project is the first project in the build's root directory or just the first project if none are in the root directory.*/ - def rootProject: Option[Project] = None +trait Build { + def projectDefinitions(baseDirectory: File): Seq[Project] = projects + def projects: Seq[Project] = ReflectUtilities.allVals[Project](this).values.toSeq + // TODO: Should we grab the build core setting shere or in a plugin? + def settings: Seq[Setting[_]] = Defaults.buildCore + def buildLoaders: Seq[BuildLoader.Components] = Nil + /** + * Explicitly defines the root project. 
+ * If None, the root project is the first project in the build's root directory or just the first project if none are in the root directory. + */ + def rootProject: Option[Project] = None } // TODO 0.14.0: decide if Plugin should be deprecated in favor of AutoPlugin -trait Plugin -{ - @deprecated("Override projectSettings or buildSettings instead.", "0.12.0") - def settings: Seq[Setting[_]] = Nil +trait Plugin { + @deprecated("Override projectSettings or buildSettings instead.", "0.12.0") + def settings: Seq[Setting[_]] = Nil - /** Settings to be appended to all projects in a build. */ - def projectSettings: Seq[Setting[_]] = Nil + /** Settings to be appended to all projects in a build. */ + def projectSettings: Seq[Setting[_]] = Nil - /** Settings to be appended at the build scope. */ - def buildSettings: Seq[Setting[_]] = Nil + /** Settings to be appended at the build scope. */ + def buildSettings: Seq[Setting[_]] = Nil - /** Settings to be appended at the global scope. */ - def globalSettings: Seq[Setting[_]] = Nil + /** Settings to be appended at the global scope. 
*/ + def globalSettings: Seq[Setting[_]] = Nil } -object Build -{ - val defaultEmpty: Build = new Build { override def projects = Nil } - val default: Build = new Build { override def projectDefinitions(base: File) = defaultProject(defaultID(base), base) :: Nil } - def defaultAggregated(id: String, aggregate: Seq[ProjectRef]): Build = new Build { - override def projectDefinitions(base: File) = defaultAggregatedProject(id, base, aggregate) :: Nil - } +object Build { + val defaultEmpty: Build = new Build { override def projects = Nil } + val default: Build = new Build { override def projectDefinitions(base: File) = defaultProject(defaultID(base), base) :: Nil } + def defaultAggregated(id: String, aggregate: Seq[ProjectRef]): Build = new Build { + override def projectDefinitions(base: File) = defaultAggregatedProject(id, base, aggregate) :: Nil + } - def defaultID(base: File, prefix: String = "default"): String = prefix + "-" + Hash.trimHashString(base.getAbsolutePath, 6) - @deprecated("Explicitly specify the ID", "0.13.0") - def defaultProject(base: File): Project = defaultProject(defaultID(base), base) - def defaultProject(id: String, base: File): Project = Project(id, base).settings( - // TODO - Can we move this somewhere else? ordering of settings is causing this to get borked. - // if the user has overridden the name, use the normal organization that is derived from the name. 
- organization := { - val overridden = thisProject.value.id == name.value - organization.?.value match { - case Some(o) if !overridden => o - case _ => "default" - } - //(thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } - } - ) - def defaultAggregatedProject(id: String, base: File, agg: Seq[ProjectRef]): Project = - defaultProject(id, base).aggregate(agg : _*) + def defaultID(base: File, prefix: String = "default"): String = prefix + "-" + Hash.trimHashString(base.getAbsolutePath, 6) + @deprecated("Explicitly specify the ID", "0.13.0") + def defaultProject(base: File): Project = defaultProject(defaultID(base), base) + def defaultProject(id: String, base: File): Project = Project(id, base).settings( + // TODO - Can we move this somewhere else? ordering of settings is causing this to get borked. + // if the user has overridden the name, use the normal organization that is derived from the name. + organization := { + val overridden = thisProject.value.id == name.value + organization.?.value match { + case Some(o) if !overridden => o + case _ => "default" + } + //(thisProject, organization, name) { (p, o, n) => if(p.id == n) "default" else o } + } + ) + def defaultAggregatedProject(id: String, base: File, agg: Seq[ProjectRef]): Project = + defaultProject(id, base).aggregate(agg: _*) - @deprecated("Use Attributed.data", "0.13.0") - def data[T](in: Seq[Attributed[T]]): Seq[T] = Attributed.data(in) - def analyzed(in: Seq[Attributed[_]]): Seq[inc.Analysis] = in.flatMap{ _.metadata.get(Keys.analysis) } + @deprecated("Use Attributed.data", "0.13.0") + def data[T](in: Seq[Attributed[T]]): Seq[T] = Attributed.data(in) + def analyzed(in: Seq[Attributed[_]]): Seq[inc.Analysis] = in.flatMap { _.metadata.get(Keys.analysis) } } diff --git a/main/src/main/scala/sbt/BuildDependencies.scala b/main/src/main/scala/sbt/BuildDependencies.scala index 09417c784..d32f0a990 100644 --- a/main/src/main/scala/sbt/BuildDependencies.scala +++ 
b/main/src/main/scala/sbt/BuildDependencies.scala @@ -1,32 +1,29 @@ package sbt - import Types.idFun - import BuildDependencies._ +import Types.idFun +import BuildDependencies._ -final class BuildDependencies private(val classpath: DependencyMap[ClasspathDep[ProjectRef]], val aggregate: DependencyMap[ProjectRef]) -{ - def classpathRefs(ref: ProjectRef): Seq[ProjectRef] = classpath(ref) map getID - def classpathTransitiveRefs(ref: ProjectRef): Seq[ProjectRef] = classpathTransitive(ref) +final class BuildDependencies private (val classpath: DependencyMap[ClasspathDep[ProjectRef]], val aggregate: DependencyMap[ProjectRef]) { + def classpathRefs(ref: ProjectRef): Seq[ProjectRef] = classpath(ref) map getID + def classpathTransitiveRefs(ref: ProjectRef): Seq[ProjectRef] = classpathTransitive(ref) - lazy val classpathTransitive: DependencyMap[ProjectRef] = transitive(classpath, getID) - lazy val aggregateTransitive: DependencyMap[ProjectRef] = transitive(aggregate, idFun[ProjectRef]) + lazy val classpathTransitive: DependencyMap[ProjectRef] = transitive(classpath, getID) + lazy val aggregateTransitive: DependencyMap[ProjectRef] = transitive(aggregate, idFun[ProjectRef]) - def addClasspath(ref: ProjectRef, deps: ClasspathDep[ProjectRef]*): BuildDependencies = - new BuildDependencies( classpath.updated(ref, deps ++ classpath.getOrElse(ref, Nil)), aggregate) - def addAggregate(ref: ProjectRef, deps: ProjectRef*): BuildDependencies = - new BuildDependencies(classpath, aggregate.updated(ref, deps ++ aggregate.getOrElse(ref, Nil))) + def addClasspath(ref: ProjectRef, deps: ClasspathDep[ProjectRef]*): BuildDependencies = + new BuildDependencies(classpath.updated(ref, deps ++ classpath.getOrElse(ref, Nil)), aggregate) + def addAggregate(ref: ProjectRef, deps: ProjectRef*): BuildDependencies = + new BuildDependencies(classpath, aggregate.updated(ref, deps ++ aggregate.getOrElse(ref, Nil))) } -object BuildDependencies -{ - def apply(classpath: 
DependencyMap[ClasspathDep[ProjectRef]], aggregate: DependencyMap[ProjectRef]): BuildDependencies = - new BuildDependencies(classpath, aggregate) +object BuildDependencies { + def apply(classpath: DependencyMap[ClasspathDep[ProjectRef]], aggregate: DependencyMap[ProjectRef]): BuildDependencies = + new BuildDependencies(classpath, aggregate) - type DependencyMap[D] = Map[ProjectRef, Seq[D]] - def transitive[D](deps: DependencyMap[D], extract: D => ProjectRef): DependencyMap[ProjectRef] = - for( (ref, _) <- deps ) yield - { - val sorted = Dag.topologicalSort(ref)(d => deps(d) map extract) - (ref, sorted dropRight 1) - } - val getID: ClasspathDep[ProjectRef] => ProjectRef = _.project + type DependencyMap[D] = Map[ProjectRef, Seq[D]] + def transitive[D](deps: DependencyMap[D], extract: D => ProjectRef): DependencyMap[ProjectRef] = + for ((ref, _) <- deps) yield { + val sorted = Dag.topologicalSort(ref)(d => deps(d) map extract) + (ref, sorted dropRight 1) + } + val getID: ClasspathDep[ProjectRef] => ProjectRef = _.project } \ No newline at end of file diff --git a/main/src/main/scala/sbt/BuildLoader.scala b/main/src/main/scala/sbt/BuildLoader.scala index 6affab1cb..d22327073 100644 --- a/main/src/main/scala/sbt/BuildLoader.scala +++ b/main/src/main/scala/sbt/BuildLoader.scala @@ -3,143 +3,140 @@ */ package sbt - import java.io.File - import java.net.URI - import BuildLoader._ - import Alternatives._ - import Types.{const,idFun} +import java.io.File +import java.net.URI +import BuildLoader._ +import Alternatives._ +import Types.{ const, idFun } -final class MultiHandler[S,T](builtIn: S=>Option[T], root: Option[S => Option[T]], nonRoots: List[(URI, S => Option[T])], getURI: S => URI, log: S => Logger) -{ - def applyFun: S => Option[T] = apply _ - def apply(info: S): Option[T] = - (baseLoader(info), applyNonRoots(info)) match - { - case (None, Nil) => None - case (None, xs @ (_, nr) :: ignored ) => - if(!ignored.isEmpty) warn("Using first of multiple matching non-root 
build resolvers for " + getURI(info), log(info), xs) - Some(nr) - case (Some(b), xs) => - if(!xs.isEmpty) warn("Ignoring shadowed non-root build resolver(s) for " + getURI(info), log(info), xs) - Some(b) - } +final class MultiHandler[S, T](builtIn: S => Option[T], root: Option[S => Option[T]], nonRoots: List[(URI, S => Option[T])], getURI: S => URI, log: S => Logger) { + def applyFun: S => Option[T] = apply _ + def apply(info: S): Option[T] = + (baseLoader(info), applyNonRoots(info)) match { + case (None, Nil) => None + case (None, xs @ (_, nr) :: ignored) => + if (!ignored.isEmpty) warn("Using first of multiple matching non-root build resolvers for " + getURI(info), log(info), xs) + Some(nr) + case (Some(b), xs) => + if (!xs.isEmpty) warn("Ignoring shadowed non-root build resolver(s) for " + getURI(info), log(info), xs) + Some(b) + } - def baseLoader: S => Option[T] = root match { case Some(rl) => rl | builtIn; case None => builtIn } + def baseLoader: S => Option[T] = root match { case Some(rl) => rl | builtIn; case None => builtIn } - def addNonRoot(uri: URI, loader: S => Option[T]) = new MultiHandler(builtIn, root, (uri, loader) :: nonRoots, getURI, log) - def setRoot(resolver: S => Option[T]) = new MultiHandler(builtIn, Some(resolver), nonRoots, getURI, log) - def applyNonRoots(info: S): List[(URI, T)] = - nonRoots flatMap { case (definingURI, loader) => loader(info) map { unit => (definingURI, unit) } } + def addNonRoot(uri: URI, loader: S => Option[T]) = new MultiHandler(builtIn, root, (uri, loader) :: nonRoots, getURI, log) + def setRoot(resolver: S => Option[T]) = new MultiHandler(builtIn, Some(resolver), nonRoots, getURI, log) + def applyNonRoots(info: S): List[(URI, T)] = + nonRoots flatMap { case (definingURI, loader) => loader(info) map { unit => (definingURI, unit) } } - private[this] def warn(baseMessage: String, log: Logger, matching: Seq[(URI, T)]) - { - log.warn(baseMessage) - log.debug("Non-root build resolvers defined in:") - 
log.debug(matching.map(_._1).mkString("\n\t")) - } + private[this] def warn(baseMessage: String, log: Logger, matching: Seq[(URI, T)]) { + log.warn(baseMessage) + log.debug("Non-root build resolvers defined in:") + log.debug(matching.map(_._1).mkString("\n\t")) + } } -object BuildLoader -{ - /** in: Build URI and staging directory - * out: None if unhandled or Some containing the retrieve function, which returns the directory retrieved to (can be the same as the staging directory) */ - type Resolver = ResolveInfo => Option[() => File] - type Builder = BuildInfo => Option[() => BuildUnit] - type Transformer = TransformInfo => BuildUnit - type Loader = LoadInfo => Option[() => BuildUnit] - type TransformAll = PartBuild => PartBuild +object BuildLoader { + /** + * in: Build URI and staging directory + * out: None if unhandled or Some containing the retrieve function, which returns the directory retrieved to (can be the same as the staging directory) + */ + type Resolver = ResolveInfo => Option[() => File] + type Builder = BuildInfo => Option[() => BuildUnit] + type Transformer = TransformInfo => BuildUnit + type Loader = LoadInfo => Option[() => BuildUnit] + type TransformAll = PartBuild => PartBuild - final class Components(val resolver: Resolver, val builder: Builder, val transformer: Transformer, val full: Loader, val transformAll: TransformAll) { - def | (cs: Components): Components = - new Components(resolver | cs.resolver, builder | cs.builder, seq(transformer, cs.transformer), full | cs.full, transformAll andThen cs.transformAll) - } - def transform(t: Transformer): Components = components(transformer = t) - def resolve(r: Resolver): Components = components(resolver = r) - def build(b: Builder): Components = components(builder = b) - def full(f: Loader): Components = components(full = f) - def transformAll(t: TransformAll) = components(transformAll = t) - def components(resolver: Resolver = const(None), builder: Builder = const(None), transformer: Transformer = 
_.unit, full: Loader = const(None), transformAll: TransformAll = idFun) = - new Components(resolver, builder, transformer, full, transformAll) + final class Components(val resolver: Resolver, val builder: Builder, val transformer: Transformer, val full: Loader, val transformAll: TransformAll) { + def |(cs: Components): Components = + new Components(resolver | cs.resolver, builder | cs.builder, seq(transformer, cs.transformer), full | cs.full, transformAll andThen cs.transformAll) + } + def transform(t: Transformer): Components = components(transformer = t) + def resolve(r: Resolver): Components = components(resolver = r) + def build(b: Builder): Components = components(builder = b) + def full(f: Loader): Components = components(full = f) + def transformAll(t: TransformAll) = components(transformAll = t) + def components(resolver: Resolver = const(None), builder: Builder = const(None), transformer: Transformer = _.unit, full: Loader = const(None), transformAll: TransformAll = idFun) = + new Components(resolver, builder, transformer, full, transformAll) - def seq(a: Transformer, b: Transformer): Transformer = info => b(info.setUnit(a(info))) + def seq(a: Transformer, b: Transformer): Transformer = info => b(info.setUnit(a(info))) - sealed trait Info { - def uri: URI - def config: LoadBuildConfiguration - def state: State - } - final class ResolveInfo(val uri: URI, val staging: File, val config: LoadBuildConfiguration, val state: State) extends Info - final class BuildInfo(val uri: URI, val base: File, val config: LoadBuildConfiguration, val state: State) extends Info - final class TransformInfo(val uri: URI, val base: File, val unit: BuildUnit, val config: LoadBuildConfiguration, val state: State) extends Info { - def setUnit(newUnit: BuildUnit): TransformInfo = new TransformInfo(uri, base, newUnit, config, state) - } + sealed trait Info { + def uri: URI + def config: LoadBuildConfiguration + def state: State + } + final class ResolveInfo(val uri: URI, val staging: 
File, val config: LoadBuildConfiguration, val state: State) extends Info + final class BuildInfo(val uri: URI, val base: File, val config: LoadBuildConfiguration, val state: State) extends Info + final class TransformInfo(val uri: URI, val base: File, val unit: BuildUnit, val config: LoadBuildConfiguration, val state: State) extends Info { + def setUnit(newUnit: BuildUnit): TransformInfo = new TransformInfo(uri, base, newUnit, config, state) + } - final class LoadInfo(val uri: URI, val staging: File, val config: LoadBuildConfiguration, val state: State, val components: Components) extends Info + final class LoadInfo(val uri: URI, val staging: File, val config: LoadBuildConfiguration, val state: State, val components: Components) extends Info - def apply(base: Components, fail: URI => Nothing, s: State, config: LoadBuildConfiguration): BuildLoader = - { - def makeMulti[S <: Info, T](base: S => Option[T]) = new MultiHandler[S,T](base, None, Nil, _.uri, _.config.log) - new BuildLoader(fail, s, config, makeMulti(base.resolver), makeMulti(base.builder), base.transformer, makeMulti(base.full), base.transformAll) - } + def apply(base: Components, fail: URI => Nothing, s: State, config: LoadBuildConfiguration): BuildLoader = + { + def makeMulti[S <: Info, T](base: S => Option[T]) = new MultiHandler[S, T](base, None, Nil, _.uri, _.config.log) + new BuildLoader(fail, s, config, makeMulti(base.resolver), makeMulti(base.builder), base.transformer, makeMulti(base.full), base.transformAll) + } - def componentLoader: Loader = (info: LoadInfo) => { - import info.{components, config, staging, state, uri} - val cs = info.components - for { - resolve <- cs.resolver(new ResolveInfo(uri, staging, config, state)) - base = resolve() - build <- cs.builder(new BuildInfo(uri, base, config, state)) - } yield () => { - val unit = build() - cs.transformer(new TransformInfo(uri, base, unit, config, state)) - } - } + def componentLoader: Loader = (info: LoadInfo) => { + import info.{ components, 
config, staging, state, uri } + val cs = info.components + for { + resolve <- cs.resolver(new ResolveInfo(uri, staging, config, state)) + base = resolve() + build <- cs.builder(new BuildInfo(uri, base, config, state)) + } yield () => { + val unit = build() + cs.transformer(new TransformInfo(uri, base, unit, config, state)) + } + } } final class BuildLoader( - val fail: URI => Nothing, - val state: State, - val config: LoadBuildConfiguration, - val resolvers: MultiHandler[ResolveInfo, ()=>File], - val builders: MultiHandler[BuildInfo, ()=>BuildUnit], - val transformer: Transformer, - val full: MultiHandler[LoadInfo, ()=>BuildUnit], - val transformAll: TransformAll) -{ - def addNonRoot(uri: URI, loaders: Components): BuildLoader = - new BuildLoader(fail, state, config, - resolvers.addNonRoot(uri, loaders.resolver), - builders.addNonRoot(uri, loaders.builder), - seq(transformer, loaders.transformer), - full.addNonRoot(uri, loaders.full), - transformAll andThen loaders.transformAll - ) - def setRoot(loaders: Components): BuildLoader = - new BuildLoader(fail, state, config, - resolvers.setRoot(loaders.resolver), - builders.setRoot(loaders.builder), - seq(loaders.transformer, transformer), - full.setRoot(loaders.full), - loaders.transformAll andThen transformAll - ) - def resetPluginDepth: BuildLoader = copyWithNewPM(config.pluginManagement.resetDepth) + val fail: URI => Nothing, + val state: State, + val config: LoadBuildConfiguration, + val resolvers: MultiHandler[ResolveInfo, () => File], + val builders: MultiHandler[BuildInfo, () => BuildUnit], + val transformer: Transformer, + val full: MultiHandler[LoadInfo, () => BuildUnit], + val transformAll: TransformAll) { + def addNonRoot(uri: URI, loaders: Components): BuildLoader = + new BuildLoader(fail, state, config, + resolvers.addNonRoot(uri, loaders.resolver), + builders.addNonRoot(uri, loaders.builder), + seq(transformer, loaders.transformer), + full.addNonRoot(uri, loaders.full), + transformAll andThen 
loaders.transformAll + ) + def setRoot(loaders: Components): BuildLoader = + new BuildLoader(fail, state, config, + resolvers.setRoot(loaders.resolver), + builders.setRoot(loaders.builder), + seq(loaders.transformer, transformer), + full.setRoot(loaders.full), + loaders.transformAll andThen transformAll + ) + def resetPluginDepth: BuildLoader = copyWithNewPM(config.pluginManagement.resetDepth) - def updatePluginManagement(overrides: Set[ModuleID]): BuildLoader = - { - val mgmt = config.pluginManagement - copyWithNewPM(mgmt.copy(overrides = mgmt.overrides ++ overrides)) - } - private[this] def copyWithNewPM(newpm: PluginManagement): BuildLoader = - { - val newConfig = config.copy(pluginManagement = newpm) - new BuildLoader(fail, state, newConfig, resolvers, builders, transformer, full, transformAll) - } + def updatePluginManagement(overrides: Set[ModuleID]): BuildLoader = + { + val mgmt = config.pluginManagement + copyWithNewPM(mgmt.copy(overrides = mgmt.overrides ++ overrides)) + } + private[this] def copyWithNewPM(newpm: PluginManagement): BuildLoader = + { + val newConfig = config.copy(pluginManagement = newpm) + new BuildLoader(fail, state, newConfig, resolvers, builders, transformer, full, transformAll) + } - def components = new Components(resolvers.applyFun, builders.applyFun, transformer, full.applyFun, transformAll) - def apply(uri: URI): BuildUnit = - { - val info = new LoadInfo(uri, config.stagingDirectory, config, state, components) - val load = full(info) getOrElse fail(uri) - load() - } + def components = new Components(resolvers.applyFun, builders.applyFun, transformer, full.applyFun, transformAll) + def apply(uri: URI): BuildUnit = + { + val info = new LoadInfo(uri, config.stagingDirectory, config, state, components) + val load = full(info) getOrElse fail(uri) + load() + } } \ No newline at end of file diff --git a/main/src/main/scala/sbt/BuildPaths.scala b/main/src/main/scala/sbt/BuildPaths.scala index d3b006b74..4efe35c1d 100644 --- 
a/main/src/main/scala/sbt/BuildPaths.scala +++ b/main/src/main/scala/sbt/BuildPaths.scala @@ -3,87 +3,85 @@ */ package sbt - import java.io.File - import java.net.URI - import KeyRanks.DSetting +import java.io.File +import java.net.URI +import KeyRanks.DSetting -object BuildPaths -{ - val globalBaseDirectory = AttributeKey[File]("global-base-directory", "The base directory for global sbt configuration and staging.", DSetting) - val globalPluginsDirectory = AttributeKey[File]("global-plugins-directory", "The base directory for global sbt plugins.", DSetting) - val globalSettingsDirectory = AttributeKey[File]("global-settings-directory", "The base directory for global sbt settings.", DSetting) - val stagingDirectory = AttributeKey[File]("staging-directory", "The directory for staging remote projects.", DSetting) +object BuildPaths { + val globalBaseDirectory = AttributeKey[File]("global-base-directory", "The base directory for global sbt configuration and staging.", DSetting) + val globalPluginsDirectory = AttributeKey[File]("global-plugins-directory", "The base directory for global sbt plugins.", DSetting) + val globalSettingsDirectory = AttributeKey[File]("global-settings-directory", "The base directory for global sbt settings.", DSetting) + val stagingDirectory = AttributeKey[File]("staging-directory", "The directory for staging remote projects.", DSetting) - import Path._ + import Path._ - def getGlobalBase(state: State): File = { - val default = defaultVersionedGlobalBase(binarySbtVersion(state)) - def getDefault = { checkTransition(state, default); default } - getFileSetting(globalBaseDirectory, GlobalBaseProperty, getDefault)(state) - } - private[this] def checkTransition(state: State, versioned: File) - { - val unversioned = defaultGlobalBase - def globalDefined(base: File): Boolean = - getGlobalPluginsDirectory(state, base).exists || - configurationSources(getGlobalSettingsDirectory(state, base)).exists(_.exists) - val warnTransition = 
!globalDefined(versioned) && globalDefined(unversioned) - if(warnTransition) - state.log.warn(globalDirTransitionWarning(unversioned, versioned)) - } + def getGlobalBase(state: State): File = { + val default = defaultVersionedGlobalBase(binarySbtVersion(state)) + def getDefault = { checkTransition(state, default); default } + getFileSetting(globalBaseDirectory, GlobalBaseProperty, getDefault)(state) + } + private[this] def checkTransition(state: State, versioned: File) { + val unversioned = defaultGlobalBase + def globalDefined(base: File): Boolean = + getGlobalPluginsDirectory(state, base).exists || + configurationSources(getGlobalSettingsDirectory(state, base)).exists(_.exists) + val warnTransition = !globalDefined(versioned) && globalDefined(unversioned) + if (warnTransition) + state.log.warn(globalDirTransitionWarning(unversioned, versioned)) + } - def getStagingDirectory(state: State, globalBase: File): File = - fileSetting(stagingDirectory, StagingProperty, defaultStaging(globalBase))(state) + def getStagingDirectory(state: State, globalBase: File): File = + fileSetting(stagingDirectory, StagingProperty, defaultStaging(globalBase))(state) - def getGlobalPluginsDirectory(state: State, globalBase: File): File = - fileSetting(globalPluginsDirectory, GlobalPluginsProperty, defaultGlobalPlugins(globalBase))(state) + def getGlobalPluginsDirectory(state: State, globalBase: File): File = + fileSetting(globalPluginsDirectory, GlobalPluginsProperty, defaultGlobalPlugins(globalBase))(state) - def getGlobalSettingsDirectory(state: State, globalBase: File): File = - fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state) + def getGlobalSettingsDirectory(state: State, globalBase: File): File = + fileSetting(globalSettingsDirectory, GlobalSettingsProperty, globalBase)(state) - private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)(state: State): File = - getFileSetting(stateKey, property, default)(state) + 
private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)(state: State): File = + getFileSetting(stateKey, property, default)(state) - def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)(state: State): File = - state get stateKey orElse getFileProperty(property) getOrElse default + def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)(state: State): File = + state get stateKey orElse getFileProperty(property) getOrElse default - def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap { path => - if(path.isEmpty) None else Some(new File(path)) - } + def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap { path => + if (path.isEmpty) None else Some(new File(path)) + } - def defaultVersionedGlobalBase(sbtVersion: String): File = defaultGlobalBase / sbtVersion - def defaultGlobalBase = Path.userHome / ConfigDirectoryName + def defaultVersionedGlobalBase(sbtVersion: String): File = defaultGlobalBase / sbtVersion + def defaultGlobalBase = Path.userHome / ConfigDirectoryName - private[this] def binarySbtVersion(state: State): String = - sbt.cross.CrossVersionUtil.binarySbtVersion(state.configuration.provider.id.version) - private[this] def defaultStaging(globalBase: File) = globalBase / "staging" - private[this] def defaultGlobalPlugins(globalBase: File) = globalBase / PluginsDirectoryName - - def configurationSources(base: File): Seq[File] = (base * (GlobFilter("*.sbt") - ".sbt")).get - def pluginDirectory(definitionBase: File) = definitionBase / PluginsDirectoryName + private[this] def binarySbtVersion(state: State): String = + sbt.cross.CrossVersionUtil.binarySbtVersion(state.configuration.provider.id.version) + private[this] def defaultStaging(globalBase: File) = globalBase / "staging" + private[this] def defaultGlobalPlugins(globalBase: File) = globalBase / PluginsDirectoryName - def 
evalOutputDirectory(base: File) = outputDirectory(base) / "config-classes" - def outputDirectory(base: File) = base / DefaultTargetName + def configurationSources(base: File): Seq[File] = (base * (GlobFilter("*.sbt") - ".sbt")).get + def pluginDirectory(definitionBase: File) = definitionBase / PluginsDirectoryName - def projectStandard(base: File) = base / "project" + def evalOutputDirectory(base: File) = outputDirectory(base) / "config-classes" + def outputDirectory(base: File) = base / DefaultTargetName - @deprecated("Use projectStandard. The alternative project directory location has been removed.", "0.13.0") - def projectHidden(base: File) = projectStandard(base) - @deprecated("Use projectStandard. The alternative project directory location has been removed.", "0.13.0") - def selectProjectDir(base: File, log: Logger) = projectStandard(base) + def projectStandard(base: File) = base / "project" - final val PluginsDirectoryName = "plugins" - final val DefaultTargetName = "target" - final val ConfigDirectoryName = ".sbt" - final val GlobalBaseProperty = "sbt.global.base" - final val StagingProperty = "sbt.global.staging" - final val GlobalPluginsProperty = "sbt.global.plugins" - final val GlobalSettingsProperty = "sbt.global.settings" + @deprecated("Use projectStandard. The alternative project directory location has been removed.", "0.13.0") + def projectHidden(base: File) = projectStandard(base) + @deprecated("Use projectStandard. 
The alternative project directory location has been removed.", "0.13.0") + def selectProjectDir(base: File, log: Logger) = projectStandard(base) - def crossPath(base: File, instance: xsbti.compile.ScalaInstance): File = base / ("scala_" + instance.version) + final val PluginsDirectoryName = "plugins" + final val DefaultTargetName = "target" + final val ConfigDirectoryName = ".sbt" + final val GlobalBaseProperty = "sbt.global.base" + final val StagingProperty = "sbt.global.staging" + final val GlobalPluginsProperty = "sbt.global.plugins" + final val GlobalSettingsProperty = "sbt.global.settings" - private[this] def globalDirTransitionWarning(unversioned: File, versioned: File): String = -s"""The global sbt directory is now versioned and is located at $versioned. + def crossPath(base: File, instance: xsbti.compile.ScalaInstance): File = base / ("scala_" + instance.version) + + private[this] def globalDirTransitionWarning(unversioned: File, versioned: File): String = + s"""The global sbt directory is now versioned and is located at $versioned. You are seeing this warning because there is global configuration in $unversioned but not in $versioned. The global sbt directory may be changed via the $GlobalBaseProperty system property. 
""" diff --git a/main/src/main/scala/sbt/BuildStructure.scala b/main/src/main/scala/sbt/BuildStructure.scala index be6bad658..de2ff7763 100644 --- a/main/src/main/scala/sbt/BuildStructure.scala +++ b/main/src/main/scala/sbt/BuildStructure.scala @@ -3,211 +3,215 @@ */ package sbt - import java.io.File - import java.net.URI - import Def.{displayFull, ScopedKey, ScopeLocal, Setting} - import Attributed.data - import BuildPaths.outputDirectory - import Scope.GlobalScope - import BuildStreams.Streams - import Path._ +import java.io.File +import java.net.URI +import Def.{ displayFull, ScopedKey, ScopeLocal, Setting } +import Attributed.data +import BuildPaths.outputDirectory +import Scope.GlobalScope +import BuildStreams.Streams +import Path._ -final class BuildStructure(val units: Map[URI, LoadedBuildUnit], val root: URI, val settings: Seq[Setting[_]], val data: Settings[Scope], val index: StructureIndex, val streams: State => Streams, val delegates: Scope => Seq[Scope], val scopeLocal: ScopeLocal) -{ - val rootProject: URI => String = Load getRootProject units - def allProjects: Seq[ResolvedProject] = units.values.flatMap(_.defined.values).toSeq - def allProjects(build: URI): Seq[ResolvedProject] = units.get(build).toList.flatMap(_.defined.values) - def allProjectRefs: Seq[ProjectRef] = units.toSeq flatMap { case (build, unit) => refs(build, unit.defined.values.toSeq) } - def allProjectRefs(build: URI): Seq[ProjectRef] = refs(build, allProjects(build)) - val extra: BuildUtil[ResolvedProject] = BuildUtil(root, units, index.keyIndex, data) - private[this] def refs(build: URI, projects: Seq[ResolvedProject]): Seq[ProjectRef] = projects.map { p => ProjectRef(build, p.id) } +final class BuildStructure(val units: Map[URI, LoadedBuildUnit], val root: URI, val settings: Seq[Setting[_]], val data: Settings[Scope], val index: StructureIndex, val streams: State => Streams, val delegates: Scope => Seq[Scope], val scopeLocal: ScopeLocal) { + val rootProject: URI => String = Load 
getRootProject units + def allProjects: Seq[ResolvedProject] = units.values.flatMap(_.defined.values).toSeq + def allProjects(build: URI): Seq[ResolvedProject] = units.get(build).toList.flatMap(_.defined.values) + def allProjectRefs: Seq[ProjectRef] = units.toSeq flatMap { case (build, unit) => refs(build, unit.defined.values.toSeq) } + def allProjectRefs(build: URI): Seq[ProjectRef] = refs(build, allProjects(build)) + val extra: BuildUtil[ResolvedProject] = BuildUtil(root, units, index.keyIndex, data) + private[this] def refs(build: URI, projects: Seq[ResolvedProject]): Seq[ProjectRef] = projects.map { p => ProjectRef(build, p.id) } } // information that is not original, but can be reconstructed from the rest of BuildStructure final class StructureIndex( - val keyMap: Map[String, AttributeKey[_]], - val taskToKey: Map[Task[_], ScopedKey[Task[_]]], - val triggers: Triggers[Task], - val keyIndex: KeyIndex, - val aggregateKeyIndex: KeyIndex -) + val keyMap: Map[String, AttributeKey[_]], + val taskToKey: Map[Task[_], ScopedKey[Task[_]]], + val triggers: Triggers[Task], + val keyIndex: KeyIndex, + val aggregateKeyIndex: KeyIndex) -/** A resolved build unit. (`ResolvedBuildUnit` would be a better name to distinguish it from the loaded, but unresolved `BuildUnit`.) -* @param unit The loaded, but unresolved [[BuildUnit]] this was resolved from. -* @param defined The definitive map from project IDs to resolved projects. -* These projects have had [[Reference]]s resolved and [[AutoPlugin]]s evaluated. -* @param rootProjects The list of project IDs for the projects considered roots of this build. -* The first root project is used as the default in several situations where a project is not otherwise selected. 
-*/ -final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, ResolvedProject], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase -{ - assert(!rootProjects.isEmpty, "No root projects defined for build unit " + unit) - /** The project to use as the default when one is not otherwise selected. - * [[LocalRootProject]] resolves to this from within the same build.*/ - val root = rootProjects.head +/** + * A resolved build unit. (`ResolvedBuildUnit` would be a better name to distinguish it from the loaded, but unresolved `BuildUnit`.) + * @param unit The loaded, but unresolved [[BuildUnit]] this was resolved from. + * @param defined The definitive map from project IDs to resolved projects. + * These projects have had [[Reference]]s resolved and [[AutoPlugin]]s evaluated. + * @param rootProjects The list of project IDs for the projects considered roots of this build. + * The first root project is used as the default in several situations where a project is not otherwise selected. + */ +final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, ResolvedProject], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { + assert(!rootProjects.isEmpty, "No root projects defined for build unit " + unit) + /** + * The project to use as the default when one is not otherwise selected. + * [[LocalRootProject]] resolves to this from within the same build. + */ + val root = rootProjects.head - /** The base directory of the build unit (not the build definition).*/ - def localBase = unit.localBase + /** The base directory of the build unit (not the build definition).*/ + def localBase = unit.localBase - /** The classpath to use when compiling against this build unit's publicly visible code. - * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. 
*/ - def classpath: Seq[File] = unit.definitions.target ++ unit.plugins.classpath + /** + * The classpath to use when compiling against this build unit's publicly visible code. + * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. + */ + def classpath: Seq[File] = unit.definitions.target ++ unit.plugins.classpath - /** The class loader to use for this build unit's publicly visible code. - * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ - def loader = unit.definitions.loader + /** + * The class loader to use for this build unit's publicly visible code. + * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. + */ + def loader = unit.definitions.loader - /** The imports to use for .sbt files, `consoleProject` and other contexts that use code from the build definition. */ - def imports = BuildUtil.getImports(unit) - override def toString = unit.toString + /** The imports to use for .sbt files, `consoleProject` and other contexts that use code from the build definition. */ + def imports = BuildUtil.getImports(unit) + override def toString = unit.toString } // TODO: figure out how to deprecate and drop buildNames -/** The built and loaded build definition, including loaded but unresolved [[Project]]s, for a build unit (for a single URI). -* -* @param base The base directory of the build definition, typically `/project/`. -* @param loader The ClassLoader containing all classes and plugins for the build definition project. -* Note that this does not include classes for .sbt files. -* @param builds The list of [[Build]]s for the build unit. -* In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. -* @param projects The list of all [[Project]]s from all [[Build]]s. -* These projects have not yet been resolved, but they have had auto-plugins applied. 
-* In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `plugins` -* and their `settings` and `configurations` updated as appropriate. -* @param buildNames No longer used and will be deprecated once feasible. -*/ +/** + * The built and loaded build definition, including loaded but unresolved [[Project]]s, for a build unit (for a single URI). + * + * @param base The base directory of the build definition, typically `/project/`. + * @param loader The ClassLoader containing all classes and plugins for the build definition project. + * Note that this does not include classes for .sbt files. + * @param builds The list of [[Build]]s for the build unit. + * In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. + * @param projects The list of all [[Project]]s from all [[Build]]s. + * These projects have not yet been resolved, but they have had auto-plugins applied. + * In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `plugins` + * and their `settings` and `configurations` updated as appropriate. + * @param buildNames No longer used and will be deprecated once feasible. + */ final class LoadedDefinitions(val base: File, val target: Seq[File], val loader: ClassLoader, val builds: Seq[Build], val projects: Seq[Project], val buildNames: Seq[String]) /** Auto-detected top-level modules (as in `object X`) of type `T` paired with their source names. */ -final class DetectedModules[T](val modules: Seq[(String, T)]) -{ - /** The source names of the modules. This is "X" in `object X`, as opposed to the implementation class name "X$". - * The names are returned in a stable order such that `names zip values` pairs a name with the actual module. */ - def names: Seq[String] = modules.map(_._1) +final class DetectedModules[T](val modules: Seq[(String, T)]) { + /** + * The source names of the modules. 
This is "X" in `object X`, as opposed to the implementation class name "X$". + * The names are returned in a stable order such that `names zip values` pairs a name with the actual module. + */ + def names: Seq[String] = modules.map(_._1) - /** The singleton value of the module. - * The values are returned in a stable order such that `names zip values` pairs a name with the actual module. */ - def values: Seq[T] = modules.map(_._2) + /** + * The singleton value of the module. + * The values are returned in a stable order such that `names zip values` pairs a name with the actual module. + */ + def values: Seq[T] = modules.map(_._2) } /** Auto-detected auto plugin. */ case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasAutoImport: Boolean) -/** Auto-discovered modules for the build definition project. These include modules defined in build definition sources -* as well as modules in binary dependencies. -* -* @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. -*/ -final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugins: Seq[DetectedAutoPlugin], val builds: DetectedModules[Build]) -{ - /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. */ - lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ - (autoPlugins flatMap { case DetectedAutoPlugin(name, ap, hasAutoImport) => - if (hasAutoImport) Some(name + ".autoImport") - else None - })) ++ - BuildUtil.importNamesRoot(autoPlugins map { _.name }) +/** + * Auto-discovered modules for the build definition project. These include modules defined in build definition sources + * as well as modules in binary dependencies. + * + * @param builds The [[Build]]s detected in the build definition. 
This does not include the default [[Build]] that sbt creates if none is defined. + */ +final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugins: Seq[DetectedAutoPlugin], val builds: DetectedModules[Build]) { + /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. */ + lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ + (autoPlugins flatMap { + case DetectedAutoPlugin(name, ap, hasAutoImport) => + if (hasAutoImport) Some(name + ".autoImport") + else None + })) ++ + BuildUtil.importNamesRoot(autoPlugins map { _.name }) - /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ - lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map {_.value}) + /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ + lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map { _.value }) } -/** The built and loaded build definition project. -* @param base The base directory for the build definition project (not the base of the project itself). -* @param pluginData Evaluated tasks/settings from the build definition for later use. -* This is necessary because the build definition project is discarded. -* @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. -* @param detected Auto-detected modules in the build definition. 
-*/ -final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) -{ - @deprecated("Use the primary constructor.", "0.13.2") - def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = - this(base, pluginData, loader, - new DetectedPlugins(new DetectedModules(pluginNames zip plugins), Nil, new DetectedModules(Nil)) - ) +/** + * The built and loaded build definition project. + * @param base The base directory for the build definition project (not the base of the project itself). + * @param pluginData Evaluated tasks/settings from the build definition for later use. + * This is necessary because the build definition project is discarded. + * @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. + * @param detected Auto-detected modules in the build definition. + */ +final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) { + @deprecated("Use the primary constructor.", "0.13.2") + def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = + this(base, pluginData, loader, + new DetectedPlugins(new DetectedModules(pluginNames zip plugins), Nil, new DetectedModules(Nil)) + ) - @deprecated("Use detected.plugins.values.", "0.13.2") - val plugins: Seq[Plugin] = detected.plugins.values - @deprecated("Use detected.plugins.names.", "0.13.2") - val pluginNames: Seq[String] = detected.plugins.names + @deprecated("Use detected.plugins.values.", "0.13.2") + val plugins: Seq[Plugin] = detected.plugins.values + @deprecated("Use detected.plugins.names.", "0.13.2") + val pluginNames: Seq[String] = detected.plugins.names - def fullClasspath: Seq[Attributed[File]] = pluginData.classpath - def classpath = data(fullClasspath) + def fullClasspath: Seq[Attributed[File]] = 
pluginData.classpath + def classpath = data(fullClasspath) } -/** The loaded, but unresolved build unit. -* @param uri The uniquely identifying URI for the build. -* @param localBase The working location of the build on the filesystem. -* For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. -*/ -final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) -{ - override def toString = if(uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase +")") +/** + * The loaded, but unresolved build unit. + * @param uri The uniquely identifying URI for the build. + * @param localBase The working location of the build on the filesystem. + * For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. + */ +final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) { + override def toString = if (uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase + ")") } -final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) -{ - BuildUtil.checkCycles(units) - def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for( (uri, unit) <- units.toSeq; (id, proj) <- unit.defined ) yield ProjectRef(uri, id) -> proj - def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) +final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) { + BuildUtil.checkCycles(units) + def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for ((uri, unit) <- units.toSeq; (id, proj) <- unit.defined) yield ProjectRef(uri, id) -> proj + def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) - private[sbt] def autos = 
GroupedAutoPlugins(units) + private[sbt] def autos = GroupedAutoPlugins(units) } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] } -final class PartBuildUnit(val unit: BuildUnit, val defined: Map[String, Project], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase -{ - def resolve(f: Project => ResolvedProject): LoadedBuildUnit = new LoadedBuildUnit(unit, defined mapValues f toMap, rootProjects, buildSettings) - def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f) +final class PartBuildUnit(val unit: BuildUnit, val defined: Map[String, Project], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { + def resolve(f: Project => ResolvedProject): LoadedBuildUnit = new LoadedBuildUnit(unit, defined mapValues f toMap, rootProjects, buildSettings) + def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f) } -object BuildStreams -{ - type Streams = std.Streams[ScopedKey[_]] +object BuildStreams { + type Streams = std.Streams[ScopedKey[_]] - final val GlobalPath = "$global" - final val BuildUnitPath = "$build" - final val StreamsDirectory = "streams" + final val GlobalPath = "$global" + final val BuildUnitPath = "$build" + final val StreamsDirectory = "streams" - def mkStreams(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope]): State => Streams = s => - s get Keys.stateStreams getOrElse std.Streams( path(units, root, data), displayFull, LogManager.construct(data, s) ) + def mkStreams(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope]): State => Streams = s => + s get Keys.stateStreams getOrElse std.Streams(path(units, root, data), displayFull, LogManager.construct(data, s)) - def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(scoped: ScopedKey[_]): File 
= - resolvePath( projectPath(units, root, scoped, data), nonProjectPath(scoped) ) + def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(scoped: ScopedKey[_]): File = + resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped)) - def resolvePath(base: File, components: Seq[String]): File = - (base /: components)( (b,p) => new File(b,p) ) + def resolvePath(base: File, components: Seq[String]): File = + (base /: components)((b, p) => new File(b, p)) - def pathComponent[T](axis: ScopeAxis[T], scoped: ScopedKey[_], label: String)(show: T => String): String = - axis match - { - case Global => GlobalPath - case This => sys.error("Unresolved This reference for " + label + " in " + displayFull(scoped)) - case Select(t) => show(t) - } - def nonProjectPath[T](scoped: ScopedKey[T]): Seq[String] = - { - val scope = scoped.scope - pathComponent(scope.config, scoped, "config")(_.name) :: - pathComponent(scope.task, scoped, "task")(_.label) :: - pathComponent(scope.extra, scoped, "extra")(showAMap) :: - scoped.key.label :: - Nil - } - def showAMap(a: AttributeMap): String = - a.entries.toSeq.sortBy(_.key.label).map { case AttributeEntry(key, value) => key.label + "=" + value.toString } mkString(" ") - def projectPath(units: Map[URI, LoadedBuildUnit], root: URI, scoped: ScopedKey[_], data: Settings[Scope]): File = - scoped.scope.project match - { - case Global => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath - case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath - case Select(pr @ ProjectRef(uri, id)) => refTarget(pr, units(uri).defined(id).base, data) - case Select(pr) => sys.error("Unresolved project reference (" + pr + ") in " + displayFull(scoped)) - case This => sys.error("Unresolved project reference (This) in " + displayFull(scoped)) - } + def pathComponent[T](axis: ScopeAxis[T], scoped: ScopedKey[_], label: String)(show: T => String): String = + axis match { + case Global => 
GlobalPath + case This => sys.error("Unresolved This reference for " + label + " in " + displayFull(scoped)) + case Select(t) => show(t) + } + def nonProjectPath[T](scoped: ScopedKey[T]): Seq[String] = + { + val scope = scoped.scope + pathComponent(scope.config, scoped, "config")(_.name) :: + pathComponent(scope.task, scoped, "task")(_.label) :: + pathComponent(scope.extra, scoped, "extra")(showAMap) :: + scoped.key.label :: + Nil + } + def showAMap(a: AttributeMap): String = + a.entries.toSeq.sortBy(_.key.label).map { case AttributeEntry(key, value) => key.label + "=" + value.toString } mkString (" ") + def projectPath(units: Map[URI, LoadedBuildUnit], root: URI, scoped: ScopedKey[_], data: Settings[Scope]): File = + scoped.scope.project match { + case Global => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath + case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath + case Select(pr @ ProjectRef(uri, id)) => refTarget(pr, units(uri).defined(id).base, data) + case Select(pr) => sys.error("Unresolved project reference (" + pr + ") in " + displayFull(scoped)) + case This => sys.error("Unresolved project reference (This) in " + displayFull(scoped)) + } - def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File = - refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data) - def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File = - (Keys.target in scope get data getOrElse outputDirectory(fallbackBase).asFile ) / StreamsDirectory + def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File = + refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data) + def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File = + (Keys.target in scope get data getOrElse outputDirectory(fallbackBase).asFile) / StreamsDirectory } \ No newline at end of file diff --git a/main/src/main/scala/sbt/BuildUtil.scala 
b/main/src/main/scala/sbt/BuildUtil.scala index db0d31f8e..c4068c0f8 100644 --- a/main/src/main/scala/sbt/BuildUtil.scala +++ b/main/src/main/scala/sbt/BuildUtil.scala @@ -1,108 +1,102 @@ package sbt - import java.net.URI +import java.net.URI final class BuildUtil[Proj]( - val keyIndex: KeyIndex, - val data: Settings[Scope], - val root: URI, - val rootProjectID: URI => String, - val project: (URI, String) => Proj, - val configurations: Proj => Seq[ConfigKey], - val aggregates: Relation[ProjectRef, ProjectRef] -) -{ - def rootProject(uri: URI): Proj = - project(uri, rootProjectID(uri)) + val keyIndex: KeyIndex, + val data: Settings[Scope], + val root: URI, + val rootProjectID: URI => String, + val project: (URI, String) => Proj, + val configurations: Proj => Seq[ConfigKey], + val aggregates: Relation[ProjectRef, ProjectRef]) { + def rootProject(uri: URI): Proj = + project(uri, rootProjectID(uri)) - def resolveRef(ref: Reference): ResolvedReference = - Scope.resolveReference(root, rootProjectID, ref) + def resolveRef(ref: Reference): ResolvedReference = + Scope.resolveReference(root, rootProjectID, ref) - def projectFor(ref: ResolvedReference): Proj = ref match { - case ProjectRef(uri, id) => project(uri, id) - case BuildRef(uri) => rootProject(uri) - } - def projectRefFor(ref: ResolvedReference): ProjectRef = ref match { - case p: ProjectRef => p - case BuildRef(uri) => ProjectRef(uri, rootProjectID(uri)) - } - def projectForAxis(ref: Option[ResolvedReference]): Proj = ref match { - case Some(ref) => projectFor(ref) - case None => rootProject(root) - } - def exactProject(refOpt: Option[Reference]): Option[Proj] = refOpt map resolveRef flatMap { - case ProjectRef(uri, id) => Some(project(uri, id)) - case _ => None - } + def projectFor(ref: ResolvedReference): Proj = ref match { + case ProjectRef(uri, id) => project(uri, id) + case BuildRef(uri) => rootProject(uri) + } + def projectRefFor(ref: ResolvedReference): ProjectRef = ref match { + case p: ProjectRef => p + 
case BuildRef(uri) => ProjectRef(uri, rootProjectID(uri)) + } + def projectForAxis(ref: Option[ResolvedReference]): Proj = ref match { + case Some(ref) => projectFor(ref) + case None => rootProject(root) + } + def exactProject(refOpt: Option[Reference]): Option[Proj] = refOpt map resolveRef flatMap { + case ProjectRef(uri, id) => Some(project(uri, id)) + case _ => None + } - val configurationsForAxis: Option[ResolvedReference] => Seq[String] = - refOpt => configurations(projectForAxis(refOpt)).map(_.name) + val configurationsForAxis: Option[ResolvedReference] => Seq[String] = + refOpt => configurations(projectForAxis(refOpt)).map(_.name) } -object BuildUtil -{ - def apply(root: URI, units: Map[URI, LoadedBuildUnit], keyIndex: KeyIndex, data: Settings[Scope]): BuildUtil[ResolvedProject] = - { - val getp = (build: URI, project: String) => Load.getProject(units, build, project) - val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name)) - val aggregates = aggregationRelation(units) - new BuildUtil(keyIndex, data, root, Load getRootProject units, getp, configs, aggregates) - } +object BuildUtil { + def apply(root: URI, units: Map[URI, LoadedBuildUnit], keyIndex: KeyIndex, data: Settings[Scope]): BuildUtil[ResolvedProject] = + { + val getp = (build: URI, project: String) => Load.getProject(units, build, project) + val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name)) + val aggregates = aggregationRelation(units) + new BuildUtil(keyIndex, data, root, Load getRootProject units, getp, configs, aggregates) + } - def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies = - { - import collection.mutable.HashMap - val agg = new HashMap[ProjectRef, Seq[ProjectRef]] - val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] - for(lbu <- units.values; rp <- lbu.defined.values) - { - val ref = ProjectRef(lbu.unit.uri, rp.id) - cp(ref) = rp.dependencies - agg(ref) = rp.aggregate - } - BuildDependencies(cp.toMap, 
agg.toMap) - } + def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies = + { + import collection.mutable.HashMap + val agg = new HashMap[ProjectRef, Seq[ProjectRef]] + val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]] + for (lbu <- units.values; rp <- lbu.defined.values) { + val ref = ProjectRef(lbu.unit.uri, rp.id) + cp(ref) = rp.dependencies + agg(ref) = rp.aggregate + } + BuildDependencies(cp.toMap, agg.toMap) + } - def checkCycles(units: Map[URI, LoadedBuildUnit]) - { - def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project) - def deps(proj: ResolvedProject)(base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] = Dag.topologicalSort(proj)(p => base(p) map getRef) - // check for cycles - for( (_, lbu) <- units; proj <- lbu.defined.values) { - deps(proj)(_.dependencies.map(_.project)) - deps(proj)(_.delegates) - deps(proj)(_.aggregate) - } - } - def baseImports: Seq[String] = "import sbt._, Keys._" :: Nil + def checkCycles(units: Map[URI, LoadedBuildUnit]) { + def getRef(pref: ProjectRef) = units(pref.build).defined(pref.project) + def deps(proj: ResolvedProject)(base: ResolvedProject => Seq[ProjectRef]): Seq[ResolvedProject] = Dag.topologicalSort(proj)(p => base(p) map getRef) + // check for cycles + for ((_, lbu) <- units; proj <- lbu.defined.values) { + deps(proj)(_.dependencies.map(_.project)) + deps(proj)(_.delegates) + deps(proj)(_.aggregate) + } + } + def baseImports: Seq[String] = "import sbt._, Keys._" :: Nil - def getImports(unit: BuildUnit): Seq[String] = unit.plugins.detected.imports + def getImports(unit: BuildUnit): Seq[String] = unit.plugins.detected.imports - @deprecated("Use getImports(Seq[String]).", "0.13.2") - def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ buildNames) + @deprecated("Use getImports(Seq[String]).", "0.13.2") + def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ 
buildNames) - /** `import sbt._, Keys._`, and wildcard import `._` for all names. */ - def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names) + /** `import sbt._, Keys._`, and wildcard import `._` for all names. */ + def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names) - /** Import just the names. */ - def importNames(names: Seq[String]): Seq[String] = if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil - /** Prepend `_root_` and import just the names. */ - def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName) + /** Import just the names. */ + def importNames(names: Seq[String]): Seq[String] = if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil + /** Prepend `_root_` and import just the names. */ + def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName) - /** Wildcard import `._` for all values. */ - def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" }) - def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) - def rootedName(s: String): String = if(s contains '.') "_root_." + s else s + /** Wildcard import `._` for all values. */ + def importAll(values: Seq[String]): Seq[String] = importNames(values map { _ + "._" }) + def importAllRoot(values: Seq[String]): Seq[String] = importAll(values map rootedName) + def rootedName(s: String): String = if (s contains '.') "_root_." 
+ s else s - def aggregationRelation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = - { - val depPairs = - for { - (uri, unit) <- units.toIterable - project <- unit.defined.values - ref = ProjectRef(uri, project.id) - agg <- project.aggregate - } yield - (ref, agg) - Relation.empty ++ depPairs - } + def aggregationRelation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] = + { + val depPairs = + for { + (uri, unit) <- units.toIterable + project <- unit.defined.values + ref = ProjectRef(uri, project.id) + agg <- project.aggregate + } yield (ref, agg) + Relation.empty ++ depPairs + } } diff --git a/main/src/main/scala/sbt/CommandStrings.scala b/main/src/main/scala/sbt/CommandStrings.scala index 447d2a883..e721b8089 100644 --- a/main/src/main/scala/sbt/CommandStrings.scala +++ b/main/src/main/scala/sbt/CommandStrings.scala @@ -3,45 +3,43 @@ */ package sbt -object CommandStrings -{ - /** The prefix used to identify a request to execute the remaining input on source changes.*/ - val AboutCommand = "about" - val TasksCommand = "tasks" - val SettingsCommand = "settings" - val ProjectCommand = "project" - val ProjectsCommand = "projects" - val ShowCommand = "show" - val MultiTaskCommand = "all" - val BootCommand = "boot" +object CommandStrings { + /** The prefix used to identify a request to execute the remaining input on source changes.*/ + val AboutCommand = "about" + val TasksCommand = "tasks" + val SettingsCommand = "settings" + val ProjectCommand = "project" + val ProjectsCommand = "projects" + val ShowCommand = "show" + val MultiTaskCommand = "all" + val BootCommand = "boot" - val EvalCommand = "eval" - val evalBrief = (EvalCommand + " ", "Evaluates a Scala expression and prints the result and type.") - val evalDetailed = -EvalCommand + """ + val EvalCommand = "eval" + val evalBrief = (EvalCommand + " ", "Evaluates a Scala expression and prints the result and type.") + val evalDetailed = + EvalCommand + """ Evaluates the 
given Scala expression and prints the result and type.""" - @deprecated("Misnomer: was only for `show`. Use showBrief.", "0.13.2") - def actBrief = showBrief - @deprecated("Misnomer: was only for `show`. Use showDetailed.", "0.13.2") - def actDetailed = showDetailed + @deprecated("Misnomer: was only for `show`. Use showBrief.", "0.13.2") + def actBrief = showBrief + @deprecated("Misnomer: was only for `show`. Use showDetailed.", "0.13.2") + def actDetailed = showDetailed - def actHelp = showHelp ++ multiTaskHelp + def actHelp = showHelp ++ multiTaskHelp - def multiTaskHelp = Help(MultiTaskCommand, (multiTaskSyntax, multiTaskBrief), multiTaskDetailed) - def multiTaskDetailed = -s"""$multiTaskSyntax + def multiTaskHelp = Help(MultiTaskCommand, (multiTaskSyntax, multiTaskBrief), multiTaskDetailed) + def multiTaskDetailed = + s"""$multiTaskSyntax $multiTaskBrief""" - def multiTaskSyntax = s"""$MultiTaskCommand +""" - def multiTaskBrief = """Executes all of the specified tasks concurrently.""" + def multiTaskSyntax = s"""$MultiTaskCommand +""" + def multiTaskBrief = """Executes all of the specified tasks concurrently.""" - - def showHelp = Help(ShowCommand, (s"$ShowCommand ", showBrief), showDetailed) - def showBrief = "Displays the result of evaluating the setting or task associated with 'key'." - def showDetailed = -s"""$ShowCommand + def showHelp = Help(ShowCommand, (s"$ShowCommand ", showBrief), showDetailed) + def showBrief = "Displays the result of evaluating the setting or task associated with 'key'." + def showDetailed = + s"""$ShowCommand Displays the value of the specified setting. @@ -49,19 +47,19 @@ $ShowCommand Evaluates the specified task and display the value returned by the task.""" - val PluginsCommand = "plugins" - val PluginCommand = "plugin" - def pluginsBrief = "Lists currently available plugins." 
- def pluginsDetailed = pluginsBrief // TODO: expand + val PluginsCommand = "plugins" + val PluginCommand = "plugin" + def pluginsBrief = "Lists currently available plugins." + def pluginsDetailed = pluginsBrief // TODO: expand - val LastCommand = "last" - val LastGrepCommand = "last-grep" - val ExportCommand = "export" - val ExportStream = "export" + val LastCommand = "last" + val LastGrepCommand = "last-grep" + val ExportCommand = "export" + val ExportStream = "export" - val lastGrepBrief = (LastGrepCommand, "Shows lines from the last output for 'key' that match 'pattern'.") - val lastGrepDetailed = -LastGrepCommand + """ + val lastGrepBrief = (LastGrepCommand, "Shows lines from the last output for 'key' that match 'pattern'.") + val lastGrepDetailed = + LastGrepCommand + """ Displays lines from the logging of previous commands that match `pattern`. """ + LastGrepCommand + """ [key] @@ -70,9 +68,9 @@ LastGrepCommand + """ is a regular expression interpreted by java.util.Pattern. Matching text is highlighted (when highlighting is supported and enabled). See also '""" + LastCommand + "'." - val lastBrief = (LastCommand, "Displays output from a previous command or the output from a specific task.") - val lastDetailed = -LastCommand + """ + val lastBrief = (LastCommand, "Displays output from a previous command or the output from a specific task.") + val lastDetailed = + LastCommand + """ Prints the logging for the previous command, typically at a more verbose level. """ + LastCommand + """ @@ -80,9 +78,9 @@ LastCommand + """ See also '""" + LastGrepCommand + "'." 
- val exportBrief = (ExportCommand + " +", "Executes tasks and displays the equivalent command lines.") - val exportDetailed = -s"""$ExportCommand [--last] + + val exportBrief = (ExportCommand + " +", "Executes tasks and displays the equivalent command lines.") + val exportDetailed = + s"""$ExportCommand [--last] + Runs the specified tasks and prints the equivalent command lines or other exportable information for those runs. --last @@ -95,10 +93,10 @@ s"""$ExportCommand [--last] + equivalent and will show nothing at all. """ - val InspectCommand = "inspect" - val inspectBrief = (InspectCommand + " [uses|tree|definitions] ", "Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies.") - val inspectDetailed = -InspectCommand + """ + val InspectCommand = "inspect" + val inspectBrief = (InspectCommand + " [uses|tree|definitions] ", "Prints the value for 'key', the defining scope, delegates, related definitions, and dependencies.") + val inspectDetailed = + InspectCommand + """ For a plain setting, the value bound to the key argument is displayed using its toString method. Otherwise, the type of task ("Task" or "Input task") is displayed. @@ -115,27 +113,26 @@ InspectCommand + """ "Related" shows all of the scopes in which the key is defined. """ + -InspectCommand + """ tree + InspectCommand + """ tree Displays `key` and its dependencies in a tree structure. For settings, the value bound to the setting is displayed and for tasks, the type of the task is shown. """ + -InspectCommand + """ uses + InspectCommand + """ uses Displays the settings and tasks that directly depend on `key`. """ + -InspectCommand + """ definitions + InspectCommand + """ definitions Displays the scopes in which `key` is defined. 
""" - - val SetCommand = "set" - val setBrief = (s"$SetCommand [every] ", "Evaluates a Setting and applies it to the current project.") - val setDetailed = -SetCommand + """ [every] + val SetCommand = "set" + val setBrief = (s"$SetCommand [every] ", "Evaluates a Setting and applies it to the current project.") + val setDetailed = + SetCommand + """ [every] Applies the given setting to the current project: 1) Constructs the expression provided as an argument by compiling and loading it. @@ -151,20 +148,20 @@ SetCommand + """ [every] bound to the key everywhere. """ - def SessionCommand = "session" - def sessionBrief = (SessionCommand, "Manipulates session settings. For details, run 'help " + SessionCommand + "'.") + def SessionCommand = "session" + def sessionBrief = (SessionCommand, "Manipulates session settings. For details, run 'help " + SessionCommand + "'.") - def settingsPreamble = commonPreamble("settings") - def tasksPreamble = commonPreamble("tasks") + """ + def settingsPreamble = commonPreamble("settings") + def tasksPreamble = commonPreamble("tasks") + """ Tasks produce values. Use the 'show' command to run the task and print the resulting value.""" - def commonPreamble(label: String) = """ + def commonPreamble(label: String) = """ This is a list of %s defined for the current project. It does not list the scopes the %] @@ -181,16 +178,16 @@ Syntax summary Restricts the %+ @@ -236,24 +233,24 @@ ProjectsCommand + """ Builds explicitly listed in the build definition are not affected by this command. 
""" - def sbtrc = ".sbtrc" + def sbtrc = ".sbtrc" - def DefaultsCommand = "add-default-commands" - def DefaultsBrief = (DefaultsCommand, DefaultsDetailed) - def DefaultsDetailed = "Registers default built-in commands" + def DefaultsCommand = "add-default-commands" + def DefaultsBrief = (DefaultsCommand, DefaultsDetailed) + def DefaultsDetailed = "Registers default built-in commands" - def Load = "load" - def LoadLabel = "a project" - def LoadCommand = "load-commands" - def LoadCommandLabel = "commands" + def Load = "load" + def LoadLabel = "a project" + def LoadCommand = "load-commands" + def LoadCommandLabel = "commands" - def LoadFailed = "load-failed" + def LoadFailed = "load-failed" - def LoadProjectImpl = "loadp" - def LoadProject = "reload" - def LoadProjectBrief = (LoadProject, LoadProjectDetailed) - def LoadProjectDetailed = LoadProject + -s""" + def LoadProjectImpl = "loadp" + def LoadProject = "reload" + def LoadProjectBrief = (LoadProject, LoadProjectDetailed) + def LoadProjectDetailed = LoadProject + + s""" \t(Re)loads the project in the current directory. @@ -265,10 +262,10 @@ $LoadProject return \t(Re)loads the root project (and leaves the plugins project).""" - def InitCommand = "initialize" - def InitBrief = (InitCommand, "Initializes command processing.") - def InitDetailed = -InitCommand + """ + def InitCommand = "initialize" + def InitBrief = (InitCommand, "Initializes command processing.") + def InitDetailed = + InitCommand + """ Initializes command processing. Runs the following commands. 
@@ -284,22 +281,22 @@ load-commands -base ~/.sbt/commands Runs commands from ~/.sbtrc and ./.sbtrc if they exist """ - import java.io.File - import Path._ + import java.io.File + import Path._ - def sbtRCs(s: State): Seq[File] = - (Path.userHome / sbtrc) :: - (s.baseDir / sbtrc asFile) :: - Nil + def sbtRCs(s: State): Seq[File] = + (Path.userHome / sbtrc) :: + (s.baseDir / sbtrc asFile) :: + Nil - val CrossCommand = "+" - val SwitchCommand = "++" + val CrossCommand = "+" + val SwitchCommand = "++" - def crossHelp: Help = Help.more(CrossCommand, CrossDetailed) - def switchHelp: Help = Help.more(SwitchCommand, SwitchDetailed) + def crossHelp: Help = Help.more(CrossCommand, CrossDetailed) + def switchHelp: Help = Help.more(SwitchCommand, SwitchDetailed) - def CrossDetailed = -s"""$CrossCommand + def CrossDetailed = + s"""$CrossCommand Runs for each Scala version specified for cross-building. For each string in `crossScalaVersions` in the current project, this command sets the @@ -310,8 +307,8 @@ s"""$CrossCommand See also `help $SwitchCommand` """ - def SwitchDetailed = -s"""$SwitchCommand [] + def SwitchDetailed = + s"""$SwitchCommand [] Changes the Scala version and runs a command. Sets the `scalaVersion` of all projects to and reloads the build. 
diff --git a/main/src/main/scala/sbt/ConsoleProject.scala b/main/src/main/scala/sbt/ConsoleProject.scala index a3bf84381..bdb844263 100644 --- a/main/src/main/scala/sbt/ConsoleProject.scala +++ b/main/src/main/scala/sbt/ConsoleProject.scala @@ -3,29 +3,26 @@ */ package sbt - import java.io.File +import java.io.File -object ConsoleProject -{ - def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(implicit log: Logger) - { - val extracted = Project extract state - val cpImports = new Imports(extracted, state) +object ConsoleProject { + def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(implicit log: Logger) { + val extracted = Project extract state + val cpImports = new Imports(extracted, state) - val bindings = ("currentState" -> state) :: ("extracted" -> extracted ) :: ("cpHelpers" -> cpImports) :: Nil - val unit = extracted.currentUnit - val compiler = Compiler.compilers(ClasspathOptions.repl)(state.configuration, log).scalac - val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1)) - val importString = imports.mkString("", ";\n", ";\n\n") - val initCommands = importString + extra - (new Console(compiler))(unit.classpath, options, initCommands, cleanupCommands)(Some(unit.loader), bindings) - } - /** Conveniences for consoleProject that shouldn't normally be used for builds. 
*/ - final class Imports private[sbt](extracted: Extracted, state: State) - { - import extracted._ - implicit def taskKeyEvaluate[T](t: TaskKey[T]): Evaluate[T] = new Evaluate(runTask(t, state)._2) - implicit def settingKeyEvaluate[T](s: SettingKey[T]): Evaluate[T] = new Evaluate(get(s)) - } - final class Evaluate[T] private[sbt](val eval: T) + val bindings = ("currentState" -> state) :: ("extracted" -> extracted) :: ("cpHelpers" -> cpImports) :: Nil + val unit = extracted.currentUnit + val compiler = Compiler.compilers(ClasspathOptions.repl)(state.configuration, log).scalac + val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1)) + val importString = imports.mkString("", ";\n", ";\n\n") + val initCommands = importString + extra + (new Console(compiler))(unit.classpath, options, initCommands, cleanupCommands)(Some(unit.loader), bindings) + } + /** Conveniences for consoleProject that shouldn't normally be used for builds. */ + final class Imports private[sbt] (extracted: Extracted, state: State) { + import extracted._ + implicit def taskKeyEvaluate[T](t: TaskKey[T]): Evaluate[T] = new Evaluate(runTask(t, state)._2) + implicit def settingKeyEvaluate[T](s: SettingKey[T]): Evaluate[T] = new Evaluate(get(s)) + } + final class Evaluate[T] private[sbt] (val eval: T) } diff --git a/main/src/main/scala/sbt/Cross.scala b/main/src/main/scala/sbt/Cross.scala index c39019062..c6aafaa9d 100644 --- a/main/src/main/scala/sbt/Cross.scala +++ b/main/src/main/scala/sbt/Cross.scala @@ -3,97 +3,97 @@ */ package sbt - import Keys._ - import complete.{DefaultParsers, Parser} - import DefaultParsers._ - import Def.{ScopedKey, Setting} - import Scope.GlobalScope - import CommandStrings.{CrossCommand,crossHelp,SwitchCommand,switchHelp} - import java.io.File +import Keys._ +import complete.{ DefaultParsers, Parser } +import DefaultParsers._ +import Def.{ ScopedKey, Setting } +import Scope.GlobalScope +import CommandStrings.{ CrossCommand, crossHelp, 
SwitchCommand, switchHelp } +import java.io.File -object Cross -{ - @deprecated("Moved to CommandStrings.Switch", "0.13.0") - final val Switch = CommandStrings.SwitchCommand +object Cross { + @deprecated("Moved to CommandStrings.Switch", "0.13.0") + final val Switch = CommandStrings.SwitchCommand - @deprecated("Moved to CommandStrings.Cross", "0.13.0") - final val Cross = CommandStrings.CrossCommand + @deprecated("Moved to CommandStrings.Cross", "0.13.0") + final val Cross = CommandStrings.CrossCommand - def switchParser(state: State): Parser[(String, String)] = - { - def versionAndCommand(spacePresent: Boolean) = { - val knownVersions = crossVersions(state) - val version = token(StringBasic.examples(knownVersions : _*)) - val spacedVersion = if(spacePresent) version else version & spacedFirst(SwitchCommand) - val optionalCommand = token(Space ~> matched(state.combinedParser)) ?? "" - spacedVersion ~ optionalCommand - } - token(SwitchCommand ~> OptSpace) flatMap { sp => versionAndCommand(!sp.isEmpty) } - } - def spacedFirst(name: String) = opOrIDSpaced(name) ~ any.+ + def switchParser(state: State): Parser[(String, String)] = + { + def versionAndCommand(spacePresent: Boolean) = { + val knownVersions = crossVersions(state) + val version = token(StringBasic.examples(knownVersions: _*)) + val spacedVersion = if (spacePresent) version else version & spacedFirst(SwitchCommand) + val optionalCommand = token(Space ~> matched(state.combinedParser)) ?? 
"" + spacedVersion ~ optionalCommand + } + token(SwitchCommand ~> OptSpace) flatMap { sp => versionAndCommand(!sp.isEmpty) } + } + def spacedFirst(name: String) = opOrIDSpaced(name) ~ any.+ - lazy val switchVersion = Command.arb(requireSession(switchParser), switchHelp) { case (state, (arg, command)) => - val x = Project.extract(state) - import x._ - - val (resolveVersion, homePath) = arg.split("=") match { - case Array(v, h) => (v, h) - case _ => ("", arg) - } - val home = IO.resolve(x.currentProject.base, new File(homePath)) - val (add, exclude) = - if(home.exists) { - val instance = ScalaInstance(home)(state.classLoaderCache.apply _) - state.log.info("Setting Scala home to " + home + " with actual version " + instance.actualVersion) - val version = if(resolveVersion.isEmpty) instance.actualVersion else resolveVersion - state.log.info("\tand using " + version + " for resolving dependencies.") - val settings = Seq( - scalaVersion in GlobalScope :== version, - scalaHome in GlobalScope :== Some(home), - scalaInstance in GlobalScope :== instance - ) - (settings, excludeKeys(Set(scalaVersion.key, scalaHome.key, scalaInstance.key))) - } else if(!resolveVersion.isEmpty) { - error("Scala home directory did not exist: " + home) - } else { - state.log.info("Setting version to " + arg) - val settings = Seq( - scalaVersion in GlobalScope :== arg, - scalaHome in GlobalScope :== None - ) - (settings, excludeKeys(Set(scalaVersion.key, scalaHome.key))) - } - val cleared = session.mergeSettings.filterNot( exclude ) - val newStructure = Load.reapply(add ++ cleared, structure) - Project.setProject(session, newStructure, command :: state) - } - @deprecated("No longer used.", "0.13.0") - def crossExclude(s: Setting[_]): Boolean = excludeKeys(Set(scalaVersion.key, scalaHome.key))(s) + lazy val switchVersion = Command.arb(requireSession(switchParser), switchHelp) { + case (state, (arg, command)) => + val x = Project.extract(state) + import x._ - private[this] def excludeKeys(keys: 
Set[AttributeKey[_]]): Setting[_] => Boolean = - _.key match { - case ScopedKey( Scope(_, Global, Global, _), key) if keys.contains(key) => true - case _ => false - } + val (resolveVersion, homePath) = arg.split("=") match { + case Array(v, h) => (v, h) + case _ => ("", arg) + } + val home = IO.resolve(x.currentProject.base, new File(homePath)) + val (add, exclude) = + if (home.exists) { + val instance = ScalaInstance(home)(state.classLoaderCache.apply _) + state.log.info("Setting Scala home to " + home + " with actual version " + instance.actualVersion) + val version = if (resolveVersion.isEmpty) instance.actualVersion else resolveVersion + state.log.info("\tand using " + version + " for resolving dependencies.") + val settings = Seq( + scalaVersion in GlobalScope :== version, + scalaHome in GlobalScope :== Some(home), + scalaInstance in GlobalScope :== instance + ) + (settings, excludeKeys(Set(scalaVersion.key, scalaHome.key, scalaInstance.key))) + } else if (!resolveVersion.isEmpty) { + error("Scala home directory did not exist: " + home) + } else { + state.log.info("Setting version to " + arg) + val settings = Seq( + scalaVersion in GlobalScope :== arg, + scalaHome in GlobalScope :== None + ) + (settings, excludeKeys(Set(scalaVersion.key, scalaHome.key))) + } + val cleared = session.mergeSettings.filterNot(exclude) + val newStructure = Load.reapply(add ++ cleared, structure) + Project.setProject(session, newStructure, command :: state) + } + @deprecated("No longer used.", "0.13.0") + def crossExclude(s: Setting[_]): Boolean = excludeKeys(Set(scalaVersion.key, scalaHome.key))(s) - def crossParser(state: State): Parser[String] = - token(CrossCommand <~ OptSpace) flatMap { _ => token(matched( state.combinedParser & spacedFirst(CrossCommand) )) } + private[this] def excludeKeys(keys: Set[AttributeKey[_]]): Setting[_] => Boolean = + _.key match { + case ScopedKey(Scope(_, Global, Global, _), key) if keys.contains(key) => true + case _ => false + } - lazy val 
crossBuild = Command.arb(requireSession(crossParser), crossHelp) { (state, command) => - val x = Project.extract(state) - import x._ - val versions = crossVersions(state) - val current = scalaVersion in currentRef get structure.data map(SwitchCommand + " " + _) toList; - if(versions.isEmpty) command :: state else versions.map(SwitchCommand + " " + _ + " " + command) ::: current ::: state - } - def crossVersions(state: State): Seq[String] = - { - val x = Project.extract(state) - import x._ - crossScalaVersions in currentRef get structure.data getOrElse Nil - } - - def requireSession[T](p: State => Parser[T]): State => Parser[T] = s => - if(s get sessionSettings isEmpty) failure("No project loaded") else p(s) + def crossParser(state: State): Parser[String] = + token(CrossCommand <~ OptSpace) flatMap { _ => token(matched(state.combinedParser & spacedFirst(CrossCommand))) } + + lazy val crossBuild = Command.arb(requireSession(crossParser), crossHelp) { (state, command) => + val x = Project.extract(state) + import x._ + val versions = crossVersions(state) + val current = scalaVersion in currentRef get structure.data map (SwitchCommand + " " + _) toList; + if (versions.isEmpty) command :: state else versions.map(SwitchCommand + " " + _ + " " + command) ::: current ::: state + } + def crossVersions(state: State): Seq[String] = + { + val x = Project.extract(state) + import x._ + crossScalaVersions in currentRef get structure.data getOrElse Nil + } + + def requireSession[T](p: State => Parser[T]): State => Parser[T] = s => + if (s get sessionSettings isEmpty) failure("No project loaded") else p(s) } \ No newline at end of file diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index fcd76d85e..e15532a54 100755 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -3,1253 +3,1257 @@ */ package sbt - import Attributed.data - import Scope.{fillTaskAxis, GlobalScope, ThisScope} - import 
xsbt.api.Discovery - import xsbti.compile.CompileOrder - import Project.{inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar} - import Def.{Initialize, ScopedKey, Setting, SettingsDefinition} - import Artifact.{DocClassifier, SourceClassifier} - import Configurations.{Compile, CompilerPlugin, IntegrationTest, names, Provided, Runtime, Test} - import CrossVersion.{binarySbtVersion, binaryScalaVersion, partialVersion} - import complete._ - import std.TaskExtra._ - import inc.{FileValueCache, IncOptions, Locate} - import testing.{Framework, Runner, AnnotatedFingerprint, SubclassFingerprint} - - import sys.error - import scala.xml.NodeSeq - import org.apache.ivy.core.module.{descriptor, id} - import descriptor.ModuleDescriptor, id.ModuleRevisionId - import java.io.{File, PrintWriter} - import java.net.{URI,URL,MalformedURLException} - import java.util.concurrent.Callable - import sbinary.DefaultProtocol.StringFormat - import Cache.seqFormat - import CommandStrings.ExportStream - - import Types._ - import Path._ - import Keys._ - -object Defaults extends BuildCommon -{ - final val CacheDirectoryName = "cache" - - def configSrcSub(key: SettingKey[File]): Initialize[File] = (key in ThisScope.copy(config = Global), configuration) { (src, conf) => src / nameForSrc(conf.name) } - def nameForSrc(config: String) = if(config == Configurations.Compile.name) "main" else config - def prefix(config: String) = if(config == Configurations.Compile.name) "" else config + "-" - - def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = app.provider.scalaProvider.launcher.globalLock - - def extractAnalysis[T](a: Attributed[T]): (T, inc.Analysis) = - (a.data, a.metadata get Keys.analysis getOrElse inc.Analysis.Empty) - - def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[inc.Analysis] = - { - val m = (for(a <- cp; an <- a.metadata get Keys.analysis) yield (a.data, an) ).toMap - m.get _ - } - private[sbt] def globalDefaults(ss: Seq[Setting[_]]): 
Seq[Setting[_]] = Def.defaultSettings(inScope(GlobalScope)(ss)) - - def buildCore: Seq[Setting[_]] = thisBuildCore ++ globalCore - def thisBuildCore: Seq[Setting[_]] = inScope(GlobalScope.copy(project = Select(ThisBuild)))(Seq( - managedDirectory := baseDirectory.value / "lib_managed" - )) - @deprecated("0.13.2", "Use AutoPlugins and globalSbtCore instead.") - lazy val globalCore: Seq[Setting[_]] = globalDefaults(defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq( - excludeFilter :== HiddenFileFilter - ) ++ globalIvyCore ++ globalJvmCore) ++ globalSbtCore - - private[sbt] lazy val globalJvmCore: Seq[Setting[_]] = - Seq( - compilerCache := state.value get Keys.stateCompilerCache getOrElse compiler.CompilerCache.fresh, - sourcesInBase :== true, - autoAPIMappings := false, - apiMappings := Map.empty, - autoScalaLibrary :== true, - managedScalaInstance :== true, - definesClass :== FileValueCache(Locate.definesClass _ ).get, - traceLevel in run :== 0, - traceLevel in runMain :== 0, - traceLevel in console :== Int.MaxValue, - traceLevel in consoleProject :== Int.MaxValue, - autoCompilerPlugins :== true, - scalaHome :== None, - apiURL := None, - javaHome :== None, - testForkedParallel :== false, - javaOptions :== Nil, - sbtPlugin :== false, - crossPaths :== true, - sourcePositionMappers :== Nil, - artifactClassifier in packageSrc :== Some(SourceClassifier), - artifactClassifier in packageDoc :== Some(DocClassifier), - includeFilter :== NothingFilter, - includeFilter in unmanagedSources :== "*.java" | "*.scala", - includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip", - includeFilter in unmanagedResources :== AllPassFilter - ) - - private[sbt] lazy val globalIvyCore: Seq[Setting[_]] = - Seq( - internalConfigurationMap :== Configurations.internalMap _, - credentials :== Nil, - exportJars :== false, - retrieveManaged :== false, - scalaOrganization :== ScalaArtifacts.Organization, - sbtResolver := { 
if(sbtVersion.value endsWith "-SNAPSHOT") Classpaths.typesafeSnapshots else Classpaths.typesafeReleases }, - crossVersion :== CrossVersion.Disabled, - buildDependencies <<= Classpaths.constructBuildDependencies, - version :== "0.1-SNAPSHOT", - classpathTypes :== Set("jar", "bundle") ++ CustomPomParser.JarPackagings, - artifactClassifier :== None, - checksums := Classpaths.bootChecksums(appConfiguration.value), - conflictManager := ConflictManager.default, - pomExtra :== NodeSeq.Empty, - pomPostProcess :== idFun, - pomAllRepositories :== false, - pomIncludeRepository :== Classpaths.defaultRepositoryFilter - ) - - /** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */ - private[sbt] lazy val globalSbtCore: Seq[Setting[_]] = globalDefaults(Seq( - outputStrategy :== None, // TODO - This might belong elsewhere. - buildStructure := Project.structure(state.value), - settingsData := buildStructure.value.data, - trapExit :== true, - connectInput :== false, - cancelable :== false, - taskCancelStrategy := { state: State => - if(cancelable.value) TaskCancellationStrategy.Signal - else TaskCancellationStrategy.Null - }, - envVars :== Map.empty, - sbtVersion := appConfiguration.value.provider.id.version, - sbtBinaryVersion := binarySbtVersion(sbtVersion.value), - watchingMessage := Watched.defaultWatchingMessage, - triggeredMessage := Watched.defaultTriggeredMessage, - onLoad := idFun[State], - onUnload := idFun[State], - onUnload := { s => try onUnload.value(s) finally IO.delete(taskTemporaryDirectory.value) }, - extraLoggers :== { _ => Nil }, - watchSources :== Nil, - skip :== false, - taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir }, - onComplete := { val dir = taskTemporaryDirectory.value; () => {IO.delete(dir); IO.createDirectory(dir) }}, - Previous.cache <<= Previous.cacheSetting, - Previous.references :== new Previous.References, - concurrentRestrictions <<= 
defaultRestrictions, - parallelExecution :== true, - pollInterval :== 500, - logBuffered :== false, - commands :== Nil, - showSuccess :== true, - showTiming :== true, - timingFormat :== Aggregation.defaultFormat, - aggregate :== true, - maxErrors :== 100, - fork :== false, - initialize :== {} - )) - def defaultTestTasks(key: Scoped): Seq[Setting[_]] = inTask(key)(Seq( - tags := Seq(Tags.Test -> 1), - logBuffered := true - )) - // TODO: This should be on the new default settings for a project. - def projectCore: Seq[Setting[_]] = Seq( - name := thisProject.value.id, - logManager := LogManager.defaults(extraLoggers.value, StandardMain.console), - onLoadMessage <<= onLoadMessage or (name, thisProjectRef)("Set current project to " + _ + " (in build " + _.build +")") - ) - def paths = Seq( - baseDirectory := thisProject.value.base, - target := baseDirectory.value / "target", - historyPath <<= historyPath or target(t => Some(t / ".history")), - sourceDirectory := baseDirectory.value / "src", - sourceManaged := crossTarget.value / "src_managed", - resourceManaged := crossTarget.value / "resource_managed", - cacheDirectory := crossTarget.value / CacheDirectoryName / thisProject.value.id / "global" - ) - - lazy val configPaths = sourceConfigPaths ++ resourceConfigPaths ++ outputConfigPaths - lazy val sourceConfigPaths = Seq( - sourceDirectory <<= configSrcSub(sourceDirectory), - sourceManaged <<= configSrcSub(sourceManaged), - scalaSource := sourceDirectory.value / "scala", - javaSource := sourceDirectory.value / "java", - unmanagedSourceDirectories := Seq(scalaSource.value, javaSource.value), - unmanagedSources <<= collectFiles(unmanagedSourceDirectories, includeFilter in unmanagedSources, excludeFilter in unmanagedSources), - watchSources in ConfigGlobal <++= unmanagedSources, - managedSourceDirectories := Seq(sourceManaged.value), - managedSources <<= generate(sourceGenerators), - sourceGenerators :== Nil, - sourceDirectories <<= 
Classpaths.concatSettings(unmanagedSourceDirectories, managedSourceDirectories), - sources <<= Classpaths.concat(unmanagedSources, managedSources) - ) - lazy val resourceConfigPaths = Seq( - resourceDirectory := sourceDirectory.value / "resources", - resourceManaged <<= configSrcSub(resourceManaged), - unmanagedResourceDirectories := Seq(resourceDirectory.value), - managedResourceDirectories := Seq(resourceManaged.value), - resourceDirectories <<= Classpaths.concatSettings(unmanagedResourceDirectories, managedResourceDirectories), - unmanagedResources <<= collectFiles(unmanagedResourceDirectories, includeFilter in unmanagedResources, excludeFilter in unmanagedResources), - watchSources in ConfigGlobal ++= unmanagedResources.value, - resourceGenerators :== Nil, - resourceGenerators <+= (discoveredSbtPlugins, resourceManaged) map PluginDiscovery.writeDescriptors, - managedResources <<= generate(resourceGenerators), - resources <<= Classpaths.concat(managedResources, unmanagedResources) - ) - lazy val outputConfigPaths = Seq( - cacheDirectory := crossTarget.value / CacheDirectoryName / thisProject.value.id / configuration.value.name, - classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"), - target in doc := crossTarget.value / (prefix(configuration.value.name) + "api") - ) - def addBaseSources = Seq( - unmanagedSources := { - val srcs = unmanagedSources.value - val f = (includeFilter in unmanagedSources).value - val excl = (excludeFilter in unmanagedSources).value - if(sourcesInBase.value) (srcs +++ baseDirectory.value * (f -- excl)).get else srcs - } - ) - - def compileBase = inTask(console)(compilersSetting :: Nil) ++ compileBaseGlobal ++ Seq( - incOptions := incOptions.value.withNewClassfileManager( - sbt.inc.ClassfileManager.transactional(crossTarget.value / "classes.bak", sbt.Logger.Null)), - scalaInstance <<= scalaInstanceTask, - crossVersion := (if(crossPaths.value) CrossVersion.binary else CrossVersion.Disabled), - crossTarget 
:= makeCrossTarget(target.value, scalaBinaryVersion.value, sbtBinaryVersion.value, sbtPlugin.value, crossPaths.value) - ) - // must be a val: duplication detected by object identity - private[this] lazy val compileBaseGlobal: Seq[Setting[_]] = globalDefaults(Seq( - incOptions := IncOptions.Default, - classpathOptions :== ClasspathOptions.boot, - classpathOptions in console :== ClasspathOptions.repl, - compileOrder :== CompileOrder.Mixed, - javacOptions :== Nil, - scalacOptions :== Nil, - scalaVersion := appConfiguration.value.provider.scalaProvider.version, - crossScalaVersions := Seq(scalaVersion.value), - derive(compilersSetting), - derive(scalaBinaryVersion := binaryScalaVersion(scalaVersion.value)) - )) - def makeCrossTarget(t: File, sv: String, sbtv: String, plugin: Boolean, cross: Boolean): File = - { - val scalaBase = if(cross) t / ("scala-" + sv) else t - if(plugin) scalaBase / ("sbt-" + sbtv) else scalaBase - } - - def compilersSetting = compilers := Compiler.compilers(scalaInstance.value, classpathOptions.value, javaHome.value)(appConfiguration.value, streams.value.log) - - lazy val configTasks = docTaskSettings(doc) ++ inTask(compile)(compileInputsSettings) ++ configGlobal ++ Seq( - compile <<= compileTask tag(Tags.Compile, Tags.CPU), - printWarnings <<= printWarningsTask, - compileAnalysisFilename := { - // Here, if the user wants cross-scala-versioning, we also append it - // to the analysis cache, so we keep the scala versions separated. 
- val extra = - if(crossPaths.value) s"_${scalaBinaryVersion.value}" - else "" - s"inc_compile${extra}" - }, - compileIncSetup <<= compileIncSetupTask, - console <<= consoleTask, - consoleQuick <<= consoleQuickTask, - discoveredMainClasses <<= compile map discoverMainClasses storeAs discoveredMainClasses triggeredBy compile, - definedSbtPlugins <<= discoverPlugins, - discoveredSbtPlugins <<= discoverSbtPluginNames, - inTask(run)(runnerTask :: Nil).head, - selectMainClass := mainClass.value orElse selectRunMain(discoveredMainClasses.value), - mainClass in run := (selectMainClass in run).value, - mainClass := selectPackageMain(discoveredMainClasses.value), - run <<= runTask(fullClasspath, mainClass in run, runner in run), - runMain <<= runMainTask(fullClasspath, runner in run), - copyResources <<= copyResourcesTask - ) - private[this] lazy val configGlobal = globalDefaults(Seq( - initialCommands :== "", - cleanupCommands :== "" - )) - - lazy val projectTasks: Seq[Setting[_]] = Seq( - cleanFiles := Seq(managedDirectory.value, target.value), - cleanKeepFiles := historyPath.value.toList, - clean := doClean(cleanFiles.value, cleanKeepFiles.value), - consoleProject <<= consoleProjectTask, - watchTransitiveSources <<= watchTransitiveSourcesTask, - watch <<= watchSetting - ) - - def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] = generators {_.join.map(_.flatten) } - - @deprecated("Use the new .all() API", "0.13.0") - def inAllConfigurations[T](key: TaskKey[T]): Initialize[Task[Seq[T]]] = (state, thisProjectRef) flatMap { (state, ref) => - val structure = Project structure state - val configurations = Project.getProject(ref, structure).toList.flatMap(_.configurations) - configurations.flatMap { conf => - key in (ref, conf) get structure.data - } join - } - def watchTransitiveSourcesTask: Initialize[Task[Seq[File]]] = { - import ScopeFilter.Make.{inDependencies => inDeps, _} - val selectDeps = ScopeFilter(inAggregates(ThisProject) || 
inDeps(ThisProject)) - val allWatched = (watchSources ?? Nil).all( selectDeps ) - Def.task { allWatched.value.flatten } - } - - def transitiveUpdateTask: Initialize[Task[Seq[UpdateReport]]] = { - import ScopeFilter.Make.{inDependencies => inDeps, _} - val selectDeps = ScopeFilter(inDeps(ThisProject, includeRoot = false)) - val allUpdates = update.?.all(selectDeps) - Def.task { allUpdates.value.flatten } - } - - def watchSetting: Initialize[Watched] = (pollInterval, thisProjectRef, watchingMessage, triggeredMessage) { (interval, base, msg, trigMsg) => - new Watched { - val scoped = watchTransitiveSources in base - val key = ScopedKey(scoped.scope, scoped.key) - override def pollInterval = interval - override def watchingMessage(s: WatchState) = msg(s) - override def triggeredMessage(s: WatchState) = trigMsg(s) - override def watchPaths(s: State) = EvaluateTask.evaluateTask(Project structure s, key, s, base) match { - case Some(Value(ps)) => ps - case Some(Inc(i)) => throw i - case None => error("key not found: " + Def.displayFull(key)) - } - } - } - - @deprecated("Use scalaInstanceTask.", "0.13.0") - def scalaInstanceSetting = scalaInstanceTask - def scalaInstanceTask: Initialize[Task[ScalaInstance]] = Def.taskDyn { - // if this logic changes, ensure that `unmanagedScalaInstanceOnly` and `update` are changed - // appropriately to avoid cycles - scalaHome.value match { - case Some(h) => scalaInstanceFromHome(h) - case None => - val scalaProvider = appConfiguration.value.provider.scalaProvider - val version = scalaVersion.value - if(version == scalaProvider.version) // use the same class loader as the Scala classes used by sbt - Def.task( ScalaInstance(version, scalaProvider) ) - else - scalaInstanceFromUpdate - } - } - // Returns the ScalaInstance only if it was not constructed via `update` - // This is necessary to prevent cycles between `update` and `scalaInstance` - private[sbt] def unmanagedScalaInstanceOnly: Initialize[Task[Option[ScalaInstance]]] = Def.taskDyn 
{ - if(scalaHome.value.isDefined) Def.task(Some(scalaInstance.value)) else Def.task(None) - } - - private[this] def noToolConfiguration(autoInstance: Boolean): String = - { - val pre = "Missing Scala tool configuration from the 'update' report. " - val post = - if(autoInstance) - "'scala-tool' is normally added automatically, so this may indicate a bug in sbt or you may be removing it from ivyConfigurations, for example." - else - "Explicitly define scalaInstance or scalaHome or include Scala dependencies in the 'scala-tool' configuration." - pre + post - } - - def scalaInstanceFromUpdate: Initialize[Task[ScalaInstance]] = Def.task { - val toolReport = update.value.configuration(Configurations.ScalaTool.name) getOrElse - error(noToolConfiguration(managedScalaInstance.value)) - def files(id: String) = - for { m <- toolReport.modules if m.module.name == id; - (art, file) <- m.artifacts if art.`type` == Artifact.DefaultType } - yield file - def file(id: String) = files(id).headOption getOrElse error(s"Missing ${id}.jar") - val allFiles = toolReport.modules.flatMap(_.artifacts.map(_._2)) - val libraryJar = file(ScalaArtifacts.LibraryID) - val compilerJar = file(ScalaArtifacts.CompilerID) - val otherJars = allFiles.filterNot(x => x == libraryJar || x == compilerJar) - ScalaInstance(scalaVersion.value, libraryJar, compilerJar, otherJars : _*)(makeClassLoader(state.value)) - } - def scalaInstanceFromHome(dir: File): Initialize[Task[ScalaInstance]] = Def.task { - ScalaInstance(dir)(makeClassLoader(state.value)) - } - private[this] def makeClassLoader(state: State) = state.classLoaderCache.apply _ - - private[this] def testDefaults = Defaults.globalDefaults(Seq( - testFrameworks :== { - import sbt.TestFrameworks._ - Seq(ScalaCheck, Specs2, Specs, ScalaTest, JUnit) - }, - testListeners :== Nil, - testOptions :== Nil, - testResultLogger :== TestResultLogger.Default, - testFilter in testOnly :== (selectedFilter _) - )) - lazy val testTasks: Seq[Setting[_]] = 
testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions(testQuick) ++ testDefaults ++ Seq( - testLoader := TestFramework.createTestLoader(data(fullClasspath.value), scalaInstance.value, IO.createUniqueDirectory(taskTemporaryDirectory.value)), - loadedTestFrameworks := testFrameworks.value.flatMap(f => f.create(testLoader.value, streams.value.log).map( x => (f,x)).toIterable).toMap, - definedTests <<= detectTests, - definedTestNames <<= definedTests map ( _.map(_.name).distinct) storeAs definedTestNames triggeredBy compile, - testFilter in testQuick <<= testQuickFilter, - executeTests <<= (streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel) flatMap allTestGroupsTask, - testResultLogger in (Test, test) :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185 - test := { - val trl = (testResultLogger in (Test, test)).value - val taskName = Project.showContextKey(state.value)(resolvedScoped.value) - trl.run(streams.value.log, executeTests.value, taskName) - }, - testOnly <<= inputTests(testOnly), - testQuick <<= inputTests(testQuick) - ) - lazy val TaskGlobal: Scope = ThisScope.copy(task = Global) - lazy val ConfigGlobal: Scope = ThisScope.copy(config = Global) - def testTaskOptions(key: Scoped): Seq[Setting[_]] = inTask(key)( Seq( - testListeners := { - TestLogger.make(streams.value.log, closeableTestLogger(streamsManager.value, test in resolvedScoped.value.scope, logBuffered.value)) +: - new TestStatusReporter(succeededFile( streams.in(test).value.cacheDirectory )) +: - testListeners.in(TaskGlobal).value - }, - testOptions := Tests.Listeners(testListeners.value) +: (testOptions in TaskGlobal).value, - testExecution <<= testExecutionTask(key) - ) ) ++ inScope(GlobalScope)(Seq( - derive(testGrouping <<= singleTestGroupDefault) - )) - @deprecated("Doesn't provide for closing the underlying resources.", "0.13.1") - def 
testLogger(manager: Streams, baseKey: Scoped)(tdef: TestDefinition): Logger = - { - val scope = baseKey.scope - val extra = scope.extra match { case Select(x) => x; case _ => AttributeMap.empty } - val key = ScopedKey(scope.copy(extra = Select(testExtra(extra, tdef))), baseKey.key) - manager(key).log - } - private[this] def closeableTestLogger(manager: Streams, baseKey: Scoped, buffered: Boolean)(tdef: TestDefinition): TestLogger.PerTest = - { - val scope = baseKey.scope - val extra = scope.extra match { case Select(x) => x; case _ => AttributeMap.empty } - val key = ScopedKey(scope.copy(extra = Select(testExtra(extra, tdef))), baseKey.key) - val s = manager(key) - new TestLogger.PerTest(s.log, () => s.close(), buffered) - } - def buffered(log: Logger): Logger = new BufferedLogger(FullLogger(log)) - def testExtra(extra: AttributeMap, tdef: TestDefinition): AttributeMap = - { - val mod = tdef.fingerprint match { case f: SubclassFingerprint => f.isModule; case f: AnnotatedFingerprint => f.isModule; case _ => false } - extra.put(name.key, tdef.name).put(isModule, mod) - } - def singleTestGroup(key: Scoped): Initialize[Task[Seq[Tests.Group]]] = inTask(key, singleTestGroupDefault) - def singleTestGroupDefault: Initialize[Task[Seq[Tests.Group]]] = Def.task { - val tests = definedTests.value - val fk = fork.value - val opts = forkOptions.value - Seq(new Tests.Group("", tests, if(fk) Tests.SubProcess(opts) else Tests.InProcess)) - } - private[this] def forkOptions: Initialize[Task[ForkOptions]] = - (baseDirectory, javaOptions, outputStrategy, envVars, javaHome, connectInput) map { - (base, options, strategy, env, javaHomeDir, connectIn) => - // bootJars is empty by default because only jars on the user's classpath should be on the boot classpath - ForkOptions(bootJars = Nil, javaHome = javaHomeDir, connectInput = connectIn, outputStrategy = strategy, runJVMOptions = options, workingDirectory = Some(base), envVars = env) - } - - def testExecutionTask(task: Scoped): 
Initialize[Task[Tests.Execution]] = - (testOptions in task, parallelExecution in task, tags in task) map { - (opts, par, ts) => - new Tests.Execution(opts, par, ts) - } - - def testQuickFilter: Initialize[Task[Seq[String] => Seq[String => Boolean]]] = - (fullClasspath in test, streams in test) map { - (cp, s) => - val ans = cp.flatMap(_.metadata get Keys.analysis) - val succeeded = TestStatus.read(succeededFile( s.cacheDirectory )) - val stamps = collection.mutable.Map.empty[File, Long] - def stamp(dep: String): Long = { - val stamps = for (a <- ans; f <- a.relations.definesClass(dep)) yield intlStamp(f, a, Set.empty) - if (stamps.isEmpty) Long.MinValue else stamps.max - } - def intlStamp(f: File, analysis: inc.Analysis, s: Set[File]): Long = { - if (s contains f) Long.MinValue else - stamps.getOrElseUpdate(f, { - import analysis.{relations => rel, apis} - rel.internalSrcDeps(f).map(intlStamp(_, analysis, s + f)) ++ - rel.externalDeps(f).map(stamp) + - apis.internal(f).compilation.startTime - }.max) - } - def noSuccessYet(test: String) = succeeded.get(test) match { - case None => true - case Some(ts) => stamp(test) > ts - } - - args => for(filter <- selectedFilter(args)) yield - (test: String) => filter(test) && noSuccessYet(test) - } - def succeededFile(dir: File) = dir / "succeeded_tests" - - def inputTests(key: InputKey[_]): Initialize[InputTask[Unit]] = inputTests0.mapReferenced(Def.mapScope(_ in key.key)) - private[this] lazy val inputTests0: Initialize[InputTask[Unit]] = - { - val parser = loadForParser(definedTestNames)( (s, i) => testOnlyParser(s, i getOrElse Nil) ) - Def.inputTaskDyn { - val (selected, frameworkOptions) = parser.parsed - val s = streams.value - val filter = testFilter.value - val config = testExecution.value - - implicit val display = Project.showContextKey(state.value) - val modifiedOpts = Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions : _*) +: config.options - val newConfig = config.copy(options = modifiedOpts) - val 
output = allTestGroupsTask(s, loadedTestFrameworks.value, testLoader.value, testGrouping.value, newConfig, fullClasspath.value, javaHome.value, testForkedParallel.value) - val taskName = display(resolvedScoped.value) - val trl = testResultLogger.value - val processed = output.map(out => trl.run(s.log, out, taskName)) - Def.value(processed) - } - } - - def createTestRunners(frameworks: Map[TestFramework,Framework], loader: ClassLoader, config: Tests.Execution) : Map[TestFramework, Runner] = { - import Tests.Argument - val opts = config.options.toList - frameworks.map { case (tf, f) => - val args = opts.flatMap { - case Argument(None | Some(`tf`), args) => args - case _ => Nil - } - val mainRunner = f.runner(args.toArray, Array.empty[String], loader) - tf -> mainRunner - } - } - - def allTestGroupsTask(s: TaskStreams, frameworks: Map[TestFramework,Framework], loader: ClassLoader, groups: Seq[Tests.Group], config: Tests.Execution, cp: Classpath, javaHome: Option[File]): Task[Tests.Output] = { - allTestGroupsTask(s,frameworks,loader, groups, config, cp, javaHome, forkedParallelExecution = false) - } - - def allTestGroupsTask(s: TaskStreams, frameworks: Map[TestFramework,Framework], loader: ClassLoader, groups: Seq[Tests.Group], config: Tests.Execution, cp: Classpath, javaHome: Option[File], forkedParallelExecution: Boolean): Task[Tests.Output] = { - val runners = createTestRunners(frameworks, loader, config) - val groupTasks = groups map { - case Tests.Group(name, tests, runPolicy) => - runPolicy match { - case Tests.SubProcess(opts) => - val forkedConfig = config.copy(parallel = config.parallel && forkedParallelExecution) - s.log.debug(s"Forking tests - parallelism = ${forkedConfig.parallel}") - ForkTests(runners, tests.toList, forkedConfig, cp.files, opts, s.log) tag Tags.ForkedTestGroup - case Tests.InProcess => - Tests(frameworks, loader, runners, tests, config, s.log) - } - } - val output = Tests.foldTasks(groupTasks, config.parallel) - output map { out => - val 
summaries = - runners map { case (tf, r) => - Tests.Summary(frameworks(tf).name, r.done()) - } - out.copy(summaries = summaries) - } - } - - def selectedFilter(args: Seq[String]): Seq[String => Boolean] = - { - val filters = args map GlobFilter.apply - if(filters.isEmpty) - Seq(const(true)) - else - filters.map { f => (s: String) => f accept s } - } - def detectTests: Initialize[Task[Seq[TestDefinition]]] = (loadedTestFrameworks, compile, streams) map { (frameworkMap, analysis, s) => - Tests.discover(frameworkMap.values.toList, analysis, s.log)._1 - } - def defaultRestrictions: Initialize[Seq[Tags.Rule]] = parallelExecution { par => - val max = EvaluateTask.SystemProcessors - Tags.limitAll(if(par) max else 1) :: Tags.limit(Tags.ForkedTestGroup, 1) :: Nil - } - - lazy val packageBase: Seq[Setting[_]] = Seq( - artifact := Artifact(moduleName.value) - ) ++ Defaults.globalDefaults(Seq( - packageOptions :== Nil, - artifactName :== ( Artifact.artifactName _ ) - )) - - lazy val packageConfig: Seq[Setting[_]] = - inTask(packageBin)(Seq( - packageOptions <<= (name, version, homepage, organization, organizationName, mainClass, packageOptions) map { (name, ver, h, org, orgName, main, p) => Package.addSpecManifestAttributes(name, ver, orgName) +: Package.addImplManifestAttributes(name, ver, h, org, orgName) +: main.map(Package.MainClass.apply) ++: p })) ++ - inTask(packageSrc)(Seq( - packageOptions := Package.addSpecManifestAttributes(name.value, version.value, organizationName.value) +: packageOptions.value )) ++ - packageTaskSettings(packageBin, packageBinMappings) ++ - packageTaskSettings(packageSrc, packageSrcMappings) ++ - packageTaskSettings(packageDoc, packageDocMappings) ++ - Seq(`package` := packageBin.value) - - def packageBinMappings = products map { _ flatMap Path.allSubpaths } - def packageDocMappings = doc map { Path.allSubpaths(_).toSeq } - def packageSrcMappings = concatMappings(resourceMappings, sourceMappings) - - @deprecated("Use `packageBinMappings` 
instead", "0.12.0") - def packageBinTask = packageBinMappings - @deprecated("Use `packageDocMappings` instead", "0.12.0") - def packageDocTask = packageDocMappings - @deprecated("Use `packageSrcMappings` instead", "0.12.0") - def packageSrcTask = packageSrcMappings - - private type Mappings = Initialize[Task[Seq[(File, String)]]] - def concatMappings(as: Mappings, bs: Mappings) = (as zipWith bs)( (a,b) => (a, b) map { case (a, b) => a ++ b } ) - - // drop base directories, since there are no valid mappings for these - def sourceMappings = (unmanagedSources, unmanagedSourceDirectories, baseDirectory) map { (srcs, sdirs, base) => - ( (srcs --- sdirs --- base) pair (relativeTo(sdirs)|relativeTo(base)|flat)) toSeq - } - def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories) - def relativeMappings(files: ScopedTaskable[Seq[File]], dirs: ScopedTaskable[Seq[File]]): Initialize[Task[Seq[(File, String)]]] = - (files, dirs) map { (rs, rdirs) => - (rs --- rdirs) pair (relativeTo(rdirs)|flat) toSeq - } - - def collectFiles(dirs: ScopedTaskable[Seq[File]], filter: ScopedTaskable[FileFilter], excludes: ScopedTaskable[FileFilter]): Initialize[Task[Seq[File]]] = - (dirs, filter, excludes) map { (d,f,excl) => d.descendantsExcept(f,excl).get } - - def artifactPathSetting(art: SettingKey[Artifact]) = (crossTarget, projectID, art, scalaVersion in artifactName, scalaBinaryVersion in artifactName, artifactName) { - (t, module, a, sv, sbv, toString) => - t / toString(ScalaVersion(sv, sbv), module, a) asFile - } - def artifactSetting = ((artifact, artifactClassifier).identity zipWith configuration.?) 
{ case ((a,classifier),cOpt) => - val cPart = cOpt flatMap { - case Compile => None - case Test => Some(Artifact.TestsClassifier) - case c => Some(c.name) - } - val combined = cPart.toList ++ classifier.toList - if(combined.isEmpty) a.copy(classifier = None, configurations = cOpt.toList) else { - val classifierString = combined mkString "-" - val confs = cOpt.toList flatMap { c => artifactConfigurations(a, c, classifier) } - a.copy(classifier = Some(classifierString), `type` = Artifact.classifierType(classifierString), configurations = confs) - } - } - def artifactConfigurations(base: Artifact, scope: Configuration, classifier: Option[String]): Iterable[Configuration] = - classifier match { - case Some(c) => Artifact.classifierConf(c) :: Nil - case None => scope :: Nil - } - - @deprecated("Use `Util.pairID` instead", "0.12.0") - def pairID = Util.pairID - - @deprecated("Use the cacheDirectory val on streams.", "0.13.0") - def perTaskCache(key: TaskKey[_]): Setting[File] = - cacheDirectory ~= { _ / ("for_" + key.key.label) } - - @deprecated("Use `packageTaskSettings` instead", "0.12.0") - def packageTasks(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File,String)]]]) = packageTaskSettings(key, mappingsTask) - def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File,String)]]]) = - inTask(key)( Seq( - key in TaskGlobal <<= packageTask, - packageConfiguration <<= packageConfigurationTask, - mappings <<= mappingsTask, - packagedArtifact := (artifact.value, key.value), - artifact <<= artifactSetting, - artifactPath <<= artifactPathSetting(artifact) - )) - def packageTask: Initialize[Task[File]] = - (packageConfiguration, streams) map { (config, s) => - Package(config, s.cacheDirectory, s.log) - config.jar - } - def packageConfigurationTask: Initialize[Task[Package.Configuration]] = - (mappings, artifactPath, packageOptions) map { (srcs, path, options) => - new Package.Configuration(srcs, path, options) - } - - def 
selectRunMain(classes: Seq[String]): Option[String] = - sbt.SelectMainClass(Some(SimpleReader readLine _), classes) - def selectPackageMain(classes: Seq[String]): Option[String] = - sbt.SelectMainClass(None, classes) - - def doClean(clean: Seq[File], preserve: Seq[File]): Unit = - IO.withTemporaryDirectory { temp => - val (dirs, files) = preserve.filter(_.exists).flatMap(_.***.get).partition(_.isDirectory) - val mappings = files.zipWithIndex map { case (f, i) => (f, new File(temp, i.toHexString)) } - IO.move(mappings) - IO.delete(clean) - IO.createDirectories(dirs) // recreate empty directories - IO.move(mappings.map(_.swap)) - } - def runMainTask(classpath: Initialize[Task[Classpath]], scalaRun: Initialize[Task[ScalaRun]]): Initialize[InputTask[Unit]] = - { - import DefaultParsers._ - val parser = loadForParser(discoveredMainClasses)( (s, names) => runMainParser(s, names getOrElse Nil) ) - Def.inputTask { - val (mainClass, args) = parser.parsed - toError(scalaRun.value.run(mainClass, data(classpath.value), args, streams.value.log)) - } - } - - def runTask(classpath: Initialize[Task[Classpath]], mainClassTask: Initialize[Task[Option[String]]], scalaRun: Initialize[Task[ScalaRun]]): Initialize[InputTask[Unit]] = - { - import Def.parserToInput - val parser = Def.spaceDelimited() - Def.inputTask { - val mainClass = mainClassTask.value getOrElse error("No main class detected.") - toError(scalaRun.value.run(mainClass, data(classpath.value), parser.parsed, streams.value.log)) - } - } - - def runnerTask = runner <<= runnerInit - def runnerInit: Initialize[Task[ScalaRun]] = Def.task { - val tmp = taskTemporaryDirectory.value - val si = scalaInstance.value - if(fork.value) new ForkRun(forkOptions.value) else new Run(si, trapExit.value, tmp) - } - - @deprecated("Use `docTaskSettings` instead", "0.12.0") - def docSetting(key: TaskKey[File]) = docTaskSettings(key) - def docTaskSettings(key: TaskKey[File] = doc): Seq[Setting[_]] = inTask(key)(Seq( - apiMappings ++= { 
if(autoAPIMappings.value) APIMappings.extract(dependencyClasspath.value, streams.value.log).toMap else Map.empty[File,URL] }, - fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"), - key in TaskGlobal := { - val s = streams.value - val cs = compilers.value - val srcs = sources.value - val out = target.value - val sOpts = scalacOptions.value - val jOpts = javacOptions.value - val xapis = apiMappings.value - val hasScala = srcs.exists(_.name.endsWith(".scala")) - val hasJava = srcs.exists(_.name.endsWith(".java")) - val cp = data(dependencyClasspath.value).toList - val label = nameForSrc(configuration.value.name) - val fiOpts = fileInputOptions.value - val (options, runDoc) = - if(hasScala) - (sOpts ++ Opts.doc.externalAPI(xapis), // can't put the .value calls directly here until 2.10.2 - Doc.scaladoc(label, s.cacheDirectory / "scala", cs.scalac.onArgs(exported(s, "scaladoc")), fiOpts)) - else if(hasJava) - (jOpts, - Doc.javadoc(label, s.cacheDirectory / "java", cs.javac.onArgs(exported(s, "javadoc")), fiOpts)) - else - (Nil, RawCompileLike.nop) - runDoc(srcs, cp, out, options, maxErrors.value, s.log) - out - } - )) - - def mainRunTask = run <<= runTask(fullClasspath in Runtime, mainClass in run, runner in run) - def mainRunMainTask = runMain <<= runMainTask(fullClasspath in Runtime, runner in run) - - def discoverMainClasses(analysis: inc.Analysis): Seq[String] = - Discovery.applications(Tests.allDefs(analysis)) collect { case (definition, discovered) if(discovered.hasMain) => definition.name } - - def consoleProjectTask = (state, streams, initialCommands in consoleProject) map { (state, s, extra) => ConsoleProject(state, extra)(s.log); println() } - def consoleTask: Initialize[Task[Unit]] = consoleTask(fullClasspath, console) - def consoleQuickTask = consoleTask(externalDependencyClasspath, consoleQuick) - def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[_]): Initialize[Task[Unit]] = - (compilers in task, classpath in task, scalacOptions 
in task, initialCommands in task, cleanupCommands in task, taskTemporaryDirectory in task, scalaInstance in task, streams) map { - (cs, cp, options, initCommands, cleanup, temp, si, s) => - val cpFiles = data(cp) - val fullcp = (cpFiles ++ si.jars).distinct - val loader = sbt.classpath.ClasspathUtilities.makeLoader(fullcp, si, IO.createUniqueDirectory(temp)) - val compiler = cs.scalac.onArgs(exported(s, "scala")) - (new Console(compiler))(cpFiles, options, loader, initCommands, cleanup)()(s.log).foreach(msg => error(msg)) - println() - } - - private[this] def exported(w: PrintWriter, command: String): Seq[String] => Unit = args => - w.println( (command +: args).mkString(" ") ) - private[this] def exported(s: TaskStreams, command: String): Seq[String] => Unit = args => { - val w = s.text(ExportStream) - try exported(w, command) - finally w.close() // workaround for #937 - } - - @deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0") - def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings) - - def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value, (compilerReporter in compile).value) } - private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs, reporter: Option[xsbti.Reporter]): inc.Analysis = - { - lazy val x = s.text(ExportStream) - def onArgs(cs: Compiler.Compilers) = cs.copy(scalac = cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac.onArgs(exported(x, "javac"))) - val i = ci.copy(compilers = onArgs(ci.compilers)) - try reporter match { - case Some(reporter) => Compiler(i, s.log, reporter) - case None => Compiler(i, s.log) - } - finally x.close() // workaround for #937 - } - def compileIncSetupTask = Def.task { - Compiler.IncSetup( - analysisMap(dependencyClasspath.value), - definesClass.value, - (skip in compile).value, - // TODO - this is kind of a bad way to grab the cache directory for streams... 
- streams.value.cacheDirectory / compileAnalysisFilename.value, - compilerCache.value, - incOptions.value) - } - def compileInputsSettings: Seq[Setting[_]] = - Seq(compileInputs := { - val cp = classDirectory.value +: data(dependencyClasspath.value) - Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value, maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log) - }, - compilerReporter := None) - - def printWarningsTask: Initialize[Task[Unit]] = - (streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) => - val problems = analysis.infos.allInfos.values.flatMap(i => i.reportedProblems++ i.unreportedProblems) - val reporter = new LoggerReporter(max, s.log, Compiler.foldMappers(spms)) - problems foreach { p => reporter.display(p.position, p.message, p.severity) } - } - - def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID = - m.extra(CustomPomParser.SbtVersionKey -> sbtV, CustomPomParser.ScalaVersionKey -> scalaV).copy(crossVersion = CrossVersion.Disabled) - - @deprecated("Use PluginDiscovery.writeDescriptor.", "0.13.2") - def writePluginsDescriptor(plugins: Set[String], dir: File): Seq[File] = - PluginDiscovery.writeDescriptor(plugins.toSeq, dir, PluginDiscovery.Paths.Plugins).toList - - def discoverSbtPluginNames: Initialize[Task[PluginDiscovery.DiscoveredNames]] = Def.task { - if(sbtPlugin.value) PluginDiscovery.discoverSourceAll(compile.value) else PluginDiscovery.emptyDiscoveredNames - } - - @deprecated("Use discoverSbtPluginNames.", "0.13.2") - def discoverPlugins: Initialize[Task[Set[String]]] = (compile, sbtPlugin, streams) map { (analysis, isPlugin, s) => if(isPlugin) discoverSbtPlugins(analysis, s.log) else Set.empty } - - @deprecated("Use PluginDiscovery.sourceModuleNames[Plugin].", "0.13.2") - def discoverSbtPlugins(analysis: inc.Analysis, log: Logger): Set[String] = - 
PluginDiscovery.sourceModuleNames(analysis, classOf[Plugin].getName).toSet - - def copyResourcesTask = - (classDirectory, resources, resourceDirectories, streams) map { (target, resrcs, dirs, s) => - val cacheFile = s.cacheDirectory / "copy-resources" - val mappings = (resrcs --- dirs) pair (rebase(dirs, target) | flat(target)) - s.log.debug("Copy resource mappings: " + mappings.mkString("\n\t","\n\t","")) - Sync(cacheFile)( mappings ) - mappings - } - - def runMainParser: (State, Seq[String]) => Parser[(String, Seq[String])] = - { - import DefaultParsers._ - (state, mainClasses) => Space ~> token(NotSpace examples mainClasses.toSet) ~ spaceDelimited("") - } - - def testOnlyParser: (State, Seq[String]) => Parser[(Seq[String],Seq[String])] = - { (state, tests) => - import DefaultParsers._ - val selectTests = distinctParser(tests.toSet, true) - val options = (token(Space) ~> token("--") ~> spaceDelimited("