From 7f92cc5f4c71107c3f9bc03bfe100ecfa445f7fe Mon Sep 17 00:00:00 2001 From: dmharrah Date: Fri, 26 Jun 2009 01:26:06 +0000 Subject: [PATCH] * Removed build scripts and manual dependencies * useDefaultConfigurations supersedes useMavenConfigurations and is now true by default * Moved installer-plugin to its own independent project as an sbt plugin * bumped version for 0.5 release * Updated project excludes for plugins * Specifying the explicit URL for dependency now infers the extension and type from the URL * Can load credentials from a properties file instead of adding them inline * Added help for '+' * Added method configurationPath to get the path to the directory containing dependencies downloaded for a Configuration * managedStyle = ManagedStyle.Maven by default now git-svn-id: https://simple-build-tool.googlecode.com/svn/trunk@813 d89573ee-9141-11dd-94d4-bdf5e562f29c --- LICENSE | 25 + NOTICE | 58 ++ boot/LICENSE | 25 + boot/NOTICE | 35 + boot/lib/jline-0.9.94.jar | Bin 0 -> 87325 bytes boot/licenses/LICENSE_Ivy | 258 +++++ boot/licenses/LICENSE_JLine | 33 + boot/licenses/LICENSE_Scala | 35 + boot/src/main/scala/Boot.scala | 310 ++++++ boot/src/main/scala/BootConfiguration.scala | 96 ++ boot/src/main/scala/FilteredLoader.scala | 20 + boot/src/main/scala/ProjectProperties.scala | 147 +++ boot/src/main/scala/SimpleReader.scala | 26 + boot/src/main/scala/Update.scala | 248 +++++ install/extract/src/main/scala/Main.scala | 108 +++ .../main/scala/SelfExtractingProject.scala | 79 ++ install/project/build.properties | 7 + install/project/build/InstallProject.scala | 90 ++ licenses/LICENSE_Apache | 176 ++++ licenses/LICENSE_Scala | 35 + licenses/LICENSE_ScalaCheck | 32 + licenses/LICENSE_sbt | 25 + licenses/LICENSE_specs | 23 + project/build.properties | 7 + project/build/src/CrossCompileProject.scala | 207 ++++ project/build/src/LoaderProject.scala | 129 +++ project/build/src/SbtProject.scala | 74 ++ project/build/src/ScriptedLoader.scala | 31 + 
project/build/src/ScriptedTests.scala | 70 ++ project/build/src/TestScriptParser.scala | 269 ++++++ src/main/resources/scalac-plugin.xml | 4 + src/main/scala/sbt/Analysis.scala | 220 +++++ src/main/scala/sbt/AnalysisCallback.scala | 93 ++ src/main/scala/sbt/Analyzer.scala | 234 +++++ src/main/scala/sbt/AutoCompilerPlugins.scala | 33 + src/main/scala/sbt/BasicProjectTypes.scala | 578 ++++++++++++ src/main/scala/sbt/BuilderProject.scala | 210 +++++ src/main/scala/sbt/ClasspathUtilities.scala | 197 ++++ src/main/scala/sbt/Compile.scala | 279 ++++++ src/main/scala/sbt/Conditional.scala | 381 ++++++++ src/main/scala/sbt/Control.scala | 73 ++ src/main/scala/sbt/Dag.scala | 30 + src/main/scala/sbt/DefaultProject.scala | 458 +++++++++ src/main/scala/sbt/DotGraph.scala | 47 + src/main/scala/sbt/Environment.scala | 345 +++++++ src/main/scala/sbt/ExitHook.scala | 43 + src/main/scala/sbt/FileTask.scala | 108 +++ src/main/scala/sbt/FileUtilities.scala | 892 ++++++++++++++++++ src/main/scala/sbt/Fork.scala | 82 ++ src/main/scala/sbt/Format.scala | 45 + src/main/scala/sbt/Hash.scala | 79 ++ src/main/scala/sbt/IntegrationTesting.scala | 85 ++ src/main/scala/sbt/LineReader.scala | 97 ++ src/main/scala/sbt/Logger.scala | 337 +++++++ src/main/scala/sbt/Main.scala | 673 +++++++++++++ src/main/scala/sbt/ManageDependencies.scala | 743 +++++++++++++++ src/main/scala/sbt/ManagedInterface.scala | 376 ++++++++ src/main/scala/sbt/ModuleUtilities.scala | 14 + src/main/scala/sbt/NameFilter.scala | 72 ++ src/main/scala/sbt/Pack.scala | 87 ++ src/main/scala/sbt/ParallelRunner.scala | 464 +++++++++ src/main/scala/sbt/Path.scala | 336 +++++++ src/main/scala/sbt/Process.scala | 89 ++ src/main/scala/sbt/Project.scala | 462 +++++++++ src/main/scala/sbt/ProjectInfo.scala | 107 +++ src/main/scala/sbt/ProjectPaths.scala | 300 ++++++ src/main/scala/sbt/ReflectUtilities.scala | 52 + src/main/scala/sbt/Resources.scala | 149 +++ src/main/scala/sbt/Run.scala | 167 ++++ src/main/scala/sbt/ScalaProject.scala | 
345 +++++++ src/main/scala/sbt/ScalaVersion.scala | 47 + .../scala/sbt/SourceModificationWatch.scala | 32 + src/main/scala/sbt/TaskManager.scala | 88 ++ src/main/scala/sbt/TestFramework.scala | 221 +++++ src/main/scala/sbt/TestReportListener.scala | 316 +++++++ src/main/scala/sbt/TrapExit.scala | 238 +++++ src/main/scala/sbt/Version.scala | 65 ++ src/main/scala/sbt/WebApp.scala | 245 +++++ src/main/scala/sbt/Webstart.scala | 277 ++++++ src/main/scala/sbt/impl/CommandParser.scala | 54 ++ src/main/scala/sbt/impl/MapUtilities.scala | 59 ++ src/main/scala/sbt/impl/ProcessImpl.scala | 353 +++++++ src/main/scala/sbt/impl/RunTask.scala | 164 ++++ src/main/scala/sbt/impl/SelectMainClass.scala | 44 + .../scala/sbt/impl/TestFrameworkImpl.scala | 186 ++++ src/main/scala/sbt/impl/TestParser.scala | 47 + .../scala/sbt/impl/TestStatusReporter.scala | 74 ++ src/main/scala/sbt/wrap/Wrappers.scala | 112 +++ .../interactive/changes/TestProject2.scala | 6 + .../interactive/changes/TestProject3.scala | 7 + .../interactive/changes/TestProject4.scala | 10 + .../interactive/changes/TestProject5.scala | 12 + .../interactive/changes/TestProject6.scala | 12 + .../interactive/changes/TestProject7.scala | 12 + .../interactive/project/build.properties | 4 + .../project/build/src/Marker.scala | 13 + .../project/build/src/TestProject.scala | 6 + .../actions/interactive/test | 46 + .../run-test/project/build.properties | 3 + .../project/build/src/TestProject.scala | 7 + .../run-test/src/main/scala/Foo.scala | 27 + .../run-test/src/test/scala/FooTest.scala | 12 + .../compiler-project/run-test/test | 11 + .../project/build.properties | 2 + .../project/build/src/TestProject.scala | 26 + .../exclude-transitive/test | 32 + .../inherit-repo/changes/CorrectProject.scala | 19 + .../changes/CorrectProject2.scala | 21 + .../changes/CorrectProject3.scala | 21 + .../inherit-repo/project/build.properties | 5 + .../project/build/src/TestProject.scala | 18 + .../dependency-management/inherit-repo/test | 31 + 
.../inline-default/project/build.properties | 2 + .../project/build/src/TestProject.scala | 20 + .../dependency-management/inline-default/test | 42 + .../project/build.properties | 4 + .../project/build/src/UpdateTestProject.scala | 8 + .../inline-dependencies-a/test | 5 + .../changes/scala-tools-ivysettings.xml | 19 + .../ivy-settings-a/changes/scalacheck-ivy.xml | 7 + .../ivy-settings-a/ivy.xml | 4 + .../ivy-settings-a/ivysettings.xml | 3 + .../ivy-settings-a/project/build.properties | 4 + .../dependency-management/ivy-settings-a/test | 20 + .../changes/scala-tools-ivysettings.xml | 19 + .../ivy-settings-b/ivysettings.xml | 3 + .../ivy-settings-b/project/build.properties | 4 + .../project/build/src/UpdateTestProject.scala | 6 + .../dependency-management/ivy-settings-b/test | 14 + .../java.net/project/build.properties | 2 + .../project/build/src/TestProject.scala | 7 + .../dependency-management/java.net/test | 2 + .../provided/project/build.properties | 2 + .../project/build/src/TestProject.scala | 32 + .../dependency-management/provided/test | 51 + .../url/project/build.properties | 2 + .../url/project/build/src/TestProject.scala | 25 + .../dependency-management/url/test | 12 + .../java/analysis/project/build.properties | 5 + .../java/analysis/src/main/java/test/R.java | 16 + .../sbt-test-resources/java/analysis/test | 2 + .../java/basic/project/build.properties | 5 + .../java/basic/src/main/java/test/R.java | 6 + .../java/basic/src/main/scala/S.scala | 7 + .../sbt-test-resources/java/basic/test | 2 + .../java/options/project/build.properties | 5 + .../project/build/src/JavaProject.scala | 8 + .../java/options/src/main/java/test/R.java | 9 + .../java/options/src/main/scala/S.scala | 7 + .../sbt-test-resources/java/options/test | 2 + .../lazy-name/project/build.properties | 4 + .../sbt-test-resources/package/lazy-name/test | 26 + .../package/manifest/project/build.properties | 4 + .../build/src/ManifestTestProject.scala | 16 + .../src/main/scala/jartest/Main.scala 
| 6 + .../sbt-test-resources/package/manifest/test | 11 + .../resources/project/build.properties | 4 + .../build/src/ManifestTestProject.scala | 16 + .../src/main/resources/main_resource_test | 1 + .../src/main/scala/jartest/Main.scala | 10 + .../sbt-test-resources/package/resources/test | 36 + .../changes/LibTestProject.scala | 6 + .../Class.forName/project/build.properties | 5 + .../Class.forName/src/main/scala/Test.scala | 8 + .../project/Class.forName/test | 19 + .../project/flatten/project/build.properties | 5 + .../project/build/src/FlatProject.scala | 23 + .../project/flatten/src/JavaA.java | 4 + .../project/flatten/src/ScalaA.scala | 6 + .../project/flatten/src/a/JavaB.java | 6 + .../project/flatten/src/a/ScalaB.scala | 16 + .../project/flatten/src/a/test-resource-a | 1 + .../project/flatten/src/test-resource | 1 + .../sbt-test-resources/project/flatten/test | 25 + .../project/flatten/test-src/SimpleTest.scala | 10 + .../flatten/test-src/c/ResourcesTest.scala | 19 + .../flatten/test-src/c/test-resource-c | 1 + .../project/flatten/test-src/test-resource | 1 + .../project/lib/changes/LibTestProject.scala | 6 + .../project/lib/project/build.properties | 5 + .../project/lib/src/main/scala/Test.scala | 6 + .../sbt-test-resources/project/lib/test | 19 + .../project/multi/changes/MultiProject.scala | 6 + .../multi/changes/SingleAndTraitProject.scala | 9 + .../project/multi/changes/SingleProject.scala | 5 + .../multi/changes/SinglePublicProject.scala | 7 + .../project/multi/project/build.properties | 2 + .../sbt-test-resources/project/multi/test | 31 + .../multi-project-a/project/build.properties | 4 + .../project/build/src/TestProject.scala | 7 + .../properties/multi-project-a/test | 5 + .../multi-project-b/project/build.properties | 4 + .../project/build/src/TestProject.scala | 11 + .../properties/multi-project-b/test | 5 + .../run/daemon-exit/project/build.properties | 3 + .../daemon-exit/src/main/scala/Daemon.scala | 23 + 
.../sbt-test-resources/run/daemon-exit/test | 2 + .../run/daemon/project/build.properties | 3 + .../run/daemon/src/main/scala/Daemon.scala | 13 + .../sbt-test-resources/run/daemon/test | 2 + .../run/spawn-exit/project/build.properties | 3 + .../run/spawn-exit/src/main/scala/Spawn.scala | 30 + .../sbt-test-resources/run/spawn-exit/test | 2 + .../run/spawn/project/build.properties | 3 + .../run/spawn/src/main/scala/Spawn.scala | 35 + .../sbt-test-resources/run/spawn/test | 2 + .../empty-a/changes/A.scala | 6 + .../empty-a/changes/A2.scala | 6 + .../empty-a/changes/B.scala | 6 + .../empty-a/project/build.properties | 4 + .../source-dependencies/empty-a/test | 41 + .../new-cyclic/changes/A2.scala | 1 + .../new-cyclic/project/build.properties | 4 + .../new-cyclic/src/main/scala/A.scala | 1 + .../new-cyclic/src/main/scala/B.scala | 1 + .../source-dependencies/new-cyclic/test | 8 + .../remove-test-a/changes/1.scala | 6 + .../remove-test-a/project/build.properties | 4 + .../source-dependencies/remove-test-a/test | 14 + .../remove-test-b/changes/A2.scala | 6 + .../remove-test-b/changes/B3.scala | 6 + .../remove-test-b/changes/B4.scala | 5 + .../remove-test-b/changes/B5.scala | 5 + .../remove-test-b/project/build.properties | 4 + .../remove-test-b/src/main/scala/A.scala | 5 + .../remove-test-b/src/main/scala/B.scala | 5 + .../source-dependencies/remove-test-b/test | 50 + .../replace-test-a/changes/first.scala | 4 + .../replace-test-a/changes/second.scala | 4 + .../replace-test-a/project/build.properties | 2 + .../project/build/src/TestProject.scala | 9 + .../source-dependencies/replace-test-a/test | 19 + .../transitive-a/changes/A2.scala | 4 + .../transitive-a/project/build.properties | 4 + .../transitive-a/src/main/scala/A.scala | 4 + .../transitive-a/src/main/scala/B.scala | 4 + .../transitive-a/src/main/scala/C.scala | 4 + .../source-dependencies/transitive-a/test | 8 + .../transitive-b/changes/A2.scala | 4 + .../transitive-b/project/build.properties | 4 + 
.../transitive-b/src/main/scala/A.scala | 4 + .../transitive-b/src/main/scala/B.scala | 1 + .../transitive-b/src/main/scala/C.scala | 4 + .../source-dependencies/transitive-b/test | 8 + .../tests/Class.forName/ivy.xml | 10 + .../Class.forName/project/build.properties | 5 + .../Class.forName/src/main/scala/Test.scala | 10 + .../Class.forName/src/test/scala/Test.scala | 15 + .../tests/Class.forName/test | 23 + .../tests/extend/changes/ScalaCheck.scala | 41 + .../tests/extend/changes/TestFailure.scala | 6 + .../tests/extend/changes/TestProject2.scala | 8 + .../tests/extend/changes/TestProject3.scala | 12 + .../tests/extend/changes/TestSuccess.scala | 6 + .../tests/extend/project/build.properties | 5 + .../project/build/src/TestProject.scala | 9 + .../sbt-test-resources/tests/extend/test | 72 ++ .../tests/resources/ivy.xml | 7 + .../tests/resources/project/build.properties | 3 + .../src/main/resources/MainResource.txt | 1 + .../src/test/resources/TestResource.txt | 1 + .../resources/src/test/scala/BasicTest.scala | 11 + .../sbt-test-resources/tests/resources/test | 5 + .../scalacheck-a/changes/BasicTest.scala | 21 + .../scalacheck-a/changes/FailedTest.scala | 21 + .../scalacheck-a/project/build.properties | 4 + .../project/build/src/TestProject.scala | 7 + .../src/test/scala/DummyTest.scala | 2 + .../tests/scalacheck-a/test | 23 + .../scalatest-ignore/project/build.properties | 2 + .../scalatest-ignore/project/build/Test.scala | 7 + .../src/test/scala/IgnoreTest.scala | 11 + .../tests/scalatest-ignore/test | 5 + .../tests/specs-a/changes/BasicTest.scala | 36 + .../tests/specs-a/changes/FailTest.scala | 36 + .../tests/specs-a/project/build.properties | 4 + .../project/build/src/TestProject.scala | 7 + .../specs-a/src/test/scala/DummyTest.scala | 2 + .../sbt-test-resources/tests/specs-a/test | 23 + .../specs-nested/project/build.properties | 4 + .../project/build/src/TestProject.scala | 6 + .../src/test/scala/TestSpecification.scala | 19 + .../tests/specs-nested/test | 5 
+ src/test/scala/sbt/DagSpecification.scala | 56 ++ .../scala/sbt/EnvironmentSpecification.scala | 98 ++ .../sbt/FileUtilitiesSpecification.scala | 73 ++ .../scala/sbt/NameFilterSpecification.scala | 39 + src/test/scala/sbt/PathSpecification.scala | 109 +++ src/test/scala/sbt/ProcessSpecification.scala | 91 ++ src/test/scala/sbt/ReflectSpecification.scala | 181 ++++ src/test/scala/sbt/TestedProcess.scala | 55 ++ src/test/scala/sbt/VersionSpecification.scala | 61 ++ .../scala/sbt/wrap/MutableSetWrapper.scala | 89 ++ 293 files changed, 17611 insertions(+) create mode 100644 LICENSE create mode 100644 NOTICE create mode 100644 boot/LICENSE create mode 100644 boot/NOTICE create mode 100644 boot/lib/jline-0.9.94.jar create mode 100644 boot/licenses/LICENSE_Ivy create mode 100644 boot/licenses/LICENSE_JLine create mode 100644 boot/licenses/LICENSE_Scala create mode 100755 boot/src/main/scala/Boot.scala create mode 100644 boot/src/main/scala/BootConfiguration.scala create mode 100644 boot/src/main/scala/FilteredLoader.scala create mode 100644 boot/src/main/scala/ProjectProperties.scala create mode 100644 boot/src/main/scala/SimpleReader.scala create mode 100644 boot/src/main/scala/Update.scala create mode 100644 install/extract/src/main/scala/Main.scala create mode 100644 install/plugin/src/main/scala/SelfExtractingProject.scala create mode 100644 install/project/build.properties create mode 100644 install/project/build/InstallProject.scala create mode 100644 licenses/LICENSE_Apache create mode 100644 licenses/LICENSE_Scala create mode 100644 licenses/LICENSE_ScalaCheck create mode 100644 licenses/LICENSE_sbt create mode 100644 licenses/LICENSE_specs create mode 100644 project/build.properties create mode 100644 project/build/src/CrossCompileProject.scala create mode 100644 project/build/src/LoaderProject.scala create mode 100644 project/build/src/SbtProject.scala create mode 100644 project/build/src/ScriptedLoader.scala create mode 100644 
project/build/src/ScriptedTests.scala create mode 100644 project/build/src/TestScriptParser.scala create mode 100644 src/main/resources/scalac-plugin.xml create mode 100644 src/main/scala/sbt/Analysis.scala create mode 100644 src/main/scala/sbt/AnalysisCallback.scala create mode 100644 src/main/scala/sbt/Analyzer.scala create mode 100644 src/main/scala/sbt/AutoCompilerPlugins.scala create mode 100644 src/main/scala/sbt/BasicProjectTypes.scala create mode 100644 src/main/scala/sbt/BuilderProject.scala create mode 100644 src/main/scala/sbt/ClasspathUtilities.scala create mode 100644 src/main/scala/sbt/Compile.scala create mode 100644 src/main/scala/sbt/Conditional.scala create mode 100644 src/main/scala/sbt/Control.scala create mode 100644 src/main/scala/sbt/Dag.scala create mode 100644 src/main/scala/sbt/DefaultProject.scala create mode 100644 src/main/scala/sbt/DotGraph.scala create mode 100644 src/main/scala/sbt/Environment.scala create mode 100644 src/main/scala/sbt/ExitHook.scala create mode 100644 src/main/scala/sbt/FileTask.scala create mode 100644 src/main/scala/sbt/FileUtilities.scala create mode 100644 src/main/scala/sbt/Fork.scala create mode 100644 src/main/scala/sbt/Format.scala create mode 100644 src/main/scala/sbt/Hash.scala create mode 100644 src/main/scala/sbt/IntegrationTesting.scala create mode 100644 src/main/scala/sbt/LineReader.scala create mode 100644 src/main/scala/sbt/Logger.scala create mode 100644 src/main/scala/sbt/Main.scala create mode 100644 src/main/scala/sbt/ManageDependencies.scala create mode 100644 src/main/scala/sbt/ManagedInterface.scala create mode 100644 src/main/scala/sbt/ModuleUtilities.scala create mode 100644 src/main/scala/sbt/NameFilter.scala create mode 100644 src/main/scala/sbt/Pack.scala create mode 100644 src/main/scala/sbt/ParallelRunner.scala create mode 100644 src/main/scala/sbt/Path.scala create mode 100644 src/main/scala/sbt/Process.scala create mode 100644 src/main/scala/sbt/Project.scala create mode 100644 
src/main/scala/sbt/ProjectInfo.scala create mode 100644 src/main/scala/sbt/ProjectPaths.scala create mode 100644 src/main/scala/sbt/ReflectUtilities.scala create mode 100644 src/main/scala/sbt/Resources.scala create mode 100644 src/main/scala/sbt/Run.scala create mode 100644 src/main/scala/sbt/ScalaProject.scala create mode 100644 src/main/scala/sbt/ScalaVersion.scala create mode 100644 src/main/scala/sbt/SourceModificationWatch.scala create mode 100644 src/main/scala/sbt/TaskManager.scala create mode 100644 src/main/scala/sbt/TestFramework.scala create mode 100644 src/main/scala/sbt/TestReportListener.scala create mode 100644 src/main/scala/sbt/TrapExit.scala create mode 100644 src/main/scala/sbt/Version.scala create mode 100644 src/main/scala/sbt/WebApp.scala create mode 100644 src/main/scala/sbt/Webstart.scala create mode 100644 src/main/scala/sbt/impl/CommandParser.scala create mode 100644 src/main/scala/sbt/impl/MapUtilities.scala create mode 100644 src/main/scala/sbt/impl/ProcessImpl.scala create mode 100644 src/main/scala/sbt/impl/RunTask.scala create mode 100644 src/main/scala/sbt/impl/SelectMainClass.scala create mode 100755 src/main/scala/sbt/impl/TestFrameworkImpl.scala create mode 100644 src/main/scala/sbt/impl/TestParser.scala create mode 100644 src/main/scala/sbt/impl/TestStatusReporter.scala create mode 100644 src/main/scala/sbt/wrap/Wrappers.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject2.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject3.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject4.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject5.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject6.scala create mode 100644 
src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject7.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/project/build/src/Marker.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/actions/interactive/test create mode 100644 src/test/resources/sbt-test-resources/compiler-project/run-test/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/compiler-project/run-test/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/compiler-project/run-test/src/main/scala/Foo.scala create mode 100644 src/test/resources/sbt-test-resources/compiler-project/run-test/src/test/scala/FooTest.scala create mode 100644 src/test/resources/sbt-test-resources/compiler-project/run-test/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject2.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject3.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build/src/TestProject.scala 
create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inherit-repo/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-default/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build/src/UpdateTestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scala-tools-ivysettings.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scalacheck-ivy.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivy.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivysettings.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/changes/scala-tools-ivysettings.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/ivysettings.xml create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build/src/UpdateTestProject.scala 
create mode 100644 src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/java.net/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/java.net/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/java.net/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/provided/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/provided/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/provided/test create mode 100644 src/test/resources/sbt-test-resources/dependency-management/url/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/dependency-management/url/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/dependency-management/url/test create mode 100644 src/test/resources/sbt-test-resources/java/analysis/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/java/analysis/src/main/java/test/R.java create mode 100644 src/test/resources/sbt-test-resources/java/analysis/test create mode 100644 src/test/resources/sbt-test-resources/java/basic/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/java/basic/src/main/java/test/R.java create mode 100644 src/test/resources/sbt-test-resources/java/basic/src/main/scala/S.scala create mode 100644 src/test/resources/sbt-test-resources/java/basic/test create mode 100644 src/test/resources/sbt-test-resources/java/options/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/java/options/project/build/src/JavaProject.scala create mode 100644 src/test/resources/sbt-test-resources/java/options/src/main/java/test/R.java create 
mode 100644 src/test/resources/sbt-test-resources/java/options/src/main/scala/S.scala create mode 100644 src/test/resources/sbt-test-resources/java/options/test create mode 100755 src/test/resources/sbt-test-resources/package/lazy-name/project/build.properties create mode 100755 src/test/resources/sbt-test-resources/package/lazy-name/test create mode 100644 src/test/resources/sbt-test-resources/package/manifest/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/package/manifest/project/build/src/ManifestTestProject.scala create mode 100644 src/test/resources/sbt-test-resources/package/manifest/src/main/scala/jartest/Main.scala create mode 100644 src/test/resources/sbt-test-resources/package/manifest/test create mode 100644 src/test/resources/sbt-test-resources/package/resources/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/package/resources/project/build/src/ManifestTestProject.scala create mode 100644 src/test/resources/sbt-test-resources/package/resources/src/main/resources/main_resource_test create mode 100644 src/test/resources/sbt-test-resources/package/resources/src/main/scala/jartest/Main.scala create mode 100644 src/test/resources/sbt-test-resources/package/resources/test create mode 100644 src/test/resources/sbt-test-resources/project/Class.forName/changes/LibTestProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/Class.forName/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/project/Class.forName/src/main/scala/Test.scala create mode 100644 src/test/resources/sbt-test-resources/project/Class.forName/test create mode 100644 src/test/resources/sbt-test-resources/project/flatten/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/project/flatten/project/build/src/FlatProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/flatten/src/JavaA.java create mode 100644 
src/test/resources/sbt-test-resources/project/flatten/src/ScalaA.scala create mode 100644 src/test/resources/sbt-test-resources/project/flatten/src/a/JavaB.java create mode 100644 src/test/resources/sbt-test-resources/project/flatten/src/a/ScalaB.scala create mode 100644 src/test/resources/sbt-test-resources/project/flatten/src/a/test-resource-a create mode 100644 src/test/resources/sbt-test-resources/project/flatten/src/test-resource create mode 100644 src/test/resources/sbt-test-resources/project/flatten/test create mode 100644 src/test/resources/sbt-test-resources/project/flatten/test-src/SimpleTest.scala create mode 100644 src/test/resources/sbt-test-resources/project/flatten/test-src/c/ResourcesTest.scala create mode 100644 src/test/resources/sbt-test-resources/project/flatten/test-src/c/test-resource-c create mode 100644 src/test/resources/sbt-test-resources/project/flatten/test-src/test-resource create mode 100644 src/test/resources/sbt-test-resources/project/lib/changes/LibTestProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/lib/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/project/lib/src/main/scala/Test.scala create mode 100644 src/test/resources/sbt-test-resources/project/lib/test create mode 100644 src/test/resources/sbt-test-resources/project/multi/changes/MultiProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/multi/changes/SingleAndTraitProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/multi/changes/SingleProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/multi/changes/SinglePublicProject.scala create mode 100644 src/test/resources/sbt-test-resources/project/multi/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/project/multi/test create mode 100644 src/test/resources/sbt-test-resources/properties/multi-project-a/project/build.properties create mode 100644 
src/test/resources/sbt-test-resources/properties/multi-project-a/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/properties/multi-project-a/test create mode 100644 src/test/resources/sbt-test-resources/properties/multi-project-b/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/properties/multi-project-b/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/properties/multi-project-b/test create mode 100644 src/test/resources/sbt-test-resources/run/daemon-exit/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/run/daemon-exit/src/main/scala/Daemon.scala create mode 100644 src/test/resources/sbt-test-resources/run/daemon-exit/test create mode 100644 src/test/resources/sbt-test-resources/run/daemon/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/run/daemon/src/main/scala/Daemon.scala create mode 100644 src/test/resources/sbt-test-resources/run/daemon/test create mode 100644 src/test/resources/sbt-test-resources/run/spawn-exit/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/run/spawn-exit/src/main/scala/Spawn.scala create mode 100644 src/test/resources/sbt-test-resources/run/spawn-exit/test create mode 100644 src/test/resources/sbt-test-resources/run/spawn/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/run/spawn/src/main/scala/Spawn.scala create mode 100644 src/test/resources/sbt-test-resources/run/spawn/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A2.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/B.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/empty-a/project/build.properties create 
mode 100644 src/test/resources/sbt-test-resources/source-dependencies/empty-a/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/changes/A2.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/A.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/B.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/changes/1.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/A2.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B3.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B4.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B5.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/A.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/B.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/first.scala create mode 100644 
src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/second.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/changes/A2.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/A.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/B.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/C.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-a/test create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/changes/A2.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/A.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/B.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/C.scala create mode 100644 src/test/resources/sbt-test-resources/source-dependencies/transitive-b/test create mode 100644 src/test/resources/sbt-test-resources/tests/Class.forName/ivy.xml create mode 100644 src/test/resources/sbt-test-resources/tests/Class.forName/project/build.properties create mode 100644 
src/test/resources/sbt-test-resources/tests/Class.forName/src/main/scala/Test.scala create mode 100644 src/test/resources/sbt-test-resources/tests/Class.forName/src/test/scala/Test.scala create mode 100644 src/test/resources/sbt-test-resources/tests/Class.forName/test create mode 100644 src/test/resources/sbt-test-resources/tests/extend/changes/ScalaCheck.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/changes/TestFailure.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/changes/TestProject2.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/changes/TestProject3.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/changes/TestSuccess.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/tests/extend/project/build/src/TestProject.scala create mode 100644 src/test/resources/sbt-test-resources/tests/extend/test create mode 100644 src/test/resources/sbt-test-resources/tests/resources/ivy.xml create mode 100644 src/test/resources/sbt-test-resources/tests/resources/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/tests/resources/src/main/resources/MainResource.txt create mode 100644 src/test/resources/sbt-test-resources/tests/resources/src/test/resources/TestResource.txt create mode 100644 src/test/resources/sbt-test-resources/tests/resources/src/test/scala/BasicTest.scala create mode 100644 src/test/resources/sbt-test-resources/tests/resources/test create mode 100755 src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/BasicTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/FailedTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build.properties create mode 100755 
src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build/src/TestProject.scala create mode 100755 src/test/resources/sbt-test-resources/tests/scalacheck-a/src/test/scala/DummyTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/scalacheck-a/test create mode 100644 src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build.properties create mode 100644 src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build/Test.scala create mode 100644 src/test/resources/sbt-test-resources/tests/scalatest-ignore/src/test/scala/IgnoreTest.scala create mode 100644 src/test/resources/sbt-test-resources/tests/scalatest-ignore/test create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/changes/BasicTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/changes/FailTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/project/build.properties create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/project/build/src/TestProject.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/src/test/scala/DummyTest.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-a/test create mode 100755 src/test/resources/sbt-test-resources/tests/specs-nested/project/build.properties create mode 100755 src/test/resources/sbt-test-resources/tests/specs-nested/project/build/src/TestProject.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-nested/src/test/scala/TestSpecification.scala create mode 100755 src/test/resources/sbt-test-resources/tests/specs-nested/test create mode 100644 src/test/scala/sbt/DagSpecification.scala create mode 100644 src/test/scala/sbt/EnvironmentSpecification.scala create mode 100644 src/test/scala/sbt/FileUtilitiesSpecification.scala create mode 100644 src/test/scala/sbt/NameFilterSpecification.scala create mode 100644 src/test/scala/sbt/PathSpecification.scala 
create mode 100644 src/test/scala/sbt/ProcessSpecification.scala create mode 100644 src/test/scala/sbt/ReflectSpecification.scala create mode 100644 src/test/scala/sbt/TestedProcess.scala create mode 100644 src/test/scala/sbt/VersionSpecification.scala create mode 100644 src/test/scala/sbt/wrap/MutableSetWrapper.scala diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..be586c877 --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2008, 2009 Steven Blundy, Mark Harrah, David MacIver, Mikko Peltonen +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/NOTICE b/NOTICE new file mode 100644 index 000000000..a249cda2e --- /dev/null +++ b/NOTICE @@ -0,0 +1,58 @@ +Simple Build Tool (sbt) +Copyright 2008, 2009 Steven Blundy, Mark Harrah, David MacIver, Mikko Peltonen + + +Portions based on code by Pete Kirkham in Nailgun +Copyright 2004, Martian Software, Inc +Licensed under the Apache License, Version 2.0 +(see licenses/LICENSE_Apache) + +Portions based on code from the Scala compiler +Copyright 2002-2008 EPFL, Lausanne +Licensed under BSD-style license (see licenses/LICENSE_Scala) + +Portions based on code from specs +Copyright (c) 2007-2008 Eric Torreborre +Licensed under MIT license (see licenses/LICENSE_specs) + +The following test frameworks are distributed with sbt (in +the subversion repository): + specs (see licenses/LICENSE_specs) + ScalaCheck (see licenses/LICENSE_ScalaCheck) + ScalaTest (see licenses/LICENSE_Apache) + +Jetty is distributed with sbt (in the subversion repository) and is +licensed under the Apache License, Version 2.0 (see +licenses/LICENSE_Apache). + +ScalaTest is distributed with sbt (in the subversion repository) +and requires the following notice: + + This product includes software developed by + Artima, Inc. (http://www.artima.com/). + + +Apache Ivy, licensed under the Apache License, Version 2.0 +(see licenses/LICENSE_Apache) is distributed with sbt and +requires the following notice: + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +Portions of Ivy were originally developed by +Jayasoft SARL (http://www.jayasoft.fr/) +and are licensed to the Apache Software Foundation under the +"Software Grant License Agreement" + + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + \ No newline at end of file diff --git a/boot/LICENSE b/boot/LICENSE new file mode 100644 index 000000000..6e0b0067b --- /dev/null +++ b/boot/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2009 Mark Harrah +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/boot/NOTICE b/boot/NOTICE new file mode 100644 index 000000000..b2378b336 --- /dev/null +++ b/boot/NOTICE @@ -0,0 +1,35 @@ +Simple Build Tool (sbt) +Copyright 2009 Mark Harrah + +Parts of the Scala library are distributed with sbt: +Copyright 2002-2008 EPFL, Lausanne +Licensed under BSD-style license (see licenses/LICENSE_Scala) + +JLine is distributed with sbt: +Copyright (c) 2002-2006, Marc Prud'hommeaux +Licensed under BSD-style license (see licenses/LICENSE_JLine) + +Apache Ivy, licensed under the Apache License, Version 2.0 +(see licenses/LICENSE_Apache) is distributed with sbt and +requires the following notice: + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). + +Portions of Ivy were originally developed by +Jayasoft SARL (http://www.jayasoft.fr/) +and are licensed to the Apache Software Foundation under the +"Software Grant License Agreement" + + +THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/boot/lib/jline-0.9.94.jar b/boot/lib/jline-0.9.94.jar new file mode 100644 index 0000000000000000000000000000000000000000..dede3721dee71225c942ebcfa4e38250e1e893c2 GIT binary patch literal 87325 zcma&NW0YjUwk=%fvTfTox@>0IRb95NF59+k+wQV$+w8)tbMHA<>aDExC6BoM>*Fd^TsbK~!B zukVKXUx&#G%1MfeDk;;;irvdjjLS&V(a*w3(@{@NOg8`+7g%-<9cZPdMroz#gdiaf z3RM%)X?jWB+A<>mDAE8Wr!49!R9qP31Qr0A7s@!`{<}wt#ZjtvPa|yTRlyHcrzX)V z_LSU1>^9h>Q@c|e=wH(U0+PV|@3RO0o|c7`xsCCEnfgD*2(6DcAA$k_@qaIm@IS@~ z$|*?-Ih&XmJCOe4QP|eV*pc4Q%D~YvN_kodR}}RVJ9$%MBQO*SoSobcQAYwc{RNaQ z>TDRxvNI6=jwF(^Ys<$HvsGpxTs9w>G3=#4Py}Ue3Xnrpg%S}eyzBt>6W!t7UGDw)vYIb}x z;)xEWtVU9~Mz#7}0^$kP%FtZq4?d8xa1}%KApnSZgEn(jr*)Z=aBpgMn_lgG@m>yw z1hU%NEX?JJ%uu})vYrw5ZM^aNq39`Qai^4?!W{{1@QsbnF!khd(J>%HpZ+kZ*G$XK zzL~QInclHLb@4z<7OFw}!(_l#q~&xvYOz(GMcoyQcay}oS2M$=u}GCS zfhKx2V=9_um+SLQgbf9qr}XLeP-Di!b9ZC^9$p;xc$*e60UnR2!!)cB{aw42JzP3ikx2uzQ?Bq zOy*imNoW1(63J@!rS-BNT5Gxa^6XI~n}(38tJ+h(7-}P*!=x~n0Vc!!Ezlb$!mm(Y zm*i5I<50b@-TOg|UoXVBcCaysdf~f@d*e*+)VG#QnI!lTyY_oDOkPPZkC@$JyhsDK zh&@;X%VD;IH(GB&Xqgg1l;mbA;YYDO)(lM^M}%e{>I7 zYo@V75#;yUVv`qhgOH~&v%kQ!Cuiq({H%+}&eI|NtHkPJz`KefOTFjH)F-H3sRAuW zAk0{W@vck58L%3yuPQ1~v_)|Sw=MtSH<%y%(r>UQPrBvq6PETR#hb@)yXuF0%uTj7 zg2-Ej@vcJS_KfN8?a{a0b6nLP*dnTJNZzaOn1W#v9177f`gK|&Md({-04<(``r zC7)Q|NPV_eqPU2AA{~P8z<_+rHaH&==9|!J7s;MEi%v6zZzfzdA1vzi=4*|4tYFN*d+PmmxuyqL zVs9KW*p4oeueDmA$$x72S6B!R61L6V77lO6(111m)lD{fpYDNMnwo+_Zb)6^uOEt) zA~R9Q3~IuyIR1UD7UshX4#B#Jh|(ZPTU<(7x=VC^ei!8~^3vGp8ZuMss$;b5q~`UNn3`i7%CK@N 
z6Y<`FTo!&86~f-FAMFifNP&Mh1)XHhYdyM^q>)4WD0 zI6~3Rk`^cn)M%Te@&)xqMj8-o8HHSa!pISk3#75GQuNG;v4=)kEEbKkDveH?g|<^O zcdk%f)i`3Wbil^4G-t(Cb*Jqn^@6K}9TJnYC#r0E%}yibM)66%PM^&Z5G7a)Bw)vJyHB(H;LUJFjp0BQ0Kgv8zOvY zg}qau3Y)gZf<%*5HAY3B+i-ivS)7GY)NuObK(>#UV$-a zUio`UGNmt(C@;8$3(mWD{b2@Zdip|yYaR=ohh%PCE#=47etoJ{pxrdv^9#=w)OPAy z5ns-mfA_G=ZU6eG>D8>gVH^MXm`zDa41Ivu0|2|q$txYO2RP1-v#rNukc`M`$6meZ zc$S2aS{&<@_Q79oU~nwKs@75{@O5} za$R_R61KCP;}okPn?rrTld}>a1?kBpx&4t9fs<>l=7{Yj!Hp-u`J}`8MD+U-5~T@) z@8Rg#FwhZJuKn%pIBo1ACw;wsN8*sC&k1o~Gz^=Rest^c?bx23|w&w_n%1T-i1 z=hyI=Lo`47MoX^=HuyMuHN3nW%?pIL3*qdK#07rJI)a3?d6vDkoC1at%*{to;xO)h zgxnd5xj$TcG~x^YcKwMki+5Tf&fM`>s4GHqUcdmXz=Y}YU@#tM%QB<{PU&_Hx3Q7S zF0r5C&tL_Xr7}@VuypD&bn0FooaQ;5(i9FPlgMm;U}}YpUrY@@V09PQ1yBySt=_V2 zY3uRwh;nNO&g;s}+th?0@6UEQTUl0(ap}Di#bDj-{Ujlal90eykDZc|96$y% z!<}X(vXT-_HW5uqNd6@)3}P?52h90EoqFKRfl;0Q4s>dOJIxoD>L4X!!jJuZ^&$`s z0pL{-0|$`i3gOJqFe^xaXqtn1sA!UENDQ+?pcb2CkYOd@&+&|@oj;*CG4KmI-6A#H z5;pr~UywYs_XMfyRemwKuwK*gc{{_#3gPjbzQNN^eMWcjf9MoCx+&Q{2lRYdz2=10 zAfo6Q0=xneU(ihlU0cbzpEn=YTLZ^;e2-`!?=4XusN%KrZg?2w{{pXR}31R)qXiT zIT#o^3I4N3#MsK*+T6+5;ol)TNo89JM+Nos>XX!fNI;TM!Yo2UIF`7%s7b+xb%ve2 zD7l$Xp((=%3Wg{zBTC8prfffwrTb06v=+~MCkP@-5mSVR|FI9^3s2m%GP?sO8MVK3 z!0x*3IQ#GBHP7+IWDSFl7kDlK!n@f(AcBF+-EMezg^xt4g8d`{7>W-okzz!PeHH_X zPq?JRIry`z{W&>;dx%zx@`kE?$n}N_)4n)~$*Z61E8|FrI~ol2Rhe5%-&d}3ZPZJ= z4H{Np5T|gJqRhFaC%y5?M8*?j0On)vsj|a7QcCisr)v<}w z_`!L-Qhka{KV(cvcym$7r9F+7CfnUpU>u8@?0PL`qX@sP`fa8zEjFqLd0QsQ$!FK7 zn}IW(=?YlIxkMyUaC^SNcKHmEIL2P4wa$}q59;ME^f{GRJ;2EO>9_D?>&c+)b$6jg z;h_R+TcvO5^YMdM^H;NV+Utcv{;}B{T1XfL%jr)+N!cWHjJCJp0kg&odW|8FOR4rG zGG;g%g)w%=4`aeO7felrtnP~)2Uuuznwz(x39n%{31hYhG;;)%MJD;tsWR+_lLNYe z9hL|ii&1!G{Tms@zZ>+4m5ug82Z!)@QoZIrVZr;Hd3kt9aCo7NtZHXtHJDf&EqP}a ztX{wWQbUpl{lyNpmlVdvf&)iqGpZ}>8>|mTJ5a7&KRx1jaNzyaf|IQyO& zu4IUz&6fCOCg0SJ@(8=gslJLuKSH#H$^T{+f0$8d=I^U3mF}Ux;j(PMn^Jp@T5D__7w5L(THWy|V>ymKct*b}L zmjI&go{FY9J#@QWxXGMf_8lAvJcK~lBk~@9`1b9m=yQ)9qL7U;mY-H8M;A_LUQ6WS 
zbtN#@6JMY$A7u~^Rs|ukr-*KioCvQ#;8<*$!;iRV=D6;1k--2FFO{&&VVo-ic1e4- z1Fh{K))qK-3k>MVGf_#*#2`UYTxHK35JHl8RSVZ>%VeI-0 zb@r=E&>5yF#=d{`6yH$p9|-7!V*VsdX235<8-dw7@}mz`%Z0*E-^qi*UrP;n3M`ajjcKlN zT-$@RtvSXFy?I?-4t2l-?heiL4mXJ>GnUv-Q7D3w*gGPLgZ`@pmIR+|?Sh8f0Uo3y zPSfFM!ya@`E6MQMD9j|Lwxn?7Zq2!>7;{l7?y^8e!}JNWldI=4%-L>kQ{ZS&erp*Y zW5F(~7ZQ`5ZQfb)^7%hVF3EzL&)PS%W&i^LG5o(E_Wus8Q3^aZ{Q`*Ipo8NhL;`Wz zzlm9IfwhSck>iU{ot%OrE1QQE77n6^2t&5zvPxPd|$k=;<; zh(i%n>tp5g1t3ev*U>6Ac!7)65`@UUg zWVU>6-SU+n>Q?uHfaLl8!KM>J0lBG*l3Nsf9(hK5;k?s;A;N#3_`>?gy@&hnS^&Wk zRiOj~2#ESSpFsWp<7bjFcXaxvK2VajL{a<>K1T7WPU-w}g-fdE1xrTdQQ zCQr}TJ2#L*e-mqbYoZVc7`rGtc}5XwHEA7fZ2h~K=ekQkvcQv~^SFmaQhM?_WffTW zKD!N|aqCGdy9fJ7WdrVS+V4`xNP?xUXHB^sw`zgnrLB#|j{4} zmFjr${Lqjo6N#E zqq#;qKLFr!m`wV$SVP|_3o!IgaTgD6ankF><_izP$3nm2kcEBRt~F9V3q0&+a{2uy z2ey7^iuRmOVQ1(|0ihD2)T)QzjH5zN37}nAO3;5ick1oZ+$9N$5j6)&5K}Wbv$d zT(oOMjBDc(FNTV3Dx%xdmI2t^r&=ioguzMgL6^S@Q1Iah$ipXOfs7yxsW1@)3hX(P zj<=?fw?a8CwZgdE7xo7pB?uY2X@lZ(`zQ|Iy8h*r|K0pC)5B@1X2H;OG*gW880+vK zA8r3|9oxJ7hSc*;9CanWGlY!WT7>p+J4O>}lnWfKjvobUBEF@JgGXA$5i(*`HWDn8 z;4L5R_74q?n4>4g_#}kV9!3THPwAWfGhDXl8xbJCGh|%U5ai7pb3UnH#8WGC`U#zp<7Yq0RCyP^M$7o^)H-Wo4?d4s<#p-%TfwGyU8DaNcN0q1 zMILg$6BjxjS!s$>xI zXQ|GRi{rmT9g+Vu#19A{pb&TiRj|IYyt_P~i{Ub?_R=BEtBm!K ziAD3R%<>B9#E7F%mDY&sGrCqc>Q~`9T&45CP%G4RRiGi0Ti%dsMfDrC48?n~>~Tl1 zdN{ot-gnf#Aa0*Gx;mg%ap=;n`qs!)QsgmsN`zm~{l~5t z!ht*_Qd!y)f6Ij5fnbP=w!v!x)sn8GbYH)>#yTQ_s_bK8 zHM;r@z7X!HGNC-HgpksRC6qRYKI#LjQ(fcRoSjdSHe@Ygy_}2$aM;PEil*@Su!=-b zvST2tj~+DpXR1L?bQ;zL37KS=BSr4=bO2aL)pf-}MZ*!kG zPh<()JJ3(o8xr0EyGakEkC*`}&Jv^6)!r1lEI)=FO;E#^d_U9MOuu8Wj}c}suog@= zUGhVfE?_Xv>TsjJxulfa~apm zqm6@P{xfj-rC2|X8)^I}((xu8bW=31G9uL{{_*T-?mNeenhOPEn`rMAzDN$ZcW7Ix zhZ-d${8F@O&-)Nr^FMSc^?A^0H5JWUa3!(r2lEBByZbuR!4(<^sZ(hN5y6*#rYzW8 zdxV(V+AlLCBTIPQqNxz{TW6f|8Lynz04ZxTe7vJ{OHM^5tExp4Riv){Tnuz+>*vGN zkLlW4RMY!wUCARKbZLkAeAAxwpjubwi5QWk>=|ZcjioADD%vJ)F z?yY(STl4M4f10GmIwvR43V*#%9G#0&MTr}+tPGk;2D^edinVMIc-=XTqk+8c7xgO^ zutwNNFFQ8j2)=N-7J>GJaPLfCbO3Gh7`@bH6%yAAz@k3rUuF!@o_^dt_VA 
zUd!qUkm7M7@MUqun0dVLMxqgTM*^2Jx6u-pB8?qI)OY{Uvt8G#$p}!O^VJ(JJhz2d zMz*Eo6VS74;FSA`P+cxl8inn!CQLn06A{67R_Ukz#xXP9lqUp#9|&s5il&W8mXlFj zcdaK=98q~}W@QAPGJ7b_9t+7@Yd}jm{~bzrov;PB+uTCAXI=FS<2ZSoa%<^ubp44U z+(H|DWu!S~K0u;}o7_%dubfHKEmsb)>4-3wn~h2QeKl0&PkfP!0_D)?(6vA$+m8y zc4UHsH_R5re9EnhIAP`78h$}#8Dw>Zj;D>F)#ZV+&psEgn7{|Xd9gQ(s^;u7&h;*> zB$4hR-Qh@lqL;ZD8FLWyqv6Us)ZiNm^F;&mjwO3SXV{1T)Xy%Oy62@XsmrN>%!mm! z^b*eeMa*yub_-8{*fV_2yKB3{cjon*lbs(R`TGW~Fmfkk1?Zg@F6*(G_+*Fw%zMB1(Or*h9FG*q`XNX>-b<-5cv za+=Q;)!>j4@Fpiqv`GnDDSqR+t;FQ)gY+xpy*ur&%#C3a;Q3r>f3RX%x(7T^d|lXz zKg3rw7+TR?)W7`z8}eLhJ=y4#7dE466z!5?@YVB~z}{u;%ZnSaz|4w4@POeq8Bi4F zIpRC9Y(~~X>YX9Ych(58GK{XuL>vwr2mDd!Un1vi7Erd`Oz(`95CnDHbmusI+nw84VqP8!`TZK++7J4!U)} zK&tB^+92uS7+>`?Zes}iD?AzzX78Ro!$gIEFv?7p3E|9(I&9yn`@p9iC4N&%s~0=M z@aHa7!dnvey*}E2>$$b{E)erAeCN#qQXcx)61l1drr-ADr8wG_>P5*#+dy%d{T{i2 z?U-DtDuZ@p%6Nb2O`AKeeu;WP{pfenWpi^$y!p7I!ZFq09@V^kvMtKe9Xv~x1i8%e znH0B82w$S4hRvK@j&ZnuW&gYXQ(_vv*=4MwvgRU7rV9j>_<5gF!fD~U9OLXT9kgah zRxRq0i=wNmP3iQXY|E#j(Xz|>eG`j_G}%9nab;}*lKz^jhX&?+oaos68+9AbWtT^_ z^J&o~4p0>*m~5icDEE18yQo}Iv+=4tmn&433C855Mq;&tf7YhN#7OPJ(*C#;mSkPp zH;g^}c3V$5XW*S#uPhPGtIl@_vge9x#9Pc(m-3S9FTi*#12L`6f-xa6^` zBI8DAZ9Ac$XSd?OD0Dvr{rs6QM!1c$~c&vwKj$QZ)LY5fX3 zV^fcwQ2tJR_a7Wy2sW;{p1@z~T61R`Z&hbl-pARf% z>yLFfUW$_V6_cG5AwGop-kKXr&>p0$o}~Sr0T^7Jd1JfeQ`mZk!MzA>AKM)Sx8x=@ zt_*P9mGf(nq|irfW@{F3#^7w4b6QdT{S>z8p6Q z?+LPX3%xx*&U)(N5{#PDk{f01&|a*#K)?EY;ocD#t|9V&>s5CB+R0imr3}3rC>E}z z&tMB78wv*eDWU1fV};Vx1cOC03QVGxhGf7lmkNSYse)B}q|aAn1K$dze@f}^?lHaU`)RktH6~4W9iG_tlW@P4er;9V z{8zAchtul9l#O&_q+ItASX4(nxb)jY*HeB;)M1GuQHyT^3JjQ)Zta zPtp#nchxic-fkLlc2Y=c$e|Z~>5noX_Xy>bi}ub6&4{47zA5-LhlEFrFeP}VW@*K0 zBVIETX4Q0Ewl6)q@g~4!P|-ofY|bC9fT2GLk#Om&&Nh7^@WHCOKebhx$})kCAwwa}j4ogYiw!xiIXkJgvzAb9)s z5i5oW1~|S})M9CcYpzjI)%UF+p|Ms90MYQlJ%W|xgXjGqs==`u z%4ieUp%y}Lx`JanM6#Gqr%)U^YlRzO{Y!!LL=iuTqR; zDHn>ujt^>t#pseVXxGM2VR9wL zU1AK!j)xIL#9e;|M1C#~VYe>nrH<@sl_V(1v+A2KAS>a){6cD~+KJB9)0{cU^B0Hm zI8+P}In3}QBWBS~b~IA+vPJOVf}{}`i6$v$Sc3OTSUQMzID*#5 z6Cd7eJMZ(U(7F@x8 
zjbH$tH_UY(t9ynwWc(HSS+_HiMg^@J@You>>6E1zdA`WJu|w9J!{^?lR!lx`%Ir*4 zL(7nVNmN6tM;^>t1$GOPZhrggihrbkdaj(jj1CQ~3}#dbpUnHh@S2Xe8aKkUo!8

yM&(VU1WZrF}+iFGwk=;ymBsq&^hVS+sR6qDaYSsCya_h+4~&KQkcftK-aoI zTJO%lQBCjME^dDz&xkdpb$)+~-o`4mABh+ysuS4`UWN}?G74}}lS6cwCM^w-u6bw$ zRy#ZzpRV?Ak#iZFx{+&P_EE?I;EDnox!VmT}s1CXg>Mg_QN++jRzmNc(_s*{$Pa|27iRn z3h)Uaq=s>z7u3ZTd*3nA8~k2=s7UDAM(pQh%rVPZ7@{b6wioYQlL1-d`iXb4U|YY1 zKLy~Mn!Bq)aNRSQibA!ZtIa@%w@5&$C11xT@P72hJ?}hFFt)gC{#bh6ikKfkB&CRn zhRU>ONWOTak1}Twcj)@#_;IH8UpyM|?^?yi?vH1UZz&P--Tu>~`TyXTb`G|7#tu&A z#*WfswDhBrl?$?R6VhWclC+X^%>X;UP%6@+14?C_1!~j-3=C6p<0GSFqhj=R8Z%|; z0}QGIbB7}Rlf|kZr!|A*2`RQku@EVvlMGT#fSJWdu(|ej?fnp&%9u`Hev~>A}h9MWV`$_&a{+2sZ+vqN0jn z*OPKj5=#U<NLV!X zB?JVAlOv>F+G0#s)cKPX?=PezKZjtil55q(xUS;t8LnuCOYD0hr*|5V@%7grKHNfc za_;xF&h_H!Q1zY+2@LT~Ce(}qZ&jdTMpGj`CT@TjAR@l$(Eed; zU}B&FIS*|?vP$(3B!jqmg<>#%ikiNs*6EzY=ru4m}iw zyKi_(iN+F#o4Z&)N7`Zw=7)9gh1RsxOLX8hbmnqeXoksRb6~P^1vaKl*mD`2hn}xc zlap{RZ!?{bFmHM&g=hZLE3#ky(xqgDT2rH(pHv7ik_wZeN`vnLsHScyy(4$g! z5|xv}Rf>))}y40XlXm&$hx^Wau!40NQZjwLs(ttDul=$cCNyN z1ObH#O}l|kr69!s_bP<%i5k!SzIyq)-qe!DlQ)qt$Ih~=W z*l+K8IvbIiaGWSFA=2zZyj20BkDzqL&F-~S+!Lj!)`E1;UWplR^lU}B`GNzoB)IjZ zEZ^0*b6ojxF7xKpGCW^|TTW8XKS)Y-l zrc;Jl&{`HbGwi0q4d}aqAWZhYaf83>o@Q`%Nn;r03sQCgdZP3GpVn3BYNK#A=yZED zvLYR)xUq0*x1TOlFHAl{E(a=N%}%KAs0

Y5qC;k%1U!?;t&iH#A^9N0vn>7fOhR zamJC+jNI{09sY}v2rHBHzc@NZ-5M$~F$-xR+B>*jh|71#IxBZLF8sFlTv690Of@I2 zu^yp6b9Z815(F3v_=|jWt!doQd{uA0t2+BEUorxFXkH+6)o$e9Bht01SNF9jl2AMQ zggz$+zYI>uJGJ_JZ@Jm>{SSxbe3kkzlOhqr?Pf)HO{kKBI{b-w`!g`Q9)rsP54aFV zZz4$iL{$9@;iyC`$}-4|O#(d1gQ$x6p@3cEyZYM~j8sr7@jM?HuaS?LPI3yR0n^#8 zF^J9f*35ZTP2K$&W**y79gg}nhrNS&95;jkqpN_+HVU7!}aMJXn#ct#GK zPDq6>&Bv0Xk%;5~l9Oup5KP{1`TO4ZQid8Kz!O|pzo zv3KZ>3ngH-kGZ4FnQz=5u=N~G2(Xv7-&n!Q49Amr?3+Yzo2}LuWw>XqXS`dv{3yap z%|;kXZzJaEmxAi)99TdBO>Sqmc?5*AvdF6A|H(1TY)0-;T`8bXEwTFiD=69riY!K5 zvjo?~l=p;0vIm%u;?cSTD++FtG^B%8-8S*510?Xt2#C#B(lJAD%TO#OdF4CA>vKHgj85qHWU8%up#gh z!GL|OlbxqiH4fjaqVb5a@P_f7%`YGIfOa047>Md}1lOOT7E7(ThBFe&Vg&tH2Abkv z4X`=VRaLA6Tj;Ww;s!gc@uHv%SJ0fmm_cGQUpSTU4H9`!z4{C3QILG@JN`>BV->j< z?jh9=c*8Tu;mu)n@7xDO_Pp@g2Oop!w}Y9wQNVa z+%#Snf>w~z?PjPfk}(>7j!K*{%~YLLv;cHj1NKYM90 z8CjneX4JrnZ&qw!Y zenkH|g-q@^`IGdQ#oX!V(L8s|9Hq#Zvh4PP!CPXLXyRv$0G!I8&_%ED3(#3zPPZ+= z05fi00!1Be3Dg&Fx?OOoZc(~>fbL?crU;g5gk~iYjD3@OMcp^?-wG*P5#=-HIJ*(#?g#?$rcu>La86zyJ#0x}>IglP_W44F+^VxdDGjcc_aAm zdGS|*CZ+msT6pr^{?k_eU-F_lQL@ne0M35d}o173Q08|(-LD#EcY@jPQ1g=USXm93>z$3k9F_3e zB9g_`Qo5P!%(F!id#BMeIvW6fp39$L9=f)dMH231Q4mo_1jVju)ITxlP0#`uKkvS* z`g7r#sDJ)>LL{DyJl5MRMD`D^IuH?!Sf$wEgdkrSSzm`UhJt z`fh07?cYxz{d2MZqQE?lw#)qB;NafiaxUO5F5vKD;L!)~8~&LK@ycT0Y;ZZ1gY_aG znS-59A3O2+6P-?$@}Uamk|xpJ@+S6SO~n)>plWtL=86vGS|;!c7&$D1zu$@ZVgX{{ zpOu*toyy*=j3fa-{pw$Zl?w;R>|)?z;N;+l`bGvu`X>6ob(rdYq$r>`|D)tKgUg(q z>jx0f!8eotuiIqfXlrFG|E(wtoW5;~e;$f5UOKkL6~zR_#pdAStLugs32ttl?m+>EPbQ`j0Lr%}6T>#Y?|){crx3%EvYCK4 zVG7d_AlQ~jGhu_WkY0P?6kBISC9&eqefi6vE|V^Cq~W!w72mrV(&FBja9~uj4M3J< zmYl2vHGP+$D8wBNodL$Sns+Ud9M334xrntY^(ZD;1WvN)Tre98NKn))+mwh|HaMVm z#xhQ92V=8z_js9XO-qc1eMIz}oYX03#de&p=Ge`D$@MWdKEx8dEZAy|6hcUXt1t@^ zos%s;(W}%NPT7y+`}Iow0>RhjAjE+9!f{oi6pfO$!yt&)OhWe)+DcDK&DfXN@z4k} zs?dMtzC`&6f_al^+(+EBTc+4Q((rVLLr^Dai)2x|s0aE?mkuC48+d%)_US+J$Y@gi zvmlRwWe&O^9ot7J3%J-UM)~c%ZmqF6f}mVhr8@r_%z9??v`ZQ5gQrnjr$y?4&SlR} zZW;C&&P8(=y5B-k&ya6sNl_=-L$WJb&!AAB7AdgI4pw#&**B~hv#hv~VMbZL(*=-I 
zqd`6Y&La4z3gCEM=rp*WprHJ5xnUq95}X{)FXohsgW^qE8npaIv87MvkGjYYO8Wy<9;$}5!H-#qT&q- zwz_2|=Ig$*JX@0~b4t}sUaBasxvTPk6_`vQwuQLeRiocfj#ehCC)QQ54RQab+b^(g zde7nh&u_?$D56u{qL_mFMAD-?Abr80$y{y|U;CNytQ0?C>p`1WsXEO6aL9o- zCMi8?JPm*}osL=0-u+)P*Lnxp=*(+DkOWBS?JOl(1vpLYYWJg zh2Y0CBLOTyzZYd^oI8parXzz7g6S44L7x|?7wY}&4bz0yj$v9%K#VCr@YWwM;*FN! z4=2D7#Xt#T4m1KILVpQU4t!3Z7r7Vu{ptvEMFSW` zsx{{r?Jm)gfm^=R8Df~{XJ1o9w_HT-`k{#((h9CyRZrpBv zF8>;%ss8y1mK&QR%K2L{u|~1j17$bWspvGLb;)e zqH}(hm-Ec0u@YfP5X$t!1&{e?gCpj3>g9Wd6<<@4_KYMqax8ZkOd2HW#*0h(Dm@?0P_ z0R0Zz!=so!27}i<*+I$bechEq^a3#7-;BZY4?)56vjxfT&*Qy^8u^g^`~xMYd$7ZT z>t({saMZx+(>NSs|Eno5d{s{3ut*NGcRVw+ATyjx&WAQfx(S+MeE%Nu)r*?CtTaDg zbDvF4cN{~ZE|$<4UiUhXN$gq_zIz2~KPrU`zIy=*B{Ibr-X{~~E-K|v8TWdJf^*jn z)PZ<%O`r)4` z9^Xy~D@R;3B%x+kusjIhmg-u3)8vJRM#1yq0r7JPN`+Mriy`l}r1%Ezoz*|LYwjC!hQkC%F?Bp9Z&H8wn=v^6zcIcUO)3Rps4+x;k&!()_?#xg*;mqih?!d{baZmD;9okDR($u`Vk-qijWRh)#J! zYRFpoJuIA_`cr1;Qh7tL!+`>g=hPWv@hX2!&+OVosV>Cmloet-0|euyqwEr&`blwC zmiIF6@nP=VCu&=o0iv{3e*YQe2Dtx~5DkIrb>O{5nRo9X=58e3faYgvKf$p)=SzZ| zSDN_HHtsuB)X&SEY0h1fPg=BT^LKY-VXiS4_)p?p-!NU>;x(GV0yVZzY(d@S7n}z# zmY|-Y?c)6TzHLdruJ=vUPuI^F)K7fh4DL?(COL|)-@Xzx1Ys=jc-$g5cTBtQ6qnN! 
z*k21{KlgK#LVS^`eU)y5623ydQuOrzUp1vK>EPb7wYDdZTIJs2L(hW)(opLNg8>9( zFJby>O6SuHuNdVYQnlL>?g&Wbwu(Mr`T_dNc8SnLr3$5jL-fQ@;*bzc7NwoUX{;~U z{#<2jU?r&ByMLjVDZvS2M9Rs*0bi{BadJTs?eGqveR3}?qq)s8xWvI`zbK=o=LQ); z+GqDdt&D<3?L&9vUb3g>2O$^WnE)sStW26HhOA7gDEq8Tx+o#$#A+xgth=)!+2nU@ zin(DzvN8*UyXmQgVPom3`C(=0sl{QZ>6}6+;ud?9vZ&u?5V?s(zB)<+t9>JR^sGo2 zxrt@IGKz6JX}hKONGdL~5yhsAh=z4k8Q|>2k`1jXFS4 z^=Arjg+600>kK}$=lVcIS5+3j->|Yl>;e*OqjMluE4BPx^`o5CV>5?*B@D#J;*p)e84+_*kQP(jm`ZEpKK2GkQ+_BRvP zW{##e#5n#QFAWt*Yhh92zq^fRmaH$NA#x-n)Fgoj7@UAEWJ;V_THHPv^t5WK+jA{MY78cx--k`v9E&px> zY3W=vB(s$|xG`JfSXFmGMb9Qj-B#ev%Y&GaSVhbd`?7Q)1eQ_Srqq5$?a${Ht-%*i z6q+?2?txL>oEA77(Oec|Bn^Rq7Qj@2W1WY>Lwg zLM_d)mfXapjikZTIVbq?@mSOlA%iR1$TDc^V44wb*qVRPCL3UESfo!bE6+(-!blb_ zQ5tITMpbPom{n$4T2WPRZ6Cn>g)}56CPcXO6y7(}J6mcA(W)i*6E?oSpiIC+6~NS1 zG|YJ2ej~E2spsjIKP!^(vSz=Eg1 -o+ALO7?mx!OF3QAyE8xC_VXp@rcY;N!vJ zCx!V++Gcj8=Sewprgd`9E=kW^7?eajpfyQ_8TkT=t)OG_w*IlJ(Hhbw65H^8Af^{G z*0Qk?=#SX*#ajL1d!Am461AK!1$g`dRp|slJVQl!#Qk|2`}!+sQj_6M$UL#bVEeiq zUD;$VLI47kZKf{yL8h^$-=@H7GqNiK1+v+Fz$H@X$<(I|HyKuy@Ix2XY>h=E!`!kQ z4xHPouXbBps5sVQA0*v-*F$Xk20v6LwOz)F^&zMcRR;SS_c@tl0xD zuMIgJ1TIIGw5hdTqC{PqhlnFjQhJ304JrfA5X~`$1OUj`-=QotYj92=I)9hlMTa6a z)d$h?`aD7}(m@Fe%VYVq9aunL2{uJ_5}B=dkz`+4$e(1afMm6$Uhnt z@Mbs3T+@l9ksCY%Qt*A)#J9&u#8&-C)AG-4o3|^;E_jjTtLEXQl_)aYb9$T^?HyZH zoBT2sRQu<3f80Tom2k)P0V9{(^qjbiHYp~=(THXMG!UwVtO_1|Y zuL}x0YCu%2a@53NKy84zSR?!O7*fQXV1|0CfECS~5@WhH9E8RHA?%%kGzp_@?Xqp# zwr$(CZFiTxvTfUDmu=g&U0q#&P0XA)XC@}%zsxsp@?vG=-aGetmUaHg_1Rqu=W>z! 
z95Y9Kx=3pyVv*EFB5slXPZviP-zsdjE@Lk|Hy7T2-IZ#_X4+HV;D27w1EWY0c|z)I z>yp!^P-xQFAc>NU_t5%IvytUTc^FP1=sX)ig7;L*%49oV*}%Ts;|@@}tYXT9Y7ujS z?KmC>XvpNqCVY`PBG?gMORgMiNJZ66cx1C!cm3yju;7%DS}gi92pR0yM9o~)X2v!ZBseNc~Y;*DZpUu9?%paxVYo^kkOe3 zrqrzyL)P{VnNMAYoY+iAouVK-hiDWyiz)!|KGwZNbOWS}WjcQ?Y{8Pt#+{cr0#i;AlzKvW=&s z3JV)bR_V!GI2hiiK3MZ2Ev?DRMRGUG<1kUmq?k+UcQ_|>M>AyE^Dof5$3c7si}r{i zdwDqYN@9rcwsY@Oc`oIWah=D@(#G-gz+F?CG5^ugyCr6Jj{BNR1#s{$t)Db$p@-oV z#q{x5J3_ll3}Bem3aXvhKDWKYBaaaMj;)eUODfwbJH%YQ6*a<JX!q~JYrc48{$M3{4E^a_*ZUWT18PrH{nnb^`dSUCN)~8NR*c* z{L`LUT;T4k2A^}%!}tR39%!^K;i~Qp!r7%!i^JGn)!)HMWiw&UoRRdr8kUv?Z${2h z3^QWf$d>yQQB4(GVdd=r{i>$#Mp?HPZgkEBC_4*Xw}u4$a0FuK`l2Y;2H312ipwix zjq<3+p?3u5Kn4b}7*x#pL`I4u%Lc*A1C+p)&2@Yp-?Z)WCdl%A_?Oa|o05$$wQhxB zMm0YnkiW##w~9|6v>4e9Rc;=Xn-xv^PcTb&xaeW^NNFk!5fa_vNrn>oXnK+D**LXE zrz~Ud+j!v&wP;-Zlvk=b{?d!zLcdQlXJ zh8;wvuDH|Q_@Wkiby3qG>t4ZY7f75J2NZ!a-TGTsNW5;&QSIy|Cm$h)=5HoL51sAu zf1&k##p>Zr$AL&b7f#)TMYhE41aT3|6mtBh94wsPzE_JtYc6JaJ`tJkJ)CP8Zj4Tg zzMUEK_!KR_$&#gSj*~3pGOLdl8Hv=RxXFbtuEr;#`w06dK%wxx+)K`Kx86}% zZ`ed14=pZrJ7VBg{kZH;fj<709Gor07V%1d6hCg?2u9zf4ojqz2_rey(B8^biXe-C82zUpGo0|tKYud_QT-;ZrX{4sn*_G4kYZ(~C z)GZ&&UgDl;EG@6niH%mu@~znF==+Zkd;l`Jm6d2_FhQjZvVZ*^9^pF@1rF7@yJ1QEWUB zi7d97a*g!`A6nTZ-92EtQ^X@#7$uw)g`Ks2#{Pr@G0T}DxQrP46#5(k@KDGOrrC|< zoz)Zr-&zybSM29>^c90Gwk7Q9Elc2GX^b^QC~)t@D`yAt_ZSg}o5d9VO79) z24!22A*WSF;<@L$E-_O`6f_$OId6zgF@tA&MD{AKL!1z;r_Jomwii#cR_>>QhWK+y zof2it!$3;y>8+pBJz)*6L_uP+Rb&a3(GL}ZFx(&tr$LpU3j--)-{%+WSOnN%UkH1 zZPC@CWm3GdV4vS%QMqGGP@5oF)AM4{VJ!gze{4~LsIF1jo8ZjD!@2*w`kiTPU>8+R zGRoq(JzKh^lFP=z6_ zBo3A&~7;~^X1xNZ($>eZr zm(RK4CJ@W!WAjp3IT~th!s)MqQtY6_H?zwioH!9%X8+TcphT|HtW*RX@u;f7AGON> z=izE5A5Tktfi=4(Xj1QqZ_t!DRMc%br$UvK__0q>@@02)ju`2dynE3#ap-r&d-5KK z%JAO#GP>jVfCY~cd&S-jLUEF&t=1xMHfe$ag#{&Y{1B_hTsqddhYtne&W&g^?-J{1 zfCTbz0A$d%)PlS|+D>U{yKe&&YeFU&XiQ0b)k9q(=Rb&DiPM*Tg!b>G2!b1n)j+ zOrrY(cii-7nS?jny1}#!S$8UZ{2@l>+lAJjt1wCZVqfUz&=M`(iBw~6ydHW8@H5`J z{#;+8b0DtN4qu_kUBq#a)T;!p)J2)!rrx-4L4pm34IB$tg9=8JGTFh}pt`_vE8TYF 
zFBZu>!}Fu#*h%JDqpm>NlP@ANh-Z*SeT|^;k&dAH*0oOtPR|4R1Xx?3*!=O>f{*%4 zz%LUS?x0D`tdH}UHc;3TW z((XT;ng?neLkrt*&dth z>!!wuW?UjgZk0*{%*runw4n#P1j<^~RlaDD?vA9=c(x8NN!p2)+&sSRB-b*k?qc{L zkREf}wCHdZx)2oXv;j;dDl-waMAwp!d$PCNpl!oBwT)8tt9caP<#%=?=l!5Dmd1U;K=P)Ied^8^qQ&SFYYeZ%D``(E_ zE=bmhLJwhdAf+CuR^q%JLu24^4unw%w;M{>3!1qXBx?`89eGF0?{=Tgad7*N;VP_m zaPW?p4}!3t^$z;w1Fpbu;PVdM55ds%8utyO$CUbx9~VThVg8PFqsVYR7q4(jV4UeyH5 zm!EgISpXGTBr-oy?y8lIO1ecl%sv8FH zCL5jVm|NoH9aHj^gBD&K(Jp|9I}n{OD#x@iGw?5^4gOhQr?E6&VMQxAcQrV7v8)L* z6=$RCXZ?6gDrK(xaC@QlSA7H?LBlzpM*s-N@l`ZwEgf+dgArzCFl9zC{-640+|*&j zx>k5ktwF{M%Qxry!8CHb0Ol?-WL$~Srv50jx~nkwqumJjG@atLbtazN_*5`Epwm+`ZI!l21`w;pjtD{jD;2omKNjbG)G|dtEcw5E5KnC|2n@D2s)*OWEla# z7DUaDk{8|00|HJY6nkQ3;C!~B!{oCVK2bZk{&iU_QJx~uVO@^E?SSi{j{Bzh=sG^P z6y3M8z*xgW!9x-iWeJ|PvmbXh5Z0NILFB6g>-wO?>WiTb&z~RPsYt4M*XHafpZYTa|4@KXjT#>Gu5sC{H+ro;h|4Q z5_j+9Z!-<3k`SUNk6kY|#opl@g4FEjX@%c8M#$1z$XYzFYTqx!+cQSMSI?n74M^-6rp1f0}@- z;XG#i{S~ou-s%Ip@R6%6xZ}sbmgm}V?f%yn;ah;!*z|tZereql-~v8}r%2N5R%>v^DZj{oJAOu&_5Q|A++?b`{PD^`tL{tt1fOaFJy@hauX5Lh3LoXw-?E95kw}O zL`pIAi}|1a!tSKuwEiM@U}>xaOe)m?3G_zB@J754=}dz%!zaOfM6S z196tHZ%0D(0l}xubAF&V4B`Bb|aqh(GW1t_X z=Y7*VQ2@j25#}SYA1dKU-GM^@{_F|oLA$;mSZ`L`(Y7C4Z(Q6l;=|yZDPNjk|Kpo2 zUtaGx<%8n;{$E2snD0!*`?!@@l}eB+zLO?*TKN5c0Ix5*aX#8xJh64GsrDSJS9T7wRvc~pCJ4N?F~m~2X$ zdp`AWE;8GhS zg-lmmveRPtH3S@(vC{2#Za|sflY^aBaI5TLSwBz!1l+0R09HUO`U;#7B_T&}2FRo~ zdip3P{78fMQMqY&7ruUv@D>JZfeHY{cU_Lt2PoybRA`aEMl1JLGE1$C;TAy4wAZoV zj;_(z5cKpiuzcY^V?LnYBsX3g@zJ|Krm~kMbetBPWC+aD3juL|Nfc#>DEWYES0J7t z-2fx0tGrKU_KD7n)^(Oek#wN2=#SaXIVdDLTbDpw=_`Bmr9)~Kg9A*?pmp3PO!JLe ze9vHZnrBSbi~8`!khR*z`oO`nH#CIObysd$>gBq)2Q zHd@z9VZ40zHJ%J;4+QPl?}s#^ZG4j0y3eZY5~5duF)IrSGv2h|+G+VCWt<$;;jP^- zT3-X{q2yAG>!7VsOb}8?Ck_FkebO^`iC(MQ(Dy~9a-}p;i#1m(is&iK0w`!hr`Cp1 zD7|`8N)x2!tvY6qtT#>*>}tgwbaGV|Rf{ZR;-CS5aKTs{xPm1Q1hDy=*Xs(r3f1}H z{N+cKAU5>r54>3$e-(x4#TNO!8jSh~gKv z#r>)=$1l){w=SlYAp+7B!wcqC2U>AF3ppOCP&_+04inDK^h>@RT?kIA zriGe~5poxes&MW@8&@p_PVFxz> 
z(+h1BS+Z){%gbVk9lfi`G1bIYcuT&CkDK4AE4|i?iksIceLXp+fQ?#y+i5g01|}Co zuvNMZ(i27uWrG6}4rQE4jmtP43n(l?Z4A?zNxpax&rc(=KT%XQAv2;Bm>5^xzE&!j z`|5W^EGv^#nd+jkQ#xPma4E}e+scZK#P?y$?VHGy^jX%dplM#-UL$7u-lBbgPh$U*1%BR8JC{l~FK~#D^Eof>Jx72Wz}q zy7atER9jZ%6ny${8Lh-FKO7aiHN{VRC{IEBn)q^jdUPGug|5Y+FWeQ=FQu|tp!ryc zN)+jw+YM(fxK$+NX1%D)CM4HyE+4Aq490LNg+(441Z@K0AjzB@uB}8K8;B|n%@)Bu zM-dzPJDoC(7ZXz@G^`zRU6d+UGM~6m4Ig^e`h>z`-m#%-Fob%?4q!N`tAK~D9tlrmqWIi-zKs74XZ%xuq>1A)m12ak+ z>P*<7u!6aRn=PzzOrcrG>W@!>r^adlqI?$VEZKS%F}mcfW@c%X{+`n|uReQ&80z5a zChDMB&kq)>(J?fh%{;4f|2Odu(SRzx%dyCZN^~0}iqCgHU=?)x&PeTpA*jOl< zu1h&LY%L+Mg>W;8$RvS0m(M7U5tU(D$RJkt`@o^)6@xLenWS5hg*#W(&@EHFaK4~r zK%`FOrj`;1pI>29yC2_iWX|IvKe}*P8O16uTi&CYj_pD$qQx=@QTh*~SRXYubXHnJ z78e-PO5s|Eh~!Cnr^{GqZvAd}i7A)dq(=n+W>}1i=vVnO`5nQmw7O>H{9`N_iQGY~ zy%PSIskk&_E6+WP6U%H5;vFoY9VqD98THt9JYVt8PRJd9YM-Xgw1ez<3+*-0^(t+-ld~?>icaqycHp4@K*}bLh&C`- z^70%s&6?kUVqvFZS_sM#aP7=YD!BUX%#D44fR&K82oE_eQ7zDe{}hvLk9Nks8yP#! z@%IW<_Gp~xz)abj-&;!{!dOKOBuWWFxI0YFhY){xlu64Eq~ky}okk$oboi7B@v`MW zmP^ZaI8M(mdhbyb()@(IU+D{B}B*$n;q2er3@v{(UCMucWF)2BV#1@JcumH&< ztKsT5q0T)Aim%eS&hunmS<0~zTCyegbBRI}Q)O^e`w&)JKLx|6zPDy&+C|~eJepnc z!w?S=rgd;}zSHwr8Vxq-mzO6zBP}=|AchNz&+~ zpP;rG1%g&RzGi8_%N$rv38P=loEonJ&yqKsz#Q2-*&C^3!_w;|ia$JHhJktf&k7kQ z%3#*PHhdk4E>#a+0Fuh1JdE!?yQ08DWU6x)9zx>u+rsdvfTNI9RUTiuuz54$UKbEp(zsHJEe&0G z^{qu~Xs+{R?124L5;G+Jhe1B3YqryjMra)<(gkP68R2XKJib&f`m!6XFF8wD{T-U@T`F(8q+g29**9!G_Yf@>Mo4|_q z?Yk3#%J#1P8ZV69D&MV!; zX%&iyExY(^8-*QEuZ^k644Gzf$D^~UB$9x~!>ew{*5FjlZSYaE-7*c+EsD}AY#R8C z^t6HrW^8b5tZ8nIxyYTJ^1U~;5T#3!=qrxO@KbLcLty$PoxJY80ap;so8f1Cp@5yR zOJAU^Hvt)MEcJuBa+J8j2tT+|D#!vYZ@d#K#XgZ^X{|~=aN6Xu0_6RGV{omaZY=4? 
z{RtB6M(8un2v!h93so|?P->J5{4Z zALE_+JzH{I80(@c%2)e07}0p%vd-1)?1byIo%=cD_n5N)5x>xZ2lkDWTP5gULV@ql zp@){uH@zEI<_W_+B%-hL3P(+OpY^wtv~B_uFvWM-uV4<=1MwR2AKQE5;=N03$P7%# z2nn7f5@Zfrv1C8<1>uOUQH`AZ3hF&I)&3HM5V&RYte@4vh=3M~&Yh=O*CON_d5UM& zZQVlZ(i5vs58Z=t7gJBVf|5L}yb9&8exhY~(IvYv6-l8$9u$g7(|j>}Sl1{&Z_T@#!SCwmaHN`m4lAZwbC@e4BC z)l!PRjRL`9k@Gm1l3z5X#OR9&kwx*5B*lequNC=u=Of2+6r)Pazi+mV^zufLjDueb z3c$=QIN;PLox@K0_H9CA0OS5e)*r|9(t}y0wlG~$^!V~KI~#F(WR|;Tw&Mm3>JM)^7^MpM|=vaN#~kl7R7?IM;}HRu!@5y*q?~++K0Z*^G!G* zWb9y|OH4L_{3bYbqPkb&z2zJ^7dUvrkeviN$1HlKzTgBPTe`*t>E;aCjRY3U+s&XO zSeu^#5HhQWfLHW|PZ$cv4%+&&&`S^Cg5~BH$MidN1JdwFcp`y-=KoCg}wRL6N=CoQEXo zWd^|P7b*2xe_(KyY>h&F=(CFrAgh)yor5_BtuNr6k~+~_D>jYGd|0nk977$RIWE<* z;X^GDERoct^Jw!ST3LqP^Fvy)*pbt)uWcXvNR51qX`eRE})@`KP#xZ<`ENzXe%7I@> zU=2&Sejtm(d40DG9U>G->mMNB_)UPkh%hGZ7w`mj^=ZfKA7GkLs*EV;jlCc$uXAF(a2=3$(s?r&PXyo0eadBn0b8s-Ql`B*d^E#Acb&V`m{DW}~Q$Y{0}gU$*e20vcJ?Kxt)|Yc~_1M5)IGIp@I7 z!rTBo#x~QCLArrNq0_@RFKGAs!7=A7!Gd`&hTMEI?+N~JVT*Nr!WlWO=3j$&p_7dc;!*fB&42pBZ|@s|E?Q1*?#qq4G+l<^S3c1|>Tq<)u;&5~vS` zkeTc9P|5`RV-Hy(JE!XrFB`Pe91I~nhwCwC1nskpFcF`>dX+B+?X!%4Ln2(aCM-&R zX3k3`8)VQJ43M6S`qgLE2FC9QX)ZhG>Y)@1#%~J=pO$ZVm5+?_)Qr!j5|rB*EFnF& zdzC+r@>Gn^CJ~fdA3P#6cjz(41TdSs%gt3WLgW8%vxIdbF|a{_T^nd=KxW zz%hH)wIFf!H;n=O@)j&RA6&~{KdrvDK9L^Si#JiFS;HXlzouGKze31G!b(iQ=ungP zxs(wmhgG-?ssX4&=T3#~AUH!cd+5HzK0wrHxbI-^NkSw13t7is|HCi!g-GQcHzaj=872l>uChqs62`hzixp7It9&;ke*hzk~Xl``ri|rt(18dN{cK z?m!es&{MW8jP&n1$UY}?(57@&C+IYAi|=);zyoXCvA|+9$ynPQs}lcnVTHdLVnV*Q zVUrD)i8;3IGPkt|jf5~Ya-R#lKeNz8J()(4(`M*l<4#}kXSn0W4n*TfelspA3xC9m zQL{>s1`$FCCQx*WU-%(wMy||>+2oH{!&vcp%#7p4X(||bS{=3{ssaMN8kIEqjP3KPgezpF!2kKa)Wb#|I_J3dKPPOkosJkw=LA zWODjj$P6SCTq0^4$*O?<)yXA!ePRB06Nifkq>9duUL}6?%KN{YIQ|#C%KapB{XfV5 zN3*hyC_l(-1~|ElSo)X1<&CfwophSF%D11nwUtQFQZhtK<{Gu0@t9jFHpI1|v-LqE zE^7Z!s4nWS8=} zKlZs4d5uwa{8jLSz-C+)q;!Th{n4b3!drJgO^3<{f8dxZ5Y*$=ua%jO*3~KX!M^*< zr6_I#Q39y&M4_KSiufkT2XB|^d1vx4Z0u@^VHCLuk0D zWo}z64S-Fa2Z|e`{vrLXMd8QQe^Kpy#hnzRvpGuJTn6Iwnuj~PNnjFQ-;JFg6>b)5 
z$CTgI^Zs26wg7sV3wmBHMClPXYE`uK?^g~jFhK(%iHCSPG;uAKV`=tdX|4wx+cFoQ zMyUjTQSGb;sCVy6*bs7`aIJZQLOV7NX4{sqH%fO0KyR>uN77s_Q6S9p2GLU4Qk4mU zE|=`~5;Dn*GB}Ni#nTYAPCmixmsB)3sdiq$A4xhFDd8jD5xR$a0XK_68bHZ2#21z3 z2iE&zTi zbXXKZ;?K%59W5ZY2DP^Kaa=^Rfk}qcxkmw~Rslm1PI)-%q;)#3j$(uofc26RClmM` zCo<)*#a2=?5b5s!sBiD9~sjuFeD3~68b72MkD!(A9t!lp@usb^4 zt60@bF}P%(S+wLlYVOr*6up~%=CB+WeCvia-JaNM?&*r@h**@m!ZxCP7L?b;zds(a zHw5{%Z9L~Tdp*x?^0@{&RTN-1Mw+3x>KTj8BLAJtIyL4{_w%0+ac7;9Ks33KHp;HC9=MfC2 zkF-l6B7^_t1l!c}>=+@p0SHTH03O%h6xvUQFW4r^xZnc5&~-6*SBi;pmL-Q~7)<5%#@xJyJYxsN~QVnb4Vv zKU34dHy4WeR~Wy`i0<llU>XLWMk8Hj_;iK$S81dU--yWQHnQxIE8U*;q1#xt13WMF6 zcD#m3pF^7Y#i#6!?rbq00%IdKYO8z@V-sr3^`qdwK`|1Vm&QrIarxfGhbU5^jZ$i_ z4#Eh8Do%2d?z2|EG_epEpGcDX#byZB?ZLj0u=-QWDFkSbn6i|b`H7Ecn)}K3uinE$ z@1n)S#_BWSd8!a4;;m9Xi7B*=paj>N>X`b1>wMT%on55z7N5!hgxGQxqlRDZv#qhO z{VYzSS;;3^@B&+8(Vm!b$HO2g={arNEp&{6Z@50px<;30^salb1Lp)5S++qz0<})# zh3aQ#;%k2oKWUiHET#+lI*U)$3TLPvh|*W(WsUsp zyv+UV==L@+!8qQ-$bUW~h_f4*aFgAes|CEKW`#9@FfK_`U9GI%TT|GeKDm5};r!BK zf$XwNETOCe|Gdr%+VMqaoqnGGHX%Y=dh=amhUj@{yRrU0g?5yoBqqogfbx2;IQ-xHZ<#l)e zMjg^pDy@JD4!*g#%GTmi&4IO_q0mspx6Mq&TOi>jqW613B;g@8dfmu1d)P|j25+{)=lDJ z(pyTT;UOnVK-Gcg8-G=KT}vct?rFf_&^wUI25N zs>ojAii6v?Frm{NxJv{1c{W6q4X4-n&d!_L`toa@8QZ0i4S}@GSts^7NHzhke0bN- zTudpdiS_CNdel`r#F!0n*0jYZ=*=msb)AY=TJMMW_R;1j?ZE{+83D%&n}P(UX|5Wn z5w<@{V;R;t>?+>AXrcC>osw`hLJHW}^rk#$&JHTB_KGf{v(C`pYWI8=8b*ygwwoA> zZN+)T%A5C+@gg4z*y6v4wvaCE*xFavaS%mY7U(>%5o!LM&1PS_n=aMq*_YDgxXiDi z=;Y;0A``|Tt?cT|${)5!tvvyL{qomUz|bdE1S&I9VBAvWhu;dg=Wat{p5E-{H+DMv z%Aq|(YCq>ebyY@PUaz;BaQ{OF{R<>M`)Z@bc=3MWAU~w0#zjLiR#G(*(8ZKM(qOh7 z{lG;wk%d&{7Iv6pr|HFsVYfy{eeH&{ zHyl$LQK}_sr{h&~ci63YHR|F_Z3k2-xlls`6rik1tv<(RV;TUML!uR=2Y9Qaofb3{BRSh37y9}#;eWN;s?Q&TvHj2N zTSo!cR8{XAx><nGX?&F zZ;B_}J@v2)xZ8uxxeo#r=Ntu=kFhW!i@9$=HN3z>ARA~RV$_G|bPG38Vy#KDf2Q%&z&W1*C-VuB9FivJZZ9&*718*J zrmk#5j=Yw{6K6g++pgi9`&&!j|M20xp}oM6Uzidi0W|F$(YiC@!A)M~oX}zp@CGCH 
z6+g>F4YXbUnjYS@S2%Qs>OnKUDuK0q=GD1_xfDnZuUm943dZVnaO_oo0UA}DK{3q^`TA%zYhsRB&ZMn#n;Leayq)Kk;|^|xn7mBFNM6waZhGVsk6>k( z^^o7fbymZ`4aktE!$n^F^O@rhqF$i)p?4!RV(wIb-!(%k6xjap zas>Og@Z6T^{L!LEymN_96Ya)(k<}eR26Wt^3=0wq7vPlunf=3h43xJ7z9kch$B091 z2c-rvt`9n)HDCoBKQ8UmSU)B=AW`5oA5ZEdB) z1g5740zWi_jAHpI#o%&%<|nK4K!9{a=`DGF~Fvg)b2on!DHBraJ;2KdxU!X>02 z{v1vq4Or(5`yV5DA`5y)=popePFg1Ihs>L=LBh(~pG*@DEA!5!O70l?(bu)rsC%PX zTiB`O7hJSFMk;1yhN68A&tQIW`G>^fWzVd@2Url)7Qp41Rf~iTir|Mq-;hS&ayZVH z>KV0dd7=7FTnlJk3A{Z_G-Ss zkiVT2U!Q2iq*n49K^98fUHWQkB9*M3h`|ALYlZ!wD5}(JqP^Pdwoad=-R!+1*j56` z>~wh-d|H^>1`S4>HDI58b4dYRlZ!$D;0K;VZi_E_frr;V-PsqaEO~I<9;I=gbFdi9 zyYD3XA?8ZkT7JFg)*_6la#!B>iw&*Oln>KKVGcn)@yhyvpj^smCjj1f)WCS^A8y0- zYIv_mkpCfKD8Hx&j&oy@+2S7KFx=P7LL?ma9k4)g+1|YEOV!iSALma+G9Ia(=ytf4 z-1EUW$v1Gn;(PHL2GO?$!-ViZW+Wa5&j=&FZj{tWYHEo_)Fc=479<;S4dvE(HXGpd zmuU>Ha*S^AG*uQuzSI^8Mo@Aplnr8H80wu{X*R?%En>dLm82pu^RD91yA;PMZ>%IU z9!9Cx2owP$*tL?B|KO9hVR++@`*(gx;2RcX=Phosr+9Oit!5N>*e^emA2kCoLypUH z%JovnR{J9!V2it;#|5=DP|ocW#Q`4}yK|hY@NHdzTh+O9O>Cg~LiU~F2P+lS;+?Or zPFVsF=pPtQVz*+2o=(7+DOMk7(E>c@yO6Ong9(zSE^LS0BTX8ncU$~7pp+zc=qbtl zwrJBR@VuWR@J!1W9L6;Mp;*eq5CR3fBN2in(D1O*a2oe+#qsDkPEZWC#2I?k!4=sz z;D|Fb^WgY=ga`?0%%MEguAgWYB%#>P>_*~-R97wE{|}J!Kf=QaJi5XtesU>|{u?Am z!rIRKe<5AEtLwX>s$&JTj^9vhUPICYV~Pu-Edp9$wAhbDsfbckXb5Y5!(^E)I&51E z-au4DPK=T7ump4R?6?8?VRM*&Q^g*Fv;G76_j@pO&}+QrBYWFkHj62P+jqt{_c4HP z)>q-*-`CSFAP6VvL7(o70RnVicCsNiEx*^2dBB&6htQxdSNcFWpv_r|PC^}TrJbJw zKx%@8dEz;ARSvLpl7ky%&+ZGUmyg`vMpI}#KvRG{^ELU8BwCu8axPlobz{vdqqGpQ zR!b2Pmo<8A6dsA|NOp}ZdaWJjroFiZ_6rPo%Yi4mhS->Kqm37GtRhx~dr(NbQ5;sw z8e`Aa3$4IQcxfs0BJlaFrXto8Z`VF%T2;_dkl9)}_!zNL9V1@QqEa<48Y>MMk8wDA zMq92`shCsjz3lR=QlBkBgkdvgl9?u%7X7XEsKoMd*Muz!ry>cmU%SF%xk0x)Eqyqg z;?mTOIGSl5Yq`#4brUaadbv|7q=;Ryq9t~1B6cczG#o7caiR3vHlLq%ai6uEm_5?P z<^J#V`RjBk8cQhSge)5u=>be6H-iZes5B?+DtJ{anEfxAIbgP76V&+%yIE;&un+l0e;kE$tdv_bP5$A2}-;8wAHU#LLSpab^s8 zY>D3M@wY_I4-|>|F8C91O57_6m8AagSs1O&x>hCI@k#Dy$V7~@hatpKs$#WW!L3Qe 
zR}>qUXXQnzK&`kgXUt#hJ|>35_2gpi3=nKb;AJM8@f|su2TfJA!mP>NJd6TvIhyd# z1kFQrR#}o0M1{0@7wm}uyW)?FGXlBShbEq+o`ltbjDjzT%9}$*J~Cj$d&ReyvCU9~ zSZ$QmD21o{)wn?~>?AL6bh|;k)bR=LU zqXx@JoOgZVuRNP4*DP7fmN&R)r!vqk{QZT90W6sY>@kcYIl`XIMoQT$?%8bM3vUg> zgV=Fdp=F;^S6P*!3ESuRu& zvVjaF=5f3p6Vs=JRobUE)+dsN3bC24Z^-{XOg8sf07U+bNU)KBfPO+X{)?X`^%JDw z=;HmKRaB^kt`n{$`oERb<`oP2;n-`6mP0mjdA#vj*+n#*tT{5Sp@^jBh6v{~a$D=` z>I8Dn78r;2WLZX~o8ybro&0zV5P8e+pmC+W5=3(&uTX3NB?IH#5GWJqK5y5hA+0w4 z$lhf|as{vG(( z_&|(5$wR;qpXklYFm@1T{uUk^hqD$EH-3bSPslkJtT=Wz4*!Nmkh0$Z?}hhsWsRSg z5Ue=yU}^rA4VTARk8T(r!#MFI`|x5O@t6!}Xg#P2?+0&)dAB=vFON@)^W>_qPTPTK zyT;XEl+y;F%~i$9-8%>eh*ccWtMkEc_vfnO^o+Vk?lfO|wN8S56t?i4>(#Sc9pX&s z4ZB|#avDr+PBs&`N5-}pieC2JEaI4i$#@gUDRPN_ikUmFc$8JIgJ|+l%5YaXZMfiP zghesO^&LHl1fGV)fl*;MnhF_nU!h$gJO3f>4xzJ>S|*$`il#rkBA=y3pXA9s32eN^ zkI)%{A*Zb~cAb9qN_fqE(%aN|B5mdxrh=WfhtIO{D6^wgv=|a|Wpfkf!lyLHFomhB z{UzsYz)DuqWSf<>e9}L$m3_V8?M}r|oD9yDH<16U(FpxCEMkIs=25QtiV|?d%6_lc z-+H##{P6!V_6}T@c1zZ1Rob>~+qP}nW~D0a%u3s~txE2+ZQFK!*|)n-f9JiU@3>>^ zKd@JiIdzy}(nhh>pgbEu1-{|A>4^F>YL>Th za!)5?{_0T8Ol<}BqD)Qi2d=h$iIaiZQA?OOU_-VCT#!8uJFttn@EGrEryed8w8uy& zbjGA|{0_9PK5N?{%}?&l$KbWz_2n_x^#QVyMs@@bNGJdw{7MbUK1@#}HYUp!mIK_8 z>HUo)>yExU_81v(XSD4bdKy+WhLXwbD-2I)1q^g8TkjI-vE4*yY>``UrJMwESnq;6 zUB+(*#o(tuBkHj_!g0c0xZ>v14UepVavzCGVDgt~{bWWa4Ni9-?Uh!S4a;ku41-uJ zq$7X-CA|P0VEVV-s@QU!?8>)oO{!S z-H>0U4$8D=D@^LdK~hC=`(V%4aIP8cNuf+3H?L8$UW1Y@`F1jMD$B1ud@etFZjz2=8kJST@3cFDsKbOs)Ip8nb$yX?33IA(d7QLGnyB zLH5b?rz2=M4Ep_&H~Q7K2Qd;(3zG`@?D4KX{I5Dp_B{Nn{TNJ+0o`Rl2?%yx5r{Kp zW$;wi7rh3Vwx~9(5q}Z*xdqY<+ULc6!6Qg^f9Wk-ggfJ_Zw?CV)#29$)CkfQ!4Wtn zMOy$%F9!2U#j-;1$kSt*wZsSyZbwA4s*6eaaplg5VCJ4({^)f{KZ0A_HO)!&4u%Q` zSjM(NKZIk{3ncI=rb`0Id3k;Z+VC~|Q~WG9@#I?4of}S{*CHoAbk!_+?dB=QT9fp{6r~< z@ne`~WY>-S`z&!Mrajj8wL+Ql;tHfHzDT~r=qgWTS{QKFgmMbuLrfloKqS?_uR{S` z1e%XqVg&Sahe<6zi1g7Q$Mqp(e-F^J|L|<0M5s8Sy&uw6$-tUYTK2y=VPNDQ&^S00 zCE)_63uP^R8x;3k{0)lw$!s%lX0y{1HL#p#w4Zbv6RbaC=lDpoTb07Z>Q`4;SAm@em5h8&`-Zb`%AET0Uv2CPmewu)k@X&p 
zqOU{y=r57?C(1AA?m_T>mhOo>WJ{NC6h!RXDH-curCZk3#>Mg<`lmiBvi8`jD7;g6 zJ?PvC^d$*KSQJGHI33}Umbzim7FH6dr({&3qNEk!yprTu9z^CkKdFP-&!H&220~z= z{|H$AR;%nd);E~Q`SJY{;+hQobMUm;b+zeymFaAr{c*RZ{DV|SJ$1wYqeu{DJ3wz! z4P(pzY5?PMYSre)?A7vd_(+O3fue{VfOl)*`um@S^q}TvgZTnSQyS42NS@rmPjlpTfSXd1Z{4}51fzQ3xr zF>4rusei9~$uyg`X4cUZ%$ZQt}g z%`}GNBl=$4);^R7a`ema;i~0u_mGDlrbK9AfZ8Ps!6YfK-i1q;|LvzJM5^hv!(5Y&%RifXm&$rVoI~ zyRjpX-ypfe#zzV!$FbCrLl5ra#RCPc1}6Q@k-!D^Om?ux7!J47f+>?P@K|w?b~L`0 z$0#VM57w2KJE&0zCQ9*Hr=h6^t%h5px#gUC+A;Tf*6jBmDmmBnc7%?Ig*Zp?_5bxM zZnnr2N{*2#OG-T(H}sCj+?Gh}C;obQPj|#>?n;lNNhf!jaf=5=OT(YI8|s%sPQ|3D z7&EFVY|hz?9ho z8l>K1)fr%+b2J%i?Umm^GIK;%^;o~!qrx-5)8E6wyLqQYgTb{%F$mzS*<}H2qm?a^ z2csPvXG;jjR^l#l7_)5ko^{Hm&S{#Ft_%_%c1rdEZeS;OXbYEnUH4(@|EkD$w`uTs zO4MIcc#n5t`zVd*xvnIX3kY^-6nXqnPJhYyr7+8p@*2XjfIWLhX7$Sd?`<}2w!sqn zw^t$1Hwpef!kcogHa7qGRBaW;fNFwUwA4$X_Jp(6|$q$8T zDz$*{+=Nzpk9&&%Nc&keMy2#SGLA;6_!1~HpT|ndr_!Pc9l?JgSYVrS$y$s-oQm>f zk8!dGu&}hm&iJOAa3L+to_>*D5`ix%3MS=!#Co(p-F>j>gb)9r1ii)%V&>Qjf^0qcC29_AxSuyIOq*`R}Lwoa=xt@_pL+-=|IXugs`yY3uMWM@V}o5;=Q$hi@dw z<-bYb;J7VmAVCB`kJqWLUb2~0j~*2fOP<%+;ISw~fG|GHmin5FslEJSLr+C^K<=-r z*oebP=-w!e)RnZAsqvJoshTDCA9!#&CJK%U&b506DZSl^T4L%jX}=1l{@8}P2jm?~ z({eQ!5RB8}Q;a5lI1H=iU}FL-nXq|>DFfFaI})tRhzv2H*tMYN)V#S+Rk$Ak5x+cn zF=Y}hLhGW1oa+U&NOr#&+q2J9ekXZH-rHYsalhuR-sKoMgu(@mDbSkp&ma5JFrlyn zsHmkIri^ocHH%qzXm<-V%>nxf16&L0!h#4$}EztKjhk}{J$e%Z(%YhimyREsm;>mrT4{!77rH^qwGm-NM$&~ zo6Fdb?Pki1vQlYYc)7p+Vb{oM*>G0=eg5d*v0(gPJbDdj8tug@n~K=GZD8-j~fs77AA8z>HMb!mG=?a9Zt&vH=%W(U7uzwKzNqC zfd2ggE?V;wyMB`s*N8uUVE=D_{J%H~^*3W2Rn$*2_lAa!EEYoBRXP=my4VRjAqKUe zMl@-tdKSsJf#mC2nfe5m^oh%M<~-0Mpa>`>5L8CgA_^=KgjyEMFoM!Tt*ynDPsmT$ zUH8jLIkt=u`2A3?t97qSPw#KHQoG5os|WUnMc-hBQbc(XUbATT&7vGk|UNeI zgY3=|j^u(pRM;xuM-fW<2NYA6$b)hdqXC-z^UM=1xZq#0sqPtB3DsFwI26gc)a(Xr z+^hr6YMa@$3&F_|GMh9}u5hiU9Smar$E79J1y2&2bQ)Q%i_IWfDUluL0T-9WC-Y9< z!?r9UzaluLg_Y@>9_8g=v=0QGIqbvn2N9Y5{?e@7Uf7DQ_p|M3;GctDu)gf5*;r_# zwDT@IIcHHkR6`;xOd}WW@a_;Cx&T^s1r5=;?d!%f-SKEESU<}7O_6H&*^H8OIy4OG 
z^%rUl6@{A>(p^pxOyww82|Kor+AdRn4bd#YyPY|)1NFmq3_8N5%qxlG!lD7H0(@$m zYIiK^GM&A*l2mFgAXsc$S5GRmy z8{%J%ulJ8jx6yIaXt+h`u*7cX2UZ*>n9P0pT*Tp|aWP^JE_Dk#8JqW=ZOdsPLdEn> z%P5PjkWR}b4^~IDk=6X!jFbB6oMgPBpAA;(sfw03npMtT%Wq6Y+lv>wa|Y3vQ_jkc zyytJwf^sR2>0vFut`V&VHp=$M3kL)o>v@dgN{Q6Ox2X5w8nj|V{CS5*r>XlbftPj* zz;IT$e+Od4|Gckokl6>E&Q!A=Fp!*=?!vtIvP4%ht;b4l7x+y5|_jM2yP5Wl3^F7-68+Zc#P2?yKRj-FAQO^8cPo*gqfx_Umi)=idtmiA*NdLU@gg>A`x&{;b}^2m1{7 z3-AA8`vVB_?{G{s#a>oU-qx4IebW(b6tkGfSBNF)nq^=IZ7ExAA-Tij(p-otbnDql zWU(n@-qd#iCzB=}+{CJLkSZr8%c@p)4Ib(*o>QmMrV5WiYK@*!?BzNvr-iLZc?6C4 zbL~164lr-Sa+Clq3gs1ckMtTfy~BeoBc<)6!mm|_b+CL0r<$9rP}Tl(fGWji=Jg?& zo!7kGy-@F+au_DfFK8eD%1WvN?+mwkxr4f?1Xfew+vcZI3EKGWH*hHf$Nf2$IsxX^ zew_-knm!7~0?UTppi&PtF6i|Kd#ssikjS8Q(8>>r+sMF+s)Hy|S{AGe!#}%)E?`A@ z{K!*#Q|3}0jVyxGOXVZERMPqliq|{Xn~mO%+K4kCM0w|6Q6PfW4?dxZ3_WYVqjxJp z_x%LD56&CH@N?3%Cg8me-_a(SZp@R_&$fQf@qB0L!D1UWml4+!cvXRenIUqzVjaSe z9uUN3W4?D*NKso&6oq}wSk#WluGChnV#b@aG}|h1auUTB>CjbQfiM_qop{pCgv)N2 zQ&PF4%qD^vUPQ?oDbfA`gEgCmDT1EVOwd=Cu_rfxd2JO!J<6_H{W6*{U6ME^^&5hc z`6F|19B*=0h9`>?n)gQl-Y0Wy%g|YC4M)sJYuYH^q-)R}t<=r$l4r5YaBt^dcy)Q> zG`qd8Pcv1USRWJJpJp(-$kO-Z`$_lrJ^c*W9QaZ_(E})qIYZ^1Bq0z><(r-u!VMPZ zW1YbRFlz7*^agJ{=^^4(U&m=ElK0>h8!%Qu}BrlsbsTq(K=g9Xdig&P6(4T2B+6+|2-k8i-v9Pu%cSGsBw(RLi*rfP2;o# z!{&O1E7=t>Fop{iJWOeG1uf}4vSO8L?Y}p`GUkb%j!n2XK;Kz77HvX5 ze~chVpUmtndVulyJqy3cz;e14RSrk&>IJfwT`l(jn@ruSXCkj+%U3v$f%;QEnvCD1 zbL@zcwh(XsAOv)sB^IUE{%ow&w%*$_~gAp0V?<%xNCMo@^68j|iTe+q5h)7f{e0E}gP%_`6HG z;+jGM%zHshl?!!&XOvMr9bG&M#+_@;#-+Vm7g#z*e00E7Wzz%+>ww&#P`43S)y!+;ng>ky>j;Gok zG;5Z7hP`vflUQZJgQO^vY#fh@c%r6vF5aIMpfDv{Q~-r%YB*nb#!n> zamc0Z@cIJo_p?vq4tuRQh0$)wHalY zmDm~FQuK`LIe$R0nUwLA@1c)(?H>WJ>Rm{14#i!OOUP+@TzrgmD96rPpxry$n8zs> zNp4R$ipoM((io|gl9!fNt!F7XHz?dt%uT7oTSZW>&+ht^8<4)(jJW%xwI?L#6Hig* z?IyF_znUs2Nn}~XYAn*=+6EtF=&t0BGH({7&Br*i`bg2irOlBf=DL31e9*Ofn-exh zb^JI{C_1@7oA60E7KbxBO1QK*0mG`87B|Bdjaq5*;O|mVu{D06YXnKVwy`+n?9w2TFI7dLX0%`WtkH`U&Yizzr^gI7Km$3d zbDEx~k907NMOE)Chwo0#tHo1MC*&w}oF;jgtL9F1T+w(eEw9C6B_pHJ%+U{QYKekl 
z1slIE&6d1l8MN_X5khsQFC*AO<2vxD)9HXNkS{pdT+bN~WzKrk*WW}b%-FRGf^KT% z+DxLsCeT7*jxMHt|R78 zzzTPt(a3k7;kM9Kbq|uQwYHHM;Y}@j48x+#?w8ZN)l%(7C}Jw&SKy!HREjkbtyjbM z$lu3L-$FC*f>D3fwU~BDNNbj-rcx)bekH*S*W{L40&s(+af2 z@z$hOmWEGOaA~42hGzR~O4bo4zH^SO%pVYj{QR5WUxOhQnM?mMd#Dbrk7262mWqzR zl%>aPbMZk^QO+n;SXys5AZT2?iVskv9lqH3SNAZ3NFSdnErz-8rdl^vS*4kmk5<)Q zs5MQ0WJ2K@hJTgOGxV7=cGN7t68N?zoO+WfMt8F$8y{;ja`lS9DKZkF*)htakId!> z;|aF5A6PpoYZ;ZJ=7B}vnQoqQQY!e$O6!sicMeB`PU%zNUTs9>v+>U;0~?aJJfg-5 zKp)%Cif&8n{-;WebNHY;B6TiXT9~tA`QFcO_e2GETv}ZBJe1mfzEf~JBXcXf=mKqM ziFHBcpVC(aH=D@=^+3s?b_OFnYEhfLQmYG-#;7S3;H%IY&dAcrE}Go2u8CFnq<}Uz zdy*bBrq$jYGUL?WlPmJmk|Z3LMztN7gM7v18T=-2)2|2@UMG&J;irQ5F-Cv(JaNeT zelW@*laA!bmKZ2GLz;JzDRlQN-|OASxbpF^dYboN;Y?AwyyZo z>su|Pga5ae-~w0BRd{EsLdvT=h3M1@fv7|T;)O6nQNAM7crd9LX3y<){Bd&3?o9A~ znG6!;i;{w^yZp-bnVMgKmbcN_8z`m9Kf6_OUTsMgN1An5lfv>LU5|c-BfmHd{E{6< zJw+zxbNZos9ewoT{8?LO-q}x)v9ES5*{ttmMJOa2QEDx#BZe;`tZE>`L=FHX6mk)W zl33wxn?-H=JXT_wMc24A?ME*`n(Na*Vf6-xyWL?aJeEN8%mg6x(E;i_cr~B@;Figw z&t7xBpZ+7?FwFk|)T(xt9{*iiNErXKy4Wg>$n`TI_|Vx3&k1O*)?a}N3ZhIB&_R$C zhCv52+gX(=I{ZOyrr(VkAcOP%fgg+!8YW7jG?bmfZa*~T`Q_mQJpDHS7PK*wvRwvxsOcv&e-+z!LS73*=q=u=t)d-5$0v-MzOTq3-{HN|I5N0Jo=Dy6 z;P--?F)H^=A#*{z1unVP!7DJ6{Be8WUi!r?w9`{vvZ(8dNLK{TAe&)H5LEg}P$T!& zM#OwxL|$uk=KX{LM+~tG88h&Q%?-;l$iIUdcF0|0C*Qoo%XiE0Kl_sZ7FH!w2OC3A z1t(K8OAmQFQ(+53r~lXPS(1vjJ+>&yXN-M^8(Y<=;UT z!jya#dYf~{`lJRnYi`+}!Ee%trKl1Dp!xQwN_u~c#PgA^$7_z|kBQ~On2iB-=?akA zUT%kL%v-)k8O(1>{5>CV1pe+q95FKl?TlpokxWrz*YPk%E}8I#K=h#=Ol9Aj-8ELN zxrS6hZc(Qaqi@zsI*Eo{AiyKGj9fy^nLx zKbjZbUG3A}dAnuzAo;?RBf3Kki5+ehTkR_GIanI_(2okOk+%Y zW4RDdU}%lRS2-+>NJ3zG0}0>^=KG##d(`EKdfjn!UllUp-L8lv`XaTg0P1)($#?Mv zT~qk|OOEr8Z9_=#+;-4)$Qh4QM)?OPLwRZQcLO|g5xDl>7&%jW-mf3tH)2Ac9@q<6 zPbwQ;3p_L}I;Qb1se8;k86lxx0>kT|X~3!;z6WXGB^-GE=($kdjs66>fQBg7rG^`v zS5$Y2y4&8+@Snb9oYtK&>PCzAYCEh2Efv>kfvV54`lvsh!L!5E0zZLan7K5Bukm)F zU?^)R?wuETU8pK$4cp5Yn`_QtRy9`MuIIol3g8ioXnlA1^^DjevdA5`>7S)Do?|>sg zQ`o2&`q>BLOUWo}Z24eFKh1PEMiR#rum+jR!2+zp&hz(TKlN4U?tYlpLNo!#ySVk5n} 
zkLgXe&eu!RO&cnUfj_9<;IMqAzKJ8;dtyIYU!!1tYEiMQmb}u33-5UB3AR3+!gO2l z2Y>dCDf~b3$0=ylU1WC#8iR;tDHMLD4vMODfx5_||>({geE_5=z7ZZx?GuRd-6H0gXj5? zD+&wZ@B<&(00lnD@xc)m-r9rYxBPYjkl$#mJb3Po;!nOkfH^P);0WZo6JP&&FY>3# zjx>;;X1_Yn@sK*Jx9nk!^>fJeXYr4%*=yUho`RhYU+<~dEAUz|@t^a1 zikvsbu>3RE%7(8EKYU~KpKr!~VBJ{%^xXs42OqmLz3ZvG1(x26!}=kO=diwu_UnH< zXN2^CJ3xLC_Vb%Qr%+tmV}2IwFs9)j1qC z^ZwT35usHP6;LfN{|&VpJ5XlzQ<#Yc(V>i?JnyqyO?ix^NvD6qSM^Ap8bu90ZH{qo zxx0|lZUSa#qFR!81wBV@t>0~yr5NSD+hbkEZ7@n@RE9Fq9R?OQMvs|imXRetHw2Zc z9qA^~cHjb7jK(&ZA%fbIO1jyeVtvj%X&Y{~mZqA2otY^05~v}sd!q}kTv~7}|M3dB zwZNg&%fY3_fJ>6`*oeDSVUe2yp}W|+mYXI_4=L!$4SgFA$tCoy2J_?s`xI?J?G|Og zq1D=fW7n$}x4K-&w&NMW7IBvcXk+EYU)iR@BoR*u35-_Gbxl%?+EGNOd7blWv zeq2|G9M-wk@~Ds9uxmE<*~-eiOJP1h=dr8NgqjMICN?iaRXKiYMyjP_vLcK0wp!I{ zdZk2Rx$Yp)yOtD|Zp1%$DJzf5mOyM&uaO#tQ3otGi^LJC^1jq613x7)Zn-8kFtThd zuO*XIBr3CQM~aKcIh*E=fCy?2Cl8>}HXP|S;)4xuPDX67W4K!{ZD$mPv(~xC4OMoV z4O<2-=1mI=Nf-%Q#-cb*xurfdSDlqv2``LPCiOw)H6XocL|ik*Q`Xqj827TpensI? z>ooYtxBW%d#~&Vm2qi6_20C&_gDWadmJnXBJI;3FD?5XG7@346CofMP-)sU;Ja~U6 zpX!#%DT%3bAdAtI|5zw2v_rZSDZ5a8p*NSFigMKM@D?9#$!s~doIcc`OXn?Buu~JkWg_9j|ApLWK^asgEAB25+h5ZOt`N$ zqCF)Jsdh3#+?gH9A63;TtL;btkugt-Hu0R7nh^d;OmwPKYdVG&EE_!tb&{F3pSD8O zFKVOQ57>PywA8*YT*M1M80BTilprTIizRi{xiD+j^f>h(-Ynb`TQ4*Qvs^Wo^tI2n zS1o!I|CQeIrrK4lR<^)p_g67E^7G1s7iOkb0mXmrn*bh{wtd^WcLl{XpK2_?}R9~}*YL^MQumax@CDwKf ztzK2>(9dj@{ynw;*Q+B4jqA=ZhP{fdm4Em^3*tJyn`L7d`5~!UCOJbvgz~gs#iP!c zAmRF~T2kGv(xM;ix=`SDR<7=PRu12CdP->(Yq43gms;q*^j#xY9j^{STO5yVQkdTw zhCK&PJ%itNz1d0T=FTUoVH~wxXiPIYC&pHY4l^zuiK6!<*;JI@^$5 zeyiuG_MR)bX#OnKoK|$U@H9uYjW+U1LX3u!FTSwYwdiCgTvD6JA4}O7%&Ro$&*?$J zT@I6~Fk+QFv|e}ho&d5ur~V}SbCiHXeR;M!Q*`RAX1MXMv^nn6Hqh@DjEY$6=@UdX;Vx@WG>1PM-SJ*>%b?#VQB91emB zxZ4gw_PcafGrd9_>)}Xt9>5U-m%mc>KDp)`4v#EpiNY+(ZHSb7zdO`tG4u3L`empe zoM{OHfd-ucLFBA_-YyB%>r}3l%^pqw{HA#JkukPgZahzpboH46*_Q5ges~2M%^;eR zj#TB)3g0S|b^E**`Wk`-OcUJ@SLMslT%XCAQh9Kt|BAtc6CTH8v zY)?qLVw2z$CE(bC-lwuyg$b5mD|zD;N|i_|Od9?e)fgpL%*yM`zh>YXX`qYQf*;G6 
zl~@`5r_<>$SoP*uOd8ywcX&3V+waq$`)pY43SyA`?QRmp2k(obHKR-Fv;MI_Ps)CM%azJl5DSjG)m^wA8mYCF8ZZ0+LLzxxj)RzytQ!+bZ zHYgOHYU0sp>KReJ__69GBo&Hbn!a%PY0nM8q`NCAj5yNNz;Aj0nF`t-Ngh84UVwP_ zV!JPbt7rdhWj@tBO^E$!;s6> zBkE1^X0lFdL-Pc(PcEwO6?fFC)c0g_K-!Cf(Q==eLz`{=4f?rpK&&EYoWhWFmO5^2 zaqcOTCx+FESz|id6C?Tc>wX6oW*+-wnSF-}$Gq0guyk`g`4+7?{CkA7g-M`=@kIX5 z(*?~g_wb8ca=-4r8++SC{41W(=2>eq;aOv*wdsSF)Gnz z>6?Z4R&5KFk(~(Q0Uj%L;3@42phcIEMOj-t^1#NXs(ND~J`$gEoR7>CAEm9Xsc*k7 z66OgAYMyjtKT~%SwYX7MN0m&-yG?A2k7qe;u$L2iO#w3iOD{0)N45q)V|`ewvg2G~~UG_M*G6nw0wFmJzck!*iSofqQ~D&1t0hr8nB(>{k^q}dxsY_=eM($$%H zf61kM-0+3v!y+Tlhj9l_OP=>$ISiN_-=~+|Iq%XC!HP?49pgnte^OPEYta*3RH_!d ztgV0oYeDi>i1mwQt3Q4)?R%m10lIF`Vd{g|)Qwx)I zDj{LH)iYDjZv&_?j8Qb zAHakm2%0zALJDJ2YRb;q5&2Bb_k8MH{(}<#{YOu)y`on#?O&q!@_QHgoJ+nr-Ufa@ zzptOUK~7khC`!k_%Lc-b23f*qP=*wwzCk&ub<6%4`sz$~RvD|-lAZ4FSKSHj*rc>w zmGj87@l9xd{Vq zC$)~493-%+>~uPKZYps|c4hS(9ol=a`mHem&`yVPjMW@`DNGg$n{%!A@z}#yImi(@ zev!MzRKYQPJo7VwdbnAwg$2+XTzETmA(Xgt9{Gl10OerM5JTLln}ECuQ09=lJIWb> zsS{w~BgtncU0rG`|CwJ57rP{W%Z>06b1QA}Q?SR`u6Qj@s!;r$DY##k42?c2P*vD& z+Y6QtHj~&aYTx#k0XVu}QNO^4%h_iBH0*Q?m-}Ybm&O9rqCS0&$-RCza;==QKBN!Y z8ypVv866(;?H&pk;fI@r^~f+udKjPul90y|Ie;$pMRh6q79Et@(afXdc$8AoLEh>E z98?~l4N=NH!kVWT#(4*x{uGir{-CXgbI8Lk6K?eTxT{#LWXnK_6TxfA3$^}24 zf*bbmIPmTtcmapcaB0Ff+PMC`q=f(fb)WweHi%aJQvOC2zigZwMjRUnfdZB#tg8rv zji{-lq-}wlRJ7_8sG=ra2(jxQoKjXgT*SVRzrg(*#rmVNZUAmWH}+269quo)f3#BD zHl8-Sr@V4}u56#*w)nq*YQt>7J$1&6VDQ&8oc;_o!SKzzm0)@oKbd0Zm^*9;qOs#t z?iTBH&yUe|tuIQi45dfAf|X462!><~2L0l^?*Ge7E>v}oV#TdA%YWLG zXsvV8AGYq&n`t$7*Q}yoFH)n%@lz={338NykpZ2P1*x^D!}J}DqyX!N3R3rW(rsEL zbT?+cDiurNoti5q5>lq!19}f8;p5(py+TzsI=a0zu%udLG}--ZPn9-Hibc65!d&2D!j`18{mMOs~{Ae4#HfZ7`*60q*ek0?`~vTOIjkZ&diR9r>tf!Q2uk zJa)eb{2$lfM+y?Z?;6h<5+AJboyx(}P6@WSL2uUu_Rlj$^e`kI-!uGEz<}ttKR{al zILIC~zmCw|3(Vmj56%gsfC=$)Xy&=_J9!!e)9v(wyujPQ%=p$`1IbJE5I-P!O~q|; zha}-!8G2-QcYd7CHn;qF>7(KmcdJQsr<2zPVC3b^3OdO_SEE&;i9KxgrP+}}CnC2B z?zKt1FV_0w{(6nAjn#GVHpiextS}wVlgKH0Azg1$$$w-Zo|v;7m4I#)N;16PYVcte 
zCCAd7e$hAJDRz8BPxm7}c+D2r^zf61-v*z}nYMB4{}$#u*{658Mdx~fmi$EFW4fs} z8sx$Ml>-*|_2b{`lY9(YzRx!)Km>1S-T7PIa9bP+(|;5^i`s?$w1Ajs1xL zFqOApjBbRJ`^=$*G^sW zobGq{k9GZoFDgHp2-;=({p~{34Y7t7OC*?8H5m0unMHy*$Pt5<{5g!vLBUxY=V)L!C^XT_SMRby3WJrlx^&;lZ}kYs+ZGPl6=J;kro_WSmaOu` z8sujJF@~v;bg5yRZlON4G4DBtmAa%>SQp4XoTJ%mhi*{B3b=w_@`E`eD35;{M&GW= zM7W|fs@mCAGJ&#C(C4$X@XfLIV%6-wLfzqp^XqHl24M~}D0^`;U#zbQXaZzyENG*3 zk#B(iJ#IVwZd2Fac^LFN4|Dymar^&6{A8(YDSvOHe9HM=kkQx%3D7FSG+Aj_5YrMg zhe^?-V?hJihTiVRum5#RmR%?Lxas@aF5r8pqVMDYHXNEPty#o$eZPoQ6Bjpk!NJk@ zp1E1iKw$dqgFt3}_3^lF-*cDM;PsT_z6tuL?*JmkfF#N;YV!q|z!c;bb16J{0L7dfVLoBdj%P2}F@@C>YBlgW7~P+UAebg0 ztREPwbV#A5hB1M-SmOPrUS8TF$SYNa)E~=TC!|&eRfcF)sa>CG1XIAxgOiSm9GnL` zFX)iDPFG?M=t}+{>L+@wS~tz0djE6nBL%|Wg>mW^oVl&KPH8nAr2VFrQ&1uGAUDR8l}Y=U#Y#kMSz%3F?@2oCE_?HFamBane}8op^&f?xDPH^M?1$ zFHb#>ie_t0z?D2PHEm>s5QKO_8t}y*(c|o4)UHkg;l_|qo2xc#PQ~(N(G*k+ZV~jO zlA%^O`L-kZ$A$|-v!<+H{&8(a3uEDw+IB`>_t}Y4-{g-v3{OCizuo{WPgnsMUSIz= z`9lu!0!co^qQk%tjTu;C{d!yRvRu!|7_pqvDE3lH{y;Ozf>46yGN}(IMI}&T`O|?dFR^<{shm&(|M6 z+ano#!T=a&)#iJS_tckRMhCG!WMHm&M>O}Gy^)xU{g8c`KZnSG4!6ky%nXnToq@aI zs=z*EG0ePcL#}N}P%#t@GBEcm*TN|Oi~gxNC;6s-#35z@76tMMMe;MEIeD>sp_2Lr ziQNMrKEZeQti>nTMLdd2^D2CSf{(%Try$%1#%ctA!+LvE8nDo6$zcgkC9tFv>(E+yp}H-F1=$zY2wS; zIvT2lWkxSETvKq~p1QgX-N$|H2(~sK_U5SCWoVF2>|y1ICC~kW{`Wj`;%++Z^v(Z( zf6vAL&uIKV^8bJKa#9KF~C2iw9!wQfJui8Qyz1_|&f( zSZ`(6DOKkKZLIE#U~s?k-&Y>j=6LUI=zjh^l|%YFHK?~8csw<~duFa%^qy}5gnZ@u zw=AE%=+m!#2Hys)nBM(E@PE}Ieknxb@8B=A@jq_o%)d`rJ)?_0*XVTy`ORG;(>L#y zKt035ew8Kr9S-{K^mILQ>OJoIUH9OG;}k?FF)`6lBqXI5@+I|ZG^SZ$q({+G?Nvsm z&b40_Bt4q6tst_&!N(4T{2WY3I2xL$g2ILmCKNmfoNe_Y$gj}S!f|zR+oKWk)14uv zCN}eEHKu7B4)!kj?k+?am^9U_F(QEyGoBmDK z2$k3{cz9XIxOm0Cb<~&l0@`<`;|fTR30ECx+OgadVM?_;SlOtq;XuFx3wlk*!~Qz6 zFsTH^GK2*fY7?b*>E{szq+49YZG%ivoGV?{Dn25D?#r6sLB)cEil;F44}gfjdKL8Y z^sEM@G43WinQ&Q7!v?w!Tt>N)8ke0^ce5zg&HLh+4VcgNs@O1Dl2G(dBu+~V&OtJ> z%06*OUvjfVqq$f0w#6j!jsMpGnD~F|I=48 zlP7g8v!2>8-Z;BuI*~daczCPjAWS+T@g#>mmW)#$Ta0r*)@}p2x`3y2>q+6Ae^n_e z!%QjNn$3c|7-i7Enp6y 
zVk~(lJ9O(gPs}DwSmU5k;?87U<{S!RO_l{=#$Mc-wemom9;*RZWpiT@!b- zOftr8juK~Q=SyuGi^?>H`FN#hOtqx4>{+6!d@80jUowxK!s@89Rb?^!xW~BE+%&71 zn~AR-T~?h6*Yt=?7#H&zHXbDBH5MB_fJ@tN{1-3@kyzGT(z1EFFK+?Ib&6Z*_$b9-rZUck|>>F*3fo$J+wSe#7 zLTbbWc@>{&(|qG-mT9+jKRsG3_R4wlMu$6d?;3rAvD=VJ=1NrvuM1#!C6U!w!z>d< zZ1yg#HSc}oRaa=a3~u3u?|X~5We+#NmhEeD*0wml%(2mP?knSk`B( znZ(9C?RB9{t0@9PlV*Ejo=%@l*NaA)+I7giP%G-m3jOk8e6&)0W@(r2%U#6*px$~3 zOD9fUoq7p)x{qT&WxoZKCG4gT#7;~NokquT)D23~E*s5`&PO*?v|?WmONuQxP7-_< zqaoY#nB!zlnFNv>%@k7lpK+XdvM!=bcV&(vF1y-s5ZPW94;pTHq6Vn2A0#DGjGc5G zF;N&bw?~`e7QX}=<%Sy<@ioohh zUO?JH3`Swt1A>LWk6-Rv+Fv3>xw~>l_2`VpCG!{P< zn_eSS0liwA;$C(23C+$WjaegQ#Z&;FSnlNO+sis!vu}%WyXE!wm)+uy&CMApHn5$q zDFE|nNji^}UXG#duPq{JK)0FA7aWZVW1NXhUFPl+QjIV4DyKIn#~N9?&-G@PtHX}V ztFWfaUhZ;8DeK!;--#5iWd*nD0;bm1ms??^pU#--E~~ZQ(igJZbwimGnN2FbxSiXS z;#KivI!iJ=VI&R9h@^sNOa@vVHD<9-C+96A8dFy$m%2mJ5xn2n{FIQP%P55{{0xrr zd#8n;^&=SN$J?0tJjDsZCmrw`2#-79n+f0IfNv%IOAh!p!V@L?Z(qf~1u+LaNjUC+ z6NC*1JVkig0nZRN9q=85PdVTu;aLZqBAj->bA;cBOcBl!wjA(0;X57hX~Jh5@L9rd za=-~bylU9*Y~AWRSlxc?nU@%Q=DmVrGii9i;`mNN-$NDD_vWd<5-I}zO26v@sxH92 z2rY0Cp7Frr(3ViW2(Q0}D}#Q$BZt}tP#bs*>lRUW9u0n99_1yfo+#4!6`eOv+pviB z=TSR!5l!Pem(cvgqdfW;opn1JDeS#Gyc@L~zYG_igda~aD4xc4T*7XA1wHsG`tb}Y zo+Zt5r1}OO_e~o7+xS&dT}km{sK&eSYve{n@NV44c{>L19vVQTLS1M8&b(aO^ZN_% z{e}4hYNk}YJCTPn%RQVGw;QQL9o6~-sskNM*szd?*FB8Q9gm^qVKjF z%%Sx$w0!~|+H%uEe!uMyrDGEf4SXM7ya*qDfPMH8b^EccLsR~&g5>Ir7dXoEp!!3G zbCAzNbXi`u&ykah^ZtN;^AfJ$VOJgqwEAAYYYEcod+Dx&fyzZ}3A7QsfU7QI>v*7b z3EOhG`aG(`fn7`3u3T^x_FylrDTvpJ8jdf)ji2!Cr)a>>umL~ECcMm@uV9aOuve}Ig-@n6efr+T@LLk@eFurItT zhwg5d@5=5f|LeHje|-+GulNu&-;Ld*?HN~RICwNx<*< z-J(4~VGi>-asfw`vF~McU!ouL#=eb72&qMl)S*s%@Jj<)r4a+tjAPP@ux!Syas_Ue zEx1>%=4F06J}f)%DGBg0y8}YqD4ruvJGkEQ;frSUnd{y?(}AlOuZ zw9mItwcv9v_|`4>?p|;$couv$3qF0p*Sg?ds9c~>=RS`0s%EuOsAGItpTqHkZ7bksgyHM+2)e6~cSkCf463!W(2-$}fs)DRDsC4REl@>NEvwdR0o z9oo67bFOnf&pEb>fQyY|MXhxzBIVPJ_XRY`XB=#si)^1Nvh6Rht+&~-4_%JoqpW+6 z!zYUlhV?~n{~`XUz;G|A)S6cnZhx31y3|Y;92~p_4jxw8rwg63r|2&>z*4`XeU-2N 
zxag}(cI*0z4OOs8s__}Sp7{CZVa2(L<5qhe{|8V@0|W{H000O8M;KN(dYY~`{|^8F z#XbN45dZ)HYHVq4WiM)MX>MgRGA?9nZ0%bOa8%V5zHfJvERc{zgT{*ES_8#aGP`6q ziTw>ME1&^leuOE;mzV5IvammK_bsBOZEB2lXU#O@Fs~x-p_rz=G()_iV(6v z&1MlAMYJ@a!lfVT+^hG_MbFK8?V3@p@wIDOdxLRjUo6@a6T;3eArgs7&Q8%8OGKQ( zh;zld7H2pb5Ub0|W>?8Fn*0bgawX_bt<%fU0))QDA#@BCUpt4h)T7zJPzlun)gf0W z8Rj787OFQV>#r&Wjk-V5VPt@V4xvZ+(1Z|=89IQ&lE=tCgq}PHUz~5Lmc*?R9PhQu z3e843BWMBDI;sPLBp}&r{DXC!YvgM{C|w<+jD*jHG@lvy8mc?vaeTg8CgdvpSe{7b z5g#?lx?Pe(UQouBeM$H(%cY+figwAq?34BT0>igFw?-Gsr&ZdL@2bs=_ zS+4xvCxDpTR%J`IJ5p=x$=j;z$^W-vJxu*}mVM9z0XNJw3Qz+L)RT^X3>l zB>7WUIpA!vY*=3gCT%yo9=jrC9ZX9fWG5&$A;u?%q(vxMQI%O-f{^=Al5~yRNM^{` zfHs&-I27@T8{KCJ|EVGI^v2uy_Wh*@w}HQ3;on7A(^6=hn6?Y6llcy4o+*Z(6N|Fh z>@ZbwXbT$J_a)494*I#Xnfr>t{BP*>3jpOr6eMQuhO=>03PgAM1P_#yD&yy8Y3$Q!I4ZLLM?tzBm2Or_y*gxCVke2<|2dZS7Y{`@Dier@ZIiUM0MjX&gwk#O;U5~>%>8l)R z!mHV5AFjX$vZ#~3#Uqt?Zssq*+&)*b#ghUAEFKgg{vi5oNVZQA6_bf+$QyG~?Nh15 zG~pTdk1m5U0@q;2P-1l4zYjOZ{b_ti`j3p*pkv&BfKp$k)T1MILVcA|Cn)vUNGYKn zCQVOO(tmu!ft~5kj8u^0$&pHOJTUrB8)ml?<_xn`jdF)p-Sr;voZ=|O zHHtIN!39J9QLIyd!;UyY9PxHyj{DG%IF+2(czgR8wg|8I8%`zPO8jH;AEx-)+d%XM zOmp@O_^-$ZWl7X+}uoFtbZ7yOOu}kwu|F)vhWzsxs%ISfx)hFg8kEGhC501@x zeR47rMQWZ_^TEQ$I*?;{Sh*f&7w$H-_nF#hQ~Nnn`+%uEYHA-hwU3(G8B_b1seRbg zo-nnK7~16ewEiQw^2+u?>8PZ4ruZCPET3uVuQWd;X!CM$em?(YN#p9}D}BE8HA|QH zS1oC|sqv;ID_d7JFIlsC?Mfhi*6Lj>7oZ%*?_Quh&_v&*%$Dn7Xg5LK3Uy*0t#4dH zt0Y%Zu8ZXQW8h6gJqgu$3qrR-O+x(*)W1TtHzTyP8KZGW?nqI;j^^luT%c>4%PC#oG@O@$j7t2D5bE_%J%ZNX0OyTRH$d%$x((_Ms80%? zj=U;_ms3}-e9l9*iAKb&LHsWEA;gQZSOnjtqxlx0Ga8eS%hKH)O2i5Ow=CT;QA8+a z2@4_eHWO#DWr4slDH?_h`V_hm76fe^MJq)KpRN_cBJ`DUD~-ztO1X~K(gmU;%M;z* zVr+G!o9J*4?r)I-tAt1(1S3W)EsPBO&NUq0(iM|P;zSi4xvQRMamU%~&Hm=K{>GZ6 zl=EWw+!wg}-w6kwYmMs;*8{FcU9Y*0y54uqb6@Z7bcfszx_7wO*GM(ruX(8E*_sz> zUafhnW~zqslzOi9Eb)BZbCajjbGN78^ApeOo;N+mJQ>fwJ)GC!z0CVn?*{K3-XD4& z@jm5!-ut3=+*?vxUVC}%gS8LWK3e2kRcKd!}xG-J!a_*1cb6 zslTNDi}g$Em)AGfOZ9j|1HW4(dm0zZCtj+%gE+O2Ev1j)X|IQag%6p+UEO8iQ?{l! 
zm=Ft>Th2~E%737~VKL9wglhXHIFx;=w!4rnR>MLtVv5y9 z7~5biJYSRtW0kTzpRmr~RCVs@>dK+>22Ljj^;K)>j-Zn>5qO&N*oDhhpbuNM^~GZ> zbjoU;uO`)Y8yw~puwB_@&u2UH8cDT%m1=umTucO_OUPcZ=71E4?ywb}FUo_l7OOrn z>+C><=gWl(&sQJPudl}qoQgifsy;d7%t)WyA0q6P$6AqM_3t?a-I-G|F7I)4gI~+W zR*Oy_LMgXsoBNbN%7)7=Udq4DLjKtRCoqEP@ae~W3iH1If#B&QB{YpDaCq<_c3Im zcv{Vr+EW@uj^}=)#B;Z>Su|?c8-ZsOIBIgbrQSqjGdvAya+Ddo5FVpekhcc}ovT31 zdY8SiQ{}Ol#i;&i%Eu%7XEcw0J_Fl58w%Uy{aIn2x&uo5aC2B5vt8#EnG;N8?ndUw z>@ZVmr;$0LcFqX>D1*L_K@TzLPR@+Jdq(KT81yuQ-p8O{&!DGggnpPoe~v*9FzBTW z`oxUTk2B~87<7q2_b}+kW`ur(K_6w%dl~df&YTbNo^_#os70@m4|NePTXE_F=EDk9 zI3GTV3g^Sy^6|+2Dd7s|!wYD;XKP`*ye}8#smq*0K0Gy}$h2F`k$D`MBXhf%QoDm$ z7sx&nbi|+^WzZjB(0dtl$BfWR8T1T;{t$!yB!ljp5xR{*KgOUBGw9nGbk~f~r*ZrY`iTWkL;g?=a7vrpzWR=h3)d5EX-5qP*yHi#`4IvySe;t z=Ta8Mc4yw_FU>z^e>>+y|D%7pq-YGdBo&K&bArnw7K&v>%D)u$!9mH`lyX>p8DLQ< ze}ls|S+WK3bTdl6gH@iy-?wNQQc9T|nw&~mfIo@m^jI#SC|L`?HKN;KX@g{+l|ChD zqwqT<{J4(45~7qK!E)*-16J+7C6w}0MxUpxTK1!q7Z{#DFqC(!_@5$reXIaDI5eJrGdkn1t&w5W!XmS!z9Q{zhf-RKv{gVU9YyqQnC7uNFhv)K zbc1xKAzHp)MDLchQ7ND(cLkKPtH|uJXBoZ5)JclxZVf1Vv(2>?-gQVpiheN0kK<1 zgd{!zbJr(!Nm!G>olE&>=O$?J;RMWPwM(W&y2NTX!jsB(ck!FWyQ-Jc7U`)`q>H^W zfX+Y`IbR>lF-z>BL^Y&{OE9ImZEfr7HGaOS#oxAK9pCJ4T<7B(SGTl6j^X)WxR2*o zG7-k(TNOTzW0|n=D_YkzYokoq5}iSb=qiQcx`Cl!C;cdd7eFn1k1b=WAF(ue6cSA< zU;|jqN4v3!V6G5Xe! 
zrj+t64MC>8LA|Xqg{EP~Mdoj4h`X?v(NF*zs2Q5Wt#EgzM>_l5_}8v5nA|H0eL7dP zu%d~Y@;#MeM4t|f^wTSn)~(nG(+B>G#tn`G%z%CZi4dVjWL8M{Dask64G2C%Fk+t?<|>DCsJ&g9NV%<>3!_kF($$ZvNJ^5O ztoxpe=kWuC$pM5*z91{HY4mM@a8l5XPBFk57o;If^b3O;D(MW>7Ky5=d~e09Ykk_I z3)|pl@@SygM@3IG5A2mnVIRyZ0hnJ4sE0002I000pH003%iX>MgNYHVq4 zWi~V}WNd8gy$N7cMYb?py(OKE?f?y8Z!l;a1S3HWglISE$ZhCA*rS3ZBn=6KBzEp) zQP6}=l=j*h6&+`s85wmPW=3atjv^pxLV$oQ2Hb+E1efu~MvYqth}`d-s_HE)MMb4(UF5cUqzY?Uh1D^AmUVe$iF-^^QhX|}W989z>N2*z z@?P+HX3g&R#)8}Tu08LKqwYKJT?uaP$j0~5X`b!xEu-=8zIP{en|S_a>dq`HF2#C+ zFy`kHQgV-hEPgfpreItr8DUM(8VRX_Kmm(uu)sev_10(z_iGtl@MFLL{b)jwEQS>d zI_HO(xATvLzhC99MC0v<6spAG)TuxzAYIQ-iTPj7gBcV526F;y%h#jzb8 zBhTNnK99S+vKTywLnnuA>v{PmT^z&z{rum21n0@rK+)GaNW@z%~nNu+kdra+_*-kyj!n$Zyf2f1we( zthZwB0|z<+<%Apve+1wBRO7r*9jc`o@^AZrdP~vU4}p~1lt-w$Uyxe`xm&f&hcwgc zlY5z7pB^vB^HbADHF+B~*wDh}1DC45Pq=<#Wc?Y@^`EVv^_!vo#7ouB@E5$N+XeZk z>E0)>7N3}!F4XUP4dsYn%Kb+0tx3&XytrX8Jc68b&EFxqexGDjo{R?ohC(i74h)NJ zD*Be`5ssup$mEjuWE@c~t1B>nBY-7cHQu0u1hE%Ds@$GPNPVO9d>}!&9K0 zc?B8akXr;9nrB_0WJ7||oe<2D(HPM+5%_q|XqzeiQ^D1_|ZvszJa+Wxgk-m?v9>+p>%`YRz#?MC0VCE)RmUIhPj zyM5WyFl2DSe`QoW)&rFzylar3ptaLD{Q^ zQh)pN;BU5IwpHVl_#xDDxpMme>KT%;PwCQ8%zJwce}TN3t+eME#a~H*!p|nbKTUj? 
z4E0B(T-k8<^H|LE!0~gc>Qyx3Enhv4aF@b=$)GeDkjlqQ^$%gcR%1Q4VLjX82=V7= zSVi75TB%gI4&t}Bf#{@kQBL&<_K&H~3w6s$mM5|6T2;%$a*FLpr0afKpK7r}oY0bE z#6jWHMF5Xk0=>gfd(Y^kSU`7PGRWp|w)4gtcCHzFHpM<0*eTs9cgkm6@(Ef7P&bXY z)IA3{#%45jya`djZlVD(Nq5Of@xX0)_4~wFyL#B?NVNjA$uJV+_nh(pWx-DXB8@cF zJxb^hVF|lNfP`wpB(JKGu7%FHh7KQ0+cbiYB@7IL8eH;_XO?3t$HatSf?a*9UIi#r zoisjF>sM!05O|U?;~1c24OpSTU_mx4Vf-(egBjIU*0$Un&;qeQUQj+0kmK@!Na^1`*JfHGmd2J6Iy`WIDxegl;DbfB(P*ZhFfN{ zY!LoT`Fx*GLzy?T`MmnRjk#2PFQ!J;=gmI#C-94J$c7)m6H#ddYToRxwm~NOsF(uO z`~>{UM^{iNj7YL07?A`Kf`72)IlX$Z|MHmJWzs0F`YS z1ZDs2a$45I%2uNTCHq{6nBMGV2#Poi$}Ylo=P*D_i0wYRKVPd_rfj2)hv?qiWBfXR zp;PABXWNlu({tr^`F)4nLHk#E3`SIgH+vG$LSw4?4U)|;=1){NCXV$ZKiLGW9h0ho z@eY6vr?kW(A9X_TB@Aw*!8QR$j8NYtW-g|aBby(+0VBd-6|z3^3=_OrMC>ofS;~eO zM7LXn$^K1cB~+lc2HpY|eWnblk>m$QX@H>(QNL$V4`b9?h-%2K|5S`;fIkm?3_)o~ zi(%a+2Cv=kN;N7QAi*vlR8GQ(KY&EZ$FWsF$L<3A{LrKy8o%DPg(O((JZoqO1A&3O znvNIQa6Ev#l`PI;I75zsr!C=T>C!(`HE_Ws!4 z(}}1TGKsbDwq}Sjc|QUwZNoL9V!9e+bvN|*x=_^FF?8lj@@DUQmNLTkpKblM z=l+QDso6~%1Y8VTms1LS3=MDp5hg$7_0bouZ!+zleJZfKKl|^H{oB6^r(lqsp19(A z!SA~ALul-mz`hQ@po4nnW4)seEBRMlxVr9P6qRcr8^r_gQA$Q#IPU?>TihJXm#1r- zE<1J^m2nrz|C?bH_2e*yZm<5iDGFPmX4}6ayqB-lJ#`&(X{i$VJxVlv^Lc@;ryL_ z;2G@v`WLDCJ0B)K|Bx3RLH`<@q?$9-+Dv2#v6r`n@tWG-b21=;Q(ejG`}va^eOA}bWXnbC;I+9tEFO2M6#X&XuQyAwsJEtS=LU@Mo zfiS=|5p>FSTqkGjQ!Rz8O&DM+{)n0xUHiEFj?{GfHe9As9Tnz}bS+hbuPhF0M`_X2 zr1VS!IXIf~vI#40F%14=a4mLze&Dl?N3aQ}+_;#H*K52xk%mU^nbDq3zx_*Zck;@Z zt)R(~=GrC)5Q)xP+fg!kPxmt2w6j?#-f5Q|jrwNRzH&-W5t0%Gzu~(LykYE?^C1v? zNj?}bp-8^o!8tss3KEonhN%X91=J0|lMta9p1;08?l7ZF2nr>rt3ytbHXyL7*jKe^ z7qJTC1i5AtP&9A{jPW5$kzRi*;+@|v-Y#H|Q@zZlT+t2sGwAk)yKlh$T94U^g^7Z0 zujyrbUs%Q(Q@|4gl@1mB*KWi4K|Vu8_jk{VS5Wb{GcBvl^D&IPry&(JI?Dvsm>3Hq z5GEU)An#_pLI)zCMF$JVM}(=8DpE;*d4yroU$tcN8WK4s9ezRuR(=^G^KoHVNoNVJ z)O6$`WCdmRDT-3M;qIx3(o2Pu&B90l`-PCYGIIX(UwgC^G3XBBHxAgv>1|BQYNhx! 
zm(uA&wcJw3I}Yk^Ma(jB#K#AEmv9=mq?Nve1g^lzlFe%_V;UDbp#J51_1}6r;pb(tik&Z8DPWq1!G>wkv`Rg8Z>zmtgz|9*=!%Z>dY&OSm4jr!G|lPY!xcp$Cxp@KKf-ck(*DhJz5) zY@_=WvtT{jkmq|YwUMItrDTGI)dM^`#yREtsjK*4_G@5;zUs#&D*;(EALVxe?aA9v zM{ADK&sc$1`$aCU4n*qfs%7f}(AJaAJP7~0Rm*(~g5yz|>K8P0Ijuae5AAeZ2Yvq_ zUqyG#r-KNWNf}3!lyf*Uy+1#46&;9G{!NG7CBLa!CS%odb5C8nCp>8so~0a~GxIM1 z&yXm1ZVbY+I0~NiKOsC*dcre21kcN~>SL18Kh>m11C3SIP_%$WZ6oU3>V#F6NhwqHLBDfig-}pkTDv{#V8qCvcpFf zTE#1kv{_Xgg{z5us7PmJ_lEM~K&a?X53!|2FYk>+ilgg|)DQ=j_hs7m+Zx7+nLXC5 zVg92{XZP*Fe?9nf$a(3(`BhFT@-s=3id|(=EgiQYdrbAy#S?A}R^7s2DWF?C*YWH| zO#1AW3vJGD`KGM=4%Q5d=)RXUNlxm%KP1u~y#+s@ja zs5te%#d%k?SSVc5a0dL#T{vXgRLcs8miH?2xzS< zS*h^?aDY@6*!wD?7OGDISe-gKytvDsy=LxkZ8@L(DNRXcMtKIQaXUu<-?=#c5`dvfC9bA82$5SIyo z@$v9)LcG)43^SSGk;n=>d*!4%AR?f{ieQdk1gq(p8dMR))KaPpI3VOKj&XYT==1zn z#n@lRlQnx*i^JsVeud7*S#O&B4^xb?uYZqJBs(@uunF;{0xrLW(|O~=AMArA!q*qVgj)M(KF9r zuYHhPuimxRDd#~p7^lVX|D3qMZrT#FNh8kLWR&7T51C6v!epVi8Fx&YP8+=K!vz1h z8lmYQdOnheC9{;L9*=Y4mn$r5LEu7Krn9>m)sR z0s5&N`bYY6=yL#iGe93m$Gk#|4z&%tcQ+_)xRzFRru!#TY3FdNDl$a^_z1aBz41nR|s}sd|#74FFC^2FM?5EFsc%^E z47VXrm0TXO^%1t+C78DKsXHPrW&4?Q+e!ZM+rQSF$;Ot7Pm4C8xWk{XK|$x@P55HO zY8OhbcYsrru7N#whp3=Z?n=S|wZ|yvj^77ONWX_xCq2TnfJvueK>J2!K+xs8yv1II)vPMXKGa7U2KecTcVcO7$;=v)e2khQlqCrU?Z`z_m z&R#!RHEqejWSAjWu-&r$7s!M;Y;|4YVBFUIy{+b)Rg8iEanhqS&Z|EnPBFcF5Q&~p?9eQ*05n><#mJSny z(g+edW~g|0B^oh+k)ec4T&=R!NwD>fPe80NlzC4g%A9G+FTUWS`^$8TaUL6(A8%v> z^DTPPkv)Gycn=rm_C1DakJ3%Dv(u41h8*08Lniwl4Rk>w8i4u|C{o7-zav$kn|hT0 z@n9Zz_LSwg#b1oFVXMkM!-TBP>QbTDfm3|9TD?HEgq;Tb%6j>pR1h zQZ0j5IS{IJP&|FvV;T_eW*D&!-;e0GDj786?xlQ*#88OTyCS5HU1d6i^pF{E!`c10byT`C(!ZeTm^^`BCzEe=&-svC(+ z3`-QY!7d+12~i;Sg&x)}_5vBZLcWtI8Aly=$t{>C6aS?PhktbVr)uc&1ccM5d{Hew zO+#pfVnr-N>2$*nK>;Y<94?Cmp)xEBZKqK(s+OI!sFZ*j?UwIV)hT~3p#J#OG$i95 zy6u7o@da$R85J2IpbcpXxza{6iL& zP@XP8C+m21KdP@@;wUrve9%ASYF0k&-&>yUw^61Ne6wK{&IGWXa#)^xyU{6MnJ1TJ zI!Aq#p?2Bzi}U2upzFKj6Q=rq1Q*LL=%q~i__f4zPZp|<{soChPdTSs$Wlz}a0^<_ zIg5TT8nJlq)Po#E%L7?Qx8>>fq*Dikdl$EPP3zY~+FjN5+KH*f)yvzZG{Kww+OKhs 
ze<-X6U&8a|_IN6_dr@&o_vC&D(ehNiovq*Gcb&RJ$`@B+;$)jVOP^Pqn2Ov4n#$xf zr|#W6c^95^Gyd|ppvo^HGZTDfdlulbZX&8*%Ykz$obv5f=cq13am9Q<&rIdDJoS4a ztHW~x!?XH2A?q7Y5{QhIMthedRd1E9krU9^0OG~V1N{YgcIqm?^4~O0U#tTNlBe5a zS6d^R^gK_2clkkLfMS*Il$U{9ErgV+h{Hno?7ZlHAm2;*Rz9sP`7*cwG}RrY8w=}E zin@voWp`od@c0R0AEykHAWt;ogl8 zJKKBOAPu5NllHSN-y~zt?Ow>Bk@}!W>$wk%<~oA>mraz7{*I13MejXrG}XsqE5(*q z*5a(9j&^Z!g`*q?^#q<~*N6*DWXAhlLC^@L#tV3amLQ zYvotaDlq#2LK>yS7B2?8C?2b_8zQ1=F=|2fLlIGR7&RsPk%*{zjJiJC9}%?R&ms9ilQ_!#bJFyx~kUVut~DpIva_6w2oylwT`({%J0 zIzEAED5J5NGFPNM7H1PfGax0rKK7~Oqkn}eZ}6Z%$Z8SI!l_nJQ2$iV_I#ij9II3- zRV}wVnP5v-EdqEQa+@>bm}a8VY}uO`j4(NofdEuj#xbS>@YxokFKZLFW^gE6 zS;t)_N1MwxWF)lhJoOg%E(<|%!Sw^TC$PHwBOkgG*AOoMXaPDZLxsh}>uXUIeBiy) zsFAEf@gBMog2ft4=`~Kj>maaTf^r9wD!;%1iQ_Ws!!?JvJc(9TP*0(wQ7CQ^bdAmy z{VqcF;lMi=-Jglrp8$FN^&M zLghmCRzSL=%=plo-Ha?P^;gDytg8NLLzI)pRh6B9YCphDtRDp(tP@8X?m#Iq@hlPb z@}9uzZD^vb*uJFOT)69DQkp+e)u7Q}pkP}(W`NN|8fYZu7Jtu8+3(6E)V0CdPp%`>CYP)mU)dO_5uu z5%X>FQJ8D5T8)%DCU_?vCsML<@EkMf@(anz;r9`}#N*-l7HB*1rf7jJ_>zv^AASB= zdvYUMZ}{&|ZKX=t_?!6Q9ok4fj&xah8Iqzdbra;nfeNd(Mx4cLsDY-lGirYwEl#I9 z)$(O7D#*B{cq})x2emhsQZkY5)C*dLT!zj48Dh878gWZu17@$?g%I7^eBc7>_1cr0 zsjW!|5X^KxZn_+WH5$x2$~B+TSx2BWk}nCNarKcHuR+<((N4SOx4ou2qOIt2PdIwdCL)vbixR1 z?j=Vg(oZ^Y0{%@ZVg_@t8Rp;=n1ih_2lv#+*?u}*(fg;QC?9@-eNOFeJSPzSV<0%w zHqhO?0l2`A^XH^+?pCqQ;8t&a6GvbEpM^khN1VO==Z%tvKf= zVV*yJL3>W>@AN}2IkVpOT#nHVELt2OG^sklKdu`!T)7?hSPTw;A)wDV78r&@Zv~Bv z5&YAP%2JkL5N6P398)H=h0cSbjTj@3+0Gx2infiOfrgJV7`^bkp#?^raH?%kMASQp92Mj6_$~ z5l_5qH|ln}fZ}Gmw+UcwO3TgK?>PZev>-P{pBLa!fj{YmMv!&O*_wNpYgCg{ZdD#o zS&x`xTWXUs>{P3_T|+HILnMf_2hG@5n+Y7fgM^Aqf%^{xEyOs^ge z5M`#7T)={aZtnS$cH^N(%}2DSlh;RN1o;++VP8@_a}t=7xSk0;VBgdC!g5sK&g|JD z^w-E$hYV05{l)+ z>c=qm`0m9HPg)1g>o{w{xT*>`Az>9uxIHQXjo6m3gui-|5Y&Ue5t zL(JsYP9{hhlYOMxZ0uv|7N#g-%`7o~WKo6A-V?3x; zgBJb^q4*7|G~7cM6F$dYBWU3u*VE-B17r%krQGl#tkZq^RNM>#03Sr76R0#vehizE z_uJKX0tV_J*zpLq0l=2}xO}e#!HaeHD0rY2j8J}i4CnfOv^`j^hW9U(VnCj2#H$4P 
z#nc*hf@cdani^_sgq(!u1$g$u^DB6!;f)`I#-N1<{|q_3?NC({sHnHinMX~gnSfGjGqDDnX+sO$A<10WQ$SJqFlX*OJ}u_k>3*YfHcZ|9b~+ zz4bdN%U%Q8FyNEsDjs{k}`yCngE?M?_;D8Us7lYm^ZnX)kWi|LOPK zE|Qvh){G}&yg=vmFEr9OP>~d4U!NS<-e5OvA(-IwHfrh`*V|PwhUWPk^Q1!_W&694 z?X-{)ge}KuYswmIXGE}_=(`6&`126{_g(T~DH*RKBxBWSJS)!UgAuX*kV}eptdHXf zAv`C%dnr86eiwgHyM6AgZNES#c_hS1Mmx6b&>Q$Oc!IEnCk=r zrE{#0N3)=M6g=31l$5jHHBj~2^J$~@JzAn{c;`}7n)_WOl@9!~{+)n$PkQH4cN2*TnUF>lek=v?=^FS+#+u#k>(5XgH{bw6Z?6` z;}Uq_hbT`T#2S>p4!~+2Oo`6X{Xsa#M3w_9RoWxs+5C2II&aK@jok=|nHW*qaSqYh z%<>Bu8$Vc5)4~=gj0Hi)1$(Edx3px3>DT%X#l=!ywHh;!W23^ zZNj?>AWjlwAu_Jf>Vzef1VVKklA_aFg9Orq^%vap#q}lH#r1$v?E$6Q14^|Alxh@| z>y`qM8-j3}ad1K=tr~(e-xv`WhO;&TPBm)nMD@2>p>yQ-#au+x=8}_P1twlC_>w^@ zSMH++lSjm1P6!O4wht44A%IKBQrv=U6%ER}XW5$de4}%I-{#SGsX8XFM{Cvkqn0|BiltxDbA z2nZtai?;V0eX_?QxyOA!!GH5$LH_1-4ISV6=_#9%mWuq{?jPNA7 ze7BihqxOI|h4VkiJAH~kO?to9tj|%Uub}G+=#{Yt(2R3CPQ9*ChZghvlOR8?ZnY>C z*W1rQc}LtikdxA9J#H9=_j6NF9$9f~sfKPX8Lq%P-Wj_DUtMYy+ia4Zdb~ZJuUaxP z=sl4-?0!)Hd@rUxPIu2{r&h_$uj3KudM&+Ky;GCb;`t7mPo2BRky-_$5L4&YHHu?Q zTjpvUrY((*teq?S*+;d)LJZdG`YOCu^@3H2HxqK-O#uHMG~hTQ=O+4a=NfdNDpY#q zTs!+j~5O+(j?bgM26PmHS^&=dPmb62(x zyI=`z&l2{nc#z8r)#J4unc{$^|Ef0M(hJ?gbd#1x zYcbl&qht3_moV)GjhULY*w@laff+zwsFM;k*uVZ+Px>cWz1;&=_JXSYIl*cMde6`( zt3Ri`N6nOZnlxF*Jv#;Ye%7NI2b2-X4D=d9(4uHr+W8x9VKZT6_**z6x`Sh`H!uk< ztofLWgW-mjynza4DVy^J-8*}dS@$C6Wes@~1f%Fc!+^n^UY#oSgF!D->wj+MYfxNt z;2Qq03`^Zh3(JOIE@OL)=}h!OWvq?ADG?*~$ga>nR|JFiu-{#)TzI@*;JiRSZ5!LA zTy+FB+>K0VY+~@Vxw1ySxEN6BCpS=9SYj$*PBSVCf5gn~Le}WdmLhRvnUMmN680~nDHenl3sSh;8~{kEi?L-B?`V} zW?|G@zGW%QK1V-+nYw(1#x2s+5W{Ldmeo-{S zYRfE7y&TJ4E?#TeG6{55aWn`$t!c|%M^@9y6#J;XPQN2%2*6ci)C*K%f7DLJ~74TUbg8G;3` z!PD%~dXZ4&V!v{vTU9$!HS_?sV`xvJ>f;HS?oTWM39$_%gtO^{-sK&(PusnRRAv4_x@D@QqMf4rI}~X0n@ZOi zw3%wp%F&zZeuZ~|t#~=be3%M+x;?X=Dlm^R0l3$`m1ekGPcJYH6Xf^o@&TpsN35k2 zZ;_6n*&o=-6<28>54ijze_aIKnRNh^w8B>0=z^$w@nW6tHI_S8({&F@E3K)%57g3S z*i=gIqQ+=~7e|y&U}}*k>wShdZU^uu8-0e|)RX8lG@|D%-1z|Y>g!Fg{yJk=J5VTo 
z14ugEyx8um6E;KbY~$1#IKlGPo{uYACf>$qsnDA$nKofm2?8c1YR-{aItjmsP++GKhDecMXWx087|&c6N4oX!G5o z?gm`eewNT%_94%kr(&^Z{l^a>vkDuw9>-!|-xxwCHA2SZnLN6Jnq3dS?mQI`nE9s+ zxXpc5^d&Rl&?uohTf@@AQmK51R8-}-|A1f?YW}$LzDY>?qZ~8Oa?Lc+ibgTuay?Lb zDA(R)c)V`4{Rrp6k3AsTGGg!iYeXqOJ`oCD0Fr*l!a3SS zKYj^?rd=1x7|dvYZ0h?_Zqgg|S2t!VWS(l)_K+grMg+7Tk5*1T zX^wp%l$KyKfh&25-0SiFZBzKcA8#1joKs_|s~00!WQiqkYUzB06;BoKS-eUeLr%6A z)b+Zv^t6{2+-L2AgYr>l{B39L?!Tfjbo%A}BqQTMi4sO)IWXw&oR0*^7Zt zP2S3wFyK-%EwU$ZR=;f}Eram1%m`@tg4u8mRZOL)Ia{0FbP~#v!c!zY?X|*@@MCzA z8S5Z-Dtd79>3orJLA|^PiIO(OA1+=i#*R>(TEe-%Q*I}1TRx&5dxxs z%ps%6H9Bw#m5C48jdfrLv{)Dsdg=^)_y*{(CjlyVMmAIfZn7WUzSAxS);Hl1Bw+=d zR7lHF5Ufmy2jX3_17DE#c8kwDgS$mJC7!ZqK*@f`2fclruK??&BFel8Ksmv45GhkgjhHUPTySZ8)3k;~(W5vh7cJ3VaR7V(jDdy!3Zx?69vF5@7fv>4?0i!Xqykc2w|_7ETP^9PW$(uZwZW>Bvv!Z3|nW}A7Xf1D$*;BbX=M_jT@{j zS!;`Jh82>cjEQ`YRTsBxOCeHTidFH|eUjZqAeZaCkmJb282>|!>^hA#ex8q)qKo}Z zRHrF+SijrL7tU0>5~Yp>SFNP_7R!t*sjMsVdV1{aHX4ap%yzpZX}O+bt<&spJAqa6 z>5kaVBur$f9_5nYZ>AIoX>oYpMOBhKFTYT#B{ytvz{x+25ri39)V?gal~O$uB9y#u zHRnn+Dbvy#<)?`p;#W7t`B_90l}4r8n(l@A35Wh$G4vQ2H5aR5mlMHHbn3$N5+!NV z9A#7FpC+`O=_N7|#yM)R`Gj6n+7CHDV6VK&gxG}iC{zRu{FW0c$GTBo+S7<)KtENE z#YupSIFH5s2&hYVcyFx4$t*OnGvc*Bc&7fH`6J$Ad0IEY3&mJ9u#_Gy8S*2e9@?kv zjr~-`k{!q5%9$x*9*%GZ4E6_Y?fb``TPUgy({podT~u79iVsk!(uI<8J_5JW0R9om zv8DD-Cz|B=L@+LA*p-Y&xD|U_8uvZ-#iFM|k!Ad5$>SL)_Cg}@{r7Ftg(}chnq`K2rN#;r`X<}WGdP_^Z3Tus;tSoP&> zb*jnv-`%%9RVgNyKaRl39LHTM1?UMD@!J(F%*o$qY_E$L4-()uNPMisHf}T`W3TI7 zsZ0<|`aClfdA+kxweuBGBzg5_qq!G@1e;P=Xyz*#V+@?+Z zMcjQ8V24kA_eSBNCLC=~LgXiv#=)|W2lr6?bo1rp?Ad*-rDcK+fKAhLMdCFXMtD`8 z5)?$u!obK=Jw+nsYQN+-`D2RM=kDEUc6Ni&+nj>ZTDOI&S}TQ8{5*Xza7kE*Q+bC_ zaD81R7L2aUBV8x;8qH8^NQYzlv)&=>AgYh6Rwr!D?7S7&_f`5KPYIHpytws-0NfMH$uD`6E=1Z*Ez-6kavq!R4LN+xRBpdevPgV z)r}ZQU9wg#RjU*e3~w(*ZcM*gV)Xh1;!OE<`T|j4B7TX_Xpu)^O^e*e(~3l|mKJ08 z;BY^#5^-u>gb5@}Lw;RXO+X(Gh2>)muiQ-uN~!Bz$W5eKprBq`T6R|v#1 zEvWa3h(C@M!GW*&TN&!-2V!0rF<%cw6(XBLB|b{raOe_EbJL7! 
zL2OrKJL>9k^dAb&p5Ygk$WmXRRzOkU*vc&bjHdGJyMwvlSCN6akaNUY37##?CLa2% z#rJs)owM4=#ZF^#dQz88*!o0Ow~cHfI(y>Ltd=G|ya)x70~T6u4doJ6b4ZVp-PaKE zjrij3zz^*nElopzfua(tU_;3y>QiM1wRS){Cg4p5f>;|)r0La5T?x2k)?{>e#bJ~| z@s+aqHGDMH+~wX0M(V=`KHHg&+-Hr4=TN>XaudFhxoBTvqCnAtWPK6$dvi-rFxAmV zKMOLuhk4soa{S$@-d*mK3$3$O1eL2Yr{lXdj%h^N7N2mH)p83~sGgDWm7)N(?_7BS z%X_|sw_begw-vtDI|P0u16!lzuzlRSzMIRP<_*PdD~vRRfm3g9vC1281&)DZ-loE~tq(78B)#*|n zd|?D3@3vf1LxvN5C|3w%NcQwmTB06F!rPa<4OZX~{Jo z^G(OIui@pL&kcAhp`-xj0Nq4vh1+ZpOiQ^!zxWyLgF`7RV?sB0NV}M!DWR}9T_E)a zTS12i_4{vZ3lBxy$z#$18k|4W%>1WNN2MLFaX35>xZXu#vYpj=l1}f;qGgL8%%n0z zafTxG1WUHv#I#<4q^^SQ7C;3ni-wa16cI<_S6e@k#;jPM@3SqrWDQrWs5Ra)60#u} zd)W>yq@4d)%ohm-19^m5*N15OFu$;+ZJnYo>Ia5E3`r^i0-#RTHvV$88INIg8fE`& zY#EH-yiDu`iE@ZGjQ}ZxSS2T%;w0NV+lcO#H}517udvrP{A4(zU4B1Te{DAqiiF)*6q^ESU*Y|Q|sB6gkk<=F? z4@aD)x+NlKf`-Yp;I^h`h$Q9b7-BHi#je1$4$%q8)J7jmMH0Z7%mwi_=(&o3DL&8c zLfZ$9W$gU=vpwaY--Crq>V5OJ_?DQ=i#q$&y()fcs;hbju70Nu0!u(bg&m@JO+hd1~!PnPFEwFqVlfn%ssq>6D4)iB4lHlgxzVjfi;19O0^FdYrJ&QkE%YYKAPo z6VuFG#QrQlcP9ElVlA$A8$_98tl_Fz^m1CjACYCgP)f<`B^&JksfQ0M zhvJ}dSq3q8phvoZ2mU%I0utUQYeC_>3;dwHe!MrW&~CYYy1`pwlHA@7N^xGo1dSS~ zVbA33*`#6;lMLZV6*Jz{%~@{K%!IQA!#W|!q!{Pz5qKLoTpjyDsIJFMrWKWX$b_=q z_f|-!5@GNfaF?C&*2)^gLdv$)(j3cj+Uu$p@bFAwi{H=lmX{tkioOTdmoQ>5y&>~u ztAb&MR~b$>(q|B$0)iQ)f@~~RCF_T7yIer?NAh(QYYkEuW9Mj#u?TzcE?Ij(>(9gG zgt23q^T0arXr|uZHUXNBOsR*bd$W2Z*EjS5ofm#^Ym?;y(n-(Kg$G+XlNi^uc_16J z?^=OdY(oT7_dUXXvX2(|ywXm|ksI!|T0TPF0D7Y0rV_n@1EvFS>)e`hoJgb3r(_uV zgY}k1aU0|Z^sX_i|K!COe?=%%XUtS)sP+DPi?d8gwr44|d)|>LWPU!Z4LCP&(w*y~ z+I|NEL~U)+02~#~dgcar&sF`;A1H6WxnC=0;rT4R&iI#YA7e>g7oZQ-kU}JSZD~6` zjf>B-QnU^y7I+qe@x_fL`2uTXAQ0C93J^{9Y1g@Qq(EU zoAa$>OwllY;6ocoSP9-0Ij@Y;nMr6xUqzr#Tnyn3=!x5G3RskBr?3VY*=3kX=>;H| zbb+Kv8@Psy`#EWwk1%G-8~UU!V2gmnL?$t`SV#SdlU;1JDr^ZM^mI>ndtLgvATLCZ z))A!81YhaQXx}yc8nQvnGcn`%vO)Z7Luqn=?LWJ2A2vA zFF)7LXayH=X`9OH*DY;CC%9sXc;_w%iH8OzMf)<@g7eX=q=FBuXz7fhptJ3q;=RGd z#Y>kGJ%AjY(WK9Or|b1rvW$B|j$OL10QDQB%@r<)MhcUGUpIe5Cn(xO2#M 
z^WxBIJ5N#h3X^6Ih&)Fu()Qxh#N{{l;|(Rx2a@>k7s|lcH1k`_f}6~PBkVOW>7&wI zpI~ipzh>mNYnPQu0mi9dc;+h+s^MhC-Qh^tS4>Skf1c}9=u^CpTTdvAZudV%>c;A~ zz>h?lQdKVMq-e!D915ga4` zsjdU6k=+%#C^m5nxp&sWh8_t}Q+zYZe%)-%Uff2yFUujot=Q`qwkboM!-#vzccDqa z46-57C?l7p>Hi+ubQAUo@wt(4#MsOdV3zt=WAhjRkvQ$Pk!=B4HlFm+wlGDh<(uG5 zr`Y=FL@IKL&XiX1ay#yg|EHryu1*S;90P>IFC~__0>{EP zfOC;Tfn&VM-*GM_ytN@|FF)_m4v4l1(HP77FooG;G{$Zf#1tgPUFQw1;nZNTwzPo|1DR zg%+9ES#cdp&R3-u8^?+6uvfq-`2rL#WdqWS8F}-ERZ!aD;Ji0~-YX6;YA`Z9ql>p* zmq_EEd}9o)E;ux|w`Usi+PJ=Ac4ME37cVs26RQdXW)F(qQ3P~hanU6AmnO30N0SlqhDO*}6I>@@Q;JsSxw$NKtq^6O9MR0N zvJkPa=vj!#Riew^5TW8`!%j=~03UD*xx&9PPN#Bf^#E9H-xf7>e+aK#(iUCh_s3r1 zvZOClbM||;qY3D{KEpOfX7f%*HwU0u3wLLlowQ_1P)15`4W;#5Rb}rFVf$`=cM!%) z3vDEyX(BG5pk#)%hL$`81FH>m2DxEbKD$BYbi^jRW=llf`vM z-G!Q(t>V||7Dk%I*{uF>r&!Dn3=8Sqb9Kg ziCqPP5W(yhTB$*Dv!4rNqdVtFKHL$tXzORZz6#Or8DOGy-Lg_ID$kzTCc|Hh&ZX-| z7q~D=@Qn%el+&^^wsGe(%rmZ2Rw$4NBWCEBjw^roMV&Sh0oS^`p;??7Y!votewn2Z z)55Wh?r0wy-~A}`)=#uEJ4P_HrtjcR>Mq+#=X57{COEMs1AUBl6xz@2FSOl5U#Y>i-bt$)qE^@+y4y=CPXX-s~nDyF^J zp*gbs&hgIitgB}&d)0R5ta&x&V@fMoi#LILiGw?f`*F(?w|nl+8RA)!1pR*=-o6l(CRwcT4r|K;%I1?%vqtHQD)9D9w!omP+)EaxP_Lb{9j}F4VPewWrMaqTN>Ky&5W3fD zc6rJ9__el|F55CxRUk)2ihm$2xTPzroL5#;`wXSLDn42n1ro65k3R>pzQ;b9Ml1-e zFQNQwQ7!-<3hsI^?$nkC0&uJ?apGZruWrjwR^`T|uiCZzfz)$^Hbs*zuMER3rM#j* zj-HN|sXa)YNbzG!b43BTh?AJ~F{vfYtI7EfnPD`TW9)L32GQ?($@8R}ZDRGyg$8ma>hk{yGj>-S0D&LU+jrN1<`m6a~a53A|Hj7y*Z=hGIh^E9B{xsE&1 z>v1~!l>u#a1&adv2eV{#*)BFv)?(PbF`pdn2hEJyRNps13&%hf&grW5TYd9c&_JLl50t7<+ljplHrTnYXi<#_X`*`ltk67yJEru?72z{V~ zw4{;wkJUItG47_x?SZ%M6v48qbR&^#BD8hSSqf)rYRrEA&hfkn#++&AkAx2?U~nlB zp9#65Wj4c26HJ-k`j2|ywiGeKGV^lD2Vez40tS8CcdG^@5`xUGz9J1p*iBxsZdw{h z?1JD!Du<}XywlaXCZdJ~xR>bhq%&D$Jpl*wkL8qUZ_iO!E0_Pv#zDHz(ie5OI(419sVJuRRH76sWeQHK56YM zk2zs@&`5IqyJZ+`p_1j(^ezh;)gAb|*z8`IqpAX{X}#zfhVD9&ENjTvmh4ysu)Ri1mm>j#@v4 zMysXTudoWMa0T{%Tw4#AMYrF!9kUH84rQeI_Qr5N72!*s$tOV^eEL$=VLAKLd@PH@ zdt%iSf@{O+_!**^fu8<@s?N4vM^S^#QQg%ZMzUJ7cbT|l|A<=od5iP-a{C$O+pN&{ 
zpIw&u=MR<#A0dpocAL*YHxBU|QS`?rpCwZCu`cId$Lwn=Zn`s{-;jJJ&Sz)dtPrPy zyM^x1`av=-G@jVH4H54(y5HbGKHrAen^3c!niq~;#v;Dy`1E?*MNhwD(6_Q$@I3x; z-s;|YF8JX028P7Re|$2SLM}w-4D0z=N}Jw=Y1c?;>h3B%JWn5=D;X{j zw+Gb;|Gh|+qA=GonXcg5tRY1BSFJNzmNbLZ6sh^VEct{HOJ5b?1IY!wJsL3zhUy^| zosrRvUA^6emMZC=m}*nR4NiM^ zUq#-HF4Ydu7xG+{Ue3Pg412bswakUxu)2CjXgL{FE#=3T-Jlp&{Vr#W>h$5lH{2#m zWZAsO9pd3dZblHT3n{6@Ea6t`!Z(d}WEUgiJ$gDmv>2gLeIPYP979_-vrXmKUi*q94yiQ#nyt%^dTI?dB63oAq8=*l_ySD1{!vSLlDR_nc^-0J$^ z1(7b{h2k{sln8~KRI8B?p(#Ir?F81gLv4@jWLx7`)Y*HcAk3Qz^PofKG9_HcbifQ_dbzb@L*6w5O)qx!5;_n}xtrRbPnz}hYT6$`Qc)yk% z4~*v?6>9_N&E(XE3!=bKE~opBrJq}{QH3~*VI|`c(!4PF+x@`g<`oqmBWnAUP|*mG z9>#ih)jB;r!V}<^FeK><63CNRcOT_% z$;FRo7=*bQ9v3-F-XCA&7(s4nrekcQCP$J|3tB%JY!0=ZP(d|p1kVy36&1OF&C$mV zV+htF6AjR(4^f<5k@5l>0m;Yn+D=@wa#YwPsJ?r!u)a4zS3-*rAm%qJ{LxA`#CxU61~v-&ImSasw` zdSxGzwssy9wldaD49sP`yVXSlJt%^}aRQZhX+>jWzcq=#f)GGY1&l#PK)rO~%m@aQ zY%WMylSzkq^tL#3mAdT8hI3tqFZ5YRkIC5B;~5YYgVHI!1V`Au;E=U)CK)bXY0(NanO-2HwrCL=TBsMUzYt z=rf|nXMi3Jaw==er5`JS>+x)0NZUK4F2FByPr`xF{F3 z<>x$lX;55p0D^F_$F~c@Ns?VUj|#;789%6QF029kSo^{hZ1?oSZFVyU`Sd`SE=i9nm*i0Mgg(FpxC=)}6|?fBjO-d@A_2~FdyR9xuHa)ULPk285J#^C zFuOsCu7C~RS{JeUP?Bu))T69Ocs@2IY!l!J5%&vkf5$1i4zbBHUa_DXb(}EoNPzAD zse?b{H!x$jcNwGDeeeV*itO?mkzcYo5(ZpCTnl#%Ze-l_IM1Kor|tg~Gw~^<-z<-q zPt+Vo3vOjXK{%k1XJRW3E_06^Py|ZSj+sIX`_j|8$JFPJyJGy|Cgua87)V$?+T1147L8A0$aL-p7~dNc=5xD^#g$$F8BVlP|bocL7Ji@yzd1(qiE$^#Ytug<{{yUNrBB( zKH<~94a%*A~7@f0u3o8>9=m_2FD2$Ap8SUJ5T+7;};7^-Y=L1dy6^s#Hb*KQi} zdo%Ug!PBZVat6JV+f7HMz6f*y=^vFCtKw?6S63rO+eLfiaf7-f6U9_<6R4tHlw8Rg zVrb4$BNSXmy+8E2@ISbJ75%E9Y=OWf!WIn9>{_*=qN}cz&_1H@ng^4sV#v6eX%O96 z)F;M3X~hBwNF7;&r%3B`kC!Tp6guAb9C>#qZ|Z-xwzZ0#quzl36Dgk=5RXEoSL0D! 
z%i|voU2{`N64w6VTD6K&pF}+=NRwfHoUyu?+9%?2jtlZA(%kfV2no|QWBC1+6FpkN zs5JfHkktOMS#tZD+AfwRSZ@T;PO>2Z^xet4nq6JYjY)#!r!r&*K6-M38dx;%9EikO zaFG#&+?)y2X{Ul^1=yO{pJ+qrQJ>Nz{VFpx)VC*S7_P>u#(I;Eq{{>Nn{`?U)AEJp zCeTL@R4eGqsfTsNbLJ-ES99hc?2ss2#973~Pf4h1LYGYYN4cxG#2wJir435J&=2P4 z3%nmrSP$E+WDqVA_$x z)5OL7JXrm(D2659rG8xD)`BH1o6;ii&{LsAV<~R6i>?-t2*nK5qaIbh>X)kUW-LUCVX)g-|YCJHT8~b zDV`aLZ7Gzw24s73MP8PSydt)C0zvd_jcmftlY2Jy_bgMBoRxG`NVLU{@x6pNq1SQ9 zKU>F;gKEJ@&JDB5>zvY6UD3G4{U^TZPM9~ciTF8?>{gT}zT-=mUDWJjY)(3sQt&iw zPrij7M>0u`Jtb5S`m^8oWdx5c@?irDW zs^?#s6gK4gY#}}^imNlE&|1y|mCU4r${@;=T`&1*lTlll_VsB*SU=)(DsA9QD&m;z z?w2!?ySKONVIpMP+>$-J5id;pUBf*+%-Ef+N`CwXtlFy@p0qbYOa;XKgROP`)t+8o zkCyl_6%X#1Feq@4ovi@=mgbxgD2U^otxX9V8YHAW!+@}`O9yWPIBic4G$f>ht#Lif z{S~3UvEK5)iwg&bjc#WGxDAww+_1?j2Rz^y^?pHomhS#5cL!YIUR-RumjWJs$-S9E z2SS){08>eiyC7~W-r$-?P6~3u_qbH?g}!5spWG)U>(A3pyavyW{5A($_=FP&re;_7 zTC3TE$KC18x)VF8&L4IjL7s+s4P&+*thvV;S9J$xJNY|f7lO1~TYPW$2ww%l4sh1j zi9DZ2@w>$MUj%Y;P)99Lig4aAE)AN4zi~{P8N4kJJqt7khhHE=ACrBHyMf-Z|Bg%c z4e1(MXpwAnitJe_?xI)VTii7?*+-IG8}hB%=-g7VbGTq{zt_T_Z{DMC<_mA<3~xd= z#*Dj8z?bagZzLxbc?6c9s(*xyYctvozHVe!k)#%f+}yyWHjmKz{R~=-eIwbo^GR-v z=~y#ZGFWW7$z8d5J0|)mE`Hb} zL~|O~t=L^GiH&=mrxrzItRd8TZYulXwTm>egW|JZ?it)_-O?V}PU>FXqCK&ue1_@N z?BD*d+UUrY-Tq}&e#=h(ETc7ZNSDn0W|d-9;ECC}z4b}Jov+n9u^rZZrFCUwO?P$e z(!u40>FUCH)i<;~&bc|Dony6f+m6Ovwne)~7tehbxUeZMhbidiS{mVt;r`G4_0Bhz zv$bcmthmX`M#-Af9A_(ejHu)1(7n-Ij?8DeT%W;ibw9SS_N4X%YbvJ%=Blst!0?>G z$L2mU{jwJw2$Sv?Is$?^b7A&dx0UmWD97Jy9D;cJ=Oycv^_3Dx-@!|pcGp} zlX(Ug>_s$WU+1fy?yQw9`F1yNEnUktA#pRNKaib9O})Urbeq`8 z*&pWWORV7Mmmh&SZKjU7UQ_IXT^t0p&w*a;Ihv`syuAUp+z?$Y-R6E(z_maMuv@9n zZ8knN64k)g8ZkTO#QSbluW4Im3uC*a@ZQ>sMg&O$^Hlf_**hK@HtCH?n-8boGfC_# zv_PLWqIUSl9tZ(dX25hZJxW3u;sM$gq6wx!`LLK*67I39)u598z*Ll+-fnsMt|B1J^-~-- z%!E)_XFdN39jRWwx^Mj$z34ov^EvteX)+k2f+HTT!CKsr`_U{EOXz6tE6H}$tZ1}a zT57&hh@~jr5YyLs$7_ZegbRBM%-XcCU7WqPGZ6L$(|lTnaZ^7?&I$#&ulV(Y5Z6QH zc8`KKCbu)_wBMo_jKW%pTpefwShNiFr-s2(jr||M@3LY1vM&y9vXEt`;xKMuI1ac! 
z;yr^rMvXvm9XzvSp>Jn;&NYv`=IZC6^cnhyY`g{LLg5Pqc8T$N`tBd8kVOqUppU5_-ivF&?gvy!y$6i^jCV z>>a6PmDC~v7w&T0?)I_z@fc9P+t_M&-UUlGb;x&JL=xscd-!ddK=k% zL!yiKC!xuFI-XKMY-jLoS2RbZ;7L8qqh+{8-Y}#yfI(LR*H3NqMA%)`IcoG>ncS@Ei0RY(VV_1^iAWhMewy& zM^^30EI6*%4W70MG@nl*Jk#->!1SKikJ1G(t6|Ap$=tWK z1EIfS-~P5mG7;X9xf3w%`FjWe0RPW`O4g>HM!@jrmUiaOj1Err4yH~nmZr`js?&DM z%;>(88aiK?G8^QB+-wh0X~3)Hr9na}T*_!oP{GvE`0C_wK6W1?E6-piry?KZIQvfW z+`_=$yD^$j?xMn~r<}Da;o6asJa9>=uXbXN1v2;XfjkTFUosTY?!+IWgrHg#up4|DKC-!%4lmkk)W*+$M%+qF-l74v6b<%xHAnxE71s2m~$)%0JakwcT^lXIO@1k)2g<6}4VgN{4mfMDcjK11(O1PI1h zh?+B$qjqc9(~=wXDH}naz#1pgTB^p@mle2-$pmVBam~pe>Z#ZhxAfe9AxDn38;os9 z_)%=uL2v0cs;sr4(eQAQIJ#h|D+dt`m#8MY%QPx0Gs}|s z%f5~m0lX>ld!Zcpz_b}d6>jpXcYB_6=B0XDk8oO*mBF<#_J&=0^hzBQS_ECk4vEp* z-3Uwf99o6XP{9W`ARhAJDpom`O`eh%4a~(a4y>`ySUtIvvSX*;0&a8g8en^;M{K@C zJD7Q`iN%!5v43(0mofX{VuVO<_w|Geb@yDVSNK-0>Jo%rjEd@XvU`3csCx}WPO-uC-|Ge?eOE&6(%BGSpxjr19ZgJJkd zVoigGbXd3<<^#Z?r`K1EceDfMkctr*qf+E~PIKR6k_Gr&%JA;jk1&E%J4rPmQVg78 zs+BPy3qqzy>q@gZgYqW_4ey$E7EE;|Fn)Rl3imHpIgzJfZ&?(J-zHWl^n`q7Udo41 zu?XG+y~!x*fMk(&KFMNpcp^K4;`lL6HBO%#n@yxfloYU0PFIzQfz!2={hbNt`oVb0 zrgN4)eOt#&>q}`y<(rh0N=g5wA@jZ=GcbIQyzYuIGR4SLC0x58eSOw?^gOllK_JLS zm*#~nJW5_{jOBP^44Opfc9)H2myYhvHHIK;w1)e1E+L%jmO(H=PI_2g4XSf!MliAi z98Y^J^f)O~i3H088Ru`G$^ErTNh72ok-r;dXh&!3!?y*68N%q?IcbmxGAnyNFwQh9 zZ$up2HV>ZRMAIN2w`j41s1V0eq*tZvk`Yo$i}@OdG~v-uW2he`&ywCqiVPxs`|9d( z92*$$)=a+n;{U{VeL!?10J*>p!z>n!w@UR!vf-FtT-=ZZ=G|v~C@76%DURK4Nan3P zW^qxci=T#BkMl6wR*Kq!#JLJpKK=rFQI`?uD?B4^T+#28#HkzDar}v{X zQ!$(2z1w?CTO(6yga$?2uj9!Hrg~YN>CWd~dYV7FVs#OC%;MoxJ|`ELeSxTd&n;sj zs3tn-po*P^G3*s2UuJ@AO-oZesL;O3l|8L+MHxdw?GDnXY;2M=KPZuX>*-91d%PV1 zk|8zK#QQl^=@7{(o5%J8yKbEOb4^0UhW@xwIekT1v-SbeA6^*%V8&JF1{(LD2QbH|$!`(Q|Azm2l3x=-1IU2H@{B^Y!iwlo%p0G21z5|0@aR(@Tgh#pL-&U`6YUsCo&M*1tAA$5Go=IB$+b6 z`bnv(j8me4zKyuf2Qt8}BKI~>3(4^J5hXoIgrs;e=_)Pzh3=yLh ze~9twj=L?-s}io|=rleje~7h=L=`0g^0O`wkcz`q9PVxd5h zK_ZfLfIhFX^@EgJAij&**8sn`9{9X$G40CEca@$zOq}(%z1Ccb1FW;cQ=FB4rEHI0 
zAXwOj40y?oN)|BNAta_?cG=F#btBUZaxc1P$+VmP{y3jwb@Af|F}PHPy^`bgKE2Dr zpNk>s#>l{iybIOCD%hSD+EFZcJ?#NT#l};DTGiSMaW35M=EByhHwq1Jr4EMI%^aPo zSHFiBg*Viz*D<@*zWfk~SF}8azcU1}gz}?;@DPos0^9N?603iz3`bYQY5H@beB}|x z6pwNnE3pb+7uLil^*7PY>G}(nKgLT1>BA*C2dR74vIEprZKNXA63}J3C|S z73udcUA*3XY~}6-pGg#NN-2-l=4CKj4xBU{h(5(Yf^FbD@2&HWeGrKh zl#!|FzF%rAoO6=4Knj)mK5Fi1C6G0B8?IryZf6e$P1=GQa4k!c0<%VVS!YC{_~vYP zbixJd>nvAtiFApih}wq8KHNI-sdSnoHWnHIWk5CPVgf@&i2(IyjG<)X(WtZG1#nhG zq_+NFr3;z~xFq zQ8BDMSzNUuTM1+E(Bwp=K4H0h*Ip(~2!it6WNF(ja<4)Jrdfa5e8PQXP>|m5M(Dvw zbdRgWeUb^eg5InGJP#?!H{HTp&(}xSOV>yBm9Q>hh%oHbcBc=?Y{{Z(_*4x@_30yd zm#U-A7hSg*5X4YNpFbhAtW&1cH>DRo?XPAOeJuvXiOnbuS?(_QAhnUg-^&n}LdVXP z1j)_h#u{>x8c-}}N>{u8z>`p7TA6|0Mh{MQ-hK!AF7eLci9jbci^Cm^$@x=yO7qQhlw29?6W3&BFmH=S z0~I^>htH@|o_dz+R)=xE0)8Dn9LUk0Zhp73$IYqL#?H;-zUas`hrlRX6U#bb8RdC{ z#n|(f`9+%Ct+=1=;Y~VV)k5#_L z{?i>oul5P;Vz#|trMhx{Zd@^|n8;$~}1hC{O2~0wWnZvEAGc~p%asZ@Pm&c*iVka!3j!9T+@l@O?4p+S8o2^= zH;YeKl}4<2>KqLyf|P7X??Cyg+HDXkLK}(Oe4ege_PRH8W1`(H+mJ^kSZ0HI$3dSj zAtkiTW{~VB{iH1b(3k#T7RJNV=(e29wqF~J>Ki+kV2ixTDW7A_V|+8bPWT+4#j#}6 z4+Y1>W<%6abt1w9F$fYw=x}7A{a5^F{IDGpSFkpS$r;sg_x(eZo?r7$ks!&@q#f|Q zz-*B`vKi>Ucvz`|@$iL0jc7Shzi}aVx_}HAb37V{8-1c?Lg4?Vae?Zd26iZBi2r!V z#=k?SDDa)FYD+jLm;80>IMXy0#Qn6pI}oKMRbUxlT(YPUPk^HkWQngbI;rokB^F(6 zAkuCr_&w4s<&%}tr?2v5Y7*KaYhlM2AnMiJ?pH7L^(#rob92pB&jT3BINdxahpYNk zx*K06kLI5`M^a?_Q)|{9Wj1o&ykLVR9!@95Vb=+e7WYrCG9NS=CKYVvW0-8iFaZ)C zK__CGT%Y~xYVByl4W+B&426Uua|cu+tp!aR9ipEexHcd5V5=B^dUxDnhn`7>KsQGZ zeQxOzVnjSj%zO=641lV-=9jdu^37Z|#Z{1kx#{_8%}zSIw;=V~e{9&nKFMtuG;erAiWpx9?=e$&vHa;rQpW?0+0-A`+8^+XJD?-lG%@n98P z`mH(>_q#tPNmO>^s6Ty@atLqHCu+a&y0CbF5I2POE2{EE=p?Ij+ScX%c1}r$~Yqc0GPj^K>&aKoxxvd|BQj=(0PXly!!``5!WvmVL%=KH-?kB ztF5V>i<+J&>4@vx}3Vv5Uxm|L2{ljis%ni>cE;3n81+@LL9gXMz9#Ouq>E z0qmUqtC0U2IFdEGQVDQSM++4*^#7Qe}@t?1U{89F?2B%^E9?G{qGXBw6~K2{%-Sc<>%A2RE-TZ2M4sjtC*4Ke*pic z(f}fBBrh*A9E-I{6)-Ro|fk0EEA2gB@rl z{|)|IwNfy&{X-G<|6;%oM2#wSK#W7+`D-Dmu>Xnmd+qp7x}^IEvj1CtKLgL-23vm| 
zMFGMbe}norHkmw@oIKzFz%DR%zp{D!e<19f?QKjIfluBHU4Wea#f7xK&~2|X0s!pB z007P(RDjo_06X5l*!&livZBnqYc0VqTQD1`49u532{S0Qr1IPd?D?0*)N2Bk;I0TiSS z6h!`upnbdlBHnc#n0n2GMgsGNgA4!w zeZ!weQGkEoA5c<2e_-$A`OjR&^U*9_fH{%?I&a)RlmBLae35?u$h+FOSpM1e{4)+g zj==&K(Di`=i|tnhAH@8Dqu^>|^B-XD^Bl0=0b%rkFqFU0QH%QnM#b{CR`5?Cas_)u z2cWwCwejPztUoRTVqpPsS$@G>i~l#wzsdOD;J;f7k+}lx;R4v${q`j0kE1Byd&0lK z|Fa5JuEm5XfyJN!|MwnIaK;}%s-{l1mUf0V|BOHtbNrYK%;hjpd%qUuYTh3RYIc?$ zf1r@F{4=Q_i`4 zhf0%xIbQ|VDPYU`=TQ_u()_=`{WCYkSk3}pU~#wr`Tg2VT(}#j$cy(+R=;KQ3otU~?``Sdq5l6A%8|S2kQ1;_pn#VAt1(~p{EM9bnfKp%7G4=u zTyMa>WxwF>fjj@;TojNs@dwa<(Pm?11cveafBO#q6!-J5z1M=--q7eNODJ z4A+A!}>0m znh^~bkme1TnjwC~XFno(K}I~rWdyoG*hi6JURly8V~61ph!GGHI;MU literal 0 HcmV?d00001 diff --git a/boot/licenses/LICENSE_Ivy b/boot/licenses/LICENSE_Ivy new file mode 100644 index 000000000..a73dda8d7 --- /dev/null +++ b/boot/licenses/LICENSE_Ivy @@ -0,0 +1,258 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +------------------------------------------------------------------------------ +License for JCraft JSch package +------------------------------------------------------------------------------ +Copyright (c) 2002,2003,2004,2005,2006,2007 Atsuhiko Yamanaka, JCraft,Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the distribution. + + 3. The names of the authors may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT, +INC. 
OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +------------------------------------------------------------------------------ +License for jQuery +------------------------------------------------------------------------------ +Copyright (c) 2007 John Resig, http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ \ No newline at end of file diff --git a/boot/licenses/LICENSE_JLine b/boot/licenses/LICENSE_JLine new file mode 100644 index 000000000..1cdc44c21 --- /dev/null +++ b/boot/licenses/LICENSE_JLine @@ -0,0 +1,33 @@ +Copyright (c) 2002-2006, Marc Prud'hommeaux +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + +Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with +the distribution. + +Neither the name of JLine nor the names of its contributors +may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED +AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED +OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/boot/licenses/LICENSE_Scala b/boot/licenses/LICENSE_Scala new file mode 100644 index 000000000..968bcd6e9 --- /dev/null +++ b/boot/licenses/LICENSE_Scala @@ -0,0 +1,35 @@ +SCALA LICENSE + +Copyright (c) 2002-2008 EPFL, Lausanne, unless otherwise specified. 
+All rights reserved. + +This software was developed by the Programming Methods Laboratory of the +Swiss Federal Institute of Technology (EPFL), Lausanne, Switzerland. + +Permission to use, copy, modify, and distribute this software in source +or binary form for any purpose with or without fee is hereby granted, +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of the EPFL nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
\ No newline at end of file diff --git a/boot/src/main/scala/Boot.scala b/boot/src/main/scala/Boot.scala new file mode 100755 index 000000000..2a9eb9cb5 --- /dev/null +++ b/boot/src/main/scala/Boot.scala @@ -0,0 +1,310 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ + package sbt.boot + +// This is the main class for the sbt launcher. Its purpose is to ensure the appropriate +// versions of sbt and scala are downloaded to the projects 'project/boot' directory. +// Then, the downloaded version of sbt is started as usual using the right version of +// scala. + +// Artifact names must be consistent between the main sbt build and this build. + +import java.io.{File, FileFilter} +import java.net.{MalformedURLException, URL, URLClassLoader} + +// contains constants and paths +import BootConfiguration._ +import UpdateTarget.{UpdateScala, UpdateSbt} + +// The exception to use when an error occurs at the launcher level (and not a nested exception). +// This indicates overrides toString because the exception class name is not needed to understand +// the error message. +private class BootException(override val toString: String) extends RuntimeException +// The entry point to the launcher +object Boot +{ + def main(args: Array[String]) + { + System.setProperty("sbt.boot", true.toString) + checkProxy() + try { boot(args) } + catch + { + case b: BootException => errorAndExit(b) + case e => + e.printStackTrace + errorAndExit(e) + } + System.exit(0) + } + private def errorAndExit(e: Throwable) + { + System.out.println("Error during sbt execution: " + e.toString) + System.exit(1) + } + def boot(args: Array[String]) + { + // prompt to create project if it doesn't exist. 
+ // will not return if user declines + (new Paths).checkProject() + val loaderCache = new LoaderCache + if(args.length == 0) + load(args, loaderCache) // interactive mode, which can only use one version of scala for a run + else + runBatch(args.toList, Nil, loaderCache) // batch mode, which can reboot with a different scala version + } + private def runBatch(args: List[String], accumulateReversed: List[String], loaderCache: LoaderCache) + { + def doLoad() = if(!accumulateReversed.isEmpty) load(accumulateReversed.reverse.toArray, loaderCache) + args match + { + case Nil => doLoad() + case RebootCommand :: tail => + doLoad() + runBatch(tail, Nil, loaderCache) + case action :: tail if action.trim.startsWith(CrossBuildPrefix) => + doLoad() + load(Array(action), loaderCache) // call main with the single cross-build argument, preserving the '+' prefix, with which it knows what to do + runBatch(tail, Nil, loaderCache) + case notReload :: tail => runBatch(tail, notReload :: accumulateReversed, loaderCache) + } + } + /** Loads the project in the current working directory using the version of scala and sbt + * declared in the build. 
The class loader used prevents the Scala and Ivy classes used by + * this loader from being seen by the loaded sbt/project.*/ + private def load(args: Array[String], loaderCache: LoaderCache) + { + val loader = (new Setup(loaderCache)).loader() + val sbtMain = Class.forName(SbtMainClass, true, loader) + val exitCode = run(sbtMain, args) + if(exitCode == NormalExitCode) + () + else if(exitCode == RebootExitCode) + load(args, loaderCache) + else + System.exit(exitCode) + } + private def run(sbtMain: Class[_], args: Array[String]): Int = + { + try { + // Versions newer than 0.3.8 enter through the run method, which does not call System.exit + val runMethod = sbtMain.getMethod(MainMethodName, classOf[Array[String]]) + runMethod.invoke(null, Array(args) : _*).asInstanceOf[Int] + } catch { + case e: NoSuchMethodException => runOld(sbtMain, args) + } + } + /** The entry point for version 0.3.8 was the main method. */ + private def runOld(sbtMain: Class[_], args: Array[String]): Int = + { + val runMethod = sbtMain.getMethod(OldMainMethodName, classOf[Array[String]]) + runMethod.invoke(null, Array(args) : _*) + NormalExitCode + } + + private def checkProxy() + { + import ProxyProperties._ + val httpProxy = System.getenv(HttpProxyEnv) + if(isDefined(httpProxy) && !isPropertyDefined(ProxyHost) && !isPropertyDefined(ProxyPort)) + { + try + { + val proxy = new URL(httpProxy) + setProperty(ProxyHost, proxy.getHost) + val port = proxy.getPort + if(port >= 0) + System.setProperty(ProxyPort, port.toString) + copyEnv(HttpProxyUser, ProxyUser) + copyEnv(HttpProxyPassword, ProxyPassword) + } + catch + { + case e: MalformedURLException => + System.out.println("Warning: could not parse http_proxy setting: " + e.toString) + } + } + } + private def copyEnv(envKey: String, sysKey: String) { setProperty(sysKey, System.getenv(envKey)) } + private def setProperty(key: String, value: String) { if(value != null) System.setProperty(key, value) } + private def isPropertyDefined(k: String) = 
isDefined(System.getProperty(k)) + private def isDefined(s: String) = s != null && !s.isEmpty +} + +private class Paths extends NotNull +{ + protected final val ProjectDirectory = new File(ProjectDirectoryName) + protected final val BootDirectory = new File(ProjectDirectory, BootDirectoryName) + protected final val PropertiesFile = new File(ProjectDirectory, BuildPropertiesName) + + final def checkProject() + { + if(!ProjectDirectory.exists) + { + val line = SimpleReader.readLine("Project does not exist, create new project? (y/N/s) : ") + if(Setup.isYes(line)) + ProjectProperties(PropertiesFile, true) + else if(Setup.isScratch(line)) + ProjectProperties.scratch(PropertiesFile) + else + System.exit(1) + } + } +} +/** A class to handle setting up the properties and classpath of the project +* before it is loaded. */ +private class Setup(loaderCache: LoaderCache) extends Paths +{ + /** Checks that the requested version of sbt and scala have been downloaded. + * It performs a simple check that the appropriate directories exist. It uses Ivy + * to resolve and retrieve any necessary libraries. 
The classpath to use is returned.*/ + final def loader(): ClassLoader = loader(Nil) + private final def loader(forcePrompt: Seq[String]): ClassLoader = + { + val (normalScalaVersion, sbtVersion) = ProjectProperties.forcePrompt(PropertiesFile, forcePrompt : _*) + val scalaVersion = crossScalaVersion(normalScalaVersion) + loaderCache( scalaVersion, sbtVersion ) match + { + case Some(existingLoader) => + { + setScalaVersion(scalaVersion) + existingLoader + } + case None => + { + getLoader(scalaVersion, sbtVersion) match + { + case Left(retry) => loader(retry) + case Right(classLoader) => classLoader + } + } + } + } + private def crossScalaVersion(simpleScalaVersion: String): String = + { + val crossScalaVersion = System.getProperty(SbtScalaVersionKey) + if(crossScalaVersion == null || crossScalaVersion.isEmpty) + simpleScalaVersion + else + crossScalaVersion + } + private def getLoader(scalaVersion: String, sbtVersion: String): Either[Seq[String], ClassLoader] = + { + import Setup.{failIfMissing,isYes,needsUpdate} + import ProjectProperties.{ScalaVersionKey, SbtVersionKey} + + val baseDirectory = new File(BootDirectory, baseDirectoryName(scalaVersion)) + System.setProperty(ScalaHomeProperty, baseDirectory.getAbsolutePath) + val scalaDirectory = new File(baseDirectory, ScalaDirectoryName) + val sbtDirectory = new File(baseDirectory, sbtDirectoryName(sbtVersion)) + + val classLoader = createLoader(scalaDirectory, sbtDirectory) + val updateTargets = needsUpdate("", classLoader, TestLoadScalaClasses, UpdateScala) ::: needsUpdate(sbtVersion, classLoader, TestLoadSbtClasses, UpdateSbt) + if(updateTargets.isEmpty) // avoid loading Ivy related classes if there is nothing to update + success(classLoader, scalaVersion, sbtVersion) + else + { + Update(baseDirectory, sbtVersion, scalaVersion, updateTargets: _*) + + val classLoader = createLoader(scalaDirectory, sbtDirectory) + val sbtFailed = failIfMissing(classLoader, TestLoadSbtClasses, "sbt " + sbtVersion, SbtVersionKey) + val 
scalaFailed = failIfMissing(classLoader, TestLoadScalaClasses, "Scala " + scalaVersion, ScalaVersionKey) + + (scalaFailed +++ sbtFailed) match + { + case Success => success(classLoader, scalaVersion, sbtVersion) + case f: Failure => + val noRetrieveMessage = "Could not retrieve " + f.label + "." + val getNewVersions = SimpleReader.readLine(noRetrieveMessage + " Select different version? (y/N) : ") + if(isYes(getNewVersions)) + Left(f.keys) + else + throw new BootException(noRetrieveMessage) + } + } + } + private def success(classLoader: ClassLoader, scalaVersion: String, sbtVersion: String) = + { + setScalaVersion(scalaVersion) + loaderCache( scalaVersion, sbtVersion ) = classLoader + Right(classLoader) + } + private def createLoader(dirs: File*) = + { + val classpath = Setup.getJars(dirs : _*) + new URLClassLoader(classpath.toArray, new BootFilteredLoader) + } + private def setScalaVersion(scalaVersion: String) { System.setProperty(SbtScalaVersionKey, scalaVersion) } +} +private final class LoaderCache +{ + private[this] var cachedSbtVersion: Option[String] = None + private[this] val loaderMap = new scala.collection.mutable.HashMap[String, ClassLoader] + def apply(scalaVersion: String, sbtVersion: String): Option[ClassLoader] = + { + cachedSbtVersion flatMap { currentSbtVersion => + if(sbtVersion == currentSbtVersion) + loaderMap.get(scalaVersion) + else + None + } + } + def update(scalaVersion: String, sbtVersion: String, loader: ClassLoader) + { + for(currentSbtVersion <- cachedSbtVersion) + { + if(sbtVersion != currentSbtVersion) + loaderMap.clear() + } + cachedSbtVersion = Some(sbtVersion) + loaderMap(scalaVersion) = loader + } +} +private object Setup +{ + private def failIfMissing(loader: ClassLoader, classes: Iterable[String], label: String, key: String) = checkTarget(loader, classes, Success, new Failure(label, List(key))) + private def needsUpdate(version: String, loader: ClassLoader, classes: Iterable[String], target: UpdateTarget.Value) = + 
if(version.endsWith("-SNAPSHOT")) + target :: Nil + else + checkTarget(loader, classes, Nil, target :: Nil) + private def checkTarget[T](loader: ClassLoader, classes: Iterable[String], ifSuccess: => T, ifFailure: => T): T = + { + try + { + for(c <- classes) + Class.forName(c, false, loader) + ifSuccess + } + catch { case e: ClassNotFoundException => ifFailure } + } + def isYes(so: Option[String]) = isValue("y", "yes")(so) + def isScratch(so: Option[String]) = isValue("s", "scratch")(so) + def isValue(values: String*)(so: Option[String]) = + so match + { + case Some(s) => values.contains(s.toLowerCase) + case None => false + } + private def getJars(directories: File*) = directories.flatMap(file => wrapNull(file.listFiles(JarFilter))).map(_.toURI.toURL) + private def wrapNull(a: Array[File]): Array[File] = if(a == null) Array() else a +} + + +private object JarFilter extends FileFilter +{ + def accept(file: File) = !file.isDirectory && file.getName.endsWith(".jar") +} + +private sealed trait Checked extends NotNull { def +++(o: Checked): Checked } +private final object Success extends Checked { def +++(o: Checked) = o } +private final class Failure(val label: String, val keys: List[String]) extends Checked +{ + def +++(o: Checked) = + o match + { + case Success => this + case f: Failure => new Failure(label + " and " + f.label, keys ::: f.keys) + } +} \ No newline at end of file diff --git a/boot/src/main/scala/BootConfiguration.scala b/boot/src/main/scala/BootConfiguration.scala new file mode 100644 index 000000000..9fcf74802 --- /dev/null +++ b/boot/src/main/scala/BootConfiguration.scala @@ -0,0 +1,96 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ + package sbt.boot + +// project/boot/ [BootDirectoryName] +// scala-/ [baseDirectoryName] +// lib/ [ScalaDirectoryName] +// sbt-/ [sbtDirectoryName] +// +// see also ProjectProperties for the set of constants that apply to the build.properties file in a project +private object BootConfiguration +{ + 
val SbtMainClass = "sbt.Main" + val MainMethodName = "run" + val OldMainMethodName = "main" + + // these are the module identifiers to resolve/retrieve + val ScalaOrg = "org.scala-lang" + val SbtOrg = "sbt" + val CompilerModuleName = "scala-compiler" + val LibraryModuleName = "scala-library" + val SbtModuleName = "simple-build-tool" + + /** The Ivy conflict manager to use for updating.*/ + val ConflictManagerName = "strict" + /** The name of the local Ivy repository, which is used when compiling sbt from source.*/ + val LocalIvyName = "local" + /** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/ + val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact].[ext]" + /** The artifact pattern used for the local Ivy repository.*/ + def LocalArtifactPattern = LocalPattern + /** The Ivy pattern used for the local Ivy repository.*/ + def LocalIvyPattern = LocalPattern + + /** The name of the property declaring the version of scala to use to build the project when not cross-building.*/ + val ScalaVersion = "scala.version" + /** The name of the property declaring the version of sbt to use to build the project.*/ + val SbtVersion = "sbt.version" + /** The name of the system property containing the version of scala actually used to build a project. + * This might be different from the ScalaVersion property when cross-building.*/ + val SbtScalaVersionKey = "sbt.scala.version" + /** The class name prefix used to hide the Scala classes used by this loader from sbt + * and the project definition*/ + val ScalaPackage = "scala." + /** The class name prefix used to hide the Ivy classes used by this loader from sbt + * and the project definition*/ + val IvyPackage = "org.apache.ivy." 
+ /** The loader will check that these classes can be loaded and will assume that their presence indicates + * sbt and its dependencies have been downloaded.*/ + val TestLoadSbtClasses = "sbt.Main" :: "org.apache.ivy.Ivy" :: Nil + /** The loader will check that these classes can be loaded and will assume that their presence indicates + * the Scala compiler and library have been downloaded.*/ + val TestLoadScalaClasses = "scala.ScalaObject" :: "scala.tools.nsc.GenericRunnerCommand" :: Nil + + val ProjectDirectoryName = "project" + val BootDirectoryName = "boot" + val BuildPropertiesName ="build.properties" + val ScalaHomeProperty = "scala.home" + val UpdateLogName = "update.log" + + val CrossBuildPrefix = "+" + val RebootCommand = "reboot" + val RebootExitCode = -1 + val NormalExitCode = 0 + val DefaultIvyConfiguration = "default" + + /** The base URL to use to resolve sbt for download. */ + val sbtRootBase = "http://simple-build-tool.googlecode.com/svn/artifacts/" + /** The name of the directory within the boot directory to retrieve scala to. */ + val ScalaDirectoryName = "lib" + /** The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory + * containing all jars for the requested version of scala. */ + val scalaRetrievePattern = ScalaDirectoryName + "/[artifact].[ext]" + + /** The Ivy pattern to use for retrieving sbt and its dependencies. It is relative to the directory + * containing all jars for the requested version of scala. 
*/ + def sbtRetrievePattern(sbtVersion: String) = sbtDirectoryName(sbtVersion) + "/[artifact]-[revision].[ext]" + /** The Ivy pattern to use for resolving sbt and its dependencies from the Google code project.*/ + def sbtResolverPattern(scalaVersion: String) = sbtRootBase + "[revision]/[type]s/[artifact].[ext]" + /** The name of the directory to retrieve sbt and its dependencies to.*/ + def sbtDirectoryName(sbtVersion: String) = SbtOrg + "-" + sbtVersion + /** The name of the directory in the boot directory to put all jars for the given version of scala in.*/ + def baseDirectoryName(scalaVersion: String) = "scala-" + scalaVersion +} +private object ProxyProperties +{ + val HttpProxyEnv = "http_proxy" + val HttpProxyUser = "http_proxy_user" + val HttpProxyPassword = "http_proxy_pass" + + val ProxyHost = "http.proxyHost" + val ProxyPort = "http.proxyPort" + val ProxyUser = "http.proxyUser" + val ProxyPassword = "http.proxyPassword" +} \ No newline at end of file diff --git a/boot/src/main/scala/FilteredLoader.scala b/boot/src/main/scala/FilteredLoader.scala new file mode 100644 index 000000000..b0ee20873 --- /dev/null +++ b/boot/src/main/scala/FilteredLoader.scala @@ -0,0 +1,20 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.boot + +import BootConfiguration._ + +/** A custom class loader to ensure the main part of sbt doesn't load any Scala or +* Ivy classes from the jar containing the loader. 
*/ +private[boot] final class BootFilteredLoader extends ClassLoader with NotNull +{ + @throws(classOf[ClassNotFoundException]) + override final def loadClass(className: String, resolve: Boolean): Class[_] = + { + if(className.startsWith(ScalaPackage) || className.startsWith(IvyPackage)) + throw new ClassNotFoundException(className) + else + super.loadClass(className, resolve) + } +} \ No newline at end of file diff --git a/boot/src/main/scala/ProjectProperties.scala b/boot/src/main/scala/ProjectProperties.scala new file mode 100644 index 000000000..d63da6c4b --- /dev/null +++ b/boot/src/main/scala/ProjectProperties.scala @@ -0,0 +1,147 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.boot + +/* +Project does not exist, create new project? [y/N] y +Name: +Organization []: +Version [1.0]: +Scala version [2.7.5]: +sbt version [0.5]: +*/ +import java.io.File +/** Constants related to reading/writing the build.properties file in a project. +* See BootConfiguration for general constants used by the loader. 
*/ +private object ProjectProperties +{ + /** The properties key for storing the name of the project.*/ + val NameKey = "project.name" + /** The properties key for storing the organization of the project.*/ + val OrganizationKey = "project.organization" + /** The properties key for storing the version of the project.*/ + val VersionKey = "project.version" + /** The properties key for storing the version of Scala used with the project.*/ + val ScalaVersionKey = "scala.version" + /** The properties key for storing the version of sbt used to build the project.*/ + val SbtVersionKey = "sbt.version" + /** The properties key to communicate to the main component of sbt that the project + * should be initialized after being loaded, typically by creating a default directory structure.*/ + val InitializeProjectKey = "project.initialize" + /** The properties key that configures the project to be flattened a bit for use by quick throwaway projects.*/ + val ScratchKey = "project.scratch" + + /** The label used when prompting for the name of the user's project.*/ + val NameLabel = "Name" + /** The label used when prompting for the organization of the user's project.*/ + val OrganizationLabel = "Organization" + /** The label used when prompting for the version of the user's project.*/ + val VersionLabel = "Version" + /** The label used when prompting for the version of Scala to use for the user's project.*/ + val ScalaVersionLabel = "Scala version" + /** The label used when prompting for the version of sbt to use for the user's project.*/ + val SbtVersionLabel = "sbt version" + + /** The default organization of the new user project when the user doesn't explicitly specify one when prompted.*/ + val DefaultOrganization = "" + /** The default version of the new user project when the user doesn't explicitly specify a version when prompted.*/ + val DefaultVersion = "1.0" + /** The default version of sbt when the user doesn't explicitly specify a version when prompted.*/ + val 
DefaultSbtVersion = "0.5" + /** The default version of Scala when the user doesn't explicitly specify a version when prompted.*/ + val DefaultScalaVersion = "2.7.5" + + // sets up the project properties for a throwaway project (flattens src and lib to the root project directory) + def scratch(file: File) + { + withProperties(file) { properties => + for( (key, _, default, _) <- propertyDefinitions(false)) + properties(key) = default.getOrElse("scratch") + properties(ScratchKey) = true.toString + } + } + // returns (scala version, sbt version) + def apply(file: File, setInitializeProject: Boolean): (String, String) = applyImpl(file, setInitializeProject, Nil) + def forcePrompt(file: File, propertyKeys: String*) = applyImpl(file, false, propertyKeys) + private def applyImpl(file: File, setInitializeProject: Boolean, propertyKeys: Iterable[String]): (String, String) = + { + val organizationOptional = file.exists + withProperties(file) { properties => + properties -= propertyKeys + + prompt(properties, organizationOptional) + if(setInitializeProject) + properties(InitializeProjectKey) = true.toString + } + } + // (key, label, defaultValue, promptRequired) + private def propertyDefinitions(organizationOptional: Boolean) = + (NameKey, NameLabel, None, true) :: + (OrganizationKey, OrganizationLabel, Some(DefaultOrganization), !organizationOptional) :: + (VersionKey, VersionLabel, Some(DefaultVersion), true) :: + (ScalaVersionKey, ScalaVersionLabel, Some(DefaultScalaVersion), true) :: + (SbtVersionKey, SbtVersionLabel, Some(DefaultSbtVersion), true) :: + Nil + private def prompt(fill: ProjectProperties, organizationOptional: Boolean) + { + for( (key, label, default, promptRequired) <- propertyDefinitions(organizationOptional)) + { + val value = fill(key) + if(value == null && promptRequired) + fill(key) = readLine(label, default) + } + } + private def withProperties(file: File)(f: ProjectProperties => Unit) = + { + val properties = new ProjectProperties(file) + 
f(properties) + properties.save + (properties(ScalaVersionKey), properties(SbtVersionKey)) + } + private def readLine(label: String, default: Option[String]): String = + { + val prompt = + default match + { + case Some(d) => "%s [%s]: ".format(label, d) + case None => "%s: ".format(label) + } + SimpleReader.readLine(prompt) orElse default match + { + case Some(line) => line + case None => throw new BootException("Project not loaded: " + label + " not specified.") + } + } +} + +import java.io.{FileInputStream, FileOutputStream} +import java.util.Properties +private class ProjectProperties(file: File) extends NotNull +{ + private[this] var modified = false + private[this] val properties = new Properties + if(file.exists) + { + val in = new FileInputStream(file) + try { properties.load(in) } finally { in.close() } + } + + def update(key: String, value: String) + { + modified = true + properties.setProperty(key, value) + } + def apply(key: String) = properties.getProperty(key) + def save() + { + if(modified) + { + file.getParentFile.mkdirs() + val out = new FileOutputStream(file) + try { properties.store(out, "Project Properties") } finally { out.close() } + modified = false + } + } + def -= (keys: Iterable[String]) { for(key <- keys) properties.remove(key) } +} \ No newline at end of file diff --git a/boot/src/main/scala/SimpleReader.scala b/boot/src/main/scala/SimpleReader.scala new file mode 100644 index 000000000..3e9359625 --- /dev/null +++ b/boot/src/main/scala/SimpleReader.scala @@ -0,0 +1,26 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.boot + +import jline.ConsoleReader +object SimpleReader extends NotNull +{ + protected[this] val reader = + { + val cr = new ConsoleReader + cr.setBellEnabled(false) + cr + } + def readLine(prompt: String) = + reader.readLine(prompt) match + { + case null => None + case x => + val trimmed = x.trim + if(trimmed.isEmpty) + None + else + Some(trimmed) + } +} \ No newline at end of file diff --git 
a/boot/src/main/scala/Update.scala b/boot/src/main/scala/Update.scala new file mode 100644 index 000000000..d5bfa8c0e --- /dev/null +++ b/boot/src/main/scala/Update.scala @@ -0,0 +1,248 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.boot + +import java.io.{File, FileWriter, PrintWriter, Writer} + +import org.apache.ivy.{core, plugins, util} +import core.LogOptions +import core.cache.DefaultRepositoryCacheManager +import core.event.EventManager +import core.module.id.ModuleRevisionId +import core.module.descriptor.{Configuration, DefaultDependencyDescriptor, DefaultModuleDescriptor, ModuleDescriptor} +import core.report.ResolveReport +import core.resolve.{ResolveEngine, ResolveOptions} +import core.retrieve.{RetrieveEngine, RetrieveOptions} +import core.sort.SortEngine +import core.settings.IvySettings +import plugins.resolver.{ChainResolver, FileSystemResolver, IBiblioResolver, URLResolver} +import util.{DefaultMessageLogger, Message} + +import BootConfiguration._ + +private[boot] object UpdateTarget extends Enumeration +{ + val UpdateScala, UpdateSbt = Value +} +import UpdateTarget.{UpdateSbt, UpdateScala} + +object Update +{ + /** Use Ivy to resolve and retrieve the specified 'targets' for the given versions.*/ + def apply(bootDirectory: File, sbtVersion: String, scalaVersion: String, targets: UpdateTarget.Value*) = + synchronized // synchronized because Ivy is not thread-safe + { + val up = new Update(bootDirectory, sbtVersion, scalaVersion, targets : _*) + up.update() + } +} +/** Ensures that the Scala and sbt jars exist for the given versions or else downloads them.*/ +private final class Update(bootDirectory: File, sbtVersion: String, scalaVersion: String, targets: UpdateTarget.Value*) +{ + private def logFile = new File(bootDirectory, UpdateLogName) + /** A Writer to use to write the full logging information to a file for debugging. 
**/ + lazy val logWriter = + { + bootDirectory.mkdirs + new PrintWriter(new FileWriter(logFile)) + } + + /** The main entry point of this class for use by the Update module. It runs Ivy */ + private def update() + { + Message.setDefaultLogger(new SbtIvyLogger(logWriter)) + try { targets.foreach(update) } // runs update on each module separately + catch + { + case e: Exception => + e.printStackTrace(logWriter) + log(e.toString) + println(" (see " + logFile + " for complete log)") + } + finally { logWriter.close() } + } + /** Runs update for the specified target (updates either the scala or sbt jars for building the project) */ + private def update(target: UpdateTarget.Value) + { + import Configuration.Visibility.PUBLIC + // the actual module id here is not that important + val moduleID = new DefaultModuleDescriptor(createID(SbtOrg, "boot", "1.0"), "release", null, false) + moduleID.setLastModified(System.currentTimeMillis) + moduleID.addConfiguration(new Configuration(DefaultIvyConfiguration, PUBLIC, "", Array(), true, null)) + // add dependencies based on which target needs updating + target match + { + case UpdateScala => + addDependency(moduleID, ScalaOrg, CompilerModuleName, scalaVersion, "default") + addDependency(moduleID, ScalaOrg, LibraryModuleName, scalaVersion, "default") + update(moduleID, target, false) + case UpdateSbt => + addDependency(moduleID, SbtOrg, SbtModuleName, sbtVersion, scalaVersion) + try { update(moduleID, target, false) } + catch + { + // unfortunately, there is not a more specific exception thrown when a configuration does not exist, + // so we always retry after cleaning the ivy file for this version of sbt on in case it is a newer version + // of Scala than when this version of sbt was initially published + case e: RuntimeException => + update(moduleID, target, true) + } + } + } + /** Runs the resolve and retrieve for the given moduleID, which has had its dependencies added already. 
*/ + private def update(moduleID: DefaultModuleDescriptor, target: UpdateTarget.Value, cleanExisting: Boolean) + { + val eventManager = new EventManager + val settings = new IvySettings + addResolvers(settings, scalaVersion, target) + settings.setDefaultConflictManager(settings.getConflictManager(ConflictManagerName)) + settings.setBaseDir(bootDirectory) + if(cleanExisting) + { + val sbtID = createID(SbtOrg, SbtModuleName, sbtVersion) + onDefaultRepositoryCacheManager(settings) { cache => + val ivyFile = cache.getIvyFileInCache(sbtID) + ivyFile.delete() + val original = new File(ivyFile.getParentFile, ivyFile.getName + ".original") + original.delete() + } + } + resolve(settings, eventManager, moduleID) + retrieve(settings, eventManager, moduleID, target) + } + private def createID(organization: String, name: String, revision: String) = + ModuleRevisionId.newInstance(organization, name, revision) + /** Adds the given dependency to the default configuration of 'moduleID'. */ + private def addDependency(moduleID: DefaultModuleDescriptor, organization: String, name: String, revision: String, conf: String) + { + val dep = new DefaultDependencyDescriptor(moduleID, createID(organization, name, revision), false, false, true) + dep.addDependencyConfiguration(DefaultIvyConfiguration, conf) + moduleID.addDependency(dep) + } + private def resolve(settings: IvySettings, eventManager: EventManager, module: ModuleDescriptor) + { + val resolveOptions = new ResolveOptions + // this reduces the substantial logging done by Ivy, including the progress dots when downloading artifacts + resolveOptions.setLog(LogOptions.LOG_DOWNLOAD_ONLY) + val resolveEngine = new ResolveEngine(settings, eventManager, new SortEngine(settings)) + val resolveReport = resolveEngine.resolve(module, resolveOptions) + if(resolveReport.hasError) + { + logExceptions(resolveReport) + println(Set(resolveReport.getAllProblemMessages.toArray: _*).mkString(System.getProperty("line.separator"))) + throw new 
BootException("Error retrieving required libraries") + } + } + /** Exceptions are logged to the update log file. */ + private def logExceptions(report: ResolveReport) + { + for(unresolved <- report.getUnresolvedDependencies) + { + val problem = unresolved.getProblem + if(problem != null) + problem.printStackTrace(logWriter) + } + } + /** Retrieves resolved dependencies using the given target to determine the location to retrieve to. */ + private def retrieve(settings: IvySettings, eventManager: EventManager, module: ModuleDescriptor, target: UpdateTarget.Value) + { + val retrieveOptions = new RetrieveOptions + val retrieveEngine = new RetrieveEngine(settings, eventManager) + val pattern = + target match + { + // see BuildConfiguration + case UpdateSbt => sbtRetrievePattern(sbtVersion) + case UpdateScala => scalaRetrievePattern + } + retrieveEngine.retrieve(module.getModuleRevisionId, pattern, retrieveOptions); + } + /** Add the scala tools repositories and a URL resolver to download sbt from the Google code project.*/ + private def addResolvers(settings: IvySettings, scalaVersion: String, target: UpdateTarget.Value) + { + val newDefault = new ChainResolver + newDefault.setName("redefined-public") + newDefault.add(localResolver(settings.getDefaultIvyUserDir.getAbsolutePath)) + newDefault.add(mavenLocal) + target match + { + case UpdateSbt => + newDefault.add(sbtResolver(scalaVersion)) + newDefault.add(mavenMainResolver) + case UpdateScala => + newDefault.add(mavenResolver("Scala-Tools Maven2 Repository", "http://scala-tools.org/repo-releases")) + newDefault.add(mavenResolver("Scala-Tools Maven2 Snapshots Repository", "http://scala-tools.org/repo-snapshots")) + } + onDefaultRepositoryCacheManager(settings)(_.setUseOrigin(true)) + settings.addResolver(newDefault) + settings.setDefaultResolver(newDefault.getName) + } + private def onDefaultRepositoryCacheManager(settings: IvySettings)(f: DefaultRepositoryCacheManager => Unit) + { + 
settings.getDefaultRepositoryCacheManager match + { + case manager: DefaultRepositoryCacheManager => f(manager) + case _ => () + } + } + /** Uses the pattern defined in BuildConfiguration to download sbt from Google code.*/ + private def sbtResolver(scalaVersion: String) = + { + val pattern = sbtResolverPattern(scalaVersion) + val resolver = new URLResolver + resolver.setName("Sbt Repository") + resolver.addIvyPattern(pattern) + resolver.addArtifactPattern(pattern) + resolver + } + private def mavenLocal = mavenResolver("Maven2 Local", "file://" + System.getProperty("user.home") + "/.m2/repository/") + /** Creates a maven-style resolver.*/ + private def mavenResolver(name: String, root: String) = + { + val resolver = defaultMavenResolver(name) + resolver.setRoot(root) + resolver + } + /** Creates a resolver for Maven Central.*/ + private def mavenMainResolver = defaultMavenResolver("Maven Central") + /** Creates a maven-style resolver with the default root.*/ + private def defaultMavenResolver(name: String) = + { + val resolver = new IBiblioResolver + resolver.setName(name) + resolver.setM2compatible(true) + resolver + } + private def localResolver(ivyUserDirectory: String) = + { + val localIvyRoot = ivyUserDirectory + "/local" + val artifactPattern = localIvyRoot + "/" + LocalArtifactPattern + val ivyPattern = localIvyRoot + "/" + LocalIvyPattern + val resolver = new FileSystemResolver + resolver.setName(LocalIvyName) + resolver.addIvyPattern(ivyPattern) + resolver.addArtifactPattern(artifactPattern) + resolver + } + /** Logs the given message to a file and to the console. */ + private def log(msg: String) = + { + try { logWriter.println(msg) } + catch { case e: Exception => System.err.println("Error writing to update log file: " + e.toString) } + println(msg) + } +} +/** A custom logger for Ivy to ignore the messages about not finding classes +* intentionally filtered using proguard. 
*/ +private final class SbtIvyLogger(logWriter: PrintWriter) extends DefaultMessageLogger(Message.MSG_INFO) with NotNull +{ + private val ignorePrefix = "impossible to define" + override def log(msg: String, level: Int) + { + logWriter.println(msg) + if(level <= getLevel && msg != null && !msg.startsWith(ignorePrefix)) + System.out.println(msg) + } + override def rawlog(msg: String, level: Int) { log(msg, level) } +} \ No newline at end of file diff --git a/install/extract/src/main/scala/Main.scala b/install/extract/src/main/scala/Main.scala new file mode 100644 index 000000000..e8c701767 --- /dev/null +++ b/install/extract/src/main/scala/Main.scala @@ -0,0 +1,108 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.extract + +import java.io.{File, InputStream} +import java.util.zip.{ZipEntry, ZipFile} + +object Main +{ + lazy val log: Logger = new ConsoleLogger + + def main(args: Array[String]) + { + if(args.contains("debug")) + log.setLevel(Level.Debug) + val result = OpenResource.zipFile.ioOption(FileUtilities.classLocationFile[Install], "processing", log)(process) + for(msg <- result) + { + log.error(msg) + System.exit(1) + } + } + private[this] val packedGzip = ".pack.gz" + private def isArchive(name: String) = name.endsWith(".gz") || name.endsWith(".zip") + private def process(zip: ZipFile) = + { + val installEntry = zip.getEntry("install") + if(installEntry == null) + Some("Install commands not found.") + else + { + val jarAndZip = wrap.Wrappers.toList(zip.entries).filter(entry => isArchive(entry.getName)).partition(_.getName.endsWith(packedGzip)) + jarAndZip match + { + case (Nil, _)=> Some("sbt loader not found.") + case (_, Nil) => Some("Project to extract and build not found.") + case (loaderEntry :: _, projectEntry :: _) => extractAndRun(zip, loaderEntry, projectEntry, installEntry) + } + } + } + private def extractAndRun(zip: ZipFile, loaderEntry: ZipEntry, projectEntry: ZipEntry, installEntry: ZipEntry) = + { + val 
zipResource = OpenResource.zipEntry(zip) + + import FileUtilities.{gunzip, readString, transfer, unzip, writeStream} + val directory = new File(".", trimExtension(projectEntry.getName, ".zip")) + assume(!directory.exists, "Could not extract project: directory " + projectEntry.getName + " exists.") + + val loaderBaseName = trimExtension(loaderEntry.getName, packedGzip) + val loaderFile = new File(directory, loaderBaseName + ".jar") + val tempLoaderFile = new File(directory, loaderBaseName + ".pack") + + def extractLoader() = + { + implicit def fileToPath(f: File) = Path.fromFile(f) + val result = + writeStream(tempLoaderFile, log) { out => zipResource.ioOption(loaderEntry, "reading", log)(gunzip(_, out, log)) } orElse + Pack.unpack(tempLoaderFile, loaderFile, log) + FileUtilities.clean(tempLoaderFile :: Nil, true, log) + result.toLeft(loaderFile) + } + + Control.thread(zipResource.io(installEntry, "reading", log)(readString(_, log))) { installString => + Control.thread(parseInstall(installString)) { install => + zipResource.io(projectEntry, "reading", log)(unzip(_, Path.fromFile(directory), log)).left.toOption orElse + Control.thread(extractLoader()) { loaderFile => + run(loaderFile, directory, install) + } + } + } + } + private def parseInstall(installString: String): Either[String, Install] = + { + installString.split(separator) match + { + case Array(allOptions, allActions) => + val options = allOptions.split("""\n""").toList + val actions = allActions.split("""\n""").toList + Right( Install(options, actions) ) + case _ => Left("Invalid install script (no separator found)") + } + } + private def filterEmpty(list: List[String]) = list.filter(!_.isEmpty) + private def run(loader: File, project: File, install: Install) = + { + val command = "java" :: "-cp" :: loader.getAbsolutePath :: filterEmpty(install.options) ::: "sbt.boot.Boot" :: filterEmpty(install.actions) + val builder = new java.lang.ProcessBuilder(command.toArray : _*) + builder.directory(project) + 
//import BasicIO.{processFully, transferFully} + //val standardIO = new ProcessIO(transferFully(System.in, _, 0), processFully(System.out.println), processFully(System.err.println)) + val exitCode = ( Process(builder) ! )//( Process(builder) run standardIO).exitValue() + if(exitCode == 0) + None + else + Some("sbt exited with nonzero exit code: " + exitCode) + } + private def trimExtension(name: String, ext: String) = + { + if(name.endsWith(ext)) + name.substring(0, name.length - ext.length) + else + name + } + // keep this in sync with sbt.extract.SelfExtractingProject + private def separator = "====================" +} +private final case class Install(options: List[String], actions: List[String]) extends NotNull \ No newline at end of file diff --git a/install/plugin/src/main/scala/SelfExtractingProject.scala b/install/plugin/src/main/scala/SelfExtractingProject.scala new file mode 100644 index 000000000..516228676 --- /dev/null +++ b/install/plugin/src/main/scala/SelfExtractingProject.scala @@ -0,0 +1,79 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.extract + +import java.io.{ByteArrayOutputStream, File} +import FileUtilities.{classLocationFile, clean, createTemporaryDirectory, download, transferAndClose, unzip, write, zip} +import SelfExtractingProject.{flat, separator} + +trait SelfExtractingProject extends Project +{ + protected def createSelfExtractingJar(actions: List[String], jvmOptions: List[String], projectZip: Path, outputJar: Path): Option[String] = + { + def jarForClass(name: String) = Path.fromFile(classLocationFile(Class.forName(name))) + val loaderJar = jarForClass("sbt.boot.Boot") + val bytes = new ByteArrayOutputStream + transferAndClose(this.getClass.getResourceAsStream("extract.location"), bytes, log) orElse + { + val extractorJarLocation = bytes.toString("UTF-8") + createSelfExtractingJar(actions, jvmOptions, projectZip, loaderJar, extractorJarLocation, outputJar) + } + } + private def 
createSelfExtractingJar(actions: List[String], jvmOptions: List[String], projectZip: Path, loaderJar: Path, extractorJarLocation: String, outputJar: Path): Option[String] = + { + val installContents = jvmOptions.mkString("\n") + separator + actions.mkString("\n") + withTemporaryDirectory(log) { tmp => + val tmpPath = Path.fromFile(tmp) + write(new File(tmp, "install"), installContents, log) orElse + unzip(this.getClass.getResource(extractorJarLocation), tmpPath, log).left.toOption orElse + Control.thread(compressLoader(loaderJar)) { compressedLoader => + zip( (tmpPath ##) :: flat(projectZip) :: compressedLoader :: Nil, outputJar, true, log) + } + } + } + private def withTemporaryDirectory(log: Logger)(f: File => Option[String]) = + { + Control.thread(createTemporaryDirectory(log)) { dir => + Control.trapUnitAndFinally("", log) + { f(dir) } + { clean(Path.fromFile(dir) :: Nil, true, log) } + } + } + private def compressLoader(loaderJar: Path): Either[String, Path] = + { + val jarName = loaderJar.asFile.getName + val dotIndex = jarName.lastIndexOf('.') + val baseName = + if(dotIndex > 0) jarName.substring(0, dotIndex) + else jarName + val packedName = baseName + ".pack" + val packed = outputPath / packedName + val packedAndGzip = (outputPath ##) / (packedName + ".gz") + val result = + Pack.pack(loaderJar, packed, log) orElse + FileUtilities.gzip(packed, packedAndGzip, log) + result.toLeft(packedAndGzip) + } +} +trait BasicSelfExtractingProject extends BasicScalaProject with SelfExtractingProject +{ + def installActions: List[String] = update.name :: `package`.name :: Nil + def jvmOptions: List[String] = Nil + def selfExtractingJar: Path = outputPath / (artifactBaseName + "-setup.jar") + + lazy val installer = installerAction + def installerAction = task { createSelfExtractingJar(installActions, jvmOptions, packageProjectZip, selfExtractingJar) } dependsOn packageProject +} + +object SelfExtractingProject +{ + // keep this in sync with sbt.extract.Main.separator + def 
separator = "====================" + private def flat(p: Path) = + p match + { + case rp: RelativePath => (rp.parentPath ##) / rp.component + case _ => p + } +} \ No newline at end of file diff --git a/install/project/build.properties b/install/project/build.properties new file mode 100644 index 000000000..b2811cd24 --- /dev/null +++ b/install/project/build.properties @@ -0,0 +1,7 @@ +#Project properties +#Thu Jun 25 14:31:30 EDT 2009 +project.organization=org.scala-tools.sbt +project.name=extract +sbt.version=0.4.7-p13 +project.version=0.1 +scala.version=2.7.5 diff --git a/install/project/build/InstallProject.scala b/install/project/build/InstallProject.scala new file mode 100644 index 000000000..b31acfd53 --- /dev/null +++ b/install/project/build/InstallProject.scala @@ -0,0 +1,90 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +import sbt._ + +import java.io.File +import java.nio.charset.Charset + +class InstallerProject(info: ProjectInfo) extends ParentProject(info) +{ + lazy val installExtractor: InstallExtractProject = project("extract", "Installer Extractor", new InstallExtractProject(_, installPlugin)) + lazy val installPlugin: InstallPluginProject = project("plugin", "Installer Plugin", new InstallPluginProject(_, installExtractor), installExtractor) +} +protected class InstallPluginProject(info: ProjectInfo, extract: => InstallExtractProject) extends PluginProject(info) +{ + private lazy val extractProject = extract + override def crossScalaVersions = Set("2.7.2", "2.7.3", "2.7.4", "2.7.5") + override def mainResources = super.mainResources +++ extractProject.outputJar +++ extractLocation + + def extractLocation = (outputPath ##) / "extract.location" + lazy val writeProperties = task { FileUtilities.write(extractLocation.asFile, extractProject.outputJar.relativePath, Charset.forName("UTF-8"), log) } + override def packageAction = super.packageAction dependsOn(extractProject.proguard, writeProperties) + + val publishTo = "Scala Tools 
Nexus" at "http://nexus.scala-tools.org/content/repositories/releases/" + Credentials(Path.fromFile(System.getProperty("user.home")) / ".ivy2" / ".credentials", log) +} +protected class InstallExtractProject(info: ProjectInfo, pluginProject: => InstallPluginProject) extends DefaultProject(info) +{ + override def publishLocalAction = publishAction + override def publishAction = task {None} + override def unmanagedClasspath = super.unmanagedClasspath +++ Path.lazyPathFinder(Path.fromFile(FileUtilities.sbtJar) :: Nil) + private lazy val plugin = pluginProject + val mainClassName = "sbt.extract.Main" + val proguardConfigurationPath: Path = outputPath / "proguard.pro" + val toolsConfig = config("tools") + val defaultConfig = Configurations.Default + val proguardJar = "net.sf.proguard" % "proguard" % "4.3" % "tools->default" + def rootProjectDirectory = rootProject.info.projectPath + def outputJar = (plugin.outputPath ##) / defaultJarName + + /******** Proguard *******/ + lazy val proguard = proguardTask dependsOn(`package`, writeProguardConfiguration) + lazy val writeProguardConfiguration = writeProguardConfigurationTask dependsOn `package` + + private def proguardTask = + task + { + FileUtilities.clean(outputJar :: Nil, log) + val proguardClasspathString = Path.makeString(managedClasspath(toolsConfig).get) + val configFile = proguardConfigurationPath.asFile.getAbsolutePath + val exitValue = Process("java", List("-Xmx256M", "-cp", proguardClasspathString, "proguard.ProGuard", "@" + configFile)) ! 
log + if(exitValue == 0) None else Some("Proguard failed with nonzero exit code (" + exitValue + ")") + } + private def writeProguardConfigurationTask = + task + { + // the template for the proguard configuration file + val outTemplate = """ + |-dontoptimize + |-dontobfuscate + |-dontnote + |-dontwarn + |-libraryjars %s + |%s + |-outjars %s + |-ignorewarnings + |-keep public class %s { + | public static void main(java.lang.String[]); + |}""" + + val defaultJar = jarPath.absolutePath + log.debug("proguard configuration using main jar " + defaultJar) + val externalDependencies = (mainCompileConditional.analysis.allExternals).map(_.getAbsoluteFile).filter(_.getName.endsWith(".jar")) + debugJars("external dependencies", externalDependencies) + // partition jars from the external jar dependencies of this project by whether they are located in the project directory + // if they are, they are specified with -injars, otherwise they are specified with -libraryjars + val (externalJars, libraryJars) = externalDependencies.toList.partition{jar => Path.relativize(rootProjectDirectory, jar).isDefined} + debugJars("library jars", libraryJars) + val sbtJarString = FileUtilities.sbtJar.getAbsolutePath + "(!META-INF/**,!licenses/**,LICENSE,NOTICE,!*.xml)" + val externalJarStrings = externalJars.map( _ + "(!META-INF/**,!*.properties)") + // exclude properties files and manifests from scala-library jar + val inJars = (defaultJar :: externalJarStrings).map("-injars " + _).mkString("\n") + + val proguardConfiguration = outTemplate.stripMargin.format(libraryJars.mkString(File.pathSeparator), inJars, outputJar.absolutePath, mainClassName) + log.debug("Proguard configuration written to " + proguardConfigurationPath) + FileUtilities.write(proguardConfigurationPath.asFile, proguardConfiguration, log) + } + private def debugJars[T](label: String, jars: Iterable[T]): Unit = + log.debug("proguard configuration " + label + ": \n\t" + jars.mkString("\n\t")) +} \ No newline at end of file diff 
--git a/licenses/LICENSE_Apache b/licenses/LICENSE_Apache new file mode 100644 index 000000000..d9a10c0d8 --- /dev/null +++ b/licenses/LICENSE_Apache @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/licenses/LICENSE_Scala b/licenses/LICENSE_Scala new file mode 100644 index 000000000..968bcd6e9 --- /dev/null +++ b/licenses/LICENSE_Scala @@ -0,0 +1,35 @@ +SCALA LICENSE + +Copyright (c) 2002-2008 EPFL, Lausanne, unless otherwise specified. +All rights reserved. + +This software was developed by the Programming Methods Laboratory of the +Swiss Federal Institute of Technology (EPFL), Lausanne, Switzerland. + +Permission to use, copy, modify, and distribute this software in source +or binary form for any purpose with or without fee is hereby granted, +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of the EPFL nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
\ No newline at end of file diff --git a/licenses/LICENSE_ScalaCheck b/licenses/LICENSE_ScalaCheck new file mode 100644 index 000000000..d8ad3a10d --- /dev/null +++ b/licenses/LICENSE_ScalaCheck @@ -0,0 +1,32 @@ +ScalaCheck LICENSE + +Copyright (c) 2007, Rickard Nilsson +All rights reserved. + +Permission to use, copy, modify, and distribute this software in source +or binary form for any purpose with or without fee is hereby granted, +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of the author nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. 
\ No newline at end of file diff --git a/licenses/LICENSE_sbt b/licenses/LICENSE_sbt new file mode 100644 index 000000000..1b82fe7ca --- /dev/null +++ b/licenses/LICENSE_sbt @@ -0,0 +1,25 @@ +Copyright (c) 2008 Mark Harrah, David MacIver +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/licenses/LICENSE_specs b/licenses/LICENSE_specs new file mode 100644 index 000000000..81af36785 --- /dev/null +++ b/licenses/LICENSE_specs @@ -0,0 +1,23 @@ +Copyright (c) 2007-2008 Eric Torreborre + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +Neither the name of specs nor the names of its contributors may be +used to endorse or promote products derived from this software without +specific prior written permission. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/project/build.properties b/project/build.properties new file mode 100644 index 000000000..3938597ef --- /dev/null +++ b/project/build.properties @@ -0,0 +1,7 @@ +#Project properties +#Thu Jun 25 20:59:30 EDT 2009 +project.organization=sbt +project.name=Simple Build Tool Parent +sbt.version=0.4.6 +project.version=0.5 +scala.version=2.7.2 diff --git a/project/build/src/CrossCompileProject.scala b/project/build/src/CrossCompileProject.scala new file mode 100644 index 000000000..a302aa80a --- /dev/null +++ b/project/build/src/CrossCompileProject.scala @@ -0,0 +1,207 @@ +import sbt._ + +import java.io.File +import scala.xml.NodeSeq + +/** Support for compiling sbt across multiple versions of Scala. The scala compiler is run in a +* separate JVM and no partial compilation is done.*/ +abstract class CrossCompileProject extends BasicScalaProject +{ + /** Used for 2.8.0-SNAPSHOT*/ + val scalaToolsSnapshots = "Scala Tools Snapshots" at "http://scala-tools.org/repo-snapshots" + + /* The base configuration names for the versions of Scala*/ + private val version2_7_2 = "2.7.2" + private val version2_7_3 = "2.7.3" + private val version2_7_4 = "2.7.4" + private val version2_7_5 = "2.7.5" + private val version2_8_0 = "2.8.0.r18093-b20090623200909" + private val base = "base" + + /* The configurations for the versions of Scala.*/ + private val conf_2_7_2 = config(version2_7_2) + private val conf_2_7_3 = config(version2_7_3) + private val conf_2_7_4 = config(version2_7_4) + private val conf_2_7_5 = config(version2_7_5) + private val conf_2_8_0 = config(version2_8_0) + private val conf_base = config(base) + // the list of all configurations cross-compile supports + private val allConfigurations = conf_2_7_2 :: conf_2_7_3 :: conf_2_7_4 :: conf_2_7_5 :: conf_2_8_0 :: Nil + // the list of configurations to actually build against + private val buildConfigurations = conf_2_7_2 :: conf_2_7_3 :: conf_2_7_4 :: conf_2_7_5 :: Nil//allConfigurations not 
currently used because of issues with 2.8.0 + // the configuration to use for normal development (when cross-building is not done) + private def developmentVersion = buildConfigurations.first + + /* Methods to derive the configuration name from the base name 'v'.*/ + private def optional(v: Configuration) = config("optional-" + v.toString) + private def scalac(v: Configuration) = config("scalac-" + v.toString) + private def sbt(v: Configuration) = config("sbt_" + v.toString) + private def depConf(v: Configuration) = v.toString + "->default" + + // =========== Cross-compilation across scala versions =========== + + // The dependencies that should go in each configuration are: + // base Required dependencies that are the same across all scala versions. + // Required dependencies to use with Scala + // optional-base Optional dependencies that are the same for all scala versions + // optional- Optional dependencies to use with Scala + // compile Used for normal development, it should extend a specific and optional- + // scalac- The scala compiler for Scala + // There should be a jar publication for each version of scala. The artifact should be named sbt_. 
+ override def ivyXML = + ( + + + { variableConfigurations } + + + + + { publications } + + + + + + + + + { variableDependencies(conf_2_7_2, /*ScalaTest*/"0.9.3", /*Specs*/"1.4.0", false) } + { variableDependencies(conf_2_7_3, /*ScalaTest*/"0.9.4", /*Specs*/"1.4.3", true) } + { variableDependencies(conf_2_7_4, /*ScalaTest*/"0.9.5", /*Specs*/"1.4.3", true) } + { variableDependencies(conf_2_7_5, /*ScalaTest*/"0.9.5", /*Specs*/"1.4.3", true) } + { variableDependencies(conf_2_8_0, /*ScalaTest*/"0.9.5", /*Specs*/"1.4.3", true) } + ) + + /** Creates a publication (an 'artifact' element) for each Scala version */ + private def publications: NodeSeq = + { + for(conf <- buildConfigurations) yield + + } + /** Creates the main, optional, and scalac configurations for each Scala version*/ + private def variableConfigurations: NodeSeq = + { + buildConfigurations flatMap + { conf => + scalaComment(conf) ++ + ( + + ) + } + } + /** Defines the dependencies for the given version of Scala, ScalaTest, and Specs. If uniformTestOrg is true, + * the 'org.scala-tools.testing' organization is used. Otherwise, 'org.' is prefixed to the module name. 
*/ + private def variableDependencies(scalaVersion: Configuration, scalaTestVersion: String, specsVersion: String, uniformTestOrg: Boolean) = + { + if(buildConfigurations.contains(scalaVersion)) + { + scalaComment(scalaVersion) ++ + { + if(scalaVersion eq conf_2_8_0) + Nil + else + { + testDependency("scalatest", scalaTestVersion, uniformTestOrg, scalaVersion) ++ + testDependency("specs", specsVersion, uniformTestOrg, scalaVersion) ++ + testDependency("scalacheck", "1.5", false, scalaVersion) + } + } ++ + scalaDependency("scala-compiler", scalaVersion) ++ scalaDependency("scala-library", scalaVersion) ++ + { + if(scalaVersion == conf_2_8_0) + + else + NodeSeq.Empty + } + } + else + Nil + } + private def scalaDependency(name: String, scalaVersion: Configuration) = + + + /** Creates a comment containing the version of Scala*/ + private def scalaComment(scalaVersion: Configuration) = scala.xml.Comment("Scala " + scalaVersion) + /** Creates a dependency element for a test. See 'testOrg' for a description of uniformTestOrg.*/ + + private def testDependency(name: String, version: String, uniformTestOrg: Boolean, baseConf: Configuration) = + + + /** Returns the organization for the given test library. If uniform is true, + * the 'org.scala-tools.testing' organization is used. Otherwise, 'org.' is prefixed to the module name.*/ + private def testOrg(name: String, uniform: Boolean) = + if(uniform) "org.scala-tools.testing" + else "org." 
+ name + + /** Disable filtering Scala jars from dependency management, because we need them and are putting them + * in custom configurations and are using them in a separate process than sbt runs in.*/ + override def filterScalaJars = false + + /** The lib directory is now only for building using the 'build' script.*/ + override def unmanagedClasspath = path("ignore_lib_directory") + /** When cross-compiling, replace mainCompilePath with the classes directory for the version being compiled.*/ + override def fullUnmanagedClasspath(config: Configuration) = + if( (Configurations.Default :: Configurations.defaultMavenConfigurations) contains config) + super.fullUnmanagedClasspath(config) + else + classesPath(config) +++ mainResourcesPath + + // include the optional- dependencies as well as the ones common across all scala versions + def optionalClasspath(version: Configuration) = fullClasspath(optional(version)) +++ super.optionalClasspath + + private val CompilerMainClass = "scala.tools.nsc.Main" + // use a publish configuration that publishes the 'base' + all configurations (base is required because + // the configurations extend it) + private val conf = new DefaultPublishConfiguration("local", "release") + { + override def configurations: Option[Iterable[Configuration]] = Some(config(base) :: buildConfigurations) + } + // the actions for cross-version packaging and publishing + lazy val crossPackage = buildConfigurations.map(packageForScala) + lazy val crossDeliverLocal = deliverTask(conf, updateOptions) dependsOn(crossPackage : _*) + lazy val crossPublishLocal = publishTask(conf, updateOptions) dependsOn(crossDeliverLocal) + // Creates a task that produces a packaged sbt compiled against Scala scalaVersion. 
+ // The jar is named 'sbt_<scalaVersion>-<version>.jar' + private def packageForScala(scalaVersion: Configuration) = + { + val classes = classesPath(scalaVersion) ** "*" + val jarName = crossJarName(scalaVersion) + val packageActionName = crossActionName("package", scalaVersion) + val compileAction = compileForScala(scalaVersion) named(crossActionName("compile", scalaVersion)) + packageTask(classes +++ mainResources, outputPath, jarName, packageOptions) dependsOn(compileAction) named(packageActionName) + } + private def crossActionName(base: String, scalaVersion: Configuration) = base + " [ " + scalaVersion.toString + " ] " + private def crossJarName(scalaVersion: Configuration) = sbt(scalaVersion) + "-" + version.toString + ".jar" + // This creates a task that compiles sbt against the given version of scala. Classes are put in classes-<version>. + private def compileForScala(version: Configuration)= + task + { + val classes = classesPath(version) + val toClean = (outputPath / crossJarName(version)) +++ (classes ** "*") + val setupResult = + FileUtilities.clean(toClean.get, true, log) orElse + FileUtilities.createDirectory(classes, log) + for(err <- setupResult) log.error(err) + // the classpath containing the scalac compiler + val compilerClasspath = concatPaths(fullClasspath(scalac(version))) + + // The libraries to compile sbt against + val classpath = fullClasspath(version) +++ optionalClasspath(version) + val sources: List[String] = pathListStrings(mainSources) + val compilerOptions = List("-cp", concatPaths(classpath), "-d", classes.toString) + val compilerArguments: List[String] = compilerOptions ::: sources + + // the compiler classpath has to be appended to the boot classpath to work properly + val allArguments = "-Xmx512M" :: ("-Xbootclasspath/a:" + compilerClasspath) :: CompilerMainClass :: compilerArguments + log.debug("Running external compiler with command: java " + allArguments.mkString(" ")) + val exitValue = Process("java", allArguments) ! 
log + if(exitValue == 0) + None + else + Some("Nonzero exit value (" + exitValue + ") when calling scalac " + version + " with options: \n" + compilerOptions.mkString(" ")) + } + private def concatPaths(p: PathFinder): String = Path.makeString(p.get) + private def pathListStrings(p: PathFinder): List[String] = p.get.map(_.absolutePath).toList + private def classesPath(scalaVersion: Configuration) = ("target" / ("classes-" + scalaVersion.toString)) ## +} diff --git a/project/build/src/LoaderProject.scala b/project/build/src/LoaderProject.scala new file mode 100644 index 000000000..927d60fe5 --- /dev/null +++ b/project/build/src/LoaderProject.scala @@ -0,0 +1,129 @@ +import sbt._ + +import LoaderProject._ +import java.io.File + +// a project for the sbt launcher +// the main content of this project definition is setting up and running proguard +// to combine and compact all dependencies into a single jar +protected/* removes the ambiguity as to which project is the entry point by making this class non-public*/ + class LoaderProject(info: ProjectInfo) extends DefaultProject(info) +{ + val mainClassName = "sbt.boot.Boot" + val baseName = "sbt-launcher" + val proguardConfigurationPath: Path = outputPath / "proguard.pro" + lazy val outputJar: Path = rootProject.outputPath / (baseName + "-" + version + ".jar") + def rootProjectDirectory = rootProject.info.projectPath + + override def mainClass = Some(mainClassName) + override def defaultJarBaseName = baseName + "-" + version.toString + + /****** Resources *****/ + def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE" + override def mainResources = super.mainResources +++ extraResources + + /****** Dependencies *******/ + val defaultConfig = config("default") + val toolsConfig = config("tools") + val ivy = "org.apache.ivy" % "ivy" % "2.0.0" + val proguardJar = "net.sf.proguard" % "proguard" % "4.3" % "tools->default" + + /******** Proguard *******/ + lazy val proguard = 
proguardTask dependsOn(`package`, writeProguardConfiguration) describedAs(ProguardDescription) + lazy val writeProguardConfiguration = writeProguardConfigurationTask dependsOn `package` describedAs WriteProguardDescription + + private def proguardTask = + task + { + FileUtilities.clean(outputJar :: Nil, log) + val proguardClasspath = managedClasspath(toolsConfig) + val proguardClasspathString = Path.makeString(proguardClasspath.get) + val configFile = proguardConfigurationPath.asFile.getAbsolutePath + val exitValue = Process("java", List("-Xmx128M", "-cp", proguardClasspathString, "proguard.ProGuard", "@" + configFile)) ! log + if(exitValue == 0) None else Some("Proguard failed with nonzero exit code (" + exitValue + ")") + } + private def writeProguardConfigurationTask = + task + { + // these are classes that need to be explicitly kept because they are loaded reflectively + val ivyKeepResolvers = + "org.apache.ivy.plugins.resolver.URLResolver" :: + "org.apache.ivy.plugins.resolver.IBiblioResolver" :: + Nil + // the template for the proguard configuration file + val outTemplate = """ + |-dontoptimize + |-dontobfuscate + |-dontnote + |-dontwarn + |-libraryjars %s + |-injars %s(!META-INF/**,!fr/**,!**/antlib.xml,!**/*.png) + |-injars %s(!META-INF/**) + |%s + |-outjars %s + |-ignorewarnings + |%s + |%s + |-keep public class %s { + | public static void main(java.lang.String[]); + |}""" + + val defaultJar = (outputPath / defaultJarName).asFile.getAbsolutePath + log.debug("proguard configuration using main jar " + defaultJar) + val ivyKeepOptions = ivyKeepResolvers.map("-keep public class " + _ + allPublic).mkString("\n") + val runtimeClasspath = runClasspath.get.map(_.asFile).toList + val jlineJars = runtimeClasspath.filter(isJLineJar) + val externalDependencies = (mainCompileConditional.analysis.allExternals).map(_.getAbsoluteFile).filter(_.getName.endsWith(".jar")) + log.debug("proguard configuration external dependencies: \n\t" + 
externalDependencies.mkString("\n\t")) + // partition jars from the external jar dependencies of this project by whether they are located in the project directory + // if they are, they are specified with -injars, otherwise they are specified with -libraryjars + val (externalJars, libraryJars) = externalDependencies.toList.partition(jar => Path.relativize(rootProjectDirectory, jar).isDefined) + log.debug("proguard configuration library jars locations: " + libraryJars.mkString(", ")) + // pull out Ivy in order to exclude resources inside + val (ivyJars, externalJarsNoIvy) = externalJars.partition(_.getName.startsWith("ivy")) + log.debug("proguard configuration ivy jar location: " + ivyJars.mkString(", ")) + // the loader uses JLine, so there is a dependency on the compiler (because JLine is distributed with the compiler, + // it finds the JLine classes from the compiler jar instead of the jline jar on the classpath), but we don't want to + // include the version of JLine from the compiler. 
+ val includeExternalJars = externalJarsNoIvy.filter(jar => !isJarX(jar, "scala-compiler")) + // exclude properties files and manifests from scala-library jar + val inJars = (defaultJar :: includeExternalJars.map( _ + "(!META-INF/**,!*.properties)")).map("-injars " + _).mkString("\n") + + withJar(ivyJars, "Ivy") { ivyJar => + withJar(jlineJars, "JLine") { jlineJar => + val proguardConfiguration = + outTemplate.stripMargin.format(libraryJars.mkString(File.pathSeparator), + ivyJar.getAbsolutePath, jlineJar.getAbsolutePath, + inJars, outputJar.absolutePath, ivyKeepOptions, keepJLine, mainClassName) + log.debug("Proguard configuration written to " + proguardConfigurationPath) + FileUtilities.write(proguardConfigurationPath.asFile, proguardConfiguration, log) + } + } + } + private def withJar(files: List[File], name: String)(f: File => Option[String]): Option[String] = + files match + { + case Nil => Some(name + " not present (try running update)") + case jar :: _ => f(jar) + } + private def isJLineJar(file: File) = isJarX(file, "jline") + private def isJarX(file: File, x: String) = + { + val name = file.getName + name.startsWith(x) && name.endsWith(".jar") + } + // class body declaration for proguard that keeps all public members + private val allPublic = " {\n public * ;\n}" + + private val keepJLine = + """ + |-keep public class jline.** { + | public protected *; + |} + """.stripMargin +} +object LoaderProject +{ + val ProguardDescription = "Produces the final compacted jar that contains only the minimum classes needed using proguard." + val WriteProguardDescription = "Creates the configuration file to use with proguard." 
+} \ No newline at end of file diff --git a/project/build/src/SbtProject.scala b/project/build/src/SbtProject.scala new file mode 100644 index 000000000..f7b815f12 --- /dev/null +++ b/project/build/src/SbtProject.scala @@ -0,0 +1,74 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +import sbt._ + +import java.io.File + +class SbtProject(info: ProjectInfo) extends ParentProject(info) +{ + // Launcher sub project. + lazy val boot = project("boot", "Simple Build Tool Loader", new LoaderProject(_)) + // Main builder sub project + lazy val main = project(info.projectPath, "Simple Build Tool", new MainProject(_)) + // One-shot build for users building from trunk + lazy val fullBuild = task { None } dependsOn(boot.proguard, main.crossPublishLocal) describedAs + "Builds the loader and builds main sbt against all supported versions of Scala and installs to the local repository." + + override def shouldCheckOutputDirectories = false + override def baseUpdateOptions = QuietUpdate :: Nil + + override def parallelExecution = true + override def deliverLocalAction = noAction + private def noAction = task { None } + override def publishLocalAction = noAction +} + +protected class MainProject(val info: ProjectInfo) extends CrossCompileProject +{ + override def mainScalaSources = + if(Project.currentScalaVersion.map(_.startsWith("2.8")).getOrElse(false)) // cannot compile against test libraries currently + Path.lazyPathFinder { super.mainScalaSources.get.filter(_.asFile.getName.endsWith("TestFrameworkImpl.scala")) } + else + super.mainScalaSources + override def defaultJarBaseName = "sbt_" + version.toString + /** Additional resources to include in the produced jar.*/ + def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE" + override def mainResources = super.mainResources +++ extraResources + override def mainClass = Some("sbt.Main") + override def testOptions = ExcludeTests("sbt.ReflectiveSpecification" :: 
"sbt.ProcessSpecification" :: Nil) :: super.testOptions.toList + + // ======== Scripted testing ========== + + def sbtTestResources = testResourcesPath / "sbt-test-resources" + + lazy val testNoScripted = super.testAction + override def testAction = testNoScripted dependsOn(scripted) + lazy val scripted = scriptedTask dependsOn(testCompile, `package`) + def scriptedTask = + task + { + log.info("Running scripted tests...") + log.info("") + // load ScriptedTests using a ClassLoader that loads from the project classpath so that the version + // of sbt being built is tested, not the one doing the building. + val loader = ScriptedLoader(scriptedClasspath.toArray) + val scriptedClass = Class.forName(ScriptedClassName, true, loader) + val scriptedConstructor = scriptedClass.getConstructor(classOf[File], classOf[Function2[String, String, Boolean]]) + val runner = scriptedConstructor.newInstance(sbtTestResources.asFile, filter) + runner.asInstanceOf[{def scriptedTests(log: Logger): Option[String]}].scriptedTests(log) + } + /** The classpath to use for scripted tests. This ensures that the version of sbt being built it the one used for testing.*/ + private def scriptedClasspath = + { + val buildClasspath = classOf[SbtProject]. 
getProtectionDomain.getCodeSource.getLocation.toURI.toURL + val scalacJar = FileUtilities.scalaCompilerJar.toURI.toURL + val ivy = runClasspath.get.filter(_.asFile.getName.startsWith("ivy-")).map(_.asURL).toList + val builtSbtJar = (outputPath / defaultJarName).asURL + builtSbtJar :: buildClasspath :: scalacJar :: ivy + } + + val ScriptedClassName = "scripted.ScriptedTests" + + val filter = (group: String, name: String) => true +} \ No newline at end of file diff --git a/project/build/src/ScriptedLoader.scala b/project/build/src/ScriptedLoader.scala new file mode 100644 index 000000000..cf73edb6c --- /dev/null +++ b/project/build/src/ScriptedLoader.scala @@ -0,0 +1,31 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +import sbt._ + +import java.net.URL + +package sbt { // need access to LoaderBase, which is private in package sbt + object ScriptedLoader + { + def apply(paths: Array[URL]): ClassLoader = new ScriptedLoader(paths) + } + private class ScriptedLoader(paths: Array[URL]) extends LoaderBase(paths, classOf[ScriptedLoader].getClassLoader) + { + private val delegateFor = List("sbt.Logger", "sbt.LogEvent", "sbt.SetLevel", "sbt.Success", "sbt.Log", "sbt.SetTrace", "sbt.Trace", "sbt.ControlEvent") + def doLoadClass(className: String): Class[_] = + { + // Logger needs to be loaded from the version of sbt building the project because we need to pass + // a Logger from that loader into ScriptedTests. 
+ // All other sbt classes should be loaded from the project classpath so that we test those classes with 'scripted' + if(!shouldDelegate(className) && (className.startsWith("sbt.") || className.startsWith("scripted.") || className.startsWith("scala.tools."))) + findClass(className) + else + selfLoadClass(className) + } + + private def shouldDelegate(className: String) = delegateFor.exists(check => isNestedOrSelf(className, check)) + private def isNestedOrSelf(className: String, checkAgainst: String) = + className == checkAgainst || className.startsWith(checkAgainst + "$") + } +} \ No newline at end of file diff --git a/project/build/src/ScriptedTests.scala b/project/build/src/ScriptedTests.scala new file mode 100644 index 000000000..3d4650990 --- /dev/null +++ b/project/build/src/ScriptedTests.scala @@ -0,0 +1,70 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ + +package scripted + +import sbt._ +import java.io.File + +trait ScriptedTestFilter extends NotNull +{ + def accept(group: String, name: String): Boolean +} +class BasicFilter(f: (String, String) => Boolean) extends ScriptedTestFilter +{ + def accept(group: String, name: String) = f(group, name) +} + +object AcceptAllFilter extends ScriptedTestFilter +{ + def accept(group: String, name: String): Boolean = true +} +class ScriptedTests(testResources: Resources, filter: ScriptedTestFilter) extends NotNull +{ + def this(resourceBaseDirectory: File, filter: (String, String) => Boolean) = this(new Resources(resourceBaseDirectory), new BasicFilter(filter)) + def this(resourceBaseDirectory: File, filter: ScriptedTestFilter) = this(new Resources(resourceBaseDirectory), filter) + def this(testResources: Resources) = this(testResources, AcceptAllFilter) + def this(resourceBaseDirectory: File) = this(new Resources(resourceBaseDirectory)) + + val ScriptFilename = "test" + import testResources._ + + private def includeDirectory(file: File) = file.getName != ".svn" + def scriptedTests(log: Logger): 
Option[String] = + { + System.setProperty("sbt.scala.version", "") + var success = true + for(group <- baseDirectory.listFiles(DirectoryFilter) if includeDirectory(group)) + { + log.info("Test group " + group.getName) + for(test <- group.listFiles(DirectoryFilter) if includeDirectory(test)) + { + val testName = test.getName + if(!filter.accept(group.getName, testName)) + log.warn(" Test " + testName + " skipped.") + else + scriptedTest(test, log) match + { + case Some(err) => + log.error(" Test " + testName + " failed: " + err) + success = false + case None => log.info(" Test " + testName + " succeeded.") + } + } + } + if(success) + None + else + Some("One or more tests failed.") + } + + def scriptedTest(group: String, name: String, log: Logger): Option[String] = + readOnlyResourceDirectory(group, name).fold(err => Some(err), testDirectory => scriptedTest(testDirectory, log)) + def scriptedTest(testDirectory: File, log: Logger): Option[String] = + { + (for(script <- (new TestScriptParser(testDirectory, log)).parse(new File(testDirectory, ScriptFilename)).right; + u <- withProject(testDirectory, log)(script).right ) + yield u).left.toOption + } +} diff --git a/project/build/src/TestScriptParser.scala b/project/build/src/TestScriptParser.scala new file mode 100644 index 000000000..bab2fc1fc --- /dev/null +++ b/project/build/src/TestScriptParser.scala @@ -0,0 +1,269 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ + +package scripted + +import sbt._ +import java.io.{BufferedReader, File, InputStreamReader} + +/* +statement* +statement ::= ('$' | '>') word+ '[' word ']' +word ::= [^ \[\]]+ +comment ::= '#' [^ \n\r]* ('\n' | '\r' | eof) +*/ +import scala.util.parsing.combinator._ +import scala.util.parsing.input.Positional + +import TestScriptParser._ +private class TestScriptParser(baseDirectory: File, log: Logger) extends RegexParsers with NotNull +{ + type Statement = Project => Either[String, ReloadProject] + type PStatement = Statement with 
Positional + + private def evaluateList(list: List[PStatement])(p: Project): WithProjectResult[Unit] = + list match + { + case Nil => ValueResult(()) + case head :: tail => + head(p) match + { + case Left(msg) => new ErrorResult(msg) + case Right(reload) => ContinueResult(p =>evaluateList(tail)(p), reload) + } + } + + def script: Parser[Project => WithProjectResult[Unit]] = rep1(space ~> statement <~ space) ^^ evaluateList + def statement: Parser[PStatement] = + positioned + { + (StartRegex ~! rep1(word) ~! "[" ~! word ~! "]") ^^ + { + case start ~ command ~ open ~ result ~ close => + val successExpected = result.toLowerCase == SuccessLiteral.toLowerCase + new Statement with Positional + { selfPositional => + def apply(p: Project) = + { + val result = + try + { + start match + { + case CommandStart => evaluateCommand(command, successExpected, selfPositional)(p) + case ActionStart => evaluateAction(command, successExpected)(p).toLeft(NoReload) + } + } + catch + { + case e: Exception => + log.trace(e) + Left(e.toString) + } + result.left.map(message => linePrefix(this) + message) + } + } + } + } + private def linePrefix(p: Positional) = "{line " + p.pos.line + "} " + def space = """(\s+|(\#[^\n\r]*))*""".r + def word: Parser[String] = ("\'" ~> "[^'\n\r]*".r <~ "\'") | ("\"" ~> "[^\"\n\r]*".r <~ "\"") | WordRegex + def parse(scriptFile: File): Either[String, Project => WithProjectResult[Unit]] = + { + def parseReader(reader: java.io.Reader) = + parseAll(script, reader) match + { + case Success(result, next) => Right(result) + case err: NoSuccess => + { + val pos = err.next.pos + Left("Could not parse test script '" + scriptFile.getCanonicalPath + + "' (" + pos.line + "," + pos.column + "): " + err.msg) + } + } + FileUtilities.readValue(scriptFile, log)(parseReader) + } + + private def scriptError(message: String): Some[String] = Some("Test script error: " + message) + private def wrongArguments(commandName: String, args: List[String]): Some[String] = + 
scriptError("Command '" + commandName + "' does not accept arguments (found '" + spacedString(args) + "').") + private def wrongArguments(commandName: String, requiredArgs: String, args: List[String]): Some[String] = + scriptError("Wrong number of arguments to " + commandName + " command. " + requiredArgs + " required, found: '" + spacedString(args) + "'.") + private def evaluateCommand(command: List[String], successExpected: Boolean, position: Positional)(project: Project): Either[String, ReloadProject] = + { + command match + { + case "reload" :: Nil => Right(if(successExpected) new ReloadSuccessExpected(linePrefix(position)) else ReloadErrorExpected) + case x => evaluateCommandNoReload(x, successExpected)(project).toLeft(NoReload) + } + } + private def evaluateCommandNoReload(command: List[String], successExpected: Boolean)(project: Project): Option[String] = + { + evaluate(successExpected, "Command '" + command.firstOption.getOrElse("") + "'", project) + { + command match + { + case Nil => scriptError("No command specified.") + case "touch" :: paths => touch(paths, project) + case "delete" :: paths => delete(paths, project) + case "mkdir" :: paths => makeDirectories(paths, project) + case "copy-file" :: from :: to :: Nil => copyFile(from, to, project) + case "copy-file" :: args => wrongArguments("copy-file", "Two paths", args) + case "sync" :: from :: to :: Nil => sync(from, to, project) + case "sync" :: args => wrongArguments("sync", "Two directory paths", args) + case "copy" :: paths => copy(paths, project) + case "exists" :: paths => exists(paths, project) + case "absent" :: paths => absent(paths, project) + case "pause" :: Nil => readLine("Press enter to continue. 
"); println(); None + case "pause" :: args => wrongArguments("pause", args) + case "newer" :: a :: b :: Nil => newer(a, b, project) + case "newer" :: args => wrongArguments("newer", "Two paths", args) + case "sleep" :: time :: Nil => trap("Error while sleeping:") { Thread.sleep(time.toLong) } + case "sleep" :: args => wrongArguments("sleep", "Time in milliseconds", args) + case "exec" :: command :: args => execute(command, args, project) + case "exec" :: other => wrongArguments("exec", "Command and arguments", other) + case "reload" :: args => wrongArguments("reload", args) + case unknown :: arguments => scriptError("Unknown command " + unknown) + } + } + } + private def foreachBufferedLogger(project: Project)(f: BufferedLogger => Unit) + { + project.topologicalSort.foreach(p => p.log match { case buffered: BufferedLogger => f(buffered); case _ => () }) + } + private def evaluate(successExpected: Boolean, label: String, project: Project)(body: => Option[String]): Option[String] = + { + def startRecordingLog() { foreachBufferedLogger(project)(_.startRecording()) } + def playLog() { foreachBufferedLogger(project)(_.playAll()) } + def stopLog() { foreachBufferedLogger(project)(_.stop()) } + + startRecordingLog() + val result = + body match + { + case None => + if(successExpected) None + else + { + playLog() + Some(label + " succeeded (expected failure).") + } + case Some(failure) => + if(successExpected) + { + playLog() + Some(label + " failed (expected success): " + failure) + } + else None + } + stopLog() + result + } + private def evaluateAction(action: List[String], successExpected: Boolean)(project: Project): Option[String] = + { + def actionToString = action.mkString(" ") + action match + { + case Nil => scriptError("No action specified.") + case head :: Nil if project.taskNames.toSeq.contains(head)=> + evaluate(successExpected, "Action '" + actionToString + "'", project)(project.act(head)) + case head :: tail => + evaluate(successExpected, "Method '" + 
actionToString + "'", project)(project.call(head, tail.toArray)) + } + } + private def spacedString[T](l: Seq[T]) = l.mkString(" ") + private def wrap(result: Option[String]) = result.flatMap(scriptError) + private def trap(errorPrefix: String)(action: => Unit) = wrap( Control.trapUnit(errorPrefix, log) { action; None } ) + + private def fromStrings(paths: List[String], project: Project) = paths.map(path => fromString(path, project)) + private def fromString(path: String, project: Project) = Path.fromString(project.info.projectPath, path) + private def touch(paths: List[String], project: Project) = + if(paths.isEmpty) + scriptError("No paths specified for touch command.") + else + wrap(lazyFold(paths) { path => FileUtilities.touch(fromString(path, project), log) }) + + private def delete(paths: List[String], project: Project) = + if(paths.isEmpty) + scriptError("No paths specified for delete command.") + else + wrap(FileUtilities.clean(fromStrings(paths, project), true, log)) + private def sync(from: String, to: String, project: Project) = + wrap(FileUtilities.sync(fromString(from, project), fromString(to, project), log)) + private def copyFile(from: String, to: String, project: Project) = + wrap(FileUtilities.copyFile(fromString(from, project), fromString(to, project), log)) + private def copy(paths: List[String], project: Project) = + paths match + { + case Nil => scriptError("No paths specified for copy command.") + case path :: Nil => scriptError("No destination specified for copy command.") + case _ => + val mapped = fromStrings(paths, project).toArray + val last = mapped.length - 1 + wrap(FileUtilities.copy(mapped.take(last), mapped(last), log).left.toOption) + } + private def makeDirectories(paths: List[String], project: Project) = + fromStrings(paths, project) match + { + case Nil => scriptError("No paths specified for mkdir command.") + case p => FileUtilities.createDirectories(p, project.log) + } + private def newer(a: String, b: String, project: Project) 
= + trap("Error testing if '" + a + "' is newer than '" + b + "'") + { + val pathA = fromString(a, project) + val pathB = fromString(b, project) + pathA.exists && (!pathB.exists || pathA.lastModified > pathB.lastModified) + } + private def exists(paths: List[String], project: Project) = + fromStrings(paths, project).filter(!_.exists) match + { + case Nil => None + case x => Some("File(s) did not exist: " + x.mkString("[ ", " , ", " ]")) + } + private def absent(paths: List[String], project: Project) = + fromStrings(paths, project).filter(_.exists) match + { + case Nil => None + case x => Some("File(s) existed: " + x.mkString("[ ", " , ", " ]")) + } + private def execute(command: String, args: List[String], project: Project) = + { + if(command.trim.isEmpty) + Some("Command was empty.") + else + { + Control.trapUnit("Error running command: ", project.log) + { + val builder = new java.lang.ProcessBuilder((command :: args).toArray : _*).directory(project.info.projectDirectory) + val exitValue = Process(builder) ! 
log + if(exitValue == 0) + None + else + Some("Nonzero exit value (" + exitValue + ")") + } + } + } +} +private object TestScriptParser +{ + val SuccessLiteral = "success" + val Failure = "error" + val CommandStart = "$" + val ActionStart = ">" + val WordRegex = """[^ \[\]\s'\"][^ \[\]\s]*""".r + val StartRegex = ("[" + CommandStart + ActionStart + "]").r + + final def lazyFold[T](list: List[T])(f: T => Option[String]): Option[String] = + list match + { + case Nil => None + case head :: tail => + f(head) match + { + case None => lazyFold(tail)(f) + case x => x + } + } +} diff --git a/src/main/resources/scalac-plugin.xml b/src/main/resources/scalac-plugin.xml new file mode 100644 index 000000000..54ec5669c --- /dev/null +++ b/src/main/resources/scalac-plugin.xml @@ -0,0 +1,4 @@ + + sbt-analyze + sbt.Analyzer + diff --git a/src/main/scala/sbt/Analysis.scala b/src/main/scala/sbt/Analysis.scala new file mode 100644 index 000000000..4498fd800 --- /dev/null +++ b/src/main/scala/sbt/Analysis.scala @@ -0,0 +1,220 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +trait TaskAnalysis[Source, Product, External] extends NotNull +{ + import scala.collection.Set + def save(): Option[String] + def revert(): Option[String] + def clear(): Unit + + def allSources: Set[Source] + def allProducts: Set[Product] + def allExternals: Set[External] + + def sourceDependencies(source: Source): Option[Set[Source]] + def products(source: Source): Option[Set[Product]] + def externalDependencies(external: External): Option[Set[Source]] + + def addSource(source: Source): Unit + def addExternalDependency(dependsOn: External, source: Source): Unit + def addSourceDependency(dependsOn: Source, source: Source): Unit + def addProduct(source: Source, product: Product): Unit + + def removeSource(source: Source): Unit + def removeDependent(source: Source): Unit + def removeDependencies(source: Source): Option[Set[Source]] + def removeExternalDependency(external: 
External): Unit +} + +import java.io.File +import BasicAnalysis._ +import impl.MapUtilities.{add, all, read, mark, readOnlyIterable, write} +import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set} + +sealed class BasicAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends TaskAnalysis[Path, Path, File] +{ + private val sourceDependencyMap: Map[Path, Set[Path]] = new HashMap + private val productMap: Map[Path, Set[Path]] = new HashMap + private val externalDependencyMap: Map[File, Set[Path]] = new HashMap + + final type AnyMapToSource = Map[K, Set[Path]] forSome {type K} + final type AnySourceMap = Map[Path, T] forSome {type T} + final type AnySourceSetMap = Map[Path, Set[T]] forSome {type T} + final type AnyMap = Map[K, V] forSome { type K; type V } + + protected def mapsToClear = List[AnyMap](sourceDependencyMap, productMap, externalDependencyMap) + protected def mapsToRemoveSource = List[AnySourceMap](sourceDependencyMap, productMap) + protected def mapsToRemoveDependent = List[AnyMapToSource](sourceDependencyMap, externalDependencyMap) + protected def mapsToMark = List[AnySourceSetMap](sourceDependencyMap, productMap) + + def clear() + { + for(map <- mapsToClear) + map.clear() + } + def removeSource(source: Path) + { + for(sourceProducts <- productMap.get(source)) + FileUtilities.clean(sourceProducts, true, log) + for(map <- mapsToRemoveSource) + map -= source + } + def removeSelfDependency(source: Path) + { + for(deps <- sourceDependencyMap.get(source)) + deps -= source + } + def removeDependent(source: Path) + { + for(map <- mapsToRemoveDependent; deps <- map.values) + deps -= source + } + def removeDependencies(source: Path) = sourceDependencyMap.removeKey(source) + def removeExternalDependency(dep: File) = externalDependencyMap.removeKey(dep.getAbsoluteFile) + + def externalDependencies(external: File) = externalDependencyMap.get(external.getAbsoluteFile) + def sourceDependencies(source: Path) = sourceDependencyMap.get(source) 
+ def products(sources: Iterable[Path]): Iterable[Path] = + { + val buffer = new ListBuffer[Path] + for(source <- sources; sourceProducts <- productMap.get(source)) + buffer ++= sourceProducts + buffer.readOnly + } + def products(source: Path) = productMap.get(source) + + def allSources = sourceDependencyMap.keySet + def allProducts: Set[Path] = HashSet(flatten(productMap.values.toList) : _*) + def allExternals = externalDependencyMap.keySet + + def allExternalDependencies = readOnlyIterable(externalDependencyMap) + def allDependencies = readOnlyIterable(sourceDependencyMap) + + def addSourceDependency(on: Path, from: Path) = add(on, from, sourceDependencyMap) + def addExternalDependency(on: File, from: Path) = add(on.getAbsoluteFile, from, externalDependencyMap) + def addProduct(source: Path, file: Path) = add(source, file, productMap) + def addSource(source: Path) = + { + for(map <- mapsToMark) + mark(source, map) + } + + import Format._ // get implicits for data types + implicit val path: Format[Path] = Format.path(projectPath) + implicit val pathSet: Format[Set[Path]] = Format.set + + protected def backedMaps: Iterable[Backed[_,_]] = + Backed(sourceDependencyMap, DependenciesLabel, DependenciesFileName) :: + Backed(productMap, GeneratedLabel, GeneratedFileName) :: + Backed(externalDependencyMap, ExternalDependenciesLabel, ExternalDependenciesFileName) :: + Nil + + def revert() = load() + private def loadBacked[Key,Value](b: Backed[Key,Value]) = read(b.map, analysisPath / b.name, log)(b.keyFormat, b.valueFormat) + private def storeBacked[Key,Value](b: Backed[Key,Value]) = write(b.map, b.label, analysisPath / b.name, log)(b.keyFormat, b.valueFormat) + final def load(): Option[String] = Control.lazyFold(backedMaps.toList)(backed =>loadBacked(backed)) + final def save(): Option[String] = Control.lazyFold(backedMaps.toList)(backed => storeBacked(backed)) +} +object BasicAnalysis +{ + private def flatten(s: Iterable[Set[Path]]): Seq[Path] = s.flatMap(x => 
x.toSeq).toSeq + + val GeneratedFileName = "generated_files" + val DependenciesFileName = "dependencies" + val ExternalDependenciesFileName = "external" + + val GeneratedLabel = "Generated Classes" + val DependenciesLabel = "Source Dependencies" + val ExternalDependenciesLabel = "External Dependencies" + + def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, BasicAnalysis] = + { + val analysis = new BasicAnalysis(analysisPath, projectPath, log) + analysis.load().toLeft(analysis) + } +} +object CompileAnalysis +{ + val HashesFileName = "hashes" + val TestsFileName = "tests" + val ApplicationsFileName = "applications" + val ProjectDefinitionsName = "projects" + + val HashesLabel = "Source Hashes" + val TestsLabel = "Tests" + val ApplicationsLabel = "Classes with main methods" + val ProjectDefinitionsLabel = "Project Definitions" + + def load(analysisPath: Path, projectPath: Path, log: Logger): Either[String, CompileAnalysis] = + { + val analysis = new CompileAnalysis(analysisPath, projectPath, log) + analysis.load().toLeft(analysis) + } +} +import CompileAnalysis._ +import Format._ // get implicits for data types +sealed class BasicCompileAnalysis protected (analysisPath: Path, projectPath: Path, log: Logger) extends BasicAnalysis(analysisPath, projectPath, log) +{ + /*private */val hashesMap = new HashMap[Path, Array[Byte]] + + override protected def mapsToClear = hashesMap :: super.mapsToClear + override protected def mapsToRemoveSource = hashesMap :: super.mapsToRemoveSource + + def setHash(source: Path, hash: Array[Byte]) { hashesMap(source) = hash } + def clearHash(source: Path) { hashesMap.removeKey(source) } + def hash(source: Path) = hashesMap.get(source) + def clearHashes() { hashesMap.clear() } + + def getClasses(sources: PathFinder, outputDirectory: Path): PathFinder = + Path.lazyPathFinder + { + val basePath = (outputDirectory ##) + for(c <- products(sources.get)) yield + Path.relativize(basePath, c).getOrElse(c) + } + + implicit 
val stringSet: Format[Set[String]] = Format.set + override protected def backedMaps = Backed(hashesMap, HashesLabel, HashesFileName) :: super.backedMaps.toList +} +private[sbt] final class BuilderCompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log) +{ + private val projectDefinitionMap = new HashMap[Path, Set[String]] + override protected def mapsToClear = projectDefinitionMap :: super.mapsToClear + override protected def mapsToRemoveSource = projectDefinitionMap :: super.mapsToRemoveSource + def allProjects = all(projectDefinitionMap) + def addProjectDefinition(source: Path, className: String) = add(source, className, projectDefinitionMap) + + override protected def backedMaps = + Backed(projectDefinitionMap, ProjectDefinitionsLabel, ProjectDefinitionsName) :: + super.backedMaps +} +final class CompileAnalysis(analysisPath: Path, projectPath: Path, log: Logger) extends BasicCompileAnalysis(analysisPath, projectPath, log) +{ + private val testMap = new HashMap[Path, Set[TestDefinition]] + private val applicationsMap = new HashMap[Path, Set[String]] + def allTests = all(testMap) + def allApplications = all(applicationsMap) + def addTest(source: Path, test: TestDefinition) = add(source, test, testMap) + def addApplication(source: Path, className: String) = add(source, className, applicationsMap) + + def testSourceMap: Map[String, Path] = + { + val map = new HashMap[String, Path] + for( (source, tests) <- testMap; test <- tests) map(test.testClassName) = source + map + } + + override protected def mapsToClear = applicationsMap :: testMap :: super.mapsToClear + override protected def mapsToRemoveSource = applicationsMap :: testMap :: super.mapsToRemoveSource + + implicit val testSet: Format[Set[TestDefinition]] = Format.set + override protected def backedMaps = + Backed(testMap, TestsLabel, TestsFileName) :: + Backed(applicationsMap, ApplicationsLabel, ApplicationsFileName) :: + 
super.backedMaps +} +/** A map that is persisted in a properties file named 'name' and with 'label'. 'keyFormat' and 'valueFormat' are used to (de)serialize. */ +final case class Backed[Key, Value](map: Map[Key, Value], label: String, name: String)(implicit val keyFormat: Format[Key], val valueFormat: Format[Value]) extends NotNull \ No newline at end of file diff --git a/src/main/scala/sbt/AnalysisCallback.scala b/src/main/scala/sbt/AnalysisCallback.scala new file mode 100644 index 000000000..e20d9190b --- /dev/null +++ b/src/main/scala/sbt/AnalysisCallback.scala @@ -0,0 +1,93 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File + +object AnalysisCallback +{ + private val map = new scala.collection.mutable.HashMap[Int, AnalysisCallback] + private var nextID: Int = 0 + def register(callback: AnalysisCallback): Int = + { + val id = nextID + nextID += 1 + map(id) = callback + id + } + def apply(id: Int): Option[AnalysisCallback] = map.get(id) + def unregister(id: Int) + { + map -= id + } +} + +trait AnalysisCallback extends NotNull +{ + /** The names of classes that the analyzer should find subclasses of.*/ + def superclassNames: Iterable[String] + /** The base path for the project.*/ + def basePath: Path + /** Called when the the given superclass could not be found on the classpath by the compiler.*/ + def superclassNotFound(superclassName: String): Unit + /** Called before the source at the given location is processed. 
*/ + def beginSource(sourcePath: Path): Unit + /** Called when the a subclass of one of the classes given in superclassNames is + * discovered.*/ + def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean): Unit + /** Called to indicate that the source file sourcePath depends on the source file + * dependsOnPath.*/ + def sourceDependency(dependsOnPath: Path, sourcePath: Path): Unit + /** Called to indicate that the source file sourcePath depends on the jar + * jarPath.*/ + def jarDependency(jarPath: File, sourcePath: Path): Unit + /** Called to indicate that the source file sourcePath depends on the class file + * classFile.*/ + def classDependency(classFile: File, sourcePath: Path): Unit + /** Called to indicate that the source file sourcePath produces a class file at + * modulePath.*/ + def generatedClass(sourcePath: Path, modulePath: Path): Unit + /** Called after the source at the given location has been processed. */ + def endSource(sourcePath: Path): Unit + /** Called when a module with a public 'main' method with the right signature is found.*/ + def foundApplication(sourcePath: Path, className: String): Unit +} +abstract class BasicAnalysisCallback[A <: BasicCompileAnalysis](val basePath: Path, val superclassNames: Iterable[String], + protected val analysis: A) extends AnalysisCallback +{ + def superclassNotFound(superclassName: String) {} + + def beginSource(sourcePath: Path) + { + analysis.addSource(sourcePath) + } + def sourceDependency(dependsOnPath: Path, sourcePath: Path) + { + analysis.addSourceDependency(dependsOnPath, sourcePath) + } + def jarDependency(jarFile: File, sourcePath: Path) + { + analysis.addExternalDependency(jarFile, sourcePath) + } + def classDependency(classFile: File, sourcePath: Path) + { + analysis.addExternalDependency(classFile, sourcePath) + } + def generatedClass(sourcePath: Path, modulePath: Path) + { + analysis.addProduct(sourcePath, modulePath) + } + def endSource(sourcePath: 
Path) + { + analysis.removeSelfDependency(sourcePath) + } +} +abstract class BasicCompileAnalysisCallback(basePath: Path, superclassNames: Iterable[String], analysis: CompileAnalysis) + extends BasicAnalysisCallback(basePath, superclassNames, analysis) +{ + def foundApplication(sourcePath: Path, className: String) + { + analysis.addApplication(sourcePath, className) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Analyzer.scala b/src/main/scala/sbt/Analyzer.scala new file mode 100644 index 000000000..3e45d804a --- /dev/null +++ b/src/main/scala/sbt/Analyzer.scala @@ -0,0 +1,234 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import scala.tools.nsc.{io, plugins, symtab, Global, Phase} +import io.{AbstractFile, PlainFile, ZipArchive} +import plugins.{Plugin, PluginComponent} +import symtab.Flags +import scala.collection.mutable.{HashMap, HashSet, Map, Set} + +import java.io.File + +object Analyzer +{ + val PluginName = "sbt-analyzer" + val CallbackIDOptionName = "callback:" +} +class Analyzer(val global: Global) extends Plugin +{ + import global._ + import Analyzer._ + + val name = PluginName + val description = "A plugin to find all concrete instances of a given class and extract dependency information." 
+ val components = List[PluginComponent](Component) + + private var callbackOption: Option[AnalysisCallback] = None + + override def processOptions(options: List[String], error: String => Unit) + { + for(option <- options) + { + if(option.startsWith(CallbackIDOptionName)) + callbackOption = AnalysisCallback(option.substring(CallbackIDOptionName.length).toInt) + else + error("Option for sbt analyzer plugin not understood: " + option) + } + if(callbackOption.isEmpty) + error("Callback ID not specified for sbt analyzer plugin.") + } + + override val optionsHelp: Option[String] = + { + val prefix = " -P:" + name + ":" + Some(prefix + CallbackIDOptionName + " Set the callback id.\n") + } + + /* ================================================== */ + // These two templates abuse scope for source compatibility between Scala 2.7.x and 2.8.x so that a single + // sbt codebase compiles with both series of versions. + // In 2.8.x, PluginComponent.runsAfter has type List[String] and the method runsBefore is defined on + // PluginComponent with default value Nil. + // In 2.7.x, runsBefore does not exist on PluginComponent and PluginComponent.runsAfter has type String. + // + // Therefore, in 2.8.x, object runsBefore is shadowed by PluginComponent.runsBefore (which is Nil) and so + // afterPhase :: runsBefore + // is equivalent to List[String](afterPhase) + // In 2.7.x, object runsBefore is not shadowed and so runsAfter has type String. 
+ private object runsBefore { def :: (s: String) = s } + private abstract class CompatiblePluginComponent(afterPhase: String) extends PluginComponent + { + override val runsAfter = afterPhase :: runsBefore + } + /* ================================================== */ + + private object Component extends CompatiblePluginComponent("jvm") + { + val global = Analyzer.this.global + val phaseName = Analyzer.this.name + def newPhase(prev: Phase) = new AnalyzerPhase(prev) + } + + private class AnalyzerPhase(prev: Phase) extends Phase(prev) + { + def name = Analyzer.this.name + def run + { + val callback = callbackOption.get + val projectPath = callback.basePath + val projectPathString = Path.basePathString(projectPath).getOrElse({error("Could not determine base path for " + projectPath); ""}) + def relativize(file: File) = Path.relativize(projectPath, projectPathString, file) + + val outputDir = new File(global.settings.outdir.value) + val outputPathOption = relativize(outputDir) + if(outputPathOption.isEmpty) + error("Output directory " + outputDir.getAbsolutePath + " must be in the project directory.") + val outputPath = outputPathOption.get + + val superclassNames = callback.superclassNames.map(newTermName) + val superclassesAll = + for(name <- superclassNames) yield + { + try { Some(global.definitions.getClass(name)) } + catch { case fe: scala.tools.nsc.FatalError => callback.superclassNotFound(name.toString); None } + } + val superclasses = superclassesAll.filter(_.isDefined).map(_.get) + + for(unit <- currentRun.units) + { + // build dependencies structure + val sourceFile = unit.source.file.file + val sourcePathOption = relativize(sourceFile) + if(sourcePathOption.isEmpty) + error("Source file " + sourceFile.getAbsolutePath + " must be in the project directory.") + val sourcePath = sourcePathOption.get + callback.beginSource(sourcePath) + for(on <- unit.depends) + { + val onSource = on.sourceFile + if(onSource == null) + { + classFile(on) match + { + case Some(f) 
=> + { + f match + { + case ze: ZipArchive#Entry => callback.jarDependency(new File(ze.getArchive.getName), sourcePath) + case pf: PlainFile => + { + // ignore dependencies in the output directory: these are handled by source dependencies + if(Path.relativize(outputPath, pf.file).isEmpty) + callback.classDependency(pf.file, sourcePath) + } + case _ => () + } + } + case None => () + } + } + else + { + for(depPath <- relativize(onSource.file)) + callback.sourceDependency(depPath, sourcePath) + } + } + + // find subclasses and modules with main methods + for(clazz @ ClassDef(mods, n, _, _) <- unit.body) + { + val sym = clazz.symbol + if(sym != NoSymbol && mods.isPublic && !mods.isAbstract && !mods.isTrait && + !sym.isImplClass && sym.isStatic && !sym.isNestedClass) + { + val isModule = sym.isModuleClass + for(superclass <- superclasses.filter(sym.isSubClass)) + callback.foundSubclass(sourcePath, sym.fullNameString, superclass.fullNameString, isModule) + if(isModule && hasMainMethod(sym)) + callback.foundApplication(sourcePath, sym.fullNameString) + } + } + + // build list of generated classes + for(iclass <- unit.icode) + { + val sym = iclass.symbol + def addGenerated(separatorRequired: Boolean) + { + val classPath = pathOfClass(outputPath, sym, separatorRequired) + if(classPath.asFile.exists) + callback.generatedClass(sourcePath, classPath) + } + if(sym.isModuleClass && !sym.isImplClass) + { + if(isTopLevelModule(sym) && sym.linkedClassOfModule == NoSymbol) + addGenerated(false) + addGenerated(true) + } + else + addGenerated(false) + } + callback.endSource(sourcePath) + } + } + } + + private def classFile(sym: Symbol): Option[AbstractFile] = + { + import scala.tools.nsc.symtab.Flags + val name = sym.fullNameString(java.io.File.separatorChar) + (if (sym.hasFlag(Flags.MODULE)) "$" else "") + val entry = classPath.root.find(name, false) + if (entry ne null) + Some(entry.classFile) + else + None + } + + private def isTopLevelModule(sym: Symbol): Boolean = + atPhase 
(currentRun.picklerPhase.next) { + sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass + } + private def pathOfClass(outputPath: Path, s: Symbol, separatorRequired: Boolean): Path = + pathOfClass(outputPath, s, separatorRequired, ".class") + private def pathOfClass(outputPath: Path, s: Symbol, separatorRequired: Boolean, postfix: String): Path = + { + if(s.owner.isPackageClass && s.isPackageClass) + packagePath(outputPath, s) / postfix + else + pathOfClass(outputPath, s.owner.enclClass, true, s.simpleName + (if(separatorRequired) "$" else "") + postfix) + } + private def packagePath(outputPath: Path, s: Symbol): Path = + { + if(s.isEmptyPackageClass || s.isRoot) + outputPath + else + packagePath(outputPath, s.owner.enclClass) / s.simpleName.toString + } + + private def hasMainMethod(sym: Symbol): Boolean = + { + val main = sym.info.nonPrivateMember(newTermName("main"))//nme.main) + main.tpe match + { + case OverloadedType(pre, alternatives) => alternatives.exists(alt => isVisible(alt) && isMainType(pre.memberType(alt))) + case tpe => isVisible(main) && isMainType(main.owner.thisType.memberType(main)) + } + } + private def isVisible(sym: Symbol) = sym != NoSymbol && sym.isPublic && !sym.isDeferred + private def isMainType(tpe: Type) = + { + tpe match + { + // singleArgument is of type Symbol in 2.8.0 and type Type in 2.7.x + case MethodType(List(singleArgument), result) => isUnitType(result) && isStringArray(singleArgument) + case _ => false + } + } + private lazy val StringArrayType = appliedType(definitions.ArrayClass.typeConstructor, definitions.StringClass.tpe :: Nil) + // isStringArray is overloaded to handle the incompatibility between 2.7.x and 2.8.0 + private def isStringArray(tpe: Type): Boolean = tpe.typeSymbol == StringArrayType.typeSymbol + private def isStringArray(sym: Symbol): Boolean = isStringArray(sym.tpe) + private def isUnitType(tpe: Type) = tpe.typeSymbol == definitions.UnitClass +} \ No newline at end of file diff --git 
a/src/main/scala/sbt/AutoCompilerPlugins.scala b/src/main/scala/sbt/AutoCompilerPlugins.scala new file mode 100644 index 000000000..1b5a94864 --- /dev/null +++ b/src/main/scala/sbt/AutoCompilerPlugins.scala @@ -0,0 +1,33 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +trait AutoCompilerPlugins extends BasicScalaProject +{ + import Configurations.CompilerPlugin + abstract override def ivyConfigurations = + { + val superConfigurations = super.ivyConfigurations.toList + val newConfigurations = + if(superConfigurations.isEmpty) + { + if(useDefaultConfigurations) + CompilerPlugin :: Configurations.defaultMavenConfigurations + else + Configurations.Default :: CompilerPlugin :: Nil + } + else + CompilerPlugin :: superConfigurations + log.debug("Auto configurations: " + newConfigurations.toList.mkString(", ")) + Configurations.removeDuplicates(newConfigurations) + } + abstract override def compileOptions = compilerPlugins ++ super.compileOptions + + /** A PathFinder that provides the classpath to search for compiler plugins. */ + def pluginClasspath = fullClasspath(CompilerPlugin) + protected def compilerPlugins: List[CompileOption] = + ClasspathUtilities.compilerPlugins(pluginClasspath.get).map(plugin => new CompileOption("-Xplugin:" + plugin.getAbsolutePath)).toList + + def compilerPlugin(dependency: ModuleID) = dependency % "plugin->default(compile)" +} \ No newline at end of file diff --git a/src/main/scala/sbt/BasicProjectTypes.scala b/src/main/scala/sbt/BasicProjectTypes.scala new file mode 100644 index 000000000..f5020d411 --- /dev/null +++ b/src/main/scala/sbt/BasicProjectTypes.scala @@ -0,0 +1,578 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import StringUtilities.{appendable,nonEmpty} + +/** A project that provides a classpath. 
*/ +trait ClasspathProject extends Project +{ + /** The local classpath for this project.*/ + def projectClasspath(config: Configuration): PathFinder + + /** Returns the classpath of this project and the classpaths of all dependencies for the + * given configuration. Specifically, this concatentates projectClasspath(config) for all + * projects of type ClasspathProject in topologicalSort. */ + def fullClasspath(config: Configuration): PathFinder = + Path.lazyPathFinder + { + val set = new wrap.MutableSetWrapper(new java.util.LinkedHashSet[Path]) + for(project <- topologicalSort) + { + project match + { + case sp: ClasspathProject => set ++= sp.projectClasspath(config).get + case _ => () + } + } + set.toList + } +} +trait BasicDependencyProject extends BasicManagedProject with UnmanagedClasspathProject +{ + /** This returns the classpath for only this project for the given configuration.*/ + def projectClasspath(config: Configuration) = fullUnmanagedClasspath(config) +++ managedClasspath(config) +} +/** A project that provides a directory in which jars can be manually managed.*/ +trait UnmanagedClasspathProject extends ClasspathProject +{ + /** The location of the manually managed (unmanaged) dependency directory.*/ + def dependencyPath: Path + /** The classpath containing all jars in the unmanaged directory. */ + def unmanagedClasspath: PathFinder = + { + val base = descendents(dependencyPath, "*.jar") + if(scratch) + base +++ (info.projectPath * "*.jar") + else + base + } + /** The classpath containing all unmanaged classpath elements for the given configuration. 
This typically includes + * at least 'unmanagedClasspath'.*/ + def fullUnmanagedClasspath(config: Configuration): PathFinder +} + +/** A project that provides automatic dependency management.*/ +trait ManagedProject extends ClasspathProject +{ + trait ManagedOption extends ActionOption + final class ManagedFlagOption extends ManagedOption + /** An update option that specifies that unneeded files should be pruned from the managed library directory + * after updating. */ + final val Synchronize = new ManagedFlagOption + /** An update option that specifies that Ivy should validate configurations.*/ + final val Validate = new ManagedFlagOption + /** An update option that puts Ivy into a quieter logging mode.*/ + final val QuietUpdate = new ManagedFlagOption + /** An update option that adds the scala-tools.org releases repository to the set of resolvers, unless + * no inline repositories are present and an ivysettings.xml file is present.*/ + final val AddScalaToolsReleases = new ManagedFlagOption + /** An update option that specifies that an error should be generated if no inline dependencies, resolvers, + * XML file, or Ivy or Maven configuration files are present.*/ + final val ErrorIfNoConfiguration = new ManagedFlagOption + /** An update option that explicitly specifies the dependency manager to use. This can be used to + * override the default precendence. */ + final case class LibraryManager(m: Manager) extends ManagedOption + /** An update option that overrides the default Ivy cache location. 
*/ + final case class CacheDirectory(dir: Path) extends ManagedOption + final case class CheckScalaVersion(configs: Iterable[Configuration], checkExplicit: Boolean, filterImplicit: Boolean) extends ManagedOption + + private def withConfigurations(outputPattern: String, managedDependencyPath: Path, options: Seq[ManagedOption]) + (doWith: (IvyConfiguration, UpdateConfiguration) => Option[String]) = + { + var synchronize = false + var validate = false + var quiet = false + var addScalaTools = false + var errorIfNoConfiguration = false + var manager: Manager = new AutoDetectManager(projectID) + var cacheDirectory: Option[Path] = None + var checkScalaVersion: Option[IvyScala] = None + for(option <- options) + { + option match + { + case Synchronize => synchronize = true + case Validate => validate = true + case LibraryManager(m) => manager = m + case QuietUpdate => quiet = true + case AddScalaToolsReleases => addScalaTools = true + case ErrorIfNoConfiguration => errorIfNoConfiguration = true + case CacheDirectory(dir) => cacheDirectory = Some(dir) + case CheckScalaVersion(configs, checkExplicit, filterImplicit) => + checkScalaVersion = getScalaVersion.map(version => new IvyScala(version, configs, checkExplicit, filterImplicit)) + case _ => log.warn("Ignored unknown managed option " + option) + } + } + val ivyPaths = new IvyPaths(info.projectPath, managedDependencyPath, cacheDirectory) + val ivyFlags = new IvyFlags(validate, addScalaTools, errorIfNoConfiguration) + val ivyConfiguration = new IvyConfiguration(ivyPaths, manager, ivyFlags, checkScalaVersion, log) + val updateConfiguration = new UpdateConfiguration(outputPattern, synchronize, quiet) + doWith(ivyConfiguration, updateConfiguration) + } + private def getScalaVersion = + { + val v = scalaVersion.value + if(v.isEmpty) None + else Some(v) + } + private def withIvyTask(doTask: => Option[String]) = + task + { + try { doTask } + catch + { + case e: NoClassDefFoundError => + log.trace(e) + Some("Apache Ivy is required 
for dependency management (" + e.toString + ")") + } + } + def updateTask(outputPattern: String, managedDependencyPath: Path, options: ManagedOption*): Task = + updateTask(outputPattern, managedDependencyPath, options) + def updateTask(outputPattern: String, managedDependencyPath: Path, options: => Seq[ManagedOption]) = + withIvyTask(withConfigurations(outputPattern, managedDependencyPath, options)(ManageDependencies.update)) + + def publishTask(publishConfiguration: => PublishConfiguration, options: => Seq[ManagedOption]) = + withIvyTask + { + val publishConfig = publishConfiguration + import publishConfig._ + withConfigurations("", managedDependencyPath, options) { (ivyConf, ignore) => + val delivered = if(publishIvy) Some(deliveredPattern) else None + ManageDependencies.publish(ivyConf, resolverName, srcArtifactPatterns, delivered, configurations) } + } + def deliverTask(deliverConfiguration: => PublishConfiguration, options: => Seq[ManagedOption]) = + withIvyTask + { + val deliverConfig = deliverConfiguration + import deliverConfig._ + withConfigurations("", managedDependencyPath, options) { (ivyConf, updateConf) => + ManageDependencies.deliver(ivyConf, updateConf, status, deliveredPattern, extraDependencies, configurations) + } + } + def makePomTask(output: => Path, extraDependencies: => Iterable[ModuleID], configurations: => Option[Iterable[Configuration]], options: => Seq[ManagedOption]) = + withIvyTask(withConfigurations("", managedDependencyPath, options) { (ivyConf, ignore) => + ManageDependencies.makePom(ivyConf, extraDependencies, configurations, output.asFile) }) + + def cleanCacheTask(managedDependencyPath: Path, options: => Seq[ManagedOption]) = + withIvyTask(withConfigurations("", managedDependencyPath, options) { (ivyConf, ignore) => ManageDependencies.cleanCache(ivyConf) }) + + def cleanLibTask(managedDependencyPath: Path) = task { FileUtilities.clean(managedDependencyPath.get, log) } + + /** This is the public ID of the project (used for 
publishing, for example) */ + def moduleID: String = normalizedName + appendable(crossScalaVersionString) + /** This is the full public ID of the project (used for publishing, for example) */ + def projectID: ModuleID = ModuleID(organization, moduleID, version.toString) + + /** This is the default name for artifacts (such as jars) without any version string.*/ + def artifactID = moduleID + /** This is the default name for artifacts (such as jars) including the version string.*/ + def artifactBaseName = artifactID + "-" + version.toString + def artifacts: Iterable[Artifact] + + def managedDependencyPath: Path + /** The managed classpath for the given configuration. This can be overridden to add jars from other configurations + * so that the Ivy 'extends' mechanism is not required. That way, the jars are only copied to one configuration.*/ + def managedClasspath(config: Configuration): PathFinder = configurationClasspath(config) + /** All dependencies in the given configuration. */ + final def configurationClasspath(config: Configuration): PathFinder = descendents(configurationPath(config), "*.jar") + /** The base path to which dependencies in configuration 'config' are downloaded.*/ + def configurationPath(config: Configuration): Path = managedDependencyPath / config.toString + + import StringUtilities.nonEmpty + implicit def toGroupID(groupID: String): GroupID = + { + nonEmpty(groupID, "Group ID") + new GroupID(groupID, ScalaVersion.currentString) + } + implicit def toRepositoryName(name: String): RepositoryName = + { + nonEmpty(name, "Repository name") + new RepositoryName(name) + } + implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable = + { + require(m.configurations.isEmpty, "Configurations already specified for module " + m) + new ModuleIDConfigurable(m) + } + + /** Creates a new configuration with the given name.*/ + def config(name: String) = new Configuration(name) +} +/** This class groups required configuration for the deliver and publish 
tasks. */ +trait PublishConfiguration extends NotNull +{ + /** The name of the resolver to which publishing should be done.*/ + def resolverName: String + /** The Ivy pattern used to determine the delivered Ivy file location. An example is + * (outputPath / "[artifact]-[revision].[ext]").relativePath. */ + def deliveredPattern: String + /** Ivy patterns used to find artifacts for publishing. An example pattern is + * (outputPath / "[artifact]-[revision].[ext]").relativePath */ + def srcArtifactPatterns: Iterable[String] + /** Additional dependencies to include for delivering/publishing only. These are typically dependencies on + * subprojects. */ + def extraDependencies: Iterable[ModuleID] + /** The status to use when delivering or publishing. This might be "release" or "integration" or another valid Ivy status. */ + def status: String + /** The configurations to include in the publish/deliver action: specify none for all configurations. */ + def configurations: Option[Iterable[Configuration]] + /** True if the Ivy file should be published. */ + def publishIvy: Boolean +} +object ManagedStyle extends Enumeration +{ + val Maven, Ivy = Value +} +import ManagedStyle.{Ivy, Maven, Value => ManagedType} +trait BasicManagedProject extends ManagedProject with ReflectiveManagedProject with BasicDependencyPaths +{ + import BasicManagedProject._ + /** The dependency manager that represents inline declarations. The default manager packages the information + * from 'ivyXML', 'projectID', 'repositories', and 'libraryDependencies' and does not typically need to be + * be overridden. */ + def manager = new SimpleManager(ivyXML, true, projectID, repositories, ivyConfigurations, defaultConfiguration, artifacts, libraryDependencies.toList: _*) + + /** The pattern for Ivy to use when retrieving dependencies into the local project. 
Classpath management + * depends on the first directory being [conf] and the extension being [ext].*/ + def outputPattern = "[conf]/[artifact](-[revision]).[ext]" + /** Override this to specify the publications, configurations, and/or dependencies sections of an Ivy file. + * See http://code.google.com/p/simple-build-tool/wiki/LibraryManagement for details.*/ + def ivyXML: scala.xml.NodeSeq = scala.xml.NodeSeq.Empty + /** The base options passed to the 'update' action. */ + def baseUpdateOptions = checkScalaVersion :: Validate :: Synchronize :: QuietUpdate :: AddScalaToolsReleases :: Nil + override def ivyConfigurations: Iterable[Configuration] = + { + val reflective = super.ivyConfigurations + if(useDefaultConfigurations) + { + if(reflective.isEmpty && !useIntegrationTestConfiguration) + Nil + else + { + val base = Configurations.defaultMavenConfigurations ++ reflective + val allConfigurations = + if(useIntegrationTestConfiguration) + base ++ List(Configurations.IntegrationTest) + else + base + Configurations.removeDuplicates(allConfigurations) + } + } + else + reflective + } + def useIntegrationTestConfiguration = false + def defaultConfiguration: Option[Configuration] = Some(Configurations.DefaultConfiguration(useDefaultConfigurations)) + def useMavenConfigurations = true // TBD: set to true and deprecate + def useDefaultConfigurations = useMavenConfigurations + def managedStyle: ManagedType = Maven + protected implicit final val defaultPatterns: RepositoryHelpers.Patterns = + { + managedStyle match + { + case Maven => Resolver.mavenStylePatterns + case Ivy => Resolver.ivyStylePatterns + } + } + /** The options provided to the 'update' action. This is by default the options in 'baseUpdateOptions'. 
+ * If 'manager' has any dependencies, resolvers, or inline Ivy XML (which by default happens when inline + * dependency management is used), it is passed as the dependency manager.*/ + def updateOptions: Seq[ManagedOption] = + { + val m = manager + if(m.dependencies.isEmpty && m.resolvers.isEmpty && ivyXML.isEmpty && m.artifacts.isEmpty && m.configurations.isEmpty) + baseUpdateOptions + else + LibraryManager(m) :: baseUpdateOptions + } + def deliverOptions: Seq[ManagedOption] = updateOptions.filter { case _: CheckScalaVersion => false; case _ => true } + def publishOptions: Seq[ManagedOption] = deliverOptions + /** True if the 'provided' configuration should be included on the 'compile' classpath. The default value is true.*/ + def includeProvidedWithCompile = true + /** True if the default implicit extensions should be used when determining classpaths. The default value is true. */ + def defaultConfigurationExtensions = true + /** If true, verify that explicit dependencies on Scala libraries use the same version as scala.version. */ + def checkExplicitScalaDependencies = true + /** If true, filter dependencies on scala-library and scala-compiler. This is true by default to avoid conflicts with + * the jars provided by sbt. You can set this to false to download these jars. 
Overriding checkScalaInConfigurations might + * be more appropriate, however.*/ + def filterScalaJars = true + /** The configurations to check/filter.*/ + def checkScalaInConfigurations: Iterable[Configuration] = + { + val all = ivyConfigurations + if(all.isEmpty) + Configurations.defaultMavenConfigurations + else + all + } + def checkScalaVersion = CheckScalaVersion(checkScalaInConfigurations, checkExplicitScalaDependencies, filterScalaJars) + def defaultPublishRepository: Option[Resolver] = + { + reflectiveRepositories.get("publish-to") orElse + info.parent.flatMap + { + case managed: BasicManagedProject => managed.defaultPublishRepository + case _ => None + } + } + /** Includes the Provided configuration on the Compile classpath, the Compile configuration on the Runtime classpath, + * and Compile and Runtime on the Test classpath. Including Provided can be disabled by setting + * includeProvidedWithCompile to false. Including Compile and Runtime can be disabled by setting + * defaultConfigurationExtensions to false.*/ + override def managedClasspath(config: Configuration) = + { + import Configurations.{Compile, CompilerPlugin, Default, Provided, Runtime, Test} + val baseClasspath = configurationClasspath(config) + config match + { + case Compile => + val baseCompileClasspath = baseClasspath +++ managedClasspath(Default) + if(includeProvidedWithCompile) + baseCompileClasspath +++ managedClasspath(Provided) + else + baseCompileClasspath + case Runtime if defaultConfigurationExtensions => baseClasspath +++ managedClasspath(Compile) + case Test if defaultConfigurationExtensions => baseClasspath +++ managedClasspath(Runtime) + case _ => baseClasspath + } + } + + protected def updateAction = updateTask(outputPattern, managedDependencyPath, updateOptions) describedAs UpdateDescription + protected def cleanLibAction = cleanLibTask(managedDependencyPath) describedAs CleanLibDescription + protected def cleanCacheAction = cleanCacheTask(managedDependencyPath, 
updateOptions) describedAs CleanCacheDescription + + protected def deliverProjectDependencies: Iterable[ModuleID] = + { + val interDependencies = new scala.collection.mutable.ListBuffer[ModuleID] + dependencies.foreach(dep => dep match { case mp: ManagedProject => interDependencies += mp.projectID; case _ => () }) + if(filterScalaJars) + interDependencies ++= deliverScalaDependencies + interDependencies.readOnly + } + protected def deliverScalaDependencies: Iterable[ModuleID] = Nil + protected def makePomAction = makePomTask(pomPath, deliverProjectDependencies, None, updateOptions) + protected def deliverLocalAction = deliverTask(publishLocalConfiguration, deliverOptions) + protected def publishLocalAction = + { + val dependencies = deliverLocal :: publishPomDepends + publishTask(publishLocalConfiguration, publishOptions) dependsOn(dependencies : _*) + } + protected def publishLocalConfiguration = new DefaultPublishConfiguration("local", "release", true) + protected def deliverAction = deliverTask(publishConfiguration, deliverOptions) + protected def publishAction = + { + val dependencies = deliver :: publishPomDepends + publishTask(publishConfiguration, publishOptions) dependsOn(dependencies : _*) + } + private def publishPomDepends = if(managedStyle == Maven) makePom :: Nil else Nil + protected def publishConfiguration = + { + val repository = defaultPublishRepository.getOrElse(error("Repository to publish to not specified.")) + val publishIvy = managedStyle != Maven + new DefaultPublishConfiguration(repository, "release", publishIvy) + } + protected class DefaultPublishConfiguration(val resolverName: String, val status: String, val publishIvy: Boolean) extends PublishConfiguration + { + def this(resolver: Resolver, status: String, publishIvy: Boolean) = this(resolver.name, status, publishIvy) + def this(resolverName: String, status: String) = this(resolverName, status, true) + def this(resolver: Resolver, status: String) = this(resolver.name, status) + + 
protected def deliveredPathPattern = outputPath / "[artifact]-[revision].[ext]" + def deliveredPattern = deliveredPathPattern.relativePath + def srcArtifactPatterns: Iterable[String] = + { + val pathPatterns = + (outputPath / "[artifact]-[revision]-[type].[ext]") :: + (outputPath / "[artifact]-[revision].[ext]") :: + Nil + pathPatterns.map(_.relativePath) + } + def extraDependencies: Iterable[ModuleID] = Nil//deliverProjectDependencies + /** The configurations to include in the publish/deliver action: specify none for all public configurations. */ + def configurations: Option[Iterable[Configuration]] = None + } + + lazy val update = updateAction + lazy val makePom = makePomAction + lazy val deliverLocal = deliverLocalAction + lazy val publishLocal = publishLocalAction + lazy val deliver = deliverAction + lazy val publish = publishAction + lazy val cleanLib = cleanLibAction + lazy val cleanCache = cleanCacheAction +} + +object BasicManagedProject +{ + val UpdateDescription = + "Resolves and retrieves automatically managed dependencies." + val CleanLibDescription = + "Deletes the managed library directory." + val CleanCacheDescription = + "Deletes the cache of artifacts downloaded for automatically managed dependencies." 
+} + +trait BasicDependencyPaths extends ManagedProject +{ + import BasicDependencyPaths._ + def dependencyDirectoryName = DefaultDependencyDirectoryName + def managedDirectoryName = DefaultManagedDirectoryName + def pomName = artifactBaseName + PomExtension + def dependencyPath = path(dependencyDirectoryName) + def managedDependencyPath = crossPath(managedDependencyRootPath) + def managedDependencyRootPath: Path = managedDirectoryName + def pomPath = outputPath / pomName +} +object BasicDependencyPaths +{ + val DefaultManagedDirectoryName = "lib_managed" + val DefaultManagedSourceDirectoryName = "src_managed" + val DefaultDependencyDirectoryName = "lib" + val PomExtension = ".pom" +} + +object StringUtilities +{ + def normalize(s: String) = s.toLowerCase.replaceAll("""\s+""", "-") + def nonEmpty(s: String, label: String) + { + require(s.trim.length > 0, label + " cannot be empty.") + } + def appendable(s: String) = if(s.isEmpty) "" else "_" + s +} +final class GroupID private[sbt] (groupID: String, scalaVersion: String) extends NotNull +{ + def % (artifactID: String) = groupArtifact(artifactID) + def %% (artifactID: String) = + { + require(!scalaVersion.isEmpty, "Cannot use %% when the sbt launcher is not used.") + groupArtifact(artifactID + appendable(scalaVersion)) + } + private def groupArtifact(artifactID: String) = + { + nonEmpty(artifactID, "Artifact ID") + new GroupArtifactID(groupID, artifactID) + } +} +final class GroupArtifactID private[sbt] (groupID: String, artifactID: String) extends NotNull +{ + def % (revision: String): ModuleID = + { + nonEmpty(revision, "Revision") + ModuleID(groupID, artifactID, revision, None) + } +} +final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID) extends NotNull +{ + def % (configurations: String): ModuleID = + { + nonEmpty(configurations, "Configurations") + import moduleID._ + ModuleID(organization, name, revision, Some(configurations)) + } +} +final class RepositoryName private[sbt] (name: String) extends 
NotNull +{ + def at (location: String) = + { + nonEmpty(location, "Repository location") + new MavenRepository(name, location) + } +} + +import scala.collection.{Map, mutable} +/** A Project that determines its tasks by reflectively finding all vals with a type +* that conforms to Task.*/ +trait ReflectiveTasks extends Project +{ + def tasks: Map[String, Task] = reflectiveTaskMappings + def reflectiveTaskMappings : Map[String, Task] = Reflective.reflectiveMappings[Task](this) +} +/** A Project that determines its method tasks by reflectively finding all vals with a type +* that conforms to MethodTask.*/ +trait ReflectiveMethods extends Project +{ + def methods: Map[String, MethodTask] = reflectiveMethodMappings + def reflectiveMethodMappings : Map[String, MethodTask] = Reflective.reflectiveMappings[MethodTask](this) +} +/** A Project that determines its dependencies on other projects by reflectively +* finding all vals with a type that conforms to Project.*/ +trait ReflectiveModules extends Project +{ + override def subProjects: Map[String, Project] = reflectiveModuleMappings + def reflectiveModuleMappings : Map[String, Project] = Reflective.reflectiveMappings[Project](this) +} +/** A Project that determines its dependencies on other projects by reflectively +* finding all vals with a type that conforms to Project and determines its tasks +* by reflectively finding all vals with a type that conforms to Task.*/ +trait ReflectiveProject extends ReflectiveModules with ReflectiveTasks with ReflectiveMethods + +/** This Project subclass is used to contain other projects as dependencies.*/ +class ParentProject(val info: ProjectInfo) extends BasicDependencyProject +{ + def dependencies = info.dependencies ++ subProjects.values.toList + /** The directories to which a project writes are listed here and is used + * to check a project and its dependencies for collisions.*/ + override def outputDirectories = managedDependencyPath :: outputPath :: Nil + def 
fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath +} + +object Reflective +{ + def reflectiveMappings[T](obj: AnyRef)(implicit m: scala.reflect.Manifest[T]): Map[String, T] = + { + val mappings = new mutable.OpenHashMap[String, T] + for ((name, value) <- ReflectUtilities.allVals[T](obj)) + mappings(ReflectUtilities.transformCamelCase(name, '-')) = value + mappings + } +} + +/** A Project that determines its library dependencies by reflectively finding all vals with a type +* that conforms to ModuleID.*/ +trait ReflectiveLibraryDependencies extends ManagedProject +{ + def excludeIDs: Iterable[ModuleID] = projectID :: Nil + def libraryDependencies: Set[ModuleID] = reflectiveLibraryDependencies + def reflectiveLibraryDependencies : Set[ModuleID] = Set[ModuleID](Reflective.reflectiveMappings[ModuleID](this).values.toList: _*) -- excludeIDs +} + +trait ReflectiveConfigurations extends Project +{ + def ivyConfigurations: Iterable[Configuration] = reflectiveIvyConfigurations + def reflectiveIvyConfigurations: Set[Configuration] = Configurations.removeDuplicates(Reflective.reflectiveMappings[Configuration](this).values.toList) +} +trait ReflectiveArtifacts extends ManagedProject +{ + def managedStyle: ManagedType + def artifacts: Set[Artifact] = + { + val reflective = reflectiveArtifacts + managedStyle match + { + case Maven =>reflective ++ List(Artifact(artifactID, "pom", "pom")) + case Ivy => reflective + } + } + def reflectiveArtifacts: Set[Artifact] = Set(Reflective.reflectiveMappings[Artifact](this).values.toList: _*) +} +/** A Project that determines its library dependencies by reflectively finding all vals with a type +* that conforms to ModuleID.*/ +trait ReflectiveRepositories extends Project +{ + def repositories: Set[Resolver] = + { + val reflective = Set[Resolver](reflectiveRepositories.values.toList: _*) + info.parent match + { + case Some(p: ReflectiveRepositories) => p.repositories ++ reflective + case None => reflective + } + } + def 
reflectiveRepositories: Map[String, Resolver] = Reflective.reflectiveMappings[Resolver](this) +} + +trait ReflectiveManagedProject extends ReflectiveProject with ReflectiveArtifacts with ReflectiveRepositories with ReflectiveLibraryDependencies with ReflectiveConfigurations \ No newline at end of file diff --git a/src/main/scala/sbt/BuilderProject.scala b/src/main/scala/sbt/BuilderProject.scala new file mode 100644 index 000000000..0cf3a82a1 --- /dev/null +++ b/src/main/scala/sbt/BuilderProject.scala @@ -0,0 +1,210 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah, David MacIver + */ +package sbt + +import BasicProjectPaths._ + +sealed abstract class InternalProject extends Project +{ + override final def historyPath = None + override def tasks: Map[String, Task] = Map.empty + override final protected def disableCrossPaths = false + override final def shouldCheckOutputDirectories = false +} +private sealed abstract class BasicBuilderProject extends InternalProject with SimpleScalaProject +{ + def sourceFilter = "*.scala" | "*.java" + def jarFilter: NameFilter = "*.jar" + def compilePath = outputPath / DefaultMainCompileDirectoryName + def mainResourcesPath = path(DefaultResourcesDirectoryName) + def dependencyPath = path(DefaultDependencyDirectoryName) + def libraries = descendents(dependencyPath, jarFilter) + override final def dependencies = Nil + + protected final def logInfo(messages: String*): Unit = atInfo { messages.foreach(message => log.info(message)) } + protected final def atInfo(action: => Unit) + { + val oldLevel = log.getLevel + log.setLevel(Level.Info) + action + log.setLevel(oldLevel) + } + + def projectClasspath = compilePath +++ libraries +++ sbtJarPath + def sbtJarPath = Path.lazyPathFinder { Path.fromFile(FileUtilities.sbtJar) :: Nil } + + abstract class BuilderCompileConfiguration extends AbstractCompileConfiguration + { + def projectPath = info.projectPath + def log = BasicBuilderProject.this.log + def options = (Deprecation 
:: Unchecked :: Nil).map(_.asString) + def javaOptions = Nil + def maxErrors = ScalaProject.DefaultMaximumCompileErrors + def compileOrder = CompileOrder.Mixed + } + def definitionCompileConfiguration = + new BuilderCompileConfiguration + { + def label = "builder" + def sources = (info.projectPath * sourceFilter) +++ path(DefaultSourceDirectoryName).descendentsExcept(sourceFilter, defaultExcludes) + def outputDirectory = compilePath + def classpath = projectClasspath + def analysisPath = outputPath / DefaultMainAnalysisDirectoryName + } + + def tpe: String + + val definitionCompileConditional = new BuilderCompileConditional(definitionCompileConfiguration, tpe) + final class BuilderCompileConditional(config: BuilderCompileConfiguration, tpe: String) extends AbstractCompileConditional(config) + { + type AnalysisType = BuilderCompileAnalysis + override protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) = + new BuilderCompileAnalysis(analysisPath, projectPath, log) + override protected def execute(cAnalysis: ConditionalAnalysis): Option[String] = + { + if(cAnalysis.dirtySources.isEmpty) + None + else + { + definitionChanged() + logInfo( + "Recompiling " + tpe + "...", + "\t" + cAnalysis.toString) + super.execute(cAnalysis) + } + } + protected def analysisCallback: AnalysisCallback = + new BasicAnalysisCallback(info.projectPath, List(Project.ProjectClassName), analysis) + { + def foundApplication(sourcePath: Path, className: String) {} + def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean) + { + if(superclassName == Project.ProjectClassName && !isModule) + { + log.debug("Found " + tpe + " " + subclassName) + analysis.addProjectDefinition(sourcePath, subclassName) + } + } + } + } + protected def definitionChanged() {} + lazy val compile = compileTask + def compileTask = task { definitionCompileConditional.run } + + def projectDefinition: Either[String, Option[String]] = + { + 
definitionCompileConditional.analysis.allProjects.toList match + { + case Nil => + log.debug("No " + tpe + "s detected using default project.") + Right(None) + case singleDefinition :: Nil => Right(Some(singleDefinition)) + case multipleDefinitions =>Left(multipleDefinitions.mkString("Multiple " + tpe + "s detected: \n\t","\n\t","\n")) + } + } + override final def methods = Map.empty +} +/** The project definition used to build project definitions. */ +private final class BuilderProject(val info: ProjectInfo, val pluginPath: Path, override protected val logImpl: Logger) extends BasicBuilderProject +{ + private lazy val pluginProject = + { + if(pluginPath.exists) + Some(new PluginBuilderProject(ProjectInfo(pluginPath.asFile, Nil, None))) + else + None + } + override def projectClasspath = super.projectClasspath +++ pluginProject.map(_.pluginClasspath).getOrElse(Path.emptyPathFinder) + def tpe = "project definition" + + override def compileTask = super.compileTask dependsOn(pluginProject.map(_.syncPlugins).toList : _*) + + final class PluginBuilderProject(val info: ProjectInfo) extends BasicBuilderProject + { + override protected def logImpl = BuilderProject.this.log + val pluginUptodate = propertyOptional[Boolean](false) + def tpe = "plugin definition" + def managedSourcePath = path(BasicDependencyPaths.DefaultManagedSourceDirectoryName) + def managedDependencyPath = crossPath(BasicDependencyPaths.DefaultManagedDirectoryName) + override protected def definitionChanged() { setUptodate(false) } + private def setUptodate(flag: Boolean) + { + pluginUptodate() = flag + saveEnvironment() + } + + private def pluginTask(f: => Option[String]) = task { if(!pluginUptodate.value) f else None } + + lazy val syncPlugins = pluginTask(sync()) dependsOn(extractSources) + lazy val extractSources = pluginTask(extract()) dependsOn(update) + lazy val update = pluginTask(loadAndUpdate()) dependsOn(compile) + + private def sync() = pluginCompileConditional.run orElse { setUptodate(true); 
None } + private def extract() = + { + FileUtilities.clean(managedSourcePath, log) orElse + Control.lazyFold(plugins.get.toList) { jar => + Control.thread(FileUtilities.unzip(jar, extractTo(jar), sourceFilter, log)) { extracted => + if(!extracted.isEmpty) + logInfo("\tExtracted source plugin " + jar + " ...") + None + } + } + } + private def loadAndUpdate() = + { + Control.thread(projectDefinition) { + case Some(definition) => + logInfo("\nUpdating plugins") + val pluginInfo = ProjectInfo(info.projectPath.asFile, Nil, None) + val pluginBuilder = Project.constructProject(pluginInfo, Project.getProjectClass[PluginDefinition](definition, projectClasspath)) + pluginBuilder.projectName() = "Plugin builder" + pluginBuilder.projectVersion() = OpaqueVersion("1.0") + val result = pluginBuilder.update.run + if(result.isEmpty) + { + atInfo { + log.success("Plugins updated successfully.") + log.info("") + } + } + result + case None => None + } + } + def extractTo(jar: Path) = + { + val name = jar.asFile.getName + managedSourcePath / name.substring(0, name.length - ".jar".length) + } + def plugins = descendents(managedDependencyPath, jarFilter) + def pluginClasspath = plugins +++ pluginCompileConfiguration.outputDirectory + + lazy val pluginCompileConditional = new BuilderCompileConditional(pluginCompileConfiguration, "plugin") + lazy val pluginCompileConfiguration = + new BuilderCompileConfiguration + { + def label = "plugin builder" + def sources = descendents(managedSourcePath, sourceFilter) + def outputDirectory = outputPath / "plugin-classes" + def classpath = sbtJarPath + def analysisPath = outputPath / "plugin-analysis" + } + } +} +class PluginDefinition(val info: ProjectInfo) extends InternalProject with BasicManagedProject +{ + override final def outputPattern = "[artifact](-[revision]).[ext]" + override final val tasks = Map("update" -> update) + override def projectClasspath(config: Configuration) = Path.emptyPathFinder + override def dependencies = info.dependencies 
+} +class PluginProject(info: ProjectInfo) extends DefaultProject(info) +{ + override def unmanagedClasspath = super.unmanagedClasspath +++ Path.lazyPathFinder(Path.fromFile(FileUtilities.sbtJar) :: Nil) + override def packageAction = packageSrc + override def packageSrcJar = jarPath + override def useMavenConfigurations = true + override def managedStyle = ManagedStyle.Maven +} \ No newline at end of file diff --git a/src/main/scala/sbt/ClasspathUtilities.scala b/src/main/scala/sbt/ClasspathUtilities.scala new file mode 100644 index 000000000..48dc42ade --- /dev/null +++ b/src/main/scala/sbt/ClasspathUtilities.scala @@ -0,0 +1,197 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File +import java.net.{URI, URL, URLClassLoader} +import java.util.Collections +import scala.collection.Set +import scala.collection.mutable.{HashSet, ListBuffer} + +private[sbt] object ClasspathUtilities +{ + def toClasspath(finder: PathFinder): Array[URL] = toClasspath(finder.get) + def toClasspath(paths: Iterable[Path]): Array[URL] = paths.map(_.asURL).toSeq.toArray + def toLoader(finder: PathFinder): ClassLoader = toLoader(finder.get) + def toLoader(paths: Iterable[Path]): ClassLoader = new URLClassLoader(toClasspath(paths), getClass.getClassLoader) + + def isArchive(path: Path): Boolean = isArchive(path.asFile) + def isArchive(file: File): Boolean = isArchiveName(file.getName) + def isArchiveName(fileName: String) = fileName.endsWith(".jar") || fileName.endsWith(".zip") + // Partitions the given classpath into (jars, directories) + def separate(paths: Iterable[File]): (Iterable[File], Iterable[File]) = paths.partition(isArchive) + // Partitions the given classpath into (jars, directories) + def separatePaths(paths: Iterable[Path]) = separate(paths.map(_.asFile.getCanonicalFile)) + private[sbt] def buildSearchPaths(classpath: Iterable[Path]): (wrap.Set[File], wrap.Set[File]) = + { + val (jars, dirs) = separatePaths(classpath) + 
(linkedSet(jars ++ extraJars.toList), linkedSet(dirs ++ extraDirs.toList)) + } + private[sbt] def onClasspath(classpathJars: wrap.Set[File], classpathDirectories: wrap.Set[File], file: File): Boolean = + { + val f = file.getCanonicalFile + if(ClasspathUtilities.isArchive(f)) + classpathJars.contains(f) + else + classpathDirectories.toList.find(Path.relativize(_, f).isDefined).isDefined + } + + /** Returns all entries in 'classpath' that correspond to a compiler plugin.*/ + def compilerPlugins(classpath: Iterable[Path]): Iterable[File] = + { + val loader = new URLClassLoader(classpath.map(_.asURL).toList.toArray) + wrap.Wrappers.toList(loader.getResources("scalac-plugin.xml")).flatMap(asFile) + } + /** Converts the given URL to a File. If the URL is for an entry in a jar, the File for the jar is returned. */ + private[sbt] def asFile(url: URL) = + { + try + { + url.getProtocol match + { + case "file" => new File(url.toURI) :: Nil + case "jar" => + val path = url.getPath + val end = path.indexOf('!') + new File(new URI(if(end == -1) path else path.substring(0, end))) :: Nil + case _ => Nil + } + } + catch { case e: Exception => Nil } + } + + private lazy val (extraJars, extraDirs) = + { + import scala.tools.nsc.GenericRunnerCommand + val settings = (new GenericRunnerCommand(Nil, message => error(message))).settings + val bootPaths = FileUtilities.pathSplit(settings.bootclasspath.value).map(p => new File(p)).toList + val (bootJars, bootDirs) = separate(bootPaths) + val extJars = + { + val buffer = new ListBuffer[File] + def findJars(dir: File) + { + buffer ++= dir.listFiles(new SimpleFileFilter(isArchive)) + for(dir <- dir.listFiles(DirectoryFilter)) + findJars(dir) + } + for(path <- FileUtilities.pathSplit(settings.extdirs.value); val dir = new File(path) if dir.isDirectory) + findJars(dir) + buffer.readOnly.map(_.getCanonicalFile) + } + (linkedSet(extJars ++ bootJars), linkedSet(bootDirs)) + } + private def linkedSet[T](s: Iterable[T]): wrap.Set[T] = + { + val set = 
new wrap.MutableSetWrapper(new java.util.LinkedHashSet[T]) + set ++= s + set.readOnly + } +} + +private abstract class LoaderBase(urls: Array[URL], parent: ClassLoader) extends URLClassLoader(urls, parent) with NotNull +{ + require(parent != null) // included because a null parent is legitimate in Java + @throws(classOf[ClassNotFoundException]) + override final def loadClass(className: String, resolve: Boolean): Class[_] = + { + val loaded = findLoadedClass(className) + val found = + if(loaded == null) + doLoadClass(className) + else + loaded + + if(resolve) + resolveClass(found) + found + } + protected def doLoadClass(className: String): Class[_] + protected final def selfLoadClass(className: String): Class[_] = super.loadClass(className, false) +} +private class IntermediateLoader(urls: Array[URL], parent: ClassLoader) extends LoaderBase(urls, parent) with NotNull +{ + def doLoadClass(className: String): Class[_] = + { + // if this loader is asked to load an sbt class, it must be because the project we are building is sbt itself, + // so we want to load the version of classes on the project classpath, not the parent + if(className.startsWith(Loaders.SbtPackage)) + findClass(className) + else + selfLoadClass(className) + } +} +/** Delegates class loading to `parent` for all classes included by `filter`. 
An attempt to load classes excluded by `filter` +* results in a `ClassNotFoundException`.*/ +private class FilteredLoader(parent: ClassLoader, filter: ClassFilter) extends ClassLoader(parent) with NotNull +{ + require(parent != null) // included because a null parent is legitimate in Java + def this(parent: ClassLoader, excludePackages: Iterable[String]) = this(parent, new ExcludePackagesFilter(excludePackages)) + + @throws(classOf[ClassNotFoundException]) + override final def loadClass(className: String, resolve: Boolean): Class[_] = + { + if(filter.include(className)) + super.loadClass(className, resolve) + else + throw new ClassNotFoundException(className) + } +} +private class SelectiveLoader(urls: Array[URL], parent: ClassLoader, filter: ClassFilter) extends URLClassLoader(urls, parent) with NotNull +{ + require(parent != null) // included because a null parent is legitimate in Java + def this(urls: Array[URL], parent: ClassLoader, includePackages: Iterable[String]) = this(urls, parent, new IncludePackagesFilter(includePackages)) + + @throws(classOf[ClassNotFoundException]) + override final def loadClass(className: String, resolve: Boolean): Class[_] = + { + if(filter.include(className)) + super.loadClass(className, resolve) + else + { + val loaded = parent.loadClass(className) + if(resolve) + resolveClass(loaded) + loaded + } + } +} +private trait ClassFilter +{ + def include(className: String): Boolean +} +private abstract class PackageFilter(packages: Iterable[String]) extends ClassFilter +{ + require(packages.forall(_.endsWith("."))) + protected final def matches(className: String): Boolean = packages.exists(className.startsWith) +} +private class ExcludePackagesFilter(exclude: Iterable[String]) extends PackageFilter(exclude) +{ + def include(className: String): Boolean = !matches(className) +} +private class IncludePackagesFilter(include: Iterable[String]) extends PackageFilter(include) +{ + def include(className: String): Boolean = matches(className) +} 
+ +private class LazyFrameworkLoader(runnerClassName: String, urls: Array[URL], parent: ClassLoader, grandparent: ClassLoader) + extends LoaderBase(urls, parent) with NotNull +{ + def doLoadClass(className: String): Class[_] = + { + if(Loaders.isNestedOrSelf(className, runnerClassName)) + findClass(className) + else if(Loaders.isSbtClass(className)) // we circumvent the parent loader because we know that we want the + grandparent.loadClass(className) // version of sbt that is currently the builder (not the project being built) + else + parent.loadClass(className) + } +} +private object Loaders +{ + val SbtPackage = "sbt." + def isNestedOrSelf(className: String, checkAgainst: String) = + className == checkAgainst || className.startsWith(checkAgainst + "$") + def isSbtClass(className: String) = className.startsWith(Loaders.SbtPackage) +} \ No newline at end of file diff --git a/src/main/scala/sbt/Compile.scala b/src/main/scala/sbt/Compile.scala new file mode 100644 index 000000000..3dd80ec86 --- /dev/null +++ b/src/main/scala/sbt/Compile.scala @@ -0,0 +1,279 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +object CompileOrder extends Enumeration +{ + val Mixed, JavaThenScala, ScalaThenJava = Value +} +sealed abstract class CompilerCore +{ + val ClasspathOptionString = "-classpath" + val OutputOptionString = "-d" + + // Returns false if there were errors, true if there were not. + protected def process(args: List[String], log: Logger): Boolean + // Returns false if there were errors, true if there were not. 
+ protected def processJava(args: List[String], log: Logger): Boolean = true + def actionStartMessage(label: String): String + def actionNothingToDoMessage: String + def actionSuccessfulMessage: String + def actionUnsuccessfulMessage: String + + final def apply(label: String, sources: Iterable[Path], classpathString: String, outputDirectory: Path, options: Seq[String], log: Logger): Option[String] = + apply(label, sources, classpathString, outputDirectory, options, Nil, CompileOrder.Mixed, log) + final def apply(label: String, sources: Iterable[Path], classpathString: String, outputDirectory: Path, options: Seq[String], javaOptions: Seq[String], order: CompileOrder.Value, log: Logger): Option[String] = + { + log.info(actionStartMessage(label)) + val classpathOption: List[String] = + if(classpathString.isEmpty) + Nil + else + List(ClasspathOptionString, classpathString) + val outputDir = outputDirectory.asFile + FileUtilities.createDirectory(outputDir, log) orElse + { + val classpathAndOut: List[String] = OutputOptionString :: outputDir.getAbsolutePath :: classpathOption + + Control.trapUnit("Compiler error: ", log) + { + val sourceList = sources.map(_.asFile.getAbsolutePath).toList + if(sourceList.isEmpty) + { + log.info(actionNothingToDoMessage) + None + } + else + { + def filteredSources(extension: String) = sourceList.filter(_.endsWith(extension)) + def compile(label: String, sources: List[String], options: Seq[String])(process: (List[String], Logger) => Boolean) = + { + if(sources.isEmpty) + { + log.debug("No "+label+" sources to compile.") + true + } + else + { + val arguments = (options ++ classpathAndOut ++ sources).toList + log.debug(label + " arguments: " + arguments.mkString(" ")) + process(arguments, log) + } + } + def scalaCompile = () => + { + val scalaSourceList = if(order == CompileOrder.Mixed) sourceList else filteredSources(".scala") + compile("Scala", scalaSourceList, options)(process) + } + def javaCompile = () => + { + val javaSourceList = 
filteredSources(".java") + compile("Java", javaSourceList, javaOptions)(processJava) + } + + val (first, second) = if(order == CompileOrder.JavaThenScala) (javaCompile, scalaCompile) else (scalaCompile, javaCompile) + if(first() && second()) + { + log.info(actionSuccessfulMessage) + None + } + else + Some(actionUnsuccessfulMessage) + } + } + } + } +} + +sealed abstract class CompilerBase extends CompilerCore +{ + def actionStartMessage(label: String) = "Compiling " + label + " sources..." + val actionNothingToDoMessage = "Nothing to compile." + val actionSuccessfulMessage = "Compilation successful." + val actionUnsuccessfulMessage = "Compilation unsuccessful." +} +final class ForkCompile(config: ForkScalaCompiler) extends CompilerBase +{ + import java.io.File + protected def process(arguments: List[String], log: Logger) = + Fork.scalac(config.javaHome, config.compileJVMOptions, config.scalaJars, arguments, log) == 0 + override protected def processJava(args: List[String], log: Logger) = + Fork.javac(config.javaHome, args, log) == 0 +} +object ForkCompile +{ + def apply(config: ForkScalaCompiler, conditional: CompileConditional) = + { + import conditional.config.{compileOrder, classpath, javaOptions, label, log, options, outputDirectory, sources} + // recompile only if any sources were modified after any classes or no classes exist + val sourcePaths = sources.get + val newestSource = (0L /: sourcePaths)(_ max _.lastModified) + val products = (outputDirectory ** GlobFilter("*.class")).get + val oldestClass = (java.lang.Long.MAX_VALUE /: products)(_ min _.lastModified) + if(products.isEmpty || newestSource > oldestClass) + { + // full recompile, since we are not doing proper dependency tracking + FileUtilities.clean(outputDirectory :: Nil, log) + val compiler = new ForkCompile(config) + FileUtilities.createDirectory(outputDirectory.asFile, log) + compiler(label, sourcePaths, Path.makeString(classpath.get), outputDirectory, options, javaOptions, compileOrder, log) + } 
+ else + { + log.info("Compilation up to date.") + None + } + } +} + +// The following code is based on scala.tools.nsc.Main and scala.tools.nsc.ScalaDoc +// Copyright 2005-2008 LAMP/EPFL +// Original author: Martin Odersky + +final class Compile(maximumErrors: Int) extends CompilerBase +{ + protected def process(arguments: List[String], log: Logger) = + { + import scala.tools.nsc.{CompilerCommand, FatalError, Global, Settings, reporters, util} + import util.FakePos + var reporter = new LoggerReporter(maximumErrors, log) + val settings = new Settings(reporter.error) + val command = new CompilerCommand(arguments, settings, error, false) + + object compiler extends Global(command.settings, reporter) + if(!reporter.hasErrors) + { + val run = new compiler.Run + run compile command.files + reporter.printSummary() + } + !reporter.hasErrors + } + override protected def processJava(args: List[String], log: Logger) = + (Process("javac", args) ! log) == 0 +} +final class Scaladoc(maximumErrors: Int) extends CompilerCore +{ + protected def process(arguments: List[String], log: Logger) = + { + import scala.tools.nsc.{doc, CompilerCommand, FatalError, Global, reporters, util} + import util.FakePos + val reporter = new LoggerReporter(maximumErrors, log) + val docSettings: doc.Settings = new doc.Settings(reporter.error) + val command = new CompilerCommand(arguments, docSettings, error, false) + object compiler extends Global(command.settings, reporter) + { + override val onlyPresentation = true + } + if(!reporter.hasErrors) + { + val run = new compiler.Run + run compile command.files + val generator = new doc.DefaultDocDriver + { + lazy val global: compiler.type = compiler + lazy val settings = docSettings + } + generator.process(run.units) + reporter.printSummary() + } + !reporter.hasErrors + } + def actionStartMessage(label: String) = "Generating API documentation for " + label + " sources..." + val actionNothingToDoMessage = "No sources specified." 
+ val actionSuccessfulMessage = "API documentation generation successful." + def actionUnsuccessfulMessage = "API documentation generation unsuccessful." +} + +// The following code is based on scala.tools.nsc.reporters.{AbstractReporter, ConsoleReporter} +// Copyright 2002-2008 LAMP/EPFL +// Original author: Martin Odersky +final class LoggerReporter(maximumErrors: Int, log: Logger) extends scala.tools.nsc.reporters.Reporter +{ + import scala.tools.nsc.util.{FakePos,Position} + private val positions = new scala.collection.mutable.HashMap[Position, Severity] + + def error(msg: String) { error(FakePos("scalac"), msg) } + + def printSummary() + { + if(WARNING.count > 0) + log.warn(countElementsAsString(WARNING.count, "warning") + " found") + if(ERROR.count > 0) + log.error(countElementsAsString(ERROR.count, "error") + " found") + } + + def display(pos: Position, msg: String, severity: Severity) + { + severity.count += 1 + if(severity != ERROR || maximumErrors < 0 || severity.count <= maximumErrors) + print(severityToLevel(severity), pos, msg) + } + private def severityToLevel(severity: Severity): Level.Value = + severity match + { + case ERROR => Level.Error + case WARNING => Level.Warn + case INFO => Level.Info + } + + private def print(level: Level.Value, posIn: Position, msg: String) + { + if(posIn == null) + log.log(level, msg) + else + { + val pos = posIn.inUltimateSource(posIn.source.getOrElse(null)) + def message = + { + val sourcePrefix = + pos match + { + case FakePos(msg) => msg + " " + case _ => pos.source.map(_.file.path).getOrElse("") + } + val lineNumberString = pos.line.map(line => ":" + line + ":").getOrElse(":") + " " + sourcePrefix + lineNumberString + msg + } + log.log(level, message) + if (!pos.line.isEmpty) + { + log.log(level, pos.lineContent.stripLineEnd) // source line with error/warning + for(column <- pos.column if column > 0) // pointer to the column position of the error/warning + log.log(level, (" " * (column-1)) + '^') + } + } + } + 
override def reset = + { + super.reset + positions.clear + } + + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) + { + severity match + { + case WARNING | ERROR => + { + if(!testAndLog(pos, severity)) + display(pos, msg, severity) + } + case _ => display(pos, msg, severity) + } + } + + private def testAndLog(pos: Position, severity: Severity): Boolean = + { + if(pos == null || pos.offset.isEmpty) + false + else if(positions.get(pos).map(_ >= severity).getOrElse(false)) + true + else + { + positions(pos) = severity + false + } + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Conditional.scala b/src/main/scala/sbt/Conditional.scala new file mode 100644 index 000000000..fe757a469 --- /dev/null +++ b/src/main/scala/sbt/Conditional.scala @@ -0,0 +1,381 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +trait Conditional[Source, Product, External] extends NotNull +{ + type AnalysisType <: TaskAnalysis[Source, Product, External] + val analysis: AnalysisType = loadAnalysis + + protected def loadAnalysis: AnalysisType + protected def log: Logger + + protected def productType: String + protected def productTypePlural: String + + protected def sourcesToProcess: Iterable[Source] + + protected def sourceExists(source: Source): Boolean + protected def sourceLastModified(source: Source): Long + + protected def productExists(product: Product): Boolean + protected def productLastModified(product: Product): Long + + protected def externalInfo(externals: Iterable[External]): Iterable[(External, ExternalInfo)] + + protected def execute(cAnalysis: ConditionalAnalysis): Option[String] + + final case class ExternalInfo(available: Boolean, lastModified: Long) extends NotNull + trait ConditionalAnalysis extends NotNull + { + def dirtySources: Iterable[Source] + def cleanSources: Iterable[Source] + def directlyModifiedSourcesCount: Int + def invalidatedSourcesCount: Int + def removedSourcesCount: 
Int + } + + final def run = + { + val result = execute(analyze) + processingComplete(result.isEmpty) + result + } + private def analyze = + { + import scala.collection.mutable.HashSet + + val sourcesSnapshot = sourcesToProcess + val removedSources = new HashSet[Source] + removedSources ++= analysis.allSources + removedSources --= sourcesSnapshot + val removedCount = removedSources.size + for(removed <- removedSources) + analysis.removeDependent(removed) + + val unmodified = new HashSet[Source] + val modified = new HashSet[Source] + + for(source <- sourcesSnapshot) + { + if(isSourceModified(source)) + { + log.debug("Source " + source + " directly modified.") + modified += source + } + else + { + log.debug("Source " + source + " unmodified.") + unmodified += source + } + } + val directlyModifiedCount = modified.size + for((external, info) <- externalInfo(analysis.allExternals)) + { + val dependentSources = analysis.externalDependencies(external).getOrElse(Set.empty) + if(info.available) + { + val dependencyLastModified = info.lastModified + for(dependentSource <- dependentSources; dependentProducts <- analysis.products(dependentSource)) + { + dependentProducts.find(p => productLastModified(p) < dependencyLastModified) match + { + case Some(modifiedProduct) => + { + log.debug(productType + " " + modifiedProduct + " older than external dependency " + external) + unmodified -= dependentSource + modified += dependentSource + } + case None => () + } + } + } + else + { + log.debug("External dependency " + external + " not found.") + unmodified --= dependentSources + modified ++= dependentSources + analysis.removeExternalDependency(external) + } + } + + val handled = new scala.collection.mutable.HashSet[Source] + val transitive = !java.lang.Boolean.getBoolean("sbt.intransitive") + def markModified(changed: Iterable[Source]) { for(c <- changed if !handled.contains(c)) markSourceModified(c) } + def markSourceModified(src: Source) + { + unmodified -= src + modified += src + 
handled += src + if(transitive) + markDependenciesModified(src) + } + def markDependenciesModified(src: Source) { analysis.removeDependencies(src).map(markModified) } + + markModified(modified.toList) + if(transitive) + removedSources.foreach(markDependenciesModified) + + for(changed <- removedSources ++ modified) + analysis.removeSource(changed) + + new ConditionalAnalysis + { + def dirtySources = wrap.Wrappers.readOnly(modified) + def cleanSources = wrap.Wrappers.readOnly(unmodified) + def directlyModifiedSourcesCount = directlyModifiedCount + def invalidatedSourcesCount = dirtySources.size - directlyModifiedCount + def removedSourcesCount = removedCount + override def toString = + { + " Source analysis: " + directlyModifiedSourcesCount + " new/modified, " + + invalidatedSourcesCount + " indirectly invalidated, " + + removedSourcesCount + " removed." + } + } + } + + protected def checkLastModified = true + protected def noProductsImpliesModified = true + protected def isSourceModified(source: Source) = + { + analysis.products(source) match + { + case None => + { + log.debug("New file " + source) + true + } + case Some(sourceProducts) => + { + val sourceModificationTime = sourceLastModified(source) + def isOutofdate(p: Product) = + !productExists(p) || (checkLastModified && productLastModified(p) < sourceModificationTime) + + sourceProducts.find(isOutofdate) match + { + case Some(modifiedProduct) => + log.debug("Outdated " + productType + ": " + modifiedProduct + " for source " + source) + true + case None => + if(noProductsImpliesModified && sourceProducts.isEmpty) + { + // necessary for change detection that depends on last modified + log.debug("Source " + source + " has no products, marking it modified.") + true + } + else + false + } + } + } + } + protected def processingComplete(success: Boolean) + { + if(success) + { + analysis.save() + log.info(" Post-analysis: " + analysis.allProducts.toSeq.length + " " + productTypePlural + ".") + } + else + 
analysis.revert() + } +} + +abstract class AbstractCompileConfiguration extends NotNull +{ + def label: String + def sources: PathFinder + def outputDirectory: Path + def classpath: PathFinder + def analysisPath: Path + def projectPath: Path + def log: Logger + def options: Seq[String] + def javaOptions: Seq[String] + def maxErrors: Int + def compileOrder: CompileOrder.Value +} +abstract class CompileConfiguration extends AbstractCompileConfiguration +{ + def testDefinitionClassNames: Iterable[String] +} +import java.io.File +class CompileConditional(override val config: CompileConfiguration) extends AbstractCompileConditional(config) +{ + import config._ + type AnalysisType = CompileAnalysis + protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger) = + new CompileAnalysis(analysisPath, projectPath, log) + protected def analysisCallback = new CompileAnalysisCallback + protected class CompileAnalysisCallback extends BasicCompileAnalysisCallback(projectPath, testDefinitionClassNames, analysis) + { + def foundSubclass(sourcePath: Path, subclassName: String, superclassName: String, isModule: Boolean) + { + analysis.addTest(sourcePath, TestDefinition(isModule, subclassName, superclassName)) + } + } +} +abstract class AbstractCompileConditional(val config: AbstractCompileConfiguration) extends Conditional[Path, Path, File] +{ + import config._ + type AnalysisType <: BasicCompileAnalysis + protected def loadAnalysis = + { + val a = constructAnalysis(analysisPath, projectPath, log) + for(errorMessage <- a.load()) + error(errorMessage) + a + } + protected def constructAnalysis(analysisPath: Path, projectPath: Path, log: Logger): AnalysisType + + protected def log = config.log + + protected def productType = "class" + protected def productTypePlural = "classes" + protected def sourcesToProcess = sources.get + + protected def sourceExists(source: Path) = source.asFile.exists + protected def sourceLastModified(source: Path) = 
source.asFile.lastModified + + protected def productExists(product: Path) = product.asFile.exists + protected def productLastModified(product: Path) = product.asFile.lastModified + + protected def externalInfo(externals: Iterable[File]) = + { + val (classpathJars, classpathDirs) = ClasspathUtilities.buildSearchPaths(classpath.get) + for(external <- externals) yield + { + val available = external.exists && ClasspathUtilities.onClasspath(classpathJars, classpathDirs, external) + if(!available) + log.debug("External " + external + (if(external.exists) " not on classpath." else " does not exist.")) + (external, ExternalInfo(available, external.lastModified)) + } + } + + import ChangeDetection.{LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist} + protected def changeDetectionMethod: ChangeDetection.Value = HashAndProductsExist + override protected def checkLastModified = changeDetectionMethod != HashAndProductsExist + override protected def noProductsImpliesModified = changeDetectionMethod == LastModifiedOnly + override protected def isSourceModified(source: Path) = + changeDetectionMethod match + { + case HashAndLastModified | HashAndProductsExist => + // behavior will differ because of checkLastModified + // hash modified must come first so that the latest hash is calculated for every source + hashModified(source) || super.isSourceModified(source) + case HashOnly => hashModified(source) + case LastModifiedOnly => super.isSourceModified(source) + } + + import scala.collection.mutable.{Buffer, ListBuffer} + private val newHashes: Buffer[(Path, Option[Array[Byte]])] = new ListBuffer + private def warnHashError(source: Path, message: String) + { + log.warn("Error computing hash for source " + source + ": " + message) + newHashes += ((source, None)) + } + protected def hashModified(source: Path) = + { + source.isDirectory || + (analysis.hash(source) match + { + case None => + log.debug("Source " + source + " had no hash, marking modified.") + 
Hash(source, log).fold(err => warnHashError(source, err), newHash => newHashes += ((source, Some(newHash)))) + true + case Some(oldHash) => + { + Hash(source, log) match + { + case Left(err) => + warnHashError(source, err) + log.debug("Assuming source is modified because of error.") + true + case Right(newHash) => + newHashes += ((source, Some(newHash))) + val different = !(oldHash deepEquals newHash) + if(different) + log.debug("Hash for source " + source + " changed (was " + Hash.toHex(oldHash) + + ", is now " + Hash.toHex(newHash) + "), marking modified.") + different + } + } + }) + } + protected def execute(executeAnalysis: ConditionalAnalysis) = + { + log.info(executeAnalysis.toString) + finishHashes() + import executeAnalysis.dirtySources + + // the output directory won't show up in the classpath unless it exists, so do this before classpath.get + val outputDir = outputDirectory.asFile + FileUtilities.createDirectory(outputDir, log) + + val cp = classpath.get + if(!dirtySources.isEmpty) + checkClasspath(cp) + val classpathString = Path.makeString(cp) + val id = AnalysisCallback.register(analysisCallback) + val allOptions = (("-Xplugin:" + FileUtilities.sbtJar.getAbsolutePath) :: + ("-P:sbt-analyzer:callback:" + id.toString) :: Nil) ++ options + val r = (new Compile(config.maxErrors))(label, dirtySources, classpathString, outputDirectory, allOptions, javaOptions, compileOrder, log) + AnalysisCallback.unregister(id) + if(log.atLevel(Level.Debug)) + { + /** This checks that the plugin accounted for all classes in the output directory.*/ + val classes = scala.collection.mutable.HashSet(analysis.allProducts.toSeq: _*) + var missed = 0 + for(c <- (outputDirectory ** GlobFilter("*.class")).get) + { + if(!classes.contains(c)) + { + missed += 1 + log.debug("Missed class: " + c) + } + } + log.debug("Total missed classes: " + missed) + } + r + } + private def finishHashes() + { + if(changeDetectionMethod == LastModifiedOnly) + analysis.clearHashes() + else + { + 
for((path, hash) <- newHashes) + { + hash match + { + case None => analysis.clearHash(path) + case Some(hash) => analysis.setHash(path, hash) + } + } + } + newHashes.clear() + } + private def checkClasspath(cp: Iterable[Path]) + { + import scala.collection.mutable.{HashMap, HashSet, Set} + val collisions = new HashMap[String, Set[Path]] + for(jar <- cp if ClasspathUtilities.isArchive(jar)) + collisions.getOrElseUpdate(jar.asFile.getName, new HashSet[Path]) += jar + for((name, jars) <- collisions) + { + if(jars.size > 1) + { + log.warn("Possible duplicate classpath locations for jar " + name + ": ") + for(jar <- jars) log.warn("\t" + jar.absolutePath) + } + } + } + + protected def analysisCallback: AnalysisCallback +} +object ChangeDetection extends Enumeration +{ + val LastModifiedOnly, HashOnly, HashAndLastModified, HashAndProductsExist = Value +} \ No newline at end of file diff --git a/src/main/scala/sbt/Control.scala b/src/main/scala/sbt/Control.scala new file mode 100644 index 000000000..8117fe29a --- /dev/null +++ b/src/main/scala/sbt/Control.scala @@ -0,0 +1,73 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +/** The trap methods execute the provided code in a try block and handle a thrown exception.*/ +object Control +{ + def trap[T](errorMessagePrefix: => String, log: Logger)(execute: => Either[String, T]): Either[String, T] = + try { execute } + catch { case e => log.trace(e); Left(errorMessagePrefix + e.toString) } + + def trapAndFinally[T](errorMessagePrefix: => String, log: Logger)(execute: => Either[String, T])(doFinally: => Unit): Either[String, T] = + try { execute } + catch { case e => log.trace(e); Left(errorMessagePrefix + e.toString) } + finally { trapAndLog(log)(doFinally) } + + def trapUnit(errorMessagePrefix: => String, log: Logger)(execute: => Option[String]): Option[String] = + try { execute } + catch { case e => log.trace(e); Some(errorMessagePrefix + e.toString) } + + def 
trapUnitAndFinally(errorMessagePrefix: => String, log: Logger)(execute: => Option[String])(doFinally: => Unit): Option[String] = + try { execute } + catch { case e => log.trace(e); Some(errorMessagePrefix + e.toString) } + finally { trapAndLog(log)(doFinally) } + + def trap(execute: => Unit) + { + try { execute } + catch { case e: Exception => () } + } + def trapAndLog(log: Logger)(execute: => Unit) + { + try { execute } + catch { case e => log.trace(e); log.error(e.toString) } + } + def convertException[T](t: => T): Either[Exception, T] = + { + try { Right(t) } + catch { case e: Exception => Left(e) } + } + def convertErrorMessage[T](log: Logger)(t: => T): Either[String, T] = + { + try { Right(t) } + catch { case e: Exception => log.trace(e); Left(e.toString) } + } + + def getOrError[T](result: Either[String, T]): T = result.fold(error, x=>x) + final def lazyFold[T](list: List[T])(f: T => Option[String]): Option[String] = + list match + { + case Nil => None + case head :: tail => + f(head) match + { + case None => lazyFold(tail)(f) + case x => x + } + } + final def lazyFold[T, S](list: List[T], value: S)(f: (S,T) => Either[String, S]): Either[String, S] = + list match + { + case Nil => Right(value) + case head :: tail => + f(value, head) match + { + case Right(newValue) => lazyFold(tail, newValue)(f) + case x => x + } + } + def thread[T](e: Either[String, T])(f: T => Option[String]): Option[String] = + e.right.flatMap( t => f(t).toLeft(()) ).left.toOption +} \ No newline at end of file diff --git a/src/main/scala/sbt/Dag.scala b/src/main/scala/sbt/Dag.scala new file mode 100644 index 000000000..8a18491e6 --- /dev/null +++ b/src/main/scala/sbt/Dag.scala @@ -0,0 +1,30 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 David MacIver + */ +package sbt; + +import scala.collection.mutable; + +trait Dag[Node <: Dag[Node]]{ + self : Node => + + def dependencies : Iterable[Node] + + def topologicalSort = { + val discovered = new mutable.HashSet[Node]; + val finished = new 
wrap.MutableSetWrapper(new java.util.LinkedHashSet[Node]) + + def visit(dag : Node){ + if (!discovered(dag)) { + discovered(dag) = true; + dag.dependencies.foreach(visit); + finished += dag; + } + } + + visit(self); + + finished.toList; + } +} + diff --git a/src/main/scala/sbt/DefaultProject.scala b/src/main/scala/sbt/DefaultProject.scala new file mode 100644 index 000000000..554f98472 --- /dev/null +++ b/src/main/scala/sbt/DefaultProject.scala @@ -0,0 +1,458 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah, David MacIver + */ +package sbt + +/** The default project when no project is explicitly configured and the common base class for +* configuring a project.*/ +class DefaultProject(val info: ProjectInfo) extends BasicScalaProject with MavenStyleScalaPaths +class DefaultWebProject(val info: ProjectInfo) extends BasicWebScalaProject with MavenStyleWebScalaPaths + + +import BasicScalaProject._ +import ScalaProject.{optionsAsString, javaOptionsAsString} +import java.io.File +import java.util.jar.Attributes + +/** This class defines concrete instances of actions from ScalaProject using overridable paths, +* options, and configuration. */ +abstract class BasicScalaProject extends ScalaProject with BasicDependencyProject with ScalaPaths +{ + /** The explicitly specified class to be run by the 'run' action. + * See http://code.google.com/p/simple-build-tool/wiki/RunningProjectCode for details.*/ + def mainClass: Option[String] = None + /** Gets the main class to use. This is used by package and run to determine which main + * class to run or include as the Main-Class attribute. + * If `mainClass` is explicitly specified, it is used. Otherwise, the main class is selected from + * the classes with a main method as automatically detected by the analyzer plugin. + * `promptIfMultipleChoices` controls the behavior when multiple main classes are detected. + * If true, it prompts the user to select which main class to use. 
If false, it prints a warning + * and returns no main class.*/ + def getMainClass(promptIfMultipleChoices: Boolean) = + mainClass orElse + { + val applications = mainCompileConditional.analysis.allApplications.toList + impl.SelectMainClass(promptIfMultipleChoices, applications) orElse + { + if(!promptIfMultipleChoices && !applications.isEmpty) + warnMultipleMainClasses(log) + None + } + } + /** Specifies the value of the `Class-Path` attribute in the manifest of the main jar. */ + def manifestClassPath: Option[String] = None + def dependencies = info.dependencies ++ subProjects.values.toList + + val mainCompileConditional = new CompileConditional(mainCompileConfiguration) + val testCompileConditional = new CompileConditional(testCompileConfiguration) + + def compileOrder = CompileOrder.Mixed + + /** The main artifact produced by this project. To redefine the main artifact, override `defaultMainArtifact` + * Additional artifacts are defined by `val`s of type `Artifact`.*/ + lazy val mainArtifact = defaultMainArtifact + /** Defines the default main Artifact assigned to `mainArtifact`. By default, this is a jar file with name given + * by `artifactID`.*/ + protected def defaultMainArtifact = Artifact(artifactID, "jar", "jar") + + import Project._ + + /** The options provided to the 'compile' action to pass to the Scala compiler.*/ + def compileOptions: Seq[CompileOption] = Deprecation :: Nil + /** The options provided to the 'compile' action to pass to the Java compiler. */ + def javaCompileOptions: Seq[JavaCompileOption] = Nil + /** The options provided to the 'test-compile' action, defaulting to those for the 'compile' action.*/ + def testCompileOptions: Seq[CompileOption] = compileOptions + /** The options provided to the 'test-compile' action to pass to the Java compiler. 
*/ + def testJavaCompileOptions: Seq[JavaCompileOption] = javaCompileOptions + + /** The options provided to the 'doc' and 'docTest' actions.*/ + def documentOptions: Seq[ScaladocOption] = + LinkSource :: + documentTitle(name + " " + version + " API") :: + windowTitle(name + " " + version + " API") :: + Nil + /** The options provided to the 'test' action..*/ + def testOptions: Seq[TestOption] = + TestListeners(testListeners) :: + TestFilter(includeTest) :: + Nil + /** The options provided to the clean action. You can add files to be removed and files to be preserved here.*/ + def cleanOptions: Seq[CleanOption] = + ClearAnalysis(mainCompileConditional.analysis) :: + ClearAnalysis(testCompileConditional.analysis) :: + historyPath.map(history => Preserve(history)).toList + + def packageOptions: Seq[PackageOption] = + manifestClassPath.map(cp => ManifestAttributes( (Attributes.Name.CLASS_PATH, cp) )).toList ::: + getMainClass(false).map(MainClass(_)).toList + + private def succeededTestPath = testAnalysisPath / "succeeded-tests" + private def quickOptions(failedOnly: Boolean) = + { + val path = succeededTestPath + val analysis = testCompileConditional.analysis + TestFilter(new impl.TestQuickFilter(analysis, failedOnly, path, log)) :: TestListeners(new impl.TestStatusReporter(path, log) :: Nil) :: Nil + } + + protected def includeTest(test: String): Boolean = true + + /** This is called to create the initial directories when a user makes a new project from + * sbt.*/ + override final def initializeDirectories() + { + FileUtilities.createDirectories(directoriesToCreate.map(_.asFile), log) match + { + case Some(errorMessage) => log.error("Could not initialize directory structure: " + errorMessage) + case None => log.success("Successfully initialized directory structure.") + } + } + import Configurations._ + /** The managed configuration to use when determining the classpath for a Scala interpreter session.*/ + def consoleConfiguration = Test + + /** A PathFinder that 
provides the classpath to pass to scaladoc. It is the same as the compile classpath + * by default. */ + def docClasspath = compileClasspath + /** A PathFinder that provides the classpath to pass to the compiler.*/ + def compileClasspath = fullClasspath(Compile) +++ optionalClasspath + /** A PathFinder that provides the classpath to use when unit testing.*/ + def testClasspath = fullClasspath(Test) +++ optionalClasspath + /** A PathFinder that provides the classpath to use when running the class specified by 'getMainClass'.*/ + def runClasspath = fullClasspath(Runtime) +++ optionalClasspath + /** A PathFinder that provides the classpath to use for a Scala interpreter session.*/ + def consoleClasspath = fullClasspath(consoleConfiguration) +++ optionalClasspath + /** A PathFinder that corresponds to Maven's optional scope. It includes any managed libraries in the + * 'optional' configuration for this project only.*/ + def optionalClasspath = managedClasspath(Optional) + /** A PathFinder that contains the jars that should be included in a comprehensive package. This is + * by default the 'runtime' classpath excluding the 'provided' classpath.*/ + def publicClasspath = runClasspath --- fullClasspath(Provided) + + /** This returns the unmanaged classpath for only this project for the given configuration. It by + * default includes the main compiled classes for this project and the libraries in this project's + * unmanaged library directory (lib) and the managed directory for the specified configuration. It + * also adds the resource directories appropriate to the configuration.*/ + def fullUnmanagedClasspath(config: Configuration) = + { + config match + { + case CompilerPlugin => unmanagedClasspath + case Runtime => runUnmanagedClasspath + case Test => testUnmanagedClasspath + case _ => mainUnmanagedClasspath + } + } + /** The unmanaged base classpath. By default, the unmanaged classpaths for test and run include this classpath. 
*/ + protected def mainUnmanagedClasspath = mainCompilePath +++ mainResourceClasspath +++ unmanagedClasspath + /** The unmanaged classpath for the run configuration. By default, it includes the base classpath returned by + * `mainUnmanagedClasspath`.*/ + protected def runUnmanagedClasspath = mainUnmanagedClasspath +++ mainDependencies.scalaCompiler + /** The unmanaged classpath for the test configuration. By default, it includes the run classpath, which includes the base + * classpath returned by `mainUnmanagedClasspath`.*/ + protected def testUnmanagedClasspath = testCompilePath +++ testResourceClasspath +++ testDependencies.scalaCompiler +++ runUnmanagedClasspath + + /** @deprecated Use `mainDependencies.scalaJars`*/ + @deprecated protected final def scalaJars: Iterable[File] = mainDependencies.scalaJars.get.map(_.asFile) + /** An analysis of the jar dependencies of the main Scala sources. It is only valid after main source compilation. + * See the LibraryDependencies class for details. */ + final def mainDependencies = new LibraryDependencies(this, mainCompileConditional) + /** An analysis of the jar dependencies of the test Scala sources. It is only valid after test source compilation. + * See the LibraryDependencies class for details. */ + final def testDependencies = new LibraryDependencies(this, testCompileConditional) + + /** The list of test frameworks to use for testing. Note that adding frameworks to this list + * for an active project currently requires an explicit 'clean' to properly update the set of tests to + * run*/ + def testFrameworks: Iterable[TestFramework] = ScalaCheckFramework :: SpecsFramework :: ScalaTestFramework :: Nil + /** The list of listeners for testing. 
*/ + def testListeners: Seq[TestReportListener] = new LogTestReportListener(log) :: Nil + + def mainLabel = "main" + def testLabel = "test" + + def mainCompileConfiguration = new MainCompileConfig + def testCompileConfiguration = new TestCompileConfig + abstract class BaseCompileConfig extends CompileConfiguration + { + def log = BasicScalaProject.this.log + def projectPath = info.projectPath + def baseCompileOptions: Seq[CompileOption] + lazy val localBaseOptions = baseCompileOptions + def options = optionsAsString(localBaseOptions.filter(!_.isInstanceOf[MaxCompileErrors])) + def maxErrors = maximumErrors(localBaseOptions) + def compileOrder = BasicScalaProject.this.compileOrder + } + class MainCompileConfig extends BaseCompileConfig + { + def baseCompileOptions = compileOptions + def label = mainLabel + def sources = mainSources + def outputDirectory = mainCompilePath + def classpath = compileClasspath + def analysisPath = mainAnalysisPath + def testDefinitionClassNames = Nil + def javaOptions = javaOptionsAsString(javaCompileOptions) + } + class TestCompileConfig extends BaseCompileConfig + { + def baseCompileOptions = testCompileOptions + def label = testLabel + def sources = testSources + def outputDirectory = testCompilePath + def classpath = testClasspath + def analysisPath = testAnalysisPath + def testDefinitionClassNames = testFrameworks.map(_.testSuperClassName) + def javaOptions = javaOptionsAsString(testJavaCompileOptions) + } + + /** Configures forking the compiler and runner. 
Use ForkScalaCompiler, ForkScalaRun or mix together.*/ + def fork: Option[ForkScala] = None + private def doCompile(conditional: CompileConditional) = + { + fork match + { + case Some(fc: ForkScalaCompiler) => ForkCompile(fc, conditional) + case _ => conditional.run + } + } + private def getRunner = + { + fork match + { + case Some(fr: ForkScalaRun) => new ForkRun(fr) + case _ => Run + } + } + + protected def compileAction = task { doCompile(mainCompileConditional) } describedAs MainCompileDescription + protected def testCompileAction = task { doCompile(testCompileConditional) } dependsOn compile describedAs TestCompileDescription + protected def cleanAction = cleanTask(outputPath, cleanOptions) describedAs CleanDescription + protected def runAction = task { args => runTask(getMainClass(true), runClasspath, args, getRunner) dependsOn(compile) } describedAs RunDescription + protected def consoleQuickAction = consoleTask(consoleClasspath, getRunner) describedAs ConsoleQuickDescription + protected def consoleAction = consoleTask(consoleClasspath, getRunner).dependsOn(testCompile) describedAs ConsoleDescription + protected def docAction = scaladocTask(mainLabel, mainSources, mainDocPath, docClasspath, documentOptions).dependsOn(compile) describedAs DocDescription + protected def docTestAction = scaladocTask(testLabel, testSources, testDocPath, docClasspath, documentOptions).dependsOn(testCompile) describedAs TestDocDescription + protected def testAction = defaultTestTask(testOptions) + protected def testOnlyAction = testQuickMethod(testCompileConditional.analysis, testOptions)(options => + defaultTestTask(options)) describedAs(TestOnlyDescription) + protected def testQuickAction = defaultTestQuickMethod(false) describedAs(TestQuickDescription) + protected def testFailedAction = defaultTestQuickMethod(true) describedAs(TestFailedDescription) + protected def defaultTestQuickMethod(failedOnly: Boolean) = + testQuickMethod(testCompileConditional.analysis, 
testOptions)(options => defaultTestTask(quickOptions(failedOnly) ::: options.toList)) + protected def defaultTestTask(testOptions: => Seq[TestOption]) = + testTask(testFrameworks, testClasspath, testCompileConditional.analysis, testOptions).dependsOn(testCompile) describedAs TestDescription + + override protected def makePomAction = super.makePomAction dependsOn(`package`) + override protected def deliverLocalAction = super.deliverLocalAction dependsOn(`package`) + override protected def deliverAction = super.deliverAction dependsOn(`package`) + + protected def packageAction = packageTask(packagePaths, jarPath, packageOptions).dependsOn(compile) describedAs PackageDescription + protected def packageTestAction = packageTask(packageTestPaths, packageTestJar).dependsOn(testCompile) describedAs TestPackageDescription + protected def packageDocsAction = packageTask(mainDocPath ##, packageDocsJar, Recursive).dependsOn(doc) describedAs DocPackageDescription + protected def packageSrcAction = packageTask(packageSourcePaths, packageSrcJar) describedAs SourcePackageDescription + protected def packageTestSrcAction = packageTask(packageTestSourcePaths, packageTestSrcJar) describedAs TestSourcePackageDescription + protected def packageProjectAction = zipTask(packageProjectPaths, packageProjectZip) describedAs ProjectPackageDescription + + protected def docAllAction = (doc && docTest) describedAs DocAllDescription + protected def packageAllAction = task { None } dependsOn(`package`, packageTest, packageSrc, packageTestSrc, packageDocs) describedAs PackageAllDescription + protected def graphAction = graphTask(graphPath, mainCompileConditional.analysis).dependsOn(compile) + protected def incrementVersionAction = task { incrementVersionNumber(); None } describedAs IncrementVersionDescription + protected def releaseAction = (test && packageAll && incrementVersion) describedAs ReleaseDescription + + lazy val compile = compileAction + lazy val testCompile = testCompileAction + lazy 
val clean = cleanAction + lazy val run = runAction + lazy val consoleQuick = consoleQuickAction + lazy val console = consoleAction + lazy val doc = docAction + lazy val docTest = docTestAction + lazy val test = testAction + lazy val `package` = packageAction + lazy val packageTest = packageTestAction + lazy val packageDocs = packageDocsAction + lazy val packageSrc = packageSrcAction + lazy val packageTestSrc = packageTestSrcAction + lazy val packageProject = packageProjectAction + lazy val docAll = docAllAction + lazy val packageAll = packageAllAction + lazy val graph = graphAction + lazy val incrementVersion = incrementVersionAction + lazy val release = releaseAction + + lazy val testQuick = testQuickAction + lazy val testFailed = testFailedAction + lazy val testOnly = testOnlyAction + + def jarsOfProjectDependencies = Path.lazyPathFinder { + topologicalSort.dropRight(1) flatMap { p => + p match + { + case bpp: BasicScalaPaths => List(bpp.jarPath) + case _ => Nil + } + } + } + override def deliverScalaDependencies: Iterable[ModuleID] = + { + val snapshot = mainDependencies.snapshot + mapScalaModule(snapshot.scalaLibrary, ManageDependencies.ScalaLibraryID) ++ + mapScalaModule(snapshot.scalaCompiler, ManageDependencies.ScalaCompilerID) + } + override def watchPaths = mainSources +++ testSources +++ mainResources +++ testResources +} +abstract class BasicWebScalaProject extends BasicScalaProject with WebScalaProject with WebScalaPaths +{ + import BasicWebScalaProject._ + override def watchPaths = super.watchPaths +++ webappResources + + lazy val prepareWebapp = prepareWebappAction + protected def prepareWebappAction = + prepareWebappTask(webappResources, temporaryWarPath, webappClasspath, mainDependencies.scalaJars) dependsOn(compile) + + def webappClasspath = publicClasspath + def jettyRunClasspath = testClasspath + def jettyWebappPath = temporaryWarPath + lazy val jettyRun = jettyRunAction + protected def jettyRunAction = + jettyRunTask(jettyWebappPath, 
jettyContextPath, jettyPort, jettyRunClasspath, "test", scanDirectories.map(_.asFile), scanInterval) dependsOn(prepareWebapp) describedAs(JettyRunDescription) + + /** The directories that should be watched to determine if the web application needs to be reloaded..*/ + def scanDirectories: Seq[Path] = jettyWebappPath :: Nil + /** The time in seconds between scans that check whether the web application should be reloaded.*/ + def scanInterval: Int = 3 + /** The port that Jetty runs on. */ + def jettyPort: Int = JettyRun.DefaultPort + + lazy val jettyRestart = jettyStop && jettyRun + lazy val jettyStop = jettyStopAction + protected def jettyStopAction = jettyStopTask describedAs(JettyStopDescription) + + /** The clean action for a web project is modified so that it first stops jetty if it is running, + * since the webapp directory will be removed by the clean.*/ + override def cleanAction = super.cleanAction dependsOn jettyStop + + /** Redefine the `package` action to make a war file.*/ + override protected def packageAction = packageTask(descendents(temporaryWarPath ##, "*"), warPath, Nil) dependsOn(prepareWebapp) describedAs PackageWarDescription + + /** Redefine the default main artifact to be a war file.*/ + override protected def defaultMainArtifact = Artifact(artifactID, "war", "war") +} + +object BasicScalaProject +{ + val CleanDescription = + "Deletes all generated files (the target directory)." + val MainCompileDescription = + "Compiles main sources." + val TestCompileDescription = + "Compiles test sources." + val TestDescription = + "Runs all tests detected during compilation." + val TestOnlyDescription = + "Runs the tests provided as arguments." + val TestFailedDescription = + "Runs the tests provided as arguments if they have not succeeded." + val TestQuickDescription = + "Runs the tests provided as arguments if they have not succeeded or their dependencies changed." 
+ val DocDescription = + "Generates API documentation for main Scala source files using scaladoc." + val TestDocDescription = + "Generates API documentation for test Scala source files using scaladoc." + val RunDescription = + "Runs the main class for the project with the provided arguments." + val ConsoleDescription = + "Starts the Scala interpreter with the project classes on the classpath." + val ConsoleQuickDescription = + "Starts the Scala interpreter with the project classes on the classpath without running compile first." + val PackageDescription = + "Creates a jar file containing main classes and resources." + val TestPackageDescription = + "Creates a jar file containing test classes and resources." + val DocPackageDescription = + "Creates a jar file containing generated API documentation." + val SourcePackageDescription = + "Creates a jar file containing all main source files and resources." + val TestSourcePackageDescription = + "Creates a jar file containing all test source files and resources." + val ProjectPackageDescription = + "Creates a zip file containing the entire project, excluding generated files." + val PackageAllDescription = + "Executes all package tasks except package-project." + val DocAllDescription = + "Generates both main and test documentation." + val IncrementVersionDescription = + "Increments the micro part of the version (the third number) by one. (This is only valid for versions of the form #.#.#-*)" + val ReleaseDescription = + "Compiles, tests, generates documentation, packages, and increments the version." + + private def warnMultipleMainClasses(log: Logger) = + { + log.warn("No Main-Class attribute will be added automatically added:") + log.warn("Multiple classes with a main method were detected. 
Specify main class explicitly with:") + log.warn(" override mainClass = Some(\"className\")") + } + private def mapScalaModule(in: Iterable[_], id: String) = + { + ScalaVersion.current.toList.flatMap { scalaVersion => + in.map(jar => ModuleID(ManageDependencies.ScalaOrganization, id, scalaVersion)) + } + } +} +object BasicWebScalaProject +{ + val PackageWarDescription = + "Creates a war file." + val JettyStopDescription = + "Stops the Jetty server that was started with the jetty-run action." + val JettyRunDescription = + "Starts the Jetty server and serves this project as a web application." +} +/** Analyzes the dependencies of a project after compilation. All methods except `snapshot` return a +* `PathFinder`. The underlying calculations are repeated for each call to PathFinder.get. */ +final class LibraryDependencies(project: Project, conditional: CompileConditional) extends NotNull +{ + /** Library jars located in unmanaged or managed dependency paths.*/ + def libraries: PathFinder = pathFinder(snapshot.libraries) + /** Library jars located outside of the project.*/ + def external: PathFinder = pathFinder(snapshot.external) + /** The Scala library jar.*/ + def scalaLibrary: PathFinder = pathFinder(snapshot.scalaLibrary) + /** The Scala compiler jar.*/ + def scalaCompiler: PathFinder = pathFinder(snapshot.scalaCompiler) + /** All jar dependencies.*/ + def all: PathFinder = pathFinder(snapshot.all) + /** The Scala library and compiler jars.*/ + def scalaJars: PathFinder = pathFinder(snapshot.scalaJars) + + /** Returns an object that has all analyzed dependency information frozen at the time of this method call. 
*/ + def snapshot = new Dependencies + + private def rootProjectDirectory = project.rootProject.info.projectPath + + final class Dependencies + { + import LibraryDependencies._ + val all = conditional.analysis.allExternals.filter(ClasspathUtilities.isArchive).map(_.getAbsoluteFile) + private[this] val (internal, externalAll) = all.toList.partition(jar => Path.relativize(rootProjectDirectory, jar).isDefined) + private[this] val (bootScalaJars, librariesNoScala) = internal.partition(isScalaJar) + private[this] val (externalScalaJars, externalNoScala) = externalAll.partition(isScalaJar) + val scalaJars = externalScalaJars ::: bootScalaJars + val (scalaLibrary, scalaCompiler) = scalaJars.partition(isScalaLibraryJar) + def external = externalNoScala + def libraries = librariesNoScala + } + + private def pathFinder(it: => Iterable[File]) = Path.lazyPathFinder(it.map(Path.fromFile)) +} +private object LibraryDependencies +{ + private def ScalaLibraryPrefix = ManageDependencies.ScalaLibraryID + private def ScalaCompilerPrefix = ManageDependencies.ScalaCompilerID + private def ScalaJarPrefixes = List(ScalaCompilerPrefix, ScalaLibraryPrefix) + private def isScalaJar(file: File) = ClasspathUtilities.isArchive(file) && ScalaJarPrefixes.exists(isNamed(file)) + private def isScalaLibraryJar(file: File) = isNamed(file)(ScalaLibraryPrefix) + private def isNamed(file: File)(name: String) = file.getName.startsWith(name) + +} \ No newline at end of file diff --git a/src/main/scala/sbt/DotGraph.scala b/src/main/scala/sbt/DotGraph.scala new file mode 100644 index 000000000..18168ced9 --- /dev/null +++ b/src/main/scala/sbt/DotGraph.scala @@ -0,0 +1,47 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.{File, Writer} + +object DotGraph +{ + def apply(analysis: BasicCompileAnalysis, outputDirectory: Path, log: Logger) = + { + val outputDir = outputDirectory.asFile + + def generateGraph[Key, Value](fileName: String, graphName: String, 
graph: Iterable[(Key, scala.collection.Set[Value])], + keyToString: Key => String, valueToString: Value => String) = + { + FileUtilities.write(new File(outputDir, fileName), log) + { + (writer: Writer) => + { + def writeLine(line: String) = FileUtilities.writeLine(writer, line) + writeLine("digraph " + graphName + " {") + for( (dependsOn, dependants) <- graph; dependant <- dependants) + writeLine(valueToString(dependant) + " -> " + keyToString(dependsOn)) + writeLine("}") + None + } + } + } + FileUtilities.createDirectory(outputDir, log) orElse + generateGraph(BasicAnalysis.DependenciesFileName, "dependencies", analysis.allDependencies, + sourceToString, sourceToString) orElse + generateGraph(BasicAnalysis.ExternalDependenciesFileName, "externalDependencies", analysis.allExternalDependencies, + fileToString, sourceToString) + } + private def sourceToString(source: Path) = fileToString(source.asFile) + private def fileToString(file: File) = + { + val rawName = file.getName + val name = + if(rawName.endsWith(".scala")) + rawName.substring(0, rawName.length - ".scala".length) + else + rawName + "\"" + name + "\"" + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Environment.scala b/src/main/scala/sbt/Environment.scala new file mode 100644 index 000000000..3f35f0a55 --- /dev/null +++ b/src/main/scala/sbt/Environment.scala @@ -0,0 +1,345 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah, David MacIver + */ +package sbt + +import impl.PropertiesUtilities +import scala.reflect.Manifest + +trait Environment +{ + abstract class Property[T] extends NotNull + { + /** Explicitly sets the value of this property to 'v'.*/ + def update(v: T): Unit + /** Returns the current value of this property or throws an exception if the value could not be obtained.*/ + def value: T = resolve.value + /** Returns the current value of this property in an 'Option'. 
'None' is used to indicate that the + * value could not obtained.*/ + def get: Option[T] = resolve.toOption + /** Returns full information about this property's current value. */ + def resolve: PropertyResolution[T] + + def foreach(f: T => Unit): Unit = resolve.foreach(f) + } + + /** Creates a system property with the given name and no default value.*/ + def system[T](propName: String)(implicit format: Format[T]): Property[T] + /** Creates a system property with the given name and the given default value to use if no value is explicitly specified.*/ + def systemOptional[T](propName: String, defaultValue: => T)(implicit format: Format[T]): Property[T] + /** Creates a user-defined property that has no default value. The property will try to inherit its value + * from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified + * value will persist between builds if the object returned by this method is assigned to a 'val' in this + * 'Environment'.*/ + def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] + /** Creates a user-defined property that has no default value. The property will try to inherit its value + * from a parent environment (if one exists) if its value is not explicitly specified. An explicitly specified + * value will persist between builds if the object returned by this method is assigned to a 'val' in this + * 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the 'String' representation + * used for persistence.*/ + def propertyF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = property(manifest, format) + /** Creates a user-defined property with no default value and no value inheritance from a parent environment. 
+ * Its value will persist between builds if the returned object is assigned to a 'val' in this 'Environment'.*/ + def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] + /** Creates a user-defined property with no default value and no value inheritance from a parent environment. + * The property's value will persist between builds if the object returned by this method is assigned to a + * 'val' in this 'Environment'. The given 'format' is used to convert an instance of 'T' to and from the + * 'String' representation used for persistence.*/ + def propertyLocalF[T](format: Format[T])(implicit manifest: Manifest[T]): Property[T] = propertyLocal(manifest, format) + /** Creates a user-defined property that uses the given default value if no value is explicitly specified for this property. The property's value will persist between builds + * if the object returned by this method is assigned to a 'val' in this 'Environment'.*/ + def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T] + /** Creates a user-defined property with no value inheritance from a parent environment but with the given default + * value if no value is explicitly specified for this property. The property's value will persist between builds + * if the object returned by this method is assigned to a 'val' in this 'Environment'. The given 'format' is used + * to convert an instance of 'T' to and from the 'String' representation used for persistence.*/ + def propertyOptionalF[T](defaultValue: => T, format: Format[T])(implicit manifest: Manifest[T]): Property[T] = + propertyOptional(defaultValue)(manifest, format) +} + +import scala.collection.Map +trait BasicEnvironment extends Environment +{ + protected def log: Logger + /** The location of the properties file that backs the user-defined properties. */ + def envBackingPath: Path + /** The environment from which user-defined properties inherit (if enabled). 
*/ + protected def parentEnvironment: Option[BasicEnvironment] = None + /** The identifier used in messages to refer to this environment. */ + def environmentLabel = envBackingPath.absolutePath + + private[this] var isModified = false + private[sbt] def setEnvironmentModified(modified: Boolean) { synchronized { isModified = modified } } + private[this] def isEnvironmentModified = synchronized { isModified } + + + implicit val IntFormat: Format[Int] = new SimpleFormat[Int] { def fromString(s: String) = java.lang.Integer.parseInt(s) } + implicit val LongFormat: Format[Long] = new SimpleFormat[Long] { def fromString(s: String) = java.lang.Long.parseLong(s) } + implicit val DoubleFormat: Format[Double] = new SimpleFormat[Double] { def fromString(s: String) = java.lang.Double.parseDouble(s) } + implicit val BooleanFormat: Format[Boolean] = new SimpleFormat[Boolean] { def fromString(s: String) = java.lang.Boolean.valueOf(s).booleanValue } + implicit val StringFormat: Format[String] = Format.string + val NonEmptyStringFormat: Format[String] = new SimpleFormat[String] + { + def fromString(s: String) = + { + val trimmed = s.trim + if(trimmed.isEmpty) + error("The empty string is not allowed.") + trimmed + } + } + implicit val VersionFormat: Format[Version] = + new SimpleFormat[Version] + { + def fromString(s: String) = Version.fromString(s).fold(msg => error(msg), x => x) + } + implicit val FileFormat = Format.file + + + /** Implementation of 'Property' for user-defined properties. 
*/ + private[sbt] class UserProperty[T](lazyDefaultValue: => Option[T], format: Format[T], inheritEnabled: Boolean, + inheritFirst: Boolean, private[BasicEnvironment] val manifest: Manifest[T]) extends Property[T] + { + /** The name of this property is used for persistence in the properties file and as an identifier in messages.*/ + lazy val name = propertyMap.find( p => p._2 eq this ).map(_._1) + /** Gets the name of this property or an alternative if the name is not available.*/ + private def nameString = name.getOrElse("") + /** The lazily evaluated default value for this property.*/ + private lazy val defaultValue = lazyDefaultValue + /** The explicitly set value for this property.*/ + private[BasicEnvironment] var explicitValue = + { + def initialValue = for(n <- name; stringValue <- initialValues.get(n)) yield format.fromString(stringValue) + new LazyVar[Option[T]](initialValue) // ensure propertyMap is initialized before a read occurs + } + def update(v: T): Unit = synchronized { explicitValue() = Some(v); setEnvironmentModified(true) } + def resolve: PropertyResolution[T] = + synchronized + { + if(inheritFirst) resolveInheritFirst + else resolveDefaultFirst + } + private def resolveInheritFirst = + explicitValue() match + { + case Some(v) => DefinedValue(v, false, false) + case None => + val inherited = inheritedValue + // note that the following means the default value will not be used if an exception occurs inheriting + inherited orElse + { + defaultValue match + { + case Some(v) => DefinedValue(v, false, true) + case None => inherited + } + } + } + private def resolveDefaultFirst = + (explicitValue() orElse defaultValue) match + { + case Some(v) => DefinedValue(v, false, explicitValue().isEmpty) + case None => inheritedValue + } + + private def inheritedValue: PropertyResolution[T] = + { + val propOption = if(inheritEnabled) parentProperty else None + propOption match + { + case Some(prop) => tryToInherit(prop) + case None => UndefinedValue(nameString, 
environmentLabel) + } + } + private def parentProperty = for(parent <- parentEnvironment; n <- name; prop <- parent.propertyMap.get(n)) yield prop + + private def tryToInherit[R](prop: BasicEnvironment#UserProperty[R]): PropertyResolution[T] = + { + if(prop.manifest <:< manifest) + markInherited(prop.resolve.asInstanceOf[PropertyResolution[T]]) + else + ResolutionException("Could not inherit property '" + nameString + "' from '" + environmentLabel + "':\n" + + "\t Property had type " + prop.manifest + ", expected type " + manifest, None) + } + private def markInherited(result: PropertyResolution[T]) = + result match + { + case DefinedValue(v, isInherited, isDefault) => DefinedValue(v, true, isDefault) + case x => x + } + + override def toString = nameString + "=" + resolve + + /** Gets the explicitly set value converted to a 'String'.*/ + private[sbt] def getStringValue: Option[String] = explicitValue().map(format.toString) + /** Explicitly sets the value for this property by converting the given string value.*/ + private[sbt] def setStringValue(s: String) { update(format.fromString(s)) } + } + /** Implementation of 'Property' for system properties (i.e. System.getProperty/setProperty) */ + private class SystemProperty[T](val name: String, lazyDefaultValue: => Option[T], val format: Format[T]) extends Property[T] + { + def resolve = + { + val rawValue = System.getProperty(name) + if(rawValue == null) + notFound + else + { + Control.convertException(format.fromString(rawValue)) match + { + case Left(e) => ResolutionException("Error parsing system property '" + name + "': " + e.toString, Some(e)) + case Right(x) => DefinedValue(x, false, false) + } + } + } + /** Handles resolution when the property has no explicit value. 
If there is a default value, that is returned, + * otherwise, UndefinedValue is returned.*/ + private def notFound = + { + defaultValue match + { + case Some(dv) => + { + log.debug("System property '" + name + "' does not exist, using provided default.") + DefinedValue(dv, false, true) + } + case None => UndefinedValue(name, environmentLabel) + } + } + protected lazy val defaultValue = lazyDefaultValue + def update(t: T) + { + for(e <- Control.convertException(System.setProperty(name, format.toString(t))).left) + { + log.trace(e) + log.warn("Error setting system property '" + name + "': " + e.toString) + } + } + override def toString = name + "=" + resolve + } + + def system[T](propertyName: String)(implicit format: Format[T]): Property[T] = + new SystemProperty[T](propertyName, None, format) + def systemOptional[T](propertyName: String, defaultValue: => T)(implicit format: Format[T]): Property[T] = + new SystemProperty[T](propertyName, Some(defaultValue), format) + + def property[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] = + new UserProperty[T](None, format, true, false, manifest) + def propertyLocal[T](implicit manifest: Manifest[T], format: Format[T]): Property[T] = + new UserProperty[T](None, format, false, false, manifest) + def propertyOptional[T](defaultValue: => T)(implicit manifest: Manifest[T], format: Format[T]): Property[T] = + propertyOptional(defaultValue, false)(manifest, format) + def propertyOptional[T](defaultValue: => T, inheritFirst: Boolean)(implicit manifest: Manifest[T], format: Format[T]): Property[T] = + new UserProperty[T](Some(defaultValue), format, true, inheritFirst, manifest) + + private type AnyUserProperty = UserProperty[_] + /** Maps property name to property. 
The map is constructed by reflecting vals defined on this object, + * so it should not be referenced during initialization or else subclass properties will be missed.**/ + private lazy val propertyMap: Map[String, AnyUserProperty] = + { + log.debug("Discovering properties") + val propertyMap = new scala.collection.mutable.HashMap[String, AnyUserProperty] + // AnyProperty is required because the return type of the property*[T] methods is Property[T] + // and so the vals we are looking for have type Property[T] and not UserProperty[T] + // We then only keep instances of UserProperty + val vals = Environment.reflectiveMappings(this, classOf[Property[_]]) + for( (name, property: AnyUserProperty) <- vals) + propertyMap(name) = property + propertyMap.readOnly + } + private val initialValues: Map[String, String] = + { + val map = new scala.collection.mutable.HashMap[String, String] + for(errorMsg <- impl.MapUtilities.read(map, envBackingPath, log)) + log.error("Error loading properties from " + environmentLabel + " : " + errorMsg) + map.readOnly + } + + def propertyNames: Iterable[String] = propertyMap.keys.toList + def getPropertyNamed(name: String): Option[UserProperty[_]] = propertyMap.get(name) + def propertyNamed(name: String): UserProperty[_] = propertyMap(name) + def saveEnvironment(): Option[String] = + { + if(isEnvironmentModified) + { + val properties = new java.util.Properties + for( (name, variable) <- propertyMap; stringValue <- variable.getStringValue) + properties.setProperty(name, stringValue) + val result = PropertiesUtilities.write(properties, "Project properties", envBackingPath, log) + setEnvironmentModified(false) + result + } + else + None + } + private[sbt] def uninitializedProperties: Iterable[(String, Property[_])] = propertyMap.filter(_._2.get.isEmpty) +} +private object Environment +{ + def reflectiveMappings[T](obj: AnyRef, clazz: Class[T]): Map[String, T] = + { + val mappings = new scala.collection.mutable.OpenHashMap[String, T] + for ((name, 
value) <- ReflectUtilities.allValsC(obj, clazz)) + mappings(ReflectUtilities.transformCamelCase(name, '.')) = value + mappings + } +} + +sealed trait PropertyResolution[+T] extends NotNull +{ + def value: T + def orElse[R >: T](r: => PropertyResolution[R]): PropertyResolution[R] + def toOption: Option[T] + def foreach(f: T => Unit): Unit + def map[R](f: T => R): PropertyResolution[R] + def flatMap[R](f: T => PropertyResolution[R]): PropertyResolution[R] +} +sealed trait NoPropertyValue extends PropertyResolution[Nothing] +{ self: RuntimeException with PropertyResolution[Nothing] => + + def value = throw this + def toOption = None + def map[R](f: Nothing => R): PropertyResolution[R] = this + def flatMap[R](f: Nothing => PropertyResolution[R]): PropertyResolution[R] = this + def foreach(f: Nothing => Unit) {} +} +final case class ResolutionException(message: String, exception: Option[Throwable]) + extends RuntimeException(message, exception.getOrElse(null)) with NoPropertyValue +{ + def orElse[R](r: => PropertyResolution[R]) = this +} +final case class UndefinedValue(name: String, environmentLabel: String) + extends RuntimeException("Value for property '" + name + "' from " + environmentLabel + " is undefined.") with NoPropertyValue +{ + def orElse[R](r: => PropertyResolution[R]) = + r match + { + case u: UndefinedValue => this + case _ => r + } +} +final case class DefinedValue[T](value: T, isInherited: Boolean, isDefault: Boolean) extends PropertyResolution[T] +{ + def toOption = Some(value) + def orElse[R >: T](r: => PropertyResolution[R]) = this + def map[R](f: T => R) = DefinedValue[R](f(value), isInherited, isDefault) + def flatMap[R](f: T => PropertyResolution[R]) = f(value) + def foreach(f: T => Unit) { f(value) } +} +private final class LazyVar[T](initialValue: => T) extends NotNull +{ + private[this] var value: Option[T] = None + def apply() = + synchronized + { + value match + { + case Some(v) => v + case None => + val newValue = initialValue + value = 
Some(newValue) + newValue + } + } + def update(newValue: T) = synchronized { value = Some(newValue) } +} \ No newline at end of file diff --git a/src/main/scala/sbt/ExitHook.scala b/src/main/scala/sbt/ExitHook.scala new file mode 100644 index 000000000..bce31c74c --- /dev/null +++ b/src/main/scala/sbt/ExitHook.scala @@ -0,0 +1,43 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +/** Defines a function to call as sbt exits.*/ +trait ExitHook extends NotNull +{ + /** Provides a name for this hook to be used to provide feedback to the user. */ + def name: String + /** Subclasses should implement this method, which is called when this hook is executed. */ + def runBeforeExiting(): Unit +} + +object ExitHooks +{ + /** This is a list of hooks to call when sbt is finished executing.*/ + private val exitHooks = new scala.collection.mutable.HashSet[ExitHook] + /** Adds a hook to call before sbt exits. */ + private[sbt] def register(hook: ExitHook) { exitHooks += hook } + /** Removes a hook. */ + private[sbt] def unregister(hook: ExitHook) { exitHooks -= hook } + /** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. 
*/ + private[sbt] def runExitHooks(log: Logger) + { + for(hook <- exitHooks.toList) + { + try + { + log.debug("Running exit hook '" + hook.name + "'...") + hook.runBeforeExiting() + } + catch + { + case e => + { + log.trace(e); + log.error("Error running exit hook '" + hook.name + "': " + e.toString) + } + } + } + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/FileTask.scala b/src/main/scala/sbt/FileTask.scala new file mode 100644 index 000000000..80ba1f9d6 --- /dev/null +++ b/src/main/scala/sbt/FileTask.scala @@ -0,0 +1,108 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import scala.collection.{mutable, Map, Set} + +sealed trait ProductsSources extends NotNull +{ + def products: Iterable[Path] + def sources: Iterable[Path] +} +sealed trait ProductsWrapper extends NotNull +{ + def from(sources: => Iterable[Path]): ProductsSources = from(Path.lazyPathFinder(sources)) + def from(sources: PathFinder): ProductsSources +} +/** Provides methods to define tasks with basic conditional execution based on the sources +* and products of the task. */ +trait FileTasks extends Project +{ + implicit def wrapProduct(product: => Path): ProductsWrapper = FileTasks.wrapProduct(product) + implicit def wrapProducts(productsList: => Iterable[Path]): ProductsWrapper = FileTasks.wrapProducts(productsList) + /** Runs 'action' if the given products are out of date with respect to the given sources. */ + def fileTask(label: String, files: ProductsSources)(action: => Option[String]): Task = + task { FileTasks.runOption(label, files, log)(action) } + /** Runs 'action' if any of the given products do not exist. */ + def fileTask(label: String, products: => Iterable[Path])(action: => Option[String]): Task = + task { FileTasks.existenceCheck[Option[String]](label, products, log)(action)(None) } + + /** Creates a new task that performs 'action' only when the given products are out of date with respect to the given sources.. 
*/ + def fileTask(files: ProductsSources)(action: => Option[String]): Task = fileTask("", files)(action) + /** Creates a new task that performs 'action' only when at least one of the given products does not exist.. */ + def fileTask(products: => Iterable[Path])(action: => Option[String]): Task = fileTask("", products)(action) + +} +object FileTasks +{ + implicit def wrapProduct(product: => Path): ProductsWrapper = wrapProducts(product :: Nil) + implicit def wrapProducts(productsList: => Iterable[Path]): ProductsWrapper = + new ProductsWrapper + { + def from(sourceFinder: PathFinder) = + new ProductsSources + { + def products = productsList + def sources = sourceFinder.get + } + } + /** Runs 'ifOutofdate' if the given products are out of date with respect to the given sources.*/ + def runOption(label: String, files: ProductsSources, log: Logger)(ifOutofdate: => Option[String]): Option[String] = + { + val result = apply[Option[String]](label, files, log)(ifOutofdate)(None) + if(result.isDefined) + FileUtilities.clean(files.products, true, log) + result + } + /** Returns 'ifOutofdate' if the given products are out of date with respect to the given sources. Otherwise, returns ifUptodate. 
*/ + def apply[T](label: String, files: ProductsSources, log: Logger)(ifOutofdate: => T)(ifUptodate: => T): T = + { + val products = files.products + existenceCheck[T](label, products, log)(ifOutofdate) + { + val sources = files.sources + if(sources.isEmpty) + { + log.debug("Running " + label + " task because no sources exist.") + ifOutofdate + } + else + { + val oldestProductModifiedTime = mapLastModified(products).reduceLeft(_ min _) + val newestSourceModifiedTime = mapLastModified(sources).reduceLeft(_ max _) + if(oldestProductModifiedTime < newestSourceModifiedTime) + { + if(log.atLevel(Level.Debug)) + { + log.debug("Running " + label + " task because the following sources are newer than at least one product: ") + logDebugIndented(sources.filter(_.lastModified > oldestProductModifiedTime), log) + log.debug(" The following products are older than at least one source: ") + logDebugIndented(products.filter(_.lastModified < newestSourceModifiedTime), log) + } + ifOutofdate + } + else + ifUptodate + } + } + } + /** Checks that all 'products' exist. 
If they do, 'ifAllExists' is returned, otherwise 'products' is returned.*/ + private def existenceCheck[T](label: String, products: Iterable[Path], log: Logger)(action: => T)(ifAllExist: => T) = + { + val nonexisting = products.filter(!_.exists) + if(nonexisting.isEmpty) + ifAllExist + else + { + if(log.atLevel(Level.Debug)) + { + log.debug("Running " + label + " task because at least one product does not exist:") + logDebugIndented(nonexisting, log) + } + action + } + } + private def logDebugIndented[T](it: Iterable[T], log: Logger) { it.foreach(x => log.debug("\t" + x)) } + private def mapLastModified(paths: Iterable[Path]): Iterable[Long] = paths.map(_.lastModified) +} \ No newline at end of file diff --git a/src/main/scala/sbt/FileUtilities.scala b/src/main/scala/sbt/FileUtilities.scala new file mode 100644 index 000000000..a13cefad5 --- /dev/null +++ b/src/main/scala/sbt/FileUtilities.scala @@ -0,0 +1,892 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah, Nathan Hamblen + */ +package sbt + +import java.io.{Closeable, File, FileInputStream, FileOutputStream, InputStream, OutputStream} +import java.io.{ByteArrayOutputStream, InputStreamReader, OutputStreamWriter} +import java.io.{BufferedReader, BufferedWriter, FileReader, FileWriter, Reader, Writer} +import java.util.zip.{GZIPInputStream, GZIPOutputStream} +import java.net.URL +import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder} +import java.nio.channels.FileChannel +import java.util.jar.{Attributes, JarEntry, JarFile, JarInputStream, JarOutputStream, Manifest} +import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream} + +import OpenResource._ + +final class Preserved private[sbt](toRestore: scala.collection.Map[File, Path], temp: File) extends NotNull +{ + def restore(log: Logger) = + { + try + { + Control.lazyFold(toRestore.toList) { case (src, dest) => + FileUtilities.copyFile(src, dest.asFile, log) + } + } + finally { 
FileUtilities.clean(Path.fromFile(temp) :: Nil, true, log) } + } +} + +/** A collection of file related methods. */ +object FileUtilities +{ + import wrap.Wrappers.readOnly + /** The size of the byte or char buffer used in various methods.*/ + private val BufferSize = 8192 + private val Newline = System.getProperty("line.separator") + /** A pattern used to split a String by path separator characters.*/ + private val PathSeparatorPattern = java.util.regex.Pattern.compile(File.pathSeparator) + + /** Splits a String around path separator characters. */ + private[sbt] def pathSplit(s: String) = PathSeparatorPattern.split(s) + + def preserve(paths: Iterable[Path], log: Logger): Either[String, Preserved] = + { + for(tmp <- createTemporaryDirectory(log).right) yield + { + val pathMap = new scala.collection.mutable.HashMap[File, Path] + val destinationDirectory = Path.fromFile(tmp) + for(source <- paths) + { + val toPath = Path.fromString(destinationDirectory, source.relativePath) + copyFile(source, toPath, log) + pathMap(toPath.asFile) = source + } + new Preserved(readOnly(pathMap), tmp) + } + } + + /** Gzips the file 'in' and writes it to 'out'. 'in' cannot be the same file as 'out'. */ + def gzip(in: Path, out: Path, log: Logger): Option[String] = + { + require(in != out, "Input file cannot be the same as the output file.") + readStream(in.asFile, log) { inputStream => + writeStream(out.asFile, log) { outputStream => + gzip(inputStream, outputStream, log) + } + } + } + /** Gzips the InputStream 'in' and writes it to 'output'. Neither stream is closed.*/ + def gzip(input: InputStream, output: OutputStream, log: Logger): Option[String] = + gzipOutputStream.ioOption(output, "gzipping", log) { gzStream => transfer(input, gzStream, log) } + + def gunzip(input: InputStream, output: OutputStream, log: Logger): Option[String] = + gzipInputStream.ioOption(input, "gunzipping", log) { gzStream => transfer(gzStream, output, log) } + /** Gunzips the file 'in' and writes it to 'out'. 
'in' cannot be the same file as 'out'. */
	def gunzip(in: Path, out: Path, log: Logger): Option[String] =
	{
		require(in != out, "Input file cannot be the same as the output file.")
		readStream(in.asFile, log) { inputStream =>
			writeStream(out.asFile, log) { outputStream =>
				gunzip(inputStream, outputStream, log)
			}
		}
	}

	/** Creates a jar file.
	* @param sources The files to include in the jar file. The path used for the jar is
	* relative to the base directory for the source. That is, the path in the jar for source
	* (basePath ##) / x / y is x / y.
	* @param outputJar The file to write the jar to.
	* @param manifest The manifest for the jar.
	* @param recursive If true, any directories in sources are recursively processed. Otherwise,
	* they are not
	* @param log The Logger to use. */
	def jar(sources: Iterable[Path], outputJar: Path, manifest: Manifest, recursive: Boolean, log: Logger) =
		archive(sources, outputJar, Some(manifest), recursive, log)
	/** Deprecated alias for jar.*/
	@deprecated def pack(sources: Iterable[Path], outputJar: Path, manifest: Manifest, recursive: Boolean, log: Logger) =
		jar(sources, outputJar, manifest, recursive, log)
	/** Creates a zip file.
	* @param sources The files to include in the jar file. The path used for the jar is
	* relative to the base directory for the source. That is, the path in the jar for source
	* (basePath ##) / x / y is x / y.
	* @param outputZip The file to write the zip to.
	* @param recursive If true, any directories in sources are recursively processed. Otherwise,
	* they are not
	* @param log The Logger to use. */
	def zip(sources: Iterable[Path], outputZip: Path, recursive: Boolean, log: Logger) =
		archive(sources, outputZip, None, recursive, log)

	/** Common implementation for jar and zip: a Manifest selects jar output (JarEntry/JarOutputStream),
	* its absence selects plain zip output. Returns None on success, Some(error) on failure.*/
	private def archive(sources: Iterable[Path], outputPath: Path, manifest: Option[Manifest], recursive: Boolean, log: Logger) =
	{
		log.info("Packaging " + outputPath + " ...")
		val outputFile = outputPath.asFile
		if(outputFile.isDirectory)
			Some("Specified output file " + outputFile + " is a directory.")
		else
		{
			val outputDir = outputFile.getParentFile
			// ensure the parent directory exists before opening the archive stream
			val result = createDirectory(outputDir, log) orElse
				withZipOutput(outputFile, manifest, log)
				{ output =>
					val createEntry: (String => ZipEntry) = if(manifest.isDefined) new JarEntry(_) else new ZipEntry(_)
					writeZip(sources, output, recursive, log)(createEntry)
				}
			if(result.isEmpty)
				log.info("Packaging complete.")
			result
		}
	}

	/** Writes each source to 'output' under its relative path. Directories are only entered
	* when 'recursive' is true; missing sources are warned about, not treated as errors.*/
	private def writeZip(sources: Iterable[Path], output: ZipOutputStream, recursive: Boolean, log: Logger)(createEntry: String => ZipEntry) =
	{
		def add(source: Path)
		{
			val sourceFile = source.asFile
			if(sourceFile.isDirectory)
			{
				if(recursive)
					wrapNull(sourceFile.listFiles).foreach(file => add(source / file.getName))
			}
			else if(sourceFile.exists)
			{
				val relativePath = source.relativePathString("/")
				log.debug("\tAdding " + source + " as " + relativePath + " ...")
				val nextEntry = createEntry(relativePath)
				// preserve the source's timestamp on the archive entry
				nextEntry.setTime(sourceFile.lastModified)
				output.putNextEntry(nextEntry)
				transferAndClose(new FileInputStream(sourceFile), output, log)
			}
			else
				log.warn("\tSource " + source + " does not exist.")
		}
		sources.foreach(add)
		output.closeEntry()
		None
	}

	/** Opens 'file' as a JarOutputStream (when a manifest is given, defaulting its version
	* attribute to 1.0) or a ZipOutputStream, runs 'f', and always closes the stream.*/
	private def withZipOutput(file: File, manifest: Option[Manifest], log: Logger)(f: ZipOutputStream => Option[String]): Option[String] =
	{
		writeStream(file, log)
		{
			fileOut =>
			{
				val (zipOut, ext) =
					manifest match
					{
						case Some(mf) =>
						{
							import Attributes.Name.MANIFEST_VERSION
							val main = mf.getMainAttributes
							// a manifest without a version attribute is not written out by JarOutputStream
							if(!main.containsKey(MANIFEST_VERSION))
								main.put(MANIFEST_VERSION, "1.0")
							(new JarOutputStream(fileOut, mf), "jar")
						}
						case None => (new ZipOutputStream(fileOut), "zip")
					}
				Control.trapUnitAndFinally("Error writing " + ext + ": ", log)
					{ f(zipOut) } { zipOut.close }
			}
		}
	}
	import scala.collection.Set
	/** Unzips the contents of the zip file from to the toDirectory directory.*/
	def unzip(from: Path, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
		unzip(from, toDirectory, AllPassFilter, log)
	/** Unzips the contents of the zip file from to the toDirectory directory.*/
	def unzip(from: File, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
		unzip(from, toDirectory, AllPassFilter, log)
	/** Unzips the contents of the zip file from to the toDirectory directory.*/
	def unzip(from: InputStream, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
		unzip(from, toDirectory, AllPassFilter, log)
	/** Unzips the contents of the zip file from to the toDirectory directory.*/
	def unzip(from: URL, toDirectory: Path, log: Logger): Either[String, Set[Path]] =
		unzip(from, toDirectory, AllPassFilter, log)

	/** Unzips the contents of the zip file from to the toDirectory directory.
	* Only the entries that match the given filter are extracted. */
	def unzip(from: Path, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
		unzip(from.asFile, toDirectory, filter, log)
	/** Unzips the contents of the zip file from to the toDirectory directory.
	* Only the entries that match the given filter are extracted. */
	def unzip(from: File, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
		readStreamValue(from, log)(in => unzip(in, toDirectory, filter, log))
	/** Unzips the contents of the zip file from to the toDirectory directory.
	* Only the entries that match the given filter are extracted.
*/
	def unzip(from: URL, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
		readStreamValue(from, log) { stream => unzip(stream, toDirectory, filter, log) }
	/** Unzips the contents of the zip file from to the toDirectory directory.
	* Only the entries that match the given filter are extracted. */
	def unzip(from: InputStream, toDirectory: Path, filter: NameFilter, log: Logger): Either[String, Set[Path]] =
	{
		createDirectory(toDirectory, log) match
		{
			case Some(err) => Left(err)
			case None => zipInputStream.io(from, "unzipping", log) { zipInput => extract(zipInput, toDirectory, filter, log) }
		}
	}
	/** Extracts entries from 'from' into 'toDirectory', returning the set of extracted file Paths
	* (directories are created but not included in the set). Stops at the first error.*/
	private def extract(from: ZipInputStream, toDirectory: Path, filter: NameFilter, log: Logger) =
	{
		val set = new scala.collection.mutable.HashSet[Path]
		// processes one entry per call, recursing until the stream is exhausted or an error occurs
		def next(): Option[String] =
		{
			val entry = from.getNextEntry
			if(entry == null)
				None
			else
			{
				val name = entry.getName
				val result =
					if(filter.accept(name))
					{
						val target = Path.fromString(toDirectory, name)
						log.debug("Extracting zip entry '" + name + "' to '" + target + "'")
						val result =
							if(entry.isDirectory)
								createDirectory(target, log)
							else
							{
								set += target
								writeStream(target.asFile, log) { out => FileUtilities.transfer(from, out, log) }
							}
						//target.asFile.setLastModified(entry.getTime)
						result
					}
					else
					{
						log.debug("Ignoring zip entry '" + name + "'")
						None
					}
				from.closeEntry()
				result match { case None => next(); case x => x }
			}
		}
		next().toLeft(readOnly(set))
	}

	/** Copies all bytes from the given input stream to the given output stream.
	* Neither stream is closed.*/
	def transfer(in: InputStream, out: OutputStream, log: Logger): Option[String] =
		transferImpl(in, out, false, log)
	/** Copies all bytes from the given input stream to the given output stream. The
	* input stream is closed after the method completes.*/
	def transferAndClose(in: InputStream, out: OutputStream, log: Logger): Option[String] =
		transferImpl(in, out, true, log)
	/** Buffered copy loop shared by transfer and transferAndClose; 'close' controls whether
	* the input stream is closed afterwards (the output stream never is).*/
	private def transferImpl(in: InputStream, out: OutputStream, close: Boolean, log: Logger): Option[String] =
	{
		Control.trapUnitAndFinally("Error during transfer: ", log)
		{
			val buffer = new Array[Byte](BufferSize)
			def read: None.type =
			{
				val byteCount = in.read(buffer)
				if(byteCount >= 0)
				{
					out.write(buffer, 0, byteCount)
					read
				}
				else
					None
			}
			read
		}
		{ if(close) in.close }
	}

	/** Creates a file at the given location.*/
	def touch(path: Path, log: Logger): Option[String] = touch(path.asFile, log)
	/** Creates a file at the given location. If the file already exists, its last-modified
	* time is updated instead.*/
	def touch(file: File, log: Logger): Option[String] =
	{
		Control.trapUnit("Could not create file " + file + ": ", log)
		{
			if(file.exists)
			{
				def updateFailBase = "Could not update last modified for file " + file
				Control.trapUnit(updateFailBase + ": ", log)
					{ if(file.setLastModified(System.currentTimeMillis)) None else Some(updateFailBase) }
			}
			else
				createDirectory(file.getParentFile, log) orElse { file.createNewFile(); None }
		}
	}
	/** Creates a directory at the given location.*/
	def createDirectory(dir: Path, log: Logger): Option[String] = createDirectory(dir.asFile, log)
	/** Creates a directory at the given location.*/
	def createDirectory(dir: File, log: Logger): Option[String] =
	{
		Control.trapUnit("Could not create directory " + dir + ": ", log)
		{
			if(dir.exists)
			{
				if(dir.isDirectory)
					None
				else
					Some(dir + " exists and is not a directory.")
			}
			else
			{
				dir.mkdirs()
				log.debug("Created directory " + dir)
				None
			}
		}
	}
	/** Creates directories at the given locations.*/
	def createDirectories(d: Seq[Path], log: Logger): Option[String] = createDirectories(d.toList.map(_.asFile), log)
	/** Creates directories at the given locations, stopping at the first failure.*/
	def createDirectories(d: List[File], log: Logger): Option[String] =
		d match
		{
			case Nil => None
			case head :: tail => createDirectory(head, log) orElse createDirectories(tail, log)
		}
	/** The maximum number of times a unique temporary filename is attempted to be created.*/
	private val MaximumTries = 10
	/** Creates a temporary directory and returns it. Retries with a fresh random name
	* up to MaximumTries times on collision.*/
	def createTemporaryDirectory(log: Logger): Either[String, File] =
	{
		def create(tries: Int): Either[String, File] =
		{
			if(tries > MaximumTries)
				Left("Could not create temporary directory.")
			else
			{
				val randomName = "sbt_" + java.lang.Integer.toHexString(random.nextInt)
				val f = new File(temporaryDirectory, randomName)

				if(createDirectory(f, log).isEmpty)
					Right(f)
				else
					create(tries + 1)
			}
		}
		create(0)
	}

	/** Creates a temporary directory, runs 'action' with it, and deletes it afterwards.*/
	def withTemporaryDirectory(log: Logger)(action: File => Option[String]): Option[String] =
		doInTemporaryDirectory(log: Logger)(file => action(file).toLeft(())).left.toOption
	/** Creates a temporary directory and provides its location to the given function. The directory
	* is deleted after the function returns.*/
	def doInTemporaryDirectory[T](log: Logger)(action: File => Either[String, T]): Either[String, T] =
	{
		def doInDirectory(dir: File): Either[String, T] =
		{
			Control.trapAndFinally("", log)
				{ action(dir) }
				{ delete(dir, true, log) }
		}
		createTemporaryDirectory(log).right.flatMap(doInDirectory)
	}
	/** Creates a temporary file, runs 'action' with it, and deletes the file afterwards.*/
	def withTemporaryFile[T](log: Logger, prefix: String, postfix: String)(action: File => Either[String, T]): Either[String, T] =
	{
		Control.trap("Error creating temporary file: ", log)
		{
			val file = File.createTempFile(prefix, postfix)
			Control.trapAndFinally("", log)
				{ action(file) }
				{ file.delete() }
		}
	}

	/** Copies the files declared in sources to the destinationDirectory
	* directory. The source directory hierarchy is flattened so that all copies are immediate
	* children of destinationDirectory.
Directories are not recursively entered.*/
	def copyFlat(sources: Iterable[Path], destinationDirectory: Path, log: Logger) =
	{
		val targetSet = new scala.collection.mutable.HashSet[Path]
		copyImpl(sources, destinationDirectory, log)
		{
			source =>
			{
				val from = source.asFile
				val toPath = destinationDirectory / from.getName
				targetSet += toPath
				val to = toPath.asFile
				// FIX: the original condition was '!to.exists || from.lastModified > to.lastModified && !from.isDirectory';
				// && binds tighter than ||, so a directory source with a missing target reached copyFile,
				// which requires a non-directory source and failed. Directories are now always skipped,
				// matching the intent of '&& !from.isDirectory' and this method's "not recursively entered" contract.
				if(!from.isDirectory && (!to.exists || from.lastModified > to.lastModified))
				{
					log.debug("Copying " + source + " to " + toPath)
					copyFile(from, to, log)
				}
				else
					None
			}
		}.toLeft(readOnly(targetSet))
	}
	/** Shared driver for copyFlat and copy: ensures the destination directory exists, then applies
	* 'doCopy' to each source in order, stopping at the first error. Exceptions are trapped into
	* Some(error).*/
	private def copyImpl(sources: Iterable[Path], destinationDirectory: Path, log: Logger)
		(doCopy: Path => Option[String]): Option[String] =
	{
		val target = destinationDirectory.asFile
		val creationError =
			if(target.isDirectory)
				None
			else
				createDirectory(target, log)
		def copy(sources: List[Path]): Option[String] =
		{
			sources match
			{
				case src :: remaining =>
				{
					doCopy(src) match
					{
						case None => copy(remaining)
						case error => error
					}
				}
				case Nil => None
			}
		}
		creationError orElse ( Control.trapUnit("", log) { copy(sources.toList) } )
	}
	/** Retrieves the content of the given URL and writes it to the given File. */
	def download(url: URL, to: File, log: Logger) =
	{
		readStream(url, log) { inputStream =>
			writeStream(to, log) { outputStream =>
				transfer(inputStream, outputStream, log)
			}
		}
	}
	/** Copies the files declared in sources to the destinationDirectory
	* directory. Directories are not recursively entered. The destination hierarchy matches the
	* source paths relative to any base directories. For example:
	*
	* A source (basePath ##) / x / y is copied to destinationDirectory / x / y.
	* */
	def copy(sources: Iterable[Path], destinationDirectory: Path, log: Logger) =
	{
		val targetSet = new scala.collection.mutable.HashSet[Path]
		copyImpl(sources, destinationDirectory, log)
		{
			source =>
			{
				val from = source.asFile
				val toPath = Path.fromString(destinationDirectory, source.relativePath)
				targetSet += toPath
				val to = toPath.asFile
				// only copy when the target is missing or stale; directories are created, not copied
				if(!to.exists || from.lastModified > to.lastModified)
				{
					if(from.isDirectory)
						createDirectory(to, log)
					else
					{
						log.debug("Copying " + source + " to " + toPath)
						copyFile(from, to, log)
					}
				}
				else
					None
			}
		}.toLeft(readOnly(targetSet))
	}

	/** Copies the files declared in sources to the targetDirectory
	* directory. The source directory hierarchy is flattened so that all copies are immediate
	* children of targetDirectory. Directories are not recursively entered.*/
	def copyFilesFlat(sources: Iterable[File], targetDirectory: Path, log: Logger) =
	{
		require(targetDirectory.asFile.isDirectory, "Target '" + targetDirectory + "' is not a directory.")
		// deduplicate by simple file name: a later source with the same name replaces an earlier one
		val byName = new scala.collection.mutable.HashMap[String, File]
		for(source <- sources) byName.put(source.getName, source)
		val uniquelyNamedSources = byName.values
		val targetSet = new scala.collection.mutable.HashSet[Path]
		def copy(source: File): Option[String] =
		{
			if(source.isDirectory)
				copyAll(source.listFiles.toList)
			else if(source.exists)
			{
				val targetPath = targetDirectory / source.getName
				targetSet += targetPath
				if(!targetPath.exists || source.lastModified > targetPath.lastModified)
				{
					log.debug("Copying " + source + " to " + targetPath)
					copyFile(source, targetPath.asFile, log)
				}
				else
					None
			}
			else
				None
		}
		def copyAll(sources: List[File]): Option[String] =
			sources match
			{
				case head :: tail =>
					copy(head) match
					{
						case None => copyAll(tail)
						case x => x
					}
				case Nil => None
			}

		Control.trap("Error copying files: ", log) { copyAll(uniquelyNamedSources.toList).toLeft(readOnly(targetSet)) }
	}
	/** Copies sourceFile to targetFile. If targetFile
	* exists, it is overwritten. Note that unlike higher level copies in FileUtilities, this
	* method always performs the copy, even if sourceFile is older than targetFile.*/
	def copyFile(sourceFile: Path, targetFile: Path, log: Logger): Option[String] =
		copyFile(sourceFile.asFile, targetFile.asFile, log)
	/** Copies sourceFile to targetFile. If targetFile
	* exists, it is overwritten. Note that unlike higher level copies in FileUtilities, this
	* method always performs the copy, even if sourceFile is older than targetFile.*/
	def copyFile(sourceFile: File, targetFile: File, log: Logger): Option[String] =
	{
		require(sourceFile.exists, "Source file '" + sourceFile.getAbsolutePath + "' does not exist.")
		require(!sourceFile.isDirectory, "Source file '" + sourceFile.getAbsolutePath + "' is a directory.")
		readChannel(sourceFile, log)(
			in => writeChannel(targetFile, log) {
				out => {
					val copied = out.transferFrom(in, 0, in.size)
					// transferFrom may copy fewer bytes than requested; treat a short copy as an error
					if(copied == in.size)
						None
					else
						Some("Could not copy '" + sourceFile + "' to '" + targetFile + "' (" + copied + "/" + in.size + " bytes copied)")
				}
			}
		)
	}

	/** Synchronizes the contents of the sourceDirectory directory to the
	* targetDirectory directory: copies changed files, then prunes anything not copied.*/
	def sync(sourceDirectory: Path, targetDirectory: Path, log: Logger): Option[String] =
	{
		copy(((sourceDirectory ##) ** AllPassFilter).get, targetDirectory, log).right.flatMap
			{ copiedTo => prune(targetDirectory, copiedTo, log).toLeft(()) }.left.toOption
	}
	/** Deletes everything under 'directory' that is not listed in 'keepOnly'.*/
	def prune(directory: Path, keepOnly: Iterable[Path], log: Logger): Option[String] =
	{
		val existing = ((directory ##) ** AllPassFilter).get
		val toRemove = scala.collection.mutable.HashSet(existing.toSeq: _*)
		toRemove --= keepOnly
		if(log.atLevel(Level.Debug))
			toRemove.foreach(r => log.debug("Pruning " + r))
		clean(toRemove, true, log)
	}

	/** Copies the
contents of the source directory to the target directory .*/
	def copyDirectory(source: Path, target: Path, log: Logger): Option[String] =
		copyDirectory(source.asFile, target.asFile, log)
	/** Copies the contents of the source directory to the target directory .
	* The target must not already exist; the source is entered recursively.*/
	def copyDirectory(source: File, target: File, log: Logger): Option[String] =
	{
		require(source.isDirectory, "Source '" + source.getAbsolutePath + "' is not a directory.")
		require(!target.exists, "Target '" + target.getAbsolutePath + "' already exists.")
		// creates targetDir, then copies each child, recursing into subdirectories
		def copyDirectory(sourceDir: File, targetDir: File): Option[String] =
			createDirectory(targetDir, log) orElse copyContents(sourceDir, targetDir)
		def copyContents(sourceDir: File, targetDir: File): Option[String] =
			sourceDir.listFiles.foldLeft(None: Option[String])
			{
				(result, file) =>
					result orElse
					{
						val targetFile = new File(targetDir, file.getName)
						if(file.isDirectory)
							copyDirectory(file, targetFile)
						else
							copyFile(file, targetFile, log)
					}
			}
		copyDirectory(source, target)
	}


	/** Deletes the given file recursively.*/
	def clean(file: Path, log: Logger): Option[String] = clean(file :: Nil, log)
	/** Deletes the given files recursively.*/
	def clean(files: Iterable[Path], log: Logger): Option[String] = clean(files, false, log)
	/** Deletes the given files recursively. quiet determines the logging level.
	* If it is true, each file in files is logged at the info level.
 * If it is false, the debug level is used.*/
	def clean(files: Iterable[Path], quiet: Boolean, log: Logger): Option[String] =
		deleteFiles(files.map(_.asFile), quiet, log)

	/** Deletes each file in turn, returning the first error encountered, if any.*/
	private def deleteFiles(files: Iterable[File], quiet: Boolean, log: Logger): Option[String] =
		((None: Option[String]) /: files)( (result, file) => result orElse delete(file, quiet, log))
	/** Deletes a single file or directory (recursively). Note: the code logs at Debug when
	* quiet is true and Info when false — the scaladoc on clean above appears to state the
	* inverse; NOTE(review): confirm which is intended.*/
	private def delete(file: File, quiet: Boolean, log: Logger): Option[String] =
	{
		def logMessage(message: => String)
		{
			log.log(if(quiet) Level.Debug else Level.Info, message)
		}
		Control.trapUnit("Error deleting file " + file + ": ", log)
		{
			if(file.isDirectory)
			{
				logMessage("Deleting directory " + file)
				// NOTE(review): the Option result of deleting the children is discarded, so a
				// failure below is silently ignored and only surfaces as file.delete failing.
				deleteFiles(wrapNull(file.listFiles), true, log)
				file.delete
			}
			else if(file.exists)
			{
				logMessage("Deleting file " + file)
				file.delete
			}
			None
		}
	}

	/** Appends the given String content to the provided file using the default encoding.
	* A new file is created if it does not exist.*/
	def append(file: File, content: String, log: Logger): Option[String] = append(file, content, Charset.defaultCharset, log)
	/** Appends the given String content to the provided file using the given encoding.
	* A new file is created if it does not exist.*/
	def append(file: File, content: String, charset: Charset, log: Logger): Option[String] =
		write(file, content, charset, true, log)

	/** Writes the given String content to the provided file using the default encoding.
	* If the file exists, it is overwritten.*/
	def write(file: File, content: String, log: Logger): Option[String] = write(file, content, Charset.defaultCharset, log)
	/** Writes the given String content to the provided file using the given encoding.
+ * If the file already exists, it is overwritten.*/ + def write(file: File, content: String, charset: Charset, log: Logger): Option[String] = + write(file, content, charset, false, log) + private def write(file: File, content: String, charset: Charset, append: Boolean, log: Logger): Option[String] = + { + if(charset.newEncoder.canEncode(content)) + write(file, charset, append, log) { w => w.write(content); None } + else + Some("String cannot be encoded by default charset.") + } + + /** Opens a Writer on the given file using the default encoding, + * passes it to the provided function, and closes the Writer.*/ + def write(file: File, log: Logger)(f: Writer => Option[String]): Option[String] = + write(file, Charset.defaultCharset, log)(f) + /** Opens a Writer on the given file using the given encoding, + * passes it to the provided function, and closes the Writer.*/ + def write(file: File, charset: Charset, log: Logger)(f: Writer => Option[String]): Option[String] = + write(file, charset, false, log)(f) + private def write(file: File, charset: Charset, append: Boolean, log: Logger)(f: Writer => Option[String]): Option[String] = + fileWriter(charset, append).ioOption(file, Writing, log)(f) + + /** Opens a Reader on the given file using the default encoding, + * passes it to the provided function, and closes the Reader.*/ + def read(file: File, log: Logger)(f: Reader => Option[String]): Option[String] = + read(file, Charset.defaultCharset, log)(f) + /** Opens a Reader on the given file using the default encoding, + * passes it to the provided function, and closes the Reader.*/ + def read(file: File, charset: Charset, log: Logger)(f: Reader => Option[String]): Option[String] = + fileReader(charset).ioOption(file, Reading, log)(f) + /** Opens a Reader on the given file using the default encoding, + * passes it to the provided function, and closes the Reader.*/ + def readValue[R](file: File, log: Logger)(f: Reader => Either[String, R]): Either[String, R] = + 
readValue(file, Charset.defaultCharset, log)(f) + /** Opens a Reader on the given file using the given encoding, + * passes it to the provided function, and closes the Reader.*/ + def readValue[R](file: File, charset: Charset, log: Logger)(f: Reader => Either[String, R]): Either[String, R] = + fileReader(charset).io(file, Reading, log)(f) + + /** Reads the contents of the given file into a String using the default encoding. + * The resulting String is wrapped in Right.*/ + def readString(file: File, log: Logger): Either[String, String] = readString(file, Charset.defaultCharset, log) + /** Reads the contents of the given file into a String using the given encoding. + * The resulting String is wrapped in Right.*/ + def readString(file: File, charset: Charset, log: Logger): Either[String, String] = readValue(file, charset, log)(readString) + + def readString(in: InputStream, log: Logger): Either[String, String] = readString(in, Charset.defaultCharset, log) + def readString(in: InputStream, charset: Charset, log: Logger): Either[String, String] = + streamReader.io((in, charset), Reading, log)(readString) + def readString(in: Reader, log: Logger): Either[String, String] = + Control.trapAndFinally("Error reading bytes from reader: ", log) + { readString(in) } + { in.close() } + private def readString(in: Reader): Either[String, String] = + { + val builder = new StringBuilder + val buffer = new Array[Char](BufferSize) + def readNext() + { + val read = in.read(buffer, 0, buffer.length) + if(read >= 0) + { + builder.append(buffer, 0, read) + readNext() + } + else + None + } + readNext() + Right(builder.toString) + } + /** Appends the given bytes to the given file. */ + def append(file: File, bytes: Array[Byte], log: Logger): Option[String] = + writeBytes(file, bytes, true, log) + /** Writes the given bytes to the given file. 
If the file already exists, it is overwritten.*/
	def write(file: File, bytes: Array[Byte], log: Logger): Option[String] =
		writeBytes(file, bytes, false, log)
	/** Common implementation for write/append of raw bytes.*/
	private def writeBytes(file: File, bytes: Array[Byte], append: Boolean, log: Logger): Option[String] =
		writeStream(file, append, log) { out => out.write(bytes); None }

	/** Reads the entire file into a byte array. */
	def readBytes(file: File, log: Logger): Either[String, Array[Byte]] = readStreamValue(file, log)(readBytes)
	/** Reads the entire stream into a byte array; the stream is closed afterwards.*/
	def readBytes(in: InputStream, log: Logger): Either[String, Array[Byte]] =
		Control.trapAndFinally("Error reading bytes from input stream: ", log)
			{ readBytes(in) }
			{ in.close() }
	/** Buffered read loop: accumulates the stream's content into a byte array.*/
	private def readBytes(in: InputStream): Either[String, Array[Byte]] =
	{
		val out = new ByteArrayOutputStream
		val buffer = new Array[Byte](BufferSize)
		def readNext()
		{
			val read = in.read(buffer)
			if(read >= 0)
			{
				out.write(buffer, 0, read)
				readNext()
			}
		}
		readNext()
		Right(out.toByteArray)
	}

	/** Opens an OutputStream on the given file with append=true and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def appendStream(file: File, log: Logger)(f: OutputStream => Option[String]): Option[String] =
		fileOutputStream(true).ioOption(file, Appending, log)(f)
	/** Opens an OutputStream on the given file and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def writeStream(file: File, log: Logger)(f: OutputStream => Option[String]): Option[String] =
		fileOutputStream(false).ioOption(file, Writing, log)(f)
	private def writeStream(file: File, append: Boolean, log: Logger)(f: OutputStream => Option[String]): Option[String] =
		if(append) appendStream(file, log)(f) else writeStream(file, log)(f)
	/** Opens an InputStream on the given file and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def readStream(file: File, log: Logger)(f: InputStream => Option[String]): Option[String] =
		fileInputStream.ioOption(file, Reading, log)(f)
	/** Opens an InputStream on the given file and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def readStreamValue[R](file: File, log: Logger)(f: InputStream => Either[String, R]): Either[String, R] =
		fileInputStream.io(file, Reading, log)(f)
	/** Opens an InputStream on the given URL and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def readStream(url: URL, log: Logger)(f: InputStream => Option[String]): Option[String] =
		urlInputStream.ioOption(url, Reading, log)(f)
	/** Opens an InputStream on the given URL and passes the stream
	* to the provided function. The stream is closed before this function returns.*/
	def readStreamValue[R](url: URL, log: Logger)(f: InputStream => Either[String, R]): Either[String, R] =
		urlInputStream.io(url, Reading, log)(f)

	/** Opens a FileChannel on the given file for writing and passes the channel
	* to the given function. The channel is closed before this function returns.*/
	def writeChannel(file: File, log: Logger)(f: FileChannel => Option[String]): Option[String] =
		fileOutputChannel.ioOption(file, Writing, log)(f)
	/** Opens a FileChannel on the given file for reading and passes the channel
	* to the given function. The channel is closed before this function returns.*/
	def readChannel(file: File, log: Logger)(f: FileChannel => Option[String]): Option[String] =
		fileInputChannel.ioOption(file, Reading, log)(f)
	/** Opens a FileChannel on the given file for reading and passes the channel
	* to the given function.
The channel is closed before this function returns.*/
	def readChannelValue[R](file: File, log: Logger)(f: FileChannel => Either[String, R]): Either[String, R] =
		fileInputChannel.io(file, Reading, log)(f)

	/** Wraps a possibly-null File array (e.g. from File.listFiles) as an empty array.*/
	private[sbt] def wrapNull(a: Array[File]): Array[File] =
		if(a == null)
			new Array[File](0)
		else
			a

	/** Writes the given string to the writer followed by a newline.*/
	private[sbt] def writeLine(writer: Writer, line: String)
	{
		writer.write(line)
		writer.write(Newline)
	}

	/** The directory in which temporary files are placed.*/
	val temporaryDirectory = new File(System.getProperty("java.io.tmpdir"))
	/** The code-source location of the given class.
	* NOTE(review): getProtectionDomain.getCodeSource can be null for bootstrap classes — confirm callers only pass sbt/Scala classes.*/
	def classLocation(cl: Class[_]): URL = cl.getProtectionDomain.getCodeSource.getLocation
	def classLocationFile(cl: Class[_]): File = new File(classLocation(cl).toURI)
	def classLocation[T](implicit mf: scala.reflect.Manifest[T]): URL = classLocation(mf.erasure)
	def classLocationFile[T](implicit mf: scala.reflect.Manifest[T]): File = classLocationFile(mf.erasure)

	/** The location of the jar containing this class.*/
	lazy val sbtJar: File = classLocationFile(getClass)
	lazy val scalaLibraryJar: File = classLocationFile[scala.ScalaObject]
	lazy val scalaCompilerJar: File = classLocationFile[scala.tools.nsc.Settings]

	/** The producer of randomness for unique name generation.*/
	private val random = new java.util.Random

	// operation labels used in error messages by the OpenResource helpers
	private val Reading = "reading"
	private val Writing = "writing"
	private val Appending = "appending"
}

/** Abstracts the open/use/close pattern: 'open' acquires a T from a Source, 'io'/'ioOption'
* run a function against it, trapping exceptions into an error message, and 'close' always
* releases it.*/
private abstract class OpenResource[Source, T] extends NotNull
{
	import OpenResource.{unwrapEither, wrapEither}
	protected def open(src: Source, log: Logger): Either[String, T]
	/** Runs 'f' against the opened resource, collapsing the result to Option[error].*/
	def ioOption(src: Source, op: String, log: Logger)(f: T => Option[String]) =
		unwrapEither( io(src, op, log)(wrapEither(f)) )
	/** Runs 'f' against the opened resource; the resource is closed even if 'f' fails.*/
	def io[R](src: Source, op: String, log: Logger)(f: T => Either[String,R]): Either[String, R] =
		open(src, log).right flatMap
		{
			resource => Control.trapAndFinally("Error " + op + " "+ src + ": ", log)
				{ f(resource) }
				{ close(resource) }
		}
	protected def close(out: T): Unit
}
/** Mixin providing 'close' for resources implementing java.io.Closeable.*/
private trait CloseableOpenResource[Source, T <: Closeable] extends OpenResource[Source, T]
{
	protected def close(out: T): Unit = out.close()
}
import scala.reflect.{Manifest => SManifest}
/** OpenResource whose 'open' merely wraps the source in T without I/O of its own; the
* Manifests supply readable type names for the error message.*/
private abstract class WrapOpenResource[Source, T <: Closeable](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends CloseableOpenResource[Source, T]
{
	private def label[S](m: SManifest[S]) = m.erasure.getSimpleName
	protected def open(source: Source): T
	protected final def open(source: Source, log: Logger): Either[String, T] =
		Control.trap("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ", log) { Right(open(source)) }
}
/** OpenResource over a File that creates the parent directory before opening.*/
private abstract class OpenFile[T] extends OpenResource[File, T]
{
	protected def open(file: File): T
	protected final def open(file: File, log: Logger): Either[String, T] =
	{
		val parent = file.getParentFile
		if(parent != null)
			FileUtilities.createDirectory(parent, log)
		Control.trap("Error opening " + file + ": ", log) { Right(open(file)) }
	}
}
private abstract class CloseableOpenFile[T <: Closeable] extends OpenFile[T] with CloseableOpenResource[File, T]
/** Factories for the concrete resource wrappers used throughout FileUtilities.*/
private object OpenResource
{
	private def wrapEither[R](f: R => Option[String]): (R => Either[String, Unit]) = (r: R) => f(r).toLeft(())
	private def unwrapEither(e: Either[String, Unit]): Option[String] = e.left.toOption

	def fileOutputStream(append: Boolean) =
		new CloseableOpenFile[FileOutputStream] { protected def open(file: File) = new FileOutputStream(file, append) }
	def fileInputStream = new CloseableOpenFile[FileInputStream]
		{ protected def open(file: File) = new FileInputStream(file) }
	def urlInputStream = new CloseableOpenResource[URL, InputStream]
		{ protected def open(url: URL, log: Logger) = Control.trap("Error opening " + url + ": ", log) { Right(url.openStream) } }
	def fileOutputChannel = new CloseableOpenFile[FileChannel]
		{ protected def open(f: File) = (new FileOutputStream(f)).getChannel }
	def fileInputChannel = new CloseableOpenFile[FileChannel]
		{ protected def open(f: File) = (new FileInputStream(f)).getChannel }
	def fileWriter(charset: Charset, append: Boolean) = new CloseableOpenFile[Writer]
		{ protected def open(f: File) = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)) }
	def fileReader(charset: Charset) = new CloseableOpenFile[Reader]
		{ protected def open(f: File) = new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) }
	def jarFile(verify: Boolean) = new OpenFile[JarFile]
		{ protected def open(f: File) = new JarFile(f, verify)
		  override protected def close(j: JarFile) = j.close() }
	def zipFile = new OpenFile[ZipFile]
		{ protected def open(f: File) = new ZipFile(f)
		  override protected def close(z: ZipFile) = z.close() }
	def streamReader = new WrapOpenResource[(InputStream, Charset), Reader]
		{ protected def open(streamCharset: (InputStream, Charset)) = new InputStreamReader(streamCharset._1, streamCharset._2) }
	def gzipInputStream = new WrapOpenResource[InputStream, GZIPInputStream]
		{ protected def open(in: InputStream) = new GZIPInputStream(in) }
	def zipInputStream = new WrapOpenResource[InputStream, ZipInputStream]
		{ protected def open(in: InputStream) = new ZipInputStream(in) }
	def gzipOutputStream = new WrapOpenResource[OutputStream, GZIPOutputStream]
		{ protected def open(out: OutputStream) = new GZIPOutputStream(out)
		  // finish() flushes the gzip trailer without closing the underlying stream
		  override protected def close(out: GZIPOutputStream) = out.finish() }
	def jarOutputStream = new WrapOpenResource[OutputStream, JarOutputStream]
		{ protected def open(out: OutputStream) = new JarOutputStream(out) }
	def jarInputStream = new WrapOpenResource[InputStream, JarInputStream]
		{ protected def open(in: InputStream) = new JarInputStream(in) }
	def zipEntry(zip: ZipFile) = new CloseableOpenResource[ZipEntry, InputStream] {
		protected def open(entry: ZipEntry, log: Logger) =
			Control.trap("Error opening " + entry.getName + " in " + zip + ": ", log) { Right(zip.getInputStream(entry)) }
	}
}
\ No newline at end of file
diff --git a/src/main/scala/sbt/Fork.scala b/src/main/scala/sbt/Fork.scala
new file mode 100644
index 000000000..278a77843
--- /dev/null
+++ b/src/main/scala/sbt/Fork.scala
@@ -0,0 +1,82 @@
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt

import java.io.File

/** Configuration for forking a java process; javaHome overrides the running JVM's home.*/
abstract class ForkJava extends NotNull
{
	def javaHome: Option[File] = None
}
/** Configuration for forking a Scala process; scalaJars lists jars for the boot classpath.*/
abstract class ForkScala extends ForkJava
{
	def scalaJars: Iterable[File] = None
}
trait ForkScalaRun extends ForkScala
{
	def workingDirectory: Option[File] = None
	def runJVMOptions: Seq[String] = Nil
}
trait ForkScalaCompiler extends ForkScala
{
	def compileJVMOptions: Seq[String] = Nil
}

import java.lang.{ProcessBuilder => JProcessBuilder}
object Fork
{
	private val ScalacMainClass = "scala.tools.nsc.Main"
	private val ScalaMainClass = "scala.tools.nsc.MainGenericRunner"
	val java = new ForkJava("java")
	val javac = new ForkJava("javac")
	val scala = new ForkScala(ScalaMainClass)
	val scalac = new ForkScala(ScalacMainClass)

	/** Resolves the named executable under javaHome/bin, defaulting to the running JVM's home.*/
	private def javaCommand(javaHome: Option[File], name: String): File =
	{
		val home = javaHome.getOrElse(new File(System.getProperty("java.home")))
		new File(new File(home, "bin"), name)
	}
	final class ForkJava(commandName: String) extends NotNull
	{
		def apply(javaHome: Option[File], options: Seq[String], log: Logger): Int =
			apply(javaHome, options, None, log)
		def apply(javaHome: Option[File], options: Seq[String], workingDirectory: File, log: Logger): Int =
			apply(javaHome, options, Some(workingDirectory), log)
		def apply(javaHome: Option[File], options: Seq[String], workingDirectory: Option[File], log: Logger): Int =
			apply(javaHome, options, workingDirectory, Map.empty, log)
		/** env is additional environment variables*/
		def apply(javaHome: Option[File],
options: Seq[String], workingDirectory: Option[File], env: Map[String, String], log: Logger): Int = + { + val executable = javaCommand(javaHome, commandName).getAbsolutePath + val command = (executable :: options.toList).toArray + log.debug("Forking java process: " + command.mkString(" ") + workingDirectory.map("\n\tin " + _.getAbsolutePath).getOrElse("")) + val builder = new JProcessBuilder(command : _*) + workingDirectory.foreach(wd => builder.directory(wd)) + val environment = builder.environment + for( (key, value) <- env ) + environment.put(key, value) + Process(builder) ! log + } + } + final class ForkScala(mainClassName: String) extends NotNull + { + def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], log: Logger): Int = + apply(javaHome, jvmOptions, scalaJars, arguments, None, log) + def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], workingDirectory: File, log: Logger): Int = + apply(javaHome, jvmOptions, scalaJars, arguments, Some(workingDirectory), log) + def apply(javaHome: Option[File], jvmOptions: Seq[String], scalaJars: Iterable[File], arguments: Seq[String], workingDirectory: Option[File], log: Logger): Int = + { + val scalaClasspath = + if(scalaJars.isEmpty) + FileUtilities.scalaLibraryJar :: FileUtilities.scalaCompilerJar :: Nil + else + scalaJars + val scalaClasspathString = "-Xbootclasspath/a:" + scalaClasspath.map(_.getAbsolutePath).mkString(File.pathSeparator) + val mainClass = if(mainClassName.isEmpty) Nil else mainClassName :: Nil + val options = jvmOptions ++ (scalaClasspathString :: mainClass ::: arguments.toList) + Fork.java(javaHome, options, workingDirectory, log) + } + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Format.scala b/src/main/scala/sbt/Format.scala new file mode 100644 index 000000000..5e2f70425 --- /dev/null +++ b/src/main/scala/sbt/Format.scala @@ -0,0 +1,45 @@ +/* sbt -- Simple Build 
Tool + * Copyright 2008 Mark Harrah, David MacIver + */ +package sbt + +import java.io.File +import scala.collection.mutable.{HashSet, Set} + +trait Format[T] extends NotNull +{ + def toString(t: T): String + def fromString(s: String): T +} +abstract class SimpleFormat[T] extends Format[T] +{ + def toString(t: T) = t.toString +} +object Format +{ + def path(basePath: Path): Format[Path] = new Format[Path] + { + def toString(path: Path) = path.relativePath + def fromString(s: String) = Path.fromString(basePath, s) + } + implicit val file: Format[File] = new Format[File] + { + def toString(file: File) = file.getAbsolutePath + def fromString(s: String) = (new File(s)).getAbsoluteFile + } + implicit val hash: Format[Array[Byte]] = new Format[Array[Byte]] + { + def toString(hash: Array[Byte]) = Hash.toHex(hash) + def fromString(hash: String) = Hash.fromHex(hash) + } + def set[T](implicit format: Format[T]): Format[Set[T]] = new Format[Set[T]] + { + def toString(set: Set[T]) = set.toList.map(format.toString).mkString(File.pathSeparator) + def fromString(s: String) = (new HashSet[T]) ++ FileUtilities.pathSplit(s).map(_.trim).filter(!_.isEmpty).map(format.fromString) + } + implicit val string: Format[String] = new SimpleFormat[String] { def fromString(s: String) = s } + implicit val test: Format[TestDefinition] = new SimpleFormat[TestDefinition] + { + def fromString(s: String) = TestParser.parse(s).fold(error, x => x) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Hash.scala b/src/main/scala/sbt/Hash.scala new file mode 100644 index 000000000..2a8434394 --- /dev/null +++ b/src/main/scala/sbt/Hash.scala @@ -0,0 +1,79 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import java.io.{ByteArrayInputStream, File, InputStream} + +object Hash +{ + private val BufferSize = 8192 + def toHex(bytes: Array[Byte]): String = + { + val buffer = new StringBuilder(bytes.length * 2) + for(i <- 0 until bytes.length) + { + val b = bytes(i) 
+ val bi: Int = if(b < 0) b + 256 else b + buffer append toHex((bi >>> 4).asInstanceOf[Byte]) + buffer append toHex((bi & 0x0F).asInstanceOf[Byte]) + } + buffer.toString + } + def fromHex(hex: String): Array[Byte] = + { + require((hex.length & 1) == 0, "Hex string must have length 2n.") + val array = new Array[Byte](hex.length >> 1) + for(i <- 0 until hex.length by 2) + { + val c1 = hex.charAt(i) + val c2 = hex.charAt(i+1) + array(i >> 1) = ((fromHex(c1) << 4) | fromHex(c2)).asInstanceOf[Byte] + } + array + } + /** Calculates the SHA-1 hash of the given String.*/ + def apply(s: String, log: Logger): Either[String, Array[Byte]] = apply(new ByteArrayInputStream(s.getBytes("UTF-8")), log) + /** Calculates the SHA-1 hash of the given file.*/ + def apply(path: Path, log: Logger): Either[String, Array[Byte]] = apply(path.asFile, log) + /** Calculates the SHA-1 hash of the given file.*/ + def apply(file: File, log: Logger): Either[String, Array[Byte]] = + FileUtilities.readStreamValue(file, log) { stream => apply(stream, log) } + /** Calculates the SHA-1 hash of the given stream, closing it when finished.*/ + def apply(stream: InputStream, log: Logger): Either[String, Array[Byte]] = + { + import java.security.{MessageDigest, DigestInputStream} + val digest = MessageDigest.getInstance("SHA") + Control.trapAndFinally("Error computing digest: ", log) + { + val dis = new DigestInputStream(stream, digest) + val buffer = new Array[Byte](BufferSize) + while(dis.read(buffer) >= 0) {} + dis.close() + Right(digest.digest) + } + { stream.close() } + } + + private def toHex(b: Byte): Char = + { + require(b >= 0 && b <= 15, "Byte " + b + " was not between 0 and 15") + if(b < 10) + ('0'.asInstanceOf[Int] + b).asInstanceOf[Char] + else + ('a'.asInstanceOf[Int] + (b-10)).asInstanceOf[Char] + } + private def fromHex(c: Char): Int = + { + val b = + if(c >= '0' && c <= '9') + (c - '0') + else if(c >= 'a' && c <= 'f') + (c - 'a') + 10 + else if(c >= 'A' && c <= 'F') + (c - 'A') + 10 + else + 
throw new RuntimeException("Invalid hex character: '" + c + "'.") + b + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/IntegrationTesting.scala b/src/main/scala/sbt/IntegrationTesting.scala new file mode 100644 index 000000000..f37592748 --- /dev/null +++ b/src/main/scala/sbt/IntegrationTesting.scala @@ -0,0 +1,85 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Steven Blundy, Mark Harrah + */ +package sbt + +import ScalaProject.{optionsAsString, javaOptionsAsString} + +trait IntegrationTesting extends NotNull +{ + /** Override to provide pre-test setup. */ + protected def pretests: Option[String] = None + /** Override to provide post-test cleanup. */ + protected def posttests: Option[String] = None +} +trait ScalaIntegrationTesting extends IntegrationTesting +{ self: ScalaProject => + + protected def integrationTestTask(frameworks: Iterable[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]) = + testTask(frameworks, classpath, analysis, options) +} + +trait BasicScalaIntegrationTesting extends BasicIntegrationTesting with MavenStyleIntegrationTestPaths +{ self: BasicScalaProject => } +/** A fully featured integration testing that may be mixed in with any subclass of BasicScalaProject. 
+ * Pre-suite setup and post-suite cleanup are provide by overriding pretests and posttests respectively.*/ +trait BasicIntegrationTesting extends ScalaIntegrationTesting with IntegrationTestPaths with BasicDependencyProject +{ + self: BasicScalaProject => + + import BasicScalaIntegrationTesting._ + + lazy val integrationTestCompile = integrationTestCompileAction + lazy val integrationTest = integrationTestAction + + val integrationTestCompileConditional = new CompileConditional(integrationTestCompileConfiguration) + + protected def integrationTestAction = integrationTestTask(integrationTestFrameworks, integrationTestClasspath, integrationTestCompileConditional.analysis, integrationTestOptions) dependsOn integrationTestCompile describedAs IntegrationTestCompileDescription + protected def integrationTestCompileAction = integrationTestCompileTask() dependsOn compile describedAs IntegrationTestDescription + + protected def integrationTestCompileTask() = task{ integrationTestCompileConditional.run } + + def integrationTestOptions: Seq[TestOption] = + TestSetup(() => pretests) :: + TestCleanup(() => posttests) :: + testOptions.toList + def integrationTestCompileOptions = testCompileOptions + def javaIntegrationTestCompileOptions: Seq[JavaCompileOption] = testJavaCompileOptions + + def integrationTestConfiguration = if(useIntegrationTestConfiguration) Configurations.IntegrationTest else Configurations.Test + def integrationTestClasspath = fullClasspath(integrationTestConfiguration) +++ optionalClasspath + + def integrationTestLabel = "integration-test" + def integrationTestCompileConfiguration = new IntegrationTestCompileConfig + + protected def integrationTestDependencies = new LibraryDependencies(this, integrationTestCompileConditional) + + def integrationTestFrameworks = testFrameworks + override def useIntegrationTestConfiguration = false + abstract override def fullUnmanagedClasspath(config: Configuration) = + { + val superClasspath = 
super.fullUnmanagedClasspath(config) + if(config == integrationTestConfiguration) + integrationTestCompilePath +++ integrationTestResourcesPath +++ superClasspath + else + superClasspath + } + + class IntegrationTestCompileConfig extends BaseCompileConfig + { + def label = integrationTestLabel + def sources = integrationTestSources + def outputDirectory = integrationTestCompilePath + def classpath = integrationTestClasspath + def analysisPath = integrationTestAnalysisPath + def baseCompileOptions = integrationTestCompileOptions + def javaOptions = javaOptionsAsString(javaCompileOptions) + def testDefinitionClassNames = integrationTestFrameworks.map(_.testSuperClassName) + } +} + +object BasicScalaIntegrationTesting +{ + val IntegrationTestCompileDescription = "Compiles integration test sources." + val IntegrationTestDescription = "Runs all integration tests detected during compilation." +} diff --git a/src/main/scala/sbt/LineReader.scala b/src/main/scala/sbt/LineReader.scala new file mode 100644 index 000000000..1332f6aac --- /dev/null +++ b/src/main/scala/sbt/LineReader.scala @@ -0,0 +1,97 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +trait LineReader extends NotNull +{ + def readLine(prompt: String): Option[String] +} +class Completors(val projectAction: String, val projectNames: Iterable[String], + val generalCommands: Iterable[String], val propertyActions: Iterable[String], + val prefixes: Iterable[String]) extends NotNull +import jline.ConsoleReader +abstract class JLine extends LineReader +{ + protected[this] val reader: ConsoleReader + def readLine(prompt: String) = + reader.readLine(prompt) match + { + case null => None + case x => Some(x.trim) + } +} +object SimpleReader extends JLine +{ + protected[this] val reader = + { + val cr = new ConsoleReader + cr.setBellEnabled(false) + cr + } +} +class JLineReader(historyPath: Option[Path], completors: Completors, log: Logger) extends JLine +{ + import completors._ + 
import jline.{ArgumentCompletor, Completor, MultiCompletor, NullCompletor, SimpleCompletor} + + private val generalCompletor = simpleCompletor(generalCommands) + private val projectCompletor = simpleArgumentCompletor(projectAction :: Nil, projectNames) + + private val completor = new MultiCompletor() + + protected[this] val reader = + { + val cr = new ConsoleReader + cr.setBellEnabled(false) + for(historyLocation <- historyPath) + { + val historyFile = historyLocation.asFile + Control.trapAndLog(log) + { + historyFile.getParentFile.mkdirs() + cr.getHistory.setHistoryFile(historyFile) + } + } + cr.addCompletor(completor) + cr + } + + /** Used for a single argument so that the argument can have spaces in it.*/ + object SingleArgumentDelimiter extends ArgumentCompletor.AbstractArgumentDelimiter + { + def isDelimiterChar(buffer: String, pos: Int) = + (buffer.charAt(pos) == ' ') && buffer.substring(0, pos).trim.indexOf(' ') == -1 + } + + private def simpleCompletor(completions: Iterable[String]) = new SimpleCompletor(completions.toList.toArray) + private def simpleArgumentCompletor(first: Iterable[String], second: Iterable[String]) = + singleArgumentCompletor(simpleCompletor(first), simpleCompletor(second)) + private def singleArgumentCompletor(first: Completor, second: Completor) = + { + val completors = Array(first, second, new NullCompletor) + val c = new ArgumentCompletor(completors, SingleArgumentDelimiter) + c.setStrict(true) + c + } + private def repeatedArgumentCompletor(first: Completor, repeat: Completor) = + { + val c = new ArgumentCompletor(Array(first, repeat)) + c.setStrict(true) + c + } + + private def propertyCompletor(propertyNames: Iterable[String]) = + simpleArgumentCompletor(propertyActions, propertyNames) + private def prefixedCompletor(baseCompletor: Completor) = + singleArgumentCompletor(simpleCompletor(prefixes.toList.toArray), baseCompletor) + def setVariableCompletions(taskNames: Iterable[String], propertyNames: Iterable[String], extra: 
Iterable[(String, Iterable[String])] ) + { + import scala.collection.immutable.TreeSet + val taskCompletor = simpleCompletor(TreeSet(taskNames.toSeq : _*)) + val extraCompletors = for( (first, repeat) <- extra) yield repeatedArgumentCompletor(simpleCompletor(first :: Nil), simpleCompletor(repeat)) + val baseCompletors = generalCompletor :: taskCompletor :: projectCompletor :: propertyCompletor(propertyNames) :: extraCompletors.toList + val baseCompletor = new MultiCompletor(baseCompletors.toArray) + completor.setCompletors( Array(baseCompletor, prefixedCompletor(baseCompletor)) ) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Logger.scala b/src/main/scala/sbt/Logger.scala new file mode 100644 index 000000000..b637a4499 --- /dev/null +++ b/src/main/scala/sbt/Logger.scala @@ -0,0 +1,337 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import scala.collection.mutable.{Buffer, HashMap, ListBuffer} + +sealed trait LogEvent extends NotNull +final class Success(val msg: String) extends LogEvent +final class Log(val level: Level.Value, val msg: String) extends LogEvent +final class Trace(val exception: Throwable) extends LogEvent +final class SetLevel(val newLevel: Level.Value) extends LogEvent +final class SetTrace(val enabled: Boolean) extends LogEvent +final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent + +object ControlEvent extends Enumeration +{ + val Start, Header, Finish = Value +} + +abstract class Logger extends NotNull +{ + def getLevel: Level.Value + def setLevel(newLevel: Level.Value) + def enableTrace(flag: Boolean) + def traceEnabled: Boolean + + def atLevel(level: Level.Value) = level.id >= getLevel.id + def trace(t: => Throwable): Unit + final def debug(message: => String): Unit = log(Level.Debug, message) + final def info(message: => String): Unit = log(Level.Info, message) + final def warn(message: => String): Unit = log(Level.Warn, message) + final def 
error(message: => String): Unit = log(Level.Error, message) + def success(message: => String): Unit + def log(level: Level.Value, message: => String): Unit + def control(event: ControlEvent.Value, message: => String): Unit + + /** Use this method to ensure calls. */ + def logAll(events: Seq[LogEvent]): Unit + /** Defined in terms of other methods in Logger and should not be called from them. */ + final def log(event: LogEvent) + { + event match + { + case s: Success => success(s.msg) + case l: Log => log(l.level, l.msg) + case t: Trace => trace(t.exception) + case setL: SetLevel => setLevel(setL.newLevel) + case setT: SetTrace => enableTrace(setT.enabled) + case c: ControlEvent => control(c.event, c.msg) + } + } +} + +/** Implements the level-setting methods of Logger.*/ +abstract class BasicLogger extends Logger +{ + private var traceEnabledVar = true + private var level: Level.Value = Level.Info + def getLevel = level + def setLevel(newLevel: Level.Value) { level = newLevel } + def enableTrace(flag: Boolean) { traceEnabledVar = flag } + def traceEnabled = traceEnabledVar +} + +final class SynchronizedLogger(delegate: Logger) extends Logger +{ + def getLevel = { synchronized { delegate.getLevel } } + def setLevel(newLevel: Level.Value) { synchronized { delegate.setLevel(newLevel) } } + def enableTrace(enabled: Boolean) { synchronized { delegate.enableTrace(enabled) } } + def traceEnabled: Boolean = { synchronized { delegate.traceEnabled } } + + def trace(t: => Throwable) { synchronized { delegate.trace(t) } } + def log(level: Level.Value, message: => String) { synchronized { delegate.log(level, message) } } + def success(message: => String) { synchronized { delegate.success(message) } } + def control(event: ControlEvent.Value, message: => String) { synchronized { delegate.control(event, message) } } + def logAll(events: Seq[LogEvent]) { synchronized { delegate.logAll(events) } } +} + +final class MultiLogger(delegates: List[Logger]) extends BasicLogger +{ + 
override def setLevel(newLevel: Level.Value) + { + super.setLevel(newLevel) + dispatch(new SetLevel(newLevel)) + } + override def enableTrace(enabled: Boolean) + { + super.enableTrace(enabled) + dispatch(new SetTrace(enabled)) + } + def trace(t: => Throwable) { dispatch(new Trace(t)) } + def log(level: Level.Value, message: => String) { dispatch(new Log(level, message)) } + def success(message: => String) { dispatch(new Success(message)) } + def logAll(events: Seq[LogEvent]) { delegates.foreach(_.logAll(events)) } + def control(event: ControlEvent.Value, message: => String) { delegates.foreach(_.control(event, message)) } + private def dispatch(event: LogEvent) { delegates.foreach(_.log(event)) } +} + +/** A logger that can buffer the logging done on it by currently executing Thread and +* then can flush the buffer to the delegate logger provided in the constructor. Use +* 'startRecording' to start buffering and then 'play' from to flush the buffer for the +* current Thread to the backing logger. The logging level set at the +* time a message is originally logged is used, not the level at the time 'play' is +* called. +* +* This class assumes that it is the only client of the delegate logger. +* +* This logger is thread-safe. +* */ +final class BufferedLogger(delegate: Logger) extends Logger +{ + private[this] val buffers = wrap.Wrappers.weakMap[Thread, Buffer[LogEvent]] + /* The recording depth part is to enable a weak nesting of recording calls. When recording is + * nested (recordingDepth >= 2), calls to play/playAll add the buffers for worker Threads to the + * serial buffer (main Thread) and calls to clear/clearAll clear worker Thread buffers only. 
*/ + private[this] def recording = recordingDepth > 0 + private[this] var recordingDepth = 0 + + private[this] val mainThread = Thread.currentThread + private[this] def getBuffer(key: Thread) = buffers.getOrElseUpdate(key, new ListBuffer[LogEvent]) + private[this] def buffer = getBuffer(key) + private[this] def key = Thread.currentThread + private[this] def serialBuffer = getBuffer(mainThread) + + private[this] def inWorker = Thread.currentThread ne mainThread + + /** Enables buffering. */ + def startRecording() { synchronized { recordingDepth += 1 } } + /** Flushes the buffer to the delegate logger for the current thread. This method calls logAll on the delegate + * so that the messages are written consecutively. The buffer is cleared in the process. */ + def play(): Unit = + synchronized + { + if(recordingDepth == 1) + delegate.logAll(wrap.Wrappers.readOnly(buffer)) + else if(recordingDepth > 1 && inWorker) + serialBuffer ++= buffer + } + def playAll(): Unit = + synchronized + { + if(recordingDepth == 1) + { + for(buffer <- buffers.values) + delegate.logAll(wrap.Wrappers.readOnly(buffer)) + } + else if(recordingDepth > 1) + { + for((key, buffer) <- buffers.toList if key ne mainThread) + serialBuffer ++= buffer + } + } + /** Clears buffered events for the current thread. It does not disable buffering. */ + def clear(): Unit = synchronized { if(recordingDepth == 1 || inWorker) buffers -= key } + /** Clears buffered events for all threads and disables buffering. */ + def stop(): Unit = + synchronized + { + clearAll() + if(recordingDepth > 0) + recordingDepth -= 1 + } + /** Clears buffered events for all threads. 
*/ + def clearAll(): Unit = + synchronized + { + if(recordingDepth <= 1) + buffers.clear() + else + { + val serial = serialBuffer + buffers.clear() + buffers(mainThread) = serial + } + } + def runAndFlush[T](f: => T): T = + { + try { f } + finally { play(); clear() } + } + + def setLevel(newLevel: Level.Value): Unit = + synchronized { + if(recording) buffer += new SetLevel(newLevel) + delegate.setLevel(newLevel) + } + def getLevel = synchronized { delegate.getLevel } + def traceEnabled = synchronized { delegate.traceEnabled } + def enableTrace(flag: Boolean): Unit = + synchronized + { + if(recording) buffer += new SetTrace(flag) + delegate.enableTrace(flag) + } + + def trace(t: => Throwable): Unit = + synchronized + { + if(traceEnabled) + { + if(recording) buffer += new Trace(t) + else delegate.trace(t) + } + } + def success(message: => String): Unit = + synchronized + { + if(atLevel(Level.Info)) + { + if(recording) + buffer += new Success(message) + else + delegate.success(message) + } + } + def log(level: Level.Value, message: => String): Unit = + synchronized + { + if(atLevel(level)) + { + if(recording) + buffer += new Log(level, message) + else + delegate.log(level, message) + } + } + def logAll(events: Seq[LogEvent]): Unit = + synchronized + { + if(recording) + buffer ++= events + else + delegate.logAll(events) + } + def control(event: ControlEvent.Value, message: => String): Unit = + synchronized + { + if(atLevel(Level.Info)) + { + if(recording) + buffer += new ControlEvent(event, message) + else + delegate.control(event, message) + } + } +} + +object ConsoleLogger +{ + private def os = System.getProperty("os.name") + private def isWindows = os.toLowerCase.indexOf("windows") >= 0 + private def formatExplicitlyDisabled = java.lang.Boolean.getBoolean("sbt.log.noformat") + private val formatEnabled = !(formatExplicitlyDisabled || isWindows) +} + +/** A logger that logs to the console. On non-windows systems, the level labels are +* colored. 
+* +* This logger is not thread-safe.*/ +class ConsoleLogger extends BasicLogger +{ + import ConsoleLogger.formatEnabled + def messageColor(level: Level.Value) = Console.RESET + def labelColor(level: Level.Value) = + level match + { + case Level.Error => Console.RED + case Level.Warn => Console.YELLOW + case _ => Console.RESET + } + def successLabelColor = Console.GREEN + def successMessageColor = Console.RESET + override def success(message: => String) + { + if(atLevel(Level.Info)) + log(successLabelColor, Level.SuccessLabel, successMessageColor, message) + } + def trace(t: => Throwable): Unit = + System.out.synchronized + { + if(traceEnabled) + t.printStackTrace + } + def log(level: Level.Value, message: => String) + { + if(atLevel(level)) + log(labelColor(level), level.toString, messageColor(level), message) + } + private def setColor(color: String) + { + if(formatEnabled) + System.out.synchronized { System.out.print(color) } + } + private def log(labelColor: String, label: String, messageColor: String, message: String): Unit = + System.out.synchronized + { + for(line <- message.split("""\n""")) + { + setColor(Console.RESET) + System.out.print('[') + setColor(labelColor) + System.out.print(label) + setColor(Console.RESET) + System.out.print("] ") + setColor(messageColor) + System.out.print(line) + setColor(Console.RESET) + System.out.println() + } + } + + def logAll(events: Seq[LogEvent]) = System.out.synchronized { events.foreach(log) } + def control(event: ControlEvent.Value, message: => String) + { log(labelColor(Level.Info), Level.Info.toString, Console.BLUE, message) } +} + +/** An enumeration defining the levels available for logging. A level includes all of the levels +* with id larger than its own id. 
For example, Warn (id=3) includes Error (id=4).*/ +object Level extends Enumeration with NotNull +{ + val Debug = Value(1, "debug") + val Info = Value(2, "info") + val Warn = Value(3, "warn") + val Error = Value(4, "error") + /** Defines the label to use for success messages. A success message is logged at the info level but + * uses this label. Because the label for levels is defined in this module, the success + * label is also defined here. */ + val SuccessLabel = "success" + + // added because elements was renamed to iterator in 2.8.0 nightly + def levels = Debug :: Info :: Warn :: Error :: Nil + /** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */ + def apply(s: String) = levels.find(s == _.toString) + /** Same as apply, defined for use in pattern matching. */ + private[sbt] def unapply(s: String) = apply(s) +} \ No newline at end of file diff --git a/src/main/scala/sbt/Main.scala b/src/main/scala/sbt/Main.scala new file mode 100644 index 000000000..e17c4b555 --- /dev/null +++ b/src/main/scala/sbt/Main.scala @@ -0,0 +1,673 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Steven Blundy, Mark Harrah, David MacIver, Mikko Peltonen + */ +package sbt + +import scala.collection.immutable.TreeSet + +private trait RunCompleteAction extends NotNull +private class Exit(val code: Int) extends RunCompleteAction +private object Reload extends RunCompleteAction + +/** This class is the entry point for sbt. If it is given any arguments, it interprets them +* as actions, executes the corresponding actions, and exits. If there were no arguments provided, +* sbt enters interactive mode.*/ +object Main +{ + /** The entry point for sbt. If arguments are specified, they are interpreted as actions, executed, + * and then the program terminates. If no arguments are specified, the program enters interactive + * mode. 
Call run if you need to run sbt in the same JVM.*/ + def main(args: Array[String]) + { + val exitCode = run(args) + if(exitCode == RebootExitCode) + { + println("Rebooting is not supported when the sbt loader is not used.") + println("Please manually restart sbt.") + } + System.exit(exitCode) + } + val RebootExitCode = -1 + val NormalExitCode = 0 + val SetupErrorExitCode = 1 + val SetupDeclinedExitCode = 2 + val LoadErrorExitCode = 3 + val UsageErrorExitCode = 4 + val BuildErrorExitCode = 5 + def run(args: Array[String]): Int = + { + val startTime = System.currentTimeMillis + Project.loadProject match + { + case err: LoadSetupError => + println("\n" + err.message) + ExitHooks.runExitHooks(Project.bootLogger) + SetupErrorExitCode + case LoadSetupDeclined => + ExitHooks.runExitHooks(Project.bootLogger) + SetupDeclinedExitCode + case err: LoadError => + { + val log = Project.bootLogger + println(err.message) + ExitHooks.runExitHooks(log) + // Because this is an error that can probably be corrected, prompt user to try again. 
+ val line = + try { SimpleReader.readLine("\n Hit enter to retry or 'exit' to quit: ") } + catch + { + case e => + log.trace(e) + log.error(e.toString) + None + } + line match + { + case Some(l) => if(!isTerminateAction(l)) run(args) else NormalExitCode + case None => LoadErrorExitCode + } + } + case success: LoadSuccess => + { + import success.project + val doNext: RunCompleteAction = + // in interactive mode, fill all undefined properties + if(args.length > 0 || fillUndefinedProjectProperties(project.topologicalSort.toList.reverse)) + startProject(project, args, startTime) + else + new Exit(NormalExitCode) + ExitHooks.runExitHooks(project.log) + doNext match + { + case Reload => run(args) + case x: Exit => x.code + } + } + } + } + /** Returns true if the project should be reloaded, false if sbt should exit.*/ + private def startProject(project: Project, args: Array[String], startTime: Long): RunCompleteAction = + { + project.log.info("Building project " + project.name + " " + project.version.toString + " using " + project.getClass.getName) + val scalaVersionOpt = ScalaVersion.current orElse project.scalaVersion.get + for(sbtVersion <- project.sbtVersion.get; scalaVersion <- scalaVersionOpt if !sbtVersion.isEmpty && !scalaVersion.isEmpty) + project.log.info(" with sbt " + sbtVersion + " and Scala " + scalaVersion) + args match + { + case Array() => + CrossBuild.load() match + { + case None => + project.log.info("No actions specified, interactive session started. 
Execute 'help' for more information.") + val doNext = interactive(project) + printTime(project, startTime, "session") + doNext + case Some(cross) => + crossBuildNext(project, cross) + new Exit(RebootExitCode) + } + case CrossBuild(action) => + val exitCode = + CrossBuild.load() match + { + case None => if(startCrossBuild(project, action)) RebootExitCode else BuildErrorExitCode + case Some(cross) => if(crossBuildNext(project, cross)) RebootExitCode else NormalExitCode + } + new Exit(exitCode) + case _ => + val exitCode = + Control.lazyFold(args.toList)(handleBatchCommand(project)) match + { + case None => project.log.success("Build completed successfully."); NormalExitCode + case Some(errorMessage) => + project.log.error("Error during build" + (if(errorMessage.isEmpty) "." else ": " + errorMessage) ) + BuildErrorExitCode + } + printTime(project, startTime, "build") + new Exit(exitCode) + } + } + private def crossBuildNext(project: Project, cross: CrossBuild) = + { + val setScalaVersion = (newVersion: String) => { System.setProperty(ScalaVersion.LiveKey, newVersion); () } + val complete = + if(handleAction(project, cross.command)) + cross.versionComplete(setScalaVersion) + else + cross.error(setScalaVersion) + if(complete) + printTime(project, cross.startTime, "cross-build") + !complete + } + + /** The name of the command that loads a console with access to the current project through the variable 'project'.*/ + val ProjectConsoleAction = "console-project" + /** The name of the command that shows the current project and logging level of that project.*/ + val ShowCurrent = "current" + /** The name of the command that shows all available actions.*/ + val ShowActions = "actions" + /** The name of the command that sets the currently active project.*/ + val ProjectAction = "project" + /** The name of the command that shows all available projects.*/ + val ShowProjectsAction = "projects" + /** The list of lowercase command names that may be used to terminate the program.*/ 
+ val TerminateActions: Iterable[String] = "exit" :: "quit" :: Nil + /** The name of the command that sets the value of the property given as its argument.*/ + val SetAction = "set" + /** The name of the command that gets the value of the property given as its argument.*/ + val GetAction = "get" + /** The name of the command that displays the help message. */ + val HelpAction = "help" + /** The command for rebooting sbt. Requires sbt to have been launched by the loader.*/ + val RebootCommand = "reboot" + /** The name of the command that reloads a project. This is useful for when the project definition has changed. */ + val ReloadAction = "reload" + /** The name of the command that toggles logging stacktraces. */ + val TraceCommand = "trace" + /** The name of the command that compiles all sources continuously when they are modified. */ + val ContinuousCompileCommand = "cc" + /** The prefix used to identify a request to execute the remaining input on source changes.*/ + val ContinuousExecutePrefix = "~" + /** The prefix used to identify a request to execute the remaining input across multiple Scala versions.*/ + val CrossBuildPrefix = "+" + /** Error message for when the user tries to prefix an action with CrossBuildPrefix but the loader is not used.*/ + val CrossBuildUnsupported = "Cross-building is not supported when the loader is not used." 
+ + /** The number of seconds between polling by the continuous compile command.*/ + val ContinuousCompilePollDelaySeconds = 1 + + /** The list of all available commands at the interactive prompt in addition to the tasks defined + * by a project.*/ + protected def interactiveCommands: Iterable[String] = basicCommands.toList ++ logLevels.toList + /** The list of logging levels.*/ + private def logLevels: Iterable[String] = TreeSet.empty[String] ++ Level.levels.map(_.toString) + /** The list of all interactive commands other than logging level.*/ + private def basicCommands: Iterable[String] = TreeSet(ShowProjectsAction, ShowActions, ShowCurrent, HelpAction, + RebootCommand, ReloadAction, TraceCommand, ContinuousCompileCommand, ProjectConsoleAction) + + /** Enters interactive mode for the given root project. It uses JLine for tab completion and + * history. It returns normally when the user terminates or reloads the interactive session. That is, + * it does not call System.exit to quit. + **/ + private def interactive(baseProject: Project): RunCompleteAction = + { + val projectNames = baseProject.topologicalSort.map(_.name) + val prefixes = ContinuousExecutePrefix :: Nil + val completors = new Completors(ProjectAction, projectNames, interactiveCommands, List(GetAction, SetAction), prefixes) + val reader = new JLineReader(baseProject.historyPath, completors, baseProject.log) + def updateTaskCompletions(project: Project) + { + val methodCompletions = for( (name, method) <- project.methods) yield (name, method.completions) + reader.setVariableCompletions(project.taskNames, project.propertyNames, methodCompletions) + } + + /** Prompts the user for the next command using 'currentProject' as context. + * If the command indicates that the user wishes to terminate or reload the session, + * the function returns the appropriate value. + * Otherwise, the command is handled and this function is called again + * (tail recursively) to prompt for the next command. 
*/ + def loop(currentProject: Project): RunCompleteAction = + { + updateTaskCompletions(baseProject) // this is done after every command because the completions could change due to the action invoked + reader.readLine("> ") match + { + case Some(line) => + { + val trimmed = line.trim + if(trimmed.isEmpty) + loop(currentProject) + else if(isTerminateAction(trimmed)) + new Exit(NormalExitCode) + else if(ReloadAction == trimmed) + Reload + else if(RebootCommand == trimmed) + new Exit(RebootExitCode) + else if(trimmed.startsWith(CrossBuildPrefix)) + { + if(startCrossBuild(currentProject, trimmed.substring(CrossBuildPrefix.length).trim)) + new Exit(RebootExitCode) + else + loop(currentProject) + } + else if(trimmed.startsWith(ProjectAction + " ")) + { + val projectName = trimmed.substring(ProjectAction.length + 1) + baseProject.topologicalSort.find(_.name == projectName) match + { + case Some(newProject) => + { + printProject("Set current project to ", newProject) + updateTaskCompletions(newProject) + loop(newProject) + } + case None => + { + currentProject.log.error("Invalid project name '" + projectName + "' (type 'projects' to list available projects).") + loop(currentProject) + } + } + } + else + { + if(trimmed == HelpAction) + displayInteractiveHelp() + else if(trimmed == ShowProjectsAction) + baseProject.topologicalSort.foreach(listProject) + else if(trimmed.startsWith(SetAction + " ")) + setProperty(currentProject, trimmed.substring(SetAction.length + 1)) + else if(trimmed.startsWith(GetAction + " ")) + getProperty(currentProject, trimmed.substring(GetAction.length + 1)) + else if(trimmed == ProjectConsoleAction) + showResult(Run.projectConsole(currentProject), currentProject.log) + else + handleInteractiveCommand(currentProject, trimmed) + loop(currentProject) + } + } + case None => new Exit(NormalExitCode) + } + } + + loop(baseProject) + } + private def printCmd(name:String, desc:String) = Console.println("\t" + name + ": " + desc) + private def 
displayBatchHelp() = { + Console.println("You may execute any project action or method or one of the commands described below.") + Console.println("Available Commands:") + printCommonCommands() + } + private def printCommonCommands() + { + printCmd("", "Executes the project specified action.") + printCmd(" *", "Executes the project specified method.") + printCmd(ContinuousExecutePrefix + " ", "Executes the project specified action or method whenever source files change.") + printCmd(CrossBuildPrefix + " ", "Executes the project specified action or method for all versions of Scala defined in crossScalaVersions.") + printCmd(ShowActions, "Shows all available actions.") + printCmd(RebootCommand, "Changes to scala.version or sbt.version are processed and the project definition is reloaded.") + printCmd(HelpAction, "Displays this help message.") + } + private def displayInteractiveHelp() = { + Console.println("You may execute any project action or one of the commands described below. Only one action " + + "may be executed at a time in interactive mode and is entered by name, as it would be at the command line." 
+ + " Also, tab completion is available.") + Console.println("Available Commands:") + + printCommonCommands() + printCmd(ReloadAction, "Reloads sbt, recompiling modified project definitions if necessary.") + printCmd(ShowCurrent, "Shows the current project and logging level of that project.") + printCmd(Level.levels.mkString(", "), "Set logging for the current project to the specified level.") + printCmd(TraceCommand, "Toggles whether logging stack traces is enabled.") + printCmd(ProjectAction + " ", "Sets the currently active project.") + printCmd(ShowProjectsAction, "Shows all available projects.") + printCmd(TerminateActions.elements.mkString(", "), "Terminates the program.") + printCmd(SetAction + " ", "Sets the value of the property given as its argument.") + printCmd(GetAction + " ", "Gets the value of the property given as its argument.") + printCmd(ProjectConsoleAction, "Enters the Scala interpreter with the current project bound to the variable 'current' and all members imported.") + } + private def listProject(p: Project) = printProject("\t", p) + private def printProject(prefix: String, p: Project): Unit = + Console.println(prefix + p.name + " " + p.version) + + private def startCrossBuild(project: Project, action: String) = + { + checkBooted && checkAction(project, action) && + { + val againstScalaVersions = project.crossScalaVersions + val versionsDefined = !againstScalaVersions.isEmpty + if(versionsDefined) + CrossBuild(againstScalaVersions, action, System.currentTimeMillis) + else + Console.println("Project does not declare any Scala versions to cross-build against.") + versionsDefined + } + } + private def checkBooted = + Project.booted || + { + Console.println(CrossBuildUnsupported) + false + } + + /** Handles the given command string provided by batch mode execution.*/ + private def handleBatchCommand(project: Project)(command: String): Option[String] = + { + command.trim match + { + case HelpAction => displayBatchHelp(); None + case ShowActions
=> showActions(project); None + case CrossBuild(crossBuildAction) => Some(CrossBuildUnsupported) + case action => if(handleAction(project, action)) None else Some("") + } + } + + /** Handles the given command string provided at the command line.*/ + private def handleInteractiveCommand(project: Project, command: String) + { + command match + { + case GetAction => getArgumentError(project.log) + case SetAction => setArgumentError(project.log) + case ProjectAction => setProjectError(project.log) + case ShowCurrent => + printProject("Current project is ", project) + Console.println("Current log level is " + project.log.getLevel) + printTraceEnabled(project) + case ShowActions => showActions(project) + case TraceCommand => toggleTrace(project) + case Level(level) => setLevel(project, level) + case ContinuousCompileCommand => compileContinuously(project) + case action if action.startsWith(ContinuousExecutePrefix) => + executeContinuously(project, action.substring(ContinuousExecutePrefix.length).trim) + case action => handleAction(project, action) + } + } + private def showActions(project: Project): Unit = + Console.println(project.taskAndMethodList) + + // returns true if it succeeded (needed by noninteractive handleCommand) + private def handleAction(project: Project, action: String): Boolean = + { + def show(result: Option[String]): Boolean = showResult(result, project.log) + val startTime = System.currentTimeMillis + val result = withAction(project, action)( (name, params) => show(project.call(name, params)))( name => show(project.act(name))) + printTime(project, startTime, "") + result + } + private def showResult(result: Option[String], log: Logger): Boolean = + { + result match + { + case Some(errorMessage) => log.error(errorMessage); false + case None => log.success("Successful."); true + } + } + // true if the action exists + private def checkAction(project: Project, actionString: String): Boolean = + withAction(project, actionString)( (n,p) => true)( n => true) 
+ private def withAction(project: Project, actionString: String)(ifMethod: (String, Array[String]) => Boolean)(ifAction: String => Boolean): Boolean = + { + def didNotExist(taskType: String, name: String) = + { + project.log.error("No " + taskType + " named '" + name + "' exists.") + project.log.info("Execute 'help' for a list of commands or 'actions' for a list of available project actions and methods.") + false + } + impl.CommandParser.parse(actionString) match + { + case Left(errMsg) => project.log.error(errMsg); false + case Right((name, parameters)) => + if(project.methods.contains(name)) + ifMethod(name, parameters.toArray) + else if(!parameters.isEmpty) + didNotExist("method", name) + else if(project.deepTasks.contains(name)) + ifAction(name) + else + didNotExist("action", name) + } + } + + /** Toggles whether stack traces are enabled.*/ + private def toggleTrace(project: Project) + { + val newValue = !project.log.traceEnabled + project.topologicalSort.foreach(_.log.enableTrace(newValue)) + printTraceEnabled(project) + } + private def printTraceEnabled(project: Project) + { + Console.println("Stack traces are " + (if(project.log.traceEnabled) "enabled" else "disabled")) + } + /** Sets the logging level on the given project.*/ + private def setLevel(project: Project, level: Level.Value) + { + project.topologicalSort.foreach(_.log.setLevel(level)) + Console.println("Set log level to " + project.log.getLevel) + } + /** Prints the elapsed time to the given project's log using the given + * initial time and the label 's'.*/ + private def printTime(project: Project, startTime: Long, s: String) + { + val endTime = System.currentTimeMillis() + project.log.info("") + val ss = if(s.isEmpty) "" else s + " " + project.log.info("Total " + ss + "time: " + (endTime - startTime + 500) / 1000 + " s") + } + /** Provides a partial message describing why the given property is undefined. 
*/ + private def undefinedMessage(property: Project#UserProperty[_]): String = + { + property.resolve match + { + case vu: UndefinedValue => " is not defined." + case e: ResolutionException => " has invalid value: " + e.toString + case _ => "" + } + } + /** Prompts the user for the value of undefined properties. 'first' is true if this is the first time + * that the current property has been prompted.*/ + private def fillUndefinedProperties(project: Project, properties: List[(String, Project#Property[_])], first: Boolean): Boolean = + { + properties match + { + case (name, variable) :: tail => + { + val shouldAdvanceOrQuit = + variable match + { + case property: Project#UserProperty[_] => + if(first) + project.log.error(" Property '" + name + "' " + undefinedMessage(property)) + for(newValue <- SimpleReader.readLine(" Enter new value for " + name + " : ")) yield + { + try + { + property.setStringValue(newValue) + true + } + catch + { + case e => + project.log.error("Invalid value: " + e.getMessage) + false + } + } + case _ => Some(true) + } + shouldAdvanceOrQuit match + { + case Some(shouldAdvance) => fillUndefinedProperties(project, if(shouldAdvance) tail else properties, shouldAdvance) + case None => false + } + } + case Nil => true + } + } + /** Iterates over the undefined properties in the given projects, prompting the user for the value of each undefined + * property.*/ + private def fillUndefinedProjectProperties(projects: List[Project]): Boolean = + { + projects match + { + case project :: remaining => + { + val uninitialized = project.uninitializedProperties.toList + if(uninitialized.isEmpty) + fillUndefinedProjectProperties(remaining) + else + { + project.log.error("Project in " + project.info.projectDirectory.getAbsolutePath + " has undefined properties.") + val result = fillUndefinedProperties(project, uninitialized, true) && fillUndefinedProjectProperties(remaining) + project.saveEnvironment() + result + } + } + case Nil => true + } + } + /** Prints the 
value of the property with the given name in the given project. */ + private def getProperty(project: Project, propertyName: String) + { + if(propertyName.isEmpty) + project.log.error("No property name specified.") + else + { + project.getPropertyNamed(propertyName) match + { + case Some(property) => + { + property.resolve match + { + case u: UndefinedValue => project.log.error("Value of property '" + propertyName + "' is undefined.") + case ResolutionException(m, e) => project.log.error(m) + case DefinedValue(value, isInherited, isDefault) => Console.println(value.toString) + } + } + case None => + { + val value = System.getProperty(propertyName) + if(value == null) + project.log.error("No property named '" + propertyName + "' is defined.") + else + Console.println(value) + } + } + } + } + /** Separates the space separated property name/value pair and stores the value in the user-defined property + * with the given name in the given project. If no such property exists, the value is stored in a system + * property. 
*/ + private def setProperty(project: Project, propertyNameAndValue: String) + { + val m = """(\S+)(\s+\S.*)?""".r.pattern.matcher(propertyNameAndValue) + if(m.matches()) + { + val name = m.group(1) + val newValue = + { + val v = m.group(2) + if(v == null) "" else v.trim + } + project.getPropertyNamed(name) match + { + case Some(property) => + { + val succeeded = + try + { + property.setStringValue(newValue) + val isScalaVersion = property == project.scalaVersion + val isSbtVersion = property == project.sbtVersion + if(isScalaVersion || isSbtVersion) + { + val changed = if(isSbtVersion) "sbt " else "Scala " + Console.println(" Build will use " + changed + newValue + " after running 'reboot' command or restarting sbt.") + } + else + Console.println(" Set property '" + name + "' = '" + newValue + "'") + } + catch { case e => project.log.error("Error setting property '" + name + "' in " + project.environmentLabel + ": " + e.toString) } + project.saveEnvironment() + } + case None => + { + System.setProperty(name, newValue) + project.log.info(" Set system property '" + name + "' = '" + newValue + "'") + } + } + } + else + setArgumentError(project.log) + } + + private def compileContinuously(project: Project) = executeContinuously(project, "test-compile") + private def executeContinuously(project: Project, action: String) + { + if(checkAction(project, action)) + { + SourceModificationWatch.watchUntil(project, ContinuousCompilePollDelaySeconds)(System.in.available() > 0) + { + handleAction(project, action) + Console.println("Waiting for source changes... 
(press any key to interrupt)") + } + while (System.in.available() > 0) System.in.read() + } + } + + private def isTerminateAction(s: String) = TerminateActions.elements.contains(s.toLowerCase) + private def setArgumentError(log: Logger) { log.error("Invalid arguments for 'set': expected property name and new value.") } + private def getArgumentError(log: Logger) { log.error("Invalid arguments for 'get': expected property name.") } + private def setProjectError(log: Logger) { log.error("Invalid arguments for 'project': expected project name.") } +} +private class CrossBuild(val remainingScalaVersions: Set[String], val command: String, val startTime: Long) +{ + def error(setScalaVersion: String => Unit) = clearScalaVersion(setScalaVersion) + private def clearScalaVersion(setScalaVersion: String => Unit) = + { + CrossBuild.clear() + setScalaVersion("") + true + } + def versionComplete(setScalaVersion: String => Unit) = + { + val remaining = remainingScalaVersions - ScalaVersion.currentString + if(remaining.isEmpty) + clearScalaVersion(setScalaVersion) + else + { + CrossBuild.setProperties(remaining, command, startTime.toString) + setScalaVersion(remaining.toSeq.first) + false + } + } +} +private object CrossBuild +{ + private val RemainingScalaVersionsKey = "sbt.remaining.scala.versions" + private val CrossCommandKey = "sbt.cross.build.command" + private val StartTimeKey = "sbt.cross.start.time" + private def setProperties(remainingScalaVersions: Set[String], command: String, startTime: String) + { + System.setProperty(RemainingScalaVersionsKey, remainingScalaVersions.mkString(" ")) + System.setProperty(CrossCommandKey, command) + System.setProperty(StartTimeKey, startTime) + } + private def getProperty(key: String) = + { + val value = System.getProperty(key) + if(value == null) + "" + else + value.trim + } + private def clear() { setProperties(Set.empty, "", "") } + def load() = + { + val command = getProperty(CrossCommandKey) + val remaining = 
getProperty(RemainingScalaVersionsKey) + val startTime = getProperty(StartTimeKey) + if(command.isEmpty || remaining.isEmpty || startTime.isEmpty) + None + else + Some(new CrossBuild(Set(remaining.split(" ") : _*), command, startTime.toLong)) + } + def apply(remainingScalaVersions: Set[String], command: String, startTime: Long) = + { + setProperties(remainingScalaVersions, command, startTime.toString) + new CrossBuild(remainingScalaVersions, command, startTime) + } + import Main.CrossBuildPrefix + def unapply(s: String): Option[String] = + { + val trimmed = s.trim + if(trimmed.startsWith(CrossBuildPrefix)) + Some(trimmed.substring(CrossBuildPrefix.length).trim) + else + None + } + def unapply(s: Array[String]): Option[String] = + s match + { + case Array(CrossBuild(crossBuildAction)) => Some(crossBuildAction) + case _ => None + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/ManageDependencies.scala b/src/main/scala/sbt/ManageDependencies.scala new file mode 100644 index 000000000..ba761be89 --- /dev/null +++ b/src/main/scala/sbt/ManageDependencies.scala @@ -0,0 +1,743 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File +import java.net.URL +import java.util.Collections +import scala.collection.mutable.HashSet + +import Artifact.{defaultExtension, defaultType} + +import org.apache.ivy.{core, plugins, util, Ivy} +import core.LogOptions +import core.cache.DefaultRepositoryCacheManager +import core.deliver.DeliverOptions +import core.module.descriptor.{DefaultArtifact, DefaultDependencyArtifactDescriptor, MDArtifact} +import core.module.descriptor.{DefaultDependencyDescriptor, DefaultModuleDescriptor, DependencyDescriptor, ModuleDescriptor} +import core.module.descriptor.{DefaultExcludeRule, ExcludeRule} +import core.module.id.{ArtifactId,ModuleId, ModuleRevisionId} +import core.publish.PublishOptions +import core.resolve.ResolveOptions +import core.retrieve.RetrieveOptions +import 
core.settings.IvySettings +import plugins.matcher.{ExactPatternMatcher, PatternMatcher} +import plugins.parser.ModuleDescriptorParser +import plugins.parser.m2.{PomModuleDescriptorParser,PomModuleDescriptorWriter} +import plugins.parser.xml.XmlModuleDescriptorParser +import plugins.repository.{BasicResource, Resource} +import plugins.repository.url.URLResource +import plugins.resolver.{ChainResolver, DependencyResolver, IBiblioResolver} +import plugins.resolver.{AbstractPatternsBasedResolver, AbstractSshBasedResolver, FileSystemResolver, SFTPResolver, SshResolver, URLResolver} +import util.{Message, MessageLogger} + +final class IvyScala(val scalaVersion: String, val configurations: Iterable[Configuration], val checkExplicit: Boolean, val filterImplicit: Boolean) extends NotNull +final class IvyPaths(val projectDirectory: Path, val managedLibDirectory: Path, val cacheDirectory: Option[Path]) extends NotNull +final class IvyFlags(val validate: Boolean, val addScalaTools: Boolean, val errorIfNoConfiguration: Boolean) extends NotNull +final class IvyConfiguration(val paths: IvyPaths, val manager: Manager, val flags: IvyFlags, val ivyScala: Option[IvyScala], val log: Logger) extends NotNull +final class UpdateConfiguration(val outputPattern: String, val synchronize: Boolean, val quiet: Boolean) extends NotNull +object ManageDependencies +{ + val DefaultIvyConfigFilename = "ivysettings.xml" + val DefaultIvyFilename = "ivy.xml" + val DefaultMavenFilename = "pom.xml" + val ScalaOrganization = "org.scala-lang" + val ScalaLibraryID = "scala-library" + val ScalaCompilerID = "scala-compiler" + + private def defaultIvyFile(project: Path) = project / DefaultIvyFilename + private def defaultIvyConfiguration(project: Path) = project / DefaultIvyConfigFilename + private def defaultPOM(project: Path) = project / DefaultMavenFilename + + /** Configures Ivy using the provided configuration 'config' and calls 'doWithIvy'. 
This method takes care of setting up and cleaning up Ivy.*/ + private def withIvy(config: IvyConfiguration)(doWithIvy: (Ivy, ModuleDescriptor, String) => Option[String]) = + withIvyValue(config)( (ivy, module, default) => doWithIvy(ivy, module, default).toLeft(()) ).left.toOption + /** Configures Ivy using the provided configuration 'config' and calls 'doWithIvy'. This method takes care of setting up and cleaning up Ivy.*/ + private def withIvyValue[T](config: IvyConfiguration)(doWithIvy: (Ivy, ModuleDescriptor, String) => Either[String, T]) = + { + import config._ + val logger = new IvyLogger(log) + Message.setDefaultLogger(logger) + val ivy = Ivy.newInstance() + ivy.getLoggerEngine.pushLogger(logger) + + /** Parses the given Maven pom 'pomFile'.*/ + def readPom(pomFile: File) = + Control.trap("Could not read pom: ", log) + { Right((PomModuleDescriptorParser.getInstance.parseDescriptor(ivy.getSettings, toURL(pomFile), flags.validate)), "compile") } + /** Parses the given Ivy file 'ivyFile'.*/ + def readIvyFile(ivyFile: File) = + Control.trap("Could not read Ivy file: ", log) + { + val url = toURL(ivyFile) + val parser = new CustomXmlParser.CustomParser(ivy.getSettings) + parser.setValidate(flags.validate) + parser.setSource(url) + parser.parse() + Right((parser.getModuleDescriptor(), parser.getDefaultConf)) + } + /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. */ + def parseXMLDependencies(xml: scala.xml.NodeSeq, moduleID: DefaultModuleDescriptor, defaultConfiguration: String) = + parseDependencies(xml.toString, moduleID, defaultConfiguration) + /** Parses the given in-memory Ivy file 'xml', using the existing 'moduleID' and specifying the given 'defaultConfiguration'. 
*/ + def parseDependencies(xml: String, moduleID: DefaultModuleDescriptor, defaultConfiguration: String): Either[String, CustomXmlParser.CustomParser] = + Control.trap("Could not read dependencies: ", log) + { + val parser = new CustomXmlParser.CustomParser(ivy.getSettings) + parser.setMd(moduleID) + parser.setDefaultConf(defaultConfiguration) + parser.setValidate(flags.validate) + val resource = new ByteResource(xml.getBytes) + parser.setInput(resource.openStream) + parser.setResource(resource) + parser.parse() + Right(parser) + } + /** Configures Ivy using the specified Ivy configuration file. This method is used when the manager is explicitly requested to be MavenManager or + * IvyManager. If a file is not specified, Ivy is configured with defaults and scala-tools releases is added as a repository.*/ + def configure(configFile: Option[Path]) + { + configFile match + { + case Some(path) => ivy.configure(path.asFile) + case None => + configureDefaults() + scalaTools() + } + } + /** Adds the scala-tools.org releases maven repository to the list of resolvers if configured to do so in IvyFlags.*/ + def scalaTools() + { + if(flags.addScalaTools) + { + log.debug("Added Scala Tools Releases repository.") + addResolvers(ivy.getSettings, ScalaToolsReleases :: Nil, log) + } + } + /** Configures Ivy using defaults. This is done when no ivy-settings.xml exists. */ + def configureDefaults() + { + ivy.configureDefault + val settings = ivy.getSettings + for(dir <- paths.cacheDirectory) settings.setDefaultCache(dir.asFile) + settings.setBaseDir(paths.projectDirectory.asFile) + configureCache(settings) + } + /** Called to configure Ivy when the configured dependency manager is SbtManager and inline configuration is specified or if the manager + * is AutodetectManager. 
It will configure Ivy with an 'ivy-settings.xml' file if there is one, or configure the defaults and add scala-tools as + * a repository otherwise.*/ + def autodetectConfiguration() + { + log.debug("Autodetecting configuration.") + val defaultIvyConfigFile = defaultIvyConfiguration(paths.projectDirectory).asFile + if(defaultIvyConfigFile.canRead) + ivy.configure(defaultIvyConfigFile) + else + { + configureDefaults() + scalaTools() + } + } + /** Called to determine dependencies when the dependency manager is SbtManager and no inline dependencies (Scala or XML) are defined + * or if the manager is AutodetectManager. It will try to read from pom.xml first and then ivy.xml if pom.xml is not found. If neither is found, + * Ivy is configured with defaults unless IvyFlags.errorIfNoConfiguration is true, in which case an error is generated.*/ + def autodetectDependencies(module: ModuleRevisionId) = + { + log.debug("Autodetecting dependencies.") + val defaultPOMFile = defaultPOM(paths.projectDirectory).asFile + if(defaultPOMFile.canRead) + readPom(defaultPOMFile) + else + { + val defaultIvy = defaultIvyFile(paths.projectDirectory).asFile + if(defaultIvy.canRead) + readIvyFile(defaultIvy) + else if(flags.errorIfNoConfiguration) + Left("No readable dependency configuration found. Need " + DefaultIvyFilename + " or " + DefaultMavenFilename) + else + { + val defaultConf = ModuleDescriptor.DEFAULT_CONFIGURATION + log.warn("No readable dependency configuration found, using defaults.") + val moduleID = DefaultModuleDescriptor.newDefaultInstance(module) + addMainArtifact(moduleID) + addDefaultArtifact(defaultConf, moduleID) + Right((moduleID, defaultConf)) + } + } + } + /** Creates an Ivy module descriptor according to the manager configured.
The default configuration for dependencies + * is also returned.*/ + def moduleDescriptor: Either[String, (ModuleDescriptor, String)] = + config.manager match + { + case mm: MavenManager => + { + log.debug("Maven configuration explicitly requested.") + configure(mm.configuration) + readPom(mm.pom.asFile) + } + case im: IvyManager => + { + log.debug("Ivy configuration explicitly requested.") + configure(im.configuration) + readIvyFile(im.dependencies.asFile) + } + case adm: AutoDetectManager => + { + log.debug("No dependency manager explicitly specified.") + autodetectConfiguration() + autodetectDependencies(toID(adm.module)) + } + case sm: SbtManager => + { + import sm._ + if(resolvers.isEmpty && autodetectUnspecified) + autodetectConfiguration() + else + { + log.debug("Using inline repositories.") + configureDefaults() + val extra = if(flags.addScalaTools) ScalaToolsReleases :: resolvers.toList else resolvers + addResolvers(ivy.getSettings, extra, log) + } + if(autodetect) + autodetectDependencies(toID(module)) + else + { + val moduleID = + { + val mod = new DefaultModuleDescriptor(toID(module), "release", null, false) + mod.setLastModified(System.currentTimeMillis) + configurations.foreach(config => mod.addConfiguration(toIvyConfiguration(config))) + mod + } + val defaultConf = defaultConfiguration getOrElse Configurations.config(ModuleDescriptor.DEFAULT_CONFIGURATION) + log.debug("Using inline dependencies specified in Scala" + (if(dependenciesXML.isEmpty) "." else " and XML.")) + for(parser <- parseXMLDependencies(wrapped(module, dependenciesXML), moduleID, defaultConf.name).right) yield + { + addArtifacts(moduleID, artifacts) + addDependencies(moduleID, dependencies, parser) + addMainArtifact(moduleID) + (moduleID, parser.getDefaultConf) + } + } + } + } + /** Creates a full ivy file for 'module' using the 'dependencies' XML as the part after the <info>...</info> section. 
*/ + def wrapped(module: ModuleID, dependencies: scala.xml.NodeSeq) = + { + import module._ + + + {dependencies} + + } + /** Performs checks/adds filters on Scala dependencies (if enabled in IvyScala). */ + def checkModule(moduleAndConf: (ModuleDescriptor, String)): Either[String, (ModuleDescriptor, String)] = + ivyScala match + { + case Some(check) => + val (module, conf) = moduleAndConf + val explicitCheck = + if(check.checkExplicit) + checkDependencies(module, check.scalaVersion, check.configurations) + else + None + explicitCheck match + { + case None => + if(check.filterImplicit) + { + val asDefault = toDefaultModuleDescriptor(module) + excludeScalaJars(asDefault, check.configurations) + Right( (asDefault, conf) ) + } + else + Right(moduleAndConf) + case Some(err) => Left(err) + } + case None => Right(moduleAndConf) + } + + this.synchronized // Ivy is not thread-safe. In particular, it uses a static DocumentBuilder, which is not thread-safe + { + ivy.pushContext() + try + { + moduleDescriptor.right.flatMap(checkModule).right.flatMap { mdAndConf => + doWithIvy(ivy, mdAndConf._1, mdAndConf._2) + } + } + finally { ivy.popContext() } + } + } + /** Checks the immediate dependencies of module for dependencies on scala jars and verifies that the version on the + * dependencies matches scalaVersion. 
*/ + private def checkDependencies(module: ModuleDescriptor, scalaVersion: String, configurations: Iterable[Configuration]): Option[String] = + { + val configSet = configurationSet(configurations) + Control.lazyFold(module.getDependencies.toList) + { dep => + val id = dep.getDependencyRevisionId + if(id.getOrganisation == ScalaOrganization && id.getRevision != scalaVersion && dep.getModuleConfigurations.exists(configSet.contains)) + Some("Different Scala version specified in dependency ("+ id.getRevision + ") than in project (" + scalaVersion + ").") + else + None + } + } + private def configurationSet(configurations: Iterable[Configuration]) = + HashSet(configurations.map(_.toString).toSeq : _*) + /** Adds exclusions for the scala library and compiler jars so that they are not downloaded. This is + * done because normally these jars are already on the classpath and cannot/should not be overridden. The version + * of Scala to use is done by setting scala.version in the project definition. 
*/ + private def excludeScalaJars(module: DefaultModuleDescriptor, configurations: Iterable[Configuration]) + { + val configurationNames = + { + val names = module.getConfigurationsNames + if(configurations.isEmpty) + names + else + { + import scala.collection.mutable.HashSet + val configSet = configurationSet(configurations) + configSet.intersect(HashSet(names : _*)) + configSet.toArray + } + } + def excludeScalaJar(name: String) + { module.addExcludeRule(excludeRule(ScalaOrganization, name, configurationNames)) } + excludeScalaJar(ScalaLibraryID) + excludeScalaJar(ScalaCompilerID) + } + private def configureCache(settings: IvySettings) + { + settings.getDefaultRepositoryCacheManager match + { + case manager: DefaultRepositoryCacheManager => + manager.setUseOrigin(true) + manager.setChangingMatcher(PatternMatcher.REGEXP); + manager.setChangingPattern(".*-SNAPSHOT"); + case _ => () + } + } + /** Creates an ExcludeRule that excludes artifacts with the given module organization and name for + * the given configurations. */ + private def excludeRule(organization: String, name: String, configurationNames: Iterable[String]): ExcludeRule = + { + val artifact = new ArtifactId(ModuleId.newInstance(organization, name), "*", "*", "*") + val rule = new DefaultExcludeRule(artifact, ExactPatternMatcher.INSTANCE, Collections.emptyMap[AnyRef,AnyRef]) + configurationNames.foreach(rule.addConfiguration) + rule + } + /** Clears the Ivy cache, as configured by 'config'. 
*/ + def cleanCache(config: IvyConfiguration) = + { + def doClean(ivy: Ivy, module: ModuleDescriptor, default: String) = + Control.trapUnit("Could not clean cache: ", config.log) + { ivy.getSettings.getRepositoryCacheManagers.foreach(_.clean()); None } + + withIvy(config)(doClean) + } + /** Creates a Maven pom from the given Ivy configuration*/ + def makePom(config: IvyConfiguration, extraDependencies: Iterable[ModuleID], configurations: Option[Iterable[Configuration]], output: File) = + { + def doMakePom(ivy: Ivy, md: ModuleDescriptor, default: String) = + Control.trapUnit("Could not make pom: ", config.log) + { + val module = addLateDependencies(ivy, md, default, extraDependencies) + val pomModule = keepConfigurations(module, configurations) + PomModuleDescriptorWriter.write(pomModule, DefaultConfigurationMapping, output) + config.log.info("Wrote " + output.getAbsolutePath) + None + } + withIvy(config)(doMakePom) + } + private def addDefaultArtifact(defaultConf: String, moduleID: DefaultModuleDescriptor) = + moduleID.addArtifact(defaultConf, new MDArtifact(moduleID, moduleID.getModuleRevisionId.getName, defaultType, defaultExtension)) + // todo: correct default configuration for extra dependencies + private def addLateDependencies(ivy: Ivy, md: ModuleDescriptor, defaultConfiguration: String, extraDependencies: Iterable[ModuleID]) = + { + val module = toDefaultModuleDescriptor(md) + val parser = new CustomXmlParser.CustomParser(ivy.getSettings) + parser.setMd(module) + val defaultConf = if(defaultConfiguration.contains("->")) defaultConfiguration else (defaultConfiguration + "->default") + parser.setDefaultConf(defaultConf) + addDependencies(module, extraDependencies, parser) + module + } + private def getConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]) = + configurations match + { + case Some(confs) => confs.map(_.name).toList.toArray + case None => module.getPublicConfigurationsNames + } + /** Retain dependencies only 
 with the configurations given, or all public configurations of `module` if `configurations` is None.
	* This currently only preserves the information required by makePom. */
	private def keepConfigurations(module: ModuleDescriptor, configurations: Option[Iterable[Configuration]]): ModuleDescriptor =
	{
		val keepConfigurations = getConfigurations(module, configurations)
		val keepSet = Set(keepConfigurations.toSeq : _*)
		// A dependency is kept only if at least one of its configurations survives the filter.
		def translate(dependency: DependencyDescriptor) =
		{
			val keep = dependency.getModuleConfigurations.filter(keepSet.contains)
			if(keep.isEmpty)
				None
			else // TODO: translate the dependency to contain only configurations to keep
				Some(dependency)
		}
		// Only the revision id, home page, and surviving dependencies are copied to the new descriptor.
		val newModule = new DefaultModuleDescriptor(module.getModuleRevisionId, "", null)
		newModule.setHomePage(module.getHomePage)
		for(dependency <- module.getDependencies; translated <- translate(dependency))
			newModule.addDependency(translated)
		newModule
	}
	/** Sets the configurations on `to` (any object with a setConfs method) to the given configuration names. */
	private def addConfigurations(configurations: Iterable[String], to: { def setConfs(c: Array[String]): AnyRef }): Unit =
		to.setConfs(configurations.toList.toArray)

	/** Resolves the module and writes its deliverable Ivy file according to `deliverIvyPattern`,
	* marking it with the given `status`. */
	def deliver(ivyConfig: IvyConfiguration, updateConfig: UpdateConfiguration, status: String, deliverIvyPattern: String, extraDependencies: Iterable[ModuleID], configurations: Option[Iterable[Configuration]]) =
	{
		def doDeliver(ivy: Ivy, md: ModuleDescriptor, default: String) =
			Control.trapUnit("Could not deliver: ", ivyConfig.log)
			{
				val module = addLateDependencies(ivy, md, default, extraDependencies)
				// resolve returns Some(error); delivery only proceeds when resolution succeeded (None).
				resolve(ivy, updateConfig, module) orElse
				{
					val revID = module.getModuleRevisionId
					val options = DeliverOptions.newInstance(ivy.getSettings).setStatus(status)
					options.setConfs(getConfigurations(module, configurations))

					ivy.deliver(revID, revID.getRevision, deliverIvyPattern, options)
					None
				}
			}
		withIvy(ivyConfig)(doDeliver)
	}
	// todo: map configurations, extra dependencies
	/** Publishes the module's artifacts, located by `srcArtifactPatterns`, to the named resolver. */
	def publish(ivyConfig: IvyConfiguration, resolverName: String, srcArtifactPatterns: Iterable[String], deliveredIvyPattern: Option[String], configurations: Option[Iterable[Configuration]]) =
	{
		def doPublish(ivy: Ivy, md: ModuleDescriptor, default: String) =
			Control.trapUnit("Could not publish: ", ivyConfig.log)
			{
				val revID = md.getModuleRevisionId
				val patterns = new java.util.ArrayList[String]
				srcArtifactPatterns.foreach(pattern => patterns.add(pattern))
				val options = (new PublishOptions).setOverwrite(true)
				deliveredIvyPattern.foreach(options.setSrcIvyPattern)
				options.setConfs(getConfigurations(md, configurations))
				ivy.publish(revID, patterns, resolverName, options)
				None
			}
		withIvy(ivyConfig)(doPublish)
	}
	/** Resolves and retrieves dependencies.  'ivyConfig' is used to produce an Ivy file and configuration.
	* 'updateConfig' configures the actual resolution and retrieval process. */
	def update(ivyConfig: IvyConfiguration, updateConfig: UpdateConfiguration) =
	{
		def processModule(ivy: Ivy, module: ModuleDescriptor, default: String) =
		{
			import updateConfig._
			Control.trapUnit("Could not process dependencies: ", ivyConfig.log)
			{
				// Retrieval runs only when resolution succeeded (resolve returned None).
				resolve(ivy, updateConfig, module) orElse
				{
					val retrieveOptions = new RetrieveOptions
					retrieveOptions.setSync(synchronize)
					val patternBase = ivyConfig.paths.managedLibDirectory.absolutePath
					// Resolve the output pattern against the managed library directory,
					// avoiding a doubled separator when the base already ends with one.
					val pattern =
						if(patternBase.endsWith(File.separator))
							patternBase + outputPattern
						else
							patternBase + File.separatorChar + outputPattern
					ivy.retrieve(module.getModuleRevisionId, pattern, retrieveOptions)
					None
				}
			}
		}

		withIvy(ivyConfig)(processModule)
	}
	/** Runs Ivy resolution for `module`.  Returns None on success and Some(error messages) on failure. */
	private def resolve(ivy: Ivy, updateConfig: UpdateConfiguration, module: ModuleDescriptor) =
	{
		import updateConfig._
		val resolveOptions = new ResolveOptions
		if(quiet)
			resolveOptions.setLog(LogOptions.LOG_DOWNLOAD_ONLY)
		val resolveReport = ivy.resolve(module, resolveOptions)
		if(resolveReport.hasError)
			// The Set removes duplicate messages before they are joined into a single error string.
			Some(Set(resolveReport.getAllProblemMessages.toArray: _*).mkString(System.getProperty("line.separator")))
		else
			None
	}
	/** Adds the given inline dependencies to the provided module. */
	private def addDependencies(moduleID: DefaultModuleDescriptor, dependencies: Iterable[ModuleID], parser: CustomXmlParser.CustomParser)
	{
		for(dependency <- dependencies)
		{
			val dependencyDescriptor = new DefaultDependencyDescriptor(moduleID, toID(dependency), false, dependency.isChanging, dependency.isTransitive)
			dependency.configurations match
			{
				case None => // The configuration for this dependency was not explicitly specified, so use the default
					parser.parseDepsConfs(parser.getDefaultConf, dependencyDescriptor)
				case Some(confs) => // The configuration mapping (looks like: test->default) was specified for this dependency
					parser.parseDepsConfs(confs, dependencyDescriptor)
			}
			// Explicit artifacts (e.g. from an explicit URL) are attached to every configuration of the dependency.
			for(artifact <- dependency.explicitArtifacts)
			{
				import artifact.{name, `type`, extension, url}
				val ivyArtifact = new DefaultDependencyArtifactDescriptor(dependencyDescriptor, name, `type`, extension, url.getOrElse(null), null)
				for(conf <- dependencyDescriptor.getModuleConfigurations)
					dependencyDescriptor.addDependencyArtifact(conf, ivyArtifact)
			}
			moduleID.addDependency(dependencyDescriptor)
		}
	}
	/** Registers the given artifacts with the module, each in its declared configurations
	* or in all public configurations when it declares none. */
	private def addArtifacts(moduleID: DefaultModuleDescriptor, artifacts: Iterable[Artifact])
	{
		val allConfigurations = moduleID.getPublicConfigurationsNames
		for(artifact <- artifacts)
		{
			val configurationStrings =
			{
				val artifactConfigurations = artifact.configurations
				if(artifactConfigurations.isEmpty)
					allConfigurations
				else
					artifactConfigurations.map(_.name)
			}
			val ivyArtifact = toIvyArtifact(moduleID, artifact, configurationStrings)
			configurationStrings.foreach(configuration => moduleID.addArtifact(configuration, ivyArtifact))
		}
	}
	private def toURL(file: File) = file.toURI.toURL
	/** Adds the ivy.xml main artifact.
*/ + private def addMainArtifact(moduleID: DefaultModuleDescriptor) + { + val artifact = DefaultArtifact.newIvyArtifact(moduleID.getResolvedModuleRevisionId, moduleID.getPublicationDate) + moduleID.setModuleArtifact(artifact) + moduleID.check() + } + /** Sets the resolvers for 'settings' to 'resolvers'. This is done by creating a new chain and making it the default. */ + private def addResolvers(settings: IvySettings, resolvers: Iterable[Resolver], log: Logger) + { + val newDefault = new ChainResolver + newDefault.setName("redefined-public") + resolvers.foreach(r => newDefault.add(ConvertResolver(r))) + newDefault.add(settings.getDefaultResolver) + settings.addResolver(newDefault) + settings.setDefaultResolver(newDefault.getName) + if(log.atLevel(Level.Debug)) + { + log.debug("Using extra repositories:") + resolvers.foreach(r => log.debug("\t" + r.toString)) + } + } + private def toIvyConfiguration(configuration: Configuration) = + { + import org.apache.ivy.core.module.descriptor.{Configuration => IvyConfig} + import IvyConfig.Visibility._ + import configuration._ + new IvyConfig(name, if(isPublic) PUBLIC else PRIVATE, description, extendsConfigs.map(_.name).toArray, transitive, null) + } + /** Converts the given sbt module id into an Ivy ModuleRevisionId.*/ + private def toID(m: ModuleID) = + { + import m._ + ModuleRevisionId.newInstance(organization, name, revision) + } + private def toIvyArtifact(moduleID: ModuleDescriptor, a: Artifact, configurations: Iterable[String]): MDArtifact = + { + val artifact = new MDArtifact(moduleID, a.name, a.`type`, a.extension) + configurations.foreach(artifact.addConfiguration) + artifact + } + /** An implementation of Ivy's Resource class that provides the Ivy file from a byte array. 
 This is used to support
	* inline Ivy file XML.*/
	private class ByteResource(bytes: Array[Byte]) extends
		BasicResource("Inline XML dependencies", true, bytes.length, System.currentTimeMillis, true)
	{
		// The resource's content is the in-memory byte array, not a file or URL.
		override def openStream = new java.io.ByteArrayInputStream(bytes)
	}
	/** Subclasses the default Ivy file parser in order to provide access to protected methods.*/
	private object CustomXmlParser extends XmlModuleDescriptorParser with NotNull
	{
		import XmlModuleDescriptorParser.Parser
		class CustomParser(settings: IvySettings) extends Parser(CustomXmlParser, settings) with NotNull
		{
			/** Points the parser at `url` by calling the superclass methods directly; the
			* overridden setResource below is a no-op, so the descriptor state set here
			* cannot later be replaced. */
			def setSource(url: URL) =
			{
				super.setResource(new URLResource(url))
				super.setInput(url)
			}
			/** Overridden because the super implementation overwrites the module descriptor.*/
			override def setResource(res: Resource) {}
			// The following overrides simply widen access to protected members of the superclass.
			override def setMd(md: DefaultModuleDescriptor) = super.setMd(md)
			override def parseDepsConfs(confs: String, dd: DefaultDependencyDescriptor) = super.parseDepsConfs(confs, dd)
			override def getDefaultConf = super.getDefaultConf
			override def setDefaultConf(conf: String) = super.setDefaultConf(conf)
		}
	}
	/** This code converts the given ModuleDescriptor to a DefaultModuleDescriptor by casting or generating an error.
	* Ivy always produces a DefaultModuleDescriptor, so this should be reasonable.
 */
	private def toDefaultModuleDescriptor(md: ModuleDescriptor) =
		md match
		{
			case dmd: DefaultModuleDescriptor => dmd
			case _ => error("Unknown ModuleDescriptor type.")
		}
}

/** Converts sbt resolver definitions into configured Ivy resolver instances. */
private object ConvertResolver
{
	/** Converts the given sbt resolver into an Ivy resolver. */
	def apply(r: Resolver) =
	{
		r match
		{
			case repo: MavenRepository =>
			{
				val resolver = new IBiblioResolver
				initializeMavenStyle(resolver, repo.name, repo.root)
				resolver
			}
			case JavaNet1Repository =>
			{
				// Thanks to Matthias Pfau for posting how to use the Maven 1 repository on java.net with Ivy:
				// http://www.nabble.com/Using-gradle-Ivy-with-special-maven-repositories-td23775489.html
				val resolver = new IBiblioResolver { override def convertM2IdForResourceSearch(mrid: ModuleRevisionId) = mrid }
				initializeMavenStyle(resolver, JavaNet1Repository.name, "http://download.java.net/maven/1/")
				resolver.setPattern("[organisation]/[ext]s/[module]-[revision](-[classifier]).[ext]")
				resolver
			}
			case repo: SshRepository =>
			{
				val resolver = new SshResolver
				initializeSSHResolver(resolver, repo)
				repo.publishPermissions.foreach(perm => resolver.setPublishPermissions(perm))
				resolver
			}
			case repo: SftpRepository =>
			{
				val resolver = new SFTPResolver
				initializeSSHResolver(resolver, repo)
				resolver
			}
			case repo: FileRepository =>
			{
				val resolver = new FileSystemResolver
				resolver.setName(repo.name)
				initializePatterns(resolver, repo.patterns)
				import repo.configuration.{isLocal, isTransactional}
				resolver.setLocal(isLocal)
				isTransactional.foreach(value => resolver.setTransactional(value.toString))
				resolver
			}
			case repo: URLRepository =>
			{
				val resolver = new URLResolver
				resolver.setName(repo.name)
				initializePatterns(resolver, repo.patterns)
				resolver
			}
		}
	}
	/** Applies the common setup for a Maven-style (m2 compatible) resolver. */
	private def initializeMavenStyle(resolver: IBiblioResolver, name: String, root: String)
	{
		resolver.setName(name)
		resolver.setM2compatible(true)
		resolver.setRoot(root)
	}
	/** Applies the setup shared by the ssh and sftp resolvers: name, patterns, and connection. */
	private def initializeSSHResolver(resolver: AbstractSshBasedResolver, repo: SshBasedRepository)
	{
		resolver.setName(repo.name)
		// Authentication is configured explicitly in initializeConnection; a passfile is never consulted.
		resolver.setPassfile(null)
		initializePatterns(resolver, repo.patterns)
		initializeConnection(resolver, repo.connection)
	}
	/** Transfers the host, port, and credentials from the sbt connection description to the Ivy resolver. */
	private def initializeConnection(resolver: AbstractSshBasedResolver, connection: RepositoryHelpers.SshConnection)
	{
		import resolver._
		import connection._
		hostname.foreach(setHost)
		port.foreach(setPort)
		authentication foreach
		{
			case RepositoryHelpers.PasswordAuthentication(user, password) =>
				setUser(user)
				setUserPassword(password)
			case RepositoryHelpers.KeyFileAuthentication(file, password) =>
				setKeyFile(file)
				setKeyFilePassword(password)
		}
	}
	/** Registers the Ivy and artifact patterns with the resolver and sets Maven compatibility. */
	private def initializePatterns(resolver: AbstractPatternsBasedResolver, patterns: RepositoryHelpers.Patterns)
	{
		resolver.setM2compatible(patterns.isMavenCompatible)
		patterns.ivyPatterns.foreach(resolver.addIvyPattern)
		patterns.artifactPatterns.foreach(resolver.addArtifactPattern)
	}
}

/** Maps Ivy configuration names to Maven scopes when writing a pom. */
private object DefaultConfigurationMapping extends PomModuleDescriptorWriter.ConfigurationScopeMapping(new java.util.HashMap)
{
	override def getScope(confs: Array[String]) =
	{
		// The first standard Maven configuration present becomes the scope; otherwise, the first
		// configuration name is used, and the 'default' configuration maps to no scope at all.
		Configurations.defaultMavenConfigurations.find(conf => confs.contains(conf.name)) match
		{
			case Some(conf) => conf.name
			case None =>
				if(confs.isEmpty || confs(0) == Configurations.Default.name)
					null
				else
					confs(0)
		}
	}
	// A dependency is optional when its only configuration is 'optional' (or it has none at all).
	override def isOptional(confs: Array[String]) = confs.isEmpty || (confs.length == 1 && confs(0) == Configurations.Optional.name)
}

/** Interface between Ivy logging and sbt logging.
*/ +private final class IvyLogger(log: Logger) extends MessageLogger +{ + private var progressEnabled = false + + def log(msg: String, level: Int) + { + import Message.{MSG_DEBUG, MSG_VERBOSE, MSG_INFO, MSG_WARN, MSG_ERR} + level match + { + case MSG_DEBUG | MSG_VERBOSE => debug(msg) + case MSG_INFO => info(msg) + case MSG_WARN => warn(msg) + case MSG_ERR => error(msg) + } + } + def rawlog(msg: String, level: Int) + { + log(msg, level) + } + import Level.{Debug, Info, Warn, Error} + def debug(msg: String) = logImpl(msg, Debug) + def verbose(msg: String) = debug(msg) + def deprecated(msg: String) = warn(msg) + def info(msg: String) = logImpl(msg, Info) + def rawinfo(msg: String) = info(msg) + def warn(msg: String) = logImpl(msg, Warn) + def error(msg: String) = logImpl(msg, Error) + + private def logImpl(msg: String, level: Level.Value) = log.log(level, msg) + + private def emptyList = java.util.Collections.emptyList[T forSome { type T}] + def getProblems = emptyList + def getWarns = emptyList + def getErrors = emptyList + + def clearProblems = () + def sumupProblems = () + def progress = () + def endProgress = () + + def endProgress(msg: String) = info(msg) + def isShowProgress = false + def setShowProgress(progress: Boolean) {} +} diff --git a/src/main/scala/sbt/ManagedInterface.scala b/src/main/scala/sbt/ManagedInterface.scala new file mode 100644 index 000000000..9063e901b --- /dev/null +++ b/src/main/scala/sbt/ManagedInterface.scala @@ -0,0 +1,376 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File +import java.net.{URI, URL} +import scala.xml.NodeSeq +import org.apache.ivy.plugins.resolver.IBiblioResolver +import org.apache.ivy.util.url.CredentialsStore + +sealed abstract class Manager extends NotNull +/** This explicitly requests auto detection as a dependency manager. It will first check for a 'pom.xml' file and if that does not exist, an 'ivy.xml' file. 
* Ivy is configured using the detected file or uses defaults.*/
final class AutoDetectManager(val module: ModuleID) extends Manager
/** This explicitly requests that the Maven pom 'pom' be used to determine dependencies.  An Ivy configuration file to use may be specified in
* 'configuration', since Ivy currently cannot extract Maven repositories from a pom file.  Otherwise, defaults are used.*/
final class MavenManager(val configuration: Option[Path], val pom: Path) extends Manager
/** This explicitly requests that the Ivy file 'dependencies' be used to determine dependencies.  An Ivy configuration file to use may be specified in
* 'configuration'.  Otherwise, defaults are used.*/
final class IvyManager(val configuration: Option[Path], val dependencies: Path) extends Manager
/** This manager directly specifies the dependencies, resolvers, and configurations through sbt wrapper classes and through an in-memory
* Ivy XML file. */
sealed trait SbtManager extends Manager
{
	def autodetect: Boolean
	def module: ModuleID
	def resolvers: Iterable[Resolver]
	def dependencies: Iterable[ModuleID]
	def autodetectUnspecified: Boolean
	def dependenciesXML: NodeSeq
	def configurations: Iterable[Configuration]
	def defaultConfiguration: Option[Configuration]
	def artifacts: Iterable[Artifact]
}
final class SimpleManager private[sbt] (val dependenciesXML: NodeSeq, val autodetectUnspecified: Boolean,
	val module: ModuleID, val resolvers: Iterable[Resolver], explicitConfigurations: Iterable[Configuration],
	val defaultConfiguration: Option[Configuration], val artifacts: Iterable[Artifact], val dependencies: ModuleID*) extends SbtManager
{
	// Auto-detection applies only when nothing was specified inline and the project allows it.
	def autodetect = dependencies.isEmpty && dependenciesXML.isEmpty && artifacts.isEmpty && explicitConfigurations.isEmpty && autodetectUnspecified
	// When no configurations were given explicitly, derive them from the default configuration style
	// (Maven-style projects get the standard Maven configurations).
	def configurations =
		if(explicitConfigurations.isEmpty && !autodetect)
		{
			defaultConfiguration match
			{
				case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil
				case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations
				case _ => Nil
			}
		}
		else
			explicitConfigurations
}

/** Identifies a dependency or project module by organization, name, and revision.
* `explicitArtifacts` overrides the artifacts normally resolved for the dependency. */
final case class ModuleID(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean, explicitArtifacts: Seq[Artifact]) extends NotNull
{
	override def toString = organization + ":" + name + ":" + revision
	// () required for chaining
	def notTransitive() = intransitive()
	def intransitive() = ModuleID(organization, name, revision, configurations, isChanging, false, explicitArtifacts)
	def changing() = ModuleID(organization, name, revision, configurations, true, isTransitive, explicitArtifacts)
	// Retrieves the dependency from the explicit URL instead of the resolvers.
	def from(url: String) = artifacts(Artifact(name, new URL(url)))
	def artifacts(newArtifacts: Artifact*) = ModuleID(organization, name, revision, configurations, isChanging, isTransitive, newArtifacts ++ explicitArtifacts)
}
object ModuleID
{
	// Convenience constructors: unspecified configurations, not changing, transitive, no explicit artifacts.
	def apply(organization: String, name: String, revision: String): ModuleID = ModuleID(organization, name, revision, None)
	def apply(organization: String, name: String, revision: String, configurations: Option[String]): ModuleID =
		ModuleID(organization, name, revision, configurations, false, true)
	def apply(organization: String, name: String, revision: String, configurations: Option[String], isChanging: Boolean, isTransitive: Boolean): ModuleID =
		ModuleID(organization, name, revision, configurations, isChanging, isTransitive, Nil)
}
/** The base interface for a repository that Ivy can resolve against. */
sealed trait Resolver extends NotNull
{
	def name: String
}
/** A Maven-style (m2 layout) repository rooted at `root`. */
sealed case class MavenRepository(name: String, root: String) extends Resolver
{
	override def toString = name + ": " + root
}

object RepositoryHelpers
{
	/** The Ivy file and artifact patterns for a pattern-based repository.
	* `isMavenCompatible` marks the repository as using an m2-compatible layout. */
	final case class Patterns(ivyPatterns: Seq[String], artifactPatterns: Seq[String], isMavenCompatible: Boolean) extends NotNull
	{
		private[sbt] def mavenStyle(): Patterns = Patterns(ivyPatterns, artifactPatterns, true)
		private[sbt] def withIvys(patterns: Seq[String]): Patterns = Patterns(patterns ++ ivyPatterns, artifactPatterns, isMavenCompatible)
		private[sbt] def withArtifacts(patterns: Seq[String]): Patterns = Patterns(ivyPatterns, patterns ++ artifactPatterns, isMavenCompatible)
	}
	/** The authentication, host, and port for an ssh/sftp repository; each part is optional. */
	final case class SshConnection(authentication: Option[SshAuthentication], hostname: Option[String], port: Option[Int]) extends NotNull
	{
		def copy(authentication: Option[SshAuthentication]) = SshConnection(authentication, hostname, port)
	}
	/** Configuration specific to an Ivy filesystem resolver. */
	final case class FileConfiguration(isLocal: Boolean, isTransactional: Option[Boolean]) extends NotNull
	{
		def transactional() = FileConfiguration(isLocal, Some(true))
		def nontransactional() = FileConfiguration(isLocal, Some(false))
		def nonlocal() = FileConfiguration(false, isTransactional)
	}
	sealed trait SshAuthentication extends NotNull
	final case class PasswordAuthentication(user: String, password: String) extends SshAuthentication
	final case class KeyFileAuthentication(keyfile: File, password: String) extends SshAuthentication
}
import RepositoryHelpers.{Patterns, SshConnection, FileConfiguration}
import RepositoryHelpers.{KeyFileAuthentication, PasswordAuthentication, SshAuthentication}

/** sbt interface to an Ivy repository based on patterns, which is most Ivy repositories.*/
sealed abstract class PatternsBasedRepository extends Resolver
{
	type RepositoryType <: PatternsBasedRepository
	/** Should be implemented to create a new copy of this repository but with `patterns` as given.*/
	protected def copy(patterns: Patterns): RepositoryType

	/** The object representing the configured patterns for this repository. */
	def patterns: Patterns

	/** Enables maven 2 compatibility for this repository.
 */
	def mavenStyle() = copy(patterns.mavenStyle())
	/** Adds the given patterns for resolving/publishing Ivy files.*/
	def ivys(ivyPatterns: String*): RepositoryType = copy(patterns.withIvys(ivyPatterns))
	/** Adds the given patterns for resolving/publishing artifacts.*/
	def artifacts(artifactPatterns: String*): RepositoryType = copy(patterns.withArtifacts(artifactPatterns))
}
/** sbt interface for an Ivy filesystem repository.  More convenient construction is done using Resolver.file. */
final case class FileRepository(name: String, configuration: FileConfiguration, patterns: Patterns) extends PatternsBasedRepository
{
	type RepositoryType = FileRepository
	protected def copy(patterns: Patterns): FileRepository = FileRepository(name, configuration, patterns)
	private def copy(configuration: FileConfiguration) = FileRepository(name, configuration, patterns)
	def transactional() = copy(configuration.transactional())
	def nonlocal() = copy(configuration.nonlocal())
}
/** sbt interface for an Ivy URL repository.  More convenient construction is done using Resolver.url. */
final case class URLRepository(name: String, patterns: Patterns) extends PatternsBasedRepository
{
	type RepositoryType = URLRepository
	protected def copy(patterns: Patterns): URLRepository = URLRepository(name, patterns)
}
/** sbt interface for an Ivy ssh-based repository (ssh and sftp).  Requires the Jsch library. */
sealed abstract class SshBasedRepository extends PatternsBasedRepository
{
	type RepositoryType <: SshBasedRepository
	protected def copy(connection: SshConnection): RepositoryType
	private def copy(authentication: SshAuthentication): RepositoryType = copy(connection.copy(Some(authentication)))

	/** The object representing the configured ssh connection for this repository. */
	def connection: SshConnection

	/** Configures this to use the specified user name and password when connecting to the remote repository. */
	def as(user: String, password: String): RepositoryType = copy(new PasswordAuthentication(user, password))
	/** Configures this to use the specified keyfile and password for the keyfile when connecting to the remote repository. */
	def as(keyfile: File, password: String): RepositoryType = copy(new KeyFileAuthentication(keyfile, password))
}
/** sbt interface for an Ivy repository over ssh.  More convenient construction is done using Resolver.ssh. */
final case class SshRepository(name: String, connection: SshConnection, patterns: Patterns, publishPermissions: Option[String]) extends SshBasedRepository
{
	type RepositoryType = SshRepository
	protected def copy(patterns: Patterns): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
	protected def copy(connection: SshConnection): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
	/** Defines the permissions to set when publishing to this repository. */
	def withPermissions(publishPermissions: String): SshRepository = withPermissions(Some(publishPermissions))
	def withPermissions(publishPermissions: Option[String]): SshRepository = SshRepository(name, connection, patterns, publishPermissions)
}
/** sbt interface for an Ivy repository over sftp.  More convenient construction is done using Resolver.sftp. */
final case class SftpRepository(name: String, connection: SshConnection, patterns: Patterns) extends SshBasedRepository
{
	type RepositoryType = SftpRepository
	protected def copy(patterns: Patterns): SftpRepository = SftpRepository(name, connection, patterns)
	protected def copy(connection: SshConnection): SftpRepository = SftpRepository(name, connection, patterns)
}

import Resolver._
object ScalaToolsReleases extends MavenRepository(ScalaToolsReleasesName, ScalaToolsReleasesRoot)
object ScalaToolsSnapshots extends MavenRepository(ScalaToolsSnapshotsName, ScalaToolsSnapshotsRoot)
object DefaultMavenRepository extends MavenRepository("Maven2 Repository", IBiblioResolver.DEFAULT_M2_ROOT)
object JavaNet1Repository extends Resolver
{
	def name = "java.net Maven1 Repository"
}

object Resolver
{
	val ScalaToolsReleasesName = "Scala-Tools Maven2 Repository"
	val ScalaToolsSnapshotsName = "Scala-Tools Maven2 Snapshots Repository"
	val ScalaToolsReleasesRoot = "http://scala-tools.org/repo-releases"
	val ScalaToolsSnapshotsRoot = "http://scala-tools.org/repo-snapshots"

	/** A base class for defining factories for interfaces to Ivy repositories that require a hostname, port, and patterns. */
	sealed abstract class Define[RepositoryType <: SshBasedRepository] extends NotNull
	{
		/** Subclasses should implement this method to construct the repository from its name, connection, and patterns. */
		protected def construct(name: String, connection: SshConnection, patterns: Patterns): RepositoryType
		/** Constructs this repository type with the given `name`.  `basePatterns` are the initial patterns to use.  A ManagedProject
		* has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, None, None, None)
		/** Constructs this repository type with the given `name` and `hostname`.  `basePatterns` are the initial patterns to use.
	* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), None, None)
		/** Constructs this repository type with the given `name`, `hostname`, and the `basePath` against which the initial
		* patterns will be resolved.  `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), None, Some(basePath))
		/** Constructs this repository type with the given `name`, `hostname`, and `port`.  `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, port: Int)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), Some(port), None)
		/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
		* patterns will be resolved.  `basePatterns` are the initial patterns to use.
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: String, port: Int, basePath: String)(implicit basePatterns: Patterns): RepositoryType =
			apply(name, Some(hostname), Some(port), Some(basePath))
		/** Constructs this repository type with the given `name`, `hostname`, `port`, and the `basePath` against which the initial
		* patterns will be resolved.  `basePatterns` are the initial patterns to use.  All but the `name` are optional (use None).
		* A ManagedProject has an implicit defining these initial patterns based on a setting for either Maven or Ivy style patterns.*/
		def apply(name: String, hostname: Option[String], port: Option[Int], basePath: Option[String])(implicit basePatterns: Patterns): RepositoryType =
			construct(name, SshConnection(None, hostname, port), resolvePatterns(basePath, basePatterns))
	}
	/** A factory to construct an interface to an Ivy SSH resolver.*/
	object ssh extends Define[SshRepository]
	{
		protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SshRepository(name, connection, patterns, None)
	}
	/** A factory to construct an interface to an Ivy SFTP resolver.*/
	object sftp extends Define[SftpRepository]
	{
		protected def construct(name: String, connection: SshConnection, patterns: Patterns) = SftpRepository(name, connection, patterns)
	}
	/** A factory to construct an interface to an Ivy filesystem resolver. */
	object file
	{
		/** Constructs a file resolver with the given name.  The patterns to use must be explicitly specified
		* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
		def apply(name: String): FileRepository = FileRepository(name, defaultFileConfiguration, ivyStylePatterns)
		/** Constructs a file resolver with the given name and base directory. */
		def apply(name: String, baseDirectory: File)(implicit basePatterns: Patterns): FileRepository =
			baseRepository(baseDirectory.toURI)(FileRepository(name, defaultFileConfiguration, _))
	}
	/** A factory to construct an interface to an Ivy URL resolver. */
	object url
	{
		/** Constructs a URL resolver with the given name.  The patterns to use must be explicitly specified
		* using the `ivys` or `artifacts` methods on the constructed resolver object.*/
		def apply(name: String): URLRepository = URLRepository(name, ivyStylePatterns)
		/** Constructs a URL resolver with the given name and base URL. */
		def apply(name: String, baseURL: URL)(implicit basePatterns: Patterns): URLRepository =
			baseRepository(baseURL.toURI)(URLRepository(name, _))
	}
	/** Resolves the base patterns against the normalized `baseURI` and passes the result to `construct`. */
	private def baseRepository[T](baseURI: java.net.URI)(construct: Patterns => T)(implicit basePatterns: Patterns): T =
		construct(resolvePatterns(baseURI.normalize, basePatterns))

	/** If `base` is None, `patterns` is returned unchanged.
	* Otherwise, the ivy file and artifact patterns in `patterns` are resolved against the given base. */
	private def resolvePatterns(base: Option[String], patterns: Patterns): Patterns =
		base match
		{
			case Some(path) => resolvePatterns(pathURI(path), patterns)
			case None => patterns
		}
	/** Resolves the ivy file and artifact patterns in `patterns` against the given base. */
	private def resolvePatterns(base: URI, basePatterns: Patterns): Patterns =
	{
		def resolve(pattern: String) = base.resolve(pathURI(pattern)).getPath
		def resolveAll(patterns: Seq[String]) = patterns.map(resolve)
		Patterns(resolveAll(basePatterns.ivyPatterns), resolveAll(basePatterns.artifactPatterns), basePatterns.isMavenCompatible)
	}
	/** Constructs a `URI` with the path component set to `path` and the other components set to null.*/
	private def pathURI(path: String) = new URI(null, null, path, null)

	def defaultFileConfiguration = FileConfiguration(true, None)
	def mavenStylePatterns = Patterns(Nil, mavenStyleBasePattern :: Nil, true)
	def ivyStylePatterns = Patterns(Nil, Nil, false)

	def defaultPatterns = mavenStylePatterns
	def mavenStyleBasePattern = "[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]"
}

/** The standard dependency configurations and helpers for defining new ones. */
object Configurations
{
	def config(name: String) = new Configuration(name)
	def defaultMavenConfigurations = Compile :: Runtime :: Test :: Provided :: System :: Optional :: Sources :: Javadoc :: Nil

	lazy val Default = config("default")
	lazy val Compile = config("compile")
	lazy val IntegrationTest = config("it") hide
	lazy val Provided = config("provided")
	lazy val Javadoc = config("javadoc")
	lazy val Runtime = config("runtime")
	lazy val Test = config("test") hide
	lazy val Sources = config("sources")
	lazy val System = config("system")
	lazy val Optional = config("optional")

	lazy val CompilerPlugin = config("plugin") hide

	private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true)
	private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false)
	private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if(mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration
	// The project's default configuration maps to the dependency's 'default' configuration,
	// falling back to its 'compile' configuration.
	private[sbt] def defaultConfiguration(mavenStyle: Boolean) =
	{
		val base = if(mavenStyle) Configurations.Compile else Configurations.Default
		config(base.name + "->default(compile)")
	}

	// Keeps one configuration per name (the map retains the last one seen for each name).
	private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*)
}
/** Represents an Ivy configuration.
*/ +final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) extends NotNull +{ + require(name != null && !name.isEmpty) + require(description != null) + def this(name: String) = this(name, "", true, Nil, true) + def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive) + def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive) + def notTransitive = intransitive + def intransitive = Configuration(name, description, isPublic, extendsConfigs, false) + def hide = Configuration(name, description, false, extendsConfigs, transitive) + override def toString = name +} + +final case class Artifact(name: String, `type`: String, extension: String, configurations: Iterable[Configuration], url: Option[URL]) extends NotNull +object Artifact +{ + def apply(name: String): Artifact = Artifact(name, defaultType, defaultExtension, Nil, None) + def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, Nil, None) + def apply(name: String, url: URL): Artifact =Artifact(name, extract(url, defaultType), extract(url, defaultExtension), Nil, Some(url)) + val defaultExtension = "jar" + val defaultType = "jar" + private[this] def extract(url: URL, default: String) = + { + val s = url.toString + val i = s.lastIndexOf('.') + if(i >= 0) + s.substring(i+1) + else + default + } +} + +object Credentials +{ + /** Add the provided credentials to Ivy's credentials cache.*/ + def add(realm: String, host: String, userName: String, passwd: String): Unit = + CredentialsStore.INSTANCE.addCredentials(realm, host, userName, passwd) + /** Load credentials from the given file into Ivy's credentials cache.*/ + def apply(file: String, log: Logger): Unit = apply(Path.fromFile(file), log) + /** Load credentials from the given file into Ivy's credentials 
cache.*/ + def apply(file: File, log: Logger): Unit = apply(Path.fromFile(file), log) + /** Load credentials from the given file into Ivy's credentials cache.*/ + def apply(path: Path, log: Logger) + { + val msg = + if(path.exists) + { + val properties = new scala.collection.mutable.HashMap[String, String] + def get(keys: List[String]) = keys.flatMap(properties.get).firstOption.toRight(keys.head + " not specified in credentials file: " + path) + + impl.MapUtilities.read(properties, path, log) orElse + { + List.separate( List(RealmKeys, HostKeys, UserKeys, PasswordKeys).map(get) ) match + { + case (Nil, List(realm, host, user, pass)) => add(realm, host, user, pass); None + case (errors, _) => Some(errors.mkString("\n")) + } + } + } + else + Some("Credentials file " + path + " does not exist") + msg.foreach(x => log.warn(x)) + } + private[this] val RealmKeys = List("realm") + private[this] val HostKeys = List("host", "hostname") + private[this] val UserKeys = List("user", "user.name", "username") + private[this] val PasswordKeys = List("password", "pwd", "pass", "passwd") +} \ No newline at end of file diff --git a/src/main/scala/sbt/ModuleUtilities.scala b/src/main/scala/sbt/ModuleUtilities.scala new file mode 100644 index 000000000..ab017c1b6 --- /dev/null +++ b/src/main/scala/sbt/ModuleUtilities.scala @@ -0,0 +1,14 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +object ModuleUtilities +{ + def getObject(className: String, loader: ClassLoader) = + { + val obj = Class.forName(className + "$", true, loader) + val singletonField = obj.getField("MODULE$") + singletonField.get(null) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/NameFilter.scala b/src/main/scala/sbt/NameFilter.scala new file mode 100644 index 000000000..3387806b6 --- /dev/null +++ b/src/main/scala/sbt/NameFilter.scala @@ -0,0 +1,72 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File +import 
java.util.regex.Pattern + +trait FileFilter extends java.io.FileFilter with NotNull +{ + def || (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) || filter.accept(file) ) + def && (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) && filter.accept(file) ) + def -- (filter: FileFilter): FileFilter = new SimpleFileFilter( file => accept(file) && !filter.accept(file) ) + def unary_- : FileFilter = new SimpleFileFilter( file => !accept(file) ) +} +trait NameFilter extends FileFilter with NotNull +{ + def accept(name: String): Boolean + final def accept(file: File): Boolean = accept(file.getName) + def | (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) || filter.accept(name) ) + def & (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) && filter.accept(name) ) + def - (filter: NameFilter): NameFilter = new SimpleFilter( name => accept(name) && !filter.accept(name) ) + override def unary_- : NameFilter = new SimpleFilter( name => !accept(name) ) +} +object HiddenFileFilter extends FileFilter { + def accept(file: File) = file.isHidden && file.getName != "." 
+} +object ExistsFileFilter extends FileFilter { + def accept(file: File) = file.exists +} +object DirectoryFilter extends FileFilter { + def accept(file: File) = file.isDirectory +} +class SimpleFileFilter(val acceptFunction: File => Boolean) extends FileFilter +{ + def accept(file: File) = acceptFunction(file) +} +class ExactFilter(val matchName: String) extends NameFilter +{ + def accept(name: String) = matchName == name +} +class SimpleFilter(val acceptFunction: String => Boolean) extends NameFilter +{ + def accept(name: String) = acceptFunction(name) +} +class PatternFilter(val pattern: Pattern) extends NameFilter +{ + def accept(name: String) = pattern.matcher(name).matches +} +object AllPassFilter extends NameFilter +{ + def accept(name: String) = true +} +object NothingFilter extends NameFilter +{ + def accept(name: String) = false +} + +object GlobFilter +{ + def apply(expression: String): NameFilter = + { + require(!expression.exists(java.lang.Character.isISOControl), "Control characters not allowed in filter expression.") + if(expression == "*") + AllPassFilter + else if(expression.indexOf('*') < 0) // includes case where expression is empty + new ExactFilter(expression) + else + new PatternFilter(Pattern.compile(expression.split("\\*", -1).map(quote).mkString(".*"))) + } + private def quote(s: String) = if(s.isEmpty) "" else Pattern.quote(s.replaceAll("\n", """\n""")) +} \ No newline at end of file diff --git a/src/main/scala/sbt/Pack.scala b/src/main/scala/sbt/Pack.scala new file mode 100644 index 000000000..ecfed74a2 --- /dev/null +++ b/src/main/scala/sbt/Pack.scala @@ -0,0 +1,87 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import java.io.{File, FileOutputStream} +import java.util.jar.{JarEntry, JarFile, JarOutputStream, Pack200} +import scala.collection.Map +import FileUtilities._ + +object Pack +{ + def pack(jarPath: Path, out: Path, log: Logger): Option[String] = pack(jarPath, out, defaultPackerOptions, log) + 
def pack(jarPath: Path, out: Path, options: Map[String, String], log: Logger): Option[String] = + { + val packer = Pack200.newPacker + val properties = new wrap.MutableMapWrapper(packer.properties) + properties ++= options + + OpenResource.jarFile(false).ioOption(jarPath.asFile, "applying pack200 compression to jar", log) { f => + writeStream(out.asFile, log) { stream => + packer.pack(f, stream) + None + } + } + } + def unpack(packedPath: Path, toJarPath: Path, log: Logger): Option[String] = + { + val unpacker = Pack200.newUnpacker + writeStream(toJarPath.asFile, log) { fileStream => + val jarOut = new JarOutputStream(fileStream) + Control.trapUnitAndFinally("Error unpacking '" + packedPath + "': ", log) + { unpacker.unpack(packedPath.asFile, jarOut); None } + { jarOut.close() } + } + } + def defaultPackerOptions: Map[String, String] = scala.collection.immutable.Map() +} + +import java.net.URL +/** This is somewhat of a mess and is not entirely correct. jarsigner doesn't work properly +* on scalaz and it is difficult to determine whether a jar is both signed and valid. 
*/ +object SignJar +{ + final class SignOption private[SignJar](val toList: List[String], val signOnly: Boolean) extends NotNull + { + override def toString = toList.mkString(" ") + } + def keyStore(url: URL) = new SignOption("-keystore" :: url.toExternalForm :: Nil, true) + def signedJar(p: Path) = new SignOption("-signedjar" :: p.asFile.getAbsolutePath :: Nil, true) + def verbose = new SignOption("-verbose" :: Nil, false) + def sigFile(name: String) = new SignOption("-sigfile" :: name :: Nil, true) + def storeType(t: String) = new SignOption("-storetype" :: t :: Nil, false) + def provider(p: String) = new SignOption("-provider" :: p :: Nil, false) + def providerName(p: String) = new SignOption("-providerName" :: p :: Nil, false) + def storePassword(p: String) = new SignOption("-storepass" :: p :: Nil, true) + def keyPassword(p: String) = new SignOption("-keypass" :: p :: Nil, true) + + private def VerifyOption = "-verify" + + /** Uses jarsigner to sign the given jar. */ + def sign(jarPath: Path, alias: String, options: Seq[SignOption], log: Logger): Option[String] = + { + require(!alias.trim.isEmpty, "Alias cannot be empty") + val arguments = options.toList.flatMap(_.toList) ::: jarPath.asFile.getAbsolutePath :: alias :: Nil + execute("Signed " + jarPath, "signing", arguments, log) + } + /** Uses jarsigner to verify the given jar.*/ + def verify(jarPath: Path, options: Seq[SignOption], log: Logger): Option[String] = + { + val arguments = options.filter(!_.signOnly).toList.flatMap(_.toList) ::: VerifyOption :: jarPath.asFile.getAbsolutePath :: Nil + execute("Verified " + jarPath, "verifying", arguments, log) + } + private def execute(successMessage: String, action: String, arguments: List[String], log: Logger): Option[String] = + { + val exitCode = Process(CommandName, arguments) ! 
log + if(exitCode == 0) + { + log.debug(successMessage) + None + } + else + Some("Error " + action + " jar (exit code was " + exitCode + ".)") + } + + private val CommandName = "jarsigner" +} \ No newline at end of file diff --git a/src/main/scala/sbt/ParallelRunner.scala b/src/main/scala/sbt/ParallelRunner.scala new file mode 100644 index 000000000..74ce7a8ee --- /dev/null +++ b/src/main/scala/sbt/ParallelRunner.scala @@ -0,0 +1,464 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +/** This file provides the parallel execution engine of sbt. It is a fairly general module, with pluggable Schedulers and Strategies. +* +* There are three main components to the engine: Distributors, Schedulers, and Strategies. +* +* A Scheduler provides work that is ready to execute. The main type of Scheduler in sbt is a scheduler +* of nodes in a directed, acyclic graph. This type of scheduler provides work when its +* dependencies have finished executing successfully. Another type of scheduler is a MultiScheduler, which draws work +* from sub-schedulers. +* +* A Strategy is used by a Scheduler to select the work to process from the work that is ready. It is notified as work +* becomes ready. It is requested to select work to process from the work that is ready. The main Strategy in sbt is the +* OrderedStrategy, which prioritizes work according to some ordering defined by its constructor. The primary ordering +* used in sbt is based on the longest length of the processing path that includes the node being ordered. +* +* A Distributor uses a Scheduler to obtain work up to the maximum work allowed to run at once. It runs each +* unit of work in its own Thread.
+**/ + +import java.util.concurrent.LinkedBlockingQueue +import scala.collection.{immutable, mutable} +import immutable.TreeSet + +/** Interface to the Distributor/Scheduler system for running tasks with dependencies described by a directed acyclic graph.*/ +object ParallelRunner +{ + /** Executes work for nodes in an acyclic directed graph with root node `node`. The name of a node is provided + * by the `name` function, the work to perform for a node by `action`, and the logger to use for a node by `log`. + * The maximum number of tasks to execute simultaneously is `maximumTasks`. */ + def run[D <: Dag[D]](node: D, name: D => String, action: D => Option[String], maximumTasks: Int, log: D => Logger): List[WorkFailure[D]] = + { + val info = DagInfo(node) + // Create a strategy that gives each node a uniform self cost and uses the maximum cost to execute it and the nodes that depend on it + // to determine which node to run. The self cost could be modified to include more information about a node, such as the size of input files + val strategy = defaultStrategy(info) + val jobScheduler = CompoundScheduler(new DagScheduler(info, strategy), strategy) + val distributor = new Distributor(jobScheduler, action, maximumTasks, log) + val result = distributor.run().toList + for( WorkFailure(work, message) <- result ) yield WorkFailure(work, "Error running " + name(work) + ": " + message) + } + def dagScheduler[D <: Dag[D]](node: D) = + { + val info = DagInfo(node) + new DagScheduler(info, defaultStrategy(info)) + } + private def defaultStrategy[D <: Dag[D]](info: DagInfo[D]) = MaxPathStrategy((d: D) => 1, info) +} +/** Requests work from `scheduler` and processes it using `doWork`. 
This class limits the amount of work processing at any given time +* to `workers`.*/ +final class Distributor[D](scheduler: Scheduler[D], doWork: D => Option[String], workers: Int, log: D => Logger) extends NotNull +{ + require(workers > 0) + final def run(): Iterable[WorkFailure[D]] = (new Run).run() + + private final class Run extends NotNull + { + private[this] val schedule = scheduler.run + /** The number of threads currently running. */ + private[this] var running = 0 + /** Pending notifications of completed work. */ + private[this] val complete = new java.util.concurrent.LinkedBlockingQueue[Done] + + private[Distributor] def run(): Iterable[WorkFailure[D]] = + { + next() + if(isIdle && !schedule.hasPending) // test if all work is complete + schedule.failures + else + { + waitForCompletedWork() // wait for some work to complete + run() // continue + } + } + // true if the maximum number of worker threads are currently running + private def atMaximum = running == workers + private def availableWorkers = workers - running + // true if no worker threads are currently running + private def isIdle = running == 0 + // process more work + private def next() + { + // if the maximum threads are being used, do nothing + // if all work is complete or the scheduler is waiting for current work to complete, do nothing + if(!atMaximum && schedule.hasPending) + { + val nextWork = schedule.next(availableWorkers) + val nextSize = nextWork.size + assume(nextSize <= availableWorkers, "Scheduler provided more work (" + nextSize + ") than allowed (" + availableWorkers + ")") + assume(nextSize > 0 || !isIdle, "Distributor idle and the scheduler indicated work pending, but provided no work.") + nextWork.foreach(process) + } + } + // wait on the blocking queue `complete` until some work finishes and notify the scheduler + private def waitForCompletedWork() + { + require(running > 0) + val done = complete.take() + running -= 1 + schedule.complete(done.data, done.result) + } + private 
def process(data: D) + { + require(running + 1 <= workers) + running += 1 + new Worker(data).start() + } + private class Worker(data: D) extends Thread with NotNull + { + override def run() + { + val result = Control.trapUnit("", log(data))(doWork(data)) + complete.put( new Done(result, data) ) + } + } + } + private final class Done(val result: Option[String], val data: D) extends NotNull +} +final case class WorkFailure[D](work: D, message: String) extends NotNull +{ + override def toString = message +} +/** Schedules work of type D. A Scheduler determines what work is ready to be processed. +* A Scheduler is itself immutable. It creates a mutable object for each scheduler run.*/ +trait Scheduler[D] extends NotNull +{ + /** Starts a new run. The returned object is a new Run, representing a single scheduler run. All state for the run + * is encapsulated in this object.*/ + def run: Run + trait Run extends NotNull + { + /** Notifies this scheduler that work has completed with the given result (Some with the error message or None if the work succeeded).*/ + def complete(d: D, result: Option[String]): Unit + /** Returns true if there is any more work to be done, although remaining work can be blocked + * waiting for currently running work to complete.*/ + def hasPending: Boolean + /**Returns true if this scheduler has no more work to be done, ever.*/ + def isComplete: Boolean + /** Returns up to 'max' units of work. `max` is always positive. The returned sequence cannot be empty if there is + * no work currently being processed.*/ + def next(max: Int): Seq[D] + /** A list of failures that occurred to this point, as reported to the `complete` method. */ + def failures: Iterable[WorkFailure[D]] + } +} +/** A Strategy selects the work to process from work that is ready to be processed.*/ +private trait ScheduleStrategy[D] extends NotNull +{ + /** Starts a new run. The returned object is a new Run, representing a single strategy run. 
All state for the run + * is handled through this object and is encapsulated in this object.*/ + def run: Run + trait Run extends NotNull + { + /** Adds the given work to the list of work that is ready to run.*/ + def workReady(dep: D): Unit + /** Returns true if there is work ready to be run. */ + def hasReady: Boolean + /** Provides up to `max` units of work. `max` is always positive and this method is not called + * if hasReady is false. The returned list cannot be empty if there is work ready to be run.*/ + def next(max: Int): List[D] + } +} + +/** A scheduler for nodes of a directed-acyclic graph. It requires the root of the graph +* and a strategy to select which available nodes to run on limited resources.*/ +private[sbt] final class DagScheduler[D <: Dag[D]](info: DagInfo[D], strategy: ScheduleStrategy[D]) extends Scheduler[D] +{ + def run: Run = new Run + { + val infoRun = info.run + val strategyRun = strategy.run + + // find nodes that are ready to be run (no dependencies) + { + val startReady = for( (key, value) <- infoRun.remainingDepsRun if(value.isEmpty)) yield key + infoRun.remainingDepsRun --= startReady + startReady.foreach(strategyRun.workReady) + } + + val failures = new mutable.ListBuffer[WorkFailure[D]] + def next(max: Int) = strategyRun.next(max) + def complete(work: D, result: Option[String]) + { + result match + { + case None => infoRun.complete(work, strategyRun.workReady) + case Some(errorMessage) => + infoRun.clear(work) + failures += WorkFailure(work, errorMessage) + } + } + def isComplete = !strategyRun.hasReady && infoRun.reverseDepsRun.isEmpty + // the strategy might not have any work ready if the remaining work needs currently executing work to finish first + def hasPending = strategyRun.hasReady || !infoRun.remainingDepsRun.isEmpty + } +} +private object MaxPathStrategy +{ + def apply[D <: Dag[D]](selfCost: D => Int, info: DagInfo[D]): ScheduleStrategy[D] = + { + val cost = // compute the cost of the longest execution path ending at
each node + { + val cost = new mutable.HashMap[D, Int] + def computeCost(work: D): Int = info.reverseDeps.getOrElse(work, immutable.Set.empty[D]).foldLeft(0)(_ max getCost(_)) + selfCost(work) + def getCost(work: D): Int = cost.getOrElseUpdate(work, computeCost(work)) + info.remainingDeps.keys.foreach(getCost) + wrap.Wrappers.readOnly(cost) + } + // create a function to compare units of work. This is not as simple as cost(a) compare cost(b) because it cannot return 0 for + // unequal nodes (at least for the Ordered comparison) + + // 2.8.0 uses Ordering + implicit val compareOrdering: Ordering[D] = + new Ordering[D] + { + def compare(a: D, b: D) = + { + val base = cost(a) compare cost(b) + if(base == 0) + a.hashCode compare b.hashCode // this is required because TreeSet interprets 0 as equal + else + base + } + } + // 2.7.x uses an implicit view to Ordered + implicit val compare = + (a: D) => new Ordered[D] { + def compare(b: D) = compareOrdering.compare(a, b) + } + new OrderedStrategy(new TreeSet()) + } +} +/** A strategy that adds work to a tree and selects the last key as the next work to be done. */ +private class OrderedStrategy[D](ready: TreeSet[D]) extends ScheduleStrategy[D] +{ + def run = new Run + { + private[this] var readyRun = ready + def next(max: Int): List[D] = nextImpl(max, Nil) + private[this] def nextImpl(remaining: Int, accumulated: List[D]): List[D] = + { + if(remaining <= 0 || readyRun.isEmpty) + accumulated + else + { + val next = readyRun.lastKey + readyRun -= next + nextImpl(remaining - 1, next :: accumulated) + } + } + def workReady(dep: D) { readyRun += dep } + def hasReady = !readyRun.isEmpty + } +} +/** A class that represents state for a DagScheduler and that MaxPathStrategy uses to initialize an OrderedStrategy. 
*/ +private final class DagInfo[D <: Dag[D]](val remainingDeps: immutable.Map[D, immutable.Set[D]], + val reverseDeps: immutable.Map[D, immutable.Set[D]]) extends NotNull +{ + def run = new Run + final class Run extends NotNull + { + val remainingDepsRun = DagInfo.mutableMap(remainingDeps) + val reverseDepsRun = DagInfo.mutableMap(reverseDeps) + /** Called when work does not complete successfully and so all work that (transitively) depends on the work + * must be removed from the maps. */ + def clear(work: D) + { + remainingDepsRun -= work + foreachReverseDep(work)(clear) + } + /** Called when work completes properly. `ready` is invoked for each unit of + * work that is now ready to go (because it was only waiting for `work` to complete).*/ + def complete(work: D, ready: D => Unit) + { + def completed(dependsOnCompleted: D) + { + for(remainingDependencies <- remainingDepsRun.get(dependsOnCompleted)) + { + remainingDependencies -= work + if(remainingDependencies.isEmpty) + { + remainingDepsRun -= dependsOnCompleted + ready(dependsOnCompleted) + } + } + } + foreachReverseDep(work)(completed) + } + private def foreachReverseDep(work: D)(f: D => Unit) { reverseDepsRun.removeKey(work).foreach(_.foreach(f)) } + } +} +/** Constructs forward and reverse dependency maps for the given Dag root node.
*/ +private object DagInfo +{ + /** Constructs the reverse dependency map from the given Dag and + * puts the forward dependencies into a map */ + def apply[D <: Dag[D]](root: D): DagInfo[D] = + { + val remainingDeps = new mutable.HashMap[D, immutable.Set[D]] + val reverseDeps = new mutable.HashMap[D, mutable.Set[D]] + def visitIfUnvisited(node: D): Unit = remainingDeps.getOrElseUpdate(node, processDependencies(node)) + def processDependencies(node: D): Set[D] = + { + val workDependencies = node.dependencies + workDependencies.foreach(visitIfUnvisited) + for(dep <- workDependencies) + reverseDeps.getOrElseUpdate(dep, new mutable.HashSet[D]) += node + immutable.HashSet(workDependencies.toSeq: _*) + } + visitIfUnvisited(root) + new DagInfo(immutable.HashMap(remainingDeps.toSeq : _*), immute(reverseDeps) ) + } + /** Convert a mutable Map with mutable Sets for values to an immutable Map with immutable Sets for values. */ + private def immute[D](map: mutable.Map[D, mutable.Set[D]]): immutable.Map[D, immutable.Set[D]] = + { + val immutedSets = map.map { case (key, value) =>(key, immutable.HashSet(value.toSeq : _*)) } + immutable.HashMap(immutedSets.toSeq :_*) + } + /** Convert an immutable Map with immutable Sets for values to a mutable Map with mutable Sets for values. */ + private def mutableMap[D](map: immutable.Map[D, immutable.Set[D]]): mutable.Map[D, mutable.Set[D]] = + { + val mutableSets = map.map { case (key, value) =>(key, mutable.HashSet(value.toSeq : _*)) } + mutable.HashMap(mutableSets.toSeq :_*) + } +} +/** A scheduler that can get work from sub-schedulers. 
The `schedulers` argument to the constructor +* is a sequence of the initial schedulers and the key to provide to a client that uses the 'detailedComplete' +* method when the scheduler completes its work.*/ +private final class MultiScheduler[D, T](schedulers: (Scheduler[D], T)*) extends Scheduler[D] +{ + /** Returns a Run instance that represents a scheduler run.*/ + def run = new MultiRun + final class MultiRun extends Run + { + val owners = new mutable.HashMap[D, Scheduler[D]#Run] + val failures = new mutable.ListBuffer[WorkFailure[D]] + val schedules = mutable.HashMap[Scheduler[D]#Run, T](schedulers.map { case (scheduler, completeKey) => (scheduler.run, completeKey)} : _*) + def +=(schedule: Scheduler[D]#Run, completeKey: T) { schedules(schedule) = completeKey } + + def isComplete = schedules.keys.forall(_.isComplete) + def hasPending = schedules.keys.exists(_.hasPending) + def next(max: Int) = nextImpl(max, schedules.keys.toList, Nil) + + private def nextImpl(max: Int, remaining: List[Scheduler[D]#Run], accumulatedWork: List[D]): Seq[D] = + { + if(max == 0 || remaining.isEmpty) + accumulatedWork + else + { + val currentSchedule = remaining.head + if(currentSchedule.hasPending) + { + val newWork = currentSchedule.next(max).toList + newWork.foreach(work => owners.put(work, currentSchedule)) + nextImpl(max - newWork.size, remaining.tail, newWork ::: accumulatedWork) + } + else + nextImpl(max, remaining.tail, accumulatedWork) + } + } + + def complete(work: D, result: Option[String]) { detailedComplete(work, result) } + def detailedComplete(work: D, result: Option[String]) = + { + def complete(forOwner: Scheduler[D]#Run) = + { + forOwner.complete(work, result) + if(forOwner.isComplete) + { + failures ++= forOwner.failures + Some(forOwner, schedules.removeKey(forOwner).get) + } + else + None + } + owners.removeKey(work).flatMap(complete) + } + } +} +/** This scheduler allows a unit of work to provide nested work. 
+* +* When a unit of work that implements CompoundWork is returned for processing by `multi`, this scheduler will request the work's +* nested scheduler that represents the nested work to be done. The new scheduler will be added to `multi`. When the new scheduler +* is finished providing work, a final scheduler is run.*/ +private final class CompoundScheduler[D](multi: MultiScheduler[D, Option[FinalWork[D]]], finalWorkStrategy: ScheduleStrategy[D]) extends Scheduler[D] +{ + def run: Run = new Run + { + val multiRun = multi.run + val strategyRun = finalWorkStrategy.run + + def isComplete = multiRun.isComplete && !strategyRun.hasReady + def hasPending = strategyRun.hasReady || multiRun.hasPending || multiRun.schedules.values.exists(_.isDefined) + def complete(work: D, result: Option[String]) = + { + for( (scheduler, Some(finalWorkTodo)) <- multiRun.detailedComplete(work, result) ) + { + multiRun += (finalWorkTodo.doFinally.run, None) + if(scheduler.failures.isEmpty) + strategyRun workReady finalWorkTodo.compound + else + multiRun.complete(finalWorkTodo.compound, Some("One or more subtasks failed")) + } + } + def failures = multiRun.failures + def next(max: Int) = nextImpl(max, Nil) + private def nextImpl(max: Int, processedNextWork: List[D]): Seq[D] = + { + if(max > 0) + { + if(strategyRun.hasReady) + { + val newWork = strategyRun.next(max) + nextImpl(max - newWork.size, newWork ::: processedNextWork) + } + else if(multiRun.hasPending) + { + val multiWork = multiRun.next(max) + if(multiWork.isEmpty) + processedNextWork + else + { + val expandedWork = (processedNextWork /: multiWork)(expand) + val remaining = max - (expandedWork.size - processedNextWork.size) + nextImpl(remaining, expandedWork) + } + } + else + processedNextWork + } + else + processedNextWork + } + private def expand(accumulate: List[D], work: D): List[D] = + { + work match + { + case c: CompoundWork[D] => + val subWork = c.work + addFinal(subWork.scheduler, new FinalWork(work, subWork.doFinally)) + 
accumulate + case _ => work :: accumulate + } + } + private def addFinal(schedule: Scheduler[D], work: FinalWork[D]) { multiRun += (schedule.run, Some(work)) } + } +} +private object CompoundScheduler +{ + def apply[D](scheduler: Scheduler[D], strategy: ScheduleStrategy[D]) : Scheduler[D] = + new CompoundScheduler(new MultiScheduler[D, Option[FinalWork[D]]]( (scheduler, None) ), strategy) +} +private final class FinalWork[D](val compound: D, val doFinally: Scheduler[D]) extends NotNull +/** This represents nested work. The work provided by `scheduler` is processed first. The work provided by `doFinally` is processed +* after `scheduler` completes regardless of the success of `scheduler`.*/ +final class SubWork[D](val scheduler: Scheduler[D], val doFinally: Scheduler[D]) extends NotNull +/** Work that implements this interface provides nested work to be done before this work is processed.*/ +trait CompoundWork[D] extends NotNull +{ + def work: SubWork[D] +} \ No newline at end of file diff --git a/src/main/scala/sbt/Path.scala b/src/main/scala/sbt/Path.scala new file mode 100644 index 000000000..7f746f4f4 --- /dev/null +++ b/src/main/scala/sbt/Path.scala @@ -0,0 +1,336 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import Path._ +import FileUtilities.wrapNull +import java.io.File +import scala.collection.mutable.{Set, HashSet} + +/** A Path represents a file in a project. +* @see sbt.PathFinder*/ +sealed abstract class Path extends PathFinder with NotNull +{ + /** Creates a base directory for this path. This is used by copy and zip functions + * to determine the relative path that should be used in the destination. For example, + * if the following path is specified to be copied to directory 'd', + * + * ((a / b) ##) / x / y + * + * the copied path would be + * + * d / x / y + * + * The relativePath method is used to return the relative path to the base directory. 
*/ + override def ## : Path = new BaseDirectory(this) + private[sbt] def addTo(pathSet: Set[Path]) + { + if(asFile.exists) + pathSet += this + } + override def / (component: String): Path = if(component == ".") this else new RelativePath(this, component) + /** True if and only if the file represented by this path exists.*/ + def exists = asFile.exists + /** True if and only if the file represented by this path is a directory.*/ + def isDirectory = asFile.isDirectory + /** The last modified time of the file represented by this path.*/ + def lastModified = asFile.lastModified + /* True if and only if file that this path represents exists and the file represented by the path 'p' + * does not exist or was modified before the file for this path.*/ + def newerThan(p: Path): Boolean = exists && (!p.exists || lastModified > p.lastModified) + /* True if and only if file that this path represents does not exist or the file represented by the path 'p' + * exists and was modified after the file for this path.*/ + def olderThan(p: Path): Boolean = p newerThan this + /** The file represented by this path.*/ + def asFile: File + /** The file represented by this path converted to a URL.*/ + def asURL = asFile.toURI.toURL + /** The string representation of this path relative to the base directory. 
The project directory is the + * default base directory if one is not specified explicitly using the ## operator.*/ + lazy val relativePath: String = relativePathString(sep.toString) + def relativePathString(separator: String): String + final def projectRelativePath: String = projectRelativePathString(sep.toString) + def projectRelativePathString(separator: String): String + def absolutePath: String = asFile.getAbsolutePath + private[sbt] def prependTo(s: String): String + + /** Equality of Paths is defined in terms of the underlying File.*/ + override final def equals(other: Any) = + other match + { + case op: Path => asFile == op.asFile + case _ => false + } + /** The hash code of a Path is that of the underlying File.*/ + override final def hashCode = asFile.hashCode +} +private final class BaseDirectory(private[sbt] val path: Path) extends Path +{ + override def ## : Path = this + override def toString = path.toString + def asFile = path.asFile + def relativePathString(separator: String) = "" + def projectRelativePathString(separator: String) = path.projectRelativePathString(separator) + private[sbt] def prependTo(s: String) = "." + sep + s +} +private[sbt] final class FilePath(file: File) extends Path +{ + lazy val asFile = absolute(file) + override def toString = absolutePath + def relativePathString(separator: String) = asFile.getName + def projectRelativePathString(separator: String) = relativePathString(separator) + private[sbt] def prependTo(s: String) = absolutePath + sep + s +} +private[sbt] final class ProjectDirectory(file: File) extends Path +{ + lazy val asFile = absolute(file) + override def toString = "." + def relativePathString(separator: String) = "" + def projectRelativePathString(separator: String) = "" + private[sbt] def prependTo(s: String) = "." 
+ sep + s +} +private[sbt] final class RelativePath(val parentPath: Path, val component: String) extends Path +{ + checkComponent(component) + override def toString = parentPath prependTo component + lazy val asFile = new File(parentPath.asFile, component) + private[sbt] def prependTo(s: String) = parentPath prependTo (component + sep + s) + def relativePathString(separator: String) = relative(parentPath.relativePathString(separator), separator) + def projectRelativePathString(separator: String) = relative(parentPath.projectRelativePathString(separator), separator) + private def relative(parentRelative: String, separator: String) = + { + if(parentRelative.isEmpty) + component + else + parentRelative + separator + component + } +} +object Path +{ + import java.io.File + import File.pathSeparator + + def fileProperty(name: String) = Path.fromFile(System.getProperty(name)) + def userHome = fileProperty("user.home") + + def absolute(file: File) = new File(file.toURI.normalize).getAbsoluteFile + /** Constructs a String representation of Paths. The absolute path String of each Path is + * separated by the platform's path separator.*/ + def makeString(paths: Iterable[Path]): String = paths.map(_.absolutePath).mkString(pathSeparator) + + /** Constructs a String representation of Paths. 
The relative path String of each Path is + * separated by the platform's path separator.*/ + def makeRelativeString(paths: Iterable[Path]): String = makeRelativeString(paths, sep.toString) + def makeRelativeString(paths: Iterable[Path], separator: String): String = paths.map(_.relativePathString(separator)).mkString(pathSeparator) + + def splitString(projectPath: Path, value: String): Iterable[Path] = + { + for(pathString <- FileUtilities.pathSplit(value) if pathString.length > 0) yield + Path.fromString(projectPath, pathString) + } + + /** A PathFinder that always produces the empty set of Paths.*/ + def emptyPathFinder = + new PathFinder + { + private[sbt] def addTo(pathSet: Set[Path]) {} + } + /** A PathFinder that selects the paths provided by the paths argument, which is + * reevaluated on each call to the PathFinder's get method. */ + def lazyPathFinder(paths: => Iterable[Path]): PathFinder = + new PathFinder + { + private[sbt] def addTo(pathSet: Set[Path]) = pathSet ++= paths + } + + /** The separator character of the platform.*/ + val sep = java.io.File.separatorChar + + /** Checks the string to verify that it is a legal path component. The string must be non-empty, + * not a slash, and not '.' or '..'.*/ + def checkComponent(c: String): String = + { + require(c.length > 0, "Path component must not be empty") + require(c.indexOf('/') == -1, "Path component '" + c + "' must not have forward slashes in it") + require(c.indexOf('\\') == -1, "Path component '" + c + "' must not have backslashes in it") + require(c != "..", "Path component cannot be '..'") + require(c != ".", "Path component cannot be '.'") + c + } + /** Converts a path string relative to the given base path to a Path. 
*/ + def fromString(basePath: Path, value: String): Path = + { + if(value.isEmpty) + basePath + else + { + val components = value.split("""[/\\]""") + (basePath /: components)( (path, component) => path / component ) + } + } + def baseAncestor(path: Path): Option[Path] = + path match + { + case pd: ProjectDirectory => None + case fp: FilePath => None + case rp: RelativePath => baseAncestor(rp.parentPath) + case b: BaseDirectory => Some(b.path) + } + + def relativize(basePath: Path, path: Path): Option[Path] = relativize(basePath, path.asFile) + def relativize(basePath: Path, file: File): Option[Path] = + basePathString(basePath) flatMap { baseString => relativize(basePath, baseString, file) } + def relativize(basePath: Path, basePathString: String, file: File): Option[Path] = + { + val pathString = file.getAbsolutePath + if(pathString.startsWith(basePathString)) + Some(fromString(basePath, pathString.substring(basePathString.length))) + else + None + } + private[sbt] def relativize(baseFile: File, file: File): Option[String] = + { + val pathString = file.getAbsolutePath + baseFileString(baseFile) flatMap + { + baseString => + { + if(pathString.startsWith(baseString)) + Some(pathString.substring(baseString.length)) + else + None + } + } + } + private[sbt] def basePathString(basePath: Path): Option[String] = baseFileString(basePath.asFile) + private def baseFileString(baseFile: File): Option[String] = + { + if(baseFile.isDirectory) + { + val cp = baseFile.getAbsolutePath + assert(cp.length > 0) + if(cp.charAt(cp.length - 1) == File.separatorChar) + Some(cp) + else + Some(cp + File.separatorChar) + } + else + None + } + def fromFile(file: String): Path = fromFile(new File(file)) + def fromFile(file: File): Path = new FilePath(file) +} + +/** A path finder constructs a set of paths. The set is evaluated by a call to the get +* method. 
The set will be different for different calls to get if the underlying filesystem +* has changed.*/ +sealed abstract class PathFinder extends NotNull +{ + /** The union of the paths found by this PathFinder with the paths found by 'paths'.*/ + def +++(paths: PathFinder): PathFinder = new Paths(this, paths) + /** Excludes all paths from excludePaths from the paths selected by this PathFinder.*/ + def ---(excludePaths: PathFinder): PathFinder = new ExcludePaths(this, excludePaths) + /** Constructs a new finder that selects all paths with a name that matches filter and are + * descendents of paths selected by this finder.*/ + def **(filter: FileFilter): PathFinder = new DescendentOrSelfPathFinder(this, filter) + /** Constructs a new finder that selects all paths with a name that matches filter and are + * immediate children of paths selected by this finder.*/ + def *(filter: FileFilter): PathFinder = new ChildPathFinder(this, filter) + /** Constructs a new finder that selects all paths with name literal that are immediate children + * of paths selected by this finder.*/ + def / (literal: String): PathFinder = new ChildPathFinder(this, new ExactFilter(literal)) + /** Constructs a new finder that selects all paths with name literal that are immediate children + * of paths selected by this finder.*/ + final def \ (literal: String): PathFinder = this / literal + + /** Makes the paths selected by this finder into base directories. + * @see Path.## + */ + def ## : PathFinder = new BasePathFinder(this) + + /** Selects all descendent paths with a name that matches include and do not have an intermediate + * path with a name that matches intermediateExclude. Typical usage is: + * + * descendentsExcept("*.jar", ".svn")*/ + def descendentsExcept(include: FileFilter, intermediateExclude: FileFilter): PathFinder = + (this ** include) --- (this ** intermediateExclude ** include) + + /** Evaluates this finder. 
The set returned by this method will reflect the underlying filesystem at the + * time of calling. If the filesystem changes, two calls to this method might be different.*/ + final def get: scala.collection.Set[Path] = + { + val pathSet = new HashSet[Path] + addTo(pathSet) + wrap.Wrappers.readOnly(pathSet) + } + private[sbt] def addTo(pathSet: Set[Path]) +} +private class BasePathFinder(base: PathFinder) extends PathFinder +{ + private[sbt] def addTo(pathSet: Set[Path]) + { + for(path <- base.get) + pathSet += (path ##) + } +} +private abstract class FilterPath extends PathFinder with FileFilter +{ + def parent: PathFinder + def filter: FileFilter + final def accept(file: File) = filter.accept(file) + + protected def handlePath(path: Path, pathSet: Set[Path]) + { + for(matchedFile <- wrapNull(path.asFile.listFiles(this))) + pathSet += path / matchedFile.getName + } +} +private class DescendentOrSelfPathFinder(val parent: PathFinder, val filter: FileFilter) extends FilterPath +{ + private[sbt] def addTo(pathSet: Set[Path]) + { + for(path <- parent.get) + { + if(accept(path.asFile)) + pathSet += path + handlePathDescendent(path, pathSet) + } + } + private def handlePathDescendent(path: Path, pathSet: Set[Path]) + { + handlePath(path, pathSet) + for(childDirectory <- wrapNull(path.asFile.listFiles(DirectoryFilter))) + handlePathDescendent(path / childDirectory.getName, pathSet) + } +} +private class ChildPathFinder(val parent: PathFinder, val filter: FileFilter) extends FilterPath +{ + private[sbt] def addTo(pathSet: Set[Path]) + { + for(path <- parent.get) + handlePath(path, pathSet) + } +} +private class Paths(a: PathFinder, b: PathFinder) extends PathFinder +{ + private[sbt] def addTo(pathSet: Set[Path]) + { + a.addTo(pathSet) + b.addTo(pathSet) + } +} +private class ExcludePaths(include: PathFinder, exclude: PathFinder) extends PathFinder +{ + private[sbt] def addTo(pathSet: Set[Path]) + { + val includeSet = new HashSet[Path] + include.addTo(includeSet) + + val 
excludeSet = new HashSet[Path] + exclude.addTo(excludeSet) + + includeSet --= excludeSet + pathSet ++= includeSet + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Process.scala b/src/main/scala/sbt/Process.scala new file mode 100644 index 000000000..702cba554 --- /dev/null +++ b/src/main/scala/sbt/Process.scala @@ -0,0 +1,89 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder} +import java.io.{Closeable, File, IOException} +import java.io.{BufferedReader, InputStream, InputStreamReader, OutputStream, PipedInputStream, PipedOutputStream} +import java.net.URL + +/** Methods for constructing simple commands that can then be combined. */ +object Process +{ + implicit def apply(command: String): ProcessBuilder = apply(command.split("""\s+""")) // TODO: use CommandParser + implicit def apply(command: Seq[String]): ProcessBuilder = apply(new JProcessBuilder(command.toArray : _*)) + def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(new JProcessBuilder((command :: arguments.toList).toArray : _*)) + implicit def apply(builder: JProcessBuilder): ProcessBuilder = new SimpleProcessBuilder(builder) + implicit def apply(file: File): FilePartialBuilder = new FileBuilder(file) + implicit def apply(url: URL): URLPartialBuilder = new URLBuilder(url) +} + +trait URLPartialBuilder extends NotNull +{ + def #>(b: ProcessBuilder): ProcessBuilder + def #>>(b: File): ProcessBuilder + def #>(b: File): ProcessBuilder +} +trait FilePartialBuilder extends NotNull +{ + def #>(b: ProcessBuilder): ProcessBuilder + def #<(b: ProcessBuilder): ProcessBuilder + def #<(url: URL): ProcessBuilder + def #>>(b: File): ProcessBuilder + def #>(b: File): ProcessBuilder + def #<(file: File): ProcessBuilder + def #<<(file: File): ProcessBuilder +} + +/** Represents a process that is running or has finished running. 
+* It may be a compound process with several underlying native processes (such as 'a #&& b`).*/ +trait Process extends NotNull +{ + /** Blocks until this process exits and returns the exit code.*/ + def exitValue(): Int + /** Destroys this process. */ + def destroy(): Unit +} +/** Represents a runnable process. */ +trait ProcessBuilder extends NotNull +{ + /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the console.*/ + def ! : Int + /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the given Logger.*/ + def !(log: Logger): Int + /** Starts the process represented by this builder. Standard output and error are sent to the console.*/ + def run(): Process + /** Starts the process represented by this builder. Standard output and error are sent to the given Logger.*/ + def run(log: Logger): Process + /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/ + def run(io: ProcessIO): Process + + /** Constructs a command that runs this command first and then `other` if this command succeeds.*/ + def #&& (other: ProcessBuilder): ProcessBuilder + /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/ + def #|| (other: ProcessBuilder): ProcessBuilder + /** Constructs a command that will run this command and pipes the output to `other`. `other` must be a simple command.*/ + def #| (other: ProcessBuilder): ProcessBuilder + /** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/ + def ## (other: ProcessBuilder): ProcessBuilder + /** Reads the given file into the input stream of this process. */ + def #< (f: File): ProcessBuilder + /** Reads the given URL into the input stream of this process. 
*/ + def #< (f: URL): ProcessBuilder + /** Writes the output stream of this process to the given file. */ + def #> (f: File): ProcessBuilder + /** Appends the output stream of this process to the given file. */ + def #>> (f: File): ProcessBuilder + + def canPipeTo: Boolean +} +/** Each method will be called in a separate thread.*/ +final class ProcessIO(val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, val processError: InputStream => Unit) extends NotNull +{ + def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError) + def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process) + def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError) +} \ No newline at end of file diff --git a/src/main/scala/sbt/Project.scala b/src/main/scala/sbt/Project.scala new file mode 100644 index 000000000..8eaf126ee --- /dev/null +++ b/src/main/scala/sbt/Project.scala @@ -0,0 +1,462 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah, David MacIver + */ +package sbt + +import java.io.File +import scala.collection._ +import FileUtilities._ +import Project._ + +trait Project extends TaskManager with Dag[Project] with BasicEnvironment +{ + /** The logger for this project definition. */ + final val log: Logger = logImpl + protected def logImpl: Logger = new BufferedLogger(new ConsoleLogger) + + trait ActionOption extends NotNull + + /** Basic project information. */ + def info: ProjectInfo + /** The project name. */ + final def name: String = projectName.value + /** The project version. */ + final def version: Version = projectVersion.value + /** The project organization. 
*/ + final def organization: String = projectOrganization.value + /** True if the project should cater to a quick throwaway project setup.*/ + def scratch = projectScratch.value + + final type ManagerType = Project + final type ManagedTask = Project#Task + /** The tasks declared on this project. */ + def tasks: Map[String, Task] + /** The task methods declared on this project */ + def methods: Map[String, MethodTask] + /** The names of all available tasks that may be called through `act`. These include + * the names of the Tasks in `tasks` and those of all dependencies.*/ + def taskNames: Iterable[String] = deepTasks.keys.toList + /** The names of all available method tasks that may be called through `call`. These + * only include the names of the MethodTasks in `methods` and not those of dependencies.*/ + def methodNames: Iterable[String] = methods.keys.toList + /** A description of all available method tasks in this project, but not of dependencies. */ + def methodList: String = descriptionList(methods) + /** A description of all available tasks in this project and all dependencies. If there + * are different tasks with the same name, only one will be included. */ + def taskList: String = descriptionList(deepTasks) + + final def taskName(task: Task) = tasks.find( _._2 eq task ).map(_._1).getOrElse(UnnamedName) + /** A description of all available tasks in this project and all dependencies and all + * available method tasks in this project, but not of dependencies. If there + * are different tasks or methods with the same name, only one will be included. */ + def taskAndMethodList: String = descriptionList(tasksAndMethods) + /** The actions and methods declared on this project. 
*/ + final def tasksAndMethods: Map[String, Described] = + immutable.TreeMap.empty[String, Described] ++ methods ++ tasks + private def descriptionList(described: Map[String, Described]): String = + { + val buffer = new StringBuilder + for((name, d) <- described) + buffer.append("\t" + name + d.description.map(x => ": " + x).getOrElse("") + "\n") + buffer.toString + } + /** Combines the method task maps of this project and all dependencies.*/ + private[sbt] def deepMethods: Map[String, Project#MethodTask] = deep(_.methods) + /** Combines the task maps of this project and all dependencies.*/ + private[sbt] def deepTasks: Map[String, Project#Task] = deep(_.tasks) + private def deep[T](p: Project => Map[String, T]): Map[String, T] = + { + var tasks: immutable.SortedMap[String,T] = new immutable.TreeMap[String, T] + for(dependentProject <- topologicalSort) + tasks ++= p(dependentProject).elements + tasks + } + /** A map of names to projects for all subprojects of this project. These are typically explicitly + * specified for the project and are different from those specified in the project constructor. 
The + * main use within sbt is in ParentProject.*/ + def subProjects: Map[String, Project] = immutable.Map.empty + /** The name of this project and the names of all subprojects/dependencies, transitively.*/ + def projectNames: Iterable[String] = + { + val names = new mutable.HashSet[String] + names ++= subProjects.keys + for(dependentProject <- topologicalSort) + names ++= dependentProject.tasks.keys + names.toList + } + + def call(name: String, parameters: Array[String]): Option[String] = + { + methods.get(name) match + { + case Some(method) =>run(method(parameters), name) + case None => Some("Method '" + name + "' does not exist.") + } + } + private def run(task: Project#Task, taskName: String): Option[String] = + impl.RunTask(task, taskName, parallelExecution) match + { + case Nil => None + case x => Some(Set(x: _*).mkString("\n")) + } + + /** Executes the task with the given name. This involves executing the task for all + * project dependencies (transitive) and then for this project. Not every dependency + * must define a task with the given name. 
If this project and all dependencies + * do not define a task with the given name, an error is generated indicating this.*/ + def act(name: String): Option[String] = + { + val ordered = topologicalSort + val definedTasks = ordered.flatMap(_.tasks.get(name).toList) + def virtualTask(name: String): Task = new Task(None, definedTasks.filter(!_.interactive), false, None) + + if(definedTasks.isEmpty) + Some("Action '" + name + "' does not exist.") + else + { + tasks.get(name) match + { + case None => + val virtual = virtualTask(name) + if(virtual.dependencies.size == definedTasks.size) + run(virtual, name) + else + { + Some("Cannot run interactive action '" + name + + "' defined on multiple subprojects (change to the desired project with 'project ').") + } + case Some(task) => run(task, name) + } + } + } + + /** Logs the list of projects at the debug level.*/ + private def showBuildOrder(order: Iterable[Project]) + { + log.debug("Project build order:") + order.foreach(x => log.debug(" " + x.name) ) + log.debug("") + } + + /** Converts a String to a path relative to the project directory of this project. */ + implicit def path(component: String): Path = info.projectPath / component + /** Converts a String to a simple name filter. * has the special meaning: zero or more of any character */ + implicit def filter(simplePattern: String): NameFilter = GlobFilter(simplePattern) + + /** Loads the project at the given path and declares the project to have the given + * dependencies. This method will configure the project according to the + * project/ directory in the directory denoted by path.*/ + def project(path: Path, deps: Project*): Project = getProject(Project.loadProject(path, deps, Some(this), log), path) + + /** Loads the project at the given path using the given name and inheriting this project's version. + * The builder class is the default builder class, sbt.DefaultProject. The loaded project is declared + * to have the given dependencies. 
Any project/build/ directory for the project is ignored.*/ + def project(path: Path, name: String, deps: Project*): Project = project(path, name, Project.DefaultBuilderClass, deps: _*) + + /** Loads the project at the given path using the given name and inheriting its version from this project. + * The Project implementation used is given by builderClass. The dependencies are declared to be + * deps. Any project/build/ directory for the project is ignored.*/ + def project[P <: Project](path: Path, name: String, builderClass: Class[P], deps: Project*): P = + { + require(builderClass != this.getClass, "Cannot recursively construct projects of same type: " + builderClass.getName) + project(path, name, info => Project.constructProject(info, builderClass), deps: _*) + } + /** Loads the project at the given path using the given name and inheriting its version from this project. + * The construct function is used to obtain the Project instance. Any project/build/ directory for the project + * is ignored. The project is declared to have the dependencies given by deps.*/ + def project[P <: Project](path: Path, name: String, construct: ProjectInfo => P, deps: Project*): P = + initialize(construct(ProjectInfo(path.asFile, deps, Some(this))), Some(new SetupInfo(name, None, None, false)), log) + + /** Initializes the project directories when a user has requested that sbt create a new project.*/ + def initializeDirectories() {} + /** True if projects should be run in parallel, false if they should run sequentially. + * This only has an effect for multi-projects. If this project has a parent, this value is + * inherited from that parent project.*/ + def parallelExecution: Boolean = + info.parent match + { + case Some(parent) => parent.parallelExecution + case None => false + } + + /** True if a project and its dependencies should be checked to ensure that their + * output directories are not the same, false if they should not be checked. 
*/ + def shouldCheckOutputDirectories = true + + /** The list of directories to which this project writes. This is used to verify that multiple + * projects have not been defined with the same output directories. */ + def outputDirectories: Iterable[Path] = outputRootPath :: Nil + def rootProject = Project.rootProject(this) + /** The path to the file that provides persistence for properties.*/ + final def envBackingPath = info.builderPath / Project.DefaultEnvBackingName + /** The path to the file that provides persistence for history. */ + def historyPath: Option[Path] = Some(outputRootPath / ".history") + def outputPath = crossPath(outputRootPath) + def outputRootPath = outputDirectoryName + def outputDirectoryName = DefaultOutputDirectoryName + + private def getProject(result: LoadResult, path: Path): Project = + result match + { + case LoadSetupDeclined => Predef.error("No project exists at path " + path) + case lse: LoadSetupError => Predef.error("Error setting up new project at path " + path + " : " + lse.message) + case err: LoadError => Predef.error("Error loading project at path " + path + " : " + err.message) + case success: LoadSuccess => success.project + } + + /** The property for the project's version. */ + final val projectVersion = property[Version] + /** The property for the project's name. */ + final val projectName = propertyLocalF[String](NonEmptyStringFormat) + /** The property for the project's organization. Defaults to the parent project's organization or the project name if there is no parent. */ + final val projectOrganization = propertyOptional[String](normalizedName, true) + /** The property that defines the version of Scala to build this project with by default. This property is only + * read by `sbt` on startup and reboot. When cross-building, this value may be different from the actual + * version of Scala being used to build the project. 
ScalaVersion.current and ScalaVersion.cross should be used + * to read the version of Scala building the project. This should only be used to change the version of Scala used + * for normal development (not cross-building)*/ + final val scalaVersion = propertyOptional[String]("") + final val sbtVersion = propertyOptional[String]("") + final val projectInitialize = propertyOptional[Boolean](false) + final val projectScratch = propertyOptional[Boolean](false) + + /** If this project is cross-building, returns `base` with an additional path component containing the scala version. + * Otherwise, this returns `base`. + * By default, cross-building is enabled when a project is loaded by the loader and crossScalaVersions is not empty.*/ + def crossPath(base: Path) = ScalaVersion.withCross(disableCrossPaths)(base / ScalaVersion.crossString(_), base) + /** If modifying paths for cross-building is enabled, this returns ScalaVersion.currentString. + * Otherwise, this returns the empty string. */ + def crossScalaVersionString: String = if(disableCrossPaths) "" else ScalaVersion.currentString + + /** True if crossPath should be the identity function.*/ + protected def disableCrossPaths = crossScalaVersions.isEmpty + /** By default, this is empty and cross-building is disabled. Overriding this to a Set of Scala versions + * will enable cross-building against those versions.*/ + def crossScalaVersions = scala.collection.immutable.Set.empty[String] + /** A `PathFinder` that determines the files watched when an action is run with a preceding ~ when this is the current + * project. 
This project does not need to include the watched paths for projects that this project depends on.*/ + def watchPaths: PathFinder = Path.emptyPathFinder + + protected final override def parentEnvironment = info.parent + + // .* included because svn doesn't mark .svn hidden + def defaultExcludes: FileFilter = (".*" - ".") || HiddenFileFilter + /** Short for parent.descendentsExcept(include, defaultExcludes)*/ + def descendents(parent: PathFinder, include: FileFilter) = parent.descendentsExcept(include, defaultExcludes) + override def toString = "Project " + projectName.get.getOrElse("at " + environmentLabel) + + def normalizedName = StringUtilities.normalize(name) +} +private[sbt] sealed trait LoadResult extends NotNull +private[sbt] final class LoadSuccess(val project: Project) extends LoadResult +private[sbt] final class LoadError(val message: String) extends LoadResult +private[sbt] final object LoadSetupDeclined extends LoadResult +private[sbt] final class LoadSetupError(val message: String) extends LoadResult + +object Project +{ + val UnnamedName = "" + val BootDirectoryName = "boot" + val DefaultOutputDirectoryName = "target" + val DefaultEnvBackingName = "build.properties" + val DefaultBuilderClassName = "sbt.DefaultProject" + val DefaultBuilderClass = Class.forName(DefaultBuilderClassName).asSubclass(classOf[Project]) + + /** The name of the directory for project definitions.*/ + val BuilderProjectDirectoryName = "build" + /** The name of the directory for plugin definitions.*/ + val PluginProjectDirectoryName = "plugins" + /** The name of the class that all projects must inherit from.*/ + val ProjectClassName = classOf[Project].getName + + /** The logger that should be used before the root project definition is loaded.*/ + private[sbt] def bootLogger = + { + val log = new ConsoleLogger + log.setLevel(Level.Debug) + log.enableTrace(true) + log + } + + private[sbt] def booted = java.lang.Boolean.getBoolean("sbt.boot") + + /** Loads the project in the current 
working directory.*/ + private[sbt] def loadProject: LoadResult = loadProject(bootLogger) + /** Loads the project in the current working directory.*/ + private[sbt] def loadProject(log: Logger): LoadResult = checkOutputDirectories(loadProject(new File("."), Nil, None, log)) + /** Loads the project in the directory given by 'path' and with the given dependencies.*/ + private[sbt] def loadProject(path: Path, deps: Iterable[Project], parent: Option[Project], log: Logger): LoadResult = + loadProject(path.asFile, deps, parent, log) + /** Loads the project in the directory given by 'projectDirectory' and with the given dependencies.*/ + private[sbt] def loadProject(projectDirectory: File, deps: Iterable[Project], parent: Option[Project], log: Logger): LoadResult = + { + val info = ProjectInfo(projectDirectory, deps, parent) + ProjectInfo.setup(info, log) match + { + case err: SetupError => new LoadSetupError(err.message) + case SetupDeclined => LoadSetupDeclined + case AlreadySetup => loadProject(info, None, log) + case setup: SetupInfo => loadProject(info, Some(setup), log) + } + } + private def loadProject(info: ProjectInfo, setupInfo: Option[SetupInfo], log: Logger): LoadResult = + { + try + { + val oldLevel = log.getLevel + log.setLevel(Level.Warn) + val result = + for(builderClass <- getProjectDefinition(info, log).right) yield + initialize(constructProject(info, builderClass), setupInfo, log) + log.setLevel(oldLevel) + result.fold(new LoadError(_), new LoadSuccess(_)) + } + catch + { + case ite: java.lang.reflect.InvocationTargetException => + { + val cause = + if(ite.getCause == null) ite + else ite.getCause + errorLoadingProject(cause, log) + } + case nme: NoSuchMethodException => new LoadError("Constructor with one argument of type sbt.ProjectInfo required for project definition.") + case e: Exception => errorLoadingProject(e, log) + } + } + /** Logs the stack trace and returns an error message in Left.*/ + private def errorLoadingProject(e: Throwable, log: 
Logger) = + { + log.trace(e) + new LoadError("Error loading project: " + e.toString) + } + /** Loads the project for the given `info` and represented by an instance of 'builderClass'.*/ + private[sbt] def constructProject[P <: Project](info: ProjectInfo, builderClass: Class[P]): P = + builderClass.getConstructor(classOf[ProjectInfo]).newInstance(info) + /** Checks the project's dependencies, initializes its environment, and possibly its directories.*/ + private def initialize[P <: Project](p: P, setupInfo: Option[SetupInfo], log: Logger): P = + { + setupInfo match + { + case Some(setup) => + { + p.projectName() = setup.name + for(v <- setup.version) + p.projectVersion() = v + for(org <- setup.organization) + p.projectOrganization() = org + if(!setup.initializeDirectories) + p.setEnvironmentModified(false) + for(errorMessage <- p.saveEnvironment()) + log.error(errorMessage) + if(setup.initializeDirectories) + p.initializeDirectories() + } + case None => + if(p.projectInitialize.value) + { + p.initializeDirectories() + p.projectInitialize() = false + for(errorMessage <- p.saveEnvironment()) + log.error(errorMessage) + } + } + val useName = p.projectName.get.getOrElse("at " + p.info.projectDirectory.getAbsolutePath) + checkDependencies(useName, p.info.dependencies, log) + p + } + /** Compiles the project definition classes and returns the project definition class name + * and the class loader that should be used to load the definition. 
*/ + private def getProjectDefinition(info: ProjectInfo, buildLog: Logger): Either[String, Class[P] forSome { type P <: Project }] = + { + val builderProjectPath = info.builderPath / BuilderProjectDirectoryName + if(builderProjectPath.asFile.isDirectory) + { + val pluginProjectPath = info.builderPath / PluginProjectDirectoryName + val builderProject = new BuilderProject(ProjectInfo(builderProjectPath.asFile, Nil, None), pluginProjectPath, buildLog) + builderProject.compile.run.toLeft(()).right.flatMap { ignore => + builderProject.projectDefinition.right.map { + case Some(definition) => getProjectClass[Project](definition, builderProject.projectClasspath) + case None => DefaultBuilderClass + } + } + } + else + Right(DefaultBuilderClass) + } + /** Verifies that the given list of project dependencies contains no nulls. The + * String argument should be the project name with the dependencies.*/ + private def checkDependencies(forProject: String, deps: Iterable[Project], log: Logger) + { + for(nullDep <- deps.find(_ == null)) + { + log.error("Project " + forProject + " had a null dependency. This is probably an initialization problem and might be due to a circular dependency.") + throw new RuntimeException("Null dependency in project " + forProject) + } + } + /** Verifies that output directories of the given project and all of its dependencies are + * all different. No verification is done if the project overrides + * 'shouldCheckOutputDirectories' to be false. The 'Project.outputDirectories' method is + * used to determine a project's output directories. */ + private def checkOutputDirectories(result: LoadResult): LoadResult = + result match + { + case success: LoadSuccess => + if(success.project.shouldCheckOutputDirectories) + checkOutputDirectoriesImpl(success.project) + else + success + case x => x + } + /** Verifies that output directories of the given project and all of its dependencies are + * all different. 
The 'Project.outputDirectories' method is used to determine a project's + * output directories. */ + private def checkOutputDirectoriesImpl(project: Project): LoadResult = + { + val projects = project.topologicalSort + import scala.collection.mutable.{HashMap, HashSet, Set} + val outputDirectories = new HashMap[Path, Set[Project]] + for(p <- projects; path <- p.outputDirectories) + outputDirectories.getOrElseUpdate(path, new HashSet[Project]) += p + val shared = outputDirectories.filter(_._2.size > 1) + if(shared.isEmpty) + new LoadSuccess(project) + else + { + val sharedString = + { + val s = + for((path, projectsSharingPath) <- shared) yield + projectsSharingPath.map(_.name).mkString(", ") + " share " + path + s.mkString("\n\t") + } + new LoadError("The same directory is used for output for multiple projects:\n\t" + sharedString + + "\n (If this is intentional, use 'override def shouldCheckOutputDirectories = false' in your project definition.)") + } + } + import scala.reflect.Manifest + private[sbt] def getProjectClass[P <: Project](name: String, classpath: PathFinder)(implicit mf: Manifest[P]): Class[P] = + { + val loader =ClasspathUtilities.toLoader(classpath) + val builderClass = Class.forName(name, false, loader) + val projectClass = mf.erasure + require(projectClass.isAssignableFrom(builderClass), "Builder class '" + builderClass + "' does not extend " + projectClass.getName + ".") + builderClass.asSubclass(projectClass).asInstanceOf[Class[P]] + } + + /** Writes the project name and a separator to the project's log at the info level.*/ + def showProjectHeader(project: Project) + { + val projectHeader = "Project " + project.name + project.log.info("") + project.log.info(projectHeader) + project.log.info("=" * projectHeader.length) + } + + def rootProject(p: Project): Project = + p.info.parent match + { + case Some(parent) => rootProject(parent) + case None => p + } +} diff --git a/src/main/scala/sbt/ProjectInfo.scala b/src/main/scala/sbt/ProjectInfo.scala 
new file mode 100644 index 000000000..80002cb1b --- /dev/null +++ b/src/main/scala/sbt/ProjectInfo.scala @@ -0,0 +1,107 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +import java.io.File +import FileUtilities._ + +final case class ProjectInfo(projectDirectory: File, dependencies: Iterable[Project], parent: Option[Project]) extends NotNull +{ + val projectPath = new ProjectDirectory(projectDirectory) + val builderPath = projectPath / ProjectInfo.MetadataDirectoryName + def bootPath = builderPath / Project.BootDirectoryName + def builderProjectPath = builderPath / Project.BuilderProjectDirectoryName + def builderProjectOutputPath = builderProjectPath / Project.DefaultOutputDirectoryName + def pluginsPath = builderPath / Project.PluginProjectDirectoryName + def pluginsOutputPath = pluginsPath / Project.DefaultOutputDirectoryName + def pluginsManagedSourcePath = pluginsPath / BasicDependencyPaths.DefaultManagedSourceDirectoryName + def pluginsManagedDependencyPath = pluginsPath / BasicDependencyPaths.DefaultManagedDirectoryName +} + +private[sbt] sealed trait SetupResult extends NotNull +private[sbt] final object SetupDeclined extends SetupResult +private[sbt] final class SetupError(val message: String) extends SetupResult +private[sbt] final object AlreadySetup extends SetupResult +private[sbt] final class SetupInfo(val name: String, val version: Option[Version], val organization: Option[String], val initializeDirectories: Boolean) extends SetupResult + +object ProjectInfo +{ + val MetadataDirectoryName = "project" + private val DefaultOrganization = "empty" + + def setup(info: ProjectInfo, log: Logger): SetupResult = + { + val builderDirectory = info.builderPath.asFile + if(builderDirectory.exists) + { + if(builderDirectory.isDirectory) + AlreadySetup + else + new SetupError("'" + builderDirectory.getAbsolutePath + "' is not a directory.") + } + else + setupProject(info.projectDirectory, log) + } + private def 
setupProject(projectDirectory: File, log: Logger): SetupResult = + { + if(confirmPrompt("No project found. Create new project?", false)) + { + val name = trim(SimpleReader.readLine("Project Name: ")) + if(name.isEmpty) + new SetupError("Project not created: no name specified.") + else + { + val organization = + { + val org = trim(SimpleReader.readLine("Organization [" + DefaultOrganization + "]: ")) + if(org.isEmpty) + DefaultOrganization + else + org + } + readVersion(projectDirectory, log) match + { + case None => new SetupError("Project not created: no version specified.") + case Some(version) => + if(verifyCreateProject(name, version, organization)) + new SetupInfo(name, Some(version), Some(organization), true) + else + SetupDeclined + } + } + } + else + SetupDeclined + } + private def verifyCreateProject(name: String, version: Version, organization: String): Boolean = + confirmPrompt("Create new project " + name + " " + version + " with organization " + organization +" ?", true) + + private def confirmPrompt(question: String, defaultYes: Boolean) = + { + val choices = if(defaultYes) " (Y/n) " else " (y/N) " + val answer = trim(SimpleReader.readLine(question + choices)) + val yes = "y" :: "yes" :: (if(defaultYes) List("") else Nil) + yes.contains(answer.toLowerCase) + } + + private def readVersion(projectDirectory: File, log: Logger): Option[Version] = + { + val version = trim(SimpleReader.readLine("Version: ")) + if(version.isEmpty) + None + else + { + Version.fromString(version) match + { + case Left(errorMessage) => + { + log.error("Invalid version: " + errorMessage) + readVersion(projectDirectory, log) + } + case Right(v) => Some(v) + } + } + } + private def trim(s: Option[String]) = s.getOrElse("") +} \ No newline at end of file diff --git a/src/main/scala/sbt/ProjectPaths.scala b/src/main/scala/sbt/ProjectPaths.scala new file mode 100644 index 000000000..c29dc41fa --- /dev/null +++ b/src/main/scala/sbt/ProjectPaths.scala @@ -0,0 +1,300 @@ +/* sbt -- 
Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +trait PackagePaths extends NotNull +{ + def jarPath: Path + def packageTestJar: Path + def packageDocsJar: Path + def packageSrcJar: Path + def packageTestSrcJar: Path + def packageProjectZip: Path +} +/** These are the paths required by BasicScalaProject.*/ +trait ScalaPaths extends PackagePaths +{ + /** A PathFinder that selects all main sources.*/ + def mainSources: PathFinder + /** A PathFinder that selects all test sources.*/ + def testSources: PathFinder + /** A PathFinder that selects all main resources.*/ + def mainResources: PathFinder + /** A PathFinder that selects all test resources. */ + def testResources: PathFinder + + def mainResourceClasspath: PathFinder + def testResourceClasspath: PathFinder + + def mainCompilePath: Path + def testCompilePath: Path + def mainAnalysisPath: Path + def testAnalysisPath: Path + def mainDocPath: Path + def testDocPath: Path + def graphPath: Path + + /** A PathFinder that selects all the classes compiled from the main sources.*/ + def mainClasses: PathFinder + /** A PathFinder that selects all the classes compiled from the test sources.*/ + def testClasses: PathFinder + + /** Declares all paths to be packaged by the package action.*/ + def packagePaths: PathFinder + /** Declares all paths to be packaged by the package-test action.*/ + def packageTestPaths: PathFinder + /** Declares all sources to be packaged by the package-src action.*/ + def packageSourcePaths: PathFinder + /** Declares all sources to be packaged by the package-test-src action.*/ + def packageTestSourcePaths: PathFinder + /** Declares all paths to be packaged by the package-project action.*/ + def packageProjectPaths: PathFinder + + /** These are the directories that are created when a user makes a new project from sbt.*/ + protected def directoriesToCreate: List[Path] + /** The directories to which a project writes are listed here and is used + * to check a project and its 
dependencies for collisions.*/ + def outputDirectories: Iterable[Path] + + def artifactBaseName: String +} + +trait BasicScalaPaths extends Project with ScalaPaths +{ + def mainSourceRoots: PathFinder + def testSourceRoots: PathFinder + def mainResourcesPath: PathFinder + def testResourcesPath: PathFinder + def managedDependencyRootPath: Path + def dependencyPath: Path + + protected def sources(base: PathFinder) = descendents(base, sourceExtensions) + protected def sourceExtensions = "*.scala" | "*.java" + + def mainSources = + { + val normal = sources(mainSourceRoots) + if(scratch) + normal +++ (info.projectPath * sourceExtensions) + else + normal + } + def testSources = sources(testSourceRoots) + + def mainResourceClasspath = mainResourcesPath + def testResourceClasspath = testResourcesPath + def mainResources = descendents(mainResourcesPath ##, "*") + def testResources = descendents(testResourcesPath ##, "*") + + def mainClasses = (mainCompilePath ##) ** "*.class" + def testClasses = (testCompilePath ##) ** "*.class" + + def packagePaths = mainClasses +++ mainResources + def packageTestPaths = testClasses +++ testResources + def packageSourcePaths = mainSources +++ mainResources + def packageTestSourcePaths = testSources +++ testResources + def packageProjectPaths = descendents( (info.projectPath ##), "*") --- (packageProjectExcludes ** "*") + protected def packageProjectExcludes: PathFinder = + outputRootPath +++ managedDependencyRootPath +++ + info.bootPath +++ info.builderProjectOutputPath +++ + info.pluginsOutputPath +++ info.pluginsManagedSourcePath +++ info.pluginsManagedDependencyPath + + override def outputDirectories = outputRootPath :: managedDependencyRootPath :: Nil +} + +@deprecated trait BasicProjectPaths extends MavenStyleScalaPaths +trait MavenStyleScalaPaths extends BasicScalaPaths with BasicPackagePaths +{ + import BasicProjectPaths._ + + def outputPath: Path + + def sourceDirectoryName = DefaultSourceDirectoryName + def mainDirectoryName = 
DefaultMainDirectoryName + def scalaDirectoryName = DefaultScalaDirectoryName + def javaDirectoryName = DefaultJavaDirectoryName + def resourcesDirectoryName = DefaultResourcesDirectoryName + def testDirectoryName = DefaultTestDirectoryName + def mainCompileDirectoryName = DefaultMainCompileDirectoryName + def testCompileDirectoryName = DefaultTestCompileDirectoryName + def docDirectoryName = DefaultDocDirectoryName + def apiDirectoryName = DefaultAPIDirectoryName + def graphDirectoryName = DefaultGraphDirectoryName + def mainAnalysisDirectoryName = DefaultMainAnalysisDirectoryName + def testAnalysisDirectoryName = DefaultTestAnalysisDirectoryName + + def sourcePath = path(sourceDirectoryName) + + def mainSourcePath = sourcePath / mainDirectoryName + def mainScalaSourcePath = mainSourcePath / scalaDirectoryName + def mainJavaSourcePath = mainSourcePath / javaDirectoryName + def mainResourcesPath = mainSourcePath / resourcesDirectoryName + def mainDocPath = docPath / mainDirectoryName / apiDirectoryName + def mainCompilePath = outputPath / mainCompileDirectoryName + def mainAnalysisPath = outputPath / mainAnalysisDirectoryName + + def testSourcePath = sourcePath / testDirectoryName + def testJavaSourcePath = testSourcePath / javaDirectoryName + def testScalaSourcePath = testSourcePath / scalaDirectoryName + def testResourcesPath = testSourcePath / resourcesDirectoryName + def testDocPath = docPath / testDirectoryName / apiDirectoryName + def testCompilePath = outputPath / testCompileDirectoryName + def testAnalysisPath = outputPath / testAnalysisDirectoryName + + def docPath = outputPath / docDirectoryName + def graphPath = outputPath / graphDirectoryName + + /** These are the directories that are created when a user makes a new project from sbt.*/ + protected def directoriesToCreate: List[Path] = + dependencyPath :: + mainScalaSourcePath :: + mainResourcesPath :: + testScalaSourcePath :: + testResourcesPath :: + Nil + + def mainSourceRoots = mainJavaSourcePath +++ 
mainScalaSourcePath + def testSourceRoots = testJavaSourcePath +++ testScalaSourcePath +} + +trait BasicPackagePaths extends ScalaPaths with PackagePaths +{ + def outputPath: Path + + def defaultJarBaseName: String = artifactBaseName + def defaultJarName = defaultJarBaseName + ".jar" + def jarPath = outputPath / defaultJarName + def packageTestJar = defaultJarPath("-test.jar") + def packageDocsJar = defaultJarPath("-docs.jar") + def packageSrcJar= defaultJarPath("-src.jar") + def packageTestSrcJar = defaultJarPath("-test-src.jar") + def packageProjectZip = defaultJarPath("-project.zip") + def defaultJarPath(extension: String) = outputPath / (artifactBaseName + extension) +} + +object BasicProjectPaths +{ + val DefaultSourceDirectoryName = "src" + val DefaultMainCompileDirectoryName = "classes" + val DefaultTestCompileDirectoryName = "test-classes" + val DefaultDocDirectoryName = "doc" + val DefaultAPIDirectoryName = "api" + val DefaultGraphDirectoryName = "graph" + val DefaultMainAnalysisDirectoryName = "analysis" + val DefaultTestAnalysisDirectoryName = "test-analysis" + + val DefaultMainDirectoryName = "main" + val DefaultScalaDirectoryName = "scala" + val DefaultJavaDirectoryName = "java" + val DefaultResourcesDirectoryName = "resources" + val DefaultTestDirectoryName = "test" + + // forwarders to new locations + def BootDirectoryName = Project.BootDirectoryName + def DefaultManagedDirectoryName = BasicDependencyPaths.DefaultManagedDirectoryName + def DefaultDependencyDirectoryName = BasicDependencyPaths.DefaultDependencyDirectoryName +} + +trait WebScalaPaths extends ScalaPaths +{ + def temporaryWarPath: Path + def webappResources: PathFinder + def jettyContextPath: String + def warPath: Path +} +@deprecated trait WebProjectPaths extends MavenStyleWebScalaPaths +trait MavenStyleWebScalaPaths extends WebScalaPaths with MavenStyleScalaPaths +{ + import WebProjectPaths._ + def temporaryWarPath = outputPath / webappDirectoryName + def webappPath = mainSourcePath / 
webappDirectoryName + def webappDirectoryName = DefaultWebappDirectoryName + def jettyContextPath = DefaultJettyContextPath + def defaultWarName = defaultJarBaseName + ".war" + def warPath = outputPath / defaultWarName + /** Additional files to include in the web application. */ + protected def extraWebappFiles: PathFinder = Path.emptyPathFinder + def webappResources = descendents(webappPath ##, "*") +++ extraWebappFiles +} +object WebProjectPaths +{ + val DefaultWebappDirectoryName = "webapp" + val DefaultJettyContextPath = "/" +} + +/** Defines default paths for a webstart project. It directly extends WebstartOptions to make +* it easy to implement and override webstart options in the common case of one webstartTask per +* project.*/ +trait WebstartPaths extends ScalaPaths +{ + import WebstartPaths._ + + def outputPath: Path + def jnlpPath: Path + + def webstartOutputDirectory = outputPath / webstartDirectoryName + + def jnlpFile = webstartOutputDirectory / jnlpFileName + def webstartLibDirectory = webstartOutputDirectory / webstartLibName + def webstartZip: Option[Path] = Some(outputPath / webstartZipName) + def jnlpResourcesPath = jnlpPath / BasicProjectPaths.DefaultResourcesDirectoryName + + def webstartLibName = DefaultWebstartLibName + def webstartDirectoryName = DefaultWebstartDirectoryName + + def webstartZipName: String + def jnlpFileName: String +} +object WebstartPaths +{ + val DefaultWebstartDirectoryName = "webstart" + val DefaultJnlpName = "jnlp" + val DefaultWebstartLibName = "lib" +} +trait MavenStyleWebstartPaths extends WebstartPaths with MavenStyleScalaPaths +{ + import WebstartPaths._ + def jnlpPath = mainSourcePath / DefaultJnlpName + def webstartMainJar = jarPath + def jnlpFileName = DefaultJnlpFileName + def webstartZipName = artifactBaseName + ".zip" + def DefaultJnlpFileName = artifactBaseName + ".jnlp" +} + +trait IntegrationTestPaths extends NotNull +{ + def integrationTestSources: PathFinder + def integrationTestResourcesPath: Path + + 
def integrationTestCompilePath: Path + def integrationTestAnalysisPath: Path +} +trait BasicIntegrationTestPaths extends IntegrationTestPaths +{ + def integrationTestScalaSourcePath: Path + def integrationTestSources = sources(integrationTestScalaSourcePath) + protected def sources(base: Path): PathFinder +} +trait MavenStyleIntegrationTestPaths extends BasicIntegrationTestPaths with MavenStyleScalaPaths +{ + import IntegrationTestPaths._ + + def integrationTestDirectoryName = DefaultIntegrationTestDirectoryName + def integrationTestCompileDirectoryName = DefaultIntegrationTestCompileDirectoryName + def integrationTestAnalysisDirectoryName = DefaultIntegrationTestAnalysisDirectoryName + + def integrationTestSourcePath = sourcePath / integrationTestDirectoryName + def integrationTestScalaSourcePath = integrationTestSourcePath / scalaDirectoryName + def integrationTestResourcesPath = integrationTestSourcePath / resourcesDirectoryName + + def integrationTestCompilePath = outputPath / integrationTestCompileDirectoryName + def integrationTestAnalysisPath = outputPath / integrationTestAnalysisDirectoryName +} + +object IntegrationTestPaths +{ + val DefaultIntegrationTestDirectoryName = "it" + val DefaultIntegrationTestCompileDirectoryName = "it-classes" + val DefaultIntegrationTestAnalysisDirectoryName = "it-analysis" +} \ No newline at end of file diff --git a/src/main/scala/sbt/ReflectUtilities.scala b/src/main/scala/sbt/ReflectUtilities.scala new file mode 100644 index 000000000..02822190a --- /dev/null +++ b/src/main/scala/sbt/ReflectUtilities.scala @@ -0,0 +1,52 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 David MacIver, Mark Harrah + */ +package sbt; + +import scala.collection._ + +object ReflectUtilities +{ + def transformCamelCase(name: String, separator: Char) = + { + val buffer = new StringBuilder + for(char <- name) + { + import java.lang.Character._ + if(isUpperCase(char)) + { + buffer += separator + buffer += toLowerCase(char) + } + else + buffer += 
char + } + buffer.toString + } + + def ancestry(clazz : Class[_]) : List[Class[_]] = + if (clazz == classOf[AnyRef] || !classOf[AnyRef].isAssignableFrom(clazz)) List(clazz) + else clazz :: ancestry(clazz.getSuperclass); + + def fields(clazz : Class[_]) = + mutable.OpenHashMap(ancestry(clazz). + flatMap(_.getDeclaredFields). + map(f => (f.getName, f)):_*) + + def allValsC[T](self: AnyRef, clazz: Class[T]): Map[String, T] = + { + val mappings = new mutable.OpenHashMap[String, T] + val correspondingFields = fields(self.getClass) + for(method <- self.getClass.getMethods) + { + if(method.getParameterTypes.length == 0 && clazz.isAssignableFrom(method.getReturnType)) + { + for(field <- correspondingFields.get(method.getName) if field.getType == method.getReturnType) + mappings(method.getName) = method.invoke(self).asInstanceOf[T] + } + } + mappings + } + def allVals[T](self: AnyRef)(implicit mt: scala.reflect.Manifest[T]): Map[String, T] = + allValsC(self, mt.erasure).asInstanceOf[Map[String,T]] +} diff --git a/src/main/scala/sbt/Resources.scala b/src/main/scala/sbt/Resources.scala new file mode 100644 index 000000000..388e82b3c --- /dev/null +++ b/src/main/scala/sbt/Resources.scala @@ -0,0 +1,149 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import java.io.File +import FileUtilities._ + +object Resources +{ + def apply(basePath: String) = + { + require(basePath.startsWith("/")) + val resource = getClass.getResource(basePath) + if(resource == null) + throw new Exception("Resource base directory '" + basePath + "' not on classpath.") + else + { + val file = new File(resource.toURI) + if(file.exists) + new Resources(file) + else + throw new Exception("Resource base directory '" + basePath + "' does not exist.") + } + } + private val LoadErrorPrefix = "Error loading initial project: " +} + +class Resources(val baseDirectory: File) +{ + import Resources._ + // The returned directory is not actually read-only, but it should be 
treated that way + def readOnlyResourceDirectory(group: String, name: String): Either[String, File] = + { + val groupDirectory = new File(baseDirectory, group) + if(groupDirectory.isDirectory) + { + val resourceDirectory = new File(groupDirectory, name) + if(resourceDirectory.isDirectory) + Right(resourceDirectory) + else + Left("Resource directory '" + name + "' in group '" + group + "' not found.") + } + else + Left("Group '" + group + "' not found.") + } + def readWriteResourceDirectory[T](group: String, name: String, log: Logger) + (withDirectory: File => Either[String, T]): Either[String, T] = + readOnlyResourceDirectory(group, name).right flatMap(file => readWriteResourceDirectory(file, log)(withDirectory)) + def readWriteResourceDirectory[T](readOnly: File, log: Logger) + (withDirectory: File => Either[String, T]): Either[String, T] = + { + require(readOnly.isDirectory) + def readWrite(readOnly: File)(temporary: File): Either[String, T] = + { + val readWriteDirectory = new File(temporary, readOnly.getName) + FileUtilities.copyDirectory(readOnly, readWriteDirectory, log).toLeft(()).right flatMap { x => + withDirectory(readWriteDirectory) + } + } + doInTemporaryDirectory(log)(readWrite(readOnly)) + } + + def withProject[T](projectDirectory: File, log: Logger)(f: Project => WithProjectResult[T]): Either[String, T] = + readWriteResourceDirectory(projectDirectory, log)(withProject(log)(f)) + def withProject[T](group: String, name: String, log: Logger)(f: Project => WithProjectResult[T]): Either[String, T] = + readWriteResourceDirectory(group, name, log)(withProject(log)(f)) + def withProject[T](log: Logger)(f: Project => WithProjectResult[T])(dir: File): Either[String, T] = + withProject(log, None, new ReloadSuccessExpected(LoadErrorPrefix), dir )(f) + private def withProject[T](log: Logger, previousProject: Option[Project], reload: ReloadProject, dir: File) + (f: Project => WithProjectResult[T]): Either[String, T] = + { + require(previousProject.isDefined || 
reload != NoReload, "Previous project undefined and reload not requested.") + val loadResult = + if(reload == NoReload && previousProject.isDefined) + Right(previousProject.get) + else + { + val buffered = new BufferedLogger(log) + def error(msg: String) = + { + buffered.playAll() + buffered.stop() + Left(msg) + } + + buffered.startRecording() + resultToEither(Project.loadProject(dir, Nil, None, buffered)) match + { + case Left(msg) => + reload match + { + case ReloadErrorExpected => + buffered.stop() + previousProject.toRight("Initial project load failed.") + case s: ReloadSuccessExpected => error(s.prefixIfError + msg) + case NoReload /* shouldn't happen */=> error(msg) + } + case Right(p) => + reload match + { + case ReloadErrorExpected => error("Expected project load failure, but it succeeded.") + case _ => + buffered.stop() + Right(p) + } + } + } + loadResult match + { + case Right(project) => + project.log.enableTrace(log.traceEnabled) + project.log.setLevel(log.getLevel) + f(project) match + { + case ContinueResult(newF, newReload) => withProject(log, Some(project), newReload, dir)(newF) + case ValueResult(value) => Right(value) + case err: ErrorResult => Left(err.message) + } + case Left(message) => Left(message) + } + } + + def resultToEither(result: LoadResult): Either[String, Project] = + result match + { + case success: LoadSuccess => Right(success.project) + case err: LoadError => Left(err.message) + case err: LoadSetupError => Left(err.message) + case LoadSetupDeclined => Left("Setup declined") + } +} +sealed trait ReloadProject extends NotNull +final object ReloadErrorExpected extends ReloadProject +final class ReloadSuccessExpected(val prefixIfError: String) extends ReloadProject +final object NoReload extends ReloadProject + +sealed trait WithProjectResult[+T] extends NotNull +final case class ContinueResult[T](f: Project => WithProjectResult[T], reload: ReloadProject) extends WithProjectResult[T] +final case class ValueResult[T](value: T) extends 
WithProjectResult[T] +final class ErrorResult(val message: String) extends WithProjectResult[Nothing] +object ContinueResult +{ + def apply[T](f: Project => WithProjectResult[T], prefixIfError: Option[String]) = + { + val reload = prefixIfError match { case None => NoReload; case Some(p) => new ReloadSuccessExpected(p) } + new ContinueResult[T](f, reload) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Run.scala b/src/main/scala/sbt/Run.scala new file mode 100644 index 000000000..f4dbb412e --- /dev/null +++ b/src/main/scala/sbt/Run.scala @@ -0,0 +1,167 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import scala.tools.nsc.{GenericRunnerCommand, Interpreter, InterpreterLoop, ObjectRunner, Settings} +import scala.tools.nsc.interpreter.InteractiveReader +import scala.tools.nsc.reporters.Reporter +import scala.tools.nsc.util.ClassPath + +import java.io.File +import java.net.{URL, URLClassLoader} + +trait ScalaRun +{ + def console(classpath: Iterable[Path], log: Logger): Option[String] + def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger): Option[String] +} +class ForkRun(config: ForkScalaRun) extends ScalaRun +{ + def console(classpath: Iterable[Path], log: Logger): Option[String] = + { + error("Forking the interpreter is not implemented.") + //val exitCode = Fork.scala(config.javaHome, config.runJVMOptions, config.scalaJars, classpathOption(classpath), config.workingDirectory, log) + //processExitCode(exitCode, "interpreter") + } + def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger): Option[String] = + { + val scalaOptions = classpathOption(classpath) ::: mainClass :: options.toList + val exitCode = Fork.scala(config.javaHome, config.runJVMOptions, config.scalaJars, scalaOptions, config.workingDirectory, log) + processExitCode(exitCode, "runner") + } + private def classpathOption(classpath: Iterable[Path]) = "-cp" :: 
Path.makeString(classpath) :: Nil + private def processExitCode(exitCode: Int, label: String) = + { + if(exitCode == 0) + None + else + Some("Nonzero exit code returned from " + label + ": " + exitCode) + } +} + +/** This module is an interface to starting the scala interpreter or runner.*/ +object Run extends ScalaRun +{ + /** Starts an interactive scala interpreter session with the given classpath.*/ + def console(classpath: Iterable[Path], log: Logger) = + createSettings(log) + { + (settings: Settings) => + { + settings.classpath.value = Path.makeString(classpath) + log.info("Starting scala interpreter...") + log.debug(" Classpath: " + settings.classpath.value) + log.info("") + Control.trapUnit("Error during session: ", log) + { + val loop = new InterpreterLoop + executeTrapExit(loop.main(settings), log) + } + } + } + /** Executes the given function, trapping calls to System.exit. */ + private def executeTrapExit(f: => Unit, log: Logger): Option[String] = + { + val exitCode = TrapExit(f, log) + if(exitCode == 0) + { + log.debug("Exited with code 0") + None + } + else + Some("Nonzero exit code: " + exitCode) + } + /** Runs the class 'mainClass' using the given classpath and options using the scala runner.*/ + def run(mainClass: String, classpath: Iterable[Path], options: Seq[String], log: Logger) = + { + createSettings(log) + { + (settings: Settings) => + { + Control.trapUnit("Error during run: ", log) + { + val classpathURLs = classpath.map(_.asURL).toList + val bootClasspath = FileUtilities.pathSplit(settings.bootclasspath.value) + val extraURLs = + for(pathString <- bootClasspath if pathString.length > 0) yield + (new java.io.File(pathString)).toURI.toURL + log.info("Running " + mainClass + " ...") + log.debug(" Classpath:" + (classpathURLs ++ extraURLs).mkString("\n\t", "\n\t","")) + executeTrapExit( ObjectRunner.run(classpathURLs ++ extraURLs, mainClass, options.toList), log ) + } + } + } + } + /** If mainClassOption is None, then the interactive scala 
interpreter is started with the given classpath. + * Otherwise, the class wrapped by Some is run using the scala runner with the given classpath and + * options. */ + def apply(mainClassOption: Option[String], classpath: Iterable[Path], options: Seq[String], log: Logger) = + { + mainClassOption match + { + case Some(mainClass) => run(mainClass, classpath, options, log) + case None => console(classpath, log) + } + } + /** Create a settings object and execute the provided function if the settings are created ok.*/ + private def createSettings(log: Logger)(f: Settings => Option[String]) = + { + val command = new GenericRunnerCommand(Nil, message => log.error(message)) + if(command.ok) + f(command.settings) + else + Some(command.usageMsg) + } + + /** Starts a Scala interpreter session with 'project' bound to the value 'current' in the console + * and the following two lines executed: + * import sbt._ + * import current._ + */ + def projectConsole(project: Project): Option[String] = + { + import project.log + createSettings(log) { interpreterSettings => + createSettings(log) { compilerSettings => + log.info("Starting scala interpreter with project definition " + project.name + " ...") + log.info("") + Control.trapUnit("Error during session: ", log) + { + val loop = new ProjectInterpreterLoop(compilerSettings, project) + executeTrapExit(loop.main(interpreterSettings), log) + } + }} + } + /** A custom InterpreterLoop with the purpose of creating an interpreter with Project 'project' bound to the value 'current', + * and the following two lines interpreted: + * import sbt._ + * import current._. + * To do this, + * 1) The compiler uses a different settings instance: 'compilerSettings', which will have its classpath set to include the classpath + * of the loader that loaded 'project'. The compiler can then find the classes it needs to compile code referencing the project. 
+ * 2) The parent class loader for the interpreter is the loader that loaded the project, so that the project can be bound to a variable + * in the interpreter. + */ + private class ProjectInterpreterLoop(compilerSettings: Settings, project: Project) extends InterpreterLoop + { + override def createInterpreter() + { + val loader = project.getClass.getClassLoader.asInstanceOf[URLClassLoader] + compilerSettings.classpath.value = loader.getURLs.flatMap(ClasspathUtilities.asFile).map(_.getAbsolutePath).mkString(File.pathSeparator) + project.log.debug(" Compiler classpath: " + compilerSettings.classpath.value) + + in = InteractiveReader.createDefault() + interpreter = new Interpreter(settings) + { + override protected def parentClassLoader = loader + override protected def newCompiler(settings: Settings, reporter: Reporter) = super.newCompiler(compilerSettings, reporter) + } + interpreter.setContextClassLoader() + interpreter.bind("current", project.getClass.getName, project) + interpreter.interpret("import sbt._") + interpreter.interpret("import Process._") + interpreter.interpret("import current._") + } + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/ScalaProject.scala b/src/main/scala/sbt/ScalaProject.scala new file mode 100644 index 000000000..d9b38592f --- /dev/null +++ b/src/main/scala/sbt/ScalaProject.scala @@ -0,0 +1,345 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah, David MacIver + */ +package sbt + +import FileUtilities._ +import java.io.File +import java.util.jar.{Attributes, Manifest} +import scala.collection.mutable.ListBuffer + +trait SimpleScalaProject extends Project +{ + def errorTask(message: String) = task{ Some(message) } + + trait CleanOption extends ActionOption + case class ClearAnalysis(analysis: TaskAnalysis[_, _, _]) extends CleanOption + case class Preserve(paths: PathFinder) extends CleanOption + + case class CompileOption(val asString: String) extends ActionOption + case class JavaCompileOption(val 
asString: String) extends ActionOption + + val Deprecation = CompileOption("-deprecation") + val ExplainTypes = CompileOption("-explaintypes") + val Optimize = CompileOption("-optimise") + def Optimise = Optimize + val Verbose = CompileOption("-verbose") + val Unchecked = CompileOption("-unchecked") + val DisableWarnings = CompileOption("-nowarn") + def target(target: Target.Value) = CompileOption("-target:" + target) + object Target extends Enumeration + { + val Java1_5 = Value("jvm-1.5") + val Java1_4 = Value("jvm-1.4") + val Msil = Value("msil") + } + + def cleanTask(paths: PathFinder, options: CleanOption*): Task = + cleanTask(paths, options) + def cleanTask(paths: PathFinder, options: => Seq[CleanOption]): Task = + task + { + val cleanOptions = options + val preservePaths = for(Preserve(preservePaths) <- cleanOptions; toPreserve <- preservePaths.get) yield toPreserve + Control.thread(FileUtilities.preserve(preservePaths, log)) + { preserved => + val pathClean = FileUtilities.clean(paths.get, log) + for(ClearAnalysis(analysis) <- cleanOptions) + { + analysis.clear() + analysis.save() + } + val restored = preserved.restore(log) + pathClean orElse restored + } + } +} +trait ScalaProject extends SimpleScalaProject with FileTasks +{ + import ScalaProject._ + + final case class MaxCompileErrors(val value: Int) extends CompileOption("") with ScaladocOption { def asList = Nil } + trait PackageOption extends ActionOption + trait TestOption extends ActionOption + + case class TestSetup(setup: () => Option[String]) extends TestOption + case class TestCleanup(cleanup: () => Option[String]) extends TestOption + case class ExcludeTests(tests: Iterable[String]) extends TestOption + case class TestListeners(listeners: Iterable[TestReportListener]) extends TestOption + case class TestFilter(filterTest: String => Boolean) extends TestOption + + case class JarManifest(m: Manifest) extends PackageOption + { + assert(m != null) + } + case class MainClass(mainClassName: String) 
extends PackageOption + case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption + case object Recursive extends PackageOption + def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = + { + val converted = for( (name,value) <- attributes ) yield (new Attributes.Name(name), value) + new ManifestAttributes(converted : _*) + } + + + trait ScaladocOption extends ActionOption + { + def asList: List[String] + } + case class SimpleDocOption(optionValue: String) extends ScaladocOption + { + def asList = List(optionValue) + } + case class CompoundDocOption(label: String, value: String) extends ScaladocOption + { + def asList = List(label, value) + } + val LinkSource = SimpleDocOption("-linksource") + val NoComment = SimpleDocOption("-nocomment") + def access(access: Access.Value) = SimpleDocOption("-access:" + access) + def documentBottom(bottomText: String) = CompoundDocOption("-bottom", bottomText) + def documentCharset(charset: String) = CompoundDocOption("-charset", charset) + def documentTitle(title: String) = CompoundDocOption("-doctitle", title) + def documentFooter(footerText: String) = CompoundDocOption("-footer", footerText) + def documentHeader(headerText: String) = CompoundDocOption("-header", headerText) + def stylesheetFile(path: Path) = CompoundDocOption("-stylesheetfile", path.asFile.getAbsolutePath) + def documentTop(topText: String) = CompoundDocOption("-top", topText) + def windowTitle(title: String) = CompoundDocOption("-windowtitle", title) + + object Access extends Enumeration + { + val Public = Value("public") + val Default = Value("protected") + val Private = Value("private") + } + + def consoleTask(classpath : PathFinder): Task = + consoleTask(classpath, Run) + def consoleTask(classpath : PathFinder, runner: ScalaRun): Task = + interactiveTask { runner.console(classpath.get, log) } + + def runTask(mainClass: => Option[String], classpath: PathFinder, options: String*): Task = + 
runTask(mainClass, classpath, options) + def runTask(mainClass: => Option[String], classpath: PathFinder, options: => Seq[String]): Task = + runTask(mainClass, classpath, options, Run) + def runTask(mainClass: => Option[String], classpath: PathFinder, options: => Seq[String], runner: ScalaRun): Task = + task + { + mainClass match + { + case Some(main) => runner.run(main, classpath.get, options, log) + case None => Some("No main class specified.") + } + } + + def syncTask(sourceDirectory: Path, destinationDirectory: Path): Task = + task { FileUtilities.sync(sourceDirectory, destinationDirectory, log) } + def copyTask(sources: PathFinder, destinationDirectory: Path): Task = + task { FileUtilities.copy(sources.get, destinationDirectory, log).left.toOption } + + def testTask(frameworks: Iterable[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: TestOption*): Task = + testTask(frameworks, classpath, analysis, options) + def testTask(frameworks: Iterable[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]): Task = + { + def work = + { + val (begin, work, end) = testTasks(frameworks, classpath, analysis, options) + val beginTasks = begin.map(toTask).toSeq // test setup tasks + val workTasks = work.map(w => toTask(w) dependsOn(beginTasks : _*)) // the actual tests + val endTasks = end.map(toTask).toSeq // tasks that perform test cleanup and are run regardless of success of tests + val endTask = task { None } named("test-cleanup") dependsOn(endTasks : _*) + val rootTask = task { None } named("test-complete") dependsOn(workTasks.toSeq : _*) // the task that depends on all test subtasks + new SubWork[Project#Task](ParallelRunner.dagScheduler(rootTask), ParallelRunner.dagScheduler(endTask)) + } + new CompoundTask(work) + } + private def toTask(testTask: NamedTestTask) = task(testTask.run()) named(testTask.name) + + def graphTask(outputDirectory: Path, analysis: CompileAnalysis): Task = task { DotGraph(analysis, 
outputDirectory, log) } + def scaladocTask(label: String, sources: PathFinder, outputDirectory: Path, classpath: PathFinder, options: ScaladocOption*): Task = + scaladocTask(label, sources, outputDirectory, classpath, options) + def scaladocTask(label: String, sources: PathFinder, outputDirectory: Path, classpath: PathFinder, options: => Seq[ScaladocOption]): Task = + task + { + val classpathString = Path.makeString(classpath.get) + val optionsLocal = options + val maxErrors = maximumErrors(optionsLocal) + (new Scaladoc(maxErrors))(label, sources.get, classpathString, outputDirectory, optionsLocal.flatMap(_.asList), log) + } + + def packageTask(sources: PathFinder, outputDirectory: Path, jarName: => String, options: PackageOption*): Task = + packageTask(sources, outputDirectory / jarName, options) + def packageTask(sources: PathFinder, outputDirectory: Path, jarName: => String, options: => Seq[PackageOption]): Task = + packageTask(sources: PathFinder, outputDirectory / jarName, options) + def packageTask(sources: PathFinder, jarPath: => Path, options: PackageOption*): Task = + packageTask(sources, jarPath, options) + def packageTask(sources: PathFinder, jarPath: => Path, options: => Seq[PackageOption]): Task = + fileTask("package", jarPath from sources) + { + import wrap.{MutableMapWrapper,Wrappers} + /** Copies the mappings in a2 to a1, mutating a1. 
*/ + def mergeAttributes(a1: Attributes, a2: Attributes) + { + for( (key, value) <- Wrappers.toList(a2)) + a1.put(key, value) + } + + val manifest = new Manifest + var recursive = false + for(option <- options) + { + option match + { + case JarManifest(mergeManifest) => + { + mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes) + val entryMap = new MutableMapWrapper(manifest.getEntries) + for((key, value) <- Wrappers.toList(mergeManifest.getEntries)) + { + entryMap.get(key) match + { + case Some(attributes) => mergeAttributes(attributes, value) + case None => entryMap += (key, value) + } + } + } + case Recursive => recursive = true + case MainClass(mainClassName) => + manifest.getMainAttributes.put(Attributes.Name.MAIN_CLASS, mainClassName) + case ManifestAttributes(attributes @ _*) => + val main = manifest.getMainAttributes + for( (name, value) <- attributes) + main.put(name, value) + case _ => log.warn("Ignored unknown package option " + option) + } + } + val jarPathLocal = jarPath + FileUtilities.clean(jarPathLocal :: Nil, log) orElse + FileUtilities.jar(sources.get, jarPathLocal, manifest, recursive, log) + } + def zipTask(sources: PathFinder, outputDirectory: Path, zipName: => String): Task = + zipTask(sources, outputDirectory / zipName) + def zipTask(sources: PathFinder, zipPath: => Path): Task = + fileTask("zip", zipPath from sources) { FileUtilities.zip(sources.get, zipPath, false, log) } + def incrementVersionNumber() + { + projectVersion.get match + { + case Some(v: BasicVersion) => + { + val newVersion = incrementImpl(v) + log.info("Changing version to " + newVersion) + projectVersion() = newVersion + } + case a => () + } + } + protected def incrementImpl(v: BasicVersion): Version = v.incrementMicro + protected def testTasks(frameworks: Iterable[TestFramework], classpath: PathFinder, analysis: CompileAnalysis, options: => Seq[TestOption]) = { + import scala.collection.mutable.HashSet + + val testFilters = new ListBuffer[String => 
Boolean] + val excludeTestsSet = new HashSet[String] + val setup, cleanup = new ListBuffer[() => Option[String]] + val testListeners = new ListBuffer[TestReportListener] + + options.foreach { + case TestFilter(include) => testFilters += include + case ExcludeTests(exclude) => excludeTestsSet ++= exclude + case TestListeners(listeners) => testListeners ++= listeners + case TestSetup(setupFunction) => setup += setupFunction + case TestCleanup(cleanupFunction) => cleanup += cleanupFunction + } + + if(excludeTestsSet.size > 0 && log.atLevel(Level.Debug)) + { + log.debug("Excluding tests: ") + excludeTestsSet.foreach(test => log.debug("\t" + test)) + } + def includeTest(test: TestDefinition) = !excludeTestsSet.contains(test.testClassName) && testFilters.forall(filter => filter(test.testClassName)) + val tests = HashSet.empty[TestDefinition] ++ analysis.allTests.filter(includeTest) + TestFramework.testTasks(frameworks, classpath.get, tests, log, testListeners.readOnly, false, setup.readOnly, cleanup.readOnly) + } + private def flatten[T](i: Iterable[Iterable[T]]) = i.flatMap(x => x) + + protected def testQuickMethod(testAnalysis: CompileAnalysis, options: => Seq[TestOption])(toRun: Seq[TestOption] => Task) = + task { tests => + val (exactFilters, testFilters) = tests.toList.map(GlobFilter.apply).partition(_.isInstanceOf[ExactFilter]) + val includeTests = exactFilters.map(_.asInstanceOf[ExactFilter].matchName) + val toCheck = scala.collection.mutable.HashSet(includeTests: _*) + toCheck --= testAnalysis.allTests.map(_.testClassName) + if(!toCheck.isEmpty && log.atLevel(Level.Warn)) + { + log.warn("Test(s) not found:") + toCheck.foreach(test => log.warn("\t" + test)) + } + val includeTestsSet = scala.collection.mutable.HashSet(includeTests: _*) + val newOptions = + if(includeTests.isEmpty && testFilters.isEmpty) options + else TestFilter(test => includeTestsSet.contains(test) || testFilters.exists(_.accept(test))) :: options.toList + toRun(newOptions) + } completeWith 
testAnalysis.allTests.map(_.testClassName).toList + + protected final def maximumErrors[T <: ActionOption](options: Seq[T]) = + (for( MaxCompileErrors(maxErrors) <- options) yield maxErrors).firstOption.getOrElse(DefaultMaximumCompileErrors) +} +trait WebScalaProject extends ScalaProject +{ + @deprecated protected def prepareWebappTask(webappContents: PathFinder, warPath: => Path, classpath: PathFinder, extraJars: => Iterable[File]): Task = + prepareWebappTask(webappContents, warPath, classpath, Path.lazyPathFinder(extraJars.map(Path.fromFile))) + protected def prepareWebappTask(webappContents: PathFinder, warPath: => Path, classpath: PathFinder, extraJars: PathFinder): Task = + task + { + val webInfPath = warPath / "WEB-INF" + val webLibDirectory = webInfPath / "lib" + val classesTargetDirectory = webInfPath / "classes" + + val (libs, directories) = classpath.get.toList.partition(ClasspathUtilities.isArchive) + val classesAndResources = descendents(Path.lazyPathFinder(directories) ##, "*") + if(log.atLevel(Level.Debug)) + directories.foreach(d => log.debug(" Copying the contents of directory " + d + " to " + classesTargetDirectory)) + + import FileUtilities.{copy, copyFlat, copyFilesFlat, clean} + (copy(webappContents.get, warPath, log).right flatMap { copiedWebapp => + copy(classesAndResources.get, classesTargetDirectory, log).right flatMap { copiedClasses => + copyFlat(libs, webLibDirectory, log).right flatMap { copiedLibs => + copyFilesFlat(extraJars.get.map(_.asFile), webLibDirectory, log).right flatMap { copiedExtraLibs => + { + val toRemove = scala.collection.mutable.HashSet((warPath ** "*").get.toSeq : _*) + toRemove --= copiedWebapp + toRemove --= copiedClasses + toRemove --= copiedLibs + toRemove --= copiedExtraLibs + val (directories, files) = toRemove.toList.partition(_.isDirectory) + if(log.atLevel(Level.Debug)) + files.foreach(r => log.debug("Pruning file " + r)) + val result = + clean(files, true, log) orElse + { + val emptyDirectories = 
directories.filter(directory => directory.asFile.listFiles.isEmpty) + if(log.atLevel(Level.Debug)) + emptyDirectories.foreach(r => log.debug("Pruning directory " + r)) + clean(emptyDirectories, true, log) + } + result.toLeft(()) + } + }}}}).left.toOption + } + def jettyRunTask(warPath: => Path, defaultContextPath: => String, port: Int, classpath: PathFinder, classpathName: String, scanDirectories: Seq[File], scanInterval: Int): Task = + task { JettyRun(classpath.get, classpathName, warPath, defaultContextPath, port, scanDirectories, scanInterval, log) } + def jettyRunTask(warPath: => Path, defaultContextPath: => String, classpath: PathFinder, classpathName: String, scanDirectories: Seq[File], scanInterval: Int): Task = + jettyRunTask(warPath, defaultContextPath, JettyRun.DefaultPort, classpath, classpathName, scanDirectories, scanInterval) + def jettyRunTask(warPath: => Path, defaultContextPath: => String, classpath: PathFinder, classpathName: String, + jettyConfigurationXML: scala.xml.NodeSeq, jettyConfigurationFiles: Seq[File]): Task = + task { JettyRun(classpath.get, classpathName, warPath, defaultContextPath, jettyConfigurationXML, jettyConfigurationFiles, log) } + def jettyStopTask = task { JettyRun.stop(); None } +} +object ScalaProject +{ + val DefaultMaximumCompileErrors = 100 + val AnalysisDirectoryName = "analysis" + val MainClassKey = "Main-Class" + val TestResourcesProperty = "sbt.test.resources" + def optionsAsString(options: Seq[ScalaProject#CompileOption]) = options.map(_.asString).filter(!_.isEmpty) + def javaOptionsAsString(options: Seq[ScalaProject#JavaCompileOption]) = options.map(_.asString) +} diff --git a/src/main/scala/sbt/ScalaVersion.scala b/src/main/scala/sbt/ScalaVersion.scala new file mode 100644 index 000000000..6366359d2 --- /dev/null +++ b/src/main/scala/sbt/ScalaVersion.scala @@ -0,0 +1,47 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +/** Provides access to the current version of Scala being used 
to build a project. These methods typically +* return None or the empty string when the loader is not used. */ +object ScalaVersion +{ + /** The name of the system property containing the Scala version used for this project.*/ + private[sbt] val LiveKey = "sbt.scala.version" + private[sbt] def crossString(v: String) = "scala_" + v + /** Returns the current version of Scala being used to build the project, unless the sbt loader is not being used, + * in which case this is the empty string.*/ + def currentString = + { + val v = System.getProperty(LiveKey) + if(v == null) + "" + else + v.trim + } + /** Returns the current version of Scala being used to build the project. If the sbt loader is not being + * used, this returns None. Otherwise, the value returned by this method is fixed for the duration of + * a Project's existence. It only changes on reboot (during which a Project is recreated).*/ + val current: Option[String] = + { + val sv = currentString + if(sv.isEmpty) + None + else + Some(sv) + } + private[sbt] def withCross[T](crossDisabled: Boolean)(withVersion: String => T, disabled: => T): T = + { + if(crossDisabled) + disabled + else + { + current match + { + case Some(scalaV) => withVersion(scalaV) + case _ => disabled + } + } + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/SourceModificationWatch.scala b/src/main/scala/sbt/SourceModificationWatch.scala new file mode 100644 index 000000000..9e737f70c --- /dev/null +++ b/src/main/scala/sbt/SourceModificationWatch.scala @@ -0,0 +1,32 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mikko Peltonen, Mark Harrah + */ +package sbt + +object SourceModificationWatch +{ + def watchUntil(project: Project, pollDelaySec: Int)(terminationCondition: => Boolean)(onSourcesModified: => Unit) + { + def sourceFiles: Iterable[java.io.File] = + sourcesFinder.get.map(Path.relativize(project.info.projectPath, _)).filter(_.isDefined).map(_.get.asFile) + def sourcesFinder: PathFinder = (Path.emptyPathFinder /: 
project.topologicalSort)(_ +++ _.watchPaths) + def loop(lastCallbackCallTime: Long, previousFileCount: Int) + { + val (lastModifiedTime, fileCount) = sourceFiles.foldLeft((0L, 0)){(acc, file) => (Math.max(acc._1, file.lastModified), acc._2 + 1)} + val newCallbackCallTime = + // check if sources are modified + if (lastModifiedTime > lastCallbackCallTime || previousFileCount != fileCount) + { + val now = System.currentTimeMillis + onSourcesModified + now + } + else + lastCallbackCallTime + Thread.sleep(pollDelaySec * 1000) + if(!terminationCondition) + loop(newCallbackCallTime, fileCount) + } + loop(0L, 0) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/TaskManager.scala b/src/main/scala/sbt/TaskManager.scala new file mode 100644 index 000000000..23678c6d2 --- /dev/null +++ b/src/main/scala/sbt/TaskManager.scala @@ -0,0 +1,88 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 David MacIver, Mark Harrah + */ +package sbt + +trait Described extends NotNull +{ + def description: Option[String] +} +trait TaskManager{ + type ManagerType >: this.type <: TaskManager + type ManagedTask >: Task <: TaskManager#Task with Dag[ManagedTask] + /** Creates a task that executes the given action when invoked.*/ + def task(action : => Option[String]) = new Task(None, Nil, false, action) + /** An interactive task is one that is not executed across all dependent projects when + * it is called directly. The dependencies of the task are still invoked across all dependent + * projects, however. */ + def interactiveTask(action: => Option[String]) = new Task(None, Nil, true, action) + /** Creates a method task that executes the given action when invoked. */ + def task(action: Array[String] => ManagedTask) = new MethodTask(None, action, Nil) + + def taskName(t: Task): String + + /** A method task is an action that has parameters. Note that it is not a Task, though, + * because it requires arguments to perform its work. 
It therefore cannot be a dependency of + * a Task..*/ + final class MethodTask(val description: Option[String], action: Array[String] => ManagedTask, getCompletions: => Seq[String]) extends Described + { + /** Creates a new method task, identical to this method task, except with thE[String]e given description.*/ + def describedAs(description : String) = new MethodTask(Some(description), action, getCompletions) + /** Invokes this method task with the given arguments.*/ + def apply(arguments: Array[String]) = action(arguments) + def manager: ManagerType = TaskManager.this + def completeWith(add: => Seq[String]) = new MethodTask(description, action, add) + def completions = getCompletions + } + + sealed class Task(val explicitName: Option[String], val description : Option[String], val dependencies : List[ManagedTask], + val interactive: Boolean, action : => Option[String]) extends Dag[ManagedTask] with Described + { + def this(description : Option[String], dependencies : List[ManagedTask], interactive: Boolean, action : => Option[String]) = + this(None, description, dependencies, interactive, action) + checkTaskDependencies(dependencies) + def manager: ManagerType = TaskManager.this + def name = explicitName.getOrElse(taskName(this)) + def named(name: String) = construct(Some(name), description,dependencies, interactive, action) + override def toString = "Task " + name + + /** Creates a new task, identical to this task, except with the additional dependencies specified.*/ + def dependsOn(tasks : ManagedTask*) = setDependencies(tasks.toList ::: dependencies) + private[sbt] def setDependencies(dependencyList: List[ManagedTask]) = + { + checkTaskDependencies(dependencyList) + construct(explicitName, description, dependencyList, interactive, action) + } + /** Creates a new task, identical to this task, except with the given description.*/ + def describedAs(description : String) = construct(explicitName, Some(description), dependencies, interactive, action); + private[sbt] 
def invoke = action; + + final def setInteractive = construct(explicitName, description, dependencies, true, action) + final def run = runSequentially(topologicalSort) + final def runDependenciesOnly = runSequentially(topologicalSort.dropRight(1)) + private def runSequentially(tasks: List[ManagedTask]) = Control.lazyFold(tasks)(_.invoke) + + def &&(that : Task) = + construct(explicitName, None, dependencies ::: that.dependencies, interactive || that.interactive, this.invoke.orElse(that.invoke)) + + protected def construct(explicitName: Option[String], description: Option[String], dependencies: List[ManagedTask], interactive: Boolean, + action : => Option[String]): Task = new Task(explicitName, description, dependencies, interactive, action) + } + final class CompoundTask private (explicitName: Option[String], description : Option[String], dependencies : List[ManagedTask], interactive: Boolean, + action : => Option[String], createWork: => SubWork[Project#Task]) extends Task(description, dependencies, interactive, action) + with CompoundWork[Project#Task] + { + def this(createWork: => SubWork[Project#Task]) = this(None, None, Nil, false, None, createWork) + override protected def construct(explicitName: Option[String], description: Option[String], dependencies: List[ManagedTask], + interactive: Boolean, action : => Option[String]) = new CompoundTask(explicitName, description, dependencies, interactive, action, createWork) + def work = createWork + } + + private def checkTaskDependencies(dependencyList: List[ManagedTask]) + { + val nullDependencyIndex = dependencyList.findIndexOf(_ == null) + require(nullDependencyIndex < 0, "Dependency (at index " + nullDependencyIndex + ") is null. This may be an initialization issue or a circular dependency.") + val interactiveDependencyIndex = dependencyList.findIndexOf(_.interactive) + require(interactiveDependencyIndex < 0, "Dependency (at index " + interactiveDependencyIndex + ") is interactive. 
Interactive tasks cannot be dependencies.") + } +} diff --git a/src/main/scala/sbt/TestFramework.scala b/src/main/scala/sbt/TestFramework.scala new file mode 100644 index 000000000..7e6a59fb4 --- /dev/null +++ b/src/main/scala/sbt/TestFramework.scala @@ -0,0 +1,221 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Steven Blundy, Mark Harrah + */ +package sbt + +object Result extends Enumeration +{ + val Error, Passed, Failed = Value +} +object ClassType extends Enumeration +{ + val Module, Class = Value +} + +trait TestFramework extends NotNull +{ + def name: String + def testSuperClassName: String + def testSubClassType: ClassType.Value + + def testRunner(classLoader: ClassLoader, listeners: Iterable[TestReportListener], log: Logger): TestRunner +} +trait TestRunner extends NotNull +{ + def run(testClassName: String): Result.Value +} + +abstract class BasicTestRunner extends TestRunner +{ + protected def log: Logger + protected def listeners: Seq[TestReportListener] + + final def run(testClass: String): Result.Value = + { + safeListenersCall(_.startGroup(testClass)) + try + { + val result = runTest(testClass) + safeListenersCall(_.endGroup(testClass, result)) + result + } + catch + { + case e => + { + safeListenersCall(_.endGroup(testClass, e)) + Result.Error + } + } + } + def runTest(testClass: String): Result.Value + + protected def fire(event: TestEvent) = safeListenersCall(_.testEvent(event)) + protected def safeListenersCall(call: (TestReportListener) => Unit) = TestFramework.safeForeach(listeners, log)(call) +} + +final class NamedTestTask(val name: String, action: => Option[String]) extends NotNull { def run() = action } +object TestFramework +{ + def runTests(frameworks: Iterable[TestFramework], classpath: Iterable[Path], tests: Iterable[TestDefinition], log: Logger, + listeners: Iterable[TestReportListener]) = + { + val (start, runTests, end) = testTasks(frameworks, classpath, tests, log, listeners, true, Nil, Nil) + def run(tasks: 
Iterable[NamedTestTask]) = tasks.foreach(_.run()) + run(start) + run(runTests) + run(end) + } + + private val ScalaCompilerJarPackages = "scala.tools.nsc." :: "jline." :: "ch.epfl.lamp." :: Nil + + private val TestStartName = "test-start" + private val TestFinishName = "test-finish" + + private[sbt] def safeForeach[T](it: Iterable[T], log: Logger)(f: T => Unit): Unit = it.foreach(i => Control.trapAndLog(log){ f(i) } ) + import scala.collection.{Map, Set} + def testTasks(frameworks: Iterable[TestFramework], classpath: Iterable[Path], tests: Iterable[TestDefinition], log: Logger, + listeners: Iterable[TestReportListener], endErrorsEnabled: Boolean, setup: Iterable[() => Option[String]], + cleanup: Iterable[() => Option[String]]): (Iterable[NamedTestTask], Iterable[NamedTestTask], Iterable[NamedTestTask]) = + { + val mappedTests = testMap(frameworks, tests) + if(mappedTests.isEmpty) + (new NamedTestTask(TestStartName, None) :: Nil, Nil, new NamedTestTask(TestFinishName, { log.info("No tests to run."); None }) :: Nil ) + else + createTestTasks(classpath, mappedTests, log, listeners, endErrorsEnabled, setup, cleanup) + } + private def testMap(frameworks: Iterable[TestFramework], tests: Iterable[TestDefinition]): Map[TestFramework, Set[String]] = + { + import scala.collection.mutable.{HashMap, HashSet, Set} + val map = new HashMap[TestFramework, Set[String]] + if(!frameworks.isEmpty) + { + for(test <- tests) + { + def isTestForFramework(framework: TestFramework) = + (framework.testSubClassType == ClassType.Module) == test.isModule && + framework.testSuperClassName == test.superClassName + + for(framework <- frameworks.find(isTestForFramework)) + map.getOrElseUpdate(framework, new HashSet[String]) += test.testClassName + } + } + wrap.Wrappers.readOnly(map) + } + private def createTasks(work: Iterable[() => Option[String]], baseName: String) = + work.toList.zipWithIndex.map{ case (work, index) => new NamedTestTask(baseName + " " + (index+1), work()) } + + private def 
createTestTasks(classpath: Iterable[Path], tests: Map[TestFramework, Set[String]], log: Logger, + listeners: Iterable[TestReportListener], endErrorsEnabled: Boolean, setup: Iterable[() => Option[String]], + cleanup: Iterable[() => Option[String]]) = + { + val filterCompilerLoader = new FilteredLoader(getClass.getClassLoader, ScalaCompilerJarPackages) + val loader: ClassLoader = new IntermediateLoader(classpath.map(_.asURL).toSeq.toArray, filterCompilerLoader) + val testsListeners = listeners.filter(_.isInstanceOf[TestsListener]).map(_.asInstanceOf[TestsListener]) + def foreachListenerSafe(f: TestsListener => Unit): Unit = safeForeach(testsListeners, log)(f) + + import Result.{Error,Passed,Failed} + object result + { + private[this] var value: Result.Value = Passed + def apply() = synchronized { value } + def update(v: Result.Value): Unit = synchronized { if(value != Error) value = v } + } + val startTask = new NamedTestTask(TestStartName, {foreachListenerSafe(_.doInit); None}) :: createTasks(setup, "Test setup") + val testTasks = + tests flatMap { case (framework, testClassNames) => + + val runner = framework.testRunner(loader, listeners, log) + for(testClassName <- testClassNames) yield + { + def runTest() = + { + val oldLoader = Thread.currentThread.getContextClassLoader + Thread.currentThread.setContextClassLoader(loader) + try { + runner.run(testClassName) match + { + case Error => result() = Error; Some("ERROR occurred during testing.") + case Failed => result() = Failed; Some("Test FAILED") + case _ => None + } + } + finally { + Thread.currentThread.setContextClassLoader(oldLoader) + } + } + new NamedTestTask(testClassName, runTest()) + } + } + def end() = + { + foreachListenerSafe(_.doComplete(result())) + result() match + { + case Error => if(endErrorsEnabled) Some("ERROR occurred during testing.") else None + case Failed => if(endErrorsEnabled) Some("One or more tests FAILED.") else None + case Passed => + { + log.info(" ") + log.info("All tests PASSED.") 
+ None + } + } + } + val endTask = new NamedTestTask(TestFinishName, end() ) :: createTasks(cleanup, "Test cleanup") + (startTask, testTasks, endTask) + } +} + +abstract class LazyTestFramework extends TestFramework +{ + /** The class name of the the test runner that executes + * tests for this framework.*/ + protected def testRunnerClassName: String + + /** Creates an instance of the runner given by 'testRunnerClassName'.*/ + final def testRunner(projectLoader: ClassLoader, listeners: Iterable[TestReportListener], log: Logger): TestRunner = + { + val runnerClassName = testRunnerClassName + val frameworkClasspath = FileUtilities.classLocation(getClass) + val sbtURL = FileUtilities.sbtJar.toURI.toURL + val lazyLoader = new LazyFrameworkLoader(runnerClassName, Array(frameworkClasspath, sbtURL), projectLoader, getClass.getClassLoader) + val runnerClass = Class.forName(runnerClassName, true, lazyLoader).asSubclass(classOf[TestRunner]) + + runnerClass.getConstructor(classOf[Logger], classOf[Seq[TestReportListener]], classOf[ClassLoader]).newInstance(log, listeners, projectLoader) + } +} + +/** The test framework definition for ScalaTest.*/ +object ScalaTestFramework extends LazyTestFramework +{ + val name = "ScalaTest" + val SuiteClassName = "org.scalatest.Suite" + + def testSuperClassName = SuiteClassName + def testSubClassType = ClassType.Class + + def testRunnerClassName = "sbt.impl.ScalaTestRunner" +} +/** The test framework definition for ScalaCheck.*/ +object ScalaCheckFramework extends LazyTestFramework +{ + val name = "ScalaCheck" + val PropertiesClassName = "org.scalacheck.Properties" + + def testSuperClassName = PropertiesClassName + def testSubClassType = ClassType.Module + + def testRunnerClassName = "sbt.impl.ScalaCheckRunner" +} +/** The test framework definition for specs.*/ +object SpecsFramework extends LazyTestFramework +{ + val name = "specs" + val SpecificationClassName = "org.specs.Specification" + + def testSuperClassName = SpecificationClassName + 
def testSubClassType = ClassType.Module + + def testRunnerClassName = "sbt.impl.SpecsRunner" +} \ No newline at end of file diff --git a/src/main/scala/sbt/TestReportListener.scala b/src/main/scala/sbt/TestReportListener.scala new file mode 100644 index 000000000..09e2a97d0 --- /dev/null +++ b/src/main/scala/sbt/TestReportListener.scala @@ -0,0 +1,316 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Steven Blundy, Mark Harrah + */ + +package sbt + +trait TestReportListener +{ + /** called for each class or equivalent grouping */ + def startGroup(name: String) + /** called for each test method or equivalent */ + def testEvent(event: TestEvent) + /** called if there was an error during test */ + def endGroup(name: String, t: Throwable) + /** called if test completed */ + def endGroup(name: String, result: Result.Value) +} + +trait TestsListener extends TestReportListener +{ + /** called once, at beginning. */ + def doInit + /** called once, at end. */ + def doComplete(finalResult: Result.Value) + /** called once, at end, if the test framework throws an exception. 
*/ + @deprecated def doComplete(t: Throwable) +} + +abstract class WriterReportListener(val log: Logger) extends TestsListener +{ + import java.io.{IOException, PrintWriter, Writer} + import scala.collection.mutable.{Buffer, ListBuffer} + + protected case class Summary(count: Int, failures: Int, errors: Int, skipped: Int, message: Option[String], cause: Option[Throwable]) extends NotNull + private var out: Option[PrintWriter] = None + private var groupCount: Int = 0 + private var groupFailures: Int = 0 + private var groupErrors: Int = 0 + private var groupSkipped: Int = 0 + private var groupMessages: Seq[String] = Nil + + protected val passedEventHandler: TestEvent => Summary = (event: TestEvent) => event match + { + case SpecificationReportEvent(successes, failures, errors, skipped, desc, systems, subSpecs) => Summary(successes, failures, errors, skipped, None, None) + case IgnoredEvent(name, Some(message)) => Summary(1, 0, 0, 1, Some(message), None) + case IgnoredEvent(name, None) => Summary(1, 0, 0, 1, None, None) + case _ => Summary(1, 0, 0, 0, None, None) + } + protected val failedEventHandler: TestEvent => Summary = (event: TestEvent) => event match + { + case FailedEvent(name, msg) => Summary(1, 1, 0, 0, Some("! 
" + name + ": " + msg), None) + case TypedErrorEvent(name, event, Some(msg), cause) => Summary(1, 1, 0, 0, Some(event + " - " + name + ": " + msg), cause) + case TypedErrorEvent(name, event, None, cause) => Summary(1, 1, 0, 0, Some(event + " - " + name), cause) + case ErrorEvent(msg) => Summary(1, 1, 0, 0, Some(msg), None) + case SpecificationReportEvent(successes, failures, errors, skipped, desc, systems, subSpecs) => Summary(successes + failures + errors + skipped, failures, errors, skipped, Some(desc), None) + case _ => {log.warn("Unrecognized failure: " + event); Summary(1, 1, 0, 0, None, None)} + } + protected val errorEventHandler: TestEvent => Summary = (event: TestEvent) => event match + { + case FailedEvent(name, msg) => Summary(1, 0, 1, 0, Some("! " + name + ": " + msg), None) + case TypedErrorEvent(name, event, Some(msg), cause) => Summary(1, 0, 1, 0, Some(event + " - " + name + ": " + msg), cause) + case TypedErrorEvent(name, event, None, cause) => Summary(1, 0, 1, 0, Some(event + " - " + name), cause) + case ErrorEvent(msg) => Summary(1, 0, 1, 0, Some(msg), None) + case SpecificationReportEvent(successes, failures, errors, skipped, desc, systems, subSpecs) => Summary(successes + failures + errors + skipped, failures, errors, skipped, Some(desc), None) + case _ => {log.warn("Unrecognized error: " + event); Summary(1, 0, 1, 0, None, None)} + } + protected def open: Writer + protected def close = + { + onOut(_.close()) + out = None + } + def doInit = Control.trapAndLog(log){ out = Some(new PrintWriter(open)) } + def doComplete(finalResult: Result.Value) = + { + finalResult match + { + case Result.Error => println("Error during Tests") + case Result.Passed => println("All Tests Passed") + case Result.Failed => println("Tests Failed") + } + close + } + def doComplete(t: Throwable) = + { + println("Exception in Test Framework") + onOut(t.printStackTrace(_)) + close + } + def startGroup(name: String) = + { + groupCount = 0 + groupFailures = 0 + groupErrors = 
0 + groupSkipped = 0 + groupMessages = Nil + } + def testEvent(event: TestEvent) = event.result match + { + case Some(result) => + { + val Summary(count, failures, errors, skipped, msg, cause) = result match + { + case Result.Passed => passedEventHandler(event) + case Result.Failed => failedEventHandler(event) + case Result.Error => errorEventHandler(event) + } + groupCount += count + groupFailures += failures + groupErrors += errors + groupSkipped += skipped + groupMessages ++= msg.toList + } + case None => {} + } + def endGroup(name: String, t: Throwable) = + { + groupMessages = Nil + println("Exception in " + name) + onOut(t.printStackTrace(_)) + } + def endGroup(name: String, result: Result.Value) = + { + result match + { + case Result.Error => println("Error: " + name + " - Count " + groupCount + ", Failed " + groupFailures + ", Errors " + groupErrors) + case Result.Passed => println("Passed: " + name + " - Count " + groupCount + ", Failed " + groupFailures + ", Errors " + groupErrors) + case Result.Failed => println("Failed: " + name + " - Count " + groupCount + ", Failed " + groupFailures + ", Errors " + groupErrors) + } + if(!groupMessages.isEmpty) + { + groupMessages.foreach(println(_)) + groupMessages = Nil + println("") + } + } + protected def onOut(f: PrintWriter => Unit) = Control.trapAndLog(log){ + out match + { + case Some(pw) => f(pw) + case None => log.warn("Method called when output was not open") + } + } + protected def println(s: String) = onOut(_.println(s)) +} + +class FileReportListener(val file: Path, log: Logger) extends WriterReportListener(log) +{ + def open = new java.io.FileWriter(file.asFile) +} + +abstract class TestEvent extends NotNull +{ + def result: Option[Result.Value] +} + +sealed abstract class ScalaCheckEvent extends TestEvent +final case class PassedEvent(name: String, msg: String) extends ScalaCheckEvent { def result = Some(Result.Passed) } +final case class FailedEvent(name: String, msg: String) extends ScalaCheckEvent { 
def result = Some(Result.Failed) } + +sealed abstract class ScalaTestEvent(val result: Option[Result.Value]) extends TestEvent +final case class TypedEvent(name: String, `type`: String, msg: Option[String])(result: Option[Result.Value]) extends ScalaTestEvent(result) +final case class TypedErrorEvent(name: String, `type`: String, msg: Option[String], cause: Option[Throwable])(result: Option[Result.Value]) extends ScalaTestEvent(result) +final case class MessageEvent(msg: String) extends ScalaTestEvent(None) +final case class ErrorEvent(msg: String) extends ScalaTestEvent(None) +final case class IgnoredEvent(name: String, msg: Option[String]) extends ScalaTestEvent(Some(Result.Passed)) + +sealed abstract class SpecsEvent extends TestEvent +final case class SpecificationReportEvent(successes: Int, failures: Int, errors: Int, skipped: Int, pretty: String, systems: Seq[SystemReportEvent], subSpecs: Seq[SpecificationReportEvent]) extends SpecsEvent +{ + def result = if(errors > 0) Some(Result.Error) else if(failures > 0) Some(Result.Failed) else Some(Result.Passed) +} +final case class SystemReportEvent(description: String, verb: String, skippedSus:Option[Throwable], literateDescription: Option[Seq[String]], examples: Seq[ExampleReportEvent]) extends SpecsEvent { def result = None } +final case class ExampleReportEvent(description: String, errors: Seq[Throwable], failures: Seq[RuntimeException], skipped: Seq[RuntimeException], subExamples: Seq[ExampleReportEvent]) extends SpecsEvent { def result = None } + +trait EventOutput[E <: TestEvent] +{ + def output(e: E): Unit +} + +sealed abstract class LazyEventOutput[E <: TestEvent](val log: Logger) extends EventOutput[E] + +class ScalaCheckOutput(log: Logger) extends LazyEventOutput[ScalaCheckEvent](log) +{ + def output(event: ScalaCheckEvent) = event match + { + case PassedEvent(name, msg) => log.info("+ " + name + ": " + msg) + case FailedEvent(name, msg) => log.error("! 
" + name + ": " + msg) + } +} + +class ScalaTestOutput(log: Logger) extends LazyEventOutput[ScalaTestEvent](log) +{ + def output(event: ScalaTestEvent) = event match + { + case TypedEvent(name, event, Some(msg)) => log.info(event + " - " + name + ": " + msg) + case TypedEvent(name, event, None) => log.info(event + " - " + name) + case TypedErrorEvent(name, event, Some(msg), cause) => logError(event + " - " + name + ": " + msg, cause) + case TypedErrorEvent(name, event, None, cause) => logError(event + " - " + name, cause) + case MessageEvent(msg) => log.info(msg) + case ErrorEvent(msg) => logError(msg, None) + case IgnoredEvent(name, Some(msg)) => log.info("Test ignored - " + name + ": " + msg) + case IgnoredEvent(name, None) => log.info("Test ignored - " + name) + } + private def logError(message: String, cause: Option[Throwable]) + { + cause.foreach(x => log.trace(x)) + log.error(message) + } +} + +class SpecsOutput(val log: Logger) extends EventOutput[SpecsEvent] +{ + private val Indent = " " + + def output(event: SpecsEvent) = event match + { + case sre: SpecificationReportEvent => reportSpecification(sre, "") + case sre: SystemReportEvent => reportSystem(sre, "") + case ere: ExampleReportEvent => reportExample(ere, "") + } + + /* The following is closely based on org.specs.runner.OutputReporter, + * part of specs, which is Copyright 2007-2008 Eric Torreborre. 
+ * */ + + private def reportSpecification(specification: SpecificationReportEvent, padding: String) + { + val newIndent = padding + Indent + reportSpecifications(specification.subSpecs, newIndent) + reportSystems(specification.systems, newIndent) + } + private def reportSpecifications(specifications: Iterable[SpecificationReportEvent], padding: String) + { + for(specification <- specifications) + reportSpecification(specification, padding) + } + private def reportSystems(systems: Iterable[SystemReportEvent], padding: String) + { + for(system <- systems) + reportSystem(system, padding) + } + private def reportSystem(sus: SystemReportEvent, padding: String) + { + log.info(padding + sus.description + " " + sus.verb + sus.skippedSus.map(" (skipped: " + _.getMessage + ")").getOrElse("")) + for(description <- sus.literateDescription) + log.info(padding + description.mkString) + reportExamples(sus.examples, padding) + log.info(" ") + } + private def reportExamples(examples: Iterable[ExampleReportEvent], padding: String) + { + for(example <- examples) + { + reportExample(example, padding) + reportExamples(example.subExamples, padding + Indent) + } + } + private def status(example: ExampleReportEvent) = + { + if (example.errors.size + example.failures.size > 0) + "x " + else if (example.skipped.size > 0) + "o " + else + "+ " + } + private def reportExample(example: ExampleReportEvent, padding: String) + { + log.info(padding + status(example) + example.description) + for(skip <- example.skipped) + { + log.trace(skip) + log.warn(padding + skip.toString) + } + for(e <- example.failures ++ example.errors) + { + log.trace(e) + log.error(padding + e.toString) + } + } +} + +class LogTestReportListener(val log: Logger) extends TestReportListener +{ + lazy val scalaCheckOutput: EventOutput[ScalaCheckEvent] = createScalaCheckOutput + lazy val scalaTestOutput: EventOutput[ScalaTestEvent] = createScalaTestOutput + lazy val specsOutput: EventOutput[SpecsEvent] = createSpecsOutput + + 
protected def createScalaCheckOutput = new ScalaCheckOutput(log) + protected def createScalaTestOutput = new ScalaTestOutput(log) + protected def createSpecsOutput = new SpecsOutput(log) + + def startGroup(name: String) {} + def testEvent(event: TestEvent) + { + log.debug("in testEvent:" + event) + event match + { + case sce: ScalaCheckEvent => scalaCheckOutput.output(sce) + case ste: ScalaTestEvent => scalaTestOutput.output(ste) + case se: SpecsEvent => specsOutput.output(se) + case e => handleOtherTestEvent(e) + } + } + protected def handleOtherTestEvent(event: TestEvent) {} + def endGroup(name: String, t: Throwable) + { + log.error("Could not run test " + name + ": " + t.toString) + log.trace(t) + } + def endGroup(name: String, result: Result.Value) + { + log.debug("in endGroup:" + result) + } +} diff --git a/src/main/scala/sbt/TrapExit.scala b/src/main/scala/sbt/TrapExit.scala new file mode 100644 index 000000000..c66b8f208 --- /dev/null +++ b/src/main/scala/sbt/TrapExit.scala @@ -0,0 +1,238 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + * + * Partially based on exit trapping in Nailgun by Pete Kirkham, + * copyright 2004, Martian Software, Inc + * licensed under Apache 2.0 License. + */ +package sbt + +import scala.collection.Set +import scala.reflect.Manifest + +/** This provides functionality to catch System.exit calls to prevent the JVM from terminating. +* This is useful for executing user code that may call System.exit, but actually exiting is +* undesirable. This file handles the call to exit by disposing all top-level windows and interrupting +* all user started threads. It does not stop the threads and does not call shutdown hooks. It is +* therefore inappropriate to use this with code that requires shutdown hooks or creates threads that +* do not terminate. This category of code should only be called by forking the JVM. 
*/ +object TrapExit +{ + /** Executes the given thunk in a context where System.exit(code) throws + * a custom SecurityException, which is then caught and the exit code returned. + * Otherwise, 0 is returned. No other exceptions are handled by this method.*/ + def apply(execute: => Unit, log: Logger): Int = + { + log.debug("Starting sandboxed run...") + + /** Take a snapshot of the threads that existed before execution in order to determine + * the threads that were created by 'execute'.*/ + val originalThreads = allThreads + val code = new ExitCode + val customThreadGroup = new ExitThreadGroup(new ExitHandler(Thread.getDefaultUncaughtExceptionHandler, originalThreads, code)) + val executionThread = new Thread(customThreadGroup, "run-main") { override def run() { execute } } + + val originalSecurityManager = System.getSecurityManager + try + { + val newSecurityManager = new TrapExitSecurityManager(originalSecurityManager, customThreadGroup) + System.setSecurityManager(newSecurityManager) + + executionThread.start() + + log.debug("Waiting for threads to exit or System.exit to be called.") + waitForExit(originalThreads, log) + log.debug("Interrupting remaining threads (should be all daemons).") + interruptAll(originalThreads) // should only be daemon threads left now + log.debug("Sandboxed run complete..") + code.value.getOrElse(0) + } + finally { System.setSecurityManager(originalSecurityManager) } + } + // wait for all non-daemon threads to terminate + private def waitForExit(originalThreads: Set[Thread], log: Logger) + { + var daemonsOnly = true + processThreads(originalThreads, thread => + if(!thread.isDaemon) + { + daemonsOnly = false + waitOnThread(thread, log) + } + ) + if(!daemonsOnly) + waitForExit(originalThreads, log) + } + /** Waits for the given thread to exit. 
*/ + private def waitOnThread(thread: Thread, log: Logger) + { + log.debug("Waiting for thread " + thread.getName + " to exit") + thread.join + log.debug("\tThread " + thread.getName + " exited.") + } + /** Returns the exit code of the System.exit that caused the given Exception, or rethrows the exception + * if its cause was not calling System.exit.*/ + private def exitCode(e: Throwable) = + withCause[TrapExitSecurityException, Int](e) + {exited => exited.exitCode} + {other => throw other} + /** Recurses into the causes of the given exception looking for a cause of type CauseType. If one is found, `withType` is called with that cause. + * If not, `notType` is called with the root cause.*/ + private def withCause[CauseType <: Throwable, T](e: Throwable)(withType: CauseType => T)(notType: Throwable => T)(implicit mf: Manifest[CauseType]): T = + { + val clazz = mf.erasure + if(clazz.isInstance(e)) + withType(e.asInstanceOf[CauseType]) + else + { + val cause = e.getCause + if(cause == null) + notType(e) + else + withCause(cause)(withType)(notType)(mf) + } + } + + /** Returns all threads that are not in the 'system' thread group and are not the AWT implementation + * thread (AWT-XAWT, AWT-Windows, ...)*/ + private def allThreads: Set[Thread] = + { + val allThreads = wrap.Wrappers.toList(Thread.getAllStackTraces.keySet) + val threads = new scala.collection.mutable.HashSet[Thread] + for(thread <- allThreads if !isSystemThread(thread)) + threads += thread + threads + } + /** Returns true if the given thread is in the 'system' thread group and is an AWT thread other than + * AWT-EventQueue or AWT-Shutdown.*/ + private def isSystemThread(t: Thread) = + { + val name = t.getName + if(name.startsWith("AWT-")) + !(name.startsWith("AWT-EventQueue") || name.startsWith("AWT-Shutdown")) + else + { + val group = t.getThreadGroup + (group != null) && (group.getName == "system") + } + } + /** Calls the provided function for each thread in the system as provided by the + * allThreads 
function except those in ignoreThreads.*/ + private def processThreads(ignoreThreads: Set[Thread], process: Thread => Unit) + { + allThreads.filter(thread => !ignoreThreads.contains(thread)).foreach(process) + } + /** Handles System.exit by disposing all frames and calling interrupt on all user threads */ + private def stopAll(originalThreads: Set[Thread]) + { + disposeAllFrames() + interruptAll(originalThreads) + } + private def disposeAllFrames() + { + val allFrames = java.awt.Frame.getFrames + if(allFrames.length > 0) + { + allFrames.foreach(_.dispose) // dispose all top-level windows, which will cause the AWT-EventQueue-* threads to exit + Thread.sleep(2000) // AWT Thread doesn't exit immediately, so wait to interrupt it + } + } + // interrupt all threads that appear to have been started by the user + private def interruptAll(originalThreads: Set[Thread]): Unit = + processThreads(originalThreads, safeInterrupt) + // interrupts the given thread, but first replaces the exception handler so that the InterruptedException is not printed + private def safeInterrupt(thread: Thread) + { + if(!thread.getName.startsWith("AWT-")) + { + thread.setUncaughtExceptionHandler(new TrapInterrupt(thread.getUncaughtExceptionHandler)) + thread.interrupt + } + } + // an uncaught exception handler that swallows InterruptedExceptions and otherwise defers to originalHandler + private final class TrapInterrupt(originalHandler: Thread.UncaughtExceptionHandler) extends Thread.UncaughtExceptionHandler + { + def uncaughtException(thread: Thread, e: Throwable) + { + withCause[InterruptedException, Unit](e) + {interrupted => ()} + {other => originalHandler.uncaughtException(thread, e) } + thread.setUncaughtExceptionHandler(originalHandler) + } + } + /** An uncaught exception handler that delegates to the original uncaught exception handler except when + * the cause was a call to System.exit (which generated a SecurityException)*/ + private final class ExitHandler(originalHandler: 
Thread.UncaughtExceptionHandler, originalThreads: Set[Thread], codeHolder: ExitCode) extends Thread.UncaughtExceptionHandler + { + def uncaughtException(t: Thread, e: Throwable) + { + try + { + codeHolder.set(exitCode(e)) // will rethrow e if it was not because of a call to System.exit + stopAll(originalThreads) + } + catch + { + case _ => originalHandler.uncaughtException(t, e) + } + } + } + private final class ExitThreadGroup(handler: Thread.UncaughtExceptionHandler) extends ThreadGroup("trap.exit") + { + override def uncaughtException(t: Thread, e: Throwable) = handler.uncaughtException(t, e) + } +} +private final class ExitCode extends NotNull +{ + private var code: Option[Int] = None + def set(c: Int): Unit = synchronized { code = code orElse Some(c) } + def value: Option[Int] = synchronized { code } +} +/////// These two classes are based on similar classes in Nailgun +/** A custom SecurityManager to disallow System.exit. */ +private final class TrapExitSecurityManager(delegateManager: SecurityManager, group: ThreadGroup) extends SecurityManager +{ + import java.security.Permission + override def checkExit(status: Int) + { + val stack = Thread.currentThread.getStackTrace + if(stack == null || stack.exists(isRealExit)) + throw new TrapExitSecurityException(status) + } + /** This ensures that only actual calls to exit are trapped and not just calls to check if exit is allowed.*/ + private def isRealExit(element: StackTraceElement): Boolean = + element.getClassName == "java.lang.Runtime" && element.getMethodName == "exit" + override def checkPermission(perm: Permission) + { + if(delegateManager != null) + delegateManager.checkPermission(perm) + } + override def checkPermission(perm: Permission, context: AnyRef) + { + if(delegateManager != null) + delegateManager.checkPermission(perm, context) + } + override def getThreadGroup = group +} +/** A custom SecurityException that tries not to be caught.*/ +private final class TrapExitSecurityException(val exitCode: 
Int) extends SecurityException +{ + private var accessAllowed = false + def allowAccess + { + accessAllowed = true + } + override def printStackTrace = ifAccessAllowed(super.printStackTrace) + override def toString = ifAccessAllowed(super.toString) + override def getCause = ifAccessAllowed(super.getCause) + override def getMessage = ifAccessAllowed(super.getMessage) + override def fillInStackTrace = ifAccessAllowed(super.fillInStackTrace) + override def getLocalizedMessage = ifAccessAllowed(super.getLocalizedMessage) + private def ifAccessAllowed[T](f: => T): T = + { + if(accessAllowed) + f + else + throw this + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/Version.scala b/src/main/scala/sbt/Version.scala new file mode 100644 index 000000000..b14e8ae52 --- /dev/null +++ b/src/main/scala/sbt/Version.scala @@ -0,0 +1,65 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +sealed trait Version extends NotNull +case class BasicVersion(major: Int, minor: Option[Int], micro: Option[Int], extra: Option[String]) extends Version +{ + import Version._ + require(major >= 0, "Major revision must be nonnegative.") + require(minor.isDefined || micro.isEmpty, "Cannot define micro revision without defining minor revision.") + requirePositive(minor) + requirePositive(micro) + require(isValidExtra(extra)) + + def incrementMicro = BasicVersion(major, minor orElse Some(0), increment(micro), extra) + def incrementMinor = BasicVersion(major, increment(minor), micro, extra) + def incrementMajor = BasicVersion(major+1, minor, micro, extra) + + override def toString = major + + minor.map(minorI => "." + minorI + micro.map(microI => "." 
+ microI).getOrElse("")).getOrElse("") + + extra.map(x => "-" + x).getOrElse("") +} +case class OpaqueVersion(value: String) extends Version +{ + require(!value.trim.isEmpty) + override def toString = value +} +object Version +{ + private[sbt] def increment(i: Option[Int]) = Some(i.getOrElse(0) + 1) + private[sbt] def requirePositive(i: Option[Int]) { i.foreach(x => require(x >= 0)) } + + import java.util.regex.Pattern + val versionPattern = Pattern.compile("""(\d+)(?:\.(\d+)(?:\.(\d+))?)?(?:-(.+))?""") + def fromString(v: String): Either[String, Version] = + { + val trimmed = v.trim + if(trimmed.isEmpty) + Left("Version cannot be empty.") + else + { + val matcher = versionPattern.matcher(trimmed) + import matcher._ + if(matches) + { + def toOption(index: Int) = + { + val v = group(index) + if(v == null) None else Some(v) + } + def toInt(index: Int) = toOption(index).map(_.toInt) + val extra = toOption(4) + if(isValidExtra(extra)) + Right(BasicVersion(group(1).toInt, toInt(2), toInt(3), extra)) + else + Right(OpaqueVersion(trimmed)) + } + else + Right(OpaqueVersion(trimmed)) + } + } + def isValidExtra(e: Option[String]): Boolean = e.map(isValidExtra).getOrElse(true) + def isValidExtra(s: String): Boolean = !(s.trim.isEmpty || s.exists(java.lang.Character.isISOControl)) +} diff --git a/src/main/scala/sbt/WebApp.scala b/src/main/scala/sbt/WebApp.scala new file mode 100644 index 000000000..392c4ba12 --- /dev/null +++ b/src/main/scala/sbt/WebApp.scala @@ -0,0 +1,245 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +import java.io.File +import java.net.{URL, URLClassLoader} +import scala.xml.NodeSeq + +object JettyRun extends ExitHook +{ + val DefaultPort = 8080 + + ExitHooks.register(this) + + def name = "jetty-shutdown" + def runBeforeExiting() { stop() } + private var running: Option[Stoppable] = None + private def started(s: Stoppable) { running = Some(s) } + def stop() + { + synchronized + { + running.foreach(_.stop()) + running = 
None + } + } + def apply(classpath: Iterable[Path], classpathName: String, war: Path, defaultContextPath: String, jettyConfigurationXML: NodeSeq, + jettyConfigurationFiles: Seq[File], log: Logger): Option[String] = + run(classpathName, new JettyRunConfiguration(war, defaultContextPath, DefaultPort, jettyConfigurationXML, + jettyConfigurationFiles, Nil, 0, toURLs(classpath)), log) + def apply(classpath: Iterable[Path], classpathName: String, war: Path, defaultContextPath: String, port: Int, scanDirectories: Seq[File], + scanPeriod: Int, log: Logger): Option[String] = + run(classpathName, new JettyRunConfiguration(war, defaultContextPath, port, NodeSeq.Empty, Nil, scanDirectories, scanPeriod, toURLs(classpath)), log) + private def toURLs(paths: Iterable[Path]) = paths.map(_.asURL).toSeq + private def run(classpathName: String, configuration: JettyRunConfiguration, log: Logger): Option[String] = + synchronized + { + import configuration._ + def runJetty() = + { + val baseLoader = this.getClass.getClassLoader + val loader: ClassLoader = new SelectiveLoader(classpathURLs.toArray, baseLoader, "org.mortbay." :: "javax.servlet." 
:: Nil) + val lazyLoader = new LazyFrameworkLoader(implClassName, Array(FileUtilities.sbtJar.toURI.toURL), loader, baseLoader) + val runner = ModuleUtilities.getObject(implClassName, lazyLoader).asInstanceOf[JettyRun] + runner(configuration, log) + } + + if(running.isDefined) + Some("Jetty is already running.") + else + { + try + { + started(runJetty()) + None + } + catch + { + case e: NoClassDefFoundError => runError(e, "Jetty and its dependencies must be on the " + classpathName + " classpath: ", log) + case e => runError(e, "Error running Jetty: ", log) + } + } + } + private val implClassName = "sbt.LazyJettyRun" + + private def runError(e: Throwable, messageBase: String, log: Logger) = + { + log.trace(e) + Some(messageBase + e.toString) + } +} + +private trait Stoppable +{ + def stop(): Unit +} +private trait JettyRun +{ + def apply(configuration: JettyRunConfiguration, log: Logger): Stoppable +} +private class JettyRunConfiguration(val war: Path, val defaultContextPath: String, val port: Int, + val jettyConfigurationXML: NodeSeq, val jettyConfigurationFiles: Seq[File], + val scanDirectories: Seq[File], val scanInterval: Int, val classpathURLs: Seq[URL]) extends NotNull + +/* This class starts Jetty. +* NOTE: DO NOT actively use this class. You will see NoClassDefFoundErrors if you fail +* to do so.Only use its name in JettyRun for reflective loading. This allows using +* the Jetty libraries provided on the project classpath instead of requiring them to be +* available on sbt's classpath at startup. 
+*/ +private object LazyJettyRun extends JettyRun +{ + import org.mortbay.jetty.{Handler, Server} + import org.mortbay.jetty.nio.SelectChannelConnector + import org.mortbay.jetty.webapp.WebAppContext + import org.mortbay.log.Log + import org.mortbay.util.Scanner + import org.mortbay.xml.XmlConfiguration + + import java.lang.ref.{Reference, WeakReference} + + val DefaultMaxIdleTime = 30000 + + def apply(configuration: JettyRunConfiguration, log: Logger): Stoppable = + { + import configuration._ + val oldLog = Log.getLog + Log.setLog(new JettyLogger(log)) + val server = new Server + val useDefaults = jettyConfigurationXML.isEmpty && jettyConfigurationFiles.isEmpty + + val listener = + if(useDefaults) + { + configureDefaultConnector(server, port) + def createLoader = new URLClassLoader(classpathURLs.toArray, this.getClass.getClassLoader) + val webapp = new WebAppContext(war.absolutePath, defaultContextPath) + webapp.setClassLoader(createLoader) + server.setHandler(webapp) + + Some(new Scanner.BulkListener { + def filesChanged(files: java.util.List[_]) { + reload(server, webapp.setClassLoader(createLoader), log) + } + }) + } + else + { + for(x <- jettyConfigurationXML) + (new XmlConfiguration(x.toString)).configure(server) + for(file <- jettyConfigurationFiles) + (new XmlConfiguration(file.toURI.toURL)).configure(server) + None + } + + def configureScanner() = + { + if(listener.isEmpty || scanDirectories.isEmpty) + None + else + { + log.debug("Scanning for changes to: " + scanDirectories.mkString(", ")) + val scanner = new Scanner + val list = new java.util.ArrayList[File] + scanDirectories.foreach(x => list.add(x)) + scanner.setScanDirs(list) + scanner.setRecursive(true) + scanner.setScanInterval(scanInterval) + scanner.setReportExistingFilesOnStartup(false) + scanner.addListener(listener.get) + scanner.start() + Some(new WeakReference(scanner)) + } + } + + try + { + server.start() + new StopServer(new WeakReference(server), configureScanner(), oldLog) + } + catch { 
case e => server.stop(); throw e } + } + private def configureDefaultConnector(server: Server, port: Int) + { + val defaultConnector = new SelectChannelConnector + defaultConnector.setPort(port) + defaultConnector.setMaxIdleTime(DefaultMaxIdleTime) + server.addConnector(defaultConnector) + } + private class StopServer(serverReference: Reference[Server], scannerReferenceOpt: Option[Reference[Scanner]], oldLog: org.mortbay.log.Logger) extends Stoppable + { + def stop() + { + val server = serverReference.get + if(server != null) + server.stop() + for(scannerReference <- scannerReferenceOpt) + { + val scanner = scannerReference.get + if(scanner != null) + scanner.stop() + } + Log.setLog(oldLog) + } + } + private def reload(server: Server, reconfigure: => Unit, log: Logger) + { + JettyRun.synchronized + { + log.info("Reloading web application...") + val handlers = wrapNull(server.getHandlers, server.getHandler) + log.debug("Stopping handlers: " + handlers.mkString(", ")) + handlers.foreach(_.stop) + log.debug("Reconfiguring...") + reconfigure + log.debug("Restarting handlers: " + handlers.mkString(", ")) + handlers.foreach(_.start) + log.info("Reload complete.") + } + } + private def wrapNull(a: Array[Handler], b: Handler) = + (a, b) match + { + case (null, null) => Nil + case (null, notB) => notB :: Nil + case (notA, null) => notA.toList + case (notA, notB) => notB :: notA.toList + } + private class JettyLogger(delegate: Logger) extends org.mortbay.log.Logger + { + def isDebugEnabled = delegate.atLevel(Level.Debug) + def setDebugEnabled(enabled: Boolean) = delegate.setLevel(if(enabled) Level.Debug else Level.Info) + + def getLogger(name: String) = this + def info(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.info(format(msg, arg0, arg1)) } + def debug(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.debug(format(msg, arg0, arg1)) } + def warn(msg: String, arg0: AnyRef, arg1: AnyRef) { delegate.warn(format(msg, arg0, arg1)) } + def warn(msg: String, th: 
Throwable) + { + delegate.warn(msg) + delegate.trace(th) + } + def debug(msg: String, th: Throwable) + { + delegate.debug(msg) + delegate.trace(th) + } + private def format(msg: String, arg0: AnyRef, arg1: AnyRef) = + { + def toString(arg: AnyRef) = if(arg == null) "" else arg.toString + val pieces = msg.split("""\{\}""", 3) + if(pieces.length == 1) + pieces(0) + else + { + val base = pieces(0) + toString(arg0) + pieces(1) + if(pieces.length == 2) + base + else + base + toString(arg1) + pieces(2) + } + } + } +} diff --git a/src/main/scala/sbt/Webstart.scala b/src/main/scala/sbt/Webstart.scala new file mode 100644 index 000000000..e717ec026 --- /dev/null +++ b/src/main/scala/sbt/Webstart.scala @@ -0,0 +1,277 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import java.io.File +import scala.xml.{Elem, NodeSeq} +import Control._ + +/** Defines the configurable parameters for the webstart task. */ +trait WebstartOptions extends NotNull +{ + /** The main jar to use for webstart.*/ + def webstartMainJar: Path + /** The location to put all generated files for webstart.*/ + def webstartOutputDirectory: Path + /** Generates the .jnlp file using the provided resource descriptions. Each resource description + * provides the path of the jar relative to 'webstartOutputDirectory' and whether or not + * it is the main jar.*/ + def jnlpXML(jars: Seq[WebstartJarResource]): Elem + /** The location to write the .jnlp file to. It must be in 'webstartOutputDirectory'.*/ + def jnlpFile: Path + /** The location to put all jars that are not the main jar. It must be in 'webstartOutputDirectory'.*/ + def webstartLibDirectory: Path + /** The libraries needed for webstart. Note that only jars are used; directories are discarded.*/ + def webstartLibraries: PathFinder + /** Libraries external to the project needed for webstart. 
This is mainly for scala libraries.*/ + def webstartExtraLibraries: PathFinder + /** Resources to copy to the webstart output directory.*/ + def webstartResources: PathFinder + /** If defined, this specifies where to create a zip of the webstart output directory. It cannot be + * in the output directory.*/ + def webstartZip: Option[Path] + + /** If defined, configures signing of jars. All jars (main and libraries) are signed using + * this configuration.*/ + def webstartSignConfiguration: Option[SignConfiguration] + /** If true, pack200 compression is applied to all jars (main and libraries). A version of each jar + * without pack200 compression is still created in the webstart output directory.*/ + def webstartPack200: Boolean + /** If true, gzip compression will be applied to all jars. If pack200 compression is enabled, + * gzip compression is also applied to the archives with pack200 compression. A version of + * each file without gzip compression is still created in the webstart output directory. */ + def webstartGzip: Boolean +} +/** Represents a library included in the webstart distribution. Name is the filename of the jar. +* href is the path of the jar relative to the webstart output directory. isMain is true only for +* the main jar. */ +final class WebstartJarResource(val name: String, val href: String, val isMain: Boolean) extends NotNull +/** Configuration for signing jars. */ +final class SignConfiguration(val alias: String, val options: Seq[SignJar.SignOption]) extends NotNull +/** A scala project that produces a webstart distribution. 
*/ +trait WebstartScalaProject extends ScalaProject +{ + import WebstartScalaProject._ + /** Creates a task that produces a webstart distribution using the given options.*/ + def webstartTask(options: WebstartOptions) = + task + { + import options._ + FileUtilities.createDirectories(webstartOutputDirectory :: webstartLibDirectory :: Nil, log) // ignore errors + verifyOptions(options) + + def relativize(jar: Path) = Path.relativize(webstartOutputDirectory ##, jar) getOrElse + error("Jar (" + jar + ") was not in webstart output directory (" + webstartOutputDirectory + ").") + def signAndPack(jars: List[Path], targetDirectory: Path): Either[String, List[Path]] = + { + lazyFold(jars, Nil: List[Path]) + { (allJars, jar) => + val signPackResult = + webstartSignConfiguration match + { + case Some(config) => + if(webstartPack200) + signAndPack200(jar, config, targetDirectory, log) + else + signOnly(jar, config, targetDirectory, log) + case None => + if(webstartPack200) + pack200Only(jar, targetDirectory, log) + else + copyJar(jar, targetDirectory, log).right.map(jars => new Jars(jars, Nil)) + } + val deleteOriginal = webstartPack200 + signPackResult.right flatMap { addJars => + if(webstartGzip) + Control.lazyFold(addJars.gzippable, addJars.allJars ::: allJars) + { (accumulate, jar) => gzipJar(jar, deleteOriginal, log).right.map(_ ::: accumulate) } + else + Right(addJars.allJars ::: allJars) + } + } + } + + import FileUtilities._ + + val jars = (webstartLibraries +++ webstartExtraLibraries).get.filter(ClasspathUtilities.isArchive) + def process(jars: Iterable[Path]) = for(jar <- jars if jar.asFile.getName.endsWith(".jar")) yield relativize(jar) + + thread(signAndPack(webstartMainJar :: Nil, webstartOutputDirectory)) { mainJars => + thread(signAndPack(jars.toList, webstartLibDirectory)) { libJars => + writeXML(jnlpXML(jarResources(process(mainJars), process(libJars))), jnlpFile, log) orElse + thread(copy(webstartResources.get, webstartOutputDirectory, log)) { copiedResources 
=> + val keep = jnlpFile +++ Path.lazyPathFinder(mainJars ++ libJars ++ copiedResources) +++ + webstartOutputDirectory +++ webstartLibDirectory + prune(webstartOutputDirectory, keep.get, log) orElse + webstartZip.flatMap( zipPath => zip(List(webstartOutputDirectory ##), zipPath, true, log) ) + } + } + } + } + /** Creates default XML elements for a JNLP file for the given resources.*/ + protected def defaultElements(resources: Seq[WebstartJarResource]): NodeSeq = NodeSeq.fromSeq(resources.map(defaultElement)) + /** Creates a default XML element for a JNLP file for the given resource.*/ + protected def defaultElement(resource: WebstartJarResource): Elem = + + +} +private class Jars(val gzippable: List[Path], val nonGzippable: List[Path]) extends NotNull +{ + def allJars = gzippable ::: nonGzippable +} +private object WebstartScalaProject +{ + import FileTasks.{runOption, wrapProduct, wrapProducts} + /** Changes the extension of the Path of the given jar from ".jar" to newExtension. If append is true, + * the new extension is simply appended to the jar's filename. 
*/ + private def appendExtension(jar: Path, newExtension: String) = + jar match + { + case rp: RelativePath => rp.parentPath / (rp.component + newExtension) + case x => x + } + private def gzipJarPath(jar: Path) = appendExtension(jar, ".gz") + private def packPath(jar: Path) = appendExtension(jar, ".pack") + private def signOnly(jar: Path, signConfiguration: SignConfiguration, targetDirectory: Path, log: Logger) = + { + val targetJar = targetDirectory / jar.asFile.getName + runOption("sign", targetJar from jar, log) { + log.debug("Signing " + jar) + signAndVerify(jar, signConfiguration, targetJar, log) + }.toLeft(new Jars(targetJar :: Nil, Nil)) + } + private def signAndVerify(jar: Path, signConfiguration: SignConfiguration, targetJar: Path, log: Logger) = + { + import SignJar._ + sign(jar, signConfiguration.alias, signedJar(targetJar) :: signConfiguration.options.toList, log) orElse + verify(jar, signConfiguration.options, log).map(err => "Signed jar failed verification: " + err) + } + private def gzipJar(jar: Path, deleteOriginal: Boolean, log: Logger) = + { + val gzipJar = gzipJarPath(jar) + runOption("gzip", gzipJar from jar, log) + { + log.debug("Gzipping " + jar) + FileUtilities.gzip(jar, gzipJar, log) orElse + (if(deleteOriginal) FileUtilities.clean(jar :: Nil, true, log) else None) + }.toLeft(gzipJar :: Nil) + } + /** Properly performs both signing and pack200 compression and verifies the result. This method only does anything if + * its outputs are out of date with respect to 'jar'. Note that it does not determine if the signing configuration has changed. 
+ * See java.util.jar.Pack200 for more information.*/ + private def signAndPack200(jar: Path, signConfiguration: SignConfiguration, targetDirectory: Path, log: Logger) = + { + val signedJar = targetDirectory / jar.asFile.getName + val packedJar = packPath(signedJar) + import signConfiguration._ + + runOption("sign and pack200", List(packedJar, signedJar) from jar, log) { + log.debug("Applying pack200 compression and signing " + jar) + signAndPack(jar, signedJar, packedJar, alias, options, log) orElse + signAndVerify(jar, signConfiguration, signedJar, log) + }.toLeft(new Jars(packedJar :: Nil, signedJar :: Nil)) + } + /** Properly performs both signing and pack200 compression and verifies the result. See java.util.jar.Pack200 for more information.*/ + private def signAndPack(jarPath: Path, signedPath: Path, out: Path, alias: String, options: Seq[SignJar.SignOption], log: Logger): Option[String] = + { + import Pack._ + import SignJar._ + pack(jarPath, out, log) orElse + unpack(out, signedPath, log) orElse + sign(signedPath, alias, options, log) orElse + pack(signedPath, out, log) orElse + unpack(out, signedPath, log) orElse + verify(signedPath, options, log) + } + private def pack200Only(jar: Path, targetDirectory: Path, log: Logger) = + { + val targetJar = targetDirectory / jar.asFile.getName + val packedJar = packPath(targetJar) + val packResult = + runOption("pack200", packedJar from jar, log) + { + log.debug("Applying pack200 compression to " + jar) + Pack.pack(jar, packedJar, log) + } + packResult match + { + case Some(err) => Left(err) + case None => copyJar(jar, targetDirectory, log).right.map(jars => new Jars(packedJar :: Nil, jars)) + } + } + private def copyJar(jar: Path, targetDirectory: Path, log: Logger) = + { + val targetJar = targetDirectory / jar.asFile.getName + runOption("copy jar", targetJar from jar, log)( FileUtilities.copyFile(jar, targetJar, log) ).toLeft(targetJar :: Nil) + } + /** Writes the XML string 'xmlString' to the file 'outputPath'.*/ 
+ private def writeXML(xmlString: String, outputPath: Path, log: Logger): Option[String] = + FileUtilities.write(outputPath.asFile, xmlString, log) + /** Writes the XML string 'xmlString' to the file 'outputPath' if the hashes are different.*/ + private def writeXML(xml: Elem, outputPath: Path, log: Logger): Option[String] = + { + val xmlString = scala.xml.Utility.toXML(xml, false) + if(!outputPath.exists) + { + log.debug("JNLP file did not exist, writing inline XML to " + outputPath) + writeXML(xmlString, outputPath, log) + } + else + { + val result = + for( xmlHash <- Hash(xmlString, log).right; fileHash <- Hash(outputPath, log).right ) yield + { + if(xmlHash deepEquals fileHash) + { + log.debug("JNLP file " + outputPath + " uptodate.") + None + } + else + { + log.debug("Inline JNLP XML modified, updating file " + outputPath + ".") + writeXML(xmlString, outputPath, log) + } + } + result.fold(err => Some(err), x => x) + } + } + private def jarResource(isMain: Boolean)(jar: Path): WebstartJarResource = + new WebstartJarResource(jar.asFile.getName, jar.relativePathString("/"), isMain) + private def jarResources(mainJars: Iterable[Path], libraries: Iterable[Path]): Seq[WebstartJarResource] = + mainJars.map(jarResource(true)).toList ::: libraries.map(jarResource(false)).toList + + /** True iff 'directory' is an ancestor (strictly) of 'check'.*/ + private def isInDirectory(directory: Path, check: Path) = Path.relativize(directory, check).isDefined && directory != check + /** Checks the paths in the given options for validity. 
See the documentation for WebstartOptions.*/ + private def verifyOptions(options: WebstartOptions) + { + import options._ + require(isInDirectory(webstartOutputDirectory, webstartLibDirectory), + "Webstart dependency directory (" + webstartLibDirectory + ") must be a subdirectory of webstart output directory (" + + webstartOutputDirectory + ").") + require(isInDirectory(webstartOutputDirectory, jnlpFile), "Webstart JNLP file output location (" + jnlpFile + + ") must be in the webstart output directory (" + webstartOutputDirectory + ").") + for(wz <- webstartZip) + require(!isInDirectory(webstartOutputDirectory, wz), + "Webstart output zip location (" + wz + " cannot be in webstart output directory (" + webstartOutputDirectory + ").") + } +} +/** The default extension point for a webstart project. There is one method that is required to be defined: jnlpXML. +* 'webstartSignConfiguration', 'webstartPack200', and 'webstartGzip' are methods of interest. */ +abstract class DefaultWebstartProject(val info: ProjectInfo) extends BasicWebstartProject with MavenStyleWebstartPaths +/** Defines default implementations of all methods in WebstartOptions except for jnlpXML. packageAction is overridden +* to create a webstart distribution after the normal package operation. 
*/ +abstract class BasicWebstartProject extends BasicScalaProject with WebstartScalaProject with WebstartOptions with WebstartPaths +{ + def webstartSignConfiguration: Option[SignConfiguration] = None + + def webstartExtraLibraries = mainDependencies.scalaJars + def webstartLibraries = publicClasspath +++ jarsOfProjectDependencies + def webstartResources = descendents(jnlpResourcesPath ##, AllPassFilter) + + def webstartPack200 = true + def webstartGzip = true + + override def packageAction = super.packageAction && webstartTask(this) +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/CommandParser.scala b/src/main/scala/sbt/impl/CommandParser.scala new file mode 100644 index 000000000..b64011fed --- /dev/null +++ b/src/main/scala/sbt/impl/CommandParser.scala @@ -0,0 +1,54 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.impl + +import scala.util.parsing.combinator.Parsers +import scala.util.parsing.input.CharSequenceReader +import scala.util.parsing.input.CharArrayReader.EofCh + +/** Parses a command of the form: +* identifier argument* +* where argument may be quoted to include spaces and +* quotes and backslashes should be escaped. +* (Most of the complexity is for decent error handling.)*/ +private[sbt] object CommandParser extends Parsers +{ + type Elem = Char + def parse(commandString: String): Either[String, (String, List[String])] = + { + command(new CharSequenceReader(commandString.trim, 0)) match + { + case Success(id ~ args, next) => Right((id, args)) + case err: NoSuccess => + { + val pos = err.next.pos + Left("Could not parse command: (" + pos.line + "," + pos.column + "): " + err.msg) + } + } + } + def command = phrase(identifier ~! (argument*)) + def identifier = unquoted | err("Expected identifier") + def argument = ( (whitespaceChar+) ~> (unquoted | quoted) ) + + def unquoted: Parser[String] = ((unquotedChar ~! 
(unquotedMainChar*)) ^^ { case a ~ tail => (a :: tail).mkString("") }) + def quoted: Parser[String] = quote ~> quotedChars <~ (quote | err("Missing closing quote character")) + + def quotedChars: Parser[String] = (escape | nonescapeChar)* + def escape: Parser[Char] = backslash ~> (escapeChar | err("Illegal escape")) + def escapeChar: Parser[Char] = quote | backslash + def nonescapeChar: Parser[Char] = elem("", ch => !isEscapeChar(ch) && ch != EofCh) + def unquotedChar: Parser[Char] = elem("", ch => !isEscapeChar(ch) && !Character.isWhitespace(ch) && ch != EofCh) + def unquotedMainChar: Parser[Char] = unquotedChar | (errorIfEscape ~> failure("")) + + private def errorIfEscape = (not(quote) | err("Unexpected quote character")) ~> + (not(backslash) | err("Escape sequences can only occur in a quoted argument")) + + private def isEscapeChar(ch: Char) = ch == '\\' || ch == '"' + + def quote: Parser[Char] = '"' + def backslash: Parser[Char] = '\\' + def whitespaceChar: Parser[Char] = elem("whitespace", ch => Character.isWhitespace(ch)) + + private implicit def toString(p: Parser[List[Char]]): Parser[String] = p ^^ {_ mkString "" } +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/MapUtilities.scala b/src/main/scala/sbt/impl/MapUtilities.scala new file mode 100644 index 000000000..66a56c98f --- /dev/null +++ b/src/main/scala/sbt/impl/MapUtilities.scala @@ -0,0 +1,59 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ + +package sbt.impl + +import java.util.Properties +import java.io.{File, FileInputStream, FileOutputStream, InputStream, OutputStream} +import scala.collection.mutable.{HashMap, HashSet, ListBuffer, Map, Set} + +private[sbt] object PropertiesUtilities +{ + def write(properties: Properties, label: String, to: Path, log: Logger) = + FileUtilities.writeStream(to.asFile, log)(output => { properties.store(output, label); None }) + def load(properties: Properties, from: Path, log: Logger): Option[String] = + { + val file = 
from.asFile + if(file.exists) + FileUtilities.readStream(file, log)( input => { properties.load(input); None }) + else + None + } + def propertyNames(properties: Properties): Iterable[String] = + wrap.Wrappers.toList(properties.propertyNames).map(_.toString) +} + +private[sbt] object MapUtilities +{ + def write[Key, Value](map: Map[Key, Value], label: String, to: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] = + { + val properties = new Properties + map foreach { pair => properties.setProperty(keyFormat.toString(pair._1), valueFormat.toString(pair._2)) } + PropertiesUtilities.write(properties, label, to, log) + } + def read[Key, Value](map: Map[Key, Value], from: Path, log: Logger)(implicit keyFormat: Format[Key], valueFormat: Format[Value]): Option[String] = + { + map.clear + val properties = new Properties + PropertiesUtilities.load(properties, from, log) orElse + { + for(name <- PropertiesUtilities.propertyNames(properties)) + map.put( keyFormat.fromString(name), valueFormat.fromString(properties.getProperty(name))) + None + } + } + def all[Key, Value](map: Map[Key, Set[Value]]): Iterable[Value] = + map.values.toList.flatMap(set => set.toList) + + def readOnlyIterable[Key, Value](i: Map[Key, Set[Value]]): Iterable[(Key, scala.collection.Set[Value])] = + for( (key, set) <- i.elements.toList) yield (key, wrap.Wrappers.readOnly(set))//.readOnly) + + def mark[Key, Value](source: Key, map: Map[Key, Set[Value]]) + { + if(!map.contains(source)) + map.put(source, new HashSet[Value]) + } + def add[Key, Value](key: Key, value: Value, map: Map[Key, Set[Value]]): Unit = + map.getOrElseUpdate(key, new HashSet[Value]) + value +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/ProcessImpl.scala b/src/main/scala/sbt/impl/ProcessImpl.scala new file mode 100644 index 000000000..0d5373bfa --- /dev/null +++ b/src/main/scala/sbt/impl/ProcessImpl.scala @@ -0,0 +1,353 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 
Mark Harrah + */ +package sbt + +import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder} +import java.io.{BufferedReader, Closeable, InputStream, InputStreamReader, IOException, OutputStream} +import java.io.{PipedInputStream, PipedOutputStream} +import java.io.{File, FileInputStream, FileOutputStream} +import java.net.URL + +import scala.concurrent.ops.future +import scala.concurrent.SyncVar + +/** Runs provided code in a new Thread and returns the Thread instance. */ +private object Spawn +{ + def apply(f: => Unit): Thread = + { + val thread = new Thread() { override def run() = { f } } + thread.start() + thread + } +} + +private object BasicIO +{ + def apply(log: Logger) = new ProcessIO(ignoreOut, processFully(log, Level.Info), processFully(log, Level.Error)) + + def ignoreOut = (i: OutputStream) => () + val BufferSize = 8192 + def close(c: java.io.Closeable) = try { c.close() } catch { case _: java.io.IOException => () } + def processFully(log: Logger, level: Level.Value)(i: InputStream) { processFully(line => log.log(level, line))(i) } + def processFully(processLine: String => Unit)(i: InputStream) + { + val reader = new BufferedReader(new InputStreamReader(i)) + def readFully() + { + val line = reader.readLine() + if(line != null) + { + processLine(line) + readFully() + } + } + readFully() + } + def standard: ProcessIO = new ProcessIO(ignoreOut, processFully(System.out.println), processFully(System.err.println)) + + def transferFully(in: InputStream, out: OutputStream) + { + val continueCount = 1//if(in.isInstanceOf[PipedInputStream]) 1 else 0 + val buffer = new Array[Byte](BufferSize) + def read + { + val byteCount = in.read(buffer) + if(byteCount >= continueCount) + { + out.write(buffer, 0, byteCount) + read + } + } + read + } +} + + +private abstract class AbstractProcessBuilder extends ProcessBuilder +{ + def #&&(other: ProcessBuilder): ProcessBuilder = new AndProcessBuilder(this, other) + def #||(other: ProcessBuilder): ProcessBuilder = 
new OrProcessBuilder(this, other) + def #|(other: ProcessBuilder): ProcessBuilder = + { + require(other.canPipeTo, "Piping to multiple processes is not supported.") + new PipedProcessBuilder(this, other, false) + } + def ##(other: ProcessBuilder): ProcessBuilder = new SequenceProcessBuilder(this, other) + + def #< (f: File): ProcessBuilder = new PipedProcessBuilder(new FileInput(f), this, false) + def #< (url: URL): ProcessBuilder = new PipedProcessBuilder(new URLInput(url), this, false) + def #> (f: File): ProcessBuilder = new PipedProcessBuilder(this, new FileOutput(f, false), false) + def #>> (f: File): ProcessBuilder = new PipedProcessBuilder(this, new FileOutput(f, true), true) + + def run(): Process = run(BasicIO.standard) + def run(log: Logger): Process = run(BasicIO(log)) + + def ! = run().exitValue() + def !(log: Logger) = + { + val log2 = new BufferedLogger(log) + log2.startRecording() + try { run(log2).exitValue() } + finally { log2.playAll(); log2.clearAll() } + } + def !(io: ProcessIO) = run(io).exitValue() + + def canPipeTo = false +} +private[sbt] class URLBuilder(url: URL) extends URLPartialBuilder +{ + def #>(b: ProcessBuilder): ProcessBuilder = b #< url + def #>>(file: File): ProcessBuilder = toFile(file, true) + def #>(file: File): ProcessBuilder = toFile(file, false) + private def toFile(file: File, append: Boolean) = new PipedProcessBuilder(new URLInput(url), new FileOutput(file, append), false) +} +private[sbt] class FileBuilder(base: File) extends FilePartialBuilder +{ + def #>(b: ProcessBuilder): ProcessBuilder = b #< base + def #<(b: ProcessBuilder): ProcessBuilder = b #> base + def #<(url: URL): ProcessBuilder = new URLBuilder(url) #> base + def #>>(file: File): ProcessBuilder = pipe(base, file, true) + def #>(file: File): ProcessBuilder = pipe(base, file, false) + def #<(file: File): ProcessBuilder = pipe(file, base, false) + def #<<(file: File): ProcessBuilder = pipe(file, base, true) + private def pipe(from: File, to: File, append: 
Boolean) = new PipedProcessBuilder(new FileInput(from), new FileOutput(to, append), false) +} + +private abstract class BasicBuilder extends AbstractProcessBuilder +{ + protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.") + final def run(io: ProcessIO): Process = + { + val p = createProcess(io) + p.start() + p + } + protected[this] def createProcess(io: ProcessIO): BasicProcess +} +private abstract class BasicProcess extends Process +{ + def start(): Unit +} + +private abstract class CompoundProcess extends BasicProcess +{ + def destroy() { destroyer() } + def exitValue() = getExitValue().getOrElse(error("No exit code: process destroyed.")) + + def start() = getExitValue + + protected lazy val (getExitValue, destroyer) = + { + val code = new SyncVar[Option[Int]]() + code.set(None) + val thread = Spawn(code.set(runAndExitValue())) + + ( + future { thread.join(); code.get }, + () => thread.interrupt() + ) + } + + /** Start and block until the exit value is available and then return it in Some. Return None if destroyed (use 'run')*/ + protected[this] def runAndExitValue(): Option[Int] + + protected[this] def runInterruptible[T](action: => T)(destroyImpl: => Unit): Option[T] = + { + try { Some(action) } + catch { case _: InterruptedException => destroyImpl; None } + } +} + +private abstract class SequentialProcessBuilder(a: ProcessBuilder, b: ProcessBuilder, operatorString: String) extends BasicBuilder +{ + checkNotThis(a) + checkNotThis(b) + override def toString = " ( " + a + " " + operatorString + " " + b + " ) " +} +private class PipedProcessBuilder(first: ProcessBuilder, second: ProcessBuilder, toError: Boolean) extends SequentialProcessBuilder(first, second, if(toError) "#|!" 
else "#|") +{ + override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError) +} +private class AndProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#&&") +{ + override def createProcess(io: ProcessIO) = new AndProcess(first, second, io) +} +private class OrProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#||") +{ + override def createProcess(io: ProcessIO) = new OrProcess(first, second, io) +} +private class SequenceProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "##") +{ + override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) +} + +private class SequentialProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO, evaluateSecondProcess: Int => Boolean) extends CompoundProcess +{ + protected[this] override def runAndExitValue() = + { + val first = a.run(io) + runInterruptible(first.exitValue)(first.destroy()) flatMap + { codeA => + if(evaluateSecondProcess(codeA)) + { + val second = b.run(io) + runInterruptible(second.exitValue)(second.destroy()) + } + else + Some(codeA) + } + } +} +private class AndProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ == 0) +private class OrProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ != 0) +private class ProcessSequence(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, ignore => true) + + +private class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess +{ + protected[this] override def runAndExitValue() = + { + val currentSource = new SyncVar[Option[InputStream]] + val pipeOut = new PipedOutputStream + val source = new PipeSource(currentSource, pipeOut, a.toString) + source.start() + + 
val pipeIn = new PipedInputStream(pipeOut) + val currentSink = new SyncVar[Option[OutputStream]] + val sink = new PipeSink(pipeIn, currentSink, b.toString) + sink.start() + + def handleOutOrError(fromOutput: InputStream) = currentSource.put(Some(fromOutput)) + + val firstIO = + if(toError) + defaultIO.withError(handleOutOrError) + else + defaultIO.withOutput(handleOutOrError) + val secondIO = defaultIO.withInput(toInput => currentSink.put(Some(toInput)) ) + + val second = b.run(secondIO) + val first = a.run(firstIO) + try + { + runInterruptible { + first.exitValue + currentSource.put(None) + currentSink.put(None) + val result = second.exitValue + result + } { + first.destroy() + second.destroy() + } + } + finally + { + BasicIO.close(pipeIn) + BasicIO.close(pipeOut) + } + } +} +private class PipeSource(currentSource: SyncVar[Option[InputStream]], pipe: PipedOutputStream, label: => String) extends Thread +{ + final override def run() + { + currentSource.get match + { + case Some(source) => + try { BasicIO.transferFully(source, pipe) } + catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } + finally + { + BasicIO.close(source) + currentSource.unset() + } + run() + case None => + currentSource.unset() + BasicIO.close(pipe) + } + } +} +private class PipeSink(pipe: PipedInputStream, currentSink: SyncVar[Option[OutputStream]], label: => String) extends Thread +{ + final override def run() + { + currentSink.get match + { + case Some(sink) => + try { BasicIO.transferFully(pipe, sink) } + catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } + finally + { + BasicIO.close(sink) + currentSink.unset() + } + run() + case None => + currentSink.unset() + } + } +} + + +/** Represents a simple command without any redirection or combination. 
*/ +private[sbt] class SimpleProcessBuilder(p: JProcessBuilder) extends AbstractProcessBuilder +{ + override def run(io: ProcessIO): Process = + { + val process = p.start() // start the external process + import io.{writeInput, processOutput, processError} + // spawn threads that process the input, output, and error streams using the functions defined in `io` + val inThread = Spawn(writeInput(process.getOutputStream)) + val outThread = Spawn(processOutput(process.getInputStream)) + val errorThread = + if(!p.redirectErrorStream) + Spawn(processError(process.getErrorStream)) :: Nil + else + Nil + new SimpleProcess(process, inThread :: outThread :: errorThread) + } + override def toString = p.command.toString + override def canPipeTo = true +} +/** A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O. +* The implementation of `exitValue` waits until these threads die before returning. */ +private class SimpleProcess(p: JProcess, ioThreads: Iterable[Thread]) extends Process +{ + override def exitValue() = + { + p.waitFor() // wait for the process to terminate + ioThreads.foreach(_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this) + p.exitValue() + } + override def destroy() = p.destroy() +} + +private class FileOutput(file: File, append: Boolean) extends OutputStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath) +private class URLInput(url: URL) extends InputStreamBuilder(url.openStream, url.toString) +private class FileInput(file: File) extends InputStreamBuilder(new FileInputStream(file), file.getAbsolutePath) + +private class OutputStreamBuilder(stream: => OutputStream, label: String) extends ThreadProcessBuilder(label, _.writeInput(stream)) +private class InputStreamBuilder(stream: => InputStream, label: String) extends ThreadProcessBuilder(label, _.processOutput(stream)) + +private abstract class ThreadProcessBuilder(override val toString: String, runImpl: 
ProcessIO => Unit) extends AbstractProcessBuilder +{ + override def run(io: ProcessIO): Process = + { + val success = new SyncVar[Boolean] + success.put(false) + new ThreadProcess(Spawn {runImpl(io); success.set(true) }, success) + } +} +private class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process +{ + override def exitValue() = + { + thread.join() + if(success.get) 0 else 1 + } + override def destroy() { thread.interrupt() } +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/RunTask.scala b/src/main/scala/sbt/impl/RunTask.scala new file mode 100644 index 000000000..8b87e4143 --- /dev/null +++ b/src/main/scala/sbt/impl/RunTask.scala @@ -0,0 +1,164 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ + package sbt.impl + +import scala.collection.{immutable, mutable} +import scala.collection.Map +import sbt.wrap.Wrappers.identityMap + +private[sbt] object RunTask +{ + final type Task = Project#Task + def apply(root: Task, rootName: String): List[WorkFailure[Task]] = apply(root, rootName, true) + def apply(root: Task, rootName: String, parallelExecution: Boolean): List[WorkFailure[Task]] = + apply(root, rootName, if(parallelExecution) Runtime.getRuntime.availableProcessors else 1) + def apply(root: Task, rootName: String, maximumTasks: Int): List[WorkFailure[Task]] = (new RunTask(root, rootName, maximumTasks)).run() +} +import RunTask._ +private final class RunTask(root: Task, rootName: String, maximumTasks: Int) extends NotNull +{ + require(maximumTasks >= 1) + def parallel = maximumTasks > 1 + def multiProject = allProjects.size >= 2 + def run(): List[WorkFailure[Task]] = + { + try + { + runTasksExceptRoot() match + { + case Nil => + val result = runTask(root, rootName) + result.map( errorMessage => WorkFailure(root, "Error running " + rootName + ": " + errorMessage) ).toList + case failures => failures + } + } + finally + { + for(project <- allProjects; saveError <- project.saveEnvironment) + 
project.log.warn("Could not save properties for project " + project.name + ": " + saveError) + } + } + // This runs all tasks except the root.task. + // It uses a buffered logger in record mode to ensure that all output for a given task is consecutive + // it ignores the root task so that the root task may be run with buffering disabled so that the output + // occurs without delay. + private def runTasksExceptRoot() = + { + withBuffered(_.startRecording()) + try { ParallelRunner.run(expandedRoot, expandedTaskName, runIfNotRoot, maximumTasks, (t: Task) => t.manager.log) } + finally { withBuffered(_.stop()) } + } + private def withBuffered(f: BufferedLogger => Unit) + { + for(buffered <- bufferedLoggers) + Control.trap(f(buffered)) + } + /** Will be called in its own thread. Runs the given task if it is not the root task.*/ + private def runIfNotRoot(action: Task): Option[String] = + { + if(isRoot(action)) + None + else + runTask(action, expandedTaskName(action)) + } + private def isRoot(t: Task) = t == expandedRoot + /** Will be called in its own thread except for the root task. 
*/ + private def runTask(action: Task, actionName: String): Option[String] = + { + val label = if(multiProject) (action.manager.name + " / " + actionName) else actionName + def banner(event: ControlEvent.Value, firstSeparator: String, secondSeparator: String) = + Control.trap(action.manager.log.control(event, firstSeparator + " " + label + " " + secondSeparator)) + if(parallel) + { + try { banner(ControlEvent.Start, "\n ", "...") } + finally { flush(action) } + } + banner(ControlEvent.Header, "\n==", "==") + try { action.invoke } + catch { case e: Exception => action.manager.log.trace(e); Some(e.toString) } + finally + { + banner(ControlEvent.Finish, "==", "==") + if(parallel) + flush(action) + } + } + private def trapFinally(toTrap: => Unit)(runFinally: => Unit) + { + try { toTrap } + catch { case e: Exception => () } + finally { try { runFinally } catch { case e: Exception => () } } + } + private def flush(action: Task) + { + for(buffered <- bufferedLogger(action.manager)) + Control.trap(flush(buffered)) + } + private def flush(buffered: BufferedLogger) + { + buffered.play() + buffered.clear() + } + + /* Most of the following is for implicitly adding dependencies (see the expand method)*/ + private val projectDependencyCache = identityMap[Project, Iterable[Project]] + private def dependencies(project: Project) = projectDependencyCache.getOrElseUpdate(project, project.topologicalSort.dropRight(1)) + + private val expandedCache = identityMap[Task, Task] + private def expanded(task: Task): Task = expandedCache.getOrElseUpdate(task, expandImpl(task)) + + private val expandedTaskNameCache = identityMap[Task, String] + private def expandedTaskName(task: Task) = + if(task == expandedRoot) + rootName + else + expandedTaskNameCache.getOrElse(task, task.name) + + private val nameToTaskCache = identityMap[Project, Map[String, Task]] + private def nameToTaskMap(project: Project): Map[String, Task] = nameToTaskCache.getOrElseUpdate(project, project.tasks) + private def 
taskForName(project: Project, name: String): Option[Task] = nameToTaskMap(project).get(name) + + private val taskNameCache = identityMap[Project, Map[Task, String]] + private def taskName(task: Task) = + { + val project = task.manager + taskNameCache.getOrElseUpdate(project, taskNameMap(project)).get(task) + } + + private val expandedRoot = expand(root) + private val allTasks = expandedRoot.topologicalSort + private val allProjects = Set(allTasks.map(_.manager).toSeq : _*) + private val bufferedLoggers = if(parallel) allProjects.toList.flatMap(bufferedLogger) else Nil + + /** Adds implicit dependencies, which are tasks with the same name in the project dependencies + * of the enclosing project of the task.*/ + private def expand(root: Task): Task = expanded(root) + private def expandImpl(task: Task): Task = + { + val nameOption = taskName(task) + val explicitDependencies = task.dependencies + val implicitDependencies = nameOption.map(name => dependencies(task.manager).flatMap(noninteractiveTask(name)) ).getOrElse(Nil) + val allDependencies = mutable.HashSet( (explicitDependencies ++ implicitDependencies).toSeq : _* ) + val expandedTask = task.setDependencies(allDependencies.toList.map(expanded)) + nameOption.foreach(name => expandedTaskNameCache(expandedTask) = name) + expandedTask + } + private def noninteractiveTask(name: String)(project: Project): Option[Task] = + taskForName(project, name) flatMap { task => + if(task.interactive) + { + project.log.debug("Not including task " + name + " in project " + project.name + ": interactive tasks can only be run directly.") + None + } + else + Some(task) + } + private def taskNameMap(project: Project) = mutable.Map(nameToTaskMap(project).map(_.swap).toSeq : _*) + private def bufferedLogger(project: Project): Option[BufferedLogger] = + project.log match + { + case buffered: BufferedLogger => Some(buffered) + case _ => None + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/SelectMainClass.scala 
b/src/main/scala/sbt/impl/SelectMainClass.scala new file mode 100644 index 000000000..f24b5deee --- /dev/null +++ b/src/main/scala/sbt/impl/SelectMainClass.scala @@ -0,0 +1,44 @@ +package sbt.impl + +private[sbt] object SelectMainClass +{ + def apply(promptIfMultipleChoices: Boolean, mainClasses: List[String]) = + { + mainClasses match + { + case Nil => None + case head :: Nil => Some(head) + case multiple => + if(promptIfMultipleChoices) + { + println("\nMultiple main classes detected, select one to run:\n") + for( (className, index) <- multiple.zipWithIndex ) + println(" [" + (index+1) + "] " + className) + val line = trim(SimpleReader.readLine("\nEnter number: ")) + println("") + toInt(line, multiple.length) map multiple.apply + } + else + None + } + } + private def trim(s: Option[String]) = s.getOrElse("") + private def toInt(s: String, size: Int) = + try + { + val i = s.toInt + if(i > 0 && i <= size) + Some(i-1) + else + { + println("Number out of range: was " + i + ", expected number between 1 and " + size) + None + } + } + catch + { + case nfe: NumberFormatException => + println("Invalid number: " + nfe.toString) + None + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/TestFrameworkImpl.scala b/src/main/scala/sbt/impl/TestFrameworkImpl.scala new file mode 100755 index 000000000..2603eed3f --- /dev/null +++ b/src/main/scala/sbt/impl/TestFrameworkImpl.scala @@ -0,0 +1,186 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Steven Blundy, Mark Harrah + */ + +package sbt.impl + +import scala.xml.{Elem, Group} + +/* The following classes run tests for their associated test framework. +* NOTE #1: DO NOT actively use these classes. Only specify their names to LazyTestFramework +* for reflective loading. This allows using the test libraries provided on the +* project classpath instead of requiring global versions. 
+* NOTE #2: Keep all active uses of these test frameworks inside these classes so that sbt +* runs without error when a framework is not available at runtime and no tests for that +* framework are defined.*/ + +/** The test runner for ScalaCheck tests. */ +private[sbt] class ScalaCheckRunner(val log: Logger, val listeners: Seq[TestReportListener], val testLoader: ClassLoader) extends BasicTestRunner +{ + import org.scalacheck.{Pretty, Properties, Test} + def runTest(testClassName: String): Result.Value = + { + val test = ModuleUtilities.getObject(testClassName, testLoader).asInstanceOf[Properties] + if(Test.checkProperties(test, Test.defaultParams, propReport, testReport).find(!_._2.passed).isEmpty) + Result.Passed + else + Result.Failed + } + private def propReport(pName: String, s: Int, d: Int) {} + private def testReport(pName: String, res: Test.Result) = + { + if(res.passed) + fire(PassedEvent(pName, Pretty.pretty(res))) + else + fire(FailedEvent(pName, Pretty.pretty(res))) + } +} +/** The test runner for ScalaTest suites. */ +private[sbt] class ScalaTestRunner(val log: Logger, val listeners: Seq[TestReportListener], val testLoader: ClassLoader) extends BasicTestRunner +{ + def runTest(testClassName: String): Result.Value = + { + import org.scalatest.{Stopper, Suite} + val testClass = Class.forName(testClassName, true, testLoader).asSubclass(classOf[Suite]) + val test = testClass.newInstance + val reporter = new ScalaTestReporter + val stopper = new Stopper { override def stopRequested = false } + test.execute(None, reporter, stopper, Set.empty, Set("org.scalatest.Ignore"), Map.empty, None) + if(reporter.succeeded) + Result.Passed + else + Result.Failed + } + + /** An implementation of Reporter for ScalaTest. 
*/ + private class ScalaTestReporter extends org.scalatest.Reporter with NotNull + { + import org.scalatest.Report + override def testIgnored(report: Report) = + { + if(report.message.trim.isEmpty) fire(IgnoredEvent(report.name, None)) + else fire(IgnoredEvent(report.name, Some(report.message.trim))) + } + override def testStarting(report: Report) { info(report, "Test starting", None) } + override def testSucceeded(report: Report) { info(report, "Test succeeded", Some(Result.Passed)) } + override def testFailed(report: Report) + { + succeeded = false + error(report, "Test failed", Some(Result.Failed)) + } + + override def infoProvided(report : Report) { info(report, "", None) } + + override def suiteStarting(report: Report) { info(report, "Suite starting", None) } + override def suiteCompleted(report: Report) { info(report, "Suite completed", None) } + override def suiteAborted(report: Report) { error(report, "Suite aborted", None) } + + override def runStarting(testCount: Int) { fire(MessageEvent("Run starting")) } + override def runStopped() + { + succeeded = false + fire(ErrorEvent("Run stopped")) + } + override def runAborted(report: Report) + { + succeeded = false + error(report, "Run aborted", None) + } + override def runCompleted() { log.info("Run completed.") } + + private def error(report: Report, event: String, result: Option[Result.Value]) { logReport(report, event, result, Level.Error) } + private def info(report: Report, event: String, result: Option[Result.Value]) { logReport(report, event, result, Level.Info) } + private def logReport(report: Report, event: String, result: Option[Result.Value], level: Level.Value) + { + level match + { + case Level.Error => + if(report.message.trim.isEmpty) + fire(TypedErrorEvent(report.name, event, None, report.throwable)(result)) + else + fire(TypedErrorEvent(report.name, event, Some(report.message.trim), report.throwable)(result)) + case Level.Info => + if(report.message.trim.isEmpty) + 
fire(TypedEvent(report.name, event, None)(result)) + else + fire(TypedEvent(report.name, event, Some(report.message.trim))(result)) + case l => log.warn("Level not expected:" + l) + } + } + + var succeeded = true + } +} +/** The test runner for specs tests. */ +private[sbt] class SpecsRunner(val log: Logger, val listeners: Seq[TestReportListener], val testLoader: ClassLoader) extends BasicTestRunner +{ + import org.specs.Specification + import org.specs.specification.{Example, Sus} + + def runTest(testClassName: String): Result.Value = + { + val test = ModuleUtilities.getObject(testClassName, testLoader).asInstanceOf[Specification] + val event = reportSpecification(test) + fire(event) + if(test.isFailing) + Result.Failed + else + Result.Passed + } + + /* The following is closely based on org.specs.runner.OutputReporter, + * part of specs, which is Copyright 2007-2008 Eric Torreborre. + * */ + + private def reportSpecification(spec: Specification): SpecificationReportEvent = + { + // this is for binary compatibility between specs 1.4.x and 1.5.0: the ancestor of Specification containing these two methods changed + val reflectedSpec: { def systems: Seq[Sus]; def subSpecifications: Seq[Specification] } = spec + + return SpecificationReportEvent(spec.successes.size, spec.failures.size, spec.errors.size, spec.skipped.size, spec.pretty, + reportSystems(reflectedSpec.systems), reportSpecifications(reflectedSpec.subSpecifications)) + } + private def reportSpecifications(specifications: Seq[Specification]): Seq[SpecificationReportEvent] = + { + for(specification <- specifications) yield + reportSpecification(specification) + } + private def reportSystems(systems: Seq[Sus]): Seq[SystemReportEvent] = + { + for(system <- systems) yield + reportSystem(system) + } + private def reportSystem(sus: Sus): SystemReportEvent = + { + def format = + { + class ElemDesc(e: Elem) { def desc = e.child } + implicit def elemToDesc(e: Elem): ElemDesc = new ElemDesc(e) + + for(description <- 
sus.literateDescription) yield + { + // for source compatibility between specs 1.4.x and 1.5.0: + // in specs 1.5.0, description is LiterateDescription + // in specs < 1.5.0, description is Elem + // LiterateDescription.desc is a Node + // Elem.child is a Seq[Node] + // each has a map[T](f: Node => T): Seq[T] defined so we implicitly convert + // an Elem e to an intermediate object that has desc defined to be e.child + + //description.child.map(_.text) // Elem equivalent + description.desc.map(_.text) // LiterateDescription + } + } + + SystemReportEvent(sus.description, sus.verb, sus.skippedSus, format, reportExamples(sus.examples)) + } + private def reportExamples(examples: Seq[Example]): Seq[ExampleReportEvent] = + { + for(example <- examples) yield + reportExample(example) + } + private def reportExample(example: Example): ExampleReportEvent = + { + ExampleReportEvent(example.description, example.errors, example.failures, example.skipped, reportExamples(example.subExamples)) + } +} diff --git a/src/main/scala/sbt/impl/TestParser.scala b/src/main/scala/sbt/impl/TestParser.scala new file mode 100644 index 000000000..69c7b5c72 --- /dev/null +++ b/src/main/scala/sbt/impl/TestParser.scala @@ -0,0 +1,47 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +/* The following implements the simple syntax for storing test definitions. +* The syntax is: +* +* definition := isModule? 
className separator className +* isModule := '' +* separator := '<<' +*/ + +import scala.util.parsing.combinator._ + +import TestParser._ +/** Represents a test implemented by 'testClassName' of type 'superClassName'.*/ +final case class TestDefinition(isModule: Boolean, testClassName: String, superClassName: String) extends NotNull +{ + override def toString = + (if(isModule) IsModuleLiteral else "") + testClassName + SubSuperSeparator + superClassName +} +final class TestParser extends RegexParsers with NotNull +{ + def test: Parser[TestDefinition] = + ( isModule ~! className ~! SubSuperSeparator ~! className ) ^^ + { case module ~ testName ~ SubSuperSeparator ~ superName => TestDefinition(module, testName.trim, superName.trim) } + def isModule: Parser[Boolean] = (IsModuleLiteral?) ^^ (_.isDefined) + def className: Parser[String] = ClassNameRegexString.r + + def parse(testDefinitionString: String): Either[String, TestDefinition] = + { + def parseError(msg: String) = Left("Could not parse test definition '" + testDefinitionString + "': " + msg) + parseAll(test, testDefinitionString) match + { + case Success(result, next) => Right(result) + case err: NoSuccess => parseError(err.msg) + } + } +} +object TestParser +{ + val IsModuleLiteral = "" + val SubSuperSeparator = "<<" + val ClassNameRegexString = """[^<]+""" + def parse(testDefinitionString: String): Either[String, TestDefinition] = (new TestParser).parse(testDefinitionString) +} \ No newline at end of file diff --git a/src/main/scala/sbt/impl/TestStatusReporter.scala b/src/main/scala/sbt/impl/TestStatusReporter.scala new file mode 100644 index 000000000..dddf0eb5a --- /dev/null +++ b/src/main/scala/sbt/impl/TestStatusReporter.scala @@ -0,0 +1,74 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.impl + +import java.io.File +import scala.collection.mutable.{HashMap, Map} + +/** Only intended to be used once per instance. 
*/ +private[sbt] class TestStatusReporter(path: Path, log: Logger) extends TestsListener +{ + private lazy val succeeded: Map[String, Long] = TestStatus.read(path, log) + + def doInit {} + def startGroup(name: String) { succeeded removeKey name } + def testEvent(event: TestEvent) {} + def endGroup(name: String, t: Throwable) {} + def endGroup(name: String, result: Result.Value) + { + if(result == Result.Passed) + succeeded(name) = System.currentTimeMillis + } + def doComplete(finalResult: Result.Value) { complete() } + def doComplete(t: Throwable) { complete() } + + private def complete() + { + TestStatus.write(succeeded, "Successful Tests", path, log) + } +} + +private[sbt] class TestQuickFilter(testAnalysis: CompileAnalysis, failedOnly: Boolean, path: Path, log: Logger) extends (String => Boolean) with NotNull +{ + private lazy val exclude = TestStatus.read(path, log) + private lazy val map = testAnalysis.testSourceMap + def apply(test: String) = + exclude.get(test) match + { + case None => true // include because this test has not been run or did not succeed + case Some(lastSuccessTime) => // succeeded the last time it was run + if(failedOnly) + false // don't include because the last time succeeded + else + testAnalysis.products(map(test)) match + { + case None => true + case Some(products) => products.exists(lastSuccessTime <= _.lastModified) // include if the test is newer than the last run + } + } +} +private object TestStatus +{ + import java.util.Properties + def read(path: Path, log: Logger): Map[String, Long] = + { + val map = new HashMap[String, Long] + val properties = new Properties + logError(PropertiesUtilities.load(properties, path, log), "loading", log) + for(test <- PropertiesUtilities.propertyNames(properties)) + map.put(test, properties.getProperty(test).toLong) + map + } + def write(map: Map[String, Long], label: String, path: Path, log: Logger) + { + val properties = new Properties + for( (test, lastSuccessTime) <- map) + 
properties.setProperty(test, lastSuccessTime.toString) + logError(PropertiesUtilities.write(properties, label, path, log), "writing", log) + } + private def logError(result: Option[String], action: String, log: Logger) + { + result.foreach(msg => log.error("Error " + action + " test status: " + msg)) + } +} \ No newline at end of file diff --git a/src/main/scala/sbt/wrap/Wrappers.scala b/src/main/scala/sbt/wrap/Wrappers.scala new file mode 100644 index 000000000..34f0ba55b --- /dev/null +++ b/src/main/scala/sbt/wrap/Wrappers.scala @@ -0,0 +1,112 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt.wrap + +// This file exists for compatibility between Scala 2.7.x and 2.8.0 + +import java.util.{Map => JMap, Set => JSet} + +private[sbt] object Wrappers +{ + def identityMap[K,V] = new MutableMapWrapper(new java.util.IdentityHashMap[K,V]) + def weakMap[K,V] = new MutableMapWrapper(new java.util.WeakHashMap[K,V]) + def toList[K,V](s: java.util.Map[K,V]): List[(K,V)] = toList(s.entrySet).map(e => (e.getKey, e.getValue)) + def toList[T](s: java.util.Collection[T]): List[T] = toList(s.iterator) + def toList[T](s: java.util.Iterator[T]): List[T] = + { + def add(l: List[T]): List[T] = + if(s.hasNext) + add(s.next() :: l) + else + l + add(Nil).reverse + } + def toList[T](s: java.util.Enumeration[T]): List[T] = + { + def add(l: List[T]): List[T] = + if(s.hasMoreElements) + add(s.nextElement() :: l) + else + l + add(Nil).reverse + } + def readOnly[K,V](map: scala.collection.mutable.Map[K,V]): scala.collection.Map[K,V] = map//.readOnly + def readOnly[T](set: scala.collection.mutable.Set[T]): scala.collection.Set[T] = set//.readOnly + def readOnly[T](buffer: scala.collection.mutable.Buffer[T]): Seq[T] = buffer//.readOnly +} + +private[sbt] sealed abstract class Iterable[T] extends NotNull +{ + def foreach(f: T => Unit) = toList.foreach(f) + def toList: List[T] +} +private[sbt] sealed trait Removable[T] extends NotNull +{ + def -=(t: T) : Unit + def 
--=(all: Iterable[T]) { all.foreach(-=) } + def --=(all: scala.Iterable[T]) { all.foreach(-=) } +} +private[sbt] sealed trait Addable[T] extends NotNull +{ + def +=(t: T) : Unit + def ++=(all: Iterable[T]) { all.foreach(+=) } + def ++=(all: scala.Iterable[T]) { all.foreach(+=) } +} +private[sbt] sealed abstract class Set[T] extends Iterable[T] +{ + def contains(t: T): Boolean +} +private[sbt] sealed class SetWrapper[T](val underlying: JSet[T]) extends Set[T] +{ + def contains(t: T) = underlying.contains(t) + def toList =Wrappers.toList(underlying.iterator) +} +private[sbt] final class MutableSetWrapper[T](wrapped: JSet[T]) extends SetWrapper[T](wrapped) with Addable[T] with Removable[T] +{ + def +=(t: T) { underlying.add(t) } + def -=(t: T) { underlying.remove(t) } + def readOnly: Set[T] = this +} +private[sbt] sealed abstract class Map[K,V] extends Iterable[(K,V)] +{ + def apply(key: K): V + def get(key: K): Option[V] + final def getOrElse[V2 >: V](key: K, default: => V2): V2 = + get(key) match + { + case Some(value) => value + case None => default + } +} +private[sbt] sealed abstract class MapWrapper[K,V](val underlying: JMap[K,V]) extends Map[K,V] +{ + final def apply(key: K) = underlying.get(key) + final def get(key: K) = + { + val value = underlying.get(key) + if(value == null) + None + else + Some(value) + } + final def toList = Wrappers.toList(underlying) + final def values = toList.map(_._2) +} +private[sbt] sealed class MutableMapWrapper[K,V](wrapped: JMap[K,V]) extends MapWrapper[K,V](wrapped) with Removable[K] with Addable[(K,V)] +{ + final def getOrElseUpdate(key: K, default: => V): V = + get(key) match + { + case Some(value) => value + case None => + val newValue = default + underlying.put(key, newValue) + newValue + } + final def clear() = underlying.clear() + final def update(key: K, value: V) { underlying.put(key, value) } + final def +=(pair: (K, V) ) { update(pair._1, pair._2) } + final def -=(key: K) { underlying.remove(key) } + final def 
readOnly: Map[K,V] = this +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject2.scala b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject2.scala new file mode 100644 index 000000000..2cb141caa --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject2.scala @@ -0,0 +1,6 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) with Marker +{ + lazy val interactiveTest = interactiveTask { mark() } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject3.scala b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject3.scala new file mode 100644 index 000000000..dc958905b --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject3.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) with Marker +{ + val subA = project("a", "A") + lazy val interactiveTest = interactiveTask { mark() } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject4.scala b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject4.scala new file mode 100644 index 000000000..cc75690a1 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject4.scala @@ -0,0 +1,10 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val subA = project("a", "A", new SubA(_)) + class SubA(info: ProjectInfo) extends DefaultProject(info) with Marker + { + lazy val interactiveTest = interactiveTask { mark() } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject5.scala 
b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject5.scala new file mode 100644 index 000000000..4488a08b5 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject5.scala @@ -0,0 +1,12 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) with Marker +{ + val subA = project("a", "A", new SubA(_)) + lazy val interactiveTest = interactiveTask { mark() } + + class SubA(info: ProjectInfo) extends DefaultProject(info) + { + lazy val interactiveTest = interactiveTask { Some("Child interactive task should not be called.") } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject6.scala b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject6.scala new file mode 100644 index 000000000..ecd8ab4b0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject6.scala @@ -0,0 +1,12 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val subA = project("a", "A", new SubA(_)) + lazy val interactiveTest = task { Some("Parent task should not be called") } + + class SubA(info: ProjectInfo) extends DefaultProject(info) + { + lazy val interactiveTest = interactiveTask { Some("Child task should not be called.") } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject7.scala b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject7.scala new file mode 100644 index 000000000..82a3ed6f8 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/changes/TestProject7.scala @@ -0,0 +1,12 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) with Marker +{ + val subA = project("a", "A", new SubA(_)) + lazy val interactiveTest = task { mark() } + + class SubA(info: ProjectInfo) extends 
DefaultProject(info) with Marker + { + lazy val interactiveTest = task { mark() } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/project/build.properties b/src/test/resources/sbt-test-resources/actions/interactive/project/build.properties new file mode 100644 index 000000000..a7e217501 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Fri Jan 30 20:49:57 EST 2009 +project.name=Interactive Actions Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/Marker.scala b/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/Marker.scala new file mode 100644 index 000000000..693fdd31e --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/Marker.scala @@ -0,0 +1,13 @@ +import sbt._ + +trait Marker extends NotNull +{ self: Project => + def toMark: Path = "ran" + def mark() = + { + if(toMark.exists) + Some("Already ran") + else + FileUtilities.touch(toMark, log) + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/TestProject.scala new file mode 100644 index 000000000..993915379 --- /dev/null +++ b/src/test/resources/sbt-test-resources/actions/interactive/project/build/src/TestProject.scala @@ -0,0 +1,6 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) with Marker +{ + lazy val interactiveTest = task { mark() } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/actions/interactive/test b/src/test/resources/sbt-test-resources/actions/interactive/test new file mode 100644 index 000000000..58946fe31 --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/actions/interactive/test @@ -0,0 +1,46 @@ +# This test verifies the behavior of actions declared interactive + +# Single project, non-interactive task +> interactive-test [success] +$ exists ran [success] +$ delete ran [success] + +# Single project, interactive task +$ copy-file changes/TestProject2.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [success] +$ exists ran [success] +$ delete ran [success] + +# Multi-project, single interactive task on parent project +$ copy-file changes/TestProject3.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [success] +$ exists ran [success] +$ delete ran [success] + +# Multi-project, single interactive task on child project +$ copy-file changes/TestProject4.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [failure] + +# Multi-project, two interactive tasks with same name, which is allowed because it is defined on parent +$ copy-file changes/TestProject5.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [success] +$ exists "ran" [success] +$ delete "ran" [success] + +# Multi-project, interactive on subproject + non-interactive on parent, which cannot be run from parent +$ copy-file changes/TestProject6.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [failure] + +# Multi-project, two non-interactive tasks with same name, which is allowed +$ copy-file changes/TestProject7.scala project/build/src/TestProject.scala [success] +$ reload [success] +> interactive-test [success] +$ exists "ran" [success] +$ exists "a/ran" [success] +$ delete "ran" [success] +$ delete "a/ran" [success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build.properties 
b/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build.properties new file mode 100644 index 000000000..0d1673349 --- /dev/null +++ b/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build.properties @@ -0,0 +1,3 @@ +project.organization=test +project.name=Interpreter Project Test +project.version=1.0 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build/src/TestProject.scala new file mode 100644 index 000000000..868aa0cff --- /dev/null +++ b/src/test/resources/sbt-test-resources/compiler-project/run-test/project/build/src/TestProject.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + override def useMavenConfigurations = true + val sc = "org.scala-tools.testing" % "scalacheck" % "1.5" % "test->default" +} diff --git a/src/test/resources/sbt-test-resources/compiler-project/run-test/src/main/scala/Foo.scala b/src/test/resources/sbt-test-resources/compiler-project/run-test/src/main/scala/Foo.scala new file mode 100644 index 000000000..871a9d9b7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/compiler-project/run-test/src/main/scala/Foo.scala @@ -0,0 +1,27 @@ +package foo.bar + +class Holder { var value: Any = _ } + +import scala.tools.nsc.{Interpreter, Settings} + +class Foo { + val settings = new Settings() + settings.classpath.value = new java.io.File(classOf[Holder].getProtectionDomain.getCodeSource.getLocation.toURI).getAbsolutePath + val inter = new Interpreter(settings) + + def eval(code: String): Any = { + val h = new Holder + inter.bind("$r_", h.getClass.getName, h) + val r = inter.interpret("$r_.value = " + code) + h.value + } +} + +object Test +{ + def main(args: Array[String]) + { + val foo = new Foo + foo.eval("3") + } +} \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/compiler-project/run-test/src/test/scala/FooTest.scala b/src/test/resources/sbt-test-resources/compiler-project/run-test/src/test/scala/FooTest.scala new file mode 100644 index 000000000..fb0601e2d --- /dev/null +++ b/src/test/resources/sbt-test-resources/compiler-project/run-test/src/test/scala/FooTest.scala @@ -0,0 +1,12 @@ +package foo.bar + +import org.scalacheck._ + +object FooTest extends Properties("Foo") +{ + specify("Set", (i: Int) => { try { + val foo = new Foo + foo.eval(i.toString) == i + } catch { case e => e.printStackTrace(); false } + }) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/compiler-project/run-test/test b/src/test/resources/sbt-test-resources/compiler-project/run-test/test new file mode 100644 index 000000000..dd9d7d54a --- /dev/null +++ b/src/test/resources/sbt-test-resources/compiler-project/run-test/test @@ -0,0 +1,11 @@ +> update +[success] + +> run +[success] + +> clean +[success] + +> test +[success] diff --git a/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build.properties new file mode 100644 index 000000000..e66951c84 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build.properties @@ -0,0 +1,2 @@ +project.name=Exclude Transitive +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build/src/TestProject.scala new file mode 100644 index 000000000..cf2d2ef40 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/project/build/src/TestProject.scala @@ -0,0 +1,26 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends 
DefaultProject(info) +{ + def transitive(dep: ModuleID) = if("transitive".asFile.exists) dep else dep.intransitive() + val javaMail = transitive("javax.mail" % "mail" % "1.4.1") + + lazy val checkTransitive = task { check(true) } + lazy val checkIntransitive = task { check(false) } + + private def check(transitive: Boolean) = + { + val downloaded = compileClasspath.get + val jars = downloaded.size + if(transitive) + { + if(jars > 1) None + else Some("Transitive dependencies not downloaded") + } + else + { + if(jars == 1) None + else Some("Transitive dependencies downloaded (" + downloaded.mkString(", ") + ")") + } + } +} diff --git a/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/test b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/test new file mode 100644 index 000000000..2a5c10a79 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/exclude-transitive/test @@ -0,0 +1,32 @@ +# load the project definition with transitive dependencies enabled +# and check that they are not downloaded + +$ touch transitive +[success] +$ reload +[success] + +> update +[success] + +> check-transitive +[success] +> check-intransitive +[failure] + + +# load the project definition with transitive dependencies disabled +# and check that they are not downloaded + +$ delete transitive +[success] +$ reload +[success] + +> update +[success] + +> check-transitive +[failure] +> check-intransitive +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject.scala b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject.scala new file mode 100644 index 000000000..6a247bcd2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject.scala @@ -0,0 +1,19 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends 
ParentProject(info) +{ + val addRepo = "Extra Test Repository" at "http://dev.camptocamp.com/files/m2_repo/" + val sub = project("sub", "Sub Project", new SubProject(_)) + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + + class SubProject(info: ProjectInfo) extends DefaultProject(info) + { + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + override def ivyXML = + + + + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject2.scala b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject2.scala new file mode 100644 index 000000000..a51ad119d --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject2.scala @@ -0,0 +1,21 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val addRepo = "Extra Test Repository" at "http://dev.camptocamp.com/files/m2_repo/" + val sub = project("sub", "Sub Project", new SubProject(_)) + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + + class SubProject(info: ProjectInfo) extends DefaultProject(info) + { + val addRepo = "Extra Test Repository" at "http://dev.camptocamp.com/files/m2_repo/" + + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + override def ivyXML = + + + + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject3.scala b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject3.scala new file mode 
100644 index 000000000..a51ad119d --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/changes/CorrectProject3.scala @@ -0,0 +1,21 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val addRepo = "Extra Test Repository" at "http://dev.camptocamp.com/files/m2_repo/" + val sub = project("sub", "Sub Project", new SubProject(_)) + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + + class SubProject(info: ProjectInfo) extends DefaultProject(info) + { + val addRepo = "Extra Test Repository" at "http://dev.camptocamp.com/files/m2_repo/" + + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + override def ivyXML = + + + + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build.properties new file mode 100644 index 000000000..e82b3b326 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Wed Apr 29 17:43:40 EDT 2009 +project.organization=sbt +project.name=Repository Inheritance +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build/src/TestProject.scala new file mode 100644 index 000000000..a576d9609 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/project/build/src/TestProject.scala @@ -0,0 +1,18 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val sub = project("sub", "Sub 
Project", new SubProject(_)) + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + + class SubProject(info: ProjectInfo) extends DefaultProject(info) + { + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList + override def ivyXML = + + + + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/test b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/test new file mode 100644 index 000000000..3c58ae287 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inherit-repo/test @@ -0,0 +1,31 @@ +# This should fail because the sub project declares a dependency that exists in an extra repository, which we haven't declared +> update +[failure] + +# Copy the project definition with the extra repository declared in the parent and reload +$ copy-file changes/CorrectProject.scala project/build/src/TestProject.scala +[success] +$ reload +[success] + +# Try updating again, which should work because the repository declared in the parent should be inherited by the child +> update +[success] + +# Copy the project definition with the extra repository declared in the child and parent and reload +$ copy-file changes/CorrectProject2.scala project/build/src/TestProject.scala +[success] +$ reload +[success] + +> update +[success] + +# Copy the project definition with the extra repository declared in the child and reload +$ copy-file changes/CorrectProject3.scala project/build/src/TestProject.scala +[success] +$ reload +[success] + +> update +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build.properties new 
file mode 100644 index 000000000..7c89429d7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build.properties @@ -0,0 +1,2 @@ +project.name=test +project.version=1.0.0 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build/src/TestProject.scala new file mode 100644 index 000000000..36c4016f4 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inline-default/project/build/src/TestProject.scala @@ -0,0 +1,20 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val httpclient = "org.apache.httpcomponents" % "httpclient" % "4.0-beta2" intransitive() + + override def useDefaultConfigurations = + if("useDefaultConfigurations".asFile.exists) true + else false + + lazy val checkDefault = task { check(Configurations.Default) } + lazy val checkCompile = task { check(Configurations.Compile) } + lazy val checkClasspath = task { if(compileClasspath.get.isEmpty) Some("Dependency in default configuration not added to classpath") else None } + + private def check(config: Configuration) = + if(configurationClasspath(config).get.isEmpty) + Some("Dependency in " + config.name + " configuration not downloaded") + else + None +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-default/test b/src/test/resources/sbt-test-resources/dependency-management/inline-default/test new file mode 100644 index 000000000..025cf0969 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inline-default/test @@ -0,0 +1,42 @@ +## run test with useDefaultConfigurations=false + +# Download jars. 
If successful, httpclient should be downloaded to the 'default' configuration +> update +[success] + +# The jar should exist in the 'default' configuration ... +> check-default +[success] +# but not in the 'compile' configuration ... +> check-compile +[failure] +# It should be present on the compile classpath +> check-classpath +[success] + +# reset test +> clean-lib +[success] +# Indicate to the project definition that we now want useDefaultConfigurations = true +$ touch useDefaultConfigurations +[success] +# Reload for change to take effect +$ reload +[success] + +## Rerun test with useDefaultConfigurations=true + +# Download jars. If successful, httpclient should be downloaded to the 'compile' configuration +> update +[success] + + +# The jar should not exist in the 'default' configuration ... +> check-default +[failure] +# It should exist in the 'compile' configuration +> check-compile +[success] +# It should be present on the compile classpath +> check-classpath +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build.properties new file mode 100644 index 000000000..2a3934bd8 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Fri Jan 30 20:49:57 EST 2009 +project.name=Inline Dependency Test A +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build/src/UpdateTestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build/src/UpdateTestProject.scala new file mode 100644 index 000000000..272ff3250 --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/project/build/src/UpdateTestProject.scala @@ -0,0 +1,8 @@ +import sbt._ + +class UpdateTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val sc = "org.scalacheck" % "scalacheck" % "1.5" + def ivyCacheDirectory = outputPath / "ivy-cache" + override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/test b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/test new file mode 100644 index 000000000..2110da54b --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/inline-dependencies-a/test @@ -0,0 +1,5 @@ +> update +[success] + +$ exists lib_managed/compile/scalacheck-1.5.jar +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scala-tools-ivysettings.xml b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scala-tools-ivysettings.xml new file mode 100644 index 000000000..ea1584c39 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scala-tools-ivysettings.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scalacheck-ivy.xml b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scalacheck-ivy.xml new file mode 100644 index 000000000..522e78a48 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/changes/scalacheck-ivy.xml @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivy.xml 
b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivy.xml new file mode 100644 index 000000000..000bffbdd --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivy.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivysettings.xml b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivysettings.xml new file mode 100644 index 000000000..03b905979 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/ivysettings.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/project/build.properties new file mode 100644 index 000000000..6f75f736f --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Sun Feb 01 13:49:30 EST 2009 +project.name=Ivy Settings Test +project.version=1.0.0 diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/test b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/test new file mode 100644 index 000000000..04e8dd8c9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-a/test @@ -0,0 +1,20 @@ +> update +[success] + +$ copy-file changes/scalacheck-ivy.xml ivy.xml +[success] + +> update +[failure] + +$ absent lib_managed/default/scalacheck-1.5.jar +[success] + +$ copy-file changes/scala-tools-ivysettings.xml ivysettings.xml +[success] + +> update +[success] + +$ exists lib_managed/default/scalacheck-1.5.jar +[success] \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/changes/scala-tools-ivysettings.xml b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/changes/scala-tools-ivysettings.xml new file mode 100644 index 000000000..ea1584c39 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/changes/scala-tools-ivysettings.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/ivysettings.xml b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/ivysettings.xml new file mode 100644 index 000000000..03b905979 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/ivysettings.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build.properties new file mode 100644 index 000000000..c7a658215 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Sun Feb 01 15:33:35 EST 2009 +project.name=Ivy Settings Test B +project.version=1.0.1 diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build/src/UpdateTestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build/src/UpdateTestProject.scala new file mode 100644 index 000000000..0f45f4a59 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/project/build/src/UpdateTestProject.scala @@ -0,0 +1,6 @@ +import sbt._ + +class UpdateTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val sc = "org.scalacheck" % "scalacheck" % "1.5" +} \ 
No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/test b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/test new file mode 100644 index 000000000..90953e12e --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/ivy-settings-b/test @@ -0,0 +1,14 @@ +> update +[error] + +$ absent lib_managed/compile/scalacheck-1.5.jar +[success] + +$ copy-file changes/scala-tools-ivysettings.xml ivysettings.xml +[success] + +> update +[success] + +$ exists lib_managed/compile/scalacheck-1.5.jar +[success] diff --git a/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build.properties new file mode 100644 index 000000000..7c89429d7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build.properties @@ -0,0 +1,2 @@ +project.name=test +project.version=1.0.0 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build/src/TestProject.scala new file mode 100644 index 000000000..c98c0e6b0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/java.net/project/build/src/TestProject.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val javaNet = JavaNet1Repository + val ejb = "javax.ejb" % "ejb-api" % "3.0" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/java.net/test b/src/test/resources/sbt-test-resources/dependency-management/java.net/test new file mode 100644 index 000000000..93397d07c --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/java.net/test @@ -0,0 +1,2 @@ +> 
update +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/provided/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/provided/project/build.properties new file mode 100644 index 000000000..59a846bd8 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/provided/project/build.properties @@ -0,0 +1,2 @@ +project.version=2.0 +project.name=Test \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/provided/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/provided/project/build/src/TestProject.scala new file mode 100644 index 000000000..244050208 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/provided/project/build/src/TestProject.scala @@ -0,0 +1,32 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultWebProject(info) +{ + override def useMavenConfigurations = true + private val provided = "useProvided".asFile.exists + private val configuration = if(provided) Configurations.Provided else Configurations.Compile + val j = "javax.servlet" % "servlet-api" % "2.5" % (configuration.name + "->default") + + lazy val checkPublic = check(publicClasspath, !provided) + lazy val checkRun = check(runClasspath, true) + lazy val checkCompile = check(compileClasspath, true) + lazy val checkProvided = check(fullClasspath(Configurations.Provided), provided) + + private def check(classpath: PathFinder, shouldBeIncluded: Boolean) = + task { checkServletAPI(shouldBeIncluded, "classpath")(classpath.get) } + + lazy val checkWar = task { Control.thread(FileUtilities.unzip(warPath, outputPath / "exploded", log))(checkServletAPI(!provided, "war")) } + private def checkServletAPI(shouldBeIncluded: Boolean, label: String)(paths: Iterable[Path]) = + { + val servletAPI = paths.find(_.asFile.getName.contains("servlet-api")) + 
if(shouldBeIncluded) + { + if(servletAPI.isEmpty) + Some("Servlet API should have been included in " + label + ".") + else + None + } + else + servletAPI.map(_ + " incorrectly included in " + label + ".") + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/provided/test b/src/test/resources/sbt-test-resources/dependency-management/provided/test new file mode 100644 index 000000000..23adf7948 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/provided/test @@ -0,0 +1,51 @@ +# verify that the classpaths are correct for when a dependency is in the provided configuration +$ touch useProvided +[success] +$ reload +[success] + +> update +[success] + +> check-run +[success] +> check-compile +[success] +> check-provided +[success] +> check-public +[success] + +> package +[success] + +> check-war +[success] + +# verify that the classpaths are correct for when a dependency is in the compile configuration +$ delete useProvided +[success] +$ reload +[success] + +> update +[success] + +> check-run +[success] +> check-compile +[success] +> check-provided +[success] +> check-public +[success] + +# prepare-webapp is last modified based, so we need to force it to do work +$ delete target +[success] + +> package +[success] + +> check-war +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/url/project/build.properties b/src/test/resources/sbt-test-resources/dependency-management/url/project/build.properties new file mode 100644 index 000000000..cefcdcae9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/url/project/build.properties @@ -0,0 +1,2 @@ +project.name=Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/dependency-management/url/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/dependency-management/url/project/build/src/TestProject.scala new file mode 
100644 index 000000000..4b4205da8 --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/url/project/build/src/TestProject.scala @@ -0,0 +1,25 @@ +import sbt._ + +import java.net.{URL, URLClassLoader} + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + override def useMavenConfigurations = true + val direct = "slinky" % "slinky" % "2.1" % "test->default" from "http://slinky2.googlecode.com/svn/artifacts/2.1/slinky.jar" + lazy val checkInTest = checkClasspath(testClasspath) + lazy val checkInCompile = checkClasspath(compileClasspath) + private def checkClasspath(cp: PathFinder) = + task + { + try + { + Class.forName("slinky.http.Application", false, new URLClassLoader(cp.get.map(_.asURL).toList.toArray)) + None + } + catch + { + case _: ClassNotFoundException => + Some("Dependency not downloaded.") + } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/dependency-management/url/test b/src/test/resources/sbt-test-resources/dependency-management/url/test new file mode 100644 index 000000000..20481d07b --- /dev/null +++ b/src/test/resources/sbt-test-resources/dependency-management/url/test @@ -0,0 +1,12 @@ +> check-in-test +[failure] +> check-in-compile +[failure] + +> update +[success] + +> check-in-test +[success] +> check-in-compile +[failure] diff --git a/src/test/resources/sbt-test-resources/java/analysis/project/build.properties b/src/test/resources/sbt-test-resources/java/analysis/project/build.properties new file mode 100644 index 000000000..274d96c88 --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/analysis/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Sat Apr 18 15:26:14 EDT 2009 +project.organization=empty +project.name=Java Dependency Analysis +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/java/analysis/src/main/java/test/R.java b/src/test/resources/sbt-test-resources/java/analysis/src/main/java/test/R.java new file mode 
100644 index 000000000..df1d987e7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/analysis/src/main/java/test/R.java @@ -0,0 +1,16 @@ +package test; + +public final class R { + public static final class attr { + } + public static final class drawable { + public static final int icon=0x7f020000; + } + public static final class layout { + public static final int main=0x7f030000; + } + public static final class string { + public static final int app_name=0x7f040001; + public static final int hello=0x7f040000; + } +} diff --git a/src/test/resources/sbt-test-resources/java/analysis/test b/src/test/resources/sbt-test-resources/java/analysis/test new file mode 100644 index 000000000..0e5cfa345 --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/analysis/test @@ -0,0 +1,2 @@ +> compile +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/java/basic/project/build.properties b/src/test/resources/sbt-test-resources/java/basic/project/build.properties new file mode 100644 index 000000000..3a234a13f --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/basic/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Sat Apr 18 15:22:08 EDT 2009 +project.organization=empty +project.name=Java Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/java/basic/src/main/java/test/R.java b/src/test/resources/sbt-test-resources/java/basic/src/main/java/test/R.java new file mode 100644 index 000000000..aef73327d --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/basic/src/main/java/test/R.java @@ -0,0 +1,6 @@ +package test; + +public final class R { + public static final int y = 4; + public static int x = (new stest.S()).y(); +} diff --git a/src/test/resources/sbt-test-resources/java/basic/src/main/scala/S.scala b/src/test/resources/sbt-test-resources/java/basic/src/main/scala/S.scala new file mode 100644 index 000000000..f9b30c89e --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/java/basic/src/main/scala/S.scala @@ -0,0 +1,7 @@ +package stest + +class S +{ + val x = test.R.y + val y = 5 +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/java/basic/test b/src/test/resources/sbt-test-resources/java/basic/test new file mode 100644 index 000000000..0e5cfa345 --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/basic/test @@ -0,0 +1,2 @@ +> compile +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/java/options/project/build.properties b/src/test/resources/sbt-test-resources/java/options/project/build.properties new file mode 100644 index 000000000..3a234a13f --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/options/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Sat Apr 18 15:22:08 EDT 2009 +project.organization=empty +project.name=Java Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/java/options/project/build/src/JavaProject.scala b/src/test/resources/sbt-test-resources/java/options/project/build/src/JavaProject.scala new file mode 100644 index 000000000..99bdd529e --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/options/project/build/src/JavaProject.scala @@ -0,0 +1,8 @@ +import sbt._ + +// verify that javaCompileOptions are used +class JavaProject(info: ProjectInfo) extends DefaultProject(info) +{ + // make the source target 1.4 so that we get an error when these options are used + override def javaCompileOptions = ("-source" :: "1.4" :: Nil).map(JavaCompileOption(_)) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/java/options/src/main/java/test/R.java b/src/test/resources/sbt-test-resources/java/options/src/main/java/test/R.java new file mode 100644 index 000000000..3fda19f3b --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/options/src/main/java/test/R.java @@ -0,0 +1,9 @@ +package test; + +import 
java.util.ArrayList; +import java.util.List; + +public final class R { + public static final int y = 4; + private static List z = new ArrayList(); +} diff --git a/src/test/resources/sbt-test-resources/java/options/src/main/scala/S.scala b/src/test/resources/sbt-test-resources/java/options/src/main/scala/S.scala new file mode 100644 index 000000000..f9b30c89e --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/options/src/main/scala/S.scala @@ -0,0 +1,7 @@ +package stest + +class S +{ + val x = test.R.y + val y = 5 +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/java/options/test b/src/test/resources/sbt-test-resources/java/options/test new file mode 100644 index 000000000..a10e99135 --- /dev/null +++ b/src/test/resources/sbt-test-resources/java/options/test @@ -0,0 +1,2 @@ +> compile +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/lazy-name/project/build.properties b/src/test/resources/sbt-test-resources/package/lazy-name/project/build.properties new file mode 100755 index 000000000..8bf006b1e --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/lazy-name/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Tue Feb 03 14:28:27 EST 2009 +project.name=Lazy Package Name +project.version=0.1.1 diff --git a/src/test/resources/sbt-test-resources/package/lazy-name/test b/src/test/resources/sbt-test-resources/package/lazy-name/test new file mode 100755 index 000000000..3b4b67352 --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/lazy-name/test @@ -0,0 +1,26 @@ +> package +[success] + +$ exists "target/lazy-package-name-0.1.1.jar" +[success] + +> clean +[success] + +> increment-version +[success] + +> package +[success] + +$ exists "target/lazy-package-name-0.1.2.jar" +[success] + +> increment-version +[success] + +> package +[success] + +$ exists "target/lazy-package-name-0.1.3.jar" +[success] diff --git 
a/src/test/resources/sbt-test-resources/package/manifest/project/build.properties b/src/test/resources/sbt-test-resources/package/manifest/project/build.properties new file mode 100644 index 000000000..e273be069 --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/manifest/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Mon Feb 02 20:49:59 EST 2009 +project.name=Jar Manifest Test +project.version=0.2 diff --git a/src/test/resources/sbt-test-resources/package/manifest/project/build/src/ManifestTestProject.scala b/src/test/resources/sbt-test-resources/package/manifest/project/build/src/ManifestTestProject.scala new file mode 100644 index 000000000..3cec27fde --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/manifest/project/build/src/ManifestTestProject.scala @@ -0,0 +1,16 @@ +import sbt._ + +class ManifestTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val scalaHome = system[String]("scala.home") + override def mainClass = Some("jartest.Main") + def manifestExtra = + { + import java.util.jar._ + val mf = new Manifest + for(scalaH <- scalaHome.get) + mf.getMainAttributes.put(Attributes.Name.CLASS_PATH, scalaH + "/lib/scala-library.jar") + mf + } + override def packageOptions = JarManifest(manifestExtra) :: super.packageOptions.toList +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/manifest/src/main/scala/jartest/Main.scala b/src/test/resources/sbt-test-resources/package/manifest/src/main/scala/jartest/Main.scala new file mode 100644 index 000000000..be96a910f --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/manifest/src/main/scala/jartest/Main.scala @@ -0,0 +1,6 @@ +package jartest + +object Main +{ + def main(args: Array[String]) {} +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/manifest/test b/src/test/resources/sbt-test-resources/package/manifest/test new file mode 100644 index 000000000..61331f4fa --- 
/dev/null +++ b/src/test/resources/sbt-test-resources/package/manifest/test @@ -0,0 +1,11 @@ +> package +[success] + +$ "exists" "./target/jar-manifest-test-0.2.jar" +[success] + +$ exec java -jar "./target/jar-manifest-test-0.2.jar" +[success] + +> run +[success] diff --git a/src/test/resources/sbt-test-resources/package/resources/project/build.properties b/src/test/resources/sbt-test-resources/package/resources/project/build.properties new file mode 100644 index 000000000..2c43d1a10 --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/resources/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Mon Feb 02 20:49:59 EST 2009 +project.name=Main Resources Test +project.version=0.1 diff --git a/src/test/resources/sbt-test-resources/package/resources/project/build/src/ManifestTestProject.scala b/src/test/resources/sbt-test-resources/package/resources/project/build/src/ManifestTestProject.scala new file mode 100644 index 000000000..3cec27fde --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/resources/project/build/src/ManifestTestProject.scala @@ -0,0 +1,16 @@ +import sbt._ + +class ManifestTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val scalaHome = system[String]("scala.home") + override def mainClass = Some("jartest.Main") + def manifestExtra = + { + import java.util.jar._ + val mf = new Manifest + for(scalaH <- scalaHome.get) + mf.getMainAttributes.put(Attributes.Name.CLASS_PATH, scalaH + "/lib/scala-library.jar") + mf + } + override def packageOptions = JarManifest(manifestExtra) :: super.packageOptions.toList +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/resources/src/main/resources/main_resource_test b/src/test/resources/sbt-test-resources/package/resources/src/main/resources/main_resource_test new file mode 100644 index 000000000..b08a24406 --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/resources/src/main/resources/main_resource_test @@ 
-0,0 +1 @@ +This is a resource to test that sbt includes main resources in the packaged jar. \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/resources/src/main/scala/jartest/Main.scala b/src/test/resources/sbt-test-resources/package/resources/src/main/scala/jartest/Main.scala new file mode 100644 index 000000000..1998ccc6d --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/resources/src/main/scala/jartest/Main.scala @@ -0,0 +1,10 @@ +package jartest + +object Main +{ + def main(args: Array[String]) + { + if(getClass.getResource("main_resource_test") == null) + System.exit(1) + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/package/resources/test b/src/test/resources/sbt-test-resources/package/resources/test new file mode 100644 index 000000000..3e4d841cb --- /dev/null +++ b/src/test/resources/sbt-test-resources/package/resources/test @@ -0,0 +1,36 @@ +#This test verifies two things: +# 1) That sbt properly puts resources from src/main/resources on the runtime classpath +# 2) That sbt properly packages resources from src/main/resources into the jar + +# This should fail because the Main object is in package jartest and the resource is directly +# in src/main/resources +> run +[failure] + +> package +[success] + +# This should fail because sbt should include the resource in the jar but it won't have the right +# directory structure +$ exec java -jar "./target/main-resources-test-0.1.jar" +[failure] + +# Give the resource the right directory structure +$ mkdir src/main/resources/jartest [success] +$ copy-file src/main/resources/main_resource_test src/main/resources/jartest/main_resource_test [success] +$ delete src/main/resources/main_resource_test [success] + +# This should succeed because sbt should put the resource on the runClasspath +> run +[success] + +# This is necessary because package bases whether or not to run on last modified times, which don't have +# high enough 
resolution to notice the above move of main_resource_test +> clean [success] + +> package +[success] + +# This should succeed because sbt should include the resource in the jar with the right directory structure +$ exec java -jar "./target/main-resources-test-0.1.jar" +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/Class.forName/changes/LibTestProject.scala b/src/test/resources/sbt-test-resources/project/Class.forName/changes/LibTestProject.scala new file mode 100644 index 000000000..121545671 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/Class.forName/changes/LibTestProject.scala @@ -0,0 +1,6 @@ +import sbt._ + +class LibTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + lazy val useJar = task { injar.Test.other; None } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/Class.forName/project/build.properties b/src/test/resources/sbt-test-resources/project/Class.forName/project/build.properties new file mode 100644 index 000000000..aa5b7e794 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/Class.forName/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Mon Mar 23 16:23:49 EDT 2009 +project.organization=empty +project.name=definition-lib-forname-test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/project/Class.forName/src/main/scala/Test.scala b/src/test/resources/sbt-test-resources/project/Class.forName/src/main/scala/Test.scala new file mode 100644 index 000000000..6e1105cd7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/Class.forName/src/main/scala/Test.scala @@ -0,0 +1,8 @@ +package injar + +object Test +{ + def other = Class.forName("injar.OtherTest") +} + +class OtherTest \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/Class.forName/test b/src/test/resources/sbt-test-resources/project/Class.forName/test new file mode 100644 
index 000000000..00c356c76 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/Class.forName/test @@ -0,0 +1,19 @@ +# this step builds a test jar for use by the project definition +> package +[success] + +$ copy-file target/definition-lib-forname-test-1.0.jar project/build/lib/test.jar +[success] + +$ copy-file changes/LibTestProject.scala project/build/src/LibTestProject.scala +[success] + +# the copied project definition depends on the Test module in test.jar and will +# fail to compile if sbt did not put the jars in project/build/lib/ on the compile classpath +$ reload +[success] + +# The project definition uses the class in test.jar and will fail here if sbt did not put the +# jars in project/build/lib on the runtime classpath +> use-jar +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/project/build.properties b/src/test/resources/sbt-test-resources/project/flatten/project/build.properties new file mode 100644 index 000000000..6ad62f9f3 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Fri May 15 12:14:00 EDT 2009 +project.organization=test +project.name=Flatten Source Hierarchy +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/project/flatten/project/build/src/FlatProject.scala b/src/test/resources/sbt-test-resources/project/flatten/project/build/src/FlatProject.scala new file mode 100644 index 000000000..abafb603e --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/project/build/src/FlatProject.scala @@ -0,0 +1,23 @@ +import sbt._ + +class FlatProject(info: ProjectInfo) extends DefaultProject(info) +{ + override def useMavenConfigurations = true + val sc = "org.scalacheck" % "scalacheck" % "1.5" % "test->default" + + def sourceFilter = "*.java" | "*.scala" + override def mainSources = descendents(sourcePath ##, sourceFilter) + override def mainResources = 
descendents(sourcePath ##, -sourceFilter) + + override def testSourcePath = "test-src" + override def testSources = descendents(testSourcePath ##, sourceFilter) + override def testResources = descendents(testSourcePath ##, -sourceFilter) + + lazy val unpackageProject = + task + { + FileUtilities.unzip(outputPath / (artifactBaseName + "-project.zip"), info.projectPath, "src/*", log).left.toOption + } dependsOn(cleanSrc) + + lazy val cleanSrc = cleanTask(sourcePath +++ testSourcePath) +} diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/JavaA.java b/src/test/resources/sbt-test-resources/project/flatten/src/JavaA.java new file mode 100644 index 000000000..4b0a4410c --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/src/JavaA.java @@ -0,0 +1,4 @@ +public class JavaA +{ + public int inc(int i) { return i+1; } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/ScalaA.scala b/src/test/resources/sbt-test-resources/project/flatten/src/ScalaA.scala new file mode 100644 index 000000000..bf212bddb --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/src/ScalaA.scala @@ -0,0 +1,6 @@ +package a.b + +class ScalaA +{ + def increment(i: Int) = i + 1 +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/a/JavaB.java b/src/test/resources/sbt-test-resources/project/flatten/src/a/JavaB.java new file mode 100644 index 000000000..988ed4701 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/src/a/JavaB.java @@ -0,0 +1,6 @@ +package a; + +public class JavaB +{ + public int dec(int i) { return i-1; } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/a/ScalaB.scala b/src/test/resources/sbt-test-resources/project/flatten/src/a/ScalaB.scala new file mode 100644 index 000000000..590ee17bb --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/project/flatten/src/a/ScalaB.scala @@ -0,0 +1,16 @@ +package b + +class ScalaB +{ + def decrement(i: Int) = i - 1 +} +object ScalaC +{ + def loadResources() + { + resource("/main-resource") + resource("main-resource-a") + resource("/a/main-resource-a") + } + def resource(s: String) = assert(getClass.getResource(s) != null, "Could not find resource '" + s + "'") +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/a/test-resource-a b/src/test/resources/sbt-test-resources/project/flatten/src/a/test-resource-a new file mode 100644 index 000000000..9754de6ae --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/src/a/test-resource-a @@ -0,0 +1 @@ +This is a test resource. \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/src/test-resource b/src/test/resources/sbt-test-resources/project/flatten/src/test-resource new file mode 100644 index 000000000..9754de6ae --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/src/test-resource @@ -0,0 +1 @@ +This is a test resource. 
\ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/test b/src/test/resources/sbt-test-resources/project/flatten/test new file mode 100644 index 000000000..0b85c1c16 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/test @@ -0,0 +1,25 @@ +# This test verifies that sbt works after the source hierarchy has been flattened and merged +# so that resources and Java and Scala sources are side by side under src/ + +> update +[success] + +> test +[success] + +# This part verifies that the package-src action works properly under a flattened/merged source hierarchy + +> package-project +[success] + +$ delete src +[success] + +> test +[failure] + +> unpackage-project +[success] + +> test +[success] diff --git a/src/test/resources/sbt-test-resources/project/flatten/test-src/SimpleTest.scala b/src/test/resources/sbt-test-resources/project/flatten/test-src/SimpleTest.scala new file mode 100644 index 000000000..54fa5f2de --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/test-src/SimpleTest.scala @@ -0,0 +1,10 @@ +import org.scalacheck._ + +class SimpleTest extends Properties("Simple") +{ + specify("increment scala", (i: Int) => (new a.b.ScalaA).increment(i) == i+1) + specify("increment java", (i: Int) => (new JavaA).inc(i) == i+1) + + specify("decrement scala", (i: Int) => (new b.ScalaB).decrement(i) == i-1) + specify("decrement java", (i: Int) => (new a.JavaB).dec(i) == i-1) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/test-src/c/ResourcesTest.scala b/src/test/resources/sbt-test-resources/project/flatten/test-src/c/ResourcesTest.scala new file mode 100644 index 000000000..7f7d60034 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/test-src/c/ResourcesTest.scala @@ -0,0 +1,19 @@ +package d + +import org.scalacheck._ + +class ResourcesTest extends Properties("Resources") +{ + specify("load main resources ok", (a: 
Boolean) => { b.ScalaC.loadResources(); true }) + specify("load test resources ok", (a: Boolean) => { ScalaD.loadResources(); true }) +} +object ScalaD +{ + def loadResources() + { + resource("/test-resource") + resource("test-resource-c") + resource("/c/test-resource-c") + } + def resource(s: String) = assert(getClass.getResource(s) != null, "Could not find resource '" + s + "'") +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/test-src/c/test-resource-c b/src/test/resources/sbt-test-resources/project/flatten/test-src/c/test-resource-c new file mode 100644 index 000000000..9754de6ae --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/test-src/c/test-resource-c @@ -0,0 +1 @@ +This is a test resource. \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/flatten/test-src/test-resource b/src/test/resources/sbt-test-resources/project/flatten/test-src/test-resource new file mode 100644 index 000000000..9754de6ae --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/flatten/test-src/test-resource @@ -0,0 +1 @@ +This is a test resource. 
\ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/lib/changes/LibTestProject.scala b/src/test/resources/sbt-test-resources/project/lib/changes/LibTestProject.scala new file mode 100644 index 000000000..c586969ae --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/lib/changes/LibTestProject.scala @@ -0,0 +1,6 @@ +import sbt._ + +class LibTestProject(info: ProjectInfo) extends DefaultProject(info) +{ + lazy val useJar = task { injar.Test.foo } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/lib/project/build.properties b/src/test/resources/sbt-test-resources/project/lib/project/build.properties new file mode 100644 index 000000000..237e3120c --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/lib/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Mon Mar 23 16:23:49 EDT 2009 +project.organization=empty +project.name=definition-lib-test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/project/lib/src/main/scala/Test.scala b/src/test/resources/sbt-test-resources/project/lib/src/main/scala/Test.scala new file mode 100644 index 000000000..bfa832aec --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/lib/src/main/scala/Test.scala @@ -0,0 +1,6 @@ +package injar + +object Test +{ + def foo: Option[String] = None +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/lib/test b/src/test/resources/sbt-test-resources/project/lib/test new file mode 100644 index 000000000..a6870d084 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/lib/test @@ -0,0 +1,19 @@ +# this step builds a test jar for use by the project definition +> package +[success] + +$ copy-file target/definition-lib-test-1.0.jar project/build/lib/test.jar +[success] + +$ copy-file changes/LibTestProject.scala project/build/src/LibTestProject.scala +[success] + +# the copied project definition depends on the Test 
module in test.jar and will +# fail to compile if sbt did not put the jars in project/build/lib/ on the compile classpath +$ reload +[success] + +# The project definition uses the class in test.jar and will fail here if sbt did not put the +# jars in project/build/lib on the runtime classpath +> use-jar +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/changes/MultiProject.scala b/src/test/resources/sbt-test-resources/project/multi/changes/MultiProject.scala new file mode 100644 index 000000000..1fbb3af93 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/changes/MultiProject.scala @@ -0,0 +1,6 @@ +package test + +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +class AnotherProject(info: ProjectInfo) extends DefaultProject(info) \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/changes/SingleAndTraitProject.scala b/src/test/resources/sbt-test-resources/project/multi/changes/SingleAndTraitProject.scala new file mode 100644 index 000000000..46c2904cb --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/changes/SingleAndTraitProject.scala @@ -0,0 +1,9 @@ +package test + +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) + +trait NotAProject extends Project +abstract class AnotherNonProject extends Project +object YetAnotherNonProject extends DefaultProject(ProjectInfo(new java.io.File("."), Nil, None)) \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/changes/SingleProject.scala b/src/test/resources/sbt-test-resources/project/multi/changes/SingleProject.scala new file mode 100644 index 000000000..f5c85946a --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/changes/SingleProject.scala @@ -0,0 +1,5 @@ +package test + +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) \ No 
newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/changes/SinglePublicProject.scala b/src/test/resources/sbt-test-resources/project/multi/changes/SinglePublicProject.scala new file mode 100644 index 000000000..9ea008c42 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/changes/SinglePublicProject.scala @@ -0,0 +1,7 @@ +package test + +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +protected class NotMainProject(info: ProjectInfo) extends DefaultProject(info) +private class AnotherNotMainProject(info: ProjectInfo) extends DefaultProject(info) \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/project/build.properties b/src/test/resources/sbt-test-resources/project/multi/project/build.properties new file mode 100644 index 000000000..2b4d7e5a1 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/project/build.properties @@ -0,0 +1,2 @@ +project.name=Test +project.version=1.0 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/project/multi/test b/src/test/resources/sbt-test-resources/project/multi/test new file mode 100644 index 000000000..317a22840 --- /dev/null +++ b/src/test/resources/sbt-test-resources/project/multi/test @@ -0,0 +1,31 @@ +# There should be no ambiguity with a single project definition + +$ copy-file changes/SingleProject.scala project/build/src/TestProject.scala +[success] + +$ reload +[success] + +# Again, no ambiguity with a single project definition and any number of abstract classes/traits implementing Project + +$ copy-file changes/SingleAndTraitProject.scala project/build/src/TestProject.scala +[success] + +$ reload +[success] + +# Multiple public projects should be an error + +$ copy-file changes/MultiProject.scala project/build/src/TestProject.scala +[success] + +$ reload +[failure] + +# One public project and any number of non-public projects should not be 
an error + +$ copy-file changes/SinglePublicProject.scala project/build/src/TestProject.scala +[success] + +$ reload +[success] diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build.properties b/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build.properties new file mode 100644 index 000000000..f74f0984a --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Tue Feb 17 21:42:42 EST 2009 +project.name=Properties Subproject Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build/src/TestProject.scala new file mode 100644 index 000000000..263e8c573 --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-a/project/build/src/TestProject.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val a = project("a", "Sub project A") + val b = project("b", "Sub project B") +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-a/test b/src/test/resources/sbt-test-resources/properties/multi-project-a/test new file mode 100644 index 000000000..b21259178 --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-a/test @@ -0,0 +1,5 @@ +$ absent a/project +[success] + +$ absent b/project +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build.properties b/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build.properties new file mode 100644 index 000000000..f74f0984a --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build.properties @@ -0,0 +1,4 @@ +#Project properties 
+#Tue Feb 17 21:42:42 EST 2009 +project.name=Properties Subproject Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build/src/TestProject.scala new file mode 100644 index 000000000..b16b32c25 --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-b/project/build/src/TestProject.scala @@ -0,0 +1,11 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends ParentProject(info) +{ + val a = subproject("a", "Sub project A") + val b = subproject("b", "Sub project B") + + private def subproject(path: Path, name: String) = project(path, name, new TestSubProject(_)) + + class TestSubProject(info: ProjectInfo) extends DefaultProject(info) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/properties/multi-project-b/test b/src/test/resources/sbt-test-resources/properties/multi-project-b/test new file mode 100644 index 000000000..b21259178 --- /dev/null +++ b/src/test/resources/sbt-test-resources/properties/multi-project-b/test @@ -0,0 +1,5 @@ +$ absent a/project +[success] + +$ absent b/project +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/run/daemon-exit/project/build.properties b/src/test/resources/sbt-test-resources/run/daemon-exit/project/build.properties new file mode 100644 index 000000000..825710b2b --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/daemon-exit/project/build.properties @@ -0,0 +1,3 @@ +project.version=1.0 +project.name=Daemon Thread Test +project.organization=sbt diff --git a/src/test/resources/sbt-test-resources/run/daemon-exit/src/main/scala/Daemon.scala b/src/test/resources/sbt-test-resources/run/daemon-exit/src/main/scala/Daemon.scala new file mode 100644 index 000000000..a56d89708 --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/run/daemon-exit/src/main/scala/Daemon.scala @@ -0,0 +1,23 @@ +// This test verifies that System.exit from a daemon thread works properly + +object DaemonExit +{ + def main(args: Array[String]) + { + val t = new Thread { + override def run() { + Thread.sleep(1000) + System.exit(0) + } + } + t.setDaemon(true) + t.start() + + val t2 = new Thread { + override def run() { + synchronized { wait() } + } + } + t2.start() + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/run/daemon-exit/test b/src/test/resources/sbt-test-resources/run/daemon-exit/test new file mode 100644 index 000000000..57bd0350e --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/daemon-exit/test @@ -0,0 +1,2 @@ +> run +[success] diff --git a/src/test/resources/sbt-test-resources/run/daemon/project/build.properties b/src/test/resources/sbt-test-resources/run/daemon/project/build.properties new file mode 100644 index 000000000..825710b2b --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/daemon/project/build.properties @@ -0,0 +1,3 @@ +project.version=1.0 +project.name=Daemon Thread Test +project.organization=sbt diff --git a/src/test/resources/sbt-test-resources/run/daemon/src/main/scala/Daemon.scala b/src/test/resources/sbt-test-resources/run/daemon/src/main/scala/Daemon.scala new file mode 100644 index 000000000..a15a0e4cf --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/daemon/src/main/scala/Daemon.scala @@ -0,0 +1,13 @@ +object Daemon +{ + def main(args: Array[String]) + { + val t = new Thread { + override def run() { + synchronized { wait() } + } + } + t.setDaemon(true); + t.start + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/run/daemon/test b/src/test/resources/sbt-test-resources/run/daemon/test new file mode 100644 index 000000000..57bd0350e --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/daemon/test @@ -0,0 +1,2 @@ +> run +[success] 
diff --git a/src/test/resources/sbt-test-resources/run/spawn-exit/project/build.properties b/src/test/resources/sbt-test-resources/run/spawn-exit/project/build.properties new file mode 100644 index 000000000..825710b2b --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn-exit/project/build.properties @@ -0,0 +1,3 @@ +project.version=1.0 +project.name=Daemon Thread Test +project.organization=sbt diff --git a/src/test/resources/sbt-test-resources/run/spawn-exit/src/main/scala/Spawn.scala b/src/test/resources/sbt-test-resources/run/spawn-exit/src/main/scala/Spawn.scala new file mode 100644 index 000000000..766ed2955 --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn-exit/src/main/scala/Spawn.scala @@ -0,0 +1,30 @@ +// The purpose of this test is to verify that sbt.TrapExit properly waits for Threads started after +// the main method exits and that it handles System.exit from a second generation thread. +// The first thread waits 1s for the main method to exit and then creates another thread. +// This thread waits another second before calling System.exit. The first thread hangs around to +// ensure that TrapExit actually processes the exit. 
+ +object Spawn +{ + def main(args: Array[String]) + { + (new ThreadA).start + } + class ThreadA extends Thread + { + override def run() + { + Thread.sleep(1000) + (new ThreadB).start() + synchronized { wait() } + } + } + class ThreadB extends Thread + { + override def run() + { + Thread.sleep(1000) + System.exit(0) + } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/run/spawn-exit/test b/src/test/resources/sbt-test-resources/run/spawn-exit/test new file mode 100644 index 000000000..57bd0350e --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn-exit/test @@ -0,0 +1,2 @@ +> run +[success] diff --git a/src/test/resources/sbt-test-resources/run/spawn/project/build.properties b/src/test/resources/sbt-test-resources/run/spawn/project/build.properties new file mode 100644 index 000000000..825710b2b --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn/project/build.properties @@ -0,0 +1,3 @@ +project.version=1.0 +project.name=Daemon Thread Test +project.organization=sbt diff --git a/src/test/resources/sbt-test-resources/run/spawn/src/main/scala/Spawn.scala b/src/test/resources/sbt-test-resources/run/spawn/src/main/scala/Spawn.scala new file mode 100644 index 000000000..4845e572b --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn/src/main/scala/Spawn.scala @@ -0,0 +1,35 @@ +// The purpose of this test is to verify that sbt.TrapExit properly waits for Threads started after +// the main method exits. +// The first thread waits 1s for the main method to exit and then creates another thread. +// This thread waits another second before exiting. 
+ +object Spawn +{ + def main(args: Array[String]) + { + (new ThreadA).start + } + class ThreadA extends Thread + { + override def run() + { + sleep() + (new ThreadB).start() + } + } + class ThreadB extends Thread + { + override def run() { sleep() } + } + private def sleep() + { + try { Thread.sleep(1000) } + catch + { + case e: InterruptedException => + val msg = "TrapExit improperly interrupted non-daemon thread" + System.err.println(msg) + error(msg) + } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/run/spawn/test b/src/test/resources/sbt-test-resources/run/spawn/test new file mode 100644 index 000000000..57bd0350e --- /dev/null +++ b/src/test/resources/sbt-test-resources/run/spawn/test @@ -0,0 +1,2 @@ +> run +[success] diff --git a/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A.scala b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A.scala new file mode 100644 index 000000000..0c52ad165 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A.scala @@ -0,0 +1,6 @@ +package a + +object A +{ + def x = "A" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A2.scala b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A2.scala new file mode 100644 index 000000000..2e85f9808 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/A2.scala @@ -0,0 +1,6 @@ +/*package a + +object A +{ + def x = "A" +}*/ \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/B.scala b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/B.scala new file mode 100644 index 000000000..7865ddcec --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/changes/B.scala @@ -0,0 +1,6 @@ +package a + +class B +{ + def x = A.x +} 
\ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/empty-a/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/project/build.properties new file mode 100644 index 000000000..5fe3e1d87 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Tue Jan 27 07:55:42 EST 2009 +project.name=Empty Source Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/empty-a/test b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/test new file mode 100644 index 000000000..2e58e8cb9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/empty-a/test @@ -0,0 +1,41 @@ +$ copy-file changes/A.scala src/main/scala/A.scala +[success] + +> compile +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[success] + +$ copy-file changes/B.scala src/main/scala/B.scala +[success] + +> compile +[failure] + +$ copy-file changes/A.scala src/main/scala/A.scala +[success] + +> compile +[success] + +$ delete src/main/scala/B.scala +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[success] + +$ copy-file changes/A.scala src/main/scala/A.scala +[success] + +$ copy-file changes/B.scala src/main/scala/B.scala +[success] + +> compile +[success] diff --git a/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/changes/A2.scala b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/changes/A2.scala new file mode 100644 index 000000000..c530efeb4 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/changes/A2.scala @@ -0,0 +1 @@ +trait A { val x = (new B).y } diff --git a/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/project/build.properties 
b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/project/build.properties new file mode 100644 index 000000000..66fbd1686 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Mon Feb 09 21:05:16 EST 2009 +project.name=Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/A.scala b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/A.scala new file mode 100644 index 000000000..d6d4ebdd9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/A.scala @@ -0,0 +1 @@ +trait A { val x = "hello" } diff --git a/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/B.scala b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/B.scala new file mode 100644 index 000000000..4fa74512d --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/src/main/scala/B.scala @@ -0,0 +1 @@ +class B extends A { val y = x } diff --git a/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/test b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/test new file mode 100644 index 000000000..1a8ead2f3 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/new-cyclic/test @@ -0,0 +1,8 @@ +> compile +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[failure] diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/changes/1.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/changes/1.scala new file mode 100644 index 000000000..1444a85e8 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/changes/1.scala @@ -0,0 +1,6 @@ +package test + +object 
TestScriptTest +{ + val x: Int = "" +} diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/project/build.properties new file mode 100644 index 000000000..8c4503ca9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Fri Jan 23 22:29:49 EST 2009 +project.name=Test Project A +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/test b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/test new file mode 100644 index 000000000..e3d44e4e1 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-a/test @@ -0,0 +1,14 @@ +> compile +[success] + +$ copy-file changes/1.scala src/main/scala/1.scala +[success] + +> compile +[error] + +$ delete src/main/scala/1.scala +[success] + +> compile +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/A2.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/A2.scala new file mode 100644 index 000000000..b971413bf --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/A2.scala @@ -0,0 +1,6 @@ +package test + +object A +{ + def test = B.length +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B3.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B3.scala new file mode 100644 index 000000000..c5a1267df --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B3.scala @@ -0,0 +1,6 @@ +package test + +object B +{ + def length: Int = 5 +} \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B4.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B4.scala new file mode 100644 index 000000000..86a6cb4b9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B4.scala @@ -0,0 +1,5 @@ +package test + +object B +{ +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B5.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B5.scala new file mode 100644 index 000000000..8829a8f73 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/changes/B5.scala @@ -0,0 +1,5 @@ +/*package test + +object B +{ +}*/ \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/project/build.properties new file mode 100644 index 000000000..fcc97e13f --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Sat Jan 24 19:12:23 EST 2009 +project.name=Remove Test B +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/A.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/A.scala new file mode 100644 index 000000000..af481a935 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/A.scala @@ -0,0 +1,5 @@ +package test + +object A +{ +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/B.scala b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/B.scala new file mode 
100644 index 000000000..86a6cb4b9 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/src/main/scala/B.scala @@ -0,0 +1,5 @@ +package test + +object B +{ +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/test b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/test new file mode 100644 index 000000000..4d9f786f3 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/remove-test-b/test @@ -0,0 +1,50 @@ +> compile +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[error] + +$ copy-file changes/B3.scala src/main/scala/B.scala +[success] + +> compile +[success] + +$ delete src/main/scala/B.scala +[success] + +> compile +[failure] + +$ copy-file changes/B3.scala src/main/scala/B.scala +[success] + +> compile +[success] + +$ copy-file changes/B4.scala src/main/scala/B.scala +[success] + +> compile +[failure] + +$ copy-file changes/B3.scala src/main/scala/B.scala +[success] + +> compile +[success] + +$ copy-file changes/B5.scala src/main/scala/B.scala +[success] + +> compile +[failure] + +$ copy-file changes/B3.scala src/main/scala/B.scala +[success] + +> compile +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/first.scala b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/first.scala new file mode 100644 index 000000000..d21cd83e2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/first.scala @@ -0,0 +1,4 @@ +object First +{ + def main(args: Array[String]) {} +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/second.scala b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/second.scala new file mode 100644 index 
000000000..e64067f1e --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/changes/second.scala @@ -0,0 +1,4 @@ +object Second +{ + def main(args: Array[String]) {} +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build.properties new file mode 100644 index 000000000..2b4d7e5a1 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build.properties @@ -0,0 +1,2 @@ +project.name=Test +project.version=1.0 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build/src/TestProject.scala new file mode 100644 index 000000000..a50180c06 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/project/build/src/TestProject.scala @@ -0,0 +1,9 @@ +import sbt._ +import java.net.URLClassLoader +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + lazy val checkFirst = checkTask("First") + lazy val checkSecond = checkTask("Second") + private def checkTask(className: String) = task { doCheck(className); None } + private def doCheck(className: String) = Class.forName(className, false, new URLClassLoader(runClasspath.get.map(_.asURL).toList.toArray)) +} diff --git a/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/test b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/test new file mode 100644 index 000000000..cdaf24b09 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/replace-test-a/test @@ -0,0 +1,19 @@ +$ copy-file changes/first.scala src/main/scala/A.scala +[success] + +> compile +[success] +> check-first +[success] 
+> check-second +[failure] + +$ copy-file changes/second.scala src/main/scala/A.scala +[success] + +> compile +[success] +> check-first +[failure] +> check-second +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/changes/A2.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/changes/A2.scala new file mode 100644 index 000000000..8811bc83d --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/changes/A2.scala @@ -0,0 +1,4 @@ +object A +{ + val x = 5 +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/project/build.properties new file mode 100644 index 000000000..309b0ced2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Mon Jan 26 19:13:08 EST 2009 +project.name=Transitive A +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/A.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/A.scala new file mode 100644 index 000000000..a0121ded0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/A.scala @@ -0,0 +1,4 @@ +object A +{ + val x = "a" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/B.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/B.scala new file mode 100644 index 000000000..da3e21492 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/B.scala @@ -0,0 +1,4 @@ +object B +{ + val y = A.x +} \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/C.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/C.scala new file mode 100644 index 000000000..0c2345e0f --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/src/main/scala/C.scala @@ -0,0 +1,4 @@ +object C +{ + val z = B.y.length +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/test b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/test new file mode 100644 index 000000000..efa71c72b --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-a/test @@ -0,0 +1,8 @@ +> compile +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/changes/A2.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/changes/A2.scala new file mode 100644 index 000000000..3a0001416 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/changes/A2.scala @@ -0,0 +1,4 @@ +trait A +{ + val x = 5 +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/project/build.properties b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/project/build.properties new file mode 100644 index 000000000..309b0ced2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Mon Jan 26 19:13:08 EST 2009 +project.name=Transitive A +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/A.scala 
b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/A.scala new file mode 100644 index 000000000..d49070e79 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/A.scala @@ -0,0 +1,4 @@ +trait A +{ + val x = "a" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/B.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/B.scala new file mode 100644 index 000000000..310eb5b60 --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/B.scala @@ -0,0 +1 @@ +trait B extends A \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/C.scala b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/C.scala new file mode 100644 index 000000000..da117fc3f --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/src/main/scala/C.scala @@ -0,0 +1,4 @@ +trait C extends B +{ + val z = x.length +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/test b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/test new file mode 100644 index 000000000..efa71c72b --- /dev/null +++ b/src/test/resources/sbt-test-resources/source-dependencies/transitive-b/test @@ -0,0 +1,8 @@ +> compile +[success] + +$ copy-file changes/A2.scala src/main/scala/A.scala +[success] + +> compile +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/Class.forName/ivy.xml b/src/test/resources/sbt-test-resources/tests/Class.forName/ivy.xml new file mode 100644 index 000000000..d1cc76e1a --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/Class.forName/ivy.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No 
newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/Class.forName/project/build.properties b/src/test/resources/sbt-test-resources/tests/Class.forName/project/build.properties new file mode 100644 index 000000000..c2f7ceba7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/Class.forName/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Tue Mar 24 17:40:29 EDT 2009 +project.organization=empty +project.name=class-forname +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/tests/Class.forName/src/main/scala/Test.scala b/src/test/resources/sbt-test-resources/tests/Class.forName/src/main/scala/Test.scala new file mode 100644 index 000000000..906b37f37 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/Class.forName/src/main/scala/Test.scala @@ -0,0 +1,10 @@ +package lib + +object Test +{ + def other = Class.forName("lib.OtherTest") + def otherThread = Class.forName("lib.OtherTest2", true, Thread.currentThread.getContextClassLoader) +} + +class OtherTest +class OtherTest2 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/Class.forName/src/test/scala/Test.scala b/src/test/resources/sbt-test-resources/tests/Class.forName/src/test/scala/Test.scala new file mode 100644 index 000000000..e213f62c7 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/Class.forName/src/test/scala/Test.scala @@ -0,0 +1,15 @@ +package test + +import org.specs._ + +object TestSpecification extends Specification +{ + "Class.forName must work in libraries used in tests" in { + val a: AnyRef = lib.Test.other + a must notBe(null) + } + "Class.forName using Thread.getContextLoader must work in libraries used in tests" in { + val a: AnyRef = lib.Test.otherThread + a must notBe(null) + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/Class.forName/test b/src/test/resources/sbt-test-resources/tests/Class.forName/test new file mode 
100644 index 000000000..401921be5 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/Class.forName/test @@ -0,0 +1,23 @@ +# Create a jar that contains two classes: Test and OtherTest +# Test loads OtherTest using Class.forName +> package +[success] + +# get the specs jar +> update +[success] + +# copy to the lib_managed/test directory to simulate +# a library on the 'test' configuration +$ copy-file target/class-forname-1.0.jar lib_managed/test/forname.jar +[success] + +# Remove the classes that created the jar +$ delete src/main/ target/ +[success] + +# Compile and run the test that calls into the jar created above +# It won't succeed if something is messed up with class loading +> test +[success] + diff --git a/src/test/resources/sbt-test-resources/tests/extend/changes/ScalaCheck.scala b/src/test/resources/sbt-test-resources/tests/extend/changes/ScalaCheck.scala new file mode 100644 index 000000000..4a436c7aa --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/changes/ScalaCheck.scala @@ -0,0 +1,41 @@ +/** This tests implementing a test framework in a project definition. To ensure sbt's builtin ScalaCheck +* test framework is not used, it flips success and failure so that a failing test is marked as succeeding and +* a suceeding test is marked as failing. 
*/ + +package framework + +import sbt._ + +object FrameworkScalaCheck extends LazyTestFramework +{ + val name = "ScalaCheck" + + def testSuperClassName = "org.scalacheck.Properties" + def testSubClassType = ClassType.Module + + def testRunnerClassName = "framework.RunnerScalaCheck" +} + +class RunnerScalaCheck(val log: Logger, val listeners: Seq[TestReportListener], val testLoader: ClassLoader) extends BasicTestRunner +{ + import org.scalacheck.{Pretty, Properties, Test} + def runTest(testClassName: String): Result.Value = + { + val test = ModuleUtilities.getObject(testClassName, testLoader).asInstanceOf[Properties] + val result = Test.checkProperties(test, Test.defaultParams, propReport, testReport).find(!_._2.passed) + if(result.isEmpty) + Result.Failed // intentionally flipped (see top comment) + else + Result.Passed // intentionally flipped (see top comment) + } + private def propReport(pName: String, s: Int, d: Int) {} + private def testReport(name: String, res: Test.Result) + { + val msg = Pretty.pretty(res) + if(res.passed) + log.info("+ " + name + ": " + msg) + else + log.error("! 
" + name + ": " + msg) + + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/extend/changes/TestFailure.scala b/src/test/resources/sbt-test-resources/tests/extend/changes/TestFailure.scala new file mode 100644 index 000000000..cbf7461ac --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/changes/TestFailure.scala @@ -0,0 +1,6 @@ + +import org.scalacheck._ +object TestFailure extends Properties("Success -> Failure") +{ + specify("Always true", (i: Int) => true) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject2.scala b/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject2.scala new file mode 100644 index 000000000..2f0a0c805 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject2.scala @@ -0,0 +1,8 @@ +import sbt._ + +class TestProject2(info: ProjectInfo) extends DefaultProject(info) +{ + override def testFrameworks = framework.FrameworkScalaCheck :: Nil + override def useMavenConfigurations = true + val sc = "org.scala-tools.testing" % "scalacheck" % "1.5" % "test->default" +} diff --git a/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject3.scala b/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject3.scala new file mode 100644 index 000000000..80205752d --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/changes/TestProject3.scala @@ -0,0 +1,12 @@ +import sbt._ + +class TestProject3(info: ProjectInfo) extends ParentProject(info) +{ + lazy val child = project("child", "Main", new ChildProject(_)) + class ChildProject(info: ProjectInfo) extends DefaultProject(info) + { + override def testFrameworks = framework.FrameworkScalaCheck :: Nil + override def useMavenConfigurations = true + val sc = "org.scala-tools.testing" % "scalacheck" % "1.5" % "test->default" + } +} diff --git 
a/src/test/resources/sbt-test-resources/tests/extend/changes/TestSuccess.scala b/src/test/resources/sbt-test-resources/tests/extend/changes/TestSuccess.scala new file mode 100644 index 000000000..06cd573d0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/changes/TestSuccess.scala @@ -0,0 +1,6 @@ + +import org.scalacheck._ +object TestSuccess extends Properties("Failure -> Success") +{ + specify("Always false", (i: Int) => false) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/extend/project/build.properties b/src/test/resources/sbt-test-resources/tests/extend/project/build.properties new file mode 100644 index 000000000..6a367a1d6 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/project/build.properties @@ -0,0 +1,5 @@ +#Project properties +#Wed Apr 29 15:43:25 EDT 2009 +project.organization=sbt +project.name=framework +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/tests/extend/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/tests/extend/project/build/src/TestProject.scala new file mode 100644 index 000000000..95e20dce2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/project/build/src/TestProject.scala @@ -0,0 +1,9 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val sc = "org.scalacheck" % "scalacheck" % "1.5" + + override def updateAction = super.updateAction dependsOn addSbt + lazy val addSbt = task { FileUtilities.copyFile(FileUtilities.sbtJar, (dependencyPath / "sbt.jar").asFile, log) } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/extend/test b/src/test/resources/sbt-test-resources/tests/extend/test new file mode 100644 index 000000000..3b0a9ba30 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/extend/test @@ -0,0 +1,72 @@ +# get ScalaCheck library and copy the main sbt jar to the lib directory +> update 
+[success] + +# Copy the source file implementing the framework to the src tree +$ copy-file changes/ScalaCheck.scala src/main/scala/ScalaCheck.scala +[success] + +# compile and package the framework +> package +[success] + +# move the test framework jar to the build project and delete the sources and the sbt jar +# The test framework runs ScalaCheck tests, but swaps success and failure +# to ensure the custom framework is being used and not sbt's built-in ScalaCheck support +$ copy-file target/framework-1.0.jar project/build/lib/framework-1.0.jar +[success] +$ delete target/framework-1.0.jar +[success] +$ delete src/main +[success] +$ delete lib +[success] + +# replace the project definition with the one that uses the framework, reload, and update +# (update moves scalatest.jar to the 'test' configuration) +$ copy-file changes/TestProject2.scala project/build/src/TestProject.scala +[success] +$ reload +[success] +> update +[success] + +# Copy a source containing a failing test that the framework should mark as succeeding +$ copy-file changes/TestSuccess.scala src/test/scala/TestSuccess.scala +[success] +> test +[success] + + +# Copy a source containing a successful test that the framework should mark as failing +# First, delete the previous test +$ delete src/test/scala/TestSuccess.scala +[success] +$ copy-file changes/TestFailure.scala src/test/scala/TestFailure.scala +[success] +> test +[failure] + + +# replace the project definition with one that tests a child project +# reload, and update +$ copy-file changes/TestProject3.scala project/build/src/TestProject.scala +[success] +$ reload +[success] +> update +[success] + +# remove sources in the parent project +$ delete src +[success] +# Copy a source containing a failing test that the framework should mark as succeeding +$ copy-file changes/TestSuccess.scala child/src/test/scala/TestSuccess.scala +[success] +> test +[success] +# Copy a source containing a successful test that the framework should mark as failing 
+$ copy-file changes/TestFailure.scala child/src/test/scala/TestFailure.scala +[success] +> test +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/resources/ivy.xml b/src/test/resources/sbt-test-resources/tests/resources/ivy.xml new file mode 100644 index 000000000..0a7c1fb35 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/resources/ivy.xml @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/resources/project/build.properties b/src/test/resources/sbt-test-resources/tests/resources/project/build.properties new file mode 100644 index 000000000..f6079cb71 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/resources/project/build.properties @@ -0,0 +1,3 @@ +#Project properties +project.name=Resources Test +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/tests/resources/src/main/resources/MainResource.txt b/src/test/resources/sbt-test-resources/tests/resources/src/main/resources/MainResource.txt new file mode 100644 index 000000000..1c9bf4968 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/resources/src/main/resources/MainResource.txt @@ -0,0 +1 @@ +Main \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/resources/src/test/resources/TestResource.txt b/src/test/resources/sbt-test-resources/tests/resources/src/test/resources/TestResource.txt new file mode 100644 index 000000000..51da4200a --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/resources/src/test/resources/TestResource.txt @@ -0,0 +1 @@ +Success \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/resources/src/test/scala/BasicTest.scala b/src/test/resources/sbt-test-resources/tests/resources/src/test/scala/BasicTest.scala new file mode 100644 index 000000000..9d2540347 --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/tests/resources/src/test/scala/BasicTest.scala @@ -0,0 +1,11 @@ +import org.specs._ + +object BasicTest extends Specification +{ + "Test resource on test classpath" in { + getClass.getResource("TestResource.txt") mustNotBe null + } + "Main resource on test classpath" in { + getClass.getResource("MainResource.txt") mustNotBe null + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/resources/test b/src/test/resources/sbt-test-resources/tests/resources/test new file mode 100644 index 000000000..3653c1158 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/resources/test @@ -0,0 +1,5 @@ +> update +[success] + +> test +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/BasicTest.scala b/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/BasicTest.scala new file mode 100755 index 000000000..9478475b2 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/BasicTest.scala @@ -0,0 +1,21 @@ +import org.scalacheck._ + +object BasicTest extends Properties("A basic runnable test") +{ + specify("startsWith", (a: String, b: String) => (a+b).startsWith(a)) +} + +abstract class AbstractNotATest extends Properties("Not a runnable test") +{ + specify("Fail", (a: Int, b: Int) => false) +} + +class ClassNotATest extends Properties("Not a runnable test") +{ + specify("Fail", (a: Int, b: Int) => false) +} + +trait TraitNotATest +{ self: Properties => + specify("Fail", (a: Int, b: Int) => false) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/FailedTest.scala b/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/FailedTest.scala new file mode 100755 index 000000000..079d0da20 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/changes/FailedTest.scala @@ -0,0 +1,21 @@ +import org.scalacheck._ + 
+object BasicTest extends Properties("A basic runnable test") +{ + specify("startsWith", (a: String, b: String) => (a+b).startsWith(a)) +} + +abstract class AbstractNotATest extends Properties("Not a runnable test") +{ + specify("Fail", (a: Int, b: Int) => false) +} + +object ClassNotATest extends Properties("A failing test") +{ + specify("Fail", (a: Int, b: Int) => false) +} + +trait TraitNotATest extends Properties("Not a runnable test") +{ + specify("Fail", (a: Int, b: Int) => false) +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build.properties b/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build.properties new file mode 100755 index 000000000..f9da83551 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Tue Feb 03 14:43:21 EST 2009 +project.name=ScalaCheck Support Test +project.version=9.3 diff --git a/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build/src/TestProject.scala new file mode 100755 index 000000000..e5ab1f803 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/project/build/src/TestProject.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val scalacheck = "org.scalacheck" % "scalacheck" % "1.5" + val cacheDirectory = outputPath / "cache" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalacheck-a/src/test/scala/DummyTest.scala b/src/test/resources/sbt-test-resources/tests/scalacheck-a/src/test/scala/DummyTest.scala new file mode 100755 index 000000000..e33905e34 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/src/test/scala/DummyTest.scala @@ -0,0 +1,2 @@ + +class DummyTest \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/tests/scalacheck-a/test b/src/test/resources/sbt-test-resources/tests/scalacheck-a/test new file mode 100755 index 000000000..d75fcb579 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalacheck-a/test @@ -0,0 +1,23 @@ +> test +[success] + +$ copy-file changes/BasicTest.scala src/test/scala/BasicTest.scala +[success] + +$ delete src/test/scala/DummyTest.scala +[success] + +> test-compile +[error] + +> update +[success] + +> test +[success] + +$ copy-file changes/FailedTest.scala src/test/scala/BasicTest.scala +[success] + +> test +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build.properties b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build.properties new file mode 100644 index 000000000..2f60fce89 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build.properties @@ -0,0 +1,2 @@ +project.name=Ignore Test +project.version=3.9.2 \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build/Test.scala b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build/Test.scala new file mode 100644 index 000000000..14c47cd63 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/project/build/Test.scala @@ -0,0 +1,7 @@ +import sbt._ + +class Test(info: ProjectInfo) extends DefaultProject(info) +{ + override def useMavenConfigurations = true + val st = "org.scala-tools.testing" % "scalatest" % "0.9.5" % "test->default" +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalatest-ignore/src/test/scala/IgnoreTest.scala b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/src/test/scala/IgnoreTest.scala new file mode 100644 index 000000000..beb547e75 --- /dev/null +++ 
b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/src/test/scala/IgnoreTest.scala @@ -0,0 +1,11 @@ +import org.scalatest._ +import org.scalatest.matchers._ + +class IgnoreTest extends Spec with BeforeAndAfter with MustMatchers { + + describe("This test") { + ignore("should be ignored") { + error("Test ran") + } + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/scalatest-ignore/test b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/test new file mode 100644 index 000000000..3653c1158 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/scalatest-ignore/test @@ -0,0 +1,5 @@ +> update +[success] + +> test +[success] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-a/changes/BasicTest.scala b/src/test/resources/sbt-test-resources/tests/specs-a/changes/BasicTest.scala new file mode 100755 index 000000000..fc0834d27 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/changes/BasicTest.scala @@ -0,0 +1,36 @@ +import org.specs._ + +object BasicTest extends Specification +{ + // from specs example + "'hello world' has 11 characters" in { + "hello world".size mustEqual 11 + } + "'hello world' matches 'h.* w.*'" in { + "hello world" must beMatching("h.* w.*") + } +} + +trait TraitNotATest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} + +abstract class AbstractNotATest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} + +class ClassNotATest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-a/changes/FailTest.scala b/src/test/resources/sbt-test-resources/tests/specs-a/changes/FailTest.scala new file 
mode 100755 index 000000000..8df53ef80 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/changes/FailTest.scala @@ -0,0 +1,36 @@ +import org.specs._ + +object BasicTest extends Specification +{ + // from specs example + "'hello world' has 11 characters" in { + "hello world".size mustEqual 11 + } + "'hello world' matches 'h.* w.*'" in { + "hello world" must beMatching("h.* w.*") + } +} + +trait TraitNotATest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} + +object FailTest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} + +class ClassNotATest extends Specification +{ + // would fail if called + "'hello world' has 11 characters" in { + "hello world".size mustEqual 12 + } +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-a/project/build.properties b/src/test/resources/sbt-test-resources/tests/specs-a/project/build.properties new file mode 100755 index 000000000..ec1f47dd0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Tue Feb 03 15:05:44 EST 2009 +project.name=Specs Support Test +project.version=5.5.4 diff --git a/src/test/resources/sbt-test-resources/tests/specs-a/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/tests/specs-a/project/build/src/TestProject.scala new file mode 100755 index 000000000..a286ac738 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/project/build/src/TestProject.scala @@ -0,0 +1,7 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val scalacheck = "org.specs" % "specs" % "1.4.1" + val cacheDirectory = outputPath / "cache" +} \ No newline at end of file diff --git 
a/src/test/resources/sbt-test-resources/tests/specs-a/src/test/scala/DummyTest.scala b/src/test/resources/sbt-test-resources/tests/specs-a/src/test/scala/DummyTest.scala new file mode 100755 index 000000000..71c316a47 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/src/test/scala/DummyTest.scala @@ -0,0 +1,2 @@ + +class DummyTest \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-a/test b/src/test/resources/sbt-test-resources/tests/specs-a/test new file mode 100755 index 000000000..9325193a0 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-a/test @@ -0,0 +1,23 @@ +> test +[success] + +$ copy-file changes/BasicTest.scala src/test/scala/BasicTest.scala +[success] + +$ delete src/test/scala/DummyTest.scala +[success] + +> test-compile +[error] + +> update +[success] + +> test +[success] + +$ copy-file changes/FailTest.scala src/test/scala/BasicTest.scala +[success] + +> test +[failure] \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-nested/project/build.properties b/src/test/resources/sbt-test-resources/tests/specs-nested/project/build.properties new file mode 100755 index 000000000..cc9a4d3b6 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-nested/project/build.properties @@ -0,0 +1,4 @@ +#Project properties +#Wed Feb 25 18:38:37 EST 2009 +project.name=Nested Specifications +project.version=1.0 diff --git a/src/test/resources/sbt-test-resources/tests/specs-nested/project/build/src/TestProject.scala b/src/test/resources/sbt-test-resources/tests/specs-nested/project/build/src/TestProject.scala new file mode 100755 index 000000000..e06023159 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-nested/project/build/src/TestProject.scala @@ -0,0 +1,6 @@ +import sbt._ + +class TestProject(info: ProjectInfo) extends DefaultProject(info) +{ + val scalacheck = "org.specs" % "specs" % "1.4.1" +} \ No newline at 
end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-nested/src/test/scala/TestSpecification.scala b/src/test/resources/sbt-test-resources/tests/specs-nested/src/test/scala/TestSpecification.scala new file mode 100755 index 000000000..2380afab5 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-nested/src/test/scala/TestSpecification.scala @@ -0,0 +1,19 @@ +import org.specs._ + +object TestSpecification extends Specification { + "A sample specification1" should { + "return something" in { + "hello" mustNotBe "world" + } + } + + object sampleSpecification extends Specification { + "the first system" should { + "skip one example" in { skip("skipped") } + "have one example ok" in {} + "have one example ko" in { 1 mustBe 2 } + "have one example in error" in { throw new Error("error") } + } + } + +} \ No newline at end of file diff --git a/src/test/resources/sbt-test-resources/tests/specs-nested/test b/src/test/resources/sbt-test-resources/tests/specs-nested/test new file mode 100755 index 000000000..3653c1158 --- /dev/null +++ b/src/test/resources/sbt-test-resources/tests/specs-nested/test @@ -0,0 +1,5 @@ +> update +[success] + +> test +[success] \ No newline at end of file diff --git a/src/test/scala/sbt/DagSpecification.scala b/src/test/scala/sbt/DagSpecification.scala new file mode 100644 index 000000000..4947e0819 --- /dev/null +++ b/src/test/scala/sbt/DagSpecification.scala @@ -0,0 +1,56 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah */ + +package sbt + +import org.scalacheck._ +import Prop._ + +import scala.collection.mutable.HashSet + +object DagSpecification extends Properties("Dag") +{ + specify("No repeated nodes", (dag: TestDag) => isSet(dag.topologicalSort)) + specify("Sort contains node", (dag: TestDag) => dag.topologicalSort.contains(dag)) + specify("Dependencies precede node", (dag: TestDag) => dependenciesPrecedeNodes(dag.topologicalSort)) + + implicit lazy val arbTestDag: Arbitrary[TestDag] = 
Arbitrary(Gen.sized(dagGen)) + private def dagGen(nodeCount: Int): Gen[TestDag] = + { + val nodes = new HashSet[TestDag] + def nonterminalGen(p: Gen.Params): Gen[TestDag] = + { + for(i <- 0 until nodeCount; nextDeps <- Gen.someOf(nodes).apply(p)) + nodes += new TestDag(i, nextDeps) + for(nextDeps <- Gen.someOf(nodes)) yield + new TestDag(nodeCount, nextDeps) + } + Gen.parameterized(nonterminalGen) + } + + private def isSet[T](c: Seq[T]) = Set(c: _*).size == c.size + private def dependenciesPrecedeNodes(sort: List[TestDag]) = + { + val seen = new HashSet[TestDag] + def iterate(remaining: List[TestDag]): Boolean = + { + remaining match + { + case Nil => true + case node :: tail => + if(node.dependencies.forall(seen.contains) && !seen.contains(node)) + { + seen += node + iterate(tail) + } + else + false + } + } + iterate(sort) + } +} +class TestDag(id: Int, val dependencies: Iterable[TestDag]) extends Dag[TestDag] +{ + override def toString = id + "->" + dependencies.mkString("[", ",", "]") +} \ No newline at end of file diff --git a/src/test/scala/sbt/EnvironmentSpecification.scala b/src/test/scala/sbt/EnvironmentSpecification.scala new file mode 100644 index 000000000..0468ee397 --- /dev/null +++ b/src/test/scala/sbt/EnvironmentSpecification.scala @@ -0,0 +1,98 @@ +/* sbt -- Simple Build Tool + * Copyright 2009 Mark Harrah + */ +package sbt + +import org.scalacheck._ + +object EnvironmentSpecification extends Properties("Environment") +{ s => + private[this] type Env = BasicEnvironment { def x: Property[Int] } + + val log = new ConsoleLogger + + specify("Non-optional user property assignment", testAssign _) + specify("Optional user property assignment", testDefaultAssign _) + specify("Optional user property default and assignment", testDefault _) + specify("Optional user property default and then assignment", testDefaultThenAssign _) + specify("Uninitialized empty when all properties are initialized", testUninitializedEmpty _) + specify("Uninitialized empty when all 
properties have defaults", testDefaultUninitializedEmpty _) + specify("Property defaulting to another property ok", propertyDefaultsToProperty _) + specify("Project-style name+organization", (name: String) => projectEmulation(name.trim)) + + private def projectEmulation(testName: String) = + { + import Prop._ + (!testName.isEmpty) ==> + withBacking { backing => + def env = new DefaultEnv(backing) { + final def name: String = projectName.value + final val projectName = propertyLocalF[String](NonEmptyStringFormat) + final val projectOrganization = propertyOptional[String](name, true) + } + val first = env + first.projectName() = testName + first.saveEnvironment + val second = env + env.projectOrganization.value == testName + } + } + private def propertyDefaultsToProperty(value: Int) = + { + withBacking { backing => + val env = new DefaultEnv(backing) { + val base = propertyOptional[Int](value) + val chained = propertyOptional[Int](base.value) + } + env.chained.value == value + } + } + private def testAssign(value: Int) = + { + withEnvironment { env => + env.x() = value + env.x.value == value + } + } + private def testDefaultAssign(value: Int, default: Int) = + { + withDefaultEnvironment(default) { env => + env.x() = value + env.x.value == value + } + } + private def testDefault(value: Int, default: Int) = + { + withDefaultEnvironment(default) { env => + env.x.value == default + } + } + private def testDefaultThenAssign(value: Int, default: Int) = + { + withDefaultEnvironment(default) { env => + env.x.value == default && + { + env.x() = value + env.x.value == value + } + } + } + private def testUninitializedEmpty(value: Int) = + { + withEnvironment { env => + env.x() = value + env.uninitializedProperties.isEmpty + } + } + private def testDefaultUninitializedEmpty(default: Int) = withDefaultEnvironment(default)(_.uninitializedProperties.isEmpty) + + private def defaultEnvironment(default: Int)(backing: Path) = new DefaultEnv(backing) { val x = 
propertyOptional[Int](default) } + private def environment(backing: Path) = new DefaultEnv(backing) { val x = property[Int] } + + private def withBacking[T](f: Path => T): T = Control.getOrError( FileUtilities.withTemporaryFile(log, "env", "")(file => Right(f(Path.fromFile(file))) ) ) + private def withEnvironment[T](f: Env => T): T = withEnvironmentImpl(environment)(f) + private def withDefaultEnvironment[T](default: Int)(f: Env => T): T = withEnvironmentImpl(defaultEnvironment(default))(f) + private def withEnvironmentImpl[T](env: Path => Env)(f: Env => T): T = withBacking(f compose env) + + private class DefaultEnv(val envBackingPath: Path) extends BasicEnvironment { def log = s.log } +} \ No newline at end of file diff --git a/src/test/scala/sbt/FileUtilitiesSpecification.scala b/src/test/scala/sbt/FileUtilitiesSpecification.scala new file mode 100644 index 000000000..d36807e11 --- /dev/null +++ b/src/test/scala/sbt/FileUtilitiesSpecification.scala @@ -0,0 +1,73 @@ +/* sbt -- Simple Build Tool + * Copyright 2008, 2009 Mark Harrah + */ +package sbt + +import org.scalacheck._ +import java.io.File + +object WriteContentSpecification extends Properties("Write content") +{ + val log = new ConsoleLogger + log.setLevel(Level.Warn) + + specify("Roundtrip string", writeAndCheckString _) + specify("Roundtrip bytes", writeAndCheckBytes _) + specify("Write string overwrites", overwriteAndCheckStrings _) + specify("Write bytes overwrites", overwriteAndCheckBytes _) + specify("Append string appends", appendAndCheckStrings _) + specify("Append bytes appends", appendAndCheckBytes _) + + // make the test independent of underlying platform and allow any unicode character in Strings to be encoded + val charset = java.nio.charset.Charset.forName("UTF-8") + + import FileUtilities._ + private def writeAndCheckString(s: String) = + { + val result = withTemporaryFile( file => writeThen(file, s)( readString(file, charset, log) ) ) + handleResult[String](result, _ == s) + } + private 
def writeAndCheckBytes(b: Array[Byte]) = + { + val result = withTemporaryFile( file => writeThen(file, b)( readBytes(file, log) ) ) + handleResult[Array[Byte]](result, _ deepEquals b) + } + private def overwriteAndCheckStrings(a: String, b: String) = + { + val result = withTemporaryFile( file => writeThen(file, a)( writeThen(file, b)( readString(file, charset, log) ) ) ) + handleResult[String](result, _ == b) + } + private def overwriteAndCheckBytes(a: Array[Byte], b: Array[Byte]) = + { + val result = withTemporaryFile( file => writeThen(file, a)( writeThen(file, b)( readBytes(file, log) ) ) ) + handleResult[Array[Byte]](result, _ deepEquals b) + } + private def appendAndCheckStrings(a: String, b: String) = + { + val result = withTemporaryFile( file => appendThen(file, a)( appendThen(file, b)( readString(file, charset, log) ) ) ) + handleResult[String](result, _ == (a+b)) + } + private def appendAndCheckBytes(a: Array[Byte], b: Array[Byte]) = + { + val result = withTemporaryFile( file => appendThen(file, a)( appendThen(file, b)( readBytes(file, log) ) ) ) + handleResult[Array[Byte]](result, _ deepEquals (a++b)) + } + + private def withTemporaryFile[T](f: File => Either[String, T]): Either[String, T] = + doInTemporaryDirectory(log) { dir => f(new java.io.File(dir, "out")) } + + private def handleResult[T](result: Either[String, T], check: T => Boolean): Boolean = + result match + { + case Left(err) => log.trace(new RuntimeException(err)); log.error(err); false + case Right(x) => check(x) + } + private def writeThen[T](file: File, content: String)(action: => Either[String, T]) = + write(file, content, charset, log).toLeft(()).right.flatMap { x =>action } + private def writeThen[T](file: File, content: Array[Byte])(action: => Either[String, T]) = + write(file, content, log).toLeft(()).right.flatMap { x =>action } + private def appendThen[T](file: File, content: String)(action: => Either[String, T]) = + append(file, content, charset, log).toLeft(()).right.flatMap { x 
=>action } + private def appendThen[T](file: File, content: Array[Byte])(action: => Either[String, T]) = + append(file, content, log).toLeft(()).right.flatMap { x =>action } +} \ No newline at end of file diff --git a/src/test/scala/sbt/NameFilterSpecification.scala b/src/test/scala/sbt/NameFilterSpecification.scala new file mode 100644 index 000000000..ec2dd95a4 --- /dev/null +++ b/src/test/scala/sbt/NameFilterSpecification.scala @@ -0,0 +1,39 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah */ + +package sbt + +import org.scalacheck._ +import Prop._ + +object NameFilterSpecification extends Properties("NameFilter") +{ + specify("All pass accepts everything", (s: String) => AllPassFilter.accept(s)) + specify("Exact filter matches provided string", + (s1: String, s2: String) => (new ExactFilter(s1)).accept(s2) == (s1 == s2) ) + specify("Exact filter matches valid string", (s: String) => (new ExactFilter(s)).accept(s) ) + + specify("Glob filter matches provided string if no *s", + (s1: String, s2: String) => + { + val stripped = stripAsterisksAndControl(s1) + (GlobFilter(stripped).accept(s2) == (stripped == s2)) + }) + specify("Glob filter matches valid string if no *s", + (s: String) => + { + val stripped = stripAsterisksAndControl(s) + GlobFilter(stripped).accept(stripped) + }) + + specify("Glob filter matches valid", + (list: List[String]) => + { + val stripped = list.map(stripAsterisksAndControl) + GlobFilter(stripped.mkString("*")).accept(stripped.mkString) + }) + + /** Raw control characters are stripped because they are not allowed in expressions. 
+ * Asterisks are stripped because they are added under the control of the tests.*/ + private def stripAsterisksAndControl(s: String) = s.filter(c => !java.lang.Character.isISOControl(c) && c != '*').toString +} \ No newline at end of file diff --git a/src/test/scala/sbt/PathSpecification.scala b/src/test/scala/sbt/PathSpecification.scala new file mode 100644 index 000000000..a65e2d98f --- /dev/null +++ b/src/test/scala/sbt/PathSpecification.scala @@ -0,0 +1,109 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +import org.scalacheck._ +import Arbitrary.arbitrary +import Prop._ +import java.io.File + +object PathSpecification extends Properties("Path") +{ + val log = new ConsoleLogger + log.setLevel(Level.Warn) + + implicit val pathComponent: Arbitrary[String] = + Arbitrary(for(id <- Gen.identifier) yield trim(id)) // TODO: make a more specific Arbitrary + implicit val projectDirectory: Arbitrary[ProjectDirectory] = Arbitrary(Gen.value(new ProjectDirectory(new File(".")))) + implicit val arbPath: Arbitrary[Path] = Arbitrary(genPath) + + specify("Project directory relative path empty", (projectPath: ProjectDirectory) => projectPath.relativePath.isEmpty) + specify("construction", (dir: ProjectDirectory, components: List[String]) => + pathForComponents(dir, components).asFile == fileForComponents(dir.asFile, components) ) + specify("Relative path", (dir: ProjectDirectory, a: List[String], b: List[String]) => + pathForComponents(pathForComponents(dir, a) ##, b).relativePath == pathString(b) ) + specify("Proper URL conversion", (path: Path) => path.asURL == path.asFile.toURI.toURL) + specify("Path equality", (dir: ProjectDirectory, components: List[String]) => + pathForComponents(dir, components) == pathForComponents(dir, components)) + specify("Base path equality", (dir: ProjectDirectory, a: List[String], b: List[String]) => + pathForComponents(pathForComponents(dir, a) ##, b) == pathForComponents(pathForComponents(dir, a) ##, b) ) + 
specify("hashCode", (path: Path) => path.hashCode == path.asFile.hashCode) + + // the relativize tests are a bit of a mess because of a few things: + // 1) relativization requires directories to exist + // 2) there is an IOException thrown in touch for paths that are too long (probably should limit the size of the Lists) + // These problems are addressed by the helper method createFileAndDo + + specify("relativize fail", (dir: ProjectDirectory, a: List[String], b: List[String]) => + { + (!a.contains("") && !b.contains("")) ==> + { + createFileAndDo(a, b) + { dir => + { + val shouldFail = (a == b) || !(b startsWith a) // will be true most of the time + val didFail = Path.relativize(pathForComponents(dir, a), pathForComponents(dir, b)).isEmpty + shouldFail == didFail + } + } + } + }) + specify("relativize", (a: List[String], b: List[String]) => + { + (!b.isEmpty && !a.contains("") && !b.contains("")) ==> + { + createFileAndDo(a, b) + { dir => + { + val base = pathForComponents(dir, a) + val path = pathForComponents(base, b) + Path.relativize(base, path) == Some(path) + } + } + } + }) + specify("fromString", (dir: ProjectDirectory, a: List[String]) => + pathForComponents(dir, a) == Path.fromString(dir, pathString(a))) + + private def createFileAndDo(a: List[String], b: List[String])(f: Path => Boolean) = + { + val result = + FileUtilities.doInTemporaryDirectory(log)( dir => + { + FileUtilities.touch(fileForComponents(dir, a ::: b), log) match + { + case None => Right(Some( f(new ProjectDirectory(dir)) )) + case Some(err) => Left(err) + } + }) + result match + { + case Left(err) => throw new RuntimeException(err) + case Right(opt) => opt.isDefined ==> opt.get + } + } + + private def pathString(components: List[String]): String = components.mkString(File.separator) + private def pathForComponents(base: Path, components: List[String]): Path = + components.foldLeft(base)((path, component) => path / component) + private def fileForComponents(base: File, components: 
List[String]): File = + components.foldLeft(base)((file, component) => new File(file, component)) + private def genPath: Gen[Path] = + for(projectPath <- arbitrary[ProjectDirectory]; + a <- arbitrary[List[String]]; + b <- arbitrary[Option[List[String]]]) + yield + { + val base = pathForComponents(projectPath, trim(a)) + b match + { + case None => base + case Some(relative) => pathForComponents(base ##, trim(relative)) + } + } + private def trim(components: List[String]): List[String] = components.take(MaxComponentCount) + private def trim(component: String): String = component.substring(0, Math.min(component.length, MaxFilenameLength)) + val MaxFilenameLength = 20 + val MaxComponentCount = 6 +} \ No newline at end of file diff --git a/src/test/scala/sbt/ProcessSpecification.scala b/src/test/scala/sbt/ProcessSpecification.scala new file mode 100644 index 000000000..61d530555 --- /dev/null +++ b/src/test/scala/sbt/ProcessSpecification.scala @@ -0,0 +1,91 @@ +package sbt + +import java.io.File +import org.scalacheck.{Prop, Properties} +import Prop._ + +import Process._ + +object ProcessSpecification extends Properties("Process I/O") +{ + private val log = new ConsoleLogger + + specify("Correct exit code", (exitCode: Byte) => checkExit(exitCode)) + specify("#&& correct", (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #&& _)(_ && _)) + specify("#|| correct", (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #|| _)(_ || _)) + specify("Pipe to output file", (data: Array[Byte]) => checkFileOut(data)) + specify("Pipe to input file", (data: Array[Byte]) => checkFileIn(data)) + specify("Pipe to process", (data: Array[Byte]) => checkPipe(data)) + + private def checkBinary(codes: Array[Byte])(reduceProcesses: (ProcessBuilder, ProcessBuilder) => ProcessBuilder)(reduceExit: (Boolean, Boolean) => Boolean) = + { + (codes.length > 1) ==> + { + val unsignedCodes = codes.map(unsigned) + val exitCode = unsignedCodes.map(code => Process(process("sbt.exit " + 
code))).reduceLeft(reduceProcesses) ! + val expectedExitCode = unsignedCodes.map(toBoolean).reduceLeft(reduceExit) + toBoolean(exitCode) == expectedExitCode + } + } + private def toBoolean(exitCode: Int) = exitCode == 0 + private def checkExit(code: Byte) = + { + val exitCode = unsigned(code) + (process("sbt.exit " + exitCode) !) == exitCode + } + private def checkFileOut(data: Array[Byte]) = + { + withData(data) { (temporaryFile, temporaryFile2) => + val catCommand = process("sbt.cat " + temporaryFile.getAbsolutePath) + catCommand #> temporaryFile2 + } + } + private def checkFileIn(data: Array[Byte]) = + { + withData(data) { (temporaryFile, temporaryFile2) => + val catCommand = process("sbt.cat") + temporaryFile #> catCommand #> temporaryFile2 + } + } + private def checkPipe(data: Array[Byte]) = + { + withData(data) { (temporaryFile, temporaryFile2) => + val catCommand = process("sbt.cat") + temporaryFile #> catCommand #| catCommand #> temporaryFile2 + } + } + private def temp() = File.createTempFile("sbt", "") + private def withData(data: Array[Byte])(f: (File, File) => ProcessBuilder) = + { + val temporaryFile1 = temp() + val temporaryFile2 = temp() + try + { + FileUtilities.write(temporaryFile1, data, log) + val process = f(temporaryFile1, temporaryFile2) + ( process ! 
) == 0 && + { + val result = + for(b1 <- FileUtilities.readBytes(temporaryFile1, log).right; b2 <- FileUtilities.readBytes(temporaryFile2, log).right) yield + b1 deepEquals b2 + result.fold(error, x => x) + } + } + finally + { + temporaryFile1.delete() + temporaryFile2.delete() + } + } + private def unsigned(b: Byte): Int = ((b: Int) +256) % 256 + private def process(command: String) = + { + val ignore = echo // just for the compile dependency so that this test is rerun when TestedProcess.scala changes, not used otherwise + + val thisClasspath = List(getSource[ScalaObject], getSource[sbt.Logger], getSource[sbt.SourceTag]).mkString(File.pathSeparator) + "java -cp " + thisClasspath + " " + command + } + private def getSource[T](implicit mf: scala.reflect.Manifest[T]): String = + (new File(mf.erasure.getProtectionDomain.getCodeSource.getLocation.toURI)).getAbsolutePath +} +private trait SourceTag \ No newline at end of file diff --git a/src/test/scala/sbt/ReflectSpecification.scala b/src/test/scala/sbt/ReflectSpecification.scala new file mode 100644 index 000000000..082359a78 --- /dev/null +++ b/src/test/scala/sbt/ReflectSpecification.scala @@ -0,0 +1,181 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +// Still TODO: +// inheritance- hierarchy, overriding + +import org.scalacheck._ +import scala.reflect.Manifest + +// specify members +// specify classes +// map members to classes +// compile, instantiate base +// get all vals +// validate vals + +object ReflectiveSpecification extends Properties("ReflectUtilities") +{ + import ReflectiveArbitrary._ + // pick other modifiers, any name, any type for the member, any type to find, + // pick a class hierarchy, select any class from that hierarchy, add member + // instantiate instance, perform reflection and verify it is empty + specify("Public found", publicFound _) + specify("Private hidden", privateHidden _) + + private def publicFound(isFinal: Boolean, decl: DeclarationType, name: 
Identifier, rType: ReturnType, + findType: ReturnType, container: ConcreteContainer) = + { + val modifiers: Set[Modifier] = if(isFinal) Set(Final) else Set() + val member = Member(Public, modifiers, decl, name.toString, rType, true) + val shouldFind = decl != Def && rType.manifest <:< findType.manifest + allVals(Map(container -> List(member)), container, findType.manifest).isEmpty == !shouldFind + } + private def privateHidden(isFinal: Boolean, decl: DeclarationType, name: Identifier, rType: ReturnType, + findType: ReturnType, container: ConcreteContainer) = + { + val scope = Private(None) + val modifiers: Set[Modifier] = if(isFinal) Set(Final) else Set() + val member = Member(scope, modifiers, decl, name.toString, rType, true) + allVals(Map(container -> List(member)), container, findType.manifest).isEmpty + } + private def allVals(classes: Map[Container, List[Member]], check: Container, manifest: Manifest[_]) = + { + val instance = ReflectiveCreate.compileInstantiate(classes, check) + ReflectUtilities.allVals(instance)(manifest) + } +} + +object ReflectiveArbitrary +{ + implicit val arbIdentifier: Arbitrary[Identifier] = Arbitrary { for(id <- Gen.identifier) yield Identifier(id) } + implicit val arbDeclarationType: Arbitrary[DeclarationType] = Arbitrary { Gen.elements(Val, Def, LazyVal) } + implicit val arbModifier: Arbitrary[Modifier] = Arbitrary { Gen.elements(Final, Abstract, Override) } + // TODO: parameterize + implicit val arbType: Arbitrary[ReturnType] = + Arbitrary { Gen.elements(classes: _*) } + implicit val arbConcrete: Arbitrary[ConcreteContainer] = Arbitrary(genConcrete) + //implicit val arbContainer: Arbitrary[Container] = Arbitrary { Gen.oneOf(arbConcreteContainer, arbTrait) } + //implicit val arbTrait: Arbitrary[Trait] = Arbitrary { } + + //TODO: inheritance + def genConcrete = for(name <- Gen.identifier) yield ConcreteClass(name, None, Nil) + + val classes = List[ReturnType]( + typ[String]("String", "null"), + typ[Object]("Object", "null"), + 
typ[Int]("Int", "0"), + typ[List[String]]("List[String]", "Nil"), + typ[Option[Int]]("Option[Int]", "None") ) + + def typ[T](name: String, defaultValue: String)(implicit mf: Manifest[T]) = + BasicReturnType(name, Nil, mf, defaultValue) +} + +object ReflectiveCreate +{ + import scala.collection.mutable + + def compileInstantiate(classes: Map[Container, List[Member]], instantiate: Container): AnyRef = + { + val log = new ConsoleLogger + log.setLevel(Level.Warn) + val code = new StringBuilder + def addMember(m: Member) + { + code.append(m.toString) + code.append("\n") + } + def addClass(c: Container, m: List[Member]) + { + code.append(c.signature) + code.append(" {\n") + m.foreach(addMember) + code.append(" }\n") + } + for((c, members) <- classes) addClass(c, members) + + val codeString = code.toString + def doCompileInstantiate(dir: java.io.File): Either[String, AnyRef] = + { + val basePath = new ProjectDirectory(dir) + val source = basePath / "a.scala" + val sourceFile = source.asFile + val outputDirectory = basePath / "target" + for(writeOK <- FileUtilities.write(sourceFile, codeString, log).toLeft("").right; + compileOK <- (new Compile(100))("reflect", source :: Nil, "", outputDirectory, Nil, log).toLeft("").right) + yield + { + val loader = new java.net.URLClassLoader(Array(outputDirectory.asURL), getClass.getClassLoader) + val c = Class.forName(instantiate.name, true, loader) + c.newInstance.asInstanceOf[AnyRef] + } + } + FileUtilities.doInTemporaryDirectory(log)(doCompileInstantiate) match + { + case Left(err) => log.error(err); log.error(codeString); throw new RuntimeException(err) + case Right(x) => x + } + } +} + +final case class Identifier(override val toString: String) extends NotNull + +sealed abstract class Modifier(override val toString: String) extends NotNull +object Final extends Modifier("final") +object Abstract extends Modifier("abstract") +object Override extends Modifier("override") + +sealed trait Scope extends NotNull +sealed abstract class 
QualifiedScope(label: String, qualifier: Option[String]) extends Scope +{ + override def toString = label + qualifier.map("[" + _ + "]").getOrElse("") +} +final case class Private(qualifier: Option[String]) extends QualifiedScope("private", qualifier) +final case class Protected(qualifier: Option[String]) extends QualifiedScope("protected", qualifier) +final object Public extends Scope { override def toString = "" } + + +sealed abstract class DeclarationType(override val toString: String) extends NotNull +object LazyVal extends DeclarationType("lazy val") +object Val extends DeclarationType("val") +object Def extends DeclarationType("def") + +sealed abstract class Container(prefix: String) extends NotNull +{ + def signature: String = prefix + " " + name + parents.map(_.name).mkString(" extends ", " with ", "") + def name: String + def mixins: List[Trait] + def parents: List[Container] = mixins +} +sealed abstract class ClassContainer(prefix: String) extends Container(prefix) +{ + def base: Option[ClassContainer] + override def parents = base.toList ::: mixins +} +sealed abstract class ConcreteContainer(prefix: String) extends ClassContainer(prefix) +final case class AbstractClass(name: String, base: Option[ClassContainer], mixins: List[Trait]) extends ClassContainer("abstract class") +final case class ConcreteClass(name: String, base: Option[ClassContainer], mixins: List[Trait]) extends ConcreteContainer("class") +final case class Module(name: String, base: Option[ClassContainer], mixins: List[Trait]) extends ConcreteContainer("object") +final case class Trait(name: String, mixins: List[Trait]) extends Container("trait") + +trait ReturnType +{ + def name: String + def parameters: List[ReturnType] + def manifest: Manifest[_] + def defaultValue: String + override def toString = name +} +sealed case class BasicReturnType(name: String, parameters: List[ReturnType], + manifest: Manifest[_], defaultValue: String) extends ReturnType + +case class Member(scope: Scope, 
modifiers: Set[Modifier], declaration: DeclarationType, + name: String, mType: ReturnType, valueSpecified: Boolean) extends NotNull +{ + override def toString = scope.toString + modifiers.mkString(" ", " "," ") + + declaration.toString + " " + name + " : " + mType.toString + + (if(valueSpecified) " = " + mType.defaultValue else "") +} \ No newline at end of file diff --git a/src/test/scala/sbt/TestedProcess.scala b/src/test/scala/sbt/TestedProcess.scala new file mode 100644 index 000000000..0e9a6a3c4 --- /dev/null +++ b/src/test/scala/sbt/TestedProcess.scala @@ -0,0 +1,55 @@ +package sbt + +import java.io.File + +object exit +{ + def main(args: Array[String]) + { + System.exit(java.lang.Integer.parseInt(args(0))) + } +} +object cat +{ + def main(args: Array[String]) + { + val result = + if(args.length == 0) + FileUtilities.transfer(System.in, System.out, log) + else + catFiles(args.toList) + result match + { + case Some(err) => System.err.println("Error: " + err); System.exit(1) + case None => System.exit(0) + } + } + private val log = new ConsoleLogger + private def catFiles(filenames: List[String]): Option[String] = + { + filenames match + { + case head :: tail => + val file = new File(head) + if(file.isDirectory) + Some("Is directory: " + file) + else if(file.exists) + { + FileUtilities.readStream(file, log) { stream => + FileUtilities.transfer(stream, System.out, log) + } + catFiles(tail) + } + else + Some("No such file or directory: " + file) + case Nil => None + } + } +} +object echo +{ + def main(args: Array[String]) + { + System.out.println(args.mkString(" ")) + } +} \ No newline at end of file diff --git a/src/test/scala/sbt/VersionSpecification.scala b/src/test/scala/sbt/VersionSpecification.scala new file mode 100644 index 000000000..2c1c0eb5a --- /dev/null +++ b/src/test/scala/sbt/VersionSpecification.scala @@ -0,0 +1,61 @@ +/* sbt -- Simple Build Tool + * Copyright 2008 Mark Harrah + */ +package sbt + +import org.scalacheck._ + +object 
VersionSpecification extends Properties("Version") +{ + import ArbitraryVersion._ + specify("Empty or whitespace only string not allowed, all others allowed", + (s: String) => Version.fromString(s).isLeft == s.trim.isEmpty) + specify("BasicVersion round trips", checkRoundTrip _) + specify("BasicVersion increment major", checkIncrementMajor _) + specify("BasicVersion increment minor", checkIncrementMinor _) + specify("BasicVersion increment micro", checkIncrementMicro _) + + private def checkRoundTrip(v: BasicVersion) = + { + val v2 = Version.fromString(v.toString) + v2.isRight && v2.right.get == v + } + private def checkIncrementMinor(v: BasicVersion) = checkIncrement(v, _.incrementMinor) + private def checkIncrementMajor(v: BasicVersion) = checkIncrement(v, _.incrementMajor) + private def checkIncrementMicro(v: BasicVersion) = checkIncrement(v, _.incrementMicro) + private def checkIncrement(v: BasicVersion, increment: (BasicVersion => BasicVersion)) = + { + val vNew = increment(v) + checkRoundTrip(vNew) && vNew != v + } +} +object ArbitraryVersion +{ + implicit lazy val arbBasicVersion: Arbitrary[BasicVersion] = Arbitrary(genBasicVersion) + implicit lazy val arbOpaqueVersion: Arbitrary[OpaqueVersion] = Arbitrary(genOpaqueVersion) + implicit lazy val arbVersion: Arbitrary[Version] = Arbitrary(genVersion) + + import Arbitrary._ + import Math.abs + lazy val genBasicVersion = + for{major <- arbInt.arbitrary + minor <- arbOption[Int].arbitrary + micro <- arbOption[Int].arbitrary + extra <- genExtra } + yield + { + if(minor.isEmpty && micro.isDefined) + BasicVersion(abs(major), micro.map(abs), None, extra) + else + BasicVersion(abs(major), minor.map(abs), micro.map(abs), extra) + } + lazy val genOpaqueVersion = for(versionString <- arbString.arbitrary if !versionString.trim.isEmpty) yield OpaqueVersion(versionString) + lazy val genVersion = Gen.frequency((5,genBasicVersion), (1,genOpaqueVersion)) + + private lazy val genExtra = + for(extra <- 
arbOption[String].arbitrary; + val trimmedExtra = extra.map(_.trim.filter(c => !java.lang.Character.isISOControl(c)).toString); + if Version.isValidExtra(trimmedExtra)) + yield + trimmedExtra +} \ No newline at end of file diff --git a/src/test/scala/sbt/wrap/MutableSetWrapper.scala b/src/test/scala/sbt/wrap/MutableSetWrapper.scala new file mode 100644 index 000000000..caf562c18 --- /dev/null +++ b/src/test/scala/sbt/wrap/MutableSetWrapper.scala @@ -0,0 +1,89 @@ +package sbt.wrap + +import org.scalacheck._ +import java.util.{HashSet, LinkedHashSet} + +object WrappedHashSetTests extends MutableSetWrapperTests(new HashSet[Int]) +object WrappedLinkedHashSetTests extends MutableSetWrapperTests(new LinkedHashSet[Int]) +{ + specify("toList preserves order", checkAddOrdered _) + + private def checkAddOrdered(values: List[Int]) = + { + val set = createWrapped + val check = new scala.collection.mutable.HashSet[Int] + val list = new scala.collection.mutable.ListBuffer[Int] + for(value <- values) + { + set += value + if(!check(value)) + { + check += value + list += value + } + } + list.toList sameElements set.toList + } +} + +abstract class MutableSetWrapperTests(createUnderlying: => java.util.Set[Int]) extends Properties("Mutable Set wrapper (" + createUnderlying.getClass.getName + ")") +{ + protected def createWrapped = new MutableSetWrapper(createUnderlying) + + specify("Contains all added at once", checkBatchAddition _) + specify("Contains all added individually", checkSingleAddition _) + specify("toList contains all added at once", checkBatchToList _) + specify("toList contains all added individually", checkSingleToList _) + specify("Contains all added and not removed", checkRemove _) + + private def checkSingleAddition(values: List[Int]) = + { + val set = createSingleAdd(values) + values.forall(set.contains) + } + private def checkBatchAddition(values: List[Int]) = + { + val set = createBatchAdd(values) + values.forall(set.contains) + } + private def 
checkBatchToList(values: List[Int]) = + { + val set = createBatchAdd(values) + val check = scala.collection.mutable.HashSet(set.toList : _*) + values.forall(check.contains) + } + private def checkSingleToList(values: List[Int]) = + { + val set = createSingleAdd(values) + val check = scala.collection.mutable.HashSet(set.toList : _*) + values.forall(check.contains) + } + protected final def createBatchAdd(values: List[Int]) = + { + val set = createWrapped + set ++= values + set + } + protected final def createSingleAdd(values: List[Int]) = + { + val set = createWrapped + values.foreach(set += _) + set + } + private def checkRemove(values: List[(Int, Boolean)]) = + { + val set = createWrapped + val check = new scala.collection.mutable.HashSet[Int] + for( (key, _) <- values) + { + set += key + check += key + } + for( (key, false) <- values) + { + set -= key + check -= key + } + values.forall { case (key, _) => set.contains(key) == check.contains(key) } + } +} \ No newline at end of file