diff --git a/.drone.yml b/.drone.yml index e2e256726d..1b99775ab8 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1,6 +1,9 @@ matrix: CI_SCALA_VERSION: - 2.12.6 + RUN_SCRIPTED: + - ./bin/run-ci-scripted.sh + - ./bin/run-ci-scripted-to-jar.sh clone: git: @@ -30,6 +33,7 @@ pipeline: - export DRONE_DIR="/drone" - git fetch --tags && git log | head -n 20 - ./bin/run-ci.sh + - ${RUN_SCRIPTED} rebuild_cache: image: appleboy/drone-sftp-cache diff --git a/bin/run-ci-scripted-to-jar.sh b/bin/run-ci-scripted-to-jar.sh new file mode 100755 index 0000000000..f3f2968e01 --- /dev/null +++ b/bin/run-ci-scripted-to-jar.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -eu +set -o nounset + +PROJECT_ROOT="zincRoot" +sbt -Dfile.encoding=UTF-8 \ + -J-XX:ReservedCodeCacheSize=512M \ + -J-Xms1024M -J-Xmx4096M -J-server \ + "zincScripted/test:run --to-jar" diff --git a/bin/run-ci-scripted.sh b/bin/run-ci-scripted.sh new file mode 100755 index 0000000000..b323e2c8bb --- /dev/null +++ b/bin/run-ci-scripted.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -eu +set -o nounset + +PROJECT_ROOT="zincRoot" +sbt -Dfile.encoding=UTF-8 \ + -J-XX:ReservedCodeCacheSize=512M \ + -J-Xms1024M -J-Xmx4096M -J-server \ + "zincScripted/test:run" diff --git a/bin/run-ci.sh b/bin/run-ci.sh index c706889b37..6f472e289e 100755 --- a/bin/run-ci.sh +++ b/bin/run-ci.sh @@ -13,5 +13,4 @@ sbt -Dfile.encoding=UTF-8 \ "$PROJECT_ROOT/test:compile" \ crossTestBridges \ "publishBridges" \ - "$PROJECT_ROOT/test" \ - "zincScripted/test:run" + "$PROJECT_ROOT/test" \ No newline at end of file diff --git a/build.sbt b/build.sbt index fe4b702c20..affc718dd3 100644 --- a/build.sbt +++ b/build.sbt @@ -165,6 +165,15 @@ lazy val zinc = (project in file("zinc")) .settings( name := "zinc", mimaSettings, + mimaBinaryIssueFilters ++= Seq( + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCompilerImpl.compileIncrementally"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCompilerImpl.inputs"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCompilerImpl.compile"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.MixedAnalyzingCompiler.config"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.MixedAnalyzingCompiler.makeConfig"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.MixedAnalyzingCompiler.this"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.CompileConfiguration.this") + ) ) lazy val zincTesting = (project in internalPath / "zinc-testing") @@ -281,7 +290,11 @@ lazy val zincCore = (project in internalPath / "zinc-core") exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.findClassDependencies"), exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateClassesInternally"), exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateClassesExternally"), - exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.findAPIChange") + exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.findAPIChange"), + exclude[IncompatibleMethTypeProblem]("sbt.internal.inc.Incremental.prune"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCompile.apply"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.AnalysisCallback#Builder.this"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.AnalysisCallback.this") ) } ) @@ -391,7 +404,10 @@ lazy val compilerInterface212 = (project in internalPath / "compiler-interface") 
exclude[ReversedMissingMethodProblem]("xsbti.compile.ExternalHooks#Lookup.hashClasspath"), exclude[ReversedMissingMethodProblem]("xsbti.compile.ScalaInstance.loaderLibraryOnly"), exclude[DirectMissingMethodProblem]("xsbti.api.AnalyzedClass.of"), - exclude[DirectMissingMethodProblem]("xsbti.api.AnalyzedClass.create") + exclude[DirectMissingMethodProblem]("xsbti.api.AnalyzedClass.create"), + exclude[ReversedMissingMethodProblem]("xsbti.AnalysisCallback.classesInOutputJar"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.IncrementalCompiler.compile"), + exclude[DirectMissingMethodProblem]("xsbti.compile.IncrementalCompiler.compile") ) }, ) @@ -745,7 +761,10 @@ lazy val zincClassfile212 = zincClassfileTemplate .settings( scalaVersion := scala212, crossScalaVersions := Seq(scala212), - target := (target in zincClassfileTemplate).value.getParentFile / "target-2.12" + target := (target in zincClassfileTemplate).value.getParentFile / "target-2.12", + mimaBinaryIssueFilters ++= Seq( + exclude[DirectMissingMethodProblem]("sbt.internal.inc.classfile.Analyze.apply") + ) ) // re-implementation of scripted engine @@ -831,6 +850,7 @@ lazy val otherRootSettings = Seq( Scripted.scriptedPrescripted := { (_: File) => () }, Scripted.scriptedUnpublished := scriptedUnpublishedTask.evaluated, Scripted.scriptedSource := (sourceDirectory in zinc).value / "sbt-test", + Scripted.scriptedCompileToJar := false, publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value } @@ -845,6 +865,7 @@ def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { scriptedSource.value, result, scriptedBufferLog.value, + scriptedCompileToJar.value, scriptedPrescripted.value ) } @@ -857,6 +878,7 @@ def scriptedUnpublishedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { scriptedSource.value, result, scriptedBufferLog.value, + scriptedCompileToJar.value, scriptedPrescripted.value ) } diff --git a/internal/compiler-bridge/src/main/scala/xsbt/API.scala b/internal/compiler-bridge/src/main/scala/xsbt/API.scala index edfa9bfcc0..f21ab5c6fa 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/API.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/API.scala @@ -96,7 +96,7 @@ final class API(val global: CallbackGlobal) extends Compat with GlobalHelpers wi * * This method only takes care of non-local classes because local classes have no * relevance in the correctness of the algorithm and can be registered after genbcode. - * Local classes are only used to contruct the relations of products and to produce + * Local classes are only used to construct the relations of products and to produce * the list of generated files + stamps, but names referring to local classes **never** * show up in the name hashes of classes' APIs, hence never considered for name hashing. 
 *
@@ -116,9 +116,17 @@ final class API(val global: CallbackGlobal) extends Compat with GlobalHelpers wi
     def registerProductNames(names: FlattenedNames): Unit = {
       // Guard against a local class in case it surreptitiously leaks here
       if (!symbol.isLocalClass) {
-        val classFileName = s"${names.binaryName}.class"
-        val outputDir = global.settings.outputDirs.outputDirFor(sourceFile).file
-        val classFile = new java.io.File(outputDir, classFileName)
+        val pathToClassFile = s"${names.binaryName}.class"
+        val classFile = {
+          JarUtils.outputJar match {
+            case Some(outputJar) =>
+              new java.io.File(JarUtils.classNameInJar(outputJar, pathToClassFile))
+            case None =>
+              val outputDir = global.settings.outputDirs.outputDirFor(sourceFile).file
+              new java.io.File(outputDir, pathToClassFile)
+          }
+        }
+
         val zincClassName = names.className
         val srcClassName = classNameAsString(symbol)
         callback.generatedNonLocalClass(sourceJavaFile, classFile, zincClassName, srcClassName)
diff --git a/internal/compiler-bridge/src/main/scala/xsbt/Analyzer.scala b/internal/compiler-bridge/src/main/scala/xsbt/Analyzer.scala
index 78e8136c99..a168a18764 100644
--- a/internal/compiler-bridge/src/main/scala/xsbt/Analyzer.scala
+++ b/internal/compiler-bridge/src/main/scala/xsbt/Analyzer.scala
@@ -7,7 +7,10 @@
 
 package xsbt
 
+import java.io.File
+
 import scala.tools.nsc.Phase
+import scala.collection.JavaConverters._
 
 object Analyzer {
   def name = "xsbt-analyzer"
@@ -22,15 +25,39 @@ final class Analyzer(val global: CallbackGlobal) extends LocateClassFile {
     "Finds concrete instances of provided superclasses, and application entry points."
   def name = Analyzer.name
 
+  /**
+   * When straight-to-jar compilation is enabled, returns the classes
+   * that are found in the jar of the last compilation. This method
+   * gets the existing classes from the analysis callback and adapts
+   * them for consumption in the compiler bridge.
+   *
+   * It's lazy because it triggers a read of the zip, which may be
+   * unnecessary if there are no local classes in a compilation unit.
+   */
+  private lazy val classesWrittenByGenbcode: Set[String] = {
+    JarUtils.outputJar match {
+      case Some(jar) =>
+        val classes = global.callback.classesInOutputJar().asScala
+        classes.map(JarUtils.classNameInJar(jar, _)).toSet
+      case None => Set.empty
+    }
+  }
+
   def apply(unit: CompilationUnit): Unit = {
     if (!unit.isJava) {
       val sourceFile = unit.source.file.file
       for (iclass <- unit.icode) {
         val sym = iclass.symbol
-        val outputDir = settings.outputDirs.outputDirFor(sym.sourceFile).file
+        lazy val outputDir = settings.outputDirs.outputDirFor(sym.sourceFile).file
 
         def addGenerated(separatorRequired: Boolean): Unit = {
-          val classFile = fileForClass(outputDir, sym, separatorRequired)
-          if (classFile.exists()) {
+          val locatedClass = {
+            JarUtils.outputJar match {
+              case Some(outputJar) => locateClassInJar(sym, outputJar, separatorRequired)
+              case None            => locatePlainClassFile(sym, separatorRequired)
+            }
+          }
+
+          locatedClass.foreach { classFile =>
             assert(sym.isClass, s"${sym.fullName} is not a class")
             // Use own map of local classes computed before lambdalift to ascertain class locality
             if (localToNonLocalClass.isLocal(sym).getOrElse(true)) {
@@ -49,5 +76,18 @@ final class Analyzer(val global: CallbackGlobal) extends LocateClassFile {
       }
     }
   }
+
+  private def locatePlainClassFile(sym: Symbol, separatorRequired: Boolean): Option[File] = {
+    val outputDir = settings.outputDirs.outputDirFor(sym.sourceFile).file
+    val classFile = fileForClass(outputDir, sym, separatorRequired)
+    if (classFile.exists()) Some(classFile) else None
+  }
+
+  private def locateClassInJar(sym: Symbol, jar: File, sepRequired: Boolean): Option[File] = {
+    val classFile = pathToClassFile(sym, sepRequired)
+    val classInJar = JarUtils.classNameInJar(jar, classFile)
+    if (!classesWrittenByGenbcode.contains(classInJar)) None
+    else Some(new File(classInJar))
+  }
   }
 }
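Editor's note: `locateClassInJar` above consults `classesWrittenByGenbcode` (the callback's `classesInOutputJar()` view) instead of touching the filesystem. For intuition only, a plain `java.util.zip` sketch of such a listing follows; this is illustrative and is not the code Zinc itself uses:

```scala
import java.io.File
import java.util.zip.ZipFile
import scala.collection.JavaConverters._

// Illustrative only: list the relative class file paths inside an output jar,
// e.g. "xsbti/AnalysisCallback.class", as `classesInOutputJar()` reports them.
def listClassesInJar(jar: File): Set[String] = {
  if (!jar.exists()) Set.empty
  else {
    val zip = new ZipFile(jar)
    try zip.entries().asScala.map(_.getName).filter(_.endsWith(".class")).toSet
    finally zip.close()
  }
}
```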
diff --git a/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala b/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala
index a7794ae8b8..0c83e9ef94 100644
--- a/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala
+++ b/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala
@@ -14,6 +14,8 @@ import scala.tools.nsc._
 import io.AbstractFile
 import java.io.File
 
+import scala.reflect.io.PlainFile
+
 /** Defines the interface of the incremental compiler hiding implementation details. */
 sealed abstract class CallbackGlobal(settings: Settings,
                                      reporter: reporters.Reporter,
@@ -38,6 +40,8 @@ sealed abstract class CallbackGlobal(settings: Settings,
     }
   }
 
+  lazy val JarUtils = new JarUtils(outputDirs)
+
   /**
    * Defines the sbt phase in which the dependency analysis is performed.
    * The reason why this is exposed in the callback global is because it's used
@@ -134,19 +138,44 @@ sealed class ZincCompiler(settings: Settings, dreporter: DelegatingReporter, out
   private final val fqnsToAssociatedFiles = perRunCaches.newMap[String, (AbstractFile, Boolean)]()
 
-  /** Returns the associated file of a fully qualified name and whether it's on the classpath. */
+  /**
+   * Returns the associated file of a fully qualified name and whether it's on the classpath.
+   * Note that the abstract file returned must exist.
+   */
   def findAssociatedFile(fqn: String): Option[(AbstractFile, Boolean)] = {
-    def getOutputClass(name: String): Option[AbstractFile] = {
-      // This could be improved if a hint where to look is given.
-      val className = name.replace('.', '/') + ".class"
-      outputDirs.map(new File(_, className)).find((_.exists)).map((AbstractFile.getFile(_)))
+    def findOnPreviousCompilationProducts(name: String): Option[AbstractFile] = {
+      // This class file path is relative to the output jar/directory and computed from class name
+      val classFilePath = name.replace('.', '/') + ".class"
+
+      JarUtils.outputJar match {
+        case Some(outputJar) =>
+          if (!callback.classesInOutputJar().contains(classFilePath)) None
+          else {
+            /*
+             * Important implementation detail: `classInJar` has the format `$JAR!$CLASS_REF`,
+             * which is, of course, a path to a file that does not exist. This file path is
+             * interpreted specially by Zinc to decompose the format under straight-to-jar
+             * compilation. For this strategy to work, `PlainFile` must **not** check whether
+             * this file exists because, if it does, it will return `null` in
+             * `processExternalDependency` and the dependency will not be correctly registered.
+             * If scalac breaks this contract (the existence check is done when creating
+             * a normal reflect file but not a plain file), Zinc will not work correctly.
+             */
+            Some(new PlainFile(JarUtils.classNameInJar(outputJar, classFilePath)))
+          }
+
+        case None => // The compiler outputs class files in a classes directory (the default)
+          // This lookup could be improved if a hint where to look is given.
+          outputDirs.map(new File(_, classFilePath)).find(_.exists()).map(AbstractFile.getFile(_))
+      }
     }
 
     def findOnClassPath(name: String): Option[AbstractFile] =
       classPath.findClass(name).flatMap(_.binary.asInstanceOf[Option[AbstractFile]])
 
     fqnsToAssociatedFiles.get(fqn).orElse {
-      val newResult = getOutputClass(fqn).map(f => (f, true))
+      val newResult = findOnPreviousCompilationProducts(fqn)
+        .map(f => (f, true))
         .orElse(findOnClassPath(fqn).map(f => (f, false)))
       newResult.foreach(res => fqnsToAssociatedFiles.put(fqn, res))
       newResult
diff --git a/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala b/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala
new file mode 100644
index 0000000000..e23bfe8d89
--- /dev/null
+++ b/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala
@@ -0,0 +1,37 @@
+package xsbt
+
+import java.io.File
+
+/**
+ * This is a utility class that provides a set of functions that
+ * are used to implement straight-to-jar compilation.
+ *
+ * [[sbt.internal.inc.JarUtils]] is an object that has a similar purpose and
+ * duplicates some of the code, as it is difficult to share it. Any change
+ * in the logic of this file must be applied to the other `JarUtils` too!
+ */
+final class JarUtils(outputDirs: Iterable[File]) {
+  // This is an equivalent of asking if it runs on Windows where the separator is `\`
+  private val isSlashSeparator: Boolean = File.separatorChar == '/'
+
+  /**
+   * The jar file that is used as output for classes. If the output is
+   * not set to a single .jar file, the value of this field is [[None]].
+   */
+  val outputJar: Option[File] = {
+    outputDirs match {
+      case Seq(file) if file.getName.endsWith(".jar") => Some(file)
+      case _                                          => None
+    }
+  }
+
+  /**
+   * Creates an identifier for a class located inside a jar.
+   *
+   * It follows the format for encoding inter-jar dependencies
+   * established in [[sbt.internal.inc.JarUtils.ClassInJar]].
+   */
+  def classNameInJar(jar: File, classFilePath: String): String = {
+    s"$jar!${if (isSlashSeparator) classFilePath else classFilePath.replace('/', File.separatorChar)}"
+  }
+}
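Editor's note: to make the `$JAR!$CLASS_REF` contract concrete, here is a hedged sketch of the identifier round trip. The real decomposition lives in `sbt.internal.inc.JarUtils.ClassInJar`, which is not part of this diff, so the inverse below is a hypothetical reconstruction:

```scala
import java.io.File

// Sketch of the `<jar>!<relative class path>` identifier used above.
def toClassInJar(jar: File, relativePath: String): String = s"$jar!$relativePath"

// Hypothetical inverse, splitting on the last '!' as the format suggests.
def fromClassInJar(ref: String): (File, String) = {
  val bang = ref.lastIndexOf('!')
  (new File(ref.substring(0, bang)), ref.substring(bang + 1))
}

// toClassInJar(new File("/tmp/out.jar"), "a/B.class") == "/tmp/out.jar!a/B.class"
```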
diff --git a/internal/compiler-bridge/src/main/scala/xsbt/LocateClassFile.scala b/internal/compiler-bridge/src/main/scala/xsbt/LocateClassFile.scala
index aae1a70cf1..c338b33c51 100644
--- a/internal/compiler-bridge/src/main/scala/xsbt/LocateClassFile.scala
+++ b/internal/compiler-bridge/src/main/scala/xsbt/LocateClassFile.scala
@@ -43,4 +43,7 @@ abstract class LocateClassFile extends Compat with ClassName {
 
   protected def fileForClass(outputDirectory: File, s: Symbol, separatorRequired: Boolean): File =
     new File(outputDirectory, flatclassName(s, File.separatorChar, separatorRequired) + ".class")
+
+  protected def pathToClassFile(s: Symbol, separatorRequired: Boolean): String =
+    flatclassName(s, File.separatorChar, separatorRequired) + ".class"
 }
diff --git a/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java b/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java
index 7890ac33f4..020b076f62 100644
--- a/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java
+++ b/internal/compiler-interface/src/main/contraband-java/xsbti/compile/CompileOptions.java
@@ -19,6 +19,12 @@ public static CompileOptions create(java.io.File[] _classpath, java.io.File[] _s
     public static CompileOptions of(java.io.File[] _classpath, java.io.File[] _sources, java.io.File _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function<xsbti.Position, xsbti.Position> _sourcePositionMapper, xsbti.compile.CompileOrder _order) {
         return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order);
     }
+    public static CompileOptions create(java.io.File[] _classpath, java.io.File[] _sources, java.io.File _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function<xsbti.Position, xsbti.Position> _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional<java.io.File> _temporaryClassesDirectory) {
+        return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory);
+    }
+    public static CompileOptions of(java.io.File[] _classpath, java.io.File[] _sources, java.io.File _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function<xsbti.Position, xsbti.Position> _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional<java.io.File> _temporaryClassesDirectory) {
+        return new CompileOptions(_classpath, _sources, _classesDirectory, _scalacOptions, _javacOptions, _maxErrors, _sourcePositionMapper, _order, _temporaryClassesDirectory);
+    }
     /**
      * The classpath to use for compilation.
      * This will be modified according to the ClasspathOptions used to configure the ScalaCompiler.
@@ -38,6 +44,12 @@ public static CompileOptions of(java.io.File[] _classpath, java.io.File[] _sourc
     private java.util.function.Function<xsbti.Position, xsbti.Position> sourcePositionMapper;
     /** Controls the order in which Java and Scala sources are compiled. */
     private xsbti.compile.CompileOrder order;
+    /**
+     * Points to a temporary classes directory where the compiler can put compilation products
+     * of any kind.
The lifetime of these compilation products is short and the temporary + * classes directory only needs to exist during one incremental compiler cycle. + */ + private java.util.Optional<java.io.File> temporaryClassesDirectory; protected CompileOptions() { super(); classpath = new java.io.File[0]; @@ -48,6 +60,7 @@ protected CompileOptions() { maxErrors = 100; sourcePositionMapper = new java.util.function.Function<xsbti.Position, xsbti.Position>() { public xsbti.Position apply(xsbti.Position a) { return a; } }; order = xsbti.compile.CompileOrder.Mixed; + temporaryClassesDirectory = java.util.Optional.empty(); } protected CompileOptions(java.io.File[] _classpath, java.io.File[] _sources, java.io.File _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function<xsbti.Position, xsbti.Position> _sourcePositionMapper, xsbti.compile.CompileOrder _order) { super(); @@ -59,6 +72,19 @@ protected CompileOptions(java.io.File[] _classpath, java.io.File[] _sources, jav maxErrors = _maxErrors; sourcePositionMapper = _sourcePositionMapper; order = _order; + temporaryClassesDirectory = java.util.Optional.empty(); + } + protected CompileOptions(java.io.File[] _classpath, java.io.File[] _sources, java.io.File _classesDirectory, String[] _scalacOptions, String[] _javacOptions, int _maxErrors, java.util.function.Function<xsbti.Position, xsbti.Position> _sourcePositionMapper, xsbti.compile.CompileOrder _order, java.util.Optional<java.io.File> _temporaryClassesDirectory) { + super(); + classpath = _classpath; + sources = _sources; + classesDirectory = _classesDirectory; + scalacOptions = _scalacOptions; + javacOptions = _javacOptions; + maxErrors = _maxErrors; + sourcePositionMapper = _sourcePositionMapper; + order = _order; + temporaryClassesDirectory = _temporaryClassesDirectory; } public java.io.File[] classpath() { return this.classpath; @@ -84,29 +110,35 @@ public java.util.function.Function<xsbti.Position, xsbti.Position> sourcePositio public xsbti.compile.CompileOrder order() { return this.order; } + public java.util.Optional<java.io.File> temporaryClassesDirectory() { + return this.temporaryClassesDirectory; + } public CompileOptions withClasspath(java.io.File[] classpath) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withSources(java.io.File[] sources) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withClassesDirectory(java.io.File classesDirectory) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withScalacOptions(String[] scalacOptions) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, 
classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withJavacOptions(String[] javacOptions) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withMaxErrors(int maxErrors) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withSourcePositionMapper(java.util.function.Function<xsbti.Position, xsbti.Position> sourcePositionMapper) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public CompileOptions withOrder(xsbti.compile.CompileOrder order) { - return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order); + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); + } + public CompileOptions withTemporaryClassesDirectory(java.util.Optional<java.io.File> temporaryClassesDirectory) { + return new CompileOptions(classpath, sources, classesDirectory, scalacOptions, javacOptions, maxErrors, sourcePositionMapper, order, temporaryClassesDirectory); } public boolean equals(Object obj) { if (this == obj) { @@ -115,13 +147,13 @@ public boolean equals(Object obj) { return false; } else { CompileOptions o = (CompileOptions)obj; - return java.util.Arrays.deepEquals(this.classpath(), o.classpath()) && java.util.Arrays.deepEquals(this.sources(), o.sources()) && this.classesDirectory().equals(o.classesDirectory()) && java.util.Arrays.deepEquals(this.scalacOptions(), o.scalacOptions()) && java.util.Arrays.deepEquals(this.javacOptions(), o.javacOptions()) && (this.maxErrors() == o.maxErrors()) && this.sourcePositionMapper().equals(o.sourcePositionMapper()) && this.order().equals(o.order()); + return java.util.Arrays.deepEquals(this.classpath(), o.classpath()) && java.util.Arrays.deepEquals(this.sources(), o.sources()) && this.classesDirectory().equals(o.classesDirectory()) && java.util.Arrays.deepEquals(this.scalacOptions(), o.scalacOptions()) && java.util.Arrays.deepEquals(this.javacOptions(), o.javacOptions()) && (this.maxErrors() == o.maxErrors()) && this.sourcePositionMapper().equals(o.sourcePositionMapper()) && this.order().equals(o.order()) && this.temporaryClassesDirectory().equals(o.temporaryClassesDirectory()); } } public int hashCode() { - return 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "xsbti.compile.CompileOptions".hashCode()) + java.util.Arrays.deepHashCode(classpath())) + java.util.Arrays.deepHashCode(sources())) + classesDirectory().hashCode()) + java.util.Arrays.deepHashCode(scalacOptions())) + java.util.Arrays.deepHashCode(javacOptions())) + (new Integer(maxErrors())).hashCode()) + sourcePositionMapper().hashCode()) + 
order().hashCode());
+    return 37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (37 * (17 + "xsbti.compile.CompileOptions".hashCode()) + java.util.Arrays.deepHashCode(classpath())) + java.util.Arrays.deepHashCode(sources())) + classesDirectory().hashCode()) + java.util.Arrays.deepHashCode(scalacOptions())) + java.util.Arrays.deepHashCode(javacOptions())) + (new Integer(maxErrors())).hashCode()) + sourcePositionMapper().hashCode()) + order().hashCode()) + temporaryClassesDirectory().hashCode());
     }
     public String toString() {
-        return "CompileOptions(" + "classpath: " + classpath() + ", " + "sources: " + sources() + ", " + "classesDirectory: " + classesDirectory() + ", " + "scalacOptions: " + scalacOptions() + ", " + "javacOptions: " + javacOptions() + ", " + "maxErrors: " + maxErrors() + ", " + "sourcePositionMapper: " + sourcePositionMapper() + ", " + "order: " + order() + ")";
+        return "CompileOptions(" + "classpath: " + classpath() + ", " + "sources: " + sources() + ", " + "classesDirectory: " + classesDirectory() + ", " + "scalacOptions: " + scalacOptions() + ", " + "javacOptions: " + javacOptions() + ", " + "maxErrors: " + maxErrors() + ", " + "sourcePositionMapper: " + sourcePositionMapper() + ", " + "order: " + order() + ", " + "temporaryClassesDirectory: " + temporaryClassesDirectory() + ")";
     }
 }
diff --git a/internal/compiler-interface/src/main/contraband/incremental.json b/internal/compiler-interface/src/main/contraband/incremental.json
index 3310d349fa..9f56c28026 100644
--- a/internal/compiler-interface/src/main/contraband/incremental.json
+++ b/internal/compiler-interface/src/main/contraband/incremental.json
@@ -312,6 +312,17 @@
       "default": "xsbti.compile.CompileOrder.Mixed",
       "doc": "Controls the order in which Java and Scala sources are compiled.",
       "since": "0.1.0"
+    },
+    {
+      "name": "temporaryClassesDirectory",
+      "type": "java.util.Optional<java.io.File>",
+      "default": "java.util.Optional.empty()",
+      "doc": [
+        "Points to a temporary classes directory where the compiler can put compilation products",
+        "of any kind. The lifetime of these compilation products is short and the temporary",
+        "classes directory only needs to exist during one incremental compiler cycle."
+      ],
+      "since": "1.3.0"
     }
   ]
 },
diff --git a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java
index 9454875bc9..8e0c138bcc 100644
--- a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java
+++ b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java
@@ -8,6 +8,7 @@
 package xsbti;
 
 import xsbti.api.DependencyContext;
+
 import java.io.File;
 import java.util.EnumSet;
 
@@ -172,4 +173,29 @@ void problem(String what,
    * phase defined by <code>xsbt-analyzer</code> should be added.
    */
   boolean enabled();
+
+  /**
+   * Return the class files in the output jar at a given point in time.
+   *
+   * When straight-to-jar compilation is enabled, the following entrypoint
+   * in the analysis callback tells the compiler which classes can be found
+   * in the jar used as a compilation target (where all class files will be
+   * stored). The entrypoint returns all the paths to class files in Zinc
+   * format; an example would be `xsbti/AnalysisCallback.class`.
+   *
+   * This entrypoint serves two main purposes:
+   *
+   * 1. Before the dependency phase is run, it returns the class files found
+   *    in the jar previous to the current compilation.
+   * 2.
After dependency has run, when called again, it returns the class
+   *    files written by the compiler in genbcode.
+   *
+   * The second purpose is required because the compiler cannot communicate
+   * to us, via an internal programmatic API, which files it has written in
+   * genbcode, and therefore we need to pay the price of opening the jar again
+   * to figure it out. If the compiler ever exposes an entry point for this
+   * data, we can repurpose `classesInOutputJar` to serve only purpose 1.
+   */
+  java.util.Set<String> classesInOutputJar();
+
 }
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java b/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java
index a00c5c69a3..a6d2417a81 100644
--- a/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/IncrementalCompiler.java
@@ -79,6 +79,8 @@ public interface IncrementalCompiler {
      *                 the current compilation progress.
      * @param incrementalOptions An Instance of {@link IncOptions} that
      *                           configures the incremental compiler behaviour.
+     * @param temporaryClassesDirectory A directory where the incremental compiler
+     *                                  can put temporary class files or jars.
      * @param extra An array of sbt tuples with extra options.
      * @param logger An instance of {@link Logger} that logs Zinc output.
      *
@@ -104,6 +106,7 @@ CompileResult compile(ScalaCompiler scalaCompiler,
                           java.lang.Boolean skip,
                           Optional<CompileProgress> progress,
                           IncOptions incrementalOptions,
+                          Optional<File> temporaryClassesDirectory,
                           T2<String, String>[] extra,
                           Logger logger);
 }
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
index 61f5e9f198..53bd65c4a0 100755
--- a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
@@ -16,13 +16,15 @@
  */
 public interface SingleOutput extends Output {
     /**
-     * Return the directory where class files should be generated.
+     * Return the **directory or jar** where class files should be generated
+     * and written to. The method name is a misnomer since it can return a
+     * jar file when straight-to-jar compilation is enabled.
      * <p>
-     * Incremental compilation manages the class files in this directory, so
-     * don't play with this directory out of the Zinc API. Zinc already takes
-     * care of deleting classes before every compilation run.
+     * Incremental compilation manages the class files in this location, so
+     * don't touch it outside of the Zinc API. Zinc already takes care
+     * of deleting classes before every compilation run.
      * <p>
-     * This directory must be exclusively used for one set of sources.
+     * This file or directory must be exclusively used for one set of sources.
      */
    public File getOutputDirectory();
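Editor's note: with this reading of the `SingleOutput` contract, opting into straight-to-jar output from the API side only requires handing Zinc a `.jar` path as the single output. A minimal sketch with a hypothetical path:

```scala
import java.io.File
import xsbti.compile.SingleOutput

// Hypothetical: all compilation products go into one jar instead of a classes directory.
val jarOutput: SingleOutput = new SingleOutput {
  override def getOutputDirectory(): File = new File("target/output.jar")
}
```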
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/analysis/ReadStamps.java b/internal/compiler-interface/src/main/java/xsbti/compile/analysis/ReadStamps.java
index 67a5af675d..412f877835 100644
--- a/internal/compiler-interface/src/main/java/xsbti/compile/analysis/ReadStamps.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/analysis/ReadStamps.java
@@ -72,4 +72,5 @@ public interface ReadStamps {
      * @see xsbti.compile.analysis.ReadStamps#product(File)
      */
     public Map<File, Stamp> getAllProductStamps();
+
 }
diff --git a/internal/compiler-interface/src/test/scala/xsbti/TestCallback.scala b/internal/compiler-interface/src/test/scala/xsbti/TestCallback.scala
index b2535d3286..1a97c0d16f 100644
--- a/internal/compiler-interface/src/test/scala/xsbti/TestCallback.scala
+++ b/internal/compiler-interface/src/test/scala/xsbti/TestCallback.scala
@@ -2,8 +2,9 @@ package xsbti
 
 import java.io.File
 import java.util
+import java.util.Optional
 
-import xsbti.api.{ ClassLike, DependencyContext }
+import xsbti.api.{ DependencyContext, ClassLike }
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -77,6 +78,8 @@ class TestCallback extends AnalysisCallback {
 
   override def dependencyPhaseCompleted(): Unit = {}
   override def apiPhaseCompleted(): Unit = {}
+
+  override def classesInOutputJar(): util.Set[String] = java.util.Collections.emptySet()
 }
 
 object TestCallback {
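Editor's note: the next file vendors a trimmed-down copy of the JDK's internal zip filesystem so Zinc can read a jar's central directory, patch entry offsets, and write the index back without recreating the whole archive. A rough usage sketch of the public surface it exposes (illustrative only, with a hypothetical jar path; a real caller would write the serialized index back into the jar at `getCentralDirStart`):

```scala
import java.io.ByteArrayOutputStream
import java.nio.file.Paths
import scala.collection.JavaConverters._
import sbt.internal.inc.zip.ZipCentralDir

// Read the central directory of an existing jar (hypothetical path).
val dir = new ZipCentralDir(Paths.get("target/output.jar"))

// Inspect the index: entry names and their local-header offsets.
dir.getHeaders.asScala.foreach { e =>
  println(s"${e.getName} @ ${e.getEntryOffset}")
}

// Serialize the (possibly modified) central directory plus END record.
val out = new ByteArrayOutputStream()
dir.dump(out)
```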
diff --git a/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipCentralDir.java b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipCentralDir.java
new file mode 100644
index 0000000000..9362b03cc0
--- /dev/null
+++ b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipCentralDir.java
@@ -0,0 +1,742 @@
+/*
+ * Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *   - Redistributions of source code must retain the above copyright
+ *     notice, this list of conditions and the following disclaimer.
+ *
+ *   - Redistributions in binary form must reproduce the above copyright
+ *     notice, this list of conditions and the following disclaimer in the
+ *     documentation and/or other materials provided with the distribution.
+ *
+ *   - Neither the name of Oracle nor the names of its
+ *     contributors may be used to endorse or promote products derived
+ *     from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+ * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*
+ * This source code is provided to illustrate the usage of a given feature
+ * or technique and has been deliberately simplified. Additional steps
+ * required for a production-quality application, such as security checks,
+ * input validation and proper error handling, might not be present in
+ * this sample code.
+ */
+
+
+package sbt.internal.inc.zip;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.*;
+import java.util.zip.ZipError;
+import java.util.zip.ZipException;
+
+import static java.nio.file.StandardOpenOption.READ;
+import static sbt.internal.inc.zip.ZipConstants.*;
+import static sbt.internal.inc.zip.ZipUtils.*;
+
+/**
+ * A FileSystem built on a zip file
+ *
+ * @author Xueming Shen
+ */
+
+/**
+ * Modified implementation of [[com.sun.nio.zipfs.ZipFileSystem]] that allows one to
+ * read the index (central directory), modify it, and write it at a specified offset.
+ *
+ * The changes focus on making public whatever is required and removing what is not.
+ * It is possible to use the unmodified ZipFileSystem to implement the operations
+ * required for straight-to-jar compilation, but it does not work in place (it has
+ * to recreate zips) and does not allow disabling compression, which makes it too
+ * inefficient.
+ */
+public class ZipCentralDir {
+
+    private final byte[] cen;     // CEN & ENDHDR
+    private END end;
+    private final SeekableByteChannel ch;
+    private LinkedHashMap<IndexNode, IndexNode> inodes;
+    private static byte[] ROOTPATH = new byte[0];
+    private final IndexNode LOOKUPKEY = IndexNode.keyOf(null);
+    private List<Entry> elist;
+    private static final boolean isWindows =
+            System.getProperty("os.name").startsWith("Windows");
+
+    public ZipCentralDir(Path zfpath) throws IOException {
+        this.ch = Files.newByteChannel(zfpath, READ);
+        this.cen = initCEN();
+        elist = readEntries();
+        ch.close();
+    }
+
+    public long getCentralDirStart() {
+        return end.cenoff;
+    }
+
+    public void setCentralDirStart(long value) {
+        end.cenoff = value;
+    }
+
+    public List<Entry> getHeaders() {
+        return elist;
+    }
+
+    public void setHeaders(List<Entry> value) {
+        elist = value;
+    }
+
+    public void dump(OutputStream os) throws IOException {
+        long written = 0;
+        for (Entry entry : elist) {
+            written += entry.writeCEN(os);
+        }
+        end.centot = elist.size();
+        end.cenlen = written;
+        end.write(os, written);
+    }
+
+    private List<Entry> readEntries() throws IOException {
+        List<Entry> elist = new ArrayList<>();
+        for (IndexNode inode : inodes.values()) {
+            if (inode.pos == -1) {
+                continue; // pseudo directory node
+            }
+            Entry e = Entry.readCEN(this, inode.pos);
+            elist.add(e);
+        }
+        return elist;
+    }
+
+    // Reads zip file central directory. Returns the file position of first
+    // CEN header, otherwise returns -1 if an error occurred. If zip->msg != NULL
+    // then the error was a zip format error and zip->msg has the error text.
+    // Always pass in -1 for knownTotal; it's used for a recursive call.
+    private byte[] initCEN() throws IOException {
+        end = findEND();
+        // position of first LOC header (usually 0)
+        long locpos;
+        if (end.endpos == 0) {
+            inodes = new LinkedHashMap<>(10);
+            locpos = 0;
+            buildNodeTree();
+            return null;         // only END header present
+        }
+        if (end.cenlen > end.endpos)
+            zerror("invalid END header (bad central directory size)");
+        long cenpos = end.endpos - end.cenlen;     // position of CEN table
+
+        // Get position of first local file (LOC) header, taking into
+        // account that there may be a stub prefixed to the zip file.
+ locpos = cenpos - end.cenoff; + if (locpos < 0) + zerror("invalid END header (bad central directory offset)"); + + // read in the CEN and END + byte[] cen = new byte[(int)(end.cenlen + ENDHDR)]; + if (readFullyAt(cen, 0, cen.length, cenpos) != end.cenlen + ENDHDR) { + zerror("read CEN tables failed"); + } + // Iterate through the entries in the central directory + inodes = new LinkedHashMap<>(end.centot + 1); + int pos = 0; + int limit = cen.length - ENDHDR; + while (pos < limit) { + if (CENSIG(cen, pos) != CENSIG) + zerror("invalid CEN header (bad signature)"); + int method = CENHOW(cen, pos); + int nlen = CENNAM(cen, pos); + int elen = CENEXT(cen, pos); + int clen = CENCOM(cen, pos); + if ((CENFLG(cen, pos) & 1) != 0) + zerror("invalid CEN header (encrypted entry)"); + if (method != METHOD_STORED && method != METHOD_DEFLATED) + zerror("invalid CEN header (unsupported compression method: " + method + ")"); + if (pos + CENHDR + nlen > limit) + zerror("invalid CEN header (bad header size)"); + byte[] name = Arrays.copyOfRange(cen, pos + CENHDR, pos + CENHDR + nlen); + IndexNode inode = new IndexNode(name, pos); + inodes.put(inode, inode); + // skip ext and comment + pos += (CENHDR + nlen + elen + clen); + } + if (pos + ENDHDR != cen.length) { + zerror("invalid CEN header (bad header size)"); + } + buildNodeTree(); + return cen; + } + + private END findEND() throws IOException + { + byte[] buf = new byte[READBLOCKSZ]; + long ziplen = ch.size(); + long minHDR = (ziplen - END_MAXLEN) > 0 ? ziplen - END_MAXLEN : 0; + long minPos = minHDR - (buf.length - ENDHDR); + + for (long pos = ziplen - buf.length; pos >= minPos; pos -= (buf.length - ENDHDR)) + { + int off = 0; + if (pos < 0) { + // Pretend there are some NUL bytes before start of file + off = (int)-pos; + Arrays.fill(buf, 0, off, (byte)0); + } + int len = buf.length - off; + if (readFullyAt(buf, off, len, pos + off) != len) + zerror("zip END header not found"); + + // Now scan the block backwards for END header signature + for (int i = buf.length - ENDHDR; i >= 0; i--) { + if (buf[i+0] == (byte)'P' && + buf[i+1] == (byte)'K' && + buf[i+2] == (byte)'\005' && + buf[i+3] == (byte)'\006' && + (pos + i + ENDHDR + ENDCOM(buf, i) == ziplen)) { + // Found END header + buf = Arrays.copyOfRange(buf, i, i + ENDHDR); + END end = new END(); + end.endsub = ENDSUB(buf); + end.centot = ENDTOT(buf); + end.cenlen = ENDSIZ(buf); + end.cenoff = ENDOFF(buf); + end.comlen = ENDCOM(buf); + end.endpos = pos + i; + if (end.cenlen == ZIP64_MINVAL || + end.cenoff == ZIP64_MINVAL || + end.centot == ZIP64_MINVAL32) + { + // need to find the zip64 end; + byte[] loc64 = new byte[ZIP64_LOCHDR]; + if (readFullyAt(loc64, 0, loc64.length, end.endpos - ZIP64_LOCHDR) + != loc64.length) { + return end; + } + long end64pos = ZIP64_LOCOFF(loc64); + byte[] end64buf = new byte[ZIP64_ENDHDR]; + if (readFullyAt(end64buf, 0, end64buf.length, end64pos) + != end64buf.length) { + return end; + } + // end64 found, re-calcualte everything. + end.cenlen = ZIP64_ENDSIZ(end64buf); + end.cenoff = ZIP64_ENDOFF(end64buf); + end.centot = (int)ZIP64_ENDTOT(end64buf); // assume total < 2g + end.endpos = end64pos; + } + return end; + } + } + } + zerror("zip END header not found"); + return null; //make compiler happy + } + + // Internal node that links a "name" to its pos in cen table. + // The node itself can be used as a "key" to lookup itself in + // the HashMap inodes. 
+    static class IndexNode {
+        byte[] name;
+        int hashcode;          // node is hashable/hashed by its name
+        int pos = -1;          // position in cen table; -1 means the
+                               // entry does not exist in the zip file
+        String nameAsString;
+
+        IndexNode(byte[] name, int pos) {
+            setName(name);
+            this.pos = pos;
+        }
+
+        static IndexNode keyOf(byte[] name) { // get a lookup key;
+            return new IndexNode(name, -1);
+        }
+
+        public final void setName(byte[] name) {
+            this.name = name;
+            this.hashcode = Arrays.hashCode(name);
+        }
+
+        public final String getName() {
+            if (nameAsString == null) {
+                this.nameAsString = new String(name);
+            }
+            return this.nameAsString;
+        }
+
+        final IndexNode as(byte[] name) {           // reuse the node, mostly
+            setName(name);                          // as a lookup "key"
+            return this;
+        }
+
+        public boolean equals(Object other) {
+            if (!(other instanceof IndexNode)) {
+                return false;
+            }
+            return Arrays.equals(name, ((IndexNode)other).name);
+        }
+
+        public int hashCode() {
+            return hashcode;
+        }
+
+        IndexNode() {}
+        IndexNode sibling;
+        IndexNode child;  // 1st child
+    }
+
+    private static void zerror(String msg) {
+        throw new ZipError(msg);
+    }
+
+    private void buildNodeTree() {
+        HashSet<IndexNode> dirs = new HashSet<>();
+        IndexNode root = new IndexNode(ROOTPATH, -1);
+        inodes.put(root, root);
+        dirs.add(root);
+        for (IndexNode node : inodes.keySet().toArray(new IndexNode[0])) {
+            addToTree(node, dirs);
+        }
+    }
+
+    // ZIP directory has two issues:
+    // (1) ZIP spec does not require the ZIP file to include
+    //     directory entry
+    // (2) all entries are not stored/organized in a "tree"
+    //     structure.
+    // A possible solution is to build the node tree ourself as
+    // implemented below.
+    private void addToTree(IndexNode inode, HashSet<IndexNode> dirs) {
+        if (dirs.contains(inode)) {
+            return;
+        }
+        IndexNode parent;
+        byte[] name = inode.name;
+        byte[] pname = getParent(name);
+        if (inodes.containsKey(LOOKUPKEY.as(pname))) {
+            parent = inodes.get(LOOKUPKEY);
+        } else {    // pseudo directory entry
+            parent = new IndexNode(pname, -1);
+            inodes.put(parent, parent);
+        }
+        addToTree(parent, dirs);
+        inode.sibling = parent.child;
+        parent.child = inode;
+        if (name[name.length -1] == '/')
+            dirs.add(inode);
+    }
+
+    private static byte[] getParent(byte[] path) {
+        int off = path.length - 1;
+        if (off > 0 && path[off] == '/')  // isDirectory
+            off--;
+        while (off > 0 && path[off] != '/') { off--; }
+        if (off <= 0)
+            return ROOTPATH;
+        return Arrays.copyOf(path, off + 1);
+    }
+
+    // Reads len bytes of data from the specified offset into buf.
+    // Returns the total number of bytes read.
+    // Each/every byte read from here (except the cen, which is mapped).
+ private long readFullyAt(byte[] buf, int off, long len, long pos) throws IOException + { + ByteBuffer bb = ByteBuffer.wrap(buf); + bb.position(off); + bb.limit((int)(off + len)); + return readFullyAt(bb, pos); + } + + private long readFullyAt(ByteBuffer bb, long pos) throws IOException + { + return ch.position(pos).read(bb); + } + + // End of central directory record + static class END { + int endsub; // endsub + int centot; // 4 bytes + long cenlen; // 4 bytes + long cenoff; // 4 bytes + int comlen; // comment length + byte[] comment; + + /* members of Zip64 end of central directory locator */ + long endpos; + + void write(OutputStream os, long offset) throws IOException { + boolean hasZip64 = false; + long xlen = cenlen; + long xoff = cenoff; + if (xlen >= ZIP64_MINVAL) { + xlen = ZIP64_MINVAL; + hasZip64 = true; + } + if (xoff >= ZIP64_MINVAL) { + xoff = ZIP64_MINVAL; + hasZip64 = true; + } + int count = centot; + if (count >= ZIP64_MINVAL32) { + count = ZIP64_MINVAL32; + hasZip64 = true; + } + if (hasZip64) { + long off64 = offset; + //zip64 end of central directory record + writeInt(os, ZIP64_ENDSIG); // zip64 END record signature + writeLong(os, ZIP64_ENDHDR - 12); // size of zip64 end + writeShort(os, 45); // version made by + writeShort(os, 45); // version needed to extract + writeInt(os, 0); // number of this disk + writeInt(os, 0); // central directory start disk + writeLong(os, centot); // number of directory entires on disk + writeLong(os, centot); // number of directory entires + writeLong(os, cenlen); // length of central directory + writeLong(os, cenoff); // offset of central directory + + //zip64 end of central directory locator + writeInt(os, ZIP64_LOCSIG); // zip64 END locator signature + writeInt(os, 0); // zip64 END start disk + writeLong(os, off64); // offset of zip64 END + writeInt(os, 1); // total number of disks (?) 
+ } + writeInt(os, ENDSIG); // END record signature + writeShort(os, 0); // number of this disk + writeShort(os, 0); // central directory start disk + writeShort(os, count); // number of directory entries on disk + writeShort(os, count); // total number of directory entries + writeInt(os, xlen); // length of central directory + writeInt(os, xoff); // offset of central directory + if (comment != null) { // zip file comment + writeShort(os, comment.length); + writeBytes(os, comment); + } else { + writeShort(os, 0); + } + } + } + + public static class Entry extends IndexNode { + + // entry attributes + int version; + int flag; + int method = -1; // compression method + long mtime = -1; // last modification time (in DOS time) + long atime = -1; // last access time + long ctime = -1; // create time + long crc = -1; // crc-32 of entry data + long csize = -1; // compressed size of entry data + long size = -1; // uncompressed size of entry data + byte[] extra; + + // cen + int versionMade; + int disk; + int attrs; + long attrsEx; + long locoff; + byte[] comment; + + Entry() {} + + public final long getLastModifiedTime() { + return mtime; + } + + public final long getEntryOffset() { + return locoff; + } + + public final void setEntryOffset(long value) { + this.locoff = value; + } + + int version() throws ZipException { + if (method == METHOD_DEFLATED) + return 20; + else if (method == METHOD_STORED) + return 10; + throw new ZipException("unsupported compression method"); + } + + ///////////////////// CEN ////////////////////// + static Entry readCEN(ZipCentralDir zipfs, int pos) + throws IOException + { + return new Entry().cen(zipfs, pos); + } + + private Entry cen(ZipCentralDir zipfs, int pos) + throws IOException + { + byte[] cen = zipfs.cen; + if (CENSIG(cen, pos) != CENSIG) + zerror("invalid CEN header (bad signature)"); + versionMade = CENVEM(cen, pos); + version = CENVER(cen, pos); + flag = CENFLG(cen, pos); + method = CENHOW(cen, pos); + mtime = dosToJavaTime(CENTIM(cen, pos)); + crc = CENCRC(cen, pos); + csize = CENSIZ(cen, pos); + size = CENLEN(cen, pos); + int nlen = CENNAM(cen, pos); + int elen = CENEXT(cen, pos); + int clen = CENCOM(cen, pos); + disk = CENDSK(cen, pos); + attrs = CENATT(cen, pos); + attrsEx = CENATX(cen, pos); + locoff = CENOFF(cen, pos); + + pos += CENHDR; + setName(Arrays.copyOfRange(cen, pos, pos + nlen)); + + pos += nlen; + if (elen > 0) { + extra = Arrays.copyOfRange(cen, pos, pos + elen); + pos += elen; + readExtra(zipfs); + } + if (clen > 0) { + comment = Arrays.copyOfRange(cen, pos, pos + clen); + } + return this; + } + + int writeCEN(OutputStream os) throws IOException + { + int version0 = version(); + long csize0 = csize; + long size0 = size; + long locoff0 = locoff; + int elen64 = 0; // extra for ZIP64 + int elenNTFS = 0; // extra for NTFS (a/c/mtime) + int elenEXTT = 0; // extra for Extended Timestamp + boolean foundExtraTime = false; // if time stamp NTFS, EXTT present + + // confirm size/length + int nlen = (name != null) ? name.length : 0; + int elen = (extra != null) ? extra.length : 0; + int eoff = 0; + int clen = (comment != null) ? 
comment.length : 0; + if (csize >= ZIP64_MINVAL) { + csize0 = ZIP64_MINVAL; + elen64 += 8; // csize(8) + } + if (size >= ZIP64_MINVAL) { + size0 = ZIP64_MINVAL; // size(8) + elen64 += 8; + } + if (locoff >= ZIP64_MINVAL) { + locoff0 = ZIP64_MINVAL; + elen64 += 8; // offset(8) + } + if (elen64 != 0) { + elen64 += 4; // header and data sz 4 bytes + } + while (eoff + 4 < elen) { + int tag = SH(extra, eoff); + int sz = SH(extra, eoff + 2); + if (tag == EXTID_EXTT || tag == EXTID_NTFS) { + foundExtraTime = true; + } + eoff += (4 + sz); + } + if (!foundExtraTime) { + if (isWindows) { // use NTFS + elenNTFS = 36; // total 36 bytes + } else { // Extended Timestamp otherwise + elenEXTT = 9; // only mtime in cen + } + } + writeInt(os, CENSIG); // CEN header signature + if (elen64 != 0) { + writeShort(os, 45); // ver 4.5 for zip64 + writeShort(os, 45); + } else { + writeShort(os, version0); // version made by + writeShort(os, version0); // version needed to extract + } + writeShort(os, flag); // general purpose bit flag + writeShort(os, method); // compression method + // last modification time + writeInt(os, (int)javaToDosTime(mtime)); + writeInt(os, crc); // crc-32 + writeInt(os, csize0); // compressed size + writeInt(os, size0); // uncompressed size + writeShort(os, name.length); + writeShort(os, elen + elen64 + elenNTFS + elenEXTT); + + if (comment != null) { + writeShort(os, Math.min(clen, 0xffff)); + } else { + writeShort(os, 0); + } + writeShort(os, 0); // starting disk number + writeShort(os, 0); // internal file attributes (unused) + writeInt(os, 0); // external file attributes (unused) + writeInt(os, locoff0); // relative offset of local header + writeBytes(os, name); + if (elen64 != 0) { + writeShort(os, EXTID_ZIP64);// Zip64 extra + writeShort(os, elen64 - 4); // size of "this" extra block + if (size0 == ZIP64_MINVAL) + writeLong(os, size); + if (csize0 == ZIP64_MINVAL) + writeLong(os, csize); + if (locoff0 == ZIP64_MINVAL) + writeLong(os, locoff); + } + if (elenNTFS != 0) { + writeShort(os, EXTID_NTFS); + writeShort(os, elenNTFS - 4); + writeInt(os, 0); // reserved + writeShort(os, 0x0001); // NTFS attr tag + writeShort(os, 24); + writeLong(os, javaToWinTime(mtime)); + writeLong(os, javaToWinTime(atime)); + writeLong(os, javaToWinTime(ctime)); + } + if (elenEXTT != 0) { + writeShort(os, EXTID_EXTT); + writeShort(os, elenEXTT - 4); + if (ctime == -1) + os.write(0x3); // mtime and atime + else + os.write(0x7); // mtime, atime and ctime + writeInt(os, javaToUnixTime(mtime)); + } + if (extra != null) // whatever not recognized + writeBytes(os, extra); + if (comment != null) //TBD: 0, Math.min(commentBytes.length, 0xffff)); + writeBytes(os, comment); + return CENHDR + nlen + elen + clen + elen64 + elenNTFS + elenEXTT; + } + + ///////////////////// LOC ////////////////////// + // read NTFS, UNIX and ZIP64 data from cen.extra + void readExtra(ZipCentralDir zipfs) throws IOException { + if (extra == null) + return; + int elen = extra.length; + int off = 0; + int newOff = 0; + while (off + 4 < elen) { + // extra spec: HeaderID+DataSize+Data + int pos = off; + int tag = SH(extra, pos); + int sz = SH(extra, pos + 2); + pos += 4; + if (pos + sz > elen) // invalid data + break; + switch (tag) { + case EXTID_ZIP64 : + if (size == ZIP64_MINVAL) { + if (pos + 8 > elen) // invalid zip64 extra + break; // fields, just skip + size = LL(extra, pos); + pos += 8; + } + if (csize == ZIP64_MINVAL) { + if (pos + 8 > elen) + break; + csize = LL(extra, pos); + pos += 8; + } + if (locoff == ZIP64_MINVAL) { + if 
(pos + 8 > elen) + break; + locoff = LL(extra, pos); + pos += 8; + } + break; + case EXTID_NTFS: + if (sz < 32) + break; + pos += 4; // reserved 4 bytes + if (SH(extra, pos) != 0x0001) + break; + if (SH(extra, pos + 2) != 24) + break; + // override the loc field, datatime here is + // more "accurate" + mtime = winToJavaTime(LL(extra, pos + 4)); + atime = winToJavaTime(LL(extra, pos + 12)); + ctime = winToJavaTime(LL(extra, pos + 20)); + break; + case EXTID_EXTT: + // spec says the Extened timestamp in cen only has mtime + // need to read the loc to get the extra a/ctime + byte[] buf = new byte[LOCHDR]; + if (zipfs.readFullyAt(buf, 0, buf.length , locoff) + != buf.length) + throw new ZipException("loc: reading failed"); + if (LOCSIG(buf) != LOCSIG) + throw new ZipException("loc: wrong sig ->" + + Long.toString(LOCSIG(buf), 16)); + + int locElen = LOCEXT(buf); + if (locElen < 9) // EXTT is at lease 9 bytes + break; + int locNlen = LOCNAM(buf); + buf = new byte[locElen]; + if (zipfs.readFullyAt(buf, 0, buf.length , locoff + LOCHDR + locNlen) + != buf.length) + throw new ZipException("loc extra: reading failed"); + int locPos = 0; + while (locPos + 4 < buf.length) { + int locTag = SH(buf, locPos); + int locSZ = SH(buf, locPos + 2); + locPos += 4; + if (locTag != EXTID_EXTT) { + locPos += locSZ; + continue; + } + int flag = CH(buf, locPos++); + if ((flag & 0x1) != 0) { + mtime = unixToJavaTime(LG(buf, locPos)); + locPos += 4; + } + if ((flag & 0x2) != 0) { + atime = unixToJavaTime(LG(buf, locPos)); + locPos += 4; + } + if ((flag & 0x4) != 0) { + ctime = unixToJavaTime(LG(buf, locPos)); + locPos += 4; + } + break; + } + break; + default: // unknown tag + System.arraycopy(extra, off, extra, newOff, sz + 4); + newOff += (sz + 4); + } + off += (sz + 4); + } + if (newOff != 0 && newOff != extra.length) + extra = Arrays.copyOf(extra, newOff); + else + extra = null; + } + } + +} diff --git a/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipConstants.java b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipConstants.java new file mode 100644 index 0000000000..abfd6d072b --- /dev/null +++ b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipConstants.java @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * - Neither the name of Oracle nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * This source code is provided to illustrate the usage of a given feature + * or technique and has been deliberately simplified. Additional steps + * required for a production-quality application, such as security checks, + * input validation and proper error handling, might not be present in + * this sample code. + */ + + +package sbt.internal.inc.zip; + + +/** + * + * @author Xueming Shen + */ + +class ZipConstants { + /* + * Compression methods + */ + static final int METHOD_STORED = 0; + static final int METHOD_DEFLATED = 8; + + /* + * Header signatures + */ + static long LOCSIG = 0x04034b50L; // "PK\003\004" + static long CENSIG = 0x02014b50L; // "PK\001\002" + static long ENDSIG = 0x06054b50L; // "PK\005\006" + + /* + * Header sizes in bytes (including signatures) + */ + static final int LOCHDR = 30; // LOC header size + static final int CENHDR = 46; // CEN header size + static final int ENDHDR = 22; // END header size + + /* + * ZIP64 constants + */ + static final long ZIP64_ENDSIG = 0x06064b50L; // "PK\006\006" + static final long ZIP64_LOCSIG = 0x07064b50L; // "PK\006\007" + static final int ZIP64_ENDHDR = 56; // ZIP64 end header size + static final int ZIP64_LOCHDR = 20; // ZIP64 end loc header size + + static final int ZIP64_MINVAL32 = 0xFFFF; + static final long ZIP64_MINVAL = 0xFFFFFFFFL; + + /* + * Extra field header ID + */ + static final int EXTID_ZIP64 = 0x0001; // ZIP64 + static final int EXTID_NTFS = 0x000a; // NTFS + static final int EXTID_EXTT = 0x5455; // Info-ZIP Extended Timestamp + + /* + * field access methods + */ + /////////////////////////////////////////////////////// + static final int CH(byte[] b, int n) { + return Byte.toUnsignedInt(b[n]); + } + + static final int SH(byte[] b, int n) { + return Byte.toUnsignedInt(b[n]) | (Byte.toUnsignedInt(b[n + 1]) << 8); + } + + static final long LG(byte[] b, int n) { + return ((SH(b, n)) | (SH(b, n + 2) << 16)) & 0xffffffffL; + } + + static final long LL(byte[] b, int n) { + return (LG(b, n)) | (LG(b, n + 4) << 32); + } + + static final long GETSIG(byte[] b) { + return LG(b, 0); + } + + // local file (LOC) header fields + static final long LOCSIG(byte[] b) { return LG(b, 0); } // signature + static final int LOCNAM(byte[] b) { return SH(b, 26);} // filename length + static final int LOCEXT(byte[] b) { return SH(b, 28);} // extra field length + + // end of central directory header (END) fields + static final int ENDSUB(byte[] b) { return SH(b, 8); } // number of entries on this disk + static final int ENDTOT(byte[] b) { return SH(b, 10);} // total number of entries + static final long ENDSIZ(byte[] b) { return LG(b, 12);} // central directory size + static final long ENDOFF(byte[] b) { return LG(b, 16);} // central directory offset + static final int ENDCOM(byte[] b) { return SH(b, 20);} // size of zip file comment + static final int ENDCOM(byte[] b, int off) { return SH(b, off + 20);} + + // zip64 end of central directory record fields + static final long
ZIP64_ENDTOD(byte[] b) { return LL(b, 24);} // total number of entries on disk + static final long ZIP64_ENDTOT(byte[] b) { return LL(b, 32);} // total number of entries + static final long ZIP64_ENDSIZ(byte[] b) { return LL(b, 40);} // central directory size + static final long ZIP64_ENDOFF(byte[] b) { return LL(b, 48);} // central directory offset + static final long ZIP64_LOCOFF(byte[] b) { return LL(b, 8);} // zip64 end offset + + // central directory header (CEN) fields + static final long CENSIG(byte[] b, int pos) { return LG(b, pos + 0); } + static final int CENVEM(byte[] b, int pos) { return SH(b, pos + 4); } + static final int CENVER(byte[] b, int pos) { return SH(b, pos + 6); } + static final int CENFLG(byte[] b, int pos) { return SH(b, pos + 8); } + static final int CENHOW(byte[] b, int pos) { return SH(b, pos + 10);} + static final long CENTIM(byte[] b, int pos) { return LG(b, pos + 12);} + static final long CENCRC(byte[] b, int pos) { return LG(b, pos + 16);} + static final long CENSIZ(byte[] b, int pos) { return LG(b, pos + 20);} + static final long CENLEN(byte[] b, int pos) { return LG(b, pos + 24);} + static final int CENNAM(byte[] b, int pos) { return SH(b, pos + 28);} + static final int CENEXT(byte[] b, int pos) { return SH(b, pos + 30);} + static final int CENCOM(byte[] b, int pos) { return SH(b, pos + 32);} + static final int CENDSK(byte[] b, int pos) { return SH(b, pos + 34);} + static final int CENATT(byte[] b, int pos) { return SH(b, pos + 36);} + static final long CENATX(byte[] b, int pos) { return LG(b, pos + 38);} + static final long CENOFF(byte[] b, int pos) { return LG(b, pos + 42);} + + /* The END header is followed by a variable length comment of size < 64k. */ + static final long END_MAXLEN = 0xFFFF + ENDHDR; + static final int READBLOCKSZ = 128; +} diff --git a/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipUtils.java b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipUtils.java new file mode 100644 index 0000000000..ffc6c30c27 --- /dev/null +++ b/internal/zinc-classfile/src/main/java/sbt/internal/inc/zip/ZipUtils.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * - Neither the name of Oracle nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * This source code is provided to illustrate the usage of a given feature + * or technique and has been deliberately simplified. Additional steps + * required for a production-quality application, such as security checks, + * input validation and proper error handling, might not be present in + * this sample code. + */ + + +package sbt.internal.inc.zip; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Date; +import java.util.concurrent.TimeUnit; + +/** + * + * @author Xueming Shen + */ + +class ZipUtils { + + /* + * Writes a 16-bit short to the output stream in little-endian byte order. + */ + static void writeShort(OutputStream os, int v) throws IOException { + os.write(v & 0xff); + os.write((v >>> 8) & 0xff); + } + + /* + * Writes a 32-bit int to the output stream in little-endian byte order. + */ + static void writeInt(OutputStream os, long v) throws IOException { + os.write((int)(v & 0xff)); + os.write((int)((v >>> 8) & 0xff)); + os.write((int)((v >>> 16) & 0xff)); + os.write((int)((v >>> 24) & 0xff)); + } + + /* + * Writes a 64-bit int to the output stream in little-endian byte order. + */ + static void writeLong(OutputStream os, long v) throws IOException { + os.write((int)(v & 0xff)); + os.write((int)((v >>> 8) & 0xff)); + os.write((int)((v >>> 16) & 0xff)); + os.write((int)((v >>> 24) & 0xff)); + os.write((int)((v >>> 32) & 0xff)); + os.write((int)((v >>> 40) & 0xff)); + os.write((int)((v >>> 48) & 0xff)); + os.write((int)((v >>> 56) & 0xff)); + } + + /* + * Writes an array of bytes to the output stream. + */ + static void writeBytes(OutputStream os, byte[] b) + throws IOException + { + os.write(b, 0, b.length); + } + + /* + * Converts DOS time to Java time (number of milliseconds since epoch). + */ + static long dosToJavaTime(long dtime) { + Date d = new Date((int)(((dtime >> 25) & 0x7f) + 80), + (int)(((dtime >> 21) & 0x0f) - 1), + (int)((dtime >> 16) & 0x1f), + (int)((dtime >> 11) & 0x1f), + (int)((dtime >> 5) & 0x3f), + (int)((dtime << 1) & 0x3e)); + return d.getTime(); + } + + /* + * Converts Java time to DOS time. 
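+ * + * For reference (derived from the bit shifts in the two conversion methods): a DOS timestamp packs year-1980 into bits 25-31, month (1-12) into bits 21-24, day into bits 16-20, hour into bits 11-15, minute into bits 5-10 and second/2 into bits 0-4, giving two-second resolution.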
+ */ + static long javaToDosTime(long time) { + Date d = new Date(time); + int year = d.getYear() + 1900; + if (year < 1980) { + return (1 << 21) | (1 << 16); + } + return (year - 1980) << 25 | (d.getMonth() + 1) << 21 | + d.getDate() << 16 | d.getHours() << 11 | d.getMinutes() << 5 | + d.getSeconds() >> 1; + } + + + // used to adjust values between Windows and java epoch + private static final long WINDOWS_EPOCH_IN_MICROSECONDS = -11644473600000000L; + static long winToJavaTime(long wtime) { + return TimeUnit.MILLISECONDS.convert( + wtime / 10 + WINDOWS_EPOCH_IN_MICROSECONDS, TimeUnit.MICROSECONDS); + } + + static long javaToWinTime(long time) { + return (TimeUnit.MICROSECONDS.convert(time, TimeUnit.MILLISECONDS) + - WINDOWS_EPOCH_IN_MICROSECONDS) * 10; + } + + static long unixToJavaTime(long utime) { + return TimeUnit.MILLISECONDS.convert(utime, TimeUnit.SECONDS); + } + + static long javaToUnixTime(long time) { + return TimeUnit.SECONDS.convert(time, TimeUnit.MILLISECONDS); + } + +} diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipFsOps.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipFsOps.scala new file mode 100644 index 0000000000..23e068c4a1 --- /dev/null +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipFsOps.scala @@ -0,0 +1,57 @@ +package sbt.internal.inc + +import java.io.OutputStream +import java.nio.file.Path + +import sbt.internal.inc.zip.ZipCentralDir + +import scala.collection.JavaConverters._ + +/** + * The concrete implementation of [[sbt.internal.inc.IndexBasedZipOps]] + * based on [[sbt.internal.inc.zip.ZipCentralDir]]. + */ +object IndexBasedZipFsOps extends IndexBasedZipOps { + override type CentralDir = ZipCentralDir + override type Header = ZipCentralDir.Entry + + override protected def readCentralDir(path: Path): CentralDir = { + new ZipCentralDir(path) + } + + override protected def getCentralDirStart(centralDir: CentralDir): Long = { + centralDir.getCentralDirStart + } + + override protected def setCentralDirStart(centralDir: CentralDir, centralDirStart: Long): Unit = { + centralDir.setCentralDirStart(centralDirStart) + } + + override protected def getHeaders(centralDir: CentralDir): Seq[Header] = { + centralDir.getHeaders.asScala + } + override protected def setHeaders(centralDir: CentralDir, headers: Seq[Header]): Unit = { + centralDir.setHeaders(new java.util.ArrayList[Header](headers.asJava)) + } + + override protected def getFileName(header: Header): String = { + header.getName + } + + override protected def getFileOffset(header: Header): Long = { + header.getEntryOffset + } + + override protected def setFileOffset(header: Header, offset: Long): Unit = { + header.setEntryOffset(offset) + } + + override protected def getLastModifiedTime(header: Header): Long = { + header.getLastModifiedTime + } + + override protected def writeCentralDir(centralDir: CentralDir, + outputStream: OutputStream): Unit = { + centralDir.dump(outputStream) + } +} diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipOps.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipOps.scala new file mode 100644 index 0000000000..573e2dbafb --- /dev/null +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/IndexBasedZipOps.scala @@ -0,0 +1,307 @@ +package sbt.internal.inc + +import java.nio.channels.{ FileChannel, Channels, ReadableByteChannel } +import java.io._ +import java.nio.file.{ Files, Path } +import java.util.UUID +import java.util.zip.{ 
Deflater, ZipOutputStream, ZipEntry } + +import sbt.io.{ IO, Using } + +/** + * Provides an efficient implementation of operations on zip files that are + * used for the implementation of the Straight to Jar feature. + * + * The implementation is based on the index (aka central directory) that is + * located at the end of the zip file and contains, among others, the name/path + * and offset where the actual data of each stored file is located. Reading zips + * should always be done based on that index, which means that it is often enough + * to manipulate this index without rewriting the rest of the file. + * This class heavily relies on this fact. + * + * This class abstracts over the actual operations on the index, i.e. reading, manipulating + * and storing it, making it easy to replace. + */ +abstract class IndexBasedZipOps extends CreateZip { + + /** + * Reads timestamps of zip entries. On the first access to a given zip + * it reads the timestamps once and keeps them cached for future lookups. + * + * It only supports reading stamps from a single zip. The zip passed as + * an argument is only used to initialize the cache and is later ignored. + * This is enough as stamps are only read from the output jar. + */ + final class CachedStamps(zip: File) { + private val cachedNameToTimestamp: Map[String, Long] = initializeCache(zip) + + def getStamp(entry: String): Long = { + cachedNameToTimestamp.getOrElse(entry, 0) + } + + private def initializeCache(zipFile: File): Map[String, Long] = { + if (zipFile.exists()) { + val centralDir = readCentralDir(zipFile.toPath) + val headers = getHeaders(centralDir) + headers.map(header => getFileName(header) -> getLastModifiedTime(header))( + collection.breakOut) + } else { + Map.empty + } + } + } + + /** + * Removes the specified entries from the given zip file by replacing the current index + * with a version without those entries. + * @param zipFile the zip file to remove entries from + * @param entries paths to files inside the jar e.g. sbt/internal/inc/IndexBasedZipOps.class + */ + def removeEntries(zipFile: File, entries: Iterable[String]): Unit = { + removeEntries(zipFile.toPath, entries.toSet) + } + + /** + * Merges two zip files. It works by appending contents of `from` + * to `into`. Indices are combined; in case of duplicates, the + * final entries that are used are those from `from`. + * The final merged zip is available under the `into` path, and `from` + * is deleted. + * + * @param into the target zip file to merge to + * @param from the source zip file that is added/merged to `into` + */ + def mergeArchives(into: File, from: File): Unit = { + mergeArchives(into.toPath, from.toPath) + } + + /** + * Adds `files` (plain files) to the specified zip file. Implemented by creating + * a new zip with the plain files. If `zipFile` already exists, the archives will + * be merged. + * Plain files are not removed after this operation. + * + * @param zipFile A zip file to add files to + * @param files a sequence of tuples with the actual file to include and the path in + * the zip where it should be put.
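+ * + * A hypothetical usage sketch (paths invented for illustration), going through the + * concrete [[sbt.internal.inc.IndexBasedZipFsOps]] implementation: + * {{{ + * IndexBasedZipFsOps.includeInArchive( + * new File("target/output.jar"), + * Seq(new File("/tmp/A.class") -> "pkg/A.class")) + * }}}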
+ */ + def includeInArchive(zipFile: File, files: Seq[(File, String)]): Unit = { + if (zipFile.exists()) { + val tempZip = zipFile.toPath.resolveSibling(s"${UUID.randomUUID()}.jar").toFile + createZip(tempZip, files) + mergeArchives(zipFile, tempZip) + } else { + createZip(zipFile, files) + } + } + + /** + * Reads the current index from the given zip file + * + * @param zipFile path to the zip file + * @return the current index + */ + def readCentralDir(zipFile: File): CentralDir = { + readCentralDir(zipFile.toPath) + } + + /** + * Replaces the index inside the zip file. + * + * @param zipFile the zip file that should have the index updated + * @param centralDir the index to be stored in the file + */ + def writeCentralDir(zipFile: File, centralDir: CentralDir): Unit = { + writeCentralDir(zipFile.toPath, centralDir) + } + + def listEntries(zipFile: File): Seq[String] = { + val centralDir = readCentralDir(zipFile) + val headers = getHeaders(centralDir) + headers.map(getFileName) + } + + /** + * Represents the central directory (index) of a zip file. It must contain the start offset + * (where it is located in the zip file) and the list of headers + */ + type CentralDir + + /** + * Represents a header of a zip entry located inside the central directory. It has to contain + * the timestamp, name/path and offset to the actual data in the zip file. + */ + type Header + + private def writeCentralDir(path: Path, newCentralDir: CentralDir): Unit = { + val currentCentralDir = readCentralDir(path) + val currentCentralDirStart = truncateCentralDir(currentCentralDir, path) + finalizeZip(newCentralDir, path, currentCentralDirStart) + } + + private def removeEntries(path: Path, toRemove: Set[String]): Unit = { + val centralDir = readCentralDir(path) + removeEntriesFromCentralDir(centralDir, toRemove) + val writeOffset = truncateCentralDir(centralDir, path) + finalizeZip(centralDir, path, writeOffset) + } + + private def removeEntriesFromCentralDir(centralDir: CentralDir, toRemove: Set[String]): Unit = { + val headers = getHeaders(centralDir) + val clearedHeaders = headers.filterNot(header => toRemove.contains(getFileName(header))) + setHeaders(centralDir, clearedHeaders) + } + + private def mergeArchives(target: Path, source: Path): Unit = { + val targetCentralDir = readCentralDir(target) + val sourceCentralDir = readCentralDir(source) + + // "source" will start where "target" ends + val sourceStart = truncateCentralDir(targetCentralDir, target) + // the "source" data (files) spans from the beginning of the file to the start of its central dir + val sourceLength = getCentralDirStart(sourceCentralDir) + + transferAll(source, target, startPos = sourceStart, bytesToTransfer = sourceLength) + + val mergedHeaders = mergeHeaders(targetCentralDir, sourceCentralDir, sourceStart) + setHeaders(targetCentralDir, mergedHeaders) + + val centralDirStart = sourceStart + sourceLength + finalizeZip(targetCentralDir, target, centralDirStart) + + Files.delete(source) + } + + private def mergeHeaders( + targetCentralDir: CentralDir, + sourceCentralDir: CentralDir, + sourceStart: Long + ): Seq[Header] = { + val sourceHeaders = getHeaders(sourceCentralDir) + sourceHeaders.foreach { header => + // potentially offsets should be updated for each header, + // not only in the central directory, but a valid zip tool + // should not rely on them unless the file is corrupted + val currentOffset = getFileOffset(header) + val newOffset = currentOffset + sourceStart + setFileOffset(header, newOffset) + } + + // override files from target with files from source + val
sourceNames = sourceHeaders.map(getFileName).toSet + val targetHeaders = + getHeaders(targetCentralDir).filterNot(h => sourceNames.contains(getFileName(h))) + + targetHeaders ++ sourceHeaders + } + + private def truncateCentralDir(centralDir: CentralDir, path: Path): Long = { + val sizeAfterTruncate = getCentralDirStart(centralDir) + new FileOutputStream(path.toFile, true).getChannel + .truncate(sizeAfterTruncate) + .close() + sizeAfterTruncate + } + + private def finalizeZip( + centralDir: CentralDir, + path: Path, + centralDirStart: Long + ): Unit = { + setCentralDirStart(centralDir, centralDirStart) + val fileOutputStream = new FileOutputStream(path.toFile, /*append =*/ true) + fileOutputStream.getChannel.position(centralDirStart) + val outputStream = new BufferedOutputStream(fileOutputStream) + writeCentralDir(centralDir, outputStream) + outputStream.close() + } + + private def transferAll( + source: Path, + target: Path, + startPos: Long, + bytesToTransfer: Long + ): Unit = { + val sourceFile = openFileForReading(source) + val targetFile = openFileForWriting(target) + var remaining = bytesToTransfer + var offset = startPos + while (remaining > 0) { + val transferred = + targetFile.transferFrom(sourceFile, /*position =*/ offset, /*count = */ remaining) + offset += transferred + remaining -= transferred + } + sourceFile.close() + targetFile.close() + } + + private def openFileForReading(path: Path): ReadableByteChannel = { + Channels.newChannel(new BufferedInputStream(Files.newInputStream(path))) + } + + private def openFileForWriting(path: Path): FileChannel = { + new FileOutputStream(path.toFile, /*append = */ true).getChannel + } + + protected def readCentralDir(path: Path): CentralDir + + protected def getCentralDirStart(centralDir: CentralDir): Long + protected def setCentralDirStart(centralDir: CentralDir, centralDirStart: Long): Unit + + protected def getHeaders(centralDir: CentralDir): Seq[Header] + protected def setHeaders(centralDir: CentralDir, headers: Seq[Header]): Unit + + protected def getFileName(header: Header): String + + protected def getFileOffset(header: Header): Long + protected def setFileOffset(header: Header, offset: Long): Unit + protected def getLastModifiedTime(header: Header): Long + + protected def writeCentralDir(centralDir: CentralDir, outputStream: OutputStream): Unit + +} + +// Adapted from sbt.io.IO.zip - disabled compression and simplified +sealed trait CreateZip { + + def createZip(target: File, files: Seq[(File, String)]): Unit = { + IO.createDirectory(target.getParentFile) + withZipOutput(target) { output => + writeZip(files, output) + } + } + + private def withZipOutput(file: File)(f: ZipOutputStream => Unit): Unit = { + Using.fileOutputStream()(file) { fileOut => + val zipOut = new ZipOutputStream(fileOut) + zipOut.setMethod(ZipOutputStream.DEFLATED) + zipOut.setLevel(Deflater.NO_COMPRESSION) + try { f(zipOut) } finally { zipOut.close() } + } + } + + private def writeZip(files: Seq[(File, String)], output: ZipOutputStream): Unit = { + val now = System.currentTimeMillis() + + def makeFileEntry(name: String): ZipEntry = { + val entry = new ZipEntry(name) + entry.setTime(now) + entry + } + + def addFileEntry(file: File, name: String): Unit = { + output.putNextEntry(makeFileEntry(name)) + IO.transfer(file, output) + output.closeEntry() + } + + files.foreach { case (file, name) => addFileEntry(file, normalizeName(name)) } + } + + private def normalizeName(name: String): String = { + val sep = File.separatorChar + if (sep == '/') name else 
name.replace(sep, '/') + } + +} diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala new file mode 100644 index 0000000000..6f5dbec15c --- /dev/null +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala @@ -0,0 +1,418 @@ +package sbt.internal.inc + +import sbt.io.IO +import java.util.zip.ZipFile +import java.io.File +import java.util.UUID + +import sbt.io.syntax.URL +import xsbti.compile.{ Output, SingleOutput } + +/** + * This is a utility class that provides a set of functions that + * are used to implement straight to jar compilation. + * + * [[xsbt.JarUtils]] is a class that has a similar purpose and + * duplicates some of the code, as it is difficult to share it. + */ +object JarUtils { + + /** Represents a path to a class file located inside a jar, relative to this jar */ + type ClassFilePath = String + + /** `ClassInJar` is an identifier for a class located inside a jar. + * For plain class files it is enough to simply use the actual file + * system path. A class in a jar is identified as a path to the jar + * and a path to the class within that jar (`RelClass`). Those two values + * are held in one string separated by `!`. Slashes in both + * paths are consistent with `File.separatorChar` as the actual + * string is usually kept in a `File` object. + * + * As an example: given a jar file "C:\develop\zinc\target\output.jar" + * and a relative path to the class "sbt/internal/inc/Compile.class", + * the resulting identifier would be: + * "C:\develop\zinc\target\output.jar!sbt\internal\inc\Compile.class" + */ + class ClassInJar(override val toString: String) extends AnyVal { + def toClassFilePath: ClassFilePath = splitJarReference._2 + def splitJarReference: (File, ClassFilePath) = { + val Array(jar, cls) = toString.split("!") + // ClassInJar stores RelClass part with File.separatorChar, however actual paths in zips always use '/' + val classFilePath = cls.replace('\\', '/') + (new File(jar), classFilePath) + } + + /** + * Wraps the string value inside a [[java.io.File]] object. + * File is needed to e.g. be compatible with [[xsbti.compile.analysis.ReadStamps]] interface. + */ + def toFile: File = new File(toString) + + } + + object ClassInJar { + + private val forwardSlash = File.separatorChar == '/' + + /** + * The base constructor for `ClassInJar` + * + * @param jar the jar file + * @param cls the relative path to the class within the jar + * @return a proper ClassInJar identified by the given jar and path to the class + */ + def apply(jar: File, cls: ClassFilePath): ClassInJar = { + // This identifier will be stored as a java.io.File. Its constructor will normalize slashes, + // which means that, to be consistent, the identifier should at all points have consistent + // slashes for safe comparisons, especially in sets or maps. + val classFilePath = if (forwardSlash) cls else cls.replace('/', File.separatorChar) + new ClassInJar(s"$jar!$classFilePath") + } + + /** + * Converts a URL to a class in a jar into a `ClassInJar`. The method is rather trivial + * as it also takes the precomputed path to the jar that it logically should extract itself.
+ * However, as it is computed at the call site anyway, it is passed + * as a parameter to avoid recomputation. + * + * As an example, given a URL: + * "jar:file:///C:/develop/zinc/target/output.jar!/sbt/internal/inc/Compile.class" + * and a file: "C:\develop\zinc\target\output.jar" + * it will create a `ClassInJar` represented as: + * "C:\develop\zinc\target\output.jar!sbt\internal\inc\Compile.class" + * + * @param url url to a class inside a jar + * @param jar the jar file where the class is located + * @return the class inside a jar represented as `ClassInJar` + */ + def fromURL(url: URL, jar: File): ClassInJar = { + val Array(_, cls) = url.getPath.split("!/") + apply(jar, cls) + } + + /** Initializes a `ClassInJar` based on its serialized value stored inside a file */ + def fromFile(f: File): ClassInJar = new ClassInJar(f.toString) + } + + /** + * Options that have to be specified when running scalac in order + * for Straight to Jar to work properly. + * + * -YdisableFlatCpCaching is needed to disable caching of the output jar + * that changes between compilation runs (incremental compilation cycles). + * Caching may hide those changes and lead to incorrect results. + */ + val scalacOptions = Set("-YdisableFlatCpCaching") + + /** + * Options that have to be specified when running javac in order + * for Straight to Jar to work properly. + * + * -XDuseOptimizedZip=false is needed because the optimized zip handling + * holds jars open, which causes problems with locks on Windows. + */ + val javacOptions = Set("-XDuseOptimizedZip=false") + + /** Reads the current index of a jar file so that it can be restored later with `unstashIndex` */ + def stashIndex(jar: File): IndexBasedZipFsOps.CentralDir = { + IndexBasedZipFsOps.readCentralDir(jar) + } + + /** Replaces the index in the given jar file with the specified one */ + def unstashIndex(jar: File, index: IndexBasedZipFsOps.CentralDir): Unit = { + IndexBasedZipFsOps.writeCentralDir(jar, index) + } + + /** + * Adds plain files to the specified jar file. See [[sbt.internal.inc.IndexBasedZipOps#includeInArchive]] for details. + */ + def includeInJar(jar: File, files: Seq[(File, ClassFilePath)]): Unit = { + IndexBasedZipFsOps.includeInArchive(jar, files) + } + + /** + * Merges contents of two jars. See [[sbt.internal.inc.IndexBasedZipOps#mergeArchives]] for details. + */ + def mergeJars(into: File, from: File): Unit = { + IndexBasedZipFsOps.mergeArchives(into, from) + } + + /** Lists class file entries in a jar e.g. sbt/internal/inc/JarUtils.class */ + def listClassFiles(jar: File): Seq[String] = { + IndexBasedZipFsOps.listEntries(jar).filter(_.endsWith(".class")) + } + + /** + * Removes the specified entries from a jar file. + */ + def removeFromJar(jarFile: File, classes: Iterable[ClassFilePath]): Unit = { + if (jarFile.exists()) { + IndexBasedZipFsOps.removeEntries(jarFile, classes) + } + } + + /** + * Reads all timestamps from the given jar file. Returns a function that + * allows accessing them by `ClassInJar` wrapped in a `File`. + */ + def readStamps(jar: File): File => Long = { + val stamps = new IndexBasedZipFsOps.CachedStamps(jar) + file => + stamps.getStamp(ClassInJar.fromFile(file).toClassFilePath) + } + + /** + * Runs the compilation with the previous jar if required. + * + * When compiling directly to a jar, scalac will produce + * a jar file; if one already exists, it will be overwritten rather + * than updated. For the sake of incremental compilation, it + * is required to merge the output from previous compilation(s) + * with the current one.
To make it work, the jar output from + * previous compilation is stored aside (renamed) to avoid + * being overwritten. The compilation is run normally to the specified + * output jar. The produced output jar is then merged with the + * jar from previous compilation(s). + * + * Classes from the previous jar need to be available for the current + * compiler run - they need to be added to the classpath. This is + * implemented by taking a function that, given an additional classpath, + * runs the compilation. + * + * If compilation fails, no jar is produced and the previous jar + * is simply restored (moved back to the output jar path). + * + * If the previous output does not exist or the output is not a jar + * at all (the JarUtils feature is disabled), this function runs a normal + * compilation. + * + * @param output output for scalac compilation + * @param compile function that, given an extra classpath for the compiler, runs the compilation + */ + def withPreviousJar(output: Output)(compile: /*extra classpath: */ Seq[File] => Unit): Unit = { + preparePreviousJar(output) match { + case Some((prevJar, outputJar)) => + try { + compile(Seq(prevJar)) + } catch { + case e: Exception => + IO.move(prevJar, outputJar) + throw e + } + cleanupPreviousJar(prevJar, outputJar) + case None => + compile(Nil) + } + } + + /** + * If compilation to jar is enabled and a previous jar exists, this + * will prepare the prev jar, i.e. move the existing output + * to a temporary location. It will return a tuple of the path + * to the moved prev jar and the path to the output jar. + * The returned prev jar file should be added to the classpath + * of the compiler. + */ + def preparePreviousJar(output: Output): Option[(File, File)] = { + getOutputJar(output) + .filter(_.exists()) + .map { outputJar => + val prevJar = createPrevJarPath() + IO.move(outputJar, prevJar) + (prevJar, outputJar) + } + } + + /** + * Performs cleanup after successful compilation that involved the + * previous jar. It merges the previous jar with the new output + * and puts the merged file back into the output jar path. + */ + def cleanupPreviousJar(prevJar: File, outputJar: File): Unit = { + if (outputJar.exists()) { + JarUtils.mergeJars(into = prevJar, from = outputJar) + } + IO.move(prevJar, outputJar) + } + + private var tempDir: File = _ + + /** + * Ensures that the temporary directory exists. + * + * @param temporaryClassesDirectory path to the temporary directory for classes. + * If not specified, a default will be used. + */ + def setupTempClassesDir(temporaryClassesDirectory: Option[File]): Unit = { + temporaryClassesDirectory match { + case Some(dir) => + IO.createDirectory(dir) + tempDir = dir + case None => + tempDir = new File(IO.temporaryDirectory, "zinc_temp_classes_dir") + } + } + + private def createPrevJarPath(): File = { + val prevJarName = s"$prevJarPrefix-${UUID.randomUUID()}.jar" + tempDir.toPath.resolve(prevJarName).toFile + } + + val prevJarPrefix: String = "prev-jar" + + /** Checks if the given file stores a ClassInJar */ + def isClassInJar(file: File): Boolean = { + file.toString.split("!") match { + case Array(jar, _) => jar.endsWith(".jar") + case _ => false + } + } + + /** + * Determines if Straight to Jar compilation is enabled + * by inspecting whether the compilation output is a jar file + */ + def isCompilingToJar(output: Output): Boolean = { + getOutputJar(output).isDefined + } + + /** Extracts a jar file from the output if it is set to be a single jar.
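+ * For example (hypothetical paths): a `SingleOutput` whose directory is `target/output.jar` + * yields `Some(target/output.jar)`, while a plain classes directory yields `None`.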
*/ + def getOutputJar(output: Output): Option[File] = { + output match { + case s: SingleOutput => + Some(s.getOutputDirectory).filter(_.getName.endsWith(".jar")) + case _ => None + } + } + + /** + * As javac does not support compiling directly to a jar, it is required to + * change its output to a temporary directory, as after compilation + * the plain classes are put into a zip file and merged with the output jar. + * + * This method returns the path to this directory based on the output jar. The result + * of this method has to be deterministic as it is called from different places + * independently. + */ + def javacTempOutput(outputJar: File): File = { + val outJarName = outputJar.getName + val outDirName = outJarName + "-javac-output" + outputJar.toPath.resolveSibling(outDirName).toFile + } + + /** + * The returned `OutputJarContent` object provides access + * to the current content of the output jar. + * It is prepared to be accessed from zinc's custom compiler + * phases. With some assumptions on how it works, the content + * can be cached and read only when necessary. + * + * Implementation details: + * The content has to be `reset` before each zinc run. This + * sets the output and reads the current contents of the output + * jar if it exists. The next reading will be necessary + * in the xsbt-analyzer phase, which runs after the jvm phase, so the + * jar with new contents will have appeared. To figure out that we + * are at that point, a call to `dependencyPhaseCompleted` is + * expected. As the content is not accessed between the dependency + * and analysis phases, we can be sure that we are after the jvm phase. + * The contents of the jar will not change until the next scalac run + * (except in the javac case), so we should not re-read the content. + * This is ensured by a call to the `scalacRunCompleted` method. + * After the scalac run, it is possible that javac will run, and + * its output will be added to the output jar. To keep the state + * consistent, `addClasses` should be called after adding those classes. + * Thanks to this, we know the content of the prev jar during + * the compilation, without the need to actually read it. + * The completion of the next dependency phase will trigger reading + * the output jar again. Note that at the point of reading we + * have both the prev jar and the new output jar with the just-compiled classes, + * so the contents of those (currently stored and just read) have + * to be combined. The last thing to do is to track class deletions + * while pruning between iterations, which is done through the + * `removeClasses` method.
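+ * + * A rough sketch of the expected call sequence within one zinc run (the exact call + * sites live in zinc's compiler phases and the javac integration): + * {{{ + * content.dependencyPhaseCompleted() // after the dependency phase: next `get` re-reads the jar + * content.scalacRunCompleted() // after scalac: contents are considered stable again + * content.addClasses(javacClasses) // after javac output is merged into the output jar + * content.removeClasses(prunedClasses) // while pruning between incremental cycles + * }}}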
+ */ + def createOutputJarContent(output: Output): OutputJarContent = { + getOutputJar(output) match { + case Some(jar) => new ValidOutputJarContent(jar) + case None => NoOutputJar + } + } + + sealed abstract class OutputJarContent { + def dependencyPhaseCompleted(): Unit + def scalacRunCompleted(): Unit + def addClasses(classes: Set[ClassFilePath]): Unit + def removeClasses(classes: Set[ClassFilePath]): Unit + def get(): Set[ClassFilePath] + } + + private object NoOutputJar extends OutputJarContent { + def dependencyPhaseCompleted(): Unit = () + def scalacRunCompleted(): Unit = () + def addClasses(classes: Set[ClassFilePath]): Unit = () + def removeClasses(classes: Set[ClassFilePath]): Unit = () + def get(): Set[ClassFilePath] = Set.empty + } + + private class ValidOutputJarContent(outputJar: File) extends OutputJarContent { + private var content: Set[ClassFilePath] = Set.empty + private var shouldReadJar: Boolean = false + + update() + + def dependencyPhaseCompleted(): Unit = { + shouldReadJar = true + } + + def scalacRunCompleted(): Unit = { + shouldReadJar = false + } + + def addClasses(classes: Set[ClassFilePath]): Unit = { + content ++= classes + } + + def removeClasses(classes: Set[ClassFilePath]): Unit = { + content --= classes + } + + def get(): Set[ClassFilePath] = { + if (shouldReadJar) update() + shouldReadJar = false + content + } + + private def update(): Unit = { + if (outputJar.exists()) { + content ++= JarUtils.listClassFiles(outputJar).toSet + } + } + } + + /* Methods below are only used for test code. They are not optimized for performance. */ + /** Reads timestamp of given jared class */ + def readModifiedTime(jc: ClassInJar): Long = { + val (jar, cls) = jc.splitJarReference + if (jar.exists()) { + withZipFile(jar) { zip => + Option(zip.getEntry(cls)).map(_.getLastModifiedTime.toMillis).getOrElse(0) + } + } else 0 + } + + /** Checks if given jared class exists */ + def exists(jc: ClassInJar): Boolean = { + val (jar, cls) = jc.splitJarReference + jar.exists() && { + withZipFile(jar)(zip => zip.getEntry(cls) != null) + } + } + + private def withZipFile[A](zip: File)(f: ZipFile => A): A = { + val file = new ZipFile(zip) + try f(file) + finally file.close() + } +} diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Analyze.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Analyze.scala index 94a6e9c903..6a5aa7357f 100644 --- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Analyze.scala +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Analyze.scala @@ -15,17 +15,26 @@ import mutable.{ ArrayBuffer, Buffer } import scala.annotation.tailrec import java.io.File import java.net.URL + import xsbti.api.DependencyContext import xsbti.api.DependencyContext._ import sbt.io.IO import sbt.util.Logger +import xsbti.compile.{ Output, SingleOutput } private[sbt] object Analyze { - def apply[T](newClasses: Seq[File], sources: Seq[File], log: Logger)( + def apply[T](newClasses: Seq[File], + sources: Seq[File], + log: Logger, + output: Output, + finalJarOutput: Option[File])( analysis: xsbti.AnalysisCallback, loader: ClassLoader, readAPI: (File, Seq[Class[_]]) => Set[(String, String)]): Unit = { val sourceMap = sources.toSet[File].groupBy(_.getName) + // For performance reasons, precompute these as they are static throughout this analysis + val outputJarOrNull: File = finalJarOutput.getOrElse(null) + val singleOutputOrNull: File = output.getSingleOutput.orElse(null) def load(tpe: String, errMsg: => 
Option[String]): Option[Class[_]] = { if (tpe.endsWith("module-info")) None @@ -67,11 +76,16 @@ private[sbt] object Analyze { val srcClassName = loadEnclosingClass(loadedClass) + val finalClassFile = { + if (singleOutputOrNull == null || outputJarOrNull == null) newClass + else resolveFinalClassFile(newClass, singleOutputOrNull, outputJarOrNull, log) + } + srcClassName match { case Some(className) => - analysis.generatedNonLocalClass(source, newClass, binaryClassName, className) + analysis.generatedNonLocalClass(source, finalClassFile, binaryClassName, className) classNames += className - case None => analysis.generatedLocalClass(source, newClass) + case None => analysis.generatedLocalClass(source, finalClassFile) } sourceToClassFiles(source) += classFile @@ -110,7 +124,7 @@ private[sbt] object Analyze { def loadFromClassloader(): Option[File] = { for { url <- Option(loader.getResource(classNameToClassFile(onBinaryName))) - file <- urlAsFile(url, log) + file <- urlAsFile(url, log, finalJarOutput) } yield { classfilesCache(onBinaryName) = file; file } } @@ -122,8 +136,18 @@ private[sbt] object Analyze { analysis.classDependency(scalaLikeTypeName, fromClassName, context) } else { val cachedOrigin = classfilesCache.get(onBinaryName) - for (file <- cachedOrigin.orElse(loadFromClassloader())) - analysis.binaryDependency(file, onBinaryName, fromClassName, source, context) + for (file <- cachedOrigin.orElse(loadFromClassloader())) { + val binaryFile = { + if (singleOutputOrNull == null || outputJarOrNull == null) file + else resolveFinalClassFile(file, singleOutputOrNull, outputJarOrNull, log) + } + + analysis.binaryDependency(binaryFile, + onBinaryName, + fromClassName, + source, + context) + } } } case None => // It could be a stale class file, ignore @@ -168,13 +192,53 @@ private[sbt] object Analyze { } } } - private[this] def urlAsFile(url: URL, log: Logger): Option[File] = - try IO.urlAsFile(url) + + /** + * When straight-to-jar compilation is enabled, classes are compiled to a temporary directory + * because javac cannot compile to jar directly. The paths to class files that can be observed + * here through the file system or class loaders are located in temporary output directory for + * javac. As this output will be eventually included in the output jar (`finalJarOutput`), the + * analysis (products) have to be changed accordingly. + * + * Given `finalJarOutput = Some("/develop/zinc/target/output.jar")` and + * `output = "/develop/zinc/target/output.jar-javac-output"`, this method turns + * `/develop/zinc/target/output.jar-javac-output/sbt/internal/inc/Compile.class` + * into + * `/develop/zinc/target/output.jar!/sbt/internal/inc/Compile.class` + */ + private def resolveFinalClassFile( + realClassFile: File, + outputDir: File, + outputJar: File, + log: Logger + ): File = { + IO.relativize(outputDir, realClassFile) match { + case Some(relativeClass) => JarUtils.ClassInJar(outputJar, relativeClass).toFile + case None => + log.error(s"Class file $realClassFile is not relative to $outputDir") + realClassFile + } + } + + private[this] def urlAsFile(url: URL, log: Logger, finalJarOutput: Option[File]): Option[File] = + try urlAsFile(url, finalJarOutput) catch { case e: Exception => log.warn("Could not convert URL '" + url.toExternalForm + "' to File: " + e.toString) None } + + private def urlAsFile(url: URL, finalJarOutput: Option[File]): Option[File] = { + IO.urlAsFile(url).map { file => + // IO.urlAsFile removes the class reference in the jar url, let's add it back. 
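+ // e.g. (hypothetical paths) "jar:file:/tmp/out.jar!/pkg/A.class" is mapped by + // IO.urlAsFile to /tmp/out.jar; when that file is the output jar, the class + // reference is restored below as a ClassInJar, i.e. /tmp/out.jar!pkg/A.class.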
+ if (finalJarOutput.exists(_ == file)) { + JarUtils.ClassInJar.fromURL(url, file).toFile + } else { + file + } + } + } + private def trapAndLog(log: Logger)(execute: => Unit): Unit = { try { execute } catch { case e: Throwable => log.trace(e); log.error(e.toString) } } diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Parser.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Parser.scala index ffbaf72579..2b9441b031 100644 --- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Parser.scala +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/Parser.scala @@ -11,7 +11,10 @@ package inc package classfile import java.net.URL -import java.io.{ DataInputStream, File, InputStream } +import java.io.{ BufferedInputStream, InputStream, File, DataInputStream } + +import sbt.internal.io.ErrorHandling + import scala.annotation.switch import sbt.io.Using @@ -27,7 +30,15 @@ private[sbt] object Parser { Using.fileInputStream(file)(parse(file.toString)).right.get def apply(url: URL): ClassFile = - Using.urlInputStream(url)(parse(url.toString)).right.get + usingUrlInputStreamWithoutCaching(url)(parse(url.toString)).right.get + + // JarURLConnection with caching enabled will never close the jar + private val usingUrlInputStreamWithoutCaching = Using.resource((u: URL) => + ErrorHandling.translate("Error opening " + u + ": ") { + val urlConnection = u.openConnection() + urlConnection.setUseCaches(false) + new BufferedInputStream(urlConnection.getInputStream()) + }) private def parse(readableName: String)(is: InputStream): Either[String, ClassFile] = Right(parseImpl(readableName, is)) diff --git a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala index f9e0ed3602..047ef42446 100644 --- a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala +++ b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala @@ -12,6 +12,7 @@ import sbt.internal.util.ConsoleLogger import xsbti.api.DependencyContext._ import xsbti.{ AnalysisCallback, TestCallback } import xsbti.TestCallback.ExtractedClassDependencies +import xsbti.compile.SingleOutput import scala.collection.JavaConverters._ @@ -61,9 +62,12 @@ object JavaCompilerForUnitTesting { // - extract api representation out of Class (and saved it via a side effect) // - extract all base classes. 
// we extract just parents as this is enough for testing - Analyze(classFiles, srcFiles, logger)(analysisCallback, - classloader, - readAPI(analysisCallback, _, _)) + + val output = new SingleOutput { def getOutputDirectory: File = classesDir } + Analyze(classFiles, srcFiles, logger, output, finalJarOutput = None)( + analysisCallback, + classloader, + readAPI(analysisCallback, _, _)) (srcFiles, analysisCallback) } } diff --git a/internal/zinc-classpath/src/main/scala/sbt/internal/inc/classpath/ClassLoaders.scala b/internal/zinc-classpath/src/main/scala/sbt/internal/inc/classpath/ClassLoaders.scala index c2c2d1889b..44871975d6 100644 --- a/internal/zinc-classpath/src/main/scala/sbt/internal/inc/classpath/ClassLoaders.scala +++ b/internal/zinc-classpath/src/main/scala/sbt/internal/inc/classpath/ClassLoaders.scala @@ -58,6 +58,14 @@ final class SelfFirstLoader(classpath: Seq[URL], parent: ClassLoader) /** Doesn't load any classes itself, but instead verifies that all classes loaded through `parent` either come from `root` or `classpath`.*/ final class ClasspathFilter(parent: ClassLoader, root: ClassLoader, classpath: Set[File]) extends ClassLoader(parent) { + + def close(): Unit = { + parent match { + case ucl: URLClassLoader => ucl.close() + case _ => () + } + } + override def toString = s"""|ClasspathFilter( | parent = $parent diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/javac/LocalJava.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/javac/LocalJava.scala index 029e989c3d..4a712cb4e7 100644 --- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/javac/LocalJava.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/javac/LocalJava.scala @@ -12,6 +12,9 @@ package inc package javac import java.io.{ File, OutputStream, PrintWriter, Writer } +import java.nio.charset.Charset +import java.util.Locale + import javax.tools.JavaFileManager.Location import javax.tools.JavaFileObject.Kind import javax.tools.{ @@ -19,7 +22,9 @@ import javax.tools.{ ForwardingJavaFileManager, ForwardingJavaFileObject, JavaFileManager, - JavaFileObject + JavaFileObject, + StandardJavaFileManager, + DiagnosticListener } import sbt.internal.util.LoggerWriter @@ -126,8 +131,6 @@ final class LocalJavaCompiler(compiler: javax.tools.JavaCompiler) extends XJavaC val logWriter = new PrintWriter(logger) log.debug("Attempting to call " + compiler + " directly...") val diagnostics = new DiagnosticsReporter(reporter) - val fileManager = compiler.getStandardFileManager(diagnostics, null, null) - val jfiles = fileManager.getJavaFileObjectsFromFiles(sources.toList.asJava) /* Local Java compiler doesn't accept `-J<flag>` options, strip them. 
*/ + val (invalidOptions, cleanedOptions) = options partition (_ startsWith "-J") @@ -136,6 +139,15 @@ final class LocalJavaCompiler(compiler: javax.tools.JavaCompiler) extends XJavaC log.warn(invalidOptions.mkString("\t", ", ", "")) } + val fileManager = { + if (cleanedOptions.contains("-XDuseOptimizedZip=false")) { + fileManagerWithoutOptimizedZips(diagnostics) + } else { + compiler.getStandardFileManager(diagnostics, null, null) + } + } + + val jfiles = fileManager.getJavaFileObjectsFromFiles(sources.toList.asJava) val customizedFileManager = { val maybeClassFileManager = incToolOptions.classFileManager() if (incToolOptions.useCustomizedFileManager && maybeClassFileManager.isPresent) @@ -160,10 +172,42 @@ * to javac's behaviour, we report fail compilation from diagnostics. */ compileSuccess = success && !diagnostics.hasErrors } finally { + customizedFileManager.close() logger.flushLines(if (compileSuccess) Level.Warn else Level.Error) } compileSuccess } + + /** + * Rewrite of the [[javax.tools.JavaCompiler.getStandardFileManager]] method that also sets + * the useOptimizedZip=false flag. With forked javac, adding this option to the arguments just works. + * Here, as the `FileManager` is created before the `CompilationTask`, options do not get passed + * properly. Also there is no access to `com.sun.tools.javac` classes, hence the reflection... + */ + private def fileManagerWithoutOptimizedZips( + diagnostics: DiagnosticsReporter): StandardJavaFileManager = { + val classLoader = compiler.getClass.getClassLoader + val contextClass = Class.forName("com.sun.tools.javac.util.Context", true, classLoader) + val optionsClass = Class.forName("com.sun.tools.javac.util.Options", true, classLoader) + val javacFileManagerClass = + Class.forName("com.sun.tools.javac.file.JavacFileManager", true, classLoader) + + val `Options.instance` = optionsClass.getMethod("instance", contextClass) + val `context.put` = contextClass.getMethod("put", classOf[Class[_]], classOf[Object]) + val `options.put` = optionsClass.getMethod("put", classOf[String], classOf[String]) + val `new JavacFileManager` = + javacFileManagerClass.getConstructor(contextClass, classOf[Boolean], classOf[Charset]) + + val context = contextClass.newInstance().asInstanceOf[AnyRef] + `context.put`.invoke(context, classOf[Locale], null) + `context.put`.invoke(context, classOf[DiagnosticListener[_]], diagnostics) + val options = `Options.instance`.invoke(null, context) + `options.put`.invoke(options, "useOptimizedZip", "false") + + `new JavacFileManager` + .newInstance(context, Boolean.box(true), null) + .asInstanceOf[StandardJavaFileManager] + } } /** diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala index 81f3fbf661..b51ed3a3be 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala @@ -15,11 +15,12 @@ import java.util.Optional import collection.mutable import xsbti.compile.{ - ClassFileManager => XClassFileManager, - ClassFileManagerType, - DeleteImmediatelyManagerType, IncOptions, - TransactionalManagerType + DeleteImmediatelyManagerType, + TransactionalManagerType, + ClassFileManagerType, + ClassFileManager => XClassFileManager, + Output } object ClassFileManager { @@ -42,6 +43,33 @@ object ClassFileManager { xsbti.compile.WrappedClassFileManager.of(internal,
external.toOptional) } + def getDefaultClassFileManager( + classFileManagerType: Optional[ClassFileManagerType], + output: Output, + outputJarContent: JarUtils.OutputJarContent + ): XClassFileManager = { + if (classFileManagerType.isPresent) { + classFileManagerType.get match { + case _: DeleteImmediatelyManagerType => deleteImmediately(output, outputJarContent) + case m: TransactionalManagerType => + transactional(output, outputJarContent, m.backupDirectory, m.logger) + } + } else deleteImmediately(output, outputJarContent) + } + + def getClassFileManager( + options: IncOptions, + output: Output, + outputJarContent: JarUtils.OutputJarContent + ): XClassFileManager = { + import sbt.internal.inc.JavaInterfaceUtil.{ EnrichOptional, EnrichOption } + val internal = + getDefaultClassFileManager(options.classfileManagerType, output, outputJarContent) + val external = Option(options.externalHooks()) + .flatMap(ext => ext.getExternalClassFileManager.toOption) + xsbti.compile.WrappedClassFileManager.of(internal, external.toOptional) + } + private final class DeleteClassFileManager extends XClassFileManager { override def delete(classes: Array[File]): Unit = IO.deleteFilesEmptyDirs(classes) @@ -56,6 +84,16 @@ object ClassFileManager { */ def deleteImmediately: XClassFileManager = new DeleteClassFileManager + def deleteImmediatelyFromJar(outputJar: File, + outputJarContent: JarUtils.OutputJarContent): XClassFileManager = + new DeleteClassFileManagerForJar(outputJar, outputJarContent) + + def deleteImmediately(output: Output, + outputJarContent: JarUtils.OutputJarContent): XClassFileManager = { + val outputJar = JarUtils.getOutputJar(output) + outputJar.fold(deleteImmediately)(deleteImmediatelyFromJar(_, outputJarContent)) + } + /** * Constructs a transactional [[ClassFileManager]] implementation that restores class * files to the way they were before compilation if there is an error. Otherwise, it @@ -66,6 +104,21 @@ object ClassFileManager { def transactional(tempDir0: File, logger: sbt.util.Logger): XClassFileManager = new TransactionalClassFileManager(tempDir0, logger) + def transactionalForJar(outputJar: File, + outputJarContent: JarUtils.OutputJarContent): XClassFileManager = { + new TransactionalClassFileManagerForJar(outputJar, outputJarContent) + } + + def transactional( + output: Output, + outputJarContent: JarUtils.OutputJarContent, + tempDir: File, + logger: sbt.util.Logger + ): XClassFileManager = { + val outputJar = JarUtils.getOutputJar(output) + outputJar.fold(transactional(tempDir, logger))(transactionalForJar(_, outputJarContent)) + } + private final class TransactionalClassFileManager(tempDir0: File, logger: sbt.util.Logger) extends XClassFileManager { val tempDir = tempDir0.getCanonicalFile @@ -114,4 +167,49 @@ object ClassFileManager { target } } + + private final class DeleteClassFileManagerForJar( + outputJar: File, + outputJarContent: JarUtils.OutputJarContent + ) extends XClassFileManager { + override def delete(classes: Array[File]): Unit = { + val relClasses = classes.map(c => JarUtils.ClassInJar.fromFile(c).toClassFilePath) + outputJarContent.removeClasses(relClasses.toSet) + JarUtils.removeFromJar(outputJar, relClasses) + } + override def generated(classes: Array[File]): Unit = () + override def complete(success: Boolean): Unit = () + } + + /** + * Version of [[sbt.internal.inc.ClassFileManager.TransactionalClassFileManager]] + * that works when sources are compiled directly to a jar file. 
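+ * It mirrors the transactional manager above, but instead of backing up class + * files it backs up only the jar's index (the central directory).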
+ * + * Before compilation the index is read from the output jar if it exists + * and after failed compilation it is reverted. This implementation relies + * on the fact that nothing is actually removed from jar during incremental + * compilation. Files are only removed from index or new files are appended + * and potential overwrite is also handled by replacing index entry. For this + * reason the old index with offsets to old files will still be valid. + */ + private final class TransactionalClassFileManagerForJar( + outputJar: File, + outputJarContent: JarUtils.OutputJarContent + ) extends XClassFileManager { + private val backedUpIndex = Some(outputJar).filter(_.exists()).map(JarUtils.stashIndex) + + override def delete(classesInJar: Array[File]): Unit = { + val classes = classesInJar.map(c => JarUtils.ClassInJar.fromFile(c).toClassFilePath) + JarUtils.removeFromJar(outputJar, classes) + outputJarContent.removeClasses(classes.toSet) + } + + override def generated(classes: Array[File]): Unit = () + + override def complete(success: Boolean): Unit = { + if (!success) { + backedUpIndex.foreach(index => JarUtils.unstashIndex(outputJar, index)) + } + } + } } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Compile.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Compile.scala index c5a432105b..faa1001feb 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Compile.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Compile.scala @@ -13,11 +13,11 @@ import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct } import xsbt.api.{ APIUtil, HashAPI, NameHashing } import xsbti.api._ import xsbti.compile.{ - ClassFileManager => XClassFileManager, CompileAnalysis, DependencyChanges, IncOptions, - Output + Output, + ClassFileManager => XClassFileManager } import xsbti.{ Position, Problem, Severity, UseScope } import sbt.util.Logger @@ -25,8 +25,9 @@ import sbt.util.InterfaceUtil.jo2o import java.io.File import java.util +import scala.collection.JavaConverters._ import xsbti.api.DependencyContext -import xsbti.compile.analysis.ReadStamps +import xsbti.compile.analysis.{ ReadStamps, Stamp } /** * Helper methods for running incremental compilation. 
All this is responsible for is @@ -55,9 +56,12 @@ object IncrementalCompile { previous0: CompileAnalysis, output: Output, log: Logger, - options: IncOptions): (Boolean, Analysis) = { + options: IncOptions, + outputJarContent: JarUtils.OutputJarContent + ): (Boolean, Analysis) = { val previous = previous0 match { case a: Analysis => a } - val current = Stamps.initial(Stamper.forLastModified, Stamper.forHash, Stamper.forLastModified) + val current = + Stamps.initial(Stamper.forLastModified, Stamper.forHash, Stamper.forLastModified) val internalBinaryToSourceClassName = (binaryClassName: String) => previous.relations.productClassName.reverse(binaryClassName).headOption val internalSourceToClassNamesMap: File => Set[String] = (f: File) => @@ -75,9 +79,12 @@ object IncrementalCompile { externalAPI, current, output, - options), + options, + outputJarContent), log, - options + options, + output, + outputJarContent ) } catch { case _: xsbti.CompileCancelled => @@ -106,16 +113,20 @@ private object AnalysisCallback { externalAPI: (File, String) => Option[AnalyzedClass], current: ReadStamps, output: Output, - options: IncOptions + options: IncOptions, + outputJarContent: JarUtils.OutputJarContent ) { - def build(): AnalysisCallback = new AnalysisCallback( - internalBinaryToSourceClassName, - internalSourceToClassNamesMap, - externalAPI, - current, - output, - options - ) + def build(): AnalysisCallback = { + new AnalysisCallback( + internalBinaryToSourceClassName, + internalSourceToClassNamesMap, + externalAPI, + current, + output, + options, + outputJarContent + ) + } } } @@ -125,7 +136,8 @@ private final class AnalysisCallback( externalAPI: (File, String) => Option[AnalyzedClass], stampReader: ReadStamps, output: Output, - options: IncOptions + options: IncOptions, + outputJarContent: JarUtils.OutputJarContent ) extends xsbti.AnalysisCallback { private[this] val compilation: Compilation = Compilation(output) @@ -300,8 +312,10 @@ private final class AnalysisCallback( override def enabled(): Boolean = options.enabled - def get: Analysis = + def get: Analysis = { + outputJarContent.scalacRunCompleted() addUsedNames(addCompilation(addProductsAndDeps(Analysis.empty))) + } def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten def addCompilation(base: Analysis): Analysis = @@ -356,7 +370,15 @@ private final class AnalysisCallback( ) } - def addProductsAndDeps(base: Analysis): Analysis = + def createStamperForProducts(): File => Stamp = { + JarUtils.getOutputJar(output) match { + case Some(outputJar) => Stamper.forLastModifiedInJar(outputJar) + case None => stampReader.product _ + } + } + + def addProductsAndDeps(base: Analysis): Analysis = { + val stampProduct = createStamperForProducts() (base /: srcs) { case (a, src) => val stamp = stampReader.source(src) @@ -367,7 +389,7 @@ private final class AnalysisCallback( getOrNil(mainClasses, src)) val binaries = binaryDeps.getOrElse(src, Nil: Iterable[File]) val localProds = localClasses.getOrElse(src, Nil: Iterable[File]) map { classFile => - val classFileStamp = stampReader.product(classFile) + val classFileStamp = stampProduct(classFile) LocalProduct(classFile, classFileStamp) } val binaryToSrcClassName = (classNames.getOrElse(src, Set.empty) map { @@ -376,7 +398,7 @@ private final class AnalysisCallback( val nonLocalProds = nonLocalClasses.getOrElse(src, Nil: Iterable[(File, String)]) map { case (classFile, binaryClassName) => val srcClassName = binaryToSrcClassName(binaryClassName) - val classFileStamp = 
stampReader.product(classFile) + val classFileStamp = stampProduct(classFile) NonLocalProduct(srcClassName, binaryClassName, classFile, classFileStamp) } @@ -394,8 +416,16 @@ private final class AnalysisCallback( externalDeps, binDeps) } + } - override def dependencyPhaseCompleted(): Unit = {} + override def dependencyPhaseCompleted(): Unit = { + outputJarContent.dependencyPhaseCompleted() + } override def apiPhaseCompleted(): Unit = {} + + override def classesInOutputJar(): java.util.Set[String] = { + outputJarContent.get().asJava + } + } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index e5fc777566..4d58f26247 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -14,10 +14,11 @@ import java.io.File import sbt.util.{ Level, Logger } import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } import xsbti.compile.{ + ClassFileManager => XClassFileManager, CompileAnalysis, DependencyChanges, IncOptions, - ClassFileManager => XClassFileManager + Output } /** @@ -46,6 +47,7 @@ object Incremental { * @param callbackBuilder The builder that builds callback where we report dependency issues. * @param log The log where we write debugging information * @param options Incremental compilation options + * @param outputJarContent Object that holds cached content of output jar * @param profiler An implementation of an invalidation profiler, empty by default. * @param equivS The means of testing whether two "Stamps" are the same. * @return @@ -60,12 +62,15 @@ object Incremental { callbackBuilder: AnalysisCallback.Builder, log: sbt.util.Logger, options: IncOptions, + output: Output, + outputJarContent: JarUtils.OutputJarContent, profiler: InvalidationProfiler = InvalidationProfiler.empty )(implicit equivS: Equiv[XStamp]): (Boolean, Analysis) = { val previous = previous0 match { case a: Analysis => a } val runProfiler = profiler.profileRun val incremental: IncrementalCommon = new IncrementalNameHashing(log, options, runProfiler) - val initialChanges = incremental.detectInitialChanges(sources, previous, current, lookup) + val initialChanges = + incremental.detectInitialChanges(sources, previous, current, lookup, output) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray val modifiedClasses = initialChanges.external.allModified.toArray @@ -79,7 +84,7 @@ object Incremental { incremental.log.debug( "All initially invalidated classes: " + initialInvClasses + "\n" + "All initially invalidated sources:" + initialInvSources + "\n") - val analysis = manageClassfiles(options) { classfileManager => + val analysis = manageClassfiles(options, output, outputJarContent) { classfileManager => incremental.cycle(initialInvClasses, initialInvSources, sources, @@ -120,15 +125,23 @@ object Incremental { private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) - private[sbt] def prune(invalidatedSrcs: Set[File], previous0: CompileAnalysis): Analysis = { + private[sbt] def prune(invalidatedSrcs: Set[File], + previous0: CompileAnalysis, + output: Output, + outputJarContent: JarUtils.OutputJarContent): Analysis = { val previous = previous0.asInstanceOf[Analysis] - IncrementalCommon.pruneClassFilesOfInvalidations(invalidatedSrcs, - previous, - ClassFileManager.deleteImmediately) + 
IncrementalCommon.pruneClassFilesOfInvalidations( + invalidatedSrcs, + previous, + ClassFileManager.deleteImmediately(output, outputJarContent)) } - private[this] def manageClassfiles[T](options: IncOptions)(run: XClassFileManager => T): T = { - val classfileManager = ClassFileManager.getClassFileManager(options) + private[this] def manageClassfiles[T]( + options: IncOptions, + output: Output, + outputJarContent: JarUtils.OutputJarContent + )(run: XClassFileManager => T): T = { + val classfileManager = ClassFileManager.getClassFileManager(options, output, outputJarContent) val result = try run(classfileManager) catch { case e: Throwable => diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index 920af2b9e4..e9cd4251c7 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -14,7 +14,12 @@ import java.io.File import sbt.util.Logger import xsbt.api.APIUtil import xsbti.api.AnalyzedClass -import xsbti.compile.{ DependencyChanges, IncOptions, ClassFileManager => XClassFileManager } +import xsbti.compile.{ + DependencyChanges, + IncOptions, + Output, + ClassFileManager => XClassFileManager +} import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } import scala.annotation.tailrec @@ -234,7 +239,8 @@ private[inc] abstract class IncrementalCommon( sources: Set[File], previousAnalysis: Analysis, stamps: ReadStamps, - lookup: Lookup + lookup: Lookup, + output: Output )(implicit equivS: Equiv[XStamp]): InitialChanges = { import IncrementalCommon.{ isBinaryModified, findExternalAnalyzedClass } val previous = previousAnalysis.stamps @@ -251,8 +257,12 @@ private[inc] abstract class IncrementalCommon( } } - val removedProducts = lookup.removedProducts(previousAnalysis).getOrElse { - previous.allProducts.filter(p => !equivS.equiv(previous.product(p), stamps.product(p))).toSet + val removedProducts: Set[File] = lookup.removedProducts(previousAnalysis).getOrElse { + val currentProductsStamps = + JarUtils.getOutputJar(output).fold(stamps.product _)(Stamper.forLastModifiedInJar) + previous.allProducts + .filter(p => !equivS.equiv(previous.product(p), currentProductsStamps(p))) + .toSet } val changedBinaries: Set[File] = lookup.changedBinaries(previousAnalysis).getOrElse { diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala index eac6366ab1..c8c98542f1 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala @@ -110,7 +110,7 @@ object MiniSetupUtil { new Equiv[MiniOptions] { def equiv(a: MiniOptions, b: MiniOptions) = { equivScalacOpts.equiv(a.scalacOptions, b.scalacOptions) && - (a.javacOptions sameElements b.javacOptions) + equivJavacOptions.equiv(a.javacOptions, b.javacOptions) } } } @@ -128,6 +128,13 @@ object MiniSetupUtil { } def equivScalacOptions(ignoredRegexes: Array[String]): Equiv[Array[String]] = { + equivCompilerOptions(ignoredRegexes) + } + + // ignoring -d as it is overridden anyway + val equivJavacOptions: Equiv[Array[String]] = equivCompilerOptions(Array("-d .*")) + + def equivCompilerOptions(ignoredRegexes: Array[String]): Equiv[Array[String]] = { def groupWithParams(opts: Array[String]): Set[String] = { def isParam(s: String) = !s.startsWith("-") def 
recur(opts: List[String], res: Set[String]): Set[String] = opts match { diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Stamp.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Stamp.scala index f96eb3b6f0..df76a73634 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Stamp.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Stamp.scala @@ -141,9 +141,14 @@ object Stamper { catch { case _: IOException => EmptyStamp } } - val forHash = (toStamp: File) => tryStamp(Hash.ofFile(toStamp)) - val forLastModified = (toStamp: File) => + val forHash: File => XStamp = (toStamp: File) => tryStamp(Hash.ofFile(toStamp)) + val forLastModified: File => XStamp = (toStamp: File) => tryStamp(new LastModified(IO.getModifiedTimeOrZero(toStamp))) + def forLastModifiedInJar(jar: File): File => XStamp = { + val stamps = JarUtils.readStamps(jar) + (file: File) => + new LastModified(stamps(file)) + } } object Stamps { @@ -241,6 +246,7 @@ private class InitialStamps(prodStamp: File => XStamp, binStamp: File => XStamp) extends ReadStamps { import collection.mutable.{ HashMap, Map } + // cached stamps for files that do not change during compilation private val sources: Map[File, XStamp] = new HashMap private val binaries: Map[File, XStamp] = new HashMap diff --git a/internal/zinc-scripted/src/test/scala/sbt/inc/MainScriptedRunner.scala b/internal/zinc-scripted/src/test/scala/sbt/inc/MainScriptedRunner.scala index e286c229b2..143104f5e7 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/inc/MainScriptedRunner.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/inc/MainScriptedRunner.scala @@ -10,16 +10,22 @@ import xsbti.compile.CompilerBridgeProvider import sbt.util.Logger final class MainScriptedRunner { - def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String]): Unit = { + def run( + resourceBaseDirectory: File, + bufferLog: Boolean, + compileToJar: Boolean, + tests: Array[String] + ): Unit = { IO.withTemporaryDirectory { tempDir => // Create a global temporary directory to store the bridge et al - val handlers = new MainScriptedHandlers(tempDir) + val handlers = new MainScriptedHandlers(tempDir, compileToJar) ScriptedRunnerImpl.run(resourceBaseDirectory, bufferLog, tests, handlers, 4) } } } -final class MainScriptedHandlers(tempDir: File) extends IncScriptedHandlers(tempDir) { +final class MainScriptedHandlers(tempDir: File, compileToJar: Boolean) + extends IncScriptedHandlers(tempDir, compileToJar) { // Create a provider that uses the bridges from the classes directory of the projects val provider: CompilerBridgeProvider = { val compilerBridge210 = ScalaBridge( @@ -56,7 +62,7 @@ final class MainScriptedHandlers(tempDir: File) extends IncScriptedHandlers(temp case x: ManagedLogger => x case _ => sys.error("Expected ManagedLogger") } - new BloopIncHandler(config.testDirectory(), tempDir, provider, logger) + new BloopIncHandler(config.testDirectory(), tempDir, provider, logger, compileToJar) } ) } @@ -65,7 +71,8 @@ final class BloopIncHandler( directory: File, cacheDir: File, provider: CompilerBridgeProvider, - logger: ManagedLogger -) extends IncHandler(directory, cacheDir, logger) { + logger: ManagedLogger, + compileToJar: Boolean +) extends IncHandler(directory, cacheDir, logger, compileToJar) { override def getZincProvider(targetDir: File, log: Logger): CompilerBridgeProvider = provider } diff --git a/internal/zinc-scripted/src/test/scala/sbt/inc/ScriptedMain.scala b/internal/zinc-scripted/src/test/scala/sbt/inc/ScriptedMain.scala index 
0781e3b176..aff23a6fd5 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/inc/ScriptedMain.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/inc/ScriptedMain.scala @@ -9,21 +9,19 @@ import sbt.io.{ AllPassFilter, NameFilter } import scala.language.reflectiveCalls object ScriptedMain { - def main(args: Array[String]): Unit = { - val (isUserDefined, buffer) = args.lastOption match { - case Some(last) => - if (last == "true") (true, true) - else if (last == "false") (true, false) - else (false, true) - case None => (false, true) + private val DisableBuffering = "--no-buffer" + private val CompileToJar = "--to-jar" + private val Flags = Set(DisableBuffering, CompileToJar) - } + def main(args: Array[String]): Unit = { + val compileToJar = args.contains(CompileToJar) + val disableBuffering = args.contains(DisableBuffering) + val argsToParse = args.filterNot(Flags.contains) val sourceDir = BuildInfo.sourceDirectory.toPath.resolve("sbt-test").toFile - val argsToParse = if (isUserDefined) args.init else args val tests = detectScriptedTests(sourceDir) val parsed = argsToParse.toList.flatMap(arg => parseScripted(tests, sourceDir, arg)) - runScripted(sourceDir, parsed, buffer) + runScripted(sourceDir, parsed, buffer = !disableBuffering, compileToJar) } private def detectScriptedTests(scriptedBase: File): Map[String, Set[String]] = { @@ -65,6 +63,7 @@ object ScriptedMain { def run( resourceBaseDirectory: File, bufferLog: Boolean, + compileToJar: Boolean, tests: Array[String] ): Unit } @@ -73,7 +72,8 @@ object ScriptedMain { def runScripted( source: File, args: Seq[String], - buffer: Boolean + buffer: Boolean, + compileToJar: Boolean ): Unit = { println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}") // Force Log4J to not use a thread context classloader otherwise it throws a CCE @@ -82,7 +82,7 @@ object ScriptedMain { val loader = ClasspathUtilities.toLoader(classpath) val bridgeClass = Class.forName("sbt.inc.MainScriptedRunner", true, loader) val bridge = bridgeClass.newInstance.asInstanceOf[IncScriptedRunner] - try bridge.run(source, buffer, args.toArray) + try bridge.run(source, buffer, compileToJar, args.toArray) catch { case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause } } diff --git a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala index f342ae7ed9..6ad1eb15c8 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncHandler.scala @@ -31,7 +31,7 @@ import java.lang.reflect.Method import java.lang.reflect.Modifier.{ isPublic, isStatic } import java.util.{ Optional, Properties } -import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathUtilities } +import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathUtilities, ClasspathFilter } import sbt.internal.scripted.{ StatementHandler, TestFailed } import sbt.internal.util.ManagedLogger import sjsonnew.support.scalajson.unsafe.{ Converter, Parser => JsonParser } @@ -49,7 +49,7 @@ final case class Build(projects: Seq[Project]) final case class IncInstance(si: xsbti.compile.ScalaInstance, cs: XCompilers) -class IncHandler(directory: File, cacheDir: File, scriptedLog: ManagedLogger) +class IncHandler(directory: File, cacheDir: File, scriptedLog: ManagedLogger, compileToJar: Boolean) extends BridgeProviderSpecification with StatementHandler { @@ -76,7 +76,8 @@ class IncHandler(directory: File, cacheDir: File, 
scriptedLog: ManagedLogger) val in = p.in.getOrElse(directory / p.name) val version = p.scalaVersion.getOrElse(scala.util.Properties.versionNumberString) val deps = p.dependsOn.toVector.flatten - val project = ProjectStructure(p.name, deps, in, scriptedLog, lookupProject, version) + val project = + ProjectStructure(p.name, deps, in, scriptedLog, lookupProject, version, compileToJar) buildStructure(p.name) = project } } @@ -88,7 +89,9 @@ class IncHandler(directory: File, cacheDir: File, scriptedLog: ManagedLogger) implicit val projectFormat = caseClass(Project.apply _, Project.unapply _)("name", "dependsOn", "in", "scalaVersion") implicit val buildFormat = caseClass(Build.apply _, Build.unapply _)("projects") - val json = JsonParser.parseFromFile(directory / "build.json").get + // Do not parseFromFile as it leaves file open, causing problems on Windows. + val channel = new FileInputStream(directory / "build.json").getChannel + val json = JsonParser.parseFromChannel(channel).get Converter.fromJsonUnsafe[Build](json) } else Build(projects = Vector(Project(name = RootIdentifier).copy(in = Some(directory)))) } @@ -165,12 +168,21 @@ class IncHandler(directory: File, cacheDir: File, scriptedLog: ManagedLogger) val analysis = p.compile(i) p.discoverMainClasses(Some(analysis.apis)) match { case Seq(mainClassName) => - val classpath: Array[File] = (i.si.allJars :+ p.classesDir) map { _.getAbsoluteFile } + val classpath: Array[File] = + ((i.si.allJars :+ p.classesDir) ++ p.outputJar).map(_.getAbsoluteFile) val loader = ClasspathUtilities.makeLoader(classpath, i.si, directory) - val main = p.getMainMethod(mainClassName, loader) - p.invokeMain(loader, main, params) - case _ => - throw new TestFailed("Found more than one main class.") + try { + val main = p.getMainMethod(mainClassName, loader) + p.invokeMain(loader, main, params) + } finally { + loader match { + case f: ClasspathFilter => f.close() + } + } + case Seq() => + throw new TestFailed(s"Did not find any main class") + case s => + throw new TestFailed(s"Found more than one main class: $s") } }, noArgs("package") { case (p, i) => p.packageBin(i) }, @@ -212,8 +224,10 @@ case class ProjectStructure( baseDirectory: File, scriptedLog: ManagedLogger, lookupProject: String => ProjectStructure, - scalaVersion: String + scalaVersion: String, + compileToJar: Boolean ) extends BridgeProviderSpecification { + val compiler = new IncrementalCompilerImpl val maxErrors = 100 class PerClasspathEntryLookupImpl( @@ -227,6 +241,7 @@ case class ProjectStructure( } val targetDir = baseDirectory / "target" val classesDir = targetDir / "classes" + val outputJar = if (compileToJar) Some(classesDir / "output.jar") else None val generatedClassFiles = classesDir ** "*.class" val scalaSourceDirectory = baseDirectory / "src" / "main" / "scala" val javaSourceDirectory = baseDirectory / "src" / "main" / "java" @@ -261,7 +276,9 @@ case class ProjectStructure( f1 orElse { case _ => None } } def dependsOnRef: Vector[ProjectStructure] = dependsOn map { lookupProject(_) } - def internalClasspath: Vector[File] = dependsOnRef map { _.classesDir } + def internalClasspath: Vector[File] = dependsOnRef flatMap { proj => + Vector(proj.classesDir) ++ proj.outputJar + } def checkSame(i: IncInstance): Unit = cachedStore.get.toOption match { @@ -329,10 +346,21 @@ case class ProjectStructure( def checkProducts(i: IncInstance, src: String, expected: List[String]): Unit = { val analysis = compile(i) + + def isWindows: Boolean = sys.props("os.name").toLowerCase.startsWith("win") def 
relativeClassDir(f: File): File = f.relativeTo(classesDir) getOrElse f + def normalizePath(path: String): String = { + if (isWindows) path.replace('\\', '/') else path + } def products(srcFile: String): Set[String] = { val productFiles = analysis.relations.products(baseDirectory / srcFile) - productFiles.map(relativeClassDir).map(_.getPath) + productFiles.map { file => + if (JarUtils.isClassInJar(file)) { + JarUtils.ClassInJar.fromFile(file).toClassFilePath + } else { + normalizePath(relativeClassDir(file).getPath) + } + } } def assertClasses(expected: Set[String], actual: Set[String]) = assert(expected == actual, s"Expected $expected products, got $actual") @@ -342,9 +370,12 @@ case class ProjectStructure( } def checkNoGeneratedClassFiles(): Unit = { - val allClassFiles = generatedClassFiles.get.mkString("\n\t") - if (!allClassFiles.isEmpty) - sys.error(s"Classes existed:\n\t$allClassFiles") + val allPlainClassFiles = generatedClassFiles.get.map(_.toString) + val allClassesInJar = outputJar.toSeq.filter(_.exists()).flatMap(JarUtils.listClassFiles) + if (allPlainClassFiles.nonEmpty || allClassesInJar.nonEmpty) { + val allClassFiles = allPlainClassFiles ++ allClassesInJar + sys.error(s"Classes existed:\n\t${allClassFiles.mkString("\n\t")}") + } } def checkDependencies(i: IncInstance, className: String, expected: List[String]): Unit = { @@ -392,11 +423,11 @@ case class ProjectStructure( extra ) - val classpath = - (i.si.allJars.toList ++ (unmanagedJars :+ classesDir) ++ internalClasspath).toArray + val output = outputJar.getOrElse(classesDir) + val classpath = (i.si.allJars.toList ++ (unmanagedJars :+ output) ++ internalClasspath).toArray val in = compiler.inputs(classpath, sources.toArray, - classesDir, + output, scalacOptions, Array(), maxErrors, @@ -404,7 +435,8 @@ case class ProjectStructure( CompileOrder.Mixed, cs, setup, - previousResult) + previousResult, + Optional.empty()) val result = compiler.compile(in, scriptedLog) val analysis = result.analysis match { case a: Analysis => a } cachedStore.set(AnalysisContents.create(analysis, result.setup)) @@ -414,17 +446,22 @@ case class ProjectStructure( def packageBin(i: IncInstance): Unit = { compile(i) - val jar = targetDir / s"$name.jar" - val manifest = new Manifest - val sources = - (classesDir ** -DirectoryFilter).get flatMap { - case x => - IO.relativize(classesDir, x) match { - case Some(path) => List((x, path)) - case _ => Nil + val targetJar = targetDir / s"$name.jar" + outputJar match { + case Some(currentJar) => + IO.copy(Seq(currentJar -> targetJar)) + () + case None => + val manifest = new Manifest + val sources = + (classesDir ** -DirectoryFilter).get flatMap { x => + IO.relativize(classesDir, x) match { + case Some(path) => List((x, path)) + case _ => Nil + } } - } - IO.jar(sources, jar, manifest) + IO.jar(sources, targetJar, manifest) + } } def unrecognizedArguments(commandName: String, args: List[String]): Unit = diff --git a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncScriptedRunner.scala b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncScriptedRunner.scala index 0f5cca876b..1bc774acce 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncScriptedRunner.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/IncScriptedRunner.scala @@ -9,10 +9,15 @@ import sbt.util.Logger import scala.collection.parallel.ParSeq class IncScriptedRunner { - def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String]): Unit = { + def run( + resourceBaseDirectory: File, + 
bufferLog: Boolean, + compileToJar: Boolean, + tests: Array[String] + ): Unit = { IO.withTemporaryDirectory { tempDir => // Create a global temporary directory to store the bridge et al - val handlers = new IncScriptedHandlers(tempDir) + val handlers = new IncScriptedHandlers(tempDir, compileToJar) ScriptedRunnerImpl.run(resourceBaseDirectory, bufferLog, tests, handlers, 4) } } diff --git a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ScriptedHandlers.scala b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ScriptedHandlers.scala index 6dfad9cf9f..f09ab57b62 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ScriptedHandlers.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ScriptedHandlers.scala @@ -16,7 +16,7 @@ class SleepingHandler(val handler: StatementHandler, delay: Long) extends Statem override def finish(state: State) = handler.finish(state) } -class IncScriptedHandlers(globalCacheDir: File) extends HandlersProvider { +class IncScriptedHandlers(globalCacheDir: File, compileToJar: Boolean) extends HandlersProvider { def getHandlers(config: ScriptConfig): Map[Char, StatementHandler] = Map( '$' -> new SleepingHandler(new ZincFileCommands(config.testDirectory()), 500), '#' -> CommentHandler, @@ -26,7 +26,7 @@ class IncScriptedHandlers(globalCacheDir: File) extends HandlersProvider { case x: ManagedLogger => x case _ => sys.error("Expected ManagedLogger") } - new IncHandler(config.testDirectory(), globalCacheDir, logger) + new IncHandler(config.testDirectory(), globalCacheDir, logger, compileToJar) } ) } diff --git a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ZincFileCommands.scala b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ZincFileCommands.scala index b1158d2a13..2088e0d31c 100644 --- a/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ZincFileCommands.scala +++ b/internal/zinc-scripted/src/test/scala/sbt/internal/inc/ZincFileCommands.scala @@ -1,18 +1,80 @@ package sbt.internal.inc import java.io.File +import java.nio.file.Paths import sbt.internal.scripted.FileCommands +import sbt.io.IO class ZincFileCommands(baseDirectory: File) extends FileCommands(baseDirectory) { override def commandMap: Map[String, List[String] => Unit] = { super.commandMap + { "pause" noArg { // Redefine pause not to use `System.console`, which is too restrictive - println(s"Pausing in $baseDirectory. Press enter to continue.") + println(s"Pausing in $baseDirectory (press enter to continue)") scala.io.StdIn.readLine() println("Restarting the execution.") } } } + + override def absent(paths: List[String]): Unit = { + val present = paths.filter(exists) + if (present.nonEmpty) + scriptError("File(s) existed: " + present.mkString("[ ", " , ", " ]")) + } + + override def newer(a: String, b: String): Unit = { + val isNewer = exists(a) && (!exists(b) || getModifiedTimeOrZero(a) > getModifiedTimeOrZero(b)) + if (!isNewer) { + scriptError(s"$a is not newer than $b") + } + } + + override def exists(paths: List[String]): Unit = { + val notPresent = paths.filterNot(exists) + if (notPresent.nonEmpty) { + scriptError("File(s) did not exist: " + notPresent.mkString("[ ", " , ", " ]")) + } + } + + private def exists(path: String): Boolean = { + pathFold(path)(_.exists(), JarUtils.exists)(_ || _) + } + + private def getModifiedTimeOrZero(path: String): Long = { + pathFold(path)(IO.getModifiedTimeOrZero, JarUtils.readModifiedTime)(_ max _) + } + + /** + * Folds over representations of path (analogously to Either#fold). 
+ * The path can refer either to a regular file or to a jared class. + * + * This method is admittedly hacky, but it lets the scripted tests work with + * Straight to Jar compilation without changing the assertions they make. + * The path always points to an actual file; this method runs a function on + * both the plain-file and the jared-class representation and then decides + * which result is relevant using the `combine` function. + * + * As an example, it will convert the path "target/classes/C.class" to + * "/tmp/sbt_535fddcd/target/classes/a/b/c/C.class" + * as well as to + * "/tmp/sbt_535fddcd/target/classes/output.jar!a/b/c/C.class" + * and run the functions on both, e.g. checking whether one of those files exists. + */ + private def pathFold[A](path: String)( + transformPlain: File => A, + transformJared: JarUtils.ClassInJar => A + )(combine: (A, A) => A): A = { + val jaredRes = { + val relBasePath = "target/classes" + IO.relativize(new File(relBasePath), new File(path)).map { relClass => + val jar = Paths.get(baseDirectory.toString, relBasePath, "output.jar").toFile + transformJared(JarUtils.ClassInJar(jar, relClass)) + } + } + val regularRes = transformPlain(fromString(path)) + jaredRes.map(combine(_, regularRes)).getOrElse(regularRes) + } + } diff --git a/project/Scripted.scala b/project/Scripted.scala index fb16162c05..808934bb87 100644 --- a/project/Scripted.scala +++ b/project/Scripted.scala @@ -15,6 +15,7 @@ object Scripted { "Saves you some time when only your test has changed") val scriptedSource = settingKey[File]("") val scriptedPrescripted = taskKey[File => Unit]("") + val scriptedCompileToJar = settingKey[Boolean]("Compile directly to jar in scripted tests") import sbt.complete._ import DefaultParsers._ @@ -65,7 +66,7 @@ object Scripted { // Interface to cross class loader type IncScriptedRunner = { - def run(resourceBaseDirectory: File, bufferLog: Boolean, tests: Array[String]): Unit + def run(resourceBaseDirectory: File, bufferLog: Boolean, compileToJar: Boolean, tests: Array[String]): Unit } def doScripted(scriptedSbtClasspath: Seq[Attributed[File]], scriptedSbtInstance: ScalaInstance, sourcePath: File, args: Seq[String], bufferLog: Boolean, + compileToJar: Boolean, prescripted: File => Unit): Unit = { System.err.println(s"About to run tests: ${args.mkString("\n * ", "\n * ", "\n")}") // Force Log4J to not use a thread context classloader otherwise it throws a CCE @@ -83,7 +85,7 @@ object Scripted { val bridge = bridgeClass.newInstance.asInstanceOf[IncScriptedRunner] // val launcherVmOptions = Array("-XX:MaxPermSize=256M") // increased after a failure in scripted source-dependencies/macro try { - bridge.run(sourcePath, bufferLog, args.toArray) + bridge.run(sourcePath, bufferLog, compileToJar, args.toArray) } catch { case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause } } } diff --git a/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala b/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala index 2a02dc3f7c..23a8df5663 100644 --- a/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala +++ b/zinc/src/main/scala/sbt/internal/inc/CompileConfiguration.scala @@ -49,5 +49,6 @@ final class CompileConfiguration( val compiler: xsbti.compile.ScalaCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, - val incOptions: IncOptions + val incOptions: IncOptions, + val outputJarContent: JarUtils.OutputJarContent ) diff --git a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala 
b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala index 3902d5203a..6d173ec052 100644 --- a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala +++ b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala @@ -65,6 +65,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { order, skip, incrementalCompilerOptions, + temporaryClassesDirectory.toOption, extraOptions )(logger) } @@ -96,6 +97,8 @@ class IncrementalCompilerImpl extends IncrementalCompiler { * the current compilation progress. * @param incrementalOptions An Instance of [[IncOptions]] that configures * the incremental compiler behaviour. + * @param temporaryClassesDirectory A directory where incremental compiler + * will put temporary class files or jars. * @param extra An array of sbt tuples with extra options. * @param logger An instance of [[Logger]] that logs Zinc output. * @return An instance of [[xsbti.compile.CompileResult]] that holds @@ -121,8 +124,9 @@ class IncrementalCompilerImpl extends IncrementalCompiler { skip: java.lang.Boolean, progress: Optional[xsbti.compile.CompileProgress], incrementalOptions: xsbti.compile.IncOptions, + temporaryClassesDirectory: Optional[File], extra: Array[xsbti.T2[String, String]], - logger: xsbti.Logger + logger: xsbti.Logger, ) = { val extraInScala = extra.toList.map(_.toScalaTuple) compileIncrementally( @@ -142,6 +146,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { compileOrder, skip: Boolean, incrementalOptions, + temporaryClassesDirectory.toOption, extraInScala )(logger) } @@ -176,7 +181,7 @@ class IncrementalCompilerImpl extends IncrementalCompiler { val msg = s"""## Exception when compiling $numberSources to $outputString - |${e.getMessage} + |${e.toString} |${ex.getStackTrace.mkString("\n")} """ logger.error(InterfaceUtil.toSupplier(msg.stripMargin)) @@ -233,13 +238,33 @@ class IncrementalCompilerImpl extends IncrementalCompiler { compileOrder: CompileOrder = Mixed, skip: Boolean = false, incrementalOptions: IncOptions, - extra: List[(String, String)] + temporaryClassesDirectory: Option[File], + extra: List[(String, String)], )(implicit logger: Logger): CompileResult = { handleCompilationError(sources, output, logger) { val prev = previousAnalysis match { case Some(previous) => previous case None => Analysis.empty } + + val compileStraightToJar = JarUtils.isCompilingToJar(output) + + // otherwise jars on classpath will not be closed, especially prev jar. 
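`JarUtils.isCompilingToJar` and `JarUtils.getOutputJar` are not shown in this diff; presumably they just inspect the single output location, along these lines (a sketch under that assumption, not the real helpers):

```scala
import java.io.File
import xsbti.compile.{ Output, SingleOutput }

// Assumed convention: a SingleOutput that points at a *.jar file rather
// than a classes directory marks the compilation as straight-to-jar.
def getOutputJar(output: Output): Option[File] = output match {
  case s: SingleOutput if s.getOutputDirectory.getName.endsWith(".jar") =>
    Some(s.getOutputDirectory)
  case _ => None
}

def isCompilingToJar(output: Output): Boolean = getOutputJar(output).isDefined
```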
+ if (compileStraightToJar) sys.props.put("scala.classpath.closeZip", "true") + + val extraScalacOptions = { + val scalaVersion = scalaCompiler.scalaInstance.version + if (compileStraightToJar && scalaVersion.startsWith("2.12")) { + JarUtils.scalacOptions + } else Seq.empty + } + + val extraJavacOptions = if (compileStraightToJar) { + JarUtils.javacOptions + } else Seq.empty + + val outputJarContent = JarUtils.createOutputJarContent(output) + val config = MixedAnalyzingCompiler.makeConfig( scalaCompiler, javaCompiler, @@ -248,8 +273,8 @@ class IncrementalCompilerImpl extends IncrementalCompiler { output, cache, progress, - scalaOptions, - javaOptions, + scalaOptions ++ extraScalacOptions, + javaOptions ++ extraJavacOptions, prev, previousSetup, perClasspathEntryLookup, @@ -257,10 +282,12 @@ class IncrementalCompilerImpl extends IncrementalCompiler { compileOrder, skip, incrementalOptions, + outputJarContent, extra ) if (skip) CompileResult.of(prev, config.currentSetup, false) else { + JarUtils.setupTempClassesDir(temporaryClassesDirectory) val (analysis, changed) = compileInternal( MixedAnalyzingCompiler(config)(logger), equivCompileSetup( @@ -295,9 +322,9 @@ class IncrementalCompilerImpl extends IncrementalCompiler { previousAnalysis else if (!equivPairs.equiv(previous.extra, currentSetup.extra)) Analysis.empty - else Incremental.prune(srcsSet, previousAnalysis) + else Incremental.prune(srcsSet, previousAnalysis, output, outputJarContent) case None => - Incremental.prune(srcsSet, previousAnalysis) + Incremental.prune(srcsSet, previousAnalysis, output, outputJarContent) } // Run the incremental compilation @@ -308,7 +335,8 @@ class IncrementalCompilerImpl extends IncrementalCompiler { analysis, output, log, - incOptions + incOptions, + outputJarContent ) compile.swap } @@ -347,7 +375,8 @@ class IncrementalCompilerImpl extends IncrementalCompiler { order: CompileOrder, compilers: Compilers, setup: Setup, - pr: PreviousResult + pr: PreviousResult, + temporaryClassesDirectory: Optional[File] ): Inputs = { val compileOptions = { CompileOptions.of( @@ -358,7 +387,8 @@ class IncrementalCompilerImpl extends IncrementalCompiler { javacOptions, maxErrors, foldMappers(sourcePositionMappers), - order + order, + temporaryClassesDirectory ) } inputs(compileOptions, compilers, setup, pr) diff --git a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala index dff1f8823f..03a1482d32 100644 --- a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala +++ b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala @@ -10,17 +10,17 @@ package internal package inc import java.io.File -import java.lang.ref.{ Reference, SoftReference } +import java.lang.ref.{ SoftReference, Reference } import java.util.Optional -import inc.javac.AnalyzingJavaCompiler import xsbti.{ Reporter, AnalysisCallback => XAnalysisCallback } import xsbti.compile.CompileOrder._ import xsbti.compile._ -import sbt.io.IO +import sbt.io.{ IO, DirectoryFilter } import sbt.util.{ InterfaceUtil, Logger } import sbt.internal.inc.JavaInterfaceUtil.EnrichOption import sbt.internal.inc.caching.ClasspathCache +import sbt.internal.inc.javac.AnalyzingJavaCompiler import xsbti.compile.{ ClassFileManager => XClassFileManager } /** An instance of an analyzing compiler that can run both javac + scalac. 
*/ @@ -28,17 +28,15 @@ final class MixedAnalyzingCompiler( val scalac: xsbti.compile.ScalaCompiler, val javac: AnalyzingJavaCompiler, val config: CompileConfiguration, - val log: Logger + val log: Logger, + outputJarContent: JarUtils.OutputJarContent ) { - import config._ - import currentSetup._ - - private[this] val absClasspath = classpath.map(_.getAbsoluteFile) + private[this] val absClasspath = toAbsolute(config.classpath) /** Mechanism to work with compiler arguments. */ private[this] val cArgs = - new CompilerArguments(compiler.scalaInstance, compiler.classpathOptions) + new CompilerArguments(config.compiler.scalaInstance, config.compiler.classpathOptions) /** * Compiles the given Java/Scala files. @@ -54,33 +52,40 @@ final class MixedAnalyzingCompiler( callback: XAnalysisCallback, classfileManager: XClassFileManager ): Unit = { + val output = config.currentSetup.output val outputDirs = outputDirectories(output) outputDirs.foreach { d => - if (!d.getPath.endsWith(".jar")) + if (d.getName.endsWith(".jar")) + IO.createDirectory(d.getParentFile) + else { IO.createDirectory(d) + } } - val incSrc = sources.filter(include) + val incSrc = config.sources.filter(include) val (javaSrcs, scalaSrcs) = incSrc.partition(javaOnly) logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs) /** Compile Scala sources. */ def compileScala(): Unit = if (scalaSrcs.nonEmpty) { - val sources = if (order == Mixed) incSrc else scalaSrcs - val arguments = cArgs(Nil, absClasspath, None, options.scalacOptions) - timed("Scala compilation", log) { - compiler.compile( - sources.toArray, - changes, - arguments.toArray, - output, - callback, - reporter, - config.cache, - log, - progress.toOptional - ) + JarUtils.withPreviousJar(output) { extraClasspath => + val sources = if (config.currentSetup.order == Mixed) incSrc else scalaSrcs + val cp = toAbsolute(extraClasspath) ++ absClasspath + val arguments = cArgs(Nil, cp, None, config.currentSetup.options.scalacOptions) + timed("Scala compilation", log) { + config.compiler.compile( + sources.toArray, + changes, + arguments.toArray, + output, + callback, + config.reporter, + config.cache, + log, + config.progress.toOptional + ) + } } } @@ -91,26 +96,42 @@ final class MixedAnalyzingCompiler( val incToolOptions = IncToolOptions.of( Optional.of(classfileManager), - incOptions.useCustomizedFileManager() + config.incOptions.useCustomizedFileManager() ) - val joptions = options.javacOptions().toArray[String] - javac.compile( - javaSrcs, - joptions, - output, - callback, - incToolOptions, - reporter, - log, - progress - ) + val joptions = config.currentSetup.options.javacOptions + + JarUtils.getOutputJar(output) match { + case Some(outputJar) => + val outputDir = JarUtils.javacTempOutput(outputJar) + IO.createDirectory(outputDir) + javac.compile(javaSrcs, + joptions, + CompileOutput(outputDir), + Some(outputJar), + callback, + incToolOptions, + config.reporter, + log, + config.progress) + putJavacOutputInJar(outputJar, outputDir) + case None => + javac.compile(javaSrcs, + joptions, + output, + finalJarOutput = None, + callback, + incToolOptions, + config.reporter, + log, + config.progress) + } } } } /* `Mixed` order defaults to `ScalaThenJava` behaviour. * See https://github.com/sbt/zinc/issues/234. 
*/ - if (order == JavaThenScala) { + if (config.currentSetup.order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() @@ -120,6 +141,27 @@ final class MixedAnalyzingCompiler( log.info("Done compiling.") } + private def putJavacOutputInJar(outputJar: File, outputDir: File): Unit = { + import sbt.io.syntax._ + val compiledClasses = (outputDir ** -DirectoryFilter).get.flatMap { classFile => + IO.relativize(outputDir, classFile) match { + case Some(relPath) => + List((classFile, relPath)) + case _ => Nil + } + } + + if (compiledClasses.nonEmpty) { + JarUtils.includeInJar(outputJar, compiledClasses) + outputJarContent.addClasses(compiledClasses.map(_._2).toSet) + } + IO.delete(outputDir) + } + + private def toAbsolute(extraClasspath: Seq[File]) = { + extraClasspath.map(_.getAbsoluteFile) + } + private[this] def outputDirectories(output: Output): Seq[File] = { output match { case single: SingleOutput => List(single.getOutputDirectory) @@ -180,6 +222,7 @@ object MixedAnalyzingCompiler { compileOrder: CompileOrder = Mixed, skip: Boolean = false, incrementalCompilerOptions: IncOptions, + outputJarContent: JarUtils.OutputJarContent, extra: List[(String, String)] ): CompileConfiguration = { val lookup = incrementalCompilerOptions.externalHooks().getExternalLookup @@ -218,7 +261,8 @@ object MixedAnalyzingCompiler { reporter, skip, cache, - incrementalCompilerOptions + incrementalCompilerOptions, + outputJarContent ) } @@ -235,7 +279,8 @@ object MixedAnalyzingCompiler { reporter: Reporter, skip: Boolean, cache: GlobalsCache, - incrementalCompilerOptions: IncOptions + incrementalCompilerOptions: IncOptions, + outputJarContent: JarUtils.OutputJarContent ): CompileConfiguration = { new CompileConfiguration( sources, @@ -249,7 +294,8 @@ object MixedAnalyzingCompiler { compiler, javac, cache, - incrementalCompilerOptions + incrementalCompilerOptions, + outputJarContent ) } @@ -259,13 +305,19 @@ object MixedAnalyzingCompiler { ): (Seq[File], String => Option[File]) = { import config._ import currentSetup._ + // If we are compiling straight to jar, as javac does not support this, + // it will be compiled to a temporary directory (with deterministic name) + // and then added to the final jar. This temporary directory has to be + // available for sbt.internal.inc.classfile.Analyze to work correctly. 
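`JarUtils.javacTempOutput`, used just below, is likewise not part of this diff. Because javac cannot write directly into a jar, Java sources are compiled to a deterministically named temporary directory and merged into the jar afterwards; a plausible sketch of that naming (the suffix is an assumption):

```scala
import java.io.File

// Derive a deterministic sibling directory for javac output from the
// output jar, e.g. target/output.jar -> target/output.jar-javac-output.
def javacTempOutput(outputJar: File): File =
  new File(outputJar.getParentFile, outputJar.getName + "-javac-output")
```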
+ val tempJavacOutput = + JarUtils.getOutputJar(currentSetup.output).map(JarUtils.javacTempOutput).toSeq val absClasspath = classpath.map(_.getAbsoluteFile) val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.classpathOptions) val searchClasspath = explicitBootClasspath(options.scalacOptions) ++ withBootclasspath( cArgs, absClasspath - ) + ) ++ tempJavacOutput (searchClasspath, Locate.entry(searchClasspath, perClasspathEntryLookup)) } @@ -291,7 +343,8 @@ object MixedAnalyzingCompiler { searchClasspath ), config, - log + log, + outputJarContent ) } diff --git a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala index ef999e9030..9ed29687e2 100644 --- a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala +++ b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala @@ -11,6 +11,7 @@ package inc package javac import java.io.File +import java.net.URLClassLoader import sbt.internal.inc.classfile.Analyze import sbt.internal.inc.classpath.ClasspathUtilities @@ -46,12 +47,35 @@ final class AnalyzingJavaCompiler private[sbt] ( val searchClasspath: Seq[File] ) extends JavaCompiler { + // for compatibility + def compile( + sources: Seq[File], + options: Seq[String], + output: Output, + callback: AnalysisCallback, + incToolOptions: IncToolOptions, + reporter: XReporter, + log: XLogger, + progressOpt: Option[CompileProgress] + ): Unit = { + compile(sources, + options, + output, + finalJarOutput = None, + callback, + incToolOptions, + reporter, + log, + progressOpt) + } + /** * Compile some java code using the current configured compiler. * * @param sources The sources to compile * @param options The options for the Java compiler * @param output The output configuration for this compiler + * @param finalJarOutput The output that will be used for straight to jar compilation. * @param callback A callback to report discovered source/binary dependencies on. * @param incToolOptions The component that manages generated class files. * @param reporter A reporter where semantic compiler failures can be reported. @@ -63,6 +87,7 @@ final class AnalyzingJavaCompiler private[sbt] ( sources: Seq[File], options: Seq[String], output: Output, + finalJarOutput: Option[File], callback: AnalysisCallback, incToolOptions: IncToolOptions, reporter: XReporter, @@ -107,9 +132,6 @@ final class AnalyzingJavaCompiler private[sbt] ( (classesFinder, classesFinder.get, srcs) } - // Construct class loader to analyze dependencies of generated class files - val loader = ClasspathUtilities.toLoader(searchClasspath) - // Record progress for java compilation val javaCompilationPhase = "Java compilation" progressOpt.map { progress => @@ -155,13 +177,25 @@ final class AnalyzingJavaCompiler private[sbt] ( progress.advance(1, 2) } + // Construct class loader to analyze dependencies of generated class files + val loader = ClasspathUtilities.toLoader(searchClasspath) + timed(javaAnalysisPhase, log) { for ((classesFinder, oldClasses, srcs) <- memo) { val newClasses = Set(classesFinder.get: _*) -- oldClasses - Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI) + Analyze(newClasses.toSeq, srcs, log, output, finalJarOutput)(callback, loader, readAPI) } } + // After using the classloader it should be closed. Otherwise it will keep the accessed + // jars open. 
Especially, when zinc is compiling directly to jar, that jar will be locked + // not allowing to change it in further compilation cycles (on Windows). + // This also affects jars in the classpath that come from dependency resolution. + loader match { + case u: URLClassLoader => u.close() + case _ => () + } + // Report that we reached the end progressOpt.foreach { progress => progress.advance(2, 2) diff --git a/zinc/src/sbt-test/source-dependencies/anon-class-java-depends-on-scala/JJ.java b/zinc/src/sbt-test/source-dependencies/anon-class-java-depends-on-scala/JJ.java index 3b9d25102f..4792932ecf 100644 --- a/zinc/src/sbt-test/source-dependencies/anon-class-java-depends-on-scala/JJ.java +++ b/zinc/src/sbt-test/source-dependencies/anon-class-java-depends-on-scala/JJ.java @@ -1,11 +1,11 @@ public class JJ { - public static void main(String[] args) { - // Declare anonymous class depending on Scala class - S s = new S() { - public void foo(String s) { - System.out.println(s); - } - }; - s.foo("ahoy"); - } + public static void main(String[] args) { + // Declare anonymous class depending on Scala class + S s = new S() { + public void foo(String s) { + System.out.println(s); + } + }; + s.foo("ahoy"); + } } diff --git a/zinc/src/sbt-test/source-dependencies/default-arguments-separate-compilation/incOptions.properties b/zinc/src/sbt-test/source-dependencies/default-arguments-separate-compilation/incOptions.properties index 17f05bf462..adfc92c361 100644 --- a/zinc/src/sbt-test/source-dependencies/default-arguments-separate-compilation/incOptions.properties +++ b/zinc/src/sbt-test/source-dependencies/default-arguments-separate-compilation/incOptions.properties @@ -1,2 +1 @@ -apiDebug = true relationsDebug = true diff --git a/zinc/src/test/scala/sbt/inc/BaseCompilerSpec.scala b/zinc/src/test/scala/sbt/inc/BaseCompilerSpec.scala index e291cf320f..83d6ec42b2 100644 --- a/zinc/src/test/scala/sbt/inc/BaseCompilerSpec.scala +++ b/zinc/src/test/scala/sbt/inc/BaseCompilerSpec.scala @@ -180,7 +180,8 @@ class BaseCompilerSpec extends BridgeProviderSpecification { CompileOrder.Mixed, cs, setup, - prev) + prev, + Optional.empty()) def doCompile(newInputs: Inputs => Inputs = identity): CompileResult = { lastCompiledUnits = Set.empty diff --git a/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala b/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala index 794b67c520..a9258e2575 100644 --- a/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala +++ b/zinc/src/test/scala/sbt/inc/MultiProjectIncrementalSpec.scala @@ -79,7 +79,8 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { CompileOrder.Mixed, cs, setup, - prev0) + prev0, + Optional.empty()) // This registers `test.pkg.Ext1` as the class name on the binary stamp val result0 = compiler.compile(in, log) val contents = AnalysisContents.create(result0.analysis(), result0.setup()) @@ -100,7 +101,8 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { CompileOrder.Mixed, cs, setup, - prev1) + prev1, + Optional.empty()) // This registers `test.pkg.Ext2` as the class name on the binary stamp, // which means `test.pkg.Ext1` is no longer in the stamp. 
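Note that these tests always pass `Optional.empty()` for the new trailing `temporaryClassesDirectory` argument of `compiler.inputs`. A caller exercising Straight to Jar compilation would supply a real directory instead, e.g. (hypothetical value):

```scala
import java.io.File
import java.util.Optional

// Hypothetical: intermediate class files go to a temporary directory before
// ending up in the output jar (cf. the new temporaryClassesDirectory
// parameter threaded through IncrementalCompilerImpl above).
val temporaryClassesDirectory: Optional[File] = Optional.of(new File("target/tmp-classes"))
```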
val result1 = compiler.compile(in1, log) @@ -137,7 +139,8 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { CompileOrder.Mixed, cs, setup2, - emptyPrev) + emptyPrev, + Optional.empty()) val result2 = compiler.compile(in2, log) fileStore2.set(AnalysisContents.create(result2.analysis(), result2.setup())) @@ -176,7 +179,8 @@ class MultiProjectIncrementalSpec extends BridgeProviderSpecification { CompileOrder.Mixed, cs, setup3, - prev) + prev, + Optional.empty()) val result3 = compiler.compile(in3, log) val a3 = result3.analysis match { case a: Analysis => a } fileStore.set(AnalysisContents.create(a3, result3.setup)) diff --git a/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala b/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala index f43eae172d..1a2667a59d 100644 --- a/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala +++ b/zinc/src/test/scala/sbt/inc/cached/CachedHashingSpec.scala @@ -9,8 +9,9 @@ package sbt.inc.cached import java.nio.file.{ Path, Paths } import java.io.File + import sbt.inc.{ BaseCompilerSpec, SourceFiles } -import sbt.internal.inc.{ Analysis, CompileOutput, MixedAnalyzingCompiler } +import sbt.internal.inc.{ CompileOutput, Analysis, MixedAnalyzingCompiler, JarUtils } import sbt.io.IO class CachedHashingSpec extends BaseCompilerSpec { @@ -35,13 +36,14 @@ class CachedHashingSpec extends BaseCompilerSpec { val javac = compilers.javaTools.javac val scalac = compilers.scalac val giganticClasspath = file(sys.props("user.home"))./(".ivy2").**("*.jar").get.take(500) + val output = CompileOutput(options.classesDirectory) def genConfig = MixedAnalyzingCompiler.makeConfig( scalac, javac, options.sources, giganticClasspath, - CompileOutput(options.classesDirectory), + output, setup.cache, setup.progress.toOption, options.scalacOptions, @@ -53,6 +55,7 @@ class CachedHashingSpec extends BaseCompilerSpec { options.order, setup.skip, setup.incrementalCompilerOptions, + JarUtils.createOutputJarContent(output), setup.extra.toList.map(_.toScalaTuple) ) @@ -77,13 +80,14 @@ class CachedHashingSpec extends BaseCompilerSpec { val javac = compilers.javaTools.javac val scalac = compilers.scalac val fakeLibraryJar = tempDir / "lib" / "foo.jar" + val output = CompileOutput(options.classesDirectory) def genConfig = MixedAnalyzingCompiler.makeConfig( scalac, javac, options.sources, List(fakeLibraryJar), - CompileOutput(options.classesDirectory), + output, setup.cache, setup.progress.toOption, options.scalacOptions, @@ -95,6 +99,7 @@ class CachedHashingSpec extends BaseCompilerSpec { options.order, setup.skip, setup.incrementalCompilerOptions, + JarUtils.createOutputJarContent(output), setup.extra.toList.map(_.toScalaTuple) )
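Several hunks above rely on `JarUtils.ClassInJar` and its `<jar>!<relative/path>` encoding (`fromFile`, `toClassFilePath`), which this diff does not define. A self-contained sketch of the convention, inferred from the call sites rather than taken from the real implementation:

```scala
import java.io.File

// A class file inside the output jar is addressed by the synthetic path
// <jar>!<relative/class/path>, e.g. target/output.jar!a/b/C.class.
final case class ClassInJar(jar: File, relClass: String) {
  def toClassFilePath: String = relClass
  def toFile: File = new File(s"${jar.getPath}!$relClass")
}

object ClassInJar {
  // Split on the '!' separator; normalize Windows separators so relative
  // class paths compare equal across platforms.
  def fromFile(f: File): ClassInJar = {
    val Array(jarPath, relClass) = f.getPath.split('!')
    ClassInJar(new File(jarPath), relClass.replace('\\', '/'))
  }
}
```

This keeps every product addressable as a `java.io.File` throughout the existing incremental-compilation data structures, which is why the change can thread jar-resident classes through `Analysis`, stamps, and the class file managers without altering their interfaces.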