From a813eb96f4c0524969b00c42424ee053098179a9 Mon Sep 17 00:00:00 2001
From: jvican
Date: Fri, 24 Aug 2018 16:24:34 +0200
Subject: [PATCH 1/6] Remove stale `ClassToSourceMapper` functions

And inline the actually used function in the companion of
`IncrementalCommon`, with a better and simpler implementation.
---
 .../internal/inc/ClassToSourceMapper.scala    | 90 -------------------
 .../sbt/internal/inc/IncrementalCommon.scala  | 40 ++++++---
 2 files changed, 28 insertions(+), 102 deletions(-)
 delete mode 100644 internal/zinc-core/src/main/scala/sbt/internal/inc/ClassToSourceMapper.scala

diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassToSourceMapper.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassToSourceMapper.scala
deleted file mode 100644
index 3e5864572c..0000000000
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassToSourceMapper.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Zinc - The incremental compiler for Scala.
- * Copyright 2011 - 2017, Lightbend, Inc.
- * Copyright 2008 - 2010, Mark Harrah
- * This software is released under the terms written in LICENSE.
- */
-
-package sbt
-package internal
-package inc
-
-import java.io.File
-
-import sbt.internal.util.Relation
-import xsbt.api.APIUtil
-
-/**
- * Maps class-based dependencies to source dependencies using `classes` relation.
- *
- * The mapping is performed using two relations that track declared classes before
- * and after recompilation of sources. This way, we can properly map dependencies
- * on classes that have been moved between source files. In such case, a single
- * class can be mapped to two different source files.
- */
-class ClassToSourceMapper(previousRelations: Relations, recompiledRelations: Relations) {
-
-  def toSrcFile(className: String): Set[File] = {
-    val srcs = previousRelations.classes.reverse(className) ++
-      recompiledRelations.classes.reverse(className)
-    if (srcs.isEmpty)
-      sys.error(s"No entry for class $className in classes relation.")
-    else
-      srcs
-  }
-
-  def isDefinedInScalaSrc(className: String): Boolean = {
-    toSrcFile(className).forall(srcFile => APIUtil.isScalaSourceName(srcFile.getName))
-  }
-
-  /**
-   * Maps both forward and backward parts of passed relation using toSrcFile method.
-   *
-   * This method should be used to map internal (within single project) class
-   * dependencies to source dependencies.
-   */
-  def convertToSrcDependency(classDependency: Relation[String, String]): Relation[File, File] = {
-    def convertRelationMap(m: Map[String, Set[String]]): Map[File, Set[File]] = {
-      val pairs = m.toSeq.flatMap {
-        case (key, values) =>
-          val keySrcs = toSrcFile(key)
-          val valueSrcs = values.flatMap(toSrcFile)
-          keySrcs.toSeq.flatMap(keySrc => valueSrcs.toSeq.map(keySrc -> _))
-      }
-      aggregateValues(pairs)
-    }
-    val forwardMap = convertRelationMap(classDependency.forwardMap)
-    val reverseMap = convertRelationMap(classDependency.reverseMap)
-    Relation.make(forwardMap, reverseMap)
-  }
-
-  /**
-   * Converts class dependency into source-class dependency using toSrcFile method.
-   *
-   * This method should be used to convert internal class->external class dependencies into
-   * internal source->external class dependencies.
- */ - def convertToExternalSrcDependency( - classDependency: Relation[String, String]): Relation[File, String] = { - def convertMapKeys(m: Map[String, Set[String]]): Map[File, Set[String]] = { - val pairs = m.toSeq.flatMap { - case (key, values) => - val keySrcs = toSrcFile(key) - keySrcs.toSeq.flatMap(keySrc => values.toSeq.map(keySrc -> _)) - } - aggregateValues(pairs) - } - def convertMapValues(m: Map[String, Set[String]]): Map[String, Set[File]] = - m.mapValues(_.flatMap(toSrcFile)) - val forwardMap = convertMapKeys(classDependency.forwardMap) - val reverseMap = convertMapValues(classDependency.reverseMap) - Relation.make(forwardMap, reverseMap) - } - - private def aggregateValues[T, U](s: Seq[(T, U)]): Map[T, Set[U]] = { - s.foldLeft(Map.empty[T, Set[U]].withDefaultValue(Set.empty)) { - case (acc, (k, v)) => acc.updated(k, acc(k) + v) - } - } - -} diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index 4e70ff987a..4c743ea98b 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -11,7 +11,8 @@ package inc import java.io.File -import xsbti.api.{ AnalyzedClass, DefinitionType } +import xsbt.api.APIUtil +import xsbti.api.AnalyzedClass import xsbti.compile.{ Changes, CompileAnalysis, @@ -33,8 +34,6 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: val wrappedLog = new Incremental.PrefixingLogger("[inv] ")(log) def debug(s: => String) = if (options.relationsDebug) wrappedLog.debug(s) else () - // TODO: the Analysis for the last successful compilation should get returned + Boolean indicating success - // TODO: full external name changes, scopeInvalidations @tailrec final def cycle(invalidatedRaw: Set[String], modifiedSrcs: Set[File], allSources: Set[File], @@ -76,14 +75,13 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: debug("\nChanges:\n" + incChanges) val transitiveStep = options.transitiveStep - val classToSourceMapper = new ClassToSourceMapper(previous.relations, current.relations) val incrementallyInvalidated = invalidateIncremental( current.relations, - current.apis, incChanges, recompiledClasses, cycleNum >= transitiveStep, - classToSourceMapper.isDefinedInScalaSrc) + IncrementalCommon.comesFromScalaSource(previous.relations, Some(current.relations)) + ) val allInvalidated = if (lookup.shouldDoIncrementalCompilation(incrementallyInvalidated, current)) incrementallyInvalidated @@ -260,7 +258,6 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: } def invalidateIncremental(previous: Relations, - apis: APIs, changes: APIChanges, recompiledClasses: Set[String], transitive: Boolean, @@ -322,11 +319,11 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: val invalidatedClasses = removedClasses ++ dependentOnRemovedClasses ++ modifiedClasses val byProduct = changes.removedProducts.flatMap(previous.produced) val byBinaryDep = changes.binaryDeps.flatMap(previous.usesLibrary) - val classToSrc = new ClassToSourceMapper(previous, previous) - val byExtSrcDep = { - //changes.external.modified.flatMap(previous.usesExternal) // ++ scopeInvalidations - invalidateByAllExternal(previous, changes.external, classToSrc.isDefinedInScalaSrc) - } + val byExtSrcDep = invalidateByAllExternal( + previous, + changes.external, + 
IncrementalCommon.comesFromScalaSource(previous)
+    )
 
     checkAbsolute(addedSrcs.toList)
 
@@ -502,3 +499,22 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options:
     xs.toSet
   }
 }
+
+object IncrementalCommon {
+
+  /** Tells whether a given class name comes from a Scala source file or not by inspecting relations. */
+  def comesFromScalaSource(
+      previous: Relations,
+      current: Option[Relations] = None
+  )(className: String): Boolean = {
+    val previousSourcesWithClassName = previous.classes.reverse(className)
+    val newSourcesWithClassName = current.map(_.classes.reverse(className)).getOrElse(Set.empty)
+    if (previousSourcesWithClassName.isEmpty && newSourcesWithClassName.isEmpty)
+      sys.error(s"Fatal Zinc error: no entry for class $className in classes relation.")
+    else {
+      // Make sure that the dependency cannot come from a Java source
+      previousSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName)) &&
+      newSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName))
+    }
+  }
+}

From 396713b4638f439bb3fafaf29cc8cacff4830a39 Mon Sep 17 00:00:00 2001
From: Jorge Vicente Cantero
Date: Sat, 25 Aug 2018 22:13:09 +0200
Subject: [PATCH 2/6] Reorganise code in `IncrementalCommon` and document it

While I was trying to implement some improvements here, I realized that
the current code was hard to read and reuse. I also struggled to
understand the nitty-gritty of the algorithm, as the control flow was
not clear and the order of functions was confusing.

This commit fixes that by reorganising the code to allow for more reuse
(functions in `IncrementalCommon`'s companion), restructuring the
outline of `IncrementalCommon` with clearer names and structure, and
documenting the most important parts of the code with implementation
and contract details.
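
As a reading aid, here is a self-contained sketch of the transitive
invalidation walk that this reorganisation exposes as `transitiveDeps`
in `IncrementalCommon`'s companion (the reverse-dependency graph below
is made up for illustration; the real helper also logs every node it
includes):

    object TransitiveDepsSketch {
      // Walk a reverse-dependency graph from an initial set of invalidated
      // classes and collect everything that is transitively reachable.
      def transitiveDeps[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): Set[T] = {
        val visited = collection.mutable.HashSet.empty[T]
        def visit(to: T): Unit = if (visited.add(to)) dependencies(to).foreach(visit)
        nodes.foreach { start =>
          visited += start
          dependencies(start).foreach(visit)
        }
        visited.toSet
      }

      def main(args: Array[String]): Unit = {
        // Hypothetical graph: "A" -> Set("B") means that B depends on A.
        val reverseDeps = Map(
          "A" -> Set("B"),
          "B" -> Set("C"),
          "C" -> Set.empty[String]
        ).withDefaultValue(Set.empty[String])
        // Invalidating A transitively invalidates B and C.
        println(transitiveDeps(Set("A"))(reverseDeps)) // e.g. Set(A, B, C)
      }
    }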
--- .../scala/sbt/internal/inc/Incremental.scala | 16 +- .../sbt/internal/inc/IncrementalCommon.scala | 921 +++++++++++------- .../internal/inc/IncrementalNameHashing.scala | 86 +- .../main/scala/sbt/internal/inc/Lookup.scala | 5 + 4 files changed, 625 insertions(+), 403 deletions(-) diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index f07ae95c18..533f032f7e 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -63,7 +63,7 @@ object Incremental { val previous = previous0 match { case a: Analysis => a } val incremental: IncrementalCommon = new IncrementalNameHashing(log, options) - val initialChanges = incremental.changedInitial(sources, previous, current, lookup) + val initialChanges = incremental.detectInitialChanges(sources, previous, current, lookup) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray val modifiedClasses = initialChanges.external.allModified.toArray @@ -118,15 +118,11 @@ object Incremental { private[inc] def apiDebug(options: IncOptions): Boolean = options.apiDebug || java.lang.Boolean.getBoolean(apiDebugProp) - private[sbt] def prune(invalidatedSrcs: Set[File], previous: CompileAnalysis): Analysis = - prune(invalidatedSrcs, previous, ClassFileManager.deleteImmediately) - - private[sbt] def prune(invalidatedSrcs: Set[File], - previous0: CompileAnalysis, - classfileManager: XClassFileManager): Analysis = { - val previous = previous0 match { case a: Analysis => a } - classfileManager.delete(invalidatedSrcs.flatMap(previous.relations.products).toArray) - previous -- invalidatedSrcs + private[sbt] def prune(invalidatedSrcs: Set[File], previous0: CompileAnalysis): Analysis = { + val previous = previous0.asInstanceOf[Analysis] + IncrementalCommon.pruneClassFilesOfInvalidations(invalidatedSrcs, + previous, + ClassFileManager.deleteImmediately) } private[this] def manageClassfiles[T](options: IncOptions)(run: XClassFileManager => T): T = { diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index 4c743ea98b..c22f43a206 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -11,321 +11,348 @@ package inc import java.io.File +import sbt.util.Logger import xsbt.api.APIUtil import xsbti.api.AnalyzedClass -import xsbti.compile.{ - Changes, - CompileAnalysis, - DependencyChanges, - IncOptions, - ClassFileManager => XClassFileManager -} +import xsbti.compile.{ DependencyChanges, IncOptions, ClassFileManager => XClassFileManager } import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } import scala.annotation.tailrec -private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: IncOptions) { - - // setting the related system property to true will skip checking that the class name - // still comes from the same classpath entry. This can workaround bugs in classpath construction, - // such as the currently problematic -javabootclasspath. This is subject to removal at any time. 
-  private[this] def skipClasspathLookup = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")
-
-  val wrappedLog = new Incremental.PrefixingLogger("[inv] ")(log)
-  def debug(s: => String) = if (options.relationsDebug) wrappedLog.debug(s) else ()
-
-  @tailrec final def cycle(invalidatedRaw: Set[String],
-                           modifiedSrcs: Set[File],
-                           allSources: Set[File],
-                           binaryChanges: DependencyChanges,
-                           lookup: ExternalLookup,
-                           previous: Analysis,
-                           doCompile: (Set[File], DependencyChanges) => Analysis,
-                           classfileManager: XClassFileManager,
-                           cycleNum: Int): Analysis =
-    if (invalidatedRaw.isEmpty && modifiedSrcs.isEmpty)
-      previous
+/**
+ * Defines the core logic to compile incrementally and apply the class invalidation after
+ * every compiler run. This class defines only the core logic and the algorithm-specific
+ * bits are implemented in its subclasses.
+ *
+ * In the past, there were several incremental compiler strategies. Now, there's only
+ * one, the default [[IncrementalNameHashing]] strategy that invalidates classes based
+ * on name hashes.
+ *
+ * @param log An instance of a logger.
+ * @param options An instance of incremental compiler options.
+ */
+private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptions) {
+  // Work around bugs in classpath handling such as the "currently" problematic -javabootclasspath
+  private[this] def enableShallowLookup: Boolean =
+    java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup")
+
+  private[this] final val wrappedLog = new Incremental.PrefixingLogger("[inv] ")(log)
+  def debug(s: => String): Unit = if (options.relationsDebug) wrappedLog.debug(s) else ()
+
+  /**
+   * Compiles a project incrementally, running as many cycles as required. This logic is the
+   * starting point of the incremental compiler and the place where all the invalidation logic
+   * happens.
+   *
+   * The current logic merges the compilation step and the analysis step by making them
+   * execute sequentially. There are cases where, for performance reasons, build tools and
+   * users of Zinc may be interested in separating the two. If this is the case, users need
+   * to reimplement this logic by copy-pasting it and relying on the utils defined
+   * in `IncrementalCommon`.
+   *
+   * @param invalidatedClasses The invalidated classes either initially or by a previous cycle.
+   * @param initialChangedSources The sources initially changed by the user, empty on later cycles.
+   * @param allSources All the sources defined in the project and compiled in the first iteration.
+   * @param binaryChanges The initially detected changes derived from [[InitialChanges]].
+   * @param lookup The lookup instance to query classpath and analysis information.
+   * @param previous The last analysis file known of this project.
+   * @param doCompile A function that compiles a project and returns an analysis file.
+   * @param classfileManager The manager that takes care of class files in compilation.
+   * @param cycleNum The counter of incremental compiler cycles.
+   * @return A fresh analysis file after all the incremental compiles have been run.
+   */
+  @tailrec final def cycle(
+      invalidatedClasses: Set[String],
+      initialChangedSources: Set[File],
+      allSources: Set[File],
+      binaryChanges: DependencyChanges,
+      lookup: ExternalLookup,
+      previous: Analysis,
+      doCompile: (Set[File], DependencyChanges) => Analysis,
+      classfileManager: XClassFileManager,
+      cycleNum: Int
+  ): Analysis = {
+    if (invalidatedClasses.isEmpty && initialChangedSources.isEmpty) previous
     else {
-      val invalidatedPackageObjects =
-        this.invalidatedPackageObjects(invalidatedRaw, previous.relations, previous.apis)
-      if (invalidatedPackageObjects.nonEmpty)
-        log.debug(s"Invalidated package objects: $invalidatedPackageObjects")
-      val withPackageObjects = invalidatedRaw ++ invalidatedPackageObjects
-      val invalidatedClasses = withPackageObjects
-
-      val (current, recompiledRecently) = recompileClasses(invalidatedClasses,
-                                                           modifiedSrcs,
-                                                           allSources,
-                                                           binaryChanges,
-                                                           previous,
-                                                           doCompile,
-                                                           classfileManager)
-
-      // If we recompiled all sources no need to check what is changed since there is nothing more to recompile
+      // Compute all the invalidated classes by aggregating invalidated package objects
+      val invalidatedByPackageObjects =
+        invalidatedPackageObjects(invalidatedClasses, previous.relations, previous.apis)
+      val classesToRecompile = invalidatedClasses ++ invalidatedByPackageObjects
+
+      // Compute which source files are mapped to the invalidated classes and recompile them
+      val invalidatedSources =
+        mapInvalidationsToSources(classesToRecompile, initialChangedSources, allSources, previous)
+      val (current, recompiledRecently) =
+        recompileClasses(invalidatedSources, binaryChanges, previous, doCompile, classfileManager)
+
+      // Return immediate analysis as all sources have been recompiled
      if (recompiledRecently == allSources) current
      else {
-        // modifiedSrc have to be mapped to class names both of previous and current analysis because classes might be
-        // removed (it's handled by `previous`) or added (it's handled by `current`) or renamed (it's handled by both)
-        val recompiledClasses = invalidatedClasses ++
-          modifiedSrcs.flatMap(previous.relations.classNames) ++ modifiedSrcs.flatMap(
-          current.relations.classNames)
-
-        val incChanges =
-          changedIncremental(recompiledClasses, previous.apis.internalAPI, current.apis.internalAPI)
-
-        debug("\nChanges:\n" + incChanges)
-        val transitiveStep = options.transitiveStep
-        val incrementallyInvalidated = invalidateIncremental(
+        val recompiledClasses: Set[String] = {
+          // Represents classes detected as changed externally and internally (by a previous cycle)
+          classesToRecompile ++
+            // Maps the changed sources by the user to class names we can count as invalidated
+            initialChangedSources.flatMap(previous.relations.classNames) ++
+            initialChangedSources.flatMap(current.relations.classNames)
+        }
+
+        val newApiChanges =
+          detectAPIChanges(recompiledClasses, previous.apis.internalAPI, current.apis.internalAPI)
+        debug("\nChanges:\n" + newApiChanges)
+        val nextInvalidations = invalidateAfterInternalCompilation(
          current.relations,
-          incChanges,
+          newApiChanges,
          recompiledClasses,
-          cycleNum >= transitiveStep,
+          cycleNum >= options.transitiveStep,
          IncrementalCommon.comesFromScalaSource(previous.relations, Some(current.relations))
        )
-        val allInvalidated =
-          if (lookup.shouldDoIncrementalCompilation(incrementallyInvalidated, current))
-            incrementallyInvalidated
-          else Set.empty[String]
-
-        cycle(allInvalidated,
-              Set.empty,
-              allSources,
-              emptyChanges,
-              lookup,
-              current,
-              doCompile,
-              classfileManager,
-              cycleNum + 1)
+
+        val continue = lookup.shouldDoIncrementalCompilation(nextInvalidations, current)
+        cycle(
+          if (continue) nextInvalidations else Set.empty,
+          Set.empty,
+          allSources,
+          IncrementalCommon.emptyChanges,
+          lookup,
+          current,
+          doCompile,
+          classfileManager,
+          cycleNum + 1
+        )
      }
    }
+  }
 
-  private[this] def recompileClasses(classes: Set[String],
-                                     modifiedSrcs: Set[File],
-                                     allSources: Set[File],
-                                     binaryChanges: DependencyChanges,
-                                     previous: Analysis,
-                                     doCompile: (Set[File], DependencyChanges) => Analysis,
-                                     classfileManager: XClassFileManager): (Analysis, Set[File]) = {
-    val invalidatedSources = classes.flatMap(previous.relations.definesClass) ++ modifiedSrcs
-    val invalidatedSourcesForCompilation = expand(invalidatedSources, allSources)
-    val pruned = Incremental.prune(invalidatedSourcesForCompilation, previous, classfileManager)
-    debug("********* Pruned: \n" + pruned.relations + "\n*********")
+  def mapInvalidationsToSources(
+      invalidatedClasses: Set[String],
+      aggregateSources: Set[File],
+      allSources: Set[File],
+      previous: Analysis
+  ): Set[File] = {
+    def expand(invalidated: Set[File]): Set[File] = {
+      val recompileAllFraction = options.recompileAllFraction
+      if (invalidated.size <= allSources.size * recompileAllFraction) invalidated
+      else {
+        log.debug(
+          s"Recompiling all sources: number of invalidated sources > ${recompileAllFraction * 100.00}% of all sources")
+        allSources ++ invalidated // Union because `all` doesn't contain removed sources
+      }
+    }
 
-    val fresh = doCompile(invalidatedSourcesForCompilation, binaryChanges)
-    // For javac as class files are added to classfileManager as they are generated, so
-    // this step is redundant. For scalac this is still necessary. TODO: do the same for scalac.
-    classfileManager.generated(fresh.relations.allProducts.toArray)
-    debug("********* Fresh: \n" + fresh.relations + "\n*********")
-    val merged = pruned ++ fresh //.copy(relations = pruned.relations ++ fresh.relations, apis = pruned.apis ++ fresh.apis)
-    debug("********* Merged: \n" + merged.relations + "\n*********")
-    (merged, invalidatedSourcesForCompilation)
+    expand(invalidatedClasses.flatMap(previous.relations.definesClass) ++ aggregateSources)
   }
-  private[this] def emptyChanges: DependencyChanges = new DependencyChanges {
-    val modifiedBinaries = new Array[File](0)
-    val modifiedClasses = new Array[String](0)
-    def isEmpty = true
-  }
-  private[this] def expand(invalidated: Set[File], all: Set[File]): Set[File] = {
-    val recompileAllFraction = options.recompileAllFraction
-    if (invalidated.size > all.size * recompileAllFraction) {
-      log.debug(
-        "Recompiling all " + all.size + " sources: invalidated sources (" + invalidated.size + ") exceeded " + (recompileAllFraction * 100.0) + "% of all sources")
-      all ++ invalidated // need the union because all doesn't contain removed sources
-    } else invalidated
-  }
 
+  def recompileClasses(
+      sources: Set[File],
+      binaryChanges: DependencyChanges,
+      previous: Analysis,
+      doCompile: (Set[File], DependencyChanges) => Analysis,
+      classfileManager: XClassFileManager
+  ): (Analysis, Set[File]) = {
+    val pruned =
+      IncrementalCommon.pruneClassFilesOfInvalidations(sources, previous, classfileManager)
+    debug("********* Pruned: \n" + pruned.relations + "\n*********")
+    val fresh = doCompile(sources, binaryChanges)
+    debug("********* Fresh: \n" + fresh.relations + "\n*********")
+    val merged = pruned ++ fresh
+
+    /* This is required for both Scala compilation and forked Java compilation, despite
+     * being redundant for the most common Java compilation (using the local compiler). */
+    classfileManager.generated(fresh.relations.allProducts.toArray)
 
-  protected def invalidatedPackageObjects(invalidatedClasses: Set[String],
-                                          relations: Relations,
-                                          apis: APIs): Set[String]
-
-  /**
-   * Logs API changes using debug-level logging. The API are obtained using the APIDiff class.
-   *
-   * NOTE: This method creates a new APIDiff instance on every invocation.
-   */
-  private def logApiChanges(apiChanges: Iterable[APIChange],
-                            oldAPIMapping: String => AnalyzedClass,
-                            newAPIMapping: String => AnalyzedClass): Unit = {
-    val contextSize = options.apiDiffContextSize
-    try {
-      val wrappedLog = new Incremental.PrefixingLogger("[diff] ")(log)
-      val apiDiff = new APIDiff
-      apiChanges foreach {
-        case APIChangeDueToMacroDefinition(src) =>
-          wrappedLog.debug(
-            s"Public API is considered to be changed because $src contains a macro definition.")
-        case apiChange: NamesChange =>
-          val src = apiChange.modifiedClass
-          val oldApi = oldAPIMapping(src)
-          val newApi = newAPIMapping(src)
-          val apiUnifiedPatch =
-            apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize)
-          wrappedLog.debug(s"Detected a change in a public API ($src):\n$apiUnifiedPatch")
-      }
-    } catch {
-      case e: Exception =>
-        log.error("An exception has been thrown while trying to dump an api diff.")
-        log.trace(e)
-    }
+    debug("********* Merged: \n" + merged.relations + "\n*********")
+    (merged, sources)
   }
 
   /**
-   * Accepts the classes that were recompiled during the last step and functions
-   * providing the API before and after the last step. The functions should return
-   * an empty API if the class did not/does not exist.
+   * Detects the API changes of `recompiledClasses`.
+   *
+   * @param recompiledClasses The list of classes that were recompiled in this round.
+   * @param oldAPI A function that returns the previous class associated with a given class name.
+   * @param newAPI A function that returns the current class associated with a given class name.
+   * @return A list of API changes of the given two analyzed classes.
    */
-  def changedIncremental(
-      lastClasses: collection.Set[String],
+  def detectAPIChanges(
+      recompiledClasses: collection.Set[String],
      oldAPI: String => AnalyzedClass,
      newAPI: String => AnalyzedClass
  ): APIChanges = {
-    val apiChanges = lastClasses.flatMap { className =>
-      sameClass(className, oldAPI(className), newAPI(className))
+    def classDiff(className: String, a: AnalyzedClass, b: AnalyzedClass): Option[APIChange] = {
+      if (a.compilationTimestamp() == b.compilationTimestamp() && (a.apiHash == b.apiHash)) None
+      else {
+        val hasMacro = a.hasMacro || b.hasMacro
+        if (hasMacro && IncOptions.getRecompileOnMacroDef(options)) {
+          Some(APIChangeDueToMacroDefinition(className))
+        } else findAPIChange(className, a, b)
+      }
    }
+    val apiChanges = recompiledClasses.flatMap(name => classDiff(name, oldAPI(name), newAPI(name)))
    if (Incremental.apiDebug(options) && apiChanges.nonEmpty) {
      logApiChanges(apiChanges, oldAPI, newAPI)
    }
-
    new APIChanges(apiChanges)
  }
 
-  def sameClass(className: String, a: AnalyzedClass, b: AnalyzedClass): Option[APIChange] = {
-    // Clients of a modified class (ie, one that doesn't satisfy `shortcutSameClass`) containing macros must be recompiled.
-    val hasMacro = a.hasMacro || b.hasMacro
-    if (shortcutSameClass(a, b)) {
-      None
-    } else {
-      if (hasMacro && IncOptions.getRecompileOnMacroDef(options)) {
-        Some(APIChangeDueToMacroDefinition(className))
-      } else sameAPI(className, a, b)
-    }
-  }
-
-  protected def sameAPI(className: String, a: AnalyzedClass, b: AnalyzedClass): Option[APIChange]
-
-  def shortcutSameClass(a: AnalyzedClass, b: AnalyzedClass): Boolean =
-    a.compilationTimestamp() == b.compilationTimestamp() && (a.apiHash == b.apiHash)
-
-  def changedInitial(sources: Set[File],
-                     previousAnalysis0: CompileAnalysis,
-                     current: ReadStamps,
-                     lookup: Lookup)(implicit equivS: Equiv[XStamp]): InitialChanges = {
-    val previousAnalysis = previousAnalysis0 match { case a: Analysis => a }
+  /**
+   * Detects the initial changes with respect to the previous compilation, before the first
+   * compiler iteration of this run starts.
+   *
+   * This method only requires the sources to compile, the previous analysis and the
+   * stamps reader to be able to populate [[InitialChanges]] with all the data
+   * structures that will be used for the first incremental compiler cycle.
+   *
+   * The logic of this method takes care of the following tasks:
+   *
+   * 1. Detecting the sources that changed between the past and present compiler iteration.
+   * 2. Detecting the removed products based on the stamps from the previous and current products.
+   * 3. Detecting the class names changed in a library (classpath entry such as jars or analysis).
+   * 4. Computing the API changes in dependent and external projects.
+   *
+   * @param sources The sources to compile.
+   * @param previousAnalysis The analysis from the previous compilation.
+   * @param stamps The stamps reader to get stamps for sources, products and binaries.
+   * @param lookup The lookup instance that provides hooks and inspects the classpath.
+   * @param equivS A function to compare stamps.
+   * @return An instance of [[InitialChanges]].
+ */ + def detectInitialChanges( + sources: Set[File], + previousAnalysis: Analysis, + stamps: ReadStamps, + lookup: Lookup + )(implicit equivS: Equiv[XStamp]): InitialChanges = { + import IncrementalCommon.{ isBinaryModified, findExternalAnalyzedClass } val previous = previousAnalysis.stamps val previousRelations = previousAnalysis.relations - val previousAPIs = previousAnalysis.apis - val srcChanges = lookup.changedSources(previousAnalysis).getOrElse { - def sourceModified(f: File): Boolean = - !equivS.equiv(previous.source(f), current.source(f)) - changes(previous.allSources.toSet, sources, sourceModified _) + val sourceChanges = lookup.changedSources(previousAnalysis).getOrElse { + val previousSources = previous.allSources.toSet + new UnderlyingChanges[File] { + private val inBoth = previousSources & sources + val removed = previousSources -- inBoth + val added = sources -- inBoth + val (changed, unmodified) = + inBoth.partition(f => !equivS.equiv(previous.source(f), stamps.source(f))) + } } val removedProducts = lookup.removedProducts(previousAnalysis).getOrElse { - previous.allProducts - .filter(p => !equivS.equiv(previous.product(p), current.product(p))) - .toSet + previous.allProducts.filter(p => !equivS.equiv(previous.product(p), stamps.product(p))).toSet } - val binaryDepChanges = lookup.changedBinaries(previousAnalysis).getOrElse { - previous.allBinaries - .filter(externalBinaryModified(lookup, previous, current, previousRelations)) - .toSet + val changedBinaries: Set[File] = lookup.changedBinaries(previousAnalysis).getOrElse { + val detectChange = + isBinaryModified(enableShallowLookup, lookup, previous, stamps, previousRelations, log) + previous.allBinaries.filter(detectChange).toSet } - val incrementalExtApiChanges = changedIncremental(previousAPIs.allExternals, - previousAPIs.externalAPI, - currentExternalAPI(lookup)) - val extApiChanges = - if (lookup.shouldDoIncrementalCompilation(incrementalExtApiChanges.allModified.toSet, - previousAnalysis)) incrementalExtApiChanges - else new APIChanges(Nil) - - InitialChanges(srcChanges, removedProducts, binaryDepChanges, extApiChanges) - } + val externalApiChanges: APIChanges = { + val incrementalExternalChanges = { + val previousAPIs = previousAnalysis.apis + val externalFinder = findExternalAnalyzedClass(lookup) _ + detectAPIChanges(previousAPIs.allExternals, previousAPIs.externalAPI, externalFinder) + } - def changes(previous: Set[File], - current: Set[File], - existingModified: File => Boolean): Changes[File] = { - new UnderlyingChanges[File] { - private val inBoth = previous & current - val removed = previous -- inBoth - val added = current -- inBoth - val (changed, unmodified) = inBoth.partition(existingModified) + val changedExternalClassNames = incrementalExternalChanges.allModified.toSet + if (!lookup.shouldDoIncrementalCompilation(changedExternalClassNames, previousAnalysis)) + new APIChanges(Nil) + else incrementalExternalChanges } + + InitialChanges(sourceChanges, removedProducts, changedBinaries, externalApiChanges) } - def invalidateIncremental(previous: Relations, - changes: APIChanges, - recompiledClasses: Set[String], - transitive: Boolean, - isScalaClass: String => Boolean): Set[String] = { - val dependsOnClass = previous.memberRef.internal.reverse _ - val propagated: Set[String] = - if (transitive) + /** + * Invalidates classes internally to a project after an incremental compiler run. + * + * @param relations The relations produced by the immediate previous incremental compiler cycle. 
+ * @param changes The changes produced by the immediate previous incremental compiler cycle. + * @param recompiledClasses The immediately recompiled class names. + * @param invalidateTransitively A flag that tells whether transitive invalidations should be + * applied. This flag is only enabled when there have been more + * than `incOptions.transitiveStep` incremental runs. + * @param isScalaClass A function to know if a class name comes from a Scala source file or not. + * @return A list of invalidated class names for the next incremental compiler run. + */ + def invalidateAfterInternalCompilation( + relations: Relations, + changes: APIChanges, + recompiledClasses: Set[String], + invalidateTransitively: Boolean, + isScalaClass: String => Boolean + ): Set[String] = { + val firstClassInvalidation: Set[String] = { + if (invalidateTransitively) { + // Invalidate by brute force (normally happens when we've done more than 3 incremental runs) + val dependsOnClass = relations.memberRef.internal.reverse _ transitiveDependencies(dependsOnClass, changes.allModified.toSet) - else - invalidateIntermediate(previous, changes, isScalaClass) + } else { + includeTransitiveInitialInvalidations( + changes.allModified.toSet, + changes.apiChanges.flatMap(invalidateClassesInternally(relations, _, isScalaClass)).toSet, + findClassDependencies(_, relations) + ) + } + } - val dups = invalidateDuplicates(previous) - if (dups.nonEmpty) - log.debug("Invalidated due to generated class file collision: " + dups) + // Invalidate classes linked with a class file that is produced by more than one source file + val secondClassInvalidation = IncrementalCommon.invalidateNamesProducingSameClassFile(relations) + if (secondClassInvalidation.nonEmpty) + log.debug(s"Invalidated due to generated class file collision: ${secondClassInvalidation}") - val inv: Set[String] = propagated ++ dups - val newlyInvalidated = (inv -- recompiledClasses) ++ dups - log.debug( - "All newly invalidated classes after taking into account (previously) recompiled classes:" + newlyInvalidated) - if (newlyInvalidated.isEmpty) Set.empty else inv + val newInvalidations = (firstClassInvalidation -- recompiledClasses) ++ secondClassInvalidation + if (newInvalidations.isEmpty) { + log.debug("No classes were invalidated.") + Set.empty + } else { + val allInvalidatedClasses: Set[String] = firstClassInvalidation ++ secondClassInvalidation + log.debug(s"Invalidated classes: ${allInvalidatedClasses.mkString(", ")}") + allInvalidatedClasses + } } - /** Invalidate all classes that claim to produce the same class file as another class. */ - def invalidateDuplicates(merged: Relations): Set[String] = - merged.srcProd.reverseMap.flatMap { - case (_, sources) => - if (sources.size > 1) sources.flatMap(merged.classNames) else Nil - }.toSet - /** - * Returns the transitive class dependencies of `initial`. + * Returns the transitive class dependencies of an `initial` set of class names. + * * Because the intermediate steps do not pull in cycles, this result includes the initial classes * if they are part of a cycle containing newly invalidated classes. 
*/ def transitiveDependencies(dependsOnClass: String => Set[String], initial: Set[String]): Set[String] = { - val transitiveWithInitial = transitiveDeps(initial)(dependsOnClass) - val transitivePartial = includeInitialCond(initial, transitiveWithInitial, dependsOnClass) + val transitiveWithInitial = IncrementalCommon.transitiveDeps(initial, log)(dependsOnClass) + val transitivePartial = + includeTransitiveInitialInvalidations(initial, transitiveWithInitial, dependsOnClass) log.debug("Final step, transitive dependencies:\n\t" + transitivePartial) transitivePartial } /** Invalidates classes and sources based on initially detected 'changes' to the sources, products, and dependencies.*/ def invalidateInitial(previous: Relations, changes: InitialChanges): (Set[String], Set[File]) = { - def classNames(srcs: Set[File]): Set[String] = - srcs.flatMap(previous.classNames) + def classNames(srcs: Set[File]): Set[String] = srcs.flatMap(previous.classNames) def toImmutableSet(srcs: java.util.Set[File]): Set[File] = { import scala.collection.JavaConverters.asScalaIteratorConverter srcs.iterator().asScala.toSet } val srcChanges = changes.internalSrc + val removedSrcs = toImmutableSet(srcChanges.getRemoved) val modifiedSrcs = toImmutableSet(srcChanges.getChanged) val addedSrcs = toImmutableSet(srcChanges.getAdded) - val removedSrcs = toImmutableSet(srcChanges.getRemoved) + IncrementalCommon.checkAbsolute(addedSrcs) + val removedClasses = classNames(removedSrcs) val dependentOnRemovedClasses = removedClasses.flatMap(previous.memberRef.internal.reverse) val modifiedClasses = classNames(modifiedSrcs) val invalidatedClasses = removedClasses ++ dependentOnRemovedClasses ++ modifiedClasses + val byProduct = changes.removedProducts.flatMap(previous.produced) val byBinaryDep = changes.binaryDeps.flatMap(previous.usesLibrary) - val byExtSrcDep = invalidateByAllExternal( - previous, - changes.external, - IncrementalCommon.comesFromScalaSource(previous) - ) - - checkAbsolute(addedSrcs.toList) + val byExtSrcDep = { + // Invalidate changes + val isScalaSource = IncrementalCommon.comesFromScalaSource(previous) _ + changes.external.apiChanges.iterator.flatMap { externalAPIChange => + invalidateClassesExternally(previous, externalAPIChange, isScalaSource) + }.toSet + } val allInvalidatedClasses = invalidatedClasses ++ byExtSrcDep val allInvalidatedSourcefiles = addedSrcs ++ modifiedSrcs ++ byProduct ++ byBinaryDep @@ -349,172 +376,342 @@ private[inc] abstract class IncrementalCommon(val log: sbt.util.Logger, options: (allInvalidatedClasses, allInvalidatedSourcefiles) } - private[this] def checkAbsolute(addedSources: List[File]): Unit = - if (addedSources.nonEmpty) { - addedSources.filterNot(_.isAbsolute) match { - case first :: more => - val fileStrings = more match { - case Nil => first.toString - case x :: Nil => s"$first and $x" - case _ => s"$first and ${more.size} others" - } - sys.error( - s"The incremental compiler requires absolute sources, but some were relative: $fileStrings") - case Nil => - } - } - def invalidateByAllExternal(relations: Relations, - externalAPIChanges: APIChanges, - isScalaClass: String => Boolean): Set[String] = { - (externalAPIChanges.apiChanges.flatMap { externalAPIChange => - invalidateByExternal(relations, externalAPIChange, isScalaClass) - }).toSet + /** + * Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not + * included in a cycle with newly invalidated classes. 
+ */ + def invalidateClasses(previous: Relations, + changes: APIChanges, + isScalaClass: String => Boolean): Set[String] = { + includeTransitiveInitialInvalidations( + changes.allModified.toSet, + changes.apiChanges.flatMap(invalidateClassesInternally(previous, _, isScalaClass)).toSet, + findClassDependencies(_, previous) + ) } - /** Classes invalidated by `external` classes in other projects according to the previous `relations`. */ - protected def invalidateByExternal(relations: Relations, - externalAPIChange: APIChange, - isScalaClass: String => Boolean): Set[String] + /** + * Conditionally include initial classes that are dependencies of newly invalidated classes. + * Initial classes included in this step can be because of a cycle, but not always. + */ + /** + * Returns the invalidations that are the result of the `currentInvalidations` + the + * `previousInvalidations` that depend transitively on `currentInvalidations`. + * + * We do this step on every incremental compiler iteration of a project where + * `previousInvalidations` typically refers to the classes invalidated in the + * previous incremental compiler cycle. + * + * @param previousInvalidations + * @param currentInvalidations + * @param findClassDependencies + * @return + */ + private[this] def includeTransitiveInitialInvalidations( + previousInvalidations: Set[String], + currentInvalidations: Set[String], + findClassDependencies: String => Set[String] + ): Set[String] = { + val newInvalidations = currentInvalidations -- previousInvalidations + log.debug("New invalidations:\n\t" + newInvalidations) + + val newTransitiveInvalidations = + IncrementalCommon.transitiveDeps(newInvalidations, log)(findClassDependencies) + // Include the initial invalidations that are present in the transitive new invalidations + val includedInitialInvalidations = newTransitiveInvalidations & previousInvalidations - /** Intermediate invalidation step: steps after the initial invalidation, but before the final transitive invalidation. */ - def invalidateIntermediate(relations: Relations, - changes: APIChanges, - isScalaClass: String => Boolean): Set[String] = { - invalidateClasses(relations, changes, isScalaClass) + log.debug( + "Previously invalidated, but (transitively) depend on new invalidations:\n\t" + includedInitialInvalidations) + newInvalidations ++ includedInitialInvalidations } /** - * Invalidates inheritance dependencies, transitively. Then, invalidates direct dependencies. Finally, excludes initial dependencies not - * included in a cycle with newly invalidated classes. + * Logs API changes using debug-level logging. The API are obtained using the APIDiff class. + * + * NOTE: This method creates a new APIDiff instance on every invocation. 
*/ - private def invalidateClasses(relations: Relations, - changes: APIChanges, - isScalaClass: String => Boolean): Set[String] = { - val initial = changes.allModified.toSet - val all = (changes.apiChanges flatMap { change => - invalidateClass(relations, change, isScalaClass) - }).toSet - includeInitialCond(initial, all, allDeps(relations)) + private def logApiChanges( + apiChanges: Iterable[APIChange], + oldAPIMapping: String => AnalyzedClass, + newAPIMapping: String => AnalyzedClass + ): Unit = { + val contextSize = options.apiDiffContextSize + try { + val wrappedLog = new Incremental.PrefixingLogger("[diff] ")(log) + val apiDiff = new APIDiff + apiChanges foreach { + case APIChangeDueToMacroDefinition(src) => + wrappedLog.debug(s"Detected API change because $src contains a macro definition.") + case TraitPrivateMembersModified(modifiedClass) => + wrappedLog.debug(s"Detect change in private members of trait ${modifiedClass}.") + case apiChange: NamesChange => + val src = apiChange.modifiedClass + val oldApi = oldAPIMapping(src) + val newApi = newAPIMapping(src) + val apiUnifiedPatch = + apiDiff.generateApiDiff(src.toString, oldApi.api, newApi.api, contextSize) + wrappedLog.debug(s"Detected a change in a public API ($src):\n$apiUnifiedPatch") + } + } catch { + case e: Exception => + log.error("An exception has been thrown while trying to dump an api diff.") + log.trace(e) + } } - protected def allDeps(relations: Relations): (String) => Set[String] + /** + * Add package objects that inherit from the set of invalidated classes to avoid + * "class file needed by package is missing" compilation errors. + * + * This might be to conservative. We probably only need the package objects for packages + * of invalidated classes. + * + * @param invalidatedClasses The set of invalidated classes. + * @param relations The current relations. + * @param apis The current APIs information. + * @return The set of invalidated classes + the set of package objects. + */ + protected def invalidatedPackageObjects( + invalidatedClasses: Set[String], + relations: Relations, + apis: APIs + ): Set[String] - protected def invalidateClass(relations: Relations, - change: APIChange, - isScalaClass: String => Boolean): Set[String] + /** + * Find an API change between the `previous` and `current` class representations of `className`. + * + * @param className The class name that identifies both analyzed classes. + * @param previous The analyzed class that comes from the previous analysis. + * @param current The analyzed class that comes from the current analysis. + * @return An optional API change detected between `previous` and `current`. + */ + protected def findAPIChange( + className: String, + previous: AnalyzedClass, + current: AnalyzedClass + ): Option[APIChange] /** - * Conditionally include initial classes that are dependencies of newly invalidated classes. - * Initial classes included in this step can be because of a cycle, but not always. + * Finds the class dependencies of `className` given an instance of [[Relations]]. + * + * @param className The class name from which we detect dependencies. + * @param relations The instance of relations. + * @return A collection of classes that depend on `className`. 
*/ - private[this] def includeInitialCond(initial: Set[String], - currentInvalidations: Set[String], - allDeps: String => Set[String]): Set[String] = { - val newInv = currentInvalidations -- initial - log.debug("New invalidations:\n\t" + newInv) - val transitiveOfNew = transitiveDeps(newInv)(allDeps) - val initialDependsOnNew = transitiveOfNew & initial - log.debug( - "Previously invalidated, but (transitively) depend on new invalidations:\n\t" + initialDependsOnNew) - newInv ++ initialDependsOnNew + protected def findClassDependencies( + className: String, + relations: Relations + ): Set[String] + + /** + * Invalidates a set of class names given the current relations and an internal API change. + * + * This step happens in every cycle of the incremental compiler as it is required to know + * what classes were invalidated given the previous incremental compiler run. + * + * @param currentRelations The relations from the previous analysis file of the compiled project. + * @param externalAPIChange The internal API change detected by [[invalidateAfterInternalCompilation()]]. + * @param isScalaClass A function that tell us whether a class is defined in a Scala file or not. + */ + protected def invalidateClassesInternally( + relations: Relations, + change: APIChange, + isScalaClass: String => Boolean + ): Set[String] + + /** + * Invalidates a set of class names given the current relations and an external API change + * that has been detected in upstream projects. This step only happens in `invalidateInitial` + * because that's where external changes need to be detected and properly invalidated. + * + * @param currentRelations The relations from the previous analysis file of the compiled project. + * @param externalAPIChange The external API change detected by [[detectInitialChanges()]]. + * @param isScalaClass A function that tell us whether a class is defined in a Scala file or not. + */ + protected def invalidateClassesExternally( + currentRelations: Relations, + externalAPIChange: APIChange, + isScalaClass: String => Boolean + ): Set[String] +} + +object IncrementalCommon { + + /** Tell if given class names comes from a Scala source file or not by inspecting relations. */ + def comesFromScalaSource( + previous: Relations, + current: Option[Relations] = None + )(className: String): Boolean = { + val previousSourcesWithClassName = previous.classes.reverse(className) + val newSourcesWithClassName = current.map(_.classes.reverse(className)).getOrElse(Set.empty) + if (previousSourcesWithClassName.isEmpty && newSourcesWithClassName.isEmpty) + sys.error(s"Fatal Zinc error: no entry for class $className in classes relation.") + else { + // Makes sure that the dependency doesn't possibly come from Java + previousSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName)) && + newSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName)) + } + } + + /** Invalidate all classes that claim to produce the same class file as another class. */ + def invalidateNamesProducingSameClassFile(merged: Relations): Set[String] = { + merged.srcProd.reverseMap.flatMap { + case (_, sources) => if (sources.size > 1) sources.flatMap(merged.classNames(_)) else Nil + }.toSet } - def externalBinaryModified( + /** + * Figure out whether a binary class file (identified by a class name) coming from a library + * has changed or not. 
This function is performed at the beginning of the incremental compiler + * algorithm to figure out which binary class names from the classpath (also called external + * binaries) have changed since the last compilation of this module. + * + * @param lookup A lookup instance to ask questions about the classpath. + * @param previousStamps The stamps associated with the previous compilation. + * @param currentStamps The stamps associated with the current compilation. + * @param previousRelations The relation from the previous compiler iteration. + * @param log A logger. + * @param equivS An equivalence function to compare stamps. + * @return + */ + def isBinaryModified( + skipClasspathLookup: Boolean, lookup: Lookup, - previous: Stamps, - current: ReadStamps, - previousRelations: Relations)(implicit equivS: Equiv[XStamp]): File => Boolean = - dependsOn => { - def inv(reason: String): Boolean = { - log.debug("Invalidating " + dependsOn + ": " + reason) - true - } - def entryModified(className: String, classpathEntry: File): Boolean = { - val resolved = Locate.resolve(classpathEntry, className) - if (resolved.getCanonicalPath != dependsOn.getCanonicalPath) - inv("class " + className + " now provided by " + resolved.getCanonicalPath) - else - fileModified(dependsOn, resolved) + previousStamps: Stamps, + currentStamps: ReadStamps, + previousRelations: Relations, + log: Logger + )(implicit equivS: Equiv[XStamp]): File => Boolean = { (binaryFile: File) => + { + def invalidateBinary(reason: String): Boolean = { + log.debug(s"Invalidating '$binaryFile' because $reason"); true } - def fileModified(previousFile: File, currentFile: File): Boolean = { - val previousStamp = previous.binary(previousFile) - val currentStamp = current.binary(currentFile) - if (equivS.equiv(previousStamp, currentStamp)) - false - else - inv("stamp changed from " + previousStamp + " to " + currentStamp) + + def compareStamps(previousFile: File, currentFile: File): Boolean = { + val previousStamp = previousStamps.binary(previousFile) + val currentStamp = currentStamps.binary(currentFile) + if (equivS.equiv(previousStamp, currentStamp)) false + else invalidateBinary(s"$previousFile ($previousStamp) != $currentFile ($currentStamp)") } - def dependencyModified(file: File): Boolean = { + + def isBinaryChanged(file: File): Boolean = { + def compareOriginClassFile(className: String, classpathEntry: File): Boolean = { + val resolved = Locate.resolve(classpathEntry, className) + val resolvedCanonical = resolved.getCanonicalPath + if (resolvedCanonical != binaryFile.getCanonicalPath) + invalidateBinary(s"${className} is now provided by ${resolvedCanonical}") + else compareStamps(binaryFile, resolved) + } + val classNames = previousRelations.libraryClassNames(file) - classNames exists { binaryClassName => - // classpath has not changed since the last compilation, so use the faster detection. 
- if (lookup.changedClasspathHash.isEmpty) + classNames.exists { binaryClassName => + if (lookup.changedClasspathHash.isEmpty) { + // If classpath is not changed, the only possible change needs to come from same project lookup.lookupAnalysis(binaryClassName) match { - case None => false - case Some(_) => inv(s"shadowing is detected for class $binaryClassName") - } else + case None => false + // Most of the cases this is a build tool misconfiguration when using Zinc + case Some(a) => invalidateBinary(s"${binaryClassName} came from analysis $a") + } + } else { + // Find lookup.lookupOnClasspath(binaryClassName) match { - case None => inv(s"could not find class $binaryClassName on the classpath.") - case Some(e) => entryModified(binaryClassName, e) + case None => + invalidateBinary(s"could not find class $binaryClassName on the classpath.") + case Some(classpathEntry) => compareOriginClassFile(binaryClassName, classpathEntry) } + } } } - (if (skipClasspathLookup) fileModified(dependsOn, dependsOn) - else dependencyModified(dependsOn)) - } - def currentExternalAPI(lookup: Lookup): String => AnalyzedClass = { binaryClassName => - { - orEmpty( - for { - analysis0 <- lookup.lookupAnalysis(binaryClassName) - analysis = analysis0 match { case a: Analysis => a } - className <- analysis.relations.productClassName.reverse(binaryClassName).headOption - } yield analysis.apis.internalAPI(className) - ) + if (skipClasspathLookup) compareStamps(binaryFile, binaryFile) + else isBinaryChanged(binaryFile) } } - def orEmpty(o: Option[AnalyzedClass]): AnalyzedClass = o getOrElse APIs.emptyAnalyzedClass - def orTrue(o: Option[Boolean]): Boolean = o getOrElse true + /** + * Find the external [[AnalyzedClass]] (from another analysis) given a class name. + * + * @param lookup An instance that provides access to classpath or external project queries. + * @return The [[AnalyzedClass]] associated with the given class name. + */ + def findExternalAnalyzedClass(lookup: Lookup)(binaryClassName: String): AnalyzedClass = { + val maybeInternalAPI = for { + analysis0 <- lookup.lookupAnalysis(binaryClassName) + analysis = analysis0 match { case a: Analysis => a } + className <- analysis.relations.productClassName.reverse(binaryClassName).headOption + } yield analysis.apis.internalAPI(className) + maybeInternalAPI.getOrElse(APIs.emptyAnalyzedClass) + } - protected def transitiveDeps[T](nodes: Iterable[T], logging: Boolean = true)( - dependencies: T => Iterable[T]): Set[T] = { - val xs = new collection.mutable.HashSet[T] + def transitiveDeps[T]( + nodes: Iterable[T], + log: Logger, + logging: Boolean = true + )(dependencies: T => Iterable[T]): Set[T] = { + val visited = new collection.mutable.HashSet[T] def all(from: T, tos: Iterable[T]): Unit = tos.foreach(to => visit(from, to)) - def visit(from: T, to: T): Unit = - if (!xs.contains(to)) { - if (logging) - log.debug(s"Including $to by $from") - xs += to + def visit(from: T, to: T): Unit = { + if (!visited.contains(to)) { + if (logging) log.debug(s"Including $to by $from") + visited += to all(to, dependencies(to)) } - if (logging) - log.debug("Initial set of included nodes: " + nodes) - nodes foreach { start => - xs += start + } + + if (logging) log.debug(s"Initial set of included nodes: ${nodes.mkString(", ")}") + nodes.foreach { start => + visited += start all(start, dependencies(start)) } - xs.toSet + visited.toSet } -} - -object IncrementalCommon { - /** Tell if given class names comes from a Scala source file or not by inspecting relations. 
*/ - def comesFromScalaSource( - previous: Relations, - current: Option[Relations] = None - )(className: String): Boolean = { - val previousSourcesWithClassName = previous.classes.reverse(className) - val newSourcesWithClassName = current.map(_.classes.reverse(className)).getOrElse(Set.empty) - if (previousSourcesWithClassName.isEmpty && newSourcesWithClassName.isEmpty) - sys.error(s"Fatal Zinc error: no entry for class $className in classes relation.") + /** + * Check that a collection of files are absolute and not relative. + * + * For legacy reasons, the logic to check the absolute path of source files has been + * implemented in the core invalidation algorithm logic. It remains here as there are + * more important things to do than fixing this issue. + * + * @param addedSources + */ + def checkAbsolute(addedSources: Iterable[File]): Unit = { + if (addedSources.isEmpty) () else { - // Makes sure that the dependency doesn't possibly come from Java - previousSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName)) && - newSourcesWithClassName.forall(src => APIUtil.isScalaSourceName(src.getName)) + addedSources.filterNot(_.isAbsolute).toList match { + case first :: more => + val fileStrings = more match { + case Nil => first.toString + case x :: Nil => s"$first and $x" + case _ => s"$first and ${more.size} others" + } + sys.error(s"Expected absolute source files instead of ${fileStrings}.") + case Nil => () + } } } + + def emptyChanges: DependencyChanges = new DependencyChanges { + val modifiedBinaries = new Array[File](0) + val modifiedClasses = new Array[String](0) + def isEmpty = true + } + + /** + * Prunes from the analysis and deletes the class files of `invalidatedSources`. + * + * @param invalidatedSources The set of invalidated sources. + * @param previous The previous analysis instance. + * @param classfileManager The class file manager. + * @return An instance of analysis that doesn't contain the invalidated sources. + */ + def pruneClassFilesOfInvalidations( + invalidatedSources: Set[File], + previous: Analysis, + classfileManager: XClassFileManager + ): Analysis = { + classfileManager.delete(invalidatedSources.flatMap(previous.relations.products).toArray) + previous -- invalidatedSources + } } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala index 4d2b621c91..f7b9b207c8 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala @@ -9,34 +9,47 @@ package sbt package internal package inc +import sbt.util.Logger import xsbti.compile.IncOptions import xsbti.api.{ AnalyzedClass, DefinitionType } import xsbt.api.SameAPI /** - * Implementation of incremental algorithm known as "name hashing". It differs from the default implementation - * by applying pruning (filter) of member reference dependencies based on used and modified simple names. + * Implement the name hashing heuristics to invalidate classes. * - * See MemberReferenceInvalidationStrategy for some more information. + * It's defined `private[inc]` to allow other libraries to extend this class to their discretion. + * Note that the rest of the Zinc public API will default on the use of the private and final + * `IncrementalNameHashing`. + * + * See [[MemberRefInvalidator]] for more information on how the name heuristics work to invalidate + * member references. 
*/ -private final class IncrementalNameHashing(log: sbt.util.Logger, options: IncOptions) +private[inc] class IncrementalNameHashingCommon(log: Logger, options: IncOptions) extends IncrementalCommon(log, options) { + import IncrementalCommon.transitiveDeps private val memberRefInvalidator = new MemberRefInvalidator(log, options.logRecompileOnMacro()) - // Package objects are fragile: if they inherit from an invalidated class, get "class file needed by package is missing" error - // This might be too conservative: we probably only need package objects for packages of invalidated classes. - protected def invalidatedPackageObjects(invalidatedClasses: Set[String], - relations: Relations, - apis: APIs): Set[String] = { - transitiveDeps(invalidatedClasses, logging = false)(relations.inheritance.internal.reverse) filter { - _.endsWith(".package") - } + /** @inheritdoc */ + protected def invalidatedPackageObjects( + invalidatedClasses: Set[String], + relations: Relations, + apis: APIs + ): Set[String] = { + val findSubclasses = relations.inheritance.internal.reverse _ + debug("Invalidate package objects by inheritance only...") + val invalidatedPackageObjects = + transitiveDeps(invalidatedClasses, log)(findSubclasses).filter(_.endsWith(".package")) + debug(s"Package object invalidations: ${invalidatedPackageObjects.mkString(", ")}") + invalidatedPackageObjects } - override protected def sameAPI(className: String, - a: AnalyzedClass, - b: AnalyzedClass): Option[APIChange] = { + /** @inheritdoc */ + override protected def findAPIChange( + className: String, + a: AnalyzedClass, + b: AnalyzedClass + ): Option[APIChange] = { if (SameAPI(a, b)) { if (SameAPI.hasSameExtraHash(a, b)) None else { @@ -63,10 +76,12 @@ private final class IncrementalNameHashing(log: sbt.util.Logger, options: IncOpt } } - /** Invalidates classes based on initially detected 'changes' to the sources, products, and dependencies. 
*/
-  override protected def invalidateByExternal(relations: Relations,
-                                              externalAPIChange: APIChange,
-                                              isScalaClass: String => Boolean): Set[String] = {
+  /** @inheritdoc */
+  override protected def invalidateClassesExternally(
+      relations: Relations,
+      externalAPIChange: APIChange,
+      isScalaClass: String => Boolean
+  ): Set[String] = {
     val modifiedBinaryClassName = externalAPIChange.modifiedClass
     val invalidationReason = memberRefInvalidator.invalidationReason(externalAPIChange)
     log.debug(
@@ -107,21 +122,24 @@ private final class IncrementalNameHashing(log: sbt.util.Logger, options: IncOpt
   private def invalidateByInheritance(relations: Relations, modified: String): Set[String] = {
     val inheritanceDeps = relations.inheritance.internal.reverse _
     log.debug(s"Invalidating (transitively) by inheritance from $modified...")
-    val transitiveInheritance = transitiveDeps(Set(modified))(inheritanceDeps)
+    val transitiveInheritance = transitiveDeps(Set(modified), log)(inheritanceDeps)
     log.debug("Invalidated by transitive inheritance dependency: " + transitiveInheritance)
     transitiveInheritance
   }
 
-  private def invalidateByLocalInheritance(relations: Relations, modified: String): Set[String] = {
-    val localInheritanceDeps = relations.localInheritance.internal.reverse(modified)
-    if (localInheritanceDeps.nonEmpty)
-      log.debug(s"Invalidate by local inheritance: $modified -> $localInheritanceDeps")
-    localInheritanceDeps
-  }
+  /** @inheritdoc */
+  override protected def invalidateClassesInternally(
+      relations: Relations,
+      change: APIChange,
+      isScalaClass: String => Boolean
+  ): Set[String] = {
+    def invalidateByLocalInheritance(relations: Relations, modified: String): Set[String] = {
+      val localInheritanceDeps = relations.localInheritance.internal.reverse(modified)
+      if (localInheritanceDeps.nonEmpty)
+        log.debug(s"Invalidate by local inheritance: $modified -> $localInheritanceDeps")
+      localInheritanceDeps
+    }
 
-  override protected def invalidateClass(relations: Relations,
-                                         change: APIChange,
-                                         isScalaClass: String => Boolean): Set[String] = {
     val modifiedClass = change.modifiedClass
     val transitiveInheritance = invalidateByInheritance(relations, modifiedClass)
     val localInheritance =
@@ -157,6 +175,12 @@ private final class IncrementalNameHashing(log: sbt.util.Logger, options: IncOpt
     all
   }
 
-  override protected def allDeps(relations: Relations): (String) => Set[String] =
-    cls => relations.memberRef.internal.reverse(cls)
+  /** @inheritdoc */
+  override protected def findClassDependencies(
+      className: String,
+      relations: Relations
+  ): Set[String] = relations.memberRef.internal.reverse(className)
 }
+
+private final class IncrementalNameHashing(log: Logger, options: IncOptions)
+    extends IncrementalNameHashingCommon(log, options)
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala
index 02a563191f..19f6017519 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala
@@ -44,6 +44,11 @@ trait Lookup extends ExternalLookup {
   def lookupAnalysis(binaryClassName: String): Option[CompileAnalysis]
 }
 
+/**
+ * Defines a hook interface that IDEs or build tools can implement to modify the
+ * way the incremental compiler invalidates classes. These hooks operate at a high
+ * level of abstraction and only allow modifying the inputs of the initial change detection.
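+ *
+ * As a sketch (assuming only the hook shown below; the other hooks of this
+ * trait are elided), a build tool could veto incremental compilation once too
+ * many classes have been invalidated:
+ *
+ * {{{
+ * object NoIncrementalPastThreshold extends ExternalLookup {
+ *   // Fall back to a full compile when more than 1000 classes are invalidated
+ *   override def shouldDoIncrementalCompilation(
+ *       changedClasses: Set[String],
+ *       previousAnalysis: CompileAnalysis
+ *   ): Boolean = changedClasses.size < 1000
+ *   // Remaining hooks elided for brevity.
+ * }
+ * }}}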
+ */ trait ExternalLookup extends ExternalHooks.Lookup { import sbt.internal.inc.JavaInterfaceUtil.EnrichOption import scala.collection.JavaConverters._ From 3dd4a2fb22e3b06d250d58e58fa1d5f92fdffdbe Mon Sep 17 00:00:00 2001 From: Jorge Vicente Cantero Date: Sun, 26 Aug 2018 14:16:39 +0200 Subject: [PATCH 3/6] Add profiling invalidation utils (zprof) zprof is the name I've chosen for this small profiler (or tracker if you will) of the invalidation logic. The profiled data is formalized in an internal format that is not supposed to be used by normal users, but rather by us (Zinc) and related tools (Bloop). The current profiled data exposes details of how the incremental compiler works internally and how it invalidates classes. This is the realization of an idea I registered here: https://github.com/sbt/zinc/pull/550 With this idea, this data will not only be useful for debugging but for providing an automatic way of reporting bugs in Zinc. The infrastructure is far from finished but it's already in a usable state for libraries that depend on Zinc directly and have direct access to `Incremental`. By default, no profiler is used. Only people that change the profiler argument for `Incremental.compile` will be able to get the run profiles. --- build.sbt | 1 + .../zinc-core/src/main/protobuf/zprof.proto | 68 +++++ .../scala/sbt/internal/inc/Incremental.scala | 12 +- .../sbt/internal/inc/IncrementalCommon.scala | 32 ++- .../internal/inc/IncrementalNameHashing.scala | 30 +- .../internal/inc/InvalidationProfiler.scala | 267 ++++++++++++++++++ 6 files changed, 394 insertions(+), 16 deletions(-) create mode 100644 internal/zinc-core/src/main/protobuf/zprof.proto create mode 100644 internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala diff --git a/build.sbt b/build.sbt index 977ed4bb87..c22089770c 100644 --- a/build.sbt +++ b/build.sbt @@ -243,6 +243,7 @@ lazy val zincCore = (project in internalPath / "zinc-core") name := "zinc Core", compileOrder := sbt.CompileOrder.Mixed, mimaSettings, + PB.targets in Compile := List(scalapb.gen() -> (sourceManaged in Compile).value), ) .configure(addSbtIO, addSbtUtilLogging, addSbtUtilRelation) diff --git a/internal/zinc-core/src/main/protobuf/zprof.proto b/internal/zinc-core/src/main/protobuf/zprof.proto new file mode 100644 index 0000000000..716ebf1d7b --- /dev/null +++ b/internal/zinc-core/src/main/protobuf/zprof.proto @@ -0,0 +1,68 @@ +syntax = "proto3"; + +package sbt.internal.inc; + +/////////////////////////////////////////////////////////////////////////////////////////////// +///////////////////////////////////////// ZINC PROF /////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////// + +message Profile { + repeated ZincRun runs = 1; + repeated string string_table = 2; +} + +message ZincRun { + InitialChanges initial = 1; + repeated CycleInvalidation cycles = 3; +} + +message CycleInvalidation { + repeated int64 invalidated = 1; + repeated int64 invalidatedByPackageObjects = 2; + repeated int64 initialSources = 3; + repeated int64 invalidatedSources = 4; + repeated int64 recompiledClasses = 5; + + int64 startTimeNanos = 6; // Start time of compilation (UTC) as nanoseconds past the epoch + int64 compilationDurationNanos = 7; // Duration of the compilation profile in nanoseconds + repeated ApiChange changesAfterRecompilation = 8; + + repeated InvalidationEvent events = 9; + repeated int64 nextInvalidations = 10; + bool shouldCompileIncrementally = 11; +} 
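+// Illustrative example (not part of the schema): with string_table = ["a.A", "B.scala"],
+// a cycle that invalidated the class "a.A" defined in source "B.scala" would be
+// encoded as invalidated = [0] and invalidatedSources = [1].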
+ +message InvalidationEvent { + string kind = 1; + repeated int64 inputs = 2; + repeated int64 outputs = 3; + string reason = 4; +} + +message Changes { + repeated int64 added = 1; + repeated int64 removed = 2; + repeated int64 modified = 3; +} + +message ApiChange { + int64 modifiedClass = 1; + string reason = 2; + repeated UsedName usedNames = 3; // Can be empty if the change is not related to names +} + +message InitialChanges { + Changes changes = 1; + repeated int64 removedProducts = 2; + repeated int64 binaryDependencies = 3; + repeated ApiChange externalChanges = 4; +} + +message UsedName { + int64 name = 1; + repeated Scope scopes = 2; +} + +message Scope { + int64 kind = 1; +} diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index 533f032f7e..e5fc777566 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -14,10 +14,10 @@ import java.io.File import sbt.util.{ Level, Logger } import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } import xsbti.compile.{ - ClassFileManager => XClassFileManager, CompileAnalysis, DependencyChanges, - IncOptions + IncOptions, + ClassFileManager => XClassFileManager } /** @@ -46,6 +46,7 @@ object Incremental { * @param callbackBuilder The builder that builds callback where we report dependency issues. * @param log The log where we write debugging information * @param options Incremental compilation options + * @param profiler An implementation of an invalidation profiler, empty by default. * @param equivS The means of testing whether two "Stamps" are the same. * @return * A flag of whether or not compilation completed successfully, and the resulting dependency analysis object. @@ -58,11 +59,12 @@ object Incremental { compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback, XClassFileManager) => Unit, callbackBuilder: AnalysisCallback.Builder, log: sbt.util.Logger, - options: IncOptions + options: IncOptions, + profiler: InvalidationProfiler = InvalidationProfiler.empty )(implicit equivS: Equiv[XStamp]): (Boolean, Analysis) = { val previous = previous0 match { case a: Analysis => a } - val incremental: IncrementalCommon = - new IncrementalNameHashing(log, options) + val runProfiler = profiler.profileRun + val incremental: IncrementalCommon = new IncrementalNameHashing(log, options, runProfiler) val initialChanges = incremental.detectInitialChanges(sources, previous, current, lookup) val binaryChanges = new DependencyChanges { val modifiedBinaries = initialChanges.binaryDeps.toArray diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index c22f43a206..920af2b9e4 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -31,7 +31,11 @@ import scala.annotation.tailrec * @param log An instance of a logger. * @param options An instance of incremental compiler options. 
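+ * @param profiler An instance of the run profiler that registers this run's invalidation decisions.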
*/ -private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptions) { +private[inc] abstract class IncrementalCommon( + val log: Logger, + options: IncOptions, + profiler: RunProfiler +) extends InvalidationProfilerUtils { // Work around bugs in classpath handling such as the "currently" problematic -javabootclasspath private[this] def enableShallowLookup: Boolean = java.lang.Boolean.getBoolean("xsbt.skip.cp.lookup") @@ -81,11 +85,11 @@ private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptio // Computes which source files are mapped to the invalidated classes and recompile them val invalidatedSources = mapInvalidationsToSources(classesToRecompile, initialChangedSources, allSources, previous) - val (current, recompiledRecently) = + val current = recompileClasses(invalidatedSources, binaryChanges, previous, doCompile, classfileManager) // Return immediate analysis as all sources have been recompiled - if (recompiledRecently == allSources) current + if (invalidatedSources == allSources) current else { val recompiledClasses: Set[String] = { // Represents classes detected as changed externally and internally (by a previous cycle) @@ -107,6 +111,18 @@ private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptio ) val continue = lookup.shouldDoIncrementalCompilation(nextInvalidations, current) + + profiler.registerCycle( + invalidatedClasses, + invalidatedByPackageObjects, + initialChangedSources, + invalidatedSources, + recompiledClasses, + newApiChanges, + nextInvalidations, + continue + ) + cycle( if (continue) nextInvalidations else Set.empty, Set.empty, @@ -147,20 +163,20 @@ private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptio previous: Analysis, doCompile: (Set[File], DependencyChanges) => Analysis, classfileManager: XClassFileManager - ): (Analysis, Set[File]) = { + ): Analysis = { val pruned = IncrementalCommon.pruneClassFilesOfInvalidations(sources, previous, classfileManager) debug("********* Pruned: \n" + pruned.relations + "\n*********") val fresh = doCompile(sources, binaryChanges) debug("********* Fresh: \n" + fresh.relations + "\n*********") - val merged = pruned ++ fresh /* This is required for both scala compilation and forked java compilation, despite * being redundant for the most common Java compilation (using the local compiler). */ classfileManager.generated(fresh.relations.allProducts.toArray) + val merged = pruned ++ fresh debug("********* Merged: \n" + merged.relations + "\n*********") - (merged, sources) + merged } /** @@ -258,7 +274,9 @@ private[inc] abstract class IncrementalCommon(val log: Logger, options: IncOptio else incrementalExternalChanges } - InitialChanges(sourceChanges, removedProducts, changedBinaries, externalApiChanges) + val init = InitialChanges(sourceChanges, removedProducts, changedBinaries, externalApiChanges) + profiler.registerInitial(init) + init } /** diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala index f7b9b207c8..2791de24f3 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalNameHashing.scala @@ -24,8 +24,11 @@ import xsbt.api.SameAPI * See [[MemberRefInvalidator]] for more information on how the name heuristics work to invalidate * member references. 
*/
-private[inc] class IncrementalNameHashingCommon(log: Logger, options: IncOptions)
-    extends IncrementalCommon(log, options) {
+private[inc] class IncrementalNameHashingCommon(
+    log: Logger,
+    options: IncOptions,
+    profiler: RunProfiler
+) extends IncrementalCommon(log, options, profiler) {
   import IncrementalCommon.transitiveDeps
 
   private val memberRefInvalidator = new MemberRefInvalidator(log, options.logRecompileOnMacro())
@@ -142,13 +145,32 @@ private[inc] class IncrementalNameHashingCommon(log: Logger, options: IncOptions
 
     val modifiedClass = change.modifiedClass
     val transitiveInheritance = invalidateByInheritance(relations, modifiedClass)
+    profiler.registerEvent(
+      zprof.InvalidationEvent.InheritanceKind,
+      List(modifiedClass),
+      transitiveInheritance,
+      s"The invalidated class names inherit directly or transitively from ${modifiedClass}."
+    )
+
     val localInheritance =
       transitiveInheritance.flatMap(invalidateByLocalInheritance(relations, _))
+    profiler.registerEvent(
+      zprof.InvalidationEvent.LocalInheritanceKind,
+      transitiveInheritance,
+      localInheritance,
+      s"The invalidated class names inherit (via local inheritance) directly or transitively from ${modifiedClass}."
+    )
     val memberRefSrcDeps = relations.memberRef.internal
     val memberRefInvalidation =
      memberRefInvalidator.get(memberRefSrcDeps, relations.names, change, isScalaClass)
     val memberRef = transitiveInheritance flatMap memberRefInvalidation
+    profiler.registerEvent(
+      zprof.InvalidationEvent.MemberReferenceKind,
+      transitiveInheritance,
+      memberRef,
+      s"The invalidated class names refer directly or transitively to ${modifiedClass}."
+    )
     val all = transitiveInheritance ++ localInheritance ++ memberRef
 
     def debugMessage: String = {
@@ -182,5 +204,5 @@ private[inc] class IncrementalNameHashingCommon(log: Logger, options: IncOptions
   ): Set[String] = relations.memberRef.internal.reverse(className)
 }
 
-private final class IncrementalNameHashing(log: Logger, options: IncOptions)
-    extends IncrementalNameHashingCommon(log, options)
+private final class IncrementalNameHashing(log: Logger, options: IncOptions, profiler: RunProfiler)
+    extends IncrementalNameHashingCommon(log, options, profiler)
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala
new file mode 100644
index 0000000000..d6286f75a9
--- /dev/null
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala
@@ -0,0 +1,267 @@
+package sbt.internal.inc
+
+import java.io.File
+
+import xsbti.UseScope
+
+import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
+
+/**
+ * Defines a profiler interface that translates to the profiling protobuf schema.
+ *
+ * The protobuf schema has been mildly inspired by pprof:
+ * https://github.com/google/pprof/blob/master/proto/profile.proto
+ *
+ * A profiler should only be used per project, and not globally, as
+ * this class is not thread safe.
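+ *
+ * A minimal usage sketch (the wiring is illustrative; `profileRun` and
+ * `toProfile` are the real entry points defined below):
+ *
+ * {{{
+ * val profiler = new ZincInvalidationProfiler
+ * // Pass `profiler` to `Incremental.compile` for every run of this project,
+ * // then periodically turn the aggregated runs into a serializable profile:
+ * val profile: zprof.Profile = profiler.toProfile
+ * }}}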
+ */ +abstract class InvalidationProfiler { + def profileRun: RunProfiler + def registerRun(run: zprof.ZincRun): Unit +} + +object InvalidationProfiler { + final val empty: InvalidationProfiler = new InvalidationProfiler { + override def profileRun: RunProfiler = RunProfiler.empty + override def registerRun(run: zprof.ZincRun): Unit = () + } +} + +class ZincInvalidationProfiler extends InvalidationProfiler { + private final var lastKnownIndex: Long = -1L + private final val stringTable1: ArrayBuffer[String] = new ArrayBuffer[String](1000) + private final val stringTable2: ArrayBuffer[String] = new ArrayBuffer[String](10) + private final val stringTableIndices: mutable.HashMap[String, Long] = + new mutable.HashMap[String, Long] + + def profileRun: RunProfiler = new ZincProfilerImplementation + + private final var runs: List[zprof.ZincRun] = Nil + def registerRun(run: zprof.ZincRun): Unit = { + runs = run :: runs + () + } + + /** + * Returns an immutable zprof profile that can be serialized. + * + * It is recommended to only perform this operation when we are + * going to persist the profiled protobuf data to disk. Do not + * call this function after every compiler iteration as the aggregation + * of the symbol tables may be expensive, it's recommended to + * persist this file periodically. + * + * @return An immutable zprof profile that can be persisted via protobuf. + */ + def toProfile: zprof.Profile = zprof.Profile( + runs = runs, + stringTable = stringTable1 ++ stringTable2 + ) + + private[inc] class ZincProfilerImplementation extends RunProfiler { + private def toStringTableIndex(string: String): Long = { + stringTableIndices.get(string) match { + case Some(index) => + if (index <= Integer.MAX_VALUE) { + val newIndex = index.toInt + stringTable1.apply(newIndex) + newIndex + } else { + val newIndex = (index - Integer.MAX_VALUE.toLong).toInt + stringTable2.apply(newIndex) + newIndex + } + case None => + val newIndex = lastKnownIndex + 1 + // Depending on the size of the index, use the first or second symbol table + if (newIndex <= Integer.MAX_VALUE) { + stringTable1.insert(newIndex.toInt, string) + } else { + val newIndex2 = (newIndex - Integer.MAX_VALUE.toLong).toInt + stringTable2.insert(newIndex2, string) + } + stringTableIndices.put(string, newIndex) + lastKnownIndex = lastKnownIndex + 1 + newIndex + } + } + + private def toStringTableIndices(strings: Iterable[String]): Iterable[Long] = + strings.map(toStringTableIndex(_)) + + private final var compilationStartNanos: Long = 0L + private final var compilationDurationNanos: Long = 0L + def timeCompilation(startNanos: Long, durationNanos: Long): Unit = { + compilationStartNanos = startNanos + compilationDurationNanos = durationNanos + } + + private def toPathStrings(files: Iterable[File]): Iterable[String] = + files.map(_.getAbsolutePath) + + def toApiChanges(changes: APIChanges): Iterable[zprof.ApiChange] = { + def toUsedNames(names: Iterable[UsedName]): Iterable[zprof.UsedName] = { + import scala.collection.JavaConverters._ + names.map { name => + val scopes = name.scopes.asScala.map { + case UseScope.Default => zprof.Scope(toStringTableIndex("default")) + case UseScope.Implicit => zprof.Scope(toStringTableIndex("implicit")) + case UseScope.PatMatTarget => zprof.Scope(toStringTableIndex("patmat target")) + } + zprof.UsedName(toStringTableIndex(name.name), scopes.toList) + } + } + + changes.apiChanges.map { + case change: APIChangeDueToMacroDefinition => + zprof.ApiChange( + toStringTableIndex(change.modifiedClass), + "API change due 
to macro definition." + ) + case change: TraitPrivateMembersModified => + zprof.ApiChange( + toStringTableIndex(change.modifiedClass), + s"API change due to existence of private trait members in modified class." + ) + case NamesChange(modifiedClass, modifiedNames) => + val usedNames = toUsedNames(modifiedNames.names).toList + zprof.ApiChange( + toStringTableIndex(modifiedClass), + s"Standard API name change in modified class.", + usedNames = usedNames + ) + } + } + + private final var initial: Option[zprof.InitialChanges] = None + def registerInitial(changes: InitialChanges): Unit = { + import scala.collection.JavaConverters._ + val fileChanges = changes.internalSrc + val profChanges = zprof.Changes( + added = toStringTableIndices(toPathStrings(fileChanges.getAdded.asScala)).toList, + removed = toStringTableIndices(toPathStrings(fileChanges.getRemoved.asScala)).toList, + modified = toStringTableIndices(toPathStrings(fileChanges.getChanged.asScala)).toList + ) + initial = Some( + zprof.InitialChanges( + changes = Some(profChanges), + removedProducts = toStringTableIndices(toPathStrings(changes.removedProducts)).toList, + binaryDependencies = toStringTableIndices(toPathStrings(changes.binaryDeps)).toList, + externalChanges = toApiChanges(changes.external).toList + ) + ) + } + + private final var currentEvents: List[zprof.InvalidationEvent] = Nil + def registerEvent( + kind: String, + inputs: Iterable[String], + outputs: Iterable[String], + reason: String + ): Unit = { + val event = zprof.InvalidationEvent( + kind = kind, + inputs = toStringTableIndices(inputs).toList, + outputs = toStringTableIndices(outputs).toList, + reason = reason + ) + + currentEvents = event :: currentEvents + } + + private final var cycles: List[zprof.CycleInvalidation] = Nil + def registerCycle( + invalidatedClasses: Iterable[String], + invalidatedPackageObjects: Iterable[String], + initialSources: Iterable[File], + invalidatedSources: Iterable[File], + recompiledClasses: Iterable[String], + changesAfterRecompilation: APIChanges, + nextInvalidations: Iterable[String], + shouldCompileIncrementally: Boolean + ): Unit = { + val newCycle = zprof.CycleInvalidation( + invalidated = toStringTableIndices(invalidatedClasses).toList, + invalidatedByPackageObjects = toStringTableIndices(invalidatedPackageObjects).toList, + initialSources = toStringTableIndices(toPathStrings(initialSources)).toList, + invalidatedSources = toStringTableIndices(toPathStrings(invalidatedSources)).toList, + recompiledClasses = toStringTableIndices(recompiledClasses).toList, + changesAfterRecompilation = toApiChanges(changesAfterRecompilation).toList, + nextInvalidations = toStringTableIndices(nextInvalidations).toList, + startTimeNanos = compilationStartNanos, + compilationDurationNanos = compilationDurationNanos, + events = currentEvents, + shouldCompileIncrementally = shouldCompileIncrementally + ) + + cycles = newCycle :: cycles + () + } + } +} + +abstract class RunProfiler { + def timeCompilation( + startNanos: Long, + durationNanos: Long + ): Unit + + def registerInitial( + changes: InitialChanges + ): Unit + + def registerEvent( + kind: String, + inputs: Iterable[String], + outputs: Iterable[String], + reason: String + ): Unit + + def registerCycle( + invalidatedClasses: Iterable[String], + invalidatedPackageObjects: Iterable[String], + initialSources: Iterable[File], + invalidatedSources: Iterable[File], + recompiledClasses: Iterable[String], + changesAfterRecompilation: APIChanges, + nextInvalidations: Iterable[String], + 
shouldCompileIncrementally: Boolean
+  ): Unit
+}
+
+object RunProfiler {
+  final val empty = new RunProfiler {
+    def timeCompilation(startNanos: Long, durationNanos: Long): Unit = ()
+    def registerInitial(changes: InitialChanges): Unit = ()
+
+    def registerEvent(
+        kind: String,
+        inputs: Iterable[String],
+        outputs: Iterable[String],
+        reason: String
+    ): Unit = ()
+    def registerCycle(
+        invalidatedClasses: Iterable[String],
+        invalidatedPackageObjects: Iterable[String],
+        initialSources: Iterable[File],
+        invalidatedSources: Iterable[File],
+        recompiledClasses: Iterable[String],
+        changesAfterRecompilation: APIChanges,
+        nextInvalidations: Iterable[String],
+        shouldCompileIncrementally: Boolean
+    ): Unit = ()
+  }
+}
+
+trait InvalidationProfilerUtils {
+  // Define this so that we can provide default labels for events in the protobuf-generated companion
+  implicit class InvalidationEventXCompanion(invalidationEvent: zprof.InvalidationEvent.type) {
+    final val LocalInheritanceKind = "local inheritance"
+    final val InheritanceKind = "inheritance"
+    final val MemberReferenceKind = "member reference"
+  }
+}
+
+// So that other users from outside [[IncrementalCommon]] can use the labels
+object InvalidationProfilerUtils extends InvalidationProfilerUtils
From 13c7d4db9e98f408783fa8b71239f60e1bd45779 Mon Sep 17 00:00:00 2001
From: Jorge Vicente Cantero
Date: Sun, 26 Aug 2018 20:04:14 +0200
Subject: [PATCH 4/6] Exclude binary compatibility errors in zincCore

These methods are not exposed to the public API of Zinc and can therefore
be changed safely.
---
 build.sbt | 40 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/build.sbt b/build.sbt
index c22089770c..fe4b702c20 100644
--- a/build.sbt
+++ b/build.sbt
@@ -244,6 +244,46 @@ lazy val zincCore = (project in internalPath / "zinc-core")
     compileOrder := sbt.CompileOrder.Mixed,
     mimaSettings,
     PB.targets in Compile := List(scalapb.gen() -> (sourceManaged in Compile).value),
+    mimaBinaryIssueFilters ++= {
+      import com.typesafe.tools.mima.core._
+      import com.typesafe.tools.mima.core.ProblemFilters._
+      List(
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalNameHashing.allDeps"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalNameHashing.sameAPI"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalNameHashing.invalidateClass"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalNameHashing.invalidateByExternal"),
+        exclude[DirectAbstractMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidatedPackageObjects"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalNameHashing.this"),
+        exclude[MissingClassProblem]("sbt.internal.inc.ClassToSourceMapper"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.Incremental.compile"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.Incremental.prune"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.changes"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.sameClass"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.allDeps"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.sameAPI"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateIntermediate"),
+        exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateByAllExternal"),
exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateDuplicates"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.transitiveDeps"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateClass"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.externalBinaryModified"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateIncremental"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.changedInitial"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.transitiveDeps$default$2"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.orTrue"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateByExternal"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.wrappedLog"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.shortcutSameClass"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.orEmpty"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.changedIncremental"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.currentExternalAPI"), + exclude[DirectMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.this"), + exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.findClassDependencies"), + exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateClassesInternally"), + exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.invalidateClassesExternally"), + exclude[ReversedMissingMethodProblem]("sbt.internal.inc.IncrementalCommon.findAPIChange") + ) + } ) .configure(addSbtIO, addSbtUtilLogging, addSbtUtilRelation) From 90b56cd827b4d380d1621ed5e7f99932aeab65d1 Mon Sep 17 00:00:00 2001 From: Jorge Date: Mon, 27 Aug 2018 11:03:32 +0200 Subject: [PATCH 5/6] Add some docs to clarify design --- .../sbt/internal/inc/InvalidationProfiler.scala | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala index d6286f75a9..811db31b8d 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala @@ -30,8 +30,16 @@ object InvalidationProfiler { class ZincInvalidationProfiler extends InvalidationProfiler { private final var lastKnownIndex: Long = -1L + /* These string tables contain any kind of repeated string that is likely to occur + * in the protobuf profiling data. This includes used names, class names, source + * files and class files (their paths), as well as other repeated strings. This is + * done to keep the memory overhead of the profiler to a minimum. */ private final val stringTable1: ArrayBuffer[String] = new ArrayBuffer[String](1000) - private final val stringTable2: ArrayBuffer[String] = new ArrayBuffer[String](10) + private final val stringTable2: ArrayBuffer[String] = new ArrayBuffer[String](0) + + /* Maps strings to indices. The indices are long because we're overprotecting ourselves + * in case the string table grows gigantic. This should not happen, but as the profiling + * scheme of pprof does it and it's not cumbersome to implement it, we replicate the same design. 
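+   * For example, the first distinct string that is interned gets index 0, the
+   * next one index 1, and so on; later occurrences reuse the stored index.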
*/
   private final val stringTableIndices: mutable.HashMap[String, Long] =
     new mutable.HashMap[String, Long]
 
   def profileRun: RunProfiler = new ZincProfilerImplementation
 
@@ -201,6 +209,12 @@ class ZincInvalidationProfiler extends InvalidationProfiler {
   }
 }
 
+/**
+ * Defines the interface of a profiler. This interface is used in the guts of
+ * [[IncrementalCommon]] and [[IncrementalNameHashing]]. A profiler of a run
+ * is instantiated afresh in `Incremental.compile` and then added to the profiler
+ * instance managed by the client.
+ */
 abstract class RunProfiler {
   def timeCompilation(
     startNanos: Long,
From 11092ad4ce518da8e5dd37e3769c47bb4cafaade Mon Sep 17 00:00:00 2001
From: jvican
Date: Mon, 27 Aug 2018 11:15:17 +0200
Subject: [PATCH 6/6] Remove secondary string table

I realized how stupid the idea of keeping two string tables instead of one
was: it's unlikely we'll ever have more than 2147483647 unique string
instances, and the JVM doesn't allow instantiating such a big array in
normal heap sizes anyway. So we remove this part of the design that we
inherited from pprof.
---
 .../zinc-core/src/main/protobuf/zprof.proto   | 36 ++++++------
 .../internal/inc/InvalidationProfiler.scala   | 55 ++++++++-----------
 2 files changed, 42 insertions(+), 49 deletions(-)

diff --git a/internal/zinc-core/src/main/protobuf/zprof.proto b/internal/zinc-core/src/main/protobuf/zprof.proto
index 716ebf1d7b..946bcfddcb 100644
--- a/internal/zinc-core/src/main/protobuf/zprof.proto
+++ b/internal/zinc-core/src/main/protobuf/zprof.proto
@@ -6,6 +6,10 @@ package sbt.internal.inc;
 ///////////////////////////////////////// ZINC PROF ///////////////////////////////////////////
 ///////////////////////////////////////////////////////////////////////////////////////////////
 
+// This protobuf schema is inspired by https://github.com/google/pprof/blob/master/proto/profile.proto
+// Like pprof, it uses a string table, and all repeated strings in the format are represented as an
+// index (int32) into that string table. This is done to minimize overhead in memory and disk.
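+//
+// For instance (illustrative, not part of the schema): with string_table = ["a.A"],
+// an ApiChange on class "a.A" is encoded as modifiedClass = 0, and every other
+// int32 field referencing a string points into the same table.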
+ message Profile { repeated ZincRun runs = 1; repeated string string_table = 2; @@ -17,52 +21,52 @@ message ZincRun { } message CycleInvalidation { - repeated int64 invalidated = 1; - repeated int64 invalidatedByPackageObjects = 2; - repeated int64 initialSources = 3; - repeated int64 invalidatedSources = 4; - repeated int64 recompiledClasses = 5; + repeated int32 invalidated = 1; + repeated int32 invalidatedByPackageObjects = 2; + repeated int32 initialSources = 3; + repeated int32 invalidatedSources = 4; + repeated int32 recompiledClasses = 5; int64 startTimeNanos = 6; // Start time of compilation (UTC) as nanoseconds past the epoch int64 compilationDurationNanos = 7; // Duration of the compilation profile in nanoseconds repeated ApiChange changesAfterRecompilation = 8; repeated InvalidationEvent events = 9; - repeated int64 nextInvalidations = 10; + repeated int32 nextInvalidations = 10; bool shouldCompileIncrementally = 11; } message InvalidationEvent { string kind = 1; - repeated int64 inputs = 2; - repeated int64 outputs = 3; + repeated int32 inputs = 2; + repeated int32 outputs = 3; string reason = 4; } message Changes { - repeated int64 added = 1; - repeated int64 removed = 2; - repeated int64 modified = 3; + repeated int32 added = 1; + repeated int32 removed = 2; + repeated int32 modified = 3; } message ApiChange { - int64 modifiedClass = 1; + int32 modifiedClass = 1; string reason = 2; repeated UsedName usedNames = 3; // Can be empty if the change is not related to names } message InitialChanges { Changes changes = 1; - repeated int64 removedProducts = 2; - repeated int64 binaryDependencies = 3; + repeated int32 removedProducts = 2; + repeated int32 binaryDependencies = 3; repeated ApiChange externalChanges = 4; } message UsedName { - int64 name = 1; + int32 name = 1; repeated Scope scopes = 2; } message Scope { - int64 kind = 1; + int32 kind = 1; } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala index 811db31b8d..fcc4828d6a 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/InvalidationProfiler.scala @@ -29,19 +29,18 @@ object InvalidationProfiler { } class ZincInvalidationProfiler extends InvalidationProfiler { - private final var lastKnownIndex: Long = -1L - /* These string tables contain any kind of repeated string that is likely to occur + private final var lastKnownIndex: Int = -1 + /* The string table contains any kind of repeated string that is likely to occur * in the protobuf profiling data. This includes used names, class names, source * files and class files (their paths), as well as other repeated strings. This is * done to keep the memory overhead of the profiler to a minimum. */ - private final val stringTable1: ArrayBuffer[String] = new ArrayBuffer[String](1000) - private final val stringTable2: ArrayBuffer[String] = new ArrayBuffer[String](0) - + private final val stringTable: ArrayBuffer[String] = new ArrayBuffer[String](1000) + /* Maps strings to indices. The indices are long because we're overprotecting ourselves * in case the string table grows gigantic. This should not happen, but as the profiling * scheme of pprof does it and it's not cumbersome to implement it, we replicate the same design. 
*/
-  private final val stringTableIndices: mutable.HashMap[String, Long] =
-    new mutable.HashMap[String, Long]
+  private final val stringTableIndices: mutable.HashMap[String, Int] =
+    new mutable.HashMap[String, Int]
 
   def profileRun: RunProfiler = new ZincProfilerImplementation
 
@@ -56,46 +55,36 @@ class ZincInvalidationProfiler extends InvalidationProfiler {
    *
    * It is recommended to only perform this operation when we are
    * going to persist the profiled protobuf data to disk. Do not
-   * call this function after every compiler iteration as the aggregation
-   * of the symbol tables may be expensive, it's recommended to
-   * persist this file periodically.
+   * call this function after every compiler iteration as you will
+   * write the string table into every persisted protobuf file. It's
+   * better to persist this file periodically after several runs
+   * so that the overhead on disk stays low.
    *
    * @return An immutable zprof profile that can be persisted via protobuf.
    */
   def toProfile: zprof.Profile = zprof.Profile(
     runs = runs,
-    stringTable = stringTable1 ++ stringTable2
+    stringTable = stringTable
   )
 
   private[inc] class ZincProfilerImplementation extends RunProfiler {
-    private def toStringTableIndex(string: String): Long = {
+    private def toStringTableIndex(string: String): Int = {
       stringTableIndices.get(string) match {
         case Some(index) =>
-          if (index <= Integer.MAX_VALUE) {
-            val newIndex = index.toInt
-            stringTable1.apply(newIndex)
-            newIndex
-          } else {
-            val newIndex = (index - Integer.MAX_VALUE.toLong).toInt
-            stringTable2.apply(newIndex)
-            newIndex
-          }
+          val newIndex = index.toInt
+          stringTable.apply(newIndex)
+          newIndex
         case None =>
           val newIndex = lastKnownIndex + 1
-          // Depending on the size of the index, use the first or second symbol table
-          if (newIndex <= Integer.MAX_VALUE) {
-            stringTable1.insert(newIndex.toInt, string)
-          } else {
-            val newIndex2 = (newIndex - Integer.MAX_VALUE.toLong).toInt
-            stringTable2.insert(newIndex2, string)
-          }
+          // Insert the new string at the next free index of the single string table
+          stringTable.insert(newIndex.toInt, string)
           stringTableIndices.put(string, newIndex)
           lastKnownIndex = lastKnownIndex + 1
           newIndex
       }
     }
 
-    private def toStringTableIndices(strings: Iterable[String]): Iterable[Long] =
+    private def toStringTableIndices(strings: Iterable[String]): Iterable[Int] =
       strings.map(toStringTableIndex(_))
 
     private final var compilationStartNanos: Long = 0L
@@ -210,11 +199,11 @@ class ZincInvalidationProfiler extends InvalidationProfiler {
 }
 
 /**
-  * Defines the interface of a profiler. This interface is used in the guts of
-  * [[IncrementalCommon]] and [[IncrementalNameHashing]]. A profiler of a run
-  * is instantiated afresh in `Incremental.compile` and then added to the profiler
-  * instance managed by the client.
-  */
+ * Defines the interface of a profiler. This interface is used in the guts of
+ * [[IncrementalCommon]] and [[IncrementalNameHashing]]. A profiler of a run
+ * is instantiated afresh in `Incremental.compile` and then added to the profiler
+ * instance managed by the client.
+ */
 abstract class RunProfiler {
   def timeCompilation(
     startNanos: Long,