Merge branch '1.10.x' into feature/cycles-registration
eed3si9n authored Dec 5, 2024
2 parents f032a9a + 7e96c09 commit 5d17b33
Showing 14 changed files with 77 additions and 43 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/cla.yml
@@ -10,15 +10,15 @@ jobs:
AUTHOR: ${{ github.event.pull_request.user.login }}
run: |
echo "Pull request submitted by $AUTHOR";
signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed");
signed=$(curl -s "https://contribute.akka.io/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed");
if [ "$signed" = "true" ] ; then
echo "CLA check for $AUTHOR successful";
else
echo "CLA check for $AUTHOR failed";
echo "Please sign the Scala CLA to contribute to the Scala compiler.";
echo "Go to https://www.lightbend.com/contribute/cla/scala and then";
echo "Go to https://contribute.akka.io/contribute/cla/scala and then";
echo "comment on the pull request to ask for a new check.";
echo "";
echo "Check if CLA is signed: https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR";
echo "Check if CLA is signed: https://contribute.akka.io/contribute/cla/scala/check/$AUTHOR";
exit 1;
fi;
10 changes: 5 additions & 5 deletions .scala-steward.conf
@@ -2,9 +2,9 @@
# either. hopefully this is a reasonable compromise value?
pullRequests.frequency = "14 days"

updates.ignore = [
# as per discussion on sbt/zinc#1236, this is
# "if it ain't broke don't fix it" territory
{ groupId = "com.google.protobuf" },
{ groupId = "org.eclipse.jgit" }
updates.pin = [
{ groupId = "com.google.protobuf", artifactId = "protobuf-java", version="3." },
{ groupId = "com.google.protobuf", artifactId = "protoc", version="3." },
# jgit 7 requires Java 17+
{ groupId = "org.eclipse.jgit", artifactId = "org.eclipse.jgit", version="6." }
]
2 changes: 1 addition & 1 deletion build.sbt
@@ -46,7 +46,7 @@ ThisBuild / version := {
nightlyVersion match {
case Some(v) => v
case _ =>
if ((ThisBuild / isSnapshot).value) "1.10.4-SNAPSHOT"
if ((ThisBuild / isSnapshot).value) "1.10.6-SNAPSHOT"
else old
}
}
5 changes: 3 additions & 2 deletions internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala
@@ -19,6 +19,7 @@ import xsbti.api._
import scala.annotation.tailrec
import scala.tools.nsc.Global
import scala.PartialFunction.cond
import ExtractAPI.ConstructorWithDefaultArgument

/**
* Extracts full (including private members) API representation out of Symbols and Types.
@@ -836,9 +837,8 @@ class ExtractAPI[GlobalType <: Global](
constructorNameAsString(s.enclClass)
else {
val decoded = name.decode
val constructorWithDefaultArgument = "<init>\\$default\\$(\\d+)".r
decoded match {
case constructorWithDefaultArgument(index) => constructorNameAsString(s.enclClass, index)
case ConstructorWithDefaultArgument(index) => constructorNameAsString(s.enclClass, index)
case _ => decoded
}
}
@@ -866,4 +866,5 @@ class ExtractAPI[GlobalType <: Global](

object ExtractAPI {
private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0)
private val ConstructorWithDefaultArgument = "<init>\\$default\\$(\\d+)".r
}
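
Note on the ExtractAPI change above: the default-argument constructor regex is hoisted from a per-call local into the companion object, so the pattern is compiled once and reused as a match extractor. A minimal, self-contained sketch of the same idiom (not zinc's code; the names are illustrative):

```scala
import scala.util.matching.Regex

object RegexHoistingSketch {
  // Compiled once when the object is initialized, not on every call.
  // The uppercase name lets it be used directly as an extractor in a pattern match.
  private val ConstructorWithDefaultArgument: Regex = """<init>\$default\$(\d+)""".r

  def describe(decodedName: String): String = decodedName match {
    case ConstructorWithDefaultArgument(index) => s"default argument #$index of a constructor"
    case other                                 => other
  }

  def main(args: Array[String]): Unit = {
    println(describe("<init>$default$2")) // default argument #2 of a constructor
    println(describe("toString"))         // toString
  }
}
```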
@@ -100,6 +100,11 @@ static AnalysisStore sync(AnalysisStore analysisStore) {
*/
void set(AnalysisContents analysisContents);

/**
* Resets in memory cached {@link AnalysisContents}
*/
default void clearCache() {}

final class CachedAnalysisStore implements AnalysisStore {
private AnalysisStore underlying;
private Optional<AnalysisContents> lastStore = Optional.empty();
@@ -121,6 +126,10 @@ public void set(AnalysisContents analysisContents) {
underlying.set(analysisContents);
lastStore = Optional.of(analysisContents);
}

public void clearCache() {
lastStore = Optional.empty();
}
}

final class SyncedAnalysisStore implements AnalysisStore {
@@ -141,5 +150,11 @@ public void set(AnalysisContents analysisContents) {
underlying.set(analysisContents);
}
}

public void clearCache() {
if (underlying instanceof CachedAnalysisStore) {
underlying.clearCache();
}
}
}
}
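
The new clearCache() hook above is a no-op by default, CachedAnalysisStore drops its memoized AnalysisContents, and SyncedAnalysisStore forwards the call when it wraps a caching store. A hedged Scala sketch of how a caller might use it, assuming the interface's existing get(): Optional[AnalysisContents] accessor; the helper name is illustrative:

```scala
import java.util.Optional
import xsbti.compile.{ AnalysisContents, AnalysisStore }

object ClearCacheSketch {
  // Force the next get() to re-read the analysis from the underlying storage.
  // On a plain (non-caching) store the default clearCache() does nothing,
  // so this is safe to call on any AnalysisStore.
  def reloadAnalysis(store: AnalysisStore): Optional[AnalysisContents] = {
    store.clearCache() // drops the memoized AnalysisContents, if any
    store.get()        // falls through to the wrapped store, e.g. the analysis file on disk
  }
}
```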
@@ -56,7 +56,7 @@ class AnalysisFormatBenchmark {
)
writeAll(
"-ref-cbin-nosort",
ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers, sort = false),
ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers, reproducible = false),
cached
)
println("Sizes:")
@@ -94,7 +94,11 @@ class AnalysisFormatBenchmark {
bh.consume(
writeAll(
"-test-cbin-nosort",
ConsistentFileAnalysisStore.binary(_, ReadWriteMappers.getEmptyMappers, sort = false),
ConsistentFileAnalysisStore.binary(
_,
ReadWriteMappers.getEmptyMappers,
reproducible = false
),
cached
)
)
@@ -104,7 +108,7 @@ class AnalysisFormatBenchmark {
cached.foreach {
case (s, a) =>
val ser = new NullSerializer
val af = new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, sort = true)
val af = new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, reproducible = true)
af.write(ser, a.getAnalysis, a.getMiniSetup)
bh.consume(ser.count)
}
@@ -115,7 +119,8 @@ class AnalysisFormatBenchmark {
cached.foreach {
case (s, a) =>
val ser = new NullSerializer
val af = new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, sort = false)
val af =
new ConsistentAnalysisFormat(ReadWriteMappers.getEmptyMappers, reproducible = false)
af.write(ser, a.getAnalysis, a.getMiniSetup)
bh.consume(ser.count)
}
@@ -266,12 +266,12 @@ object ScalaInstance {

/** Return all the required Scala jars from a path `scalaHome`. */
def allJars(scalaHome: File): Seq[File] =
IO.listFiles(scalaLib(scalaHome)).toIndexedSeq.filter(f => !blacklist(f.getName))
IO.listFiles(scalaLib(scalaHome)).toIndexedSeq.filter(f => !excludeList(f.getName))

private[this] def scalaLib(scalaHome: File): File =
new File(scalaHome, "lib")

private[this] val blacklist: Set[String] = Set(
private[this] val excludeList: Set[String] = Set(
"scala-actors.jar",
"scalacheck.jar",
"scala-partest.jar",
@@ -36,7 +36,7 @@ import Compat._
* - Faster serialization and deserialization than the existing binary format.
* - Smaller implementation than either of the existing formats.
*/
class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, reproducible: Boolean) {
import ConsistentAnalysisFormat._

private[this] final val VERSION = 1100029
@@ -52,6 +52,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
writeAPIs(out, analysis0.apis, setup.storeApis())
writeSourceInfos(out, analysis0.infos)
// we do not read or write the Compilations
// as zinc does not use Compilations from deserialized analysis
out.int(VERSION)
out.end()
}
@@ -64,6 +65,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
val apis = readAPIs(in, setup.storeApis())
val infos = readSourceInfos(in)
// we do not read or write the Compilations
// as zinc does not use Compilations from deserialized analysis
val compilations = Compilations.of(Nil)
readVersion(in)
in.end()
@@ -76,9 +78,12 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
name: String,
map: scala.collection.Iterable[(String, V)],
perEntry: Int = 1
)(f: V => Unit): Unit =
if (sort) out.writeSortedStringMap(name, map, perEntry)(f)
)(f: V => Unit): Unit = {
// For reproducible output, need to write strings in sorted order
// otherwise strings may be written in different order resulting in different output
if (reproducible) out.writeSortedStringMap(name, map, perEntry)(f)
else out.writeColl(name, map, perEntry + 1) { kv => out.string(kv._1); f(kv._2) }
}

private[this] def readVersion(in: Deserializer): Unit = {
val ver = in.int()
@@ -158,7 +163,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
out.string(ac.provenance())
out.int(ac.extraHash())
val nh0 = ac.nameHashes()
val nh = if (nh0.length > 1 && sort) {
val nh = if (nh0.length > 1 && reproducible) {
val nh = nh0.clone()
Arrays.sort(nh, nameHashComparator)
nh
@@ -199,14 +204,17 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
}

private[this] def writeAPIs(out: Serializer, apis: APIs, storeApis: Boolean): Unit = {
def write(n: String, m: Map[String, AnalyzedClass]): Unit =
def write(n: String, m: Map[String, AnalyzedClass]): Unit = {
writeMaybeSortedStringMap(
out,
n,
m.mapValues(_.withCompilationTimestamp(DefaultCompilationTimestamp))
if (reproducible) m.mapValues(_.withCompilationTimestamp(DefaultCompilationTimestamp))
else m
) { ac =>
writeAnalyzedClass(out, ac, storeApis)
}
}

write("internal", apis.internal)
write("external", apis.external)
}
@@ -353,7 +361,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
rel.forwardMap.view.map { case (k, vs) => kf(k) -> vs }
) { vs =>
val a = vs.iterator.map(vf).toArray
if (sort) Arrays.sort(a, implicitly[Ordering[String]])
if (reproducible) Arrays.sort(a, implicitly[Ordering[String]])
out.writeColl("item", a)(out.string)
}
def wrS(name: String, rel: Relation[String, String]): Unit =
@@ -428,11 +436,11 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
if (sc.contains(UseScope.PatMatTarget)) i += 4
(un.name, i.toByte)
}.toArray.groupBy(_._2)
val groups = if (sort) groups0.toVector.sortBy(_._1) else groups0
val groups = if (reproducible) groups0.toVector.sortBy(_._1) else groups0
out.writeColl("groups", groups, 2) { case (g, gNames) =>
out.byte(g)
val names = gNames.map(_._1)
if (sort) Arrays.sort(names, implicitly[Ordering[String]])
if (reproducible) Arrays.sort(names, implicitly[Ordering[String]])
out.writeStringColl("names", names)
}
}
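
The sort parameter of ConsistentAnalysisFormat is renamed to reproducible throughout the diff above, which better describes what the flag buys: when set, string-keyed maps, relation values, and name-hash groups are written in sorted order (and writeAPIs normalizes compilation timestamps), so equal analysis data serializes to byte-identical output regardless of map iteration order. A small standalone sketch of that idea (not zinc's serializer; the names are illustrative):

```scala
object ReproducibleWriteSketch {

  // Serialize a string-keyed map one entry per line. With reproducible = true the keys are
  // sorted first, so the output no longer depends on the map's iteration (insertion) order.
  def render[V](map: Map[String, V], reproducible: Boolean): String = {
    val entries = if (reproducible) map.toSeq.sortBy(_._1) else map.toSeq
    entries.map { case (k, v) => s"$k=$v" }.mkString("\n")
  }

  def main(args: Array[String]): Unit = {
    val built1 = Map("b" -> 2, "a" -> 1) // small Scala maps iterate in insertion order
    val built2 = Map("a" -> 1, "b" -> 2)
    println(render(built1, reproducible = false) == render(built2, reproducible = false)) // false
    println(render(built1, reproducible = true) == render(built2, reproducible = true))   // true
  }
}
```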
@@ -35,12 +35,12 @@ object ConsistentFileAnalysisStore {
def text(
file: File,
mappers: ReadWriteMappers,
sort: Boolean = true,
reproducible: Boolean = true,
parallelism: Int = Runtime.getRuntime.availableProcessors()
): XAnalysisStore =
new AStore(
file,
new ConsistentAnalysisFormat(mappers, sort),
new ConsistentAnalysisFormat(mappers, reproducible),
SerializerFactory.text,
parallelism
)
@@ -49,7 +49,7 @@
binary(
file,
mappers = ReadWriteMappers.getEmptyMappers(),
sort = true,
reproducible = true,
)

def binary(
@@ -59,18 +59,18 @@
binary(
file,
mappers,
sort = true,
reproducible = true,
)

def binary(
file: File,
mappers: ReadWriteMappers,
sort: Boolean,
reproducible: Boolean,
parallelism: Int = Runtime.getRuntime.availableProcessors()
): XAnalysisStore =
new AStore(
file,
new ConsistentAnalysisFormat(mappers, sort),
new ConsistentAnalysisFormat(mappers, reproducible),
SerializerFactory.binary,
parallelism
)
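
The ConsistentFileAnalysisStore factories above expose the renamed flag as well; callers that passed sort by name must switch to reproducible. A hedged usage sketch against the signatures shown in this diff; the import's package and the file path are assumptions, not confirmed by the commit:

```scala
import java.io.File

// Package assumed from zinc's source layout; adjust to wherever ConsistentFileAnalysisStore lives.
import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
import xsbti.compile.analysis.ReadWriteMappers

object ConsistentStoreUsageSketch {
  // Hypothetical analysis file location.
  private val analysisFile = new File("target/inc_compile_consistent.zip")

  // reproducible = true (the old sort = true) writes sorted, timestamp-normalized output,
  // so repeated compilations of identical sources produce byte-identical analysis files.
  val store = ConsistentFileAnalysisStore.binary(
    file = analysisFile,
    mappers = ReadWriteMappers.getEmptyMappers(),
    reproducible = true,
    parallelism = Runtime.getRuntime.availableProcessors()
  )
}
```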
4 changes: 2 additions & 2 deletions project/Dependencies.scala
@@ -15,8 +15,8 @@ object Dependencies {
val compilerBridgeVersions = Seq(scala212, scala210, scala211, scala213)
val scala212_213 = Seq(defaultScalaVersion, scala213)

private val ioVersion = nightlyVersion.getOrElse("1.10.1")
private val utilVersion = nightlyVersion.getOrElse("1.10.5")
private val ioVersion = nightlyVersion.getOrElse("1.10.2")
private val utilVersion = nightlyVersion.getOrElse("1.10.6")

private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version=1.10.5
sbt.version=1.10.6
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -9,6 +9,6 @@ addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.10.0")
addSbtPlugin("com.github.sbt" % "sbt-protobuf" % "0.8.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.4")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.10.0")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.10.1")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.2.0")
addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.2")
@@ -507,7 +507,7 @@ object MixedAnalyzingCompiler {
useTextAnalysis = useTextAnalysis,
useConsistent = false,
mappers = ReadWriteMappers.getEmptyMappers(),
sort = true,
reproducible = true,
parallelism = Runtime.getRuntime.availableProcessors(),
)

@@ -516,7 +516,7 @@
useTextAnalysis: Boolean,
useConsistent: Boolean,
mappers: ReadWriteMappers,
sort: Boolean,
reproducible: Boolean,
parallelism: Int,
): AnalysisStore = {
val fileStore = (useTextAnalysis, useConsistent) match {
@@ -526,7 +526,7 @@
ConsistentFileAnalysisStore.binary(
file = analysisFile.toFile,
mappers = mappers,
sort = sort,
reproducible = reproducible,
parallelism = parallelism,
)
case (true, false) =>
@@ -535,7 +535,7 @@
ConsistentFileAnalysisStore.text(
file = analysisFile.toFile,
mappers = mappers,
sort = sort,
reproducible = reproducible,
parallelism = parallelism,
)
}
@@ -32,6 +32,7 @@ import xsbti.{

import sbt.util.InterfaceUtil
import sbt.util.Logger
import xsbt.Log.debug

/**
* Define a Java compiler that reports on any discovered source dependencies or
@@ -165,18 +166,19 @@ final class AnalyzingJavaCompiler private[sbt] (
options,
scalaInstance,
classpathOptions
)
).toArray
val javaSources: Array[VirtualFile] =
sources.sortBy(_.id).toArray
debug(log, prettyPrintCompilationArguments(args))
val success =
javac.run(javaSources, args.toArray, output, incToolOptions, reporter, log)
javac.run(javaSources, args, output, incToolOptions, reporter, log)
if (!success) {
/* Assume that no Scalac problems are reported for a Javac-related
* reporter. This relies on the incremental compiler will not run
* Javac compilation if Scala compilation fails, which means that
* the same reporter won't be used for `AnalyzingJavaCompiler`. */
val msg = "javac returned non-zero exit code"
throw new CompileFailed(args.toArray, msg, reporter.problems())
throw new CompileFailed(args, msg, reporter.problems())
}
}

@@ -264,4 +266,7 @@ final class AnalyzingJavaCompiler private[sbt] (
log.debug(label + " took " + (elapsed / 1e9) + " s")
result
}

private def prettyPrintCompilationArguments(args: Array[String]) =
args.mkString("[zinc] The Java compiler is invoked with:\n\t", "\n\t", "")
}
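
The new prettyPrintCompilationArguments helper above is plain mkString with a header line and tab-indented arguments, emitted at debug level just before javac.run. A tiny standalone sketch of the formatting it produces (the argument values are made up):

```scala
object JavacArgsLoggingSketch {
  // Same formatting as the helper added above: a header line followed by one argument
  // per line, each indented with a tab, which keeps long javac invocations readable in logs.
  private def prettyPrintCompilationArguments(args: Array[String]): String =
    args.mkString("[zinc] The Java compiler is invoked with:\n\t", "\n\t", "")

  def main(args: Array[String]): Unit = {
    val javacArgs = Array("-classpath", "/tmp/classes", "-d", "/tmp/out", "-g") // hypothetical
    println(prettyPrintCompilationArguments(javacArgs))
    // [zinc] The Java compiler is invoked with:
    //     -classpath
    //     /tmp/classes
    //     -d
    //     /tmp/out
    //     -g
  }
}
```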
