Some fixes
szeiger committed Jan 9, 2024
1 parent d08b03c commit cf4896e
Showing 6 changed files with 81 additions and 55 deletions.
@@ -28,7 +28,7 @@ import Compat._
class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
import ConsistentAnalysisFormat._

private[this] final val VERSION = 1000028
private[this] final val VERSION = 1100028
private[this] final val readMapper = mappers.getReadMapper
private[this] final val writeMapper = mappers.getWriteMapper

@@ -44,22 +44,17 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
out.end()
}

def read(in: Deserializer): (CompileAnalysis, MiniSetup) =
try {
readVersion(in)
val setup = readMiniSetup(in)
val relations = readRelations(in)
val stamps = readStamps(in)
val apis = readAPIs(in, setup.storeApis())
val infos = readSourceInfos(in)
readVersion(in)
in.end()
(Analysis.Empty.copy(stamps, apis, relations, infos, Compilations.of(Nil)), setup)
} catch {
case t: Throwable =>
t.printStackTrace() // TODO
throw t
}
def read(in: Deserializer): (CompileAnalysis, MiniSetup) = {
readVersion(in)
val setup = readMiniSetup(in)
val relations = readRelations(in)
val stamps = readStamps(in)
val apis = readAPIs(in, setup.storeApis())
val infos = readSourceInfos(in)
readVersion(in)
in.end()
(Analysis.Empty.copy(stamps, apis, relations, infos, Compilations.of(Nil)), setup)
}

@inline
private[this] final def writeMaybeSortedStringMap[V](
@@ -172,7 +167,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
val hm = in.bool()
val p = in.string()
val nhNames = in.readStringArray()
val nhScopes = in.readArray[UseScope]() { UseScope.values()(in.byte()) }
val nhScopes = in.readArray[UseScope]() { UseScope.values()(in.byte().toInt) }
val nhHashes = in.readArray[Int]() { in.int() }
val nameHashes = new Array[NameHash](nhNames.length)
var i = 0
@@ -189,7 +184,11 @@

private[this] def writeAPIs(out: Serializer, apis: APIs, storeApis: Boolean): Unit = {
def write(n: String, m: Map[String, AnalyzedClass]): Unit =
writeMaybeSortedStringMap(out, n, m.mapValues(_.withCompilationTimestamp(-1L))) { ac =>
writeMaybeSortedStringMap(
out,
n,
m.mapValues(_.withCompilationTimestamp(DefaultCompilationTimestamp))
) { ac =>
writeAnalyzedClass(out, ac, storeApis)
}
write("internal", apis.internal)
@@ -304,7 +303,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
val scalacOptions = in.readArray() { readMapper.mapScalacOption(in.string()) }
val javacOptions = in.readArray() { readMapper.mapJavacOption(in.string()) }
val compilerVersion = in.string()
val compileOrder = CompileOrder.values()(in.byte())
val compileOrder = CompileOrder.values()(in.byte().toInt)
val skipApiStoring = in.bool()
val extra = in.readArray(2) { InterfaceUtil.t2(in.string() -> in.string()) }
readMapper.mapMiniSetup(MiniSetup.of(
@@ -414,7 +413,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
private[this] def readUsedNameSet(in: Deserializer): Set[UsedName] = {
in.readBlock {
val data = in.readColl[Vector[UsedName], Vector[Vector[UsedName]]](Vector, 2) {
val i = in.byte()
val i = in.byte().toInt
val names = in.readStringSeq()
names.iterator.map { n => UsedName(n, useScopes(i)) }.toVector
}
@@ -440,7 +439,7 @@
private[this] def readClassLike(in: Deserializer): ClassLike = in.readBlock {
val name = in.string()
val access = readAccess(in)
val modifiers = InternalApiProxy.Modifiers(in.byte())
val modifiers = InternalApiProxy.Modifiers(in.byte().toInt)
val annotations = in.readArray[Annotation]()(readAnnotation(in))
val definitionType = readDefinitionType(in)
val selfType = SafeLazyProxy.strict(readType(in))
@@ -508,7 +507,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
out.byte(dt.ordinal().toByte)

private[this] def readDefinitionType(in: Deserializer): DefinitionType =
DefinitionType.values()(in.byte())
DefinitionType.values()(in.byte().toInt)

private[this] def writeTypeParameter(out: Serializer, tp: TypeParameter): Unit =
out.writeBlock("TypeParameter") {
@@ -525,7 +524,7 @@
in.string(),
in.readArray[Annotation]()(readAnnotation(in)),
in.readArray[TypeParameter]()(readTypeParameter(in)),
Variance.values()(in.byte()),
Variance.values()(in.byte().toInt),
readType(in),
readType(in)
)
@@ -639,7 +638,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
private[this] def readClassDefinition(in: Deserializer): ClassDefinition = in.readBlock {
val name = in.string()
val access = readAccess(in)
val modifiers = InternalApiProxy.Modifiers(in.byte())
val modifiers = InternalApiProxy.Modifiers(in.byte().toInt)
val annotations = in.readArray[Annotation]()(readAnnotation(in))
in.byte() match {
case 0 => ClassLikeDef.of(
@@ -699,7 +698,7 @@ class ConsistentAnalysisFormat(val mappers: ReadWriteMappers, sort: Boolean) {
in.string(),
readType(in),
in.bool(),
ParameterModifier.values()(in.byte())
ParameterModifier.values()(in.byte().toInt)
)
},
in.bool()
@@ -777,14 +776,16 @@ object ConsistentAnalysisFormat {
private final val ThisQualifierSingleton = ThisQualifier.of()
private final val UnqualifiedSingleton = Unqualified.of()
private final val PublicSingleton = Public.of()

private final val useScopes: Array[EnumSet[UseScope]] = Array.tabulate(8) { i =>
val e = EnumSet.noneOf(classOf[UseScope])
if ((i & 1) != 0) e.add(UseScope.Default)
if ((i & 2) != 0) e.add(UseScope.Implicit)
if ((i & 4) != 0) e.add(UseScope.PatMatTarget)
e
}
private final val DefaultCompilationTimestamp: Long = 1262304042000L // 2010-01-01T00:00:42Z
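// Aside (not part of this commit): 1262304042000L milliseconds since the Unix
// epoch is indeed 2010-01-01T00:00:42Z, e.g.
//   java.time.Instant.ofEpochMilli(1262304042000L) ==
//     java.time.Instant.parse("2010-01-01T00:00:42Z")  // true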

private final val useScopes: Array[EnumSet[UseScope]] =
Array.tabulate(8) { i =>
val e = EnumSet.noneOf(classOf[UseScope])
if ((i & 1) != 0) e.add(UseScope.Default)
if ((i & 2) != 0) e.add(UseScope.Implicit)
if ((i & 4) != 0) e.add(UseScope.PatMatTarget)
e
}

private final val nameHashComparator: Comparator[NameHash] = new Comparator[NameHash] {
def compare(o1: NameHash, o2: NameHash): Int = {
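Aside, not part of the diff: the useScopes table above decodes a 3-bit mask (bit 0 = Default, bit 1 = Implicit, bit 2 = PatMatTarget) into an EnumSet[UseScope]. The corresponding write-side encoding is not visible in this hunk; a minimal sketch of what it presumably looks like, assuming the UseScope import already present in this file:

    def scopeIndex(s: java.util.EnumSet[UseScope]): Int = {
      var i = 0
      if (s.contains(UseScope.Default)) i |= 1      // bit 0
      if (s.contains(UseScope.Implicit)) i |= 2     // bit 1
      if (s.contains(UseScope.PatMatTarget)) i |= 4 // bit 2
      i                                             // index into useScopes
    }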
@@ -14,24 +14,51 @@ package sbt.internal.inc.consistent

import java.io.{ File, FileInputStream, FileOutputStream }
import java.util.Optional

import sbt.io.{ IO, Using }
import xsbti.compile.{ AnalysisContents, AnalysisStore => XAnalysisStore }
import scala.util.control.Exception.allCatch

import scala.util.control.Exception.allCatch
import xsbti.compile.analysis.ReadWriteMappers

import scala.concurrent.ExecutionContext

object ConsistentFileAnalysisStore {
def text(file: File, mappers: ReadWriteMappers, sort: Boolean = true): XAnalysisStore =
new AStore(file, new ConsistentAnalysisFormat(mappers, sort), SerializerFactory.text)
def text(
file: File,
mappers: ReadWriteMappers,
sort: Boolean = true,
ec: ExecutionContext = ExecutionContext.global,
parallelism: Int = Runtime.getRuntime.availableProcessors()
): XAnalysisStore =
new AStore(
file,
new ConsistentAnalysisFormat(mappers, sort),
SerializerFactory.text,
ec,
parallelism
)

def binary(file: File, mappers: ReadWriteMappers, sort: Boolean = true): XAnalysisStore =
new AStore(file, new ConsistentAnalysisFormat(mappers, sort), SerializerFactory.binary)
def binary(
file: File,
mappers: ReadWriteMappers,
sort: Boolean = true,
ec: ExecutionContext = ExecutionContext.global,
parallelism: Int = Runtime.getRuntime.availableProcessors()
): XAnalysisStore =
new AStore(
file,
new ConsistentAnalysisFormat(mappers, sort),
SerializerFactory.binary,
ec,
parallelism
)

private final class AStore[S <: Serializer, D <: Deserializer](
file: File,
format: ConsistentAnalysisFormat,
sf: SerializerFactory[S, D]
sf: SerializerFactory[S, D],
ec: ExecutionContext = ExecutionContext.global,
parallelism: Int = Runtime.getRuntime.availableProcessors()
) extends XAnalysisStore {

def set(analysisContents: AnalysisContents): Unit = {
@@ -41,7 +68,7 @@ object ConsistentFileAnalysisStore {
if (!file.getParentFile.exists()) file.getParentFile.mkdirs()
val fout = new FileOutputStream(tmpAnalysisFile)
try {
val gout = new ParallelGzipOutputStream(fout)
val gout = new ParallelGzipOutputStream(fout, ec, parallelism)
val ser = sf.serializerFor(gout)
format.write(ser, analysis, setup)
gout.close()
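Aside, not part of the diff: a hypothetical call site for the extended factory methods above. The file path and parallelism value are made up for illustration; omitting ec and parallelism falls back to ExecutionContext.global and Runtime.getRuntime.availableProcessors(), per the defaults shown above.

    import java.io.File
    import scala.concurrent.ExecutionContext
    import xsbti.compile.analysis.ReadWriteMappers

    val store = ConsistentFileAnalysisStore.binary(
      new File("target/inc_compile.zip"), // hypothetical path
      ReadWriteMappers.getEmptyMappers,
      sort = true,
      ec = ExecutionContext.global,
      parallelism = 4
    )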
@@ -50,17 +50,14 @@ object ParallelGzipOutputStream {
private val header = Array[Byte](0x1f.toByte, 0x8b.toByte, Deflater.DEFLATED, 0, 0, 0, 0, 0, 0, 0)
}

final class ParallelGzipOutputStream(
out: OutputStream,
ec: ExecutionContext = ExecutionContext.global, // NOLINT
parallelism: Int = Runtime.getRuntime.availableProcessors()
) extends FilterOutputStream(out) {
final class ParallelGzipOutputStream(out: OutputStream, ec: ExecutionContext, parallelism: Int)
extends FilterOutputStream(out) {
import ParallelGzipOutputStream._

private final val crc = new CRC32
private final val queueLimit = parallelism * 3
private final val pending =
mutable.Queue.empty[Future[Block]] // new mutable.ArrayDeque[Future[Block]](queueLimit)
// preferred on 2.13: new mutable.ArrayDeque[Future[Block]](queueLimit)
private final val pending = mutable.Queue.empty[Future[Block]]
private var current: Block = new Block
private var free: Block = _
private var total = 0L
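Aside, not part of the diff: ParallelGzipOutputStream now takes the ExecutionContext and parallelism explicitly. Its output is ordinary gzip, so it can presumably be read back with java.util.zip.GZIPInputStream (the test suite further below does a round trip of this kind); a minimal sketch:

    import java.io.{ ByteArrayInputStream, ByteArrayOutputStream }
    import java.util.zip.GZIPInputStream
    import scala.concurrent.ExecutionContext

    val bout = new ByteArrayOutputStream()
    val gout = new ParallelGzipOutputStream(bout, ExecutionContext.global, parallelism = 4)
    gout.write("some analysis data".getBytes("UTF-8"))
    gout.close()
    val gin = new GZIPInputStream(new ByteArrayInputStream(bout.toByteArray))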
@@ -228,14 +228,14 @@ class TextSerializer(out: Writer) extends Serializer {
case '\n' => out.write("\\n")
case '\r' => out.write("\\r")
case '\\' => out.write("\\\\")
case c => out.write(c)
case c => out.write(c.toInt)
}
out.write('\n')
}
}
def bool(b: Boolean): Unit = long(if (b) 1 else 0)
def int(i: Int): Unit = long(i)
def byte(b: Byte): Unit = long(b)
def int(i: Int): Unit = long(i.toLong)
def byte(b: Byte): Unit = long(b.toLong)
def long(l: Long): Unit = {
count()
printIndent()
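Aside, not part of the diff: java.io.Writer has no write(Char) overload, so the explicit .toInt above selects Writer.write(int) instead of relying on implicit numeric widening; likewise Int and Byte values are widened to Long explicitly before being passed to long(...). For example:

    val w: java.io.Writer = new java.io.StringWriter
    w.write('x'.toInt) // resolves to Writer.write(int)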
@@ -42,7 +42,7 @@ class ConsistentAnalysisFormatIntegrationSuite extends AnyFunSuite {
val api2 = read(ConsistentFileAnalysisStore.binary(f1, ReadWriteMappers.getEmptyMappers))
val f2 = write("cbin2.zip", api2, sort = false)
val api3 = read(ConsistentFileAnalysisStore.binary(f2, ReadWriteMappers.getEmptyMappers))
val f3 = write("cbin3.zip", api2)
val f3 = write("cbin3.zip", api3)
assert(Arrays.equals(IO.readBytes(f1), IO.readBytes(f3)), s"same output for $d")
}
}
@@ -14,9 +14,10 @@ import scala.util.Random
import org.scalatest.funsuite.AnyFunSuite
import sbt.internal.inc.consistent._
import sbt.io.IO

import Compat._

import scala.concurrent.ExecutionContext

class ConsistentAnalysisFormatSuite extends AnyFunSuite {

def writeTo(out: Serializer): Unit = {
@@ -99,7 +100,7 @@ class ConsistentAnalysisFormatSuite extends AnyFunSuite {
val a = new Array[Byte](size)
rnd.nextBytes(a)
val bout = new ByteArrayOutputStream()
val gout = new ParallelGzipOutputStream(bout, parallelism = threads)
val gout = new ParallelGzipOutputStream(bout, ExecutionContext.global, parallelism = threads)
gout.write(a)
gout.close()
val gin =