From ecc332fe6feccb893226c5705038772fb36b9f9b Mon Sep 17 00:00:00 2001 From: Wojciech Mazur Date: Tue, 22 Oct 2024 18:56:56 +0200 Subject: [PATCH] Scala 2 forwardport: `-Yprofile-trace` (#19897) * Scala 2 tracing profiler backport from https://github.com/scala/scala/pull/7364 extended with more Scala 3 idiomatic syntax based on inlined methods * Fixes the `context.profiler` which could have been `null`, now it's initially a NoOp Profiler * Check dependencies of `-Yprofile-enabled` dependent tasks, now we get an error if `-Yprofile-trace` is set without `-Yprofile-enabled` --- compiler/src/dotty/tools/dotc/Compiler.scala | 5 +- compiler/src/dotty/tools/dotc/Run.scala | 7 +- .../tools/dotc/config/ScalaSettings.scala | 10 +- .../dotty/tools/dotc/config/Settings.scala | 15 +- .../src/dotty/tools/dotc/core/Contexts.scala | 1 + .../src/dotty/tools/dotc/core/Phases.scala | 2 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 12 +- .../tools/dotc/profile/ChromeTrace.scala | 190 +++++++++++++ .../dotty/tools/dotc/profile/FileUtils.scala | 204 ++++++++++++++ .../dotty/tools/dotc/profile/Profiler.scala | 256 ++++++++++++++---- .../dotc/profile/ThreadPoolFactory.scala | 4 +- .../dotty/tools/dotc/transform/Splicer.scala | 5 +- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 8 +- .../dotty/tools/dotc/typer/TyperPhase.scala | 2 +- .../tools/dotc/profile/ChromeTraceTest.scala | 93 +++++++ .../tools/dotc/profile/FileUtilsTest.scala | 91 +++++++ .../pc/completions/CompletionProvider.scala | 6 +- 18 files changed, 835 insertions(+), 78 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala create mode 100644 compiler/src/dotty/tools/dotc/profile/FileUtils.scala create mode 100644 compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala create mode 100644 compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index f9503e4f8554..d8ba1ab5dc2e 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -152,7 +152,10 @@ class Compiler { List(new GenBCode) :: // Generate JVM bytecode Nil - var runId: Int = 1 + // TODO: Initially 0, so that the first nextRunId call would return InitialRunId == 1 + // Changing the initial runId from 1 to 0 makes the scala2-library-bootstrap fail to compile, + // when the underlying issue is fixed, please update dotc.profiler.RealProfiler.chromeTrace logic + private var runId: Int = 1 def nextRunId: Int = { runId += 1; runId } diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 11a0430480d9..50fd668c7696 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -339,10 +339,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - try units = phase.runOn(units) - catch case _: InterruptedException => cancelInterrupted() - profiler.afterPhase(phase, profileBefore) + profiler.onPhase(phase): + try units = phase.runOn(units) + catch case _: InterruptedException => cancelInterrupted() if (ctx.settings.Xprint.value.containsPhase(phase)) for (unit <- units) def printCtx(unit: CompilationUnit) = phase.printingContext( diff --git 
a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 72a051ea8154..6ef33d24f8be 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -441,12 +441,10 @@ private sealed trait YSettings: val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "", depends = List(YprofileEnabled -> true)) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer", depends = List(YprofileEnabled -> true)) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_", depends = List(YprofileEnabled -> true)) + val YprofileTrace: Setting[String] = StringSetting(ForkSetting, "Yprofile-trace", "file", s"Capture trace of compilation in JSON Chrome Trace format to the specified file. This option requires ${YprofileEnabled.name}. The output file can be visualized using https://ui.perfetto.dev/.", "", depends = List(YprofileEnabled -> true)) val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. 
If such file is used, the compiler will stop after the pickler phase.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a5fc6a64aa45..f85f2cc57de4 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -73,6 +73,11 @@ object Settings: def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") + /** List of setting-value pairs that are required for another setting to be valid. + * For example, `s = Setting(..., depends = List(YprofileEnabled -> true))` + * means that `s` requires `YprofileEnabled` to be set to `true`. + */ + type SettingDependencies = List[(Setting[?], Any)] case class Setting[T: ClassTag] private[Settings] ( category: SettingCategory, @@ -83,7 +88,7 @@ object Settings: choices: Option[Seq[?]] = None, prefix: Option[String] = None, aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, + depends: SettingDependencies = Nil, ignoreInvalidArgs: Boolean = false, preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, @@ -385,8 +390,8 @@ object Settings: def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation, depends = depends)) def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) @@ -412,8 +417,8 @@ object Settings: def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", 
aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation, depends = depends)) def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = val name = prependName(name0) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 388720e7f3f4..d69c7408d0b1 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -769,6 +769,7 @@ object Contexts { .updated(settingsStateLoc, settingsGroup.defaultState) .updated(notNullInfosLoc, Nil) .updated(compilationUnitLoc, NoCompilationUnit) + .updated(profilerLoc, Profiler.NoOp) c._searchHistory = new SearchRoot c._gadtState = GadtState(GadtConstraint.empty) c diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 85df3f9f2c18..015cf6fc0f2c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -370,7 +370,7 @@ object Phases { // Test that we are in a state where we need to check if the phase should be skipped for a java file, // this prevents checking the expensive `unit.typedAsJava` unnecessarily. val doCheckJava = skipIfJava && !isAfterLastJavaPhase - for unit <- units do + for unit <- units do ctx.profiler.onUnit(this, unit): given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 51e6a5e6138a..5690720a1b3f 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -8,7 +8,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} -import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } +import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile, NoAbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* @@ -333,7 +333,15 @@ abstract class SymbolLoader extends LazyType { self => def description(using Context): String = s"proxy to ${self.description}" } - override def complete(root: SymDenotation)(using Context): Unit = { + private inline def profileCompletion[T](root: SymDenotation)(inline body: T)(using Context): T = { + val sym = root.symbol + def associatedFile = root.symbol.associatedFile match + case file: AbstractFile => file + case _ => NoAbstractFile + ctx.profiler.onCompletion(sym, associatedFile)(body) + } + + override def complete(root: SymDenotation)(using Context): Unit = profileCompletion(root) { def signalError(ex: Exception): Unit = { if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() diff --git a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala new file mode 100644 index 000000000000..c33039f46398 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala @@ -0,0 +1,190 @@ +// 
Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.Closeable +import java.lang.management.ManagementFactory +import java.nio.file.{Files, Path} +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +object ChromeTrace { + private object EventType { + final val Start = "B" + final val Instant = "I" + final val End = "E" + final val Complete = "X" + + final val Counter = "C" + + final val AsyncStart = "b" + final val AsyncInstant = "n" + final val AsyncEnd = "e" + } +} + +/** Allows writing a subset of captrue traces based on https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview# + * Can be visualized using https://ui.perfetto.dev/, Chrome's about://tracing (outdated) or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */ +final class ChromeTrace(f: Path) extends Closeable { + import ChromeTrace.EventType + private val traceWriter = FileUtils.newAsyncBufferedWriter(f) + private val context = mutable.Stack[JsonContext](TopContext) + private val tidCache = new ThreadLocal[String]() { + override def initialValue(): String = "%05d".format(Thread.currentThread().getId()) + } + objStart() + fld("traceEvents") + context.push(ValueContext) + arrStart() + traceWriter.newLine() + + private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + + override def close(): Unit = { + arrEnd() + objEnd() + context.pop() + tidCache.remove() + traceWriter.close() + } + + def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = { + val durationMicros = nanosToMicros(durationNanos) + val startMicros = nanosToMicros(startNanos) + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Complete) + str("tid", tid) + writePid(pidSuffix) + lng("ts", startMicros) + lng("dur", durationMicros) + objEnd() + traceWriter.newLine() + } + + private def writePid(pidSuffix: String) = { + if (pidSuffix == "") + str("pid", pid) + else + str2("pid", pid, "-", pidSuffix) + } + + def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = { + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Counter) + str("tid", tid()) + writePid(pidSuffix = if (processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + 
writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + private sealed abstract class JsonContext + private case class ArrayContext(var first: Boolean) extends JsonContext + private case class ObjectContext(var first: Boolean) extends JsonContext + private case object ValueContext extends JsonContext + private case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/FileUtils.scala b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala new file mode 100644 index 000000000000..4aec428c05bf --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala @@ -0,0 +1,204 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 + +/* +* Scala (https://www.scala-lang.org) +* +* Copyright EPFL and Lightbend, Inc. +* +* Licensed under Apache License 2.0 +* (http://www.apache.org/licenses/LICENSE-2.0). +* +* See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} +import scala.annotation.internal.sharable + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8.nn, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + @sharable private val Close = CharBuffer.allocate(0) + @sharable private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= 
capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, offset, offset + capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + def newLine(): Unit = write(scala.util.Properties.lineSeparator) + + /** slightly breaks the flush contract in that the flush is not complete when the method returns */ + override def flush(): Unit = { + flushAsync() + } + + override def close(): Unit = { + background.ensureProcessed(current) + background.ensureProcessed(AsyncBufferedWriter.Close) + current = null + Await.result(background.asyncStatus.future, Duration.Inf) + underlying.close() + } + private object background extends Runnable{ + + import scala.concurrent.ExecutionContext.Implicits.global + + private val pending = new LinkedBlockingQueue[CharBuffer] + //a failure detected will case an Failure, Success indicates a close + val asyncStatus = Promise[Unit]() + private val scheduled = new AtomicBoolean + @volatile var reuseBuffer: CharBuffer = _ + + def ensureProcessed(buffer: CharBuffer): Unit = { + if (asyncStatus.isCompleted) { + asyncStatus.future.value.get match { + case Success(()) => throw new IllegalStateException("closed") + case Failure(t) => throw new IOException("async failure", t) + } + } + + //order is essential - add to the queue before the CAS + pending.add(buffer) + if (scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + + def run(): Unit = { + try { + while (!pending.isEmpty) { + val next = pending.poll() + if (next eq AsyncBufferedWriter.Flush) { + underlying.flush() + } else if (next eq AsyncBufferedWriter.Close) { + underlying.flush() + underlying.close() + asyncStatus.trySuccess(()) + } else { + val array = next.array() + next.flip() + underlying.write(array, next.arrayOffset() + next.position(), next.limit()) + reuseBuffer = next + } + } + } catch { + case t: Throwable => + asyncStatus.tryFailure(t) + throw t + } + finally scheduled.set(false) + + //we are not scheduled any more + //as a last check ensure that we didnt race with an addition to the queue + //order is essential - queue is checked before CAS + if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index a13c9d41b529..69a806215ddd 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -4,6 +4,7 @@ import scala.annotation.* import scala.language.unsafeNulls import java.io.{FileWriter, PrintWriter} +import java.nio.file.Paths import java.lang.management.{ManagementFactory, GarbageCollectorMXBean, RuntimeMXBean, MemoryMXBean, ClassLoadingMXBean, CompilationMXBean} import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger @@ -12,8 +13,15 @@ import javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Symbols.{Symbol, NoSymbol} +import dotty.tools.dotc.core.Flags +import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import dotty.tools.io.AbstractFile import annotation.internal.sharable +import dotty.tools.dotc.core.Periods.InitialRunId +import scala.collection.mutable.UnrolledBuffer object Profiler { def 
apply()(using Context): Profiler = @@ -25,14 +33,19 @@ object Profiler { new RealProfiler(reporter) } - private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + final def NoOp: Profiler = NoOpProfiler + + private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long){ + val endNanos = System.nanoTime() +} case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + allocatedBytes:Long, heapBytes:Long, + totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) } @@ -66,22 +79,63 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) } +private opaque type TracedEventId <: String = String +private object TracedEventId: + def apply(stringValue: String): TracedEventId = stringValue + final val Empty: TracedEventId = "" + sealed trait Profiler { def finished(): Unit - def beforePhase(phase: Phase): ProfileSnap - - def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + inline def onPhase[T](phase: Phase)(inline body: T): T = + val (event, snapshot) = beforePhase(phase) + try body + finally afterPhase(event, phase, snapshot) + protected final val EmptyPhaseEvent = (TracedEventId.Empty, Profiler.emptySnap) + protected def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = EmptyPhaseEvent + protected def afterPhase(event: TracedEventId, phase: Phase, profileBefore: ProfileSnap): Unit = () + + inline def onUnit[T](phase: Phase, unit: CompilationUnit)(inline body: T): T = + val event = beforeUnit(phase, unit) + try body + finally afterUnit(event) + protected def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = TracedEventId.Empty + protected def afterUnit(event: TracedEventId): Unit = () + + inline def onTypedDef[T](sym: Symbol)(inline body: T): T = + val event = beforeTypedDef(sym) + try body + finally afterTypedDef(event) + protected def beforeTypedDef(sym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterTypedDef(token: TracedEventId): Unit = () + + inline def onImplicitSearch[T](pt: Type)(inline body: T): T = + val event = beforeImplicitSearch(pt) + try body + finally afterImplicitSearch(event) + protected def beforeImplicitSearch(pt: Type): TracedEventId = TracedEventId.Empty + protected def afterImplicitSearch(event: TracedEventId): Unit = () + + inline def onMacroSplice[T](macroSym: Symbol)(inline body: T): T = + val event = beforeMacroSplice(macroSym) + try body + finally afterMacroSplice(event) + protected def beforeMacroSplice(macroSym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterMacroSplice(event: TracedEventId): Unit = () + + inline def onCompletion[T](root: Symbol, associatedFile: => AbstractFile)(inline body: T): T = + val (event, completionName) = beforeCompletion(root, associatedFile) + try body + finally afterCompletion(event, completionName) + protected final val EmptyCompletionEvent = (TracedEventId.Empty, "") + protected def 
beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = EmptyCompletionEvent + protected def afterCompletion(event: TracedEventId, completionName: String): Unit = () } private [profile] object NoOpProfiler extends Profiler { - - override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () - override def finished(): Unit = () } + private [profile] object RealProfiler { import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean @@ -92,17 +146,6 @@ private [profile] object RealProfiler { val threadMx: ExtendedThreadMxBean = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() -} - -private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { - def completeBackground(threadRange: ProfileRange): Unit = - reporter.reportBackground(this, threadRange) - - def outDir: AbstractFile = ctx.settings.outputDir.value - - val id: Int = RealProfiler.idGen.incrementAndGet() - - private val mainThread = Thread.currentThread() @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { @@ -117,13 +160,47 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime ) } private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed +} + +private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { + val id: Int = RealProfiler.idGen.incrementAndGet() + private val mainThread = Thread.currentThread() + private val gcEvents = UnrolledBuffer[GcEventData]() + private var nextAfterUnitSnap: Long = System.nanoTime() + + private final val GcThreadId = "GC" + + enum Category: + def name: String = this.toString().toLowerCase() + case Run, Phase, File, TypeCheck, Implicit, Macro, Completion + private [profile] val chromeTrace = + if ctx.settings.YprofileTrace.isDefault + then null + else + val filename = ctx.settings.YprofileTrace.value + // Compilation units requiring multi-stage compilation (macros) would create new profiler instances + // We need to store the traces in separate files to prevent overwriting their content. + // Alternatives: sharing a ChromeTrace instance between all runs / manual concatenation after all runs are done + // FIXME: The first assigned runId is equal to 2 instead of 1 (InitialRunId).
+ // Fix me when bug described in Compiler.runId is resolved by removing +/- 1 adjustments + val suffix = if ctx.runId > InitialRunId + 1 then s".${ctx.runId - 1}" else "" + ChromeTrace(Paths.get(s"$filename$suffix")) + + private val compilerRunEvent: TracedEventId = traceDurationStart(Category.Run, s"scalac-$id") + + def completeBackground(threadRange: ProfileRange): Unit = + reporter.reportBackground(this, threadRange) + + def outDir: AbstractFile = ctx.settings.outputDir.value @nowarn - private def doGC: Unit = { + private def doGC(): Unit = { System.gc() System.runFinalization() } @@ -142,6 +219,15 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) case gc => } reporter.close(this) + if chromeTrace != null then + traceDurationEnd(Category.Run, compilerRunEvent) + for gcEvent <- gcEvents + do { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, tid = GcThreadId) + } + chromeTrace.close() } @@ -150,10 +236,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) import java.lang.{Integer => jInt} val reportNs = System.nanoTime() val data = notification.getUserData - val seq = notification.getSequenceNumber - val message = notification.getMessage val tpe = notification.getType - val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString @@ -164,49 +247,127 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { gcEvents += gcEvent } + reporter.reportGc(gcEvent) } } - override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { + override def afterPhase(event: TracedEventId, phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook stop") ExternalToolHook.after() } val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { - doGC - initialSnap.updateHeap(readHeapUsage()) + doGC() + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap - + traceDurationEnd(Category.Phase, event) + traceThreadSnapshotCounters() reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = { assert(mainThread eq Thread.currentThread()) + traceThreadSnapshotCounters() + val eventId = traceDurationStart(Category.Phase, phase.phaseName) if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) - doGC + doGC() if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook start") ExternalToolHook.before() } - snapThread(0) + (eventId, RealProfiler.snapThread(0)) + } + + 
override def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceThreadSnapshotCounters() + traceDurationStart(Category.File, unit.source.name) + else TracedEventId.Empty + } + + override def afterUnit(event: TracedEventId): Unit = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceDurationEnd(Category.File, event) + traceThreadSnapshotCounters() } -} + private def traceThreadSnapshotCounters(initialSnap: => ProfileSnap = RealProfiler.snapThread(0)) = + if chromeTrace != null && System.nanoTime() > nextAfterUnitSnap then { + val snap = initialSnap + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", snap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", snap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", snap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", snap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", snap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", snap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", snap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } -case class EventType(name: String) -object EventType { - //main thread with other tasks - val MAIN: EventType = EventType("main") - //other task ( background thread) - val BACKGROUND: EventType = EventType("background") - //total for compile - val GC: EventType = EventType("GC") + override def beforeTypedDef(sym: Symbol): TracedEventId = traceDurationStart(Category.TypeCheck, symbolName(sym)) + override def afterTypedDef(event: TracedEventId): Unit = traceDurationEnd(Category.TypeCheck, event) + + override def beforeImplicitSearch(pt: Type): TracedEventId = traceDurationStart(Category.Implicit, s"?[${symbolName(pt.typeSymbol)}]", colour = "yellow") + override def afterImplicitSearch(event: TracedEventId): Unit = traceDurationEnd(Category.Implicit, event, colour = "yellow") + + override def beforeMacroSplice(macroSym: Symbol): TracedEventId = traceDurationStart(Category.Macro, s"«${symbolName(macroSym)}»", colour = "olive") + override def afterMacroSplice(event: TracedEventId): Unit = traceDurationEnd(Category.Macro, event, colour = "olive") + + override def beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = + if chromeTrace == null + then EmptyCompletionEvent + else + val completionName = this.completionName(root, associatedFile) + val event = TracedEventId(associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion.name, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File.name, event) + chromeTrace.traceDurationEventStart(Category.Completion.name, completionName) + (event, completionName) + + override def afterCompletion(event: TracedEventId, completionName: String): Unit = + if chromeTrace != null + then + chromeTrace.traceDurationEventEnd(Category.Completion.name, completionName) + chromeTrace.traceDurationEventEnd(Category.File.name, event) + chromeTrace.traceDurationEventEnd(Category.Completion.name, "↯", colour = "thread_state_sleeping") + + private inline def traceDurationStart(category: Category, inline eventName: String, colour: String = 
""): TracedEventId = + if chromeTrace == null + then TracedEventId.Empty + else + val event = TracedEventId(eventName) + chromeTrace.traceDurationEventStart(category.name, event, colour) + event + + private inline def traceDurationEnd(category: Category, event: TracedEventId, colour: String = ""): Unit = + if chromeTrace != null then + chromeTrace.traceDurationEventEnd(category.name, event, colour) + + private def symbolName(sym: Symbol): String = s"${sym.showKind} ${sym.showName}" + private def completionName(root: Symbol, associatedFile: AbstractFile): String = + def isTopLevel = root.owner != NoSymbol && root.owner.is(Flags.Package) + if root.is(Flags.Package) || isTopLevel + then root.javaBinaryName + else + val enclosing = root.enclosingClass + s"${enclosing.javaBinaryName}::${root.name}" } +enum EventType(name: String): + // main thread with other tasks + case MAIN extends EventType("main") + // other task ( background thread) + case BACKGROUND extends EventType("background") + // total for compile + case GC extends EventType("GC") + sealed trait ProfileReporter { def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit @@ -259,9 +420,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala index e3ea69d9be06..1a81153b9b08 100644 --- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -94,9 +94,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index e42f997e7265..b5386d5bd1df 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -47,13 +47,14 @@ object Splicer { def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { case Quote(quotedTree, Nil) => quotedTree case _ => - val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) + val owner = ctx.owner + val macroOwner = newSymbol(owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try val sliceContext = SpliceScope.contextWithNewSpliceScope(splicePos.sourcePos).withOwner(macroOwner) inContext(sliceContext) { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) - try { + try ctx.profiler.onMacroSplice(owner){ val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling 
performed in quotedExprToTree diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index c42b196b8dfb..0727c83d8469 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1082,7 +1082,7 @@ trait Implicits: * it should be applied, EmptyTree otherwise. * @param span The position where errors should be reported. */ - def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = + def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = ctx.profiler.onImplicitSearch(pt): trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 23855455e67b..3810bc66841e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2823,7 +2823,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => typed(rhs) - def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) @@ -2851,7 +2851,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer sym.owner.info.decls.openForMutations.unlink(sym) EmptyTree - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else ctx.profiler.onTypedDef(sym) { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. 
// - Modify signature to `erased def erasedValue[T]: T` @@ -2963,7 +2963,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) - def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val TypeDef(name, rhs) = tdef completeAnnotations(tdef, sym) val rhs1 = tdef.rhs match @@ -2977,7 +2977,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } - def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = ctx.profiler.onTypedDef(cls) { if (!cls.info.isInstanceOf[ClassInfo]) return EmptyTree.assertingErrorsReported val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 0c63f5b4ecb1..264d0f170769 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -43,7 +43,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { def typeCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit try - if !unit.suspended then + if !unit.suspended then ctx.profiler.onUnit(ctx.phase, unit): unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree) typr.println("typed: " + unit.source) record("retained untyped trees", unit.untpdTree.treeSize) diff --git a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala new file mode 100644 index 000000000000..07dc53da1f83 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala @@ -0,0 +1,93 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* +import java.nio.file.Files +import java.nio.charset.StandardCharsets +import java.util.concurrent.locks.LockSupport +import scala.concurrent.duration.* + +class ChromeTraceTest: + private def testTraceOutputs(generator: ChromeTrace => Unit)(checkContent: PartialFunction[List[String], Unit]): Unit = { + val outfile = Files.createTempFile("trace-", ".json").nn + val tracer = new ChromeTrace(outfile) + try generator(tracer) + finally tracer.close() + val contentLines = scala.io.Source.fromFile(outfile.toFile().nn).getLines().toList + checkContent.applyOrElse( + contentLines, + content => fail(s"Invalid output lines: ${content.mkString(System.lineSeparator().nn)}") + ) + } + + @Test def traceCounterEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceCounterEvent("foo", "counter1", 42, processWide = true) + tracer.traceCounterEvent("bar", "counter2", 21, processWide = false) + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"foo","ph":"C","tid":"${tid1}","pid":"${pid1}","ts":${ts1},"args":{"counter1":42}}""" :: + s""",{"cat":"scalac","name":"bar","ph":"C","tid":"${tid2}","pid":"${pid2}","ts":${ts2},"args":{"counter2":21}}""" :: + "]}" :: Nil => + assertEquals(tid1, tid2) + assertTrue(tid1.toIntOption.isDefined) + assertNotEquals(pid1, pid2) + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-$tid1", pid2) + assertTrue(ts1.toLong < ts2.toLong) + } + + @Test def 
traceDurationEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceDurationEvent(name = "name1", startNanos = 1000L, durationNanos = 2500L, tid = "this-thread") + tracer.traceDurationEvent(name = "name2", startNanos = 1000L, durationNanos = 5000L, tid = "this-thread", pidSuffix = "pidSuffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"name1","ph":"X","tid":"this-thread","pid":"${pid1}","ts":1,"dur":2}""" :: + s""",{"cat":"scalac","name":"name2","ph":"X","tid":"this-thread","pid":"${pid2}","ts":1,"dur":5}""" :: + "]}" :: Nil => + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-pidSuffix", pid2) + } + + @Test def traceDurationEvents(): Unit = { + val testStart = System.nanoTime() + testTraceOutputs{ tracer => + tracer.traceDurationEventStart(cat = "test1", name = "event1") + LockSupport.parkNanos(2.millis.toNanos) + tracer.traceDurationEventStart(cat = "test2", name = "event2", colour = "RED", pidSuffix = "pid-suffix") + LockSupport.parkNanos(4.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test2", name = "event2") + LockSupport.parkNanos(8.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test1", name = "event1", colour = "RED", pidSuffix = "pid-suffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"test1","name":"event1","ph":"B","pid":"${pid1}","tid":"${tid1}","ts":${ts1}}""" :: + s""",{"cat":"test2","name":"event2","ph":"B","pid":"${pid2}","tid":"${tid2}","ts":${ts2},"cname":"RED"}""" :: + s""",{"cat":"test2","name":"event2","ph":"E","pid":"${pid3}","tid":"${tid3}","ts":${ts3}}""" :: + s""",{"cat":"test1","name":"event1","ph":"E","pid":"${pid4}","tid":"${tid4}","ts":${ts4},"cname":"RED"}""" :: + "]}" :: Nil => + val traceEnd = System.nanoTime() + assertTrue(tid1.toIntOption.isDefined) + assertEquals(pid1, pid3) + assertTrue(pid1.endsWith(s"-$tid1")) + assertEquals(pid2, pid4) + assertTrue(pid2.endsWith("-pid-suffix")) + List(tid1, tid2, tid3).foreach: tid => + assertEquals(tid4, tid) + List(pid1, pid2, pid3, pid4).foreach: pid => + assertTrue(pid.takeWhile(_ != '-').toIntOption.isDefined) + + List(ts1, ts2, ts3, ts4).map(_.toLong) match { + case all @ List(ts1, ts2, ts3, ts4) => + all.foreach: ts => + // Timestamps are presented using Epoch microsecondos + assertTrue(ts >= testStart / 1000) + assertTrue(ts <= traceEnd / 1000) + assertTrue(ts2 >= ts1 + 2.millis.toMicros) + assertTrue(ts3 >= ts2 + 4.millis.toMicros) + assertTrue(ts4 >= ts3 + 8.millis.toMicros) + case _ => fail("unreachable") + } + } +} diff --git a/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala new file mode 100644 index 000000000000..3253cff52057 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala @@ -0,0 +1,91 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), threadsafe = false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", asChars = true) + writeBoth(s"line $i chars", asChars = false) + 
sTest.newLine() + sExpected.newLine() + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Ignore + @Test def showPerformance(): Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath.nn) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine() + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine() + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} + diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 5578fab412d1..adaeadb12978 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -21,6 +21,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.parsing.Tokens +import dotty.tools.dotc.profile.Profiler import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator @@ -75,7 +76,10 @@ class CompletionProvider( val pos = driver.sourcePosition(params) val (items, isIncomplete) = driver.compilationUnits.get(uri) match case Some(unit) => - val newctx = ctx.fresh.setCompilationUnit(unit).withPhase(Phases.typerPhase(using ctx)) + val newctx = ctx.fresh + .setCompilationUnit(unit) + .setProfiler(Profiler()(using ctx)) + .withPhase(Phases.typerPhase(using ctx)) val tpdPath0 = Interactive.pathTo(unit.tpdTree, pos.span)(using newctx) val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx)
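
Usage sketch (not part of the patch; it assumes the usual dotc syntax for string settings and an arbitrary output file name). Per the new `depends` check described in the commit message, `-Yprofile-trace` is only accepted together with `-Yprofile-enabled`:

    // Hypothetical invocation; the resulting trace.json can be opened in https://ui.perfetto.dev/
    //   dotc -Yprofile-enabled -Yprofile-trace trace.json MyApp.scala
    // Further runs started within the same compilation (e.g. by macro splicing) write to suffixed
    // files such as trace.json.2, following the runId-based suffix in RealProfiler.chromeTrace.
    // Inside the compiler, work is instrumented through the inline hooks added above, for example:
    ctx.profiler.onPhase(phase):
      units = phase.runOn(units)   // a begin/end trace event pair is emitted around the body

With the NoOp profiler now installed in the initial context, these hooks reduce to just the body when profiling is disabled.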