Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update Splice main to version 0.3.10-snapshot.20250130.8182.0.v2323309b (automatic PR) #277

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -70,3 +70,4 @@ apps/splitwell/src/test/resources/splitwell-bundle*.tar.gz

**/SingletonCookie
__pycache__/
docs/src/deployment/observability/metrics_reference.rst
2 changes: 1 addition & 1 deletion LATEST_RELEASE
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.3.8
0.3.9
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.3.9
0.3.10
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
package org.lfdecentralizedtrust.splice.integration.tests

import com.digitalasset.canton.integration.BaseEnvironmentDefinition
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.http.scaladsl.Http
import org.apache.pekko.http.scaladsl.client.RequestBuilding.Get
import org.apache.pekko.http.scaladsl.model.StatusCodes
import org.apache.pekko.http.scaladsl.model.headers.Host
import org.lfdecentralizedtrust.splice.environment.EnvironmentImpl
import org.lfdecentralizedtrust.splice.integration.EnvironmentDefinition
import org.lfdecentralizedtrust.splice.integration.tests.SpliceTests.SpliceTestConsoleEnvironment
Expand All @@ -8,13 +14,12 @@ import org.lfdecentralizedtrust.splice.util.{
FrontendLoginUtil,
WalletFrontendTestUtil,
}
import com.digitalasset.canton.integration.BaseEnvironmentDefinition

import java.lang.ProcessBuilder
import java.nio.file.{Path, Paths}
import scala.concurrent.duration.*
import scala.jdk.CollectionConverters.*
import scala.sys.process.*
import java.lang.ProcessBuilder
import java.nio.file.{Path, Paths}

class DockerComposeValidatorFrontendIntegrationTest
extends FrontendIntegrationTest("frontend")
Expand All @@ -32,7 +37,7 @@ class DockerComposeValidatorFrontendIntegrationTest
extraClue: String = "",
startFlags: Seq[String] = Seq.empty,
extraEnv: Seq[(String, String)] = Seq.empty,
) = {
): Unit = {
val command = (Seq(
"build-tools/splice-compose.sh",
"start",
Expand Down Expand Up @@ -246,6 +251,32 @@ class DockerComposeValidatorFrontendIntegrationTest
)
}
}

clue("validator and participant metrics work") {
implicit val sys: ActorSystem = env.actorSystem
registerHttpConnectionPoolsCleanup(env)

def metricsAreAvailableFor(node: String) = {
val result = Http()
.singleRequest(
Get(s"http://localhost/metrics")
// java can't resolve the *.localhost domain, so we need to set the Host header manually
.withHeaders(Host(s"$node.localhost"))
)
.futureValue
result.status should be(StatusCodes.OK)
result.entity.toStrict(10.seconds).futureValue.data.utf8String should include(
          "target_info" // basic metric included by OpenTelemetry
)
}

metricsAreAvailableFor(
"validator"
)
metricsAreAvailableFor(
"participant"
)
}
}
}

Expand Down
42 changes: 24 additions & 18 deletions apps/common/frontend/src/contexts/LedgerApiContext.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,15 @@ export class LedgerApiClient {
const response = await fetch(`${this.jsonApiUrl}v2/users/${encodeURIComponent(userId)}`, {
headers: this.headers,
});
const responseBody = await response.json();
return responseBody.user;
if (response.ok) {
const responseBody = await response.json();
return responseBody.user;
} else {
const responseBody = await response.text();
throw new Error(
`getPrimaryParty: HTTP ${response.status} ${response.statusText}: ${responseBody}`
);
}
},
this.userId
);
Expand Down Expand Up @@ -119,28 +126,27 @@ export class LedgerApiClient {
package_id_selection_preference: [],
};

const describeChoice = `Exercised choice: actAs=${JSON.stringify(
actAs
)}, readAs=${JSON.stringify(readAs)}, choiceName=${choice.choiceName}, templateId=${
choice.template().templateId
}, contractId=${contractId}`;

const responseBody = await fetch(
`${this.jsonApiUrl}v2/commands/submit-and-wait-for-transaction-tree`,
{ headers: this.headers, method: 'POST', body: JSON.stringify(body) }
)
.then(r => {
console.debug(
`Exercised choice: actAs=${JSON.stringify(actAs)}, readAs=${JSON.stringify(
readAs
)}, choiceName=${choice.choiceName}, templateId=${
choice.template().templateId
}, contractId=${contractId} succeeded.`
);
return r.json();
.then(async r => {
if (r.ok) {
console.debug(`${describeChoice} succeeded.`);
return r.json();
} else {
const body = await r.text();
throw new Error(`HTTP ${r.status} ${r.statusText}: ${body}`);
}
})
.catch(e => {
console.debug(
`Exercised choice: actAs=${JSON.stringify(actAs)}, readAs=${JSON.stringify(
readAs
)}, choiceName=${choice.choiceName}, templateId=${
choice.template().templateId
}, contractId=${contractId} failed: ${JSON.stringify(e)}`
);
console.debug(`${describeChoice} failed: ${JSON.stringify(e)}`);
throw e;
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ object DomainParamsStore {
domainUnpausedPromise: Option[Promise[Unit]],
)

private class Metrics(metricsFactory: LabeledMetricsFactory) extends AutoCloseable {
class Metrics(metricsFactory: LabeledMetricsFactory) extends AutoCloseable {

private val prefix: MetricName = SpliceMetrics.MetricsPrefix :+ "domain_params_store"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,14 +106,18 @@ trait SpliceDbTest extends DbTest with BeforeAndAfterAll { this: Suite =>
val dbLockPort: Int = 54321
implicit val tc: TraceContext = TraceContext.empty
logger.info("Acquiring SpliceDbTest lock")
val lockTimeout = 10.minutes // expectation: Db tests won't take longer than 5m
// Needs to be long enough to allow all other concurrently started tests to finish,
// we therefore use a time roughly equal to the expected maximal duration of the entire CI job.
val lockTimeout = 20.minutes
dbLockSocket = BaseTest.eventually(lockTimeout)(
Try(new ServerSocket(dbLockPort))
.fold(
e => {
logger.debug(s"Acquiring SpliceDbTest lock: port $dbLockPort is in use")
throw new TestFailedException(
s"Failed to acquire lock within timeout ($lockTimeout).",
s"Failed to acquire lock within timeout ($lockTimeout). " +
"We start many tests suites in parallel but wait for the lock before actually running test in this suite. " +
"Either increase the timeout, or reduce the number of test suites running in the same CI job.",
e,
0,
)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package org.lfdecentralizedtrust.splice.metrics

import better.files.File
import com.daml.metrics.api.MetricsContext
import com.digitalasset.canton.discard.Implicits.DiscardOps
import com.digitalasset.canton.metrics.{MetricDoc, MetricsDocGenerator}
import com.digitalasset.canton.topology.PartyId
import org.lfdecentralizedtrust.splice.admin.api.client.DamlGrpcClientMetrics
import org.lfdecentralizedtrust.splice.automation.TriggerMetrics
import org.lfdecentralizedtrust.splice.scan.store.db.DbScanStoreMetrics
import org.lfdecentralizedtrust.splice.sv.automation.singlesv.{SequencerPruningMetrics}
import org.lfdecentralizedtrust.splice.sv.automation.ReportSvStatusMetricsExportTrigger
import org.lfdecentralizedtrust.splice.sv.store.db.DbSvDsoStoreMetrics
import org.lfdecentralizedtrust.splice.store.{DomainParamsStore, HistoryMetrics, StoreMetrics}
import org.lfdecentralizedtrust.splice.wallet.metrics.AmuletMetrics

/** The collected metric documentation items, grouped by the component that
  * registers them (see `MetricsDocs.metricsDocs()` for how the groups are
  * produced). Knows how to render the whole set as a reStructuredText page.
  *
  * @param common    metrics registered by stores/triggers shared across apps
  * @param validator metrics specific to validator nodes
  * @param sv        metrics specific to SV nodes
  * @param scan      metrics specific to scan nodes
  */
final case class GeneratedMetrics(
    common: List[MetricDoc.Item],
    validator: List[MetricDoc.Item],
    sv: List[MetricDoc.Item],
    scan: List[MetricDoc.Item],
) {
  /** Renders the full metrics reference page: a copyright/license comment,
    * the `metrics-reference` anchor and page title, then one section per
    * metric group, joined with newlines.
    */
  def render(): String =
    Seq(
      s"""|..
          | Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
          |..
          | SPDX-License-Identifier: Apache-2.0
          |
          |.. _metrics-reference:
          |
          |Metrics Reference
          |=================
          |""".stripMargin,
      renderSection("Common", common),
      renderSection("Validator", validator),
      renderSection("SV", sv),
      renderSection("Scan", scan),
    ).mkString("\n")

  /** Renders one group as an rst section: "<prefix> Metrics" underlined with
    * '+' characters, followed by one entry per metric.
    *
    * @param prefix  human-readable group name used in the section header
    * @param metrics the documentation items to render in this section
    */
  def renderSection(prefix: String, metrics: List[MetricDoc.Item]): String = {
    val header = s"$prefix Metrics"
    (Seq(
      header,
      "+" * header.length,
    ) ++
      // We seem to automatically pull in the daml.cache metrics which make no sense for splice at this point
      metrics.filter(m => !m.name.startsWith("daml.cache")).map(renderMetric(_))).mkString("\n")
  }

  /** Renders a single metric as an rst subsection: the metric name underlined
    * with '^' characters, followed by a bullet list of its summary,
    * description, type, and qualification.
    */
  def renderMetric(metric: MetricDoc.Item): String =
    Seq(
      metric.name,
      "^" * metric.name.length,
      s"* **Summary**: ${metric.summary}",
      s"* **Description**: ${metric.description}",
      s"* **Type**: ${metric.metricType}",
      s"* **Qualification**: ${metric.qualification}",
      "\n",
    ).mkString("\n")
}

/** Generates the metrics reference documentation page.
  *
  * Instantiates each component's metric classes against a
  * [[com.digitalasset.canton.metrics.MetricsDocGenerator]] — which
  * presumably records metric metadata rather than registering live metrics
  * (TODO confirm against Canton docs) — and renders the collected items
  * as reStructuredText via [[GeneratedMetrics]].
  */
object MetricsDocs {
  /** Collects metric docs per component by instantiating the corresponding
    * metric classes with the doc generator.
    *
    * The generator accumulates items as a side effect of construction, so the
    * order of operations matters: after each group, `getAll()` snapshots the
    * items and `reset()` clears the generator for the next group.
    */
  private def metricsDocs(): GeneratedMetrics = {
    // Placeholder party ids: some metric constructors require a party for
    // their labels; the concrete value does not matter for doc generation.
    val walletUserParty = PartyId.tryFromProtoPrimitive("wallet_user::namespace")
    val svParty = PartyId.tryFromProtoPrimitive("sv::namespace")
    val generator = new MetricsDocGenerator()
    // common
    new DomainParamsStore.Metrics(generator)
    new HistoryMetrics(generator)(MetricsContext.Empty)
    new StoreMetrics(generator)(MetricsContext.Empty)
    new DamlGrpcClientMetrics(generator, "")
    new TriggerMetrics(generator)
    val commonMetrics = generator.getAll()
    generator.reset()
    // validator
    new AmuletMetrics(walletUserParty, generator)
    val validatorMetrics = generator.getAll()
    generator.reset()
    // sv
    new DbSvDsoStoreMetrics(generator)
    new SequencerPruningMetrics(generator)
    new ReportSvStatusMetricsExportTrigger.SvCometBftMetrics(generator)
    new ReportSvStatusMetricsExportTrigger.SvStatusMetrics(
      ReportSvStatusMetricsExportTrigger.SvId(svParty.toProtoPrimitive, "svName"),
      generator,
    )
    val svMetrics = generator.getAll()
    generator.reset()
    // scan
    new DbScanStoreMetrics(generator)
    val scanMetrics = generator.getAll()
    generator.reset()
    GeneratedMetrics(
      commonMetrics,
      validatorMetrics,
      svMetrics,
      scanMetrics,
    )
  }

  /** Entry point: renders the metrics reference and writes it to the file
    * named by the first command-line argument, creating parent directories
    * if needed.
    *
    * @param args args(0) is the output file path (no validation of argument
    *             count — missing args raise an exception)
    */
  def main(args: Array[String]): Unit = {
    val file = File(args(0))
    file.parent.createDirectoryIfNotExists()
    val docs = metricsDocs()
    file.overwrite(docs.render()).discard[File]
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,9 @@ class ReportSvStatusMetricsExportTrigger(

object ReportSvStatusMetricsExportTrigger {

private case class SvId(svParty: String, svName: String)
case class SvId(svParty: String, svName: String)

private case class SvCometBftMetrics(
case class SvCometBftMetrics(
metricsFactory: LabeledMetricsFactory
) extends AutoCloseable {

Expand Down Expand Up @@ -188,7 +188,7 @@ object ReportSvStatusMetricsExportTrigger {
}
}

private case class SvStatusMetrics(
case class SvStatusMetrics(
svId: SvId,
metricsFactory: LabeledMetricsFactory,
) extends AutoCloseable {
Expand Down
50 changes: 47 additions & 3 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ lazy val root: Project = (project in file("."))
`apps-splitwell`,
`apps-sv`,
`apps-app`,
`apps-metrics-docs`,
`apps-wallet`,
`apps-frontends`,
`splice-util-daml`,
Expand Down Expand Up @@ -186,7 +187,7 @@ lazy val docs = project
val srcDir = sourceDirectory.value
val log = streams.value.log
val cacheDir = streams.value.cacheDirectory
val cache = FileFunction.cached(cacheDir) { _ =>
val cacheDamlDocs = FileFunction.cached(cacheDir) { _ =>
runCommand(
Seq("./gen-daml-docs.sh"),
log,
Expand All @@ -203,9 +204,30 @@ lazy val docs = project
(`splice-validator-lifecycle-daml` / Compile / damlBuild).value ++
(`splice-wallet-daml` / Compile / damlBuild).value ++
(`splice-wallet-payments-daml` / Compile / damlBuild).value
cache(
cacheDamlDocs(
damlSources.toSet
).toSeq
import scala.sys.process._
val classPath = (`apps-metrics-docs` / Runtime / dependencyClasspath).value.files
val cacheMetricsDocs = FileFunction.cached(cacheDir) { _ =>
val metricsReferencePath = srcDir / "deployment" / "observability" / "metrics_reference.rst"
// This seems to be the easiest way to run a target from another SBT project and has the advantage
// that it is much faster than the approach taken by Canton of running the target from bundle with a console script.
runCommand(
Seq(
"java",
"-cp",
classPath.mkString(":"),
"org.lfdecentralizedtrust.splice.metrics.MetricsDocs",
metricsReferencePath.toString,
),
log,
None,
Some(baseDir),
)
Set.empty
}
cacheMetricsDocs(Set()).toSeq
}.taskValue,
bundle := {
(Compile / resources).value
Expand Down Expand Up @@ -1317,7 +1339,20 @@ checkErrors := {
checkLogs("log/canton_network_test.clog", Seq("canton_network_test_log"))
}

lazy val `apps-app` =
// Standalone project hosting the metrics reference doc generator
// (org.lfdecentralizedtrust.splice.metrics.MetricsDocs). It depends on the
// app projects so that all of their metric classes are on its classpath
// when the docs build invokes the generator.
lazy val `apps-metrics-docs` =
  project
    .in(file("apps/metrics-docs"))
    .dependsOn(
      `apps-common`,
      `apps-scan`,
      `apps-sv`,
      `apps-validator`,
    )
    .settings(
      Headers.ApacheDAHeaderSettings
    )

lazy val `apps-app`: Project =
project
.in(file("apps/app"))
.dependsOn(
Expand Down Expand Up @@ -1376,6 +1411,10 @@ printTests := {
def isNonDevNetTest(name: String): Boolean = name.contains("NonDevNet")
def isPreflightIntegrationTest(name: String): Boolean = name.contains("PreflightIntegrationTest")

// A test class counts as an integration test when it lives in the dedicated
// integration-test package or its name mentions "IntegrationTest".
def isIntegrationTest(name: String): Boolean =
  Seq(
    "org.lfdecentralizedtrust.splice.integration.tests",
    "IntegrationTest",
  ).exists(marker => name.contains(marker))
def isCoreDeploymentPreflightIntegrationTest(name: String): Boolean = isPreflightIntegrationTest(
name
) && !isValidator1DeploymentPreflightIntegrationTest(
Expand Down Expand Up @@ -1434,6 +1473,11 @@ printTests := {

// Order matters as each test is included in just one group, with the first match being used
val testSplitRules = Seq(
(
"Unit tests",
"test-full-class-names-non-integration.log",
(t: String) => !isIntegrationTest(t),
),
(
"Daml ciupgrade vote",
"test-daml-ciupgrade-vote.log",
Expand Down
Loading