Skip to content

Commit

Permalink
Merge branch 'master' into latest-pekko-milestone
Browse files Browse the repository at this point in the history
  • Loading branch information
pjfanning authored Sep 2, 2024
2 parents 17940b2 + 124e223 commit d5830dc
Show file tree
Hide file tree
Showing 9 changed files with 78 additions and 31 deletions.
10 changes: 6 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
name: Test all the things

on: [ push, pull_request ]
env:
JAVA_OPTS: -Xms5120M -Xmx5120M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
JVM_OPTS: -Xms5120M -Xmx5120M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8

jobs:
ci:
runs-on: self-hosted
env:
JAVA_OPTS: -Xms5120M -Xmx5120M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
JVM_OPTS: -Xms5120M -Xmx5120M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
steps:
- uses: actions/checkout@v1
- run: git fetch -f --depth=1 origin '+refs/tags/*:refs/tags/*'
Expand All @@ -24,6 +23,9 @@ jobs:
- uses: actions/checkout@v1
- uses: coursier/cache-action@v6
- uses: coursier/setup-action@v1
with:
jvm: adopt:8
apps: sbt
- name: Test
run: sbt -v "+scalafmtCheckAll"

2 changes: 2 additions & 0 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -436,6 +436,7 @@ lazy val `kamon-spring` = (project in file("instrumentation/kamon-spring"))

okHttp % "test",
"com.h2database" % "h2" % "1.4.200" % "test",
"javax.xml.bind" % "jaxb-api" % "2.3.1" % "test",
"org.springframework.boot" % "spring-boot-starter-data-jpa" % "2.4.2" % "test",
scalatest % "test",
logbackClassic % "test",
Expand Down Expand Up @@ -855,6 +856,7 @@ lazy val `kamon-apache-cxf` = (project in file("instrumentation/kamon-apache-cxf
"org.mock-server" % "mockserver-client-java" % "5.13.2" % "test",
"com.dimafeng" %% "testcontainers-scala" % "0.41.0" % "test",
"com.dimafeng" %% "testcontainers-scala-mockserver" % "0.41.0" % "test",
"javax.xml.bind" % "jaxb-api" % "2.3.1" % "test",
"org.apache.cxf" % "cxf-rt-frontend-jaxws" % "3.3.6" % "test",
"org.apache.cxf" % "cxf-rt-transports-http" % "3.3.6" % "test",
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,8 @@ class MessageSpec
}

override val container: MockServerContainer = MockServerContainer()
lazy val clientExpectation: MockServerExpectations = new MockServerExpectations("localhost", container.serverPort)
lazy val clientExpectation: MockServerExpectations =
new MockServerExpectations(container.container.getHost, container.serverPort)

override protected def beforeAll(): Unit = {
super.beforeAll()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,8 @@ class HttpRequestSpec
}

override val container: MockServerContainer = MockServerContainer()
lazy val clientExpectation: MockServerExpectations = new MockServerExpectations("localhost", container.serverPort)
lazy val clientExpectation: MockServerExpectations =
new MockServerExpectations(container.container.getHost, container.serverPort)

override protected def beforeAll(): Unit = {
super.beforeAll()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,8 @@ class HttpUriRequestSpec
}

override val container: MockServerContainer = MockServerContainer()
lazy val clientExpectation: MockServerExpectations = new MockServerExpectations("localhost", container.serverPort)
lazy val clientExpectation: MockServerExpectations =
new MockServerExpectations(container.container.getHost, container.serverPort)

override protected def beforeAll(): Unit = {
super.beforeAll()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,24 +77,6 @@ class CassandraClientTracingInstrumentationSpec
}
}

// Queries a table that does not exist so the driver raises an error;
// the instrumentation must let the DriverException propagate to the
// caller while still reporting a span for the failed execution.
"not swallow exceptions" in {
val query = QueryBuilder
.select("name")
.from("illegaltablename")
.where(QueryBuilder.eq("name", "kamon"))
.allowFiltering()
.setFetchSize(5)

// The exception must reach the caller untouched — not be swallowed
// by the tracing wrapper.
assertThrows[DriverException] {
session.execute(query)
}

// Span reporting is asynchronous, so poll until one shows up.
eventually(timeout(10 seconds)) {
val span = testSpanReporter().nextSpan()
span should not be empty
}
}

"trace individual page executions" in {
val query = QueryBuilder
.select("name")
Expand All @@ -118,6 +100,24 @@ class CassandraClientTracingInstrumentationSpec
clientSpan.get.tags.get(plainBoolean("cassandra.driver.rs.has-more")) shouldBe true
}
}

// Queries a table that does not exist so the driver raises an error;
// the instrumentation must let the DriverException propagate to the
// caller while still reporting a span for the failed execution.
"not swallow exceptions" in {
val query = QueryBuilder
.select("name")
.from("illegaltablename")
.where(QueryBuilder.eq("name", "kamon"))
.allowFiltering()
.setFetchSize(5)

// The exception must reach the caller untouched — not be swallowed
// by the tracing wrapper.
assertThrows[DriverException] {
session.execute(query)
}

// Span reporting is asynchronous, so poll until one shows up.
eventually(timeout(10 seconds)) {
val span = testSpanReporter().nextSpan()
span should not be empty
}
}
}

var session: Session = _
Expand Down
2 changes: 2 additions & 0 deletions reporters/kamon-datadog/src/main/resources/reference.conf
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,8 @@ kamon {
compression = false
}

# The log level in which to log failures to submit metrics.
failure-log-level = "error"

# All time values are collected in nanoseconds,
# to scale before sending to datadog set "time-units" to "s" or "ms" or "µs".
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import kamon.{module, Kamon}
import kamon.datadog.DatadogAPIReporter.Configuration
import kamon.module.{MetricReporter, ModuleFactory}
import org.slf4j.LoggerFactory
import org.slf4j.event.Level

import scala.util.{Failure, Success}

Expand Down Expand Up @@ -68,7 +69,7 @@ class DatadogAPIReporter(
override def reportPeriodSnapshot(snapshot: PeriodSnapshot): Unit = {
httpClient.doPost("application/json; charset=utf-8", buildRequestBody(snapshot)) match {
case Failure(e) =>
logger.error(e.getMessage)
logger.logAtLevel(configuration.failureLogLevel, e.getMessage)
case Success(response) =>
logger.trace(response)
}
Expand Down Expand Up @@ -166,7 +167,8 @@ private object DatadogAPIReporter {
timeUnit: MeasurementUnit,
informationUnit: MeasurementUnit,
extraTags: Seq[(String, String)],
tagFilter: Filter
tagFilter: Filter,
failureLogLevel: Level
)

implicit class QuoteInterp(val sc: StringContext) extends AnyVal {
Expand All @@ -175,15 +177,22 @@ private object DatadogAPIReporter {

def readConfiguration(config: Config): Configuration = {
val datadogConfig = config.getConfig("kamon.datadog")

// Remove the "host" tag since it gets added to the datadog payload separately
val extraTags = EnvironmentTags
.from(Kamon.environment, datadogConfig.getConfig("environment-tags"))
.without("host")
.all()
.map(p => p.key -> Tag.unwrapValue(p).toString)

Configuration(
datadogConfig.getConfig("api"),
timeUnit = readTimeUnit(datadogConfig.getString("time-unit")),
informationUnit = readInformationUnit(datadogConfig.getString("information-unit")),
// Remove the "host" tag since it gets added to the datadog payload separately
EnvironmentTags.from(Kamon.environment, datadogConfig.getConfig("environment-tags")).without("host").all().map(
p => p.key -> Tag.unwrapValue(p).toString
),
Kamon.filter("kamon.datadog.environment-tags.filter")
extraTags = extraTags,
tagFilter = Kamon.filter("kamon.datadog.environment-tags.filter"),
failureLogLevel = readLogLevel(datadogConfig.getString("failure-log-level"))
)
}
}
29 changes: 29 additions & 0 deletions reporters/kamon-datadog/src/main/scala/kamon/datadog/package.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ import com.typesafe.config.Config
import kamon.metric.MeasurementUnit
import kamon.metric.MeasurementUnit.{information, time}
import okhttp3._
import org.slf4j.Logger
import org.slf4j.event.Level

import scala.util.{Failure, Success, Try}

Expand All @@ -36,6 +38,23 @@ package object datadog {
}
}

/** Enriches SLF4J's [[Logger]] with a helper that dispatches a message
  * to the logging method matching a dynamically chosen [[Level]].
  */
implicit class LoggerExtras(val logger: Logger) extends AnyVal {

  /** Logs `msg` at the given `level` by routing it to the corresponding
    * level-specific method on the underlying logger.
    */
  def logAtLevel(level: Level, msg: String): Unit = {
    // Pick the level-specific sink first, then invoke it exactly once.
    val sink: String => Unit = level match {
      case Level.TRACE => logger.trace(_: String)
      case Level.DEBUG => logger.debug(_: String)
      case Level.INFO  => logger.info(_: String)
      case Level.WARN  => logger.warn(_: String)
      case Level.ERROR => logger.error(_: String)
    }
    sink(msg)
  }
}

private[datadog] case class HttpClient(
apiUrl: String,
apiKey: Option[String],
Expand Down Expand Up @@ -133,4 +152,14 @@ package object datadog {
case "gb" => information.gigabytes
case other => sys.error(s"Invalid time unit setting [$other], the possible values are [b, kb, mb, gb]")
}

/** Parses a configured log-level name into an SLF4J [[Level]].
  *
  * Matching is case-insensitive (using a locale-independent lowercase
  * conversion) so common configuration spellings such as "WARN" or
  * "Warn" are accepted alongside "warn"; all previously valid inputs
  * behave exactly as before.
  *
  * @param level the level name read from configuration
  * @return the corresponding SLF4J level
  * @throws RuntimeException via `sys.error` when the name is not one of
  *         trace, debug, info, warn, error
  */
def readLogLevel(level: String): Level = level.toLowerCase(java.util.Locale.ROOT) match {
  case "trace" => Level.TRACE
  case "debug" => Level.DEBUG
  case "info"  => Level.INFO
  case "warn"  => Level.WARN
  case "error" => Level.ERROR
  case other =>
    sys.error(s"Invalid log level setting [$other], the possible values are [trace, debug, info, warn, error]")
}
}

0 comments on commit d5830dc

Please sign in to comment.