diff --git a/build.sbt b/build.sbt
index 1a47a4922..77a99a2e6 100644
--- a/build.sbt
+++ b/build.sbt
@@ -456,6 +456,7 @@ lazy val `kamon-akka` = (project in file("instrumentation/kamon-akka"))
.enablePlugins(JavaAgent)
.disablePlugins(AssemblyPlugin)
.settings(instrumentationSettings: _*)
+ .settings(crossScalaVersions += `scala_3_version`)
.dependsOn(
`kamon-scala-future` % "compile,common,akka-2.5,akka-2.6",
`kamon-testkit` % "test,test-common,test-akka-2.5,test-akka-2.6"
@@ -464,31 +465,45 @@ lazy val `kamon-akka` = (project in file("instrumentation/kamon-akka"))
def akkaHttpVersion(scalaVersion: String) = scalaVersion match {
case "2.11" => "10.1.12"
+ case "3" => "10.5.0"
case _ => "10.2.8"
}
+def akkaStreamVersion(scalaVersion: String) = scalaVersion match {
+ case "3" => "2.7.0"
+ case _ => "2.5.32"
+}
+
+def versionedScalaSourceDirectories(sourceDir: File, scalaVersion: String): List[File] =
+ scalaVersion match {
+ case "3" => List(sourceDir / "scala-2.13+")
+ case "2.13" => List(sourceDir / "scala-2.13+")
+ case _ => Nil
+ }
lazy val `kamon-akka-http` = (project in file("instrumentation/kamon-akka-http"))
.enablePlugins(JavaAgent)
.disablePlugins(AssemblyPlugin)
.settings(instrumentationSettings)
.settings(Seq(
+ Compile / unmanagedSourceDirectories ++= versionedScalaSourceDirectories((Compile / sourceDirectory).value, scalaBinaryVersion.value),
resolvers += Resolver.bintrayRepo("hseeberger", "maven"),
javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.10" % "test",
libraryDependencies ++= Seq(
kanelaAgent % "provided",
"com.typesafe.akka" %% "akka-http" % akkaHttpVersion(scalaBinaryVersion.value) % "provided",
"com.typesafe.akka" %% "akka-http2-support" % akkaHttpVersion(scalaBinaryVersion.value) % "provided",
- "com.typesafe.akka" %% "akka-stream" % "2.5.32" % "provided",
+ "com.typesafe.akka" %% "akka-stream" % akkaStreamVersion(scalaBinaryVersion.value) % "provided",
scalatest % "test",
slf4jApi % "test",
slf4jnop % "test",
okHttp % "test",
"com.typesafe.akka" %% "akka-http-testkit" % akkaHttpVersion(scalaBinaryVersion.value) % "test",
- "de.heikoseeberger" %% "akka-http-json4s" % "1.27.0" % "test",
- "org.json4s" %% "json4s-native" % "3.6.7" % "test",
- ),
- )).dependsOn(`kamon-akka`, `kamon-testkit` % "test")
+ "de.heikoseeberger" %% "akka-http-json4s" % "1.27.0" % "test" cross CrossVersion.for3Use2_13 intransitive(),
+ "org.json4s" %% "json4s-native" % "4.0.6" % "test",
+ )))
+ .settings(crossScalaVersions += `scala_3_version`)
+ .dependsOn(`kamon-akka`, `kamon-testkit` % "test")
@@ -497,7 +512,10 @@ lazy val `kamon-pekko` = (project in file("instrumentation/kamon-pekko"))
.disablePlugins(AssemblyPlugin)
.settings(instrumentationSettings: _*)
.settings(Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`),
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version),
+ libraryDependencies ++= Seq(
+ "org.apache.pekko" %% "pekko-actor" % pekkoHttpVersion % "provided"
+ )
))
.dependsOn(
`kamon-scala-future` % "compile",
@@ -511,8 +529,7 @@ lazy val `kamon-pekko-http` = (project in file("instrumentation/kamon-pekko-http
.disablePlugins(AssemblyPlugin)
.settings(instrumentationSettings)
.settings(Seq(
- javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.10" % "test",
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`),
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version),
libraryDependencies ++= Seq(
kanelaAgent % "provided",
"org.apache.pekko" %% "pekko-http" % pekkoHttpVersion % "provided",
diff --git a/instrumentation/kamon-akka-http/src/main/scala-2.13/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala b/instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
similarity index 94%
rename from instrumentation/kamon-akka-http/src/main/scala-2.13/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
rename to instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
index 563f687c3..3249bb74b 100644
--- a/instrumentation/kamon-akka-http/src/main/scala-2.13/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
+++ b/instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
@@ -8,7 +8,7 @@ import akka.http.scaladsl.server.PathMatcher.{Matched, Unmatched}
import akka.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet}
import akka.http.scaladsl.server.directives.RouteDirectives.reject
import akka.http.scaladsl.server._
-import akka.http.scaladsl.server.util.Tupler
+import akka.http.scaladsl.server.util.{Tuple, Tupler}
import akka.http.scaladsl.util.FastFuture
import kamon.Kamon
import kamon.instrumentation.akka.http.HasMatchingContext.PathMatchingContext
@@ -27,6 +27,7 @@ import akka.stream.scaladsl.Flow
import kamon.context.Context
import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic
+import scala.annotation.static
import scala.collection.immutable
@@ -53,7 +54,7 @@ class AkkaHttpServerInstrumentation extends InstrumentationBuilder {
.advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice])
onType("akka.http.impl.engine.http2.Http2Blueprint$")
- .intercept(method("handleWithStreamIdHeader"), Http2BlueprintInterceptor)
+ .intercept(method("handleWithStreamIdHeader"), classOf[Http2BlueprintInterceptor])
/**
* The rest of these sections are just about making sure that we can generate an appropriate operation name (i.e. free
@@ -61,7 +62,7 @@ class AkkaHttpServerInstrumentation extends InstrumentationBuilder {
*/
onType("akka.http.scaladsl.server.RequestContextImpl")
.mixin(classOf[HasMatchingContext.Mixin])
- .intercept(method("copy"), RequestContextCopyInterceptor)
+ .intercept(method("copy"), classOf[RequestContextCopyInterceptor])
onType("akka.http.scaladsl.server.directives.PathDirectives")
.intercept(method("rawPathPrefix"), classOf[PathDirectivesRawPathPrefixInterceptor])
@@ -263,10 +264,11 @@ object LastAutomaticOperationNameEdit {
new LastAutomaticOperationNameEdit(operationName, allowAutomaticChanges)
}
+class RequestContextCopyInterceptor
object RequestContextCopyInterceptor {
@RuntimeType
- def copy(@This context: RequestContext, @SuperCall copyCall: Callable[RequestContext]): RequestContext = {
+ @static def copy(@This context: RequestContext, @SuperCall copyCall: Callable[RequestContext]): RequestContext = {
val copiedRequestContext = copyCall.call()
copiedRequestContext.asInstanceOf[HasMatchingContext].setMatchingContext(context.asInstanceOf[HasMatchingContext].matchingContext)
copiedRequestContext
@@ -277,8 +279,8 @@ class PathDirectivesRawPathPrefixInterceptor
object PathDirectivesRawPathPrefixInterceptor {
import BasicDirectives._
- def rawPathPrefix[T](@Argument(0) matcher: PathMatcher[T]): Directive[T] = {
- implicit val LIsTuple = matcher.ev
+ @static def rawPathPrefix[T](@Argument(0) matcher: PathMatcher[T]): Directive[T] = {
+ implicit val LIsTuple: Tuple[T] = matcher.ev
extract { ctx =>
val fullPath = ctx.unmatchedPath.toString()
@@ -294,7 +296,7 @@ object PathDirectivesRawPathPrefixInterceptor {
(ctx, matching)
} flatMap {
case (ctx, Matched(rest, values)) =>
- tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult =>
+ tprovide[T](values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult =>
if(routeResult.isInstanceOf[Rejected])
ctx.asInstanceOf[HasMatchingContext].popOneMatchingContext()
@@ -307,6 +309,7 @@ object PathDirectivesRawPathPrefixInterceptor {
}
}
+class Http2BlueprintInterceptor
object Http2BlueprintInterceptor {
case class HandlerWithEndpoint(interface: String, port: Int, handler: HttpRequest => Future[HttpResponse])
@@ -316,7 +319,7 @@ object Http2BlueprintInterceptor {
}
@RuntimeType
- def handleWithStreamIdHeader(@Argument(1) handler: HttpRequest => Future[HttpResponse],
+ @static def handleWithStreamIdHeader(@Argument(1) handler: HttpRequest => Future[HttpResponse],
@SuperCall zuper: Callable[Flow[HttpRequest, HttpResponse, NotUsed]]): Flow[HttpRequest, HttpResponse, NotUsed] = {
handler match {
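The conversions in this file all follow one pattern: Byte Buddy binds interceptor and advice methods statically when the registration passes a class, but methods defined on a Scala object live on the module class rather than as JVM-static methods. The Scala 3 route taken here is scala.annotation.static, which emits the annotated companion-object method as a static method of the companion class; that is why each converted object gains an otherwise empty companion class and the registrations switch to classOf[...]. A self-contained sketch with hypothetical names:

    import java.util.concurrent.Callable
    import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{RuntimeType, SuperCall}
    import scala.annotation.static

    // @static needs a companion class to host the generated JVM-static method
    class ExampleInterceptor
    object ExampleInterceptor {

      @RuntimeType
      @static def around(@SuperCall zuper: Callable[AnyRef]): AnyRef =
        zuper.call() // delegate to the original implementation
    }

    // registration then references the class, not the object:
    //   onType("some.Type").intercept(method("someMethod"), classOf[ExampleInterceptor])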
diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpClientTracingSpec.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpClientTracingSpec.scala
index 1b4b036d8..0322f9de1 100644
--- a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpClientTracingSpec.scala
+++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpClientTracingSpec.scala
@@ -31,6 +31,7 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.OptionValues
+import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._
class AkkaHttpClientTracingSpec extends AnyWordSpecLike with Matchers with InitAndStopKamonAfterAll with MetricInspection.Syntax
@@ -38,9 +39,9 @@ class AkkaHttpClientTracingSpec extends AnyWordSpecLike with Matchers with InitA
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-client-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-client-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: ActorMaterializer = ActorMaterializer()
val timeoutTest: FiniteDuration = 5 second
val interface = "127.0.0.1"
diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerMetricsSpec.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerMetricsSpec.scala
index 2ed25dd7e..d76e48e25 100644
--- a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerMetricsSpec.scala
+++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerMetricsSpec.scala
@@ -29,7 +29,7 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.OptionValues
-import scala.concurrent.Future
+import scala.concurrent.{ExecutionContextExecutor, Future}
import scala.concurrent.duration._
class AkkaHttpServerMetricsSpec extends AnyWordSpecLike with Matchers with InitAndStopKamonAfterAll with InstrumentInspection.Syntax
@@ -37,9 +37,9 @@ class AkkaHttpServerMetricsSpec extends AnyWordSpecLike with Matchers with InitA
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-server-metrics-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-server-metrics-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: ActorMaterializer = ActorMaterializer()
val port = 8083
val interface = "127.0.0.1"
diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala
index 00a5cefc8..2f2883ab3 100644
--- a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala
+++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala
@@ -31,15 +31,17 @@ import java.util.UUID
import javax.net.ssl.{HostnameVerifier, SSLSession}
import scala.concurrent.duration._
import scala.collection.JavaConverters._
+import scala.concurrent.ExecutionContextExecutor
+import scala.util.Try
class AkkaHttpServerTracingSpec extends AnyWordSpecLike with Matchers with ScalaFutures with Inside with InitAndStopKamonAfterAll
with MetricInspection.Syntax with Reconfigure with TestWebServer with Eventually with OptionValues with TestSpanReporter {
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-server-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-server-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: ActorMaterializer = ActorMaterializer()
val (sslSocketFactory, trustManager) = clientSSL()
val okHttp = new OkHttpClient.Builder()
@@ -228,7 +230,12 @@ class AkkaHttpServerTracingSpec extends AnyWordSpecLike with Matchers with Scala
"correctly time entity transfer timings" in {
val target = s"$protocol://$interface:$port/$stream"
- client.newCall(new Request.Builder().url(target).build()).execute()
+ def probablyScala3 = util.Properties.releaseVersion.contains("2.13.10")
+
+ def makeCall = client.newCall(new Request.Builder().url(target).build()).execute()
+      // Akka HTTP on Akka 2.7.0 (the Scala 3 build) is flaky here, so retry the call once
+ if (probablyScala3) Try(makeCall).orElse(Try(makeCall))
+ else makeCall
val span = eventually(timeout(10 seconds)) {
val span = testSpanReporter().nextSpan().value
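The probablyScala3 check above works because a Scala 3 build still runs on the 2.13 standard library, so scala.util.Properties reports a 2.13.x version and only the exact patch number (2.13.10 here) tells the Scala 3 build apart from the project's own 2.13 build. A hypothetical, less version-pinned alternative (not part of the patch) would probe the classpath for a class that only ships with the Scala 3 runtime library:

    import scala.util.Try

    object ScalaVersionProbe {
      // scala.runtime.LazyVals is provided by scala3-library only, so its presence on the
      // classpath signals a Scala 3 build regardless of the bundled 2.13.x library version
      def runningOnScala3: Boolean =
        Try(Class.forName("scala.runtime.LazyVals")).isSuccess
    }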
diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/ServerFlowWrapperSpec.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/ServerFlowWrapperSpec.scala
index 27803f3e4..f8ac71d5e 100644
--- a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/ServerFlowWrapperSpec.scala
+++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/ServerFlowWrapperSpec.scala
@@ -11,11 +11,13 @@ import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContextExecutor
+
class ServerFlowWrapperSpec extends AnyWordSpecLike with Matchers with ScalaFutures with InitAndStopKamonAfterAll {
- implicit private val system = ActorSystem("http-client-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-client-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: ActorMaterializer = ActorMaterializer()
private val okReturningFlow = Flow[HttpRequest].map { _ =>
HttpResponse(status = StatusCodes.OK, entity = HttpEntity("OK"))
diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/testkit/TestWebServer.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/testkit/TestWebServer.scala
index ebf719bbb..b55551733 100644
--- a/instrumentation/kamon-akka-http/src/test/scala/kamon/testkit/TestWebServer.scala
+++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/testkit/TestWebServer.scala
@@ -36,18 +36,19 @@ import kamon.instrumentation.akka.http.TracingDirectives
import org.json4s.{DefaultFormats, native}
import kamon.tag.Lookups.plain
import kamon.trace.Trace
+import org.json4s.native.Serialization
import scala.concurrent.{ExecutionContext, Future}
trait TestWebServer extends TracingDirectives {
- implicit val serialization = native.Serialization
- implicit val formats = DefaultFormats
+ implicit val serialization: Serialization.type = native.Serialization
+ implicit val formats: DefaultFormats.type = DefaultFormats
import Json4sSupport._
def startServer(interface: String, port: Int, https: Boolean = false)(implicit system: ActorSystem): WebServer = {
import Endpoints._
implicit val ec: ExecutionContext = system.dispatcher
- implicit val materializer = ActorMaterializer()
+ implicit val materializer: ActorMaterializer = ActorMaterializer()
val routes = logRequest("routing-request") {
get {
diff --git a/instrumentation/kamon-akka/build.sbt b/instrumentation/kamon-akka/build.sbt
index dee2bb4a6..dadb10f94 100644
--- a/instrumentation/kamon-akka/build.sbt
+++ b/instrumentation/kamon-akka/build.sbt
@@ -3,7 +3,7 @@ import Def.Initialize
val `Akka-2.4-version` = "2.4.20"
val `Akka-2.5-version` = "2.5.32"
-val `Akka-2.6-version` = "2.6.20"
+val `Akka-2.6-version` = "2.6.21"
/**
* Compile Configurations
@@ -31,7 +31,7 @@ configs(
// The Common configuration should always depend on the latest version of Akka. All code in the Common configuration
// should be source compatible with all Akka versions.
inConfig(Common)(Defaults.compileSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`)
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version)
))
libraryDependencies ++= { if(scalaBinaryVersion.value == "2.11") Seq.empty else Seq(
@@ -50,7 +50,7 @@ libraryDependencies ++= { if(scalaBinaryVersion.value == "2.11") Seq.empty else
inConfig(`Compile-Akka-2.6`)(Defaults.compileSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`),
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version),
sources := joinSources(Common, `Compile-Akka-2.6`).value
))
@@ -73,7 +73,7 @@ inConfig(`Compile-Akka-2.5`)(Defaults.compileSettings ++ Seq(
sources := joinSources(Common, `Compile-Akka-2.5`).value
))
-libraryDependencies ++= Seq(
+libraryDependencies ++= { if (scalaVersion.value startsWith "3") Seq.empty else Seq(
kanelaAgent % `Compile-Akka-2.5`,
scalatest % `Test-Akka-2.5`,
logbackClassic % `Test-Akka-2.5`,
@@ -85,21 +85,28 @@ libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-cluster-sharding" % `Akka-2.5-version` % `Compile-Akka-2.5`,
"com.typesafe.akka" %% "akka-protobuf" % `Akka-2.5-version` % `Compile-Akka-2.5`,
"com.typesafe.akka" %% "akka-testkit" % `Akka-2.5-version` % `Test-Akka-2.5`
-)
+)}
// Ensure that the packaged artifact contains the instrumentation for all Akka versions.
Compile / packageBin / mappings := Def.taskDyn {
- if(scalaBinaryVersion.value == "2.11")
+ if(scalaBinaryVersion.value == "2.11") {
Def.task {
joinProducts((`Compile-Akka-2.5` / products).value) ++
joinProducts((Common / unmanagedResourceDirectories).value)
}
- else
+ } else if (scalaVersion.value startsWith "3") {
+ Def.task {
+ joinProducts((`Compile-Akka-2.6` / products).value) ++
+ joinProducts((Common / unmanagedResourceDirectories).value)
+ }
+ } else {
Def.task {
joinProducts(
(`Compile-Akka-2.5` / products).value ++
(`Compile-Akka-2.6` / products).value
- ) ++ joinProducts((Common / unmanagedResourceDirectories).value)}
+ ) ++ joinProducts((Common / unmanagedResourceDirectories).value)
+ }
+ }
}.value
// Ensure that the packaged sources contains the instrumentation for all Akka versions.
@@ -108,26 +115,38 @@ Compile / packageSrc / mappings := Def.taskDyn {
Def.task {
(`Compile-Akka-2.5` / packageSrc / mappings).value ++
(Common / packageSrc / mappings).value
+ }
+ } else if (scalaVersion.value startsWith "3") {
+ Def.task {
+ (`Compile-Akka-2.6` / packageSrc / mappings).value ++
+ (Common / packageSrc / mappings).value
}
- } else
+ } else {
Def.task {
(`Compile-Akka-2.5` / packageSrc / mappings).value ++
(`Compile-Akka-2.6` / packageSrc / mappings).value ++
(Common / packageSrc / mappings).value
}
+ }
}.value
// Compile will return the compile analysis for the Common configuration but will run on all Akka configurations.
Compile / compile := Def.taskDyn {
- if(scalaBinaryVersion.value == "2.11")
+ if(scalaBinaryVersion.value == "2.11") {
Def.task {
(`Compile-Akka-2.5` / compile).value
}
- else
+  } else if (scalaVersion.value startsWith "3") {
+ Def.task {
+ (`Compile-Akka-2.6` / compile).value
+ }
+ } else {
Def.task {
(`Compile-Akka-2.5` / compile).value
(`Compile-Akka-2.6` / compile).value
}
+ }
}.value
exportJars := true
@@ -145,7 +164,7 @@ lazy val baseTestSettings = Seq(
)
inConfig(TestCommon)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`)
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version)
))
inConfig(`Test-Akka-2.5`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq(
@@ -155,20 +174,26 @@ inConfig(`Test-Akka-2.5`)(Defaults.testSettings ++ instrumentationSettings ++ ba
))
inConfig(`Test-Akka-2.6`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`),
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version),
sources := joinSources(TestCommon, `Test-Akka-2.6`).value,
unmanagedResourceDirectories ++= (Common / unmanagedResourceDirectories).value,
unmanagedResourceDirectories ++= (TestCommon / unmanagedResourceDirectories).value
))
Test / test := Def.taskDyn {
- if(scalaBinaryVersion.value == "2.11")
+ if(scalaBinaryVersion.value == "2.11") {
Def.task {
(`Test-Akka-2.5` / test).value
}
- else
+ } else if (scalaVersion.value startsWith "3") {
+ Def.task {
+ (`Test-Akka-2.6` / test).value
+ }
+  } else {
Def.task {
(`Test-Akka-2.5` / test).value
(`Test-Akka-2.6` / test).value
}
+ }
}.value
\ No newline at end of file
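Condensed, the compile/test/packageBin rewiring above follows a single dispatch rule: Scala 2.11 builds only against Akka 2.5, Scala 3 only against Akka 2.6, and everything else against both. A compact sketch of the same pattern for the compile task, assuming this build's Akka configurations and keys are in scope:

    Compile / compile := Def.taskDyn {
      scalaBinaryVersion.value match {
        case "2.11" => Def.task { (`Compile-Akka-2.5` / compile).value }
        case "3"    => Def.task { (`Compile-Akka-2.6` / compile).value }
        case _ =>
          Def.task {
            (`Compile-Akka-2.5` / compile).value
            (`Compile-Akka-2.6` / compile).value
          }
      }
    }.value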
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/akka/remote/artery/KamonRemoteInstrument.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/akka/remote/artery/KamonRemoteInstrument.scala
index d43d1b000..7f2ad3b0f 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/akka/remote/artery/KamonRemoteInstrument.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/akka/remote/artery/KamonRemoteInstrument.scala
@@ -9,6 +9,7 @@ import kanela.agent.libs.net.bytebuddy.asm.Advice
import org.slf4j.LoggerFactory
import java.nio.ByteBuffer
+import scala.annotation.static
import scala.util.control.NonFatal
class KamonRemoteInstrument(system: ExtendedActorSystem) extends RemoteInstrument {
@@ -85,12 +86,12 @@ object CaptureCurrentInboundEnvelope {
}
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) inboundEnvelope: InboundEnvelope): Unit = {
+ @static def enter(@Advice.Argument(0) inboundEnvelope: InboundEnvelope): Unit = {
CurrentInboundEnvelope.set(inboundEnvelope)
}
@Advice.OnMethodExit
- def exit(): Unit = {
+ @static def exit(): Unit = {
CurrentInboundEnvelope.remove()
}
}
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/ActorMonitorInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/ActorMonitorInstrumentation.scala
index ede62c633..aa5529412 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/ActorMonitorInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/ActorMonitorInstrumentation.scala
@@ -8,6 +8,7 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.Argument
import org.slf4j.LoggerFactory
+import scala.annotation.static
import scala.util.control.NonFatal
class ActorMonitorInstrumentation extends InstrumentationBuilder with VersionFiltering {
@@ -19,7 +20,7 @@ class ActorMonitorInstrumentation extends InstrumentationBuilder with VersionFil
* so we're forced to extract the original message type.
*/
onSubTypesOf("kamon.instrumentation.akka.instrumentations.ActorMonitor")
- .intercept(method("extractMessageClass"), MessageClassAdvice)
+ .intercept(method("extractMessageClass"), classOf[MessageClassAdvice])
}
}
@@ -27,20 +28,21 @@ class MessageClassAdvice
object MessageClassAdvice {
private val logger = LoggerFactory.getLogger(classOf[MessageClassAdvice])
- def extractMessageClass(@Argument(0) envelope: Envelope): String = {
+ @static def extractMessageClass(@Argument(0) envelope: Any): String = {
+ val e = envelope.asInstanceOf[Envelope]
try {
- envelope.message match {
+ e.message match {
case message: WrappedMessage => ActorCellInfo.simpleClassName(message.message.getClass)
- case _ => ActorCellInfo.simpleClassName(envelope.message.getClass)
+ case _ => ActorCellInfo.simpleClassName(e.message.getClass)
}
} catch {
// NoClassDefFound is thrown in early versions of akka 2.6
// so we can safely fallback to the original method
case _: NoClassDefFoundError =>
- ActorCellInfo.simpleClassName(envelope.message.getClass)
- case NonFatal(e) =>
- logger.info(s"Expected NoClassDefFoundError, got: ${e}")
- ActorCellInfo.simpleClassName(envelope.message.getClass)
+ ActorCellInfo.simpleClassName(e.message.getClass)
+ case NonFatal(ex) =>
+ logger.info(s"Expected NoClassDefFoundError, got: ${ex}")
+ ActorCellInfo.simpleClassName(e.message.getClass)
}
}
}
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala
index 75523ac4b..1c2166847 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala
@@ -17,7 +17,6 @@
package kamon.instrumentation.akka.instrumentations.akka_26
import java.util.concurrent.{AbstractExecutorService, Callable, ExecutorService, ThreadFactory, TimeUnit}
-
import akka.dispatch.{DefaultExecutorServiceConfigurator, DispatcherPrerequisites, Dispatchers, ExecutorServiceFactory, ExecutorServiceFactoryProvider, ForkJoinExecutorConfigurator, PinnedDispatcherConfigurator, ThreadPoolExecutorConfigurator}
import kamon.instrumentation.akka.instrumentations.VersionFiltering
import kamon.Kamon
@@ -29,6 +28,8 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{Argument, SuperCall, This}
+import scala.annotation.static
+
class DispatcherInstrumentation extends InstrumentationBuilder with VersionFiltering {
onAkka("2.6", "2.7") {
@@ -41,7 +42,7 @@ class DispatcherInstrumentation extends InstrumentationBuilder with VersionFilte
onSubTypesOf("akka.dispatch.ExecutorServiceFactory")
.mixin(classOf[HasDispatcherPrerequisites.Mixin])
.mixin(classOf[HasDispatcherName.Mixin])
- .intercept(method("createExecutorService"), InstrumentNewExecutorServiceOnAkka26)
+ .intercept(method("createExecutorService"), classOf[InstrumentNewExecutorServiceOnAkka26])
/**
* First step on getting the Actor System name is to read it from the prerequisites instance passed to the
@@ -77,10 +78,11 @@ class DispatcherInstrumentation extends InstrumentationBuilder with VersionFilte
}
+class CaptureDispatcherPrerequisitesOnExecutorConfigurator
object CaptureDispatcherPrerequisitesOnExecutorConfigurator {
@Advice.OnMethodExit(suppress = classOf[Throwable])
- def exit(@Advice.This configurator: Any, @Advice.Argument(1) prerequisites: DispatcherPrerequisites): Unit = {
+ @static def exit(@Advice.This configurator: Any, @Advice.Argument(1) prerequisites: DispatcherPrerequisites): Unit = {
configurator match {
case fjec: ForkJoinExecutorConfigurator => fjec.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites)
case tpec: ThreadPoolExecutorConfigurator => tpec.threadPoolConfig.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites)
@@ -91,19 +93,21 @@ object CaptureDispatcherPrerequisitesOnExecutorConfigurator {
}
}
+class CopyDispatcherInfoToExecutorServiceFactory
object CopyDispatcherInfoToExecutorServiceFactory {
@Advice.OnMethodExit
- def exit(@Advice.This poolConfig: HasDispatcherPrerequisites, @Advice.Argument(0) dispatcherName: String, @Advice.Return factory: Any): Unit = {
+ @static def exit(@Advice.This poolConfig: HasDispatcherPrerequisites, @Advice.Argument(0) dispatcherName: String, @Advice.Return factory: Any): Unit = {
val factoryWithMixins = factory.asInstanceOf[HasDispatcherName with HasDispatcherPrerequisites]
factoryWithMixins.setDispatcherPrerequisites(poolConfig.dispatcherPrerequisites)
factoryWithMixins.setDispatcherName(dispatcherName)
}
}
+class InstrumentNewExecutorServiceOnAkka26
object InstrumentNewExecutorServiceOnAkka26 {
- def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = {
+ @static def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = {
val executor = callable.call()
val actorSystemName = factory.dispatcherPrerequisites.settings.name
val dispatcherName = factory.dispatcherName
@@ -123,10 +127,11 @@ object InstrumentNewExecutorServiceOnAkka26 {
}
}
+class ThreadPoolConfigCopyAdvice
object ThreadPoolConfigCopyAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This original: Any, @Advice.Return copy: Any): Unit = {
+ @static def exit(@Advice.This original: Any, @Advice.Return copy: Any): Unit = {
copy.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(original.asInstanceOf[HasDispatcherPrerequisites].dispatcherPrerequisites)
copy.asInstanceOf[HasDispatcherName].setDispatcherName(original.asInstanceOf[HasDispatcherName].dispatcherName)
}
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala
index bb01c145c..5d1b937dd 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala
@@ -14,6 +14,8 @@ import kamon.instrumentation.context.{CaptureCurrentContextOnExit, HasContext}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class RemotingInstrumentation extends InstrumentationBuilder with VersionFiltering {
@@ -70,41 +72,45 @@ class RemotingInstrumentation extends InstrumentationBuilder with VersionFilteri
}
+class ArteryMessageDispatcherAdvice
object ArteryMessageDispatcherAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) envelope: Any): Storage.Scope =
+ @static def enter(@Advice.Argument(0) envelope: Any): Storage.Scope =
Kamon.storeContext(envelope.asInstanceOf[HasContext].context)
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Storage.Scope): Unit =
+ @static def exit(@Advice.Enter scope: Storage.Scope): Unit =
scope.close()
}
+class CopyContextOnReusableEnvelope
object CopyContextOnReusableEnvelope {
@Advice.OnMethodExit
- def exit(@Advice.This oldEnvelope: Any, @Advice.Return newEnvelope: Any): Unit =
+ @static def exit(@Advice.This oldEnvelope: Any, @Advice.Return newEnvelope: Any): Unit =
newEnvelope.asInstanceOf[HasContext].setContext(oldEnvelope.asInstanceOf[HasContext].context)
}
+class CaptureCurrentContextOnReusableEnvelope
object CaptureCurrentContextOnReusableEnvelope {
@Advice.OnMethodExit
- def exit(@Advice.Return envelope: Any): Unit = {
+ @static def exit(@Advice.Return envelope: Any): Unit = {
envelope.asInstanceOf[HasContext].setContext(Kamon.currentContext())
}
}
+class WriteSendWithContext
object WriteSendWithContext {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) send: Any): Scope = {
+ @static def enter(@Advice.Argument(0) send: Any): Scope = {
Kamon.storeContext(send.asInstanceOf[HasContext].context)
}
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Scope): Unit = {
+ @static def exit(@Advice.Enter scope: Scope): Unit = {
scope.asInstanceOf[Scope].close()
}
}
@@ -122,23 +128,25 @@ object HasSerializationInstruments {
}
}
+class InitializeActorSystemAdvice
object InitializeActorSystemAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This system: ActorSystem with HasSerializationInstruments): Unit =
+ @static def exit(@Advice.This system: ActorSystem with HasSerializationInstruments): Unit =
system.setSerializationInstruments(new SerializationInstruments(system.name))
}
+class MeasureSerializationTime
object MeasureSerializationTime {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
if(AkkaRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long): Unit = {
+ @static def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long): Unit = {
if(startNanoTime != 0L) {
system.asInstanceOf[HasSerializationInstruments]
.serializationInstruments
@@ -148,15 +156,16 @@ object MeasureSerializationTime {
}
}
+class MeasureDeserializationTime
object MeasureDeserializationTime {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
if(AkkaRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long, @Advice.Return msg: Any): Unit = {
+ @static def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long, @Advice.Return msg: Any): Unit = {
if(AkkaPrivateAccess.isSystemMessage(msg)) {
msg match {
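All of the advices above share the same enter/exit handshake: whatever the @Advice.OnMethodEnter method returns (a start timestamp, a context Scope) is handed back to the @Advice.OnMethodExit method through @Advice.Enter. A self-contained sketch with a hypothetical advice name, using the same @static companion-class encoding as the rest of the patch:

    import kanela.agent.libs.net.bytebuddy.asm.Advice
    import scala.annotation.static

    class TimingAdvice
    object TimingAdvice {

      @Advice.OnMethodEnter
      @static def enter(): Long =
        System.nanoTime() // value is carried over to exit()

      @Advice.OnMethodExit
      @static def exit(@Advice.Enter startNanos: Long): Unit =
        println(s"instrumented call took ${System.nanoTime() - startNanos} ns")
    }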
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala
index 54e1f606c..06a8ba93b 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala
@@ -9,6 +9,8 @@ import kamon.context.BinaryPropagation.ByteStreamReader
import kamon.instrumentation.akka.AkkaRemoteMetrics
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodEnter}
+import scala.annotation.static
+
/**
* Advisor for akka.remote.transport.AkkaPduProtobufCodec$::decodeMessage
*/
@@ -17,7 +19,7 @@ class AkkaPduProtobufCodecDecodeMessage
object AkkaPduProtobufCodecDecodeMessage {
@OnMethodEnter
- def enter(@Argument(0) bs: ByteString, @Argument(1) provider: RemoteActorRefProvider, @Argument(2) localAddress: Address): Unit = {
+ @static def enter(@Argument(0) bs: ByteString, @Argument(1) provider: RemoteActorRefProvider, @Argument(2) localAddress: Address): Unit = {
val ackAndEnvelope = AckAndContextAwareEnvelopeContainer.parseFrom(bs.toArray)
if (ackAndEnvelope.hasEnvelope && ackAndEnvelope.getEnvelope.hasTraceContext) {
val remoteCtx = ackAndEnvelope.getEnvelope.getTraceContext
diff --git a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/ArterySerializationAdvice.scala b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/ArterySerializationAdvice.scala
index 78b5f7d95..4ba88a88f 100644
--- a/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/ArterySerializationAdvice.scala
+++ b/instrumentation/kamon-akka/src/akka-2.6/scala/kamon/instrumentation/akka/instrumentations/akka_26/remote/internal/ArterySerializationAdvice.scala
@@ -11,6 +11,8 @@ import kamon.instrumentation.akka.AkkaRemoteMetrics
import kamon.instrumentation.context.HasContext
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
/**
* For Artery messages we will always add two sections to the end of each serialized message: the Context and the size
@@ -25,12 +27,12 @@ class SerializeForArteryAdvice
object SerializeForArteryAdvice {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
System.nanoTime()
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) serialization: Serialization, @Advice.Argument(1) envelope: OutboundEnvelope,
+ @static def exit(@Advice.Argument(0) serialization: Serialization, @Advice.Argument(1) envelope: OutboundEnvelope,
@Advice.Argument(3) envelopeBuffer: EnvelopeBuffer, @Advice.Enter startTime: Long): Unit = {
val instruments = AkkaRemoteMetrics.serializationInstruments(serialization.system.name)
@@ -75,7 +77,7 @@ object DeserializeForArteryAdvice {
)
@Advice.OnMethodEnter
- def exit(@Advice.Argument(5) envelopeBuffer: EnvelopeBuffer): DeserializationInfo = {
+ @static def exit(@Advice.Argument(5) envelopeBuffer: EnvelopeBuffer): DeserializationInfo = {
val startTime = System.nanoTime()
val messageBuffer = envelopeBuffer.byteBuffer
val messageStart = messageBuffer.position()
@@ -102,7 +104,7 @@ object DeserializeForArteryAdvice {
}
@Advice.OnMethodExit(onThrowable = classOf[Throwable])
- def exit(@Advice.Argument(0) system: ActorSystem, @Advice.Argument(5) envelopeBuffer: EnvelopeBuffer,
+ @static def exit(@Advice.Argument(0) system: ActorSystem, @Advice.Argument(5) envelopeBuffer: EnvelopeBuffer,
@Advice.Enter deserializationInfo: DeserializationInfo, @Advice.Thrown error: Throwable): Unit = {
if(error == null) {
@@ -142,7 +144,7 @@ class CaptureContextOnInboundEnvelope
object CaptureContextOnInboundEnvelope {
@Advice.OnMethodEnter
- def enter(@Advice.This inboundEnvelope: Any): Unit = {
+ @static def enter(@Advice.This inboundEnvelope: Any): Unit = {
val lastContext = DeserializeForArteryAdvice.LastDeserializedContext.get()
if(lastContext != null) {
inboundEnvelope.asInstanceOf[HasContext].setContext(lastContext)
diff --git a/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
new file mode 100644
index 000000000..7e0ed4b79
--- /dev/null
+++ b/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
@@ -0,0 +1,46 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2018 the kamon project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.akka.instrumentations
+
+import akka.actor.instrumentation.ReplaceWithAdvice
+import kanela.agent.api.instrumentation.InstrumentationBuilder
+
+class ActorInstrumentation extends InstrumentationBuilder {
+
+ /**
+ * This is where most of the Actor processing magic happens. Handling of messages, errors and system messages.
+ */
+ onType("akka.actor.ActorCell")
+ .mixin(classOf[HasActorMonitor.Mixin])
+ .advise(isConstructor, ActorCellConstructorAdvice)
+ .advise(method("invoke"), classOf[ActorCellInvokeAdvice])
+ .advise(method("handleInvokeFailure"), HandleInvokeFailureMethodAdvice)
+ .advise(method("terminate"), TerminateMethodAdvice)
+ .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
+ .advise(method("swapMailbox"), ActorCellSwapMailboxAdvice)
+ .advise(method("invokeAll$1"), InvokeAllMethodInterceptor)
+
+ /**
+ * Ensures that the Context is properly propagated when messages are temporarily stored on an UnstartedCell.
+ */
+ onType("akka.actor.UnstartedCell")
+ .mixin(classOf[HasActorMonitor.Mixin])
+ .advise(isConstructor, RepointableActorCellConstructorAdvice)
+ .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
+ .advise(method("replaceWith"), classOf[ReplaceWithAdvice])
+
+}
\ No newline at end of file
diff --git a/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
new file mode 100644
index 000000000..ca6c11377
--- /dev/null
+++ b/instrumentation/kamon-akka/src/common/scala-2/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
@@ -0,0 +1,30 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2018 the kamon project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.akka.instrumentations
+
+import kanela.agent.api.instrumentation.InstrumentationBuilder
+
+class EventStreamInstrumentation extends InstrumentationBuilder {
+
+ /**
+ * Counts dead letters and unhandled messages as they are published on the EventStream.
+ */
+ onType("akka.event.EventStream")
+ .mixin(classOf[HasSystem.Mixin])
+ .advise(isConstructor.and(takesArguments(2)), ConstructorAdvice)
+ .advise(method("publish").and(takesArguments(1)), PublishMethodAdvice)
+}
diff --git a/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
new file mode 100644
index 000000000..3142e4ff9
--- /dev/null
+++ b/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
@@ -0,0 +1,50 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2018 the kamon project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.akka.instrumentations
+
+import akka.actor.instrumentation.ReplaceWithAdvice
+import kanela.agent.api.instrumentation.InstrumentationBuilder
+
+class ActorInstrumentation extends InstrumentationBuilder {
+
+ /**
+ * This is where most of the Actor processing magic happens. Handling of messages, errors and system messages.
+ */
+ onType("akka.actor.dungeon.Dispatch")
+ .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageAdvice])
+ .advise(method("swapMailbox"), classOf[ActorCellSwapMailboxAdvice])
+
+ onType("akka.actor.dungeon.FaultHandling")
+ .advise(method("handleInvokeFailure"), classOf[HandleInvokeFailureMethodAdvice])
+ .advise(method("terminate"), classOf[TerminateMethodAdvice])
+
+ onType("akka.actor.ActorCell")
+ .mixin(classOf[HasActorMonitor.Mixin])
+ .advise(isConstructor, classOf[ActorCellConstructorAdvice])
+ .advise(method("invoke"), classOf[ActorCellInvokeAdvice])
+ .advise(method("invokeAll$1"), classOf[InvokeAllMethodInterceptor])
+
+ /**
+ * Ensures that the Context is properly propagated when messages are temporarily stored on an UnstartedCell.
+ */
+ onType("akka.actor.UnstartedCell")
+ .mixin(classOf[HasActorMonitor.Mixin])
+    .advise(isConstructor, classOf[RepointableActorCellConstructorAdvice])
+    .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageAdvice])
+ .advise(method("replaceWith"), classOf[ReplaceWithAdvice])
+
+}
\ No newline at end of file
diff --git a/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
new file mode 100644
index 000000000..5217291c2
--- /dev/null
+++ b/instrumentation/kamon-akka/src/common/scala-3/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
@@ -0,0 +1,31 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2018 the kamon project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.akka.instrumentations
+
+import kanela.agent.api.instrumentation.InstrumentationBuilder
+
+class EventStreamInstrumentation extends InstrumentationBuilder {
+
+ /**
+ * Counts dead letters and unhandled messages as they are published on the EventStream.
+ */
+ onType("akka.event.EventStream")
+ .mixin(classOf[HasSystem.Mixin])
+ .advise(isConstructor.and(takesArguments(2)), ConstructorAdvice)
+ onType("akka.event.SubchannelClassification")
+ .advise(method("publish").and(takesArguments(1)), PublishMethodAdvice)
+}
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
index ad1352c5d..92264325e 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorInstrumentation.scala
@@ -26,31 +26,7 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodEnter, OnMethodExit, This}
-class ActorInstrumentation extends InstrumentationBuilder {
-
- /**
- * This is where most of the Actor processing magic happens. Handling of messages, errors and system messages.
- */
- onType("akka.actor.ActorCell")
- .mixin(classOf[HasActorMonitor.Mixin])
- .advise(isConstructor, ActorCellConstructorAdvice)
- .advise(method("invoke"), classOf[ActorCellInvokeAdvice])
- .advise(method("handleInvokeFailure"), HandleInvokeFailureMethodAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
- .advise(method("terminate"), TerminateMethodAdvice)
- .advise(method("swapMailbox"), ActorCellSwapMailboxAdvice)
- .advise(method("invokeAll$1"), InvokeAllMethodInterceptor)
-
- /**
- * Ensures that the Context is properly propagated when messages are temporarily stored on an UnstartedCell.
- */
- onType("akka.actor.UnstartedCell")
- .mixin(classOf[HasActorMonitor.Mixin])
- .advise(isConstructor, RepointableActorCellConstructorAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
- .advise(method("replaceWith"), classOf[ReplaceWithAdvice])
-
-}
+import scala.annotation.static
trait HasActorMonitor {
def actorMonitor: ActorMonitor
@@ -68,10 +44,11 @@ object HasActorMonitor {
cell.asInstanceOf[HasActorMonitor].actorMonitor
}
+class ActorCellSwapMailboxAdvice
object ActorCellSwapMailboxAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This cell: Any, @Advice.Argument(0) newMailbox: Any): Boolean = {
+ @static def enter(@Advice.This cell: Any, @Advice.Argument(0) newMailbox: Any): Boolean = {
val isShuttingDown = AkkaPrivateAccess.isDeadLettersMailbox(cell, newMailbox)
if(isShuttingDown)
actorMonitor(cell).onTerminationStart()
@@ -80,31 +57,33 @@ object ActorCellSwapMailboxAdvice {
}
@Advice.OnMethodExit
- def exit(@Advice.This cell: Any, @Advice.Return oldMailbox: Any, @Advice.Enter isShuttingDown: Boolean): Unit = {
+ @static def exit(@Advice.This cell: Any, @Advice.Return oldMailbox: Any, @Advice.Enter isShuttingDown: Boolean): Unit = {
if(oldMailbox != null && isShuttingDown) {
actorMonitor(cell).onDroppedMessages(AkkaPrivateAccess.mailboxMessageCount(oldMailbox))
}
}
}
+class InvokeAllMethodInterceptor
object InvokeAllMethodInterceptor {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) message: Any): Option[Scope] =
+ @static def enter(@Advice.Argument(0) message: Any): Option[Scope] =
message match {
case m: HasContext => Some(Kamon.storeContext(m.context))
case _ => None
}
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Option[Scope]): Unit =
+ @static def exit(@Advice.Enter scope: Option[Scope]): Unit =
scope.foreach(_.close())
}
+class SendMessageAdvice
object SendMessageAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any, @Argument(0) envelope: Object): Unit = {
+ @static def onEnter(@This cell: Any, @Argument(0) envelope: Object): Unit = {
val instrumentation = actorMonitor(cell)
envelope.asInstanceOf[HasContext].setContext(instrumentation.captureEnvelopeContext())
@@ -112,32 +91,36 @@ object SendMessageAdvice {
}
}
+class RepointableActorCellConstructorAdvice
object RepointableActorCellConstructorAdvice {
@Advice.OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(3) parent: ActorRef): Unit =
+ @static def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(3) parent: ActorRef): Unit =
cell.asInstanceOf[HasActorMonitor].setActorMonitor(ActorMonitor.from(cell, ref, parent, system))
}
+class ActorCellConstructorAdvice
object ActorCellConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(4) parent: ActorRef): Unit =
+ @static def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(4) parent: ActorRef): Unit =
cell.asInstanceOf[HasActorMonitor].setActorMonitor(ActorMonitor.from(cell, ref, parent, system))
}
+class HandleInvokeFailureMethodAdvice
object HandleInvokeFailureMethodAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any, @Argument(1) failure: Throwable): Unit =
+ @static def onEnter(@This cell: Any, @Argument(1) failure: Throwable): Unit =
actorMonitor(cell).onFailure(failure)
}
+class TerminateMethodAdvice
object TerminateMethodAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any): Unit = {
+ @static def onEnter(@This cell: Any): Unit = {
actorMonitor(cell).cleanup()
if (AkkaPrivateAccess.isRoutedActorCell(cell)) {
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorLoggingInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorLoggingInstrumentation.scala
index 4a644974c..d29d82f13 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorLoggingInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorLoggingInstrumentation.scala
@@ -23,6 +23,8 @@ import kamon.instrumentation.context.HasContext
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, Enter, OnMethodEnter, OnMethodExit}
+import scala.annotation.static
+
class ActorLoggingInstrumentation extends InstrumentationBuilder {
/**
@@ -36,13 +38,14 @@ class ActorLoggingInstrumentation extends InstrumentationBuilder {
.advise(method("withMdc"), WithMdcMethodAdvice)
}
+class WithMdcMethodAdvice
object WithMdcMethodAdvice {
@OnMethodEnter
- def enter(@Argument(1) logEvent: LogEvent): Scope =
+ @static def enter(@Argument(1) logEvent: LogEvent): Scope =
Kamon.storeContext(logEvent.asInstanceOf[HasContext].context)
@OnMethodExit
- def exit(@Enter scope: Scope): Unit =
+ @static def exit(@Enter scope: Scope): Unit =
scope.close()
}
\ No newline at end of file
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorRefInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorRefInstrumentation.scala
index 9c5619172..30c80f511 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorRefInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ActorRefInstrumentation.scala
@@ -7,6 +7,8 @@ import kamon.instrumentation.context.HasContext
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class ActorRefInstrumentation extends InstrumentationBuilder {
/**
@@ -38,14 +40,15 @@ object HasGroupPath {
}
}
+class RepointableActorRefPointAdvice
object RepointableActorRefPointAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This repointableActorRef: Object): Scope =
+ @static def enter(@Advice.This repointableActorRef: Object): Scope =
Kamon.storeContext(repointableActorRef.asInstanceOf[HasContext].context)
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Scope, @Advice.This repointableActorRef: Object): Unit = {
+ @static def exit(@Advice.Enter scope: Scope, @Advice.This repointableActorRef: Object): Unit = {
scope.close()
repointableActorRef
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/AskPatternInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/AskPatternInstrumentation.scala
index d93fccc67..6d9f83c50 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/AskPatternInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/AskPatternInstrumentation.scala
@@ -27,7 +27,7 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodExit, Origin, Return}
import org.slf4j.LoggerFactory
-import scala.compat.Platform.EOL
+import scala.annotation.static
import scala.concurrent.Future
class AskPatternInstrumentation extends InstrumentationBuilder {
@@ -52,7 +52,7 @@ object AskPatternInstrumentation {
)
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@Origin origin: String, @Return future: Future[AnyRef], @Argument(0) actor: ActorRef, @Argument(2) timeout: Timeout) = {
+ @static def onExit(@Origin origin: String, @Return future: Future[AnyRef], @Argument(0) actor: ActorRef, @Argument(2) timeout: Timeout) = {
if(AkkaPrivateAccess.isInternalAndActiveActorRef(actor) && Kamon.currentContext().nonEmpty()) {
AkkaInstrumentation.settings().askPatternWarning match {
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ClusterInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ClusterInstrumentation.scala
index 07bff8cb3..921a1e709 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ClusterInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/ClusterInstrumentation.scala
@@ -10,6 +10,7 @@ import kamon.tag.TagSet
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
import scala.collection.mutable
class ClusterInstrumentation extends InstrumentationBuilder with VersionFiltering {
@@ -20,10 +21,11 @@ class ClusterInstrumentation extends InstrumentationBuilder with VersionFilterin
}
}
+class AfterClusterInitializationAdvice
object AfterClusterInitializationAdvice {
@Advice.OnMethodExit
- def onClusterExtensionCreated(@Advice.Argument(0) system: ExtendedActorSystem, @Advice.Return clusterExtension: Cluster): Unit = {
+ @static def onClusterExtensionCreated(@Advice.Argument(0) system: ExtendedActorSystem, @Advice.Return clusterExtension: Cluster): Unit = {
val settings = AkkaInstrumentation.settings()
if(settings.exposeClusterMetrics) {
val stateExporter = system.systemActorOf(Props[ClusterInstrumentation.ClusterStateExporter], "kamon-cluster-state-exporter")
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EnvelopeInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EnvelopeInstrumentation.scala
index bde1999fa..96b560d30 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EnvelopeInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EnvelopeInstrumentation.scala
@@ -20,6 +20,8 @@ import kamon.instrumentation.context.{HasContext, HasTimestamp}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class EnvelopeInstrumentation extends InstrumentationBuilder {
@@ -32,10 +34,11 @@ class EnvelopeInstrumentation extends InstrumentationBuilder {
.advise(method("copy"), EnvelopeCopyAdvice)
}
+class EnvelopeCopyAdvice
object EnvelopeCopyAdvice {
@Advice.OnMethodExit
- def exit(@Advice.Return newEnvelope: Any, @Advice.This envelope: Any): Unit = {
+ @static def exit(@Advice.Return newEnvelope: Any, @Advice.This envelope: Any): Unit = {
newEnvelope.asInstanceOf[HasContext].setContext(envelope.asInstanceOf[HasContext].context)
newEnvelope.asInstanceOf[HasTimestamp].setTimestamp(envelope.asInstanceOf[HasTimestamp].timestamp)
}
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
index 7f2f246f8..398cad107 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/EventStreamInstrumentation.scala
@@ -19,36 +19,35 @@ package kamon.instrumentation.akka.instrumentations
import akka.actor.{ActorSystem, DeadLetter, UnhandledMessage}
import kamon.instrumentation.akka.AkkaMetrics
import kanela.agent.api.instrumentation.InstrumentationBuilder
+import kanela.agent.libs.net.bytebuddy.asm.Advice
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodExit, This}
-class EventStreamInstrumentation extends InstrumentationBuilder {
-
- /**
- * Counts dead letters and unhandled messages as they are published on the EventStream.
- */
- onType("akka.event.EventStream")
- .mixin(classOf[HasSystem.Mixin])
- .advise(isConstructor.and(takesArguments(2)), ConstructorAdvice)
- .advise(method("publish").and(takesArguments(1)), PublishMethodAdvice)
-}
-
+import scala.annotation.static
+class ConstructorAdvice
object ConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This eventStream: HasSystem, @Argument(0) system:ActorSystem): Unit = {
+ @static def exit(@Advice.This eventStream: HasSystem, @Argument(0) system:ActorSystem): Unit = {
eventStream.setSystem(system)
}
}
+class PublishMethodAdvice
object PublishMethodAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This stream:HasSystem, @Argument(0) event: AnyRef):Unit = event match {
- case _: DeadLetter => AkkaMetrics.forSystem(stream.system.name).deadLetters.increment()
- case _: UnhandledMessage => AkkaMetrics.forSystem(stream.system.name).unhandledMessages.increment()
- case _ => ()
- }
+ @static def exit(@This any: Any, @Argument(0) event: AnyRef): Unit =
+ try {
+ def stream = any.asInstanceOf[HasSystem]
+ event match {
+ case _: DeadLetter => AkkaMetrics.forSystem(stream.system.name).deadLetters.increment()
+ case _: UnhandledMessage => AkkaMetrics.forSystem(stream.system.name).unhandledMessages.increment()
+ case _ => ()
+ }
+ } catch {
+ case _: ClassCastException => ()
+ }
}
trait HasSystem {
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/RouterInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/RouterInstrumentation.scala
index 42cb0ba48..fbadd5f75 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/RouterInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/RouterInstrumentation.scala
@@ -4,6 +4,8 @@ import akka.actor.{ActorRef, ActorSystem, Props}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice._
+import scala.annotation.static
+
class RouterInstrumentation extends InstrumentationBuilder {
/**
@@ -60,35 +62,38 @@ object HasRouterMonitor {
}
}
+class RoutedActorRefConstructorAdvice
object RoutedActorRefConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This ref: ActorRef, @Argument(1) routerProps: Props, @Argument(4) routeeProps: Props): Unit = {
+ @static def exit(@This ref: ActorRef, @Argument(1) routerProps: Props, @Argument(4) routeeProps: Props): Unit = {
val routedRef = ref.asInstanceOf[HasRouterProps]
routedRef.setRouteeProps(routeeProps)
routedRef.setRouterProps(routerProps)
}
}
+class RoutedActorCellConstructorAdvice
object RoutedActorCellConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(5) parent: ActorRef): Unit = {
+ @static def exit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(5) parent: ActorRef): Unit = {
cell.asInstanceOf[HasRouterMonitor].setRouterMonitor(RouterMonitor.from(cell, ref, parent, system))
}
}
+class SendMessageOnRouterAdvice
object SendMessageOnRouterAdvice {
def routerInstrumentation(cell: Any): RouterMonitor =
cell.asInstanceOf[HasRouterMonitor].routerMonitor
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any): Long =
+ @static def onEnter(@This cell: Any): Long =
routerInstrumentation(cell).processMessageStart()
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Enter timestampBeforeProcessing: Long): Unit =
+ @static def onExit(@This cell: Any, @Enter timestampBeforeProcessing: Long): Unit =
routerInstrumentation(cell).processMessageEnd(timestampBeforeProcessing)
}
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/internal/CellWrapper.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/internal/CellWrapper.scala
index e09040343..2086b444f 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/internal/CellWrapper.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/instrumentations/internal/CellWrapper.scala
@@ -38,17 +38,15 @@ import kamon.instrumentation.context.HasContext
* will be propagated for all queued messages.
*/
class CellWrapper(val underlying: Cell) extends Cell {
- override def sendMessage(msg: Envelope): Unit = {
- if(msg.isInstanceOf[HasContext]) {
- val context = msg.asInstanceOf[HasContext].context
- Kamon.runWithContext(context) {
- underlying.sendMessage(msg)
- }
- }
- else {
+ override def sendMessage(msg: Envelope): Unit = try {
+ val context = msg.asInstanceOf[HasContext].context
+ Kamon.runWithContext(context) {
underlying.sendMessage(msg)
}
}
+ catch {
+ case _: ClassCastException => underlying.sendMessage(msg)
+ }
override def sendSystemMessage(msg: SystemMessage): Unit =
underlying.sendSystemMessage(msg)
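Note on the two hunks above (PublishMethodAdvice and CellWrapper): instead of guarding with `isInstanceOf` checks, the value is cast to the `HasContext` mixin directly and a `ClassCastException` is treated as "not instrumented", falling back to the plain send. A condensed sketch of that cast-and-recover pattern, using the same Kamon calls as the patch but with an illustrative helper name:

import kamon.Kamon
import kamon.instrumentation.context.HasContext

object ContextPropagationSketch {
  def sendWithContext[A](msg: A)(send: A => Unit): Unit =
    try {
      val context = msg.asInstanceOf[HasContext].context
      Kamon.runWithContext(context)(send(msg))
    } catch {
      // The message was not enhanced with the HasContext mixin; send it untouched.
      case _: ClassCastException => send(msg)
    }
}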
diff --git a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/remote/ShardingInstrumentation.scala b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/remote/ShardingInstrumentation.scala
index 6dd860ede..c24605453 100644
--- a/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/remote/ShardingInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/common/scala/kamon/instrumentation/akka/remote/ShardingInstrumentation.scala
@@ -9,6 +9,8 @@ import kamon.util.Filter
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class ShardingInstrumentation extends InstrumentationBuilder with VersionFiltering {
onAkka("2.5", "2.6", "2.7") {
@@ -91,10 +93,11 @@ object HasShardCounters {
}
}
+class InitializeShardRegionAdvice
object InitializeShardRegionAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This region: Actor with HasShardingInstruments, @Advice.Argument(0) typeName: String): Unit = {
+ @static def exit(@Advice.This region: Actor with HasShardingInstruments, @Advice.Argument(0) typeName: String): Unit = {
region.setShardingInstruments(new ShardingInstruments(region.context.system.name, typeName))
val system = region.context.system
@@ -105,10 +108,11 @@ object InitializeShardRegionAdvice {
}
}
+class InitializeShardAdvice
object InitializeShardAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) typeName: String,
+ @static def exit(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) typeName: String,
@Advice.Argument(1) shardID: String): Unit = {
val shardingInstruments = new ShardingInstruments(shard.context.system.name, typeName)
@@ -120,10 +124,11 @@ object InitializeShardAdvice {
}
}
+class DeliverMessageOnShardRegion
object DeliverMessageOnShardRegion {
@Advice.OnMethodEnter
- def enter(@Advice.This region: HasShardingInstruments, @Advice.Argument(0) message: Any): Unit = {
+ @static def enter(@Advice.This region: HasShardingInstruments, @Advice.Argument(0) message: Any): Unit = {
// NOTE: The "deliverMessage" method also handles the "RestartShard" message, which is not an user-facing message
// but it should not happen so often so we wont do any additional matching on it to filter it out of the
// metric.
@@ -132,32 +137,36 @@ object DeliverMessageOnShardRegion {
}
+class RegionPostStopAdvice
object RegionPostStopAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: HasShardingInstruments): Unit =
shard.shardingInstruments.remove()
}
+class ShardInitializedAdvice
object ShardInitializedAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: HasShardingInstruments): Unit =
shard.shardingInstruments.hostedShards.increment()
}
+class ShardPostStopStoppedAdvice
object ShardPostStopStoppedAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: HasShardingInstruments): Unit =
shard.shardingInstruments.hostedShards.decrement()
}
+class ShardGetOrCreateEntityAdvice
object ShardGetOrCreateEntityAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) entityID: String): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) entityID: String): Unit = {
if(shard.context.child(entityID).isEmpty) {
// The entity is not created just yet, but we know that it will be created right after this.
shard.shardingInstruments.hostedEntities.increment()
@@ -166,18 +175,20 @@ object ShardGetOrCreateEntityAdvice {
}
}
+class ShardEntityTerminatedAdvice
object ShardEntityTerminatedAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
shard.shardingInstruments.hostedEntities.decrement()
shard.hostedEntitiesCounter.decrementAndGet()
}
}
+class ShardDeliverMessageAdvice
object ShardDeliverMessageAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
shard.processedMessagesCounter.incrementAndGet()
}
diff --git a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
index 44ac75834..aefa73139 100644
--- a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
+++ b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
@@ -21,20 +21,21 @@ import akka.routing._
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import kamon.Kamon
-import kamon.testkit.{InitAndStopKamonAfterAll, MetricInspection}
import kamon.tag.Lookups._
+import kamon.testkit.{InitAndStopKamonAfterAll, MetricInspection}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import scala.collection.mutable.ListBuffer
+import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrumentationSpec")) with AnyWordSpecLike
with BeforeAndAfterAll with ImplicitSender with Eventually with MetricInspection.Syntax with Matchers with InitAndStopKamonAfterAll {
- implicit lazy val executionContext = system.dispatcher
+ implicit lazy val executionContext: ExecutionContext = system.dispatcher
import ContextTesting._
"the message passing instrumentation" should {
@@ -59,7 +60,7 @@ class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrum
}
"propagate the current context when using the ask pattern" in new EchoActorFixture {
- implicit val timeout = Timeout(1 seconds)
+ implicit val timeout: Timeout = Timeout(1 seconds)
Kamon.runWithContext(testContext("propagate-with-ask")) {
// The pipe pattern use Futures internally, so FutureTracing test should cover the underpinnings of it.
(contextEchoActor ? "test") pipeTo (testActor)
@@ -122,11 +123,11 @@ class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrum
}
trait EchoActorFixture {
- val contextEchoActor = system.actorOf(Props[ContextStringEcho])
+ val contextEchoActor: ActorRef = system.actorOf(Props[ContextStringEcho])
}
trait EchoSimpleRouterFixture {
- val router = {
+ val router: Router = {
val routees = Vector.fill(5) {
val r = system.actorOf(Props[ContextStringEcho])
ActorRefRoutee(r)
@@ -136,22 +137,22 @@ class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrum
}
trait EchoPoolRouterFixture {
- val pool = system.actorOf(RoundRobinPool(nrOfInstances = 5).props(Props[ContextStringEcho]), "pool-router")
+ val pool: ActorRef = system.actorOf(RoundRobinPool(nrOfInstances = 5).props(Props[ContextStringEcho]), "pool-router")
}
trait EchoGroupRouterFixture {
- val routees = Vector.fill(5) {
+ val routees: Vector[ActorRef] = Vector.fill(5) {
system.actorOf(Props[ContextStringEcho])
}
- val group = system.actorOf(RoundRobinGroup(routees.map(_.path.toStringWithoutAddress)).props(), "group-router")
+ val group: ActorRef = system.actorOf(RoundRobinGroup(routees.map(_.path.toStringWithoutAddress)).props(), "group-router")
}
}
class ContextStringEcho extends Actor {
import ContextTesting._
- def receive = {
+ def receive: Receive = {
case _: String =>
sender ! Kamon.currentContext().getTag(plain(TestKey))
}
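Note on the spec changes above: nearly all of them add explicit result types to implicit and public vals (`ExecutionContext`, `Timeout`, `ActorRef`, `Router`, `Receive`). Scala 3 is stricter about implicit definitions without a declared type, and the annotations keep the cross-built sources uniform. A small sketch of the resulting style, with illustrative names only:

import akka.actor.ActorSystem
import akka.util.Timeout
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

object AnnotatedImplicitsSketch {
  val system: ActorSystem = ActorSystem("example")
  implicit lazy val executionContext: ExecutionContext = system.dispatcher
  implicit val askTimeout: Timeout = Timeout(10.millis)
}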
diff --git a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AkkaTestKitInstrumentation.scala b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AkkaTestKitInstrumentation.scala
index ff71cbaa7..d3e11ae07 100644
--- a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AkkaTestKitInstrumentation.scala
+++ b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AkkaTestKitInstrumentation.scala
@@ -3,6 +3,8 @@ package kamon.instrumentation.akka
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class AkkaTestKitInstrumentation extends InstrumentationBuilder {
/**
@@ -15,10 +17,11 @@ class AkkaTestKitInstrumentation extends InstrumentationBuilder {
.advise(method("receiveOne"), DelayReceiveOne)
}
+class DelayReceiveOne
object DelayReceiveOne {
@Advice.OnMethodExit(suppress = classOf[Throwable])
- def exit(): Unit =
+ @static def exit(): Unit =
Thread.sleep(5)
}
diff --git a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
index b355b9f31..8f28f5e0d 100644
--- a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
+++ b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
@@ -28,13 +28,14 @@ import kamon.instrumentation.akka.ContextTesting._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._
class AskPatternInstrumentationSpec extends TestKit(ActorSystem("AskPatternInstrumentationSpec")) with AnyWordSpecLike
with InitAndStopKamonAfterAll with ImplicitSender {
- implicit lazy val ec = system.dispatcher
- implicit val askTimeout = Timeout(10 millis)
+ implicit lazy val ec: ExecutionContextExecutor = system.dispatcher
+ implicit val askTimeout: Timeout = Timeout(10 millis)
// TODO: Make this work with ActorSelections
@@ -93,7 +94,7 @@ class AskPatternInstrumentationSpec extends TestKit(ActorSystem("AskPatternInstr
}
class NoReply extends Actor {
- def receive = {
+ def receive: Receive = {
case _ =>
}
}
diff --git a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/MessageTracingSpec.scala b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/MessageTracingSpec.scala
index 1e2aeae41..c8162e2c7 100644
--- a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/MessageTracingSpec.scala
+++ b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/MessageTracingSpec.scala
@@ -178,7 +178,7 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
}
"not track Akka Streams actors" in {
- implicit val timeout = Timeout(10 seconds)
+ implicit val timeout: Timeout = Timeout(10 seconds)
val actorWithMaterializer = system.actorOf(Props[ActorWithMaterializer])
val finishedStream = Kamon.runWithSpan(Kamon.serverSpanBuilder("wrapper", "test").start()) {
@@ -222,7 +222,7 @@ class TracingTestActor extends Actor {
}
class ActorWithMaterializer extends Actor {
- implicit val mat = ActorMaterializer()
+ implicit val mat: Materializer = ActorMaterializer()
override def receive: Receive = {
case "stream" =>
diff --git a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/SystemMessageInstrumentationSpec.scala b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/SystemMessageInstrumentationSpec.scala
index a9b56f76e..5c57c74f6 100644
--- a/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/SystemMessageInstrumentationSpec.scala
+++ b/instrumentation/kamon-akka/src/test-common/scala/kamon/instrumentation/akka/SystemMessageInstrumentationSpec.scala
@@ -25,13 +25,14 @@ import kamon.instrumentation.akka.ContextTesting._
import kamon.tag.Lookups._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.{AnyWordSpec, AnyWordSpecLike}
+import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal
class SystemMessageInstrumentationSpec extends TestKit(ActorSystem("ActorSystemMessageInstrumentationSpec")) with AnyWordSpecLike with Matchers
with BeforeAndAfterAll with ImplicitSender {
- implicit lazy val executionContext = system.dispatcher
+ implicit lazy val executionContext: ExecutionContext = system.dispatcher
"the system message passing instrumentation" should {
"capture and propagate the current context while processing the Create message in top level actors" in {
diff --git a/instrumentation/kamon-pekko-http/src/main/scala-2.12/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala b/instrumentation/kamon-pekko-http/src/main/scala-2.12/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
deleted file mode 100644
index eb82331c2..000000000
--- a/instrumentation/kamon-pekko-http/src/main/scala-2.12/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
+++ /dev/null
@@ -1,341 +0,0 @@
-/*
- * Copyright 2013-2021 The Kamon Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package kamon.instrumentation.pekko.http
-
-import java.util.concurrent.Callable
-import org.apache.pekko.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable, ToResponseMarshaller}
-import org.apache.pekko.http.scaladsl.model.StatusCodes.Redirection
-import org.apache.pekko.http.scaladsl.model.{HttpHeader, HttpRequest, HttpResponse, StatusCode, Uri}
-import org.apache.pekko.http.scaladsl.server.PathMatcher.{Matched, Unmatched}
-import org.apache.pekko.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet}
-import org.apache.pekko.http.scaladsl.server.directives.RouteDirectives.reject
-import org.apache.pekko.http.scaladsl.server._
-import org.apache.pekko.http.scaladsl.server.util.Tupler
-import org.apache.pekko.http.scaladsl.util.FastFuture
-import kamon.Kamon
-import kamon.instrumentation.pekko.http.HasMatchingContext.PathMatchingContext
-import kamon.instrumentation.context.{HasContext, InvokeWithCapturedContext}
-import kanela.agent.api.instrumentation.InstrumentationBuilder
-import kanela.agent.api.instrumentation.mixin.Initializer
-import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation._
-
-import scala.concurrent.{ExecutionContext, Future, Promise}
-import scala.util.control.NonFatal
-import scala.util.{Failure, Success, Try}
-import java.util.regex.Pattern
-import org.apache.pekko.NotUsed
-import org.apache.pekko.http.scaladsl.server.RouteResult.Rejected
-import org.apache.pekko.stream.scaladsl.Flow
-import kamon.context.Context
-import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic
-
-import scala.collection.immutable
-
-
-class PekkoHttpServerInstrumentation extends InstrumentationBuilder {
-
- /**
- * When instrumenting bindAndHandle what we do is wrap the Flow[HttpRequest, HttpResponse, NotUsed] provided by
- * the user and add all the processing there. This is the part of the instrumentation that performs Context
- * propagation, tracing and gather metrics using the HttpServerInstrumentation packed in common.
- *
- * One important point about the HTTP Server instrumentation is that because it is almost impossible to have a proper
- * operation name before the request processing hits the routing tree, we are delaying the sampling decision to the
- * point at which we have some operation name.
- */
-
- onType("org.apache.pekko.http.scaladsl.HttpExt")
- .advise(method("bindAndHandle"), classOf[HttpExtBindAndHandleAdvice])
-
- /**
- * For the HTTP/2 instrumentation, since the parts where we can capture the interface/port and the actual flow
- * creation happen at different times we are wrapping the handler with the interface/port data and reading that
- * information when turning the handler function into a flow and wrapping it the same way we would for HTTP/1.
- */
-
- onType("org.apache.pekko.http.impl.engine.http2.Http2Ext")
- .advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice])
-
- onType("org.apache.pekko.http.impl.engine.http2.Http2Blueprint$")
- .intercept(method("handleWithStreamIdHeader"), Http2BlueprintInterceptor)
-
- /**
- * The rest of these sections are just about making sure that we can generate an appropriate operation name (i.e. free
- * of variables) and take a Sampling Decision in case none has been taken so far.
- */
- onType("org.apache.pekko.http.scaladsl.server.RequestContextImpl")
- .mixin(classOf[HasMatchingContext.Mixin])
- .intercept(method("copy"), RequestContextCopyInterceptor)
-
- onType("org.apache.pekko.http.scaladsl.server.directives.PathDirectives")
- .intercept(method("rawPathPrefix"), classOf[PathDirectivesRawPathPrefixInterceptor])
-
- onType("org.apache.pekko.http.scaladsl.server.directives.FutureDirectives")
- .intercept(method("onComplete"), classOf[ResolveOperationNameOnRouteInterceptor])
-
- onTypes("org.apache.pekko.http.scaladsl.server.directives.OnSuccessMagnet$", "org.apache.pekko.http.scaladsl.server.directives.CompleteOrRecoverWithMagnet$")
- .intercept(method("apply"), classOf[ResolveOperationNameOnRouteInterceptor])
-
- onType("org.apache.pekko.http.scaladsl.server.directives.RouteDirectives")
- .intercept(method("complete"), classOf[ResolveOperationNameOnRouteInterceptor])
- .intercept(method("redirect"), classOf[ResolveOperationNameOnRouteInterceptor])
- .intercept(method("failWith"), classOf[ResolveOperationNameOnRouteInterceptor])
-
- /**
- * Support for HTTP/1 and HTTP/2 at the same time.
- *
- */
-
- onType("org.apache.pekko.stream.scaladsl.FlowOps")
- .advise(method("mapAsync"), classOf[FlowOpsMapAsyncAdvice])
-}
-
-trait HasMatchingContext {
- def defaultOperationName: String
- def matchingContext: Seq[PathMatchingContext]
- def setMatchingContext(ctx: Seq[PathMatchingContext]): Unit
- def setDefaultOperationName(defaultOperationName: String): Unit
- def prependMatchingContext(matched: PathMatchingContext): Unit
- def popOneMatchingContext(): Unit
-}
-
-object HasMatchingContext {
-
- case class PathMatchingContext (
- fullPath: String,
- matched: Matched[_]
- )
-
- class Mixin(var matchingContext: Seq[PathMatchingContext], var defaultOperationName: String) extends HasMatchingContext {
-
- override def setMatchingContext(matchingContext: Seq[PathMatchingContext]): Unit =
- this.matchingContext = matchingContext
-
- override def setDefaultOperationName(defaultOperationName: String): Unit =
- this.defaultOperationName = defaultOperationName
-
- override def prependMatchingContext(matched: PathMatchingContext): Unit =
- matchingContext = matched +: matchingContext
-
- override def popOneMatchingContext(): Unit =
- matchingContext = matchingContext.tail
-
- @Initializer
- def initialize(): Unit =
- matchingContext = Seq.empty
- }
-}
-
-class ResolveOperationNameOnRouteInterceptor
-object ResolveOperationNameOnRouteInterceptor {
- import org.apache.pekko.http.scaladsl.util.FastFuture._
-
- // We are replacing some of the basic directives here to ensure that we will resolve both the Sampling Decision and
- // the operation name before the request gets to the actual handling code (presumably inside of a "complete"
- // directive.
-
- def complete(m: => ToResponseMarshallable): StandardRoute =
- StandardRoute(resolveOperationName(_).complete(m))
-
- def complete[T](status: StatusCode, v: => T)(implicit m: ToEntityMarshaller[T]): StandardRoute =
- StandardRoute(resolveOperationName(_).complete((status, v)))
-
- def complete[T](status: StatusCode, headers: immutable.Seq[HttpHeader], v: => T)(implicit m: ToEntityMarshaller[T]): StandardRoute =
- complete((status, headers, v))
-
- def redirect(uri: Uri, redirectionType: Redirection): StandardRoute =
- StandardRoute(resolveOperationName(_).redirect(uri, redirectionType))
-
- def failWith(error: Throwable): StandardRoute = {
- Kamon.currentSpan().fail(error)
- StandardRoute(resolveOperationName(_).fail(error))
- }
-
- def onComplete[T](future: => Future[T]): Directive1[Try[T]] =
- Directive { inner => ctx =>
- import ctx.executionContext
- resolveOperationName(ctx)
- future.fast.transformWith(t => inner(Tuple1(t))(ctx))
- }
-
- def apply[T](future: => Future[T])(implicit tupler: Tupler[T]): OnSuccessMagnet { type Out = tupler.Out } =
- new OnSuccessMagnet {
- type Out = tupler.Out
- val directive = Directive[tupler.Out] { inner => ctx =>
- import ctx.executionContext
- resolveOperationName(ctx)
- future.fast.flatMap(t => inner(tupler(t))(ctx))
- }(tupler.OutIsTuple)
- }
-
- def apply[T](future: => Future[T])(implicit m: ToResponseMarshaller[T]): CompleteOrRecoverWithMagnet =
- new CompleteOrRecoverWithMagnet {
- val directive = Directive[Tuple1[Throwable]] { inner => ctx =>
- import ctx.executionContext
- resolveOperationName(ctx)
- future.fast.transformWith {
- case Success(res) => ctx.complete(res)
- case Failure(error) => inner(Tuple1(error))(ctx)
- }
- }
- }
-
- private def resolveOperationName(requestContext: RequestContext): RequestContext = {
-
- // We will only change the operation name if the last edit made to it was an automatic one. At this point, the only
- // way in which the operation name might have changed is if the user changed it with the operationName directive or
- // by accessing the Span and changing it directly there, so we wouldn't want to overwrite that.
-
- Kamon.currentContext().get(LastAutomaticOperationNameEdit.Key).foreach(lastEdit => {
- val currentSpan = Kamon.currentSpan()
-
- if(lastEdit.allowAutomaticChanges) {
- if(currentSpan.operationName() == lastEdit.operationName) {
- val allMatches = requestContext.asInstanceOf[HasMatchingContext].matchingContext.reverse.map(singleMatch)
- val operationName = allMatches.mkString("")
-
- if(operationName.nonEmpty) {
- currentSpan
- .name(operationName)
- .takeSamplingDecision()
-
- lastEdit.operationName = operationName
- }
- } else {
- lastEdit.allowAutomaticChanges = false
- }
- } else {
- currentSpan.takeSamplingDecision()
- }
- })
-
- requestContext
- }
-
- private def singleMatch(matching: PathMatchingContext): String = {
- val rest = matching.matched.pathRest.toString()
- val consumedCount = matching.fullPath.length - rest.length
- val consumedSegment = matching.fullPath.substring(0, consumedCount)
-
- matching.matched.extractions match {
- case () => //string segment matched
- consumedSegment
- case tuple: Product =>
- val values = tuple.productIterator.toList map {
- case Some(x) => List(x.toString)
- case None => Nil
- case long: Long => List(long.toString, long.toHexString)
- case int: Int => List(int.toString, int.toHexString)
- case a: Any => List(a.toString)
- }
- values.flatten.fold(consumedSegment) { (full, value) =>
- val r = "(?i)(^|/)" + Pattern.quote(value) + "($|/)"
- full.replaceFirst(r, "$1{}$2")
- }
- }
- }
-}
-
-/**
- * Tracks the last operation name that was automatically assigned to an operation via instrumentation. The
- * instrumentation might assign a name to the operations via settings on the HTTP Server instrumentation instance or
- * via the Path directives instrumentation, but might never reassign a name if the user somehow assigned their own name
- * to the operation. Users chan change operation names by:
- * - Using operation mappings via configuration of the HTTP Server.
- * - Providing a custom HTTP Operation Name Generator for the server.
- * - Using the "operationName" directive.
- * - Directly accessing the Span for the current operation and changing the name on it.
- *
- */
-class LastAutomaticOperationNameEdit(
- @volatile var operationName: String,
- @volatile var allowAutomaticChanges: Boolean
-)
-
-object LastAutomaticOperationNameEdit {
- val Key = Context.key[Option[LastAutomaticOperationNameEdit]]("laone", None)
-
- def apply(operationName: String, allowAutomaticChanges: Boolean): LastAutomaticOperationNameEdit =
- new LastAutomaticOperationNameEdit(operationName, allowAutomaticChanges)
-}
-
-object RequestContextCopyInterceptor {
-
- @RuntimeType
- def copy(@This context: RequestContext, @SuperCall copyCall: Callable[RequestContext]): RequestContext = {
- val copiedRequestContext = copyCall.call()
- copiedRequestContext.asInstanceOf[HasMatchingContext].setMatchingContext(context.asInstanceOf[HasMatchingContext].matchingContext)
- copiedRequestContext
- }
-}
-
-class PathDirectivesRawPathPrefixInterceptor
-object PathDirectivesRawPathPrefixInterceptor {
- import BasicDirectives._
-
- def rawPathPrefix[T](@Argument(0) matcher: PathMatcher[T]): Directive[T] = {
- implicit val LIsTuple = matcher.ev
-
- extract { ctx =>
- val fullPath = ctx.unmatchedPath.toString()
- val matching = matcher(ctx.unmatchedPath)
-
- matching match {
- case m: Matched[_] =>
- ctx.asInstanceOf[HasMatchingContext]
- .prependMatchingContext(PathMatchingContext(fullPath, m))
- case _ =>
- }
-
- (ctx, matching)
- } flatMap {
- case (ctx, Matched(rest, values)) =>
- tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult =>
-
- if(routeResult.isInstanceOf[Rejected])
- ctx.asInstanceOf[HasMatchingContext].popOneMatchingContext()
-
- routeResult
- }
-
- case (_, Unmatched) => reject
- }
- }
-}
-
-
-object Http2BlueprintInterceptor {
-
- case class HandlerWithEndpoint(interface: String, port: Int, handler: HttpRequest => Future[HttpResponse])
- extends (HttpRequest => Future[HttpResponse]) {
-
- override def apply(request: HttpRequest): Future[HttpResponse] = handler(request)
- }
-
- @RuntimeType
- def handleWithStreamIdHeader(@Argument(1) handler: HttpRequest => Future[HttpResponse],
- @SuperCall zuper: Callable[Flow[HttpRequest, HttpResponse, NotUsed]]): Flow[HttpRequest, HttpResponse, NotUsed] = {
-
- handler match {
- case HandlerWithEndpoint(interface, port, _) =>
- ServerFlowWrapper(zuper.call(), interface, port)
-
- case _ =>
- zuper.call()
- }
- }
-}
diff --git a/instrumentation/kamon-pekko-http/src/main/scala-2.13/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala b/instrumentation/kamon-pekko-http/src/main/scala/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
similarity index 95%
rename from instrumentation/kamon-pekko-http/src/main/scala-2.13/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
rename to instrumentation/kamon-pekko-http/src/main/scala/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
index 778c32911..561909a4c 100644
--- a/instrumentation/kamon-pekko-http/src/main/scala-2.13/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
+++ b/instrumentation/kamon-pekko-http/src/main/scala/kamon/instrumentation/pekko/http/PekkoHttpServerInstrumentation.scala
@@ -1,33 +1,29 @@
package kamon.instrumentation.pekko.http
-import java.util.concurrent.Callable
-import org.apache.pekko.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable, ToResponseMarshaller}
-import org.apache.pekko.http.scaladsl.model.StatusCodes.Redirection
-import org.apache.pekko.http.scaladsl.model.{HttpHeader, HttpRequest, HttpResponse, StatusCode, Uri}
-import org.apache.pekko.http.scaladsl.server.PathMatcher.{Matched, Unmatched}
-import org.apache.pekko.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet}
-import org.apache.pekko.http.scaladsl.server.directives.RouteDirectives.reject
-import org.apache.pekko.http.scaladsl.server._
-import org.apache.pekko.http.scaladsl.server.util.Tupler
-import org.apache.pekko.http.scaladsl.util.FastFuture
import kamon.Kamon
+import kamon.context.Context
import kamon.instrumentation.pekko.http.HasMatchingContext.PathMatchingContext
-import kamon.instrumentation.context.{HasContext, InvokeWithCapturedContext}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.api.instrumentation.mixin.Initializer
import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation._
-
-import scala.concurrent.{Batchable, ExecutionContext, Future, Promise}
-import scala.util.control.NonFatal
-import scala.util.{Failure, Success, Try}
-import java.util.regex.Pattern
+import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic
import org.apache.pekko.NotUsed
+import org.apache.pekko.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable, ToResponseMarshaller}
+import org.apache.pekko.http.scaladsl.model.StatusCodes.Redirection
+import org.apache.pekko.http.scaladsl.model._
+import org.apache.pekko.http.scaladsl.server.PathMatcher.{Matched, Unmatched}
import org.apache.pekko.http.scaladsl.server.RouteResult.Rejected
+import org.apache.pekko.http.scaladsl.server._
+import org.apache.pekko.http.scaladsl.server.directives.RouteDirectives.reject
+import org.apache.pekko.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet}
+import org.apache.pekko.http.scaladsl.server.util.{Tuple, Tupler}
import org.apache.pekko.stream.scaladsl.Flow
-import kamon.context.Context
-import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic
+import java.util.concurrent.Callable
+import java.util.regex.Pattern
import scala.collection.immutable
+import scala.concurrent.Future
+import scala.util.{Failure, Success, Try}
class PekkoHttpServerInstrumentation extends InstrumentationBuilder {
@@ -271,7 +267,7 @@ object PathDirectivesRawPathPrefixInterceptor {
import BasicDirectives._
def rawPathPrefix[T](@Argument(0) matcher: PathMatcher[T]): Directive[T] = {
- implicit val LIsTuple = matcher.ev
+ implicit val LIsTuple: Tuple[T] = matcher.ev
extract { ctx =>
val fullPath = ctx.unmatchedPath.toString()
@@ -287,7 +283,7 @@ object PathDirectivesRawPathPrefixInterceptor {
(ctx, matching)
} flatMap {
case (ctx, Matched(rest, values)) =>
- tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult =>
+ tprovide[T](values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult =>
if(routeResult.isInstanceOf[Rejected])
ctx.asInstanceOf[HasMatchingContext].popOneMatchingContext()
diff --git a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpClientTracingSpec.scala b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpClientTracingSpec.scala
index 2f5822320..7ff1f0c44 100644
--- a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpClientTracingSpec.scala
+++ b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpClientTracingSpec.scala
@@ -23,7 +23,7 @@ import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.http.scaladsl.Http
import org.apache.pekko.http.scaladsl.model.HttpRequest
import org.apache.pekko.http.scaladsl.model.headers.RawHeader
-import org.apache.pekko.stream.ActorMaterializer
+import org.apache.pekko.stream.{ActorMaterializer, Materializer}
import org.json4s._
import org.json4s.native.JsonMethods._
import org.scalatest.OptionValues
@@ -31,6 +31,7 @@ import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._
class PekkoHttpClientTracingSpec extends AnyWordSpecLike with Matchers with InitAndStopKamonAfterAll with MetricInspection.Syntax
@@ -38,14 +39,14 @@ class PekkoHttpClientTracingSpec extends AnyWordSpecLike with Matchers with Init
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-client-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-client-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: Materializer = Materializer(system)
val timeoutTest: FiniteDuration = 5 second
val interface = "127.0.0.1"
val port = 8080
- val webServer = startServer(interface, port)
+ val webServer: WebServer = startServer(interface, port)
"the Pekko HTTP client instrumentation" should {
"create a client Span when using the request level API - Http().singleRequest(...)" in {
diff --git a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerMetricsSpec.scala b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerMetricsSpec.scala
index 9dbfb4b6a..3e655015b 100644
--- a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerMetricsSpec.scala
+++ b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerMetricsSpec.scala
@@ -22,14 +22,14 @@ import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.http.scaladsl.Http
import org.apache.pekko.http.scaladsl.model.{HttpRequest, HttpResponse}
import org.apache.pekko.http.scaladsl.settings.ClientConnectionSettings
-import org.apache.pekko.stream.ActorMaterializer
+import org.apache.pekko.stream.{ActorMaterializer, Materializer}
import org.apache.pekko.stream.scaladsl.{Sink, Source}
import org.scalatest.OptionValues
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
-import scala.concurrent.Future
+import scala.concurrent.{ExecutionContextExecutor, Future}
import scala.concurrent.duration._
class PekkoHttpServerMetricsSpec extends AnyWordSpecLike with Matchers with InitAndStopKamonAfterAll with InstrumentInspection.Syntax
@@ -37,14 +37,14 @@ class PekkoHttpServerMetricsSpec extends AnyWordSpecLike with Matchers with Init
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-server-metrics-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-server-metrics-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: Materializer = Materializer(system)
val port = 8083
val interface = "127.0.0.1"
val timeoutTest: FiniteDuration = 5 second
- val webServer = startServer(interface, port)
+ val webServer: WebServer = startServer(interface, port)
"the Pekko HTTP server instrumentation" should {
"track the number of open connections and active requests on the Server side" in {
diff --git a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerTracingSpec.scala b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerTracingSpec.scala
index ae80ee77a..5da805ac1 100644
--- a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerTracingSpec.scala
+++ b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/PekkoHttpServerTracingSpec.scala
@@ -17,7 +17,6 @@
package kamon.pekko.http
import org.apache.pekko.actor.ActorSystem
-import org.apache.pekko.stream.ActorMaterializer
import kamon.tag.Lookups.{plain, plainBoolean, plainLong}
import kamon.testkit._
import kamon.trace.Span.Mark
@@ -31,6 +30,7 @@ import java.util.UUID
import javax.net.ssl.{HostnameVerifier, SSLSession}
import scala.concurrent.duration._
import scala.collection.JavaConverters._
+import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal
class PekkoHttpServerTracingSpec extends AnyWordSpecLike with Matchers with ScalaFutures with Inside with InitAndStopKamonAfterAll
@@ -38,8 +38,8 @@ class PekkoHttpServerTracingSpec extends AnyWordSpecLike with Matchers with Scal
import TestWebServer.Endpoints._
- implicit private val system = ActorSystem("http-server-instrumentation-spec")
- implicit private val executor = system.dispatcher
+ implicit private val system: ActorSystem = ActorSystem("http-server-instrumentation-spec")
+ implicit private val executor: ExecutionContext = system.dispatcher
val (sslSocketFactory, trustManager) = clientSSL()
val okHttp = new OkHttpClient.Builder()
diff --git a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/ServerFlowWrapperSpec.scala b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/ServerFlowWrapperSpec.scala
index 9246dbbaa..06def45ff 100644
--- a/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/ServerFlowWrapperSpec.scala
+++ b/instrumentation/kamon-pekko-http/src/test/scala/kamon/pekko/http/ServerFlowWrapperSpec.scala
@@ -4,18 +4,20 @@ import kamon.instrumentation.pekko.http.ServerFlowWrapper
import kamon.testkit.InitAndStopKamonAfterAll
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.http.scaladsl.model._
-import org.apache.pekko.stream.ActorMaterializer
+import org.apache.pekko.stream.{ActorMaterializer, Materializer}
import org.apache.pekko.stream.scaladsl.{Flow, Sink, Source}
import org.apache.pekko.util.ByteString
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContextExecutor
+
class ServerFlowWrapperSpec extends AnyWordSpecLike with Matchers with ScalaFutures with InitAndStopKamonAfterAll {
- implicit private val system = ActorSystem("http-client-instrumentation-spec")
- implicit private val executor = system.dispatcher
- implicit private val materializer = ActorMaterializer()
+ implicit private val system: ActorSystem = ActorSystem("http-client-instrumentation-spec")
+ implicit private val executor: ExecutionContextExecutor = system.dispatcher
+ implicit private val materializer: Materializer = Materializer(system)
private val okReturningFlow = Flow[HttpRequest].map { _ =>
HttpResponse(status = StatusCodes.OK, entity = HttpEntity("OK"))
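Note on the pekko-http spec changes above: besides annotating the implicits, they replace the deprecated `ActorMaterializer()` with the system-level `Materializer(system)` factory. A minimal sketch of the resulting setup (names are illustrative):

import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.Materializer

object MaterializerSetupSketch {
  implicit val system: ActorSystem = ActorSystem("example")
  // Materializer bound to the system lifecycle, replacing ActorMaterializer().
  implicit val materializer: Materializer = Materializer(system)
}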
diff --git a/instrumentation/kamon-pekko-http/src/test/scala/kamon/testkit/TestWebServer.scala b/instrumentation/kamon-pekko-http/src/test/scala/kamon/testkit/TestWebServer.scala
index 490e6bd42..e4d3f892d 100644
--- a/instrumentation/kamon-pekko-http/src/test/scala/kamon/testkit/TestWebServer.scala
+++ b/instrumentation/kamon-pekko-http/src/test/scala/kamon/testkit/TestWebServer.scala
@@ -33,14 +33,15 @@ import org.apache.pekko.util.ByteString
import javax.net.ssl.{KeyManagerFactory, SSLContext, SSLSocketFactory, TrustManagerFactory, X509TrustManager}
import kamon.Kamon
import kamon.instrumentation.pekko.http.TracingDirectives
-import org.json4s.{DefaultFormats, native}
+import org.json4s.{DefaultFormats, native, Serialization}
import kamon.tag.Lookups.plain
import kamon.trace.Trace
+
import scala.concurrent.{ExecutionContext, Future}
trait TestWebServer extends TracingDirectives {
- implicit val serialization = native.Serialization
- implicit val formats = DefaultFormats
+ implicit val serialization: Serialization = native.Serialization
+ implicit val formats: DefaultFormats = DefaultFormats
import Json4sSupport._
def startServer(interface: String, port: Int, https: Boolean = false)(implicit system: ActorSystem): WebServer = {
@@ -184,7 +185,7 @@ trait TestWebServer extends TracingDirectives {
new WebServer(interface, port, "http", Http().newServerAt(interface, port).bindFlow(routes))
}
- def httpContext() = {
+ def httpContext(): HttpsConnectionContext = {
val password = "kamon".toCharArray
val ks = KeyStore.getInstance("PKCS12")
ks.load(getClass.getClassLoader.getResourceAsStream("https/server.p12"), password)
diff --git a/instrumentation/kamon-pekko/build.sbt b/instrumentation/kamon-pekko/build.sbt
index c4b7ac397..5596992a1 100644
--- a/instrumentation/kamon-pekko/build.sbt
+++ b/instrumentation/kamon-pekko/build.sbt
@@ -1,7 +1,7 @@
// The Common configuration should always depend on the latest version of Pekko. All code in the Common configuration
// should be source compatible with all Pekko versions.
inConfig(Compile)(Defaults.compileSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`)
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version)
))
val pekkoVersion = "1.0.1"
@@ -33,5 +33,5 @@ lazy val baseTestSettings = Seq(
)
inConfig(Test)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq(
- crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`)
+ crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version),
))
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorCellInfo.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorCellInfo.scala
index 604f8047b..8f1108f34 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorCellInfo.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorCellInfo.scala
@@ -42,9 +42,9 @@ object ActorCellInfo {
val (actorOrRouterClass, routeeClass) =
if(isRouter)
- (props.routerConfig.getClass, Some(ref.asInstanceOf[HasRouterProps].routeeProps.actorClass))
+ (props.routerConfig.getClass, Some(ref.asInstanceOf[HasRouterProps].routeeProps.actorClass()))
else if (isRoutee)
- (parent.asInstanceOf[HasRouterProps].routerProps.routerConfig.getClass, Some(props.actorClass))
+ (parent.asInstanceOf[HasRouterProps].routerProps.routerConfig.getClass, Some(props.actorClass()))
else
(props.actorClass(), None)
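Note on the ActorCellInfo change above: `actorClass()` is declared with an empty parameter list, and Scala 3 no longer auto-applies such Scala-defined methods, so the explicit `()` is required. A tiny standalone illustration (the types here are placeholders, not the Pekko ones):

class DescriptorSketch { def actorClass(): String = "example.EchoActor" }

object AutoApplicationSketch {
  val d = new DescriptorSketch
  // d.actorClass          // accepted by Scala 2, rejected by Scala 3 (missing argument list)
  val ok: String = d.actorClass() // the form the patch standardizes on
}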
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorInstrumentation.scala
index 1a7a5eefe..4f1821974 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorInstrumentation.scala
@@ -16,44 +16,51 @@
package kamon.instrumentation.pekko.instrumentations
-import org.apache.pekko.actor.{ActorRef, ActorSystem}
import kamon.Kamon
import kamon.context.Storage.Scope
-import kamon.instrumentation.pekko.instrumentations.HasActorMonitor.actorMonitor
import kamon.instrumentation.context.{HasContext, HasTimestamp}
+import kamon.instrumentation.pekko.instrumentations.HasActorMonitor.actorMonitor
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodEnter, OnMethodExit, This}
import org.apache.pekko.actor.instrumentation.ReplaceWithAdvice
+import org.apache.pekko.actor.{ActorRef, ActorSystem}
+
+import scala.annotation.static
class ActorInstrumentation extends InstrumentationBuilder {
+ onType("org.apache.pekko.actor.dungeon.Dispatch")
+ .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageAdvice])
+ .advise(method("swapMailbox"), classOf[ActorCellSwapMailboxAdvice])
+
+ onType("org.apache.pekko.actor.dungeon.FaultHandling")
+ .advise(method("handleInvokeFailure"), classOf[HandleInvokeFailureMethodAdvice])
+ .advise(method("terminate"), classOf[TerminateMethodAdvice])
+
/**
- * This is where most of the Actor processing magic happens. Handling of messages, errors and system messages.
- */
+ * This is where most of the Actor processing magic happens. Handling of messages, errors and system messages.
+ */
onType("org.apache.pekko.actor.ActorCell")
.mixin(classOf[HasActorMonitor.Mixin])
- .advise(isConstructor, ActorCellConstructorAdvice)
+ .advise(isConstructor, classOf[ActorCellConstructorAdvice])
.advise(method("invoke"), classOf[ActorCellInvokeAdvice])
- .advise(method("handleInvokeFailure"), HandleInvokeFailureMethodAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
- .advise(method("terminate"), TerminateMethodAdvice)
- .advise(method("swapMailbox"), ActorCellSwapMailboxAdvice)
- .advise(method("invokeAll$1"), InvokeAllMethodInterceptor)
+ .advise(method("invokeAll$1"), classOf[InvokeAllMethodInterceptor])
/**
- * Ensures that the Context is properly propagated when messages are temporarily stored on an UnstartedCell.
- */
+ * Ensures that the Context is properly propagated when messages are temporarily stored on an UnstartedCell.
+ */
onType("org.apache.pekko.actor.UnstartedCell")
.mixin(classOf[HasActorMonitor.Mixin])
- .advise(isConstructor, RepointableActorCellConstructorAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
+ .advise(isConstructor, classOf[RepointableActorCellConstructorAdvice])
+ .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageAdvice])
.advise(method("replaceWith"), classOf[ReplaceWithAdvice])
}
trait HasActorMonitor {
def actorMonitor: ActorMonitor
+
def setActorMonitor(actorMonitor: ActorMonitor): Unit
}
@@ -68,43 +75,49 @@ object HasActorMonitor {
cell.asInstanceOf[HasActorMonitor].actorMonitor
}
+class ActorCellSwapMailboxAdvice
+
object ActorCellSwapMailboxAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This cell: Any, @Advice.Argument(0) newMailbox: Any): Boolean = {
+ @static def enter(@Advice.This cell: Any, @Advice.Argument(0) newMailbox: Any): Boolean = {
val isShuttingDown = PekkoPrivateAccess.isDeadLettersMailbox(cell, newMailbox)
- if(isShuttingDown)
+ if (isShuttingDown)
actorMonitor(cell).onTerminationStart()
isShuttingDown
}
@Advice.OnMethodExit
- def exit(@Advice.This cell: Any, @Advice.Return oldMailbox: Any, @Advice.Enter isShuttingDown: Boolean): Unit = {
- if(oldMailbox != null && isShuttingDown) {
+ @static def exit(@Advice.This cell: Any, @Advice.Return oldMailbox: Any, @Advice.Enter isShuttingDown: Boolean): Unit = {
+ if (oldMailbox != null && isShuttingDown) {
actorMonitor(cell).onDroppedMessages(PekkoPrivateAccess.mailboxMessageCount(oldMailbox))
}
}
}
+class InvokeAllMethodInterceptor
+
object InvokeAllMethodInterceptor {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) message: Any): Option[Scope] =
+ @static def enter(@Advice.Argument(0) message: Any): Option[Scope] =
message match {
case m: HasContext => Some(Kamon.storeContext(m.context))
case _ => None
}
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Option[Scope]): Unit =
+ @static def exit(@Advice.Enter scope: Option[Scope]): Unit =
scope.foreach(_.close())
}
+class SendMessageAdvice
+
object SendMessageAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any, @Argument(0) envelope: Object): Unit = {
+ @static def onEnter(@This cell: Any, @Argument(0) envelope: Object): Unit = {
val instrumentation = actorMonitor(cell)
envelope.asInstanceOf[HasContext].setContext(instrumentation.captureEnvelopeContext())
@@ -112,32 +125,40 @@ object SendMessageAdvice {
}
}
+class RepointableActorCellConstructorAdvice
+
object RepointableActorCellConstructorAdvice {
@Advice.OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(3) parent: ActorRef): Unit =
+ @static def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(3) parent: ActorRef): Unit =
cell.asInstanceOf[HasActorMonitor].setActorMonitor(ActorMonitor.from(cell, ref, parent, system))
}
+class ActorCellConstructorAdvice
+
object ActorCellConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(4) parent: ActorRef): Unit =
+ @static def onExit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(4) parent: ActorRef): Unit =
cell.asInstanceOf[HasActorMonitor].setActorMonitor(ActorMonitor.from(cell, ref, parent, system))
}
+class HandleInvokeFailureMethodAdvice
+
object HandleInvokeFailureMethodAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any, @Argument(1) failure: Throwable): Unit =
+ @static def onEnter(@This cell: Any, @Argument(1) failure: Throwable): Unit =
actorMonitor(cell).onFailure(failure)
}
+class TerminateMethodAdvice
+
object TerminateMethodAdvice {
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any): Unit = {
+ @static def onEnter(@This cell: Any): Unit = {
actorMonitor(cell).cleanup()
if (PekkoPrivateAccess.isRoutedActorCell(cell)) {
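The change repeated across this file (and the rest of the patch): each advice `object Foo` gains an empty companion `class Foo`, its methods are annotated with `@static`, and `.advise`/`.intercept` receive `classOf[Foo]` instead of the object instance. Scala 3's `@static` emits those members as real static methods on the companion class, which is what Kanela/Byte Buddy expects when advice is referenced by class. A minimal, hypothetical sketch of the shape (ExampleAdvice, ExampleInstrumentation and the target names are illustrative, not from this patch):

    import scala.annotation.static
    import kanela.agent.api.instrumentation.InstrumentationBuilder
    import kanela.agent.libs.net.bytebuddy.asm.Advice

    class ExampleAdvice // empty companion: the @static members become statics on this class
    object ExampleAdvice {
      @Advice.OnMethodEnter
      @static def enter(): Long = System.nanoTime()

      @Advice.OnMethodExit
      @static def exit(@Advice.Enter startedAt: Long): Unit =
        println(s"call took ${System.nanoTime() - startedAt} ns")
    }

    class ExampleInstrumentation extends InstrumentationBuilder {
      // Advice is now referenced by class, not by the object's singleton instance.
      onType("some.illustrative.Target")
        .advise(method("someMethod"), classOf[ExampleAdvice])
    }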
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorLoggingInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorLoggingInstrumentation.scala
index 64ced4df0..76dcd2e40 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorLoggingInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorLoggingInstrumentation.scala
@@ -23,6 +23,8 @@ import kamon.instrumentation.context.HasContext
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, Enter, OnMethodEnter, OnMethodExit}
+import scala.annotation.static
+
class ActorLoggingInstrumentation extends InstrumentationBuilder {
/**
@@ -33,16 +35,17 @@ class ActorLoggingInstrumentation extends InstrumentationBuilder {
.mixin(classOf[HasContext.MixinWithInitializer])
onType("org.apache.pekko.event.slf4j.Slf4jLogger")
- .advise(method("withMdc"), WithMdcMethodAdvice)
+ .advise(method("withMdc"), classOf[WithMdcMethodAdvice])
}
+class WithMdcMethodAdvice
object WithMdcMethodAdvice {
@OnMethodEnter
- def enter(@Argument(1) logEvent: LogEvent): Scope =
+ @static def enter(@Argument(1) logEvent: LogEvent): Scope =
Kamon.storeContext(logEvent.asInstanceOf[HasContext].context)
@OnMethodExit
- def exit(@Enter scope: Scope): Unit =
+ @static def exit(@Enter scope: Scope): Unit =
scope.close()
}
\ No newline at end of file
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorMonitorInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorMonitorInstrumentation.scala
index cb314ddea..369b54c93 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorMonitorInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorMonitorInstrumentation.scala
@@ -7,6 +7,7 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.Argument
import org.slf4j.LoggerFactory
+import scala.annotation.static
import scala.util.control.NonFatal
class ActorMonitorInstrumentation extends InstrumentationBuilder {
@@ -16,27 +17,28 @@ class ActorMonitorInstrumentation extends InstrumentationBuilder {
* so we're forced to extract the original message type.
*/
onSubTypesOf("kamon.instrumentation.pekko.instrumentations.ActorMonitor")
- .intercept(method("extractMessageClass"), MessageClassAdvice)
+ .intercept(method("extractMessageClass"), classOf[MessageClassAdvice])
}
class MessageClassAdvice
object MessageClassAdvice {
private val logger = LoggerFactory.getLogger(classOf[MessageClassAdvice])
- def extractMessageClass(@Argument(0) envelope: Envelope): String = {
+ @static def extractMessageClass(@Argument(0) envelope: Any): String = {
+ val e = envelope.asInstanceOf[Envelope]
try {
- envelope.message match {
+ e.message match {
case message: WrappedMessage => ActorCellInfo.simpleClassName(message.message.getClass)
- case _ => ActorCellInfo.simpleClassName(envelope.message.getClass)
+ case _ => ActorCellInfo.simpleClassName(e.message.getClass)
}
} catch {
// NoClassDefFoundError is thrown in early versions of akka 2.6
// so we can safely fall back to the original method
case _: NoClassDefFoundError =>
- ActorCellInfo.simpleClassName(envelope.message.getClass)
- case NonFatal(e) =>
- logger.info(s"Expected NoClassDefFoundError, got: ${e}")
- ActorCellInfo.simpleClassName(envelope.message.getClass)
+ ActorCellInfo.simpleClassName(e.message.getClass)
+ case NonFatal(ex) =>
+ logger.info(s"Expected NoClassDefFoundError, got: ${ex}")
+ ActorCellInfo.simpleClassName(e.message.getClass)
}
}
}
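MessageClassAdvice now receives its argument as Any and performs the cast inside the body, with both exception handlers falling back to the plain class-name lookup. A small helper in the same style, purely illustrative (the object and method names are hypothetical):

    import org.apache.pekko.dispatch.Envelope

    object EnvelopeInspection {
      // Cast inside the body; an unexpected argument type degrades to a fallback value.
      def messageClassName(envelope: Any): String =
        try envelope.asInstanceOf[Envelope].message.getClass.getSimpleName
        catch { case _: ClassCastException => "Unknown" }
    }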
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorRefInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorRefInstrumentation.scala
index ccfaab2a9..02b153019 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorRefInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ActorRefInstrumentation.scala
@@ -7,6 +7,8 @@ import kamon.instrumentation.context.HasContext
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class ActorRefInstrumentation extends InstrumentationBuilder {
/**
@@ -22,7 +24,7 @@ class ActorRefInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.actor.RepointableActorRef")
.mixin(classOf[HasContext.MixinWithInitializer])
- .advise(method("point"), RepointableActorRefPointAdvice)
+ .advise(method("point"), classOf[RepointableActorRefPointAdvice])
}
trait HasGroupPath {
@@ -38,14 +40,15 @@ object HasGroupPath {
}
}
+class RepointableActorRefPointAdvice
object RepointableActorRefPointAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This repointableActorRef: Object): Scope =
+ @static def enter(@Advice.This repointableActorRef: Object): Scope =
Kamon.storeContext(repointableActorRef.asInstanceOf[HasContext].context)
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Scope, @Advice.This repointableActorRef: Object): Unit = {
+ @static def exit(@Advice.Enter scope: Scope, @Advice.This repointableActorRef: Object): Unit = {
scope.close()
repointableActorRef
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/AskPatternInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/AskPatternInstrumentation.scala
index 251017200..2f05b85ab 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/AskPatternInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/AskPatternInstrumentation.scala
@@ -27,6 +27,7 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodExit, Origin, Return}
import org.slf4j.LoggerFactory
+import scala.annotation.static
import scala.compat.Platform.EOL
import scala.concurrent.Future
@@ -52,7 +53,7 @@ object AskPatternInstrumentation {
)
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@Origin origin: String, @Return future: Future[AnyRef], @Argument(0) actor: ActorRef, @Argument(2) timeout: Timeout) = {
+ @static def onExit(@Origin origin: String, @Return future: Future[AnyRef], @Argument(0) actor: ActorRef, @Argument(2) timeout: Timeout) = {
if(PekkoPrivateAccess.isInternalAndActiveActorRef(actor) && Kamon.currentContext().nonEmpty()) {
PekkoInstrumentation.settings().askPatternWarning match {
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ClusterInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ClusterInstrumentation.scala
index f2d641c3b..d02d0ace4 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ClusterInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/ClusterInstrumentation.scala
@@ -10,21 +10,23 @@ import kamon.tag.TagSet
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
import scala.collection.mutable
class ClusterInstrumentation extends InstrumentationBuilder {
onType("org.apache.pekko.cluster.Cluster$")
- .advise(method("createExtension").and(takesArguments(1)), AfterClusterInitializationAdvice)
+ .advise(method("createExtension").and(takesArguments(1)), classOf[AfterClusterInitializationAdvice])
}
+class AfterClusterInitializationAdvice
object AfterClusterInitializationAdvice {
@Advice.OnMethodExit
- def onClusterExtensionCreated(@Advice.Argument(0) system: ExtendedActorSystem, @Advice.Return clusterExtension: Cluster): Unit = {
+ @static def onClusterExtensionCreated(@Advice.Argument(0) system: ExtendedActorSystem, @Advice.Return clusterExtension: Cluster): Unit = {
val settings = PekkoInstrumentation.settings()
if(settings.exposeClusterMetrics) {
- val stateExporter = system.systemActorOf(Props[ClusterInstrumentation.ClusterStateExporter], "kamon-cluster-state-exporter")
+ val stateExporter = system.systemActorOf(Props[ClusterInstrumentation.ClusterStateExporter](), "kamon-cluster-state-exporter")
clusterExtension.subscribe(stateExporter, classOf[ClusterEvent.ClusterDomainEvent])
}
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/DispatcherInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/DispatcherInstrumentation.scala
index c1b64dae4..afd1d4a50 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/DispatcherInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/DispatcherInstrumentation.scala
@@ -17,7 +17,6 @@
package kamon.instrumentation.pekko.instrumentations
import java.util.concurrent.{AbstractExecutorService, Callable, ExecutorService, ThreadFactory, TimeUnit}
-
import org.apache.pekko.dispatch.{DefaultExecutorServiceConfigurator, DispatcherPrerequisites, Dispatchers, ExecutorServiceFactory, ExecutorServiceFactoryProvider, ForkJoinExecutorConfigurator, PinnedDispatcherConfigurator, ThreadPoolExecutorConfigurator}
import kamon.Kamon
import kamon.instrumentation.pekko.PekkoInstrumentation
@@ -28,6 +27,8 @@ import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{Argument, SuperCall, This}
+import scala.annotation.static
+
class DispatcherInstrumentation extends InstrumentationBuilder {
/**
@@ -38,7 +39,7 @@ class DispatcherInstrumentation extends InstrumentationBuilder {
onSubTypesOf("org.apache.pekko.dispatch.ExecutorServiceFactory")
.mixin(classOf[HasDispatcherPrerequisites.Mixin])
.mixin(classOf[HasDispatcherName.Mixin])
- .intercept(method("createExecutorService"), InstrumentNewExecutorServiceOnPekko)
+ .intercept(method("createExecutorService"), classOf[InstrumentNewExecutorServiceOnPekko])
/**
The first step in getting the Actor System name is to read it from the prerequisites instance passed to the
@@ -50,7 +51,7 @@ class DispatcherInstrumentation extends InstrumentationBuilder {
"org.apache.pekko.dispatch.PinnedDispatcherConfigurator",
"org.apache.pekko.dispatch.DefaultExecutorServiceConfigurator")
.mixin(classOf[HasDispatcherPrerequisites.Mixin])
- .advise(isConstructor, CaptureDispatcherPrerequisitesOnExecutorConfigurator)
+ .advise(isConstructor, classOf[CaptureDispatcherPrerequisitesOnExecutorConfigurator])
/**
* Copies the Actor System and Dispatcher names to the ExecutorServiceFactory instances for the two types of
@@ -62,21 +63,22 @@ class DispatcherInstrumentation extends InstrumentationBuilder {
"org.apache.pekko.dispatch.PinnedDispatcherConfigurator",
"org.apache.pekko.dispatch.DefaultExecutorServiceConfigurator")
.mixin(classOf[HasDispatcherName.Mixin])
- .advise(method("createExecutorServiceFactory"), CopyDispatcherInfoToExecutorServiceFactory)
+ .advise(method("createExecutorServiceFactory"), classOf[CopyDispatcherInfoToExecutorServiceFactory])
/**
* This ensures that the ActorSystem name is not lost when creating PinnedDispatcher instances.
*/
onType("org.apache.pekko.dispatch.ThreadPoolConfig")
.mixin(classOf[HasDispatcherPrerequisites.Mixin])
- .advise(method("copy"), ThreadPoolConfigCopyAdvice)
+ .advise(method("copy"), classOf[ThreadPoolConfigCopyAdvice])
}
+class CaptureDispatcherPrerequisitesOnExecutorConfigurator
object CaptureDispatcherPrerequisitesOnExecutorConfigurator {
@Advice.OnMethodExit(suppress = classOf[Throwable])
- def exit(@Advice.This configurator: Any, @Advice.Argument(1) prerequisites: DispatcherPrerequisites): Unit = {
+ @static def exit(@Advice.This configurator: Any, @Advice.Argument(1) prerequisites: DispatcherPrerequisites): Unit = {
configurator match {
case fjec: ForkJoinExecutorConfigurator => fjec.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites)
case tpec: ThreadPoolExecutorConfigurator => tpec.threadPoolConfig.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites)
@@ -87,19 +89,21 @@ object CaptureDispatcherPrerequisitesOnExecutorConfigurator {
}
}
+class CopyDispatcherInfoToExecutorServiceFactory
object CopyDispatcherInfoToExecutorServiceFactory {
@Advice.OnMethodExit
- def exit(@Advice.This poolConfig: HasDispatcherPrerequisites, @Advice.Argument(0) dispatcherName: String, @Advice.Return factory: Any): Unit = {
+ @static def exit(@Advice.This poolConfig: HasDispatcherPrerequisites, @Advice.Argument(0) dispatcherName: String, @Advice.Return factory: Any): Unit = {
val factoryWithMixins = factory.asInstanceOf[HasDispatcherName with HasDispatcherPrerequisites]
factoryWithMixins.setDispatcherPrerequisites(poolConfig.dispatcherPrerequisites)
factoryWithMixins.setDispatcherName(dispatcherName)
}
}
+class InstrumentNewExecutorServiceOnPekko
object InstrumentNewExecutorServiceOnPekko {
- def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = {
+ @static def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = {
val executor = callable.call()
val actorSystemName = factory.dispatcherPrerequisites.settings.name
val dispatcherName = factory.dispatcherName
@@ -119,10 +123,11 @@ object InstrumentNewExecutorServiceOnPekko {
}
}
+class ThreadPoolConfigCopyAdvice
object ThreadPoolConfigCopyAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This original: Any, @Advice.Return copy: Any): Unit = {
+ @static def exit(@Advice.This original: Any, @Advice.Return copy: Any): Unit = {
copy.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(original.asInstanceOf[HasDispatcherPrerequisites].dispatcherPrerequisites)
copy.asInstanceOf[HasDispatcherName].setDispatcherName(original.asInstanceOf[HasDispatcherName].dispatcherName)
}
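createExecutorService is handled with .intercept(...) and a @SuperCall delegation rather than inline advice, but it follows the same companion-class/@static shape. A hypothetical interceptor of that kind (the names are illustrative, not from this patch):

    import java.util.concurrent.{Callable, ExecutorService}
    import scala.annotation.static
    import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.SuperCall

    class LoggingExecutorInterceptor
    object LoggingExecutorInterceptor {
      // Invokes the original factory method and inspects (or decorates) the result.
      @static def around(@SuperCall callable: Callable[ExecutorService]): ExecutorService = {
        val executor = callable.call()
        println(s"created executor service: ${executor.getClass.getName}")
        executor
      }
    }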
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EnvelopeInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EnvelopeInstrumentation.scala
index e606b16b8..c508d9e5b 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EnvelopeInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EnvelopeInstrumentation.scala
@@ -20,6 +20,8 @@ import kamon.instrumentation.context.{HasContext, HasTimestamp}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class EnvelopeInstrumentation extends InstrumentationBuilder {
@@ -29,13 +31,14 @@ class EnvelopeInstrumentation extends InstrumentationBuilder {
onType("org.apache.pekko.dispatch.Envelope")
.mixin(classOf[HasContext.Mixin])
.mixin(classOf[HasTimestamp.Mixin])
- .advise(method("copy"), EnvelopeCopyAdvice)
+ .advise(method("copy"), classOf[EnvelopeCopyAdvice])
}
+class EnvelopeCopyAdvice
object EnvelopeCopyAdvice {
@Advice.OnMethodExit
- def exit(@Advice.Return newEnvelope: Any, @Advice.This envelope: Any): Unit = {
+ @static def exit(@Advice.Return newEnvelope: Any, @Advice.This envelope: Any): Unit = {
newEnvelope.asInstanceOf[HasContext].setContext(envelope.asInstanceOf[HasContext].context)
newEnvelope.asInstanceOf[HasTimestamp].setTimestamp(envelope.asInstanceOf[HasTimestamp].timestamp)
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EventStreamInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EventStreamInstrumentation.scala
index 8fc97c988..4c0084f01 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EventStreamInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/EventStreamInstrumentation.scala
@@ -21,6 +21,8 @@ import kamon.instrumentation.pekko.PekkoMetrics
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodExit, This}
+import scala.annotation.static
+
class EventStreamInstrumentation extends InstrumentationBuilder {
/**
@@ -28,27 +30,35 @@ class EventStreamInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.event.EventStream")
.mixin(classOf[HasSystem.Mixin])
- .advise(isConstructor.and(takesArguments(2)), ConstructorAdvice)
- .advise(method("publish").and(takesArguments(1)), PublishMethodAdvice)
+ .advise(isConstructor.and(takesArguments(2)), classOf[ConstructorAdvice])
+ onType("org.apache.pekko.event.SubchannelClassification")
+ .advise(method("publish").and(takesArguments(1)), classOf[PublishMethodAdvice])
}
-
+class ConstructorAdvice
object ConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This eventStream: HasSystem, @Argument(0) system:ActorSystem): Unit = {
+ @static def exit(@This eventStream: HasSystem, @Argument(0) system:ActorSystem): Unit = {
eventStream.setSystem(system)
}
}
+class PublishMethodAdvice
object PublishMethodAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This stream:HasSystem, @Argument(0) event: AnyRef):Unit = event match {
- case _: DeadLetter => PekkoMetrics.forSystem(stream.system.name).deadLetters.increment()
- case _: UnhandledMessage => PekkoMetrics.forSystem(stream.system.name).unhandledMessages.increment()
- case _ => ()
- }
+ @static def exit(@This any: Any, @Argument(0) event: AnyRef):Unit =
+ try {
+ val stream = any.asInstanceOf[HasSystem]
+ event match {
+ case _: DeadLetter => PekkoMetrics.forSystem(stream.system.name).deadLetters.increment()
+ case _: UnhandledMessage => PekkoMetrics.forSystem(stream.system.name).unhandledMessages.increment()
+ case _ => ()
+ }
+ } catch {
+ case _: ClassCastException => ()
+ }
}
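The publish advice is now attached to org.apache.pekko.event.SubchannelClassification and takes @This as Any, silently skipping instances that do not carry the HasSystem mixin. The same guard could also be written as a type pattern; a sketch under that assumption (the helper names are illustrative, and HasSystem is the trait defined just below):

    import org.apache.pekko.actor.DeadLetter
    import kamon.instrumentation.pekko.PekkoMetrics

    object DeadLetterCounting {
      // Illustrative alternative to the ClassCastException guard used above.
      def countIfDeadLetter(publisher: Any, event: AnyRef): Unit =
        publisher match {
          case withSystem: HasSystem if event.isInstanceOf[DeadLetter] =>
            PekkoMetrics.forSystem(withSystem.system.name).deadLetters.increment()
          case _ => ()
        }
    }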
trait HasSystem {
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/RouterInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/RouterInstrumentation.scala
index eea73102d..3aa2eec67 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/RouterInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/RouterInstrumentation.scala
@@ -4,6 +4,8 @@ import org.apache.pekko.actor.{ActorRef, ActorSystem, Props}
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice._
+import scala.annotation.static
+
class RouterInstrumentation extends InstrumentationBuilder {
/**
@@ -11,16 +13,16 @@ class RouterInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.routing.RoutedActorCell")
.mixin(classOf[HasRouterMonitor.Mixin])
- .advise(isConstructor, RoutedActorCellConstructorAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageAdvice)
- .advise(method("sendMessage").and(takesArguments(1)), SendMessageOnRouterAdvice)
+ .advise(isConstructor, classOf[RoutedActorCellConstructorAdvice])
+ .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageAdvice])
+ .advise(method("sendMessage").and(takesArguments(1)), classOf[SendMessageOnRouterAdvice])
/**
* Captures the router and routee Props so that we can properly apply tags to the router metrics.
*/
onType("org.apache.pekko.routing.RoutedActorRef")
.mixin(classOf[HasRouterProps.Mixin])
- .advise(isConstructor, RoutedActorRefConstructorAdvice)
+ .advise(isConstructor, classOf[RoutedActorRefConstructorAdvice])
}
@@ -60,35 +62,38 @@ object HasRouterMonitor {
}
}
+class RoutedActorRefConstructorAdvice
object RoutedActorRefConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This ref: ActorRef, @Argument(1) routerProps: Props, @Argument(4) routeeProps: Props): Unit = {
+ @static def exit(@This ref: ActorRef, @Argument(1) routerProps: Props, @Argument(4) routeeProps: Props): Unit = {
val routedRef = ref.asInstanceOf[HasRouterProps]
routedRef.setRouteeProps(routeeProps)
routedRef.setRouterProps(routerProps)
}
}
+class RoutedActorCellConstructorAdvice
object RoutedActorCellConstructorAdvice {
@OnMethodExit(suppress = classOf[Throwable])
- def exit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(5) parent: ActorRef): Unit = {
+ @static def exit(@This cell: Any, @Argument(0) system: ActorSystem, @Argument(1) ref: ActorRef, @Argument(5) parent: ActorRef): Unit = {
cell.asInstanceOf[HasRouterMonitor].setRouterMonitor(RouterMonitor.from(cell, ref, parent, system))
}
}
+class SendMessageOnRouterAdvice
object SendMessageOnRouterAdvice {
def routerInstrumentation(cell: Any): RouterMonitor =
cell.asInstanceOf[HasRouterMonitor].routerMonitor
@OnMethodEnter(suppress = classOf[Throwable])
- def onEnter(@This cell: Any): Long =
+ @static def onEnter(@This cell: Any): Long =
routerInstrumentation(cell).processMessageStart()
@OnMethodExit(suppress = classOf[Throwable])
- def onExit(@This cell: Any, @Enter timestampBeforeProcessing: Long): Unit =
+ @static def onExit(@This cell: Any, @Enter timestampBeforeProcessing: Long): Unit =
routerInstrumentation(cell).processMessageEnd(timestampBeforeProcessing)
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/internal/CellWrapper.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/internal/CellWrapper.scala
index 288bc91fd..1a28669e3 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/internal/CellWrapper.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/instrumentations/internal/CellWrapper.scala
@@ -38,17 +38,15 @@ import kamon.instrumentation.context.HasContext
* will be propagated for all queued messages.
*/
class CellWrapper(val underlying: Cell) extends Cell {
- override def sendMessage(msg: Envelope): Unit = {
- if(msg.isInstanceOf[HasContext]) {
- val context = msg.asInstanceOf[HasContext].context
- Kamon.runWithContext(context) {
- underlying.sendMessage(msg)
- }
- }
- else {
+ override def sendMessage(msg: Envelope): Unit = try {
+ val context = msg.asInstanceOf[HasContext].context
+ Kamon.runWithContext(context) {
underlying.sendMessage(msg)
}
}
+ catch {
+ case _: ClassCastException => underlying.sendMessage(msg)
+ }
override def sendSystemMessage(msg: SystemMessage): Unit =
underlying.sendSystemMessage(msg)
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/MessageBufferInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/MessageBufferInstrumentation.scala
index 9beeb6324..b50fc9c21 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/MessageBufferInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/MessageBufferInstrumentation.scala
@@ -13,7 +13,7 @@ class MessageBufferInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.util.MessageBuffer$Node")
.mixin(classOf[HasContext.Mixin])
- .advise(isConstructor, CaptureCurrentContextOnExit)
- .advise(method("apply"), InvokeWithCapturedContext)
+ .advise(isConstructor, classOf[CaptureCurrentContextOnExit])
+ .advise(method("apply"), classOf[InvokeWithCapturedContext])
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/RemotingInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/RemotingInstrumentation.scala
index 12a3cbd58..241bdca11 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/RemotingInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/RemotingInstrumentation.scala
@@ -13,6 +13,8 @@ import kanela.agent.libs.net.bytebuddy.asm.Advice
import org.apache.pekko.kamon.instrumentation.pekko.remote.internal.{PekkoPduProtobufCodecConstructMessageMethodInterceptor, PekkoPduProtobufCodecDecodeMessage}
import org.apache.pekko.remote.artery.CaptureCurrentInboundEnvelope
+import scala.annotation.static
+
class RemotingInstrumentation extends InstrumentationBuilder {
@@ -23,10 +25,10 @@ class RemotingInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.remote.EndpointManager$Send")
.mixin(classOf[HasContext.Mixin])
- .advise(isConstructor, CaptureCurrentContextOnExit)
+ .advise(isConstructor, classOf[CaptureCurrentContextOnExit])
onType("org.apache.pekko.remote.EndpointWriter")
- .advise(method("writeSend"), WriteSendWithContext)
+ .advise(method("writeSend"), classOf[WriteSendWithContext])
/**
* Reads and writes the Pekko PDU using a modified version of the Protobuf that has an extra field for a Context
@@ -42,65 +44,69 @@ class RemotingInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.actor.ActorSystemImpl")
.mixin(classOf[HasSerializationInstruments.Mixin])
- .advise(isConstructor, InitializeActorSystemAdvice)
+ .advise(isConstructor, classOf[InitializeActorSystemAdvice])
/**
* Artery
*/
onType("org.apache.pekko.remote.artery.ReusableOutboundEnvelope")
.mixin(classOf[HasContext.Mixin])
- .advise(method("copy"), CopyContextOnReusableEnvelope)
+ .advise(method("copy"), classOf[CopyContextOnReusableEnvelope])
onType("org.apache.pekko.remote.artery.Association")
- .advise(method("createOutboundEnvelope$1"), CaptureCurrentContextOnReusableEnvelope)
+ .advise(method("createOutboundEnvelope$1"), classOf[CaptureCurrentContextOnReusableEnvelope])
onType("org.apache.pekko.remote.artery.RemoteInstruments")
.advise(method("deserialize"), classOf[CaptureCurrentInboundEnvelope])
onType("org.apache.pekko.remote.artery.ReusableInboundEnvelope")
.mixin(classOf[HasContext.Mixin])
- .advise(method("copyForLane"), CopyContextOnReusableEnvelope)
+ .advise(method("copyForLane"), classOf[CopyContextOnReusableEnvelope])
onType("org.apache.pekko.remote.artery.MessageDispatcher")
- .advise(method("dispatch"), ArteryMessageDispatcherAdvice)
+ .advise(method("dispatch"), classOf[ArteryMessageDispatcherAdvice])
}
+class ArteryMessageDispatcherAdvice
object ArteryMessageDispatcherAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) envelope: Any): Storage.Scope =
+ @static def enter(@Advice.Argument(0) envelope: Any): Storage.Scope =
Kamon.storeContext(envelope.asInstanceOf[HasContext].context)
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Storage.Scope): Unit =
+ @static def exit(@Advice.Enter scope: Storage.Scope): Unit =
scope.close()
}
+class CopyContextOnReusableEnvelope
object CopyContextOnReusableEnvelope {
@Advice.OnMethodExit
- def exit(@Advice.This oldEnvelope: Any, @Advice.Return newEnvelope: Any): Unit =
+ @static def exit(@Advice.This oldEnvelope: Any, @Advice.Return newEnvelope: Any): Unit =
newEnvelope.asInstanceOf[HasContext].setContext(oldEnvelope.asInstanceOf[HasContext].context)
}
+class CaptureCurrentContextOnReusableEnvelope
object CaptureCurrentContextOnReusableEnvelope {
@Advice.OnMethodExit
- def exit(@Advice.Return envelope: Any): Unit = {
+ @static def exit(@Advice.Return envelope: Any): Unit = {
envelope.asInstanceOf[HasContext].setContext(Kamon.currentContext())
}
}
+class WriteSendWithContext
object WriteSendWithContext {
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) send: Any): Scope = {
+ @static def enter(@Advice.Argument(0) send: Any): Scope = {
Kamon.storeContext(send.asInstanceOf[HasContext].context)
}
@Advice.OnMethodExit
- def exit(@Advice.Enter scope: Scope): Unit = {
+ @static def exit(@Advice.Enter scope: Object): Unit = {
scope.asInstanceOf[Scope].close()
}
}
@@ -118,23 +124,25 @@ object HasSerializationInstruments {
}
}
+class InitializeActorSystemAdvice
object InitializeActorSystemAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This system: ActorSystem with HasSerializationInstruments): Unit =
+ @static def exit(@Advice.This system: ActorSystem with HasSerializationInstruments): Unit =
system.setSerializationInstruments(new SerializationInstruments(system.name))
}
+class MeasureSerializationTime
object MeasureSerializationTime {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
if(PekkoRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long): Unit = {
+ @static def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long): Unit = {
if(startNanoTime != 0L) {
system.asInstanceOf[HasSerializationInstruments]
.serializationInstruments
@@ -144,15 +152,16 @@ object MeasureSerializationTime {
}
}
+class MeasureDeserializationTime
object MeasureDeserializationTime {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
if(PekkoRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long, @Advice.Return msg: Any): Unit = {
+ @static def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long, @Advice.Return msg: Any): Unit = {
if(PekkoPrivateAccess.isSystemMessage(msg)) {
msg match {
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/ShardingInstrumentation.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/ShardingInstrumentation.scala
index b769f7073..069f30c6d 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/ShardingInstrumentation.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/ShardingInstrumentation.scala
@@ -8,6 +8,8 @@ import kamon.util.Filter
import kanela.agent.api.instrumentation.InstrumentationBuilder
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
class ShardingInstrumentation extends InstrumentationBuilder {
/**
@@ -16,9 +18,9 @@ class ShardingInstrumentation extends InstrumentationBuilder {
*/
onType("org.apache.pekko.cluster.sharding.ShardRegion")
.mixin(classOf[HasShardingInstruments.Mixin])
- .advise(isConstructor, InitializeShardRegionAdvice)
- .advise(method("deliverMessage"), DeliverMessageOnShardRegion)
- .advise(method("postStop"), RegionPostStopAdvice)
+ .advise(isConstructor, classOf[InitializeShardRegionAdvice])
+ .advise(method("deliverMessage"), classOf[DeliverMessageOnShardRegion])
+ .advise(method("postStop"), classOf[RegionPostStopAdvice])
/**
@@ -32,16 +34,16 @@ class ShardingInstrumentation extends InstrumentationBuilder {
onType("org.apache.pekko.cluster.sharding.Shard")
.mixin(classOf[HasShardingInstruments.Mixin])
.mixin(classOf[HasShardCounters.Mixin])
- .advise(isConstructor, InitializeShardAdvice)
- .advise(method("onLeaseAcquired"), ShardInitializedAdvice)
- .advise(method("postStop"), ShardPostStopStoppedAdvice)
- .advise(method("getOrCreateEntity"), ShardGetOrCreateEntityAdvice)
- .advise(method("entityTerminated"), ShardEntityTerminatedAdvice)
- .advise(method("org$apache$pekko$cluster$sharding$Shard$$deliverMessage"), ShardDeliverMessageAdvice)
- .advise(method("deliverMessage"), ShardDeliverMessageAdvice)
+ .advise(isConstructor, classOf[InitializeShardAdvice])
+ .advise(method("onLeaseAcquired"), classOf[ShardInitializedAdvice])
+ .advise(method("postStop"), classOf[ShardPostStopStoppedAdvice])
+ .advise(method("getOrCreateEntity"), classOf[ShardGetOrCreateEntityAdvice])
+ .advise(method("entityTerminated"), classOf[ShardEntityTerminatedAdvice])
+ .advise(method("org$apache$pekko$cluster$sharding$Shard$$deliverMessage"), classOf[ShardDeliverMessageAdvice])
+ .advise(method("deliverMessage"), classOf[ShardDeliverMessageAdvice])
onType("org.apache.pekko.cluster.sharding.Shard")
- .advise(method("shardInitialized"), ShardInitializedAdvice)
+ .advise(method("shardInitialized"), classOf[ShardInitializedAdvice])
}
@@ -74,10 +76,11 @@ object HasShardCounters {
}
}
+class InitializeShardRegionAdvice
object InitializeShardRegionAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This region: Actor with HasShardingInstruments, @Advice.Argument(0) typeName: String): Unit = {
+ @static def exit(@Advice.This region: Actor with HasShardingInstruments, @Advice.Argument(0) typeName: String): Unit = {
region.setShardingInstruments(new ShardingInstruments(region.context.system.name, typeName))
val system = region.context.system
@@ -88,10 +91,11 @@ object InitializeShardRegionAdvice {
}
}
+class InitializeShardAdvice
object InitializeShardAdvice {
@Advice.OnMethodExit
- def exit(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) typeName: String,
+ @static def exit(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) typeName: String,
@Advice.Argument(1) shardID: String): Unit = {
val shardingInstruments = new ShardingInstruments(shard.context.system.name, typeName)
@@ -103,10 +107,11 @@ object InitializeShardAdvice {
}
}
+class DeliverMessageOnShardRegion
object DeliverMessageOnShardRegion {
@Advice.OnMethodEnter
- def enter(@Advice.This region: HasShardingInstruments, @Advice.Argument(0) message: Any): Unit = {
+ @static def enter(@Advice.This region: Object with HasShardingInstruments, @Advice.Argument(0) message: Any): Unit = {
// NOTE: The "deliverMessage" method also handles the "RestartShard" message, which is not a user-facing message,
// but it should not happen often, so we won't do any additional matching on it to filter it out of the
// metric.
@@ -115,32 +120,36 @@ object DeliverMessageOnShardRegion {
}
+class RegionPostStopAdvice
object RegionPostStopAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: Object with HasShardingInstruments): Unit =
shard.shardingInstruments.remove()
}
+class ShardInitializedAdvice
object ShardInitializedAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: Object with HasShardingInstruments): Unit =
shard.shardingInstruments.hostedShards.increment()
}
+class ShardPostStopStoppedAdvice
object ShardPostStopStoppedAdvice {
@Advice.OnMethodExit
- def enter(@Advice.This shard: HasShardingInstruments): Unit =
+ @static def enter(@Advice.This shard: Object with HasShardingInstruments): Unit =
shard.shardingInstruments.hostedShards.decrement()
}
+class ShardGetOrCreateEntityAdvice
object ShardGetOrCreateEntityAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) entityID: String): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters, @Advice.Argument(0) entityID: String): Unit = {
if(shard.context.child(entityID).isEmpty) {
// The entity is not created just yet, but we know that it will be created right after this.
shard.shardingInstruments.hostedEntities.increment()
@@ -149,18 +158,20 @@ object ShardGetOrCreateEntityAdvice {
}
}
+class ShardEntityTerminatedAdvice
object ShardEntityTerminatedAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
shard.shardingInstruments.hostedEntities.decrement()
shard.hostedEntitiesCounter.decrementAndGet()
}
}
+class ShardDeliverMessageAdvice
object ShardDeliverMessageAdvice {
@Advice.OnMethodEnter
- def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
+ @static def enter(@Advice.This shard: Actor with HasShardingInstruments with HasShardCounters): Unit = {
shard.processedMessagesCounter.incrementAndGet()
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/artery/KamonRemoteInstrument.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/artery/KamonRemoteInstrument.scala
index a82ae5e9b..9cf5d2e8c 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/artery/KamonRemoteInstrument.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/artery/KamonRemoteInstrument.scala
@@ -9,6 +9,7 @@ import kanela.agent.libs.net.bytebuddy.asm.Advice
import org.slf4j.LoggerFactory
import java.nio.ByteBuffer
+import scala.annotation.static
import scala.util.control.NonFatal
class KamonRemoteInstrument(system: ExtendedActorSystem) extends RemoteInstrument {
@@ -85,12 +86,12 @@ object CaptureCurrentInboundEnvelope {
}
@Advice.OnMethodEnter
- def enter(@Advice.Argument(0) inboundEnvelope: InboundEnvelope): Unit = {
+ @static def enter(@Advice.Argument(0) inboundEnvelope: Object with InboundEnvelope): Unit = {
CurrentInboundEnvelope.set(inboundEnvelope)
}
@Advice.OnMethodExit
- def exit(): Unit = {
+ @static def exit(): Unit = {
CurrentInboundEnvelope.remove()
}
}
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/ArterySerializationAdvice.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/ArterySerializationAdvice.scala
index 5641210e4..44f4b7a85 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/ArterySerializationAdvice.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/ArterySerializationAdvice.scala
@@ -1,7 +1,6 @@
package org.apache.pekko.remote.kamon.instrumentation.pekko.remote.internal.remote
import java.nio.ByteBuffer
-
import org.apache.pekko.remote.artery._
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.serialization.Serialization
@@ -11,6 +10,8 @@ import kamon.instrumentation.pekko.PekkoRemoteMetrics
import kamon.instrumentation.context.HasContext
import kanela.agent.libs.net.bytebuddy.asm.Advice
+import scala.annotation.static
+
/**
* For Artery messages we will always add two sections to the end of each serialized message: the Context and the size
@@ -25,12 +26,12 @@ class SerializeForArteryAdvice
object SerializeForArteryAdvice {
@Advice.OnMethodEnter
- def enter(): Long = {
+ @static def enter(): Long = {
System.nanoTime()
}
@Advice.OnMethodExit
- def exit(@Advice.Argument(0) serialization: Serialization, @Advice.Argument(1) envelope: OutboundEnvelope,
+ @static def exit(@Advice.Argument(0) serialization: Serialization, @Advice.Argument(1) envelope: OutboundEnvelope,
@Advice.Argument(3) envelopeBuffer: EnvelopeBuffer, @Advice.Enter startTime: Long): Unit = {
val instruments = PekkoRemoteMetrics.serializationInstruments(serialization.system.name)
@@ -75,7 +76,7 @@ object DeserializeForArteryAdvice {
)
@Advice.OnMethodEnter
- def exit(@Advice.Argument(5) envelopeBuffer: EnvelopeBuffer): DeserializationInfo = {
+ @static def exit(@Advice.Argument(5) envelopeBuffer: EnvelopeBuffer): DeserializationInfo = {
val startTime = System.nanoTime()
val messageBuffer = envelopeBuffer.byteBuffer
val messageStart = messageBuffer.position()
@@ -102,7 +103,7 @@ object DeserializeForArteryAdvice {
}
@Advice.OnMethodExit(onThrowable = classOf[Throwable])
- def exit(@Advice.Argument(0) system: ActorSystem, @Advice.Argument(5) envelopeBuffer: EnvelopeBuffer,
+ @static def exit(@Advice.Argument(0) system: ActorSystem, @Advice.Argument(5) envelopeBuffer: EnvelopeBuffer,
@Advice.Enter deserializationInfo: DeserializationInfo, @Advice.Thrown error: Throwable): Unit = {
if(error == null) {
@@ -142,7 +143,7 @@ class CaptureContextOnInboundEnvelope
object CaptureContextOnInboundEnvelope {
@Advice.OnMethodEnter
- def enter(@Advice.This inboundEnvelope: Any): Unit = {
+ @static def enter(@Advice.This inboundEnvelope: Any): Unit = {
val lastContext = DeserializeForArteryAdvice.LastDeserializedContext.get()
if(lastContext != null) {
inboundEnvelope.asInstanceOf[HasContext].setContext(lastContext)
diff --git a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/PekkoPduProtobufCodecDecodeMessageMethodAdvisor.scala b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/PekkoPduProtobufCodecDecodeMessageMethodAdvisor.scala
index 6fc297e4f..1c861b01b 100644
--- a/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/PekkoPduProtobufCodecDecodeMessageMethodAdvisor.scala
+++ b/instrumentation/kamon-pekko/src/main/scala/kamon/instrumentation/pekko/remote/internal/PekkoPduProtobufCodecDecodeMessageMethodAdvisor.scala
@@ -9,6 +9,8 @@ import kamon.context.BinaryPropagation.ByteStreamReader
import kamon.instrumentation.pekko.PekkoRemoteMetrics
import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodEnter}
+import scala.annotation.static
+
/**
* Advisor for org.apache.pekko.remote.transport.PekkoPduProtobufCodec$::decodeMessage
*/
@@ -17,7 +19,7 @@ class PekkoPduProtobufCodecDecodeMessage
object PekkoPduProtobufCodecDecodeMessage {
@OnMethodEnter
- def enter(@Argument(0) bs: ByteString, @Argument(1) provider: RemoteActorRefProvider, @Argument(2) localAddress: Address): Unit = {
+ @static def enter(@Argument(0) bs: ByteString, @Argument(1) provider: RemoteActorRefProvider, @Argument(2) localAddress: Address): Unit = {
val ackAndEnvelope = AckAndContextAwareEnvelopeContainer.parseFrom(bs.toArray)
if (ackAndEnvelope.hasEnvelope && ackAndEnvelope.getEnvelope.hasTraceContext) {
val remoteCtx = ackAndEnvelope.getEnvelope.getTraceContext
diff --git a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/ActorCellInstrumentationSpec.scala b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/ActorCellInstrumentationSpec.scala
index c85ed0d29..abe57b89c 100644
--- a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/ActorCellInstrumentationSpec.scala
+++ b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/ActorCellInstrumentationSpec.scala
@@ -29,12 +29,13 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import scala.collection.mutable.ListBuffer
+import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrumentationSpec")) with AnyWordSpecLike
with BeforeAndAfterAll with ImplicitSender with Eventually with MetricInspection.Syntax with Matchers with InitAndStopKamonAfterAll {
- implicit lazy val executionContext = system.dispatcher
+ implicit lazy val executionContext: ExecutionContext = system.dispatcher
import ContextTesting._
"the message passing instrumentation" should {
@@ -59,7 +60,7 @@ class ActorCellInstrumentationSpec extends TestKit(ActorSystem("ActorCellInstrum
}
"propagate the current context when using the ask pattern" in new EchoActorFixture {
- implicit val timeout = Timeout(1 seconds)
+ implicit val timeout: Timeout = Timeout(1 seconds)
Kamon.runWithContext(testContext("propagate-with-ask")) {
// The pipe pattern uses Futures internally, so the FutureTracing test should cover the underpinnings of it.
(contextEchoActor ? "test") pipeTo (testActor)
diff --git a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/AskPatternInstrumentationSpec.scala b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/AskPatternInstrumentationSpec.scala
index ab4fc7fec..a5f51a267 100644
--- a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/AskPatternInstrumentationSpec.scala
+++ b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/AskPatternInstrumentationSpec.scala
@@ -17,24 +17,24 @@
package kamon.instrumentation.pekko
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.instrumentation.pekko.ContextTesting._
+import kamon.testkit.InitAndStopKamonAfterAll
import org.apache.pekko.actor._
import org.apache.pekko.pattern.ask
import org.apache.pekko.testkit.{EventFilter, ImplicitSender, TestKit}
import org.apache.pekko.util.Timeout
-import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.testkit.InitAndStopKamonAfterAll
-import kamon.instrumentation.pekko.ContextTesting._
-import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
class AskPatternInstrumentationSpec extends TestKit(ActorSystem("AskPatternInstrumentationSpec")) with AnyWordSpecLike
with InitAndStopKamonAfterAll with ImplicitSender {
- implicit lazy val ec = system.dispatcher
- implicit val askTimeout = Timeout(10 millis)
+ implicit lazy val ec: ExecutionContext = system.dispatcher
+ implicit val askTimeout: Timeout = Timeout(10 millis)
// TODO: Make this work with ActorSelections
@@ -93,7 +93,7 @@ class AskPatternInstrumentationSpec extends TestKit(ActorSystem("AskPatternInstr
}
class NoReply extends Actor {
- def receive = {
+ def receive: Receive = {
case _ =>
}
}
diff --git a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/MessageTracingSpec.scala b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/MessageTracingSpec.scala
index c11cd5877..5fc20ecd4 100644
--- a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/MessageTracingSpec.scala
+++ b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/MessageTracingSpec.scala
@@ -38,11 +38,11 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
traced ! "ping"
expectMsg("pong")
- eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ eventually(timeout(4 seconds)) {
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
spanTags("component") shouldBe "pekko.actor"
- span.operationName shouldBe("tell(String)")
+ span.operationName shouldBe "tell(String)"
spanTags("pekko.actor.path") shouldNot include ("filteredout")
spanTags("pekko.actor.path") should be ("MessageTracing/user/traced-probe-1")
}
@@ -54,9 +54,9 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
expectMsg("pong")
eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
- span.operationName shouldBe("tell(String)")
+ span.operationName shouldBe "tell(String)"
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.system") shouldBe "MessageTracing"
spanTags("pekko.actor.path") shouldBe "MessageTracing/user/traced"
@@ -68,9 +68,9 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
Await.ready(pong, 10 seconds)
eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
- span.operationName shouldBe("ask(String)")
+ span.operationName shouldBe "ask(String)"
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.system") shouldBe "MessageTracing"
spanTags("pekko.actor.path") shouldBe "MessageTracing/user/traced"
@@ -87,8 +87,8 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
expectMsg("pong")
// Span for the first actor message
- val firstSpanID = eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ val firstSpanID = eventually(timeout(4 seconds)) {
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
spanTags("component") shouldBe "pekko.actor"
@@ -100,8 +100,8 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
}
// Span for the second actor message
- eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ eventually(timeout(4 seconds)) {
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
span.parentId shouldBe firstSpanID
span.operationName should include("tell(String)")
@@ -122,10 +122,10 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
expectMsg("pong")
// Span for the first actor message
- val firstSpanID = eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ val firstSpanID = eventually(timeout(4 seconds)) {
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
- span.operationName shouldBe("tell(Tuple2)")
+ span.operationName shouldBe "tell(Tuple2)"
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.system") shouldBe "MessageTracing"
spanTags("pekko.actor.path") shouldBe "MessageTracing/user/traced-chain-first"
@@ -136,11 +136,11 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
}
// Span for the second actor message
- eventually(timeout(2 seconds)) {
- val span = testSpanReporter.nextSpan().value
+ eventually(timeout(4 seconds)) {
+ val span = testSpanReporter().nextSpan().value
val spanTags = stringTag(span) _
span.parentId shouldBe firstSpanID
- span.operationName shouldBe("tell(String)")
+ span.operationName shouldBe "tell(String)"
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.system") shouldBe "MessageTracing"
spanTags("pekko.actor.path") shouldBe "MessageTracing/user/traced-chain-last"
@@ -157,7 +157,7 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
expectMsg("pong")
eventually(timeout(2 seconds)) {
- val spanTags = stringTag(testSpanReporter.nextSpan().value) _
+ val spanTags = stringTag(testSpanReporter().nextSpan().value) _
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.actor.path") shouldNot include ("nontraced-pool-router")
spanTags("pekko.actor.path") should be ("MessageTracing/user/traced-routee-one")
@@ -171,17 +171,17 @@ class MessageTracingSpec extends TestKit(ActorSystem("MessageTracing")) with Any
expectMsg("pong")
eventually(timeout(2 seconds)) {
- val spanTags = stringTag(testSpanReporter.nextSpan().value) _
+ val spanTags = stringTag(testSpanReporter().nextSpan().value) _
spanTags("component") shouldBe "pekko.actor"
spanTags("pekko.actor.path") should be ("MessageTracing/user/traced-pool-router")
}
}
"not track Pekko Streams actors" in {
- implicit val timeout = Timeout(10 seconds)
+ implicit val timeout: Timeout = Timeout(10 seconds)
val actorWithMaterializer = system.actorOf(Props[ActorWithMaterializer])
- val finishedStream = Kamon.runWithSpan(Kamon.serverSpanBuilder("wrapper", "test").start()) {
+ val _ = Kamon.runWithSpan(Kamon.serverSpanBuilder("wrapper", "test").start()) {
actorWithMaterializer.ask("stream").mapTo[String]
}
@@ -222,7 +222,7 @@ class TracingTestActor extends Actor {
}
class ActorWithMaterializer extends Actor {
- implicit val mat = ActorMaterializer()
+ implicit val mat: Materializer = Materializer(context)
override def receive: Receive = {
case "stream" =>
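The test actor now builds its materializer with Materializer(context) instead of the deprecated ActorMaterializer(); created that way, the materializer is owned by the actor and shuts down with it. A minimal sketch of that usage (the actor and the stream it runs are illustrative):

    import org.apache.pekko.actor.Actor
    import org.apache.pekko.stream.Materializer
    import org.apache.pekko.stream.scaladsl.{Sink, Source}

    class StreamingActor extends Actor {
      // Bound to this actor's lifecycle; no ActorMaterializer needed.
      implicit val mat: Materializer = Materializer(context)

      override def receive: Receive = {
        case "run" => Source(1 to 3).runWith(Sink.foreach(println))
      }
    }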
diff --git a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/SystemMessageInstrumentationSpec.scala b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/SystemMessageInstrumentationSpec.scala
index 3aeb7c84d..063104857 100644
--- a/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/SystemMessageInstrumentationSpec.scala
+++ b/instrumentation/kamon-pekko/src/test/scala/kamon/instrumentation/pekko/SystemMessageInstrumentationSpec.scala
@@ -17,28 +17,29 @@
package kamon.instrumentation.pekko
-import org.apache.pekko.actor.SupervisorStrategy.{Escalate, Restart, Resume, Stop}
-import org.apache.pekko.actor._
-import org.apache.pekko.testkit.{ImplicitSender, TestKit}
import kamon.Kamon
import kamon.instrumentation.pekko.ContextTesting._
import kamon.tag.Lookups._
+import org.apache.pekko.actor.SupervisorStrategy.{Escalate, Restart, Resume, Stop}
+import org.apache.pekko.actor._
+import org.apache.pekko.testkit.{ImplicitSender, TestKit}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.{AnyWordSpec, AnyWordSpecLike}
+import org.scalatest.wordspec.AnyWordSpecLike
+import scala.concurrent.ExecutionContextExecutor
import scala.util.control.NonFatal
class SystemMessageInstrumentationSpec extends TestKit(ActorSystem("ActorSystemMessageInstrumentationSpec")) with AnyWordSpecLike with Matchers
with BeforeAndAfterAll with ImplicitSender {
- implicit lazy val executionContext = system.dispatcher
+ implicit lazy val executionContext: ExecutionContextExecutor = system.dispatcher
"the system message passing instrumentation" should {
"capture and propagate the current context while processing the Create message in top level actors" in {
Kamon.runWithContext(testContext("creating-top-level-actor")) {
system.actorOf(Props(new Actor {
testActor ! propagatedContextKey()
- def receive: Actor.Receive = { case any => }
+ def receive: Actor.Receive = { case _ => }
}))
}
@@ -123,19 +124,19 @@ class SystemMessageInstrumentationSpec extends TestKit(ActorSystem("ActorSystemM
sendPostStop: Boolean = false, sendPreStart: Boolean = false): ActorRef = {
class GrandParent extends Actor {
- val child = context.actorOf(Props(new Parent))
+ val child: ActorRef = context.actorOf(Props(new Parent))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
case NonFatal(_) => testActor ! propagatedContextKey(); Stop
}
- def receive = {
+ def receive: Receive = {
case any => child forward any
}
}
class Parent extends Actor {
- val child = context.actorOf(Props(new Child))
+ val child: ActorRef = context.actorOf(Props(new Child))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
case NonFatal(_) => testActor ! propagatedContextKey(); directive
@@ -152,7 +153,7 @@ class SystemMessageInstrumentationSpec extends TestKit(ActorSystem("ActorSystemM
}
class Child extends Actor {
- def receive = {
+ def receive: Receive = {
case "fail" => throw new ArithmeticException("Division by zero.")
case "context" => sender ! propagatedContextKey()
}
diff --git a/project/Build.scala b/project/Build.scala
index f28406208..cbe479060 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -59,7 +59,7 @@ object BaseProject extends AutoPlugin {
val `scala_2.11_version` = "2.11.12"
val `scala_2.12_version` = "2.12.15"
val `scala_2.13_version` = "2.13.8"
- val scala_3_version = "3.2.0"
+ val scala_3_version = "3.3.1"
// This installs the GPG signing key from the
setupGpg()