Skip to content

Commit

Permalink
[JSON-API] Put the request & response bodies into the log ctx if the …
Browse files Browse the repository at this point in the history
…log level is debug (for specific statements) (#10298)

* [JSON-API] Log json request & response bodies in debug

This also re-adds logging of incoming requests and of the responses that are sent out.

changelog_begin

- [JSON-API] Logging of the request and response bodies is now available for appropriate requests if the chosen log level is equal to or lower than DEBUG. These can then be found in the logging context of the request begin & end log messages (the field names in the ctx are "request_body" and "response_body").

changelog_end

* Move the http request throughput marking to the right place including the logging of the processing time

* Ensure that the processing time measuring is implemented consistently
  • Loading branch information
realvictorprm authored Jul 19, 2021
1 parent 3c0010b commit 6abb903
Show file tree
Hide file tree
Showing 9 changed files with 171 additions and 9 deletions.
2 changes: 1 addition & 1 deletion ledger-service/http-json/src/it/resources/logback.xml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
<!-- encoders are assigned the type
ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg %replace(, context: %marker){', context: $', ''} %n</pattern>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%replace(, context: %marker){', context: $', ''} %n</pattern>
</encoder>
</appender>

Expand Down
2 changes: 1 addition & 1 deletion ledger-service/http-json/src/main/resources/logback.xml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
<!-- encoders are assigned the type
ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
<encoder>
<pattern>%date{"dd-MM-yyyy HH:mm:ss.SSS", UTC} [%thread] %-5level %logger{36} - %msg %replace(, context: %marker){', context: $', ''} %n</pattern>
<pattern>%date{"dd-MM-yyyy HH:mm:ss.SSS", UTC} [%thread] %-5level %logger{36} - %msg%replace(, context: %marker){', context: $', ''} %n</pattern>
</encoder>
</else>
</if>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,12 @@ import akka.http.scaladsl.model.headers.{
ModeledCustomHeader,
ModeledCustomHeaderCompanion,
OAuth2BearerToken,
`Content-Type`,
`X-Forwarded-Proto`,
}
import akka.http.scaladsl.server
import akka.http.scaladsl.server.Directives.extractClientIP
import akka.http.scaladsl.server.{Directive0, Route}
import akka.http.scaladsl.server.{Directive, Directive0, PathMatcher, Route}
import akka.http.scaladsl.server.RouteResult._
import akka.stream.Materializer
import akka.stream.scaladsl.{Flow, Source}
Expand Down Expand Up @@ -47,7 +49,6 @@ import scala.util.control.NonFatal
import com.daml.logging.{ContextualizedLogger, LoggingContextOf}
import com.daml.metrics.{Metrics, Timed}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Directive

class Endpoints(
allowNonHttps: Boolean,
Expand All @@ -59,6 +60,7 @@ class Endpoints(
healthService: HealthService,
encoder: DomainJsonEncoder,
decoder: DomainJsonDecoder,
shouldLogHttpBodies: Boolean,
maxTimeToCollectRequest: FiniteDuration = FiniteDuration(5, "seconds"),
)(implicit ec: ExecutionContext, mat: Materializer) {

Expand All @@ -77,18 +79,121 @@ class Endpoints(
private def toRoute(res: => Future[Error \/ SearchResult[Error \/ JsValue]]): Route =
responseToRoute(httpResponse(res))

// Renders the "request begin" log line for an inbound HTTP request.
private def mkRequestLogMsg(request: HttpRequest, remoteAddress: RemoteAddress) = {
  val method = request.method.value
  s"Incoming $method request on ${request.uri} from $remoteAddress"
}

// Renders the "request end" log line for an outbound HTTP response.
private def mkResponseLogMsg(response: HttpResponse) = {
  val status = response.status
  s"Responding to client with HTTP $status"
}

// Always place this directive after a path directive, to ensure that request bodies are not
// logged multiple times (which would happen whenever a path match is attempted more than once).
// In short: put this directive after your final path matcher.
// Wraps the inner route so that the incoming request and the outgoing response are logged
// via the two supplied callbacks: the request is logged before the route's result future is
// awaited, the response once the route has completed.
//
// NOTE(review): a non-Complete RouteResult (i.e. a rejection) deliberately fails the
// returned future with a RuntimeException — this directive must only be placed where a
// match is guaranteed (e.g. after a path directive); see the comment above.
private def logRequestResponseHelper(
logIncomingRequest: (HttpRequest, RemoteAddress) => Future[Unit],
logResponse: HttpResponse => Future[Unit],
): Directive0 =
extractRequest & extractClientIP tflatMap { case (request, remoteAddress) =>
mapRouteResultFuture { responseF =>
for {
// Log the request first, before waiting on the inner route's result.
_ <- logIncomingRequest(request, remoteAddress)
response <- responseF
_ <- response match {
// Only completed responses can be logged; anything else is a programming error here.
case Complete(httpResponse) => logResponse(httpResponse)
case _ =>
Future.failed(
new RuntimeException(
"""Logging the request & response should never happen on routes which get rejected.
|Make sure to place the directive only at places where a match is guaranteed (e.g. after the path directive).""".stripMargin
)
)
}
} yield response
}
}

// Request/response logging that additionally puts the JSON body of the request and of the
// response into the logging context (fields "request_body" / "response_body"). A body is
// only extracted when the message's Content-Type header is exactly application/json;
// otherwise a plain log statement without the body is emitted.
//
// NOTE(review): the messages are emitted at INFO level here; the DEBUG gating mentioned in
// the changelog happens where `shouldLogHttpBodies` is computed — confirm against HttpService.
private def logJsonRequestAndResult(implicit
lc: LoggingContextOf[InstanceUUID with RequestID]
): Directive0 = {
// Logs `msg`, enriching the context with the message body under "<bodyKind>_body" when the
// body is JSON; `bodyKind` is "request" or "response".
def logWithHttpMessageBodyIfAvailable(
httpMessage: HttpMessage,
msg: String,
bodyKind: String,
): Future[Unit] =
if (
httpMessage
.header[`Content-Type`]
.map(_.contentType)
.contains(ContentTypes.`application/json`)
)
httpMessage
.entity()
// Bounded wait so a slow/streaming entity cannot stall logging indefinitely.
.toStrict(maxTimeToCollectRequest)
.map(it =>
// Parse the body so it lands in the context as structured JSON, not as one string.
withEnrichedLoggingContext(
LoggingContextOf.label[RequestEntity],
s"${bodyKind}_body" -> it.data.utf8String.parseJson,
)
.run(implicit lc => logger.info(msg))
)
// Covers both toStrict timeouts and parseJson failures raised inside the map above.
.recover { case ex =>
logger.error("Failed to extract body for logging", ex)
}
else Future.successful(logger.info(msg))
logRequestResponseHelper(
(request, remoteAddress) =>
logWithHttpMessageBodyIfAvailable(
request,
mkRequestLogMsg(request, remoteAddress),
"request",
),
httpResponse =>
logWithHttpMessageBodyIfAvailable(
httpResponse,
mkResponseLogMsg(httpResponse),
"response",
),
)
}

// Logs the incoming request and the outgoing response without touching the bodies.
def logRequestAndResultSimple(implicit
    lc: LoggingContextOf[InstanceUUID with RequestID]
): Directive0 =
  logRequestResponseHelper(
    (req, addr) => Future.successful(logger.info(mkRequestLogMsg(req, addr))),
    resp => Future.successful(logger.info(mkResponseLogMsg(resp))),
  )

// Selects, per the service configuration, whether bodies are included in the log context.
// `shouldLogHttpBodies` is an immutable constructor parameter, so the branch is stable.
val logRequestAndResultFn: LoggingContextOf[InstanceUUID with RequestID] => Directive0 =
  lc =>
    if (shouldLogHttpBodies) logJsonRequestAndResult(lc)
    else logRequestAndResultSimple(lc)

// Entry point used by the routes below; delegates to the configured logging directive.
def logRequestAndResult(implicit lc: LoggingContextOf[InstanceUUID with RequestID]): Directive0 =
  logRequestAndResultFn.apply(lc)

def all(implicit
lc0: LoggingContextOf[InstanceUUID],
metrics: Metrics,
): Route = extractClientIP & extractRequest apply { (remoteAddress, req) =>
): Route = extractRequest apply { req =>
implicit val lc: LoggingContextOf[InstanceUUID with RequestID] =
extendWithRequestIdLogCtx(identity)(lc0)
import metrics.daml.HttpJsonApi._
def withTimer(timer: Timer) =
Directive[Unit] { (fn: Unit => Route) =>
logger.info(s"Incoming request on ${req.uri} from $remoteAddress")
fn(()).andThen(res => Timed.future(timer, res))
val markThroughputAndLogProcessingTime: Directive0 = Directive { (fn: Unit => Route) =>
val t0 = System.nanoTime
metrics.daml.HttpJsonApi.httpRequestThroughput.mark()
fn(()).andThen { res =>
res.onComplete(_ => logger.trace(s"Processed request after ${System.nanoTime() - t0}ns"))
res
}
}
// As futures are eager it is best to start the timer sooner rather than later
// to get accurate timings. This is also consistent with the implementation above,
// which logs the processing time.
def withTimer(timer: Timer) = Directive { (fn: Unit => Route) => ctx =>
Timed.future(timer, fn(())(ctx))
}
def path[L](pm: PathMatcher[L]) =
server.Directives.path(pm) & markThroughputAndLogProcessingTime & logRequestAndResult
val withCmdSubmitTimer: Directive0 = withTimer(commandSubmissionTimer)
val withFetchTimer: Directive0 = withTimer(fetchTimer)
concat(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ import scalaz._
import java.nio.file.Path
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{Await, ExecutionContext, Future}
import ch.qos.logback.classic.{Level => LogLevel}

object HttpService {

Expand Down Expand Up @@ -179,6 +180,7 @@ object HttpService {
healthService,
encoder,
decoder,
logLevel.exists(!_.isGreaterOrEqual(LogLevel.INFO)), // enabled when the level is below INFO, i.e. DEBUG or TRACE
)

websocketService = new WebSocketService(
Expand Down
1 change: 1 addition & 0 deletions libs-scala/contextualized-logging/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ da_scala_library(
scala_deps = [
"@maven//:com_typesafe_akka_akka_actor",
"@maven//:com_typesafe_akka_akka_stream",
"@maven//:io_spray_spray_json",
],
scalacopts = ["-Xsource:2.13"],
tags = ["maven_coordinates=com.daml:contextualized-logging:__VERSION__"],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,49 @@ package com.daml.logging

import com.daml.logging.entries.LoggingValue
import com.fasterxml.jackson.core.JsonGenerator
import spray.json.{
JsArray,
JsBoolean,
JsFalse,
JsNull,
JsNumber,
JsObject,
JsString,
JsTrue,
JsValue,
}

private[logging] object LoggingValueSerializer {
/** Recursively streams a spray-json AST into the given Jackson generator, mapping each
  * JsValue node onto the corresponding JsonGenerator write call.
  */
def writeJsValue(jsValue: JsValue, generator: JsonGenerator): Unit = {
  def emit(value: JsValue): Unit =
    value match {
      case JsNull =>
        generator.writeNull()
      case JsTrue =>
        generator.writeBoolean(true)
      case JsFalse =>
        generator.writeBoolean(false)
      // NOTE(review): JsTrue and JsFalse are the only JsBoolean instances in spray-json,
      // so this extractor case is unreachable; kept to mirror the original match.
      case JsBoolean(b) =>
        generator.writeBoolean(b)
      case JsNumber(n) =>
        // Write as an exact BigDecimal, avoiding any double-precision rounding.
        generator.writeNumber(n.bigDecimal)
      case JsString(s) =>
        generator.writeString(s)
      case JsObject(fields) =>
        generator.writeStartObject()
        for ((name, field) <- fields) {
          generator.writeFieldName(name)
          emit(field)
        }
        generator.writeEndObject()
      case JsArray(items) =>
        generator.writeStartArray()
        items.foreach(emit)
        generator.writeEndArray()
    }
  emit(jsValue)
}

def writeValue(value: LoggingValue, generator: JsonGenerator): Unit = {
value match {
case LoggingValue.Empty =>
Expand All @@ -25,6 +66,8 @@ private[logging] object LoggingValueSerializer {
generator.writeStartArray()
sequence.foreach(writeValue(_, generator))
generator.writeEndArray()
case LoggingValue.OfJson(jsValue) =>
writeJsValue(jsValue, generator)
case LoggingValue.Nested(entries) =>
generator.writeStartObject()
new LoggingMarker(entries.contents).writeTo(generator)
Expand Down
3 changes: 3 additions & 0 deletions libs-scala/logging-entries/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@ load(
da_scala_library(
name = "logging-entries",
srcs = glob(["src/main/scala/**/*.scala"]),
scala_deps = [
"@maven//:io_spray_spray_json",
],
scalacopts = ["-Xsource:2.13"],
tags = ["maven_coordinates=com.daml:logging-entries:__VERSION__"],
visibility = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@

package com.daml.logging.entries

import spray.json.JsValue

import scala.language.implicitConversions

sealed trait LoggingValue
Expand Down Expand Up @@ -30,6 +32,8 @@ object LoggingValue {

final case class Nested(entries: LoggingEntries) extends LoggingValue

final case class OfJson(json: JsValue) extends LoggingValue

@inline
implicit def from[T](value: T)(implicit toLoggingValue: ToLoggingValue[T]): LoggingValue =
toLoggingValue.toLoggingValue(value)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@

package com.daml.logging.entries

import spray.json.JsValue

import java.time.{Duration, Instant}

trait ToLoggingValue[-T] {
Expand All @@ -13,6 +15,8 @@ object ToLoggingValue {
// This is not implicit because we only want to expose it for specific types.
val ToStringToLoggingValue: ToLoggingValue[Any] = value => LoggingValue.OfString(value.toString)

implicit val `JsValue to LoggingValue`: ToLoggingValue[JsValue] = LoggingValue.OfJson(_)

implicit val `String to LoggingValue`: ToLoggingValue[String] = LoggingValue.OfString(_)

implicit val `Boolean to LoggingValue`: ToLoggingValue[Boolean] = {
Expand Down

0 comments on commit 6abb903

Please sign in to comment.