Add some traces for our own query process
Lazy val the rootmost span, no point in regenerating that

Author: @johanoskarsson
Fixes #56
URL: #56
johanoskarsson committed Jul 2, 2012
1 parent d01971f commit 8f6bdcd
Showing 4 changed files with 15 additions and 3 deletions.
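
Two changes, taken together: the query-side conversion methods (toThrift, toTraceSummary, toTimeline, toSpanDepths), the two Cassie codecs, and TimeSkewAdjuster.adjust now drop Finagle trace annotations so Zipkin's own query path shows up in traces, and getRootMostSpan becomes a lazy val so the root-most span is computed at most once per Trace instance. A minimal sketch of the def vs lazy val difference, using a made-up Example class rather than the real Trace:

// Sketch only: shows why lazy val avoids recomputation; not the real Trace class.
case class Example(items: Seq[Int]) {
  // Recomputed on every call.
  def maxAsDef: Option[Int] = {
    println("computing (def)")
    items.reduceOption(_ max _)
  }

  // Computed on first access, then cached for the lifetime of this instance.
  lazy val maxAsLazyVal: Option[Int] = {
    println("computing (lazy val)")
    items.reduceOption(_ max _)
  }
}

object LazyValDemo extends App {
  val e = Example(Seq(3, 1, 2))
  e.maxAsDef; e.maxAsDef           // "computing (def)" printed twice
  e.maxAsLazyVal; e.maxAsLazyVal   // "computing (lazy val)" printed once
}

Because Scala's uniform access principle lets a def become a lazy val without changing call sites, callers of getRootMostSpan need no updates.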
@@ -23,6 +23,7 @@ import com.twitter.zipkin.query.conversions.TraceToTimeline
import com.twitter.logging.Logger
import java.nio.ByteBuffer
import com.twitter.zipkin.adapter.ThriftAdapter
import com.twitter.finagle.tracing.{Trace => FTrace}

/**
* Represents a trace, a bundle of spans.
@@ -69,7 +70,7 @@ case class Trace(spans: Seq[Span]) {
* from the root service, then we want the one just below that.
* FIXME if there are holes in the trace this might not return the correct span
*/
def getRootMostSpan: Option[Span] = {
lazy val getRootMostSpan: Option[Span] = {
getRootSpan.orElse {
val idSpan = getIdToSpanMap
spans.headOption.map { s =>
@@ -134,6 +135,7 @@
}

def toThrift: gen.Trace = {
FTrace.record("toThrift")
gen.Trace(spans.map { ThriftAdapter(_) })
}

@@ -142,12 +144,14 @@ case class Trace(spans: Seq[Span]) {
* cannot construct a trace summary. Could be that we have no spans.
*/
def toTraceSummary: Option[TraceSummary] = {
FTrace.record("toTraceSummary")
for (traceId <- id; startEnd <- getStartAndEndTimestamp)
yield TraceSummary(traceId, startEnd.start, startEnd.end, (startEnd.end - startEnd.start).toInt,
serviceCounts, endpoints.toList)
}

def toTimeline: Option[gen.TraceTimeline] = {
FTrace.record("toTimeline")
traceToTimeline.toTraceTimeline(this)
}

@@ -161,10 +165,10 @@ case class Trace(spans: Seq[Span]) {
* @return span id -> depth in the tree
*/
def toSpanDepths: Option[Map[Long, Int]] = {
FTrace.record("toSpanDepths")
getRootMostSpan match {
case None => return None
case Some(s) => {
// TODO we should cache this rootmost span tree between operations
val spanTree = getSpanTree(s, getIdToChildrenMap)
Some(spanTree.depths(1))
}
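The import is renamed to FTrace because this file already defines its own Trace case class; the record calls annotate whatever Finagle trace context the query request is running in. A small, self-contained sketch of the same pattern (it assumes finagle-core on the classpath; QueryResult and toSummary are made up for illustration):

// Sketch of the annotation pattern used above, not part of Zipkin itself.
import com.twitter.finagle.tracing.{Trace => FTrace}

case class QueryResult(rows: Seq[String]) {
  def toSummary: Option[String] = {
    FTrace.record("toSummary")   // attaches a string annotation to the current span, if any
    rows.headOption
  }
}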
@@ -19,6 +19,7 @@ package com.twitter.zipkin.query.adjusters
import com.twitter.zipkin.gen
import scala.collection.Map
import com.twitter.zipkin.common._
import com.twitter.finagle.tracing.{Trace => FTrace}


class TimeSkewAdjuster extends Adjuster {
@@ -30,7 +31,8 @@ class TimeSkewAdjuster extends Adjuster {
* Adjusts Spans timestamps so that each child happens after their parents.
* This is to counteract clock skew on servers, we want the Trace to happen in order.
*/
def adjust(trace: Trace) : Trace = {
def adjust(trace: Trace): Trace = {
FTrace.record("timeskew.adjust")
trace.getRootSpan match {
case None => return trace // no root span found, returning as is
case Some(s) => {
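The change to the adjust signature is whitespace only; the substantive change is the timeskew.adjust annotation. As background for what the adjuster itself does, here is a purely illustrative sketch of the clock-skew idea described in the comment above; it is not the real TimeSkewAdjuster algorithm:

object SkewSketch {
  // Hypothetical, simplified span: just ids and timestamps.
  case class SimpleSpan(id: Long, parentId: Option[Long], start: Long, end: Long)

  // If a child appears to start before its parent, assume clock skew and
  // shift the child forward so it starts no earlier than the parent.
  def unskew(parent: SimpleSpan, child: SimpleSpan): SimpleSpan =
    if (child.start < parent.start) {
      val shift = parent.start - child.start
      child.copy(start = child.start + shift, end = child.end + shift)
    } else child
}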
@@ -19,12 +19,14 @@ import java.nio.ByteBuffer
import com.twitter.cassie.codecs.Codec
import com.twitter.scrooge.{ThriftStructCodec, BinaryThriftStructSerializer, ThriftStruct}
import com.twitter.ostrich.stats.Stats
import com.twitter.finagle.tracing.Trace

class ScroogeThriftCodec[T <: ThriftStruct](structCodec: ThriftStructCodec[T]) extends Codec[T] {
val serializer = new BinaryThriftStructSerializer[T] { def codec = structCodec }

def encode(t: T): ByteBuffer = {
Stats.time("scroogecodec.serialize") {
Trace.record("scroogecodec.serialize")
val serialized = serializer.toBytes(t)
Stats.addMetric("scroogecodec.serialized", serialized.size)
b2b(serialized)
@@ -33,6 +35,7 @@ class ScroogeThriftCodec[T <: ThriftStruct](structCodec: ThriftStructCodec[T]) extends Codec[T] {

def decode(ary: ByteBuffer): T = {
Stats.time("scroogecodec.deserialize") {
Trace.record("scroogecodec.deserialize")
serializer.fromBytes(b2b(ary))
}
}
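The record call sits inside the existing Stats.time block, so every serialize and deserialize now produces both an Ostrich latency stat and, when a request is being traced, an annotation on its span. The same pattern, factored into a hypothetical helper (tracedTime is not part of Zipkin or Ostrich):

object CodecTracing {
  import com.twitter.ostrich.stats.Stats
  import com.twitter.finagle.tracing.Trace

  // Wrap a block in an Ostrich timer and drop a Finagle trace annotation inside it.
  def tracedTime[A](name: String)(work: => A): A =
    Stats.time(name) {
      Trace.record(name)
      work
    }

  // e.g. val bytes = tracedTime("scroogecodec.serialize") { serializer.toBytes(struct) }
}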
@@ -20,6 +20,7 @@ import com.twitter.cassie.codecs.Codec
import com.twitter.zipkin.util.Util
import org.iq80.snappy.Snappy
import com.twitter.ostrich.stats.Stats
import com.twitter.finagle.tracing.Trace

/**
* Cassie codec that wraps another and compresses/decompresses that data.
@@ -32,6 +33,7 @@ class SnappyCodec[T](codec: Codec[T]) extends Codec[T] {

def encode(t: T): ByteBuffer = {
Stats.time("snappycodec.compress") {
Trace.record("snappycodec.compress")
val arr = Util.getArrayFromBuffer(codec.encode(t))
val compressArr = new Array[Byte](Snappy.maxCompressedLength(arr.length))
val compressLen = Snappy.compress(arr, 0, arr.length, compressArr, 0)
Expand All @@ -42,6 +44,7 @@ class SnappyCodec[T](codec: Codec[T]) extends Codec[T] {

def decode(ary: ByteBuffer): T = {
Stats.time("snappycodec.decompress") {
Trace.record("snappycodec.decompress")
val arr = Util.getArrayFromBuffer(ary)
val uncompressedArr = Snappy.uncompress(arr, 0, arr.length)
codec.decode(ByteBuffer.wrap(uncompressedArr))
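For reference, a standalone round trip using the same org.iq80.snappy calls the codec relies on (not part of SnappyCodec; just a sanity-check sketch):

object SnappyRoundTrip extends App {
  import org.iq80.snappy.Snappy

  val original: Array[Byte] = ("zipkin " * 100).getBytes("UTF-8")

  // Compress into a worst-case sized buffer, then trim to the actual compressed length.
  val buf = new Array[Byte](Snappy.maxCompressedLength(original.length))
  val compressedLen = Snappy.compress(original, 0, original.length, buf, 0)
  val compressed = java.util.Arrays.copyOf(buf, compressedLen)

  // Decompress and verify we get the original bytes back.
  val restored = Snappy.uncompress(compressed, 0, compressed.length)
  assert(java.util.Arrays.equals(original, restored))
}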
