Merge pull request #2951 from DataDog/tyler/kafka-streams-latest
Fix instrumentation for Kafka Streams 2.6+
Showing 4 changed files with 231 additions and 38 deletions.
212 changes: 212 additions & 0 deletions
...agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy
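Of the four files in this commit, the one reproduced below is the new latest-dependency test: it drives a message through an embedded Kafka broker and a Kafka Streams topology and asserts that the agent links the produce, streams, and consume spans into connected traces.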
@@ -0,0 +1,212 @@
import datadog.trace.agent.test.AgentTestRunner
import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags
import datadog.trace.bootstrap.instrumentation.api.Tags
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.KafkaStreams
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.StreamsConfig
import org.apache.kafka.streams.kstream.KStream
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.kstream.ValueMapper
import org.junit.ClassRule
import org.springframework.kafka.core.DefaultKafkaConsumerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory
import org.springframework.kafka.core.KafkaTemplate
import org.springframework.kafka.listener.ContainerProperties
import org.springframework.kafka.listener.KafkaMessageListenerContainer
import org.springframework.kafka.listener.MessageListener
import org.springframework.kafka.test.EmbeddedKafkaBroker
import org.springframework.kafka.test.rule.EmbeddedKafkaRule
import org.springframework.kafka.test.utils.ContainerTestUtils
import org.springframework.kafka.test.utils.KafkaTestUtils
import spock.lang.Shared

import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.TimeUnit

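// End-to-end check: a message produced to test.pending flows through a Kafka
// Streams topology that lowercases it into test.processed, where a listener
// receives it; the assertions below verify the spans those steps emit and how
// they are linked together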
class KafkaStreamsTest extends AgentTestRunner {
  static final STREAM_PENDING = "test.pending"
  static final STREAM_PROCESSED = "test.processed"

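  // one in-process broker (the second argument enables controlled shutdown)
  // with both test topics created up front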
  @Shared
  @ClassRule
  EmbeddedKafkaRule kafkaRule = new EmbeddedKafkaRule(1, true, STREAM_PENDING, STREAM_PROCESSED)
  @Shared
  EmbeddedKafkaBroker embeddedKafka = kafkaRule.embeddedKafka

  def "test kafka produce and consume with streams in-between"() {
    setup:
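    // streams config: reuse the embedded broker's producer properties and
    // default both key and value serdes to String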
    def config = new Properties()
    def producerProps = KafkaTestUtils.producerProps(embeddedKafka.getBrokersAsString())
    config.putAll(producerProps)
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-application")
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName())
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName())

    // CONFIGURE CONSUMER
    def consumerFactory = new DefaultKafkaConsumerFactory<String, String>(KafkaTestUtils.consumerProps("sender", "false", embeddedKafka))

    def consumerContainer = new KafkaMessageListenerContainer<>(consumerFactory, new ContainerProperties(STREAM_PROCESSED))

    // create a thread safe queue to store the processed message
    def records = new LinkedBlockingQueue<ConsumerRecord<String, String>>()

    // setup a Kafka message listener
    consumerContainer.setupMessageListener(new MessageListener<String, String>() {
      @Override
      void onMessage(ConsumerRecord<String, String> record) {
        // ensure consistent ordering of traces
        // this is the last processing step so we should see 3 traces here
        TEST_WRITER.waitForTraces(3)
        TEST_TRACER.activeSpan().setTag("testing", 123)
        records.add(record)
      }
    })

    // start the container and underlying message listener
    consumerContainer.start()

    // wait until the container has the required number of assigned partitions
    ContainerTestUtils.waitForAssignment(consumerContainer, embeddedKafka.getPartitionsPerTopic())

    // CONFIGURE PROCESSOR
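    // topology under test: read test.pending, lowercase each value, write the
    // result to test.processed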
    StreamsBuilder builder = new StreamsBuilder()
    KStream<String, String> textLines = builder.stream(STREAM_PENDING)
    def values = textLines
      .mapValues(new ValueMapper<String, String>() {
        @Override
        String apply(String textLine) {
          TEST_WRITER.waitForTraces(2) // ensure consistent ordering of traces
          TEST_TRACER.activeSpan().setTag("asdf", "testing")
          return textLine.toLowerCase()
        }
      })

    def producer = Produced.with(Serdes.String(), Serdes.String())
    values.to(STREAM_PROCESSED, producer)
    KafkaStreams streams = new KafkaStreams(builder.build(), config)
    streams.start()

    // CONFIGURE PRODUCER
    def producerFactory = new DefaultKafkaProducerFactory<String, String>(producerProps)
    def kafkaTemplate = new KafkaTemplate<String, String>(producerFactory)

    when:
    String greeting = "TESTING TESTING 123!"
    kafkaTemplate.send(STREAM_PENDING, greeting)

    then:
    // check that the message was received
    def received = records.poll(10, TimeUnit.SECONDS)
    received.value() == greeting.toLowerCase()
    received.key() == null

    assertTraces(4) {
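      // four traces, in write order:
      //   0 - kafka.produce to test.pending (the KafkaTemplate send)
      //   1 - kafka.consume of test.pending
      //   2 - the streams trace: kafka.consume of test.pending plus kafka.produce to test.processed
      //   3 - kafka.consume of test.processed (the listener container above)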
      trace(1) {
        // PRODUCER span 0
        span {
          serviceName "kafka"
          operationName "kafka.produce"
          resourceName "Produce Topic $STREAM_PENDING"
          spanType "queue"
          errored false
          parent()
          tags {
            "$Tags.COMPONENT" "java-kafka"
            "$Tags.SPAN_KIND" Tags.SPAN_KIND_PRODUCER
            defaultTags()
          }
        }
      }
      trace(1) {
        // CONSUMER span 0
        span {
          serviceName "kafka"
          operationName "kafka.consume"
          resourceName "Consume Topic $STREAM_PENDING"
          spanType "queue"
          errored false
          childOf trace(0)[0]
          tags {
            "$Tags.COMPONENT" "java-kafka"
            "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER
            "$InstrumentationTags.PARTITION" { it >= 0 }
            "$InstrumentationTags.OFFSET" 0
            "$InstrumentationTags.RECORD_QUEUE_TIME_MS" { it >= 0 }
            defaultTags(true)
          }
        }
      }
      trace(2) {
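        // both streams spans share one trace: the consume span continues trace 0's
        // producer context and the produce span is its child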
        sortSpansByStart()

        // STREAMING span 0
        span {
          serviceName "kafka"
          operationName "kafka.consume"
          resourceName "Consume Topic $STREAM_PENDING"
          spanType "queue"
          errored false
          childOf trace(0)[0]

          tags {
            "$Tags.COMPONENT" "java-kafka"
            "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER
            "$InstrumentationTags.PARTITION" { it >= 0 }
            "$InstrumentationTags.OFFSET" 0
            "asdf" "testing"
            defaultTags(true)
          }
        }

        // STREAMING span 1
        span {
          serviceName "kafka"
          operationName "kafka.produce"
          resourceName "Produce Topic $STREAM_PROCESSED"
          spanType "queue"
          errored false
          childOf span(0)

          tags {
            "$Tags.COMPONENT" "java-kafka"
            "$Tags.SPAN_KIND" Tags.SPAN_KIND_PRODUCER
            defaultTags()
          }
        }
      }
      trace(1) {
        // CONSUMER span 0
        span {
          serviceName "kafka"
          operationName "kafka.consume"
          resourceName "Consume Topic $STREAM_PROCESSED"
          spanType "queue"
          errored false
          childOf trace(2)[0]
          tags {
            "$Tags.COMPONENT" "java-kafka"
            "$Tags.SPAN_KIND" Tags.SPAN_KIND_CONSUMER
            "$InstrumentationTags.PARTITION" { it >= 0 }
            "$InstrumentationTags.OFFSET" 0
            "$InstrumentationTags.RECORD_QUEUE_TIME_MS" { it >= 0 }
            "testing" 123
            defaultTags(true)
          }
        }
      }
    }

    def headers = received.headers()
    headers.iterator().hasNext()
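    // the ids injected into the record headers must match trace 2's first span,
    // which is what ties the final consumer trace back to the streams trace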
    new String(headers.headers("x-datadog-trace-id").iterator().next().value()) == "${TEST_WRITER[2][0].traceId}"
    new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "${TEST_WRITER[2][0].spanId}"

    cleanup:
    producerFactory?.destroy()
    streams?.close()
    consumerContainer?.stop()
  }
}