-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: support confluent-kafka-go v1 and v2 (#320)
Requires: DataDog/dd-trace-go#2907 --------- Co-authored-by: Dario Castañé <[email protected]> Co-authored-by: Romain Marcadier <[email protected]> Co-authored-by: Romain Marcadier <[email protected]>
- Loading branch information
1 parent
bfd6900
commit 6652139
Showing
14 changed files
with
1,441 additions
and
12 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Large diffs are not rendered by default.
Oops, something went wrong.
19 changes: 19 additions & 0 deletions
19
_integration-tests/tests/confluent-kafka-go.v1/gen_test.go
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
146 changes: 146 additions & 0 deletions
146
_integration-tests/tests/confluent-kafka-go.v1/kafka.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,146 @@ | ||
// Unless explicitly stated otherwise all files in this repository are licensed | ||
// under the Apache License Version 2.0. | ||
// This product includes software developed at Datadog (https://www.datadoghq.com/). | ||
// Copyright 2023-present Datadog, Inc. | ||
|
||
//go:build integration && !windows | ||
|
||
package kafka | ||
|
||
import ( | ||
"context" | ||
"strings" | ||
"testing" | ||
"time" | ||
|
||
"github.com/confluentinc/confluent-kafka-go/kafka" | ||
"github.com/stretchr/testify/require" | ||
kafkatest "github.com/testcontainers/testcontainers-go/modules/kafka" | ||
|
||
"datadoghq.dev/orchestrion/_integration-tests/utils" | ||
"datadoghq.dev/orchestrion/_integration-tests/validator/trace" | ||
) | ||
|
||
// Shared fixtures for the produce/consume round trip: one topic and one
// consumer group, both named "gotest", always using partition 0.
// Taken by address below (kafka.TopicPartition wants *string), hence vars
// rather than consts.
var (
	topic         = "gotest"
	consumerGroup = "gotest"
	partition     = int32(0)
)
|
||
// TestCase wires a disposable Kafka broker (via testcontainers) into the
// integration-test harness contract: Setup / Run / Teardown / ExpectedTraces.
type TestCase struct {
	container *kafkatest.KafkaContainer // testcontainers-managed broker, terminated in Teardown
	addr      []string                  // broker bootstrap addresses reported by the container
}
|
||
func (tc *TestCase) Setup(t *testing.T) { | ||
utils.SkipIfProviderIsNotHealthy(t) | ||
container, addr := utils.StartKafkaTestContainer(t) | ||
tc.container = container | ||
tc.addr = []string{addr} | ||
} | ||
|
||
func (tc *TestCase) Run(t *testing.T) { | ||
tc.produceMessage(t) | ||
tc.consumeMessage(t) | ||
} | ||
|
||
func (tc *TestCase) kafkaBootstrapServers() string { | ||
return strings.Join(tc.addr, ",") | ||
} | ||
|
||
func (tc *TestCase) produceMessage(t *testing.T) { | ||
t.Helper() | ||
|
||
cfg := &kafka.ConfigMap{ | ||
"bootstrap.servers": tc.kafkaBootstrapServers(), | ||
"go.delivery.reports": true, | ||
} | ||
delivery := make(chan kafka.Event, 1) | ||
|
||
producer, err := kafka.NewProducer(cfg) | ||
require.NoError(t, err, "failed to create producer") | ||
defer func() { | ||
<-delivery | ||
producer.Close() | ||
}() | ||
|
||
err = producer.Produce(&kafka.Message{ | ||
TopicPartition: kafka.TopicPartition{ | ||
Topic: &topic, | ||
Partition: partition, | ||
}, | ||
Key: []byte("key2"), | ||
Value: []byte("value2"), | ||
}, delivery) | ||
require.NoError(t, err, "failed to send message") | ||
} | ||
|
||
func (tc *TestCase) consumeMessage(t *testing.T) { | ||
t.Helper() | ||
|
||
cfg := &kafka.ConfigMap{ | ||
"group.id": consumerGroup, | ||
"bootstrap.servers": tc.kafkaBootstrapServers(), | ||
"fetch.wait.max.ms": 500, | ||
"socket.timeout.ms": 1500, | ||
"session.timeout.ms": 1500, | ||
"enable.auto.offset.store": false, | ||
} | ||
c, err := kafka.NewConsumer(cfg) | ||
require.NoError(t, err, "failed to create consumer") | ||
defer c.Close() | ||
|
||
err = c.Assign([]kafka.TopicPartition{ | ||
{Topic: &topic, Partition: 0}, | ||
}) | ||
require.NoError(t, err) | ||
|
||
m, err := c.ReadMessage(3000 * time.Millisecond) | ||
require.NoError(t, err) | ||
|
||
_, err = c.CommitMessage(m) | ||
require.NoError(t, err) | ||
|
||
require.Equal(t, "key2", string(m.Key)) | ||
} | ||
|
||
func (tc *TestCase) Teardown(t *testing.T) { | ||
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) | ||
defer cancel() | ||
|
||
require.NoError(t, tc.container.Terminate(ctx)) | ||
} | ||
|
||
// ExpectedTraces describes the trace topology the validator must observe: a
// kafka.produce producer span with a kafka.consume consumer span as its
// child, both tagged with the confluent-kafka-go component.
func (*TestCase) ExpectedTraces() trace.Traces {
	return trace.Traces{
		{
			Tags: map[string]any{
				"name":     "kafka.produce",
				"type":     "queue",
				"service":  "kafka",
				"resource": "Produce Topic gotest",
			},
			Meta: map[string]string{
				"span.kind":        "producer",
				"component":        "confluentinc/confluent-kafka-go/kafka",
				"messaging.system": "kafka",
			},
			// The consume span must be a child of the produce span, which
			// demonstrates trace-context propagation through the message.
			Children: trace.Traces{
				{
					Tags: map[string]any{
						"name":     "kafka.consume",
						"type":     "queue",
						"service":  "kafka",
						"resource": "Consume Topic gotest",
					},
					Meta: map[string]string{
						"span.kind":                         "consumer",
						"component":                         "confluentinc/confluent-kafka-go/kafka",
						"messaging.system":                  "kafka",
						"messaging.kafka.bootstrap.servers": "localhost",
					},
				},
			},
		},
	}
}
26 changes: 26 additions & 0 deletions
26
_integration-tests/tests/confluent-kafka-go.v1/skip_windows.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
// Unless explicitly stated otherwise all files in this repository are licensed | ||
// under the Apache License Version 2.0. | ||
// This product includes software developed at Datadog (https://www.datadoghq.com/). | ||
// Copyright 2023-present Datadog, Inc. | ||
|
||
//go:build integration && windows | ||
|
||
package kafka | ||
|
||
import ( | ||
"testing" | ||
|
||
"datadoghq.dev/orchestrion/_integration-tests/validator/trace" | ||
) | ||
|
||
type skip struct{} | ||
|
||
func (skip) Setup(t *testing.T) { | ||
t.Skip("skipping test since confluent-kafka-go requires extra setup to build on Windows: https://github.com/confluentinc/confluent-kafka-go/issues/889") | ||
} | ||
|
||
func (skip) Run(t *testing.T) {} | ||
func (skip) Teardown(t *testing.T) {} | ||
func (skip) ExpectedTraces() trace.Traces { return nil } | ||
|
||
type TestCase = skip |
19 changes: 19 additions & 0 deletions
19
_integration-tests/tests/confluent-kafka-go.v2/gen_test.go
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
146 changes: 146 additions & 0 deletions
146
_integration-tests/tests/confluent-kafka-go.v2/kafka.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,146 @@ | ||
// Unless explicitly stated otherwise all files in this repository are licensed | ||
// under the Apache License Version 2.0. | ||
// This product includes software developed at Datadog (https://www.datadoghq.com/). | ||
// Copyright 2023-present Datadog, Inc. | ||
|
||
//go:build integration && !windows | ||
|
||
package kafka | ||
|
||
import ( | ||
"context" | ||
"strings" | ||
"testing" | ||
"time" | ||
|
||
"github.com/confluentinc/confluent-kafka-go/v2/kafka" | ||
"github.com/stretchr/testify/require" | ||
kafkatest "github.com/testcontainers/testcontainers-go/modules/kafka" | ||
|
||
"datadoghq.dev/orchestrion/_integration-tests/utils" | ||
"datadoghq.dev/orchestrion/_integration-tests/validator/trace" | ||
) | ||
|
||
// Shared fixtures for the produce/consume round trip: one topic and one
// consumer group, both named "gotest", always using partition 0.
// Taken by address below (kafka.TopicPartition wants *string), hence vars
// rather than consts.
var (
	topic         = "gotest"
	consumerGroup = "gotest"
	partition     = int32(0)
)
|
||
// TestCase wires a disposable Kafka broker (via testcontainers) into the
// integration-test harness contract: Setup / Run / Teardown / ExpectedTraces.
type TestCase struct {
	container *kafkatest.KafkaContainer // testcontainers-managed broker, terminated in Teardown
	addr      []string                  // broker bootstrap addresses reported by the container
}
|
||
func (tc *TestCase) Setup(t *testing.T) { | ||
utils.SkipIfProviderIsNotHealthy(t) | ||
container, addr := utils.StartKafkaTestContainer(t) | ||
tc.container = container | ||
tc.addr = []string{addr} | ||
} | ||
|
||
func (tc *TestCase) Run(t *testing.T) { | ||
tc.produceMessage(t) | ||
tc.consumeMessage(t) | ||
} | ||
|
||
func (tc *TestCase) kafkaBootstrapServers() string { | ||
return strings.Join(tc.addr, ",") | ||
} | ||
|
||
func (tc *TestCase) produceMessage(t *testing.T) { | ||
t.Helper() | ||
|
||
cfg := &kafka.ConfigMap{ | ||
"bootstrap.servers": tc.kafkaBootstrapServers(), | ||
"go.delivery.reports": true, | ||
} | ||
delivery := make(chan kafka.Event, 1) | ||
|
||
producer, err := kafka.NewProducer(cfg) | ||
require.NoError(t, err, "failed to create producer") | ||
defer func() { | ||
<-delivery | ||
producer.Close() | ||
}() | ||
|
||
err = producer.Produce(&kafka.Message{ | ||
TopicPartition: kafka.TopicPartition{ | ||
Topic: &topic, | ||
Partition: partition, | ||
}, | ||
Key: []byte("key2"), | ||
Value: []byte("value2"), | ||
}, delivery) | ||
require.NoError(t, err, "failed to send message") | ||
} | ||
|
||
func (tc *TestCase) consumeMessage(t *testing.T) { | ||
t.Helper() | ||
|
||
cfg := &kafka.ConfigMap{ | ||
"group.id": consumerGroup, | ||
"bootstrap.servers": tc.kafkaBootstrapServers(), | ||
"fetch.wait.max.ms": 500, | ||
"socket.timeout.ms": 1500, | ||
"session.timeout.ms": 1500, | ||
"enable.auto.offset.store": false, | ||
} | ||
c, err := kafka.NewConsumer(cfg) | ||
require.NoError(t, err, "failed to create consumer") | ||
defer c.Close() | ||
|
||
err = c.Assign([]kafka.TopicPartition{ | ||
{Topic: &topic, Partition: 0}, | ||
}) | ||
require.NoError(t, err) | ||
|
||
m, err := c.ReadMessage(3000 * time.Millisecond) | ||
require.NoError(t, err) | ||
|
||
_, err = c.CommitMessage(m) | ||
require.NoError(t, err) | ||
|
||
require.Equal(t, "key2", string(m.Key)) | ||
} | ||
|
||
func (tc *TestCase) Teardown(t *testing.T) { | ||
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) | ||
defer cancel() | ||
|
||
require.NoError(t, tc.container.Terminate(ctx)) | ||
} | ||
|
||
// ExpectedTraces describes the trace topology the validator must observe: a
// kafka.produce producer span with a kafka.consume consumer span as its
// child, both tagged with the confluent-kafka-go v2 component.
func (*TestCase) ExpectedTraces() trace.Traces {
	return trace.Traces{
		{
			Tags: map[string]any{
				"name":     "kafka.produce",
				"type":     "queue",
				"service":  "kafka",
				"resource": "Produce Topic gotest",
			},
			Meta: map[string]string{
				"span.kind":        "producer",
				"component":        "confluentinc/confluent-kafka-go/kafka.v2",
				"messaging.system": "kafka",
			},
			// The consume span must be a child of the produce span, which
			// demonstrates trace-context propagation through the message.
			Children: trace.Traces{
				{
					Tags: map[string]any{
						"name":     "kafka.consume",
						"type":     "queue",
						"service":  "kafka",
						"resource": "Consume Topic gotest",
					},
					Meta: map[string]string{
						"span.kind":                         "consumer",
						"component":                         "confluentinc/confluent-kafka-go/kafka.v2",
						"messaging.system":                  "kafka",
						"messaging.kafka.bootstrap.servers": "localhost",
					},
				},
			},
		},
	}
}
Oops, something went wrong.