From 7b25137f449d8da81981499b97e093f0f0730e2b Mon Sep 17 00:00:00 2001 From: Mostafa Moradian Date: Mon, 8 Aug 2022 00:49:14 +0200 Subject: [PATCH] WIP: export schema registry client and the serdes functionality --- avro.go | 184 ++-------- bytearray.go | 35 +- bytearray_test.go | 37 -- configuration.go | 45 --- configuration_test.go | 72 ---- error_codes.go | 31 +- go.mod | 22 +- go.sum | 61 ++-- interfaces.go | 25 ++ json.go | 59 +++ jsonschema.go | 169 --------- jsonschema_test.go | 120 ------ kafka_helpers_test.go | 58 ++- module.go | 41 +-- consumer.go => reader.go | 40 +- consumer_test.go => reader_test.go | 98 ++--- schema_registry.go | 341 +++++++++++++++--- schema_registry_test.go | 184 ++++++---- scripts/helpers/schema_registry.js | 6 - scripts/test_avro.js | 133 ------- ...st_avro_named_strategy_and_magic_prefix.js | 97 ----- scripts/test_avro_no_key.js | 111 ------ scripts/test_avro_no_schema_registry.js | 81 +++++ scripts/test_avro_with_schema_registry.js | 112 ++++-- .../test_avro_with_schema_registry_no_key.js | 94 ----- scripts/test_bytes.js | 63 ++-- scripts/test_json.js | 84 +++-- scripts/test_json_with_snappy_compression.js | 102 ------ .../test_jsonschema_with_schema_registry.js | 115 +++--- scripts/test_sasl_auth.js | 59 +-- scripts/test_string.js | 117 ++++++ serde.go | 83 ----- serde_registry.go | 33 -- serde_registry_test.go | 31 -- serde_test.go | 70 ---- serdes.go | 129 +++++++ avro_test.go => serdes_test.go | 172 ++++++--- string.go | 25 +- string_test.go | 31 -- topic_test.go | 15 +- utils.go | 58 +++ producer.go => writer.go | 78 +--- producer_test.go => writer_test.go | 84 ++--- 43 files changed, 1567 insertions(+), 2038 deletions(-) delete mode 100644 bytearray_test.go delete mode 100644 configuration.go delete mode 100644 configuration_test.go create mode 100644 interfaces.go create mode 100644 json.go delete mode 100644 jsonschema.go delete mode 100644 jsonschema_test.go rename consumer.go => reader.go (89%) rename consumer_test.go => reader_test.go (75%) delete mode 100644 scripts/helpers/schema_registry.js delete mode 100644 scripts/test_avro.js delete mode 100644 scripts/test_avro_named_strategy_and_magic_prefix.js delete mode 100644 scripts/test_avro_no_key.js create mode 100644 scripts/test_avro_no_schema_registry.js delete mode 100644 scripts/test_avro_with_schema_registry_no_key.js delete mode 100644 scripts/test_json_with_snappy_compression.js create mode 100644 scripts/test_string.js delete mode 100644 serde.go delete mode 100644 serde_registry.go delete mode 100644 serde_registry_test.go delete mode 100644 serde_test.go create mode 100644 serdes.go rename avro_test.go => serdes_test.go (78%) delete mode 100644 string_test.go create mode 100644 utils.go rename producer.go => writer.go (79%) rename producer_test.go => writer_test.go (75%) diff --git a/avro.go b/avro.go index b7478aa..5c20740 100644 --- a/avro.go +++ b/avro.go @@ -1,175 +1,43 @@ package kafka -import ( - "github.com/linkedin/goavro/v2" - "github.com/riferrei/srclient" -) - -const ( - AvroSerializer string = "io.confluent.kafka.serializers.KafkaAvroSerializer" - AvroDeserializer string = "io.confluent.kafka.serializers.KafkaAvroDeserializer" -) - -// SerializeAvro serializes the given data to wire-formatted Avro binary format and returns it -// as a byte array. It uses the given version to retrieve the schema from Schema Registry, otherwise -// it uses the given schema to manually create the codec and encode the data. 
The configuration -// is used to configure the Schema Registry client. The element is used to define the subject. -// The data should be a string. -// nolint: funlen -func SerializeAvro( - configuration Configuration, topic string, data interface{}, - element Element, schema string, version int, -) ([]byte, *Xk6KafkaError) { - var bytesData []byte - if stringData, ok := data.(string); ok { - bytesData = []byte(stringData) - } else { - return nil, NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) - } - - client := SchemaRegistryClientWithConfiguration(configuration.SchemaRegistry) - - subject, subjectNameError := GetSubjectName(schema, topic, element, configuration.Producer.SubjectNameStrategy) - if subjectNameError != nil { - return nil, subjectNameError - } - - var schemaInfo *srclient.Schema - schemaID := 0 - - var xk6KafkaError *Xk6KafkaError - - if schema != "" { - // Schema is provided, so we need to create it and get the schema ID - schemaInfo, xk6KafkaError = CreateSchema(client, subject, schema, srclient.Avro) - } else { - // Schema is not provided, so we need to fetch the schema from the Schema Registry - schemaInfo, xk6KafkaError = GetSchema(client, subject, schema, srclient.Avro, version) - } - - if xk6KafkaError != nil { - logger.WithField("error", xk6KafkaError).Warn( - "Failed to create or get schema, manually encoding the data") - codec, err := goavro.NewCodec(schema) - if err != nil { - return nil, NewXk6KafkaError(failedCreateAvroCodec, - "Failed to create codec for encoding Avro", - err) - } - - avroEncodedData, _, err := codec.NativeFromTextual(bytesData) - if err != nil { - return nil, NewXk6KafkaError(failedEncodeToAvro, - "Failed to encode data into Avro", - err) - } +type AvroSerde struct { + Serdes +} - bytesData, err = codec.BinaryFromNative(nil, avroEncodedData) - if err != nil { - return nil, NewXk6KafkaError(failedEncodeAvroToBinary, - "Failed to encode Avro data into binary", - err) - } +func (*AvroSerde) Serialize(data interface{}, schema *Schema) ([]byte, error) { + jsonBytes, err := toJSONBytes(data) + if err != nil { + return nil, err } - if schemaInfo != nil { - schemaID = schemaInfo.ID() - - // Encode the data into Avro and then the wire format - avroEncodedData, _, err := schemaInfo.Codec().NativeFromTextual(bytesData) - if err != nil { - return nil, NewXk6KafkaError(failedEncodeToAvro, - "Failed to encode data into Avro", - err) - } - - bytesData, err = schemaInfo.Codec().BinaryFromNative(nil, avroEncodedData) - if err != nil { - return nil, NewXk6KafkaError(failedEncodeAvroToBinary, - "Failed to encode Avro data into binary", - err) - } + encodedData, _, err := schema.Codec().NativeFromTextual(jsonBytes) + if err != nil { + err := NewXk6KafkaError(failedToEncode, "Failed to encode data", err) + return nil, err } - return EncodeWireFormat(bytesData, schemaID), nil -} - -// DeserializeAvro deserializes the given data from wire-formatted Avro binary format and returns it -// as a byte array. It uses the given version to retrieve the schema from Schema Registry, otherwise -// it uses the given schema to manually create the codec and decode the data. The configuration -// is used to configure the Schema Registry client. The element is used to define the subject. -// The data should be a byte array. 
-// nolint: funlen -func DeserializeAvro( - configuration Configuration, topic string, data []byte, - element Element, schema string, version int, -) (interface{}, *Xk6KafkaError) { - schemaID, bytesDecodedData, err := DecodeWireFormat(data) + bytesData, err := schema.Codec().BinaryFromNative(nil, encodedData) if err != nil { - return nil, NewXk6KafkaError(failedDecodeFromWireFormat, - "Failed to remove wire format from the binary data", + err := NewXk6KafkaError(failedToEncodeToBinary, + "Failed to encode data into binary", err) + return nil, err } - var schemaInfo *srclient.Schema - var xk6KafkaError *Xk6KafkaError - var getSchemaError error - - client := SchemaRegistryClientWithConfiguration(configuration.SchemaRegistry) + return bytesData, nil +} - subject, subjectNameError := GetSubjectName(schema, topic, element, configuration.Consumer.SubjectNameStrategy) - if subjectNameError != nil { - return nil, subjectNameError +func (*AvroSerde) Deserialize(data []byte, schema *Schema) (interface{}, error) { + decodedData, _, err := schema.Codec().NativeFromBinary(data) + if err != nil { + err := NewXk6KafkaError( + failedToDecodeFromBinary, "Failed to decode data", err) + return nil, err } - // nolint: gocritic - if schema != "" { - // Schema is provided, so we need to create it and get the schema ID - schemaInfo, xk6KafkaError = CreateSchema(client, subject, schema, srclient.Avro) - } else if configuration.Consumer.UseMagicPrefix { - // Schema is not provided and no valid version flag, - // so we use te schemaID in the magic prefix - schemaInfo, getSchemaError = client.GetSchema(schemaID) - if getSchemaError != nil { - xk6KafkaError = NewXk6KafkaError(failedCreateAvroCodec, - "Failed to get schema by magic prefix", - getSchemaError) - } + if data, ok := decodedData.(map[string]interface{}); ok { + return data, nil } else { - // Schema is not provided, so we need to fetch the schema from the Schema Registry - schemaInfo, xk6KafkaError = GetSchema(client, subject, schema, srclient.Avro, version) + return nil, ErrInvalidDataType } - - if xk6KafkaError != nil { - logger.WithField("error", xk6KafkaError).Warn( - "Failed to create or get schema, manually decoding the data") - codec, err := goavro.NewCodec(schema) - if err != nil { - return nil, NewXk6KafkaError(failedCreateAvroCodec, - "Failed to create codec for decoding Avro", - err) - } - - avroDecodedData, _, err := codec.NativeFromBinary(bytesDecodedData) - if err != nil { - return nil, NewXk6KafkaError(failedDecodeAvroFromBinary, - "Failed to decode data from Avro", - err) - } - - return avroDecodedData, nil - } - - if schemaInfo != nil { - // Decode the data from Avro - avroDecodedData, _, err := schemaInfo.Codec().NativeFromBinary(bytesDecodedData) - if err != nil { - return nil, NewXk6KafkaError(failedDecodeAvroFromBinary, - "Failed to decode data from Avro", - err) - } - return avroDecodedData, nil - } - - return bytesDecodedData, nil } diff --git a/bytearray.go b/bytearray.go index aa2ca7b..308f672 100644 --- a/bytearray.go +++ b/bytearray.go @@ -1,31 +1,27 @@ package kafka -import ( - "github.com/riferrei/srclient" -) +import "github.com/riferrei/srclient" -const ( - ByteArray srclient.SchemaType = "BYTEARRAY" +type ByteArraySerde struct { + Serdes +} - ByteArraySerializer string = "org.apache.kafka.common.serialization.ByteArraySerializer" - ByteArrayDeserializer string = "org.apache.kafka.common.serialization.ByteArrayDeserializer" +const ( + Bytes srclient.SchemaType = "BYTES" ) -// SerializeByteArray serializes the given data into 
a byte array and returns it. -// If the data is not a byte array, an error is returned. The configuration, topic, element, -// schema and version are just used to conform with the interface. -func SerializeByteArray( - configuration Configuration, topic string, data interface{}, - element Element, schema string, version int, -) ([]byte, *Xk6KafkaError) { +// Serialize serializes the given data into a byte array. +func (*ByteArraySerde) Serialize(data interface{}, schema *Schema) ([]byte, error) { switch data := data.(type) { + case []byte: + return data, nil case []interface{}: arr := make([]byte, len(data)) for i, u := range data { if u, ok := u.(float64); ok { arr[i] = byte(u) } else { - return nil, NewXk6KafkaError(failedTypeCast, "Failed to cast to float64", nil) + return nil, ErrFailedTypeCast } } return arr, nil @@ -34,12 +30,7 @@ func SerializeByteArray( } } -// DeserializeByteArray deserializes the given data from a byte array and returns it. -// It just returns the data as is. The configuration, topic, element, schema and version -// are just used to conform with the interface. -func DeserializeByteArray( - configuration Configuration, topic string, data []byte, - element Element, schema string, version int, -) (interface{}, *Xk6KafkaError) { +// DeserializeByteArray returns the data as-is, because it is already a byte array. +func (*ByteArraySerde) Deserialize(data []byte, schema *Schema) (interface{}, error) { return data, nil } diff --git a/bytearray_test.go b/bytearray_test.go deleted file mode 100644 index 23f2808..0000000 --- a/bytearray_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package kafka - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -const ( - originalData string = "test" -) - -// TestSerializeByteArray tests the serialization of a byte array into binary. -func TestSerializeByteArray(t *testing.T) { - var data float64 = 98 - originalData := []interface{}{data} - result, err := SerializeByteArray(Configuration{}, "", originalData, "", "", 0) - assert.Nil(t, err) - assert.Equal(t, []byte{0x62}, result) -} - -// TestSerializeByteArrayFails tests the serialization of a byte array into binary and fails -// on invalid data type. -func TestSerializeByteArrayFails(t *testing.T) { - _, err := SerializeByteArray(Configuration{}, "", originalData, "", "", 0) - assert.NotNil(t, err) - assert.Equal(t, err.Message, "Invalid data type provided for serializer/deserializer") - assert.Equal(t, err.Code, invalidDataType) -} - -// TestDeserializeByteArray tests the deserialization of a byte array into binary. 
-func TestDeserializeByteArray(t *testing.T) { - originalData := []byte{1, 2, 3} - result, err := DeserializeByteArray(Configuration{}, "", originalData, "", "", 0) - assert.Equal(t, []byte{1, 2, 3}, result) - assert.Nil(t, err) -} diff --git a/configuration.go b/configuration.go deleted file mode 100644 index 39fc587..0000000 --- a/configuration.go +++ /dev/null @@ -1,45 +0,0 @@ -package kafka - -type ConsumerConfiguration struct { - KeyDeserializer string `json:"keyDeserializer"` - ValueDeserializer string `json:"valueDeserializer"` - SubjectNameStrategy string `json:"subjectNameStrategy"` - UseMagicPrefix bool `json:"useMagicPrefix"` -} - -type ProducerConfiguration struct { - KeySerializer string `json:"keySerializer"` - ValueSerializer string `json:"valueSerializer"` - SubjectNameStrategy string `json:"subjectNameStrategy"` -} - -type Configuration struct { - Consumer ConsumerConfiguration `json:"consumer"` - Producer ProducerConfiguration `json:"producer"` - SchemaRegistry SchemaRegistryConfiguration `json:"schemaRegistry"` -} - -// ValidateConfiguration validates the given configuration. -func ValidateConfiguration(configuration Configuration) *Xk6KafkaError { - if (Configuration{}) == configuration { - // No configuration, fallback to default - return nil - } - - if useSerializer(configuration, Key) || useSerializer(configuration, Value) { - return nil - } - - return nil -} - -// GivenCredentials returns true if the given configuration has credentials. -func GivenCredentials(configuration Configuration) bool { - if (Configuration{}) == configuration || - (SchemaRegistryConfiguration{}) == configuration.SchemaRegistry || - (BasicAuth{}) == configuration.SchemaRegistry.BasicAuth { - return false - } - return configuration.SchemaRegistry.BasicAuth.Username != "" && - configuration.SchemaRegistry.BasicAuth.Password != "" -} diff --git a/configuration_test.go b/configuration_test.go deleted file mode 100644 index 33c0c15..0000000 --- a/configuration_test.go +++ /dev/null @@ -1,72 +0,0 @@ -package kafka - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -var configuration = Configuration{ - Consumer: ConsumerConfiguration{ - KeyDeserializer: StringDeserializer, - ValueDeserializer: StringDeserializer, - }, - Producer: ProducerConfiguration{ - KeySerializer: StringSerializer, - ValueSerializer: StringSerializer, - }, - SchemaRegistry: SchemaRegistryConfiguration{ - URL: "http://localhost:8081", - BasicAuth: BasicAuth{ - Username: "username", - Password: "password", - }, - UseLatest: true, - }, -} - -// TestValidateConfiguration tests the validation of a given configuration. -func TestValidateConfiguration(t *testing.T) { - err := ValidateConfiguration(configuration) - assert.Nil(t, err) -} - -// TestValidateConfigurationFallbackToDefaults tests the validation of a given configuration -// and falls back to default on invalid configuration. -func TestValidateConfigurationFallbackToDefaults(t *testing.T) { - configuration := Configuration{} - - err := ValidateConfiguration(configuration) - assert.Nil(t, err) -} - -// TestGivenCredentials tests the validation of a given credentials. 
-func TestGivenCredentials(t *testing.T) { - configuration := Configuration{ - SchemaRegistry: SchemaRegistryConfiguration{ - URL: "http://localhost:8081", - BasicAuth: BasicAuth{ - Username: "username", - Password: "password", - }, - UseLatest: true, - }, - } - - valid := GivenCredentials(configuration) - assert.True(t, valid) -} - -// TestGivenCredentialsFails tests if credentials are given in Schema Registry config -// and fails on no auth creds. -func TestGivenCredentialsFails(t *testing.T) { - configuration := Configuration{ - SchemaRegistry: SchemaRegistryConfiguration{ - URL: "http://localhost:8081", - UseLatest: true, - }, - } - - valid := GivenCredentials(configuration) - assert.False(t, valid) -} diff --git a/error_codes.go b/error_codes.go index db19adc..1001c33 100644 --- a/error_codes.go +++ b/error_codes.go @@ -17,20 +17,22 @@ const ( dialerError errCode = 1005 noTLSConfig errCode = 1006 failedTypeCast errCode = 1007 + unsupportedOperation errCode = 1008 // serdes errors. invalidDataType errCode = 2000 failedDecodeFromWireFormat errCode = 2001 failedCreateAvroCodec errCode = 2002 - failedEncodeToAvro errCode = 2003 - failedEncodeAvroToBinary errCode = 2004 - failedDecodeAvroFromBinary errCode = 2005 + failedToEncode errCode = 2003 + failedToEncodeToBinary errCode = 2004 + failedToDecodeFromBinary errCode = 2005 failedCreateJSONSchemaCodec errCode = 2006 failedUnmarshalJSON errCode = 2007 failedValidateJSON errCode = 2008 failedEncodeToJSON errCode = 2009 failedDecodeJSONFromBinary errCode = 2010 failedToUnmarshalSchema errCode = 2011 + invalidSerdeType errCode = 2012 // producer. failedWriteMessage errCode = 3000 @@ -48,9 +50,11 @@ const ( failedAppendCaCertFile errCode = 5004 // schema registry. - messageTooShort errCode = 6000 - schemaNotFound errCode = 6001 - schemaCreationFailed errCode = 6002 + messageTooShort errCode = 6000 + schemaNotFound errCode = 6001 + schemaCreationFailed errCode = 6002 + failedGetSubjectName errCode = 6003 + failedConfigureSchemaRegistryClient errCode = 6004 // topics. failedGetController errCode = 7000 @@ -60,6 +64,9 @@ const ( ) var ( + // ErrUnsupported is the error returned when the operation is not supported. + ErrUnsupportedOperation = NewXk6KafkaError(unsupportedOperation, "Operation not supported", nil) + // ErrForbiddenInInitContext is used when a Kafka producer was used in the init context. ErrForbiddenInInitContext = NewXk6KafkaError( kafkaForbiddenInInitContext, @@ -72,9 +79,21 @@ var ( "Invalid data type provided for serializer/deserializer", nil) + // ErrFailedTypeCast is used when a type cast failed. + ErrFailedTypeCast = NewXk6KafkaError(failedTypeCast, "Failed to cast type", nil) + + // ErrUnknownSerdesType is used when a serdes type is not supported. + ErrUnknownSerdesType = NewXk6KafkaError(invalidSerdeType, "Unknown serdes type", nil) + // ErrNotEnoughArguments is used when a function is called with too few arguments. ErrNotEnoughArguments = errors.New("not enough arguments") + // ErrNoSchemaRegistryClient is used when a schema registry client is not configured correctly. 
+ ErrNoSchemaRegistryClient = NewXk6KafkaError( + failedConfigureSchemaRegistryClient, + "Failed to configure the schema registry client", + nil) + ErrInvalidPEMData = errors.New("tls: failed to find any PEM data in certificate input") ) diff --git a/go.mod b/go.mod index 48376de..a717540 100644 --- a/go.mod +++ b/go.mod @@ -3,24 +3,24 @@ module github.com/mostafa/xk6-kafka go 1.18 require ( - github.com/dop251/goja v0.0.0-20220705101429-189bfeb9f530 + github.com/dop251/goja v0.0.0-20220806120448-1444e6b94559 github.com/linkedin/goavro/v2 v2.11.1 github.com/riferrei/srclient v0.5.4 github.com/santhosh-tekuri/jsonschema/v5 v5.0.0 - github.com/segmentio/kafka-go v0.4.32 - github.com/sirupsen/logrus v1.8.1 - github.com/stretchr/testify v1.7.1 + github.com/segmentio/kafka-go v0.4.33 + github.com/sirupsen/logrus v1.9.0 + github.com/stretchr/testify v1.8.0 go.k6.io/k6 v0.39.0 gopkg.in/guregu/null.v3 v3.5.0 ) require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 // indirect + github.com/dlclark/regexp2 v1.7.0 // indirect github.com/fatih/color v1.13.0 // indirect github.com/go-sourcemap/sourcemap v2.1.4-0.20211119122758-180fcef48034+incompatible // indirect github.com/golang/snappy v0.0.4 // indirect - github.com/klauspost/compress v1.15.7 // indirect + github.com/klauspost/compress v1.15.9 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.12 // indirect github.com/mattn/go-isatty v0.0.14 // indirect @@ -29,14 +29,14 @@ require ( github.com/pierrec/lz4/v4 v4.1.15 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e // indirect - github.com/spf13/afero v1.8.2 // indirect + github.com/spf13/afero v1.9.2 // indirect github.com/xdg/scram v1.0.5 // indirect github.com/xdg/stringprep v1.0.3 // indirect - golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d // indirect - golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f // indirect - golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e // indirect + golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect + golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect + golang.org/x/sys v0.0.0-20220804214406-8e32c043e418 // indirect golang.org/x/text v0.3.7 // indirect - golang.org/x/time v0.0.0-20220609170525-579cf78fd858 // indirect + golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 755b880..f3df97c 100644 --- a/go.sum +++ b/go.sum @@ -51,13 +51,12 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16/II7vuEo/nHjodOg0p7+OiDpjX5t1E= github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.7.0 h1:7lJfhqlPssTb1WQx4yvTHN0uElPEv52sbaECrAQxjAo= +github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= 
-github.com/dop251/goja v0.0.0-20220516123900-4418d4575a41 h1:yRPjAkkuR/E/tsVG7QmhzEeEtD3P2yllxsT1/ftURb0= -github.com/dop251/goja v0.0.0-20220516123900-4418d4575a41/go.mod h1:TQJQ+ZNyFVvUtUEtCZxBhfWiH7RJqR3EivNmvD6Waik= -github.com/dop251/goja v0.0.0-20220705101429-189bfeb9f530 h1:936YSsrki8Z6H48PPFbATV674Gpmh444xXaX+O5wwFQ= -github.com/dop251/goja v0.0.0-20220705101429-189bfeb9f530/go.mod h1:TQJQ+ZNyFVvUtUEtCZxBhfWiH7RJqR3EivNmvD6Waik= +github.com/dop251/goja v0.0.0-20220806120448-1444e6b94559 h1:S3U65m9SN2p5CJpT3CDuqhN+rNJZXDoABYPKdQ7DOfY= +github.com/dop251/goja v0.0.0-20220806120448-1444e6b94559/go.mod h1:1jWwHOtOkEqsfX6tYsufUc7BBTuGHH2ekiJabpkN4CA= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -143,9 +142,9 @@ github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFF github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.15.7 h1:7cgTQxJCU/vy+oP/E3B9RGbQTgbiVzIJWIKOLoAsPok= github.com/klauspost/compress v1.15.7/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -174,7 +173,6 @@ github.com/onsi/ginkgo v1.14.0 h1:2mOpI4JVVPBN+WQRa0WKH2eXR+Ey+uK4n7Zj0aYpIQA= github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= github.com/oxtoacart/bpool v0.0.0-20190530202638-03653db5a59c h1:rp5dCmg/yLR3mgFuSOe4oEnDDmGLROTvMragMUXpTQw= github.com/oxtoacart/bpool v0.0.0-20190530202638-03653db5a59c/go.mod h1:X07ZCGwUbLaax7L0S3Tw4hpejzu63ZrrQiUe6W0hcy0= -github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -189,26 +187,25 @@ github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBO github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/santhosh-tekuri/jsonschema/v5 v5.0.0 h1:TToq11gyfNlrMFZiYujSekIsPd9AmsA2Bj/iv+s4JHE= github.com/santhosh-tekuri/jsonschema/v5 v5.0.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= -github.com/segmentio/kafka-go v0.4.32 h1:Ohr+9E+kDv/Ld2UPJN9hnKZRd2qgiqCmI8v2e1qlfLM= -github.com/segmentio/kafka-go v0.4.32/go.mod h1:JAPPIiY3MQIwVHj64CWOP0LsFFfQ7H0w69kuoxnMIS0= +github.com/segmentio/kafka-go v0.4.33 h1:XHYuEifMYFVCU9A2p1wJprd7xHQKS+Sn6xgBr11+30k= 
+github.com/segmentio/kafka-go v0.4.33/go.mod h1:GAjxBQJdQMB5zfNA21AhpaqOB2Mu+w3De4ni3Gbm8y0= github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e h1:zWKUYT07mGmVBH+9UgnHXd/ekCK99C8EbDSAt5qsjXE= github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e/go.mod h1:Yow6lPLSAXx2ifx470yD/nUe22Dv5vBvxK/UK9UUTVs= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= -github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= +github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/xdg/scram v1.0.5 h1:TuS0RFmt5Is5qm9Tm2SoD89OPqe4IRiFtyFY4iwWXsw= github.com/xdg/scram v1.0.5/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= -github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= github.com/xdg/stringprep v1.0.3/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -224,15 +221,15 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto 
v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -297,7 +294,9 @@ golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220531201128-c960675eff93 h1:MYimHLfoXEpOhqd/zgoA/uoXzHB86AEky4LAx5ij9xA= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM= +golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -317,8 +316,8 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8= -golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -330,7 +329,6 @@ golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -354,15 +352,17 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220702020025-31831981b65f h1:xdsejrW/0Wf2diT5CPp3XmKUNbr7Xvw8kYilQ+6qjRY= -golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo= -golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220804214406-8e32c043e418 h1:9vYwv7OjYaky/tlAeD7C4oC9EsPTlaFl1H2jS++V+ME= +golang.org/x/sys v0.0.0-20220804214406-8e32c043e418/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -375,8 +375,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220609170525-579cf78fd858 h1:Dpdu/EMxGMFgq0CeYMh4fazTD2vtlZRYE7wyynxJb9U= -golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 
+golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 h1:ftMN5LMiBFjbzleLqtoBZk7KdJwhuybIU+FckUHgoyQ= +golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -534,7 +534,6 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20220512140231-539c8e751b99/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/interfaces.go b/interfaces.go new file mode 100644 index 0000000..168cb0c --- /dev/null +++ b/interfaces.go @@ -0,0 +1,25 @@ +package kafka + +import ( + "github.com/riferrei/srclient" +) + +type Serdes interface { + Serialize(data interface{}, schema *Schema) ([]byte, error) + Deserialize(data []byte, schema *Schema) (interface{}, error) +} + +var TypesRegistry map[string]Serdes = map[string]Serdes{ + String.String(): &StringSerde{}, + Bytes.String(): &ByteArraySerde{}, + srclient.Json.String(): &JSONSerde{}, + srclient.Avro.String(): &AvroSerde{}, +} + +func GetSerdes(schemaType string) (Serdes, error) { + if serdes, ok := TypesRegistry[schemaType]; ok { + return serdes, nil + } else { + return nil, ErrUnknownSerdesType + } +} diff --git a/json.go b/json.go new file mode 100644 index 0000000..ca42fdb --- /dev/null +++ b/json.go @@ -0,0 +1,59 @@ +package kafka + +import ( + "encoding/json" +) + +type JSONSerde struct { + Serdes +} + +// Serialize serializes a JSON object as map to bytes. +func (*JSONSerde) Serialize(data interface{}, schema *Schema) ([]byte, error) { + var jsonObject []byte + if data, ok := data.(map[string]interface{}); ok { + if encodedData, err := json.Marshal(data); err == nil { + jsonObject = encodedData + } else { + return nil, err + } + } else { + return nil, ErrInvalidDataType + } + + if schema != nil { + // Validate the JSON object against the schema only if the schema is + // provided. + if err := schema.JsonSchema().Validate(data); err != nil { + err := NewXk6KafkaError(failedValidateJSON, + "Failed to validate JSON against schema", + err) + return nil, err + } + } + + return jsonObject, nil +} + +// Deserialize deserializes a map from bytes to be exported as object to JS. +func (*JSONSerde) Deserialize(data []byte, schema *Schema) (interface{}, error) { + var jsonObject interface{} + if err := json.Unmarshal(data, &jsonObject); err != nil { + return nil, NewXk6KafkaError(failedUnmarshalJSON, + "Failed to unmarshal JSON data", + err) + } + + if schema != nil { + // Validate the JSON object against the schema only if the schema is + // provided. 
+ if err := schema.JsonSchema().Validate(jsonObject); err != nil { + err := NewXk6KafkaError(failedDecodeJSONFromBinary, + "Failed to decode data from JSON", + err) + return nil, err + } + } + + return jsonObject, nil +} diff --git a/jsonschema.go b/jsonschema.go deleted file mode 100644 index 7a06b06..0000000 --- a/jsonschema.go +++ /dev/null @@ -1,169 +0,0 @@ -package kafka - -import ( - "encoding/json" - - "github.com/riferrei/srclient" - "github.com/santhosh-tekuri/jsonschema/v5" -) - -const ( - JSONSchemaSerializer string = "io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializer" - JSONSchemaDeserializer string = "io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer" -) - -// SerializeJSON serializes the data to JSON and adds the wire format to the data and -// returns the serialized data. It uses the given version to retrieve the schema from -// Schema Registry, otherwise it uses the given schema to manually create the codec and -// encode the data. The configuration is used to configure the Schema Registry client. -// The element is used to define the subject. The data should be a string. -// nolint: funlen -func SerializeJSON( - configuration Configuration, topic string, data interface{}, - element Element, schema string, version int, -) ([]byte, *Xk6KafkaError) { - var bytesData []byte - if stringData, ok := data.(string); ok { - bytesData = []byte(stringData) - } else { - return nil, NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) - } - - client := SchemaRegistryClientWithConfiguration(configuration.SchemaRegistry) - subject := topic + "-" + string(element) - var schemaInfo *srclient.Schema - schemaID := 0 - - var xk6KafkaError *Xk6KafkaError - - if schema != "" { - // Schema is provided, so we need to create it and get the schema ID - schemaInfo, xk6KafkaError = CreateSchema(client, subject, schema, srclient.Json) - } else { - // Schema is not provided, so we need to fetch the schema from the Schema Registry - schemaInfo, xk6KafkaError = GetSchema(client, subject, schema, srclient.Json, version) - } - - if xk6KafkaError != nil { - logger.WithField("error", xk6KafkaError).Warn( - "Failed to create or get schema, manually encoding the data") - codec, err := jsonschema.CompileString(subject, schema) - if err != nil { - return nil, NewXk6KafkaError(failedCreateJSONSchemaCodec, - "Failed to create codec for encoding JSON", - err) - } - - var jsonBytes interface{} - if err := json.Unmarshal(bytesData, &jsonBytes); err != nil { - return nil, NewXk6KafkaError(failedUnmarshalJSON, - "Failed to unmarshal JSON data", - err) - } - - if err := codec.Validate(jsonBytes); err != nil { - return nil, NewXk6KafkaError(failedValidateJSON, - "Failed to validate JSON data", - err) - } - } - - if schemaInfo != nil { - schemaID = schemaInfo.ID() - - var jsonBytes interface{} - if err := json.Unmarshal(bytesData, &jsonBytes); err != nil { - return nil, NewXk6KafkaError(failedUnmarshalJSON, - "Failed to unmarshal JSON data", - err) - } - - // Encode the data into JSON and then the wire format - err := schemaInfo.JsonSchema().Validate(jsonBytes) - if err != nil { - return nil, NewXk6KafkaError(failedEncodeToJSON, - "Failed to encode data into JSON", - err) - } - } - - return EncodeWireFormat(bytesData, schemaID), nil -} - -// DeserializeJSON deserializes the data from JSON and returns the decoded data. It -// uses the given version to retrieve the schema from Schema Registry, otherwise it -// uses the given schema to manually create the codec and decode the data. 
The -// configuration is used to configure the Schema Registry client. The element is -// used to define the subject. The data should be a byte array. -// nolint: funlen -func DeserializeJSON( - configuration Configuration, topic string, data []byte, - element Element, schema string, version int, -) (interface{}, *Xk6KafkaError) { - _, bytesDecodedData, err := DecodeWireFormat(data) - if err != nil { - return nil, NewXk6KafkaError(failedDecodeFromWireFormat, - "Failed to remove wire format from the binary data", - err) - } - - client := SchemaRegistryClientWithConfiguration(configuration.SchemaRegistry) - subject := topic + "-" + string(element) - var schemaInfo *srclient.Schema - - var xk6KafkaError *Xk6KafkaError - - if schema != "" { - // Schema is provided, so we need to create it and get the schema ID - schemaInfo, xk6KafkaError = CreateSchema(client, subject, schema, srclient.Json) - } else { - // Schema is not provided, so we need to fetch the schema from the Schema Registry - schemaInfo, xk6KafkaError = GetSchema(client, subject, schema, srclient.Json, version) - } - - if xk6KafkaError != nil { - logger.WithField("error", xk6KafkaError).Warn( - "Failed to create or get schema, manually decoding the data") - codec, err := jsonschema.CompileString(string(element), schema) - if err != nil { - return nil, NewXk6KafkaError(failedCreateJSONSchemaCodec, - "Failed to create codec for decoding JSON data", - err) - } - - var jsonBytes interface{} - if err := json.Unmarshal(bytesDecodedData, &jsonBytes); err != nil { - return nil, NewXk6KafkaError(failedUnmarshalJSON, - "Failed to unmarshal JSON data", - err) - } - - if err := codec.Validate(jsonBytes); err != nil { - return jsonBytes, NewXk6KafkaError(failedValidateJSON, - "Failed to validate JSON data, yet returning the data", - err) - } - - return jsonBytes, nil - } - - if schemaInfo != nil { - var jsonBytes interface{} - if err := json.Unmarshal(bytesDecodedData, &jsonBytes); err != nil { - return nil, NewXk6KafkaError(failedUnmarshalJSON, - "Failed to unmarshal JSON data", - err) - } - - // Decode the data from Json - err := schemaInfo.JsonSchema().Validate(jsonBytes) - if err != nil { - return nil, NewXk6KafkaError(failedDecodeJSONFromBinary, - "Failed to decode data from JSON", - err) - } - return jsonBytes, nil - } - - return bytesDecodedData, nil -} diff --git a/jsonschema_test.go b/jsonschema_test.go deleted file mode 100644 index 221b716..0000000 --- a/jsonschema_test.go +++ /dev/null @@ -1,120 +0,0 @@ -package kafka - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -var ( - jsonConfig = Configuration{ - Producer: ProducerConfiguration{ - ValueSerializer: JSONSchemaSerializer, - KeySerializer: JSONSchemaSerializer, - }, - Consumer: ConsumerConfiguration{ - ValueDeserializer: JSONSchemaDeserializer, - KeyDeserializer: JSONSchemaDeserializer, - }, - } - jsonSchema = `{"type":"object","title":"Key","properties":{"field": {"type":"string"}},"required":["field"]}` -) - -// TestSerializeDeserializeJson tests serialization and deserialization (and validation) of -// JSON data. -func TestSerializeDeserializeJson(t *testing.T) { - // Test with a schema registry, which fails and manually (de)serializes the data. - for _, element := range []Element{Key, Value} { - // Serialize the key or value. - serialized, err := SerializeJSON(jsonConfig, "topic", `{"field":"value"}`, element, jsonSchema, 0) - assert.Nil(t, err) - assert.NotNil(t, serialized) - // 4 bytes for magic byte, 1 byte for schema ID, and the rest is the data. 
- assert.GreaterOrEqual(t, len(serialized), 10) - - // Deserialize the key or value (removes the magic bytes). - deserialized, err := DeserializeJSON(jsonConfig, "topic", serialized, element, jsonSchema, 0) - assert.Nil(t, err) - assert.Equal(t, map[string]interface{}{"field": "value"}, deserialized) - } -} - -// TestSerializeDeserializeJsonFailsOnSchemaError tests serialization and deserialization (and -// validation) of JSON data and fails on schema error. -func TestSerializeDeserializeJsonFailsOnSchemaError(t *testing.T) { - schema := `{` - - for _, element := range []Element{Key, Value} { - // Serialize the key or value. - serialized, err := SerializeJSON(jsonConfig, "topic", `{"field":"value"}`, element, schema, 0) - assert.Nil(t, serialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to create codec for encoding JSON", err.Message) - assert.Equal(t, failedCreateJSONSchemaCodec, err.Code) - - // Deserialize the key or value. - deserialized, err := DeserializeJSON(jsonConfig, "topic", []byte{0, 2, 3, 4, 5, 6}, element, schema, 0) - assert.Nil(t, deserialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to create codec for decoding JSON data", err.Message) - assert.Equal(t, failedCreateJSONSchemaCodec, err.Code) - } -} - -// TestSerializeDeserializeJsonFailsOnWireFormatError tests serialization and deserialization (and -// validation) of JSON data and fails on wire format error. -func TestSerializeDeserializeJsonFailsOnWireFormatError(t *testing.T) { - schema := `{}` - - for _, element := range []Element{Key, Value} { - // Deserialize an empty key or value. - deserialized, err := DeserializeJSON(jsonConfig, "topic", []byte{}, element, schema, 0) - assert.Nil(t, deserialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to remove wire format from the binary data", err.Message) - assert.Equal(t, failedDecodeFromWireFormat, err.Code) - - // Deserialize a broken key or value. - // Proper wire-formatted message has 5 bytes (the wire format) plus data. - deserialized, err = DeserializeJSON(jsonConfig, "topic", []byte{1, 2, 3, 4}, element, schema, 0) - assert.Nil(t, deserialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to remove wire format from the binary data", err.Message) - assert.Equal(t, failedDecodeFromWireFormat, err.Code) - } -} - -// TestSerializeDeserializeJsonFailsOnMarshalError tests serialization and deserialization (and -// validation) of JSON data and fails on JSON marshal error. -func TestSerializeDeserializeJsonFailsOnMarshalError(t *testing.T) { - data := `{"nonExistingField":"` - - for _, element := range []Element{Key, Value} { - serialized, err := SerializeJSON(jsonConfig, "topic", data, element, jsonSchema, 0) - assert.Nil(t, serialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to unmarshal JSON data", err.Message) - assert.Equal(t, failedUnmarshalJSON, err.Code) - - deserialized, err := DeserializeJSON(jsonConfig, "topic", []byte{0, 2, 3, 4, 5, 6}, element, jsonSchema, 0) - assert.Nil(t, deserialized) - assert.Error(t, err.Unwrap()) - assert.Equal(t, "Failed to unmarshal JSON data", err.Message) - assert.Equal(t, failedUnmarshalJSON, err.Code) - } -} - -// TestSerializeDeserializeJsonFailsOnValidationError tests serialization and deserialization (and -// validation) of JSON data and fails on JSON validation error. -func TestSerializeDeserializeJsonFailsOnValidationError(t *testing.T) { - // JSON schema validation fails, but the data is still returned. 
- data := `{"nonExistingField":"value"}` - - for _, element := range []Element{Key, Value} { - serialized, err := SerializeJSON(jsonConfig, "topic", data, element, jsonSchema, 0) - assert.Nil(t, err) - assert.NotNil(t, serialized) - // 4 bytes for magic byte, 1 byte for schema ID, and the rest is the data. - assert.GreaterOrEqual(t, len(serialized), 28) - } -} diff --git a/kafka_helpers_test.go b/kafka_helpers_test.go index df94203..541b708 100644 --- a/kafka_helpers_test.go +++ b/kafka_helpers_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/dop251/goja" + kafkago "github.com/segmentio/kafka-go" "github.com/stretchr/testify/require" "go.k6.io/k6/js/common" "go.k6.io/k6/js/modulestest" @@ -23,9 +24,9 @@ type kafkaTest struct { cancelContext context.CancelFunc } -// GetTestModuleInstance returns a new instance of the Kafka module for testing. +// getTestModuleInstance returns a new instance of the Kafka module for testing. // nolint: golint,revive -func GetTestModuleInstance(tb testing.TB) *kafkaTest { +func getTestModuleInstance(tb testing.TB) *kafkaTest { tb.Helper() runtime := goja.New() runtime.SetFieldNameMapper(common.FieldNameMapper{}) @@ -77,8 +78,8 @@ func (k *kafkaTest) moveToVUCode() error { return nil } -// GetCounterMetricsValues returns the samples of the collected metrics in the VU. -func (k *kafkaTest) GetCounterMetricsValues() map[string]float64 { +// getCounterMetricsValues returns the samples of the collected metrics in the VU. +func (k *kafkaTest) getCounterMetricsValues() map[string]float64 { metricsValues := make(map[string]float64) for _, sampleContainer := range metrics.GetBufferedSamples(k.samples) { @@ -90,3 +91,52 @@ func (k *kafkaTest) GetCounterMetricsValues() map[string]float64 { } return metricsValues } + +// newWriter creates a Kafka writer for the reader tests. +func (k *kafkaTest) newWriter(topicName string) *kafkago.Writer { + // Create a writer to produce messages. + return k.module.Kafka.writer(&WriterConfig{ + Brokers: []string{"localhost:9092"}, + Topic: topicName, + }) +} + +// newReader creates a Kafka reader for the reader tests. +func (k *kafkaTest) newReader(topicName string) *kafkago.Reader { + // Create a reader to consume messages. + return k.module.Kafka.reader(&ReaderConfig{ + Brokers: []string{"localhost:9092"}, + Topic: topicName, + }) +} + +// createTopic creates a topic. +func (k *kafkaTest) createTopic(topicName string) { + // Create a connection to Kafka. + connection := k.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ + Address: "localhost:9092", + }) + defer connection.Close() + + // Create a topic. + k.module.Kafka.createTopic(connection, &kafkago.TopicConfig{Topic: topicName}) +} + +// topicExists checks if a topic exists. +func (k *kafkaTest) topicExists(topicName string) bool { + // Create a connection to Kafka. + connection := k.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ + Address: "localhost:9092", + }) + defer connection.Close() + + // Create a topic. 
+ topics := k.module.Kafka.listTopics(connection) + for _, topic := range topics { + if topic == topicName { + return true + } + } + + return false +} diff --git a/module.go b/module.go index e9d9958..f8ce5ed 100644 --- a/module.go +++ b/module.go @@ -4,6 +4,7 @@ import ( "crypto/tls" "github.com/dop251/goja" + "github.com/riferrei/srclient" kafkago "github.com/segmentio/kafka-go" "github.com/segmentio/kafka-go/compress" "github.com/sirupsen/logrus" @@ -64,11 +65,9 @@ func init() { type ( Kafka struct { - vu modules.VU - metrics kafkaMetrics - serializerRegistry *Serde[Serializer] - deserializerRegistry *Serde[Deserializer] - exports *goja.Object + vu modules.VU + metrics kafkaMetrics + exports *goja.Object } RootModule struct{} Module struct { @@ -98,11 +97,9 @@ func (*RootModule) NewModuleInstance(virtualUser modules.VU) modules.Instance { // Create a new Kafka module. moduleInstance := &Module{ Kafka: &Kafka{ - vu: virtualUser, - metrics: metrics, - serializerRegistry: NewSerializersRegistry(), - deserializerRegistry: NewDeserializersRegistry(), - exports: runtime.NewObject(), + vu: virtualUser, + metrics: metrics, + exports: runtime.NewObject(), }, } @@ -122,6 +119,7 @@ func (*RootModule) NewModuleInstance(virtualUser modules.VU) modules.Instance { mustExport("Reader", moduleInstance.readerClass) // The Connection is a constructor and must be called with new, e.g. new Connection(...). mustExport("Connection", moduleInstance.connectionClass) + mustExport("SchemaRegistry", moduleInstance.schemaRegistryClientClass) // This causes the struct fields to be exported to the native (camelCases) JS code. virtualUser.Runtime().SetFieldNameMapper(goja.TagFieldNameMapper("json", true)) @@ -185,20 +183,19 @@ func (m *Module) defineConstants() { mustAddProp("ISOLATION_LEVEL_READ_UNCOMMITTED", isolationLevelReadUncommitted) mustAddProp("ISOLATION_LEVEL_READ_COMMITTED", isolationLevelReadCommitted) - // Serde types - mustAddProp("STRING_SERIALIZER", StringSerializer) - mustAddProp("STRING_DESERIALIZER", StringDeserializer) - mustAddProp("BYTE_ARRAY_SERIALIZER", ByteArraySerializer) - mustAddProp("BYTE_ARRAY_DESERIALIZER", ByteArrayDeserializer) - mustAddProp("JSON_SCHEMA_SERIALIZER", JSONSchemaSerializer) - mustAddProp("JSON_SCHEMA_DESERIALIZER", JSONSchemaDeserializer) - mustAddProp("AVRO_SERIALIZER", AvroSerializer) - mustAddProp("AVRO_DESERIALIZER", AvroDeserializer) - mustAddProp("PROTOBUF_SERIALIZER", ProtobufSerializer) - mustAddProp("PROTOBUF_DESERIALIZER", ProtobufDeserializer) - // TopicNameStrategy types mustAddProp("TOPIC_NAME_STRATEGY", TopicNameStrategy) mustAddProp("RECORD_NAME_STRATEGY", RecordNameStrategy) mustAddProp("TOPIC_RECORD_NAME_STRATEGY", TopicRecordNameStrategy) + + // Element types + mustAddProp("KEY", string(Key)) + mustAddProp("VALUE", string(Value)) + + // Schema types + mustAddProp("SCHEMA_TYPE_STRING", String.String()) + mustAddProp("SCHEMA_TYPE_BYTES", Bytes.String()) + mustAddProp("SCHEMA_TYPE_AVRO", srclient.Avro.String()) + mustAddProp("SCHEMA_TYPE_JSON", srclient.Json.String()) + mustAddProp("SCHEMA_TYPE_PROTOBUF", srclient.Protobuf.String()) } diff --git a/consumer.go b/reader.go similarity index 89% rename from consumer.go rename to reader.go index 69bf562..bf735a0 100644 --- a/consumer.go +++ b/reader.go @@ -27,8 +27,6 @@ var ( IsolationLevels map[string]kafkago.IsolationLevel - DefaultDeserializer = StringDeserializer - MaxWait = time.Millisecond * 200 RebalanceTimeout = time.Second * 5 ) @@ -65,10 +63,7 @@ type ReaderConfig struct { } type ConsumeConfig struct 
{ - Limit int64 `json:"limit"` - Config Configuration `json:"config"` - KeySchema string `json:"keySchema"` - ValueSchema string `json:"valueSchema"` + Limit int64 `json:"limit"` } // readerClass is a wrapper around kafkago.reader and acts as a JS constructor @@ -236,14 +231,6 @@ func (k *Kafka) reader(readerConfig *ReaderConfig) *kafkago.Reader { return reader } -// getDeserializer returns the deserializer for the given schema. -func (k *Kafka) getDeserializer(schema string) Deserializer { - if de, ok := k.deserializerRegistry.Registry[schema]; ok { - return de.GetDeserializer() - } - return DeserializeString -} - // consume consumes messages from the given reader. // nolint: funlen func (k *Kafka) consume( @@ -265,15 +252,6 @@ func (k *Kafka) consume( consumeConfig.Limit = 1 } - if err := ValidateConfiguration(consumeConfig.Config); err != nil { - consumeConfig.Config.Consumer.KeyDeserializer = DefaultDeserializer - consumeConfig.Config.Consumer.ValueDeserializer = DefaultDeserializer - logger.WithField("error", err).Warn("Using default string serializers") - } - - keyDeserializer := k.getDeserializer(consumeConfig.Config.Consumer.KeyDeserializer) - valueDeserializer := k.getDeserializer(consumeConfig.Config.Consumer.ValueDeserializer) - messages := make([]map[string]interface{}, 0) for i := int64(0); i < consumeConfig.Limit; i++ { @@ -315,23 +293,11 @@ func (k *Kafka) consume( } if len(msg.Key) > 0 { - var wrappedError *Xk6KafkaError - message["key"], wrappedError = keyDeserializer( - consumeConfig.Config, reader.Config().Topic, msg.Key, - Key, consumeConfig.KeySchema, 0) - if wrappedError != nil && wrappedError.Unwrap() != nil { - logger.WithField("error", wrappedError).Error(wrappedError) - } + message["key"] = msg.Key } if len(msg.Value) > 0 { - var wrappedError *Xk6KafkaError - message["value"], wrappedError = valueDeserializer( - consumeConfig.Config, reader.Config().Topic, msg.Value, - Value, consumeConfig.ValueSchema, 0) - if wrappedError != nil && wrappedError.Unwrap() != nil { - logger.WithField("error", wrappedError).Error(wrappedError) - } + message["value"] = msg.Value } messages = append(messages, message) diff --git a/consumer_test.go b/reader_test.go similarity index 75% rename from consumer_test.go rename to reader_test.go index 1ffa79b..4114345 100644 --- a/consumer_test.go +++ b/reader_test.go @@ -4,43 +4,20 @@ import ( "encoding/json" "testing" - kafkago "github.com/segmentio/kafka-go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -// initializeConsumerTest creates a k6 instance with the xk6-kafka extension -// and then it creates a Kafka topic and a Kafka writer. -func initializeConsumerTest(t *testing.T) (*kafkaTest, *kafkago.Writer) { - t.Helper() - test := GetTestModuleInstance(t) - - // Create a Kafka topic - connection := test.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ - Address: "localhost:9092", - }) - defer connection.Close() - - test.module.Kafka.createTopic(connection, &kafkago.TopicConfig{ - Topic: "test-topic", - }) - - // Create a writer to produce messages. - writer := test.module.Kafka.writer(&WriterConfig{ - Brokers: []string{"localhost:9092"}, - Topic: "test-topic", - }) - assert.NotNil(t, writer) - - return test, writer -} - // TestConsume tests the consume function. 
// nolint: funlen func TestConsume(t *testing.T) { - test, writer := initializeConsumerTest(t) + test := getTestModuleInstance(t) + test.createTopic("test-topic") + writer := test.newWriter("test-topic") defer writer.Close() + assert.True(t, test.topicExists("test-topic")) + // Create a reader to consume messages. assert.NotPanics(t, func() { reader := test.module.Kafka.reader(&ReaderConfig{ @@ -58,8 +35,8 @@ func TestConsume(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Key: "key1", - Value: "value1", + Key: test.module.Kafka.serialize(&Container{Data: "key1"}), + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), Offset: 0, }, }, @@ -70,21 +47,22 @@ func TestConsume(t *testing.T) { assert.NotPanics(t, func() { messages := test.module.Kafka.consume(reader, &ConsumeConfig{Limit: 1}) assert.Equal(t, 1, len(messages)) - if key, ok := messages[0]["key"].(string); ok { - assert.Equal(t, "key1", key) - } else { - assert.Fail(t, "key is not a string") + + result := test.module.Kafka.deserialize(&Container{Data: messages[0]["key"]}) + + if key, ok := result.([]byte); ok { + assert.Equal(t, "key1", string(key)) } - if value, ok := messages[0]["value"].(string); ok { - assert.Equal(t, "value1", value) - } else { - assert.Fail(t, "value is not a string") + + result = test.module.Kafka.deserialize(&Container{Data: messages[0]["value"]}) + if value, ok := result.([]byte); ok { + assert.Equal(t, "value1", string(value)) } }) }) // Check if one message was consumed. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 1.0, metricsValues[test.module.metrics.ReaderDials.Name]) assert.Equal(t, 2.0, metricsValues[test.module.metrics.ReaderFetches.Name]) assert.Equal(t, 1.0, metricsValues[test.module.metrics.ReaderMessages.Name]) @@ -104,13 +82,12 @@ func TestConsume(t *testing.T) { assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderMaxWait.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderQueueLength.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderQueueCapacity.Name]) - - // _ = test.module.Kafka.DeleteTopic("localhost:9092", "test-topic", "") } // TestConsumeWithoutKey tests the consume function without a key. func TestConsumeWithoutKey(t *testing.T) { - test, writer := initializeConsumerTest(t) + test := getTestModuleInstance(t) + writer := test.newWriter("test-topic") defer writer.Close() // Create a reader to consume messages. @@ -131,7 +108,7 @@ func TestConsumeWithoutKey(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Value: "value1", + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), Offset: 1, }, }, @@ -143,16 +120,16 @@ func TestConsumeWithoutKey(t *testing.T) { messages := test.module.Kafka.consume(reader, &ConsumeConfig{Limit: 1}) assert.Equal(t, 1, len(messages)) assert.NotContains(t, messages[0], "key") - if value, ok := messages[0]["value"].(string); ok { - assert.Equal(t, "value1", value) - } else { - assert.Fail(t, "value is not a string") + + result := test.module.Kafka.deserialize(&Container{Data: messages[0]["value"]}) + if value, ok := result.([]byte); ok { + assert.Equal(t, "value1", string(value)) } }) }) // Check if one message was consumed. 
- metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 1.0, metricsValues[test.module.metrics.ReaderDials.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderErrors.Name]) assert.Equal(t, 6.0, metricsValues[test.module.metrics.ReaderBytes.Name]) @@ -162,7 +139,8 @@ func TestConsumeWithoutKey(t *testing.T) { // TestConsumerContextCancelled tests the consume function and fails on a cancelled context. func TestConsumerContextCancelled(t *testing.T) { - test, writer := initializeConsumerTest(t) + test := getTestModuleInstance(t) + writer := test.newWriter("test-topic") defer writer.Close() // Create a reader to consume messages. @@ -182,7 +160,7 @@ func TestConsumerContextCancelled(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Value: "value1", + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), Offset: 2, }, }, @@ -199,7 +177,7 @@ func TestConsumerContextCancelled(t *testing.T) { }) // Check if one message was consumed. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderDials.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderErrors.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.ReaderBytes.Name]) @@ -209,7 +187,8 @@ func TestConsumerContextCancelled(t *testing.T) { // TestConsumeJSON tests the consume function with a JSON value. func TestConsumeJSON(t *testing.T) { - test, writer := initializeConsumerTest(t) + test := getTestModuleInstance(t) + writer := test.newWriter("test-topic") defer writer.Close() // Create a reader to consume messages. @@ -233,7 +212,7 @@ func TestConsumeJSON(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Value: string(serialized), + Value: serialized, Offset: 3, }, }, @@ -245,16 +224,9 @@ func TestConsumeJSON(t *testing.T) { messages := test.module.Kafka.consume(reader, &ConsumeConfig{Limit: 1}) assert.Equal(t, 1, len(messages)) - type F struct { - Field string `json:"field"` - } - var f *F - if data, ok := messages[0]["value"].(string); ok { - jsonErr = json.Unmarshal([]byte(data), &f) - assert.Nil(t, jsonErr) - assert.Equal(t, "value", f.Field) - } else { - assert.Fail(t, "value is not a string") + result := test.module.Kafka.deserialize(&Container{Data: messages[0]["value"]}) + if data, ok := result.(map[string]interface{}); ok { + assert.Equal(t, "value", data["field"]) } }) }) diff --git a/schema_registry.go b/schema_registry.go index 09f927f..98602d4 100644 --- a/schema_registry.go +++ b/schema_registry.go @@ -6,7 +6,11 @@ import ( "fmt" "net/http" + "github.com/dop251/goja" + "github.com/linkedin/goavro/v2" "github.com/riferrei/srclient" + "github.com/santhosh-tekuri/jsonschema/v5" + "go.k6.io/k6/js/common" ) type Element string @@ -26,7 +30,6 @@ type BasicAuth struct { type SchemaRegistryConfiguration struct { URL string `json:"url"` BasicAuth BasicAuth `json:"basicAuth"` - UseLatest bool `json:"useLatest"` TLS TLSConfig `json:"tls"` } @@ -36,41 +39,215 @@ const ( TopicRecordNameStrategy string = "TopicRecordNameStrategy" ) -// DecodeWireFormat removes the proprietary 5-byte prefix from the Avro, ProtoBuf -// or JSONSchema payload. 
-// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format -func DecodeWireFormat(message []byte) (int, []byte, *Xk6KafkaError) { - if len(message) < MagicPrefixSize { - return 0, nil, NewXk6KafkaError(messageTooShort, - "Invalid message: message too short to contain schema id.", nil) +// Schema is a wrapper around the schema registry schema. +// The Codec() and JsonSchema() methods will return the respective codecs (duck-typing). +type Schema struct { + ID int `json:"id"` + Schema string `json:"schema"` + SchemaType *srclient.SchemaType `json:"schemaType"` + Version int `json:"version"` + References []srclient.Reference `json:"references"` + Subject string `json:"subject"` + codec *goavro.Codec + jsonSchema *jsonschema.Schema +} + +type SubjectNameConfig struct { + Schema string `json:"schema"` + Topic string `json:"topic"` + Element Element `json:"element"` + SubjectNameStrategy string `json:"subjectNameStrategy"` +} + +type WireFormat struct { + SchemaID int `json:"schemaId"` + Data []byte `json:"data"` +} + +// Codec ensures access to Codec +// Will try to initialize a new one if it hasn't been initialized before +// Will return nil if it can't initialize a codec from the schema +func (s *Schema) Codec() *goavro.Codec { + if s.codec == nil { + codec, err := goavro.NewCodec(s.Schema) + if err == nil { + s.codec = codec + } } - if message[0] != 0 { - return 0, nil, NewXk6KafkaError(messageTooShort, - "Invalid message: invalid start byte.", nil) + return s.codec +} + +// JsonSchema ensures access to JsonSchema +// Will try to initialize a new one if it hasn't been initialized before +// Will return nil if it can't initialize a json schema from the schema +func (s *Schema) JsonSchema() *jsonschema.Schema { + if s.jsonSchema == nil { + jsonSchema, err := jsonschema.CompileString("schema.json", s.Schema) + if err == nil { + s.jsonSchema = jsonSchema + } } - magicPrefix := int(binary.BigEndian.Uint32(message[1:MagicPrefixSize])) - return magicPrefix, message[MagicPrefixSize:], nil + return s.jsonSchema } -// EncodeWireFormat adds the proprietary 5-byte prefix to the Avro, ProtoBuf or -// JSONSchema payload. -// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format -func EncodeWireFormat(data []byte, schemaID int) []byte { - schemaIDBytes := make([]byte, MagicPrefixSize-1) - binary.BigEndian.PutUint32(schemaIDBytes, uint32(schemaID)) - return append(append([]byte{0}, schemaIDBytes...), data...) 
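+// schemaRegistryClientClass is the JS constructor exposed as `new SchemaRegistry(...)`. It wraps
+// an srclient.SchemaRegistryClient and exposes getSchema, createSchema, getSubjectName,
+// serialize and deserialize to the k6 script. A minimal usage sketch, mirroring the bundled
+// scripts (the registry URL, subject and schema names below are illustrative, not required):
+//
+//	const schemaRegistry = new SchemaRegistry({ url: "http://localhost:8081" });
+//	const valueSchemaObject = schemaRegistry.createSchema({
+//		subject: "my-topic-value",
+//		schema: valueSchema,
+//		schemaType: SCHEMA_TYPE_AVRO,
+//	});
+//	const payload = schemaRegistry.serialize({
+//		data: { field: "value" },
+//		schema: valueSchemaObject,
+//		schemaType: SCHEMA_TYPE_AVRO,
+//	});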
+func (k *Kafka) schemaRegistryClientClass(call goja.ConstructorCall) *goja.Object { + runtime := k.vu.Runtime() + var configuration *SchemaRegistryConfiguration + var schemaRegistryClient *srclient.SchemaRegistryClient + + if len(call.Arguments) == 1 { + if params, ok := call.Argument(0).Export().(map[string]interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &configuration); err != nil { + common.Throw(runtime, err) + } + } + } + + schemaRegistryClient = k.schemaRegistryClient(configuration) + } + + schemaRegistryClientObject := runtime.NewObject() + // This is the schema registry client object itself + if err := schemaRegistryClientObject.Set("This", schemaRegistryClient); err != nil { + common.Throw(runtime, err) + } + + err := schemaRegistryClientObject.Set("getSchema", func(call goja.FunctionCall) goja.Value { + if len(call.Arguments) == 0 { + common.Throw(runtime, ErrNotEnoughArguments) + } + + if schemaRegistryClient == nil { + common.Throw(runtime, ErrNoSchemaRegistryClient) + } + + var schema *Schema + if params, ok := call.Argument(0).Export().(map[string]interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &schema); err != nil { + common.Throw(runtime, err) + } + } + } + + return runtime.ToValue(k.getSchema(schemaRegistryClient, schema)) + }) + if err != nil { + common.Throw(runtime, err) + } + + err = schemaRegistryClientObject.Set("createSchema", func(call goja.FunctionCall) goja.Value { + if len(call.Arguments) == 0 { + common.Throw(runtime, ErrNotEnoughArguments) + } + + if schemaRegistryClient == nil { + common.Throw(runtime, ErrNoSchemaRegistryClient) + } + + var schema *Schema + if params, ok := call.Argument(0).Export().(map[string]interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &schema); err != nil { + common.Throw(runtime, err) + } + } + } + + return runtime.ToValue(k.createSchema(schemaRegistryClient, schema)) + }) + if err != nil { + common.Throw(runtime, err) + } + + var subjectNameConfig *SubjectNameConfig + err = schemaRegistryClientObject.Set("getSubjectName", func(call goja.FunctionCall) goja.Value { + if len(call.Arguments) == 0 { + common.Throw(runtime, ErrNotEnoughArguments) + } + + if params, ok := call.Argument(0).Export().(map[string]interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &subjectNameConfig); err != nil { + common.Throw(runtime, err) + } + } + } + + return runtime.ToValue(k.getSubjectName(subjectNameConfig)) + }) + if err != nil { + common.Throw(runtime, err) + } + + err = schemaRegistryClientObject.Set("serialize", func(call goja.FunctionCall) goja.Value { + if len(call.Arguments) == 0 { + common.Throw(runtime, ErrNotEnoughArguments) + } + + var metadata *Container + if params, ok := call.Argument(0).Export().(interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &metadata); err != nil { + common.Throw(runtime, err) + } + } + } + + return runtime.ToValue(k.serialize(metadata)) + }) + if err != nil { + common.Throw(runtime, err) + } + + err = schemaRegistryClientObject.Set("deserialize", func(call goja.FunctionCall) goja.Value { + if len(call.Arguments) == 0 { + common.Throw(runtime, ErrNotEnoughArguments) + } + + var 
metadata *Container + if params, ok := call.Argument(0).Export().(interface{}); ok { + if b, err := json.Marshal(params); err != nil { + common.Throw(runtime, err) + } else { + if err = json.Unmarshal(b, &metadata); err != nil { + common.Throw(runtime, err) + } + } + } + + return runtime.ToValue(k.deserialize(metadata)) + }) + if err != nil { + common.Throw(runtime, err) + } + + return schemaRegistryClientObject } -// SchemaRegistryClientWithConfiguration creates a SchemaRegistryClient instance +// schemaRegistryClient creates a schemaRegistryClient instance // with the given configuration. It will also configure auth and TLS credentials if exists. -func SchemaRegistryClientWithConfiguration(configuration SchemaRegistryConfiguration) *srclient.SchemaRegistryClient { +func (k *Kafka) schemaRegistryClient( + configuration *SchemaRegistryConfiguration) *srclient.SchemaRegistryClient { + runtime := k.vu.Runtime() var srClient *srclient.SchemaRegistryClient tlsConfig, err := GetTLSConfig(configuration.TLS) if err != nil { // Ignore the error if we're not using TLS if err.Code != noTLSConfig { - logger.WithField("error", err).Error("Cannot process TLS config") + common.Throw(runtime, err) } srClient = srclient.CreateSchemaRegistryClient(configuration.URL) } @@ -92,72 +269,128 @@ func SchemaRegistryClientWithConfiguration(configuration SchemaRegistryConfigura return srClient } -// GetSchema returns the schema for the given subject and schema ID and version. -func GetSchema( - client *srclient.SchemaRegistryClient, subject string, schema string, schemaType srclient.SchemaType, version int, -) (*srclient.Schema, *Xk6KafkaError) { +// getSchema returns the schema for the given subject and schema ID and version. +func (k *Kafka) getSchema(client *srclient.SchemaRegistryClient, schema *Schema) *Schema { + runtime := k.vu.Runtime() // The client always caches the schema. var schemaInfo *srclient.Schema var err error // Default version of the schema is the latest version. - if version == 0 { - schemaInfo, err = client.GetLatestSchema(subject) + if schema.Version == 0 { + schemaInfo, err = client.GetLatestSchema(schema.Subject) } else { - schemaInfo, err = client.GetSchemaByVersion(subject, version) + schemaInfo, err = client.GetSchemaByVersion( + schema.Subject, schema.Version) } if err != nil { - return nil, NewXk6KafkaError(schemaNotFound, - "Failed to get schema from schema registry", err) + err := NewXk6KafkaError(schemaNotFound, "Failed to get schema from schema registry", err) + common.Throw(runtime, err) + return nil } - return schemaInfo, nil + return &Schema{ + ID: schemaInfo.ID(), + Version: schemaInfo.Version(), + Schema: schemaInfo.Schema(), + SchemaType: schemaInfo.SchemaType(), + References: schemaInfo.References(), + Subject: schema.Subject, + } } -// CreateSchema creates a new schema in the schema registry. -func CreateSchema( - client *srclient.SchemaRegistryClient, subject string, schema string, schemaType srclient.SchemaType, -) (*srclient.Schema, *Xk6KafkaError) { - schemaInfo, err := client.CreateSchema(subject, schema, schemaType) +// createSchema creates a new schema in the schema registry. +func (k *Kafka) createSchema(client *srclient.SchemaRegistryClient, schema *Schema) *Schema { + runtime := k.vu.Runtime() + schemaInfo, err := client.CreateSchema( + schema.Subject, + schema.Schema, + *schema.SchemaType, + schema.References...) 
if err != nil { - return nil, NewXk6KafkaError(schemaCreationFailed, "Failed to create schema.", err) + err := NewXk6KafkaError(schemaCreationFailed, "Failed to create schema.", err) + common.Throw(runtime, err) + return nil + } + + return &Schema{ + ID: schemaInfo.ID(), + Version: schemaInfo.Version(), + Schema: schemaInfo.Schema(), + SchemaType: schemaInfo.SchemaType(), + References: schemaInfo.References(), + Subject: schema.Subject, } - return schemaInfo, nil } -// GetSubjectName return the subject name strategy for the given schema and topic. -func GetSubjectName(schema string, topic string, element Element, subjectNameStrategy string) (string, *Xk6KafkaError) { - if subjectNameStrategy == "" || subjectNameStrategy == TopicNameStrategy { - return topic + "-" + string(element), nil +// getSubjectName return the subject name strategy for the given schema and topic. +func (k *Kafka) getSubjectName(subjectNameConfig *SubjectNameConfig) string { + if subjectNameConfig.SubjectNameStrategy == "" || + subjectNameConfig.SubjectNameStrategy == TopicNameStrategy { + return subjectNameConfig.Topic + "-" + string(subjectNameConfig.Element) } + runtime := k.vu.Runtime() var schemaMap map[string]interface{} - err := json.Unmarshal([]byte(schema), &schemaMap) + err := json.Unmarshal([]byte(subjectNameConfig.Schema), &schemaMap) if err != nil { - return "", NewXk6KafkaError(failedToUnmarshalSchema, "Failed to unmarshal schema", nil) + err := NewXk6KafkaError(failedToUnmarshalSchema, "Failed to unmarshal schema", nil) + common.Throw(runtime, err) } recordName := "" if namespace, ok := schemaMap["namespace"]; ok { if namespace, ok := namespace.(string); ok { recordName = namespace + "." } else { - return "", NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) + err := NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) + common.Throw(runtime, err) } } if name, ok := schemaMap["name"]; ok { if name, ok := name.(string); ok { recordName += name } else { - return "", NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) + err := NewXk6KafkaError(failedTypeCast, "Failed to cast to string", nil) + common.Throw(runtime, err) } } - if subjectNameStrategy == RecordNameStrategy { - return recordName, nil + if subjectNameConfig.SubjectNameStrategy == RecordNameStrategy { + return recordName } - if subjectNameStrategy == TopicRecordNameStrategy { - return topic + "-" + recordName, nil + if subjectNameConfig.SubjectNameStrategy == TopicRecordNameStrategy { + return subjectNameConfig.Topic + "-" + recordName } - return "", NewXk6KafkaError(failedEncodeToAvro, fmt.Sprintf( - "Unknown subject name strategy: %v", subjectNameStrategy), nil) + err = NewXk6KafkaError(failedToEncode, fmt.Sprintf( + "Unknown subject name strategy: %v", subjectNameConfig.SubjectNameStrategy), nil) + common.Throw(runtime, err) + return "" +} + +// encodeWireFormat adds the proprietary 5-byte prefix to the Avro, ProtoBuf or +// JSONSchema payload. +// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format +func (k *Kafka) encodeWireFormat(data []byte, schemaID int) []byte { + schemaIDBytes := make([]byte, MagicPrefixSize-1) + binary.BigEndian.PutUint32(schemaIDBytes, uint32(schemaID)) + return append(append([]byte{0}, schemaIDBytes...), data...) +} + +// decodeWireFormat removes the proprietary 5-byte prefix from the Avro, ProtoBuf +// or JSONSchema payload. 
+// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format
+func (k *Kafka) decodeWireFormat(message []byte) []byte {
+	runtime := k.vu.Runtime()
+	if len(message) < MagicPrefixSize {
+		err := NewXk6KafkaError(messageTooShort,
+			"Invalid message: message too short to contain schema id.", nil)
+		common.Throw(runtime, err)
+		return nil
+	}
+	if message[0] != 0 {
+		err := NewXk6KafkaError(messageTooShort, "Invalid message: invalid start byte.", nil)
+		common.Throw(runtime, err)
+		return nil
+	}
+	return message[MagicPrefixSize:]
 }
diff --git a/schema_registry_test.go b/schema_registry_test.go
index 9837ab1..20efc2e 100644
--- a/schema_registry_test.go
+++ b/schema_registry_test.go
@@ -3,7 +3,6 @@ package kafka
 import (
 	"testing"
 
-	"github.com/riferrei/srclient"
 	"github.com/stretchr/testify/assert"
 )
 
@@ -11,42 +10,45 @@ var avroSchemaForSRTests = `{"type":"record","name":"Schema","fields":[{"name":"
 
 // TestDecodeWireFormat tests the decoding of a wire-formatted message.
 func TestDecodeWireFormat(t *testing.T) {
+	test := getTestModuleInstance(t)
 	encoded := []byte{0, 1, 2, 3, 4, 5}
 	decoded := []byte{5}
 	prefix := 16909060
 
-	magic, result, err := DecodeWireFormat(encoded)
-	assert.Nil(t, err)
+	result := test.module.Kafka.decodeWireFormat(encoded)
 	assert.Equal(t, decoded, result)
-	assert.Equal(t, magic, prefix)
+	assert.Equal(t, encoded, test.module.Kafka.encodeWireFormat(decoded, prefix))
 }
 
 // TestDecodeWireFormatFails tests the decoding of a wire-formatted message and
 // fails because the message is too short.
 func TestDecodeWireFormatFails(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	encoded := []byte{0, 1, 2, 3} // too short
 
-	_, result, err := DecodeWireFormat(encoded)
-	assert.Nil(t, result)
-	assert.NotNil(t, err)
-	assert.Equal(t, "Invalid message: message too short to contain schema id.", err.Message)
-	assert.Equal(t, messageTooShort, err.Code)
-	assert.Nil(t, err.Unwrap())
+	assert.PanicsWithError(t, "Invalid message: message too short to contain schema id.", func() {
+		test.module.Kafka.decodeWireFormat(encoded)
+	})
 }
 
 // TestEncodeWireFormat tests the encoding of a message and adding wire-format to it.
 func TestEncodeWireFormat(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	data := []byte{6}
 	schemaID := 5
 	encoded := []byte{0, 0, 0, 0, 5, 6}
 
-	result := EncodeWireFormat(data, schemaID)
+	result := test.module.Kafka.encodeWireFormat(data, schemaID)
 	assert.Equal(t, encoded, result)
 }
 
 // TestSchemaRegistryClient tests the creation of a SchemaRegistryClient instance
 // with the given configuration.
 func TestSchemaRegistryClient(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	srConfig := SchemaRegistryConfiguration{
 		URL: "http://localhost:8081",
 		BasicAuth: BasicAuth{
@@ -54,13 +56,15 @@ func TestSchemaRegistryClient(t *testing.T) {
 			Password: "password",
 		},
 	}
-	srClient := SchemaRegistryClientWithConfiguration(srConfig)
+	srClient := test.module.Kafka.schemaRegistryClient(&srConfig)
 	assert.NotNil(t, srClient)
 }
 
 // TestSchemaRegistryClientWithTLSConfig tests the creation of a SchemaRegistryClient instance
 // with the given configuration along with TLS configuration.
 func TestSchemaRegistryClientWithTLSConfig(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	srConfig := SchemaRegistryConfiguration{
 		URL: "http://localhost:8081",
 		BasicAuth: BasicAuth{
@@ -73,13 +77,15 @@ func TestSchemaRegistryClientWithTLSConfig(t *testing.T) {
 			ServerCaPem: "fixtures/caroot.cer",
 		},
 	}
-	srClient := SchemaRegistryClientWithConfiguration(srConfig)
+	srClient := test.module.Kafka.schemaRegistryClient(&srConfig)
 	assert.NotNil(t, srClient)
 }
 
 // TestGetLatestSchemaFails tests getting the latest schema and fails because
 // the configuration is invalid.
 func TestGetLatestSchemaFails(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	srConfig := SchemaRegistryConfiguration{
 		URL: "http://localhost:8081",
 		BasicAuth: BasicAuth{
@@ -87,16 +93,21 @@ func TestGetLatestSchemaFails(t *testing.T) {
 			Password: "password",
 		},
 	}
-	srClient := SchemaRegistryClientWithConfiguration(srConfig)
-	schema, err := GetSchema(srClient, "test-subject", "test-schema", srclient.Avro, 0)
-	assert.Nil(t, schema)
-	assert.NotNil(t, err)
-	assert.Equal(t, "Failed to get schema from schema registry", err.Message)
+	srClient := test.module.Kafka.schemaRegistryClient(&srConfig)
+	assert.Panics(t, func() {
+		schema := test.module.Kafka.getSchema(srClient, &Schema{
+			Subject: "test-subject",
+			Version: 0,
+		})
+		assert.Equal(t, schema, nil)
+	})
 }
 
 // TestGetSchemaFails tests getting the first version of the schema and fails because
 // the configuration is invalid.
 func TestGetSchemaFails(t *testing.T) {
+	test := getTestModuleInstance(t)
+
 	srConfig := SchemaRegistryConfiguration{
 		URL: "http://localhost:8081",
 		BasicAuth: BasicAuth{
@@ -104,16 +115,21 @@ func TestGetSchemaFails(t *testing.T) {
 			Password: "password",
 		},
 	}
-	srClient := SchemaRegistryClientWithConfiguration(srConfig)
-	schema, err := GetSchema(srClient, "test-subject", "test-schema", srclient.Avro, 1)
-	assert.Nil(t, schema)
-	assert.NotNil(t, err)
-	assert.Equal(t, "Failed to get schema from schema registry", err.Message)
+	srClient := test.module.Kafka.schemaRegistryClient(&srConfig)
+	assert.Panics(t, func() {
+		schema := test.module.Kafka.getSchema(srClient, &Schema{
+			Subject: "test-subject",
+			Version: 1,
+		})
+		assert.Equal(t, schema, nil)
+	})
 }
 
 // TestCreateSchemaFails tests creating the schema and fails because the
 // configuration is invalid.
func TestCreateSchemaFails(t *testing.T) { + test := getTestModuleInstance(t) + srConfig := SchemaRegistryConfiguration{ URL: "http://localhost:8081", BasicAuth: BasicAuth{ @@ -121,82 +137,126 @@ func TestCreateSchemaFails(t *testing.T) { Password: "password", }, } - srClient := SchemaRegistryClientWithConfiguration(srConfig) - schema, err := CreateSchema(srClient, "test-subject", "test-schema", srclient.Avro) - assert.Nil(t, schema) - assert.NotNil(t, err) - assert.Equal(t, "Failed to create schema.", err.Message) + srClient := test.module.Kafka.schemaRegistryClient(&srConfig) + assert.Panics(t, func() { + schema := test.module.Kafka.getSchema(srClient, &Schema{ + Subject: "test-subject", + Version: 0, + }) + assert.Equal(t, schema, nil) + }) } func TestGetSubjectNameFailsIfInvalidSchema(t *testing.T) { - _, err := GetSubjectName(`Bad Schema`, "test-topic", Value, RecordNameStrategy) - assert.NotNil(t, err) - assert.Contains(t, err.Message, "Failed to unmarshal schema") + test := getTestModuleInstance(t) + + assert.Panics(t, func() { + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: `Bad Schema`, + Topic: "test-topic", + SubjectNameStrategy: RecordNameStrategy, + Element: Value, + }) + assert.Equal(t, subjectName, "") + }) } func TestGetSubjectNameFailsIfSubjectNameStrategyUnknown(t *testing.T) { - _, err := GetSubjectName(avroSchemaForSRTests, "test-topic", Value, "Unknown") - assert.NotNil(t, err) - assert.Contains(t, err.Message, "Unknown subject name strategy") + test := getTestModuleInstance(t) + + assert.Panics(t, func() { + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchemaForSRTests, + Topic: "test-topic", + SubjectNameStrategy: "Unknown", + Element: Value, + }) + assert.Equal(t, subjectName, "") + }) } func TestGetSubjectNameCanUseDefaultSubjectNameStrategy(t *testing.T) { + test := getTestModuleInstance(t) + for _, element := range []Element{Key, Value} { - subject, err := GetSubjectName(avroSchemaForSRTests, "test-topic", element, "") - assert.Nil(t, err) - assert.Equal(t, "test-topic-"+string(element), subject) + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchemaForSRTests, + Topic: "test-topic", + SubjectNameStrategy: "", + Element: element, + }) + assert.Equal(t, "test-topic-"+string(element), subjectName) } } func TestGetSubjectNameCanUseTopicNameStrategy(t *testing.T) { + test := getTestModuleInstance(t) + for _, element := range []Element{Key, Value} { - subject, err := GetSubjectName(avroSchemaForSRTests, "test-topic", element, TopicNameStrategy) - assert.Nil(t, err) - assert.Equal(t, "test-topic-"+string(element), subject) + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchemaForSRTests, + Topic: "test-topic", + SubjectNameStrategy: TopicNameStrategy, + Element: element, + }) + assert.Equal(t, "test-topic-"+string(element), subjectName) } } func TestGetSubjectNameCanUseTopicRecordNameStrategyWithNamespace(t *testing.T) { + test := getTestModuleInstance(t) + avroSchema := `{ "type":"record", "namespace":"com.example.person", "name":"Schema", "fields":[{"name":"field","type":"string"}]}` - subject, err := GetSubjectName(avroSchema, "test-topic", Value, TopicRecordNameStrategy) - assert.Nil(t, err) - assert.Equal(t, "test-topic-com.example.person.Schema", subject) + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchema, + Topic: "test-topic", + SubjectNameStrategy: TopicRecordNameStrategy, + 
Element: Value, + }) + assert.Equal(t, "test-topic-com.example.person.Schema", subjectName) } func TestGetSubjectNameCanUseTopicRecordNameStrategyWithoutNamespace(t *testing.T) { - subject, err := GetSubjectName(avroSchemaForSRTests, "test-topic", Value, TopicRecordNameStrategy) - assert.Nil(t, err) - assert.Equal(t, "test-topic-Schema", subject) + test := getTestModuleInstance(t) + + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchemaForSRTests, + Topic: "test-topic", + SubjectNameStrategy: TopicRecordNameStrategy, + Element: Value, + }) + assert.Equal(t, "test-topic-Schema", subjectName) } func TestGetSubjectNameCanUseRecordNameStrategyWithoutNamespace(t *testing.T) { - subject, err := GetSubjectName(avroSchemaForSRTests, "test-topic", Value, RecordNameStrategy) - assert.Nil(t, err) + test := getTestModuleInstance(t) + + subject := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchemaForSRTests, + Topic: "test-topic", + SubjectNameStrategy: RecordNameStrategy, + Element: Value, + }) assert.Equal(t, "Schema", subject) } func TestGetSubjectNameCanUseRecordNameStrategyWithNamespace(t *testing.T) { + test := getTestModuleInstance(t) + avroSchema := `{ "type":"record", "namespace":"com.example.person", "name":"Schema", "fields":[{"name":"field","type":"string"}]}` - subject, err := GetSubjectName(avroSchema, "test-topic", Value, RecordNameStrategy) - assert.Nil(t, err) - assert.Equal(t, "com.example.person.Schema", subject) -} - -func TestDecodeWireRequiresMagicPrefixByte(t *testing.T) { - encoded := []byte{1, 1, 2, 3, 4} // too short - - _, result, err := DecodeWireFormat(encoded) - assert.Nil(t, result) - assert.NotNil(t, err) - assert.Equal(t, "Invalid message: invalid start byte.", err.Message) - assert.Equal(t, messageTooShort, err.Code) - assert.Nil(t, err.Unwrap()) + subjectName := test.module.Kafka.getSubjectName(&SubjectNameConfig{ + Schema: avroSchema, + Topic: "test-topic", + SubjectNameStrategy: RecordNameStrategy, + Element: Value, + }) + assert.Equal(t, "com.example.person.Schema", subjectName) } diff --git a/scripts/helpers/schema_registry.js b/scripts/helpers/schema_registry.js deleted file mode 100644 index 4747d4e..0000000 --- a/scripts/helpers/schema_registry.js +++ /dev/null @@ -1,6 +0,0 @@ -import http from 'k6/http'; - -export function getSubject(subject){ - return http.get(`http://localhost:8081/subjects/${subject}/versions/1`) -} - diff --git a/scripts/test_avro.js b/scripts/test_avro.js deleted file mode 100644 index 8098747..0000000 --- a/scripts/test_avro.js +++ /dev/null @@ -1,133 +0,0 @@ -/* - -This is a k6 test script that imports the xk6-kafka and -tests Kafka with a 200 Avro messages per iteration. 
- -*/ - -import { check } from "k6"; -import { Writer, Reader, Connection } from "k6/x/kafka"; // import kafka extension - -const brokers = ["localhost:9092"]; -const topic = "xk6_kafka_avro_topic"; - -const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, -}); -const reader = new Reader({ - brokers: brokers, - topic: topic, -}); -const connection = new Connection({ - address: brokers[0], -}); - -if (__VU == 0) { - connection.createTopic({ topic: topic }); -} - -const keySchema = JSON.stringify({ - type: "record", - name: "Key", - namespace: "dev.mostafa.xk6.kafka", - fields: [ - { - name: "correlationId", - type: "string", - }, - ], -}); - -const valueSchema = JSON.stringify({ - type: "record", - name: "Value", - namespace: "dev.mostafa.xk6.kafka", - fields: [ - { - name: "name", - type: "string", - }, - { - name: "version", - type: "string", - }, - { - name: "author", - type: "string", - }, - { - name: "description", - type: "string", - }, - { - name: "url", - type: "string", - }, - { - name: "index", - type: "int", - }, - ], -}); - -export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - key: JSON.stringify({ - correlationId: "test-id-abc-" + index, - }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - url: "https://mostafa.dev", - index: index, - }), - }, - { - key: JSON.stringify({ - correlationId: "test-id-def-" + index, - }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - url: "https://mostafa.dev", - index: index, - }), - }, - ]; - writer.produce({ - messages: messages, - keySchema: keySchema, - valueSchema: valueSchema, - }); - } - - // Read 10 messages only - let messages = reader.consume({ - limit: 10, - keySchema: keySchema, - valueSchema: valueSchema, - }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - }); -} - -export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} diff --git a/scripts/test_avro_named_strategy_and_magic_prefix.js b/scripts/test_avro_named_strategy_and_magic_prefix.js deleted file mode 100644 index c509c3f..0000000 --- a/scripts/test_avro_named_strategy_and_magic_prefix.js +++ /dev/null @@ -1,97 +0,0 @@ -/* -This is a k6 test script that imports the xk6-kafka and -tests Kafka with a 100 Avro messages per iteration. 
-*/ - -import { check } from "k6"; -import { - Writer, - Reader, - Connection, - AVRO_SERIALIZER, - AVRO_DESERIALIZER, - RECORD_NAME_STRATEGY, -} from "k6/x/kafka"; -import { getSubject } from "./helpers/schema_registry.js"; - -const brokers = ["localhost:9092"]; -const topic = "test_schema_registry_consume_magic_prefix"; - -const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, -}); -const reader = new Reader({ - brokers: brokers, - topic: topic, -}); -const connection = new Connection({ - address: brokers[0], -}); - -if (__VU == 0) { - connection.createTopic({ topic: topic }); -} - -let config = { - consumer: { - valueDeserializer: AVRO_DESERIALIZER, - userMagicPrefix: true, - }, - producer: { - valueSerializer: AVRO_SERIALIZER, - subjectNameStrategy: RECORD_NAME_STRATEGY, - }, - schemaRegistry: { - url: "http://localhost:8081", - }, -}; - -export default function () { - let message = { - value: JSON.stringify({ - firstname: "firstname", - lastname: "lastname", - }), - }; - const valueSchema = JSON.stringify({ - name: "MagicNameValueSchema", - type: "record", - namespace: "com.example", - fields: [ - { - name: "firstname", - type: "string", - }, - { - name: "lastname", - type: "string", - }, - ], - }); - writer.produce({ - messages: [message], - config: config, - valueSchema: valueSchema, - }); - - check(getSubject("com.example.MagicNameValueSchema"), { - "status is 200": (r) => r.status === 200, - }); - - let messages = reader.consume({ limit: 1, config: config, valueSchema: valueSchema }); - check(messages, { - "1 message returned": (msgs) => msgs.length === 1, - }); -} - -export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} diff --git a/scripts/test_avro_no_key.js b/scripts/test_avro_no_key.js deleted file mode 100644 index 6d11263..0000000 --- a/scripts/test_avro_no_key.js +++ /dev/null @@ -1,111 +0,0 @@ -/* - -This is a k6 test script that imports the xk6-kafka and -tests Kafka by sending 200 Avro messages per iteration -without any associated key. 
-*/ - -import { check } from "k6"; -import { Writer, Reader, Connection } from "k6/x/kafka"; // import kafka extension - -const brokers = ["localhost:9092"]; -const topic = "xk6_kafka_avro_topic"; - -const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, -}); -const reader = new Reader({ - brokers: brokers, - topic: topic, -}); -const connection = new Connection({ - address: brokers[0], -}); - -if (__VU == 0) { - connection.createTopic({ topic: topic }); -} - -const valueSchema = JSON.stringify({ - type: "record", - name: "Value", - namespace: "dev.mostafa.xk6.kafka", - fields: [ - { - name: "name", - type: "string", - }, - { - name: "version", - type: "string", - }, - { - name: "author", - type: "string", - }, - { - name: "description", - type: "string", - }, - { - name: "url", - type: "string", - }, - { - name: "index", - type: "int", - }, - ], -}); - -export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - url: "https://mostafa.dev", - index: index, - }), - }, - { - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - url: "https://mostafa.dev", - index: index, - }), - }, - ]; - writer.produce({ messages: messages, valueSchema: valueSchema }); - } - - // Read 10 messages only - let messages = reader.consume({ limit: 10, valueSchema: valueSchema }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - }); - - for (let index = 0; index < messages.length; index++) { - console.debug("Received Message: " + JSON.stringify(messages[index])); - } -} - -export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} diff --git a/scripts/test_avro_no_schema_registry.js b/scripts/test_avro_no_schema_registry.js new file mode 100644 index 0000000..014d9c6 --- /dev/null +++ b/scripts/test_avro_no_schema_registry.js @@ -0,0 +1,81 @@ +/* + +This is a k6 test script that imports the xk6-kafka and +tests Kafka by sending 200 Avro messages per iteration +without any associated key. 
+*/ + +import { check } from "k6"; +import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_AVRO } from "k6/x/kafka"; // import kafka extension + +const brokers = ["localhost:9092"]; +const topic = "xk6_kafka_avro_topic"; + +const writer = new Writer({ + brokers: brokers, + topic: topic, + autoCreateTopic: true, +}); +const reader = new Reader({ + brokers: brokers, + topic: topic, +}); +const connection = new Connection({ + address: brokers[0], +}); +const schemaRegistry = new SchemaRegistry(); + +if (__VU == 0) { + connection.createTopic({ topic: topic }); +} + +const valueSchema = JSON.stringify({ + type: "record", + name: "Value", + namespace: "dev.mostafa.xk6.kafka", + fields: [ + { + name: "name", + type: "string", + }, + ], +}); + +export default function () { + for (let index = 0; index < 100; index++) { + let messages = [ + { + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schema: { schema: valueSchema }, + schemaType: SCHEMA_TYPE_AVRO, + }), + }, + ]; + writer.produce({ messages: messages }); + } + + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); + check(messages, { + "10 messages returned": (msgs) => msgs.length == 10, + "value is correct": (msgs) => + schemaRegistry.deserialize({ + data: msgs[0].value, + schema: { schema: valueSchema }, + schemaType: SCHEMA_TYPE_AVRO, + }).name == "xk6-kafka", + }); +} + +export function teardown(data) { + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); +} diff --git a/scripts/test_avro_with_schema_registry.js b/scripts/test_avro_with_schema_registry.js index 78c6056..ab86e7f 100644 --- a/scripts/test_avro_with_schema_registry.js +++ b/scripts/test_avro_with_schema_registry.js @@ -4,7 +4,17 @@ tests Kafka with a 100 Avro messages per iteration. 
*/ import { check } from "k6"; -import { Writer, Reader, Connection, AVRO_SERIALIZER, AVRO_DESERIALIZER } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + KEY, + VALUE, + TOPIC_NAME_STRATEGY, + RECORD_NAME_STRATEGY, + SCHEMA_TYPE_AVRO, +} from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "com.example.person"; @@ -21,6 +31,9 @@ const reader = new Reader({ const connection = new Connection({ address: brokers[0], }); +const schemaRegistry = new SchemaRegistry({ + url: "http://localhost:8081", +}); if (__VU == 0) { connection.createTopic({ topic: topic }); @@ -44,59 +57,92 @@ const valueSchema = `{ "namespace": "com.example.value", "fields": [ { - "name": "firstname", + "name": "firstName", "type": "string" }, { - "name": "lastname", + "name": "lastName", "type": "string" } ] }`; -var config = { - consumer: { - keyDeserializer: AVRO_DESERIALIZER, - valueDeserializer: AVRO_DESERIALIZER, - }, - producer: { - keySerializer: AVRO_SERIALIZER, - valueSerializer: AVRO_SERIALIZER, - }, - schemaRegistry: { - url: "http://localhost:8081", - }, -}; +const keySubjectName = schemaRegistry.getSubjectName({ + topic: topic, + element: KEY, + subjectNameStrategy: TOPIC_NAME_STRATEGY, + schema: keySchema, +}); + +const valueSubjectName = schemaRegistry.getSubjectName({ + topic: topic, + element: VALUE, + subjectNameStrategy: RECORD_NAME_STRATEGY, + schema: valueSchema, +}); + +const keySchemaObject = schemaRegistry.createSchema({ + subject: keySubjectName, + schema: keySchema, + schemaType: SCHEMA_TYPE_AVRO, +}); + +const valueSchemaObject = schemaRegistry.createSchema({ + subject: valueSubjectName, + schema: valueSchema, + schemaType: SCHEMA_TYPE_AVRO, +}); export default function () { for (let index = 0; index < 100; index++) { let messages = [ { - key: JSON.stringify({ - ssn: "ssn-" + index, + key: schemaRegistry.serialize({ + data: { + ssn: "ssn-" + index, + }, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_AVRO, }), - value: JSON.stringify({ - firstname: "firstname-" + index, - lastname: "lastname-" + index, + value: schemaRegistry.serialize({ + data: { + firstName: "firstName-" + index, + lastName: "lastName-" + index, + }, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, }), }, ]; - writer.produce({ - messages: messages, - config: config, - keySchema: keySchema, - valueSchema: valueSchema, - }); + writer.produce({ messages: messages }); } - let messages = reader.consume({ - limit: 20, - config: config, - keySchema: keySchema, - valueSchema: valueSchema, - }); + let messages = reader.consume({ limit: 20 }); check(messages, { "20 message returned": (msgs) => msgs.length == 20, + "key starts with 'ssn-' string": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].key, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .ssn.startsWith("ssn-"), + "value contains 'firstName-' and 'lastName-' strings": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .firstName.startsWith("firstName-") && + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .lastName.startsWith("lastName-"), }); } diff --git a/scripts/test_avro_with_schema_registry_no_key.js b/scripts/test_avro_with_schema_registry_no_key.js deleted file mode 100644 index bb30773..0000000 --- a/scripts/test_avro_with_schema_registry_no_key.js +++ /dev/null @@ 
-1,94 +0,0 @@ -/* -This is a k6 test script that imports the xk6-kafka and -tests Kafka with a 100 Avro messages per iteration. -*/ - -import { check } from "k6"; -import { Writer, Reader, Connection, AVRO_SERIALIZER, AVRO_DESERIALIZER } from "k6/x/kafka"; // import kafka extension - -const brokers = ["localhost:9092"]; -const topic = "com.example.person"; - -const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, -}); -const reader = new Reader({ - brokers: brokers, - topic: topic, -}); -const connection = new Connection({ - address: brokers[0], -}); - -if (__VU == 0) { - connection.createTopic({ topic: topic }); -} - -const valueSchema = `{ - "name": "ValueSchema", - "type": "record", - "namespace": "com.example", - "fields": [ - { - "name": "firstname", - "type": "string" - }, - { - "name": "lastname", - "type": "string" - } - ] -}`; - -var config = { - consumer: { - keyDeserializer: "", - valueDeserializer: AVRO_DESERIALIZER, - }, - producer: { - keySerializer: "", - valueSerializer: AVRO_SERIALIZER, - }, - schemaRegistry: { - url: "http://localhost:8081", - }, -}; - -export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - value: JSON.stringify({ - firstname: "firstname-" + index, - lastname: "lastname-" + index, - }), - }, - ]; - writer.produce({ - messages: messages, - config: config, - valueSchema: valueSchema, - }); - } - - let messages = reader.consume({ limit: 20, config: config, valueSchema: valueSchema }); - check(messages, { - "20 message returned": (msgs) => msgs.length == 20, - }); - - for (let index = 0; index < messages.length; index++) { - console.debug("Received Message: " + JSON.stringify(messages[index])); - } -} - -export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} diff --git a/scripts/test_bytes.js b/scripts/test_bytes.js index 260a2d9..3f0fe38 100644 --- a/scripts/test_bytes.js +++ b/scripts/test_bytes.js @@ -6,15 +6,7 @@ tests Kafka with a 200 byte array messages per iteration. 
*/ import { check } from "k6"; -import { - Writer, - Reader, - Connection, - STRING_SERIALIZER, - STRING_DESERIALIZER, - BYTE_ARRAY_SERIALIZER, - BYTE_ARRAY_DESERIALIZER, -} from "k6/x/kafka"; // import kafka extension +import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_BYTES } from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "xk6_kafka_byte_array_topic"; @@ -31,52 +23,61 @@ const reader = new Reader({ const connection = new Connection({ address: brokers[0], }); +const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { connection.createTopic({ topic: topic }); } -var config = { - producer: { - keySerializer: STRING_SERIALIZER, - valueSerializer: BYTE_ARRAY_SERIALIZER, - }, - consumer: { - keyDeserializer: STRING_DESERIALIZER, - valueDeserializer: BYTE_ARRAY_DESERIALIZER, - }, -}; - const payload = "byte array payload"; export default function () { for (let index = 0; index < 100; index++) { let messages = [ { - key: "test-id-abc-" + index, - value: Array.from(payload, (x) => x.charCodeAt(0)), + // The data type of the key is a string + key: schemaRegistry.serialize({ + data: Array.from("test-id-abc-" + index, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), + // The data type of the value is a byte array + value: schemaRegistry.serialize({ + data: Array.from(payload, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), }, { - key: "test-id-def-" + index, - value: Array.from(payload, (x) => x.charCodeAt(0)), + key: schemaRegistry.serialize({ + data: Array.from("test-id-def-" + index, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), + value: schemaRegistry.serialize({ + data: Array.from(payload, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), }, ]; writer.produce({ messages: messages, - config: config, }); } // Read 10 messages only - let messages = reader.consume({ - limit: 10, - config: config, - }); + let messages = reader.consume({ limit: 10 }); check(messages, { "10 messages returned": (msgs) => msgs.length == 10, - "key starts with 'test-id-' string": (msgs) => msgs[0].key.startsWith("test-id-"), - "payload is correct": (msgs) => String.fromCharCode(...msgs[0].value) === payload, + "key starts with 'test-id-' string": (msgs) => + String.fromCharCode( + ...schemaRegistry.deserialize({ data: msgs[0].key, schemaType: SCHEMA_TYPE_BYTES }) + ).startsWith("test-id-"), + "value is correct": (msgs) => + String.fromCharCode( + ...schemaRegistry.deserialize({ + data: msgs[0].value, + schemaType: SCHEMA_TYPE_BYTES, + }) + ) == payload, }); } diff --git a/scripts/test_json.js b/scripts/test_json.js index f0f2d87..57d376a 100644 --- a/scripts/test_json.js +++ b/scripts/test_json.js @@ -7,7 +7,14 @@ tests Kafka with a 200 JSON messages per iteration. 
import { check } from "k6"; // import * as kafka from "k6/x/kafka"; -import { Writer, Reader, Connection } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + CODEC_SNAPPY, + SCHEMA_TYPE_JSON, +} from "k6/x/kafka"; // import kafka extension // Prints module-level constants // console.log(kafka); @@ -21,6 +28,7 @@ const writer = new Writer({ brokers: brokers, topic: topic, autoCreateTopic: true, + compression: CODEC_SNAPPY, }); const reader = new Reader({ brokers: brokers, @@ -29,9 +37,18 @@ const reader = new Reader({ const connection = new Connection({ address: brokers[0], }); +const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ + topic: topic, + configEntries: [ + { + configName: "compression.type", + configValue: CODEC_SNAPPY, + }, + ], + }); } export const options = { @@ -46,16 +63,24 @@ export default function () { for (let index = 0; index < 100; index++) { let messages = [ { - key: JSON.stringify({ - correlationId: "test-id-abc-" + index, + // The data type of the key is JSON + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-abc-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.9.0", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, + // The data type of the value is JSON + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + version: "0.9.0", + author: "Mostafa Moradian", + description: + "k6 extension to load test Apache Kafka with support for Avro messages", + index: index, + }, + schemaType: SCHEMA_TYPE_JSON, }), headers: { mykey: "myvalue", @@ -65,16 +90,22 @@ export default function () { time: new Date(), // Will be converted to timestamp automatically }, { - key: JSON.stringify({ - correlationId: "test-id-def-" + index, + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-def-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.9.0", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + version: "0.9.0", + author: "Mostafa Moradian", + description: + "k6 extension to load test Apache Kafka with support for Avro messages", + index: index, + }, + schemaType: SCHEMA_TYPE_JSON, }), headers: { mykey: "myvalue", @@ -94,8 +125,17 @@ export default function () { check(messages[0], { "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, - "Key is correct": (msg) => JSON.parse(msg["key"])["correlationId"].startsWith("test-id-"), - "Value is correct": (msg) => JSON.parse(msg["value"])["name"] == "xk6-kafka", + "Key contains key/value and is JSON": (msg) => + schemaRegistry + .deserialize({ data: msg.key, schemaType: SCHEMA_TYPE_JSON }) + .correlationId.startsWith("test-id-"), + "Value contains key/value and is JSON": (msg) => + typeof schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_JSON, + }) == "object" && + schemaRegistry.deserialize({ data: msg.value, schemaType: SCHEMA_TYPE_JSON }).name == + "xk6-kafka", "Header equals {'mykey': 'myvalue'}": (msg) => "mykey" in msg.headers && String.fromCharCode(...msg.headers["mykey"]) == "myvalue", "Time is past": (msg) => new Date(msg["time"]) < new Date(), diff 
--git a/scripts/test_json_with_snappy_compression.js b/scripts/test_json_with_snappy_compression.js deleted file mode 100644 index 124c138..0000000 --- a/scripts/test_json_with_snappy_compression.js +++ /dev/null @@ -1,102 +0,0 @@ -/* - -This is a k6 test script that imports the xk6-kafka and -tests Kafka with a 200 JSON messages per iteration. - -*/ - -import { check } from "k6"; -import { Writer, Reader, Connection, CODEC_SNAPPY } from "k6/x/kafka"; // import kafka extension - -const brokers = ["localhost:9092"]; -const topic = "xk6_kafka_json_snappy_topic"; - -const writer = new Writer({ - brokers: brokers, - topic: topic, - compression: CODEC_SNAPPY, -}); -const reader = new Reader({ - brokers: brokers, - topic: topic, -}); -const connection = new Connection({ - address: brokers[0], -}); - -const replicationFactor = 1; -const partitions = 1; - -if (__VU == 0) { - // Create the topic or do nothing if the topic exists. - /* - Supported compression codecs: - - - CODEC_GZIP - - CODEC_SNAPPY - - CODEC_LZ4 - - CODEC_ZSTD - */ - connection.createTopic({ - topic: topic, - numPartitions: partitions, - replicationFactor: replicationFactor, - configEntries: [ - { - configName: "compression.type", - configValue: CODEC_SNAPPY, - }, - ], - }); -} - -export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - key: JSON.stringify({ - correlationId: "test-id-abc-" + index, - }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, - }), - }, - { - key: JSON.stringify({ - correlationId: "test-id-def-" + index, - }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, - }), - }, - ]; - - writer.produce({ messages: messages }); - } - - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - }); -} - -export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} diff --git a/scripts/test_jsonschema_with_schema_registry.js b/scripts/test_jsonschema_with_schema_registry.js index b578c80..755bd7c 100644 --- a/scripts/test_jsonschema_with_schema_registry.js +++ b/scripts/test_jsonschema_with_schema_registry.js @@ -8,8 +8,11 @@ import { Writer, Reader, Connection, - JSON_SCHEMA_SERIALIZER, - JSON_SCHEMA_DESERIALIZER, + SchemaRegistry, + KEY, + VALUE, + SCHEMA_TYPE_JSON, + TOPIC_NAME_STRATEGY, } from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; @@ -27,6 +30,9 @@ const reader = new Reader({ const connection = new Connection({ address: brokers[0], }); +const schemaRegistry = new SchemaRegistry({ + url: "http://localhost:8081", +}); if (__VU == 0) { connection.createTopic({ topic: topic }); @@ -58,61 +64,86 @@ const valueSchema = JSON.stringify({ }, }); -var config = { - consumer: { - keyDeserializer: JSON_SCHEMA_DESERIALIZER, - valueDeserializer: JSON_SCHEMA_DESERIALIZER, - }, - producer: { - keySerializer: JSON_SCHEMA_SERIALIZER, - valueSerializer: JSON_SCHEMA_SERIALIZER, - }, - schemaRegistry: { - url: "http://localhost:8081", - }, -}; +const keySubjectName = schemaRegistry.getSubjectName({ + topic: topic, + element: KEY, + subjectNameStrategy: 
TOPIC_NAME_STRATEGY, + schema: keySchema, +}); + +const valueSubjectName = schemaRegistry.getSubjectName({ + topic: topic, + element: VALUE, + subjectNameStrategy: TOPIC_NAME_STRATEGY, + schema: valueSchema, +}); + +const keySchemaObject = schemaRegistry.createSchema({ + subject: keySubjectName, + schema: keySchema, + schemaType: SCHEMA_TYPE_JSON, +}); + +const valueSchemaObject = schemaRegistry.createSchema({ + subject: valueSubjectName, + schema: valueSchema, + schemaType: SCHEMA_TYPE_JSON, +}); export default function () { for (let index = 0; index < 100; index++) { let messages = [ { - key: JSON.stringify({ - key: "key" + index, + key: schemaRegistry.serialize({ + data: { + key: "key-" + index, + }, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_JSON, }), - value: JSON.stringify({ - firstName: "firstName-" + index, - lastName: "lastName-" + index, + value: schemaRegistry.serialize({ + data: { + firstName: "firstName-" + index, + lastName: "lastName-" + index, + }, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, }), }, ]; - writer.produce({ - messages: messages, - config: config, - keySchema: keySchema, - valueSchema: valueSchema, - }); + writer.produce({ messages: messages }); } - let messages = reader.consume({ - limit: 20, - config: config, - keySchema: keySchema, - valueSchema: valueSchema, - }); + let messages = reader.consume({ limit: 20 }); check(messages, { "20 message returned": (msgs) => msgs.length == 20, }); - check(messages[0], { - "Topic equals to xk6_jsonschema_test": (msg) => msg.topic == topic, - "Key is correct": (msg) => msg.key.key == "key0", - "Value is correct": (msg) => - msg.value.firstName == "firstName-0" && msg.value.lastName == "lastName-0", - "Headers are correct": (msg) => msg.headers.hasOwnProperty("mykey") == false, - "Time is past": (msg) => new Date(msg["time"]) < new Date(), - "Offset is correct": (msg) => msg.offset == 0, - "Partition is correct": (msg) => msg.partition == 0, - "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, + check(messages, { + "20 message returned": (msgs) => msgs.length == 20, + "key starts with 'key-' string": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].key, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .key.startsWith("key-"), + "value contains 'firstName-' and 'lastName-' strings": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .firstName.startsWith("firstName-") && + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .lastName.startsWith("lastName-"), }); } diff --git a/scripts/test_sasl_auth.js b/scripts/test_sasl_auth.js index 0ef10f1..fe62e72 100644 --- a/scripts/test_sasl_auth.js +++ b/scripts/test_sasl_auth.js @@ -7,7 +7,15 @@ also uses SASL authentication. */ import { check } from "k6"; -import { Writer, Reader, Connection, SASL_PLAIN, TLS_1_2 } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_JSON, + SASL_PLAIN, + TLS_1_2, +} from "k6/x/kafka"; // import kafka extension export const options = { // This is used for testing purposes. 
For real-world use, you should use your own options: @@ -58,7 +66,7 @@ const tlsConfig = { }; const offset = 0; -// partition and groupID are mutually exclusive +// partition and groupId are mutually exclusive const partition = 0; const numPartitions = 1; const replicationFactor = 1; @@ -73,7 +81,6 @@ const reader = new Reader({ brokers: brokers, topic: topic, partition: partition, - groupID: groupID, offset: offset, sasl: saslConfig, tls: tlsConfig, @@ -83,6 +90,7 @@ const connection = new Connection({ sasl: saslConfig, tls: tlsConfig, }); +const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { connection.createTopic({ @@ -97,29 +105,31 @@ export default function () { for (let index = 0; index < 100; index++) { let messages = [ { - key: JSON.stringify({ - correlationId: "test-id-abc-" + index, + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-abc-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schemaType: SCHEMA_TYPE_JSON, }), }, { - key: JSON.stringify({ - correlationId: "test-id-def-" + index, + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-def-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, }), - value: JSON.stringify({ - name: "xk6-kafka", - version: "0.2.1", - author: "Mostafa Moradian", - description: - "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schemaType: SCHEMA_TYPE_JSON, }), }, ]; @@ -131,6 +141,13 @@ export default function () { let messages = reader.consume({ limit: 10 }); check(messages, { "10 messages returned": (msgs) => msgs.length == 10, + "key is correct": (msgs) => + schemaRegistry + .deserialize({ data: msgs[0].key, schemaType: SCHEMA_TYPE_JSON }) + .correlationId.startsWith("test-id-"), + "value is correct": (msgs) => + schemaRegistry.deserialize({ data: msgs[0].value, schemaType: SCHEMA_TYPE_JSON }) + .name == "xk6-kafka", }); } diff --git a/scripts/test_string.js b/scripts/test_string.js new file mode 100644 index 0000000..46e82d8 --- /dev/null +++ b/scripts/test_string.js @@ -0,0 +1,117 @@ +/* + +This is a k6 test script that imports the xk6-kafka and +tests Kafka with a 200 JSON messages per iteration. 
+ +*/ + +import { check } from "k6"; +// import * as kafka from "k6/x/kafka"; +import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_STRING } from "k6/x/kafka"; // import kafka extension + +// Prints module-level constants +// console.log(kafka); + +const brokers = ["localhost:9092"]; +const topic = "xk6_kafka_json_topic"; + +const writer = new Writer({ + brokers: brokers, + topic: topic, + autoCreateTopic: true, +}); +const reader = new Reader({ + brokers: brokers, + topic: topic, +}); +const connection = new Connection({ + address: brokers[0], +}); +const schemaRegistry = new SchemaRegistry(); + +if (__VU == 0) { + connection.createTopic({ topic: topic }); +} + +export const options = { + thresholds: { + // Base thresholds to see if the writer or reader is working + "kafka.writer.error.count": ["count == 0"], + "kafka.reader.error.count": ["count == 0"], + }, +}; + +export default function () { + for (let index = 0; index < 100; index++) { + let messages = [ + { + key: schemaRegistry.serialize({ + data: "test-key-string", + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: "test-value-string", + schemaType: SCHEMA_TYPE_STRING, + }), + headers: { + mykey: "myvalue", + }, + offset: index, + partition: 0, + time: new Date(), // Will be converted to timestamp automatically + }, + { + key: schemaRegistry.serialize({ + data: "test-key-string", + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: "test-value-string", + schemaType: SCHEMA_TYPE_STRING, + }), + headers: { + mykey: "myvalue", + }, + }, + ]; + + writer.produce({ messages: messages }); + } + + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); + + check(messages, { + "10 messages are received": (messages) => messages.length == 10, + }); + + check(messages[0], { + "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, + "Key is a string and is correct": (msg) => + schemaRegistry.deserialize({ data: msg.key, schemaType: SCHEMA_TYPE_STRING }) == + "test-key-string", + "Value is a string and is correct": (msg) => + typeof schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_STRING, + }) == "string" && + schemaRegistry.deserialize({ data: msg.value, schemaType: SCHEMA_TYPE_STRING }) == + "test-value-string", + "Header equals {'mykey': 'myvalue'}": (msg) => + "mykey" in msg.headers && String.fromCharCode(...msg.headers["mykey"]) == "myvalue", + "Time is past": (msg) => new Date(msg["time"]) < new Date(), + "Partition is zero": (msg) => msg["partition"] == 0, + "Offset is gte zero": (msg) => msg["offset"] >= 0, + "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, + }); +} + +export function teardown(data) { + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); +} diff --git a/serde.go b/serde.go deleted file mode 100644 index 07ad1f8..0000000 --- a/serde.go +++ /dev/null @@ -1,83 +0,0 @@ -package kafka - -import ( - "reflect" - - "github.com/riferrei/srclient" -) - -type ( - Serializer func( - configuration Configuration, topic string, data interface{}, - element Element, schema string, version int) ([]byte, *Xk6KafkaError) - Deserializer func(configuration Configuration, topic string, data []byte, - element Element, schema string, version int) (interface{}, *Xk6KafkaError) -) - -const ( - // TODO: move these to their own package. 
- ProtobufSerializer string = "io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer" - ProtobufDeserializer string = "io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer" -) - -// useSerializer returns true if the serializer should be used based on the given configuration. -func useSerializer(configuration Configuration, element Element) bool { - if reflect.ValueOf(configuration).IsZero() || reflect.ValueOf(configuration.Producer).IsZero() { - return false - } - - if (element == Key && configuration.Producer.KeySerializer != "") || - (element == Value && configuration.Producer.ValueSerializer != "") { - return true - } - - return false -} - -// useDeserializer returns true if the deserializer should be used based on the given configuration. -func useDeserializer(configuration Configuration, element Element) bool { - if reflect.ValueOf(configuration).IsZero() || reflect.ValueOf(configuration.Consumer).IsZero() { - return false - } - - if (element == Key && configuration.Consumer.KeyDeserializer != "") || - (element == Value && configuration.Consumer.ValueDeserializer != "") { - return true - } - - return false -} - -type SerdeType[T Serializer | Deserializer] struct { - Function T - Class string - SchemaType srclient.SchemaType - WireFormatted bool -} - -// NewSerdes constructs a new SerdeType. -func NewSerdes[T Serializer | Deserializer]( - function T, class string, schemaType srclient.SchemaType, wireFormatted bool, -) *SerdeType[T] { - return &SerdeType[T]{function, class, schemaType, wireFormatted} -} - -// GetSerializer returns the serializer if the given type is Serializer. -func (s *SerdeType[Serializer]) GetSerializer() Serializer { - return s.Function -} - -// GetDeserializer returns the deserializer if the given type is Deserializer. -func (s *SerdeType[Deserializer]) GetDeserializer() Deserializer { - return s.Function -} - -// GetSchemaType returns the schema type. -func (s *SerdeType[T]) GetSchemaType() srclient.SchemaType { - return s.SchemaType -} - -// IsWireFormatted returns true if the schema is wire formatted. -func (s *SerdeType[T]) IsWireFormatted() bool { - return s.WireFormatted -} diff --git a/serde_registry.go b/serde_registry.go deleted file mode 100644 index 103ab36..0000000 --- a/serde_registry.go +++ /dev/null @@ -1,33 +0,0 @@ -package kafka - -import "github.com/riferrei/srclient" - -type Serde[T Serializer | Deserializer] struct { - Registry map[string]*SerdeType[T] -} - -// NewSerializersRegistry creates a new instance of the Serializer registry. -func NewSerializersRegistry() *Serde[Serializer] { - return &Serde[Serializer]{ - Registry: map[string]*SerdeType[Serializer]{ - StringSerializer: NewSerdes[Serializer](SerializeString, StringSerializer, String, false), - ByteArraySerializer: NewSerdes[Serializer](SerializeByteArray, ByteArraySerializer, ByteArray, false), - AvroSerializer: NewSerdes[Serializer](SerializeAvro, AvroSerializer, srclient.Avro, true), - ProtobufSerializer: NewSerdes[Serializer](nil, ProtobufSerializer, srclient.Protobuf, true), - JSONSchemaSerializer: NewSerdes[Serializer](SerializeJSON, JSONSchemaSerializer, srclient.Json, true), - }, - } -} - -// NewDeserializersRegistry creates a new instance of the Deserializer registry. 
-func NewDeserializersRegistry() *Serde[Deserializer] { - return &Serde[Deserializer]{ - Registry: map[string]*SerdeType[Deserializer]{ - StringDeserializer: NewSerdes[Deserializer](DeserializeString, StringDeserializer, String, false), - ByteArrayDeserializer: NewSerdes[Deserializer](DeserializeByteArray, ByteArrayDeserializer, ByteArray, false), - AvroDeserializer: NewSerdes[Deserializer](DeserializeAvro, AvroDeserializer, srclient.Avro, true), - ProtobufDeserializer: NewSerdes[Deserializer](nil, ProtobufDeserializer, srclient.Protobuf, true), - JSONSchemaDeserializer: NewSerdes[Deserializer](DeserializeJSON, JSONSchemaDeserializer, srclient.Json, true), - }, - } -} diff --git a/serde_registry_test.go b/serde_registry_test.go deleted file mode 100644 index 1e9852e..0000000 --- a/serde_registry_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package kafka - -import ( - "reflect" - "runtime" - "testing" - - "github.com/stretchr/testify/assert" -) - -// getFuncName returns the name of the function as string. -func getFuncName(function interface{}) string { - return runtime.FuncForPC(reflect.ValueOf(function).Pointer()).Name() -} - -// TestSerializersRegistry tests the serdes registry. -func TestSerdesRegistry(t *testing.T) { - serializersRegistry := NewSerializersRegistry() - deserializersRegistry := NewDeserializersRegistry() - - assert.Equal(t, 5, len(serializersRegistry.Registry)) - assert.Equal(t, String, serializersRegistry.Registry[StringSerializer].GetSchemaType()) - assert.Equal(t, "github.com/mostafa/xk6-kafka.SerializeString", - getFuncName(serializersRegistry.Registry[StringSerializer].GetSerializer())) - assert.False(t, serializersRegistry.Registry[StringSerializer].IsWireFormatted()) - - assert.Equal(t, 5, len(deserializersRegistry.Registry)) - assert.Equal(t, "github.com/mostafa/xk6-kafka.DeserializeString", - getFuncName(deserializersRegistry.Registry[StringDeserializer].GetDeserializer())) - assert.False(t, deserializersRegistry.Registry[StringDeserializer].IsWireFormatted()) -} diff --git a/serde_test.go b/serde_test.go deleted file mode 100644 index 2bca52f..0000000 --- a/serde_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package kafka - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -// TestUseSerializer tests whether a serializer should be used based on the configuration. -func TestUseSerializer(t *testing.T) { - config := Configuration{ - Producer: ProducerConfiguration{ - ValueSerializer: AvroSerializer, - KeySerializer: AvroSerializer, - }, - } - - assert.True(t, useSerializer(config, Key)) - assert.True(t, useSerializer(config, Value)) -} - -type UseSerializerDeserializerTest struct { - config Configuration - element Element - result bool -} - -// TestUseSerializerDeserializerFails tests whether a serializer or deserializer should be used -// based on the configuration and fails if the configuration is invalid. 
-func TestUseSerializerDeserializerFails(t *testing.T) { - params := []UseSerializerDeserializerTest{ - {config: Configuration{}, element: Key, result: false}, - {config: Configuration{}, element: Value, result: false}, - {config: Configuration{Producer: ProducerConfiguration{}}, element: Key, result: false}, - {config: Configuration{Producer: ProducerConfiguration{}}, element: Value, result: false}, - {config: Configuration{Consumer: ConsumerConfiguration{}}, element: Key, result: false}, - {config: Configuration{Consumer: ConsumerConfiguration{}}, element: Value, result: false}, - {config: Configuration{SchemaRegistry: SchemaRegistryConfiguration{}}, element: Key, result: false}, - { - config: Configuration{SchemaRegistry: SchemaRegistryConfiguration{}}, element: Value, - result: false, - }, - { - config: Configuration{Producer: ProducerConfiguration{ValueSerializer: "unknown codec"}}, - element: Key, result: false, - }, - { - config: Configuration{Producer: ProducerConfiguration{KeySerializer: "unknown codec"}}, - element: Value, result: false, - }, - } - - for _, param := range params { - assert.Equal(t, param.result, useSerializer(param.config, param.element)) - assert.Equal(t, param.result, useDeserializer(param.config, param.element)) - } -} - -// TestUseDeserializer tests whether a deserializer should be used based on the configuration. -func TestUseDeserializer(t *testing.T) { - config := Configuration{ - Consumer: ConsumerConfiguration{ - ValueDeserializer: AvroDeserializer, - KeyDeserializer: AvroDeserializer, - }, - } - - assert.True(t, useDeserializer(config, Key)) - assert.True(t, useDeserializer(config, Value)) -} diff --git a/serdes.go b/serdes.go new file mode 100644 index 0000000..90e387d --- /dev/null +++ b/serdes.go @@ -0,0 +1,129 @@ +package kafka + +import ( + "github.com/riferrei/srclient" + "go.k6.io/k6/js/common" +) + +type Container struct { + Data interface{} `json:"data"` + Schema *Schema `json:"schema"` + SchemaType string `json:"schemaType"` +} + +// serialize checks whether the incoming data has a schema or not. +// If the data has a schema, it encodes the data into Avro, JSONSchema or Protocol Buffer. +// Then it adds the wire format prefix and returns the binary to be used in key or value. +// If no schema is passed, it treats the data as a byte array, a string or a JSON object without +// a JSONSchema. Then, it returns the data as a byte array. +// nolint: funlen +func (k *Kafka) serialize(container *Container) []byte { + if container.Schema == nil { + // we are dealing with a byte array, a string or a JSON object without a JSONSchema + serde, err := GetSerdes(container.SchemaType) + if err != nil { + common.Throw(k.vu.Runtime(), err) + } + + data, err := serde.Serialize(container.Data, nil) + if err != nil { + common.Throw(k.vu.Runtime(), err) + } + return data + } else { + // we are dealing with binary data to be encoded with Avro, JSONSchema or Protocol Buffer + + switch container.SchemaType { + case srclient.Avro.String(), srclient.Json.String(): + serde, err := GetSerdes(container.SchemaType) + if err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } + + bytesData, err := serde.Serialize(container.Data, container.Schema) + if err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } + + return k.encodeWireFormat(bytesData, container.Schema.ID) + default: + common.Throw(k.vu.Runtime(), ErrUnsupportedOperation) + return nil + } + } +} + +// deserialize checks whether the incoming data has a schema or not. 
+// If the data has a schema, it removes the wire format prefix and decodes the data into JSON +// using Avro, JSONSchema or Protocol Buffer schemas. It returns the decoded data as JSON object. +// If no schema is passed, it treats the data as a byte array, a string or a JSON object without +// a JSONSchema. Then, it returns the data based on how it can decode it. +// nolint: funlen +func (k *Kafka) deserialize(container *Container) interface{} { + if container.Schema == nil { + // we are dealing with a byte array, a string or a JSON object without a JSONSchema + serde, err := GetSerdes(container.SchemaType) + if err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } + + if isBase64Encoded(container.Data.(string)) { + if data, err := base64ToBytes(container.Data.(string)); err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } else { + if result, err := serde.Deserialize(data, nil); err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } else { + return result + } + } + } + + return container.Data.([]byte) + } else { + // we are dealing with binary data to be encoded with Avro, JSONSchema or Protocol Buffer + runtime := k.vu.Runtime() + + var jsonBytes []byte + // Decode the data into JSON bytes from base64-encoded data + if isBase64Encoded(container.Data.(string)) { + if data, err := base64ToBytes(container.Data.(string)); err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } else { + // Remove wire format prefix + jsonBytes = k.decodeWireFormat(data) + } + } + + switch container.SchemaType { + case srclient.Avro.String(), srclient.Json.String(): + serde, err := GetSerdes(container.SchemaType) + if err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } + + deserialized, err := serde.Deserialize(jsonBytes, container.Schema) + if err != nil { + common.Throw(k.vu.Runtime(), err) + return nil + } + + if jsonObj, ok := deserialized.(map[string]interface{}); ok { + return jsonObj + } else { + common.Throw(k.vu.Runtime(), ErrInvalidDataType) + return nil + } + default: + common.Throw(runtime, ErrUnsupportedOperation) + return nil + } + } +} diff --git a/avro_test.go b/serdes_test.go similarity index 78% rename from avro_test.go rename to serdes_test.go index 6c039e3..fa2a3e3 100644 --- a/avro_test.go +++ b/serdes_test.go @@ -5,45 +5,122 @@ import ( "github.com/riferrei/srclient" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -var ( - avroConfig = Configuration{ - Producer: ProducerConfiguration{ - ValueSerializer: AvroSerializer, - KeySerializer: AvroSerializer, +// TestSerdes tests serialization and deserialization of messages with different schemas. +func TestSerdes(t *testing.T) { + test := getTestModuleInstance(t) + + test.createTopic("test-topic") + writer := test.newWriter("test-topic") + defer writer.Close() + reader := test.newReader("test-topic") + defer reader.Close() + + // Switch to VU code. 
+ require.NoError(t, test.moveToVUCode()) + + containers := []*Container{ + { + Data: "string", + SchemaType: String.String(), }, - Consumer: ConsumerConfiguration{ - ValueDeserializer: AvroDeserializer, - KeyDeserializer: AvroDeserializer, + { + Data: []byte("byte array"), + SchemaType: Bytes.String(), + }, + { + Data: []byte{62, 79, 74, 65, 20, 61, 72, 72, 61, 79}, // byte array + SchemaType: Bytes.String(), + }, + { + Data: map[string]interface{}{ + "string": "some-string", + "number": 1, + "float": 1.1, + "bool": true, + "null": nil, + "array": []interface{}{1, 2, 3}, + "object": map[string]interface{}{ + "string": "string-value", + "number": 1, + "float": 1.1, + "bool": true, + "null": nil, + "array": []interface{}{1, 2, 3}, + }, + }, + SchemaType: srclient.Json.String(), + }, + { + Data: map[string]interface{}{"key": "value"}, + Schema: &Schema{ + ID: 1, + Schema: `{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "key": {"type": "string"} + }, + "required": ["key"] + }`, + Version: 1, + Subject: "json-schema", + }, + SchemaType: srclient.Json.String(), + }, + { + Data: map[string]interface{}{"key": "value"}, + Schema: &Schema{ + ID: 2, + Schema: `{ + "type":"record", + "name":"Schema", + "namespace":"io.confluent.kafka.avro", + "fields":[{"name":"key","type":"string"}]}`, + Version: 1, + Subject: "avro-schema", + }, + SchemaType: srclient.Avro.String(), }, } - avroSchemaForAvroTests = `{ - "type":"record", - "name":"Schema", - "namespace":"io.confluent.kafka.avro", - "fields":[{"name":"field","type":"string"}]}` - data = `{"field":"value"}` -) -// TestSerializeDeserializeAvro tests serialization and deserialization of Avro messages. -func TestSerializeDeserializeAvro(t *testing.T) { - // Test with a schema registry, which fails and manually (de)serializes the data. - for _, element := range []Element{Key, Value} { - // Serialize the key or value - serialized, err := SerializeAvro(avroConfig, "topic", `{"field":"value"}`, element, avroSchemaForAvroTests, 0) - assert.Nil(t, err) + for _, container := range containers { + // Test with a schema registry, which fails and manually (de)serializes the data. + serialized := test.module.Kafka.serialize(container) assert.NotNil(t, serialized) // 4 bytes for magic byte, 1 byte for schema ID, and the rest is the data. - assert.GreaterOrEqual(t, len(serialized), 10) - - // Deserialize the key or value (removes the magic bytes). - deserialized, err := DeserializeAvro(avroConfig, "", serialized, element, avroSchemaForAvroTests, 0) - assert.Nil(t, err) - assert.Equal(t, map[string]interface{}{"field": "value"}, deserialized) + assert.GreaterOrEqual(t, len(serialized), 5) + + // Send data to Kafka. + test.module.Kafka.produce(writer, &ProduceConfig{ + Messages: []Message{ + { + Value: serialized, + }, + }, + }) + + // Read data from Kafka. + messages := test.module.Kafka.consume(reader, &ConsumeConfig{ + Limit: 1, + }) + assert.Equal(t, 1, len(messages)) + + if value, ok := messages[0]["value"].(interface{}); ok { + // Deserialize the key or value (removes the magic bytes). + deserialized := test.module.Kafka.deserialize(&Container{ + Data: value, + Schema: container.Schema, + SchemaType: container.SchemaType, + }) + assert.Equal(t, container.Data, deserialized) + } } } +/* // TestSerializeDeserializeAvroFailsOnSchemaError tests serialization and // deserialization of Avro messages and fails on schema error. 
func TestSerializeDeserializeAvroFailsOnSchemaError(t *testing.T) { @@ -99,13 +176,13 @@ func TestSerializeDeserializeAvroFailsOnEncodeDecodeError(t *testing.T) { assert.Nil(t, serialized) assert.Error(t, err.Unwrap()) assert.Equal(t, "Failed to encode data into Avro", err.Message) - assert.Equal(t, failedEncodeToAvro, err.Code) + assert.Equal(t, failedToEncode, err.Code) deserialized, err := DeserializeAvro(avroConfig, "topic", []byte{0, 1, 2, 3, 5}, element, avroSchemaForAvroTests, 0) assert.Nil(t, deserialized) assert.Error(t, err.Unwrap()) assert.Equal(t, "Failed to decode data from Avro", err.Message) - assert.Equal(t, failedDecodeAvroFromBinary, err.Code) + assert.Equal(t, failedToDecodeFromBinary, err.Code) } } @@ -133,9 +210,11 @@ func TestAvroSerializeTopicNameStrategy(t *testing.T) { assert.NotNil(t, serialized) expectedSubject := topic + "-value" - srClient := SchemaRegistryClientWithConfiguration(config.SchemaRegistry) - schemaResult, err := GetSchema(srClient, expectedSubject, schema, srclient.Avro, 0) - assert.Nil(t, err) + srClient := schemaRegistryClient(&config.SchemaRegistry) + schemaResult := getSchema(srClient, &SchemaConfig{ + Subject: expectedSubject, + Version: 0, + }) assert.NotNil(t, schemaResult) } @@ -163,9 +242,11 @@ func TestAvroSerializeTopicNameStrategyIsDefaultStrategy(t *testing.T) { assert.NotNil(t, serialized) expectedSubject := topic + "-value" - srClient := SchemaRegistryClientWithConfiguration(config.SchemaRegistry) - schemaResult, err := GetSchema(srClient, expectedSubject, schema, srclient.Avro, 0) - assert.Nil(t, err) + srClient := schemaRegistryClient(&config.SchemaRegistry) + schemaResult := getSchema(srClient, &SchemaConfig{ + Subject: expectedSubject, + Version: 0, + }) assert.NotNil(t, schemaResult) } @@ -193,9 +274,11 @@ func TestAvroSerializeTopicRecordNameStrategy(t *testing.T) { assert.NotNil(t, serialized) expectedSubject := topic + "-io.confluent.kafka.avro.TestAvroSerializeTopicRecordNameStrategy" - srClient := SchemaRegistryClientWithConfiguration(config.SchemaRegistry) - schemaResult, err := GetSchema(srClient, expectedSubject, schema, srclient.Avro, 0) - assert.Nil(t, err) + srClient := schemaRegistryClient(&config.SchemaRegistry) + schemaResult := getSchema(srClient, &SchemaConfig{ + Subject: expectedSubject, + Version: 0, + }) assert.NotNil(t, schemaResult) } @@ -223,10 +306,12 @@ func TestAvroSerializeRecordNameStrategy(t *testing.T) { assert.NotNil(t, serialized) expectedSubject := "io.confluent.kafka.avro.TestAvroSerializeRecordNameStrategy" - srClient := SchemaRegistryClientWithConfiguration(config.SchemaRegistry) - resultSchema, err := GetSchema(srClient, expectedSubject, avroSchemaForAvroTests, srclient.Avro, 0) - assert.Nil(t, err) - assert.NotNil(t, resultSchema) + srClient := schemaRegistryClient(&config.SchemaRegistry) + schemaResult := getSchema(srClient, &SchemaConfig{ + Subject: expectedSubject, + Version: 0, + }) + assert.NotNil(t, schemaResult) } // TestAvroDeserializeUsingMagicPrefix tests deserialization of Avro messages @@ -400,3 +485,4 @@ func TestAvroDeserializeUsingSubjectNameStrategyTopicName(t *testing.T) { } assert.Nil(t, dErr) } +*/ diff --git a/string.go b/string.go index 92afcd0..de37b76 100644 --- a/string.go +++ b/string.go @@ -1,21 +1,17 @@ package kafka -import ( - "github.com/riferrei/srclient" -) +import "github.com/riferrei/srclient" + +type StringSerde struct { + Serdes +} const ( String srclient.SchemaType = "STRING" - - StringSerializer string = 
"org.apache.kafka.common.serialization.StringSerializer" - StringDeserializer string = "org.apache.kafka.common.serialization.StringDeserializer" ) -// SerializeString serializes a string to bytes. -func SerializeString( - configuration Configuration, topic string, data interface{}, - element Element, schema string, version int, -) ([]byte, *Xk6KafkaError) { +// Serialize serializes a string to bytes. +func (*StringSerde) Serialize(data interface{}, schema *Schema) ([]byte, error) { switch data := data.(type) { case string: return []byte(data), nil @@ -24,10 +20,7 @@ func SerializeString( } } -// DeserializeString deserializes a string from bytes. -func DeserializeString( - configuration Configuration, topic string, data []byte, - element Element, schema string, version int, -) (interface{}, *Xk6KafkaError) { +// Deserialize deserializes a string from bytes. +func (*StringSerde) Deserialize(data []byte, schema *Schema) (interface{}, error) { return string(data), nil } diff --git a/string_test.go b/string_test.go deleted file mode 100644 index 1af91ad..0000000 --- a/string_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package kafka - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -// TestSerializeString tests the serialization of a string. -func TestSerializeString(t *testing.T) { - result, err := SerializeString(Configuration{}, "", originalData, "", "", 0) - assert.Nil(t, err) - assert.Equal(t, []byte(originalData), result) -} - -// TestSerializeStringFails tests the serialization of a string and -// fails if the given type is not string. -func TestSerializeStringFails(t *testing.T) { - originalData := 123 - _, err := SerializeString(Configuration{}, "", originalData, "", "", 0) - assert.EqualErrorf( - t, err, "Invalid data type provided for serializer/deserializer", - "Expected error message is correct") -} - -// TestDeserializeString tests the deserialization of a string. -func TestDeserializeString(t *testing.T) { - result, err := DeserializeString(Configuration{}, "", []byte(originalData), "", "", 0) - assert.Equal(t, originalData, result) - assert.Nil(t, err) -} diff --git a/topic_test.go b/topic_test.go index c2d356d..9869218 100644 --- a/topic_test.go +++ b/topic_test.go @@ -10,7 +10,7 @@ import ( // TestGetKafkaControllerConnection tests whether a connection can be established to a kafka broker. func TestGetKafkaControllerConnection(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) assert.NotPanics(t, func() { connection := test.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ Address: "localhost:9092", @@ -23,7 +23,7 @@ func TestGetKafkaControllerConnection(t *testing.T) { // TestGetKafkaControllerConnectionFails tests whether a connection can be established // to a kafka broker and fails if the given broker is not reachable. func TestGetKafkaControllerConnectionFails(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) assert.Panics(t, func() { connection := test.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ @@ -35,25 +35,26 @@ func TestGetKafkaControllerConnectionFails(t *testing.T) { // TestTopics tests various functions to create, delete and list topics. 
func TestTopics(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) require.NoError(t, test.moveToVUCode()) assert.NotPanics(t, func() { + topic := "test-topics" connection := test.module.Kafka.getKafkaControllerConnection(&ConnectionConfig{ Address: "localhost:9092", }) test.module.Kafka.createTopic(connection, &kafkago.TopicConfig{ - Topic: "test-topic", + Topic: topic, }) topics := test.module.Kafka.listTopics(connection) - assert.Contains(t, topics, "test-topic") + assert.Contains(t, topics, topic) - test.module.Kafka.deleteTopic(connection, "test-topic") + test.module.Kafka.deleteTopic(connection, topic) topics = test.module.Kafka.listTopics(connection) - assert.NotContains(t, topics, "test-topic") + assert.NotContains(t, topics, topic) connection.Close() }) diff --git a/utils.go b/utils.go new file mode 100644 index 0000000..aac6193 --- /dev/null +++ b/utils.go @@ -0,0 +1,58 @@ +package kafka + +import ( + "encoding/base64" + "encoding/json" + "unicode/utf8" + + "github.com/dop251/goja" +) + +// freeze disallows resetting or changing the properties of the object. +func freeze(o *goja.Object) { + for _, key := range o.Keys() { + if err := o.DefineDataProperty( + key, o.Get(key), goja.FLAG_FALSE, goja.FLAG_FALSE, goja.FLAG_TRUE); err != nil { + panic(err) + } + } +} + +// base64ToBytes converts the base64 encoded data to bytes. +func base64ToBytes(data string) ([]byte, error) { + return base64.StdEncoding.DecodeString(data) +} + +// isBase64Encoded checks whether the data is base64 encoded. +func isBase64Encoded(data string) bool { + _, err := base64ToBytes(data) + return err == nil +} + +// toJSONBytes encodes a map into JSON bytes +func toJSONBytes(data interface{}) ([]byte, error) { + if data, ok := data.(map[string]interface{}); ok { + if jsonData, err := json.Marshal(data); err == nil { + return jsonData, nil + } else { + return nil, err + } + } else { + return nil, ErrInvalidDataType + } +} + +func isJSONString(data []byte) bool { + if json.Valid(data) { + var js map[string]interface{} + if err := json.Unmarshal(data, &js); err != nil { + return false + } + return true + } + return false +} + +func isValidUTF8String(data []byte) bool { + return utf8.Valid(data) +} diff --git a/producer.go b/writer.go similarity index 79% rename from producer.go rename to writer.go index 73838b2..6eec803 100644 --- a/producer.go +++ b/writer.go @@ -33,22 +33,8 @@ var ( // Balancers is a map of balancer names to their respective balancers. Balancers map[string]kafkago.Balancer - - // DefaultSerializer is string serializer. - DefaultSerializer = StringSerializer ) -// freeze disallows resetting or changing the properties of the object. 
-func freeze(o *goja.Object) { - for _, key := range o.Keys() { - err := o.DefineDataProperty( - key, o.Get(key), goja.FLAG_FALSE, goja.FLAG_FALSE, goja.FLAG_TRUE) - if err != nil { - panic(err) - } - } -} - type WriterConfig struct { AutoCreateTopic bool `json:"autoCreateTopic"` ConnectLogger bool `json:"connectLogger"` @@ -74,8 +60,8 @@ type Message struct { Partition int `json:"partition"` Offset int64 `json:"offset"` HighWaterMark int64 `json:"highWaterMark"` - Key interface{} `json:"key"` - Value interface{} `json:"value"` + Key []byte `json:"key"` + Value []byte `json:"value"` Headers map[string]interface{} `json:"headers"` // If not set at the creation, Time will be automatically set when @@ -84,10 +70,7 @@ type Message struct { } type ProduceConfig struct { - Messages []Message `json:"messages"` - Config Configuration `json:"config"` - KeySchema string `json:"keySchema"` - ValueSchema string `json:"valueSchema"` + Messages []Message `json:"messages"` } // writerClass is a wrapper around kafkago.writer and acts as a JS constructor @@ -209,14 +192,6 @@ func (k *Kafka) writer(writerConfig *WriterConfig) *kafkago.Writer { return writer } -// getSerializer returns the serializer for the given schema. -func (k *Kafka) getSerializer(schema string) Serializer { - if ser, ok := k.serializerRegistry.Registry[schema]; ok { - return ser.GetSerializer() - } - return SerializeString -} - // produce sends messages to Kafka with the given configuration. // nolint: funlen func (k *Kafka) produce(writer *kafkago.Writer, produceConfig *ProduceConfig) { @@ -232,16 +207,6 @@ func (k *Kafka) produce(writer *kafkago.Writer, produceConfig *ProduceConfig) { common.Throw(k.vu.Runtime(), err) } - var err error - if err := ValidateConfiguration(produceConfig.Config); err != nil { - produceConfig.Config.Producer.KeySerializer = DefaultSerializer - produceConfig.Config.Producer.ValueSerializer = DefaultSerializer - logger.WithField("error", err).Warn("Using default string serializers") - } - - keySerializer := k.getSerializer(produceConfig.Config.Producer.KeySerializer) - valueSerializer := k.getSerializer(produceConfig.Config.Producer.ValueSerializer) - kafkaMessages := make([]kafkago.Message, len(produceConfig.Messages)) for index, message := range produceConfig.Messages { kafkaMessages[index] = kafkago.Message{ @@ -261,27 +226,15 @@ func (k *Kafka) produce(writer *kafkago.Writer, produceConfig *ProduceConfig) { } // If a key was provided, add it to the message. Keys are optional. - if message.Key != "" { - keyData, err := keySerializer( - produceConfig.Config, writer.Stats().Topic, - message.Key, Key, produceConfig.KeySchema, 0) - if err != nil && err.Unwrap() != nil { - logger.WithField("error", err).Error(err) - } - - kafkaMessages[index].Key = keyData + if message.Key != nil { + kafkaMessages[index].Key = message.Key } // Then add the value to the message. - valueData, err := valueSerializer( - produceConfig.Config, writer.Stats().Topic, - message.Value, Value, produceConfig.ValueSchema, 0) - if err != nil && err.Unwrap() != nil { - logger.WithField("error", err).Error(err) + if message.Value != nil { + kafkaMessages[index].Value = message.Value } - kafkaMessages[index].Value = valueData - // If headers are provided, add them to the message. 
if len(message.Headers) > 0 { for key, value := range message.Headers { @@ -297,19 +250,10 @@ func (k *Kafka) produce(writer *kafkago.Writer, produceConfig *ProduceConfig) { k.reportWriterStats(writer.Stats()) - if originalErr != nil { - if errors.Is(originalErr, k.vu.Context().Err()) { - logger.WithField("error", k.vu.Context().Err()).Error(k.vu.Context().Err()) - common.Throw(k.vu.Runtime(), - NewXk6KafkaError(contextCancelled, "Context cancelled.", originalErr)) - } else { - // TODO: fix this - // Ignore stats reporting errors here, because we can't return twice, - // and there is no way to wrap the error in another one. - logger.WithField("error", originalErr).Error(originalErr) - common.Throw(k.vu.Runtime(), - NewXk6KafkaError(failedWriteMessage, "Failed to write messages.", err)) - } + if originalErr != nil && errors.Is(originalErr, k.vu.Context().Err()) { + logger.WithField("error", k.vu.Context().Err()).Error(k.vu.Context().Err()) + common.Throw(k.vu.Runtime(), + NewXk6KafkaError(contextCancelled, "Context cancelled.", originalErr)) } } diff --git a/producer_test.go b/writer_test.go similarity index 75% rename from producer_test.go rename to writer_test.go index 1d200a6..ae5f69f 100644 --- a/producer_test.go +++ b/writer_test.go @@ -1,11 +1,9 @@ package kafka import ( - "encoding/json" "testing" "time" - kafkago "github.com/segmentio/kafka-go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -13,13 +11,13 @@ import ( // TestProduce tests the produce function. // nolint: funlen func TestProduce(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) + assert.True(t, test.topicExists("test-topic")) assert.NotPanics(t, func() { writer := test.module.Kafka.writer(&WriterConfig{ - Brokers: []string{"localhost:9092"}, - Topic: "test-topic", - AutoCreateTopic: true, + Brokers: []string{"localhost:9092"}, + Topic: "test-topic", }) assert.NotNil(t, writer) defer writer.Close() @@ -29,12 +27,12 @@ func TestProduce(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Key: "key1", - Value: "value1", + Key: test.module.Kafka.serialize(&Container{Data: "key1"}), + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), }, { - Key: "key2", - Value: "value2", + Key: test.module.Kafka.serialize(&Container{Data: "key2"}), + Value: test.module.Kafka.serialize(&Container{Data: "value2"}), }, }, }) @@ -47,12 +45,12 @@ func TestProduce(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Key: "key1", - Value: "value1", + Key: test.module.Kafka.serialize(&Container{Data: "key1"}), + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), }, { - Key: "key2", - Value: "value2", + Key: test.module.Kafka.serialize(&Container{Data: "key2"}), + Value: test.module.Kafka.serialize(&Container{Data: "value2"}), }, }, }) @@ -60,7 +58,7 @@ func TestProduce(t *testing.T) { }) // Check if two message were produced. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 2.0, metricsValues[test.module.metrics.WriterWrites.Name]) assert.Equal(t, 2.0, metricsValues[test.module.metrics.WriterMessages.Name]) assert.Equal(t, 64.0, metricsValues[test.module.metrics.WriterBytes.Name]) @@ -81,7 +79,7 @@ func TestProduce(t *testing.T) { // TestProduceWithoutKey tests the produce function without a key. 
func TestProduceWithoutKey(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) assert.NotPanics(t, func() { writer := test.module.Kafka.writer(&WriterConfig{ @@ -90,19 +88,6 @@ func TestProduceWithoutKey(t *testing.T) { assert.NotNil(t, writer) defer writer.Close() - // Create a topic before producing messages, otherwise tests will fail. - assert.NotPanics(t, func() { - connection := test.module.getKafkaControllerConnection(&ConnectionConfig{ - Address: "localhost:9092", - }) - test.module.createTopic(connection, &kafkago.TopicConfig{ - Topic: "test-topic", - NumPartitions: 1, - ReplicationFactor: 1, - }) - connection.Close() - }) - require.NoError(t, test.moveToVUCode()) // Produce two messages in the VU function. @@ -110,13 +95,13 @@ func TestProduceWithoutKey(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Value: "value1", + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), Topic: "test-topic", Offset: 0, Time: time.Now(), }, { - Value: "value2", + Value: test.module.Kafka.serialize(&Container{Data: "value2"}), Topic: "test-topic", }, }, @@ -125,7 +110,7 @@ func TestProduceWithoutKey(t *testing.T) { }) // Check if two message were produced. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 0.0, metricsValues[test.module.metrics.WriterErrors.Name]) // Notice the smaller size because the key is not present (64 -> 56). assert.Equal(t, 56.0, metricsValues[test.module.metrics.WriterBytes.Name]) @@ -135,13 +120,12 @@ func TestProduceWithoutKey(t *testing.T) { // TestProducerContextCancelled tests the produce function with a cancelled context. func TestProducerContextCancelled(t *testing.T) { - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) assert.NotPanics(t, func() { writer := test.module.Kafka.writer(&WriterConfig{ - Brokers: []string{"localhost:9092"}, - Topic: "test-topic", - AutoCreateTopic: true, + Brokers: []string{"localhost:9092"}, + Topic: "test-topic", }) assert.NotNil(t, writer) defer writer.Close() @@ -156,12 +140,12 @@ func TestProducerContextCancelled(t *testing.T) { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Key: "key1", - Value: "value1", + Key: test.module.Kafka.serialize(&Container{Data: "key1"}), + Value: test.module.Kafka.serialize(&Container{Data: "value1"}), }, { - Key: "key2", - Value: "value2", + Key: test.module.Kafka.serialize(&Container{Data: "key2"}), + Value: test.module.Kafka.serialize(&Container{Data: "value2"}), }, }, }) @@ -170,7 +154,7 @@ func TestProducerContextCancelled(t *testing.T) { // Cancelled context is immediately reflected in metrics, because // we need the context object to update the metrics. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 0.0, metricsValues[test.module.metrics.WriterErrors.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.WriterBytes.Name]) assert.Equal(t, 0.0, metricsValues[test.module.metrics.WriterMessages.Name]) @@ -179,30 +163,26 @@ func TestProducerContextCancelled(t *testing.T) { // TestProduceJSON tests the produce function with a JSON value. 
func TestProduceJSON(t *testing.T) { - // TODO: change this once the interfaces accept JSON - - test := GetTestModuleInstance(t) + test := getTestModuleInstance(t) assert.NotPanics(t, func() { writer := test.module.Kafka.writer(&WriterConfig{ - Brokers: []string{"localhost:9092"}, - Topic: "test-topic", - AutoCreateTopic: true, + Brokers: []string{"localhost:9092"}, + Topic: "test-topic", }) assert.NotNil(t, writer) defer writer.Close() require.NoError(t, test.moveToVUCode()) - serialized, jsonErr := json.Marshal(map[string]interface{}{"field": "value"}) - assert.Nil(t, jsonErr) - // Produce a message in the VU function. assert.NotPanics(t, func() { test.module.Kafka.produce(writer, &ProduceConfig{ Messages: []Message{ { - Value: string(serialized), + Value: test.module.Kafka.serialize(&Container{ + Data: map[string]interface{}{"field": "value"}, + }), }, }, }) @@ -210,7 +190,7 @@ func TestProduceJSON(t *testing.T) { }) // Check if one message was produced. - metricsValues := test.GetCounterMetricsValues() + metricsValues := test.getCounterMetricsValues() assert.Equal(t, 0.0, metricsValues[test.module.metrics.WriterErrors.Name]) assert.Equal(t, 39, int(metricsValues[test.module.metrics.WriterBytes.Name])) assert.Equal(t, 1.0, metricsValues[test.module.metrics.WriterMessages.Name])