Declare key/value format for CONSUMER in kafka file
Fixes jlandersen#112

Signed-off-by: azerr <[email protected]>
angelozerr authored and fbricon committed Mar 17, 2021
1 parent d3694f4 commit f576541
Showing 10 changed files with 354 additions and 13 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@ All notable changes to Kafka extension will be documented in this file.
## [0.12.0]
### Added
- Extension API to contribute clusters. See [#123](https://github.com/jlandersen/vscode-kafka/issues/123).
- Declare key/value formats for CONSUMER in kafka file. See [#112](https://github.com/jlandersen/vscode-kafka/issues/112).

### Changed
- Improved the "New topic" wizard: the replication factor is now read from the broker configuration. Input will be skipped if value can't be higher than 1. See [#64](https://github.com/jlandersen/vscode-kafka/issues/64).
18 changes: 18 additions & 0 deletions docs/Consuming.md
@@ -50,9 +50,27 @@ The `CONSUMER` block defines:
* 0,1,2
* 0-2
* 0,2-3
* `key-format` : [deserializer](#Deserializer) to use for the key *[optional]*.
* `value-format` : [deserializer](#Deserializer) to use for the value *[optional]*.

#### Deserializer

The deserializer can be one of the following values (a short example follows the list):

* `none`: no deserializer (ignores content).
* `string`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java); currently only `UTF-8` encoding is supported.
* `double`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.DoubleDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleDeserializer.java).
* `float`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.FloatDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatDeserializer.java).
* `integer`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.IntegerDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerDeserializer.java).
* `long`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.LongDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongDeserializer.java).
* `short`: behaves like the Kafka Java client [org.apache.kafka.common.serialization.ShortDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortDeserializer.java).
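
For example, a `CONSUMER` block that applies these formats could look like the following (the consumer group and topic names are placeholders):

```
CONSUMER my-consumer-group
topic: my-topic
key-format: string
value-format: double
```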

#### Code Lens

A code lens is displayed above each `CONSUMER` line, providing `Start consumer` / `Stop consumer` commands depending on the consumer group status.

#### Completion

Completion snippets can help you quickly bootstrap new `CONSUMER` blocks:

![Consumer snippets](assets/kafka-file-consumer-snippet.png)
22 changes: 22 additions & 0 deletions snippets/consumers.json
@@ -21,5 +21,27 @@
"partitions: ${4|0|}"
],
"description": "A consumer with a partitions filter"
},
"key-format-consumer": {
"prefix": [
"key-format-consumer"
],
"body": [
"CONSUMER ${1:consumer-group-id}",
"topic: ${2:topic_name}",
"key-format: ${3|none,string,double,float,integer,long,short|}"
],
"description": "A consumer with a key format"
},
"value-format-consumer": {
"prefix": [
"value-format-consumer"
],
"body": [
"CONSUMER ${1:consumer-group-id}",
"topic: ${2:topic_name}",
"value-format: ${3|none,string,double,float,integer,long,short|}"
],
"description": "A consumer with a value format"
}
}
42 changes: 34 additions & 8 deletions src/client/consumer.ts
@@ -3,12 +3,15 @@ import { URLSearchParams } from "url";
import * as vscode from "vscode";
import { getWorkspaceSettings, InitialConsumerOffset, ClusterSettings } from "../settings";
import { ConnectionOptions, createKafka } from "./client";
import { deserialize, MessageFormat, SerializationdResult } from "./serialization";

interface ConsumerOptions extends ConnectionOptions {
consumerGroupId: string;
topicId: string;
fromOffset: InitialConsumerOffset | string;
partitions?: number[];
messageKeyFormat?: MessageFormat;
messageValueFormat?: MessageFormat;
}

export interface RecordReceivedEvent {
@@ -18,10 +21,10 @@ export interface RecordReceivedEvent {

export interface ConsumedRecord {
topic: string;
value: string | Buffer | null;
value: string | Buffer | null | SerializationdResult;
offset?: string;
partition?: number;
key?: string | Buffer;
key?: string | Buffer | SerializationdResult;
}

export interface ConsumerChangedStatusEvent {
@@ -58,7 +61,7 @@ export class Consumer implements vscode.Disposable {
public error: any;

constructor(public uri: vscode.Uri, clusterSettings: ClusterSettings) {
const { clusterId, consumerGroupId, topicId, fromOffset, partitions } = extractConsumerInfoUri(uri);
const { clusterId, consumerGroupId, topicId, fromOffset, partitions, messageKeyFormat, messageValueFormat } = extractConsumerInfoUri(uri);
this.clusterId = clusterId;
const cluster = clusterSettings.get(clusterId);

@@ -75,7 +78,9 @@
consumerGroupId: consumerGroupId,
topicId,
fromOffset: fromOffset || settings.consumerOffset,
partitions: parsePartitions(partitions)
partitions: parsePartitions(partitions),
messageKeyFormat,
messageValueFormat
};
}
catch (e) {
@@ -108,6 +113,8 @@

this.consumer.run({
eachMessage: async ({ topic, partition, message }) => {
message.key = deserialize(message.key, this.options.messageKeyFormat);
message.value = deserialize(message.value, this.options.messageValueFormat);
this.onDidReceiveMessageEmitter.fire({
uri: this.uri,
record: { topic: topic, partition: partition, ...message },
@@ -350,18 +357,24 @@ export interface ConsumerInfoUri {
topicId: InitialConsumerOffset | string;
fromOffset?: string;
partitions?: string;
messageKeyFormat?: MessageFormat;
messageValueFormat?: MessageFormat;
}

const TOPIC_QUERY_PARAMETER = 'topic';
const FROM_QUERY_PARAMETER = 'from';
const PARTITIONS_QUERY_PARAMETER = 'partitions';
const KEY_FORMAT_QUERY_PARAMETER = 'key';
const VALUE_FORMAT_QUERY_PARAMETER = 'value';

export function createConsumerUri(info: ConsumerInfoUri): vscode.Uri {
const path = `kafka:${info.clusterId}/${info.consumerGroupId}`;
let query = '';
query = addQueryParameter(query, TOPIC_QUERY_PARAMETER, info.topicId);
query = addQueryParameter(query, FROM_QUERY_PARAMETER, info.fromOffset);
query = addQueryParameter(query, PARTITIONS_QUERY_PARAMETER, info.partitions);
query = addQueryParameter(query, KEY_FORMAT_QUERY_PARAMETER, info.messageKeyFormat);
query = addQueryParameter(query, VALUE_FORMAT_QUERY_PARAMETER, info.messageValueFormat);
return vscode.Uri.parse(path + query);
}

@@ -378,13 +391,26 @@ export function extractConsumerInfoUri(uri: vscode.Uri): ConsumerInfoUri {
const topicId = urlParams.get(TOPIC_QUERY_PARAMETER) || '';
const from = urlParams.get(FROM_QUERY_PARAMETER);
const partitions = urlParams.get(PARTITIONS_QUERY_PARAMETER);
return {
const messageKeyFormat = urlParams.get(KEY_FORMAT_QUERY_PARAMETER);
const messageValueFormat = urlParams.get(VALUE_FORMAT_QUERY_PARAMETER);
const result: ConsumerInfoUri = {
clusterId,
consumerGroupId,
topicId,
fromOffset: from && from.trim().length > 0 ? from : undefined,
partitions: partitions && partitions.trim().length > 0 ? partitions : undefined
topicId
};
if (from && from.trim().length > 0) {
result.fromOffset = from;
}
if (partitions && partitions.trim().length > 0) {
result.partitions = partitions;
}
if (messageKeyFormat && messageKeyFormat.trim().length > 0) {
result.messageKeyFormat = messageKeyFormat as MessageFormat;
}
if (messageValueFormat && messageValueFormat.trim().length > 0) {
result.messageValueFormat = messageValueFormat as MessageFormat;
}
return result;
}

export function parsePartitions(partitions?: string): number[] | undefined {
118 changes: 118 additions & 0 deletions src/client/serialization.ts
@@ -0,0 +1,118 @@
export type MessageFormat = "none" | "string" | "double" | "float" | "integer" | "long" | "short";

export type SerializationdResult = any | Error;

export class SerializationException extends Error { }

// ---------------- Deserializers ----------------

interface Deserializer {
deserialize(data: Buffer): any;
}

const deserializerRegistry: Map<MessageFormat, Deserializer> = new Map();

export function deserialize(data: Buffer | null, format?: MessageFormat): SerializationdResult | null {
if (data === null || !format) {
return data;
}
if (format === "none") {
return '';
}
try {
const deserializer = getDeserializer(format);
if (!deserializer) {
throw new SerializationException(`Cannot find a deserializer for ${format} format.`);
}
return deserializer.deserialize(data);
}
catch (e) {
return e;
}
}

function getDeserializer(format: MessageFormat): Deserializer | undefined {
return deserializerRegistry.get(format);
}

class DoubleDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
if (data.length !== 8) {
throw new SerializationException("Size of data received by DoubleDeserializer is not 8");
}
return data.readDoubleBE(0);
}
}

class FloatDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
if (data.length !== 4) {
throw new SerializationException("Size of data received by FloatDeserializer is not 4");
}
return data.readFloatBE(0);
}
}

class IntegerDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
if (data.length !== 4) {
throw new SerializationException("Size of data received by IntegerDeserializer is not 4");
}
return data.readInt32BE(0);
}
}

class LongDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
if (data.length !== 8) {
throw new SerializationException("Size of data received by LongDeserializer is not 8");
}
return data.readBigInt64BE(0);
}
}

class ShortDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
if (data.length !== 2) {
throw new SerializationException("Size of data received by ShortDeserializer is not 2");
}
return data.readInt16BE(0);
}
}

class StringDeserializer implements Deserializer {

deserialize(data: Buffer | null): any {
if (data === null) {
return null;
}
return data.toString();
}
}

deserializerRegistry.set("double", new DoubleDeserializer());
deserializerRegistry.set("float", new FloatDeserializer());
deserializerRegistry.set("integer", new IntegerDeserializer());
deserializerRegistry.set("long", new LongDeserializer());
deserializerRegistry.set("short", new ShortDeserializer());
deserializerRegistry.set("string", new StringDeserializer());
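
For reference, a minimal usage sketch of this helper (the buffer and values below are illustrative, not taken from the extension's own code):

import { deserialize } from "./serialization";

// An 8-byte big-endian double, as a consumer configured with `value-format: double` would receive.
const raw = Buffer.alloc(8);
raw.writeDoubleBE(123.456, 0);

deserialize(raw, "double"); // => 123.456
deserialize(raw, "none");   // => '' (content is ignored)
deserialize(raw);           // => the buffer is returned unchanged when no format is given
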
17 changes: 16 additions & 1 deletion src/kafka-file/codeLensProvider.ts
@@ -220,6 +220,8 @@ export class KafkaFileCodeLensProvider implements vscode.CodeLensProvider, vscod
let topicId;
let partitions;
let offset = "";
let keyFormat;
let valueFormat;
for (let currentLine = range.start.line; currentLine <= range.end.line; currentLine++) {
const lineText = document.lineAt(currentLine).text;

@@ -242,14 +244,27 @@ export class KafkaFileCodeLensProvider implements vscode.CodeLensProvider, vscod
partitions = lineText.substr("partitions:".length).trim();
continue;
}

if (lineText.startsWith("key-format:")) {
keyFormat = lineText.substr("key-format:".length).trim();
continue;
}

if (lineText.startsWith("value-format:")) {
valueFormat = lineText.substr("value-format:".length).trim();
continue;
}

break;
}
return {
clusterId: selectedClusterId,
consumerGroupId,
topicId,
fromOffset: offset,
partitions
partitions,
messageKeyFormat: keyFormat,
messageValueFormat: valueFormat
} as LaunchConsumerCommand;
}
}
6 changes: 6 additions & 0 deletions src/providers/consumerVirtualTextDocumentProvider.ts
@@ -85,6 +85,12 @@ export class ConsumerVirtualTextDocumentProvider implements vscode.TextDocumentC
if (consumer.options.partitions) {
line += ` - partitions: ${consumer.options.partitions}\n`;
}
if (consumer.options.messageKeyFormat) {
line += ` - key format: ${consumer.options.messageKeyFormat}\n`;
}
if (consumer.options.messageValueFormat) {
line += ` - value format: ${consumer.options.messageValueFormat}\n`;
}
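// Example (illustrative): with `key-format: string` and `value-format: double` set,
// the header written to the consumer view would include lines such as:
//   - key format: string
//   - value format: double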
line += `\n`;
this.updateBuffer(consumer.uri, line);
}
6 changes: 3 additions & 3 deletions src/test/suite/client/consumer.test.ts
@@ -39,21 +39,21 @@ suite("Extract consumer URI Test Suite", () => {
test("Consumer URI simple", () => {
assert.deepStrictEqual(
extractConsumerInfoUri(vscode.Uri.parse(`kafka:cluster-id/group-id?topic=topic-id`)),
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', fromOffset: undefined, partitions: undefined }
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id' }
);
});

test("Consumer URI with offset", () => {
assert.deepStrictEqual(
extractConsumerInfoUri(vscode.Uri.parse(`kafka:cluster-id/group-id?topic=topic-id&from=1`)),
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', fromOffset: '1', partitions: undefined }
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', fromOffset: '1' }
);
});

test("Consumer URI with partitions", () => {
assert.deepStrictEqual(
extractConsumerInfoUri(vscode.Uri.parse(`kafka:cluster-id/group-id?topic=topic-id&partitions=0-5`)),
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', fromOffset: undefined, partitions: '0-5' }
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', partitions: '0-5' }
);
});
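
// Illustrative sketch of an additional case covering the new key/value format parameters
// (cluster, group and topic ids are placeholders):
test("Consumer URI with key and value formats", () => {
assert.deepStrictEqual(
extractConsumerInfoUri(vscode.Uri.parse(`kafka:cluster-id/group-id?topic=topic-id&key=string&value=double`)),
{ clusterId: 'cluster-id', consumerGroupId: 'group-id', topicId: 'topic-id', messageKeyFormat: 'string', messageValueFormat: 'double' }
);
});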
