
Completion support for kafka file
Fixes #146

Signed-off-by: azerr <[email protected]>
angelozerr committed Apr 6, 2021
1 parent 0d5867e commit e192dfa
Showing 12 changed files with 1,508 additions and 61 deletions.
4 changes: 2 additions & 2 deletions snippets/producers.json
@@ -40,7 +40,7 @@
"PRODUCER ${1:key-formatted-message}",
"topic: ${2:topic_name}",
"key: ${3:mykeyq}",
"key-format: ${3|none,double,float,integer,long,short|}",
"key-format: ${3|string,double,float,integer,long,short|}",
"${4:{{random.words}}}",
"",
"###",
@@ -56,7 +56,7 @@
"PRODUCER ${1:formatted-message}",
"topic: ${2:topic_name}",
"key: ${3:mykeyq}",
"value-format: ${3|none,double,float,integer,long,short|}",
"value-format: ${3|string,double,float,integer,long,short|}",
"${4:{{random.words}}}",
"",
"###",
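For context, the key-formatted-message snippet above expands, once its placeholders are filled in, to a producer block along these lines in a .kafka file (the topic, key and message shown here are illustrative):

PRODUCER key-formatted-message
topic: my-topic
key: my-key
key-format: string
{{random.words}}

###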
31 changes: 27 additions & 4 deletions src/kafka-file/kafkaFileClient.ts
@@ -6,7 +6,7 @@ import { ClusterSettings } from "../settings/clusters";
import { getLanguageModelCache, LanguageModelCache } from './languageModelCache';
import { KafkaFileDocument } from "./languageservice/parser/kafkaFileParser";
import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider } from "./languageservice/kafkaFileLanguageService";
import { runSafeAsync } from "./runner";
import { runSafeAsync } from "./utils/runner";

export function startLanguageClient(
clusterSettings: ClusterSettings,
@@ -41,7 +41,7 @@ export function startLanguageClient(
{ language: "kafka", scheme: "untitled" },
{ language: "kafka", scheme: "kafka" },
];

// Code Lenses
const codeLensProvider = new KafkaFileCodeLensProvider(kafkaFileDocuments, languageService);
context.subscriptions.push(
@@ -61,6 +61,10 @@
codeLensProvider.refresh();
});

// Completion
context.subscriptions.push(
vscode.languages.registerCompletionItemProvider(documentSelector, new KafkaFileCompletionItemProvider(kafkaFileDocuments, languageService)));

return {
dispose() {
kafkaFileDocuments.dispose();
@@ -103,7 +107,7 @@ class AbstractKafkaFileFeature {
constructor(
private kafkaFileDocuments: LanguageModelCache<KafkaFileDocument>,
protected readonly languageService: LanguageService
) {}
) { }

getKafkaFileDocument(document: vscode.TextDocument): KafkaFileDocument {
return this.kafkaFileDocuments.get(document);
@@ -133,4 +137,23 @@ class KafkaFileCodeLensProvider extends AbstractKafkaFileFeature implements vsco
refresh() {
this._onDidChangeCodeLenses.fire();
}
}
}

class KafkaFileCompletionItemProvider extends AbstractKafkaFileFeature implements vscode.CompletionItemProvider {

constructor(
kafkaFileDocuments: LanguageModelCache<KafkaFileDocument>,
languageService: LanguageService
) {
super(kafkaFileDocuments, languageService);
}

provideCompletionItems(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken, context: vscode.CompletionContext): vscode.ProviderResult<vscode.CompletionItem[] | vscode.CompletionList> {
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doComplete(document, kafkaFileDocument, position);
}, new vscode.CompletionList(), `Error while computing completion for ${document.uri}`, token);
}

}
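Note that runSafeAsync is now imported from ./utils/runner. That module is not included in this excerpt; as a rough, illustrative sketch only, a helper matching the call sites above (run the async computation, fall back to a default value on error or cancellation) could look like the following, though the actual implementation may differ:

import * as vscode from "vscode";

// Illustrative sketch, not the actual src/kafka-file/utils/runner.ts:
// run the async computation and fall back to `errorVal` when the request
// was cancelled or the computation throws.
export async function runSafeAsync<T>(
    func: () => Promise<T>,
    errorVal: T,
    errorMessage: string,
    token: vscode.CancellationToken
): Promise<T> {
    if (token.isCancellationRequested) {
        return errorVal;
    }
    try {
        return await func();
    } catch (e) {
        console.error(errorMessage, e);
        return errorVal;
    }
}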

27 changes: 16 additions & 11 deletions src/kafka-file/languageservice/kafkaFileLanguageService.ts
@@ -1,8 +1,9 @@
import { CodeLens, TextDocument, Uri } from "vscode";
import { CodeLens, CompletionList, Position, TextDocument, Uri } from "vscode";
import { ConsumerLaunchState } from "../../client";
import { ProducerLaunchState } from "../../client/producer";
import { KafkaFileDocument, parseKafkaFile } from "./parser/kafkaFileParser";
import { KafkaFileDocumentCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCompletion } from "./services/completion";

/**
* Provider API which gets the state for a given producer.
@@ -27,41 +28,45 @@ export interface SelectedClusterProvider {

/**
* Kafka language service API.
*
*
*/
export interface LanguageService {
/**
* Parse the given text document and returns an AST.
*
*
* @param document the text document of a kafka file.
*
*
* @returns the parsed AST.
*/
parseKafkaFileDocument(document: TextDocument): KafkaFileDocument;

/**
* Returns the code lenses for the given text document and parsed AST.
*
*
* @param document the text document.
* @param kafkaFileDocument the parsed AST.
*
*
* @returns the code lenses.
*/
getCodeLenses(document: TextDocument, kafkaFileDocument: KafkaFileDocument): CodeLens[];

/**
 * Returns the completion list for the given text document, parsed AST and position.
 */
doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): CompletionList | undefined;
}

/**
 * Returns the Kafka file language service which manages codelens, completion and validation features for kafka files.
*
*
* @param producerLaunchStateProvider the provider which gets the state for a given producer.
* @param consumerLaunchStateProvider the provider which gets the state for a given consumer.
* @param selectedClusterProvider the provider which gets the selected cluster id and name.
* @param selectedClusterProvider the provider which gets the selected cluster id and name.
*/
export function getLanguageService(producerLaunchStateProvider: ProducerLaunchStateProvider, consumerLaunchStateProvider: ConsumerLaunchStateProvider, selectedClusterProvider: SelectedClusterProvider): LanguageService {

const kafkaFileDocumentCodeLenses = new KafkaFileDocumentCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const kafkaFileCodeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const kafkaFileCompletion = new KafkaFileCompletion();
return {
parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document),
getCodeLenses: kafkaFileDocumentCodeLenses.getCodeLenses.bind(kafkaFileDocumentCodeLenses)
getCodeLenses: kafkaFileCodeLenses.getCodeLenses.bind(kafkaFileCodeLenses),
doComplete: kafkaFileCompletion.doComplete.bind(kafkaFileCompletion)
};
}
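The KafkaFileCompletion service wired in above lives in services/completion.ts, which is not part of the excerpt shown on this page. As an illustrative sketch only (the helper isInProducerBlock is invented for the example; the real parser AST is richer), doComplete could map the property model from model.ts below into a CompletionList of property names:

import { CompletionItem, CompletionItemKind, CompletionList, Position, SnippetString, TextDocument } from "vscode";
import { consumerProperties, producerProperties, ModelDefinition } from "../model";
import { KafkaFileDocument } from "../parser/kafkaFileParser";

export class KafkaFileCompletion {

    doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): CompletionList | undefined {
        // Hypothetical block detection: pick the producer or consumer property model
        // depending on the block which contains the given position.
        const properties: ModelDefinition[] = this.isInProducerBlock(kafkaFileDocument, position)
            ? producerProperties
            : consumerProperties;
        const items = properties.map(property => {
            const item = new CompletionItem(property.name, CompletionItemKind.Property);
            item.documentation = property.description;
            item.insertText = new SnippetString(`${property.name}: `);
            return item;
        });
        return new CompletionList(items, true);
    }

    private isInProducerBlock(kafkaFileDocument: KafkaFileDocument, position: Position): boolean {
        // Invented placeholder; real logic would inspect the parsed blocks around `position`.
        return false;
    }
}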
175 changes: 175 additions & 0 deletions src/kafka-file/languageservice/model.ts
@@ -0,0 +1,175 @@
export interface ModelDefinition {
name: string;
description: string;
enum?: ModelDefinition[];
}

export const consumerProperties = [
{
name: "topic",
description: "The topic id *[required]*"
},
{
name: "from",
description: "The offset from which the consumer group will start consuming messages from. Possible values are: `earliest`, `latest`, or an integer value. *[optional]*.",
enum: [
{
name: "earliest"
},
{
name: "last"
},
{
name: "0"
}
]
},
{
name: "key-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the key *[optional]*.",
enum: [
{
name: "none",
description: "No deserializer (ignores content)"
},
{
name: "string",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding."
},
{
name: "double",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.DoubleDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleDeserializer.java)."
},
{
name: "float",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.FloatDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatDeserializer.java)."
},
{
name: "integer",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.IntegerDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerDeserializer.java)."
},
{
name: "long",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.LongDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongDeserializer.java)."
},
{
name: "short",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.ShortDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortDeserializer.java)."
}
]
},
{
name: "value-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the value *[optional]*.",
enum: [
{
name: "none",
description: "No deserializer (ignores content)"
},
{
name: "string",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding."
},
{
name: "double",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.DoubleDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleDeserializer.java)."
},
{
name: "float",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.FloatDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatDeserializer.java)."
},
{
name: "integer",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.IntegerDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerDeserializer.java)."
},
{
name: "long",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.LongDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongDeserializer.java)."
},
{
name: "short",
description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.ShortDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortDeserializer.java)."
}
]
},
{
name: "partitions",
description: "the partition number(s), or a partitions range, or a combinaison of partitions ranges *[optional]*. eg:\n* 0\n* 0,1,2\n* 0-2\n* 0,2-3",
enum: [
{
name: "0"
}
]
}
] as ModelDefinition[];

export const producerProperties = [
{
name: "topic",
description: "The topic id *[required]*"
},
{
name: "key",
description: "The key *[optional]*."
},
{
name: "key-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.",
enum: [
{
name: "string",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding."
},
{
name: "double",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.DoubleSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleSerializer.java)."
},
{
name: "float",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.FloatSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatSerializer.java)."
},
{
name: "integer",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.IntegerSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerSerializer.java)."
},
{
name: "long",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.LongSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongSerializer.java)."
},
{
name: "short",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.ShortSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortSerializer.java)."
}
]
},
{
name: "value-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.",
enum: [
{
name: "string",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding."
},
{
name: "double",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.DoubleSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleSerializer.java)."
},
{
name: "float",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.FloatSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatSerializer.java)."
},
{
name: "integer",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.IntegerSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerSerializer.java)."
},
{
name: "long",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.LongSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongSerializer.java)."
},
{
name: "short",
description: "Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.ShortSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortSerializer.java)."
}
]
}
] as ModelDefinition[];
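A small usage sketch of this model (illustrative only): look up a property definition and its enum values, for example to offer value completion after key-format: in a PRODUCER block.

import { producerProperties } from "./model";

// Find the producer `key-format` property and list its allowed serializer names.
const keyFormat = producerProperties.find(property => property.name === "key-format");
const serializerNames = keyFormat?.enum?.map(value => value.name) ?? [];
// -> ["string", "double", "float", "integer", "long", "short"]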