Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Completion support for kafka file #148

Merged
merged 1 commit into from
Apr 6, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ All notable changes to Kafka extension will be documented in this file.
- Extension API to contribute clusters. See [#123](https://github.com/jlandersen/vscode-kafka/issues/123).
- declare key/value formats for CONSUMER in kafka file. See [#112](https://github.com/jlandersen/vscode-kafka/issues/112).
- declare key/value formats for PRODUCER in kafka file. See [#113](https://github.com/jlandersen/vscode-kafka/issues/113).
- completion support for property names and values of CONSUMER and PRODUCER blocks. See [#146](https://github.com/jlandersen/vscode-kafka/issues/146).

### Changed
- Improved the "New topic" wizard: the replication factor is now read from the broker configuration. Input will be skipped if value can't be higher than 1. See [#64](https://github.com/jlandersen/vscode-kafka/issues/64).
Expand Down
4 changes: 2 additions & 2 deletions snippets/producers.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
"PRODUCER ${1:key-formatted-message}",
"topic: ${2:topic_name}",
"key: ${3:mykeyq}",
"key-format: ${3|none,double,float,integer,long,short|}",
"key-format: ${3|string,double,float,integer,long,short|}",
"${4:{{random.words}}}",
"",
"###",
Expand All @@ -56,7 +56,7 @@
"PRODUCER ${1:formatted-message}",
"topic: ${2:topic_name}",
"key: ${3:mykeyq}",
"value-format: ${3|none,double,float,integer,long,short|}",
"value-format: ${3|string,double,float,integer,long,short|}",
"${4:{{random.words}}}",
"",
"###",
Expand Down
31 changes: 27 additions & 4 deletions src/kafka-file/kafkaFileClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { ClusterSettings } from "../settings/clusters";
import { getLanguageModelCache, LanguageModelCache } from './languageModelCache';
import { KafkaFileDocument } from "./languageservice/parser/kafkaFileParser";
import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider } from "./languageservice/kafkaFileLanguageService";
import { runSafeAsync } from "./runner";
import { runSafeAsync } from "./utils/runner";

export function startLanguageClient(
clusterSettings: ClusterSettings,
Expand Down Expand Up @@ -41,7 +41,7 @@ export function startLanguageClient(
{ language: "kafka", scheme: "untitled" },
{ language: "kafka", scheme: "kafka" },
];

// Code Lenses
const codeLensProvider = new KafkaFileCodeLensProvider(kafkaFileDocuments, languageService);
context.subscriptions.push(
Expand All @@ -61,6 +61,10 @@ export function startLanguageClient(
codeLensProvider.refresh();
});

// Completion
context.subscriptions.push(
vscode.languages.registerCompletionItemProvider(documentSelector, new KafkaFileCompletionItemProvider(kafkaFileDocuments, languageService)));

return {
dispose() {
kafkaFileDocuments.dispose();
Expand Down Expand Up @@ -103,7 +107,7 @@ class AbstractKafkaFileFeature {
constructor(
private kafkaFileDocuments: LanguageModelCache<KafkaFileDocument>,
protected readonly languageService: LanguageService
) {}
) { }

getKafkaFileDocument(document: vscode.TextDocument): KafkaFileDocument {
return this.kafkaFileDocuments.get(document);
Expand Down Expand Up @@ -133,4 +137,23 @@ class KafkaFileCodeLensProvider extends AbstractKafkaFileFeature implements vsco
refresh() {
this._onDidChangeCodeLenses.fire();
}
}
}

/**
 * Provides completion items for kafka files (property names and values of
 * CONSUMER / PRODUCER blocks) by delegating to the kafka file language service.
 */
class KafkaFileCompletionItemProvider extends AbstractKafkaFileFeature implements vscode.CompletionItemProvider {

    constructor(
        kafkaFileDocuments: LanguageModelCache<KafkaFileDocument>,
        languageService: LanguageService
    ) {
        super(kafkaFileDocuments, languageService);
    }

    provideCompletionItems(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken, context: vscode.CompletionContext): vscode.ProviderResult<vscode.CompletionItem[] | vscode.CompletionList> {
        // runSafeAsync guards against errors/cancellation; an empty
        // CompletionList is returned as the fallback value on failure.
        return runSafeAsync(async () => {
            const kafkaFileDocument = this.getKafkaFileDocument(document);
            return this.languageService.doComplete(document, kafkaFileDocument, position);
        }, new vscode.CompletionList(), `Error while computing completion items for ${document.uri}`, token);
    }

}

34 changes: 23 additions & 11 deletions src/kafka-file/languageservice/kafkaFileLanguageService.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import { CodeLens, TextDocument, Uri } from "vscode";
import { CodeLens, CompletionList, Position, TextDocument, Uri } from "vscode";
import { ConsumerLaunchState } from "../../client";
import { ProducerLaunchState } from "../../client/producer";
import { KafkaFileDocument, parseKafkaFile } from "./parser/kafkaFileParser";
import { KafkaFileDocumentCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCompletion } from "./services/completion";

/**
* Provider API which gets the state for a given producer.
Expand All @@ -27,41 +28,52 @@ export interface SelectedClusterProvider {

/**
 * Kafka language service API.
 */
export interface LanguageService {
    /**
     * Parse the given text document and returns an AST.
     *
     * @param document the text document of a kafka file.
     *
     * @returns the parsed AST.
     */
    parseKafkaFileDocument(document: TextDocument): KafkaFileDocument;

    /**
     * Returns the code lenses for the given text document and parsed AST.
     *
     * @param document the text document.
     * @param kafkaFileDocument the parsed AST.
     *
     * @returns the code lenses.
     */
    getCodeLenses(document: TextDocument, kafkaFileDocument: KafkaFileDocument): CodeLens[];

    /**
     * Returns the completion result for the given text document and parsed AST at given position.
     *
     * @param document the text document.
     * @param kafkaFileDocument the parsed AST.
     * @param position the position where the completion was triggered.
     *
     * @returns the completion list, or undefined when no completion applies.
     */
    doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): CompletionList | undefined
}

/**
* Returns the Kafka file language service which manages codelens, completion, validation features for kafka file.
*
*
* @param producerLaunchStateProvider the provider which gets the state for a given producer.
* @param consumerLaunchStateProvider the provider which gets the state for a given consumer.
* @param selectedClusterProvider the provider which gets the selected cluster id and name.
* @param selectedClusterProvider the provider which gets the selected cluster id and name.
*/
export function getLanguageService(producerLaunchStateProvider: ProducerLaunchStateProvider, consumerLaunchStateProvider: ConsumerLaunchStateProvider, selectedClusterProvider: SelectedClusterProvider): LanguageService {

const kafkaFileDocumentCodeLenses = new KafkaFileDocumentCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const kafkaFileCodeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const kafkaFileCompletion = new KafkaFileCompletion();
return {
parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document),
getCodeLenses: kafkaFileDocumentCodeLenses.getCodeLenses.bind(kafkaFileDocumentCodeLenses)
getCodeLenses: kafkaFileCodeLenses.getCodeLenses.bind(kafkaFileCodeLenses),
doComplete: kafkaFileCompletion.doComplete.bind(kafkaFileCompletion)
};
}
175 changes: 175 additions & 0 deletions src/kafka-file/languageservice/model.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
/**
 * Describes one completable property of a kafka file block (CONSUMER /
 * PRODUCER): its name, its Markdown documentation, and optionally the
 * set of valid values offered as completions.
 */
export interface ModelDefinition {
    // Property (or enum value) name as it appears in the kafka file.
    name: string;
    // Markdown documentation shown in the completion item.
    // NOTE(review): some enum entries below (e.g. `earliest`) omit this
    // field — consider declaring it optional (`description?`); verify.
    description: string;
    // Valid values for this property, when the value set is closed.
    enum?: ModelDefinition[];
}

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

would be preferable if the documentation linked to the embedded doc instead

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried, but it seems that it's not supported.

/**
 * Shared enum documentation for the deserializers accepted by the
 * `key-format` / `value-format` properties of a CONSUMER block.
 */
const consumerFormats: ModelDefinition[] = [
    {
        name: "none",
        description: "No deserializer (ignores content)"
    },
    {
        name: "string",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding."
    },
    {
        name: "double",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.DoubleDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/DoubleDeserializer.java)."
    },
    {
        name: "float",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.FloatDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/FloatDeserializer.java)."
    },
    {
        name: "integer",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.IntegerDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/IntegerDeserializer.java)."
    },
    {
        name: "long",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.LongDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/LongDeserializer.java)."
    },
    {
        name: "short",
        description: "Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.ShortDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/ShortDeserializer.java)."
    }
];

/**
 * Completion metadata (property names, Markdown documentation and valid
 * values) for the properties of a CONSUMER block in a kafka file.
 */
export const consumerProperties = [
    {
        name: "topic",
        description: "The topic id *[required]*"
    },
    {
        name: "from",
        description: "The offset from which the consumer group will start consuming messages. Possible values are: `earliest`, `latest`, or an integer value. *[optional]*.",
        enum: [
            {
                name: "earliest"
            },
            {
                // Fixed: was `last`, which is not a valid value — the
                // documented values are `earliest` / `latest` / an integer.
                name: "latest"
            },
            {
                name: "0"
            }
        ]
    },
    {
        name: "key-format",
        description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the key *[optional]*.",
        enum: consumerFormats
    },
    {
        name: "value-format",
        description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the value *[optional]*.",
        enum: consumerFormats
    },
    {
        name: "partitions",
        description: "The partition number(s), or a partitions range, or a combination of partitions ranges *[optional]*. eg:\n* 0\n* 0,1,2\n* 0-2\n* 0,2-3",
        enum: [
            {
                name: "0"
            }
        ]
    }
] as ModelDefinition[];

/**
 * Builds one serializer enum entry for a PRODUCER `key-format` /
 * `value-format` property. The six Kafka Java client serializers share
 * the same documentation pattern, so the Markdown is templated.
 */
const producerSerializer = (name: string, javaType: string, extra: string = "."): ModelDefinition => ({
    name,
    description: `Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.${javaType}Serializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/${javaType}Serializer.java)${extra}`
});

// Fresh array per call, matching the original's two distinct enum literals.
const producerFormats = (): ModelDefinition[] => [
    producerSerializer("string", "String", " which currently only supports `UTF-8` encoding."),
    producerSerializer("double", "Double"),
    producerSerializer("float", "Float"),
    producerSerializer("integer", "Integer"),
    producerSerializer("long", "Long"),
    producerSerializer("short", "Short")
];

/**
 * Completion metadata (property names, Markdown documentation and valid
 * values) for the properties of a PRODUCER block in a kafka file.
 */
export const producerProperties = [
    {
        name: "topic",
        description: "The topic id *[required]*"
    },
    {
        name: "key",
        description: "The key *[optional]*."
    },
    {
        name: "key-format",
        description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.",
        enum: producerFormats()
    },
    {
        name: "value-format",
        description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.",
        enum: producerFormats()
    }
] as ModelDefinition[];
Loading