diff --git a/CHANGELOG.md b/CHANGELOG.md
index b043ebd6..d9e3cfcb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ All notable changes to `Tools for Apache Kafka®` are documented in this file.
- Hide internal [strimzi](https://strimzi.io/) topics/consumers by default. See [#176](https://github.com/jlandersen/vscode-kafka/pull/176).
- Validation for available topics in `.kafka` files. See [#153](https://github.com/jlandersen/vscode-kafka/issues/153).
- Simplify snippets. See [#180](https://github.com/jlandersen/vscode-kafka/pull/180).
+- Hover support in `.kafka` files. See [#149](https://github.com/jlandersen/vscode-kafka/issues/149).
## [0.12.0] - 2021-04-26
### Added
diff --git a/docs/Consuming.md b/docs/Consuming.md
index 8be5cbc9..892da260 100644
--- a/docs/Consuming.md
+++ b/docs/Consuming.md
@@ -101,6 +101,13 @@ Existing topic validation is done only when cluster is `connected`. If the topic
![Existing topic validation](assets/kafka-file-consumer-topic-validation.png)
+#### Hover
+
+Hovering over property names and topic names in .kafka files shows the relevant documentation and topic information.
+
+Here is an example of hovering over a topic:
+
+![Topic hover](assets/kafka-file-consumer-topic-hover.png)
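+
+For example, given a consumer declaration like the following (the group id and topic name are just placeholders), hovering over a property name shows its documentation, and hovering over the topic name shows the topic's partition count and replication factor when the selected cluster is connected and the topic exists:
+
+```kafka
+CONSUMER my-group
+topic: my-topic
+key-format: string
+value-format: string
+```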
### Start Consumer command
diff --git a/docs/Producing.md b/docs/Producing.md
index 3634b8f6..a865224d 100644
--- a/docs/Producing.md
+++ b/docs/Producing.md
@@ -83,6 +83,14 @@ Validation will help you write valid producers in .kafka files.
![Existing topic validation](assets/kafka-file-producer-topic-validation.png)
+### Hover
+
+Hovering over property names and topic names in .kafka files shows the relevant documentation and topic information.
+
+Here is an example of hovering over a topic:
+
+![Topic hover](assets/kafka-file-producer-topic-hover.png)
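+
+For example, given a producer declaration like the following (the producer name, topic, key and record content are just placeholders), hovering over a property name shows its documentation, and hovering over the topic name shows the topic's partition count and replication factor when the selected cluster is connected and the topic exists:
+
+```kafka
+PRODUCER my-producer
+topic: my-topic
+key: my-key
+value-format: string
+Hello world!
+```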
+
## Randomized content
Record content can be randomized by injecting mustache-like placeholders of [faker.js properties](https://github.com/Marak/faker.js#api-methods), like ``{{name.lastName}}`` or ``{{random.number}}``. Some randomized properties can be localized via the `kafka.producers.fakerjs.locale` setting.
diff --git a/docs/assets/kafka-file-consumer-topic-hover.png b/docs/assets/kafka-file-consumer-topic-hover.png
new file mode 100644
index 00000000..c4cf4823
Binary files /dev/null and b/docs/assets/kafka-file-consumer-topic-hover.png differ
diff --git a/docs/assets/kafka-file-producer-topic-hover.png b/docs/assets/kafka-file-producer-topic-hover.png
new file mode 100644
index 00000000..d564dce6
Binary files /dev/null and b/docs/assets/kafka-file-producer-topic-hover.png differ
diff --git a/src/docs/markdownPreviewProvider.ts b/src/docs/markdownPreviewProvider.ts
index 71cc61df..523cacba 100644
--- a/src/docs/markdownPreviewProvider.ts
+++ b/src/docs/markdownPreviewProvider.ts
@@ -62,7 +62,7 @@ class MarkdownPreviewProvider implements Disposable {
return `${linkText}`;
});
body = await commands.executeCommand(MARKDOWN_API_RENDER, markdownString);
- if(body !== undefined) {
+ if (body !== undefined) {
this.documentCache.set(markdownFilePath, body);
}
}
@@ -120,3 +120,15 @@ class MarkdownPreviewProvider implements Disposable {
}
export const markdownPreviewProvider: MarkdownPreviewProvider = new MarkdownPreviewProvider();
+
+export type EmbeddedPage = "Consuming" | "Producing";
+
+type ConsumingSection = "deserializer" | "kafka-file";
+
+type ProducingSection = "serializer" | "kafka-file" | "randomized-content";
+
+export type EmbeddedSection = ConsumingSection | ProducingSection;
+
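+/**
+ * Returns a command URI which opens the embedded documentation for the given page
+ * at the given section. The query string is the URL-encoded form of
+ * [{"page":"<page>","section":"<section>"}] consumed by the vscode-kafka.open.docs.page command.
+ */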
+export function getDocumentationPageUri(page: EmbeddedPage, section: EmbeddedSection) {
+ return `command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22${page}%22%2C%22section%22%3A%22${section}%22%7D%5D`;
+}
diff --git a/src/kafka-file/kafkaFileClient.ts b/src/kafka-file/kafkaFileClient.ts
index 890e2333..39852fc0 100644
--- a/src/kafka-file/kafkaFileClient.ts
+++ b/src/kafka-file/kafkaFileClient.ts
@@ -164,6 +164,12 @@ export function startLanguageClient(
const diagnostics = new KafkaFileDiagnostics(kafkaFileDocuments, languageService, clusterSettings, clientAccessor, modelProvider, workspaceSettings);
context.subscriptions.push(diagnostics);
+ // Hover
+ const hover = new KafkaFileHoverProvider(kafkaFileDocuments, languageService);
+ context.subscriptions.push(
+ vscode.languages.registerHoverProvider(documentSelector, hover)
+ );
+
// Open / Close document
context.subscriptions.push(vscode.workspace.onDidOpenTextDocument(e => {
if (e.languageId === 'kafka') {
@@ -277,7 +283,7 @@ class KafkaFileCompletionItemProvider extends AbstractKafkaFileFeature implement
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doComplete(document, kafkaFileDocument, this.workspaceSettings.producerFakerJSEnabled, position);
- }, new vscode.CompletionList(), `Error while computing code lenses for ${document.uri}`, token);
+ }, new vscode.CompletionList(), `Error while computing completion for ${document.uri}`, token);
}
}
@@ -359,3 +365,13 @@ class KafkaFileDiagnostics extends AbstractKafkaFileFeature implements vscode.Di
this.diagnosticCollection.dispose();
}
}
+
+class KafkaFileHoverProvider extends AbstractKafkaFileFeature implements vscode.HoverProvider {
+    provideHover(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken): vscode.ProviderResult<vscode.Hover> {
+ return runSafeAsync(async () => {
+ const kafkaFileDocument = this.getKafkaFileDocument(document);
+ return this.languageService.doHover(document, kafkaFileDocument, position);
+ }, null, `Error while computing hover for ${document.uri}`, token);
+ }
+
+}
\ No newline at end of file
diff --git a/src/kafka-file/languageservice/kafkaFileLanguageService.ts b/src/kafka-file/languageservice/kafkaFileLanguageService.ts
index e88f5535..cf27cfb4 100644
--- a/src/kafka-file/languageservice/kafkaFileLanguageService.ts
+++ b/src/kafka-file/languageservice/kafkaFileLanguageService.ts
@@ -1,4 +1,4 @@
-import { CodeLens, CompletionList, Diagnostic, Position, TextDocument, Uri } from "vscode";
+import { CodeLens, CompletionList, Diagnostic, Hover, Position, TextDocument, Uri } from "vscode";
import { ClientState, ConsumerLaunchState } from "../../client";
import { BrokerConfigs } from "../../client/config";
import { ProducerLaunchState } from "../../client/producer";
@@ -6,6 +6,7 @@ import { KafkaFileDocument, parseKafkaFile } from "./parser/kafkaFileParser";
import { KafkaFileCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCompletion } from "./services/completion";
import { KafkaFileDiagnostics } from "./services/diagnostics";
+import { KafkaFileHover } from "./services/hover";
/**
* Provider API which gets the state for a given producer.
@@ -49,6 +50,7 @@ export interface TopicProvider {
*
*/
export interface LanguageService {
+
/**
* Parse the given text document and returns an AST.
*
@@ -85,6 +87,15 @@ export interface LanguageService {
* @param kafkaFileDocument the parsed AST.
*/
    doDiagnostics(document: TextDocument, kafkaFileDocument: KafkaFileDocument, producerFakerJSEnabled: boolean): Promise<Diagnostic[]>;
+
+ /**
+ * Returns the hover result for the given text document and parsed AST at given position.
+ *
+ * @param document the text document.
+ * @param kafkaFileDocument the parsed AST.
+ * @param position the position where the hover was triggered.
+ */
+    doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined>;
}
/**
@@ -100,10 +111,18 @@ export function getLanguageService(producerLaunchStateProvider: ProducerLaunchSt
const codeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const completion = new KafkaFileCompletion(selectedClusterProvider, topicProvider);
const diagnostics = new KafkaFileDiagnostics(selectedClusterProvider, topicProvider);
+ const hover = new KafkaFileHover(selectedClusterProvider, topicProvider);
return {
parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document),
getCodeLenses: codeLenses.getCodeLenses.bind(codeLenses),
doComplete: completion.doComplete.bind(completion),
- doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics)
+ doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics),
+ doHover: hover.doHover.bind(hover)
};
}
+
+export function createTopicDocumentation(topic: TopicDetail): string {
+ return `Topic \`${topic.id}\`\n` +
+ ` * partition count: \`${topic.partitionCount}\`\n` +
+ ` * replication factor: \`${topic.replicationFactor}\`\n`;
+}
\ No newline at end of file
diff --git a/src/kafka-file/languageservice/model.ts b/src/kafka-file/languageservice/model.ts
index 360fe7b1..caef9998 100644
--- a/src/kafka-file/languageservice/model.ts
+++ b/src/kafka-file/languageservice/model.ts
@@ -1,3 +1,5 @@
+import { getDocumentationPageUri } from "../../docs/markdownPreviewProvider";
+
export class Model {
    private cache = new Map<string, ModelDefinition>();
@@ -16,18 +18,22 @@ export class Model {
}
public hasDefinitionEnum(name: string, value: string): boolean {
+ return this.getDefinitionEnum(name, value) !== undefined;
+ }
+
+ public getDefinitionEnum(name: string, value: string): ModelDefinition | undefined {
const definition = this.getDefinition(name);
if (!definition) {
- return false;
+ return undefined;
}
if (definition.enum) {
for (const item of definition.enum) {
if (item.name === value) {
- return true;
+ return item;
}
}
}
- return false;
+ return undefined;
}
}
@@ -59,7 +65,7 @@ const consumerProperties = [
},
{
name: "key-format",
- description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the key *[optional]*.",
+ description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "none",
@@ -93,7 +99,7 @@ const consumerProperties = [
},
{
name: "value-format",
- description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the value *[optional]*.",
+ description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "none",
@@ -148,7 +154,7 @@ const producerProperties = [
},
{
name: "key-format",
- description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.",
+ description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "string",
@@ -178,7 +184,7 @@ const producerProperties = [
},
{
name: "value-format",
- description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.",
+        description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "string",
@@ -381,18 +387,18 @@ const fakerjsAPI = [
] as ModelDefinition[];
export interface PartModelProvider {
- getPart(name: string) : PartModelProvider | undefined;
+ getPart(name: string): PartModelProvider | undefined;
}
-class PartModel implements PartModelProvider{
+class PartModel implements PartModelProvider {
    private cache = new Map<string, PartModelProvider>();
- getPart(name: string) : PartModelProvider | undefined {
+ getPart(name: string): PartModelProvider | undefined {
return this.cache.get(name);
}
- getOrCreate(name: string) : PartModelProvider {
+ getOrCreate(name: string): PartModelProvider {
let part = this.getPart(name);
if (!part) {
part = new PartModel();
@@ -411,12 +417,12 @@ class FakerJSModel extends Model implements PartModelProvider {
const parts = definition.name.split('.');
let partModel = this.root;
parts.forEach(part => {
- partModel = partModel.getOrCreate(part);
+ partModel = partModel.getOrCreate(part);
});
});
}
- getPart(name: string) : PartModelProvider | undefined {
+ getPart(name: string): PartModelProvider | undefined {
return this.root.getPart(name);
}
}
diff --git a/src/kafka-file/languageservice/parser/kafkaFileParser.ts b/src/kafka-file/languageservice/parser/kafkaFileParser.ts
index b759dab1..db3fc849 100644
--- a/src/kafka-file/languageservice/parser/kafkaFileParser.ts
+++ b/src/kafka-file/languageservice/parser/kafkaFileParser.ts
@@ -17,7 +17,9 @@ export enum NodeKind {
export interface Node {
start: Position;
end: Position;
+ range(): Range;
findNodeBefore(offset: Position): Node;
+ findNodeAt(offset: Position): Node;
lastChild: Node | undefined;
parent: Node | undefined;
kind: NodeKind;
@@ -32,10 +34,20 @@ class BaseNode implements Node {
}
+ public range(): Range {
+ const start = this.start;
+ const end = this.end;
+ return new Range(start, end);
+ }
+
public findNodeBefore(offset: Position): Node {
return this;
}
+ public findNodeAt(offset: Position): Node {
+ return this;
+ }
+
public get lastChild(): Node | undefined { return undefined; }
}
@@ -65,6 +77,17 @@ class ChildrenNode extends BaseNode {
return this;
}
+ public findNodeAt(offset: Position): Node {
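+        // Find the last child which starts before the given offset; if the offset falls
+        // within that child, search recursively inside it, otherwise this node is the match.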
+ const idx = findFirst(this.children, c => offset.isBeforeOrEqual(c.start)) - 1;
+ if (idx >= 0) {
+ const child = this.children[idx];
+ if (offset.isAfter(child.start) && offset.isBeforeOrEqual(child.end)) {
+ return child.findNodeAt(offset);
+ }
+ }
+ return this;
+ }
+
public get lastChild(): Node | undefined { return this.children.length ? this.children[this.children.length - 1] : void 0; };
}
@@ -84,6 +107,7 @@ export class Chunk extends BaseNode {
constructor(public readonly content: string, start: Position, end: Position, kind: NodeKind) {
super(start, end, kind);
}
+
}
export class Property extends BaseNode {
@@ -106,12 +130,6 @@ export class Property extends BaseNode {
return this.value?.content.trim();
}
- public get propertyRange(): Range {
- const start = this.start;
- const end = this.end;
- return new Range(start, end);
- }
-
public get propertyKeyRange(): Range {
const start = this.start;
const end = this.assignerCharacter ? new Position(this.start.line, this.assignerCharacter) : this.end;
@@ -159,6 +177,13 @@ export class Property extends BaseNode {
}
return true;
}
+
+    findNodeAt(position: Position): Node {
+ if (this.isBeforeAssigner(position)) {
+ return this.key?.findNodeAt(position) || this;
+ }
+ return this.value?.findNodeAt(position) || this;
+ }
}
export abstract class Block extends ChildrenNode {
@@ -175,7 +200,7 @@ export abstract class Block extends ChildrenNode {
getPropertyValue(name: string): string | undefined {
const property = this.getProperty(name);
- return property?.value?.content;
+ return property?.propertyValue;
}
getProperty(name: string): Property | undefined {
diff --git a/src/kafka-file/languageservice/services/completion.ts b/src/kafka-file/languageservice/services/completion.ts
index 2cd2a90b..36aec62a 100644
--- a/src/kafka-file/languageservice/services/completion.ts
+++ b/src/kafka-file/languageservice/services/completion.ts
@@ -1,5 +1,5 @@
import { TextDocument, Position, CompletionList, CompletionItem, SnippetString, MarkdownString, CompletionItemKind, Range } from "vscode";
-import { SelectedClusterProvider, TopicDetail, TopicProvider } from "../kafkaFileLanguageService";
+import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";
import { consumerModel, fakerjsAPIModel, Model, ModelDefinition, producerModel } from "../model";
import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser";
@@ -134,7 +134,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(currentName);
item.kind = CompletionItemKind.Property;
if (definition.description) {
- item.documentation = new MarkdownString(definition.description);
+ item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString(`${currentName}: `);
const values = await this.getValues(definition);
@@ -198,7 +198,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Value;
if (definition.description) {
- item.documentation = new MarkdownString(definition.description);
+ item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString(' ');
insertText.appendText(value);
@@ -219,7 +219,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Variable;
if (definition.description) {
- item.documentation = new MarkdownString(definition.description);
+ item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString('');
insertText.appendText(value);
@@ -235,11 +235,6 @@ export class KafkaFileCompletion {
return;
}
- function createDocumentation(topic: TopicDetail): string {
- return `Topic \`${topic.id}\`\n` +
- ` * partition count: \`${topic.partitionCount}\`\n` +
- ` * replication factor: \`${topic.replicationFactor}\`\n`;
- }
const valueRange = property.propertyValueRange;
try {
const topics = await this.topicProvider.getTopics(clusterId);
@@ -247,7 +242,7 @@ export class KafkaFileCompletion {
const value = topic.id;
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Value;
- item.documentation = new MarkdownString(createDocumentation(topic));
+ item.documentation = new MarkdownString(createTopicDocumentation(topic));
const insertText = new SnippetString(' ');
insertText.appendText(value);
item.insertText = insertText;
@@ -281,3 +276,9 @@ export class KafkaFileCompletion {
}
}
}
+
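+// Completion documentation is rendered as trusted Markdown so that embedded command links
+// (see getDocumentationPageUri) used in the property descriptions remain clickable.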
+function createMarkdownString(contents: string) {
+ const doc = new MarkdownString(contents);
+ doc.isTrusted = true;
+ return doc;
+}
\ No newline at end of file
diff --git a/src/kafka-file/languageservice/services/diagnostics.ts b/src/kafka-file/languageservice/services/diagnostics.ts
index d05a4135..94e07152 100644
--- a/src/kafka-file/languageservice/services/diagnostics.ts
+++ b/src/kafka-file/languageservice/services/diagnostics.ts
@@ -208,14 +208,14 @@ export class KafkaFileDiagnostics {
const assigner = property.assignerCharacter;
if (!assigner) {
// Error => topic
- const range = property.propertyRange;
+ const range = property.range();
diagnostics.push(new Diagnostic(range, `Missing ':' sign after '${propertyName}'`, DiagnosticSeverity.Error));
return;
}
// 1.2. property must declare a key
if (!propertyName) {
// Error => :string
- const range = property.propertyRange;
+ const range = property.range();
diagnostics.push(new Diagnostic(range, "Property must define a name before ':' sign", DiagnosticSeverity.Error));
return;
}
@@ -280,7 +280,7 @@ export class KafkaFileDiagnostics {
// The topic validation is done, only when the cluster is connected
if (!await this.topicProvider.getTopic(clusterId, topicId)) {
// The topic doesn't exist, report an error
- const range = topicProperty.propertyTrimmedValueRange || topicProperty.propertyRange;
+ const range = topicProperty.propertyTrimmedValueRange || topicProperty.range();
const autoCreate = await this.topicProvider.getAutoCreateTopicEnabled(clusterId);
const errorMessage = getTopicErrorMessage(topicId, autoCreate, blockType);
const severity = getTopicErrorSeverity(autoCreate);
diff --git a/src/kafka-file/languageservice/services/hover.ts b/src/kafka-file/languageservice/services/hover.ts
new file mode 100644
index 00000000..e30c011a
--- /dev/null
+++ b/src/kafka-file/languageservice/services/hover.ts
@@ -0,0 +1,170 @@
+import { Hover, MarkdownString, Position, Range, TextDocument } from "vscode";
+import { getDocumentationPageUri } from "../../../docs/markdownPreviewProvider";
+import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";
+import { consumerModel, Model, producerModel } from "../model";
+import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser";
+
+export class KafkaFileHover {
+
+ constructor(private selectedClusterProvider: SelectedClusterProvider, private topicProvider: TopicProvider) {
+
+ }
+
+    async doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined> {
+        // Get the AST node at the position where the hover was triggered
+ const node = kafkaFileDocument.findNodeAt(position);
+ if (!node) {
+ return;
+ }
+ switch (node.kind) {
+
+ case NodeKind.consumerBlock: {
+                const block = <ConsumerBlock>node;
+                const topic = block.getPropertyValue('topic');
+                return createHover(`Consumer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Consuming', 'kafka-file')}) for more information.`, node.range());
+ }
+
+ case NodeKind.producerBlock: {
+                const block = <ProducerBlock>node;
+                const topic = block.getPropertyValue('topic');
+                return createHover(`Producer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more information.`, node.range());
+ }
+
+ case NodeKind.propertyKey: {
+                const propertyKey = <Chunk>node;
+                const property = <Property>propertyKey.parent;
+                const propertyName = propertyKey.content;
+                const propertyKeyRange = propertyKey.range();
+                const block = <Block>property.parent;
+ if (block.type === BlockType.consumer) {
+ // CONSUMER
+ // key|:
+
+ // or
+
+ // CONSUMER
+ // key|
+                    return await this.getHoverForConsumerPropertyNames(propertyName, propertyKeyRange, <ConsumerBlock>block);
+ } else {
+ // PRODUCER
+ // key|:
+                    return await this.getHoverForProducerPropertyNames(propertyName, propertyKeyRange, <ProducerBlock>block);
+ }
+ }
+
+ case NodeKind.propertyValue: {
+                const propertyValue = <Chunk>node;
+                const property = <Property>propertyValue.parent;
+                const block = <Block>property.parent;
+ if (block.type === BlockType.consumer) {
+ // CONSUMER
+ // key-format: |
+                    return await this.getHoverForConsumerPropertyValues(propertyValue, property, <ConsumerBlock>block);
+ } else {
+ // PRODUCER
+ // key-format: |
+                    return await this.getHoverForProducerPropertyValues(propertyValue, property, <ProducerBlock>block);
+ }
+ }
+
+ case NodeKind.mustacheExpression: {
+                const expression = <MustacheExpression>node;
+                return createHover(`FakerJS expression.\n\nSee [here](${getDocumentationPageUri('Producing', 'randomized-content')}) for more information.`, expression.enclosedExpressionRange);
+ }
+
+ case NodeKind.producerValue: {
+                return createHover(`Producer value.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more information.`, node.range());
+ }
+ }
+ }
+
+    async getHoverForConsumerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ConsumerBlock): Promise<Hover | undefined> {
+ return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, consumerModel);
+ }
+
+    async getHoverForProducerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ProducerBlock): Promise<Hover | undefined> {
+ return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, producerModel);
+ }
+
+    async getHoverForPropertyNames(propertyName: string, propertyKeyRange: Range, block: Block, metadata: Model): Promise<Hover | undefined> {
+ const definition = metadata.getDefinition(propertyName);
+ if (definition && definition.description) {
+ return createHover(definition.description, propertyKeyRange);
+ }
+ }
+
+    async getHoverForConsumerPropertyValues(propertyValue: Chunk, property: Property, block: ConsumerBlock): Promise<Hover | undefined> {
+ const propertyName = property.propertyName;
+ switch (propertyName) {
+ case 'topic':
+ // CONSUMER
+ // topic: |
+ return await this.getHoverForTopic(property);
+ default:
+ // CONSUMER
+ // key-format: |
+ return await this.getHoverForPropertyValues(propertyValue, property, block, consumerModel);
+ }
+ }
+
+
+    async getHoverForProducerPropertyValues(propertyValue: Chunk, property: Property, block: ProducerBlock): Promise<Hover | undefined> {
+ const propertyName = property.propertyName;
+ switch (propertyName) {
+ case 'topic':
+ // PRODUCER
+ // topic: |
+ return await this.getHoverForTopic(property);
+ default:
+ // PRODUCER
+ // key-format: |
+ return await this.getHoverForPropertyValues(propertyValue, property, block, producerModel);
+ }
+ }
+
+    async getHoverForTopic(property: Property): Promise<Hover | undefined> {
+ const propertyValue = property.value;
+ if (!propertyValue) {
+ return;
+ }
+ const { clusterId } = this.selectedClusterProvider.getSelectedCluster();
+ if (!clusterId) {
+ return;
+ }
+
+ try {
+ const topicId = propertyValue.content.trim();
+ const topics = await this.topicProvider.getTopics(clusterId);
+ if (topics.length > 0) {
+ const topic = topics
+ .find(t => t.id === topicId);
+ if (topic) {
+ return new Hover(createTopicDocumentation(topic), propertyValue.range());
+ }
+ }
+ }
+ catch (e) {
+ return;
+ }
+
+ return undefined;
+ }
+
+    async getHoverForPropertyValues(propertyValue: Chunk, property: Property, block: Block, metadata: Model): Promise<Hover | undefined> {
+ const propertyName = property.propertyName;
+ if (!propertyName) {
+ return;
+ }
+ const definition = metadata.getDefinitionEnum(propertyName, propertyValue.content.trim());
+ if (definition && definition.description) {
+ return createHover(definition.description, property.propertyTrimmedValueRange);
+ }
+ return undefined;
+ }
+}
+
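+// Hover contents are rendered as trusted Markdown so that embedded command links
+// (see getDocumentationPageUri) to the documentation pages remain clickable.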
+function createHover(contents: string, range?: Range): Hover {
+ const doc = new MarkdownString(contents);
+ doc.isTrusted = true;
+ return new Hover(doc, range);
+}
\ No newline at end of file
diff --git a/src/test/suite/kafka-file/languageservice/hover.test.ts b/src/test/suite/kafka-file/languageservice/hover.test.ts
new file mode 100644
index 00000000..3eff2be8
--- /dev/null
+++ b/src/test/suite/kafka-file/languageservice/hover.test.ts
@@ -0,0 +1,310 @@
+import { ClientState } from "../../../../client";
+import { getLanguageService } from "../../../../kafka-file/languageservice/kafkaFileLanguageService";
+import { assertHover, hover, LanguageServiceConfig, position } from "./kafkaAssert";
+
+suite("Kafka File Hover Test Suite", () => {
+
+ test("Empty hover", async () => {
+ await assertHover('');
+
+ await assertHover('ab|cd');
+
+ });
+
+});
+
+suite("Kafka File CONSUMER Hover Test Suite", () => {
+
+ test("CONSUMER declaration no topic Hover", async () => {
+
+ await assertHover(
+ 'CONS|UMER\n',
+ hover(
+            `Consumer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more information.`,
+ position(0, 0),
+ position(1, 0)
+ )
+ );
+
+ });
+
+ test("CONSUMER declaration with topic Hover", async () => {
+
+ await assertHover(
+ 'CONS|UMER\n' +
+ 'topic: abcd',
+ hover(
+            `Consumer declaration for topic \`abcd\`.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more information.`,
+ position(0, 0),
+ position(1, 11)
+ )
+ );
+
+ });
+
+ test("topic property name Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'top|ic: abcd',
+ hover(
+ `The topic id *[required]*`,
+ position(1, 0),
+ position(1, 5)
+ )
+ );
+
+ });
+
+ test("topic property value Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'topic: ab|cd'
+ );
+
+ const languageServiceConfig = new LanguageServiceConfig();
+ languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
+        const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
+        languageServiceConfig.setSelectedCluster(connectedCluster);
+ const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'topic: ab|cd',
+ hover(
+ 'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
+ position(1, 6),
+ position(1, 11)
+ ),
+ languageService
+ );
+
+ });
+
+ test("from property name Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'fro|m: earliest',
+ hover(
+ 'The offset from which the consumer group will start consuming messages from. Possible values are: `earliest`, `latest`, or an integer value. *[optional]*.',
+ position(1, 0),
+ position(1, 4)
+ )
+ );
+
+ });
+
+ test("key-format property name Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'key-for|mat: string',
+ hover(
+            '[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the key *[optional]*.',
+ position(1, 0),
+ position(1, 10)
+ )
+ );
+
+ });
+
+ test("key-format property value Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'key-format: stri|ng',
+ hover(
+ 'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
+ position(1, 12),
+ position(1, 18)
+ )
+ );
+
+ });
+
+ test("value-format property name Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'value-for|mat: string',
+ hover(
+            '[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the value *[optional]*.',
+ position(1, 0),
+ position(1, 12)
+ )
+ );
+
+ });
+
+ test("value-format property value Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'value-format: stri|ng',
+ hover(
+ 'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
+ position(1, 14),
+ position(1, 20)
+ )
+ );
+
+ });
+
+ test("partitions property name Hover", async () => {
+
+ await assertHover(
+ 'CONSUMER\n' +
+ 'partition|s: 0',
+ hover(
+ 'the partition number(s), or a partitions range, or a combinaison of partitions ranges *[optional]*. eg:\n* 0\n* 0,1,2\n* 0-2\n* 0,2-3',
+ position(1, 0),
+ position(1, 10)
+ )
+ );
+
+ });
+
+});
+
+suite("Kafka File PRODUCER Hover Test Suite", () => {
+
+ test("PRODUCER declaration no topic Hover", async () => {
+
+ await assertHover(
+ 'PRODU|CER\n',
+ hover(
+            `Producer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more information.`,
+ position(0, 0),
+ position(1, 0)
+ )
+ );
+
+ });
+
+ test("PRODUCER declaration with topic Hover", async () => {
+
+ await assertHover(
+ 'PRODU|CER\n' +
+ 'topic: abcd',
+ hover(
+            `Producer declaration for topic \`abcd\`.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more information.`,
+ position(0, 0),
+ position(1, 11)
+ )
+ );
+
+    });
+
+    test("topic property name Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'top|ic: abcd',
+ hover(
+ `The topic id *[required]*`,
+ position(1, 0),
+ position(1, 5)
+ )
+ );
+
+ });
+
+ test("topic property value Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'topic: ab|cd'
+ );
+
+ const languageServiceConfig = new LanguageServiceConfig();
+ languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
+        const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
+        languageServiceConfig.setSelectedCluster(connectedCluster);
+ const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'topic: ab|cd',
+ hover(
+ 'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
+ position(1, 6),
+ position(1, 11)
+ ),
+ languageService
+ );
+
+ });
+
+ test("key property name Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'ke|y: abcd',
+ hover(
+ 'The key *[optional]*.',
+ position(1, 0),
+ position(1, 3)
+ )
+ );
+
+ });
+
+ test("key-format property name Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'key-for|mat: string',
+ hover(
+            '[Serializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22serializer%22%7D%5D) to use for the key *[optional]*.',
+ position(1, 0),
+ position(1, 10)
+ )
+ );
+
+ });
+
+ test("key-format property value Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'key-format: stri|ng',
+ hover(
+            'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding.',
+ position(1, 12),
+ position(1, 18)
+ )
+ );
+
+ });
+
+ test("value-format property name Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'value-for|mat: string',
+ hover(
+            '[Serializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22serializer%22%7D%5D) to use for the value *[optional]*.',
+ position(1, 0),
+ position(1, 12)
+ )
+ );
+
+ });
+
+ test("value-format property value Hover", async () => {
+
+ await assertHover(
+ 'PRODUCER\n' +
+ 'value-format: stri|ng',
+ hover(
+            'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding.',
+ position(1, 14),
+ position(1, 20)
+ )
+ );
+
+ });
+
+});
\ No newline at end of file
diff --git a/src/test/suite/kafka-file/languageservice/kafkaAssert.ts b/src/test/suite/kafka-file/languageservice/kafkaAssert.ts
index 8d7f0c0d..2d970f60 100644
--- a/src/test/suite/kafka-file/languageservice/kafkaAssert.ts
+++ b/src/test/suite/kafka-file/languageservice/kafkaAssert.ts
@@ -1,5 +1,5 @@
import * as assert from "assert";
-import { CodeLens, Position, Range, Command, Uri, workspace, CompletionList, SnippetString, Diagnostic, DiagnosticSeverity } from "vscode";
+import { CodeLens, Position, Range, Command, Uri, workspace, CompletionList, SnippetString, Diagnostic, DiagnosticSeverity, Hover } from "vscode";
import { ClientState, ConsumerLaunchState } from "../../../../client";
import { BrokerConfigs } from "../../../../client/config";
import { ProducerLaunchState } from "../../../../client/producer";
@@ -12,7 +12,7 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu
    private consumerLaunchStates = new Map<string, ConsumerLaunchState>();
- private selectedCluster: { clusterId?: string, clusterName?: string, clusterState? : ClientState } | undefined;
+ private selectedCluster: { clusterId?: string, clusterName?: string, clusterState?: ClientState } | undefined;
    private topicsCache = new Map<string, TopicDetail[]>();
@@ -50,7 +50,7 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu
return {};
}
- public setSelectedCluster(selectedCluster: { clusterId?: string, clusterName?: string, clusterState? : ClientState }) {
+ public setSelectedCluster(selectedCluster: { clusterId?: string, clusterName?: string, clusterState?: ClientState }) {
this.selectedCluster = selectedCluster;
}
@@ -66,11 +66,11 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu
return topics.find(topic => topic.id === topicId);
}
-
- public setAutoCreateConfig(autoCreateConfig : BrokerConfigs.AutoCreateTopicResult) {
- this.autoCreateConfig= autoCreateConfig;
+
+ public setAutoCreateConfig(autoCreateConfig: BrokerConfigs.AutoCreateTopicResult) {
+ this.autoCreateConfig = autoCreateConfig;
}
-
+
    async getAutoCreateTopicEnabled(clusterid: string): Promise<BrokerConfigs.AutoCreateTopicResult> {
return this.autoCreateConfig;
}
@@ -159,6 +159,24 @@ export async function assertDiagnostics(content: string, expected: Array