Provide documentation on hover, in .kafka files
Fixes jlandersen#149

Signed-off-by: azerr <azerr@redhat.com>
angelozerr committed May 3, 2021
1 parent 399d860 commit 1f832c7
Showing 15 changed files with 641 additions and 44 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@ All notable changes to `Tools for Apache Kafka®` are documented in this file.
- Hide internal [strimzi](https://strimzi.io/) topics/consumers by default. See [#176](https://github.com/jlandersen/vscode-kafka/pull/176).
- Validation for available topics in `.kafka` files. See [#153](https://github.com/jlandersen/vscode-kafka/issues/153).
- Simplify snippets. See [#180](https://github.com/jlandersen/vscode-kafka/pull/180).
- Hover support in `.kafka` files. See [#149](https://github.com/jlandersen/vscode-kafka/issues/149).

## [0.12.0] - 2021-04-26
### Added
7 changes: 7 additions & 0 deletions docs/Consuming.md
@@ -101,6 +101,13 @@ Existing topic validation is done only when cluster is `connected`. If the topic

![Existing topic validation](assets/kafka-file-consumer-topic-validation.png)

#### Hover

Hover showing property documentation and topic information is available in `.kafka` files.

Here is an example of hovering over a topic:

![Topic hover](assets/kafka-file-consumer-topic-hover.png)

### Start Consumer command

8 changes: 8 additions & 0 deletions docs/Producing.md
@@ -83,6 +83,14 @@ Validation will help you write valid producers in .kafka files.

![Existing topic validation](assets/kafka-file-producer-topic-validation.png)

### Hover

Hover showing property documentation and topic information is available in `.kafka` files.

Here is an example of hovering over a topic:

![Topic hover](assets/kafka-file-producer-topic-hover.png)

## Randomized content

Record content can be randomized by injecting mustache-like placeholders of [faker.js properties](https://github.com/Marak/faker.js#api-methods), like ``{{name.lastName}}`` or ``{{random.number}}``. Some randomized properties can be localized via the `kafka.producers.fakerjs.locale` setting.
Binary file added docs/assets/kafka-file-consumer-topic-hover.png
Binary file added docs/assets/kafka-file-producer-topic-hover.png
14 changes: 13 additions & 1 deletion src/docs/markdownPreviewProvider.ts
@@ -62,7 +62,7 @@ class MarkdownPreviewProvider implements Disposable {
return `<a href="command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22${page}%22%2C%22section%22%3A%22${section}%22%7D%5D">${linkText}</a>`;
});
body = await commands.executeCommand(MARKDOWN_API_RENDER, markdownString);
if(body !== undefined) {
if (body !== undefined) {
this.documentCache.set(markdownFilePath, body);
}
}
@@ -120,3 +120,15 @@ class MarkdownPreviewProvider implements Disposable {
}

export const markdownPreviewProvider: MarkdownPreviewProvider = new MarkdownPreviewProvider();

export type EmbeddedPage = "Consuming" | "Producing";

type ConsumingSection = "deserializer" | "kafka-file";

type ProducingSection = "serializer" | "kafka-file" | "randomized-content";

export type EmbeddedSection = ConsumingSection | ProducingSection;

export function getDocumentationPageUri(page: EmbeddedPage, section: EmbeddedSection) {
return `command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22${page}%22%2C%22section%22%3A%22${section}%22%7D%5D`;
}
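
For context, here is a small sketch (not part of the commit) of how the new helper is consumed; the import path matches the one added to `model.ts` below, and the resulting URI is the same encoded `command:` link asserted in the hover tests:

```ts
import { getDocumentationPageUri } from "../../docs/markdownPreviewProvider";

// Build a command link that opens the embedded documentation at a given section.
const uri = getDocumentationPageUri('Consuming', 'deserializer');
// uri === 'command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D'
// i.e. the encoded command arguments [{"page":"Consuming","section":"deserializer"}]
const description = `[Deserializer](${uri}) to use for the key *[optional]*.`;
```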
18 changes: 17 additions & 1 deletion src/kafka-file/kafkaFileClient.ts
Original file line number Diff line number Diff line change
@@ -164,6 +164,12 @@ export function startLanguageClient(
const diagnostics = new KafkaFileDiagnostics(kafkaFileDocuments, languageService, clusterSettings, clientAccessor, modelProvider, workspaceSettings);
context.subscriptions.push(diagnostics);

// Hover
const hover = new KafkaFileHoverProvider(kafkaFileDocuments, languageService);
context.subscriptions.push(
vscode.languages.registerHoverProvider(documentSelector, hover)
);

// Open / Close document
context.subscriptions.push(vscode.workspace.onDidOpenTextDocument(e => {
if (e.languageId === 'kafka') {
@@ -277,7 +283,7 @@ class KafkaFileCompletionItemProvider extends AbstractKafkaFileFeature implement
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doComplete(document, kafkaFileDocument, this.workspaceSettings.producerFakerJSEnabled, position);
}, new vscode.CompletionList(), `Error while computing code lenses for ${document.uri}`, token);
}, new vscode.CompletionList(), `Error while computing completion for ${document.uri}`, token);
}

}
@@ -359,3 +365,13 @@ class KafkaFileDiagnostics extends AbstractKafkaFileFeature implements vscode.Di
this.diagnosticCollection.dispose();
}
}

class KafkaFileHoverProvider extends AbstractKafkaFileFeature implements vscode.HoverProvider {
provideHover(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken): vscode.ProviderResult<vscode.Hover> {
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doHover(document, kafkaFileDocument, position);
}, null, `Error while computing hover for ${document.uri}`, token);
}

}
23 changes: 21 additions & 2 deletions src/kafka-file/languageservice/kafkaFileLanguageService.ts
@@ -1,11 +1,12 @@
import { CodeLens, CompletionList, Diagnostic, Position, TextDocument, Uri } from "vscode";
import { CodeLens, CompletionList, Diagnostic, Hover, Position, TextDocument, Uri } from "vscode";
import { ClientState, ConsumerLaunchState } from "../../client";
import { BrokerConfigs } from "../../client/config";
import { ProducerLaunchState } from "../../client/producer";
import { KafkaFileDocument, parseKafkaFile } from "./parser/kafkaFileParser";
import { KafkaFileCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCompletion } from "./services/completion";
import { KafkaFileDiagnostics } from "./services/diagnostics";
import { KafkaFileHover } from "./services/hover";

/**
* Provider API which gets the state for a given producer.
@@ -49,6 +50,7 @@ export interface TopicProvider {
*
*/
export interface LanguageService {

/**
* Parse the given text document and returns an AST.
*
@@ -85,6 +87,15 @@ export interface LanguageService {
* @param kafkaFileDocument the parsed AST.
*/
doDiagnostics(document: TextDocument, kafkaFileDocument: KafkaFileDocument, producerFakerJSEnabled: boolean): Promise<Diagnostic[]>;

/**
* Returns the hover result for the given text document and parsed AST at given position.
*
* @param document the text document.
* @param kafkaFileDocument the parsed AST.
* @param position the position where the hover was triggered.
*/
doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined>;
}

/**
@@ -100,10 +111,18 @@ export function getLanguageService(producerLaunchStateProvider: ProducerLaunchSt
const codeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const completion = new KafkaFileCompletion(selectedClusterProvider, topicProvider);
const diagnostics = new KafkaFileDiagnostics(selectedClusterProvider, topicProvider);
const hover = new KafkaFileHover(selectedClusterProvider, topicProvider);
return {
parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document),
getCodeLenses: codeLenses.getCodeLenses.bind(codeLenses),
doComplete: completion.doComplete.bind(completion),
doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics)
doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics),
doHover: hover.doHover.bind(hover)
};
}

export function createTopicDocumentation(topic: TopicDetail): string {
return `Topic \`${topic.id}\`\n` +
` * partition count: \`${topic.partitionCount}\`\n` +
` * replication factor: \`${topic.replicationFactor}\`\n`;
}
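
As a quick illustration (not part of the diff), the markdown produced by `createTopicDocumentation` for a one-partition topic is exactly the string asserted in the hover tests below:

```ts
const markdown = createTopicDocumentation({ id: 'abcd', partitionCount: 1, replicationFactor: 1 });
// markdown === 'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n'
```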
32 changes: 19 additions & 13 deletions src/kafka-file/languageservice/model.ts
@@ -1,3 +1,5 @@
import { getDocumentationPageUri } from "../../docs/markdownPreviewProvider";

export class Model {

private cache = new Map<string, ModelDefinition>();
@@ -16,18 +18,22 @@ export class Model {
}

public hasDefinitionEnum(name: string, value: string): boolean {
return this.getDefinitionEnum(name, value) !== undefined;
}

public getDefinitionEnum(name: string, value: string): ModelDefinition | undefined {
const definition = this.getDefinition(name);
if (!definition) {
return false;
return undefined;
}
if (definition.enum) {
for (const item of definition.enum) {
if (item.name === value) {
return true;
return item;
}
}
}
return false;
return undefined;
}
}

@@ -59,7 +65,7 @@ const consumerProperties = [
},
{
name: "key-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the key *[optional]*.",
description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "none",
@@ -93,7 +99,7 @@ const consumerProperties = [
},
{
name: "value-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the value *[optional]*.",
description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "none",
@@ -148,7 +154,7 @@ const producerProperties = [
},
{
name: "key-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.",
description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "string",
@@ -178,7 +184,7 @@ const producerProperties = [
},
{
name: "value-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.",
description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "string",
@@ -381,18 +387,18 @@ const fakerjsAPI = [
] as ModelDefinition[];

export interface PartModelProvider {
getPart(name: string) : PartModelProvider | undefined;
getPart(name: string): PartModelProvider | undefined;
}

class PartModel implements PartModelProvider{
class PartModel implements PartModelProvider {

private cache = new Map<string, PartModelProvider>();

getPart(name: string) : PartModelProvider | undefined {
getPart(name: string): PartModelProvider | undefined {
return this.cache.get(name);
}

getOrCreate(name: string) : PartModelProvider {
getOrCreate(name: string): PartModelProvider {
let part = this.getPart(name);
if (!part) {
part = new PartModel();
@@ -411,12 +417,12 @@ class FakerJSModel extends Model implements PartModelProvider {
const parts = definition.name.split('.');
let partModel = this.root;
parts.forEach(part => {
partModel = <PartModel> partModel.getOrCreate(part);
partModel = <PartModel>partModel.getOrCreate(part);
});
});
}

getPart(name: string) : PartModelProvider | undefined {
getPart(name: string): PartModelProvider | undefined {
return this.root.getPart(name);
}
}
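
The `hasDefinitionEnum`/`getDefinitionEnum` split above lets the hover service reuse the enum lookup. A usage sketch (assumed, with the import path taken from `hover.ts` below):

```ts
import { consumerModel } from "../model";

// Look up the enum entry that documents a concrete property value.
const def = consumerModel.getDefinitionEnum('key-format', 'string');
// def?.description documents the `string` deserializer.

// hasDefinitionEnum now simply delegates to the lookup above.
consumerModel.hasDefinitionEnum('key-format', 'string');   // true
consumerModel.hasDefinitionEnum('key-format', 'unknown');  // false
```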
39 changes: 32 additions & 7 deletions src/kafka-file/languageservice/parser/kafkaFileParser.ts
@@ -17,7 +17,9 @@ export enum NodeKind {
export interface Node {
start: Position;
end: Position;
range(): Range;
findNodeBefore(offset: Position): Node;
findNodeAt(offset: Position): Node;
lastChild: Node | undefined;
parent: Node | undefined;
kind: NodeKind;
@@ -32,10 +34,20 @@ class BaseNode implements Node {

}

public range(): Range {
const start = this.start;
const end = this.end;
return new Range(start, end);
}

public findNodeBefore(offset: Position): Node {
return this;
}

public findNodeAt(offset: Position): Node {
return this;
}

public get lastChild(): Node | undefined { return undefined; }
}

@@ -65,6 +77,17 @@ class ChildrenNode<T extends Node> extends BaseNode {
return this;
}

public findNodeAt(offset: Position): Node {
const idx = findFirst(this.children, c => offset.isBeforeOrEqual(c.start)) - 1;
if (idx >= 0) {
const child = this.children[idx];
if (offset.isAfter(child.start) && offset.isBeforeOrEqual(child.end)) {
return child.findNodeAt(offset);
}
}
return this;
}

public get lastChild(): Node | undefined { return this.children.length ? this.children[this.children.length - 1] : void 0; };
}
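
`findNodeAt` relies on a `findFirst` helper that is not part of this diff; it is presumably the usual binary-search utility (first index whose element satisfies the predicate, assuming all non-matching elements come first). A sketch of that assumed shape:

```ts
// Assumed helper (not in this commit): binary search for the first element
// satisfying the predicate; returns array.length when none does.
function findFirst<T>(array: T[], predicate: (item: T) => boolean): number {
    let low = 0, high = array.length;
    while (low < high) {
        const mid = Math.floor((low + high) / 2);
        if (predicate(array[mid])) {
            high = mid;       // a match exists at mid or earlier
        } else {
            low = mid + 1;    // first match, if any, is after mid
        }
    }
    return low;
}
```

With children sorted by position, `findFirst(this.children, c => offset.isBeforeOrEqual(c.start)) - 1` therefore points at the last child starting strictly before the offset, which is then range-checked before recursing.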

@@ -84,6 +107,7 @@ export class Chunk extends BaseNode {
constructor(public readonly content: string, start: Position, end: Position, kind: NodeKind) {
super(start, end, kind);
}

}

export class Property extends BaseNode {
@@ -106,12 +130,6 @@ export class Property extends BaseNode {
return this.value?.content.trim();
}

public get propertyRange(): Range {
const start = this.start;
const end = this.end;
return new Range(start, end);
}

public get propertyKeyRange(): Range {
const start = this.start;
const end = this.assignerCharacter ? new Position(this.start.line, this.assignerCharacter) : this.end;
@@ -159,6 +177,13 @@ export class Property extends BaseNode {
}
return true;
}

findNodeAt(position : Position) : Node {
if (this.isBeforeAssigner(position)) {
return this.key?.findNodeAt(position) || this;
}
return this.value?.findNodeAt(position) || this;
}
}

export abstract class Block extends ChildrenNode<Property | Chunk> {
@@ -175,7 +200,7 @@ export abstract class Block extends ChildrenNode<Property | Chunk> {

getPropertyValue(name: string): string | undefined {
const property = this.getProperty(name);
return property?.value?.content;
return property?.propertyValue;
}

getProperty(name: string): Property | undefined {
21 changes: 11 additions & 10 deletions src/kafka-file/languageservice/services/completion.ts
@@ -1,5 +1,5 @@
import { TextDocument, Position, CompletionList, CompletionItem, SnippetString, MarkdownString, CompletionItemKind, Range } from "vscode";
import { SelectedClusterProvider, TopicDetail, TopicProvider } from "../kafkaFileLanguageService";
import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";
import { consumerModel, fakerjsAPIModel, Model, ModelDefinition, producerModel } from "../model";
import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser";

@@ -134,7 +134,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(currentName);
item.kind = CompletionItemKind.Property;
if (definition.description) {
item.documentation = new MarkdownString(definition.description);
item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString(`${currentName}: `);
const values = await this.getValues(definition);
@@ -198,7 +198,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Value;
if (definition.description) {
item.documentation = new MarkdownString(definition.description);
item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString(' ');
insertText.appendText(value);
@@ -219,7 +219,7 @@ export class KafkaFileCompletion {
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Variable;
if (definition.description) {
item.documentation = new MarkdownString(definition.description);
item.documentation = createMarkdownString(definition.description);
}
const insertText = new SnippetString('');
insertText.appendText(value);
@@ -235,19 +235,14 @@ export class KafkaFileCompletion {
return;
}

function createDocumentation(topic: TopicDetail): string {
return `Topic \`${topic.id}\`\n` +
` * partition count: \`${topic.partitionCount}\`\n` +
` * replication factor: \`${topic.replicationFactor}\`\n`;
}
const valueRange = property.propertyValueRange;
try {
const topics = await this.topicProvider.getTopics(clusterId);
topics.forEach((topic) => {
const value = topic.id;
const item = new CompletionItem(value);
item.kind = CompletionItemKind.Value;
item.documentation = new MarkdownString(createDocumentation(topic));
item.documentation = new MarkdownString(createTopicDocumentation(topic));
const insertText = new SnippetString(' ');
insertText.appendText(value);
item.insertText = insertText;
@@ -281,3 +276,9 @@ export class KafkaFileCompletion {
}
}
}

function createMarkdownString(contents : string) {
const doc = new MarkdownString(contents);
doc.isTrusted = true;
return doc;
}
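
Note that `MarkdownString.isTrusted` has to be set for the embedded `command:` links (the ones produced by `getDocumentationPageUri`) to be executable; a plain `new MarkdownString(...)` would leave them inert, which is why completion documentation is now routed through this small factory.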
6 changes: 3 additions & 3 deletions src/kafka-file/languageservice/services/diagnostics.ts
@@ -208,14 +208,14 @@ export class KafkaFileDiagnostics {
const assigner = property.assignerCharacter;
if (!assigner) {
// Error => topic
const range = property.propertyRange;
const range = property.range();
diagnostics.push(new Diagnostic(range, `Missing ':' sign after '${propertyName}'`, DiagnosticSeverity.Error));
return;
}
// 1.2. property must declare a key
if (!propertyName) {
// Error => :string
const range = property.propertyRange;
const range = property.range();
diagnostics.push(new Diagnostic(range, "Property must define a name before ':' sign", DiagnosticSeverity.Error));
return;
}
@@ -280,7 +280,7 @@ export class KafkaFileDiagnostics {
// The topic validation is done, only when the cluster is connected
if (!await this.topicProvider.getTopic(clusterId, topicId)) {
// The topic doesn't exist, report an error
const range = topicProperty.propertyTrimmedValueRange || topicProperty.propertyRange;
const range = topicProperty.propertyTrimmedValueRange || topicProperty.range();
const autoCreate = await this.topicProvider.getAutoCreateTopicEnabled(clusterId);
const errorMessage = getTopicErrorMessage(topicId, autoCreate, blockType);
const severity = getTopicErrorSeverity(autoCreate);
170 changes: 170 additions & 0 deletions src/kafka-file/languageservice/services/hover.ts
@@ -0,0 +1,170 @@
import { Hover, MarkdownString, Position, Range, TextDocument } from "vscode";
import { getDocumentationPageUri } from "../../../docs/markdownPreviewProvider";
import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";
import { consumerModel, Model, producerModel } from "../model";
import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser";

export class KafkaFileHover {

constructor(private selectedClusterProvider: SelectedClusterProvider, private topicProvider: TopicProvider) {

}

async doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined> {
// Get the AST node at the position where the hover was triggered
const node = kafkaFileDocument.findNodeAt(position);
if (!node) {
return;
}
switch (node.kind) {

case NodeKind.consumerBlock: {
const block = <Block>node;
const topic = block.getPropertyValue('topic');
return createHover(`Consumer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Consuming', 'kafka-file')}) for more informations.`, node.range());
}

case NodeKind.producerBlock: {
const block = <Block>node;
const topic = block.getPropertyValue('topic');
return createHover(`Producer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more informations.`, node.range());
}

case NodeKind.propertyKey: {
const propertyKey = <Chunk>node;
const property = <Property>propertyKey.parent;
const propertyName = propertyKey.content;
const propertyKeyRange = propertyKey.range();
const block = <Block>property.parent;
if (block.type === BlockType.consumer) {
// CONSUMER
// key|:

// or

// CONSUMER
// key|
return await this.getHoverForConsumerPropertyNames(propertyName, propertyKeyRange, <ConsumerBlock>block);
} else {
// PRODUCER
// key|:
return await this.getHoverForProducerPropertyNames(propertyName, propertyKeyRange, <ProducerBlock>block);
}
}

case NodeKind.propertyValue: {
const propertyValue = <Chunk>node;
const property = <Property>propertyValue.parent;
const block = <Block>property.parent;
if (block.type === BlockType.consumer) {
// CONSUMER
// key-format: |
return await this.getHoverForConsumerPropertyValues(propertyValue, property, <ConsumerBlock>block);
} else {
// PRODUCER
// key-format: |
return await this.getHoverForProducerPropertyValues(propertyValue, property, <ProducerBlock>block);
}
}

case NodeKind.mustacheExpression: {
const expression = <MustacheExpression>node;
return createHover(`FakerJS expression.\n\nSee [here](${getDocumentationPageUri('Producing', 'randomized-content')}) for more informations.`, expression.enclosedExpressionRange);
}

case NodeKind.producerValue: {
return createHover(`Producer value.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more informations.`, node.range());
}
}
}

async getHoverForConsumerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ConsumerBlock): Promise<Hover | undefined> {
return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, consumerModel);
}

async getHoverForProducerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ProducerBlock): Promise<Hover | undefined> {
return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, producerModel);
}

async getHoverForPropertyNames(propertyName: string, propertyKeyRange: Range, block: Block, metadata: Model): Promise<Hover | undefined> {
const definition = metadata.getDefinition(propertyName);
if (definition && definition.description) {
return createHover(definition.description, propertyKeyRange);
}
}

async getHoverForConsumerPropertyValues(propertyValue: Chunk, property: Property, block: ConsumerBlock): Promise<Hover | undefined> {
const propertyName = property.propertyName;
switch (propertyName) {
case 'topic':
// CONSUMER
// topic: |
return await this.getHoverForTopic(property);
default:
// CONSUMER
// key-format: |
return await this.getHoverForPropertyValues(propertyValue, property, block, consumerModel);
}
}


async getHoverForProducerPropertyValues(propertyValue: Chunk, property: Property, block: ProducerBlock): Promise<Hover | undefined> {
const propertyName = property.propertyName;
switch (propertyName) {
case 'topic':
// PRODUCER
// topic: |
return await this.getHoverForTopic(property);
default:
// PRODUCER
// key-format: |
return await this.getHoverForPropertyValues(propertyValue, property, block, producerModel);
}
}

async getHoverForTopic(property: Property): Promise<Hover | undefined> {
const propertyValue = property.value;
if (!propertyValue) {
return;
}
const { clusterId } = this.selectedClusterProvider.getSelectedCluster();
if (!clusterId) {
return;
}

try {
const topicId = propertyValue.content.trim();
const topics = await this.topicProvider.getTopics(clusterId);
if (topics.length > 0) {
const topic = topics
.find(t => t.id === topicId);
if (topic) {
return createHover(createTopicDocumentation(topic), propertyValue.range());
}
}
}
catch (e) {
return;
}

return undefined;
}

async getHoverForPropertyValues(propertyValue: Chunk, property: Property, block: Block, metadata: Model): Promise<Hover | undefined> {
const propertyName = property.propertyName;
if (!propertyName) {
return;
}
const definition = metadata.getDefinitionEnum(propertyName, propertyValue.content.trim());
if (definition && definition.description) {
return createHover(definition.description, property.propertyTrimmedValueRange);
}
return undefined;
}
}

function createHover(contents: string, range?: Range): Hover {
const doc = new MarkdownString(contents);
doc.isTrusted = true;
return new Hover(doc, range);
}
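
To see how the pieces fit together, here is a rough end-to-end sketch (assumptions: an async context, illustrative import paths, and the `LanguageServiceConfig` test double defined in `kafkaAssert.ts` below):

```ts
import * as vscode from "vscode";
import { ClientState } from "../../../client";
import { getLanguageService } from "../kafkaFileLanguageService";
import { LanguageServiceConfig } from "../../../../test/suite/kafka-file/languageservice/kafkaAssert";

// Wire a fake connected cluster exposing one topic.
const config = new LanguageServiceConfig();
config.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
config.setSelectedCluster({ clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected });
const languageService = getLanguageService(config, config, config, config);

// Hover over the topic value of a consumer block.
const document = await vscode.workspace.openTextDocument({ language: 'kafka', content: 'CONSUMER\ntopic: abcd' });
const ast = languageService.parseKafkaFileDocument(document);
const result = await languageService.doHover(document, ast, new vscode.Position(1, 8));
// result?.contents[0] is a trusted MarkdownString:
// 'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n'
```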
309 changes: 309 additions & 0 deletions src/test/suite/kafka-file/languageservice/hover.test.ts
@@ -0,0 +1,309 @@
import { ClientState } from "../../../../client";
import { getLanguageService } from "../../../../kafka-file/languageservice/kafkaFileLanguageService";
import { assertHover, hover, LanguageServiceConfig, position } from "./kafkaAssert";

suite("Kafka File Hover Test Suite", () => {

test("Empty hover", async () => {
await assertHover('');

await assertHover('ab|cd');

});

});

suite("Kafka File CONSUMER Hover Test Suite", () => {

test("CONSUMER declaration no topic Hover", async () => {

await assertHover(
'CONS|UMER\n',
hover(
`Consumer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
position(0, 0),
position(1, 0)
)
);

});

test("CONSUMER declaration with topic Hover", async () => {

await assertHover(
'CONS|UMER\n' +
'topic: abcd',
hover(
`Consumer declaration for topic \`abcd\`.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
position(0, 0),
position(1, 11)
)
);

});

test("topic property name Hover", async () => {

await assertHover(
'CONSUMER\n' +
'top|ic: abcd',
hover(
`The topic id *[required]*`,
position(1, 0),
position(1, 5)
)
);

});

test("topic property value Hover", async () => {

await assertHover(
'CONSUMER\n' +
'topic: ab|cd'
);

const languageServiceConfig = new LanguageServiceConfig();
languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
languageServiceConfig.setSelectedCluster(connectedCluster);
const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);

await assertHover(
'CONSUMER\n' +
'topic: ab|cd',
hover(
'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
position(1, 6),
position(1, 11)
),
languageService
);

});

test("from property name Hover", async () => {

await assertHover(
'CONSUMER\n' +
'fro|m: earliest',
hover(
'The offset from which the consumer group will start consuming messages from. Possible values are: `earliest`, `latest`, or an integer value. *[optional]*.',
position(1, 0),
position(1, 4)
)
);

});

test("key-format property name Hover", async () => {

await assertHover(
'CONSUMER\n' +
'key-for|mat: string',
hover(
'[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the key *[optional]*.',
position(1, 0),
position(1, 10)
)
);

});

test("key-format property value Hover", async () => {

await assertHover(
'CONSUMER\n' +
'key-format: stri|ng',
hover(
'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
position(1, 12),
position(1, 18)
)
);

});

test("value-format property name Hover", async () => {

await assertHover(
'CONSUMER\n' +
'value-for|mat: string',
hover(
'[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the value *[optional]*.',
position(1, 0),
position(1, 12)
)
);

});

test("value-format property value Hover", async () => {

await assertHover(
'CONSUMER\n' +
'value-format: stri|ng',
hover(
'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
position(1, 14),
position(1, 20)
)
);

});

test("partitions property name Hover", async () => {

await assertHover(
'CONSUMER\n' +
'partition|s: 0',
hover(
'the partition number(s), or a partitions range, or a combinaison of partitions ranges *[optional]*. eg:\n* 0\n* 0,1,2\n* 0-2\n* 0,2-3',
position(1, 0),
position(1, 10)
)
);

});

});

suite("Kafka File PRODUCER Hover Test Suite", () => {

test("PRODUCER declaration no topic Hover", async () => {

await assertHover(
'PRODU|CER\n',
hover(
`Producer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
position(0, 0),
position(1, 0)
)
);

});

test("PRODUCER declaration with topic Hover", async () => {

await assertHover(
'PRODU|CER\n' +
'topic: abcd',
hover(
`Producer declaration for topic \`abcd\`.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
position(0, 0),
position(1, 11)
)
);
});

test("topic property name Hover", async () => {

await assertHover(
'PRODUCER\n' +
'top|ic: abcd',
hover(
`The topic id *[required]*`,
position(1, 0),
position(1, 5)
)
);

});

test("topic property value Hover", async () => {

await assertHover(
'PRODUCER\n' +
'topic: ab|cd'
);

const languageServiceConfig = new LanguageServiceConfig();
languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
languageServiceConfig.setSelectedCluster(connectedCluster);
const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);

await assertHover(
'PRODUCER\n' +
'topic: ab|cd',
hover(
'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
position(1, 6),
position(1, 11)
),
languageService
);

});

test("key property name Hover", async () => {

await assertHover(
'PRODUCER\n' +
'ke|y: abcd',
hover(
'The key *[optional]*.',
position(1, 0),
position(1, 3)
)
);

});

test("key-format property name Hover", async () => {

await assertHover(
'PRODUCER\n' +
'key-for|mat: string',
hover(
'[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.',
position(1, 0),
position(1, 10)
)
);

});

test("key-format property value Hover", async () => {

await assertHover(
'PRODUCER\n' +
'key-format: stri|ng',
hover(
'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding.',
position(1, 12),
position(1, 18)
)
);

});

test("value-format property name Hover", async () => {

await assertHover(
'PRODUCER\n' +
'value-for|mat: string',
hover(
'[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.',
position(1, 0),
position(1, 12)
)
);

});

test("value-format property value Hover", async () => {

await assertHover(
'PRODUCER\n' +
'value-format: stri|ng',
hover(
'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/Seserializer.java) which currently only supports `UTF-8` encoding.',
position(1, 14),
position(1, 20)
)
);

});

});
37 changes: 30 additions & 7 deletions src/test/suite/kafka-file/languageservice/kafkaAssert.ts
@@ -1,5 +1,5 @@
import * as assert from "assert";
import { CodeLens, Position, Range, Command, Uri, workspace, CompletionList, SnippetString, Diagnostic, DiagnosticSeverity } from "vscode";
import { CodeLens, Position, Range, Command, Uri, workspace, CompletionList, SnippetString, Diagnostic, DiagnosticSeverity, Hover, MarkdownString } from "vscode";
import { ClientState, ConsumerLaunchState } from "../../../../client";
import { BrokerConfigs } from "../../../../client/config";
import { ProducerLaunchState } from "../../../../client/producer";
@@ -12,7 +12,7 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu

private consumerLaunchStates = new Map<string, ConsumerLaunchState>();

private selectedCluster: { clusterId?: string, clusterName?: string, clusterState? : ClientState } | undefined;
private selectedCluster: { clusterId?: string, clusterName?: string, clusterState?: ClientState } | undefined;

private topicsCache = new Map<string, TopicDetail[]>();

@@ -50,7 +50,7 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu
return {};
}

public setSelectedCluster(selectedCluster: { clusterId?: string, clusterName?: string, clusterState? : ClientState }) {
public setSelectedCluster(selectedCluster: { clusterId?: string, clusterName?: string, clusterState?: ClientState }) {
this.selectedCluster = selectedCluster;
}

@@ -66,11 +66,11 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu
return topics.find(topic => topic.id === topicId);
}

public setAutoCreateConfig(autoCreateConfig : BrokerConfigs.AutoCreateTopicResult) {
this.autoCreateConfig= autoCreateConfig;

public setAutoCreateConfig(autoCreateConfig: BrokerConfigs.AutoCreateTopicResult) {
this.autoCreateConfig = autoCreateConfig;
}

async getAutoCreateTopicEnabled(clusterid: string): Promise<BrokerConfigs.AutoCreateTopicResult> {
return this.autoCreateConfig;
}
@@ -159,6 +159,29 @@ export async function assertDiagnostics(content: string, expected: Array<Diagnos
assert.deepStrictEqual(actual, expected);
}

// Hover assert

export function hover(contents: string, start: Position, end: Position): Hover {
const r = range(start, end);
const doc = new MarkdownString(contents);
doc.isTrusted = true;
return new Hover(doc, r);
}

export async function assertHover(value: string, expected?: Hover, ls = languageService) {
const offset = value.indexOf('|');
value = value.substr(0, offset) + value.substr(offset + 1);

let document = await getDocument(value);
const position = document.positionAt(offset);
let ast = ls.parseKafkaFileDocument(document);
const actual = await ls.doHover(document, ast, position);
assert.deepStrictEqual(actual, expected);
if (actual?.contents && expected?.contents) {
assert.deepStrictEqual((<MarkdownString>actual.contents[0]).value, (<MarkdownString>expected.contents[0]).value);
}
}

// Kafka parser assert

export interface ExpectedChunckResult {
