Commit

Provide documentation on hover, in .kafka files
Fixes jlandersen#149

Signed-off-by: azerr <[email protected]>
angelozerr committed May 3, 2021
1 parent 399d860 commit 86b4b3e
Showing 15 changed files with 639 additions and 44 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@ All notable changes to `Tools for Apache Kafka®` are documented in this file.
- Hide internal [strimzi](https://strimzi.io/) topics/consumers by default. See [#176](https://github.com/jlandersen/vscode-kafka/pull/176).
- Validation for available topics in `.kafka` files. See [#153](https://github.com/jlandersen/vscode-kafka/issues/153).
- Simplify snippets. See [#180](https://github.com/jlandersen/vscode-kafka/pull/180).
- Hover support in `.kafka` files. See [#149](https://github.com/jlandersen/vscode-kafka/issues/149).

## [0.12.0] - 2021-04-26
### Added
7 changes: 7 additions & 0 deletions docs/Consuming.md
@@ -101,6 +101,13 @@ Existing topic validation is done only when cluster is `connected`. If the topic

![Existing topic validation](assets/kafka-file-consumer-topic-validation.png)

#### Hover

Hover for property documentation and topic information is available in `.kafka` files.

Here is an example of hovering over a topic:

![Topic hover](assets/kafka-file-consumer-topic-hover.png)

### Start Consumer command

8 changes: 8 additions & 0 deletions docs/Producing.md
@@ -83,6 +83,14 @@ Validation will help you write valid producers in .kafka files.

![Existing topic validation](assets/kafka-file-producer-topic-validation.png)

### Hover

Hover for property documentation and topic information is available in `.kafka` files.

Here is an example of hovering over a topic:

![Topic hover](assets/kafka-file-producer-topic-hover.png)

## Randomized content

Record content can be randomized by injecting mustache-like placeholders of [faker.js properties](https://github.com/Marak/faker.js#api-methods), like ``{{name.lastName}}`` or ``{{random.number}}``. Some randomized properties can be localized via the `kafka.producers.fakerjs.locale` setting.
Binary file added docs/assets/kafka-file-consumer-topic-hover.png
Binary file added docs/assets/kafka-file-producer-topic-hover.png
14 changes: 13 additions & 1 deletion src/docs/markdownPreviewProvider.ts
@@ -62,7 +62,7 @@ class MarkdownPreviewProvider implements Disposable {
return `<a href="command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22${page}%22%2C%22section%22%3A%22${section}%22%7D%5D">${linkText}</a>`;
});
body = await commands.executeCommand(MARKDOWN_API_RENDER, markdownString);
if(body !== undefined) {
if (body !== undefined) {
this.documentCache.set(markdownFilePath, body);
}
}
@@ -120,3 +120,15 @@ class MarkdownPreviewProvider implements Disposable {
}

export const markdownPreviewProvider: MarkdownPreviewProvider = new MarkdownPreviewProvider();

export type EmbeddedPage = "Consuming" | "Producing";

type ConsumingSection = "deserializer" | "kafka-file";

type ProducingSection = "serializer" | "kafka-file" | "randomized-content";

export type EmbeddedSection = ConsumingSection | ProducingSection;

export function getDocumentationPageUri(page: EmbeddedPage, section: EmbeddedSection) {
return `command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22${page}%22%2C%22section%22%3A%22${section}%22%7D%5D`;
}
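For reference, the new helper simply URI-encodes the page/section pair as the JSON argument array of the `vscode-kafka.open.docs.page` command. A small usage sketch (import path as used from `src/kafka-file/languageservice/model.ts` below):

```typescript
import { getDocumentationPageUri } from "../../docs/markdownPreviewProvider";

// Build a command link that opens the embedded "Consuming" page at its "deserializer" section.
const uri = getDocumentationPageUri("Consuming", "deserializer");
// uri === "command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D"
// which URI-decodes to: command:vscode-kafka.open.docs.page?[{"page":"Consuming","section":"deserializer"}]

// Such links are embedded in the markdown descriptions defined in model.ts, so hover and
// completion documentation can deep-link into the extension's embedded docs pages:
const description = `[Deserializer](${uri}) to use for the key *[optional]*.`;
```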
18 changes: 17 additions & 1 deletion src/kafka-file/kafkaFileClient.ts
@@ -164,6 +164,12 @@ export function startLanguageClient(
const diagnostics = new KafkaFileDiagnostics(kafkaFileDocuments, languageService, clusterSettings, clientAccessor, modelProvider, workspaceSettings);
context.subscriptions.push(diagnostics);

// Hover
const hover = new KafkaFileHoverProvider(kafkaFileDocuments, languageService);
context.subscriptions.push(
vscode.languages.registerHoverProvider(documentSelector, hover)
);

// Open / Close document
context.subscriptions.push(vscode.workspace.onDidOpenTextDocument(e => {
if (e.languageId === 'kafka') {
@@ -277,7 +283,7 @@ class KafkaFileCompletionItemProvider extends AbstractKafkaFileFeature implement
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doComplete(document, kafkaFileDocument, this.workspaceSettings.producerFakerJSEnabled, position);
}, new vscode.CompletionList(), `Error while computing code lenses for ${document.uri}`, token);
}, new vscode.CompletionList(), `Error while computing completion for ${document.uri}`, token);
}

}
@@ -359,3 +365,13 @@ class KafkaFileDiagnostics extends AbstractKafkaFileFeature implements vscode.Di
this.diagnosticCollection.dispose();
}
}

class KafkaFileHoverProvider extends AbstractKafkaFileFeature implements vscode.HoverProvider {
provideHover(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken): vscode.ProviderResult<vscode.Hover> {
return runSafeAsync(async () => {
const kafkaFileDocument = this.getKafkaFileDocument(document);
return this.languageService.doHover(document, kafkaFileDocument, position);
}, null, `Error while computing hover for ${document.uri}`, token);
}

}
23 changes: 21 additions & 2 deletions src/kafka-file/languageservice/kafkaFileLanguageService.ts
@@ -1,11 +1,12 @@
import { CodeLens, CompletionList, Diagnostic, Position, TextDocument, Uri } from "vscode";
import { CodeLens, CompletionList, Diagnostic, Hover, Position, TextDocument, Uri } from "vscode";
import { ClientState, ConsumerLaunchState } from "../../client";
import { BrokerConfigs } from "../../client/config";
import { ProducerLaunchState } from "../../client/producer";
import { KafkaFileDocument, parseKafkaFile } from "./parser/kafkaFileParser";
import { KafkaFileCodeLenses } from "./services/codeLensProvider";
import { KafkaFileCompletion } from "./services/completion";
import { KafkaFileDiagnostics } from "./services/diagnostics";
import { KafkaFileHover } from "./services/hover";

/**
* Provider API which gets the state for a given producer.
@@ -49,6 +50,7 @@ export interface TopicProvider {
*
*/
export interface LanguageService {

/**
* Parse the given text document and returns an AST.
*
@@ -85,6 +87,15 @@ export interface LanguageService {
* @param kafkaFileDocument the parsed AST.
*/
doDiagnostics(document: TextDocument, kafkaFileDocument: KafkaFileDocument, producerFakerJSEnabled: boolean): Promise<Diagnostic[]>;

/**
* Returns the hover result for the given text document and parsed AST at given position.
*
* @param document the text document.
* @param kafkaFileDocument the parsed AST.
* @param position the position where the hover was triggered.
*/
doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined>;
}

/**
@@ -100,10 +111,18 @@ export function getLanguageService(producerLaunchStateProvider: ProducerLaunchSt
const codeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider);
const completion = new KafkaFileCompletion(selectedClusterProvider, topicProvider);
const diagnostics = new KafkaFileDiagnostics(selectedClusterProvider, topicProvider);
const hover = new KafkaFileHover(selectedClusterProvider, topicProvider);
return {
parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document),
getCodeLenses: codeLenses.getCodeLenses.bind(codeLenses),
doComplete: completion.doComplete.bind(completion),
doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics)
doDiagnostics: diagnostics.doDiagnostics.bind(diagnostics),
doHover: hover.doHover.bind(hover)
};
}

export function createTopicDocumentation(topic: TopicDetail): string {
return `Topic \`${topic.id}\`\n` +
` * partition count: \`${topic.partitionCount}\`\n` +
` * replication factor: \`${topic.replicationFactor}\`\n`;
}
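The hover service itself (`src/kafka-file/languageservice/services/hover.ts`) is among the files not rendered in this view. The following is only a rough sketch of the shape such a service could take, built from the APIs visible in this diff (`findNodeAt`, `range()`, `createTopicDocumentation`); the provider method names `getSelectedCluster`/`getTopics`, the imported interface names, and the direct access to `property.value` are assumptions, not the commit's actual implementation:

```typescript
import { Hover, MarkdownString, Position, TextDocument } from "vscode";
import { Chunk, KafkaFileDocument, Property } from "../parser/kafkaFileParser";
import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";

export class KafkaFileHover {

    constructor(
        private selectedClusterProvider: SelectedClusterProvider,
        private topicProvider: TopicProvider) {
    }

    async doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined> {
        // Locate the most specific AST node under the cursor (findNodeAt is added by this commit).
        const node = kafkaFileDocument.findNodeAt(position);
        if (!(node instanceof Chunk) || !(node.parent instanceof Property)) {
            return undefined;
        }
        const property = node.parent;
        if (node === property.value && property.propertyName === 'topic') {
            // Hovering a topic name: show its partition count and replication factor.
            const { clusterId } = this.selectedClusterProvider.getSelectedCluster();
            const topics = await this.topicProvider.getTopics(clusterId);
            const topic = topics.find(t => t.id === property.propertyValue);
            return topic
                ? new Hover(new MarkdownString(createTopicDocumentation(topic)), node.range())
                : undefined;
        }
        // Hovering a property key: the actual implementation returns the property's
        // documentation from the model definitions (see model.ts below).
        return undefined;
    }
}
```

The `KafkaFileHoverProvider` registered in `kafkaFileClient.ts` above simply wraps this `doHover` call in `runSafeAsync`.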
32 changes: 19 additions & 13 deletions src/kafka-file/languageservice/model.ts
@@ -1,3 +1,5 @@
import { getDocumentationPageUri } from "../../docs/markdownPreviewProvider";

export class Model {

private cache = new Map<string, ModelDefinition>();
@@ -16,18 +18,22 @@ export class Model {
}

public hasDefinitionEnum(name: string, value: string): boolean {
return this.getDefinitionEnum(name, value) !== undefined;
}

public getDefinitionEnum(name: string, value: string): ModelDefinition | undefined {
const definition = this.getDefinition(name);
if (!definition) {
return false;
return undefined;
}
if (definition.enum) {
for (const item of definition.enum) {
if (item.name === value) {
return true;
return item;
}
}
}
return false;
return undefined;
}
}
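This refactoring is what hover needs: validation only asks whether a value is legal, while hover wants the matching enum item so it can show that item's `description`. An illustrative use, with the property and value names taken from the definitions below (the `consumerModel` instance is hypothetical):

```typescript
declare const consumerModel: Model; // a Model built from the consumer property definitions (illustrative)

// Validation: a simple yes/no answer is enough.
const valid = consumerModel.hasDefinitionEnum("key-format", "string");

// Hover: retrieve the matching enum item and surface its documentation.
const item = consumerModel.getDefinitionEnum("key-format", "string");
const documentation = item?.description;
```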

@@ -59,7 +65,7 @@ const consumerProperties = [
},
{
name: "key-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the key *[optional]*.",
description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "none",
@@ -93,7 +99,7 @@ const consumerProperties = [
},
{
name: "value-format",
description: "[Deserializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Consuming.md#Deserializer) to use for the value *[optional]*.",
description: `[Deserializer](${getDocumentationPageUri('Consuming', 'deserializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "none",
@@ -148,7 +154,7 @@ const producerProperties = [
},
{
name: "key-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.",
description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the key *[optional]*.`,
enum: [
{
name: "string",
@@ -178,7 +184,7 @@ const producerProperties = [
},
{
name: "value-format",
description: "[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.",
description: `[Serializer](${getDocumentationPageUri('Producing', 'serializer')}) to use for the value *[optional]*.`,
enum: [
{
name: "string",
@@ -381,18 +387,18 @@ const fakerjsAPI = [
] as ModelDefinition[];

export interface PartModelProvider {
getPart(name: string) : PartModelProvider | undefined;
getPart(name: string): PartModelProvider | undefined;
}

class PartModel implements PartModelProvider{
class PartModel implements PartModelProvider {

private cache = new Map<string, PartModelProvider>();

getPart(name: string) : PartModelProvider | undefined {
getPart(name: string): PartModelProvider | undefined {
return this.cache.get(name);
}

getOrCreate(name: string) : PartModelProvider {
getOrCreate(name: string): PartModelProvider {
let part = this.getPart(name);
if (!part) {
part = new PartModel();
@@ -411,12 +417,12 @@ class FakerJSModel extends Model implements PartModelProvider {
const parts = definition.name.split('.');
let partModel = this.root;
parts.forEach(part => {
partModel = <PartModel> partModel.getOrCreate(part);
partModel = <PartModel>partModel.getOrCreate(part);
});
});
}

getPart(name: string) : PartModelProvider | undefined {
getPart(name: string): PartModelProvider | undefined {
return this.root.getPart(name);
}
}
39 changes: 32 additions & 7 deletions src/kafka-file/languageservice/parser/kafkaFileParser.ts
@@ -17,7 +17,9 @@ export enum NodeKind {
export interface Node {
start: Position;
end: Position;
range(): Range;
findNodeBefore(offset: Position): Node;
findNodeAt(offset: Position): Node;
lastChild: Node | undefined;
parent: Node | undefined;
kind: NodeKind;
@@ -32,10 +34,20 @@ class BaseNode implements Node {

}

public range(): Range {
const start = this.start;
const end = this.end;
return new Range(start, end);
}

public findNodeBefore(offset: Position): Node {
return this;
}

public findNodeAt(offset: Position): Node {
return this;
}

public get lastChild(): Node | undefined { return undefined; }
}

@@ -65,6 +77,17 @@ class ChildrenNode<T extends Node> extends BaseNode {
return this;
}

public findNodeAt(offset: Position): Node {
const idx = findFirst(this.children, c => offset.isBeforeOrEqual(c.start)) - 1;
if (idx >= 0) {
const child = this.children[idx];
if (offset.isAfter(child.start) && offset.isBeforeOrEqual(child.end)) {
return child.findNodeAt(offset);
}
}
return this;
}

public get lastChild(): Node | undefined { return this.children.length ? this.children[this.children.length - 1] : void 0; };
}
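`findFirst` is imported from the project's utilities and is not part of this diff; it is the usual binary search returning the index of the first element that satisfies the predicate, so `findNodeAt` picks the last child starting before the position and recurses into it when the position falls inside that child's range. A sketch of that contract (assumed implementation, not the project's actual code):

```typescript
/**
 * Returns the index of the first element in `array` for which `p` is true,
 * or `array.length` if there is none. Assumes `p` is monotone over the
 * (position-sorted) array, which holds for AST children. Runs in O(log n).
 */
function findFirst<T>(array: T[], p: (x: T) => boolean): number {
    let low = 0;
    let high = array.length;
    while (low < high) {
        const mid = Math.floor((low + high) / 2);
        if (p(array[mid])) {
            high = mid;      // first match is at mid or before it
        } else {
            low = mid + 1;   // first match is after mid
        }
    }
    return low;
}
```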

@@ -84,6 +107,7 @@ export class Chunk extends BaseNode {
constructor(public readonly content: string, start: Position, end: Position, kind: NodeKind) {
super(start, end, kind);
}

}

export class Property extends BaseNode {
@@ -106,12 +130,6 @@ export class Property extends BaseNode {
return this.value?.content.trim();
}

public get propertyRange(): Range {
const start = this.start;
const end = this.end;
return new Range(start, end);
}

public get propertyKeyRange(): Range {
const start = this.start;
const end = this.assignerCharacter ? new Position(this.start.line, this.assignerCharacter) : this.end;
@@ -159,6 +177,13 @@ export class Property extends BaseNode {
}
return true;
}

findNodeAt(position: Position): Node {
if (this.isBeforeAssigner(position)) {
return this.key?.findNodeAt(position) || this;
}
return this.value?.findNodeAt(position) || this;
}
}
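Concretely, for a line such as `topic=my-topic` the branch above works like this (illustrative positions):

```typescript
// Given the .kafka line:   topic=my-topic
//
//  - a position inside "topic"    (before the '=' assigner) resolves to the key chunk,
//    so hover can document the `topic` property itself;
//  - a position inside "my-topic" (after the '=' assigner)  resolves to the value chunk,
//    so hover can show the topic's partition count and replication factor.
```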

export abstract class Block extends ChildrenNode<Property | Chunk> {
Expand All @@ -175,7 +200,7 @@ export abstract class Block extends ChildrenNode<Property | Chunk> {

getPropertyValue(name: string): string | undefined {
const property = this.getProperty(name);
return property?.value?.content;
return property?.propertyValue;
}

getProperty(name: string): Property | undefined {
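One behavioral nuance in the `getPropertyValue` change above: `propertyValue` trims the raw chunk content, so surrounding whitespace no longer leaks into lookups such as topic validation or hover. Illustrative sketch (the `block` instance is hypothetical):

```typescript
import { Block } from "./kafkaFileParser"; // Block is exported by the parser shown above

declare const block: Block; // a parsed CONSUMER/PRODUCER block — illustrative only

// For a block containing the line:   topic=  my-topic
const raw = block.getProperty("topic")?.value?.content; // "  my-topic" — raw chunk content (old behavior)
const topicId = block.getPropertyValue("topic");        // "my-topic"  — trimmed via propertyValue (new behavior)
```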