forked from jlandersen/vscode-kafka
Provide documentation on hover, in .kafka files
Fixes jlandersen#149
Signed-off-by: azerr <azerr@redhat.com>
1 parent 399d860, commit 1f832c7
Showing 15 changed files with 641 additions and 44 deletions.
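For context, the hovers added by this commit target .kafka files such as the sketch below. The block layout follows the consumer and producer blocks exercised in the tests further down; the topic name, key, separator line, and FakerJS expression are illustrative, not taken from the commit:

CONSUMER
topic: abcd
from: earliest
key-format: string
value-format: string

###

PRODUCER
topic: abcd
key: a-key
value-format: string
{{random.words}}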
@@ -0,0 +1,170 @@
import { Hover, MarkdownString, Position, Range, TextDocument } from "vscode";
import { getDocumentationPageUri } from "../../../docs/markdownPreviewProvider";
import { createTopicDocumentation, SelectedClusterProvider, TopicProvider } from "../kafkaFileLanguageService";
import { consumerModel, Model, producerModel } from "../model";
import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser";

export class KafkaFileHover {

    constructor(private selectedClusterProvider: SelectedClusterProvider, private topicProvider: TopicProvider) {

    }

    async doHover(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise<Hover | undefined> {
        // Get the AST node at the position where hover was triggered
        const node = kafkaFileDocument.findNodeAt(position);
        if (!node) {
            return;
        }
        switch (node.kind) {

            case NodeKind.consumerBlock: {
                const block = <Block>node;
                const topic = block.getPropertyValue('topic');
                return createHover(`Consumer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Consuming', 'kafka-file')}) for more informations.`, node.range());
            }

            case NodeKind.producerBlock: {
                const block = <Block>node;
                const topic = block.getPropertyValue('topic');
                return createHover(`Producer declaration${topic ? ` for topic \`${topic}\`` : ''}.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more informations.`, node.range());
            }

            case NodeKind.propertyKey: {
                const propertyKey = <Chunk>node;
                const property = <Property>propertyKey.parent;
                const propertyName = propertyKey.content;
                const propertyKeyRange = propertyKey.range();
                const block = <Block>property.parent;
                if (block.type === BlockType.consumer) {
                    // CONSUMER
                    // key|:

                    // or

                    // CONSUMER
                    // key|
                    return await this.getHoverForConsumerPropertyNames(propertyName, propertyKeyRange, <ConsumerBlock>block);
                } else {
                    // PRODUCER
                    // key|:
                    return await this.getHoverForProducerPropertyNames(propertyName, propertyKeyRange, <ProducerBlock>block);
                }
            }

            case NodeKind.propertyValue: {
                const propertyValue = <Chunk>node;
                const property = <Property>propertyValue.parent;
                const block = <Block>property.parent;
                if (block.type === BlockType.consumer) {
                    // CONSUMER
                    // key-format: |
                    return await this.getHoverForConsumerPropertyValues(propertyValue, property, <ConsumerBlock>block);
                } else {
                    // PRODUCER
                    // key-format: |
                    return await this.getHoverForProducerPropertyValues(propertyValue, property, <ProducerBlock>block);
                }
            }

            case NodeKind.mustacheExpression: {
                const expression = <MustacheExpression>node;
                return createHover(`FakerJS expression.\n\nSee [here](${getDocumentationPageUri('Producing', 'randomized-content')}) for more informations.`, expression.enclosedExpressionRange);
            }

            case NodeKind.producerValue: {
                return createHover(`Producer value.\n\nSee [here](${getDocumentationPageUri('Producing', 'kafka-file')}) for more informations.`, node.range());
            }
        }
    }

    async getHoverForConsumerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ConsumerBlock): Promise<Hover | undefined> {
        return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, consumerModel);
    }

    async getHoverForProducerPropertyNames(propertyName: string, propertyKeyRange: Range, block: ProducerBlock): Promise<Hover | undefined> {
        return await this.getHoverForPropertyNames(propertyName, propertyKeyRange, block, producerModel);
    }

    async getHoverForPropertyNames(propertyName: string, propertyKeyRange: Range, block: Block, metadata: Model): Promise<Hover | undefined> {
        const definition = metadata.getDefinition(propertyName);
        if (definition && definition.description) {
            return createHover(definition.description, propertyKeyRange);
        }
    }

    async getHoverForConsumerPropertyValues(propertyValue: Chunk, property: Property, block: ConsumerBlock): Promise<Hover | undefined> {
        const propertyName = property.propertyName;
        switch (propertyName) {
            case 'topic':
                // CONSUMER
                // topic: |
                return await this.getHoverForTopic(property);
            default:
                // CONSUMER
                // key-format: |
                return await this.getHoverForPropertyValues(propertyValue, property, block, consumerModel);
        }
    }

    async getHoverForProducerPropertyValues(propertyValue: Chunk, property: Property, block: ProducerBlock): Promise<Hover | undefined> {
        const propertyName = property.propertyName;
        switch (propertyName) {
            case 'topic':
                // PRODUCER
                // topic: |
                return await this.getHoverForTopic(property);
            default:
                // PRODUCER
                // key-format: |
                return await this.getHoverForPropertyValues(propertyValue, property, block, producerModel);
        }
    }

    async getHoverForTopic(property: Property): Promise<Hover | undefined> {
        const propertyValue = property.value;
        if (!propertyValue) {
            return;
        }
        const { clusterId } = this.selectedClusterProvider.getSelectedCluster();
        if (!clusterId) {
            return;
        }

        try {
            const topicId = propertyValue.content.trim();
            const topics = await this.topicProvider.getTopics(clusterId);
            if (topics.length > 0) {
                const topic = topics
                    .find(t => t.id === topicId);
                if (topic) {
                    return createHover(createTopicDocumentation(topic), propertyValue.range());
                }
            }
        }
        catch (e) {
            return;
        }

        return undefined;
    }

    async getHoverForPropertyValues(propertyValue: Chunk, property: Property, block: Block, metadata: Model): Promise<Hover | undefined> {
        const propertyName = property.propertyName;
        if (!propertyName) {
            return;
        }
        const definition = metadata.getDefinitionEnum(propertyName, propertyValue.content.trim());
        if (definition && definition.description) {
            return createHover(definition.description, property.propertyTrimmedValueRange);
        }
        return undefined;
    }
}

function createHover(contents: string, range?: Range): Hover {
    const doc = new MarkdownString(contents);
    doc.isTrusted = true;
    return new Hover(doc, range);
}
src/test/suite/kafka-file/languageservice/hover.test.ts (309 additions, 0 deletions)
@@ -0,0 +1,309 @@
import { ClientState } from "../../../../client";
import { getLanguageService } from "../../../../kafka-file/languageservice/kafkaFileLanguageService";
import { assertHover, hover, LanguageServiceConfig, position } from "./kafkaAssert";

suite("Kafka File Hover Test Suite", () => {

    test("Empty hover", async () => {
        await assertHover('');

        await assertHover('ab|cd');

    });

});

suite("Kafka File CONSUMER Hover Test Suite", () => {

    test("CONSUMER declaration no topic Hover", async () => {

        await assertHover(
            'CONS|UMER\n',
            hover(
                `Consumer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
                position(0, 0),
                position(1, 0)
            )
        );

    });

    test("CONSUMER declaration with topic Hover", async () => {

        await assertHover(
            'CONS|UMER\n' +
            'topic: abcd',
            hover(
                `Consumer declaration for topic \`abcd\`.\n\n\See [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
                position(0, 0),
                position(1, 11)
            )
        );

    });

    test("topic property name Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'top|ic: abcd',
            hover(
                `The topic id *[required]*`,
                position(1, 0),
                position(1, 5)
            )
        );

    });

    test("topic property value Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'topic: ab|cd'
        );

        const languageServiceConfig = new LanguageServiceConfig();
        languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
        const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
        languageServiceConfig.setSelectedCluster(connectedCluster);
        const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);

        await assertHover(
            'CONSUMER\n' +
            'topic: ab|cd',
            hover(
                'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
                position(1, 6),
                position(1, 11)
            ),
            languageService
        );

    });

    test("from property name Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'fro|m: earliest',
            hover(
                'The offset from which the consumer group will start consuming messages from. Possible values are: `earliest`, `latest`, or an integer value. *[optional]*.',
                position(1, 0),
                position(1, 4)
            )
        );

    });

    test("key-format property name Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'key-for|mat: string',
            hover(
                '[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the key *[optional]*.',
                position(1, 0),
                position(1, 10)
            )
        );

    });

    test("key-format property value Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'key-format: stri|ng',
            hover(
                'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
                position(1, 12),
                position(1, 18)
            )
        );

    });

    test("value-format property name Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'value-for|mat: string',
            hover(
                '[Deserializer](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Consuming%22%2C%22section%22%3A%22deserializer%22%7D%5D) to use for the value *[optional]*.',
                position(1, 0),
                position(1, 12)
            )
        );

    });

    test("value-format property value Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'value-format: stri|ng',
            hover(
                'Similar deserializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringDeserializer.java) which currently only supports `UTF-8` encoding.',
                position(1, 14),
                position(1, 20)
            )
        );

    });

    test("partitions property name Hover", async () => {

        await assertHover(
            'CONSUMER\n' +
            'partition|s: 0',
            hover(
                'the partition number(s), or a partitions range, or a combinaison of partitions ranges *[optional]*. eg:\n* 0\n* 0,1,2\n* 0-2\n* 0,2-3',
                position(1, 0),
                position(1, 10)
            )
        );

    });

});

suite("Kafka File PRODUCER Hover Test Suite", () => {

    test("PRODUCER declaration no topic Hover", async () => {

        await assertHover(
            'PRODU|CER\n',
            hover(
                `Producer declaration.\n\nSee [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
                position(0, 0),
                position(1, 0)
            )
        );

    });

    test("PRODUCER declaration with topic Hover", async () => {

        await assertHover(
            'PRODU|CER\n' +
            'topic: abcd',
            hover(
                `Producer declaration for topic \`abcd\`.\n\n\See [here](command:vscode-kafka.open.docs.page?%5B%7B%22page%22%3A%22Producing%22%2C%22section%22%3A%22kafka-file%22%7D%5D) for more informations.`,
                position(0, 0),
                position(1, 11)
            )
        );
    });

    test("topic property name Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'top|ic: abcd',
            hover(
                `The topic id *[required]*`,
                position(1, 0),
                position(1, 5)
            )
        );

    });

    test("topic property value Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'topic: ab|cd'
        );

        const languageServiceConfig = new LanguageServiceConfig();
        languageServiceConfig.setTopics('cluster1', [{ id: 'abcd', partitionCount: 1, replicationFactor: 1 }]);
        const connectedCluster = { clusterId: 'cluster1', clusterName: 'CLUSTER_1', clusterState: ClientState.connected };
        languageServiceConfig.setSelectedCluster(connectedCluster);
        const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig);

        await assertHover(
            'PRODUCER\n' +
            'topic: ab|cd',
            hover(
                'Topic `abcd`\n * partition count: `1`\n * replication factor: `1`\n',
                position(1, 6),
                position(1, 11)
            ),
            languageService
        );

    });

    test("key property name Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'ke|y: abcd',
            hover(
                'The key *[optional]*.',
                position(1, 0),
                position(1, 3)
            )
        );

    });

    test("key-format property name Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'key-for|mat: string',
            hover(
                '[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the key *[optional]*.',
                position(1, 0),
                position(1, 10)
            )
        );

    });

    test("key-format property value Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'key-format: stri|ng',
            hover(
                'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringDeserializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/StringSerializer.java) which currently only supports `UTF-8` encoding.',
                position(1, 12),
                position(1, 18)
            )
        );

    });

    test("value-format property name Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'value-for|mat: string',
            hover(
                '[Serializer](https://github.com/jlandersen/vscode-kafka/blob/master/docs/Producing.md#Serializer) to use for the value *[optional]*.',
                position(1, 0),
                position(1, 12)
            )
        );

    });

    test("value-format property value Hover", async () => {

        await assertHover(
            'PRODUCER\n' +
            'value-format: stri|ng',
            hover(
                'Similar serializer to the Kafka Java client [org.apache.kafka.common.serialization.StringSerializer](https://github.com/apache/kafka/blob/master/clients/src/main/java/org/apache/kafka/common/serialization/Seserializer.java) which currently only supports `UTF-8` encoding.',
                position(1, 14),
                position(1, 20)
            )
        );

    });

});
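The assertHover, hover, and position helpers come from ./kafkaAssert, which is not part of this diff. The fixtures rely on a cursor convention: the '|' character marks where the hover is requested and is stripped before the text is parsed. A hypothetical helper illustrating that convention (extractCursor is an illustrative name, not the real kafkaAssert code) could look like this:

import { Position } from "vscode";

// Remove the '|' marker from a test fixture and return the cleaned text plus
// the 0-based Position the marker pointed at.
export function extractCursor(contentWithCursor: string): { content: string, cursor: Position } {
    const offset = contentWithCursor.indexOf('|');
    const content = contentWithCursor.substring(0, offset) + contentWithCursor.substring(offset + 1);
    const before = content.substring(0, offset);
    const line = before.split('\n').length - 1;                // lines before the marker
    const character = offset - (before.lastIndexOf('\n') + 1); // column on that line
    return { content, cursor: new Position(line, character) };
}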