diff --git a/CHANGELOG.md b/CHANGELOG.md index d398dab..f2ba6e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ All notable changes to `Tools for Apache Kafka®` are documented in this file. - Declare key/value formats for PRODUCER in kafka file. See [#113](https://github.com/jlandersen/vscode-kafka/issues/113). - Completion support for property names and values of CONSUMER and PRODUCER blocks. See [#146](https://github.com/jlandersen/vscode-kafka/issues/146). - Completion support for fakerJS PRODUCER key and value. See [#152](https://github.com/jlandersen/vscode-kafka/issues/152). +- Completion support for available topics for CONSUMER and PRODUCER blocks. See [#150](https://github.com/jlandersen/vscode-kafka/issues/150). ### Changed - Renamed extension as `Tools for Apache Kafka®` diff --git a/docs/Consuming.md b/docs/Consuming.md index 4260b89..bcfa595 100644 --- a/docs/Consuming.md +++ b/docs/Consuming.md @@ -85,6 +85,10 @@ Completion is available for ![Property value completion](assets/kafka-file-consumer-property-value-completion.png) + * topic: + +![Topic completion](assets/kafka-file-consumer-topic-completion.png) + ### Start Consumer command ![Start Consumer from command palette](assets/start-consumer-from-command.png) diff --git a/docs/Producing.md b/docs/Producing.md index 423c061..3ae9e51 100644 --- a/docs/Producing.md +++ b/docs/Producing.md @@ -59,10 +59,14 @@ Completion is available for ![Property value completion](assets/kafka-file-producer-property-value-completion.png) - * randomized content (see following section) + * randomized content (see following section): ![FakerJS completion](assets/kafka-file-producer-fakerjs-completion.png) + * topic: + +![Topic completion](assets/kafka-file-producer-topic-completion.png) + ## Randomized content Record content can be randomized by injecting mustache-like placeholders of [faker.js properties](https://github.com/Marak/faker.js#api-methods), like ``{{name.lastName}}`` or ``{{random.number}}``. Some randomized properties can be localized via the `kafka.producers.fakerjs.locale` setting. diff --git a/docs/assets/kafka-file-consumer-topic-completion.png b/docs/assets/kafka-file-consumer-topic-completion.png new file mode 100644 index 0000000..2146f54 Binary files /dev/null and b/docs/assets/kafka-file-consumer-topic-completion.png differ diff --git a/docs/assets/kafka-file-producer-topic-completion.png b/docs/assets/kafka-file-producer-topic-completion.png new file mode 100644 index 0000000..dc5df68 Binary files /dev/null and b/docs/assets/kafka-file-producer-topic-completion.png differ diff --git a/src/explorer/kafkaExplorer.ts b/src/explorer/kafkaExplorer.ts index a0d8502..87bf643 100644 --- a/src/explorer/kafkaExplorer.ts +++ b/src/explorer/kafkaExplorer.ts @@ -4,7 +4,7 @@ import { ClientAccessor } from "../client"; import { WorkspaceSettings, ClusterSettings } from "../settings"; import { NodeBase } from "./models/nodeBase"; import { TreeView } from "vscode"; -import { KafkaModel } from "./models/kafka"; +import { KafkaModel, KafkaModelProvider } from "./models/kafka"; import { ClusterItem } from "./models/cluster"; import { EOL } from 'os'; import { TopicItem } from "./models/topics"; @@ -17,7 +17,7 @@ const TREEVIEW_ID = 'kafkaExplorer'; /** * Kafka explorer to show in a tree clusters, topics. 
*/ -export class KafkaExplorer implements vscode.Disposable, vscode.TreeDataProvider { +export class KafkaExplorer implements KafkaModelProvider, vscode.Disposable, vscode.TreeDataProvider { private onDidChangeTreeDataEvent: vscode.EventEmitter = new vscode.EventEmitter(); @@ -63,10 +63,7 @@ export class KafkaExplorer implements vscode.Disposable, vscode.TreeDataProvider async getChildren(element?: NodeBase): Promise { if (!element) { - if (!this.root) { - this.root = new KafkaModel(this.clusterSettings, this.clientAccessor); - } - element = this.root; + element = this.getDataModel(); } return element.getChildren(); } @@ -180,4 +177,17 @@ export class KafkaExplorer implements vscode.Disposable, vscode.TreeDataProvider } } + + /** + * Returns the kafka data model. + * + * @returns the kafka data model. + */ + public getDataModel(): KafkaModel { + if (!this.root) { + this.root = new KafkaModel(this.clusterSettings, this.clientAccessor); + } + return this.root; + } + } diff --git a/src/explorer/models/cluster.ts b/src/explorer/models/cluster.ts index 701f09c..73b5e11 100644 --- a/src/explorer/models/cluster.ts +++ b/src/explorer/models/cluster.ts @@ -61,10 +61,16 @@ export class ClusterItem extends NodeBase implements Disposable { } async findTopictemByName(topicName: string): Promise { - const topics = (await this.getTopicGroupItem()).getChildren(); - return topics - .then(t => - t.find(child => (child).topic.id === topicName)); + const topics = await this.getTopics(); + return topics.find(child => (child).topic.id === topicName); + } + + /** + * Returns the topics of the cluster. + * @returns the topics of the cluster. + */ + async getTopics() { + return (await this.getTopicGroupItem()).getChildren(); } private async getTopicGroupItem(): Promise { @@ -72,3 +78,5 @@ export class ClusterItem extends NodeBase implements Disposable { } } + + diff --git a/src/explorer/models/kafka.ts b/src/explorer/models/kafka.ts index 4911425..c845103 100644 --- a/src/explorer/models/kafka.ts +++ b/src/explorer/models/kafka.ts @@ -4,6 +4,10 @@ import { ClusterSettings } from "../../settings"; import { ClusterItem } from "./cluster"; import { NodeBase } from "./nodeBase"; +export interface KafkaModelProvider { + getDataModel(): KafkaModel; +} + export class KafkaModel extends NodeBase implements Disposable { public contextValue = ""; @@ -33,10 +37,8 @@ export class KafkaModel extends NodeBase implements Disposable { ); } - async findClusterItemById(clusterId: string): Promise { - return this.getChildren() - .then(clusters => - clusters.find(child => (child).cluster.id === clusterId) - ); + async findClusterItemById(clusterId: string): Promise { + const clusters = await this.getChildren(); + return clusters.find(child => (child).cluster.id === clusterId); } } diff --git a/src/extension.ts b/src/extension.ts index dba0328..f775d19 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -138,7 +138,7 @@ export function activate(context: vscode.ExtensionContext): KafkaExtensionPartic // .kafka file related context.subscriptions.push( - startLanguageClient(clusterSettings, producerCollection, consumerCollection, context) + startLanguageClient(clusterSettings, producerCollection, consumerCollection, explorer, context) ); context.subscriptions.push( diff --git a/src/kafka-file/kafkaFileClient.ts b/src/kafka-file/kafkaFileClient.ts index a7404d6..c2f5246 100644 --- a/src/kafka-file/kafkaFileClient.ts +++ b/src/kafka-file/kafkaFileClient.ts @@ -5,13 +5,16 @@ import { ClusterSettings } from "../settings/clusters"; import 
{ getLanguageModelCache, LanguageModelCache } from './languageModelCache'; import { KafkaFileDocument } from "./languageservice/parser/kafkaFileParser"; -import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider } from "./languageservice/kafkaFileLanguageService"; +import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider, TopicDetail, TopicProvider } from "./languageservice/kafkaFileLanguageService"; import { runSafeAsync } from "./utils/runner"; +import { TopicItem } from "../explorer"; +import { KafkaModelProvider } from "../explorer/models/kafka"; export function startLanguageClient( clusterSettings: ClusterSettings, producerCollection: ProducerCollection, consumerCollection: ConsumerCollection, + modelProvider: KafkaModelProvider, context: vscode.ExtensionContext ): vscode.Disposable { @@ -20,7 +23,7 @@ export function startLanguageClient( const kafkaFileDocuments = getLanguageModelCache(10, 60, document => languageService.parseKafkaFileDocument(document)); // Create the Kafka file language service. - const languageService = createLanguageService(clusterSettings, producerCollection, consumerCollection); + const languageService = createLanguageService(clusterSettings, producerCollection, consumerCollection, modelProvider); // Open / Close document context.subscriptions.push(vscode.workspace.onDidOpenTextDocument(e => { @@ -74,7 +77,7 @@ export function startLanguageClient( }; } -function createLanguageService(clusterSettings: ClusterSettings, producerCollection: ProducerCollection, consumerCollection: ConsumerCollection): LanguageService { +function createLanguageService(clusterSettings: ClusterSettings, producerCollection: ProducerCollection, consumerCollection: ConsumerCollection, modelProvider: KafkaModelProvider): LanguageService { const producerLaunchStateProvider = { getProducerLaunchState(uri: vscode.Uri): ProducerLaunchState { const producer = producerCollection.get(uri); @@ -90,7 +93,6 @@ function createLanguageService(clusterSettings: ClusterSettings, producerCollect } as ConsumerLaunchStateProvider; const selectedClusterProvider = { - getSelectedCluster() { const selected = clusterSettings.selected; return { @@ -98,10 +100,22 @@ function createLanguageService(clusterSettings: ClusterSettings, producerCollect clusterName: selected?.name, }; } - } as SelectedClusterProvider; - return getLanguageService(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider); + const topicProvider = { + async getTopics(clusterId: string): Promise { + // Retrieve the proper cluster item from the explorer + const model = modelProvider.getDataModel(); + const cluster = await model.findClusterItemById(clusterId); + if (!cluster) { + return []; + } + // Returns topics from the cluster + return (await cluster.getTopics()).map(child => (child).topic); + } + } as TopicProvider; + + return getLanguageService(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider, topicProvider); } class AbstractKafkaFileFeature { diff --git a/src/kafka-file/languageservice/kafkaFileLanguageService.ts b/src/kafka-file/languageservice/kafkaFileLanguageService.ts index 384da38..654f448 100644 --- a/src/kafka-file/languageservice/kafkaFileLanguageService.ts +++ b/src/kafka-file/languageservice/kafkaFileLanguageService.ts @@ -26,6 +26,19 @@ export interface SelectedClusterProvider { getSelectedCluster(): { clusterId?: string, 
clusterName?: string }; } +export interface TopicDetail { + id: string; + partitionCount: number; + replicationFactor: number; +} + +/** + * Provider API which gets topics from given cluster id. + */ +export interface TopicProvider { + getTopics(clusterid: string): Promise; +} + /** * Kafka language service API. * @@ -52,12 +65,12 @@ export interface LanguageService { /** * Returns the completion result for the given text document and parsed AST at given position. - * + * * @param document the text document. * @param kafkaFileDocument the parsed AST. * @param position the position where the completion was triggered. */ - doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): CompletionList | undefined + doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise } /** @@ -66,11 +79,12 @@ export interface LanguageService { * @param producerLaunchStateProvider the provider which gets the state for a given producer. * @param consumerLaunchStateProvider the provider which gets the state for a given consumer. * @param selectedClusterProvider the provider which gets the selected cluster id and name. + * @param topicProvider the provider which returns topics from a given cluster id. */ -export function getLanguageService(producerLaunchStateProvider: ProducerLaunchStateProvider, consumerLaunchStateProvider: ConsumerLaunchStateProvider, selectedClusterProvider: SelectedClusterProvider): LanguageService { +export function getLanguageService(producerLaunchStateProvider: ProducerLaunchStateProvider, consumerLaunchStateProvider: ConsumerLaunchStateProvider, selectedClusterProvider: SelectedClusterProvider, topicProvider: TopicProvider): LanguageService { const kafkaFileCodeLenses = new KafkaFileCodeLenses(producerLaunchStateProvider, consumerLaunchStateProvider, selectedClusterProvider); - const kafkaFileCompletion = new KafkaFileCompletion(); + const kafkaFileCompletion = new KafkaFileCompletion(selectedClusterProvider, topicProvider); return { parseKafkaFileDocument: (document: TextDocument) => parseKafkaFile(document), getCodeLenses: kafkaFileCodeLenses.getCodeLenses.bind(kafkaFileCodeLenses), diff --git a/src/kafka-file/languageservice/services/codeLensProvider.ts b/src/kafka-file/languageservice/services/codeLensProvider.ts index 74a2bc6..4f4fcb2 100644 --- a/src/kafka-file/languageservice/services/codeLensProvider.ts +++ b/src/kafka-file/languageservice/services/codeLensProvider.ts @@ -5,6 +5,9 @@ import { LaunchConsumerCommand, ProduceRecordCommand, ProduceRecordCommandHandle import { ProducerLaunchStateProvider, ConsumerLaunchStateProvider, SelectedClusterProvider } from "../kafkaFileLanguageService"; import { Block, BlockType, ConsumerBlock, KafkaFileDocument, ProducerBlock } from "../parser/kafkaFileParser"; +/** + * Kafka file codeLens support. 
+ */ export class KafkaFileCodeLenses { constructor(private producerLaunchStateProvider: ProducerLaunchStateProvider, private consumerLaunchStateProvider: ConsumerLaunchStateProvider, private selectedClusterProvider: SelectedClusterProvider) { diff --git a/src/kafka-file/languageservice/services/completion.ts b/src/kafka-file/languageservice/services/completion.ts index defdcf6..a4ada38 100644 --- a/src/kafka-file/languageservice/services/completion.ts +++ b/src/kafka-file/languageservice/services/completion.ts @@ -1,15 +1,24 @@ import { TextDocument, Position, CompletionList, CompletionItem, SnippetString, MarkdownString, CompletionItemKind, Range } from "vscode"; +import { SelectedClusterProvider, TopicDetail, TopicProvider } from "../kafkaFileLanguageService"; import { consumerProperties, fakerjsAPI, ModelDefinition, producerProperties } from "../model"; import { Block, BlockType, Chunk, ConsumerBlock, KafkaFileDocument, MustacheExpression, NodeKind, ProducerBlock, Property } from "../parser/kafkaFileParser"; +/** + * Kafka file completion support. + */ export class KafkaFileCompletion { - doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): CompletionList | undefined { + constructor(private selectedClusterProvider: SelectedClusterProvider, private topicProvider: TopicProvider) { + + } + async doComplete(document: TextDocument, kafkaFileDocument: KafkaFileDocument, position: Position): Promise { + // Get the AST node before the position where completion was triggered const node = kafkaFileDocument.findNodeBefore(position); if (!node) { return; } + // Following comments will use the '|' character to show the position where the completion is triggered const items: Array = []; switch (node.kind) { case NodeKind.consumerBlock: { @@ -17,7 +26,7 @@ export class KafkaFileCompletion { // CONSUMER // | const lineRange = document.lineAt(position.line).range; - this.collectConsumerPropertyNames(undefined, lineRange, node, items); + await this.collectConsumerPropertyNames(undefined, lineRange, node, items); } break; } @@ -26,7 +35,7 @@ export class KafkaFileCompletion { // PRODUCER // | const lineRange = document.lineAt(position.line).range; - this.collectProducerPropertyNames(undefined, lineRange, node, items); + await this.collectProducerPropertyNames(undefined, lineRange, node, items); } break; } @@ -35,32 +44,17 @@ export class KafkaFileCompletion { const previous = new Position(position.line - 1, 1); const previousNode = kafkaFileDocument.findNodeBefore(previous); if (previousNode && previousNode.kind !== NodeKind.producerValue) { + // PRODUCER + // topic: abcd + // | + + // or + + // PRODUCER + // to|pic const lineRange = document.lineAt(position.line).range; const block = (previousNode.kind === NodeKind.producerBlock) ?
previousNode : previousNode.parent; - this.collectProducerPropertyNames(undefined, lineRange, block, items); - } - break; - } - case NodeKind.propertyKey: { - const propertyKey = node; - const block = propertyKey.parent; - const lineRange = document.lineAt(position.line).range; - const propertyName = propertyKey.content; - if (block.type === BlockType.consumer) { - this.collectConsumerPropertyNames(propertyName, lineRange, block, items); - } else { - this.collectProducerPropertyNames(propertyName, lineRange, block, items); - } - break; - } - case NodeKind.propertyValue: { - const propertyValue = node; - const property = propertyValue.parent; - const block = propertyValue.parent; - if (block.type === BlockType.consumer) { - this.collectConsumerPropertyValues(propertyValue, property, block, items); - } else { - this.collectProducerPropertyValues(propertyValue, property, block, items); + await this.collectProducerPropertyNames(undefined, lineRange, block, items); } break; } @@ -71,48 +65,70 @@ export class KafkaFileCompletion { const propertyName = position.line === property.start.line ? property.propertyName : undefined; const lineRange = document.lineAt(position.line).range; if (block.type === BlockType.consumer) { - this.collectConsumerPropertyNames(propertyName, lineRange, block, items); + // CONSUMER + // key|: + + // or + + // CONSUMER + // key| + await this.collectConsumerPropertyNames(propertyName, lineRange, block, items); } else { - this.collectProducerPropertyNames(propertyName, lineRange, block, items); + // PRODUCER + // key|: + await this.collectProducerPropertyNames(propertyName, lineRange, block, items); } } else { const propertyValue = property.value; const expression = propertyValue?.findNodeBefore(position); if (expression && expression.kind === NodeKind.mustacheExpression) { - this.collectFakerJSExpression(expression, items); + // Completion was triggered inside a mustache expression which is inside the property value + + // PRODUCER + // key: abcd-{{|}} + this.collectFakerJSExpressions(expression, items); } else { const block = property.parent; if (block.type === BlockType.consumer) { - this.collectConsumerPropertyValues(propertyValue, property, block, items); + // CONSUMER + // key-format: | + await this.collectConsumerPropertyValues(propertyValue, property, block, items); } else { - this.collectProducerPropertyValues(propertyValue, property, block, items); + // PRODUCER + // key-format: | + await this.collectProducerPropertyValues(propertyValue, property, block, items); } } } break; } case NodeKind.mustacheExpression: { + // Completion was triggered inside a mustache expression which is inside the PRODUCER value + + // PRODUCER + // topic: abcd + // {{|}} const expression = node; - this.collectFakerJSExpression(expression, items); + this.collectFakerJSExpressions(expression, items); break; } } return new CompletionList(items, true); } - collectConsumerPropertyNames(propertyName: string | undefined, lineRange: Range, block: ConsumerBlock, items: Array) { - this.collectPropertyNames(propertyName, lineRange, block, consumerProperties, items); + async collectConsumerPropertyNames(propertyName: string | undefined, lineRange: Range, block: ConsumerBlock, items: Array) { + await this.collectPropertyNames(propertyName, lineRange, block, consumerProperties, items); } - collectProducerPropertyNames(propertyName: string | undefined, lineRange: Range, block: ProducerBlock, items: Array) { - this.collectPropertyNames(propertyName, lineRange, block, producerProperties, items); + 
async collectProducerPropertyNames(propertyName: string | undefined, lineRange: Range, block: ProducerBlock, items: Array) { + await this.collectPropertyNames(propertyName, lineRange, block, producerProperties, items); } - collectPropertyNames(propertyName: string | undefined, lineRange: Range, block: Block, metadata: ModelDefinition[], items: Array) { + async collectPropertyNames(propertyName: string | undefined, lineRange: Range, block: Block, metadata: ModelDefinition[], items: Array) { const existingProperties = block.properties .filter(property => property.key) .map(property => property.key?.content); - metadata.forEach((definition) => { + for (const definition of metadata) { const currentName = definition.name; if (existingProperties.indexOf(currentName) === -1 || propertyName === currentName) { const item = new CompletionItem(currentName); @@ -121,8 +137,9 @@ export class KafkaFileCompletion { item.documentation = new MarkdownString(definition.description); } const insertText = new SnippetString(`${currentName}: `); - if (definition.enum) { - insertText.appendChoice(definition.enum.map(item => item.name)); + const values = await this.getValues(definition); + if (values) { + insertText.appendChoice(values); } else { insertText.appendPlaceholder(currentName); } @@ -130,30 +147,36 @@ export class KafkaFileCompletion { item.range = lineRange; items.push(item); } - }); + }; } - collectConsumerPropertyValues(propertyValue: Chunk | undefined, property: Property, block: ConsumerBlock, items: Array) { + async collectConsumerPropertyValues(propertyValue: Chunk | undefined, property: Property, block: ConsumerBlock, items: Array) { const propertyName = property.propertyName; switch (propertyName) { case 'topic': - + // CONSUMER + // topic: | + await this.collectTopics(property, items); break; - default: + // CONSUMER + // key-format: | this.collectPropertyValues(propertyValue, property, block, consumerProperties, items); break; } } - collectProducerPropertyValues(propertyValue: Chunk | undefined, property: Property, block: ProducerBlock, items: Array) { + async collectProducerPropertyValues(propertyValue: Chunk | undefined, property: Property, block: ProducerBlock, items: Array) { const propertyName = property.propertyName; switch (propertyName) { case 'topic': - + // PRODUCER + // topic: | + await this.collectTopics(property, items); break; - default: + // PRODUCER + // key-format: | this.collectPropertyValues(propertyValue, property, block, producerProperties, items); break; } @@ -174,7 +197,6 @@ export class KafkaFileCompletion { if (definition.description) { item.documentation = new MarkdownString(definition.description); } - const insertText = new SnippetString(' '); insertText.appendText(value); item.insertText = insertText; @@ -183,7 +205,7 @@ export class KafkaFileCompletion { }); } - collectFakerJSExpression(expression: MustacheExpression, items: CompletionItem[]) { + collectFakerJSExpressions(expression: MustacheExpression, items: CompletionItem[]) { const expressionRange = expression.expressionRange; fakerjsAPI.forEach((definition) => { const value = definition.name; @@ -199,4 +221,56 @@ export class KafkaFileCompletion { items.push(item); }); } + + async collectTopics(property: Property, items: Array) { + const { clusterId } = this.selectedClusterProvider.getSelectedCluster(); + if (!clusterId) { + return; + } + + function createDocumentation(topic: TopicDetail): string { + return `Topic \`${topic.id}\`\n` + + ` * partition count: \`${topic.partitionCount}\`\n` + + ` * replication 
factor: \`${topic.replicationFactor}\`\n`; + } + const valueRange = property.propertyValueRange; + try { + const topics = await this.topicProvider.getTopics(clusterId); + topics.forEach((topic) => { + const value = topic.id; + const item = new CompletionItem(value); + item.kind = CompletionItemKind.Value; + item.documentation = new MarkdownString(createDocumentation(topic)); + const insertText = new SnippetString(' '); + insertText.appendText(value); + item.insertText = insertText; + item.range = valueRange; + items.push(item); + }); + } + catch (e) { + + } + } + + async getValues(definition: ModelDefinition): Promise { + if (definition.enum) { + return definition.enum.map(item => item.name); + } + if (definition.name === 'topic') { + // TODO : manage list of topics as choices, but how to handle when cluster is not available? + /*const { clusterId } = this.selectedClusterProvider.getSelectedCluster(); + if (clusterId) { + try { + const topics = await this.topicProvider.getTopics(clusterId); + if (topics.length > 0) { + return topics.map(item => item.id); + } + } + catch (e) { + return; + } + }*/ + } + } } diff --git a/src/test/suite/kafka-file/languageservice/codeLens.test.ts b/src/test/suite/kafka-file/languageservice/codeLens.test.ts index 805328f..88538c9 100644 --- a/src/test/suite/kafka-file/languageservice/codeLens.test.ts +++ b/src/test/suite/kafka-file/languageservice/codeLens.test.ts @@ -6,7 +6,7 @@ suite("Kafka File CodeLens Test Suite", () => { test("Empty blocks", async () => { const languageServiceConfig = new LanguageServiceConfig(); - const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); + const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); await assertCodeLens('', [], languageService); await assertCodeLens(' ', [], languageService); @@ -21,7 +21,7 @@ suite("Kafka File PRODUCER CodeLens Test Suite", () => { test("PRODUCER without cluster selection", async () => { const languageServiceConfig = new LanguageServiceConfig(); - const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); + const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); await assertCodeLens('PRODUCER', [ codeLens(position(0, 0), position(0, 0), { @@ -45,27 +45,27 @@ suite("Kafka File PRODUCER CodeLens Test Suite", () => { }) ], languageService); - await assertCodeLens( - 'PRODUCER\n' + - '### XXXXXXXXXXXXXXXXXXXXXXXX\n' + - 'PRODUCER', - [ - codeLens(position(0, 0), position(0, 0), { - command: 'vscode-kafka.explorer.selectcluster', - title: 'Select a cluster' - }), - codeLens(position(2, 0), position(2, 0), { - command: 'vscode-kafka.explorer.selectcluster', - title: 'Select a cluster' - }) - ], languageService); + await assertCodeLens( + 'PRODUCER\n' + + '### XXXXXXXXXXXXXXXXXXXXXXXX\n' + + 'PRODUCER', + [ + codeLens(position(0, 0), position(0, 0), { + command: 'vscode-kafka.explorer.selectcluster', + title: 'Select a cluster' + }), + codeLens(position(2, 0), position(2, 0), { + command: 'vscode-kafka.explorer.selectcluster', + title: 'Select a cluster' + }) + ], languageService); }); test("PRODUCER with cluster selection", async () => { const languageServiceConfig = new LanguageServiceConfig(); languageServiceConfig.setSelectedCluster({ clusterId: 'cluster1', clusterName: 'CLUSTER_1' }); - const languageService = 
getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); + const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); await assertCodeLens('PRODUCER', [ codeLens(position(0, 0), position(0, 0), { @@ -109,8 +109,8 @@ suite("Kafka File PRODUCER CodeLens Test Suite", () => { 'key: a-key\n' + 'topic: abcd\n' + 'key-format: long\n' + - 'value-format: string\n' + - 'ABCD\n' + + 'value-format: string\n' + + 'ABCD\n' + 'EFGH', [ codeLens(position(0, 0), position(0, 0), { command: 'vscode-kafka.producer.produce', @@ -158,7 +158,7 @@ suite("Kafka File CONSUMER CodeLens Test Suite", () => { test("CONSUMER without cluster selection", async () => { const languageServiceConfig = new LanguageServiceConfig(); - const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); + const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); await assertCodeLens('CONSUMER group-1', [ codeLens(position(0, 0), position(0, 0), { @@ -187,7 +187,7 @@ suite("Kafka File CONSUMER CodeLens Test Suite", () => { const languageServiceConfig = new LanguageServiceConfig(); languageServiceConfig.setSelectedCluster({ clusterId: 'cluster1', clusterName: 'CLUSTER_1' }); languageServiceConfig.setConsumerLaunchState('cluster1', 'group-1', ConsumerLaunchState.started); - const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); + const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); await assertCodeLens('CONSUMER group-1', [ codeLens(position(0, 0), position(0, 0), { diff --git a/src/test/suite/kafka-file/languageservice/completionTopic.test.ts b/src/test/suite/kafka-file/languageservice/completionTopic.test.ts new file mode 100644 index 0000000..67b623a --- /dev/null +++ b/src/test/suite/kafka-file/languageservice/completionTopic.test.ts @@ -0,0 +1,64 @@ +import { CompletionItemKind } from "vscode"; +import { getLanguageService } from "../../../../kafka-file/languageservice/kafkaFileLanguageService"; +import { LanguageServiceConfig, position, range, testCompletion } from "./kafkaAssert"; + +const languageServiceConfig = new LanguageServiceConfig(); +languageServiceConfig.setSelectedCluster({ clusterId: 'cluster1', clusterName: 'CLUSTER_1' }); +languageServiceConfig.setTopics('cluster1', [{id : 'abcd', partitionCount : 1 , replicationFactor : 1}]); +const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); + +suite("Kafka File Completion with Topics Test Suite", () => { + + test("Empty completion", async () => { + await testCompletion('', { + items: [] + }, false, languageService); + + await testCompletion('ab|cd', { + items: [] + }, false, languageService); + + }); + +}); + +suite("Kafka File PRODUCER Topic Completion Test Suite", () => { + + test("PRODUCER Topic Completion", async () => { + + + await testCompletion( + 'PRODUCER a\n' + + 'topic: |' + , { + items: [ + { + label: 'abcd', kind: CompletionItemKind.Value, + insertText: ' abcd', + range: range(position(1, 6), position(1, 7)) + } + ] + }, false, languageService); + }); + +}); + +suite("Kafka File CONSUMER Topic Completion Test Suite", () => { + + test("CONSUMER Topic Completion", async () => { + + await testCompletion( + 'CONSUMER 
a\n' + + 'topic: |' + , { + items: [ + { + label: 'abcd', kind: CompletionItemKind.Value, + insertText: ' abcd', + range: range(position(1, 6), position(1, 7)) + } + ] + }, false, languageService); + }); + +}); diff --git a/src/test/suite/kafka-file/languageservice/kafkaAssert.ts b/src/test/suite/kafka-file/languageservice/kafkaAssert.ts index 0555e66..9e2e076 100644 --- a/src/test/suite/kafka-file/languageservice/kafkaAssert.ts +++ b/src/test/suite/kafka-file/languageservice/kafkaAssert.ts @@ -2,10 +2,10 @@ import * as assert from "assert"; import { CodeLens, Position, Range, Command, Uri, workspace, CompletionList, SnippetString } from "vscode"; import { ConsumerLaunchState } from "../../../../client"; import { ProducerLaunchState } from "../../../../client/producer"; -import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider } from "../../../../kafka-file/languageservice/kafkaFileLanguageService"; +import { ConsumerLaunchStateProvider, getLanguageService, LanguageService, ProducerLaunchStateProvider, SelectedClusterProvider, TopicDetail, TopicProvider } from "../../../../kafka-file/languageservice/kafkaFileLanguageService"; import { BlockType, ProducerBlock } from "../../../../kafka-file/languageservice/parser/kafkaFileParser"; -export class LanguageServiceConfig implements ProducerLaunchStateProvider, ConsumerLaunchStateProvider, SelectedClusterProvider { +export class LanguageServiceConfig implements ProducerLaunchStateProvider, ConsumerLaunchStateProvider, SelectedClusterProvider, TopicProvider { private producerLaunchStates = new Map(); @@ -13,6 +13,7 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu private selectedCluster: { clusterId?: string, clusterName?: string } | undefined; + private topicsCache = new Map(); getProducerLaunchState(uri: Uri): ProducerLaunchState { const key = uri.toString(); const state = this.producerLaunchStates.get(key); @@ -49,10 +50,16 @@ export class LanguageServiceConfig implements ProducerLaunchStateProvider, Consu this.selectedCluster = selectedCluster; } + public setTopics(clusterId: string, topics: TopicDetail[]) { + this.topicsCache.set(clusterId, topics); + } + async getTopics(clusterId: string): Promise { + return this.topicsCache.get(clusterId) || []; + } } const languageServiceConfig = new LanguageServiceConfig(); -const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig); +const languageService = getLanguageService(languageServiceConfig, languageServiceConfig, languageServiceConfig, languageServiceConfig); export function getSimpleLanguageService() { return languageService; @@ -85,15 +92,14 @@ export async function assertCodeLens(content: string, expected: Array, } // Completion assert - -export async function testCompletion(value: string, expected: CompletionList, partial = false) { +export async function testCompletion(value: string, expected: CompletionList, partial = false, ls = languageService) { const offset = value.indexOf('|'); value = value.substr(0, offset) + value.substr(offset + 1); let document = await getDocument(value); const position = document.positionAt(offset); - let ast = languageService.parseKafkaFileDocument(document); - const list = languageService.doComplete(document, ast, position); + let ast = ls.parseKafkaFileDocument(document); + const list = await ls.doComplete(document, ast, position); const items = list?.items; // no duplicate labels @@ -112,10 +118,10 @@ 
export async function testCompletion(value: string, expected: CompletionList, pa } expected.items.forEach((expectedItem, i) => { const actualItem = items[i]; - assert.deepStrictEqual(actualItem.label, expectedItem.label); - assert.deepStrictEqual(actualItem.kind, expectedItem.kind); - assert.deepStrictEqual((actualItem.insertText)?.value, expectedItem.insertText); - assert.deepStrictEqual(actualItem.range, expectedItem.range); + assert.deepStrictEqual(actualItem?.label, expectedItem.label); + assert.deepStrictEqual(actualItem?.kind, expectedItem.kind); + assert.deepStrictEqual((actualItem?.insertText)?.value, expectedItem.insertText); + assert.deepStrictEqual(actualItem?.range, expectedItem.range); }); } }
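For context, here is a minimal `.kafka` file sketch assembled from the snippets already used in this patch's docs and tests (the topic `abcd`, producer name `a` and group `group-1` are placeholder values; a cluster must be selected for topic names to be proposed after `topic: `):

    PRODUCER a
    topic: abcd
    key: a-key
    key-format: long
    value-format: string
    Hello {{name.lastName}}

    ###

    CONSUMER group-1
    topic: abcd
    key-format: string

With this change, triggering completion after `topic: ` in either block lists the topics of the selected cluster, and each completion item documents the topic's partition count and replication factor.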