diff --git a/packages/editor/src/browser/editor-preferences.ts b/packages/editor/src/browser/editor-preferences.ts index 0a0652e0fab18..6c6a3c82295fe 100644 --- a/packages/editor/src/browser/editor-preferences.ts +++ b/packages/editor/src/browser/editor-preferences.ts @@ -109,7 +109,7 @@ const codeEditorPreferenceProperties = { }, 'editor.semanticHighlighting.enabled': { 'type': 'boolean', - 'default': false, + 'default': true, 'description': 'Controls whether the semanticHighlighting is shown for the languages that support it.' }, 'editor.stablePeek': { diff --git a/packages/monaco/src/browser/monaco-editor-provider.ts b/packages/monaco/src/browser/monaco-editor-provider.ts index 36ccafaf09d6e..52418fa18c151 100644 --- a/packages/monaco/src/browser/monaco-editor-provider.ts +++ b/packages/monaco/src/browser/monaco-editor-provider.ts @@ -95,6 +95,32 @@ export class MonacoEditorProvider { const staticServices = monaco.services.StaticServices; const init = staticServices.init.bind(monaco.services.StaticServices); + const themeService = staticServices.standaloneThemeService.get(); + const originalGetTheme: (typeof themeService)['getTheme'] = themeService.getTheme.bind(themeService); + const patchedGetTokenStyleMetadataFlag = '__patched_getTokenStyleMetadata'; + // based on https://github.com/microsoft/vscode/commit/4731a227e377da8cb14ed5697dd1ba8faea40538 + // TODO remove after migrating to monaco 0.21 + themeService.getTheme = () => { + const theme = originalGetTheme(); + if (!(patchedGetTokenStyleMetadataFlag in theme)) { + Object.defineProperty(theme, patchedGetTokenStyleMetadataFlag, { enumerable: false, configurable: false, writable: false, value: true }); + theme.getTokenStyleMetadata = (type, modifiers) => { + // use theme rules match + const style = theme.tokenTheme._match([type].concat(modifiers).join('.')); + const metadata = style.metadata; + const foreground = monaco.modes.TokenMetadata.getForeground(metadata); + const fontStyle = 
monaco.modes.TokenMetadata.getFontStyle(metadata); + return { + foreground: foreground, + italic: Boolean(fontStyle & monaco.modes.FontStyle.Italic), + bold: Boolean(fontStyle & monaco.modes.FontStyle.Bold), + underline: Boolean(fontStyle & monaco.modes.FontStyle.Underline) + }; + }; + } + return theme; + }; + monaco.services.StaticServices.init = o => { const result = init(o); result[0].set(monaco.services.ICodeEditorService, codeEditorService); diff --git a/packages/monaco/src/typings/monaco/index.d.ts b/packages/monaco/src/typings/monaco/index.d.ts index d529d11af3be5..19468e051c682 100644 --- a/packages/monaco/src/typings/monaco/index.d.ts +++ b/packages/monaco/src/typings/monaco/index.d.ts @@ -762,6 +762,14 @@ declare module monaco.services { get(overrides?: monaco.editor.IEditorOverrideServices): T; } + // https://github.com/microsoft/vscode/blob/0eb3a02ca2bcfab5faa3dc6e52d7c079efafcab0/src/vs/platform/theme/common/themeService.ts#L78 + export interface ITokenStyle { + readonly foreground?: number; + readonly bold?: boolean; + readonly underline?: boolean; + readonly italic?: boolean; + } + // https://github.com/theia-ide/vscode/blob/standalone/0.20.x/src/vs/editor/standalone/common/standaloneThemeService.ts#L28 export interface IStandaloneThemeService extends monaco.theme.IThemeService { // https://github.com/theia-ide/vscode/blob/standalone/0.20.x/src/vs/editor/standalone/browser/standaloneThemeServiceImpl.ts#L178 @@ -779,11 +787,14 @@ declare module monaco.services { // https://github.com/theia-ide/vscode/blob/standalone/0.20.x/src/vs/platform/theme/common/themeService.ts#L98 getColor(color: string, useDefault?: boolean): monaco.color.Color | undefined; + + getTokenStyleMetadata(type: string, modifiers: string[], modelLanguage: string): ITokenStyle | undefined; } // https://github.com/theia-ide/vscode/blob/standalone/0.20.x/src/vs/editor/common/modes/supports/tokenization.ts#L188 export interface TokenTheme { match(languageId: LanguageId, scope: string): 
number; + _match(token: string): any; getColorMap(): monaco.color.Color[]; } @@ -1301,6 +1312,15 @@ declare module monaco.modes { } export const TokenizationRegistry: TokenizationRegistry; + // https://github.com/microsoft/vscode/blob/0eb3a02ca2bcfab5faa3dc6e52d7c079efafcab0/src/vs/editor/common/modes.ts#L66-L76 + export const enum FontStyle { + NotSet = -1, + None = 0, + Italic = 1, + Bold = 2, + Underline = 4 + } + // https://github.com/theia-ide/vscode/blob/standalone/0.20.x/src/vs/editor/common/modes.ts#L148 export class TokenMetadata { diff --git a/packages/plugin-ext/src/common/plugin-api-rpc.ts b/packages/plugin-ext/src/common/plugin-api-rpc.ts index 73dc859b8b6f9..e258d5eddf12a 100644 --- a/packages/plugin-ext/src/common/plugin-api-rpc.ts +++ b/packages/plugin-ext/src/common/plugin-api-rpc.ts @@ -1279,6 +1279,9 @@ export interface LanguagesExt { $provideColorPresentations(handle: number, resource: UriComponents, colorInfo: RawColorInfo, token: CancellationToken): PromiseLike; $provideRenameEdits(handle: number, resource: UriComponents, position: Position, newName: string, token: CancellationToken): PromiseLike; $resolveRenameLocation(handle: number, resource: UriComponents, position: Position, token: CancellationToken): PromiseLike; + $provideDocumentSemanticTokens(handle: number, resource: UriComponents, previousResultId: number, token: CancellationToken): Promise; + $releaseDocumentSemanticTokens(handle: number, semanticColoringResultId: number): void; + $provideDocumentRangeSemanticTokens(handle: number, resource: UriComponents, range: Range, token: CancellationToken): Promise; $provideRootDefinition(handle: number, resource: UriComponents, location: Position, token: CancellationToken): Promise; $provideCallers(handle: number, definition: CallHierarchyDefinition, token: CancellationToken): Promise; } @@ -1323,6 +1326,10 @@ export interface LanguagesMain { $registerSelectionRangeProvider(handle: number, pluginInfo: PluginInfo, selector: 
SerializedDocumentFilter[]): void; $registerDocumentColorProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[]): void; $registerRenameProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[], supportsResolveInitialValues: boolean): void; + $registerDocumentSemanticTokensProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[], + legend: theia.SemanticTokensLegend, eventHandle: number | undefined): void; + $emitDocumentSemanticTokensEvent(eventHandle: number): void; + $registerDocumentRangeSemanticTokensProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[], legend: theia.SemanticTokensLegend): void; $registerCallHierarchyProvider(handle: number, selector: SerializedDocumentFilter[]): void; } @@ -1549,7 +1556,7 @@ export interface AuthenticationMain { $getProviderIds(): Promise; $updateSessions(providerId: string, event: AuthenticationSessionsChangeEvent): void; $getSession(providerId: string, scopes: string[], extensionId: string, extensionName: string, - options: { createIfNone?: boolean, clearSessionPreference?: boolean }): Promise; + options: { createIfNone?: boolean, clearSessionPreference?: boolean }): Promise; $logout(providerId: string, sessionId: string): Promise; } diff --git a/packages/plugin-ext/src/common/semantic-tokens-dto.ts b/packages/plugin-ext/src/common/semantic-tokens-dto.ts new file mode 100644 index 0000000000000..1bdc780a31395 --- /dev/null +++ b/packages/plugin-ext/src/common/semantic-tokens-dto.ts @@ -0,0 +1,182 @@ +/******************************************************************************** + * Copyright (C) 2020 TypeFox and others. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v. 2.0 which is available at + * http://www.eclipse.org/legal/epl-2.0. 
+ * + * This Source Code may also be made available under the following Secondary + * Licenses when the conditions for such availability set forth in the Eclipse + * Public License v. 2.0 are satisfied: GNU General Public License, version 2 + * with the GNU Classpath Exception which is available at + * https://www.gnu.org/software/classpath/license.html. + * + * SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 + ********************************************************************************/ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +// copied and modified from https://github.com/microsoft/vscode/blob/0eb3a02ca2bcfab5faa3dc6e52d7c079efafcab0/src/vs/workbench/api/common/shared/semanticTokensDto.ts + +import { BinaryBuffer } from '@theia/core/lib/common/buffer'; + +let _isLittleEndian = true; +let _isLittleEndianComputed = false; +function isLittleEndian(): boolean { + if (!_isLittleEndianComputed) { + _isLittleEndianComputed = true; + const test = new Uint8Array(2); + test[0] = 1; + test[1] = 2; + const view = new Uint16Array(test.buffer); + _isLittleEndian = (view[0] === (2 << 8) + 1); + } + return _isLittleEndian; +} + +export interface IFullSemanticTokensDto { + id: number; + type: 'full'; + data: Uint32Array; +} + +export interface IDeltaSemanticTokensDto { + id: number; + type: 'delta'; + deltas: { start: number; deleteCount: number; data?: Uint32Array; }[]; +} + +export type ISemanticTokensDto = IFullSemanticTokensDto | IDeltaSemanticTokensDto; + +const enum EncodedSemanticTokensType { + Full = 1, + Delta = 2 +} + +function reverseEndianness(arr: Uint8Array): void { + for (let i = 0, len = arr.length; i < len; i += 4) { + // 
flip bytes 0<->3 and 1<->2 + const b0 = arr[i + 0]; + const b1 = arr[i + 1]; + const b2 = arr[i + 2]; + const b3 = arr[i + 3]; + arr[i + 0] = b3; + arr[i + 1] = b2; + arr[i + 2] = b1; + arr[i + 3] = b0; + } +} + +function toLittleEndianBuffer(arr: Uint32Array): BinaryBuffer { + const uint8Arr = new Uint8Array(arr.buffer, arr.byteOffset, arr.length * 4); + if (!isLittleEndian()) { + // the byte order must be changed + reverseEndianness(uint8Arr); + } + return BinaryBuffer.wrap(uint8Arr); +} + +function fromLittleEndianBuffer(buff: BinaryBuffer): Uint32Array { + const uint8Arr = buff.buffer; + if (!isLittleEndian()) { + // the byte order must be changed + reverseEndianness(uint8Arr); + } + if (uint8Arr.byteOffset % 4 === 0) { + return new Uint32Array(uint8Arr.buffer, uint8Arr.byteOffset, uint8Arr.length / 4); + } else { + // unaligned memory access doesn't work on all platforms + const data = new Uint8Array(uint8Arr.byteLength); + data.set(uint8Arr); + return new Uint32Array(data.buffer, data.byteOffset, data.length / 4); + } +} + +export function encodeSemanticTokensDto(semanticTokens: ISemanticTokensDto): BinaryBuffer { + const dest = new Uint32Array(encodeSemanticTokensDtoSize(semanticTokens)); + let offset = 0; + dest[offset++] = semanticTokens.id; + if (semanticTokens.type === 'full') { + dest[offset++] = EncodedSemanticTokensType.Full; + dest[offset++] = semanticTokens.data.length; + dest.set(semanticTokens.data, offset); offset += semanticTokens.data.length; + } else { + dest[offset++] = EncodedSemanticTokensType.Delta; + dest[offset++] = semanticTokens.deltas.length; + for (const delta of semanticTokens.deltas) { + dest[offset++] = delta.start; + dest[offset++] = delta.deleteCount; + if (delta.data) { + dest[offset++] = delta.data.length; + dest.set(delta.data, offset); offset += delta.data.length; + } else { + dest[offset++] = 0; + } + } + } + return toLittleEndianBuffer(dest); +} + +function encodeSemanticTokensDtoSize(semanticTokens: ISemanticTokensDto): 
number { + let result = 0; + result += ( + + 1 // id + + 1 // type + ); + if (semanticTokens.type === 'full') { + result += ( + + 1 // data length + + semanticTokens.data.length + ); + } else { + result += ( + + 1 // delta count + ); + result += ( + + 1 // start + + 1 // deleteCount + + 1 // data length + ) * semanticTokens.deltas.length; + for (const delta of semanticTokens.deltas) { + if (delta.data) { + result += delta.data.length; + } + } + } + return result; +} + +export function decodeSemanticTokensDto(_buff: BinaryBuffer): ISemanticTokensDto { + const src = fromLittleEndianBuffer(_buff); + let offset = 0; + const id = src[offset++]; + const type: EncodedSemanticTokensType = src[offset++]; + if (type === EncodedSemanticTokensType.Full) { + const length = src[offset++]; + const data = src.subarray(offset, offset + length); offset += length; + return { + id: id, + type: 'full', + data: data + }; + } + const deltaCount = src[offset++]; + const deltas: { start: number; deleteCount: number; data?: Uint32Array; }[] = []; + for (let i = 0; i < deltaCount; i++) { + const start = src[offset++]; + const deleteCount = src[offset++]; + const length = src[offset++]; + let data: Uint32Array | undefined; + if (length > 0) { + data = src.subarray(offset, offset + length); offset += length; + } + deltas[i] = { start, deleteCount, data }; + } + return { + id: id, + type: 'delta', + deltas: deltas + }; +} diff --git a/packages/plugin-ext/src/main/browser/languages-main.ts b/packages/plugin-ext/src/main/browser/languages-main.ts index b5de631172c07..3b3f2c95c8b64 100644 --- a/packages/plugin-ext/src/main/browser/languages-main.ts +++ b/packages/plugin-ext/src/main/browser/languages-main.ts @@ -43,7 +43,7 @@ import { RPCProtocol } from '../../common/rpc-protocol'; import { MonacoLanguages, WorkspaceSymbolProvider } from '@theia/monaco/lib/browser/monaco-languages'; import CoreURI from '@theia/core/lib/common/uri'; import { Disposable, DisposableCollection } from 
'@theia/core/lib/common/disposable'; -import { Emitter } from '@theia/core/lib/common/event'; +import { Emitter, Event } from '@theia/core/lib/common/event'; import { ProblemManager } from '@theia/markers/lib/browser'; import * as vst from 'vscode-languageserver-types'; import * as theia from '@theia/plugin'; @@ -56,6 +56,7 @@ import { Position, DocumentUri } from 'vscode-languageserver-types'; import { ObjectIdentifier } from '../../common/object-identifier'; import { mixin } from '../../common/types'; import { relative } from '../../common/paths-util'; +import { decodeSemanticTokensDto } from '../../common/semantic-tokens-dto'; @injectable() export class LanguagesMainImpl implements LanguagesMain, Disposable { @@ -797,6 +798,91 @@ export class LanguagesMainImpl implements LanguagesMain, Disposable { return this.proxy.$resolveRenameLocation(handle, model.uri, position, token); } + // --- semantic tokens + + $registerDocumentSemanticTokensProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[], legend: theia.SemanticTokensLegend, + eventHandle: number | undefined): void { + const languageSelector = this.toLanguageSelector(selector); + let event: Event | undefined = undefined; + if (typeof eventHandle === 'number') { + const emitter = new Emitter(); + this.register(eventHandle, emitter); + event = emitter.event; + } + const provider = this.createDocumentSemanticTokensProvider(handle, legend, event); + this.register(handle, monaco.languages.registerDocumentSemanticTokensProvider(languageSelector, provider)); + } + + protected createDocumentSemanticTokensProvider(handle: number, legend: theia.SemanticTokensLegend, event?: Event): monaco.languages.DocumentSemanticTokensProvider { + return { + releaseDocumentSemanticTokens: resultId => { + if (resultId) { + this.proxy.$releaseDocumentSemanticTokens(handle, parseInt(resultId, 10)); + } + }, + getLegend: () => legend, + provideDocumentSemanticTokens: async (model, lastResultId, token) => { + 
const nLastResultId = lastResultId ? parseInt(lastResultId, 10) : 0; + const encodedDto = await this.proxy.$provideDocumentSemanticTokens(handle, model.uri, nLastResultId, token); + if (!encodedDto) { + return null; + } + if (token.isCancellationRequested) { + return null; + } + const dto = decodeSemanticTokensDto(encodedDto); + if (dto.type === 'full') { + return { + resultId: String(dto.id), + data: dto.data + }; + } + return { + resultId: String(dto.id), + edits: dto.deltas + }; + } + }; + } + + $emitDocumentSemanticTokensEvent(eventHandle: number): void { + const obj = this.services.get(eventHandle); + if (obj instanceof Emitter) { + obj.fire(undefined); + } + } + + $registerDocumentRangeSemanticTokensProvider(handle: number, pluginInfo: PluginInfo, selector: SerializedDocumentFilter[], legend: theia.SemanticTokensLegend): void { + const languageSelector = this.toLanguageSelector(selector); + const provider = this.createDocumentRangeSemanticTokensProvider(handle, legend); + this.register(handle, monaco.languages.registerDocumentRangeSemanticTokensProvider(languageSelector, provider)); + } + + protected createDocumentRangeSemanticTokensProvider(handle: number, legend: theia.SemanticTokensLegend): monaco.languages.DocumentRangeSemanticTokensProvider { + return { + getLegend: () => legend, + provideDocumentRangeSemanticTokens: async (model, range, token) => { + const encodedDto = await this.proxy.$provideDocumentRangeSemanticTokens(handle, model.uri, range, token); + if (!encodedDto) { + return null; + } + if (token.isCancellationRequested) { + return null; + } + const dto = decodeSemanticTokensDto(encodedDto); + if (dto.type === 'full') { + return { + resultId: String(dto.id), + data: dto.data + }; + } + throw new Error('Unexpected'); + } + }; + } + + // --- suggest + protected toLanguageSelector(filters: SerializedDocumentFilter[]): monaco.modes.LanguageSelector & LanguageSelector { return filters.map(filter => { let pattern: string | 
(monaco.modes.IRelativePattern & RelativePattern) | undefined; diff --git a/packages/plugin-ext/src/plugin/documents.ts b/packages/plugin-ext/src/plugin/documents.ts index 4989afccd06f2..db07f7dda8c88 100644 --- a/packages/plugin-ext/src/plugin/documents.ts +++ b/packages/plugin-ext/src/plugin/documents.ts @@ -199,6 +199,14 @@ export class DocumentsExtImpl implements DocumentsExt { return undefined; } + public getDocument(resource: theia.Uri): theia.TextDocument { + const data = this.getDocumentData(resource); + if (!data?.document) { + throw new Error(`Unable to retrieve document from URI '${resource}'`); + } + return data.document; + } + /** * Retrieve document and open it in the editor if need. * diff --git a/packages/plugin-ext/src/plugin/languages.ts b/packages/plugin-ext/src/plugin/languages.ts index 807a81827547c..eba9329625350 100644 --- a/packages/plugin-ext/src/plugin/languages.ts +++ b/packages/plugin-ext/src/plugin/languages.ts @@ -88,6 +88,8 @@ import { Event } from '@theia/core/lib/common/event'; import { CommandRegistryImpl } from './command-registry'; import { DeclarationAdapter } from './languages/declaration'; import { CallHierarchyAdapter } from './languages/call-hierarchy'; +import { BinaryBuffer } from '@theia/core/lib/common/buffer'; +import { DocumentSemanticTokensAdapter, DocumentRangeSemanticTokensAdapter } from './languages/semantic-highlighting'; /* eslint-disable @typescript-eslint/indent */ type Adapter = CompletionAdapter | @@ -111,7 +113,9 @@ type Adapter = CompletionAdapter | SelectionRangeProviderAdapter | ColorProviderAdapter | RenameAdapter | - CallHierarchyAdapter; + CallHierarchyAdapter | + DocumentRangeSemanticTokensAdapter | + DocumentSemanticTokensAdapter; /* eslint-enable @typescript-eslint/indent */ export class LanguagesExtImpl implements LanguagesExt { @@ -588,6 +592,46 @@ export class LanguagesExtImpl implements LanguagesExt { return this.withAdapter(handle, CallHierarchyAdapter, adapter => 
adapter.provideCallers(definition, token), undefined); } // ### Call Hierarchy Provider end + + // #region semantic coloring + + registerDocumentSemanticTokensProvider(selector: theia.DocumentSelector, provider: theia.DocumentSemanticTokensProvider, legend: theia.SemanticTokensLegend, + pluginInfo: PluginInfo): theia.Disposable { + const eventHandle = (typeof provider.onDidChangeSemanticTokens === 'function' ? this.nextCallId() : undefined); + + const handle = this.addNewAdapter(new DocumentSemanticTokensAdapter(this.documents, provider)); + this.proxy.$registerDocumentSemanticTokensProvider(handle, pluginInfo, this.transformDocumentSelector(selector), legend, eventHandle); + let result = this.createDisposable(handle); + + if (eventHandle) { + // eslint-disable-next-line no-unsanitized/method + const subscription = provider.onDidChangeSemanticTokens!(_ => this.proxy.$emitDocumentSemanticTokensEvent(eventHandle)); + result = Disposable.from(result, subscription); + } + + return result; + } + + $provideDocumentSemanticTokens(handle: number, resource: UriComponents, previousResultId: number, token: theia.CancellationToken): Promise { + return this.withAdapter(handle, DocumentSemanticTokensAdapter, adapter => adapter.provideDocumentSemanticTokens(URI.revive(resource), previousResultId, token), null); + } + + $releaseDocumentSemanticTokens(handle: number, semanticColoringResultId: number): void { + this.withAdapter(handle, DocumentSemanticTokensAdapter, adapter => adapter.releaseDocumentSemanticColoring(semanticColoringResultId), undefined); + } + + registerDocumentRangeSemanticTokensProvider(selector: theia.DocumentSelector, provider: theia.DocumentRangeSemanticTokensProvider, + legend: theia.SemanticTokensLegend, pluginInfo: PluginInfo): theia.Disposable { + const handle = this.addNewAdapter(new DocumentRangeSemanticTokensAdapter(this.documents, provider)); + this.proxy.$registerDocumentRangeSemanticTokensProvider(handle, pluginInfo, 
this.transformDocumentSelector(selector), legend); + return this.createDisposable(handle); + } + + $provideDocumentRangeSemanticTokens(handle: number, resource: UriComponents, range: Range, token: theia.CancellationToken): Promise { + return this.withAdapter(handle, DocumentRangeSemanticTokensAdapter, adapter => adapter.provideDocumentRangeSemanticTokens(URI.revive(resource), range, token), null); + } + + // #endregion } function serializeEnterRules(rules?: theia.OnEnterRule[]): SerializedOnEnterRule[] | undefined { diff --git a/packages/plugin-ext/src/plugin/languages/semantic-highlighting.ts b/packages/plugin-ext/src/plugin/languages/semantic-highlighting.ts new file mode 100644 index 0000000000000..053d53533346e --- /dev/null +++ b/packages/plugin-ext/src/plugin/languages/semantic-highlighting.ts @@ -0,0 +1,211 @@ +/******************************************************************************** + * Copyright (C) 2020 TypeFox and others. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v. 2.0 which is available at + * http://www.eclipse.org/legal/epl-2.0. + * + * This Source Code may also be made available under the following Secondary + * Licenses when the conditions for such availability set forth in the Eclipse + * Public License v. 2.0 are satisfied: GNU General Public License, version 2 + * with the GNU Classpath Exception which is available at + * https://www.gnu.org/software/classpath/license.html. + * + * SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 + ********************************************************************************/ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +// copied and modified from https://github.com/microsoft/vscode/blob/0eb3a02ca2bcfab5faa3dc6e52d7c079efafcab0/src/vs/workbench/api/common/extHostLanguageFeatures.ts#L692-L869 + +/* eslint-disable @typescript-eslint/consistent-type-definitions */ + +import { BinaryBuffer } from '@theia/core/src/common/buffer'; +import * as theia from '@theia/plugin'; +import { URI } from 'vscode-uri'; +import { SemanticTokens, SemanticTokensEdit, SemanticTokensEdits } from '../types-impl'; +import { DocumentsExtImpl } from '../documents'; +import { toRange } from '../type-converters'; +import { encodeSemanticTokensDto } from '../../common/semantic-tokens-dto'; +import { Range } from '../../common/plugin-api-rpc-model'; + +class SemanticTokensPreviousResult { + constructor( + public readonly resultId: string | undefined, + public readonly tokens?: Uint32Array, + ) { } +} + +type RelaxedSemanticTokens = { readonly resultId?: string; readonly data: number[]; }; +type RelaxedSemanticTokensEdit = { readonly start: number; readonly deleteCount: number; readonly data?: number[]; }; +type RelaxedSemanticTokensEdits = { readonly resultId?: string; readonly edits: RelaxedSemanticTokensEdit[]; }; + +type ProvidedSemanticTokens = theia.SemanticTokens | RelaxedSemanticTokens; +type ProvidedSemanticTokensEdits = theia.SemanticTokensEdits | RelaxedSemanticTokensEdits; + +export class DocumentSemanticTokensAdapter { + + private readonly _previousResults: Map; + private _nextResultId = 1; + + constructor( + private readonly _documents: DocumentsExtImpl, + private readonly _provider: theia.DocumentSemanticTokensProvider, + ) { + this._previousResults = new Map(); + } + + async provideDocumentSemanticTokens(resource: URI, previousResultId: number, token: theia.CancellationToken): Promise { + const doc = this._documents.getDocument(resource); + const previousResult = (previousResultId !== 0 ? 
this._previousResults.get(previousResultId) : null); + let value: ProvidedSemanticTokens | ProvidedSemanticTokensEdits | null | undefined; + if (previousResult && typeof previousResult.resultId === 'string' && typeof this._provider.provideDocumentSemanticTokensEdits === 'function') { + value = await this._provider.provideDocumentSemanticTokensEdits(doc, previousResult.resultId, token); + } else { + value = await this._provider.provideDocumentSemanticTokens(doc, token); + } + if (previousResult) { + this._previousResults.delete(previousResultId); + } + if (!value) { + return null; + } + value = DocumentSemanticTokensAdapter._fixProvidedSemanticTokens(value); + return this._send(DocumentSemanticTokensAdapter._convertToEdits(previousResult, value), value); + } + + async releaseDocumentSemanticColoring(semanticColoringResultId: number): Promise { + this._previousResults.delete(semanticColoringResultId); + } + + private static _fixProvidedSemanticTokens(v: ProvidedSemanticTokens | ProvidedSemanticTokensEdits): theia.SemanticTokens | theia.SemanticTokensEdits { + if (DocumentSemanticTokensAdapter._isSemanticTokens(v)) { + if (DocumentSemanticTokensAdapter._isCorrectSemanticTokens(v)) { + return v; + } + return new SemanticTokens(new Uint32Array(v.data), v.resultId); + } else if (DocumentSemanticTokensAdapter._isSemanticTokensEdits(v)) { + if (DocumentSemanticTokensAdapter._isCorrectSemanticTokensEdits(v)) { + return v; + } + return new SemanticTokensEdits(v.edits.map(edit => new SemanticTokensEdit(edit.start, edit.deleteCount, edit.data ? 
+ new Uint32Array(edit.data) : edit.data)), v.resultId); + } + return v; + } + + private static _isSemanticTokens(v: ProvidedSemanticTokens | ProvidedSemanticTokensEdits): v is ProvidedSemanticTokens { + return v && !!((v as ProvidedSemanticTokens).data); + } + + private static _isCorrectSemanticTokens(v: ProvidedSemanticTokens): v is theia.SemanticTokens { + return (v.data instanceof Uint32Array); + } + + private static _isSemanticTokensEdits(v: ProvidedSemanticTokens | ProvidedSemanticTokensEdits): v is ProvidedSemanticTokensEdits { + return v && Array.isArray((v as ProvidedSemanticTokensEdits).edits); + } + + private static _isCorrectSemanticTokensEdits(v: ProvidedSemanticTokensEdits): v is theia.SemanticTokensEdits { + for (const edit of v.edits) { + if (!(edit.data instanceof Uint32Array)) { + return false; + } + } + return true; + } + + private static _convertToEdits(previousResult: SemanticTokensPreviousResult | null | undefined, newResult: theia.SemanticTokens | theia.SemanticTokensEdits): + theia.SemanticTokens | theia.SemanticTokensEdits { + if (!DocumentSemanticTokensAdapter._isSemanticTokens(newResult)) { + return newResult; + } + if (!previousResult || !previousResult.tokens) { + return newResult; + } + const oldData = previousResult.tokens; + const oldLength = oldData.length; + const newData = newResult.data; + const newLength = newData.length; + + let commonPrefixLength = 0; + const maxCommonPrefixLength = Math.min(oldLength, newLength); + while (commonPrefixLength < maxCommonPrefixLength && oldData[commonPrefixLength] === newData[commonPrefixLength]) { + commonPrefixLength++; + } + + if (commonPrefixLength === oldLength && commonPrefixLength === newLength) { + // complete overlap! 
+ return new SemanticTokensEdits([], newResult.resultId); + } + + let commonSuffixLength = 0; + const maxCommonSuffixLength = maxCommonPrefixLength - commonPrefixLength; + while (commonSuffixLength < maxCommonSuffixLength && oldData[oldLength - commonSuffixLength - 1] === newData[newLength - commonSuffixLength - 1]) { + commonSuffixLength++; + } + + return new SemanticTokensEdits([{ + start: commonPrefixLength, + deleteCount: (oldLength - commonPrefixLength - commonSuffixLength), + data: newData.subarray(commonPrefixLength, newLength - commonSuffixLength) + }], newResult.resultId); + } + + private _send(value: theia.SemanticTokens | theia.SemanticTokensEdits, original: theia.SemanticTokens | theia.SemanticTokensEdits): BinaryBuffer | null { + if (DocumentSemanticTokensAdapter._isSemanticTokens(value)) { + const myId = this._nextResultId++; + this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId, value.data)); + return encodeSemanticTokensDto({ + id: myId, + type: 'full', + data: value.data + }); + } + + if (DocumentSemanticTokensAdapter._isSemanticTokensEdits(value)) { + const myId = this._nextResultId++; + if (DocumentSemanticTokensAdapter._isSemanticTokens(original)) { + // store the original + this._previousResults.set(myId, new SemanticTokensPreviousResult(original.resultId, original.data)); + } else { + this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId)); + } + return encodeSemanticTokensDto({ + id: myId, + type: 'delta', + deltas: (value.edits || []).map(edit => ({ start: edit.start, deleteCount: edit.deleteCount, data: edit.data })) + }); + } + + return null; + } +} + +export class DocumentRangeSemanticTokensAdapter { + + constructor( + private readonly _documents: DocumentsExtImpl, + private readonly _provider: theia.DocumentRangeSemanticTokensProvider, + ) { + } + + async provideDocumentRangeSemanticTokens(resource: URI, range: Range, token: theia.CancellationToken): Promise { + const doc = 
this._documents.getDocument(resource); + const value = await this._provider.provideDocumentRangeSemanticTokens(doc, toRange(range), token); + if (!value) { + return null; + } + return this._send(value); + } + + private _send(value: theia.SemanticTokens): BinaryBuffer | null { + return encodeSemanticTokensDto({ + id: 0, + type: 'full', + data: value.data + }); + } +} diff --git a/packages/plugin-ext/src/plugin/plugin-context.ts b/packages/plugin-ext/src/plugin/plugin-context.ts index 625c9df62c32f..2e4f79e32b04c 100644 --- a/packages/plugin-ext/src/plugin/plugin-context.ts +++ b/packages/plugin-ext/src/plugin/plugin-context.ts @@ -123,7 +123,12 @@ import { CallHierarchyIncomingCall, CallHierarchyOutgoingCall, TimelineItem, - EnvironmentVariableMutatorType + EnvironmentVariableMutatorType, + SemanticTokensLegend, + SemanticTokensBuilder, + SemanticTokens, + SemanticTokensEdits, + SemanticTokensEdit } from './types-impl'; import { AuthenticationExtImpl } from './authentication-ext'; import { SymbolKind } from '../common/plugin-api-rpc-model'; @@ -672,6 +677,14 @@ export function createAPIFactory( registerRenameProvider(selector: theia.DocumentSelector, provider: theia.RenameProvider): theia.Disposable { return languagesExt.registerRenameProvider(selector, provider, pluginToPluginInfo(plugin)); }, + registerDocumentSemanticTokensProvider(selector: theia.DocumentSelector, provider: theia.DocumentSemanticTokensProvider, legend: theia.SemanticTokensLegend): + theia.Disposable { + return languagesExt.registerDocumentSemanticTokensProvider(selector, provider, legend, pluginToPluginInfo(plugin)); + }, + registerDocumentRangeSemanticTokensProvider(selector: theia.DocumentSelector, provider: theia.DocumentRangeSemanticTokensProvider, legend: theia.SemanticTokensLegend): + theia.Disposable { + return languagesExt.registerDocumentRangeSemanticTokensProvider(selector, provider, legend, pluginToPluginInfo(plugin)); + }, registerCallHierarchyProvider(selector: 
 theia.DocumentSelector, provider: theia.CallHierarchyProvider): theia.Disposable { return languagesExt.registerCallHierarchyProvider(selector, provider); } @@ -915,7 +928,12 @@ export function createAPIFactory( CallHierarchyIncomingCall, CallHierarchyOutgoingCall, TimelineItem, - EnvironmentVariableMutatorType + EnvironmentVariableMutatorType, + SemanticTokensLegend, + SemanticTokensBuilder, + SemanticTokens, + SemanticTokensEdits, + SemanticTokensEdit }; }; } diff --git a/packages/plugin-ext/src/plugin/types-impl.ts b/packages/plugin-ext/src/plugin/types-impl.ts index f363be14a1ccc..c3103dd494d5c 100644 --- a/packages/plugin-ext/src/plugin/types-impl.ts +++ b/packages/plugin-ext/src/plugin/types-impl.ts @@ -2185,3 +2185,232 @@ export class TimelineItem { this.timestamp = timestamp; } } + +// #region Semantic Coloring + +export class SemanticTokensLegend { + public readonly tokenTypes: string[]; + public readonly tokenModifiers: string[]; + + constructor(tokenTypes: string[], tokenModifiers: string[] = []) { + this.tokenTypes = tokenTypes; + this.tokenModifiers = tokenModifiers; + } +} + +function isStrArrayOrUndefined(arg: any): arg is string[] | undefined { + return ((typeof arg === 'undefined') || (Array.isArray(arg) && arg.every(e => typeof e === 'string'))); +} + +export class SemanticTokensBuilder { + + private _prevLine: number; + private _prevChar: number; + private _dataIsSortedAndDeltaEncoded: boolean; + private _data: number[]; + private _dataLen: number; + private _tokenTypeStrToInt: Map<string, number>; + private _tokenModifierStrToInt: Map<string, number>; + private _hasLegend: boolean; + + constructor(legend?: theia.SemanticTokensLegend) { + this._prevLine = 0; + this._prevChar = 0; + this._dataIsSortedAndDeltaEncoded = true; + this._data = []; + this._dataLen = 0; + this._tokenTypeStrToInt = new Map<string, number>(); + this._tokenModifierStrToInt = new Map<string, number>(); + this._hasLegend = false; + if (legend) { + this._hasLegend = true; + for (let i = 0, len = legend.tokenTypes.length; i < len; i++) { + 
this._tokenTypeStrToInt.set(legend.tokenTypes[i], i); + } + for (let i = 0, len = legend.tokenModifiers.length; i < len; i++) { + this._tokenModifierStrToInt.set(legend.tokenModifiers[i], i); + } + } + } + + public push(line: number, char: number, length: number, tokenType: number, tokenModifiers?: number): void; + public push(range: Range, tokenType: string, tokenModifiers?: string[]): void; + public push(arg0: any, arg1: any, arg2: any, arg3?: any, arg4?: any): void { + if (typeof arg0 === 'number' && typeof arg1 === 'number' && typeof arg2 === 'number' && typeof arg3 === 'number' && + (typeof arg4 === 'number' || typeof arg4 === 'undefined')) { + if (typeof arg4 === 'undefined') { + arg4 = 0; + } + // 1st overload + return this._pushEncoded(arg0, arg1, arg2, arg3, arg4); + } + if (Range.isRange(arg0) && typeof arg1 === 'string' && isStrArrayOrUndefined(arg2)) { + // 2nd overload + return this._push(arg0, arg1, arg2); + } + throw illegalArgument(); + } + + private _push(range: theia.Range, tokenType: string, tokenModifiers?: string[]): void { + if (!this._hasLegend) { + throw new Error('Legend must be provided in constructor'); + } + if (range.start.line !== range.end.line) { + throw new Error('`range` cannot span multiple lines'); + } + if (!this._tokenTypeStrToInt.has(tokenType)) { + throw new Error('`tokenType` is not in the provided legend'); + } + const line = range.start.line; + const char = range.start.character; + const length = range.end.character - range.start.character; + const nTokenType = this._tokenTypeStrToInt.get(tokenType)!; + let nTokenModifiers = 0; + if (tokenModifiers) { + for (const tokenModifier of tokenModifiers) { + if (!this._tokenModifierStrToInt.has(tokenModifier)) { + throw new Error('`tokenModifier` is not in the provided legend'); + } + const nTokenModifier = this._tokenModifierStrToInt.get(tokenModifier)!; + nTokenModifiers |= (1 << nTokenModifier) >>> 0; + } + } + this._pushEncoded(line, char, length, nTokenType, nTokenModifiers); 
+ } + + private _pushEncoded(line: number, char: number, length: number, tokenType: number, tokenModifiers: number): void { + if (this._dataIsSortedAndDeltaEncoded && (line < this._prevLine || (line === this._prevLine && char < this._prevChar))) { + // push calls were ordered and are no longer ordered + this._dataIsSortedAndDeltaEncoded = false; + + // Remove delta encoding from data + const tokenCount = (this._data.length / 5) | 0; + let prevLine = 0; + let prevChar = 0; + for (let i = 0; i < tokenCount; i++) { + // eslint-disable-next-line no-shadow + let line = this._data[5 * i]; + // eslint-disable-next-line no-shadow + let char = this._data[5 * i + 1]; + + if (line === 0) { + // on the same line as previous token + line = prevLine; + char += prevChar; + } else { + // on a different line than previous token + line += prevLine; + } + + this._data[5 * i] = line; + this._data[5 * i + 1] = char; + + prevLine = line; + prevChar = char; + } + } + + let pushLine = line; + let pushChar = char; + if (this._dataIsSortedAndDeltaEncoded && this._dataLen > 0) { + pushLine -= this._prevLine; + if (pushLine === 0) { + pushChar -= this._prevChar; + } + } + + this._data[this._dataLen++] = pushLine; + this._data[this._dataLen++] = pushChar; + this._data[this._dataLen++] = length; + this._data[this._dataLen++] = tokenType; + this._data[this._dataLen++] = tokenModifiers; + + this._prevLine = line; + this._prevChar = char; + } + + private static _sortAndDeltaEncode(data: number[]): Uint32Array { + const pos: number[] = []; + const tokenCount = (data.length / 5) | 0; + for (let i = 0; i < tokenCount; i++) { + pos[i] = i; + } + pos.sort((a, b) => { + const aLine = data[5 * a]; + const bLine = data[5 * b]; + if (aLine === bLine) { + const aChar = data[5 * a + 1]; + const bChar = data[5 * b + 1]; + return aChar - bChar; + } + return aLine - bLine; + }); + const result = new Uint32Array(data.length); + let prevLine = 0; + let prevChar = 0; + for (let i = 0; i < tokenCount; i++) { + 
const srcOffset = 5 * pos[i]; + const line = data[srcOffset + 0]; + const char = data[srcOffset + 1]; + const length = data[srcOffset + 2]; + const tokenType = data[srcOffset + 3]; + const tokenModifiers = data[srcOffset + 4]; + + const pushLine = line - prevLine; + const pushChar = (pushLine === 0 ? char - prevChar : char); + + const dstOffset = 5 * i; + result[dstOffset + 0] = pushLine; + result[dstOffset + 1] = pushChar; + result[dstOffset + 2] = length; + result[dstOffset + 3] = tokenType; + result[dstOffset + 4] = tokenModifiers; + + prevLine = line; + prevChar = char; + } + + return result; + } + + public build(resultId?: string): SemanticTokens { + if (!this._dataIsSortedAndDeltaEncoded) { + return new SemanticTokens(SemanticTokensBuilder._sortAndDeltaEncode(this._data), resultId); + } + return new SemanticTokens(new Uint32Array(this._data), resultId); + } +} + +export class SemanticTokens { + readonly resultId?: string; + readonly data: Uint32Array; + + constructor(data: Uint32Array, resultId?: string) { + this.resultId = resultId; + this.data = data; + } +} + +export class SemanticTokensEdit { + readonly start: number; + readonly deleteCount: number; + readonly data?: Uint32Array; + + constructor(start: number, deleteCount: number, data?: Uint32Array) { + this.start = start; + this.deleteCount = deleteCount; + this.data = data; + } +} + +export class SemanticTokensEdits { + readonly resultId?: string; + readonly edits: SemanticTokensEdit[]; + + constructor(edits: SemanticTokensEdit[], resultId?: string) { + this.resultId = resultId; + this.edits = edits; + } +} + +// #endregion diff --git a/packages/plugin/src/theia.d.ts b/packages/plugin/src/theia.d.ts index 723936c733364..5ec0f197e4e62 100644 --- a/packages/plugin/src/theia.d.ts +++ b/packages/plugin/src/theia.d.ts @@ -7504,6 +7504,233 @@ declare module '@theia/plugin' { prepareRename?(document: TextDocument, position: Position, token: CancellationToken): ProviderResult; } + /** + * A semantic tokens 
legend contains the needed information to decipher + * the integer encoded representation of semantic tokens. + */ + export class SemanticTokensLegend { + /** + * The possible token types. + */ + readonly tokenTypes: string[]; + /** + * The possible token modifiers. + */ + readonly tokenModifiers: string[]; + + constructor(tokenTypes: string[], tokenModifiers?: string[]); + } + + /** + * A semantic tokens builder can help with creating a `SemanticTokens` instance + * which contains delta encoded semantic tokens. + */ + export class SemanticTokensBuilder { + + constructor(legend?: SemanticTokensLegend); + + /** + * Add another token. + * + * @param line The token start line number (absolute value). + * @param char The token start character (absolute value). + * @param length The token length in characters. + * @param tokenType The encoded token type. + * @param tokenModifiers The encoded token modifiers. + */ + push(line: number, char: number, length: number, tokenType: number, tokenModifiers?: number): void; + + /** + * Add another token. Use only when providing a legend. + * + * @param range The range of the token. Must be single-line. + * @param tokenType The token type. + * @param tokenModifiers The token modifiers. + */ + push(range: Range, tokenType: string, tokenModifiers?: string[]): void; + + /** + * Finish and create a `SemanticTokens` instance. + */ + build(resultId?: string): SemanticTokens; + } + + /** + * Represents semantic tokens, either in a range or in an entire document. + * @see [provideDocumentSemanticTokens](#DocumentSemanticTokensProvider.provideDocumentSemanticTokens) for an explanation of the format. + * @see [SemanticTokensBuilder](#SemanticTokensBuilder) for a helper to create an instance. + */ + export class SemanticTokens { + /** + * The result id of the tokens. + * + * This is the id that will be passed to `DocumentSemanticTokensProvider.provideDocumentSemanticTokensEdits` (if implemented). 
+ */ + readonly resultId?: string; + /** + * The actual tokens data. + * @see [provideDocumentSemanticTokens](#DocumentSemanticTokensProvider.provideDocumentSemanticTokens) for an explanation of the format. + */ + readonly data: Uint32Array; + + constructor(data: Uint32Array, resultId?: string); + } + + /** + * Represents edits to semantic tokens. + * @see [provideDocumentSemanticTokensEdits](#DocumentSemanticTokensProvider.provideDocumentSemanticTokensEdits) for an explanation of the format. + */ + export class SemanticTokensEdits { + /** + * The result id of the tokens. + * + * This is the id that will be passed to `DocumentSemanticTokensProvider.provideDocumentSemanticTokensEdits` (if implemented). + */ + readonly resultId?: string; + /** + * The edits to the tokens data. + * All edits refer to the initial data state. + */ + readonly edits: SemanticTokensEdit[]; + + constructor(edits: SemanticTokensEdit[], resultId?: string); + } + + /** + * Represents an edit to semantic tokens. + * @see [provideDocumentSemanticTokensEdits](#DocumentSemanticTokensProvider.provideDocumentSemanticTokensEdits) for an explanation of the format. + */ + export class SemanticTokensEdit { + /** + * The start offset of the edit. + */ + readonly start: number; + /** + * The count of elements to remove. + */ + readonly deleteCount: number; + /** + * The elements to insert. + */ + readonly data?: Uint32Array; + + constructor(start: number, deleteCount: number, data?: Uint32Array); + } + + /** + * The document semantic tokens provider interface defines the contract between extensions and + * semantic tokens. + */ + export interface DocumentSemanticTokensProvider { + /** + * An optional event to signal that the semantic tokens from this provider have changed. + */ + onDidChangeSemanticTokens?: Event<void>; + + /** + * Tokens in a file are represented as an array of integers. 
The position of each token is expressed relative to + * the token before it, because most tokens remain stable relative to each other when edits are made in a file. + * + * --- + * In short, each token takes 5 integers to represent, so a specific token `i` in the file consists of the following array indices: + * - at index `5*i` - `deltaLine`: token line number, relative to the previous token + * - at index `5*i+1` - `deltaStart`: token start character, relative to the previous token (relative to 0 or the previous token's start if they are on the same line) + * - at index `5*i+2` - `length`: the length of the token. A token cannot be multiline. + * - at index `5*i+3` - `tokenType`: will be looked up in `SemanticTokensLegend.tokenTypes`. We currently ask that `tokenType` < 65536. + * - at index `5*i+4` - `tokenModifiers`: each set bit will be looked up in `SemanticTokensLegend.tokenModifiers` + * + * --- + * ### How to encode tokens + * + * Here is an example for encoding a file with 3 tokens in a uint32 array: + * ``` + * { line: 2, startChar: 5, length: 3, tokenType: "property", tokenModifiers: ["private", "static"] }, + * { line: 2, startChar: 10, length: 4, tokenType: "type", tokenModifiers: [] }, + * { line: 5, startChar: 2, length: 7, tokenType: "class", tokenModifiers: [] } + * ``` + * + * 1. First of all, a legend must be devised. This legend must be provided up-front and capture all possible token types. + * For this example, we will choose the following legend which must be passed in when registering the provider: + * ``` + * tokenTypes: ['property', 'type', 'class'], + * tokenModifiers: ['private', 'static'] + * ``` + * + * 2. The first transformation step is to encode `tokenType` and `tokenModifiers` as integers using the legend. Token types are looked + * up by index, so a `tokenType` value of `1` means `tokenTypes[1]`. 
Multiple token modifiers can be set by using bit flags, + * so a `tokenModifier` value of `3` is first viewed as binary `0b00000011`, which means `[tokenModifiers[0], tokenModifiers[1]]` because + * bits 0 and 1 are set. Using this legend, the tokens now are: + * ``` + * { line: 2, startChar: 5, length: 3, tokenType: 0, tokenModifiers: 3 }, + * { line: 2, startChar: 10, length: 4, tokenType: 1, tokenModifiers: 0 }, + * { line: 5, startChar: 2, length: 7, tokenType: 2, tokenModifiers: 0 } + * ``` + * + * 3. The next step is to represent each token relative to the previous token in the file. In this case, the second token + * is on the same line as the first token, so the `startChar` of the second token is made relative to the `startChar` + * of the first token, so it will be `10 - 5`. The third token is on a different line than the second token, so the + * `startChar` of the third token will not be altered: + * ``` + * { deltaLine: 2, deltaStartChar: 5, length: 3, tokenType: 0, tokenModifiers: 3 }, + * { deltaLine: 0, deltaStartChar: 5, length: 4, tokenType: 1, tokenModifiers: 0 }, + * { deltaLine: 3, deltaStartChar: 2, length: 7, tokenType: 2, tokenModifiers: 0 } + * ``` + * + * 4. Finally, the last step is to inline each of the 5 fields for a token in a single array, which is a memory friendly representation: + * ``` + * // 1st token, 2nd token, 3rd token + * [ 2,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0 ] + * ``` + * + * @see [SemanticTokensBuilder](#SemanticTokensBuilder) for a helper to encode tokens as integers. + * *NOTE*: When doing edits, it is possible that multiple edits occur until VS Code decides to invoke the semantic tokens provider. + * *NOTE*: If the provider cannot temporarily compute semantic tokens, it can indicate this by throwing an error with the message 'Busy'. 
+ */ + provideDocumentSemanticTokens(document: TextDocument, token: CancellationToken): ProviderResult<SemanticTokens>; + + /** + * Instead of always returning all the tokens in a file, it is possible for a `DocumentSemanticTokensProvider` to implement + * this method (`provideDocumentSemanticTokensEdits`) and then return incremental updates to the previously provided semantic tokens. + * + * --- + * ### How tokens change when the document changes + * + * Suppose that `provideDocumentSemanticTokens` has previously returned the following semantic tokens: + * ``` + * // 1st token, 2nd token, 3rd token + * [ 2,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0 ] + * ``` + * + * Also suppose that after some edits, the new semantic tokens in a file are: + * ``` + * // 1st token, 2nd token, 3rd token + * [ 3,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0 ] + * ``` + * It is possible to express these new tokens in terms of an edit applied to the previous tokens: + * ``` + * [ 2,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0 ] // old tokens + * [ 3,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0 ] // new tokens + * + * edit: { start: 0, deleteCount: 1, data: [3] } // replace integer at offset 0 with 3 + * ``` + * + * *NOTE*: If the provider cannot compute `SemanticTokensEdits`, it can "give up" and return all the tokens in the document again. + * *NOTE*: All edits in `SemanticTokensEdits` contain indices in the old integers array, so they all refer to the previous result state. + */ + provideDocumentSemanticTokensEdits?(document: TextDocument, previousResultId: string, token: CancellationToken): ProviderResult<SemanticTokens | SemanticTokensEdits>; + } + + /** + * The document range semantic tokens provider interface defines the contract between extensions and + * semantic tokens. + */ + export interface DocumentRangeSemanticTokensProvider { + /** + * @see [provideDocumentSemanticTokens](#DocumentSemanticTokensProvider.provideDocumentSemanticTokens). 
+ */ + provideDocumentRangeSemanticTokens(document: TextDocument, range: Range, token: CancellationToken): ProviderResult<SemanticTokens>; + } + export namespace languages { /** * Return the identifiers of all known languages. @@ -7897,6 +8124,38 @@ declare module '@theia/plugin' { */ export function registerRenameProvider(selector: DocumentSelector, provider: RenameProvider): Disposable; + /** + * Register a semantic tokens provider for a whole document. + * + * Multiple providers can be registered for a language. In that case providers are sorted + * by their [score](#languages.match) and the best-matching provider is used. Failure + * of the selected provider will cause a failure of the whole operation. + * + * @param selector A selector that defines the documents this provider is applicable to. + * @param provider A document semantic tokens provider. + * @return A [disposable](#Disposable) that unregisters this provider when being disposed. + */ + export function registerDocumentSemanticTokensProvider(selector: DocumentSelector, provider: DocumentSemanticTokensProvider, legend: SemanticTokensLegend): Disposable; + + /** + * Register a semantic tokens provider for a document range. + * + * *Note:* If a document has both a `DocumentSemanticTokensProvider` and a `DocumentRangeSemanticTokensProvider`, + * the range provider will be invoked only initially, for the time in which the full document provider takes + * to resolve the first request. Once the full document provider resolves the first request, the semantic tokens + * provided via the range provider will be discarded and from that point forward, only the document provider + * will be used. + * + * Multiple providers can be registered for a language. In that case providers are sorted + * by their [score](#languages.match) and the best-matching provider is used. Failure + * of the selected provider will cause a failure of the whole operation. 
+ * + * @param selector A selector that defines the documents this provider is applicable to. + * @param provider A document range semantic tokens provider. + * @return A [disposable](#Disposable) that unregisters this provider when being disposed. + */ + export function registerDocumentRangeSemanticTokensProvider(selector: DocumentSelector, provider: DocumentRangeSemanticTokensProvider, legend: SemanticTokensLegend): Disposable; + /** * Register a call hierarchy provider. *