diff --git a/CHANGELOG.md b/CHANGELOG.md
index 51a824c..dffb08a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@ All notable changes to this project will be documented in this file.
 ### Fixed
 - ignore quotes in comments if it's within an embedded argument with CRLF used as a newline delimiter ([#113](https://github.com/rcjsuen/dockerfile-language-service/issues/113))
 - prevent 0-length semantic tokens from being added ([#114](https://github.com/rcjsuen/dockerfile-language-service/issues/114))
+- correct typo in the comment handling logic of semantic tokens with CRLF newlines ([#116](https://github.com/rcjsuen/dockerfile-language-service/issues/116))
 
 ## [0.10.1] - 2023-05-31
 ### Fixed
diff --git a/src/dockerSemanticTokens.ts b/src/dockerSemanticTokens.ts
index cebf6fd..939501e 100644
--- a/src/dockerSemanticTokens.ts
+++ b/src/dockerSemanticTokens.ts
@@ -410,7 +410,7 @@ export class DockerSemanticTokens {
                     const escapedCh = this.content.charAt(j);
                     switch (escapedCh) {
                         case '\r':
-                            j + 1;
+                            j++;
                         case '\n':
                             i = j;
                             continue stringsCheck;
diff --git a/test/dockerSemanticTokens.test.ts b/test/dockerSemanticTokens.test.ts
index 444215c..fd1154f 100644
--- a/test/dockerSemanticTokens.test.ts
+++ b/test/dockerSemanticTokens.test.ts
@@ -2203,6 +2203,28 @@ describe("Dockerfile Semantic Token tests", () => {
                 assertEdit(tokens.data, SemanticTokenTypes.parameter, 40, 1, 0, 4);
                 assertEdit(tokens.data, SemanticTokenTypes.string, 45, 0, 5, 7);
             });
+
+            it("multiline instruction with string content before and after escaped newline with an embedded comment", () => {
+                let content = `RUN a''\\\n#\n''`;
+                let tokens = computeSemanticTokens(content);
+                assert.strictEqual(tokens.data.length, 30);
+                assertEdit(tokens.data, SemanticTokenTypes.keyword, 0, 0, 0, 3);
+                assertEdit(tokens.data, SemanticTokenTypes.parameter, 5, 0, 4, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.string, 10, 0, 1, 2);
+                assertEdit(tokens.data, SemanticTokenTypes.macro, 15, 0, 2, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.comment, 20, 1, 0, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.string, 25, 1, 0, 2);
+
+                content = `RUN a''\\\r\n#\r\n''`;
+                tokens = computeSemanticTokens(content);
+                assert.strictEqual(tokens.data.length, 30);
+                assertEdit(tokens.data, SemanticTokenTypes.keyword, 0, 0, 0, 3);
+                assertEdit(tokens.data, SemanticTokenTypes.parameter, 5, 0, 4, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.string, 10, 0, 1, 2);
+                assertEdit(tokens.data, SemanticTokenTypes.macro, 15, 0, 2, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.comment, 20, 1, 0, 1);
+                assertEdit(tokens.data, SemanticTokenTypes.string, 25, 1, 0, 2);
+            });
         });
     });
 });
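
For readers skimming the patch: the one-character fix in src/dockerSemanticTokens.ts relies on the switch's intentional fall-through from the '\r' case into the '\n' case. The old statement `j + 1;` computes a value and discards it, so on CRLF line endings `j` still pointed at the '\r' when `i = j` ran; `j++` advances the index in place so scanning resumes on the '\n', just as in the LF-only case. Below is a minimal standalone sketch of that pattern; the `skipEscapedNewline` helper and its sample inputs are illustrative only and are not part of the library.

// Illustrative sketch only: mirrors the fall-through pattern in the fixed hunk,
// not the actual DockerSemanticTokens implementation.
function skipEscapedNewline(content: string, j: number): number {
    switch (content.charAt(j)) {
        case '\r':
            j++; // the fixed line: step past the '\r' so both branches end on the '\n'
            // intentional fall-through
        case '\n':
            return j; // caller resumes scanning at the newline character
    }
    return -1; // not a newline at this position
}

// With LF the index is returned unchanged; with CRLF it now advances by one.
console.log(skipEscapedNewline("a\\\n''", 2));   // 2 -> the '\n'
console.log(skipEscapedNewline("a\\\r\n''", 2)); // 3 -> the '\n', no longer the '\r'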