Skip to content

Commit

Permalink
Merge pull request #252 from stencilproject/fix/lexer-range
Browse files Browse the repository at this point in the history
Fix lexer range calculation for tokens
  • Loading branch information
djbe authored Sep 26, 2018
2 parents c7dbba4 + 524c0ac commit 7417332
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 5 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
# Stencil Changelog

## Master

### Bug Fixes

- Fixed a bug in Stencil 0.13 where tags without spaces were incorrectly parsed.
  [David Jennes](https://github.com/djbe)
  [#252](https://github.com/stencilproject/Stencil/pull/252)


## 0.13.0

### Breaking
Expand Down
2 changes: 1 addition & 1 deletion Sources/Lexer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ class Scanner {

for (index, char) in content.unicodeScalars.enumerated() {
if foundChar && char == Scanner.tokenEndDelimiter {
let result = String(content.prefix(index))
let result = String(content.prefix(index + 1))
content = String(content.dropFirst(index + 1))
range = range.upperBound..<originalContent.index(range.upperBound, offsetBy: index + 1)
return result
Expand Down
16 changes: 12 additions & 4 deletions Tests/StencilTests/LexerSpec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -16,23 +16,31 @@ class LexerTests: XCTestCase {
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(location: ("Hello World", 1, 0)))
try expect(tokens.first) == .text(value: "Hello World", at: makeSourceMap("Hello World", for: lexer))
}

$0.it("can tokenize a comment") {
let lexer = Lexer(templateString: "{# Comment #}")
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(location: ("{# Comment #}", 1, 3)))
try expect(tokens.first) == .comment(value: "Comment", at: makeSourceMap("Comment", for: lexer))
}

$0.it("can tokenize a variable") {
let lexer = Lexer(templateString: "{{ Variable }}")
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(location: ("{{ Variable }}", 1, 3)))
try expect(tokens.first) == .variable(value: "Variable", at: makeSourceMap("Variable", for: lexer))
}

$0.it("can tokenize a token without spaces") {
let lexer = Lexer(templateString: "{{Variable}}")
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .variable(value: "Variable", at: makeSourceMap("Variable", for: lexer))
}

$0.it("can tokenize unclosed tag by ignoring it") {
Expand All @@ -41,7 +49,7 @@ class LexerTests: XCTestCase {
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "", at: SourceMap(location: ("{{ thing", 1, 0)))
try expect(tokens.first) == .text(value: "", at: makeSourceMap("{{ thing", for: lexer))
}

$0.it("can tokenize a mixture of content") {
Expand Down

0 comments on commit 7417332

Please sign in to comment.