Skip to content

Commit

Permalink
process template lines when Lexer is created, not when parsing each token. (#230)
Browse files Browse the repository at this point in the history
  • Loading branch information
ilyapuchka authored Sep 10, 2018
1 parent 00e71c1 commit acda1b0
Show file tree
Hide file tree
Showing 7 changed files with 52 additions and 51 deletions.
8 changes: 4 additions & 4 deletions Sources/Errors.swift
Original file line number Diff line number Diff line change
Expand Up @@ -66,11 +66,11 @@ open class SimpleErrorReporter: ErrorReporter {

func describe(token: Token) -> String {
let templateName = token.sourceMap.filename ?? ""
let line = token.sourceMap.line
let highlight = "\(String(Array(repeating: " ", count: line.offset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))"
let location = token.sourceMap.location
let highlight = "\(String(Array(repeating: " ", count: location.lineOffset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))"

return "\(templateName)\(line.number):\(line.offset): error: \(templateError.reason)\n"
+ "\(line.content)\n"
return "\(templateName)\(location.lineNumber):\(location.lineOffset): error: \(templateError.reason)\n"
+ "\(location.content)\n"
+ "\(highlight)\n"
}

Expand Down
43 changes: 21 additions & 22 deletions Sources/Lexer.swift
Original file line number Diff line number Diff line change
@@ -1,12 +1,20 @@
import Foundation

typealias Line = (content: String, number: UInt, range: Range<String.Index>)

struct Lexer {
let templateName: String?
let templateString: String
let lines: [Line]

/// Creates a lexer for the given template.
///
/// - Parameters:
///   - templateName: Optional name of the template file, used in source maps.
///   - templateString: The full template text to tokenize.
///
/// Line ranges are precomputed once here (rather than on every token) so that
/// token-to-line lookups during tokenization are cheap.
init(templateName: String? = nil, templateString: String) {
    self.templateName = templateName
    self.templateString = templateString

    // Scan forward through the string while enumerating lines so that
    // duplicate line contents resolve to their own occurrence. The previous
    // `templateString.range(of: $0.element)!` always found the FIRST
    // occurrence, mis-mapping any repeated line (and relying on a force
    // unwrap).
    var computed: [Line] = []
    var searchStart = templateString.startIndex
    for (index, content) in templateString.components(separatedBy: .newlines).enumerated() {
        // Empty components contribute to line numbering but are not stored,
        // matching the previous behavior of dropping empty lines.
        guard !content.isEmpty,
            let range = templateString.range(of: content, range: searchStart..<templateString.endIndex)
            else { continue }
        // Lines cannot contain newline characters, so resuming the search at
        // the end of this match keeps occurrences in document order.
        searchStart = range.upperBound
        computed.append((content: content, number: UInt(index + 1), range: range))
    }
    self.lines = computed
}

func createToken(string: String, at range: Range<String.Index>) -> Token {
Expand All @@ -25,8 +33,8 @@ struct Lexer {
if string.hasPrefix("{{") || string.hasPrefix("{%") || string.hasPrefix("{#") {
let value = strip()
let range = templateString.range(of: value, range: range) ?? range
let line = templateString.rangeLine(range)
let sourceMap = SourceMap(filename: templateName, line: line)
let location = rangeLocation(range)
let sourceMap = SourceMap(filename: templateName, location: location)

if string.hasPrefix("{{") {
return .variable(value: value, at: sourceMap)
Expand All @@ -37,8 +45,8 @@ struct Lexer {
}
}

let line = templateString.rangeLine(range)
let sourceMap = SourceMap(filename: templateName, line: line)
let location = rangeLocation(range)
let sourceMap = SourceMap(filename: templateName, location: location)
return .text(value: string, at: sourceMap)
}

Expand Down Expand Up @@ -72,6 +80,14 @@ struct Lexer {
return tokens
}

/// Resolves a range within the template string to a `ContentLocation`:
/// the containing line's content, its 1-based number, and the character
/// offset of the range's start within that line.
///
/// Returns `("", 0, 0)` when the range does not fall on any known line.
func rangeLocation(_ range: Range<String.Index>) -> ContentLocation {
    let containingLine = lines.first { $0.range.contains(range.lowerBound) }
    guard let line = containingLine else {
        return ("", 0, 0)
    }
    let column = templateString.distance(from: line.range.lowerBound, to: range.lowerBound)
    return (line.content, line.number, column)
}

}

class Scanner {
Expand Down Expand Up @@ -179,23 +195,6 @@ extension String {
let last = findLastNot(character: character) ?? endIndex
return String(self[first..<last])
}

/// Finds the line containing `range.lowerBound` in this string.
///
/// - Parameter range: A range within this string.
/// - Returns: The line's content, its 1-based number, and the offset of
///   `range.lowerBound` within that line. If no line contains the range's
///   start, the last enumerated line is returned with offset 0 (preserving
///   the historical fall-through behavior).
public func rangeLine(_ range: Range<String.Index>) -> RangeLine {
    var lineNumber: UInt = 0
    var offset: Int = 0
    var lineContent = ""
    // Advance a search cursor as lines are enumerated; searching the whole
    // string with `range(of: line)` found only the FIRST occurrence, so
    // duplicate line contents were attributed to the wrong line/offset.
    var searchStart = startIndex

    for line in components(separatedBy: CharacterSet.newlines) {
        lineNumber += 1
        lineContent = line
        // `range(of:)` returns nil for an empty line; the cursor then stays
        // put, which is safe because lines never contain newlines.
        if let rangeOfLine = self.range(of: line, range: searchStart..<endIndex) {
            searchStart = rangeOfLine.upperBound
            if rangeOfLine.contains(range.lowerBound) {
                offset = distance(from: rangeOfLine.lowerBound, to: range.lowerBound)
                break
            }
        }
    }

    return (lineContent, lineNumber, offset)
}
}

public typealias RangeLine = (content: String, number: UInt, offset: Int)
public typealias ContentLocation = (content: String, lineNumber: UInt, lineOffset: Int)
6 changes: 3 additions & 3 deletions Sources/Parser.swift
Original file line number Diff line number Diff line change
Expand Up @@ -125,9 +125,9 @@ public class TokenParser {
}
// find offset of filter in the containing token so that only the filter is highlighted, not the whole token
if let filterTokenRange = containingToken.contents.range(of: filterToken) {
var rangeLine = containingToken.sourceMap.line
rangeLine.offset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound)
syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, line: rangeLine))
var location = containingToken.sourceMap.location
location.lineOffset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound)
syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, location: location))
} else {
syntaxError.token = containingToken
}
Expand Down
8 changes: 4 additions & 4 deletions Sources/Tokenizer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -57,17 +57,17 @@ extension String {

public struct SourceMap: Equatable {
public let filename: String?
public let line: RangeLine
public let location: ContentLocation

init(filename: String? = nil, line: RangeLine = ("", 0, 0)) {
init(filename: String? = nil, location: ContentLocation = ("", 0, 0)) {
self.filename = filename
self.line = line
self.location = location
}

static let unknown = SourceMap()

public static func ==(lhs: SourceMap, rhs: SourceMap) -> Bool {
return lhs.filename == rhs.filename && lhs.line == rhs.line
return lhs.filename == rhs.filename && lhs.location == rhs.location
}
}

Expand Down
5 changes: 3 additions & 2 deletions Tests/StencilTests/EnvironmentSpec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,9 @@ func testEnvironment() {
guard let range = template.templateString.range(of: token) else {
fatalError("Can't find '\(token)' in '\(template)'")
}
let rangeLine = template.templateString.rangeLine(range)
let sourceMap = SourceMap(filename: template.name, line: rangeLine)
let lexer = Lexer(templateString: template.templateString)
let location = lexer.rangeLocation(range)
let sourceMap = SourceMap(filename: template.name, location: location)
let token = Token.block(value: token, at: sourceMap)
return TemplateSyntaxError(reason: description, token: token, stackTrace: [])
}
Expand Down
5 changes: 3 additions & 2 deletions Tests/StencilTests/FilterSpec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -221,8 +221,9 @@ func testFilter() {
guard let range = template.templateString.range(of: token) else {
fatalError("Can't find '\(token)' in '\(template)'")
}
let rangeLine = template.templateString.rangeLine(range)
let sourceMap = SourceMap(filename: template.name, line: rangeLine)
let lexer = Lexer(templateString: template.templateString)
let location = lexer.rangeLocation(range)
let sourceMap = SourceMap(filename: template.name, location: location)
let token = Token.block(value: token, at: sourceMap)
return TemplateSyntaxError(reason: description, token: token, stackTrace: [])
}
Expand Down
28 changes: 14 additions & 14 deletions Tests/StencilTests/LexerSpec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -9,23 +9,23 @@ func testLexer() {
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(line: ("Hello World", 1, 0)))
try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(location: ("Hello World", 1, 0)))
}

$0.it("can tokenize a comment") {
let lexer = Lexer(templateString: "{# Comment #}")
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(line: ("{# Comment #}", 1, 3)))
try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(location: ("{# Comment #}", 1, 3)))
}

$0.it("can tokenize a variable") {
let lexer = Lexer(templateString: "{{ Variable }}")
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(line: ("{{ Variable }}", 1, 3)))
try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(location: ("{{ Variable }}", 1, 3)))
}

$0.it("can tokenize unclosed tag by ignoring it") {
Expand All @@ -34,7 +34,7 @@ func testLexer() {
let tokens = lexer.tokenize()

try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "", at: SourceMap(line: ("{{ thing", 1, 0)))
try expect(tokens.first) == .text(value: "", at: SourceMap(location: ("{{ thing", 1, 0)))
}

$0.it("can tokenize a mixture of content") {
Expand All @@ -43,9 +43,9 @@ func testLexer() {
let tokens = lexer.tokenize()

try expect(tokens.count) == 3
try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is ")!)))
try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "myname")!)))
try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!)))
try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is ")!)))
try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "myname")!)))
try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!)))
}

$0.it("can tokenize two variables without being greedy") {
Expand All @@ -54,8 +54,8 @@ func testLexer() {
let tokens = lexer.tokenize()

try expect(tokens.count) == 2
try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "thing")!)))
try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name")!)))
try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "thing")!)))
try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name")!)))
}

$0.it("can tokenize an unclosed block") {
Expand Down Expand Up @@ -84,11 +84,11 @@ func testLexer() {
let tokens = lexer.tokenize()

try expect(tokens.count) == 5
try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is")!)))
try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "{%")!)))
try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name", options: [.backwards])!)))
try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "endif")!)))
try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!)))
try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!)))
try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!)))
try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!)))
try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!)))
try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!)))
}
}
}

0 comments on commit acda1b0

Please sign in to comment.