diff --git a/Sources/Errors.swift b/Sources/Errors.swift index 407a9e25..55b976a8 100644 --- a/Sources/Errors.swift +++ b/Sources/Errors.swift @@ -66,11 +66,11 @@ open class SimpleErrorReporter: ErrorReporter { func describe(token: Token) -> String { let templateName = token.sourceMap.filename ?? "" - let line = token.sourceMap.line - let highlight = "\(String(Array(repeating: " ", count: line.offset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))" + let location = token.sourceMap.location + let highlight = "\(String(Array(repeating: " ", count: location.lineOffset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))" - return "\(templateName)\(line.number):\(line.offset): error: \(templateError.reason)\n" - + "\(line.content)\n" + return "\(templateName)\(location.lineNumber):\(location.lineOffset): error: \(templateError.reason)\n" + + "\(location.content)\n" + "\(highlight)\n" } diff --git a/Sources/Lexer.swift b/Sources/Lexer.swift index ec833d5c..fdbc8805 100644 --- a/Sources/Lexer.swift +++ b/Sources/Lexer.swift @@ -1,12 +1,20 @@ import Foundation +typealias Line = (content: String, number: UInt, range: Range<String.Index>) + struct Lexer { let templateName: String? let templateString: String + let lines: [Line] init(templateName: String? = nil, templateString: String) { self.templateName = templateName self.templateString = templateString + + self.lines = templateString.components(separatedBy: .newlines).enumerated().flatMap { + guard !$0.element.isEmpty else { return nil } + return (content: $0.element, number: UInt($0.offset + 1), templateString.range(of: $0.element)!) + } } func createToken(string: String, at range: Range<String.Index>) -> Token { @@ -25,8 +33,8 @@ struct Lexer { if string.hasPrefix("{{") || string.hasPrefix("{%") || string.hasPrefix("{#") { let value = strip() let range = templateString.range(of: value, range: range) ?? 
range - let line = templateString.rangeLine(range) - let sourceMap = SourceMap(filename: templateName, line: line) + let location = rangeLocation(range) + let sourceMap = SourceMap(filename: templateName, location: location) if string.hasPrefix("{{") { return .variable(value: value, at: sourceMap) @@ -37,8 +45,8 @@ } } - let line = templateString.rangeLine(range) - let sourceMap = SourceMap(filename: templateName, line: line) + let location = rangeLocation(range) + let sourceMap = SourceMap(filename: templateName, location: location) return .text(value: string, at: sourceMap) } @@ -72,6 +80,14 @@ struct Lexer { return tokens } + func rangeLocation(_ range: Range<String.Index>) -> ContentLocation { + guard let line = self.lines.first(where: { $0.range.contains(range.lowerBound) }) else { + return ("", 0, 0) + } + let offset = templateString.distance(from: line.range.lowerBound, to: range.lowerBound) + return (line.content, line.number, offset) + } + } class Scanner { @@ -179,23 +195,6 @@ extension String { let last = findLastNot(character: character) ?? endIndex return String(self[first..<last]) } - func rangeLine(_ range: Range<String.Index>) 
-> RangeLine { - var lineNumber: UInt = 0 - var offset: Int = 0 - var lineContent = "" - - for line in components(separatedBy: CharacterSet.newlines) { - lineNumber += 1 - lineContent = line - if let rangeOfLine = self.range(of: line), rangeOfLine.contains(range.lowerBound) { - offset = distance(from: rangeOfLine.lowerBound, to: range.lowerBound) - break - } - } - - return (lineContent, lineNumber, offset) - } } -public typealias RangeLine = (content: String, number: UInt, offset: Int) +public typealias ContentLocation = (content: String, lineNumber: UInt, lineOffset: Int) diff --git a/Sources/Parser.swift b/Sources/Parser.swift index 81d93355..b36f1606 100644 --- a/Sources/Parser.swift +++ b/Sources/Parser.swift @@ -125,9 +125,9 @@ public class TokenParser { } // find offset of filter in the containing token so that only filter is highligted, not the whole token if let filterTokenRange = containingToken.contents.range(of: filterToken) { - var rangeLine = containingToken.sourceMap.line - rangeLine.offset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound) - syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, line: rangeLine)) + var location = containingToken.sourceMap.location + location.lineOffset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound) + syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, location: location)) } else { syntaxError.token = containingToken } diff --git a/Sources/Tokenizer.swift b/Sources/Tokenizer.swift index a243f802..53f52054 100644 --- a/Sources/Tokenizer.swift +++ b/Sources/Tokenizer.swift @@ -57,17 +57,17 @@ extension String { public struct SourceMap: Equatable { public let filename: String? - public let line: RangeLine + public let location: ContentLocation - init(filename: String? 
= nil, line: RangeLine = ("", 0, 0)) { + init(filename: String? = nil, location: ContentLocation = ("", 0, 0)) { self.filename = filename - self.line = line + self.location = location } static let unknown = SourceMap() public static func ==(lhs: SourceMap, rhs: SourceMap) -> Bool { - return lhs.filename == rhs.filename && lhs.line == rhs.line + return lhs.filename == rhs.filename && lhs.location == rhs.location } } diff --git a/Tests/StencilTests/EnvironmentSpec.swift b/Tests/StencilTests/EnvironmentSpec.swift index aa68c3a0..4c8abed1 100644 --- a/Tests/StencilTests/EnvironmentSpec.swift +++ b/Tests/StencilTests/EnvironmentSpec.swift @@ -44,8 +44,9 @@ func testEnvironment() { guard let range = template.templateString.range(of: token) else { fatalError("Can't find '\(token)' in '\(template)'") } - let rangeLine = template.templateString.rangeLine(range) - let sourceMap = SourceMap(filename: template.name, line: rangeLine) + let lexer = Lexer(templateString: template.templateString) + let location = lexer.rangeLocation(range) + let sourceMap = SourceMap(filename: template.name, location: location) let token = Token.block(value: token, at: sourceMap) return TemplateSyntaxError(reason: description, token: token, stackTrace: []) } diff --git a/Tests/StencilTests/FilterSpec.swift b/Tests/StencilTests/FilterSpec.swift index 6c9139f6..fe40fc4e 100644 --- a/Tests/StencilTests/FilterSpec.swift +++ b/Tests/StencilTests/FilterSpec.swift @@ -221,8 +221,9 @@ func testFilter() { guard let range = template.templateString.range(of: token) else { fatalError("Can't find '\(token)' in '\(template)'") } - let rangeLine = template.templateString.rangeLine(range) - let sourceMap = SourceMap(filename: template.name, line: rangeLine) + let lexer = Lexer(templateString: template.templateString) + let location = lexer.rangeLocation(range) + let sourceMap = SourceMap(filename: template.name, location: location) let token = Token.block(value: token, at: sourceMap) return 
TemplateSyntaxError(reason: description, token: token, stackTrace: []) } diff --git a/Tests/StencilTests/LexerSpec.swift b/Tests/StencilTests/LexerSpec.swift index 2a9f1e11..1babed98 100644 --- a/Tests/StencilTests/LexerSpec.swift +++ b/Tests/StencilTests/LexerSpec.swift @@ -9,7 +9,7 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 1 - try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(line: ("Hello World", 1, 0))) + try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(location: ("Hello World", 1, 0))) } $0.it("can tokenize a comment") { @@ -17,7 +17,7 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 1 - try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(line: ("{# Comment #}", 1, 3))) + try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(location: ("{# Comment #}", 1, 3))) } $0.it("can tokenize a variable") { @@ -25,7 +25,7 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 1 - try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(line: ("{{ Variable }}", 1, 3))) + try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(location: ("{{ Variable }}", 1, 3))) } $0.it("can tokenize unclosed tag by ignoring it") { @@ -34,7 +34,7 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 1 - try expect(tokens.first) == .text(value: "", at: SourceMap(line: ("{{ thing", 1, 0))) + try expect(tokens.first) == .text(value: "", at: SourceMap(location: ("{{ thing", 1, 0))) } $0.it("can tokenize a mixture of content") { @@ -43,9 +43,9 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 3 - try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is ")!))) - try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(line: 
templateString.rangeLine(templateString.range(of: "myname")!))) - try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!))) + try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is ")!))) + try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "myname")!))) + try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!))) } $0.it("can tokenize two variables without being greedy") { @@ -54,8 +54,8 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 2 - try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "thing")!))) - try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name")!))) + try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "thing")!))) + try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name")!))) } $0.it("can tokenize an unclosed block") { @@ -84,11 +84,11 @@ func testLexer() { let tokens = lexer.tokenize() try expect(tokens.count) == 5 - try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is")!))) - try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "{%")!))) - try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name", options: [.backwards])!))) - try expect(tokens[3]) == Token.block(value: "endif", at: 
SourceMap(line: templateString.rangeLine(templateString.range(of: "endif")!))) - try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!))) + try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!))) + try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!))) + try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!))) + try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!))) + try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!))) } } }