From 07a6b2aea5c476b7de5dbd8bc9a8e8d0ab18028d Mon Sep 17 00:00:00 2001 From: ethorpe Date: Wed, 4 Apr 2018 13:57:33 +1000 Subject: [PATCH 1/6] Rewrites scanner for better performance. This is primarily an improvement under Ubuntu Cleanup readability a little bit Rewrite original scan function so it's available. Syntax improvements Fix deprecation warnings in Lexer Cleanup some syntax issues lexer t t --- Sources/Lexer.swift | 90 ++++++++++++++++++--------------------------- 1 file changed, 35 insertions(+), 55 deletions(-) diff --git a/Sources/Lexer.swift b/Sources/Lexer.swift index 26d6a1ad..6781f0d6 100644 --- a/Sources/Lexer.swift +++ b/Sources/Lexer.swift @@ -7,6 +7,13 @@ struct Lexer { let templateString: String let lines: [Line] + private static let tokenChars: [Unicode.Scalar] = ["{", "%", "#"] + private static let tokenCharMap: [Unicode.Scalar: Unicode.Scalar] = [ + "{": "}", + "%": "%", + "#": "#" + ] + init(templateName: String? = nil, templateString: String) { self.templateName = templateName self.templateString = templateString @@ -20,9 +27,7 @@ struct Lexer { func createToken(string: String, at range: Range) -> Token { func strip() -> String { guard string.count > 4 else { return "" } - let start = string.index(string.startIndex, offsetBy: 2) - let end = string.index(string.endIndex, offsetBy: -2) - let trimmed = String(string[start.. + private static let tokenStartDelimiter: Unicode.Scalar = "{" + private static let tokenEndDelimiter: Unicode.Scalar = "}" + init(_ content: String) { self.originalContent = content self.content = content @@ -105,64 +106,43 @@ class Scanner { return content.isEmpty } - func scan(until: String, returnUntil: Bool = false) -> String { - var index = content.startIndex - - if until.isEmpty { - return "" - } - - range = range.upperBound.. String { + var foundChar = false - if returnUntil { - range = range.lowerBound.. (String, String)? 
{ - if until.isEmpty { - return nil - } + func scanForTokenStart(_ tokenChars: [Unicode.Scalar]) -> (Unicode.Scalar, String)? { + var foundBrace = false - var index = content.startIndex range = range.upperBound.. String.Index? { var index = startIndex From 4f84627caa594a2982f19284fe03d42e1b39609d Mon Sep 17 00:00:00 2001 From: David Jennes Date: Sat, 22 Sep 2018 00:54:10 +0200 Subject: [PATCH 2/6] Add test for crashing --- Tests/StencilTests/LexerSpec.swift | 50 +++++++++++++++++------------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/Tests/StencilTests/LexerSpec.swift b/Tests/StencilTests/LexerSpec.swift index 6f49a4cc..02f6d5ee 100644 --- a/Tests/StencilTests/LexerSpec.swift +++ b/Tests/StencilTests/LexerSpec.swift @@ -61,36 +61,42 @@ class LexerTests: XCTestCase { $0.it("can tokenize an unclosed block") { let lexer = Lexer(templateString: "{%}") - let _ = lexer.tokenize() + _ = lexer.tokenize() + } + + $0.it("can tokenize incorrect syntax without crashing") { + let lexer = Lexer(templateString: "func some() {{% if %}") + _ = lexer.tokenize() } $0.it("can tokenize an empty variable") { let lexer = Lexer(templateString: "{{}}") - let _ = lexer.tokenize() + _ = lexer.tokenize() } $0.it("can tokenize with new lines") { let templateString = """ - My name is {% - if name - and - name - %}{{ - name - }}{% - endif %}. 
- """ - - let lexer = Lexer(templateString: templateString) - - let tokens = lexer.tokenize() - - try expect(tokens.count) == 5 - try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!))) - try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!))) - try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!))) - try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!))) - try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!))) + My name is {% + if name + and + name + %}{{ + name + }}{% + endif %}. + """ + + let lexer = Lexer(templateString: templateString) + + let tokens = lexer.tokenize() + + try expect(tokens.count) == 5 + try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!))) + try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!))) + try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!))) + try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!))) + try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!))) + } } } } From e77bd22e839eea185e8d22d1da78a0f741e2f8ee Mon Sep 17 00:00:00 2001 From: Liquidsoul Date: Sat, 1 Sep 2018 12:15:15 +0200 Subject: [PATCH 3/6] Add changelog entry --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) 
diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ce06c87..9645027b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,6 +40,11 @@ - Update to Spectre 0.9.0. [Ilya Puchka](https://github.com/ilyapuchka) [#247](https://github.com/stencilproject/Stencil/pull/247) +- Optimise Scanner performance. + [Eric Thorpe](https://github.com/trametheka) + [Sébastien Duperron](https://github.com/Liquidsoul) + [David Jennes](https://github.com/djbe) + [#226](https://github.com/stencilproject/Stencil/pull/226) ## 0.12.1 From 652dcd246dcb8a9779617b950f5a42d9c8abee6c Mon Sep 17 00:00:00 2001 From: David Jennes Date: Sat, 22 Sep 2018 03:33:40 +0200 Subject: [PATCH 4/6] Add lexer test for escape sequence --- Tests/StencilTests/LexerSpec.swift | 43 ++++++++++++++++++++---------- 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/Tests/StencilTests/LexerSpec.swift b/Tests/StencilTests/LexerSpec.swift index 02f6d5ee..c772d30d 100644 --- a/Tests/StencilTests/LexerSpec.swift +++ b/Tests/StencilTests/LexerSpec.swift @@ -5,6 +5,11 @@ import Spectre class LexerTests: XCTestCase { func testLexer() { describe("Lexer") { + func makeSourceMap(_ token: String, for lexer: Lexer, options: String.CompareOptions = []) -> SourceMap { + guard let range = lexer.templateString.range(of: token, options: options) else { fatalError("Token not found") } + return SourceMap(location: lexer.rangeLocation(range)) + } + $0.it("can tokenize text") { let lexer = Lexer(templateString: "Hello World") let tokens = lexer.tokenize() @@ -44,9 +49,9 @@ class LexerTests: XCTestCase { let tokens = lexer.tokenize() try expect(tokens.count) == 3 - try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is ")!))) - try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "myname")!))) - try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(location: 
lexer.rangeLocation(templateString.range(of: ".")!))) + try expect(tokens[0]) == Token.text(value: "My name is ", at: makeSourceMap("My name is ", for: lexer)) + try expect(tokens[1]) == Token.variable(value: "myname", at: makeSourceMap("myname", for: lexer)) + try expect(tokens[2]) == Token.text(value: ".", at: makeSourceMap(".", for: lexer)) } $0.it("can tokenize two variables without being greedy") { @@ -55,8 +60,8 @@ class LexerTests: XCTestCase { let tokens = lexer.tokenize() try expect(tokens.count) == 2 - try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "thing")!))) - try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name")!))) + try expect(tokens[0]) == Token.variable(value: "thing", at: makeSourceMap("thing", for: lexer)) + try expect(tokens[1]) == Token.variable(value: "name", at: makeSourceMap("name", for: lexer)) } $0.it("can tokenize an unclosed block") { @@ -85,18 +90,28 @@ class LexerTests: XCTestCase { }}{% endif %}. 
""" + let lexer = Lexer(templateString: templateString) + let tokens = lexer.tokenize() - let lexer = Lexer(templateString: templateString) + try expect(tokens.count) == 5 + try expect(tokens[0]) == Token.text(value: "My name is ", at: makeSourceMap("My name is", for: lexer)) + try expect(tokens[1]) == Token.block(value: "if name and name", at: makeSourceMap("{%", for: lexer)) + try expect(tokens[2]) == Token.variable(value: "name", at: makeSourceMap("name", for: lexer, options: .backwards)) + try expect(tokens[3]) == Token.block(value: "endif", at: makeSourceMap("endif", for: lexer)) + try expect(tokens[4]) == Token.text(value: ".", at: makeSourceMap(".", for: lexer)) + } - let tokens = lexer.tokenize() + $0.it("can tokenize escape sequences") { + let templateString = "class Some {{ '{' }}{% if true %}{{ stuff }}{% endif %}" + let lexer = Lexer(templateString: templateString) + let tokens = lexer.tokenize() - try expect(tokens.count) == 5 - try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!))) - try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!))) - try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!))) - try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!))) - try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!))) - } + try expect(tokens.count) == 5 + try expect(tokens[0]) == Token.text(value: "class Some ", at: makeSourceMap("class Some ", for: lexer)) + try expect(tokens[1]) == Token.variable(value: "'{'", at: makeSourceMap("'{'", for: lexer)) + try expect(tokens[2]) == Token.block(value: "if true", at: makeSourceMap("if true", for: 
lexer)) + try expect(tokens[3]) == Token.variable(value: "stuff", at: makeSourceMap("stuff", for: lexer)) + try expect(tokens[4]) == Token.block(value: "endif", at: makeSourceMap("endif", for: lexer)) } } } From fff93f18dd7359e0774ad9fe3d1413fc247c7180 Mon Sep 17 00:00:00 2001 From: David Jennes Date: Mon, 24 Sep 2018 00:15:52 +0200 Subject: [PATCH 5/6] Add performance test (no reporting yet) --- Tests/StencilTests/LexerSpec.swift | 13 +- Tests/StencilTests/XCTestManifests.swift | 1 + Tests/StencilTests/fixtures/huge.html | 1131 ++++++++++++++++++++++ 3 files changed, 1144 insertions(+), 1 deletion(-) create mode 100644 Tests/StencilTests/fixtures/huge.html diff --git a/Tests/StencilTests/LexerSpec.swift b/Tests/StencilTests/LexerSpec.swift index c772d30d..ebc114e4 100644 --- a/Tests/StencilTests/LexerSpec.swift +++ b/Tests/StencilTests/LexerSpec.swift @@ -1,6 +1,7 @@ -import XCTest +import PathKit import Spectre @testable import Stencil +import XCTest class LexerTests: XCTestCase { func testLexer() { @@ -115,4 +116,14 @@ class LexerTests: XCTestCase { } } } + + func testPerformance() throws { + let path = Path(#file) + ".." 
+ "fixtures" + "huge.html" + let content: String = try path.read() + + measure { + let lexer = Lexer(templateString: content) + _ = lexer.tokenize() + } + } } diff --git a/Tests/StencilTests/XCTestManifests.swift b/Tests/StencilTests/XCTestManifests.swift index 84f6cce4..73cf026c 100644 --- a/Tests/StencilTests/XCTestManifests.swift +++ b/Tests/StencilTests/XCTestManifests.swift @@ -57,6 +57,7 @@ extension InheritenceTests { extension LexerTests { static let __allTests = [ ("testLexer", testLexer), + ("testPerformance", testPerformance), ] } diff --git a/Tests/StencilTests/fixtures/huge.html b/Tests/StencilTests/fixtures/huge.html new file mode 100644 index 00000000..c338ed69 --- /dev/null +++ b/Tests/StencilTests/fixtures/huge.html @@ -0,0 +1,1131 @@ + + + + {% block title %}Rond De Tafel + {% if sort == "new" %} + {{ block.super }} - Nieuwste spellen + {% elif sort == "upcoming" %} + {{ block.super }} - Binnenkort op de agenda + {% elif sort == "near-me" %} + {{ block.super }} - In mijn buurt + {% endif %} + {% endblock %} + + + + + + + + + + {% block opengraph %} + + + {% endblock %} + + + + + + + + {% block additional-head %}{% endblock %} + + + + + + + +
+
+ +
    + {% if sort == "new" %} +
  • + + Nieuw + +
  • + {% else %} +
  • + + Nieuw + +
  • + {% endif %} + {% if sort == "upcoming" %} +
  • + + Binnenkort + +
  • + {% else %} +
  • + + Binnenkort + +
  • + {% endif %} + {% if sort == "near-me" %} +
  • + + Dichtbij + +
  • + {% else %} +
  • + + Dichtbij + +
  • + {% endif %} +
+ + +
+ +{% if sort == "new" %} +

Nieuwste spellen

+{% elif sort == "upcoming" %} +

Binnenkort op de agenda

+{% elif sort == "near-me" %} +

In mijn buurt

+{% endif %} + +{% if base.user %} +
+ Spellen die je zelf organiseert worden niet getoond op deze pagina. + Deze spellen zijn te vinden in je persoonlijk menu, onder Mijn spellen. +
+{% endif %} + +{% if sort == "near-me" and not base.user.location %} +
+ {% if base.user %} + Om deze functie te activeren moet je een adres ingeven bij Instellingen. + {% else %} + Om deze functie te activeren moet je eerst aanmelden. + Daarna kan je een adres ingeven bij Instellingen. + {% endif %} +
+ +{% elif activities %} + {% for activity in activities %} + + + + + +
+
{{ activity.name }}
+

+ + + {{ activity.shortDate }} + + {{ activity.longDate }} + + om {{ activity.time }} +
+ + + {{ activity.time }}
+
+ {{ activity.host.name }}
+ {{ activity.location.city }} + {% if base.user.location %} + ({{ activity.distance }}km) + {% endif %} +

+
+
+ {% endfor %} + +{% else %} +

Geen spellen gepland.

+{% endif %} +
+ +
+ © 2018 - Rond De Tafel
+ Like ons op Facebook
+ Broncode beschikbaar op GitHub +
+ + + + + {% block additional-body %}{% endblock %} + + + + + + {% block title %}Rond De Tafel + {% if sort == "new" %} + {{ block.super }} - Nieuwste spellen + {% elif sort == "upcoming" %} + {{ block.super }} - Binnenkort op de agenda + {% elif sort == "near-me" %} + {{ block.super }} - In mijn buurt + {% endif %} + {% endblock %} + + + + + + + + + + {% block opengraph %} + + + {% endblock %} + + + + + + + + {% block additional-head %}{% endblock %} + + + + + + + +
+
+ +
    + {% if sort == "new" %} +
  • + + Nieuw + +
  • + {% else %} +
  • + + Nieuw + +
  • + {% endif %} + {% if sort == "upcoming" %} +
  • + + Binnenkort + +
  • + {% else %} +
  • + + Binnenkort + +
  • + {% endif %} + {% if sort == "near-me" %} +
  • + + Dichtbij + +
  • + {% else %} +
  • + + Dichtbij + +
  • + {% endif %} +
+ + +
+ +{% if sort == "new" %} +

Nieuwste spellen

+{% elif sort == "upcoming" %} +

Binnenkort op de agenda

+{% elif sort == "near-me" %} +

In mijn buurt

+{% endif %} + +{% if base.user %} +
+ Spellen die je zelf organiseert worden niet getoond op deze pagina. + Deze spellen zijn te vinden in je persoonlijk menu, onder Mijn spellen. +
+{% endif %} + +{% if sort == "near-me" and not base.user.location %} +
+ {% if base.user %} + Om deze functie te activeren moet je een adres ingeven bij Instellingen. + {% else %} + Om deze functie te activeren moet je eerst aanmelden. + Daarna kan je een adres ingeven bij Instellingen. + {% endif %} +
+ +{% elif activities %} + {% for activity in activities %} + + + + + +
+
{{ activity.name }}
+

+ + + {{ activity.shortDate }} + + {{ activity.longDate }} + + om {{ activity.time }} +
+ + + {{ activity.time }}
+
+ {{ activity.host.name }}
+ {{ activity.location.city }} + {% if base.user.location %} + ({{ activity.distance }}km) + {% endif %} +

+
+
+ {% endfor %} + +{% else %} +

Geen spellen gepland.

+{% endif %} +
+ +
+ © 2018 - Rond De Tafel
+ Like ons op Facebook
+ Broncode beschikbaar op GitHub +
+ + + + + {% block additional-body %}{% endblock %} + + + + + + {% block title %}Rond De Tafel + {% if sort == "new" %} + {{ block.super }} - Nieuwste spellen + {% elif sort == "upcoming" %} + {{ block.super }} - Binnenkort op de agenda + {% elif sort == "near-me" %} + {{ block.super }} - In mijn buurt + {% endif %} + {% endblock %} + + + + + + + + + + {% block opengraph %} + + + {% endblock %} + + + + + + + + {% block additional-head %}{% endblock %} + + + + + + + +
+
+ +
    + {% if sort == "new" %} +
  • + + Nieuw + +
  • + {% else %} +
  • + + Nieuw + +
  • + {% endif %} + {% if sort == "upcoming" %} +
  • + + Binnenkort + +
  • + {% else %} +
  • + + Binnenkort + +
  • + {% endif %} + {% if sort == "near-me" %} +
  • + + Dichtbij + +
  • + {% else %} +
  • + + Dichtbij + +
  • + {% endif %} +
+ + +
+ +{% if sort == "new" %} +

Nieuwste spellen

+{% elif sort == "upcoming" %} +

Binnenkort op de agenda

+{% elif sort == "near-me" %} +

In mijn buurt

+{% endif %} + +{% if base.user %} +
+ Spellen die je zelf organiseert worden niet getoond op deze pagina. + Deze spellen zijn te vinden in je persoonlijk menu, onder Mijn spellen. +
+{% endif %} + +{% if sort == "near-me" and not base.user.location %} +
+ {% if base.user %} + Om deze functie te activeren moet je een adres ingeven bij Instellingen. + {% else %} + Om deze functie te activeren moet je eerst aanmelden. + Daarna kan je een adres ingeven bij Instellingen. + {% endif %} +
+ +{% elif activities %} + {% for activity in activities %} + + + + + +
+
{{ activity.name }}
+

+ + + {{ activity.shortDate }} + + {{ activity.longDate }} + + om {{ activity.time }} +
+ + + {{ activity.time }}
+
+ {{ activity.host.name }}
+ {{ activity.location.city }} + {% if base.user.location %} + ({{ activity.distance }}km) + {% endif %} +

+
+
+ {% endfor %} + +{% else %} +

Geen spellen gepland.

+{% endif %} +
+ +
+ © 2018 - Rond De Tafel
+ Like ons op Facebook
+ Broncode beschikbaar op GitHub +
+ + + + + {% block additional-body %}{% endblock %} + + From cb4e51484660d17c92c0baa8e4c6f1e1b0f8e1a0 Mon Sep 17 00:00:00 2001 From: David Jennes Date: Wed, 26 Sep 2018 00:27:31 +0200 Subject: [PATCH 6/6] Code documentation --- Sources/Lexer.swift | 52 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/Sources/Lexer.swift b/Sources/Lexer.swift index 6781f0d6..015b7d77 100644 --- a/Sources/Lexer.swift +++ b/Sources/Lexer.swift @@ -7,7 +7,12 @@ struct Lexer { let templateString: String let lines: [Line] + /// The potential token start characters. In a template these appear after a + /// `{` character, for example `{{`, `{%`, `{#`, ... private static let tokenChars: [Unicode.Scalar] = ["{", "%", "#"] + + /// The token end characters, corresponding to their token start characters. + /// For example, a variable token starts with `{{` and ends with `}}` private static let tokenCharMap: [Unicode.Scalar: Unicode.Scalar] = [ "{": "}", "%": "%", @@ -24,6 +29,15 @@ struct Lexer { } } + /// Create a token that will be passed on to the parser, with the given + /// content and a range. The content will be tested to see if it's a + /// `variable`, a `block` or a `comment`, otherwise it'll default to a simple + /// `text` token. + /// + /// - Parameters: + /// - string: The content string of the token + /// - range: The range within the template content, used for smart + /// error reporting func createToken(string: String, at range: Range) -> Token { func strip() -> String { guard string.count > 4 else { return "" } @@ -55,7 +69,10 @@ struct Lexer { return .text(value: string, at: sourceMap) } - /// Returns an array of tokens from a given template string. + /// Transforms the template into a list of tokens, that will eventually be + /// passed on to the parser. + /// + /// - Returns: The list of tokens (see `createToken(string: at:)`). 
func tokenize() -> [Token] { var tokens: [Token] = [] @@ -78,6 +95,11 @@ struct Lexer { return tokens } + /// Finds the line matching the given range (for a token) + /// + /// - Parameter range: The range to search for. + /// - Returns: The content for that line, the line number and offset within + /// the line. func rangeLocation(_ range: Range) -> ContentLocation { guard let line = self.lines.first(where: { $0.range.contains(range.lowerBound) }) else { return ("", 0, 0) @@ -93,7 +115,9 @@ class Scanner { var content: String var range: Range + /// The start delimiter for a token. private static let tokenStartDelimiter: Unicode.Scalar = "{" + /// And the corresponding end delimiter for a token. private static let tokenEndDelimiter: Unicode.Scalar = "}" init(_ content: String) { @@ -106,6 +130,17 @@ class Scanner { return content.isEmpty } + /// Scans for the end of a token, with a specific ending character. If we're + /// searching for the end of a block token `%}`, this method receives a `%`. + /// The scanner will search for that `%` followed by a `}`. + /// + /// Note: if the end of a token is found, the `content` and `range` + /// properties are updated to reflect this. `content` will be set to what + /// remains of the template after the token. `range` will be set to the range + /// of the token within the template. + /// + /// - Parameter tokenChar: The token end character to search for. + /// - Returns: The content of a token, or "" if no token end was found. func scanForTokenEnd(_ tokenChar: Unicode.Scalar) -> String { var foundChar = false @@ -124,6 +159,21 @@ class Scanner { return "" } + /// Scans for the start of a token, with a list of potential starting + /// characters. To scan for the start of variables (`{{`), blocks (`{%`) and + /// comments (`{#`), this method receives the characters `{`, `%` and `#`. + /// The scanner will search for a `{`, followed by one of the search + /// characters. 
It will give the found character, and the content that came + /// before the token. + /// + /// Note: if the start of a token is found, the `content` and `range` + /// properties are updated to reflect this. `content` will be set to what + /// remains of the template starting with the token. `range` will be set to + /// the start of the token within the template. + /// + /// - Parameter tokenChars: List of token start characters to search for. + /// - Returns: The found token start character, together with the content + /// before the token, or nil if no token start was found. func scanForTokenStart(_ tokenChars: [Unicode.Scalar]) -> (Unicode.Scalar, String)? { var foundBrace = false