Skip to content

Commit

Permalink
Implement parameterizing rules for list
Browse files · Browse the repository at this point in the history
  • Branch information
ydah committed Nov 4, 2023
1 parent 3f624f7 commit cc8877d
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 1 deletion.
7 changes: 7 additions & 0 deletions lib/lrama/grammar.rb
Original file line number Diff line number Diff line change
Expand Up @@ -578,6 +578,13 @@ def normalize_rules
@rules << Rule.new(id: @rules.count, lhs: lhs, rhs: [nonempty_list_token], code: c, precedence_sym: precedence_sym, lineno: lineno)
@rules << Rule.new(id: @rules.count, lhs: nonempty_list_token, rhs: [token], code: c, precedence_sym: precedence_sym, lineno: lineno)
@rules << Rule.new(id: @rules.count, lhs: nonempty_list_token, rhs: [nonempty_list_token, token], code: c, precedence_sym: precedence_sym, lineno: lineno)
elsif rhs2.any? {|r| r.type == Token::List }
list_token = Token.new(type: Token::Ident, s_value: "list_#{rhs2[0].s_value}")
token = Token.new(type: Token::Ident, s_value: rhs2[0].s_value)
add_term(id: list_token)
@rules << Rule.new(id: @rules.count, lhs: lhs, rhs: [list_token], code: c, precedence_sym: precedence_sym, lineno: lineno)
@rules << Rule.new(id: @rules.count, lhs: list_token, rhs: [], code: c, precedence_sym: precedence_sym, lineno: lineno)
@rules << Rule.new(id: @rules.count, lhs: list_token, rhs: [list_token, token], code: c, precedence_sym: precedence_sym, lineno: lineno)
else
@rules << Rule.new(id: @rules.count, lhs: lhs, rhs: rhs2, code: c, precedence_sym: precedence_sym, lineno: lineno)
end
Expand Down
2 changes: 1 addition & 1 deletion lib/lrama/lexer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def lex_token
return [@scanner.matched, @scanner.matched]
when @scanner.scan(/#{PERCENT_TOKENS.join('|')}/)
return [@scanner.matched, @scanner.matched]
when @scanner.scan(/[\?\+]/)
when @scanner.scan(/[\?\+\*]/)
return [@scanner.matched, @scanner.matched]
when @scanner.scan(/<\w+>/)
return [:TAG, build_token(type: Token::Tag, s_value: @scanner.matched)]
Expand Down
1 change: 1 addition & 0 deletions lib/lrama/lexer/token.rb
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def self.define_type(name)
define_type(:Char) # '+'
define_type(:Option) # actual?
define_type(:Nonempty_list) # actual+
define_type(:List) # actual*
end
end
end
5 changes: 5 additions & 0 deletions parser.y
Original file line number Diff line number Diff line change
Expand Up @@ -304,6 +304,11 @@ rule
token = Token.new(type: Token::Nonempty_list, s_value: val[1])
result = val[0].append(token)
}
| rhs "*"
{
token = Token.new(type: Token::List, s_value: val[1])
result = val[0].append(token)
}
| rhs "{"
{
if @prec_seen
Expand Down

0 comments on commit cc8877d

Please sign in to comment.