diff --git a/lib/lrama/parser.rb b/lib/lrama/parser.rb
index 03fbe064..6a35dba2 100644
--- a/lib/lrama/parser.rb
+++ b/lib/lrama/parser.rb
@@ -654,7 +654,7 @@ def token_to_str(t)
 module Lrama
   class Parser < Racc::Parser
-module_eval(<<'...end parser.y/module_eval...', 'parser.y', 418)
+module_eval(<<'...end parser.y/module_eval...', 'parser.y', 417)
   include Lrama::Report::Duration
@@ -1290,14 +1290,14 @@ def _reduce_12(val, _values, result)
 # reduce 13 omitted
-module_eval(<<'.,.,', 'parser.y', 55)
+module_eval(<<'.,.,', 'parser.y', 54)
   def _reduce_14(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 55)
+module_eval(<<'.,.,', 'parser.y', 54)
   def _reduce_15(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
@@ -1306,7 +1306,7 @@ def _reduce_15(val, _values, result)
 # reduce 16 omitted
-module_eval(<<'.,.,', 'parser.y', 27)
+module_eval(<<'.,.,', 'parser.y', 26)
   def _reduce_17(val, _values, result)
     @grammar.expect = val[1]
     result
@@ -1317,7 +1317,7 @@ def _reduce_17(val, _values, result)
 # reduce 19 omitted
-module_eval(<<'.,.,', 'parser.y', 32)
+module_eval(<<'.,.,', 'parser.y', 31)
   def _reduce_20(val, _values, result)
     val[1].each {|token|
       @grammar.lex_param = Grammar::Code::NoReferenceCode.new(type: :lex_param, token_code: token).token_code.s_value
@@ -1327,7 +1327,7 @@ def _reduce_20(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 38)
+module_eval(<<'.,.,', 'parser.y', 37)
   def _reduce_21(val, _values, result)
     val[1].each {|token|
       @grammar.parse_param = Grammar::Code::NoReferenceCode.new(type: :parse_param, token_code: token).token_code.s_value
@@ -1337,7 +1337,7 @@ def _reduce_21(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 44)
+module_eval(<<'.,.,', 'parser.y', 43)
   def _reduce_22(val, _values, result)
     @grammar.add_percent_code(id: val[1], code: val[2])
@@ -1345,7 +1345,7 @@ def _reduce_22(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 48)
+module_eval(<<'.,.,', 'parser.y', 47)
   def _reduce_23(val, _values, result)
     @grammar.initial_action = Grammar::Code::InitialActionCode.new(type: :initial_action, token_code: val[1])
@@ -1353,14 +1353,14 @@ def _reduce_23(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 50)
+module_eval(<<'.,.,', 'parser.y', 49)
   def _reduce_24(val, _values, result)
     @grammar.no_stdlib = true
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 51)
+module_eval(<<'.,.,', 'parser.y', 50)
   def _reduce_25(val, _values, result)
     @grammar.locations = true
     result
@@ -1369,21 +1369,21 @@ def _reduce_25(val, _values, result)
 # reduce 26 omitted
-module_eval(<<'.,.,', 'parser.y', 110)
+module_eval(<<'.,.,', 'parser.y', 109)
   def _reduce_27(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 110)
+module_eval(<<'.,.,', 'parser.y', 109)
   def _reduce_28(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 56)
+module_eval(<<'.,.,', 'parser.y', 55)
   def _reduce_29(val, _values, result)
     @grammar.set_union(
       Grammar::Code::NoReferenceCode.new(type: :union, token_code: val[1]),
@@ -1400,7 +1400,7 @@ def _reduce_29(val, _values, result)
 # reduce 32 omitted
-module_eval(<<'.,.,', 'parser.y', 66)
+module_eval(<<'.,.,', 'parser.y', 65)
   def _reduce_33(val, _values, result)
     @grammar.add_destructor(
       ident_or_tags: val[2],
@@ -1412,7 +1412,7 @@ def _reduce_33(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 74)
+module_eval(<<'.,.,', 'parser.y', 73)
   def _reduce_34(val, _values, result)
     @grammar.add_printer(
       ident_or_tags: val[2],
@@ -1424,7 +1424,7 @@ def _reduce_34(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 82)
+module_eval(<<'.,.,', 'parser.y', 81)
   def _reduce_35(val, _values, result)
     @grammar.add_error_token(
       ident_or_tags: val[2],
@@ -1436,7 +1436,7 @@ def _reduce_35(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 90)
+module_eval(<<'.,.,', 'parser.y', 89)
   def _reduce_36(val, _values, result)
     @grammar.after_shift = val[1]
@@ -1444,7 +1444,7 @@ def _reduce_36(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 94)
+module_eval(<<'.,.,', 'parser.y', 93)
   def _reduce_37(val, _values, result)
     @grammar.before_reduce = val[1]
@@ -1452,7 +1452,7 @@ def _reduce_37(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 98)
+module_eval(<<'.,.,', 'parser.y', 97)
   def _reduce_38(val, _values, result)
     @grammar.after_reduce = val[1]
@@ -1460,7 +1460,7 @@ def _reduce_38(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 102)
+module_eval(<<'.,.,', 'parser.y', 101)
   def _reduce_39(val, _values, result)
     @grammar.after_shift_error_token = val[1]
@@ -1468,7 +1468,7 @@ def _reduce_39(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 106)
+module_eval(<<'.,.,', 'parser.y', 105)
   def _reduce_40(val, _values, result)
     @grammar.after_pop_stack = val[1]
@@ -1478,7 +1478,7 @@ def _reduce_40(val, _values, result)
 # reduce 41 omitted
-module_eval(<<'.,.,', 'parser.y', 112)
+module_eval(<<'.,.,', 'parser.y', 111)
   def _reduce_42(val, _values, result)
     val[1].each {|hash|
       hash[:tokens].each {|id|
@@ -1490,7 +1490,7 @@ def _reduce_42(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 120)
+module_eval(<<'.,.,', 'parser.y', 119)
   def _reduce_43(val, _values, result)
     val[1].each {|hash|
       hash[:tokens].each {|id|
@@ -1504,7 +1504,7 @@ def _reduce_43(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 130)
+module_eval(<<'.,.,', 'parser.y', 129)
   def _reduce_44(val, _values, result)
     val[1].each {|hash|
      hash[:tokens].each {|id|
@@ -1518,7 +1518,7 @@ def _reduce_44(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 140)
+module_eval(<<'.,.,', 'parser.y', 139)
   def _reduce_45(val, _values, result)
     val[1].each {|hash|
      hash[:tokens].each {|id|
@@ -1532,7 +1532,7 @@ def _reduce_45(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 150)
+module_eval(<<'.,.,', 'parser.y', 149)
   def _reduce_46(val, _values, result)
     val[1].each {|hash|
      hash[:tokens].each {|id|
@@ -1546,21 +1546,21 @@ def _reduce_46(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 179)
+module_eval(<<'.,.,', 'parser.y', 178)
   def _reduce_47(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 179)
+module_eval(<<'.,.,', 'parser.y', 178)
   def _reduce_48(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 161)
+module_eval(<<'.,.,', 'parser.y', 160)
   def _reduce_49(val, _values, result)
     val[0].each {|token_declaration|
       @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true)
@@ -1570,7 +1570,7 @@ def _reduce_49(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 167)
+module_eval(<<'.,.,', 'parser.y', 166)
   def _reduce_50(val, _values, result)
     val[1].each {|token_declaration|
       @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true)
@@ -1580,7 +1580,7 @@ def _reduce_50(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 173)
+module_eval(<<'.,.,', 'parser.y', 172)
   def _reduce_51(val, _values, result)
     val[2].each {|token_declaration|
       @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true)
@@ -1594,7 +1594,7 @@ def _reduce_51(val, _values, result)
 # reduce 53 omitted
-module_eval(<<'.,.,', 'parser.y', 178)
+module_eval(<<'.,.,', 'parser.y', 177)
   def _reduce_54(val, _values, result)
     result = val
     result
@@ -1605,7 +1605,7 @@ def _reduce_54(val, _values, result)
 # reduce 56 omitted
-module_eval(<<'.,.,', 'parser.y', 182)
+module_eval(<<'.,.,', 'parser.y', 181)
   def _reduce_57(val, _values, result)
     rule = Grammar::ParameterizingRule::Rule.new(val[1].s_value, val[3], val[7], tag: val[5])
     @grammar.add_parameterizing_rule(rule)
@@ -1614,7 +1614,7 @@ def _reduce_57(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 188)
+module_eval(<<'.,.,', 'parser.y', 187)
   def _reduce_58(val, _values, result)
     rule = Grammar::ParameterizingRule::Rule.new(val[2].s_value, [], val[4], is_inline: true)
     @grammar.add_parameterizing_rule(rule)
@@ -1623,7 +1623,7 @@ def _reduce_58(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 193)
+module_eval(<<'.,.,', 'parser.y', 192)
   def _reduce_59(val, _values, result)
     rule = Grammar::ParameterizingRule::Rule.new(val[2].s_value, val[4], val[7], is_inline: true)
     @grammar.add_parameterizing_rule(rule)
@@ -1632,21 +1632,21 @@ def _reduce_59(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 197)
+module_eval(<<'.,.,', 'parser.y', 196)
   def _reduce_60(val, _values, result)
     result = [val[0]]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 198)
+module_eval(<<'.,.,', 'parser.y', 197)
   def _reduce_61(val, _values, result)
     result = val[0].append(val[2])
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 202)
+module_eval(<<'.,.,', 'parser.y', 201)
   def _reduce_62(val, _values, result)
     builder = val[0]
     result = [builder]
@@ -1655,7 +1655,7 @@ def _reduce_62(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 207)
+module_eval(<<'.,.,', 'parser.y', 206)
   def _reduce_63(val, _values, result)
     builder = val[2]
     result = val[0].append(builder)
@@ -1668,7 +1668,7 @@ def _reduce_63(val, _values, result)
 # reduce 65 omitted
-module_eval(<<'.,.,', 'parser.y', 213)
+module_eval(<<'.,.,', 'parser.y', 212)
   def _reduce_66(val, _values, result)
     reset_precs
     result = Grammar::ParameterizingRule::Rhs.new
@@ -1677,7 +1677,7 @@ def _reduce_66(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 218)
+module_eval(<<'.,.,', 'parser.y', 217)
   def _reduce_67(val, _values, result)
     token = val[1]
     token.alias_name = val[2]
@@ -1689,7 +1689,7 @@ def _reduce_67(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 226)
+module_eval(<<'.,.,', 'parser.y', 225)
   def _reduce_68(val, _values, result)
     builder = val[0]
     builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], location: @lexer.location, args: [val[1]])
@@ -1699,7 +1699,7 @@ def _reduce_68(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 232)
+module_eval(<<'.,.,', 'parser.y', 231)
   def _reduce_69(val, _values, result)
     builder = val[0]
     builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[3], lhs_tag: val[5])
@@ -1709,7 +1709,7 @@ def _reduce_69(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 238)
+module_eval(<<'.,.,', 'parser.y', 237)
   def _reduce_70(val, _values, result)
     user_code = val[1]
     user_code.alias_name = val[2]
@@ -1721,7 +1721,7 @@ def _reduce_70(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 246)
+module_eval(<<'.,.,', 'parser.y', 245)
   def _reduce_71(val, _values, result)
     sym = @grammar.find_symbol_by_id!(val[2])
     @prec_seen = true
@@ -1735,42 +1735,42 @@ def _reduce_71(val, _values, result)
 # reduce 72 omitted
-module_eval(<<'.,.,', 'parser.y', 254)
+module_eval(<<'.,.,', 'parser.y', 253)
   def _reduce_73(val, _values, result)
     result = val[0].s_value
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 261)
+module_eval(<<'.,.,', 'parser.y', 260)
   def _reduce_74(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 261)
+module_eval(<<'.,.,', 'parser.y', 260)
   def _reduce_75(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 256)
+module_eval(<<'.,.,', 'parser.y', 255)
   def _reduce_76(val, _values, result)
     result = [{tag: nil, tokens: val[0]}]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 257)
+module_eval(<<'.,.,', 'parser.y', 256)
   def _reduce_77(val, _values, result)
     result = [{tag: val[0], tokens: val[1]}]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 258)
+module_eval(<<'.,.,', 'parser.y', 257)
   def _reduce_78(val, _values, result)
     result = val[0].append({tag: val[1], tokens: val[2]})
     result
@@ -1781,7 +1781,7 @@ def _reduce_78(val, _values, result)
 # reduce 80 omitted
-module_eval(<<'.,.,', 'parser.y', 264)
+module_eval(<<'.,.,', 'parser.y', 263)
   def _reduce_81(val, _values, result)
     begin_c_declaration("}")
@@ -1789,7 +1789,7 @@ def _reduce_81(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 268)
+module_eval(<<'.,.,', 'parser.y', 267)
   def _reduce_82(val, _values, result)
     end_c_declaration
@@ -1797,7 +1797,7 @@ def _reduce_82(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 272)
+module_eval(<<'.,.,', 'parser.y', 271)
   def _reduce_83(val, _values, result)
     result = val[2]
@@ -1805,49 +1805,49 @@ def _reduce_83(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 280)
+module_eval(<<'.,.,', 'parser.y', 279)
   def _reduce_84(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 280)
+module_eval(<<'.,.,', 'parser.y', 279)
   def _reduce_85(val, _values, result)
     result = val[1] ? val[1].unshift(val[0]) : val
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 275)
+module_eval(<<'.,.,', 'parser.y', 274)
   def _reduce_86(val, _values, result)
     result = [{tag: nil, tokens: val[0]}]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 276)
+module_eval(<<'.,.,', 'parser.y', 275)
   def _reduce_87(val, _values, result)
     result = [{tag: val[0], tokens: val[1]}]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 277)
+module_eval(<<'.,.,', 'parser.y', 276)
   def _reduce_88(val, _values, result)
     result = val[0].append({tag: val[1], tokens: val[2]})
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 279)
+module_eval(<<'.,.,', 'parser.y', 278)
   def _reduce_89(val, _values, result)
     on_action_error("ident after %prec", val[0]) if @prec_seen
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 280)
+module_eval(<<'.,.,', 'parser.y', 279)
   def _reduce_90(val, _values, result)
     on_action_error("char after %prec", val[0]) if @prec_seen
     result
@@ -1862,7 +1862,7 @@ def _reduce_90(val, _values, result)
 # reduce 94 omitted
-module_eval(<<'.,.,', 'parser.y', 288)
+module_eval(<<'.,.,', 'parser.y', 287)
   def _reduce_95(val, _values, result)
     lhs = val[0]
     lhs.alias_name = val[1]
@@ -1876,7 +1876,7 @@ def _reduce_95(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 299)
+module_eval(<<'.,.,', 'parser.y', 298)
   def _reduce_96(val, _values, result)
     builder = val[0]
     if !builder.line
@@ -1888,7 +1888,7 @@ def _reduce_96(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 307)
+module_eval(<<'.,.,', 'parser.y', 306)
   def _reduce_97(val, _values, result)
     builder = val[2]
     if !builder.line
@@ -1900,7 +1900,7 @@ def _reduce_97(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 316)
+module_eval(<<'.,.,', 'parser.y', 315)
   def _reduce_98(val, _values, result)
     reset_precs
     result = @grammar.create_rule_builder(@rule_counter, @midrule_action_counter)
@@ -1909,7 +1909,7 @@ def _reduce_98(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 321)
+module_eval(<<'.,.,', 'parser.y', 320)
   def _reduce_99(val, _values, result)
     token = val[1]
     token.alias_name = val[2]
@@ -1921,7 +1921,7 @@ def _reduce_99(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 329)
+module_eval(<<'.,.,', 'parser.y', 328)
   def _reduce_100(val, _values, result)
     token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], alias_name: val[3], location: @lexer.location, args: [val[1]], lhs_tag: val[4])
     builder = val[0]
@@ -1933,7 +1933,7 @@ def _reduce_100(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 337)
+module_eval(<<'.,.,', 'parser.y', 336)
   def _reduce_101(val, _values, result)
     token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, alias_name: val[5], location: @lexer.location, args: val[3], lhs_tag: val[6])
     builder = val[0]
@@ -1945,7 +1945,7 @@ def _reduce_101(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 345)
+module_eval(<<'.,.,', 'parser.y', 344)
   def _reduce_102(val, _values, result)
     user_code = val[1]
     user_code.alias_name = val[2]
@@ -1958,7 +1958,7 @@ def _reduce_102(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 354)
+module_eval(<<'.,.,', 'parser.y', 353)
   def _reduce_103(val, _values, result)
     sym = @grammar.find_symbol_by_id!(val[2])
     @prec_seen = true
@@ -1970,56 +1970,56 @@ def _reduce_103(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 361)
+module_eval(<<'.,.,', 'parser.y', 360)
   def _reduce_104(val, _values, result)
     result = "option"
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 362)
+module_eval(<<'.,.,', 'parser.y', 361)
   def _reduce_105(val, _values, result)
     result = "nonempty_list"
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 363)
+module_eval(<<'.,.,', 'parser.y', 362)
   def _reduce_106(val, _values, result)
     result = "list"
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 365)
+module_eval(<<'.,.,', 'parser.y', 364)
   def _reduce_107(val, _values, result)
     result = [val[0]]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 366)
+module_eval(<<'.,.,', 'parser.y', 365)
   def _reduce_108(val, _values, result)
     result = val[0].append(val[2])
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 367)
+module_eval(<<'.,.,', 'parser.y', 366)
   def _reduce_109(val, _values, result)
     result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[0])]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 368)
+module_eval(<<'.,.,', 'parser.y', 367)
   def _reduce_110(val, _values, result)
     result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[0].s_value, location: @lexer.location, args: val[2])]
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 372)
+module_eval(<<'.,.,', 'parser.y', 371)
   def _reduce_111(val, _values, result)
     if @prec_seen
       on_action_error("multiple User_code after %prec", val[0]) if @code_after_prec
@@ -2031,7 +2031,7 @@ def _reduce_111(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 380)
+module_eval(<<'.,.,', 'parser.y', 379)
   def _reduce_112(val, _values, result)
     end_c_declaration
@@ -2039,7 +2039,7 @@ def _reduce_112(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 384)
+module_eval(<<'.,.,', 'parser.y', 383)
   def _reduce_113(val, _values, result)
     result = val[2]
@@ -2047,14 +2047,14 @@ def _reduce_113(val, _values, result)
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 387)
+module_eval(<<'.,.,', 'parser.y', 386)
   def _reduce_114(val, _values, result)
     result = val[1].s_value
     result
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 391)
+module_eval(<<'.,.,', 'parser.y', 390)
   def _reduce_115(val, _values, result)
     begin_c_declaration('\Z')
     @grammar.epilogue_first_lineno = @lexer.line + 1
@@ -2063,7 +2063,7 @@ def _reduce_115(val, _values, result)
   end
 .,.,
-module_eval(<<'.,.,', 'parser.y', 396)
+module_eval(<<'.,.,', 'parser.y', 395)
   def _reduce_116(val, _values, result)
     end_c_declaration
     @grammar.epilogue = val[2].s_value
@@ -2090,7 +2090,7 @@ def _reduce_116(val, _values, result)
 # reduce 125 omitted
-module_eval(<<'.,.,', 'parser.y', 413)
+module_eval(<<'.,.,', 'parser.y', 412)
   def _reduce_126(val, _values, result)
     result = Lrama::Lexer::Token::Ident.new(s_value: val[0])
     result
diff --git a/parser.y b/parser.y
index d6680db9..52603a72 100644
--- a/parser.y
+++ b/parser.y
@@ -23,7 +23,6 @@ rule
       }
   | "%require" STRING
-
 
   bison_declaration: grammar_declaration
     | "%expect" INTEGER { @grammar.expect = val[1] }
     | "%define" variable value